@solana/codecs-data-structures - npm package version comparison

Comparing version 2.0.0-experimental.735654a to 2.0.0-experimental.745437f
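The bulk of this diff is a refactor of how codecs are built. In 2.0.0-experimental.735654a each codec was a plain object carrying `encode`, `decode`, `description`, `fixedSize` and `maxSize`; in 2.0.0-experimental.745437f codecs are produced by the `createEncoder` / `createDecoder` primitives from `@solana/codecs-core`, where encoders implement `write(value, bytes, offset)` plus either `fixedSize` or `getSizeFromValue`, decoders implement `read(bytes, offset)`, and `combineCodec` no longer accepts a description argument. The sketch below is not taken from the package; it only illustrates the shape of the new API as it appears in this diff, and the `u8Bool` names are made up for the example.

```js
// Illustrative sketch of the new codec shape (assumed usage, not package code).
import { combineCodec, createDecoder, createEncoder } from '@solana/codecs-core';

// An encoder now declares how to write into a pre-allocated byte array and how
// big the result is (`fixedSize` here, or `getSizeFromValue` for variable-size
// codecs). The `encode` method is derived by `createEncoder`.
const u8BoolEncoder = createEncoder({
  fixedSize: 1,
  write: (value, bytes, offset) => {
    bytes[offset] = value ? 1 : 0;
    return offset + 1;
  },
});

// A decoder declares `read(bytes, offset) -> [value, newOffset]`; a `decode`
// method returning just the value is derived by `createDecoder`.
const u8BoolDecoder = createDecoder({
  fixedSize: 1,
  read: (bytes, offset) => [bytes[offset] === 1, offset + 1],
});

// `combineCodec` now only checks that the two sides agree on size.
const u8BoolCodec = combineCodec(u8BoolEncoder, u8BoolDecoder);

u8BoolCodec.encode(true);                 // Uint8Array [1]
u8BoolCodec.decode(new Uint8Array([0]));  // false
```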


dist/index.browser.js

@@ -1,2 +0,2 @@

import { mergeBytes, combineCodec, assertByteArrayHasEnoughBytesForCodec, assertFixedSizeCodec, assertByteArrayIsNotEmptyForCodec, fixEncoder, fixDecoder, fixBytes } from '@solana/codecs-core';
import { assertIsFixedSize, createEncoder, getEncodedSize, createDecoder, combineCodec, assertByteArrayHasEnoughBytesForCodec, mapEncoder, mapDecoder, fixEncoder, fixDecoder, assertByteArrayIsNotEmptyForCodec, isFixedSize } from '@solana/codecs-core';
import { getU32Encoder, getU32Decoder, getU8Encoder, getU8Decoder } from '@solana/codecs-numbers';

@@ -6,3 +6,8 @@

// src/utils.ts
// src/assertions.ts
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
if (expected !== actual) {
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
}
}
function maxCodecSizes(sizes) {

@@ -17,90 +22,64 @@ return sizes.reduce(

}
// src/array-like-codec-size.ts
function decodeArrayLikeCodecSize(size, childrenSizes, bytes, offset) {
if (typeof size === "number") {
return [size, offset];
}
if (typeof size === "object") {
return size.decode(bytes, offset);
}
if (size === "remainder") {
const childrenSize = sumCodecSizes(childrenSizes);
if (childrenSize === null) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
}
const remainder = bytes.slice(offset).length;
if (remainder % childrenSize !== 0) {
throw new Error(
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${childrenSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${childrenSize} should be equal to zero.`
);
}
return [remainder / childrenSize, offset];
}
throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
function getFixedSize(codec) {
return isFixedSize(codec) ? codec.fixedSize : null;
}
function getArrayLikeCodecSizeDescription(size) {
return typeof size === "object" ? size.description : `${size}`;
function getMaxSize(codec) {
return isFixedSize(codec) ? codec.fixedSize : codec.maxSize ?? null;
}
function getArrayLikeCodecSizeFromChildren(size, childrenSizes) {
if (typeof size !== "number")
return null;
if (size === 0)
return 0;
const childrenSize = sumCodecSizes(childrenSizes);
return childrenSize === null ? null : childrenSize * size;
}
function getArrayLikeCodecSizePrefix(size, realSize) {
return typeof size === "object" ? size.encode(realSize) : new Uint8Array();
}
// src/assertions.ts
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
if (expected !== actual) {
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
}
}
// src/array.ts
function arrayCodecHelper(item, size, description) {
if (size === "remainder" && item.fixedSize === null) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
}
return {
description: description ?? `array(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
};
}
function getArrayEncoder(item, config = {}) {
const size = config.size ?? getU32Encoder();
return {
...arrayCodecHelper(item, size, config.description),
encode: (value) => {
if (size === "remainder") {
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
}
const fixedSize = computeArrayLikeCodecSize(size, getFixedSize(item));
const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
return createEncoder({
...fixedSize !== null ? { fixedSize } : {
getSizeFromValue: (array) => {
const prefixSize = typeof size === "object" ? getEncodedSize(array.length, size) : 0;
return prefixSize + [...array].reduce((all, value) => all + getEncodedSize(value, item), 0);
},
maxSize
},
write: (array, bytes, offset) => {
if (typeof size === "number") {
assertValidNumberOfItemsForCodec("array", size, value.length);
assertValidNumberOfItemsForCodec("array", size, array.length);
}
return mergeBytes([getArrayLikeCodecSizePrefix(size, value.length), ...value.map((v) => item.encode(v))]);
if (typeof size === "object") {
offset = size.write(array.length, bytes, offset);
}
array.forEach((value) => {
offset = item.write(value, bytes, offset);
});
return offset;
}
};
});
}
function getArrayDecoder(item, config = {}) {
const size = config.size ?? getU32Decoder();
return {
...arrayCodecHelper(item, size, config.description),
decode: (bytes, offset = 0) => {
if (size === "remainder") {
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
}
const itemSize = getFixedSize(item);
const fixedSize = computeArrayLikeCodecSize(size, itemSize);
const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
return createDecoder({
...fixedSize !== null ? { fixedSize } : { maxSize },
read: (bytes, offset) => {
const array = [];
if (typeof size === "object" && bytes.slice(offset).length === 0) {
return [[], offset];
return [array, offset];
}
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
const [resolvedSize, newOffset] = readArrayLikeCodecSize(size, itemSize, bytes, offset);
offset = newOffset;
const values = [];
for (let i = 0; i < resolvedSize; i += 1) {
const [value, newOffset2] = item.decode(bytes, offset);
values.push(value);
const [value, newOffset2] = item.read(bytes, offset);
offset = newOffset2;
array.push(value);
}
return [values, offset];
return [array, offset];
}
};
});
}

@@ -110,10 +89,37 @@ function getArrayCodec(item, config = {}) {

}
var getBitArrayEncoder = (size, config = {}) => {
function readArrayLikeCodecSize(size, itemSize, bytes, offset) {
if (typeof size === "number") {
return [size, offset];
}
if (typeof size === "object") {
return size.read(bytes, offset);
}
if (size === "remainder") {
if (itemSize === null) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
}
const remainder = Math.max(0, bytes.length - offset);
if (remainder % itemSize !== 0) {
throw new Error(
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${itemSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${itemSize} should be equal to zero.`
);
}
return [remainder / itemSize, offset];
}
throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
}
function computeArrayLikeCodecSize(size, itemSize) {
if (typeof size !== "number")
return null;
if (size === 0)
return 0;
return itemSize === null ? null : itemSize * size;
}
function getBitArrayEncoder(size, config = {}) {
const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
const backward = parsedConfig.backward ?? false;
const backwardSuffix = backward ? "; backward" : "";
return {
description: parsedConfig.description ?? `bitArray(${size}${backwardSuffix})`,
encode(value) {
const bytes = [];
return createEncoder({
fixedSize: size,
write(value, bytes, offset) {
const bytesToAdd = [];
for (let i = 0; i < size; i += 1) {

@@ -126,19 +132,18 @@ let byte = 0;

if (backward) {
bytes.unshift(byte);
bytesToAdd.unshift(byte);
} else {
bytes.push(byte);
bytesToAdd.push(byte);
}
}
return new Uint8Array(bytes);
},
fixedSize: size,
maxSize: size
};
};
var getBitArrayDecoder = (size, config = {}) => {
bytes.set(bytesToAdd, offset);
return size;
}
});
}
function getBitArrayDecoder(size, config = {}) {
const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
const backward = parsedConfig.backward ?? false;
const backwardSuffix = backward ? "; backward" : "";
return {
decode(bytes, offset = 0) {
return createDecoder({
fixedSize: size,
read(bytes, offset) {
assertByteArrayHasEnoughBytesForCodec("bitArray", size, bytes, offset);

@@ -160,32 +165,17 @@ const booleans = [];

return [booleans, offset + size];
},
description: parsedConfig.description ?? `bitArray(${size}${backwardSuffix})`,
fixedSize: size,
maxSize: size
};
};
var getBitArrayCodec = (size, config = {}) => combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
}
});
}
function getBitArrayCodec(size, config = {}) {
return combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
}
function getBooleanEncoder(config = {}) {
const size = config.size ?? getU8Encoder();
assertFixedSizeCodec(size, "Codec [bool] requires a fixed size.");
return {
description: config.description ?? `bool(${size.description})`,
encode: (value) => size.encode(value ? 1 : 0),
fixedSize: size.fixedSize,
maxSize: size.fixedSize
};
assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
return mapEncoder(size, (value) => value ? 1 : 0);
}
function getBooleanDecoder(config = {}) {
const size = config.size ?? getU8Decoder();
assertFixedSizeCodec(size, "Codec [bool] requires a fixed size.");
return {
decode: (bytes, offset = 0) => {
assertByteArrayIsNotEmptyForCodec("bool", bytes, offset);
const [value, vOffset] = size.decode(bytes, offset);
return [value === 1, vOffset];
},
description: config.description ?? `bool(${size.description})`,
fixedSize: size.fixedSize,
maxSize: size.fixedSize
};
assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
return mapDecoder(size, (value) => Number(value) === 1);
}

@@ -197,10 +187,9 @@ function getBooleanCodec(config = {}) {

const size = config.size ?? "variable";
const sizeDescription = typeof size === "object" ? size.description : `${size}`;
const description = config.description ?? `bytes(${sizeDescription})`;
const byteEncoder = {
description,
encode: (value) => value,
fixedSize: null,
maxSize: null
};
const byteEncoder = createEncoder({
getSizeFromValue: (value) => value.length,
write: (value, bytes, offset) => {
bytes.set(value, offset);
return offset + value.length;
}
});
if (size === "variable") {

@@ -210,26 +199,20 @@ return byteEncoder;

if (typeof size === "number") {
return fixEncoder(byteEncoder, size, description);
return fixEncoder(byteEncoder, size);
}
return {
...byteEncoder,
encode: (value) => {
const contentBytes = byteEncoder.encode(value);
const lengthBytes = size.encode(contentBytes.length);
return mergeBytes([lengthBytes, contentBytes]);
return createEncoder({
getSizeFromValue: (value) => getEncodedSize(value.length, size) + value.length,
write: (value, bytes, offset) => {
offset = size.write(value.length, bytes, offset);
return byteEncoder.write(value, bytes, offset);
}
};
});
}
function getBytesDecoder(config = {}) {
const size = config.size ?? "variable";
const sizeDescription = typeof size === "object" ? size.description : `${size}`;
const description = config.description ?? `bytes(${sizeDescription})`;
const byteDecoder = {
decode: (bytes, offset = 0) => {
const byteDecoder = createDecoder({
read: (bytes, offset) => {
const slice = bytes.slice(offset);
return [slice, offset + slice.length];
},
description,
fixedSize: null,
maxSize: null
};
}
});
if (size === "variable") {

@@ -239,9 +222,8 @@ return byteDecoder;

if (typeof size === "number") {
return fixDecoder(byteDecoder, size, description);
return fixDecoder(byteDecoder, size);
}
return {
...byteDecoder,
decode: (bytes, offset = 0) => {
return createDecoder({
read: (bytes, offset) => {
assertByteArrayIsNotEmptyForCodec("bytes", bytes, offset);
const [lengthBigInt, lengthOffset] = size.decode(bytes, offset);
const [lengthBigInt, lengthOffset] = size.read(bytes, offset);
const length = Number(lengthBigInt);

@@ -251,7 +233,7 @@ offset = lengthOffset;

assertByteArrayHasEnoughBytesForCodec("bytes", length, contentBytes);
const [value, contentOffset] = byteDecoder.decode(contentBytes);
const [value, contentOffset] = byteDecoder.read(contentBytes, 0);
offset += contentOffset;
return [value, offset];
}
};
});
}

@@ -261,38 +243,30 @@ function getBytesCodec(config = {}) {

}
function dataEnumCodecHelper(variants, prefix, description) {
const fieldDescriptions = variants.map(([name, codec]) => `${String(name)}${codec ? `: ${codec.description}` : ""}`).join(", ");
const allVariantHaveTheSameFixedSize = variants.every((one, _i, all) => one[1].fixedSize === all[0][1].fixedSize);
const fixedVariantSize = allVariantHaveTheSameFixedSize ? variants[0][1].fixedSize : null;
const maxVariantSize = maxCodecSizes(variants.map(([, field]) => field.maxSize));
return {
description: description ?? `dataEnum(${fieldDescriptions}; ${prefix.description})`,
fixedSize: variants.length === 0 ? prefix.fixedSize : sumCodecSizes([prefix.fixedSize, fixedVariantSize]),
maxSize: variants.length === 0 ? prefix.maxSize : sumCodecSizes([prefix.maxSize, maxVariantSize])
};
}
function getDataEnumEncoder(variants, config = {}) {
const prefix = config.size ?? getU8Encoder();
return {
...dataEnumCodecHelper(variants, prefix, config.description),
encode: (variant) => {
const discriminator = variants.findIndex(([key]) => variant.__kind === key);
if (discriminator < 0) {
throw new Error(
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
);
}
const variantPrefix = prefix.encode(discriminator);
const variantSerializer = variants[discriminator][1];
const variantBytes = variantSerializer.encode(variant);
return mergeBytes([variantPrefix, variantBytes]);
const fixedSize = getDataEnumFixedSize(variants, prefix);
return createEncoder({
...fixedSize !== null ? { fixedSize } : {
getSizeFromValue: (variant) => {
const discriminator = getVariantDiscriminator(variants, variant);
const variantEncoder = variants[discriminator][1];
return getEncodedSize(discriminator, prefix) + getEncodedSize(variant, variantEncoder);
},
maxSize: getDataEnumMaxSize(variants, prefix)
},
write: (variant, bytes, offset) => {
const discriminator = getVariantDiscriminator(variants, variant);
offset = prefix.write(discriminator, bytes, offset);
const variantEncoder = variants[discriminator][1];
return variantEncoder.write(variant, bytes, offset);
}
};
});
}
function getDataEnumDecoder(variants, config = {}) {
const prefix = config.size ?? getU8Decoder();
return {
...dataEnumCodecHelper(variants, prefix, config.description),
decode: (bytes, offset = 0) => {
const fixedSize = getDataEnumFixedSize(variants, prefix);
return createDecoder({
...fixedSize !== null ? { fixedSize } : { maxSize: getDataEnumMaxSize(variants, prefix) },
read: (bytes, offset) => {
assertByteArrayIsNotEmptyForCodec("dataEnum", bytes, offset);
const [discriminator, dOffset] = prefix.decode(bytes, offset);
const [discriminator, dOffset] = prefix.read(bytes, offset);
offset = dOffset;

@@ -305,7 +279,7 @@ const variantField = variants[Number(discriminator)] ?? null;

}
const [variant, vOffset] = variantField[1].decode(bytes, offset);
const [variant, vOffset] = variantField[1].read(bytes, offset);
offset = vOffset;
return [{ __kind: variantField[0], ...variant ?? {} }, offset];
}
};
});
}

@@ -315,82 +289,114 @@ function getDataEnumCodec(variants, config = {}) {

}
function mapCodecHelper(key, value, size, description) {
if (size === "remainder" && (key.fixedSize === null || value.fixedSize === null)) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
function getDataEnumFixedSize(variants, prefix) {
if (variants.length === 0)
return isFixedSize(prefix) ? prefix.fixedSize : null;
if (!isFixedSize(variants[0][1]))
return null;
const variantSize = variants[0][1].fixedSize;
const sameSizedVariants = variants.every(
(variant) => isFixedSize(variant[1]) && variant[1].fixedSize === variantSize
);
if (!sameSizedVariants)
return null;
return isFixedSize(prefix) ? prefix.fixedSize + variantSize : null;
}
function getDataEnumMaxSize(variants, prefix) {
const maxVariantSize = maxCodecSizes(variants.map(([, codec]) => getMaxSize(codec)));
return sumCodecSizes([getMaxSize(prefix), maxVariantSize]) ?? void 0;
}
function getVariantDiscriminator(variants, variant) {
const discriminator = variants.findIndex(([key]) => variant.__kind === key);
if (discriminator < 0) {
throw new Error(
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
);
}
return {
description: description ?? `map(${key.description}, ${value.description}; ${getArrayLikeCodecSizeDescription(size)})`,
fixedSize: getArrayLikeCodecSizeFromChildren(size, [key.fixedSize, value.fixedSize]),
maxSize: getArrayLikeCodecSizeFromChildren(size, [key.maxSize, value.maxSize])
};
return discriminator;
}
function getMapEncoder(key, value, config = {}) {
const size = config.size ?? getU32Encoder();
return {
...mapCodecHelper(key, value, size, config.description),
encode: (map) => {
if (typeof size === "number") {
assertValidNumberOfItemsForCodec("map", size, map.size);
}
const itemBytes = Array.from(map, ([k, v]) => mergeBytes([key.encode(k), value.encode(v)]));
return mergeBytes([getArrayLikeCodecSizePrefix(size, map.size), ...itemBytes]);
function getTupleEncoder(items) {
const fixedSize = sumCodecSizes(items.map(getFixedSize));
const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
return createEncoder({
...fixedSize === null ? {
getSizeFromValue: (value) => items.map((item, index) => getEncodedSize(value[index], item)).reduce((all, one) => all + one, 0),
maxSize
} : { fixedSize },
write: (value, bytes, offset) => {
assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
items.forEach((item, index) => {
offset = item.write(value[index], bytes, offset);
});
return offset;
}
};
});
}
function getMapDecoder(key, value, config = {}) {
const size = config.size ?? getU32Decoder();
return {
...mapCodecHelper(key, value, size, config.description),
decode: (bytes, offset = 0) => {
const map = /* @__PURE__ */ new Map();
if (typeof size === "object" && bytes.slice(offset).length === 0) {
return [map, offset];
}
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(
size,
[key.fixedSize, value.fixedSize],
bytes,
offset
);
offset = newOffset;
for (let i = 0; i < resolvedSize; i += 1) {
const [decodedKey, kOffset] = key.decode(bytes, offset);
offset = kOffset;
const [decodedValue, vOffset] = value.decode(bytes, offset);
offset = vOffset;
map.set(decodedKey, decodedValue);
}
return [map, offset];
function getTupleDecoder(items) {
const fixedSize = sumCodecSizes(items.map(getFixedSize));
const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
return createDecoder({
...fixedSize === null ? { maxSize } : { fixedSize },
read: (bytes, offset) => {
const values = [];
items.forEach((item) => {
const [newValue, newOffset] = item.read(bytes, offset);
values.push(newValue);
offset = newOffset;
});
return [values, offset];
}
};
});
}
function getTupleCodec(items) {
return combineCodec(
getTupleEncoder(items),
getTupleDecoder(items)
);
}
// src/map.ts
function getMapEncoder(key, value, config = {}) {
return mapEncoder(
getArrayEncoder(getTupleEncoder([key, value]), config),
(map) => [...map.entries()]
);
}
function getMapDecoder(key, value, config = {}) {
return mapDecoder(
getArrayDecoder(getTupleDecoder([key, value]), config),
(entries) => new Map(entries)
);
}
function getMapCodec(key, value, config = {}) {
return combineCodec(getMapEncoder(key, value, config), getMapDecoder(key, value, config));
}
function nullableCodecHelper(item, prefix, fixed, description) {
let descriptionSuffix = `; ${prefix.description}`;
let fixedSize = item.fixedSize === 0 ? prefix.fixedSize : null;
if (fixed) {
assertFixedSizeCodec(item, "Fixed nullables can only be used with fixed-size codecs.");
assertFixedSizeCodec(prefix, "Fixed nullables can only be used with fixed-size prefix.");
descriptionSuffix += "; fixed";
fixedSize = prefix.fixedSize + item.fixedSize;
}
return {
description: description ?? `nullable(${item.description + descriptionSuffix})`,
fixedSize,
maxSize: sumCodecSizes([prefix.maxSize, item.maxSize])
};
}
function getNullableEncoder(item, config = {}) {
const prefix = config.prefix ?? getU8Encoder();
const fixed = config.fixed ?? false;
return {
...nullableCodecHelper(item, prefix, fixed, config.description),
encode: (option) => {
const prefixByte = prefix.encode(Number(option !== null));
let itemBytes = option !== null ? item.encode(option) : new Uint8Array();
itemBytes = fixed ? fixBytes(itemBytes, item.fixedSize) : itemBytes;
return mergeBytes([prefixByte, itemBytes]);
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
if (fixed || isZeroSizeItem) {
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
const fixedSize = prefix.fixedSize + item.fixedSize;
return createEncoder({
fixedSize,
write: (option, bytes, offset) => {
const prefixOffset = prefix.write(Number(option !== null), bytes, offset);
if (option !== null) {
item.write(option, bytes, prefixOffset);
}
return offset + fixedSize;
}
});
}
return createEncoder({
getSizeFromValue: (option) => getEncodedSize(Number(option !== null), prefix) + (option !== null ? getEncodedSize(option, item) : 0),
maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0,
write: (option, bytes, offset) => {
offset = prefix.write(Number(option !== null), bytes, offset);
if (option !== null) {
offset = item.write(option, bytes, offset);
}
return offset;
}
};
});
}

@@ -400,28 +406,67 @@ function getNullableDecoder(item, config = {}) {

const fixed = config.fixed ?? false;
return {
...nullableCodecHelper(item, prefix, fixed, config.description),
decode: (bytes, offset = 0) => {
let fixedSize = null;
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
if (fixed || isZeroSizeItem) {
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
fixedSize = prefix.fixedSize + item.fixedSize;
}
return createDecoder({
...fixedSize === null ? { maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0 } : { fixedSize },
read: (bytes, offset) => {
if (bytes.length - offset <= 0) {
return [null, offset];
}
const fixedOffset = offset + (prefix.fixedSize ?? 0) + (item.fixedSize ?? 0);
const [isSome, prefixOffset] = prefix.decode(bytes, offset);
offset = prefixOffset;
const [isSome, prefixOffset] = prefix.read(bytes, offset);
if (isSome === 0) {
return [null, fixed ? fixedOffset : offset];
return [null, fixedSize !== null ? offset + fixedSize : prefixOffset];
}
const [value, newOffset] = item.decode(bytes, offset);
offset = newOffset;
return [value, fixed ? fixedOffset : offset];
const [value, newOffset] = item.read(bytes, prefixOffset);
return [value, fixedSize !== null ? offset + fixedSize : newOffset];
}
};
});
}
function getNullableCodec(item, config = {}) {
return combineCodec(getNullableEncoder(item, config), getNullableDecoder(item, config));
const configCast = config;
return combineCodec(getNullableEncoder(item, configCast), getNullableDecoder(item, configCast));
}
function scalarEnumCoderHelper(constructor, prefix, description) {
function getScalarEnumEncoder(constructor, config = {}) {
const prefix = config.size ?? getU8Encoder();
const { minRange, maxRange, stringValues, enumKeys, enumValues } = getScalarEnumStats(constructor);
return mapEncoder(prefix, (value) => {
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
const isInvalidString = typeof value === "string" && !stringValues.includes(value);
if (isInvalidNumber || isInvalidString) {
throw new Error(
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
);
}
if (typeof value === "number")
return value;
const valueIndex = enumValues.indexOf(value);
if (valueIndex >= 0)
return valueIndex;
return enumKeys.indexOf(value);
});
}
function getScalarEnumDecoder(constructor, config = {}) {
const prefix = config.size ?? getU8Decoder();
const { minRange, maxRange, isNumericEnum, enumValues } = getScalarEnumStats(constructor);
return mapDecoder(prefix, (value) => {
const valueAsNumber = Number(value);
if (valueAsNumber < minRange || valueAsNumber > maxRange) {
throw new Error(
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
);
}
return isNumericEnum ? valueAsNumber : enumValues[valueAsNumber];
});
}
function getScalarEnumCodec(constructor, config = {}) {
return combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config));
}
function getScalarEnumStats(constructor) {
const enumKeys = Object.keys(constructor);
const enumValues = Object.values(constructor);
const isNumericEnum = enumValues.some((v) => typeof v === "number");
const valueDescriptions = enumValues.filter((v) => typeof v === "string").join(", ");
const minRange = 0;

@@ -431,9 +476,6 @@ const maxRange = isNumericEnum ? enumValues.length / 2 - 1 : enumValues.length - 1;

return {
description: description ?? `enum(${valueDescriptions}; ${prefix.description})`,
enumKeys,
enumValues,
fixedSize: prefix.fixedSize,
isNumericEnum,
maxRange,
maxSize: prefix.maxSize,
minRange,

@@ -443,96 +485,7 @@ stringValues

}
function getScalarEnumEncoder(constructor, config = {}) {
const prefix = config.size ?? getU8Encoder();
const { description, fixedSize, maxSize, minRange, maxRange, stringValues, enumKeys, enumValues } = scalarEnumCoderHelper(constructor, prefix, config.description);
return {
description,
encode: (value) => {
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
const isInvalidString = typeof value === "string" && !stringValues.includes(value);
if (isInvalidNumber || isInvalidString) {
throw new Error(
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
);
}
if (typeof value === "number")
return prefix.encode(value);
const valueIndex = enumValues.indexOf(value);
if (valueIndex >= 0)
return prefix.encode(valueIndex);
return prefix.encode(enumKeys.indexOf(value));
},
fixedSize,
maxSize
};
}
function getScalarEnumDecoder(constructor, config = {}) {
const prefix = config.size ?? getU8Decoder();
const { description, fixedSize, maxSize, minRange, maxRange, isNumericEnum, enumValues } = scalarEnumCoderHelper(
constructor,
prefix,
config.description
);
return {
decode: (bytes, offset = 0) => {
assertByteArrayIsNotEmptyForCodec("enum", bytes, offset);
const [value, newOffset] = prefix.decode(bytes, offset);
const valueAsNumber = Number(value);
offset = newOffset;
if (valueAsNumber < minRange || valueAsNumber > maxRange) {
throw new Error(
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
);
}
return [isNumericEnum ? valueAsNumber : enumValues[valueAsNumber], offset];
},
description,
fixedSize,
maxSize
};
}
function getScalarEnumCodec(constructor, config = {}) {
return combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config));
}
function setCodecHelper(item, size, description) {
if (size === "remainder" && item.fixedSize === null) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
}
return {
description: description ?? `set(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
};
}
function getSetEncoder(item, config = {}) {
const size = config.size ?? getU32Encoder();
return {
...setCodecHelper(item, size, config.description),
encode: (set) => {
if (typeof size === "number" && set.size !== size) {
assertValidNumberOfItemsForCodec("set", size, set.size);
}
const itemBytes = Array.from(set, (value) => item.encode(value));
return mergeBytes([getArrayLikeCodecSizePrefix(size, set.size), ...itemBytes]);
}
};
return mapEncoder(getArrayEncoder(item, config), (set) => [...set]);
}
function getSetDecoder(item, config = {}) {
const size = config.size ?? getU32Decoder();
return {
...setCodecHelper(item, size, config.description),
decode: (bytes, offset = 0) => {
const set = /* @__PURE__ */ new Set();
if (typeof size === "object" && bytes.slice(offset).length === 0) {
return [set, offset];
}
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
offset = newOffset;
for (let i = 0; i < resolvedSize; i += 1) {
const [value, newOffset2] = item.decode(bytes, offset);
offset = newOffset2;
set.add(value);
}
return [set, offset];
}
};
return mapDecoder(getArrayDecoder(item, config), (entries) => new Set(entries));
}

@@ -542,26 +495,29 @@ function getSetCodec(item, config = {}) {

}
function structCodecHelper(fields, description) {
const fieldDescriptions = fields.map(([name, codec]) => `${String(name)}: ${codec.description}`).join(", ");
return {
description: description ?? `struct(${fieldDescriptions})`,
fixedSize: sumCodecSizes(fields.map(([, field]) => field.fixedSize)),
maxSize: sumCodecSizes(fields.map(([, field]) => field.maxSize))
};
}
function getStructEncoder(fields, config = {}) {
return {
...structCodecHelper(fields, config.description),
encode: (struct) => {
const fieldBytes = fields.map(([key, codec]) => codec.encode(struct[key]));
return mergeBytes(fieldBytes);
function getStructEncoder(fields) {
const fieldCodecs = fields.map(([, codec]) => codec);
const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0;
return createEncoder({
...fixedSize === null ? {
getSizeFromValue: (value) => fields.map(([key, codec]) => getEncodedSize(value[key], codec)).reduce((all, one) => all + one, 0),
maxSize
} : { fixedSize },
write: (struct, bytes, offset) => {
fields.forEach(([key, codec]) => {
offset = codec.write(struct[key], bytes, offset);
});
return offset;
}
};
});
}
function getStructDecoder(fields, config = {}) {
return {
...structCodecHelper(fields, config.description),
decode: (bytes, offset = 0) => {
function getStructDecoder(fields) {
const fieldCodecs = fields.map(([, codec]) => codec);
const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0;
return createDecoder({
...fixedSize === null ? { maxSize } : { fixedSize },
read: (bytes, offset) => {
const struct = {};
fields.forEach(([key, codec]) => {
const [value, newOffset] = codec.decode(bytes, offset);
const [value, newOffset] = codec.read(bytes, offset);
offset = newOffset;

@@ -572,66 +528,25 @@ struct[key] = value;

}
};
});
}
function getStructCodec(fields, config = {}) {
return combineCodec(getStructEncoder(fields, config), getStructDecoder(fields, config));
function getStructCodec(fields) {
return combineCodec(getStructEncoder(fields), getStructDecoder(fields));
}
function tupleCodecHelper(items, description) {
const itemDescriptions = items.map((item) => item.description).join(", ");
return {
description: description ?? `tuple(${itemDescriptions})`,
fixedSize: sumCodecSizes(items.map((item) => item.fixedSize)),
maxSize: sumCodecSizes(items.map((item) => item.maxSize))
};
}
function getTupleEncoder(items, config = {}) {
return {
...tupleCodecHelper(items, config.description),
encode: (value) => {
assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
return mergeBytes(items.map((item, index) => item.encode(value[index])));
}
};
}
function getTupleDecoder(items, config = {}) {
return {
...tupleCodecHelper(items, config.description),
decode: (bytes, offset = 0) => {
const values = [];
items.forEach((codec) => {
const [newValue, newOffset] = codec.decode(bytes, offset);
values.push(newValue);
offset = newOffset;
});
return [values, offset];
}
};
}
function getTupleCodec(items, config = {}) {
return combineCodec(
getTupleEncoder(items, config),
getTupleDecoder(items, config)
);
}
function getUnitEncoder(config = {}) {
return {
description: config.description ?? "unit",
encode: () => new Uint8Array(),
function getUnitEncoder() {
return createEncoder({
fixedSize: 0,
maxSize: 0
};
write: (_value, _bytes, offset) => offset
});
}
function getUnitDecoder(config = {}) {
return {
decode: (_bytes, offset = 0) => [void 0, offset],
description: config.description ?? "unit",
function getUnitDecoder() {
return createDecoder({
fixedSize: 0,
maxSize: 0
};
read: (_bytes, offset) => [void 0, offset]
});
}
function getUnitCodec(config = {}) {
return combineCodec(getUnitEncoder(config), getUnitDecoder(config));
function getUnitCodec() {
return combineCodec(getUnitEncoder(), getUnitDecoder());
}
export { assertValidNumberOfItemsForCodec, decodeArrayLikeCodecSize, getArrayCodec, getArrayDecoder, getArrayEncoder, getArrayLikeCodecSizeDescription, getArrayLikeCodecSizeFromChildren, getArrayLikeCodecSizePrefix, getBitArrayCodec, getBitArrayDecoder, getBitArrayEncoder, getBooleanCodec, getBooleanDecoder, getBooleanEncoder, getBytesCodec, getBytesDecoder, getBytesEncoder, getDataEnumCodec, getDataEnumDecoder, getDataEnumEncoder, getMapCodec, getMapDecoder, getMapEncoder, getNullableCodec, getNullableDecoder, getNullableEncoder, getScalarEnumCodec, getScalarEnumDecoder, getScalarEnumEncoder, getSetCodec, getSetDecoder, getSetEncoder, getStructCodec, getStructDecoder, getStructEncoder, getTupleCodec, getTupleDecoder, getTupleEncoder, getUnitCodec, getUnitDecoder, getUnitEncoder };
export { assertValidNumberOfItemsForCodec, getArrayCodec, getArrayDecoder, getArrayEncoder, getBitArrayCodec, getBitArrayDecoder, getBitArrayEncoder, getBooleanCodec, getBooleanDecoder, getBooleanEncoder, getBytesCodec, getBytesDecoder, getBytesEncoder, getDataEnumCodec, getDataEnumDecoder, getDataEnumEncoder, getMapCodec, getMapDecoder, getMapEncoder, getNullableCodec, getNullableDecoder, getNullableEncoder, getScalarEnumCodec, getScalarEnumDecoder, getScalarEnumEncoder, getSetCodec, getSetDecoder, getSetEncoder, getStructCodec, getStructDecoder, getStructEncoder, getTupleCodec, getTupleDecoder, getTupleEncoder, getUnitCodec, getUnitDecoder, getUnitEncoder };
//# sourceMappingURL=out.js.map
//# sourceMappingURL=index.browser.js.map
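One simplification visible above is that the map and set codecs are no longer hand-rolled: `getMapEncoder` / `getMapDecoder` now wrap the array codec over a `[key, value]` tuple codec, and the set codecs wrap the array codec directly. The sketch below reproduces that composition from outside the package to show the equivalence; the u8 key/value codecs and the variable names are assumptions for the example.

```js
// Assumed usage sketch: rebuilding the new map codec from its parts, as the
// diff does internally. Import paths mirror the packages referenced above.
import { combineCodec, mapDecoder, mapEncoder } from '@solana/codecs-core';
import { getU8Decoder, getU8Encoder } from '@solana/codecs-numbers';
import {
  getArrayDecoder,
  getArrayEncoder,
  getMapCodec,
  getTupleDecoder,
  getTupleEncoder,
} from '@solana/codecs-data-structures';

const u8Encoder = getU8Encoder();
const u8Decoder = getU8Decoder();

// Equivalent to getMapEncoder(u8Encoder, u8Encoder) in the new version:
// a Map is encoded as a u32-length-prefixed array of [key, value] tuples.
const manualMapEncoder = mapEncoder(
  getArrayEncoder(getTupleEncoder([u8Encoder, u8Encoder])),
  (map) => [...map.entries()],
);

// Equivalent to getMapDecoder(u8Decoder, u8Decoder): decode the entries
// array, then rebuild the Map from it.
const manualMapDecoder = mapDecoder(
  getArrayDecoder(getTupleDecoder([u8Decoder, u8Decoder])),
  (entries) => new Map(entries),
);

const manualMapCodec = combineCodec(manualMapEncoder, manualMapDecoder);
const builtInMapCodec = getMapCodec(
  combineCodec(u8Encoder, u8Decoder),
  combineCodec(u8Encoder, u8Decoder),
);

const bytes = manualMapCodec.encode(new Map([[1, 2], [3, 4]]));
// Both codecs produce the same byte layout and round-trip the same Map.
builtInMapCodec.decode(bytes); // Map { 1 => 2, 3 => 4 }
```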

@@ -17,24 +17,2 @@ this.globalThis = this.globalThis || {};

}
function assertFixedSizeCodec(data, message) {
if (data.fixedSize === null) {
throw new Error(message ?? "Expected a fixed-size codec, got a variable-size one.");
}
}
var mergeBytes = (byteArrays) => {
const nonEmptyByteArrays = byteArrays.filter((arr) => arr.length);
if (nonEmptyByteArrays.length === 0) {
return byteArrays.length ? byteArrays[0] : new Uint8Array();
}
if (nonEmptyByteArrays.length === 1) {
return nonEmptyByteArrays[0];
}
const totalLength = nonEmptyByteArrays.reduce((total, arr) => total + arr.length, 0);
const result = new Uint8Array(totalLength);
let offset = 0;
nonEmptyByteArrays.forEach((arr) => {
result.set(arr, offset);
offset += arr.length;
});
return result;
};
var padBytes = (bytes, length) => {

@@ -48,4 +26,37 @@ if (bytes.length >= length)

var fixBytes = (bytes, length) => padBytes(bytes.length <= length ? bytes : bytes.slice(0, length), length);
function combineCodec(encoder, decoder, description) {
if (encoder.fixedSize !== decoder.fixedSize) {
function getEncodedSize(value, encoder) {
return "fixedSize" in encoder ? encoder.fixedSize : encoder.getSizeFromValue(value);
}
function createEncoder(encoder) {
return Object.freeze({
...encoder,
encode: (value) => {
const bytes = new Uint8Array(getEncodedSize(value, encoder));
encoder.write(value, bytes, 0);
return bytes;
}
});
}
function createDecoder(decoder) {
return Object.freeze({
...decoder,
decode: (bytes, offset = 0) => decoder.read(bytes, offset)[0]
});
}
function isFixedSize(codec) {
return "fixedSize" in codec && typeof codec.fixedSize === "number";
}
function assertIsFixedSize(codec, message) {
if (!isFixedSize(codec)) {
throw new Error(message != null ? message : "Expected a fixed-size codec, got a variable-size one.");
}
}
function isVariableSize(codec) {
return !isFixedSize(codec);
}
function combineCodec(encoder, decoder) {
if (isFixedSize(encoder) !== isFixedSize(decoder)) {
throw new Error(`Encoder and decoder must either both be fixed-size or variable-size.`);
}
if (isFixedSize(encoder) && isFixedSize(decoder) && encoder.fixedSize !== decoder.fixedSize) {
throw new Error(

@@ -55,3 +66,3 @@ `Encoder and decoder must have the same fixed size, got [${encoder.fixedSize}] and [${decoder.fixedSize}].`

}
if (encoder.maxSize !== decoder.maxSize) {
if (!isFixedSize(encoder) && !isFixedSize(decoder) && encoder.maxSize !== decoder.maxSize) {
throw new Error(

@@ -61,32 +72,26 @@ `Encoder and decoder must have the same max size, got [${encoder.maxSize}] and [${decoder.maxSize}].`

}
if (description === void 0 && encoder.description !== decoder.description) {
throw new Error(
`Encoder and decoder must have the same description, got [${encoder.description}] and [${decoder.description}]. Pass a custom description as a third argument if you want to override the description and bypass this error.`
);
}
return {
...decoder,
...encoder,
decode: decoder.decode,
description: description ?? encoder.description,
encode: encoder.encode,
fixedSize: encoder.fixedSize,
maxSize: encoder.maxSize
read: decoder.read,
write: encoder.write
};
}
function fixCodecHelper(data, fixedBytes, description) {
return {
description: description ?? `fixed(${fixedBytes}, ${data.description})`,
function fixEncoder(encoder, fixedBytes) {
return createEncoder({
fixedSize: fixedBytes,
maxSize: fixedBytes
};
write: (value, bytes, offset) => {
const variableByteArray = encoder.encode(value);
const fixedByteArray = variableByteArray.length > fixedBytes ? variableByteArray.slice(0, fixedBytes) : variableByteArray;
bytes.set(fixedByteArray, offset);
return offset + fixedBytes;
}
});
}
function fixEncoder(encoder, fixedBytes, description) {
return {
...fixCodecHelper(encoder, fixedBytes, description),
encode: (value) => fixBytes(encoder.encode(value), fixedBytes)
};
}
function fixDecoder(decoder, fixedBytes, description) {
return {
...fixCodecHelper(decoder, fixedBytes, description),
decode: (bytes, offset = 0) => {
function fixDecoder(decoder, fixedBytes) {
return createDecoder({
fixedSize: fixedBytes,
read: (bytes, offset) => {
assertByteArrayHasEnoughBytesForCodec("fixCodec", fixedBytes, bytes, offset);

@@ -96,10 +101,25 @@ if (offset > 0 || bytes.length > fixedBytes) {

}
if (decoder.fixedSize !== null) {
if (isFixedSize(decoder)) {
bytes = fixBytes(bytes, decoder.fixedSize);
}
const [value] = decoder.decode(bytes, 0);
const [value] = decoder.read(bytes, 0);
return [value, offset + fixedBytes];
}
};
});
}
function mapEncoder(encoder, unmap) {
return createEncoder({
...isVariableSize(encoder) ? { ...encoder, getSizeFromValue: (value) => encoder.getSizeFromValue(unmap(value)) } : encoder,
write: (value, bytes, offset) => encoder.write(unmap(value), bytes, offset)
});
}
function mapDecoder(decoder, map) {
return createDecoder({
...decoder,
read: (bytes, offset) => {
const [value, newOffset] = decoder.read(bytes, offset);
return [map(value, bytes, offset), newOffset];
}
});
}

@@ -114,21 +134,9 @@ // ../codecs-numbers/dist/index.browser.js

}
function sharedNumberFactory(input) {
let littleEndian;
let defaultDescription = input.name;
if (input.size > 1) {
littleEndian = !("endian" in input.config) || input.config.endian === 0;
defaultDescription += littleEndian ? "(le)" : "(be)";
}
return {
description: input.config.description ?? defaultDescription,
fixedSize: input.size,
littleEndian,
maxSize: input.size
};
function isLittleEndian(config) {
return (config == null ? void 0 : config.endian) === 1 ? false : true;
}
function numberEncoderFactory(input) {
const codecData = sharedNumberFactory(input);
return {
description: codecData.description,
encode(value) {
return createEncoder({
fixedSize: input.size,
write(value, bytes, offset) {
if (input.range) {

@@ -138,26 +146,22 @@ assertNumberIsBetweenForCodec(input.name, input.range[0], input.range[1], value);

const arrayBuffer = new ArrayBuffer(input.size);
input.set(new DataView(arrayBuffer), value, codecData.littleEndian);
return new Uint8Array(arrayBuffer);
},
fixedSize: codecData.fixedSize,
maxSize: codecData.maxSize
};
input.set(new DataView(arrayBuffer), value, isLittleEndian(input.config));
bytes.set(new Uint8Array(arrayBuffer), offset);
return offset + input.size;
}
});
}
function numberDecoderFactory(input) {
const codecData = sharedNumberFactory(input);
return {
decode(bytes, offset = 0) {
assertByteArrayIsNotEmptyForCodec(codecData.description, bytes, offset);
assertByteArrayHasEnoughBytesForCodec(codecData.description, input.size, bytes, offset);
return createDecoder({
fixedSize: input.size,
read(bytes, offset = 0) {
assertByteArrayIsNotEmptyForCodec(input.name, bytes, offset);
assertByteArrayHasEnoughBytesForCodec(input.name, input.size, bytes, offset);
const view = new DataView(toArrayBuffer(bytes, offset, input.size));
return [input.get(view, codecData.littleEndian), offset + input.size];
},
description: codecData.description,
fixedSize: codecData.fixedSize,
maxSize: codecData.maxSize
};
return [input.get(view, isLittleEndian(input.config)), offset + input.size];
}
});
}
function toArrayBuffer(bytes, offset, length) {
const bytesOffset = bytes.byteOffset + (offset ?? 0);
const bytesLength = length ?? bytes.byteLength;
const bytesOffset = bytes.byteOffset + (offset != null ? offset : 0);
const bytesLength = length != null ? length : bytes.byteLength;
return bytes.buffer.slice(bytesOffset, bytesOffset + bytesLength);

@@ -178,4 +182,3 @@ }

});
var getU8Encoder = (config = {}) => numberEncoderFactory({
config,
var getU8Encoder = () => numberEncoderFactory({
name: "u8",

@@ -186,4 +189,3 @@ range: [0, Number("0xff")],

});
var getU8Decoder = (config = {}) => numberDecoderFactory({
config,
var getU8Decoder = () => numberDecoderFactory({
get: (view) => view.getUint8(0),

@@ -194,2 +196,9 @@ name: "u8",

// src/assertions.ts
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
if (expected !== actual) {
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
}
}
// src/utils.ts

@@ -205,90 +214,67 @@ function maxCodecSizes(sizes) {

}
// src/array-like-codec-size.ts
function decodeArrayLikeCodecSize(size, childrenSizes, bytes, offset) {
if (typeof size === "number") {
return [size, offset];
}
if (typeof size === "object") {
return size.decode(bytes, offset);
}
if (size === "remainder") {
const childrenSize = sumCodecSizes(childrenSizes);
if (childrenSize === null) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
}
const remainder = bytes.slice(offset).length;
if (remainder % childrenSize !== 0) {
throw new Error(
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${childrenSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${childrenSize} should be equal to zero.`
);
}
return [remainder / childrenSize, offset];
}
throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
function getFixedSize(codec) {
return isFixedSize(codec) ? codec.fixedSize : null;
}
function getArrayLikeCodecSizeDescription(size) {
return typeof size === "object" ? size.description : `${size}`;
function getMaxSize(codec) {
var _a;
return isFixedSize(codec) ? codec.fixedSize : (_a = codec.maxSize) != null ? _a : null;
}
function getArrayLikeCodecSizeFromChildren(size, childrenSizes) {
if (typeof size !== "number")
return null;
if (size === 0)
return 0;
const childrenSize = sumCodecSizes(childrenSizes);
return childrenSize === null ? null : childrenSize * size;
}
function getArrayLikeCodecSizePrefix(size, realSize) {
return typeof size === "object" ? size.encode(realSize) : new Uint8Array();
}
// src/assertions.ts
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
if (expected !== actual) {
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
}
}
// src/array.ts
function arrayCodecHelper(item, size, description) {
if (size === "remainder" && item.fixedSize === null) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
function getArrayEncoder(item, config = {}) {
var _a, _b;
const size = (_a = config.size) != null ? _a : getU32Encoder();
if (size === "remainder") {
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
}
return {
description: description ?? `array(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
};
}
function getArrayEncoder(item, config = {}) {
const size = config.size ?? getU32Encoder();
return {
...arrayCodecHelper(item, size, config.description),
encode: (value) => {
const fixedSize = computeArrayLikeCodecSize(size, getFixedSize(item));
const maxSize = (_b = computeArrayLikeCodecSize(size, getMaxSize(item))) != null ? _b : void 0;
return createEncoder({
...fixedSize !== null ? { fixedSize } : {
getSizeFromValue: (array) => {
const prefixSize = typeof size === "object" ? getEncodedSize(array.length, size) : 0;
return prefixSize + [...array].reduce((all, value) => all + getEncodedSize(value, item), 0);
},
maxSize
},
write: (array, bytes, offset) => {
if (typeof size === "number") {
assertValidNumberOfItemsForCodec("array", size, value.length);
assertValidNumberOfItemsForCodec("array", size, array.length);
}
return mergeBytes([getArrayLikeCodecSizePrefix(size, value.length), ...value.map((v) => item.encode(v))]);
if (typeof size === "object") {
offset = size.write(array.length, bytes, offset);
}
array.forEach((value) => {
offset = item.write(value, bytes, offset);
});
return offset;
}
};
});
}
function getArrayDecoder(item, config = {}) {
const size = config.size ?? getU32Decoder();
return {
...arrayCodecHelper(item, size, config.description),
decode: (bytes, offset = 0) => {
var _a, _b;
const size = (_a = config.size) != null ? _a : getU32Decoder();
if (size === "remainder") {
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
}
const itemSize = getFixedSize(item);
const fixedSize = computeArrayLikeCodecSize(size, itemSize);
const maxSize = (_b = computeArrayLikeCodecSize(size, getMaxSize(item))) != null ? _b : void 0;
return createDecoder({
...fixedSize !== null ? { fixedSize } : { maxSize },
read: (bytes, offset) => {
const array = [];
if (typeof size === "object" && bytes.slice(offset).length === 0) {
return [[], offset];
return [array, offset];
}
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
const [resolvedSize, newOffset] = readArrayLikeCodecSize(size, itemSize, bytes, offset);
offset = newOffset;
const values = [];
for (let i = 0; i < resolvedSize; i += 1) {
const [value, newOffset2] = item.decode(bytes, offset);
values.push(value);
const [value, newOffset2] = item.read(bytes, offset);
offset = newOffset2;
array.push(value);
}
return [values, offset];
return [array, offset];
}
};
});
}

@@ -298,36 +284,65 @@ function getArrayCodec(item, config = {}) {

}
function readArrayLikeCodecSize(size, itemSize, bytes, offset) {
if (typeof size === "number") {
return [size, offset];
}
if (typeof size === "object") {
return size.read(bytes, offset);
}
if (size === "remainder") {
if (itemSize === null) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
}
const remainder = Math.max(0, bytes.length - offset);
if (remainder % itemSize !== 0) {
throw new Error(
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${itemSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${itemSize} should be equal to zero.`
);
}
return [remainder / itemSize, offset];
}
throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
}
function computeArrayLikeCodecSize(size, itemSize) {
if (typeof size !== "number")
return null;
if (size === 0)
return 0;
return itemSize === null ? null : itemSize * size;
}
// src/bit-array.ts
var getBitArrayEncoder = (size, config = {}) => {
function getBitArrayEncoder(size, config = {}) {
var _a;
const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
const backward = parsedConfig.backward ?? false;
const backwardSuffix = backward ? "; backward" : "";
return {
description: parsedConfig.description ?? `bitArray(${size}${backwardSuffix})`,
encode(value) {
const bytes = [];
const backward = (_a = parsedConfig.backward) != null ? _a : false;
return createEncoder({
fixedSize: size,
write(value, bytes, offset) {
var _a2;
const bytesToAdd = [];
for (let i = 0; i < size; i += 1) {
let byte = 0;
for (let j = 0; j < 8; j += 1) {
const feature = Number(value[i * 8 + j] ?? 0);
const feature = Number((_a2 = value[i * 8 + j]) != null ? _a2 : 0);
byte |= feature << (backward ? j : 7 - j);
}
if (backward) {
bytes.unshift(byte);
bytesToAdd.unshift(byte);
} else {
bytes.push(byte);
bytesToAdd.push(byte);
}
}
return new Uint8Array(bytes);
},
bytes.set(bytesToAdd, offset);
return size;
}
});
}
function getBitArrayDecoder(size, config = {}) {
var _a;
const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
const backward = (_a = parsedConfig.backward) != null ? _a : false;
return createDecoder({
fixedSize: size,
maxSize: size
};
};
var getBitArrayDecoder = (size, config = {}) => {
const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
const backward = parsedConfig.backward ?? false;
const backwardSuffix = backward ? "; backward" : "";
return {
decode(bytes, offset = 0) {
read(bytes, offset) {
assertByteArrayHasEnoughBytesForCodec("bitArray", size, bytes, offset);

@@ -349,34 +364,21 @@ const booleans = [];

return [booleans, offset + size];
},
description: parsedConfig.description ?? `bitArray(${size}${backwardSuffix})`,
fixedSize: size,
maxSize: size
};
};
var getBitArrayCodec = (size, config = {}) => combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
}
});
}
function getBitArrayCodec(size, config = {}) {
return combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
}
// src/boolean.ts
function getBooleanEncoder(config = {}) {
const size = config.size ?? getU8Encoder();
assertFixedSizeCodec(size, "Codec [bool] requires a fixed size.");
return {
description: config.description ?? `bool(${size.description})`,
encode: (value) => size.encode(value ? 1 : 0),
fixedSize: size.fixedSize,
maxSize: size.fixedSize
};
var _a;
const size = (_a = config.size) != null ? _a : getU8Encoder();
assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
return mapEncoder(size, (value) => value ? 1 : 0);
}
function getBooleanDecoder(config = {}) {
const size = config.size ?? getU8Decoder();
assertFixedSizeCodec(size, "Codec [bool] requires a fixed size.");
return {
decode: (bytes, offset = 0) => {
assertByteArrayIsNotEmptyForCodec("bool", bytes, offset);
const [value, vOffset] = size.decode(bytes, offset);
return [value === 1, vOffset];
},
description: config.description ?? `bool(${size.description})`,
fixedSize: size.fixedSize,
maxSize: size.fixedSize
};
var _a;
const size = (_a = config.size) != null ? _a : getU8Decoder();
assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
return mapDecoder(size, (value) => Number(value) === 1);
}

@@ -389,11 +391,11 @@ function getBooleanCodec(config = {}) {

function getBytesEncoder(config = {}) {
const size = config.size ?? "variable";
const sizeDescription = typeof size === "object" ? size.description : `${size}`;
const description = config.description ?? `bytes(${sizeDescription})`;
const byteEncoder = {
description,
encode: (value) => value,
fixedSize: null,
maxSize: null
};
var _a;
const size = (_a = config.size) != null ? _a : "variable";
const byteEncoder = createEncoder({
getSizeFromValue: (value) => value.length,
write: (value, bytes, offset) => {
bytes.set(value, offset);
return offset + value.length;
}
});
if (size === "variable") {

@@ -403,26 +405,21 @@ return byteEncoder;

if (typeof size === "number") {
return fixEncoder(byteEncoder, size, description);
return fixEncoder(byteEncoder, size);
}
return {
...byteEncoder,
encode: (value) => {
const contentBytes = byteEncoder.encode(value);
const lengthBytes = size.encode(contentBytes.length);
return mergeBytes([lengthBytes, contentBytes]);
return createEncoder({
getSizeFromValue: (value) => getEncodedSize(value.length, size) + value.length,
write: (value, bytes, offset) => {
offset = size.write(value.length, bytes, offset);
return byteEncoder.write(value, bytes, offset);
}
};
});
}
function getBytesDecoder(config = {}) {
const size = config.size ?? "variable";
const sizeDescription = typeof size === "object" ? size.description : `${size}`;
const description = config.description ?? `bytes(${sizeDescription})`;
const byteDecoder = {
decode: (bytes, offset = 0) => {
var _a;
const size = (_a = config.size) != null ? _a : "variable";
const byteDecoder = createDecoder({
read: (bytes, offset) => {
const slice = bytes.slice(offset);
return [slice, offset + slice.length];
},
description,
fixedSize: null,
maxSize: null
};
}
});
if (size === "variable") {

@@ -432,9 +429,8 @@ return byteDecoder;

if (typeof size === "number") {
return fixDecoder(byteDecoder, size, description);
return fixDecoder(byteDecoder, size);
}
return {
...byteDecoder,
decode: (bytes, offset = 0) => {
return createDecoder({
read: (bytes, offset) => {
assertByteArrayIsNotEmptyForCodec("bytes", bytes, offset);
const [lengthBigInt, lengthOffset] = size.decode(bytes, offset);
const [lengthBigInt, lengthOffset] = size.read(bytes, offset);
const length = Number(lengthBigInt);

@@ -444,7 +440,7 @@ offset = lengthOffset;

assertByteArrayHasEnoughBytesForCodec("bytes", length, contentBytes);
const [value, contentOffset] = byteDecoder.decode(contentBytes);
const [value, contentOffset] = byteDecoder.read(contentBytes, 0);
offset += contentOffset;
return [value, offset];
}
};
});
}

@@ -456,40 +452,35 @@ function getBytesCodec(config = {}) {

// src/data-enum.ts
function dataEnumCodecHelper(variants, prefix, description) {
const fieldDescriptions = variants.map(([name, codec]) => `${String(name)}${codec ? `: ${codec.description}` : ""}`).join(", ");
const allVariantHaveTheSameFixedSize = variants.every((one, _i, all) => one[1].fixedSize === all[0][1].fixedSize);
const fixedVariantSize = allVariantHaveTheSameFixedSize ? variants[0][1].fixedSize : null;
const maxVariantSize = maxCodecSizes(variants.map(([, field]) => field.maxSize));
return {
description: description ?? `dataEnum(${fieldDescriptions}; ${prefix.description})`,
fixedSize: variants.length === 0 ? prefix.fixedSize : sumCodecSizes([prefix.fixedSize, fixedVariantSize]),
maxSize: variants.length === 0 ? prefix.maxSize : sumCodecSizes([prefix.maxSize, maxVariantSize])
};
}
function getDataEnumEncoder(variants, config = {}) {
const prefix = config.size ?? getU8Encoder();
return {
...dataEnumCodecHelper(variants, prefix, config.description),
encode: (variant) => {
const discriminator = variants.findIndex(([key]) => variant.__kind === key);
if (discriminator < 0) {
throw new Error(
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
);
}
const variantPrefix = prefix.encode(discriminator);
const variantSerializer = variants[discriminator][1];
const variantBytes = variantSerializer.encode(variant);
return mergeBytes([variantPrefix, variantBytes]);
var _a;
const prefix = (_a = config.size) != null ? _a : getU8Encoder();
const fixedSize = getDataEnumFixedSize(variants, prefix);
return createEncoder({
...fixedSize !== null ? { fixedSize } : {
getSizeFromValue: (variant) => {
const discriminator = getVariantDiscriminator(variants, variant);
const variantEncoder = variants[discriminator][1];
return getEncodedSize(discriminator, prefix) + getEncodedSize(variant, variantEncoder);
},
maxSize: getDataEnumMaxSize(variants, prefix)
},
write: (variant, bytes, offset) => {
const discriminator = getVariantDiscriminator(variants, variant);
offset = prefix.write(discriminator, bytes, offset);
const variantEncoder = variants[discriminator][1];
return variantEncoder.write(variant, bytes, offset);
}
};
});
}
function getDataEnumDecoder(variants, config = {}) {
const prefix = config.size ?? getU8Decoder();
return {
...dataEnumCodecHelper(variants, prefix, config.description),
decode: (bytes, offset = 0) => {
var _a;
const prefix = (_a = config.size) != null ? _a : getU8Decoder();
const fixedSize = getDataEnumFixedSize(variants, prefix);
return createDecoder({
...fixedSize !== null ? { fixedSize } : { maxSize: getDataEnumMaxSize(variants, prefix) },
read: (bytes, offset) => {
var _a2;
assertByteArrayIsNotEmptyForCodec("dataEnum", bytes, offset);
const [discriminator, dOffset] = prefix.decode(bytes, offset);
const [discriminator, dOffset] = prefix.read(bytes, offset);
offset = dOffset;
const variantField = variants[Number(discriminator)] ?? null;
const variantField = (_a2 = variants[Number(discriminator)]) != null ? _a2 : null;
if (!variantField) {

@@ -500,7 +491,7 @@ throw new Error(

}
const [variant, vOffset] = variantField[1].decode(bytes, offset);
const [variant, vOffset] = variantField[1].read(bytes, offset);
offset = vOffset;
return [{ __kind: variantField[0], ...variant ?? {} }, offset];
return [{ __kind: variantField[0], ...variant != null ? variant : {} }, offset];
}
};
});
}

@@ -510,54 +501,86 @@ function getDataEnumCodec(variants, config = {}) {

}
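A hedged sketch of how the data-enum codec above is typically used; the Message variants and the getU32Codec helper are illustrative assumptions, not taken from this diff.

import { getDataEnumCodec, getStructCodec, getUnitCodec } from '@solana/codecs-data-structures';
import { getU32Codec } from '@solana/codecs-numbers';

// A Rust-like enum with an empty variant and a struct variant.
const messageCodec = getDataEnumCodec([
  ['Quit', getUnitCodec()],
  ['Move', getStructCodec([['x', getU32Codec()], ['y', getU32Codec()]])],
]);

// The u8 prefix (default) stores the variant index, followed by the variant's data.
messageCodec.encode({ __kind: 'Move', x: 1, y: 2 }); // -> 01 01 00 00 00 02 00 00 00
messageCodec.encode({ __kind: 'Quit' });             // -> 00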
// src/map.ts
function mapCodecHelper(key, value, size, description) {
if (size === "remainder" && (key.fixedSize === null || value.fixedSize === null)) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
function getDataEnumFixedSize(variants, prefix) {
if (variants.length === 0)
return isFixedSize(prefix) ? prefix.fixedSize : null;
if (!isFixedSize(variants[0][1]))
return null;
const variantSize = variants[0][1].fixedSize;
const sameSizedVariants = variants.every(
(variant) => isFixedSize(variant[1]) && variant[1].fixedSize === variantSize
);
if (!sameSizedVariants)
return null;
return isFixedSize(prefix) ? prefix.fixedSize + variantSize : null;
}
function getDataEnumMaxSize(variants, prefix) {
var _a;
const maxVariantSize = maxCodecSizes(variants.map(([, codec]) => getMaxSize(codec)));
return (_a = sumCodecSizes([getMaxSize(prefix), maxVariantSize])) != null ? _a : void 0;
}
function getVariantDiscriminator(variants, variant) {
const discriminator = variants.findIndex(([key]) => variant.__kind === key);
if (discriminator < 0) {
throw new Error(
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
);
}
return {
description: description ?? `map(${key.description}, ${value.description}; ${getArrayLikeCodecSizeDescription(size)})`,
fixedSize: getArrayLikeCodecSizeFromChildren(size, [key.fixedSize, value.fixedSize]),
maxSize: getArrayLikeCodecSizeFromChildren(size, [key.maxSize, value.maxSize])
};
return discriminator;
}
function getMapEncoder(key, value, config = {}) {
const size = config.size ?? getU32Encoder();
return {
...mapCodecHelper(key, value, size, config.description),
encode: (map) => {
if (typeof size === "number") {
assertValidNumberOfItemsForCodec("map", size, map.size);
}
const itemBytes = Array.from(map, ([k, v]) => mergeBytes([key.encode(k), value.encode(v)]));
return mergeBytes([getArrayLikeCodecSizePrefix(size, map.size), ...itemBytes]);
// src/tuple.ts
function getTupleEncoder(items) {
var _a;
const fixedSize = sumCodecSizes(items.map(getFixedSize));
const maxSize = (_a = sumCodecSizes(items.map(getMaxSize))) != null ? _a : void 0;
return createEncoder({
...fixedSize === null ? {
getSizeFromValue: (value) => items.map((item, index) => getEncodedSize(value[index], item)).reduce((all, one) => all + one, 0),
maxSize
} : { fixedSize },
write: (value, bytes, offset) => {
assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
items.forEach((item, index) => {
offset = item.write(value[index], bytes, offset);
});
return offset;
}
};
});
}
function getMapDecoder(key, value, config = {}) {
const size = config.size ?? getU32Decoder();
return {
...mapCodecHelper(key, value, size, config.description),
decode: (bytes, offset = 0) => {
const map = /* @__PURE__ */ new Map();
if (typeof size === "object" && bytes.slice(offset).length === 0) {
return [map, offset];
}
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(
size,
[key.fixedSize, value.fixedSize],
bytes,
offset
);
offset = newOffset;
for (let i = 0; i < resolvedSize; i += 1) {
const [decodedKey, kOffset] = key.decode(bytes, offset);
offset = kOffset;
const [decodedValue, vOffset] = value.decode(bytes, offset);
offset = vOffset;
map.set(decodedKey, decodedValue);
}
return [map, offset];
function getTupleDecoder(items) {
var _a;
const fixedSize = sumCodecSizes(items.map(getFixedSize));
const maxSize = (_a = sumCodecSizes(items.map(getMaxSize))) != null ? _a : void 0;
return createDecoder({
...fixedSize === null ? { maxSize } : { fixedSize },
read: (bytes, offset) => {
const values = [];
items.forEach((item) => {
const [newValue, newOffset] = item.read(bytes, offset);
values.push(newValue);
offset = newOffset;
});
return [values, offset];
}
};
});
}
function getTupleCodec(items) {
return combineCodec(
getTupleEncoder(items),
getTupleDecoder(items)
);
}
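A short illustrative example of the tuple codec introduced above; the u8/u64 item codecs are assumed to exist in '@solana/codecs-numbers'.

import { getTupleCodec } from '@solana/codecs-data-structures';
import { getU8Codec, getU64Codec } from '@solana/codecs-numbers';

// Items are written back to back in order; the codec is fixed-size (9 bytes)
// because every item codec is fixed-size.
const tupleCodec = getTupleCodec([getU8Codec(), getU64Codec()]);
const bytes = tupleCodec.encode([42, 123n]);
const pair = tupleCodec.decode(bytes); // [42, 123n], assuming decode returns the decoded value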
// src/map.ts
function getMapEncoder(key, value, config = {}) {
return mapEncoder(
getArrayEncoder(getTupleEncoder([key, value]), config),
(map) => [...map.entries()]
);
}
function getMapDecoder(key, value, config = {}) {
return mapDecoder(
getArrayDecoder(getTupleDecoder([key, value]), config),
(entries) => new Map(entries)
);
}
function getMapCodec(key, value, config = {}) {

@@ -568,61 +591,107 @@ return combineCodec(getMapEncoder(key, value, config), getMapDecoder(key, value, config));
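Since the map codec above now delegates to an array of [key, value] tuples, a hedged usage sketch (u8/u32 codecs assumed):

import { getMapCodec } from '@solana/codecs-data-structures';
import { getU8Codec, getU32Codec } from '@solana/codecs-numbers';

// Default u32 length prefix, then each entry as key bytes followed by value bytes.
const mapCodec = getMapCodec(getU8Codec(), getU32Codec());
const bytes = mapCodec.encode(new Map([[1, 100], [2, 200]]));
const roundTripped = mapCodec.decode(bytes); // Map(2) { 1 => 100, 2 => 200 }, assuming decode returns the value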

// src/nullable.ts
function nullableCodecHelper(item, prefix, fixed, description) {
let descriptionSuffix = `; ${prefix.description}`;
let fixedSize = item.fixedSize === 0 ? prefix.fixedSize : null;
if (fixed) {
assertFixedSizeCodec(item, "Fixed nullables can only be used with fixed-size codecs.");
assertFixedSizeCodec(prefix, "Fixed nullables can only be used with fixed-size prefix.");
descriptionSuffix += "; fixed";
fixedSize = prefix.fixedSize + item.fixedSize;
function getNullableEncoder(item, config = {}) {
var _a, _b, _c;
const prefix = (_a = config.prefix) != null ? _a : getU8Encoder();
const fixed = (_b = config.fixed) != null ? _b : false;
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
if (fixed || isZeroSizeItem) {
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
const fixedSize = prefix.fixedSize + item.fixedSize;
return createEncoder({
fixedSize,
write: (option, bytes, offset) => {
const prefixOffset = prefix.write(Number(option !== null), bytes, offset);
if (option !== null) {
item.write(option, bytes, prefixOffset);
}
return offset + fixedSize;
}
});
}
return {
description: description ?? `nullable(${item.description + descriptionSuffix})`,
fixedSize,
maxSize: sumCodecSizes([prefix.maxSize, item.maxSize])
};
}
function getNullableEncoder(item, config = {}) {
const prefix = config.prefix ?? getU8Encoder();
const fixed = config.fixed ?? false;
return {
...nullableCodecHelper(item, prefix, fixed, config.description),
encode: (option) => {
const prefixByte = prefix.encode(Number(option !== null));
let itemBytes = option !== null ? item.encode(option) : new Uint8Array();
itemBytes = fixed ? fixBytes(itemBytes, item.fixedSize) : itemBytes;
return mergeBytes([prefixByte, itemBytes]);
return createEncoder({
getSizeFromValue: (option) => getEncodedSize(Number(option !== null), prefix) + (option !== null ? getEncodedSize(option, item) : 0),
maxSize: (_c = sumCodecSizes([prefix, item].map(getMaxSize))) != null ? _c : void 0,
write: (option, bytes, offset) => {
offset = prefix.write(Number(option !== null), bytes, offset);
if (option !== null) {
offset = item.write(option, bytes, offset);
}
return offset;
}
};
});
}
function getNullableDecoder(item, config = {}) {
const prefix = config.prefix ?? getU8Decoder();
const fixed = config.fixed ?? false;
return {
...nullableCodecHelper(item, prefix, fixed, config.description),
decode: (bytes, offset = 0) => {
var _a, _b, _c;
const prefix = (_a = config.prefix) != null ? _a : getU8Decoder();
const fixed = (_b = config.fixed) != null ? _b : false;
let fixedSize = null;
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
if (fixed || isZeroSizeItem) {
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
fixedSize = prefix.fixedSize + item.fixedSize;
}
return createDecoder({
...fixedSize === null ? { maxSize: (_c = sumCodecSizes([prefix, item].map(getMaxSize))) != null ? _c : void 0 } : { fixedSize },
read: (bytes, offset) => {
if (bytes.length - offset <= 0) {
return [null, offset];
}
const fixedOffset = offset + (prefix.fixedSize ?? 0) + (item.fixedSize ?? 0);
const [isSome, prefixOffset] = prefix.decode(bytes, offset);
offset = prefixOffset;
const [isSome, prefixOffset] = prefix.read(bytes, offset);
if (isSome === 0) {
return [null, fixed ? fixedOffset : offset];
return [null, fixedSize !== null ? offset + fixedSize : prefixOffset];
}
const [value, newOffset] = item.decode(bytes, offset);
offset = newOffset;
return [value, fixed ? fixedOffset : offset];
const [value, newOffset] = item.read(bytes, prefixOffset);
return [value, fixedSize !== null ? offset + fixedSize : newOffset];
}
};
});
}
function getNullableCodec(item, config = {}) {
return combineCodec(getNullableEncoder(item, config), getNullableDecoder(item, config));
const configCast = config;
return combineCodec(getNullableEncoder(item, configCast), getNullableDecoder(item, configCast));
}
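A hedged example of the nullable codec above; getU32Codec is an assumed helper from '@solana/codecs-numbers'.

import { getNullableCodec } from '@solana/codecs-data-structures';
import { getU32Codec } from '@solana/codecs-numbers';

// One-byte boolean prefix, then the item bytes only when the value is not null.
const nullableU32 = getNullableCodec(getU32Codec());
nullableU32.encode(42);   // -> 01 2a 00 00 00
nullableU32.encode(null); // -> 00

// With `fixed: true`, null still occupies the full item size so the codec stays fixed-size.
const fixedNullableU32 = getNullableCodec(getU32Codec(), { fixed: true });
fixedNullableU32.encode(null); // -> 00 00 00 00 00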
// src/scalar-enum.ts
function scalarEnumCoderHelper(constructor, prefix, description) {
function getScalarEnumEncoder(constructor, config = {}) {
var _a;
const prefix = (_a = config.size) != null ? _a : getU8Encoder();
const { minRange, maxRange, stringValues, enumKeys, enumValues } = getScalarEnumStats(constructor);
return mapEncoder(prefix, (value) => {
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
const isInvalidString = typeof value === "string" && !stringValues.includes(value);
if (isInvalidNumber || isInvalidString) {
throw new Error(
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
);
}
if (typeof value === "number")
return value;
const valueIndex = enumValues.indexOf(value);
if (valueIndex >= 0)
return valueIndex;
return enumKeys.indexOf(value);
});
}
function getScalarEnumDecoder(constructor, config = {}) {
var _a;
const prefix = (_a = config.size) != null ? _a : getU8Decoder();
const { minRange, maxRange, isNumericEnum, enumValues } = getScalarEnumStats(constructor);
return mapDecoder(prefix, (value) => {
const valueAsNumber = Number(value);
if (valueAsNumber < minRange || valueAsNumber > maxRange) {
throw new Error(
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
);
}
return isNumericEnum ? valueAsNumber : enumValues[valueAsNumber];
});
}
function getScalarEnumCodec(constructor, config = {}) {
return combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config));
}
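An illustrative sketch of the scalar-enum codec above, using a hypothetical TypeScript enum; the accepted inputs (index, key or value) follow the lookup logic in getScalarEnumEncoder.

import { getScalarEnumCodec } from '@solana/codecs-data-structures';

// Hypothetical enum, for illustration only.
enum Direction { Up, Down, Left, Right }

const directionCodec = getScalarEnumCodec(Direction);
directionCodec.encode(Direction.Left);       // -> 02 (u8 prefix by default)
directionCodec.encode('Up');                 // -> 00 (enum keys are accepted too)
directionCodec.decode(new Uint8Array([3]));  // -> 3 (Direction.Right), assuming decode returns the value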
function getScalarEnumStats(constructor) {
const enumKeys = Object.keys(constructor);
const enumValues = Object.values(constructor);
const isNumericEnum = enumValues.some((v) => typeof v === "number");
const valueDescriptions = enumValues.filter((v) => typeof v === "string").join(", ");
const minRange = 0;

@@ -632,9 +701,6 @@ const maxRange = isNumericEnum ? enumValues.length / 2 - 1 : enumValues.length - 1;

return {
description: description ?? `enum(${valueDescriptions}; ${prefix.description})`,
enumKeys,
enumValues,
fixedSize: prefix.fixedSize,
isNumericEnum,
maxRange,
maxSize: prefix.maxSize,
minRange,

@@ -644,98 +710,9 @@ stringValues

}
function getScalarEnumEncoder(constructor, config = {}) {
const prefix = config.size ?? getU8Encoder();
const { description, fixedSize, maxSize, minRange, maxRange, stringValues, enumKeys, enumValues } = scalarEnumCoderHelper(constructor, prefix, config.description);
return {
description,
encode: (value) => {
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
const isInvalidString = typeof value === "string" && !stringValues.includes(value);
if (isInvalidNumber || isInvalidString) {
throw new Error(
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
);
}
if (typeof value === "number")
return prefix.encode(value);
const valueIndex = enumValues.indexOf(value);
if (valueIndex >= 0)
return prefix.encode(valueIndex);
return prefix.encode(enumKeys.indexOf(value));
},
fixedSize,
maxSize
};
}
function getScalarEnumDecoder(constructor, config = {}) {
const prefix = config.size ?? getU8Decoder();
const { description, fixedSize, maxSize, minRange, maxRange, isNumericEnum, enumValues } = scalarEnumCoderHelper(
constructor,
prefix,
config.description
);
return {
decode: (bytes, offset = 0) => {
assertByteArrayIsNotEmptyForCodec("enum", bytes, offset);
const [value, newOffset] = prefix.decode(bytes, offset);
const valueAsNumber = Number(value);
offset = newOffset;
if (valueAsNumber < minRange || valueAsNumber > maxRange) {
throw new Error(
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
);
}
return [isNumericEnum ? valueAsNumber : enumValues[valueAsNumber], offset];
},
description,
fixedSize,
maxSize
};
}
function getScalarEnumCodec(constructor, config = {}) {
return combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config));
}
// src/set.ts
function setCodecHelper(item, size, description) {
if (size === "remainder" && item.fixedSize === null) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
}
return {
description: description ?? `set(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
};
}
function getSetEncoder(item, config = {}) {
const size = config.size ?? getU32Encoder();
return {
...setCodecHelper(item, size, config.description),
encode: (set) => {
if (typeof size === "number" && set.size !== size) {
assertValidNumberOfItemsForCodec("set", size, set.size);
}
const itemBytes = Array.from(set, (value) => item.encode(value));
return mergeBytes([getArrayLikeCodecSizePrefix(size, set.size), ...itemBytes]);
}
};
return mapEncoder(getArrayEncoder(item, config), (set) => [...set]);
}
function getSetDecoder(item, config = {}) {
const size = config.size ?? getU32Decoder();
return {
...setCodecHelper(item, size, config.description),
decode: (bytes, offset = 0) => {
const set = /* @__PURE__ */ new Set();
if (typeof size === "object" && bytes.slice(offset).length === 0) {
return [set, offset];
}
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
offset = newOffset;
for (let i = 0; i < resolvedSize; i += 1) {
const [value, newOffset2] = item.decode(bytes, offset);
offset = newOffset2;
set.add(value);
}
return [set, offset];
}
};
return mapDecoder(getArrayDecoder(item, config), (entries) => new Set(entries));
}
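The set codec above now wraps the array codec; a hedged usage sketch with an assumed u8 item codec.

import { getSetCodec } from '@solana/codecs-data-structures';
import { getU8Codec } from '@solana/codecs-numbers';

// Default u32 length prefix, then each unique item in insertion order.
const setCodec = getSetCodec(getU8Codec());
const bytes = setCodec.encode(new Set([1, 2, 3])); // -> 03 00 00 00 01 02 03
const roundTripped = setCodec.decode(bytes);       // Set(3) { 1, 2, 3 }, assuming decode returns the value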

@@ -747,26 +724,31 @@ function getSetCodec(item, config = {}) {

// src/struct.ts
function structCodecHelper(fields, description) {
const fieldDescriptions = fields.map(([name, codec]) => `${String(name)}: ${codec.description}`).join(", ");
return {
description: description ?? `struct(${fieldDescriptions})`,
fixedSize: sumCodecSizes(fields.map(([, field]) => field.fixedSize)),
maxSize: sumCodecSizes(fields.map(([, field]) => field.maxSize))
};
}
function getStructEncoder(fields, config = {}) {
return {
...structCodecHelper(fields, config.description),
encode: (struct) => {
const fieldBytes = fields.map(([key, codec]) => codec.encode(struct[key]));
return mergeBytes(fieldBytes);
function getStructEncoder(fields) {
var _a;
const fieldCodecs = fields.map(([, codec]) => codec);
const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
const maxSize = (_a = sumCodecSizes(fieldCodecs.map(getMaxSize))) != null ? _a : void 0;
return createEncoder({
...fixedSize === null ? {
getSizeFromValue: (value) => fields.map(([key, codec]) => getEncodedSize(value[key], codec)).reduce((all, one) => all + one, 0),
maxSize
} : { fixedSize },
write: (struct, bytes, offset) => {
fields.forEach(([key, codec]) => {
offset = codec.write(struct[key], bytes, offset);
});
return offset;
}
};
});
}
function getStructDecoder(fields, config = {}) {
return {
...structCodecHelper(fields, config.description),
decode: (bytes, offset = 0) => {
function getStructDecoder(fields) {
var _a;
const fieldCodecs = fields.map(([, codec]) => codec);
const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
const maxSize = (_a = sumCodecSizes(fieldCodecs.map(getMaxSize))) != null ? _a : void 0;
return createDecoder({
...fixedSize === null ? { maxSize } : { fixedSize },
read: (bytes, offset) => {
const struct = {};
fields.forEach(([key, codec]) => {
const [value, newOffset] = codec.decode(bytes, offset);
const [value, newOffset] = codec.read(bytes, offset);
offset = newOffset;

@@ -777,76 +759,29 @@ struct[key] = value;

}
};
});
}
function getStructCodec(fields, config = {}) {
return combineCodec(getStructEncoder(fields, config), getStructDecoder(fields, config));
function getStructCodec(fields) {
return combineCodec(getStructEncoder(fields), getStructDecoder(fields));
}
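A hedged sketch of the struct codec above; the field codecs are assumed helpers from '@solana/codecs-numbers'.

import { getStructCodec } from '@solana/codecs-data-structures';
import { getU8Codec, getU32Codec } from '@solana/codecs-numbers';

// Fields are written back to back in declaration order; the resulting codec is
// fixed-size (9 bytes) because every field codec is fixed-size.
const pointCodec = getStructCodec([
  ['x', getU32Codec()],
  ['y', getU32Codec()],
  ['z', getU8Codec()],
]);
pointCodec.encode({ x: 1, y: 2, z: 3 }); // -> 9-byte Uint8Array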
// src/tuple.ts
function tupleCodecHelper(items, description) {
const itemDescriptions = items.map((item) => item.description).join(", ");
return {
description: description ?? `tuple(${itemDescriptions})`,
fixedSize: sumCodecSizes(items.map((item) => item.fixedSize)),
maxSize: sumCodecSizes(items.map((item) => item.maxSize))
};
}
function getTupleEncoder(items, config = {}) {
return {
...tupleCodecHelper(items, config.description),
encode: (value) => {
assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
return mergeBytes(items.map((item, index) => item.encode(value[index])));
}
};
}
function getTupleDecoder(items, config = {}) {
return {
...tupleCodecHelper(items, config.description),
decode: (bytes, offset = 0) => {
const values = [];
items.forEach((codec) => {
const [newValue, newOffset] = codec.decode(bytes, offset);
values.push(newValue);
offset = newOffset;
});
return [values, offset];
}
};
}
function getTupleCodec(items, config = {}) {
return combineCodec(
getTupleEncoder(items, config),
getTupleDecoder(items, config)
);
}
// src/unit.ts
function getUnitEncoder(config = {}) {
return {
description: config.description ?? "unit",
encode: () => new Uint8Array(),
function getUnitEncoder() {
return createEncoder({
fixedSize: 0,
maxSize: 0
};
write: (_value, _bytes, offset) => offset
});
}
function getUnitDecoder(config = {}) {
return {
decode: (_bytes, offset = 0) => [void 0, offset],
description: config.description ?? "unit",
function getUnitDecoder() {
return createDecoder({
fixedSize: 0,
maxSize: 0
};
read: (_bytes, offset) => [void 0, offset]
});
}
function getUnitCodec(config = {}) {
return combineCodec(getUnitEncoder(config), getUnitDecoder(config));
function getUnitCodec() {
return combineCodec(getUnitEncoder(), getUnitDecoder());
}
exports.assertValidNumberOfItemsForCodec = assertValidNumberOfItemsForCodec;
exports.decodeArrayLikeCodecSize = decodeArrayLikeCodecSize;
exports.getArrayCodec = getArrayCodec;
exports.getArrayDecoder = getArrayDecoder;
exports.getArrayEncoder = getArrayEncoder;
exports.getArrayLikeCodecSizeDescription = getArrayLikeCodecSizeDescription;
exports.getArrayLikeCodecSizeFromChildren = getArrayLikeCodecSizeFromChildren;
exports.getArrayLikeCodecSizePrefix = getArrayLikeCodecSizePrefix;
exports.getBitArrayCodec = getBitArrayCodec;

@@ -853,0 +788,0 @@ exports.getBitArrayDecoder = getBitArrayDecoder;

@@ -1,2 +0,2 @@

import { mergeBytes, combineCodec, assertByteArrayHasEnoughBytesForCodec, assertFixedSizeCodec, assertByteArrayIsNotEmptyForCodec, fixEncoder, fixDecoder, fixBytes } from '@solana/codecs-core';
import { assertIsFixedSize, createEncoder, getEncodedSize, createDecoder, combineCodec, assertByteArrayHasEnoughBytesForCodec, mapEncoder, mapDecoder, fixEncoder, fixDecoder, assertByteArrayIsNotEmptyForCodec, isFixedSize } from '@solana/codecs-core';
import { getU32Encoder, getU32Decoder, getU8Encoder, getU8Decoder } from '@solana/codecs-numbers';

@@ -6,3 +6,8 @@

// src/utils.ts
// src/assertions.ts
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
if (expected !== actual) {
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
}
}
function maxCodecSizes(sizes) {

@@ -17,90 +22,64 @@ return sizes.reduce(

}
// src/array-like-codec-size.ts
function decodeArrayLikeCodecSize(size, childrenSizes, bytes, offset) {
if (typeof size === "number") {
return [size, offset];
}
if (typeof size === "object") {
return size.decode(bytes, offset);
}
if (size === "remainder") {
const childrenSize = sumCodecSizes(childrenSizes);
if (childrenSize === null) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
}
const remainder = bytes.slice(offset).length;
if (remainder % childrenSize !== 0) {
throw new Error(
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${childrenSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${childrenSize} should be equal to zero.`
);
}
return [remainder / childrenSize, offset];
}
throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
function getFixedSize(codec) {
return isFixedSize(codec) ? codec.fixedSize : null;
}
function getArrayLikeCodecSizeDescription(size) {
return typeof size === "object" ? size.description : `${size}`;
function getMaxSize(codec) {
return isFixedSize(codec) ? codec.fixedSize : codec.maxSize ?? null;
}
function getArrayLikeCodecSizeFromChildren(size, childrenSizes) {
if (typeof size !== "number")
return null;
if (size === 0)
return 0;
const childrenSize = sumCodecSizes(childrenSizes);
return childrenSize === null ? null : childrenSize * size;
}
function getArrayLikeCodecSizePrefix(size, realSize) {
return typeof size === "object" ? size.encode(realSize) : new Uint8Array();
}
// src/assertions.ts
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
if (expected !== actual) {
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
}
}
// src/array.ts
function arrayCodecHelper(item, size, description) {
if (size === "remainder" && item.fixedSize === null) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
}
return {
description: description ?? `array(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
};
}
function getArrayEncoder(item, config = {}) {
const size = config.size ?? getU32Encoder();
return {
...arrayCodecHelper(item, size, config.description),
encode: (value) => {
if (size === "remainder") {
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
}
const fixedSize = computeArrayLikeCodecSize(size, getFixedSize(item));
const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
return createEncoder({
...fixedSize !== null ? { fixedSize } : {
getSizeFromValue: (array) => {
const prefixSize = typeof size === "object" ? getEncodedSize(array.length, size) : 0;
return prefixSize + [...array].reduce((all, value) => all + getEncodedSize(value, item), 0);
},
maxSize
},
write: (array, bytes, offset) => {
if (typeof size === "number") {
assertValidNumberOfItemsForCodec("array", size, value.length);
assertValidNumberOfItemsForCodec("array", size, array.length);
}
return mergeBytes([getArrayLikeCodecSizePrefix(size, value.length), ...value.map((v) => item.encode(v))]);
if (typeof size === "object") {
offset = size.write(array.length, bytes, offset);
}
array.forEach((value) => {
offset = item.write(value, bytes, offset);
});
return offset;
}
};
});
}
function getArrayDecoder(item, config = {}) {
const size = config.size ?? getU32Decoder();
return {
...arrayCodecHelper(item, size, config.description),
decode: (bytes, offset = 0) => {
if (size === "remainder") {
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
}
const itemSize = getFixedSize(item);
const fixedSize = computeArrayLikeCodecSize(size, itemSize);
const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
return createDecoder({
...fixedSize !== null ? { fixedSize } : { maxSize },
read: (bytes, offset) => {
const array = [];
if (typeof size === "object" && bytes.slice(offset).length === 0) {
return [[], offset];
return [array, offset];
}
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
const [resolvedSize, newOffset] = readArrayLikeCodecSize(size, itemSize, bytes, offset);
offset = newOffset;
const values = [];
for (let i = 0; i < resolvedSize; i += 1) {
const [value, newOffset2] = item.decode(bytes, offset);
values.push(value);
const [value, newOffset2] = item.read(bytes, offset);
offset = newOffset2;
array.push(value);
}
return [values, offset];
return [array, offset];
}
};
});
}

@@ -110,10 +89,37 @@ function getArrayCodec(item, config = {}) {

}
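A hedged usage sketch of the array codec above, covering the three supported size strategies; getU8Codec and getU16Codec are assumed helpers from '@solana/codecs-numbers'.

import { getArrayCodec } from '@solana/codecs-data-structures';
import { getU8Codec, getU16Codec } from '@solana/codecs-numbers';

const prefixed = getArrayCodec(getU8Codec());                        // default u32 length prefix
const custom = getArrayCodec(getU8Codec(), { size: getU16Codec() }); // custom prefix codec
const fixed = getArrayCodec(getU8Codec(), { size: 3 });              // exactly 3 items, no prefix
const greedy = getArrayCodec(getU8Codec(), { size: 'remainder' });   // fixed-size items, read until the end
greedy.encode([1, 2, 3]); // -> 01 02 03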
var getBitArrayEncoder = (size, config = {}) => {
function readArrayLikeCodecSize(size, itemSize, bytes, offset) {
if (typeof size === "number") {
return [size, offset];
}
if (typeof size === "object") {
return size.read(bytes, offset);
}
if (size === "remainder") {
if (itemSize === null) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
}
const remainder = Math.max(0, bytes.length - offset);
if (remainder % itemSize !== 0) {
throw new Error(
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${itemSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${itemSize} should be equal to zero.`
);
}
return [remainder / itemSize, offset];
}
throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
}
function computeArrayLikeCodecSize(size, itemSize) {
if (typeof size !== "number")
return null;
if (size === 0)
return 0;
return itemSize === null ? null : itemSize * size;
}
function getBitArrayEncoder(size, config = {}) {
const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
const backward = parsedConfig.backward ?? false;
const backwardSuffix = backward ? "; backward" : "";
return {
description: parsedConfig.description ?? `bitArray(${size}${backwardSuffix})`,
encode(value) {
const bytes = [];
return createEncoder({
fixedSize: size,
write(value, bytes, offset) {
const bytesToAdd = [];
for (let i = 0; i < size; i += 1) {

@@ -126,19 +132,18 @@ let byte = 0;

if (backward) {
bytes.unshift(byte);
bytesToAdd.unshift(byte);
} else {
bytes.push(byte);
bytesToAdd.push(byte);
}
}
return new Uint8Array(bytes);
},
fixedSize: size,
maxSize: size
};
};
var getBitArrayDecoder = (size, config = {}) => {
bytes.set(bytesToAdd, offset);
return size;
}
});
}
function getBitArrayDecoder(size, config = {}) {
const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
const backward = parsedConfig.backward ?? false;
const backwardSuffix = backward ? "; backward" : "";
return {
decode(bytes, offset = 0) {
return createDecoder({
fixedSize: size,
read(bytes, offset) {
assertByteArrayHasEnoughBytesForCodec("bitArray", size, bytes, offset);

@@ -160,32 +165,17 @@ const booleans = [];

return [booleans, offset + size];
},
description: parsedConfig.description ?? `bitArray(${size}${backwardSuffix})`,
fixedSize: size,
maxSize: size
};
};
var getBitArrayCodec = (size, config = {}) => combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
}
});
}
function getBitArrayCodec(size, config = {}) {
return combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
}
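A hedged example of the bit-array codec above: the size argument is a number of bytes, each packing eight booleans.

import { getBitArrayCodec } from '@solana/codecs-data-structures';

const flagsCodec = getBitArrayCodec(1); // 1 byte = 8 booleans
const bytes = flagsCodec.encode([true, false, false, false, false, false, false, false]);
// -> 80, assuming the usual most-significant-bit-first packing (pass `true` or
// `{ backward: true }` as the second argument to reverse the byte order).
const flags = flagsCodec.decode(bytes); // 8 booleans, assuming decode returns the value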
function getBooleanEncoder(config = {}) {
const size = config.size ?? getU8Encoder();
assertFixedSizeCodec(size, "Codec [bool] requires a fixed size.");
return {
description: config.description ?? `bool(${size.description})`,
encode: (value) => size.encode(value ? 1 : 0),
fixedSize: size.fixedSize,
maxSize: size.fixedSize
};
assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
return mapEncoder(size, (value) => value ? 1 : 0);
}
function getBooleanDecoder(config = {}) {
const size = config.size ?? getU8Decoder();
assertFixedSizeCodec(size, "Codec [bool] requires a fixed size.");
return {
decode: (bytes, offset = 0) => {
assertByteArrayIsNotEmptyForCodec("bool", bytes, offset);
const [value, vOffset] = size.decode(bytes, offset);
return [value === 1, vOffset];
},
description: config.description ?? `bool(${size.description})`,
fixedSize: size.fixedSize,
maxSize: size.fixedSize
};
assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
return mapDecoder(size, (value) => Number(value) === 1);
}
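A hedged sketch of the boolean codec above; it stores the flag in a single u8 by default, but any fixed-size number codec can be supplied.

import { getBooleanCodec } from '@solana/codecs-data-structures';
import { getU32Codec } from '@solana/codecs-numbers';

const boolCodec = getBooleanCodec();                       // 1 byte: 00 or 01
const wideBool = getBooleanCodec({ size: getU32Codec() }); // 4-byte boolean
wideBool.encode(true); // -> 01 00 00 00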

@@ -197,10 +187,9 @@ function getBooleanCodec(config = {}) {

const size = config.size ?? "variable";
const sizeDescription = typeof size === "object" ? size.description : `${size}`;
const description = config.description ?? `bytes(${sizeDescription})`;
const byteEncoder = {
description,
encode: (value) => value,
fixedSize: null,
maxSize: null
};
const byteEncoder = createEncoder({
getSizeFromValue: (value) => value.length,
write: (value, bytes, offset) => {
bytes.set(value, offset);
return offset + value.length;
}
});
if (size === "variable") {

@@ -210,26 +199,20 @@ return byteEncoder;

if (typeof size === "number") {
return fixEncoder(byteEncoder, size, description);
return fixEncoder(byteEncoder, size);
}
return {
...byteEncoder,
encode: (value) => {
const contentBytes = byteEncoder.encode(value);
const lengthBytes = size.encode(contentBytes.length);
return mergeBytes([lengthBytes, contentBytes]);
return createEncoder({
getSizeFromValue: (value) => getEncodedSize(value.length, size) + value.length,
write: (value, bytes, offset) => {
offset = size.write(value.length, bytes, offset);
return byteEncoder.write(value, bytes, offset);
}
};
});
}
function getBytesDecoder(config = {}) {
const size = config.size ?? "variable";
const sizeDescription = typeof size === "object" ? size.description : `${size}`;
const description = config.description ?? `bytes(${sizeDescription})`;
const byteDecoder = {
decode: (bytes, offset = 0) => {
const byteDecoder = createDecoder({
read: (bytes, offset) => {
const slice = bytes.slice(offset);
return [slice, offset + slice.length];
},
description,
fixedSize: null,
maxSize: null
};
}
});
if (size === "variable") {

@@ -239,9 +222,8 @@ return byteDecoder;

if (typeof size === "number") {
return fixDecoder(byteDecoder, size, description);
return fixDecoder(byteDecoder, size);
}
return {
...byteDecoder,
decode: (bytes, offset = 0) => {
return createDecoder({
read: (bytes, offset) => {
assertByteArrayIsNotEmptyForCodec("bytes", bytes, offset);
const [lengthBigInt, lengthOffset] = size.decode(bytes, offset);
const [lengthBigInt, lengthOffset] = size.read(bytes, offset);
const length = Number(lengthBigInt);

@@ -251,7 +233,7 @@ offset = lengthOffset;

assertByteArrayHasEnoughBytesForCodec("bytes", length, contentBytes);
const [value, contentOffset] = byteDecoder.decode(contentBytes);
const [value, contentOffset] = byteDecoder.read(contentBytes, 0);
offset += contentOffset;
return [value, offset];
}
};
});
}

@@ -261,38 +243,30 @@ function getBytesCodec(config = {}) {

}
function dataEnumCodecHelper(variants, prefix, description) {
const fieldDescriptions = variants.map(([name, codec]) => `${String(name)}${codec ? `: ${codec.description}` : ""}`).join(", ");
const allVariantHaveTheSameFixedSize = variants.every((one, _i, all) => one[1].fixedSize === all[0][1].fixedSize);
const fixedVariantSize = allVariantHaveTheSameFixedSize ? variants[0][1].fixedSize : null;
const maxVariantSize = maxCodecSizes(variants.map(([, field]) => field.maxSize));
return {
description: description ?? `dataEnum(${fieldDescriptions}; ${prefix.description})`,
fixedSize: variants.length === 0 ? prefix.fixedSize : sumCodecSizes([prefix.fixedSize, fixedVariantSize]),
maxSize: variants.length === 0 ? prefix.maxSize : sumCodecSizes([prefix.maxSize, maxVariantSize])
};
}
function getDataEnumEncoder(variants, config = {}) {
const prefix = config.size ?? getU8Encoder();
return {
...dataEnumCodecHelper(variants, prefix, config.description),
encode: (variant) => {
const discriminator = variants.findIndex(([key]) => variant.__kind === key);
if (discriminator < 0) {
throw new Error(
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
);
}
const variantPrefix = prefix.encode(discriminator);
const variantSerializer = variants[discriminator][1];
const variantBytes = variantSerializer.encode(variant);
return mergeBytes([variantPrefix, variantBytes]);
const fixedSize = getDataEnumFixedSize(variants, prefix);
return createEncoder({
...fixedSize !== null ? { fixedSize } : {
getSizeFromValue: (variant) => {
const discriminator = getVariantDiscriminator(variants, variant);
const variantEncoder = variants[discriminator][1];
return getEncodedSize(discriminator, prefix) + getEncodedSize(variant, variantEncoder);
},
maxSize: getDataEnumMaxSize(variants, prefix)
},
write: (variant, bytes, offset) => {
const discriminator = getVariantDiscriminator(variants, variant);
offset = prefix.write(discriminator, bytes, offset);
const variantEncoder = variants[discriminator][1];
return variantEncoder.write(variant, bytes, offset);
}
};
});
}
function getDataEnumDecoder(variants, config = {}) {
const prefix = config.size ?? getU8Decoder();
return {
...dataEnumCodecHelper(variants, prefix, config.description),
decode: (bytes, offset = 0) => {
const fixedSize = getDataEnumFixedSize(variants, prefix);
return createDecoder({
...fixedSize !== null ? { fixedSize } : { maxSize: getDataEnumMaxSize(variants, prefix) },
read: (bytes, offset) => {
assertByteArrayIsNotEmptyForCodec("dataEnum", bytes, offset);
const [discriminator, dOffset] = prefix.decode(bytes, offset);
const [discriminator, dOffset] = prefix.read(bytes, offset);
offset = dOffset;

@@ -305,7 +279,7 @@ const variantField = variants[Number(discriminator)] ?? null;

}
const [variant, vOffset] = variantField[1].decode(bytes, offset);
const [variant, vOffset] = variantField[1].read(bytes, offset);
offset = vOffset;
return [{ __kind: variantField[0], ...variant ?? {} }, offset];
}
};
});
}

@@ -315,82 +289,114 @@ function getDataEnumCodec(variants, config = {}) {

}
function mapCodecHelper(key, value, size, description) {
if (size === "remainder" && (key.fixedSize === null || value.fixedSize === null)) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
function getDataEnumFixedSize(variants, prefix) {
if (variants.length === 0)
return isFixedSize(prefix) ? prefix.fixedSize : null;
if (!isFixedSize(variants[0][1]))
return null;
const variantSize = variants[0][1].fixedSize;
const sameSizedVariants = variants.every(
(variant) => isFixedSize(variant[1]) && variant[1].fixedSize === variantSize
);
if (!sameSizedVariants)
return null;
return isFixedSize(prefix) ? prefix.fixedSize + variantSize : null;
}
function getDataEnumMaxSize(variants, prefix) {
const maxVariantSize = maxCodecSizes(variants.map(([, codec]) => getMaxSize(codec)));
return sumCodecSizes([getMaxSize(prefix), maxVariantSize]) ?? void 0;
}
function getVariantDiscriminator(variants, variant) {
const discriminator = variants.findIndex(([key]) => variant.__kind === key);
if (discriminator < 0) {
throw new Error(
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
);
}
return {
description: description ?? `map(${key.description}, ${value.description}; ${getArrayLikeCodecSizeDescription(size)})`,
fixedSize: getArrayLikeCodecSizeFromChildren(size, [key.fixedSize, value.fixedSize]),
maxSize: getArrayLikeCodecSizeFromChildren(size, [key.maxSize, value.maxSize])
};
return discriminator;
}
function getMapEncoder(key, value, config = {}) {
const size = config.size ?? getU32Encoder();
return {
...mapCodecHelper(key, value, size, config.description),
encode: (map) => {
if (typeof size === "number") {
assertValidNumberOfItemsForCodec("map", size, map.size);
}
const itemBytes = Array.from(map, ([k, v]) => mergeBytes([key.encode(k), value.encode(v)]));
return mergeBytes([getArrayLikeCodecSizePrefix(size, map.size), ...itemBytes]);
function getTupleEncoder(items) {
const fixedSize = sumCodecSizes(items.map(getFixedSize));
const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
return createEncoder({
...fixedSize === null ? {
getSizeFromValue: (value) => items.map((item, index) => getEncodedSize(value[index], item)).reduce((all, one) => all + one, 0),
maxSize
} : { fixedSize },
write: (value, bytes, offset) => {
assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
items.forEach((item, index) => {
offset = item.write(value[index], bytes, offset);
});
return offset;
}
};
});
}
function getMapDecoder(key, value, config = {}) {
const size = config.size ?? getU32Decoder();
return {
...mapCodecHelper(key, value, size, config.description),
decode: (bytes, offset = 0) => {
const map = /* @__PURE__ */ new Map();
if (typeof size === "object" && bytes.slice(offset).length === 0) {
return [map, offset];
}
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(
size,
[key.fixedSize, value.fixedSize],
bytes,
offset
);
offset = newOffset;
for (let i = 0; i < resolvedSize; i += 1) {
const [decodedKey, kOffset] = key.decode(bytes, offset);
offset = kOffset;
const [decodedValue, vOffset] = value.decode(bytes, offset);
offset = vOffset;
map.set(decodedKey, decodedValue);
}
return [map, offset];
function getTupleDecoder(items) {
const fixedSize = sumCodecSizes(items.map(getFixedSize));
const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
return createDecoder({
...fixedSize === null ? { maxSize } : { fixedSize },
read: (bytes, offset) => {
const values = [];
items.forEach((item) => {
const [newValue, newOffset] = item.read(bytes, offset);
values.push(newValue);
offset = newOffset;
});
return [values, offset];
}
};
});
}
function getTupleCodec(items) {
return combineCodec(
getTupleEncoder(items),
getTupleDecoder(items)
);
}
// src/map.ts
function getMapEncoder(key, value, config = {}) {
return mapEncoder(
getArrayEncoder(getTupleEncoder([key, value]), config),
(map) => [...map.entries()]
);
}
function getMapDecoder(key, value, config = {}) {
return mapDecoder(
getArrayDecoder(getTupleDecoder([key, value]), config),
(entries) => new Map(entries)
);
}
function getMapCodec(key, value, config = {}) {
return combineCodec(getMapEncoder(key, value, config), getMapDecoder(key, value, config));
}
function nullableCodecHelper(item, prefix, fixed, description) {
let descriptionSuffix = `; ${prefix.description}`;
let fixedSize = item.fixedSize === 0 ? prefix.fixedSize : null;
if (fixed) {
assertFixedSizeCodec(item, "Fixed nullables can only be used with fixed-size codecs.");
assertFixedSizeCodec(prefix, "Fixed nullables can only be used with fixed-size prefix.");
descriptionSuffix += "; fixed";
fixedSize = prefix.fixedSize + item.fixedSize;
}
return {
description: description ?? `nullable(${item.description + descriptionSuffix})`,
fixedSize,
maxSize: sumCodecSizes([prefix.maxSize, item.maxSize])
};
}
function getNullableEncoder(item, config = {}) {
const prefix = config.prefix ?? getU8Encoder();
const fixed = config.fixed ?? false;
return {
...nullableCodecHelper(item, prefix, fixed, config.description),
encode: (option) => {
const prefixByte = prefix.encode(Number(option !== null));
let itemBytes = option !== null ? item.encode(option) : new Uint8Array();
itemBytes = fixed ? fixBytes(itemBytes, item.fixedSize) : itemBytes;
return mergeBytes([prefixByte, itemBytes]);
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
if (fixed || isZeroSizeItem) {
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
const fixedSize = prefix.fixedSize + item.fixedSize;
return createEncoder({
fixedSize,
write: (option, bytes, offset) => {
const prefixOffset = prefix.write(Number(option !== null), bytes, offset);
if (option !== null) {
item.write(option, bytes, prefixOffset);
}
return offset + fixedSize;
}
});
}
return createEncoder({
getSizeFromValue: (option) => getEncodedSize(Number(option !== null), prefix) + (option !== null ? getEncodedSize(option, item) : 0),
maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0,
write: (option, bytes, offset) => {
offset = prefix.write(Number(option !== null), bytes, offset);
if (option !== null) {
offset = item.write(option, bytes, offset);
}
return offset;
}
};
});
}

@@ -400,28 +406,67 @@ function getNullableDecoder(item, config = {}) {

const fixed = config.fixed ?? false;
return {
...nullableCodecHelper(item, prefix, fixed, config.description),
decode: (bytes, offset = 0) => {
let fixedSize = null;
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
if (fixed || isZeroSizeItem) {
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
fixedSize = prefix.fixedSize + item.fixedSize;
}
return createDecoder({
...fixedSize === null ? { maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0 } : { fixedSize },
read: (bytes, offset) => {
if (bytes.length - offset <= 0) {
return [null, offset];
}
const fixedOffset = offset + (prefix.fixedSize ?? 0) + (item.fixedSize ?? 0);
const [isSome, prefixOffset] = prefix.decode(bytes, offset);
offset = prefixOffset;
const [isSome, prefixOffset] = prefix.read(bytes, offset);
if (isSome === 0) {
return [null, fixed ? fixedOffset : offset];
return [null, fixedSize !== null ? offset + fixedSize : prefixOffset];
}
const [value, newOffset] = item.decode(bytes, offset);
offset = newOffset;
return [value, fixed ? fixedOffset : offset];
const [value, newOffset] = item.read(bytes, prefixOffset);
return [value, fixedSize !== null ? offset + fixedSize : newOffset];
}
};
});
}
function getNullableCodec(item, config = {}) {
return combineCodec(getNullableEncoder(item, config), getNullableDecoder(item, config));
const configCast = config;
return combineCodec(getNullableEncoder(item, configCast), getNullableDecoder(item, configCast));
}
function scalarEnumCoderHelper(constructor, prefix, description) {
function getScalarEnumEncoder(constructor, config = {}) {
const prefix = config.size ?? getU8Encoder();
const { minRange, maxRange, stringValues, enumKeys, enumValues } = getScalarEnumStats(constructor);
return mapEncoder(prefix, (value) => {
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
const isInvalidString = typeof value === "string" && !stringValues.includes(value);
if (isInvalidNumber || isInvalidString) {
throw new Error(
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
);
}
if (typeof value === "number")
return value;
const valueIndex = enumValues.indexOf(value);
if (valueIndex >= 0)
return valueIndex;
return enumKeys.indexOf(value);
});
}
function getScalarEnumDecoder(constructor, config = {}) {
const prefix = config.size ?? getU8Decoder();
const { minRange, maxRange, isNumericEnum, enumValues } = getScalarEnumStats(constructor);
return mapDecoder(prefix, (value) => {
const valueAsNumber = Number(value);
if (valueAsNumber < minRange || valueAsNumber > maxRange) {
throw new Error(
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
);
}
return isNumericEnum ? valueAsNumber : enumValues[valueAsNumber];
});
}
function getScalarEnumCodec(constructor, config = {}) {
return combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config));
}
function getScalarEnumStats(constructor) {
const enumKeys = Object.keys(constructor);
const enumValues = Object.values(constructor);
const isNumericEnum = enumValues.some((v) => typeof v === "number");
const valueDescriptions = enumValues.filter((v) => typeof v === "string").join(", ");
const minRange = 0;

@@ -431,9 +476,6 @@ const maxRange = isNumericEnum ? enumValues.length / 2 - 1 : enumValues.length - 1;

return {
description: description ?? `enum(${valueDescriptions}; ${prefix.description})`,
enumKeys,
enumValues,
fixedSize: prefix.fixedSize,
isNumericEnum,
maxRange,
maxSize: prefix.maxSize,
minRange,

@@ -443,96 +485,7 @@ stringValues

}
function getScalarEnumEncoder(constructor, config = {}) {
const prefix = config.size ?? getU8Encoder();
const { description, fixedSize, maxSize, minRange, maxRange, stringValues, enumKeys, enumValues } = scalarEnumCoderHelper(constructor, prefix, config.description);
return {
description,
encode: (value) => {
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
const isInvalidString = typeof value === "string" && !stringValues.includes(value);
if (isInvalidNumber || isInvalidString) {
throw new Error(
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
);
}
if (typeof value === "number")
return prefix.encode(value);
const valueIndex = enumValues.indexOf(value);
if (valueIndex >= 0)
return prefix.encode(valueIndex);
return prefix.encode(enumKeys.indexOf(value));
},
fixedSize,
maxSize
};
}
function getScalarEnumDecoder(constructor, config = {}) {
const prefix = config.size ?? getU8Decoder();
const { description, fixedSize, maxSize, minRange, maxRange, isNumericEnum, enumValues } = scalarEnumCoderHelper(
constructor,
prefix,
config.description
);
return {
decode: (bytes, offset = 0) => {
assertByteArrayIsNotEmptyForCodec("enum", bytes, offset);
const [value, newOffset] = prefix.decode(bytes, offset);
const valueAsNumber = Number(value);
offset = newOffset;
if (valueAsNumber < minRange || valueAsNumber > maxRange) {
throw new Error(
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
);
}
return [isNumericEnum ? valueAsNumber : enumValues[valueAsNumber], offset];
},
description,
fixedSize,
maxSize
};
}
function getScalarEnumCodec(constructor, config = {}) {
return combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config));
}
function setCodecHelper(item, size, description) {
if (size === "remainder" && item.fixedSize === null) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
}
return {
description: description ?? `set(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
};
}
function getSetEncoder(item, config = {}) {
const size = config.size ?? getU32Encoder();
return {
...setCodecHelper(item, size, config.description),
encode: (set) => {
if (typeof size === "number" && set.size !== size) {
assertValidNumberOfItemsForCodec("set", size, set.size);
}
const itemBytes = Array.from(set, (value) => item.encode(value));
return mergeBytes([getArrayLikeCodecSizePrefix(size, set.size), ...itemBytes]);
}
};
return mapEncoder(getArrayEncoder(item, config), (set) => [...set]);
}
function getSetDecoder(item, config = {}) {
const size = config.size ?? getU32Decoder();
return {
...setCodecHelper(item, size, config.description),
decode: (bytes, offset = 0) => {
const set = /* @__PURE__ */ new Set();
if (typeof size === "object" && bytes.slice(offset).length === 0) {
return [set, offset];
}
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
offset = newOffset;
for (let i = 0; i < resolvedSize; i += 1) {
const [value, newOffset2] = item.decode(bytes, offset);
offset = newOffset2;
set.add(value);
}
return [set, offset];
}
};
return mapDecoder(getArrayDecoder(item, config), (entries) => new Set(entries));
}

@@ -542,26 +495,29 @@ function getSetCodec(item, config = {}) {

}
function structCodecHelper(fields, description) {
const fieldDescriptions = fields.map(([name, codec]) => `${String(name)}: ${codec.description}`).join(", ");
return {
description: description ?? `struct(${fieldDescriptions})`,
fixedSize: sumCodecSizes(fields.map(([, field]) => field.fixedSize)),
maxSize: sumCodecSizes(fields.map(([, field]) => field.maxSize))
};
}
function getStructEncoder(fields, config = {}) {
return {
...structCodecHelper(fields, config.description),
encode: (struct) => {
const fieldBytes = fields.map(([key, codec]) => codec.encode(struct[key]));
return mergeBytes(fieldBytes);
function getStructEncoder(fields) {
const fieldCodecs = fields.map(([, codec]) => codec);
const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0;
return createEncoder({
...fixedSize === null ? {
getSizeFromValue: (value) => fields.map(([key, codec]) => getEncodedSize(value[key], codec)).reduce((all, one) => all + one, 0),
maxSize
} : { fixedSize },
write: (struct, bytes, offset) => {
fields.forEach(([key, codec]) => {
offset = codec.write(struct[key], bytes, offset);
});
return offset;
}
};
});
}
function getStructDecoder(fields, config = {}) {
return {
...structCodecHelper(fields, config.description),
decode: (bytes, offset = 0) => {
function getStructDecoder(fields) {
const fieldCodecs = fields.map(([, codec]) => codec);
const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0;
return createDecoder({
...fixedSize === null ? { maxSize } : { fixedSize },
read: (bytes, offset) => {
const struct = {};
fields.forEach(([key, codec]) => {
const [value, newOffset] = codec.decode(bytes, offset);
const [value, newOffset] = codec.read(bytes, offset);
offset = newOffset;

@@ -572,66 +528,25 @@ struct[key] = value;

}
};
});
}
function getStructCodec(fields, config = {}) {
return combineCodec(getStructEncoder(fields, config), getStructDecoder(fields, config));
function getStructCodec(fields) {
return combineCodec(getStructEncoder(fields), getStructDecoder(fields));
}
function tupleCodecHelper(items, description) {
const itemDescriptions = items.map((item) => item.description).join(", ");
return {
description: description ?? `tuple(${itemDescriptions})`,
fixedSize: sumCodecSizes(items.map((item) => item.fixedSize)),
maxSize: sumCodecSizes(items.map((item) => item.maxSize))
};
}
function getTupleEncoder(items, config = {}) {
return {
...tupleCodecHelper(items, config.description),
encode: (value) => {
assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
return mergeBytes(items.map((item, index) => item.encode(value[index])));
}
};
}
function getTupleDecoder(items, config = {}) {
return {
...tupleCodecHelper(items, config.description),
decode: (bytes, offset = 0) => {
const values = [];
items.forEach((codec) => {
const [newValue, newOffset] = codec.decode(bytes, offset);
values.push(newValue);
offset = newOffset;
});
return [values, offset];
}
};
}
function getTupleCodec(items, config = {}) {
return combineCodec(
getTupleEncoder(items, config),
getTupleDecoder(items, config)
);
}
function getUnitEncoder(config = {}) {
return {
description: config.description ?? "unit",
encode: () => new Uint8Array(),
function getUnitEncoder() {
return createEncoder({
fixedSize: 0,
maxSize: 0
};
write: (_value, _bytes, offset) => offset
});
}
function getUnitDecoder(config = {}) {
return {
decode: (_bytes, offset = 0) => [void 0, offset],
description: config.description ?? "unit",
function getUnitDecoder() {
return createDecoder({
fixedSize: 0,
maxSize: 0
};
read: (_bytes, offset) => [void 0, offset]
});
}
function getUnitCodec(config = {}) {
return combineCodec(getUnitEncoder(config), getUnitDecoder(config));
function getUnitCodec() {
return combineCodec(getUnitEncoder(), getUnitDecoder());
}
export { assertValidNumberOfItemsForCodec, decodeArrayLikeCodecSize, getArrayCodec, getArrayDecoder, getArrayEncoder, getArrayLikeCodecSizeDescription, getArrayLikeCodecSizeFromChildren, getArrayLikeCodecSizePrefix, getBitArrayCodec, getBitArrayDecoder, getBitArrayEncoder, getBooleanCodec, getBooleanDecoder, getBooleanEncoder, getBytesCodec, getBytesDecoder, getBytesEncoder, getDataEnumCodec, getDataEnumDecoder, getDataEnumEncoder, getMapCodec, getMapDecoder, getMapEncoder, getNullableCodec, getNullableDecoder, getNullableEncoder, getScalarEnumCodec, getScalarEnumDecoder, getScalarEnumEncoder, getSetCodec, getSetDecoder, getSetEncoder, getStructCodec, getStructDecoder, getStructEncoder, getTupleCodec, getTupleDecoder, getTupleEncoder, getUnitCodec, getUnitDecoder, getUnitEncoder };
export { assertValidNumberOfItemsForCodec, getArrayCodec, getArrayDecoder, getArrayEncoder, getBitArrayCodec, getBitArrayDecoder, getBitArrayEncoder, getBooleanCodec, getBooleanDecoder, getBooleanEncoder, getBytesCodec, getBytesDecoder, getBytesEncoder, getDataEnumCodec, getDataEnumDecoder, getDataEnumEncoder, getMapCodec, getMapDecoder, getMapEncoder, getNullableCodec, getNullableDecoder, getNullableEncoder, getScalarEnumCodec, getScalarEnumDecoder, getScalarEnumEncoder, getSetCodec, getSetDecoder, getSetEncoder, getStructCodec, getStructDecoder, getStructEncoder, getTupleCodec, getTupleDecoder, getTupleEncoder, getUnitCodec, getUnitDecoder, getUnitEncoder };
//# sourceMappingURL=out.js.map
//# sourceMappingURL=index.native.js.map

@@ -1,2 +0,2 @@

import { mergeBytes, combineCodec, assertByteArrayHasEnoughBytesForCodec, assertFixedSizeCodec, assertByteArrayIsNotEmptyForCodec, fixEncoder, fixDecoder, fixBytes } from '@solana/codecs-core';
import { assertIsFixedSize, createEncoder, getEncodedSize, createDecoder, combineCodec, assertByteArrayHasEnoughBytesForCodec, mapEncoder, mapDecoder, fixEncoder, fixDecoder, assertByteArrayIsNotEmptyForCodec, isFixedSize } from '@solana/codecs-core';
import { getU32Encoder, getU32Decoder, getU8Encoder, getU8Decoder } from '@solana/codecs-numbers';

@@ -6,3 +6,8 @@

// src/utils.ts
// src/assertions.ts
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
if (expected !== actual) {
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
}
}
function maxCodecSizes(sizes) {

@@ -17,90 +22,64 @@ return sizes.reduce(

}
// src/array-like-codec-size.ts
function decodeArrayLikeCodecSize(size, childrenSizes, bytes, offset) {
if (typeof size === "number") {
return [size, offset];
}
if (typeof size === "object") {
return size.decode(bytes, offset);
}
if (size === "remainder") {
const childrenSize = sumCodecSizes(childrenSizes);
if (childrenSize === null) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
}
const remainder = bytes.slice(offset).length;
if (remainder % childrenSize !== 0) {
throw new Error(
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${childrenSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${childrenSize} should be equal to zero.`
);
}
return [remainder / childrenSize, offset];
}
throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
function getFixedSize(codec) {
return isFixedSize(codec) ? codec.fixedSize : null;
}
function getArrayLikeCodecSizeDescription(size) {
return typeof size === "object" ? size.description : `${size}`;
function getMaxSize(codec) {
return isFixedSize(codec) ? codec.fixedSize : codec.maxSize ?? null;
}
function getArrayLikeCodecSizeFromChildren(size, childrenSizes) {
if (typeof size !== "number")
return null;
if (size === 0)
return 0;
const childrenSize = sumCodecSizes(childrenSizes);
return childrenSize === null ? null : childrenSize * size;
}
function getArrayLikeCodecSizePrefix(size, realSize) {
return typeof size === "object" ? size.encode(realSize) : new Uint8Array();
}
// src/assertions.ts
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) {
if (expected !== actual) {
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`);
}
}
// src/array.ts
function arrayCodecHelper(item, size, description) {
if (size === "remainder" && item.fixedSize === null) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
}
return {
description: description ?? `array(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
};
}
function getArrayEncoder(item, config = {}) {
const size = config.size ?? getU32Encoder();
return {
...arrayCodecHelper(item, size, config.description),
encode: (value) => {
if (size === "remainder") {
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
}
const fixedSize = computeArrayLikeCodecSize(size, getFixedSize(item));
const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
return createEncoder({
...fixedSize !== null ? { fixedSize } : {
getSizeFromValue: (array) => {
const prefixSize = typeof size === "object" ? getEncodedSize(array.length, size) : 0;
return prefixSize + [...array].reduce((all, value) => all + getEncodedSize(value, item), 0);
},
maxSize
},
write: (array, bytes, offset) => {
if (typeof size === "number") {
assertValidNumberOfItemsForCodec("array", size, value.length);
assertValidNumberOfItemsForCodec("array", size, array.length);
}
return mergeBytes([getArrayLikeCodecSizePrefix(size, value.length), ...value.map((v) => item.encode(v))]);
if (typeof size === "object") {
offset = size.write(array.length, bytes, offset);
}
array.forEach((value) => {
offset = item.write(value, bytes, offset);
});
return offset;
}
};
});
}
function getArrayDecoder(item, config = {}) {
const size = config.size ?? getU32Decoder();
return {
...arrayCodecHelper(item, size, config.description),
decode: (bytes, offset = 0) => {
if (size === "remainder") {
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.');
}
const itemSize = getFixedSize(item);
const fixedSize = computeArrayLikeCodecSize(size, itemSize);
const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0;
return createDecoder({
...fixedSize !== null ? { fixedSize } : { maxSize },
read: (bytes, offset) => {
const array = [];
if (typeof size === "object" && bytes.slice(offset).length === 0) {
return [[], offset];
return [array, offset];
}
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
const [resolvedSize, newOffset] = readArrayLikeCodecSize(size, itemSize, bytes, offset);
offset = newOffset;
const values = [];
for (let i = 0; i < resolvedSize; i += 1) {
const [value, newOffset2] = item.decode(bytes, offset);
values.push(value);
const [value, newOffset2] = item.read(bytes, offset);
offset = newOffset2;
array.push(value);
}
return [values, offset];
return [array, offset];
}
};
});
}

@@ -110,10 +89,37 @@ function getArrayCodec(item, config = {}) {

}
var getBitArrayEncoder = (size, config = {}) => {
function readArrayLikeCodecSize(size, itemSize, bytes, offset) {
if (typeof size === "number") {
return [size, offset];
}
if (typeof size === "object") {
return size.read(bytes, offset);
}
if (size === "remainder") {
if (itemSize === null) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
}
const remainder = Math.max(0, bytes.length - offset);
if (remainder % itemSize !== 0) {
throw new Error(
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${itemSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${itemSize} should be equal to zero.`
);
}
return [remainder / itemSize, offset];
}
throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`);
}
function computeArrayLikeCodecSize(size, itemSize) {
if (typeof size !== "number")
return null;
if (size === 0)
return 0;
return itemSize === null ? null : itemSize * size;
}
function getBitArrayEncoder(size, config = {}) {
const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
const backward = parsedConfig.backward ?? false;
const backwardSuffix = backward ? "; backward" : "";
return {
description: parsedConfig.description ?? `bitArray(${size}${backwardSuffix})`,
encode(value) {
const bytes = [];
return createEncoder({
fixedSize: size,
write(value, bytes, offset) {
const bytesToAdd = [];
for (let i = 0; i < size; i += 1) {

@@ -126,19 +132,18 @@ let byte = 0;

if (backward) {
bytes.unshift(byte);
bytesToAdd.unshift(byte);
} else {
bytes.push(byte);
bytesToAdd.push(byte);
}
}
return new Uint8Array(bytes);
},
fixedSize: size,
maxSize: size
};
};
var getBitArrayDecoder = (size, config = {}) => {
bytes.set(bytesToAdd, offset);
return size;
}
});
}
function getBitArrayDecoder(size, config = {}) {
const parsedConfig = typeof config === "boolean" ? { backward: config } : config;
const backward = parsedConfig.backward ?? false;
const backwardSuffix = backward ? "; backward" : "";
return {
decode(bytes, offset = 0) {
return createDecoder({
fixedSize: size,
read(bytes, offset) {
assertByteArrayHasEnoughBytesForCodec("bitArray", size, bytes, offset);

@@ -160,32 +165,17 @@ const booleans = [];

return [booleans, offset + size];
},
description: parsedConfig.description ?? `bitArray(${size}${backwardSuffix})`,
fixedSize: size,
maxSize: size
};
};
var getBitArrayCodec = (size, config = {}) => combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
}
});
}
function getBitArrayCodec(size, config = {}) {
return combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config));
}
function getBooleanEncoder(config = {}) {
const size = config.size ?? getU8Encoder();
assertFixedSizeCodec(size, "Codec [bool] requires a fixed size.");
return {
description: config.description ?? `bool(${size.description})`,
encode: (value) => size.encode(value ? 1 : 0),
fixedSize: size.fixedSize,
maxSize: size.fixedSize
};
assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
return mapEncoder(size, (value) => value ? 1 : 0);
}
function getBooleanDecoder(config = {}) {
const size = config.size ?? getU8Decoder();
assertFixedSizeCodec(size, "Codec [bool] requires a fixed size.");
return {
decode: (bytes, offset = 0) => {
assertByteArrayIsNotEmptyForCodec("bool", bytes, offset);
const [value, vOffset] = size.decode(bytes, offset);
return [value === 1, vOffset];
},
description: config.description ?? `bool(${size.description})`,
fixedSize: size.fixedSize,
maxSize: size.fixedSize
};
assertIsFixedSize(size, "Codec [bool] requires a fixed size.");
return mapDecoder(size, (value) => Number(value) === 1);
}

@@ -197,10 +187,9 @@ function getBooleanCodec(config = {}) {

const size = config.size ?? "variable";
const sizeDescription = typeof size === "object" ? size.description : `${size}`;
const description = config.description ?? `bytes(${sizeDescription})`;
const byteEncoder = {
description,
encode: (value) => value,
fixedSize: null,
maxSize: null
};
const byteEncoder = createEncoder({
getSizeFromValue: (value) => value.length,
write: (value, bytes, offset) => {
bytes.set(value, offset);
return offset + value.length;
}
});
if (size === "variable") {

@@ -210,26 +199,20 @@ return byteEncoder;

if (typeof size === "number") {
return fixEncoder(byteEncoder, size, description);
return fixEncoder(byteEncoder, size);
}
return {
...byteEncoder,
encode: (value) => {
const contentBytes = byteEncoder.encode(value);
const lengthBytes = size.encode(contentBytes.length);
return mergeBytes([lengthBytes, contentBytes]);
return createEncoder({
getSizeFromValue: (value) => getEncodedSize(value.length, size) + value.length,
write: (value, bytes, offset) => {
offset = size.write(value.length, bytes, offset);
return byteEncoder.write(value, bytes, offset);
}
};
});
}
function getBytesDecoder(config = {}) {
const size = config.size ?? "variable";
const sizeDescription = typeof size === "object" ? size.description : `${size}`;
const description = config.description ?? `bytes(${sizeDescription})`;
const byteDecoder = {
decode: (bytes, offset = 0) => {
const byteDecoder = createDecoder({
read: (bytes, offset) => {
const slice = bytes.slice(offset);
return [slice, offset + slice.length];
},
description,
fixedSize: null,
maxSize: null
};
}
});
if (size === "variable") {

@@ -239,9 +222,8 @@ return byteDecoder;

if (typeof size === "number") {
return fixDecoder(byteDecoder, size, description);
return fixDecoder(byteDecoder, size);
}
return {
...byteDecoder,
decode: (bytes, offset = 0) => {
return createDecoder({
read: (bytes, offset) => {
assertByteArrayIsNotEmptyForCodec("bytes", bytes, offset);
const [lengthBigInt, lengthOffset] = size.decode(bytes, offset);
const [lengthBigInt, lengthOffset] = size.read(bytes, offset);
const length = Number(lengthBigInt);

@@ -251,7 +233,7 @@ offset = lengthOffset;

assertByteArrayHasEnoughBytesForCodec("bytes", length, contentBytes);
const [value, contentOffset] = byteDecoder.decode(contentBytes);
const [value, contentOffset] = byteDecoder.read(contentBytes, 0);
offset += contentOffset;
return [value, offset];
}
};
});
}

@@ -261,38 +243,30 @@ function getBytesCodec(config = {}) {

}
function dataEnumCodecHelper(variants, prefix, description) {
const fieldDescriptions = variants.map(([name, codec]) => `${String(name)}${codec ? `: ${codec.description}` : ""}`).join(", ");
const allVariantHaveTheSameFixedSize = variants.every((one, _i, all) => one[1].fixedSize === all[0][1].fixedSize);
const fixedVariantSize = allVariantHaveTheSameFixedSize ? variants[0][1].fixedSize : null;
const maxVariantSize = maxCodecSizes(variants.map(([, field]) => field.maxSize));
return {
description: description ?? `dataEnum(${fieldDescriptions}; ${prefix.description})`,
fixedSize: variants.length === 0 ? prefix.fixedSize : sumCodecSizes([prefix.fixedSize, fixedVariantSize]),
maxSize: variants.length === 0 ? prefix.maxSize : sumCodecSizes([prefix.maxSize, maxVariantSize])
};
}
function getDataEnumEncoder(variants, config = {}) {
const prefix = config.size ?? getU8Encoder();
return {
...dataEnumCodecHelper(variants, prefix, config.description),
encode: (variant) => {
const discriminator = variants.findIndex(([key]) => variant.__kind === key);
if (discriminator < 0) {
throw new Error(
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
);
}
const variantPrefix = prefix.encode(discriminator);
const variantSerializer = variants[discriminator][1];
const variantBytes = variantSerializer.encode(variant);
return mergeBytes([variantPrefix, variantBytes]);
const fixedSize = getDataEnumFixedSize(variants, prefix);
return createEncoder({
...fixedSize !== null ? { fixedSize } : {
getSizeFromValue: (variant) => {
const discriminator = getVariantDiscriminator(variants, variant);
const variantEncoder = variants[discriminator][1];
return getEncodedSize(discriminator, prefix) + getEncodedSize(variant, variantEncoder);
},
maxSize: getDataEnumMaxSize(variants, prefix)
},
write: (variant, bytes, offset) => {
const discriminator = getVariantDiscriminator(variants, variant);
offset = prefix.write(discriminator, bytes, offset);
const variantEncoder = variants[discriminator][1];
return variantEncoder.write(variant, bytes, offset);
}
};
});
}
function getDataEnumDecoder(variants, config = {}) {
const prefix = config.size ?? getU8Decoder();
return {
...dataEnumCodecHelper(variants, prefix, config.description),
decode: (bytes, offset = 0) => {
const fixedSize = getDataEnumFixedSize(variants, prefix);
return createDecoder({
...fixedSize !== null ? { fixedSize } : { maxSize: getDataEnumMaxSize(variants, prefix) },
read: (bytes, offset) => {
assertByteArrayIsNotEmptyForCodec("dataEnum", bytes, offset);
const [discriminator, dOffset] = prefix.decode(bytes, offset);
const [discriminator, dOffset] = prefix.read(bytes, offset);
offset = dOffset;

@@ -305,7 +279,7 @@ const variantField = variants[Number(discriminator)] ?? null;

}
const [variant, vOffset] = variantField[1].decode(bytes, offset);
const [variant, vOffset] = variantField[1].read(bytes, offset);
offset = vOffset;
return [{ __kind: variantField[0], ...variant ?? {} }, offset];
}
};
});
}

@@ -315,82 +289,114 @@ function getDataEnumCodec(variants, config = {}) {

}
function mapCodecHelper(key, value, size, description) {
if (size === "remainder" && (key.fixedSize === null || value.fixedSize === null)) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
function getDataEnumFixedSize(variants, prefix) {
if (variants.length === 0)
return isFixedSize(prefix) ? prefix.fixedSize : null;
if (!isFixedSize(variants[0][1]))
return null;
const variantSize = variants[0][1].fixedSize;
const sameSizedVariants = variants.every(
(variant) => isFixedSize(variant[1]) && variant[1].fixedSize === variantSize
);
if (!sameSizedVariants)
return null;
return isFixedSize(prefix) ? prefix.fixedSize + variantSize : null;
}
function getDataEnumMaxSize(variants, prefix) {
const maxVariantSize = maxCodecSizes(variants.map(([, codec]) => getMaxSize(codec)));
return sumCodecSizes([getMaxSize(prefix), maxVariantSize]) ?? void 0;
}
function getVariantDiscriminator(variants, variant) {
const discriminator = variants.findIndex(([key]) => variant.__kind === key);
if (discriminator < 0) {
throw new Error(
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".`
);
}
return {
description: description ?? `map(${key.description}, ${value.description}; ${getArrayLikeCodecSizeDescription(size)})`,
fixedSize: getArrayLikeCodecSizeFromChildren(size, [key.fixedSize, value.fixedSize]),
maxSize: getArrayLikeCodecSizeFromChildren(size, [key.maxSize, value.maxSize])
};
return discriminator;
}
function getMapEncoder(key, value, config = {}) {
const size = config.size ?? getU32Encoder();
return {
...mapCodecHelper(key, value, size, config.description),
encode: (map) => {
if (typeof size === "number") {
assertValidNumberOfItemsForCodec("map", size, map.size);
}
const itemBytes = Array.from(map, ([k, v]) => mergeBytes([key.encode(k), value.encode(v)]));
return mergeBytes([getArrayLikeCodecSizePrefix(size, map.size), ...itemBytes]);
function getTupleEncoder(items) {
const fixedSize = sumCodecSizes(items.map(getFixedSize));
const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
return createEncoder({
...fixedSize === null ? {
getSizeFromValue: (value) => items.map((item, index) => getEncodedSize(value[index], item)).reduce((all, one) => all + one, 0),
maxSize
} : { fixedSize },
write: (value, bytes, offset) => {
assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
items.forEach((item, index) => {
offset = item.write(value[index], bytes, offset);
});
return offset;
}
};
});
}
function getMapDecoder(key, value, config = {}) {
const size = config.size ?? getU32Decoder();
return {
...mapCodecHelper(key, value, size, config.description),
decode: (bytes, offset = 0) => {
const map = /* @__PURE__ */ new Map();
if (typeof size === "object" && bytes.slice(offset).length === 0) {
return [map, offset];
}
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(
size,
[key.fixedSize, value.fixedSize],
bytes,
offset
);
offset = newOffset;
for (let i = 0; i < resolvedSize; i += 1) {
const [decodedKey, kOffset] = key.decode(bytes, offset);
offset = kOffset;
const [decodedValue, vOffset] = value.decode(bytes, offset);
offset = vOffset;
map.set(decodedKey, decodedValue);
}
return [map, offset];
function getTupleDecoder(items) {
const fixedSize = sumCodecSizes(items.map(getFixedSize));
const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0;
return createDecoder({
...fixedSize === null ? { maxSize } : { fixedSize },
read: (bytes, offset) => {
const values = [];
items.forEach((item) => {
const [newValue, newOffset] = item.read(bytes, offset);
values.push(newValue);
offset = newOffset;
});
return [values, offset];
}
};
});
}
function getTupleCodec(items) {
return combineCodec(
getTupleEncoder(items),
getTupleDecoder(items)
);
}
// src/map.ts
function getMapEncoder(key, value, config = {}) {
return mapEncoder(
getArrayEncoder(getTupleEncoder([key, value]), config),
(map) => [...map.entries()]
);
}
function getMapDecoder(key, value, config = {}) {
return mapDecoder(
getArrayDecoder(getTupleDecoder([key, value]), config),
(entries) => new Map(entries)
);
}
function getMapCodec(key, value, config = {}) {
return combineCodec(getMapEncoder(key, value, config), getMapDecoder(key, value, config));
}
function nullableCodecHelper(item, prefix, fixed, description) {
let descriptionSuffix = `; ${prefix.description}`;
let fixedSize = item.fixedSize === 0 ? prefix.fixedSize : null;
if (fixed) {
assertFixedSizeCodec(item, "Fixed nullables can only be used with fixed-size codecs.");
assertFixedSizeCodec(prefix, "Fixed nullables can only be used with fixed-size prefix.");
descriptionSuffix += "; fixed";
fixedSize = prefix.fixedSize + item.fixedSize;
}
return {
description: description ?? `nullable(${item.description + descriptionSuffix})`,
fixedSize,
maxSize: sumCodecSizes([prefix.maxSize, item.maxSize])
};
}
function getNullableEncoder(item, config = {}) {
const prefix = config.prefix ?? getU8Encoder();
const fixed = config.fixed ?? false;
return {
...nullableCodecHelper(item, prefix, fixed, config.description),
encode: (option) => {
const prefixByte = prefix.encode(Number(option !== null));
let itemBytes = option !== null ? item.encode(option) : new Uint8Array();
itemBytes = fixed ? fixBytes(itemBytes, item.fixedSize) : itemBytes;
return mergeBytes([prefixByte, itemBytes]);
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
if (fixed || isZeroSizeItem) {
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
const fixedSize = prefix.fixedSize + item.fixedSize;
return createEncoder({
fixedSize,
write: (option, bytes, offset) => {
const prefixOffset = prefix.write(Number(option !== null), bytes, offset);
if (option !== null) {
item.write(option, bytes, prefixOffset);
}
return offset + fixedSize;
}
});
}
return createEncoder({
getSizeFromValue: (option) => getEncodedSize(Number(option !== null), prefix) + (option !== null ? getEncodedSize(option, item) : 0),
maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0,
write: (option, bytes, offset) => {
offset = prefix.write(Number(option !== null), bytes, offset);
if (option !== null) {
offset = item.write(option, bytes, offset);
}
return offset;
}
};
});
}

@@ -400,28 +406,67 @@ function getNullableDecoder(item, config = {}) {

const fixed = config.fixed ?? false;
return {
...nullableCodecHelper(item, prefix, fixed, config.description),
decode: (bytes, offset = 0) => {
let fixedSize = null;
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0;
if (fixed || isZeroSizeItem) {
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs.");
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix.");
fixedSize = prefix.fixedSize + item.fixedSize;
}
return createDecoder({
...fixedSize === null ? { maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0 } : { fixedSize },
read: (bytes, offset) => {
if (bytes.length - offset <= 0) {
return [null, offset];
}
const fixedOffset = offset + (prefix.fixedSize ?? 0) + (item.fixedSize ?? 0);
const [isSome, prefixOffset] = prefix.decode(bytes, offset);
offset = prefixOffset;
const [isSome, prefixOffset] = prefix.read(bytes, offset);
if (isSome === 0) {
return [null, fixed ? fixedOffset : offset];
return [null, fixedSize !== null ? offset + fixedSize : prefixOffset];
}
const [value, newOffset] = item.decode(bytes, offset);
offset = newOffset;
return [value, fixed ? fixedOffset : offset];
const [value, newOffset] = item.read(bytes, prefixOffset);
return [value, fixedSize !== null ? offset + fixedSize : newOffset];
}
};
});
}
function getNullableCodec(item, config = {}) {
return combineCodec(getNullableEncoder(item, config), getNullableDecoder(item, config));
const configCast = config;
return combineCodec(getNullableEncoder(item, configCast), getNullableDecoder(item, configCast));
}
function scalarEnumCoderHelper(constructor, prefix, description) {
function getScalarEnumEncoder(constructor, config = {}) {
const prefix = config.size ?? getU8Encoder();
const { minRange, maxRange, stringValues, enumKeys, enumValues } = getScalarEnumStats(constructor);
return mapEncoder(prefix, (value) => {
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
const isInvalidString = typeof value === "string" && !stringValues.includes(value);
if (isInvalidNumber || isInvalidString) {
throw new Error(
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
);
}
if (typeof value === "number")
return value;
const valueIndex = enumValues.indexOf(value);
if (valueIndex >= 0)
return valueIndex;
return enumKeys.indexOf(value);
});
}
function getScalarEnumDecoder(constructor, config = {}) {
const prefix = config.size ?? getU8Decoder();
const { minRange, maxRange, isNumericEnum, enumValues } = getScalarEnumStats(constructor);
return mapDecoder(prefix, (value) => {
const valueAsNumber = Number(value);
if (valueAsNumber < minRange || valueAsNumber > maxRange) {
throw new Error(
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
);
}
return isNumericEnum ? valueAsNumber : enumValues[valueAsNumber];
});
}
function getScalarEnumCodec(constructor, config = {}) {
return combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config));
}
function getScalarEnumStats(constructor) {
const enumKeys = Object.keys(constructor);
const enumValues = Object.values(constructor);
const isNumericEnum = enumValues.some((v) => typeof v === "number");
const valueDescriptions = enumValues.filter((v) => typeof v === "string").join(", ");
const minRange = 0;

@@ -431,9 +476,6 @@ const maxRange = isNumericEnum ? enumValues.length / 2 - 1 : enumValues.length - 1;

return {
description: description ?? `enum(${valueDescriptions}; ${prefix.description})`,
enumKeys,
enumValues,
fixedSize: prefix.fixedSize,
isNumericEnum,
maxRange,
maxSize: prefix.maxSize,
minRange,

@@ -443,96 +485,7 @@ stringValues

}
function getScalarEnumEncoder(constructor, config = {}) {
const prefix = config.size ?? getU8Encoder();
const { description, fixedSize, maxSize, minRange, maxRange, stringValues, enumKeys, enumValues } = scalarEnumCoderHelper(constructor, prefix, config.description);
return {
description,
encode: (value) => {
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange);
const isInvalidString = typeof value === "string" && !stringValues.includes(value);
if (isInvalidNumber || isInvalidString) {
throw new Error(
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".`
);
}
if (typeof value === "number")
return prefix.encode(value);
const valueIndex = enumValues.indexOf(value);
if (valueIndex >= 0)
return prefix.encode(valueIndex);
return prefix.encode(enumKeys.indexOf(value));
},
fixedSize,
maxSize
};
}
function getScalarEnumDecoder(constructor, config = {}) {
const prefix = config.size ?? getU8Decoder();
const { description, fixedSize, maxSize, minRange, maxRange, isNumericEnum, enumValues } = scalarEnumCoderHelper(
constructor,
prefix,
config.description
);
return {
decode: (bytes, offset = 0) => {
assertByteArrayIsNotEmptyForCodec("enum", bytes, offset);
const [value, newOffset] = prefix.decode(bytes, offset);
const valueAsNumber = Number(value);
offset = newOffset;
if (valueAsNumber < minRange || valueAsNumber > maxRange) {
throw new Error(
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.`
);
}
return [isNumericEnum ? valueAsNumber : enumValues[valueAsNumber], offset];
},
description,
fixedSize,
maxSize
};
}
function getScalarEnumCodec(constructor, config = {}) {
return combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config));
}
function setCodecHelper(item, size, description) {
if (size === "remainder" && item.fixedSize === null) {
throw new Error('Codecs of "remainder" size must have fixed-size items.');
}
return {
description: description ?? `set(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`,
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]),
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize])
};
}
function getSetEncoder(item, config = {}) {
const size = config.size ?? getU32Encoder();
return {
...setCodecHelper(item, size, config.description),
encode: (set) => {
if (typeof size === "number" && set.size !== size) {
assertValidNumberOfItemsForCodec("set", size, set.size);
}
const itemBytes = Array.from(set, (value) => item.encode(value));
return mergeBytes([getArrayLikeCodecSizePrefix(size, set.size), ...itemBytes]);
}
};
return mapEncoder(getArrayEncoder(item, config), (set) => [...set]);
}
function getSetDecoder(item, config = {}) {
const size = config.size ?? getU32Decoder();
return {
...setCodecHelper(item, size, config.description),
decode: (bytes, offset = 0) => {
const set = /* @__PURE__ */ new Set();
if (typeof size === "object" && bytes.slice(offset).length === 0) {
return [set, offset];
}
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset);
offset = newOffset;
for (let i = 0; i < resolvedSize; i += 1) {
const [value, newOffset2] = item.decode(bytes, offset);
offset = newOffset2;
set.add(value);
}
return [set, offset];
}
};
return mapDecoder(getArrayDecoder(item, config), (entries) => new Set(entries));
}

@@ -542,26 +495,29 @@ function getSetCodec(item, config = {}) {

}
function structCodecHelper(fields, description) {
const fieldDescriptions = fields.map(([name, codec]) => `${String(name)}: ${codec.description}`).join(", ");
return {
description: description ?? `struct(${fieldDescriptions})`,
fixedSize: sumCodecSizes(fields.map(([, field]) => field.fixedSize)),
maxSize: sumCodecSizes(fields.map(([, field]) => field.maxSize))
};
}
function getStructEncoder(fields, config = {}) {
return {
...structCodecHelper(fields, config.description),
encode: (struct) => {
const fieldBytes = fields.map(([key, codec]) => codec.encode(struct[key]));
return mergeBytes(fieldBytes);
function getStructEncoder(fields) {
const fieldCodecs = fields.map(([, codec]) => codec);
const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0;
return createEncoder({
...fixedSize === null ? {
getSizeFromValue: (value) => fields.map(([key, codec]) => getEncodedSize(value[key], codec)).reduce((all, one) => all + one, 0),
maxSize
} : { fixedSize },
write: (struct, bytes, offset) => {
fields.forEach(([key, codec]) => {
offset = codec.write(struct[key], bytes, offset);
});
return offset;
}
};
});
}
function getStructDecoder(fields, config = {}) {
return {
...structCodecHelper(fields, config.description),
decode: (bytes, offset = 0) => {
function getStructDecoder(fields) {
const fieldCodecs = fields.map(([, codec]) => codec);
const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize));
const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0;
return createDecoder({
...fixedSize === null ? { maxSize } : { fixedSize },
read: (bytes, offset) => {
const struct = {};
fields.forEach(([key, codec]) => {
const [value, newOffset] = codec.decode(bytes, offset);
const [value, newOffset] = codec.read(bytes, offset);
offset = newOffset;

@@ -572,66 +528,25 @@ struct[key] = value;

}
};
});
}
function getStructCodec(fields, config = {}) {
return combineCodec(getStructEncoder(fields, config), getStructDecoder(fields, config));
function getStructCodec(fields) {
return combineCodec(getStructEncoder(fields), getStructDecoder(fields));
}
function tupleCodecHelper(items, description) {
const itemDescriptions = items.map((item) => item.description).join(", ");
return {
description: description ?? `tuple(${itemDescriptions})`,
fixedSize: sumCodecSizes(items.map((item) => item.fixedSize)),
maxSize: sumCodecSizes(items.map((item) => item.maxSize))
};
}
function getTupleEncoder(items, config = {}) {
return {
...tupleCodecHelper(items, config.description),
encode: (value) => {
assertValidNumberOfItemsForCodec("tuple", items.length, value.length);
return mergeBytes(items.map((item, index) => item.encode(value[index])));
}
};
}
function getTupleDecoder(items, config = {}) {
return {
...tupleCodecHelper(items, config.description),
decode: (bytes, offset = 0) => {
const values = [];
items.forEach((codec) => {
const [newValue, newOffset] = codec.decode(bytes, offset);
values.push(newValue);
offset = newOffset;
});
return [values, offset];
}
};
}
function getTupleCodec(items, config = {}) {
return combineCodec(
getTupleEncoder(items, config),
getTupleDecoder(items, config)
);
}
function getUnitEncoder(config = {}) {
return {
description: config.description ?? "unit",
encode: () => new Uint8Array(),
function getUnitEncoder() {
return createEncoder({
fixedSize: 0,
maxSize: 0
};
write: (_value, _bytes, offset) => offset
});
}
function getUnitDecoder(config = {}) {
return {
decode: (_bytes, offset = 0) => [void 0, offset],
description: config.description ?? "unit",
function getUnitDecoder() {
return createDecoder({
fixedSize: 0,
maxSize: 0
};
read: (_bytes, offset) => [void 0, offset]
});
}
function getUnitCodec(config = {}) {
return combineCodec(getUnitEncoder(config), getUnitDecoder(config));
function getUnitCodec() {
return combineCodec(getUnitEncoder(), getUnitDecoder());
}
export { assertValidNumberOfItemsForCodec, decodeArrayLikeCodecSize, getArrayCodec, getArrayDecoder, getArrayEncoder, getArrayLikeCodecSizeDescription, getArrayLikeCodecSizeFromChildren, getArrayLikeCodecSizePrefix, getBitArrayCodec, getBitArrayDecoder, getBitArrayEncoder, getBooleanCodec, getBooleanDecoder, getBooleanEncoder, getBytesCodec, getBytesDecoder, getBytesEncoder, getDataEnumCodec, getDataEnumDecoder, getDataEnumEncoder, getMapCodec, getMapDecoder, getMapEncoder, getNullableCodec, getNullableDecoder, getNullableEncoder, getScalarEnumCodec, getScalarEnumDecoder, getScalarEnumEncoder, getSetCodec, getSetDecoder, getSetEncoder, getStructCodec, getStructDecoder, getStructEncoder, getTupleCodec, getTupleDecoder, getTupleEncoder, getUnitCodec, getUnitDecoder, getUnitEncoder };
export { assertValidNumberOfItemsForCodec, getArrayCodec, getArrayDecoder, getArrayEncoder, getBitArrayCodec, getBitArrayDecoder, getBitArrayEncoder, getBooleanCodec, getBooleanDecoder, getBooleanEncoder, getBytesCodec, getBytesDecoder, getBytesEncoder, getDataEnumCodec, getDataEnumDecoder, getDataEnumEncoder, getMapCodec, getMapDecoder, getMapEncoder, getNullableCodec, getNullableDecoder, getNullableEncoder, getScalarEnumCodec, getScalarEnumDecoder, getScalarEnumEncoder, getSetCodec, getSetDecoder, getSetEncoder, getStructCodec, getStructDecoder, getStructEncoder, getTupleCodec, getTupleDecoder, getTupleEncoder, getUnitCodec, getUnitDecoder, getUnitEncoder };
//# sourceMappingURL=out.js.map
//# sourceMappingURL=index.node.js.map

@@ -5,44 +5,40 @@ this.globalThis = this.globalThis || {};

function l(e,r,o=0){if(r.length-o<=0)throw new Error(`Codec [${e}] cannot decode empty byte arrays.`)}function E(e,r,o,n=0){let t=o.length-n;if(t<r)throw new Error(`Codec [${e}] expected ${r} bytes, got ${t}.`)}function D(e,r){if(e.fixedSize===null)throw new Error(r??"Expected a fixed-size codec, got a variable-size one.")}var p=e=>{let r=e.filter(i=>i.length);if(r.length===0)return e.length?e[0]:new Uint8Array;if(r.length===1)return r[0];let o=r.reduce((i,d)=>i+d.length,0),n=new Uint8Array(o),t=0;return r.forEach(i=>{n.set(i,t),t+=i.length;}),n},G=(e,r)=>{if(e.length>=r)return e;let o=new Uint8Array(r).fill(0);return o.set(e),o},w=(e,r)=>G(e.length<=r?e:e.slice(0,r),r);function f(e,r,o){if(e.fixedSize!==r.fixedSize)throw new Error(`Encoder and decoder must have the same fixed size, got [${e.fixedSize}] and [${r.fixedSize}].`);if(e.maxSize!==r.maxSize)throw new Error(`Encoder and decoder must have the same max size, got [${e.maxSize}] and [${r.maxSize}].`);if(o===void 0&&e.description!==r.description)throw new Error(`Encoder and decoder must have the same description, got [${e.description}] and [${r.description}]. Pass a custom description as a third argument if you want to override the description and bypass this error.`);return {decode:r.decode,description:o??e.description,encode:e.encode,fixedSize:e.fixedSize,maxSize:e.maxSize}}function k(e,r,o){return {description:o??`fixed(${r}, ${e.description})`,fixedSize:r,maxSize:r}}function I(e,r,o){return {...k(e,r,o),encode:n=>w(e.encode(n),r)}}function $(e,r,o){return {...k(e,r,o),decode:(n,t=0)=>{E("fixCodec",r,n,t),(t>0||n.length>r)&&(n=n.slice(t,t+r)),e.fixedSize!==null&&(n=w(n,e.fixedSize));let[i]=e.decode(n,0);return [i,t+r]}}}function q(e,r,o,n){if(n<r||n>o)throw new Error(`Codec [${e}] expected number to be in the range [${r}, ${o}], got ${n}.`)}function O(e){let r,o=e.name;return e.size>1&&(r=!("endian"in e.config)||e.config.endian===0,o+=r?"(le)":"(be)"),{description:e.config.description??o,fixedSize:e.size,littleEndian:r,maxSize:e.size}}function _(e){let r=O(e);return {description:r.description,encode(o){e.range&&q(e.name,e.range[0],e.range[1],o);let n=new ArrayBuffer(e.size);return e.set(new DataView(n),o,r.littleEndian),new Uint8Array(n)},fixedSize:r.fixedSize,maxSize:r.maxSize}}function F(e){let r=O(e);return {decode(o,n=0){l(r.description,o,n),E(r.description,e.size,o,n);let t=new DataView(J(o,n,e.size));return [e.get(t,r.littleEndian),n+e.size]},description:r.description,fixedSize:r.fixedSize,maxSize:r.maxSize}}function J(e,r,o){let n=e.byteOffset+(r??0),t=o??e.byteLength;return e.buffer.slice(n,n+t)}var N=(e={})=>_({config:e,name:"u32",range:[0,+"0xffffffff"],set:(r,o,n)=>r.setUint32(0,o,n),size:4}),U=(e={})=>F({config:e,get:(r,o)=>r.getUint32(0,o),name:"u32",size:4});var S=(e={})=>_({config:e,name:"u8",range:[0,+"0xff"],set:(r,o)=>r.setUint8(0,o),size:1}),b=(e={})=>F({config:e,get:r=>r.getUint8(0),name:"u8",size:1});function V(e){return e.reduce((r,o)=>r===null||o===null?null:Math.max(r,o),0)}function x(e){return e.reduce((r,o)=>r===null||o===null?null:r+o,0)}function h(e,r,o,n){if(typeof e=="number")return [e,n];if(typeof e=="object")return e.decode(o,n);if(e==="remainder"){let t=x(r);if(t===null)throw new Error('Codecs of "remainder" size must have fixed-size items.');let i=o.slice(n).length;if(i%t!==0)throw new Error(`The remainder of the byte array (${i} bytes) cannot be split into chunks of ${t} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. 
In other words, ${i} modulo ${t} should be equal to zero.`);return [i/t,n]}throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(e)}`)}function B(e){return typeof e=="object"?e.description:`${e}`}function g(e,r){if(typeof e!="number")return null;if(e===0)return 0;let o=x(r);return o===null?null:o*e}function A(e,r){return typeof e=="object"?e.encode(r):new Uint8Array}function y(e,r,o){if(r!==o)throw new Error(`Expected [${e}] to have ${r} items, got ${o}.`)}function L(e,r,o){if(r==="remainder"&&e.fixedSize===null)throw new Error('Codecs of "remainder" size must have fixed-size items.');return {description:o??`array(${e.description}; ${B(r)})`,fixedSize:g(r,[e.fixedSize]),maxSize:g(r,[e.maxSize])}}function Q(e,r={}){let o=r.size??N();return {...L(e,o,r.description),encode:n=>(typeof o=="number"&&y("array",o,n.length),p([A(o,n.length),...n.map(t=>e.encode(t))]))}}function X(e,r={}){let o=r.size??U();return {...L(e,o,r.description),decode:(n,t=0)=>{if(typeof o=="object"&&n.slice(t).length===0)return [[],t];let[i,d]=h(o,[e.fixedSize],n,t);t=d;let c=[];for(let a=0;a<i;a+=1){let[s,u]=e.decode(n,t);c.push(s),t=u;}return [c,t]}}}function We(e,r={}){return f(Q(e,r),X(e,r))}var Y=(e,r={})=>{let o=typeof r=="boolean"?{backward:r}:r,n=o.backward??!1,t=n?"; backward":"";return {description:o.description??`bitArray(${e}${t})`,encode(i){let d=[];for(let c=0;c<e;c+=1){let a=0;for(let s=0;s<8;s+=1){let u=Number(i[c*8+s]??0);a|=u<<(n?s:7-s);}n?d.unshift(a):d.push(a);}return new Uint8Array(d)},fixedSize:e,maxSize:e}},Z=(e,r={})=>{let o=typeof r=="boolean"?{backward:r}:r,n=o.backward??!1,t=n?"; backward":"";return {decode(i,d=0){E("bitArray",e,i,d);let c=[],a=i.slice(d,d+e);return a=n?a.reverse():a,a.forEach(s=>{for(let u=0;u<8;u+=1)n?(c.push(!!(s&1)),s>>=1):(c.push(!!(s&128)),s<<=1);}),[c,d+e]},description:o.description??`bitArray(${e}${t})`,fixedSize:e,maxSize:e}},er=(e,r={})=>f(Y(e,r),Z(e,r));function ee(e={}){let r=e.size??S();return D(r,"Codec [bool] requires a fixed size."),{description:e.description??`bool(${r.description})`,encode:o=>r.encode(o?1:0),fixedSize:r.fixedSize,maxSize:r.fixedSize}}function re(e={}){let r=e.size??b();return D(r,"Codec [bool] requires a fixed size."),{decode:(o,n=0)=>{l("bool",o,n);let[t,i]=r.decode(o,n);return [t===1,i]},description:e.description??`bool(${r.description})`,fixedSize:r.fixedSize,maxSize:r.fixedSize}}function mr(e={}){return f(ee(e),re(e))}function oe(e={}){let r=e.size??"variable",o=typeof r=="object"?r.description:`${r}`,n=e.description??`bytes(${o})`,t={description:n,encode:i=>i,fixedSize:null,maxSize:null};return r==="variable"?t:typeof r=="number"?I(t,r,n):{...t,encode:i=>{let d=t.encode(i),c=r.encode(d.length);return p([c,d])}}}function ne(e={}){let r=e.size??"variable",o=typeof r=="object"?r.description:`${r}`,n=e.description??`bytes(${o})`,t={decode:(i,d=0)=>{let c=i.slice(d);return [c,d+c.length]},description:n,fixedSize:null,maxSize:null};return r==="variable"?t:typeof r=="number"?$(t,r,n):{...t,decode:(i,d=0)=>{l("bytes",i,d);let[c,a]=r.decode(i,d),s=Number(c);d=a;let u=i.slice(d,d+s);E("bytes",s,u);let[m,T]=t.decode(u);return d+=T,[m,d]}}}function br(e={}){return f(oe(e),ne(e))}function K(e,r,o){let n=e.map(([c,a])=>`${String(c)}${a?`: ${a.description}`:""}`).join(", "),i=e.every((c,a,s)=>c[1].fixedSize===s[0][1].fixedSize)?e[0][1].fixedSize:null,d=V(e.map(([,c])=>c.maxSize));return {description:o??`dataEnum(${n}; 
${r.description})`,fixedSize:e.length===0?r.fixedSize:x([r.fixedSize,i]),maxSize:e.length===0?r.maxSize:x([r.maxSize,d])}}function te(e,r={}){let o=r.size??S();return {...K(e,o,r.description),encode:n=>{let t=e.findIndex(([a])=>n.__kind===a);if(t<0)throw new Error(`Invalid data enum variant. Expected one of [${e.map(([a])=>a).join(", ")}], got "${n.__kind}".`);let i=o.encode(t),c=e[t][1].encode(n);return p([i,c])}}}function ie(e,r={}){let o=r.size??b();return {...K(e,o,r.description),decode:(n,t=0)=>{l("dataEnum",n,t);let[i,d]=o.decode(n,t);t=d;let c=e[Number(i)]??null;if(!c)throw new Error(`Enum discriminator out of range. Expected a number between 0 and ${e.length-1}, got ${i}.`);let[a,s]=c[1].decode(n,t);return t=s,[{__kind:c[0],...a??{}},t]}}}function $r(e,r={}){return f(te(e,r),ie(e,r))}function P(e,r,o,n){if(o==="remainder"&&(e.fixedSize===null||r.fixedSize===null))throw new Error('Codecs of "remainder" size must have fixed-size items.');return {description:n??`map(${e.description}, ${r.description}; ${B(o)})`,fixedSize:g(o,[e.fixedSize,r.fixedSize]),maxSize:g(o,[e.maxSize,r.maxSize])}}function de(e,r,o={}){let n=o.size??N();return {...P(e,r,n,o.description),encode:t=>{typeof n=="number"&&y("map",n,t.size);let i=Array.from(t,([d,c])=>p([e.encode(d),r.encode(c)]));return p([A(n,t.size),...i])}}}function ce(e,r,o={}){let n=o.size??U();return {...P(e,r,n,o.description),decode:(t,i=0)=>{let d=new Map;if(typeof n=="object"&&t.slice(i).length===0)return [d,i];let[c,a]=h(n,[e.fixedSize,r.fixedSize],t,i);i=a;for(let s=0;s<c;s+=1){let[u,m]=e.decode(t,i);i=m;let[T,v]=r.decode(t,i);i=v,d.set(u,T);}return [d,i]}}}function Qr(e,r,o={}){return f(de(e,r,o),ce(e,r,o))}function j(e,r,o,n){let t=`; ${r.description}`,i=e.fixedSize===0?r.fixedSize:null;return o&&(D(e,"Fixed nullables can only be used with fixed-size codecs."),D(r,"Fixed nullables can only be used with fixed-size prefix."),t+="; fixed",i=r.fixedSize+e.fixedSize),{description:n??`nullable(${e.description+t})`,fixedSize:i,maxSize:x([r.maxSize,e.maxSize])}}function ae(e,r={}){let o=r.prefix??S(),n=r.fixed??!1;return {...j(e,o,n,r.description),encode:t=>{let i=o.encode(+(t!==null)),d=t!==null?e.encode(t):new Uint8Array;return d=n?w(d,e.fixedSize):d,p([i,d])}}}function se(e,r={}){let o=r.prefix??b(),n=r.fixed??!1;return {...j(e,o,n,r.description),decode:(t,i=0)=>{if(t.length-i<=0)return [null,i];let d=i+(o.fixedSize??0)+(e.fixedSize??0),[c,a]=o.decode(t,i);if(i=a,c===0)return [null,n?d:i];let[s,u]=e.decode(t,i);return i=u,[s,n?d:i]}}}function uo(e,r={}){return f(ae(e,r),se(e,r))}function M(e,r,o){let n=Object.keys(e),t=Object.values(e),i=t.some(u=>typeof u=="number"),d=t.filter(u=>typeof u=="string").join(", "),c=0,a=i?t.length/2-1:t.length-1,s=i?[...n]:[...new Set([...n,...t])];return {description:o??`enum(${d}; ${r.description})`,enumKeys:n,enumValues:t,fixedSize:r.fixedSize,isNumericEnum:i,maxRange:a,maxSize:r.maxSize,minRange:c,stringValues:s}}function fe(e,r={}){let o=r.size??S(),{description:n,fixedSize:t,maxSize:i,minRange:d,maxRange:c,stringValues:a,enumKeys:s,enumValues:u}=M(e,o,r.description);return {description:n,encode:m=>{let T=typeof m=="number"&&(m<d||m>c),v=typeof m=="string"&&!a.includes(m);if(T||v)throw new Error(`Invalid scalar enum variant. 
Expected one of [${a.join(", ")}] or a number between ${d} and ${c}, got "${m}".`);if(typeof m=="number")return o.encode(m);let z=u.indexOf(m);return z>=0?o.encode(z):o.encode(s.indexOf(m))},fixedSize:t,maxSize:i}}function ue(e,r={}){let o=r.size??b(),{description:n,fixedSize:t,maxSize:i,minRange:d,maxRange:c,isNumericEnum:a,enumValues:s}=M(e,o,r.description);return {decode:(u,m=0)=>{l("enum",u,m);let[T,v]=o.decode(u,m),z=Number(T);if(m=v,z<d||z>c)throw new Error(`Enum discriminator out of range. Expected a number between ${d} and ${c}, got ${z}.`);return [a?z:s[z],m]},description:n,fixedSize:t,maxSize:i}}function Do(e,r={}){return f(fe(e,r),ue(e,r))}function H(e,r,o){if(r==="remainder"&&e.fixedSize===null)throw new Error('Codecs of "remainder" size must have fixed-size items.');return {description:o??`set(${e.description}; ${B(r)})`,fixedSize:g(r,[e.fixedSize]),maxSize:g(r,[e.maxSize])}}function me(e,r={}){let o=r.size??N();return {...H(e,o,r.description),encode:n=>{typeof o=="number"&&n.size!==o&&y("set",o,n.size);let t=Array.from(n,i=>e.encode(i));return p([A(o,n.size),...t])}}}function Ce(e,r={}){let o=r.size??U();return {...H(e,o,r.description),decode:(n,t=0)=>{let i=new Set;if(typeof o=="object"&&n.slice(t).length===0)return [i,t];let[d,c]=h(o,[e.fixedSize],n,t);t=c;for(let a=0;a<d;a+=1){let[s,u]=e.decode(n,t);t=u,i.add(s);}return [i,t]}}}function Ko(e,r={}){return f(me(e,r),Ce(e,r))}function R(e,r){let o=e.map(([n,t])=>`${String(n)}: ${t.description}`).join(", ");return {description:r??`struct(${o})`,fixedSize:x(e.map(([,n])=>n.fixedSize)),maxSize:x(e.map(([,n])=>n.maxSize))}}function pe(e,r={}){return {...R(e,r.description),encode:o=>{let n=e.map(([t,i])=>i.encode(o[t]));return p(n)}}}function xe(e,r={}){return {...R(e,r.description),decode:(o,n=0)=>{let t={};return e.forEach(([i,d])=>{let[c,a]=d.decode(o,n);n=a,t[i]=c;}),[t,n]}}}function Qo(e,r={}){return f(pe(e,r),xe(e,r))}function W(e,r){let o=e.map(n=>n.description).join(", ");return {description:r??`tuple(${o})`,fixedSize:x(e.map(n=>n.fixedSize)),maxSize:x(e.map(n=>n.maxSize))}}function le(e,r={}){return {...W(e,r.description),encode:o=>(y("tuple",e.length,o.length),p(e.map((n,t)=>n.encode(o[t]))))}}function ge(e,r={}){return {...W(e,r.description),decode:(o,n=0)=>{let t=[];return e.forEach(i=>{let[d,c]=i.decode(o,n);t.push(d),n=c;}),[t,n]}}}function an(e,r={}){return f(le(e,r),ge(e,r))}function ze(e={}){return {description:e.description??"unit",encode:()=>new Uint8Array,fixedSize:0,maxSize:0}}function Se(e={}){return {decode:(r,o=0)=>[void 0,o],description:e.description??"unit",fixedSize:0,maxSize:0}}function ln(e={}){return f(ze(e),Se(e))}
function h(e,o,r=0){if(o.length-r<=0)throw new Error(`Codec [${e}] cannot decode empty byte arrays.`)}function V(e,o,r,n=0){let t=r.length-n;if(t<o)throw new Error(`Codec [${e}] expected ${o} bytes, got ${t}.`)}var J=(e,o)=>{if(e.length>=o)return e;let r=new Uint8Array(o).fill(0);return r.set(e),r},Q=(e,o)=>J(e.length<=o?e:e.slice(0,o),o);function S(e,o){return "fixedSize"in o?o.fixedSize:o.getSizeFromValue(e)}function l(e){return Object.freeze({...e,encode:o=>{let r=new Uint8Array(S(o,e));return e.write(o,r,0),r}})}function x(e){return Object.freeze({...e,decode:(o,r=0)=>e.read(o,r)[0]})}function f(e){return "fixedSize"in e&&typeof e.fixedSize=="number"}function b(e,o){if(!f(e))throw new Error(o!=null?o:"Expected a fixed-size codec, got a variable-size one.")}function X(e){return !f(e)}function m(e,o){if(f(e)!==f(o))throw new Error("Encoder and decoder must either both be fixed-size or variable-size.");if(f(e)&&f(o)&&e.fixedSize!==o.fixedSize)throw new Error(`Encoder and decoder must have the same fixed size, got [${e.fixedSize}] and [${o.fixedSize}].`);if(!f(e)&&!f(o)&&e.maxSize!==o.maxSize)throw new Error(`Encoder and decoder must have the same max size, got [${e.maxSize}] and [${o.maxSize}].`);return {...o,...e,decode:o.decode,encode:e.encode,read:o.read,write:e.write}}function M(e,o){return l({fixedSize:o,write:(r,n,t)=>{let i=e.encode(r),c=i.length>o?i.slice(0,o):i;return n.set(c,t),t+o}})}function _(e,o){return x({fixedSize:o,read:(r,n)=>{V("fixCodec",o,r,n),(n>0||r.length>o)&&(r=r.slice(n,n+o)),f(e)&&(r=Q(r,e.fixedSize));let[t]=e.read(r,0);return [t,n+o]}})}function E(e,o){return l({...X(e)?{...e,getSizeFromValue:r=>e.getSizeFromValue(o(r))}:e,write:(r,n,t)=>e.write(o(r),n,t)})}function y(e,o){return x({...e,read:(r,n)=>{let[t,i]=e.read(r,n);return [o(t,r,n),i]}})}function Y(e,o,r,n){if(n<o||n>r)throw new Error(`Codec [${e}] expected number to be in the range [${o}, ${r}], got ${n}.`)}function O(e){return (e==null?void 0:e.endian)!==1}function j(e){return l({fixedSize:e.size,write(o,r,n){e.range&&Y(e.name,e.range[0],e.range[1],o);let t=new ArrayBuffer(e.size);return e.set(new DataView(t),o,O(e.config)),r.set(new Uint8Array(t),n),n+e.size}})}function $(e){return x({fixedSize:e.size,read(o,r=0){h(e.name,o,r),V(e.name,e.size,o,r);let n=new DataView(ee(o,r,e.size));return [e.get(n,O(e.config)),r+e.size]}})}function ee(e,o,r){let n=e.byteOffset+(o!=null?o:0),t=r!=null?r:e.byteLength;return e.buffer.slice(n,n+t)}var P=(e={})=>j({config:e,name:"u32",range:[0,+"0xffffffff"],set:(o,r,n)=>o.setUint32(0,r,n),size:4}),L=(e={})=>$({config:e,get:(o,r)=>o.getUint32(0,r),name:"u32",size:4});var D=()=>j({name:"u8",range:[0,+"0xff"],set:(e,o)=>e.setUint8(0,o),size:1}),N=()=>$({get:e=>e.getUint8(0),name:"u8",size:1});function B(e,o,r){if(o!==r)throw new Error(`Expected [${e}] to have ${o} items, got ${r}.`)}function W(e){return e.reduce((o,r)=>o===null||r===null?null:Math.max(o,r),0)}function g(e){return e.reduce((o,r)=>o===null||r===null?null:o+r,0)}function p(e){return f(e)?e.fixedSize:null}function z(e){var o;return f(e)?e.fixedSize:(o=e.maxSize)!=null?o:null}function K(e,o={}){var i,c;let r=(i=o.size)!=null?i:P();r==="remainder"&&b(e,'Codecs of "remainder" size must have fixed-size items.');let n=v(r,p(e)),t=(c=v(r,z(e)))!=null?c:void 0;return l({...n!==null?{fixedSize:n}:{getSizeFromValue:a=>(typeof r=="object"?S(a.length,r):0)+[...a].reduce((d,u)=>d+S(u,e),0),maxSize:t},write:(a,T,d)=>(typeof r=="number"&&B("array",r,a.length),typeof 
r=="object"&&(d=r.write(a.length,T,d)),a.forEach(u=>{d=e.write(u,T,d);}),d)})}function w(e,o={}){var c,a;let r=(c=o.size)!=null?c:L();r==="remainder"&&b(e,'Codecs of "remainder" size must have fixed-size items.');let n=p(e),t=v(r,n),i=(a=v(r,z(e)))!=null?a:void 0;return x({...t!==null?{fixedSize:t}:{maxSize:i},read:(T,d)=>{let u=[];if(typeof r=="object"&&T.slice(d).length===0)return [u,d];let[s,F]=oe(r,n,T,d);d=F;for(let A=0;A<s;A+=1){let[U,Z]=e.read(T,d);d=Z,u.push(U);}return [u,d]}})}function Je(e,o={}){return m(K(e,o),w(e,o))}function oe(e,o,r,n){if(typeof e=="number")return [e,n];if(typeof e=="object")return e.read(r,n);if(e==="remainder"){if(o===null)throw new Error('Codecs of "remainder" size must have fixed-size items.');let t=Math.max(0,r.length-n);if(t%o!==0)throw new Error(`The remainder of the byte array (${t} bytes) cannot be split into chunks of ${o} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${t} modulo ${o} should be equal to zero.`);return [t/o,n]}throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(e)}`)}function v(e,o){return typeof e!="number"?null:e===0?0:o===null?null:o*e}function re(e,o={}){var t;let n=(t=(typeof o=="boolean"?{backward:o}:o).backward)!=null?t:!1;return l({fixedSize:e,write(i,c,a){var d;let T=[];for(let u=0;u<e;u+=1){let s=0;for(let F=0;F<8;F+=1){let A=Number((d=i[u*8+F])!=null?d:0);s|=A<<(n?F:7-F);}n?T.unshift(s):T.push(s);}return c.set(T,a),e}})}function ne(e,o={}){var t;let n=(t=(typeof o=="boolean"?{backward:o}:o).backward)!=null?t:!1;return x({fixedSize:e,read(i,c){V("bitArray",e,i,c);let a=[],T=i.slice(c,c+e);return T=n?T.reverse():T,T.forEach(d=>{for(let u=0;u<8;u+=1)n?(a.push(!!(d&1)),d>>=1):(a.push(!!(d&128)),d<<=1);}),[a,c+e]}})}function no(e,o={}){return m(re(e,o),ne(e,o))}function te(e={}){var r;let o=(r=e.size)!=null?r:D();return b(o,"Codec [bool] requires a fixed size."),E(o,n=>n?1:0)}function ie(e={}){var r;let o=(r=e.size)!=null?r:N();return b(o,"Codec [bool] requires a fixed size."),y(o,n=>Number(n)===1)}function bo(e={}){return m(te(e),ie(e))}function ce(e={}){var n;let o=(n=e.size)!=null?n:"variable",r=l({getSizeFromValue:t=>t.length,write:(t,i,c)=>(i.set(t,c),c+t.length)});return o==="variable"?r:typeof o=="number"?M(r,o):l({getSizeFromValue:t=>S(t.length,o)+t.length,write:(t,i,c)=>(c=o.write(t.length,i,c),r.write(t,i,c))})}function de(e={}){var n;let o=(n=e.size)!=null?n:"variable",r=x({read:(t,i)=>{let c=t.slice(i);return [c,i+c.length]}});return o==="variable"?r:typeof o=="number"?_(r,o):x({read:(t,i)=>{h("bytes",t,i);let[c,a]=o.read(t,i),T=Number(c);i=a;let d=t.slice(i,i+T);V("bytes",T,d);let[u,s]=r.read(d,0);return i+=s,[u,i]}})}function Uo(e={}){return m(ce(e),de(e))}function ae(e,o={}){var t;let r=(t=o.size)!=null?t:D(),n=G(e,r);return l({...n!==null?{fixedSize:n}:{getSizeFromValue:i=>{let c=R(e,i),a=e[c][1];return S(c,r)+S(i,a)},maxSize:H(e,r)},write:(i,c,a)=>{let T=R(e,i);return a=r.write(T,c,a),e[T][1].write(i,c,a)}})}function Te(e,o={}){var t;let r=(t=o.size)!=null?t:N(),n=G(e,r);return x({...n!==null?{fixedSize:n}:{maxSize:H(e,r)},read:(i,c)=>{var F;h("dataEnum",i,c);let[a,T]=r.read(i,c);c=T;let d=(F=e[Number(a)])!=null?F:null;if(!d)throw new Error(`Enum discriminator out of range. 
Expected a number between 0 and ${e.length-1}, got ${a}.`);let[u,s]=d[1].read(i,c);return c=s,[{__kind:d[0],...u!=null?u:{}},c]}})}function Go(e,o={}){return m(ae(e,o),Te(e,o))}function G(e,o){if(e.length===0)return f(o)?o.fixedSize:null;if(!f(e[0][1]))return null;let r=e[0][1].fixedSize;return e.every(t=>f(t[1])&&t[1].fixedSize===r)&&f(o)?o.fixedSize+r:null}function H(e,o){var n;let r=W(e.map(([,t])=>z(t)));return (n=g([z(o),r]))!=null?n:void 0}function R(e,o){let r=e.findIndex(([n])=>o.__kind===n);if(r<0)throw new Error(`Invalid data enum variant. Expected one of [${e.map(([n])=>n).join(", ")}], got "${o.__kind}".`);return r}function I(e){var n;let o=g(e.map(p)),r=(n=g(e.map(z)))!=null?n:void 0;return l({...o===null?{getSizeFromValue:t=>e.map((i,c)=>S(t[c],i)).reduce((i,c)=>i+c,0),maxSize:r}:{fixedSize:o},write:(t,i,c)=>(B("tuple",e.length,t.length),e.forEach((a,T)=>{c=a.write(t[T],i,c);}),c)})}function k(e){var n;let o=g(e.map(p)),r=(n=g(e.map(z)))!=null?n:void 0;return x({...o===null?{maxSize:r}:{fixedSize:o},read:(t,i)=>{let c=[];return e.forEach(a=>{let[T,d]=a.read(t,i);c.push(T),i=d;}),[c,i]}})}function dr(e){return m(I(e),k(e))}function ue(e,o,r={}){return E(K(I([e,o]),r),n=>[...n.entries()])}function me(e,o,r={}){return y(w(k([e,o]),r),n=>new Map(n))}function Er(e,o,r={}){return m(ue(e,o,r),me(e,o,r))}function fe(e,o={}){var i,c,a;let r=(i=o.prefix)!=null?i:D(),n=(c=o.fixed)!=null?c:!1,t=f(e)&&f(r)&&e.fixedSize===0;if(n||t){b(e,"Fixed nullables can only be used with fixed-size codecs."),b(r,"Fixed nullables can only be used with fixed-size prefix.");let T=r.fixedSize+e.fixedSize;return l({fixedSize:T,write:(d,u,s)=>{let F=r.write(+(d!==null),u,s);return d!==null&&e.write(d,u,F),s+T}})}return l({getSizeFromValue:T=>S(+(T!==null),r)+(T!==null?S(T,e):0),maxSize:(a=g([r,e].map(z)))!=null?a:void 0,write:(T,d,u)=>(u=r.write(+(T!==null),d,u),T!==null&&(u=e.write(T,d,u)),u)})}function le(e,o={}){var c,a,T;let r=(c=o.prefix)!=null?c:N(),n=(a=o.fixed)!=null?a:!1,t=null,i=f(e)&&f(r)&&e.fixedSize===0;return (n||i)&&(b(e,"Fixed nullables can only be used with fixed-size codecs."),b(r,"Fixed nullables can only be used with fixed-size prefix."),t=r.fixedSize+e.fixedSize),x({...t===null?{maxSize:(T=g([r,e].map(z)))!=null?T:void 0}:{fixedSize:t},read:(d,u)=>{if(d.length-u<=0)return [null,u];let[s,F]=r.read(d,u);if(s===0)return [null,t!==null?u+t:F];let[A,U]=e.read(d,F);return [A,t!==null?u+t:U]}})}function Wr(e,o={}){let r=o;return m(fe(e,r),le(e,r))}function xe(e,o={}){var T;let r=(T=o.size)!=null?T:D(),{minRange:n,maxRange:t,stringValues:i,enumKeys:c,enumValues:a}=q(e);return E(r,d=>{let u=typeof d=="number"&&(d<n||d>t),s=typeof d=="string"&&!i.includes(d);if(u||s)throw new Error(`Invalid scalar enum variant. Expected one of [${i.join(", ")}] or a number between ${n} and ${t}, got "${d}".`);if(typeof d=="number")return d;let F=a.indexOf(d);return F>=0?F:c.indexOf(d)})}function se(e,o={}){var a;let r=(a=o.size)!=null?a:N(),{minRange:n,maxRange:t,isNumericEnum:i,enumValues:c}=q(e);return y(r,T=>{let d=Number(T);if(d<n||d>t)throw new Error(`Enum discriminator out of range. 
Expected a number between ${n} and ${t}, got ${d}.`);return i?d:c[d]})}function mn(e,o={}){return m(xe(e,o),se(e,o))}function q(e){let o=Object.keys(e),r=Object.values(e),n=r.some(a=>typeof a=="number"),t=0,i=n?r.length/2-1:r.length-1,c=n?[...o]:[...new Set([...o,...r])];return {enumKeys:o,enumValues:r,isNumericEnum:n,maxRange:i,minRange:t,stringValues:c}}function Fe(e,o={}){return E(K(e,o),r=>[...r])}function Ce(e,o={}){return y(w(e,o),r=>new Set(r))}function Nn(e,o={}){return m(Fe(e,o),Ce(e,o))}function Se(e){var t;let o=e.map(([,i])=>i),r=g(o.map(p)),n=(t=g(o.map(z)))!=null?t:void 0;return l({...r===null?{getSizeFromValue:i=>e.map(([c,a])=>S(i[c],a)).reduce((c,a)=>c+a,0),maxSize:n}:{fixedSize:r},write:(i,c,a)=>(e.forEach(([T,d])=>{a=d.write(i[T],c,a);}),a)})}function ge(e){var t;let o=e.map(([,i])=>i),r=g(o.map(p)),n=(t=g(o.map(z)))!=null?t:void 0;return x({...r===null?{maxSize:n}:{fixedSize:r},read:(i,c)=>{let a={};return e.forEach(([T,d])=>{let[u,s]=d.read(i,c);c=s,a[T]=u;}),[a,c]}})}function jn(e){return m(Se(e),ge(e))}function ze(){return l({fixedSize:0,write:(e,o,r)=>r})}function be(){return x({fixedSize:0,read:(e,o)=>[void 0,o]})}function Hn(){return m(ze(),be())}
exports.assertValidNumberOfItemsForCodec = y;
exports.decodeArrayLikeCodecSize = h;
exports.getArrayCodec = We;
exports.getArrayDecoder = X;
exports.getArrayEncoder = Q;
exports.getArrayLikeCodecSizeDescription = B;
exports.getArrayLikeCodecSizeFromChildren = g;
exports.getArrayLikeCodecSizePrefix = A;
exports.getBitArrayCodec = er;
exports.getBitArrayDecoder = Z;
exports.getBitArrayEncoder = Y;
exports.getBooleanCodec = mr;
exports.getBooleanDecoder = re;
exports.getBooleanEncoder = ee;
exports.getBytesCodec = br;
exports.getBytesDecoder = ne;
exports.getBytesEncoder = oe;
exports.getDataEnumCodec = $r;
exports.getDataEnumDecoder = ie;
exports.getDataEnumEncoder = te;
exports.getMapCodec = Qr;
exports.getMapDecoder = ce;
exports.getMapEncoder = de;
exports.getNullableCodec = uo;
exports.getNullableDecoder = se;
exports.getNullableEncoder = ae;
exports.getScalarEnumCodec = Do;
exports.getScalarEnumDecoder = ue;
exports.getScalarEnumEncoder = fe;
exports.getSetCodec = Ko;
exports.assertValidNumberOfItemsForCodec = B;
exports.getArrayCodec = Je;
exports.getArrayDecoder = w;
exports.getArrayEncoder = K;
exports.getBitArrayCodec = no;
exports.getBitArrayDecoder = ne;
exports.getBitArrayEncoder = re;
exports.getBooleanCodec = bo;
exports.getBooleanDecoder = ie;
exports.getBooleanEncoder = te;
exports.getBytesCodec = Uo;
exports.getBytesDecoder = de;
exports.getBytesEncoder = ce;
exports.getDataEnumCodec = Go;
exports.getDataEnumDecoder = Te;
exports.getDataEnumEncoder = ae;
exports.getMapCodec = Er;
exports.getMapDecoder = me;
exports.getMapEncoder = ue;
exports.getNullableCodec = Wr;
exports.getNullableDecoder = le;
exports.getNullableEncoder = fe;
exports.getScalarEnumCodec = mn;
exports.getScalarEnumDecoder = se;
exports.getScalarEnumEncoder = xe;
exports.getSetCodec = Nn;
exports.getSetDecoder = Ce;
exports.getSetEncoder = me;
exports.getStructCodec = Qo;
exports.getStructDecoder = xe;
exports.getStructEncoder = pe;
exports.getTupleCodec = an;
exports.getTupleDecoder = ge;
exports.getTupleEncoder = le;
exports.getUnitCodec = ln;
exports.getUnitDecoder = Se;
exports.getSetEncoder = Fe;
exports.getStructCodec = jn;
exports.getStructDecoder = ge;
exports.getStructEncoder = Se;
exports.getTupleCodec = dr;
exports.getTupleDecoder = k;
exports.getTupleEncoder = I;
exports.getUnitCodec = Hn;
exports.getUnitDecoder = be;
exports.getUnitEncoder = ze;

@@ -49,0 +45,0 @@

@@ -1,6 +0,17 @@

import { BaseCodecConfig, Codec, Decoder, Encoder } from '@solana/codecs-core';
import { Codec, Decoder, Encoder, FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder, VariableSizeCodec, VariableSizeDecoder, VariableSizeEncoder } from '@solana/codecs-core';
import { NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers';
import { ArrayLikeCodecSize } from './array-like-codec-size';
/**
* Represents all the size options for array-like codecs
* — i.e. `array`, `map` and `set`.
*
* It can be one of the following:
* - a {@link NumberCodec} that prefixes its content with its size.
* - a fixed number of items.
* - or `'remainder'` to infer the number of items by dividing
* the rest of the byte array by the fixed size of its item.
* Note that this option is only available for fixed-size items.
*/
export type ArrayLikeCodecSize<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = TPrefix | number | 'remainder';
/** Defines the configs for array codecs. */
export type ArrayCodecConfig<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = BaseCodecConfig & {
export type ArrayCodecConfig<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = {
/**

@@ -18,3 +29,14 @@ * The size of the array.

*/
export declare function getArrayEncoder<T>(item: Encoder<T>, config?: ArrayCodecConfig<NumberEncoder>): Encoder<T[]>;
export declare function getArrayEncoder<TFrom>(item: Encoder<TFrom>, config: ArrayCodecConfig<NumberEncoder> & {
size: 0;
}): FixedSizeEncoder<TFrom[], 0>;
export declare function getArrayEncoder<TFrom>(item: FixedSizeEncoder<TFrom>, config: ArrayCodecConfig<NumberEncoder> & {
size: number;
}): FixedSizeEncoder<TFrom[]>;
export declare function getArrayEncoder<TFrom>(item: FixedSizeEncoder<TFrom>, config: ArrayCodecConfig<NumberEncoder> & {
size: 'remainder';
}): VariableSizeEncoder<TFrom[]>;
export declare function getArrayEncoder<TFrom>(item: Encoder<TFrom>, config?: ArrayCodecConfig<NumberEncoder> & {
size?: number | NumberEncoder;
}): VariableSizeEncoder<TFrom[]>;
/**

@@ -26,3 +48,14 @@ * Decodes an array of items.

*/
export declare function getArrayDecoder<T>(item: Decoder<T>, config?: ArrayCodecConfig<NumberDecoder>): Decoder<T[]>;
export declare function getArrayDecoder<TTo>(item: Decoder<TTo>, config: ArrayCodecConfig<NumberDecoder> & {
size: 0;
}): FixedSizeDecoder<TTo[], 0>;
export declare function getArrayDecoder<TTo>(item: FixedSizeDecoder<TTo>, config: ArrayCodecConfig<NumberDecoder> & {
size: number;
}): FixedSizeDecoder<TTo[]>;
export declare function getArrayDecoder<TTo>(item: FixedSizeDecoder<TTo>, config: ArrayCodecConfig<NumberDecoder> & {
size: 'remainder';
}): VariableSizeDecoder<TTo[]>;
export declare function getArrayDecoder<TTo>(item: Decoder<TTo>, config?: ArrayCodecConfig<NumberDecoder> & {
size?: number | NumberDecoder;
}): VariableSizeDecoder<TTo[]>;
/**

@@ -34,3 +67,14 @@ * Creates a codec for an array of items.

*/
export declare function getArrayCodec<T, U extends T = T>(item: Codec<T, U>, config?: ArrayCodecConfig<NumberCodec>): Codec<T[], U[]>;
export declare function getArrayCodec<TFrom, TTo extends TFrom = TFrom>(item: Codec<TFrom, TTo>, config: ArrayCodecConfig<NumberCodec> & {
size: 0;
}): FixedSizeCodec<TFrom[], TTo[], 0>;
export declare function getArrayCodec<TFrom, TTo extends TFrom = TFrom>(item: FixedSizeCodec<TFrom, TTo>, config: ArrayCodecConfig<NumberCodec> & {
size: number;
}): FixedSizeCodec<TFrom[], TTo[]>;
export declare function getArrayCodec<TFrom, TTo extends TFrom = TFrom>(item: FixedSizeCodec<TFrom, TTo>, config: ArrayCodecConfig<NumberCodec> & {
size: 'remainder';
}): VariableSizeCodec<TFrom[], TTo[]>;
export declare function getArrayCodec<TFrom, TTo extends TFrom = TFrom>(item: Codec<TFrom, TTo>, config?: ArrayCodecConfig<NumberCodec> & {
size?: number | NumberCodec;
}): VariableSizeCodec<TFrom[], TTo[]>;
//# sourceMappingURL=array.d.ts.map
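
To make the new array overloads above concrete, here is a minimal TypeScript sketch. It assumes the exported getArrayCodec together with getU8Codec from @solana/codecs-numbers, and that codecs expose the encode/decode helpers attached by @solana/codecs-core; the commented byte layouts are assumptions, not guarantees.

import { getArrayCodec } from '@solana/codecs-data-structures';
import { getU8Codec } from '@solana/codecs-numbers';

// The three sizing strategies from the overloads above (layout comments are assumptions).
const prefixed = getArrayCodec(getU8Codec());                    // length-prefixed (u32 by default) -> VariableSizeCodec<number[]>
const fixed = getArrayCodec(getU8Codec(), { size: 3 });          // exactly three items, no prefix -> FixedSizeCodec<number[]>
const rest = getArrayCodec(getU8Codec(), { size: 'remainder' }); // consume the remaining bytes -> VariableSizeCodec<number[]>

prefixed.encode([1, 2, 3]);              // assumed: 4-byte length prefix followed by three u8 items
fixed.decode(new Uint8Array([1, 2, 3])); // assumed: [1, 2, 3]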

@@ -1,4 +0,4 @@

import { BaseCodecConfig, Codec, Decoder, Encoder } from '@solana/codecs-core';
import { FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder } from '@solana/codecs-core';
/** Defines the config for bitArray codecs. */
export type BitArrayCodecConfig = BaseCodecConfig & {
export type BitArrayCodecConfig = {
/**

@@ -16,3 +16,3 @@ * Whether to read the bits in reverse order.

*/
export declare const getBitArrayEncoder: (size: number, config?: BitArrayCodecConfig | boolean) => Encoder<boolean[]>;
export declare function getBitArrayEncoder<TSize extends number>(size: TSize, config?: BitArrayCodecConfig | boolean): FixedSizeEncoder<boolean[], TSize>;
/**

@@ -24,3 +24,3 @@ * Decodes bits into an array of booleans.

*/
export declare const getBitArrayDecoder: (size: number, config?: BitArrayCodecConfig | boolean) => Decoder<boolean[]>;
export declare function getBitArrayDecoder<TSize extends number>(size: TSize, config?: BitArrayCodecConfig | boolean): FixedSizeDecoder<boolean[], TSize>;
/**

@@ -32,3 +32,3 @@ * A codec for an array of booleans that converts them to bits and vice versa.

*/
export declare const getBitArrayCodec: (size: number, config?: BitArrayCodecConfig | boolean) => Codec<boolean[]>;
export declare function getBitArrayCodec<TSize extends number>(size: TSize, config?: BitArrayCodecConfig | boolean): FixedSizeCodec<boolean[], boolean[], TSize>;
//# sourceMappingURL=bit-array.d.ts.map
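
A small illustrative sketch of the bit-array codec declared above, assuming the size argument counts bytes and that eight booleans pack into each byte.

import { getBitArrayCodec } from '@solana/codecs-data-structures';

const flags = getBitArrayCodec(1); // FixedSizeCodec<boolean[], boolean[], 1>
const bytes = flags.encode([true, false, true, false, false, false, false, false]);
// assumed: bytes.length === 1, and decoding that byte returns the same eight booleans
flags.decode(bytes);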

@@ -1,5 +0,5 @@

import { BaseCodecConfig, Codec, Decoder, Encoder } from '@solana/codecs-core';
import { NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers';
import { Codec, Decoder, Encoder, FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder } from '@solana/codecs-core';
import { FixedSizeNumberCodec, FixedSizeNumberDecoder, FixedSizeNumberEncoder, NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers';
/** Defines the config for boolean codecs. */
export type BooleanCodecConfig<TSize extends NumberCodec | NumberEncoder | NumberDecoder> = BaseCodecConfig & {
export type BooleanCodecConfig<TSize extends NumberCodec | NumberEncoder | NumberDecoder> = {
/**

@@ -16,3 +16,7 @@ * The number codec to delegate to.

*/
export declare function getBooleanEncoder(config?: BooleanCodecConfig<NumberEncoder>): Encoder<boolean>;
export declare function getBooleanEncoder(): FixedSizeEncoder<boolean, 1>;
export declare function getBooleanEncoder<TSize extends number>(config: BooleanCodecConfig<NumberEncoder> & {
size: FixedSizeNumberEncoder<TSize>;
}): FixedSizeEncoder<boolean, TSize>;
export declare function getBooleanEncoder(config: BooleanCodecConfig<NumberEncoder>): Encoder<boolean>;
/**

@@ -23,3 +27,7 @@ * Decodes booleans.

*/
export declare function getBooleanDecoder(config?: BooleanCodecConfig<NumberDecoder>): Decoder<boolean>;
export declare function getBooleanDecoder(): FixedSizeDecoder<boolean, 1>;
export declare function getBooleanDecoder<TSize extends number>(config: BooleanCodecConfig<NumberDecoder> & {
size: FixedSizeNumberDecoder<TSize>;
}): FixedSizeDecoder<boolean, TSize>;
export declare function getBooleanDecoder(config: BooleanCodecConfig<NumberDecoder>): Decoder<boolean>;
/**

@@ -30,3 +38,7 @@ * Creates a boolean codec.

*/
export declare function getBooleanCodec(config?: BooleanCodecConfig<NumberCodec>): Codec<boolean>;
export declare function getBooleanCodec(): FixedSizeCodec<boolean, boolean, 1>;
export declare function getBooleanCodec<TSize extends number>(config: BooleanCodecConfig<NumberCodec> & {
size: FixedSizeNumberCodec<TSize>;
}): FixedSizeCodec<boolean, boolean, TSize>;
export declare function getBooleanCodec(config: BooleanCodecConfig<NumberCodec>): Codec<boolean>;
//# sourceMappingURL=boolean.d.ts.map
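
An illustrative sketch of the boolean overloads above, assuming the default size is a u8 and that the size config simply delegates to the given number codec.

import { getBooleanCodec } from '@solana/codecs-data-structures';
import { getU32Codec } from '@solana/codecs-numbers';

const bool8 = getBooleanCodec();                         // FixedSizeCodec<boolean, boolean, 1>
const bool32 = getBooleanCodec({ size: getU32Codec() }); // FixedSizeCodec<boolean, boolean, 4>
bool8.encode(true); // assumed: a single 0x01 byte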

@@ -1,5 +0,5 @@

import { BaseCodecConfig, Codec, Decoder, Encoder } from '@solana/codecs-core';
import { FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder, VariableSizeCodec, VariableSizeDecoder, VariableSizeEncoder } from '@solana/codecs-core';
import { NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers';
/** Defines the config for bytes codecs. */
export type BytesCodecConfig<TSize extends NumberCodec | NumberEncoder | NumberDecoder> = BaseCodecConfig & {
export type BytesCodecConfig<TSize extends NumberCodec | NumberEncoder | NumberDecoder> = {
/**

@@ -19,3 +19,6 @@ * The size of the byte array. It can be one of the following:

*/
export declare function getBytesEncoder(config?: BytesCodecConfig<NumberEncoder>): Encoder<Uint8Array>;
export declare function getBytesEncoder<TSize extends number>(config: BytesCodecConfig<NumberEncoder> & {
size: TSize;
}): FixedSizeEncoder<Uint8Array, TSize>;
export declare function getBytesEncoder(config?: BytesCodecConfig<NumberEncoder>): VariableSizeEncoder<Uint8Array>;
/**

@@ -26,3 +29,6 @@ * Decodes sized bytes.

*/
export declare function getBytesDecoder(config?: BytesCodecConfig<NumberDecoder>): Decoder<Uint8Array>;
export declare function getBytesDecoder<TSize extends number>(config: BytesCodecConfig<NumberDecoder> & {
size: TSize;
}): FixedSizeDecoder<Uint8Array, TSize>;
export declare function getBytesDecoder(config?: BytesCodecConfig<NumberDecoder>): VariableSizeDecoder<Uint8Array>;
/**

@@ -33,3 +39,6 @@ * Creates a sized bytes codec.

*/
export declare function getBytesCodec(config?: BytesCodecConfig<NumberCodec>): Codec<Uint8Array>;
export declare function getBytesCodec<TSize extends number>(config: BytesCodecConfig<NumberCodec> & {
size: TSize;
}): FixedSizeCodec<Uint8Array, Uint8Array, TSize>;
export declare function getBytesCodec(config?: BytesCodecConfig<NumberCodec>): VariableSizeCodec<Uint8Array>;
//# sourceMappingURL=bytes.d.ts.map
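
A sketch of the bytes overloads above. The default passthrough behaviour and the u32 length prefix are assumptions based on the config description in this diff.

import { getBytesCodec } from '@solana/codecs-data-structures';
import { getU32Codec } from '@solana/codecs-numbers';

const raw = getBytesCodec();                             // assumed: passes the bytes through unchanged
const fixed32 = getBytesCodec({ size: 32 });             // FixedSizeCodec<Uint8Array, Uint8Array, 32>
const prefixed = getBytesCodec({ size: getU32Codec() }); // assumed: u32 length prefix before the bytes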

@@ -1,2 +0,2 @@

import { BaseCodecConfig, Codec, Decoder, Encoder } from '@solana/codecs-core';
import { Codec, Decoder, Encoder } from '@solana/codecs-core';
import { NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers';

@@ -46,19 +46,19 @@ /**

}>, '__kind'>;
/** Get the name and codec of each variant in a data enum. */
export type DataEnumToCodecTuple<T extends DataEnum, U extends T = T> = Array<T extends never ? never : [
T['__kind'],
keyof Omit<T, '__kind'> extends never ? Codec<Omit<T, '__kind'>, Omit<U, '__kind'>> | Codec<void> : Codec<Omit<T, '__kind'>, Omit<U, '__kind'>>
]>;
/** Get the name and encoder of each variant in a data enum. */
export type DataEnumToEncoderTuple<T extends DataEnum> = Array<T extends never ? never : [
T['__kind'],
keyof Omit<T, '__kind'> extends never ? Encoder<Omit<T, '__kind'>> | Encoder<void> : Encoder<Omit<T, '__kind'>>
export type DataEnumToEncoderTuple<TFrom extends DataEnum> = Array<TFrom extends never ? never : [
TFrom['__kind'],
keyof Omit<TFrom, '__kind'> extends never ? Encoder<Omit<TFrom, '__kind'>> | Encoder<void> : Encoder<Omit<TFrom, '__kind'>>
]>;
/** Get the name and decoder of each variant in a data enum. */
export type DataEnumToDecoderTuple<T extends DataEnum> = Array<T extends never ? never : [
T['__kind'],
keyof Omit<T, '__kind'> extends never ? Decoder<Omit<T, '__kind'>> | Decoder<void> : Decoder<Omit<T, '__kind'>>
export type DataEnumToDecoderTuple<TTo extends DataEnum> = Array<TTo extends never ? never : [
TTo['__kind'],
keyof Omit<TTo, '__kind'> extends never ? Decoder<Omit<TTo, '__kind'>> | Decoder<void> : Decoder<Omit<TTo, '__kind'>>
]>;
/** Get the name and codec of each variant in a data enum. */
export type DataEnumToCodecTuple<TFrom extends DataEnum, TTo extends TFrom = TFrom> = Array<TFrom extends never ? never : [
TFrom['__kind'],
keyof Omit<TFrom, '__kind'> extends never ? Codec<Omit<TFrom, '__kind'>, Omit<TTo, '__kind'>> | Codec<void> : Codec<Omit<TFrom, '__kind'>, Omit<TTo, '__kind'>>
]>;
/** Defines the config for data enum codecs. */
export type DataEnumCodecConfig<TDiscriminator = NumberCodec | NumberEncoder | NumberDecoder> = BaseCodecConfig & {
export type DataEnumCodecConfig<TDiscriminator = NumberCodec | NumberEncoder | NumberDecoder> = {
/**

@@ -76,3 +76,3 @@ * The codec to use for the enum discriminator prefixing the variant.

*/
export declare function getDataEnumEncoder<T extends DataEnum>(variants: DataEnumToEncoderTuple<T>, config?: DataEnumCodecConfig<NumberEncoder>): Encoder<T>;
export declare function getDataEnumEncoder<TFrom extends DataEnum>(variants: DataEnumToEncoderTuple<TFrom>, config?: DataEnumCodecConfig<NumberEncoder>): Encoder<TFrom>;
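
For context, a hypothetical data enum built from these declarations might look as follows; the u8 discriminator default and the exact variant layout are assumptions.

import { getDataEnumCodec, getStructCodec, getUnitCodec } from '@solana/codecs-data-structures';
import { getU32Codec } from '@solana/codecs-numbers';

type Shape = { __kind: 'none' } | { __kind: 'circle'; radius: number };

const shape = getDataEnumCodec<Shape>([
    ['none', getUnitCodec()],
    ['circle', getStructCodec([['radius', getU32Codec()]])],
]);
shape.encode({ __kind: 'circle', radius: 5 }); // assumed: u8 discriminator 0x01 followed by the u32 radius
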
/**

@@ -79,0 +79,0 @@ * Creates a data enum decoder.

export * from './array';
export * from './array-like-codec-size';
export * from './assertions';

@@ -4,0 +3,0 @@ export * from './bit-array';

@@ -1,6 +0,6 @@

import { BaseCodecConfig, Codec, Decoder, Encoder } from '@solana/codecs-core';
import { Codec, Decoder, Encoder, FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder, VariableSizeCodec, VariableSizeDecoder, VariableSizeEncoder } from '@solana/codecs-core';
import { NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers';
import { ArrayLikeCodecSize } from './array-like-codec-size';
import { ArrayLikeCodecSize } from './array';
/** Defines the config for Map codecs. */
export type MapCodecConfig<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = BaseCodecConfig & {
export type MapCodecConfig<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = {
/**

@@ -19,3 +19,14 @@ * The size of the array.

*/
export declare function getMapEncoder<K, V>(key: Encoder<K>, value: Encoder<V>, config?: MapCodecConfig<NumberEncoder>): Encoder<Map<K, V>>;
export declare function getMapEncoder<TFromKey, TFromValue>(key: Encoder<TFromKey>, value: Encoder<TFromValue>, config: MapCodecConfig<NumberEncoder> & {
size: 0;
}): FixedSizeEncoder<Map<TFromKey, TFromValue>, 0>;
export declare function getMapEncoder<TFromKey, TFromValue>(key: FixedSizeEncoder<TFromKey>, value: FixedSizeEncoder<TFromValue>, config: MapCodecConfig<NumberEncoder> & {
size: number;
}): FixedSizeEncoder<Map<TFromKey, TFromValue>>;
export declare function getMapEncoder<TFromKey, TFromValue>(key: FixedSizeEncoder<TFromKey>, value: FixedSizeEncoder<TFromValue>, config: MapCodecConfig<NumberEncoder> & {
size: 'remainder';
}): VariableSizeEncoder<Map<TFromKey, TFromValue>>;
export declare function getMapEncoder<TFromKey, TFromValue>(key: Encoder<TFromKey>, value: Encoder<TFromValue>, config?: MapCodecConfig<NumberEncoder> & {
size?: number | NumberEncoder;
}): VariableSizeEncoder<Map<TFromKey, TFromValue>>;
/**

@@ -28,3 +39,14 @@ * Creates a decoder for a map.

*/
export declare function getMapDecoder<K, V>(key: Decoder<K>, value: Decoder<V>, config?: MapCodecConfig<NumberDecoder>): Decoder<Map<K, V>>;
export declare function getMapDecoder<TToKey, TToValue>(key: Decoder<TToKey>, value: Decoder<TToValue>, config: MapCodecConfig<NumberDecoder> & {
size: 0;
}): FixedSizeDecoder<Map<TToKey, TToValue>, 0>;
export declare function getMapDecoder<TToKey, TToValue>(key: FixedSizeDecoder<TToKey>, value: FixedSizeDecoder<TToValue>, config: MapCodecConfig<NumberDecoder> & {
size: number;
}): FixedSizeDecoder<Map<TToKey, TToValue>>;
export declare function getMapDecoder<TToKey, TToValue>(key: FixedSizeDecoder<TToKey>, value: FixedSizeDecoder<TToValue>, config: MapCodecConfig<NumberDecoder> & {
size: 'remainder';
}): VariableSizeDecoder<Map<TToKey, TToValue>>;
export declare function getMapDecoder<TToKey, TToValue>(key: Decoder<TToKey>, value: Decoder<TToValue>, config?: MapCodecConfig<NumberDecoder> & {
size?: number | NumberDecoder;
}): VariableSizeDecoder<Map<TToKey, TToValue>>;
/**

@@ -37,3 +59,14 @@ * Creates a codec for a map.

*/
export declare function getMapCodec<TK, TV, UK extends TK = TK, UV extends TV = TV>(key: Codec<TK, UK>, value: Codec<TV, UV>, config?: MapCodecConfig<NumberCodec>): Codec<Map<TK, TV>, Map<UK, UV>>;
export declare function getMapCodec<TFromKey, TFromValue, TToKey extends TFromKey = TFromKey, TToValue extends TFromValue = TFromValue>(key: Codec<TFromKey, TToKey>, value: Codec<TFromValue, TToValue>, config: MapCodecConfig<NumberCodec> & {
size: 0;
}): FixedSizeCodec<Map<TFromKey, TFromValue>, Map<TToKey, TToValue>, 0>;
export declare function getMapCodec<TFromKey, TFromValue, TToKey extends TFromKey = TFromKey, TToValue extends TFromValue = TFromValue>(key: FixedSizeCodec<TFromKey, TToKey>, value: FixedSizeCodec<TFromValue, TToValue>, config: MapCodecConfig<NumberCodec> & {
size: number;
}): FixedSizeCodec<Map<TFromKey, TFromValue>, Map<TToKey, TToValue>>;
export declare function getMapCodec<TFromKey, TFromValue, TToKey extends TFromKey = TFromKey, TToValue extends TFromValue = TFromValue>(key: FixedSizeCodec<TFromKey, TToKey>, value: FixedSizeCodec<TFromValue, TToValue>, config: MapCodecConfig<NumberCodec> & {
size: 'remainder';
}): VariableSizeCodec<Map<TFromKey, TFromValue>, Map<TToKey, TToValue>>;
export declare function getMapCodec<TFromKey, TFromValue, TToKey extends TFromKey = TFromKey, TToValue extends TFromValue = TFromValue>(key: Codec<TFromKey, TToKey>, value: Codec<TFromValue, TToValue>, config?: MapCodecConfig<NumberCodec> & {
size?: number | NumberCodec;
}): VariableSizeCodec<Map<TFromKey, TFromValue>, Map<TToKey, TToValue>>;
//# sourceMappingURL=map.d.ts.map
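
An illustrative sketch of the map overloads above, assuming the default u32 count prefix and a back-to-back key/value layout.

import { getMapCodec } from '@solana/codecs-data-structures';
import { getU8Codec, getU32Codec } from '@solana/codecs-numbers';

const balances = getMapCodec(getU8Codec(), getU32Codec());              // assumed: u32 count prefix, then key/value pairs
const fixedMap = getMapCodec(getU8Codec(), getU32Codec(), { size: 2 }); // exactly two entries, no prefix
balances.encode(new Map([[1, 100], [2, 200]]));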

@@ -1,5 +0,5 @@

import { BaseCodecConfig, Codec, Decoder, Encoder } from '@solana/codecs-core';
import { NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers';
import { Codec, Decoder, Encoder, FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder, VariableSizeCodec, VariableSizeDecoder, VariableSizeEncoder } from '@solana/codecs-core';
import { FixedSizeNumberCodec, FixedSizeNumberDecoder, FixedSizeNumberEncoder, NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers';
/** Defines the config for nullable codecs. */
export type NullableCodecConfig<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = BaseCodecConfig & {
export type NullableCodecConfig<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = {
/**

@@ -26,3 +26,9 @@ * The codec to use for the boolean prefix.

*/
export declare function getNullableEncoder<T>(item: Encoder<T>, config?: NullableCodecConfig<NumberEncoder>): Encoder<T | null>;
export declare function getNullableEncoder<TFrom>(item: FixedSizeEncoder<TFrom>, config: NullableCodecConfig<FixedSizeNumberEncoder> & {
fixed: true;
}): FixedSizeEncoder<TFrom | null>;
export declare function getNullableEncoder<TFrom>(item: FixedSizeEncoder<TFrom, 0>, config?: NullableCodecConfig<FixedSizeNumberEncoder>): FixedSizeEncoder<TFrom | null>;
export declare function getNullableEncoder<TFrom>(item: Encoder<TFrom>, config?: NullableCodecConfig<NumberEncoder> & {
fixed?: false;
}): VariableSizeEncoder<TFrom | null>;
/**

@@ -34,3 +40,9 @@ * Creates a decoder for an optional value using `null` as the `None` value.

*/
export declare function getNullableDecoder<T>(item: Decoder<T>, config?: NullableCodecConfig<NumberDecoder>): Decoder<T | null>;
export declare function getNullableDecoder<TTo>(item: FixedSizeDecoder<TTo>, config: NullableCodecConfig<FixedSizeNumberDecoder> & {
fixed: true;
}): FixedSizeDecoder<TTo | null>;
export declare function getNullableDecoder<TTo>(item: FixedSizeDecoder<TTo, 0>, config?: NullableCodecConfig<FixedSizeNumberDecoder>): FixedSizeDecoder<TTo | null>;
export declare function getNullableDecoder<TTo>(item: Decoder<TTo>, config?: NullableCodecConfig<NumberDecoder> & {
fixed?: false;
}): VariableSizeDecoder<TTo | null>;
/**

@@ -42,3 +54,9 @@ * Creates a codec for an optional value using `null` as the `None` value.

*/
export declare function getNullableCodec<T, U extends T = T>(item: Codec<T, U>, config?: NullableCodecConfig<NumberCodec>): Codec<T | null, U | null>;
export declare function getNullableCodec<TFrom, TTo extends TFrom = TFrom>(item: FixedSizeCodec<TFrom, TTo>, config: NullableCodecConfig<FixedSizeNumberCodec> & {
fixed: true;
}): FixedSizeCodec<TFrom | null, TTo | null>;
export declare function getNullableCodec<TFrom, TTo extends TFrom = TFrom>(item: FixedSizeCodec<TFrom, TTo, 0>, config?: NullableCodecConfig<FixedSizeNumberCodec>): FixedSizeCodec<TFrom | null, TTo | null>;
export declare function getNullableCodec<TFrom, TTo extends TFrom = TFrom>(item: Codec<TFrom, TTo>, config?: NullableCodecConfig<NumberCodec> & {
fixed?: false;
}): VariableSizeCodec<TFrom | null, TTo | null>;
//# sourceMappingURL=nullable.d.ts.map
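
A sketch of the nullable overloads above, assuming a u8 boolean prefix by default and that `fixed: true` pads the null case to a constant size.

import { getNullableCodec } from '@solana/codecs-data-structures';
import { getU32Codec } from '@solana/codecs-numbers';

const maybeAmount = getNullableCodec(getU32Codec());                 // VariableSizeCodec<number | null>
const fixedMaybe = getNullableCodec(getU32Codec(), { fixed: true }); // FixedSizeCodec<number | null>, assumed 1 + 4 bytes
maybeAmount.encode(null); // assumed: a single 0x00 byte
maybeAmount.encode(42);   // assumed: 0x01 followed by the u32 value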

@@ -1,3 +0,3 @@

import { BaseCodecConfig, Codec, Decoder, Encoder } from '@solana/codecs-core';
import { NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers';
import { FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder, VariableSizeCodec, VariableSizeDecoder, VariableSizeEncoder } from '@solana/codecs-core';
import { FixedSizeNumberCodec, FixedSizeNumberDecoder, FixedSizeNumberEncoder, NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers';
/**

@@ -16,3 +16,3 @@ * Defines a scalar enum as a type from its constructor.

/** Defines the config for scalar enum codecs. */
export type ScalarEnumCodecConfig<TDiscriminator extends NumberCodec | NumberEncoder | NumberDecoder> = BaseCodecConfig & {
export type ScalarEnumCodecConfig<TDiscriminator extends NumberCodec | NumberEncoder | NumberDecoder> = {
/**

@@ -30,3 +30,7 @@ * The codec to use for the enum discriminator.

*/
export declare function getScalarEnumEncoder<T>(constructor: ScalarEnum<T>, config?: ScalarEnumCodecConfig<NumberEncoder>): Encoder<T>;
export declare function getScalarEnumEncoder<TFrom, TFromConstructor extends ScalarEnum<TFrom>>(constructor: TFromConstructor): FixedSizeEncoder<TFrom, 1>;
export declare function getScalarEnumEncoder<TFrom, TFromConstructor extends ScalarEnum<TFrom>, TSize extends number>(constructor: TFromConstructor, config: ScalarEnumCodecConfig<NumberEncoder> & {
size: FixedSizeNumberEncoder<TSize>;
}): FixedSizeEncoder<TFrom, TSize>;
export declare function getScalarEnumEncoder<TFrom, TFromConstructor extends ScalarEnum<TFrom>>(constructor: TFromConstructor, config?: ScalarEnumCodecConfig<NumberEncoder>): VariableSizeEncoder<TFrom>;
/**

@@ -38,3 +42,7 @@ * Creates a scalar enum decoder.

*/
export declare function getScalarEnumDecoder<T>(constructor: ScalarEnum<T>, config?: ScalarEnumCodecConfig<NumberDecoder>): Decoder<T>;
export declare function getScalarEnumDecoder<TTo, TToConstructor extends ScalarEnum<TTo>>(constructor: TToConstructor): FixedSizeDecoder<TTo, 1>;
export declare function getScalarEnumDecoder<TTo, TToConstructor extends ScalarEnum<TTo>, TSize extends number>(constructor: TToConstructor, config: ScalarEnumCodecConfig<NumberDecoder> & {
size: FixedSizeNumberDecoder<TSize>;
}): FixedSizeDecoder<TTo, TSize>;
export declare function getScalarEnumDecoder<TTo, TToConstructor extends ScalarEnum<TTo>>(constructor: TToConstructor, config?: ScalarEnumCodecConfig<NumberDecoder>): VariableSizeDecoder<TTo>;
/**

@@ -46,3 +54,7 @@ * Creates a scalar enum codec.

*/
export declare function getScalarEnumCodec<T>(constructor: ScalarEnum<T>, config?: ScalarEnumCodecConfig<NumberCodec>): Codec<T>;
export declare function getScalarEnumCodec<TFrom, TFromConstructor extends ScalarEnum<TFrom>>(constructor: TFromConstructor): FixedSizeCodec<TFrom, TFrom, 1>;
export declare function getScalarEnumCodec<TFrom, TFromConstructor extends ScalarEnum<TFrom>, TSize extends number>(constructor: TFromConstructor, config: ScalarEnumCodecConfig<NumberCodec> & {
size: FixedSizeNumberCodec<TSize>;
}): FixedSizeCodec<TFrom, TFrom, TSize>;
export declare function getScalarEnumCodec<TFrom, TFromConstructor extends ScalarEnum<TFrom>>(constructor: TFromConstructor, config?: ScalarEnumCodecConfig<NumberCodec>): VariableSizeCodec<TFrom>;
//# sourceMappingURL=scalar-enum.d.ts.map
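
A sketch of the scalar enum overloads above, assuming the discriminator defaults to a u8 storing the variant index.

import { getScalarEnumCodec } from '@solana/codecs-data-structures';

enum Direction { Left, Right }

const direction = getScalarEnumCodec(Direction); // FixedSizeCodec<Direction, Direction, 1>
direction.encode(Direction.Right);               // assumed: a single byte 0x01
direction.decode(new Uint8Array([0]));           // assumed: Direction.Left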

@@ -1,6 +0,6 @@

import { BaseCodecConfig, Codec, Decoder, Encoder } from '@solana/codecs-core';
import { Codec, Decoder, Encoder, FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder, VariableSizeCodec, VariableSizeDecoder, VariableSizeEncoder } from '@solana/codecs-core';
import { NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers';
import { ArrayLikeCodecSize } from './array-like-codec-size';
import { ArrayLikeCodecSize } from './array';
/** Defines the config for set codecs. */
export type SetCodecConfig<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = BaseCodecConfig & {
export type SetCodecConfig<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = {
/**

@@ -18,3 +18,14 @@ * The size of the set.

*/
export declare function getSetEncoder<T>(item: Encoder<T>, config?: SetCodecConfig<NumberEncoder>): Encoder<Set<T>>;
export declare function getSetEncoder<TFrom>(item: Encoder<TFrom>, config: SetCodecConfig<NumberEncoder> & {
size: 0;
}): FixedSizeEncoder<Set<TFrom>, 0>;
export declare function getSetEncoder<TFrom>(item: FixedSizeEncoder<TFrom>, config: SetCodecConfig<NumberEncoder> & {
size: number;
}): FixedSizeEncoder<Set<TFrom>>;
export declare function getSetEncoder<TFrom>(item: FixedSizeEncoder<TFrom>, config: SetCodecConfig<NumberEncoder> & {
size: 'remainder';
}): VariableSizeEncoder<Set<TFrom>>;
export declare function getSetEncoder<TFrom>(item: Encoder<TFrom>, config?: SetCodecConfig<NumberEncoder> & {
size?: number | NumberEncoder;
}): VariableSizeEncoder<Set<TFrom>>;
/**

@@ -26,3 +37,14 @@ * Decodes a set of items.

*/
export declare function getSetDecoder<T>(item: Decoder<T>, config?: SetCodecConfig<NumberDecoder>): Decoder<Set<T>>;
export declare function getSetDecoder<TTo>(item: Decoder<TTo>, config: SetCodecConfig<NumberDecoder> & {
size: 0;
}): FixedSizeDecoder<Set<TTo>, 0>;
export declare function getSetDecoder<TTo>(item: FixedSizeDecoder<TTo>, config: SetCodecConfig<NumberDecoder> & {
size: number;
}): FixedSizeDecoder<Set<TTo>>;
export declare function getSetDecoder<TTo>(item: FixedSizeDecoder<TTo>, config: SetCodecConfig<NumberDecoder> & {
size: 'remainder';
}): VariableSizeDecoder<Set<TTo>>;
export declare function getSetDecoder<TTo>(item: Decoder<TTo>, config?: SetCodecConfig<NumberDecoder> & {
size?: number | NumberDecoder;
}): VariableSizeDecoder<Set<TTo>>;
/**

@@ -34,3 +56,14 @@ * Creates a codec for a set of items.

*/
export declare function getSetCodec<T, U extends T = T>(item: Codec<T, U>, config?: SetCodecConfig<NumberCodec>): Codec<Set<T>, Set<U>>;
export declare function getSetCodec<TFrom, TTo extends TFrom = TFrom>(item: Codec<TFrom, TTo>, config: SetCodecConfig<NumberCodec> & {
size: 0;
}): FixedSizeCodec<Set<TFrom>, Set<TTo>, 0>;
export declare function getSetCodec<TFrom, TTo extends TFrom = TFrom>(item: FixedSizeCodec<TFrom, TTo>, config: SetCodecConfig<NumberCodec> & {
size: number;
}): FixedSizeCodec<Set<TFrom>, Set<TTo>>;
export declare function getSetCodec<TFrom, TTo extends TFrom = TFrom>(item: FixedSizeCodec<TFrom, TTo>, config: SetCodecConfig<NumberCodec> & {
size: 'remainder';
}): VariableSizeCodec<Set<TFrom>, Set<TTo>>;
export declare function getSetCodec<TFrom, TTo extends TFrom = TFrom>(item: Codec<TFrom, TTo>, config?: SetCodecConfig<NumberCodec> & {
size?: number | NumberCodec;
}): VariableSizeCodec<Set<TFrom>, Set<TTo>>;
//# sourceMappingURL=set.d.ts.map
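
A sketch of the set overloads above; they appear to mirror the array codec, with the same assumed u32 count prefix by default.

import { getSetCodec } from '@solana/codecs-data-structures';
import { getU8Codec } from '@solana/codecs-numbers';

const tags = getSetCodec(getU8Codec());                        // assumed: u32 count prefix, then items
const rest = getSetCodec(getU8Codec(), { size: 'remainder' }); // consume the remaining bytes
tags.encode(new Set([1, 2, 3]));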

@@ -1,16 +0,26 @@

import { BaseCodecConfig, Codec, Decoder, Encoder } from '@solana/codecs-core';
import { Codec, Decoder, Encoder, FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder, VariableSizeCodec, VariableSizeDecoder, VariableSizeEncoder } from '@solana/codecs-core';
/** Get the name and encoder of each field in a struct. */
export type StructToEncoderTuple<T extends object> = Array<{
[K in keyof T]: [K, Encoder<T[K]>];
}[keyof T]>;
export type StructToEncoderTuple<TFrom extends object> = Array<{
[K in keyof TFrom]: [K, Encoder<TFrom[K]>];
}[keyof TFrom]>;
/** Get the name and fixed-size encoder of each field in a struct. */
export type StructToFixedSizeEncoderTuple<TFrom extends object> = Array<{
[K in keyof TFrom]: [K, FixedSizeEncoder<TFrom[K]>];
}[keyof TFrom]>;
/** Get the name and decoder of each field in a struct. */
export type StructToDecoderTuple<T extends object> = Array<{
[K in keyof T]: [K, Decoder<T[K]>];
}[keyof T]>;
export type StructToDecoderTuple<TTo extends object> = Array<{
[K in keyof TTo]: [K, Decoder<TTo[K]>];
}[keyof TTo]>;
/** Get the name and fixed-size decoder of each field in a struct. */
export type StructToFixedSizeDecoderTuple<TTo extends object> = Array<{
[K in keyof TTo]: [K, FixedSizeDecoder<TTo[K]>];
}[keyof TTo]>;
/** Get the name and codec of each field in a struct. */
export type StructToCodecTuple<T extends object, U extends T> = Array<{
[K in keyof T]: [K, Codec<T[K], U[K]>];
}[keyof T]>;
/** Defines the config for struct codecs. */
export type StructCodecConfig = BaseCodecConfig;
export type StructToCodecTuple<TFrom extends object, TTo extends TFrom> = Array<{
[K in keyof TFrom]: [K, Codec<TFrom[K], TTo[K]>];
}[keyof TFrom]>;
/** Get the name and fixed-size codec of each field in a struct. */
export type StructToFixedSizeCodecTuple<TFrom extends object, TTo extends TFrom> = Array<{
[K in keyof TFrom]: [K, FixedSizeCodec<TFrom[K], TTo[K]>];
}[keyof TFrom]>;
/**

@@ -20,5 +30,5 @@ * Creates an encoder for a custom object.

* @param fields - The name and encoder of each field.
* @param config - A set of config for the encoder.
*/
export declare function getStructEncoder<T extends object>(fields: StructToEncoderTuple<T>, config?: StructCodecConfig): Encoder<T>;
export declare function getStructEncoder<TFrom extends object>(fields: StructToFixedSizeEncoderTuple<TFrom>): FixedSizeEncoder<TFrom>;
export declare function getStructEncoder<TFrom extends object>(fields: StructToEncoderTuple<TFrom>): VariableSizeEncoder<TFrom>;
/**

@@ -28,5 +38,5 @@ * Creates a decoder for a custom object.

* @param fields - The name and decoder of each field.
* @param config - A set of config for the decoder.
*/
export declare function getStructDecoder<T extends object>(fields: StructToDecoderTuple<T>, config?: StructCodecConfig): Decoder<T>;
export declare function getStructDecoder<TTo extends object>(fields: StructToFixedSizeDecoderTuple<TTo>): FixedSizeDecoder<TTo>;
export declare function getStructDecoder<TTo extends object>(fields: StructToDecoderTuple<TTo>): VariableSizeDecoder<TTo>;
/**

@@ -36,5 +46,5 @@ * Creates a codec for a custom object.

* @param fields - The name and codec of each field.
* @param config - A set of config for the codec.
*/
export declare function getStructCodec<T extends object, U extends T = T>(fields: StructToCodecTuple<T, U>, config?: StructCodecConfig): Codec<T, U>;
export declare function getStructCodec<TFrom extends object, TTo extends TFrom = TFrom>(fields: StructToFixedSizeCodecTuple<TFrom, TTo>): FixedSizeCodec<TFrom, TTo>;
export declare function getStructCodec<TFrom extends object, TTo extends TFrom = TFrom>(fields: StructToCodecTuple<TFrom, TTo>): VariableSizeCodec<TFrom, TTo>;
//# sourceMappingURL=struct.d.ts.map
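
A sketch of the struct overloads above, assuming fields are written back to back in declaration order and that all-fixed-size fields yield a FixedSizeCodec.

import { getBooleanCodec, getStructCodec } from '@solana/codecs-data-structures';
import { getU32Codec } from '@solana/codecs-numbers';

type Person = { age: number; isAlive: boolean };

const person = getStructCodec<Person>([
    ['age', getU32Codec()],
    ['isAlive', getBooleanCodec()],
]); // FixedSizeCodec<Person> since both field codecs are fixed-size
person.encode({ age: 42, isAlive: true }); // assumed: 4 bytes for age, then 1 byte for isAlive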

@@ -1,13 +0,20 @@

import { BaseCodecConfig, Codec, Decoder, Encoder } from '@solana/codecs-core';
/** Defines the config for tuple codecs. */
export type TupleCodecConfig = BaseCodecConfig;
type WrapInEncoder<T> = {
[P in keyof T]: Encoder<T[P]>;
import { Codec, Decoder, Encoder, FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder, VariableSizeCodec, VariableSizeDecoder, VariableSizeEncoder } from '@solana/codecs-core';
type WrapInFixedSizeEncoder<TFrom> = {
[P in keyof TFrom]: FixedSizeEncoder<TFrom[P]>;
};
type WrapInDecoder<T> = {
[P in keyof T]: Decoder<T[P]>;
type WrapInEncoder<TFrom> = {
[P in keyof TFrom]: Encoder<TFrom[P]>;
};
type WrapInCodec<T, U extends T = T> = {
[P in keyof T]: Codec<T[P], U[P]>;
type WrapInFixedSizeDecoder<TTo> = {
[P in keyof TTo]: FixedSizeDecoder<TTo[P]>;
};
type WrapInDecoder<TTo> = {
[P in keyof TTo]: Decoder<TTo[P]>;
};
type WrapInCodec<TFrom, TTo extends TFrom> = {
[P in keyof TFrom]: Codec<TFrom[P], TTo[P]>;
};
type WrapInFixedSizeCodec<TFrom, TTo extends TFrom> = {
[P in keyof TFrom]: FixedSizeCodec<TFrom[P], TTo[P]>;
};
type AnyArray = any[];

@@ -18,5 +25,5 @@ /**

* @param items - The encoders to use for each item in the tuple.
* @param config - A set of config for the encoder.
*/
export declare function getTupleEncoder<T extends AnyArray>(items: WrapInEncoder<[...T]>, config?: TupleCodecConfig): Encoder<T>;
export declare function getTupleEncoder<TFrom extends AnyArray>(items: WrapInFixedSizeEncoder<[...TFrom]>): FixedSizeEncoder<TFrom>;
export declare function getTupleEncoder<TFrom extends AnyArray>(items: WrapInEncoder<[...TFrom]>): VariableSizeEncoder<TFrom>;
/**

@@ -26,5 +33,5 @@ * Creates a decoder for a tuple-like array.

* @param items - The decoders to use for each item in the tuple.
* @param config - A set of config for the decoder.
*/
export declare function getTupleDecoder<T extends AnyArray>(items: WrapInDecoder<[...T]>, config?: TupleCodecConfig): Decoder<T>;
export declare function getTupleDecoder<TTo extends AnyArray>(items: WrapInFixedSizeDecoder<[...TTo]>): FixedSizeDecoder<TTo>;
export declare function getTupleDecoder<TTo extends AnyArray>(items: WrapInDecoder<[...TTo]>): VariableSizeDecoder<TTo>;
/**

@@ -34,6 +41,6 @@ * Creates a codec for a tuple-like array.

* @param items - The codecs to use for each item in the tuple.
* @param config - A set of config for the codec.
*/
export declare function getTupleCodec<T extends AnyArray, U extends T = T>(items: WrapInCodec<[...T], [...U]>, config?: TupleCodecConfig): Codec<T, U>;
export declare function getTupleCodec<TFrom extends AnyArray, TTo extends TFrom = TFrom>(items: WrapInFixedSizeCodec<[...TFrom], [...TTo]>): FixedSizeCodec<TFrom, TTo>;
export declare function getTupleCodec<TFrom extends AnyArray, TTo extends TFrom = TFrom>(items: WrapInCodec<[...TFrom], [...TTo]>): VariableSizeCodec<TFrom, TTo>;
export {};
//# sourceMappingURL=tuple.d.ts.map
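
A sketch of the tuple overloads above, assuming items are encoded in order with no prefix.

import { getTupleCodec } from '@solana/codecs-data-structures';
import { getU8Codec, getU32Codec } from '@solana/codecs-numbers';

const pair = getTupleCodec([getU8Codec(), getU32Codec()]); // FixedSizeCodec<[number, number]>
pair.encode([1, 2]); // assumed: 1 byte then 4 bytes, 5 bytes total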

@@ -1,22 +0,14 @@

import { BaseCodecConfig, Codec, Decoder, Encoder } from '@solana/codecs-core';
/** Defines the config for unit codecs. */
export type UnitSerializerconfig = BaseCodecConfig;
import { FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder } from '@solana/codecs-core';
/**
* Creates a void encoder.
*
* @param config - A set of config for the encoder.
*/
export declare function getUnitEncoder(config?: UnitSerializerconfig): Encoder<void>;
export declare function getUnitEncoder(): FixedSizeEncoder<void, 0>;
/**
* Creates a void decoder.
*
* @param config - A set of config for the decoder.
*/
export declare function getUnitDecoder(config?: UnitSerializerconfig): Decoder<void>;
export declare function getUnitDecoder(): FixedSizeDecoder<void, 0>;
/**
* Creates a void codec.
*
* @param config - A set of config for the codec.
*/
export declare function getUnitCodec(config?: UnitSerializerconfig): Codec<void>;
export declare function getUnitCodec(): FixedSizeCodec<void, void, 0>;
//# sourceMappingURL=unit.d.ts.map
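
A sketch of the unit codec above: it reads and writes nothing, which is handy for empty data enum variants (an assumption about its intended use).

import { getUnitCodec } from '@solana/codecs-data-structures';

const unit = getUnitCodec();          // FixedSizeCodec<void, void, 0>
const empty = unit.encode(undefined); // assumed: empty.length === 0
unit.decode(empty);                   // assumed: undefined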

@@ -1,5 +0,13 @@

/** Returns the max size or null if at least one size is null. */
export declare function maxCodecSizes(sizes: (number | null)[]): number | null;
/** Returns the sum of all sizes or null if at least one size is null. */
export declare function sumCodecSizes(sizes: (number | null)[]): number | null;
export declare function getFixedSize(codec: {
fixedSize: number;
} | {
maxSize?: number;
}): number | null;
export declare function getMaxSize(codec: {
fixedSize: number;
} | {
maxSize?: number;
}): number | null;
//# sourceMappingURL=utils.d.ts.map
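
These size helpers do not appear in the package's export list, so the following re-implements them purely to illustrate the declared return behaviour; the names and the `fixedSize`/`maxSize` shape are taken from the declarations above, everything else is an assumption.

type SizeInfo = { fixedSize: number } | { maxSize?: number };

const getFixedSizeSketch = (codec: SizeInfo): number | null =>
    'fixedSize' in codec ? codec.fixedSize : null;

const getMaxSizeSketch = (codec: SizeInfo): number | null =>
    'fixedSize' in codec ? codec.fixedSize : codec.maxSize ?? null;

getFixedSizeSketch({ fixedSize: 4 }); // 4
getFixedSizeSketch({ maxSize: 10 });  // null: only fixed-size codecs report a fixed size
getMaxSizeSketch({ maxSize: 10 });    // 10
getMaxSizeSketch({});                 // null: no upper bound known
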
{
"name": "@solana/codecs-data-structures",
"version": "2.0.0-experimental.735654a",
"version": "2.0.0-experimental.745437f",
"description": "Codecs for various data structures",

@@ -52,4 +52,4 @@ "exports": {

"dependencies": {
"@solana/codecs-core": "2.0.0-experimental.735654a",
"@solana/codecs-numbers": "2.0.0-experimental.735654a"
"@solana/codecs-core": "2.0.0-experimental.745437f",
"@solana/codecs-numbers": "2.0.0-experimental.745437f"
},

@@ -70,8 +70,8 @@ "devDependencies": {

"jest-runner-prettier": "^1.0.0",
"prettier": "^2.8",
"tsup": "7.2.0",
"prettier": "^3.1",
"tsup": "^8.0.1",
"typescript": "^5.2.2",
"version-from-git": "^1.1.1",
"@solana/codecs-strings": "2.0.0-experimental.745437f",
"build-scripts": "0.0.0",
"@solana/codecs-strings": "2.0.0-experimental.735654a",
"test-config": "0.0.0",

@@ -78,0 +78,0 @@ "tsconfig": "0.0.0"

Sorry, the diff of this file is not supported yet
