micro-eth-signer - npm Package Compare versions

Comparing version 0.11.0 to 0.12.0

esm/index.d.ts
import { addr } from './address.js';
import { TxType, TxCoder } from './tx.js';
import { TxType, TxCoder, AuthorizationItem, AuthorizationRequest } from './tx.js';
import { weieth, weigwei } from './utils.js';
export { addr, weigwei, weieth };
/**
* Basic message signing & verification. Matches ethers and etherscan behavior.
* TODO: research whether EIP-191 and EIP-712 are popular, add them.
* EIP-7702 Authorizations
*/
export declare const messenger: {
sign(msg: string, privateKey: string, extraEntropy?: undefined): string;
verify(signature: string, msg: string, address: string): boolean;
export declare const authorization: {
_getHash(req: AuthorizationRequest): Uint8Array;
sign(req: AuthorizationRequest, privateKey: string): AuthorizationItem;
getAuthority(item: AuthorizationItem): string;
};
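A minimal usage sketch of the new `authorization` API declared above, assuming the package's top-level export; the private key and delegate address below are placeholders:

```ts
import { authorization } from 'micro-eth-signer';

// Placeholder values for illustration only
const privateKey = '0x0101010101010101010101010101010101010101010101010101010101010101';
const req = {
  chainId: 1n, // mainnet; 0n would authorize any chain
  address: '0x1111111111111111111111111111111111111111', // delegate contract
  nonce: 0n,
};
const item = authorization.sign(req, privateKey); // { ...req, r, s, yParity }
const signer = authorization.getAuthority(item); // recovers the signing EOA's address
```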
declare const DEFAULTS: {
readonly accessList: readonly [];
readonly authorizationList: readonly [];
readonly chainId: 1n;

@@ -67,4 +68,4 @@ readonly data: "";

setWholeAmount(accountBalance: bigint, burnRemaining?: boolean): Transaction<T>;
static fromRawBytes(bytes: Uint8Array, strict?: boolean): Transaction<"legacy" | "eip2930" | "eip1559" | "eip4844">;
static fromHex(hex: string, strict?: boolean): Transaction<"legacy" | "eip2930" | "eip1559" | "eip4844">;
static fromRawBytes(bytes: Uint8Array, strict?: boolean): Transaction<"legacy" | "eip2930" | "eip1559" | "eip4844" | "eip7702">;
static fromHex(hex: string, strict?: boolean): Transaction<"legacy" | "eip2930" | "eip1559" | "eip4844" | "eip7702">;
private assertIsSigned;

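With the widened union, EIP-7702 (type 0x04) payloads now parse through the same generic entry points. A sketch, with the serialized transaction left abstract and assuming the instance exposes its parsed fields on `raw` as elsewhere in this library:

```ts
import { Transaction } from 'micro-eth-signer';

declare const rawHex: string; // '0x04' + RLP body of a signed eip7702 tx (elided)
const tx = Transaction.fromHex(rawHex);
if (tx.type === 'eip7702') {
  // eip7702 transactions carry an authorizationList alongside the access list
  console.log(tx.raw.authorizationList);
}
```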
@@ -71,0 +72,0 @@ /**

/*! micro-eth-signer - MIT License (c) 2021 Paul Miller (paulmillr.com) */
import { secp256k1 } from '@noble/curves/secp256k1';
import { keccak_256 } from '@noble/hashes/sha3';
import { bytesToHex } from '@noble/hashes/utils';
import { bytesToHex, concatBytes } from '@noble/hashes/utils';
import { addr } from './address.js';
// prettier-ignore
import { TxVersions, RawTx, decodeLegacyV, removeSig, sortRawData, validateFields, } from './tx.js';
import { TxVersions, RawTx, decodeLegacyV, removeSig, sortRawData, validateFields, authorizationRequest } from './tx.js';
import { RLP } from './rlp.js';
// prettier-ignore
import { amounts, astr, add0x, ethHex, ethHexNoLeadingZero, strip0x, weieth, weigwei, cloneDeep, } from './utils.js';
import { amounts, astr, ethHex, ethHexNoLeadingZero, strip0x, weieth, weigwei, cloneDeep, } from './utils.js';
export { addr, weigwei, weieth };
// The file exports Transaction, but actual (RLP) parsing logic is done in `./tx`
/**
* Basic message signing & verification. Matches ethers and etherscan behavior.
* TODO: research whether EIP-191 and EIP-712 are popular, add them.
* EIP-7702 Authorizations
*/
export const messenger = {
sign(msg, privateKey, extraEntropy = undefined) {
astr(msg);
export const authorization = {
_getHash(req) {
const msg = RLP.encode(authorizationRequest.decode(req));
return keccak_256(concatBytes(new Uint8Array([0x05]), msg));
},
sign(req, privateKey) {
astr(privateKey);
const hash = keccak_256(msg);
const sig = secp256k1.sign(hash, ethHex.decode(privateKey), { extraEntropy });
const end = sig.recovery === 0 ? '1b' : '1c';
return add0x(sig.toCompactHex() + end);
const sig = secp256k1.sign(this._getHash(req), ethHex.decode(privateKey));
return { ...req, r: sig.r, s: sig.s, yParity: sig.recovery };
},
verify(signature, msg, address) {
astr(signature);
astr(msg);
astr(address);
signature = strip0x(signature);
if (signature.length !== 65 * 2)
throw new Error('invalid signature length');
const sigh = signature.slice(0, -2);
const end = signature.slice(-2);
if (!['1b', '1c'].includes(end))
throw new Error('invalid recovery bit');
const sig = secp256k1.Signature.fromCompact(sigh).addRecoveryBit(end === '1b' ? 0 : 1);
const hash = keccak_256(msg);
const pub = sig.recoverPublicKey(hash).toHex(false);
const recoveredAddr = addr.fromPublicKey(pub);
return recoveredAddr === address && secp256k1.verify(sig, hash, pub);
getAuthority(item) {
const { r, s, yParity, ...req } = item;
const hash = this._getHash(req);
const sig = new secp256k1.Signature(r, s).addRecoveryBit(yParity);
const point = sig.recoverPublicKey(hash);
return addr.fromPublicKey(point.toHex(false));
},

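For reference, `_getHash` above is the EIP-7702 signing payload `keccak256(0x05 || rlp([chain_id, address, nonce]))`, where `0x05` is the EIP's MAGIC domain-separation prefix. Spelled out, using the `RLP` and `authorizationRequest` imports of this module:

```ts
import { keccak_256 } from '@noble/hashes/sha3';
import { concatBytes } from '@noble/hashes/utils';

declare const req: AuthorizationRequest;
// Equivalent of authorization._getHash(req):
const MAGIC = new Uint8Array([0x05]); // EIP-7702 MAGIC prefix
// authorizationRequest.decode(req) maps { chainId, address, nonce } to an RLP tuple
const payload = RLP.encode(authorizationRequest.decode(req));
const hash = keccak_256(concatBytes(MAGIC, payload));
```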
@@ -47,2 +38,3 @@ };

accessList: [], // needs to be .slice()-d to create new reference
authorizationList: [],
chainId: 1n, // mainnet

@@ -81,3 +73,3 @@ data: '',

raw[f] = DEFAULTS[f];
if (f === 'accessList')
if (['accessList', 'authorizationList'].includes(f))
raw[f] = cloneDeep(raw[f]);

@@ -84,0 +76,0 @@ }

import { IWeb3Provider, Web3CallArgs } from '../utils.js';
import { TxVersions } from '../tx.js';
import { TxVersions, AccessList } from '../tx.js';
import { ContractInfo } from '../abi/index.js';

@@ -63,3 +63,3 @@ export type BlockInfo = {

hash: string;
accessList?: [string, string[]][];
accessList?: AccessList;
transactionIndex: number;

@@ -196,3 +196,3 @@ type: number;

txInfo(txHash: string, opts?: TxInfoOpts): Promise<{
type: "legacy" | "eip2930" | "eip1559" | "eip4844";
type: "legacy" | "eip2930" | "eip1559" | "eip4844" | "eip7702";
info: any;

@@ -199,0 +199,0 @@ receipt: any;

@@ -100,5 +100,2 @@ import { hexToNumber, amounts } from '../utils.js';

}
// Same API as Transaction, so we can re-create easily
if (info.accessList)
info.accessList = info.accessList.map((i) => [i.address, i.storageKeys]);
return info;

@@ -105,0 +102,0 @@ }

import * as P from 'micro-packed';
import { sha256 } from '@noble/hashes/sha2';
import { isBytes } from './utils.js';
import { isBytes, isObject } from './utils.js';
/*

@@ -84,3 +84,61 @@

};
const basic = (inner, def) => ({
// TODO: improve
const isStableCompat = (a, b) => {
if (a === b)
return true; // fast path
const _a = a;
const _b = b;
if (_a.info && _b.info) {
const aI = _a.info;
const bI = _b.info;
// Bitlist[N] / Bitvector[N] field types are compatible if they share the same capacity N.
const bitTypes = ['bitList', 'bitVector'];
if (bitTypes.includes(aI.type) && bitTypes.includes(bI.type) && aI.N === bI.N)
return true;
// List[T, N] / Vector[T, N] field types are compatible if T is compatible and if they also share the same capacity N.
const listTypes = ['list', 'vector'];
if (listTypes.includes(aI.type) &&
listTypes.includes(bI.type) &&
aI.N === bI.N &&
aI.inner._isStableCompat(bI.inner)) {
return true;
}
// Container / StableContainer[N] field types are compatible if all inner field types are compatible,
// if they also share the same field names in the same order, and for StableContainer[N] if they also
// share the same capacity N.
const contType = ['container', 'stableContainer'];
if (contType.includes(aI.type) && contType.includes(bI.type)) {
// both stable containers, but different capacity
if (aI.N !== undefined && bI.N !== undefined && aI.N !== bI.N)
return false;
const kA = Object.keys(aI.fields);
const kB = Object.keys(bI.fields);
if (kA.length !== kB.length)
return false;
for (let i = 0; i < kA.length; i++) {
const fA = kA[i];
const fB = kB[i];
if (fA !== fB)
return false;
if (!aI.fields[fA]._isStableCompat(bI.fields[fA]))
return false;
}
return true;
}
// Profile[X] field types are compatible with StableContainer types compatible with X, and
// are compatible with Profile[Y] where Y is compatible with X if also all inner field types
// are compatible. Differences solely in optionality do not affect merkleization compatibility.
if (aI.type === 'profile' || bI.type === 'profile') {
//console.log('PROF PROF?', aI.type, bI.type, aI.container._isStableCompat(bI));
if (aI.type === 'profile' && bI.type === 'stableContainer')
return aI.container._isStableCompat(b);
if (aI.type === 'stableContainer' && bI.type === 'profile')
return a._isStableCompat(bI.container);
if (aI.type === 'profile' && bI.type === 'profile')
return aI.container._isStableCompat(bI.container);
}
}
return false;
};
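A quick illustration of these rules, assuming the `list`, `vector` and `uint8` combinators exported later in this module: a list and a vector with equal capacity and compatible element type compare as compatible, while different capacities do not:

```ts
const a = list(16, uint8);
const b = vector(16, uint8);
a._isStableCompat(b); // true: List[uint8, 16] vs Vector[uint8, 16]
const c = list(32, uint8);
a._isStableCompat(c); // false: capacities differ (16 vs 32)
```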
const basic = (type, inner, def) => ({
...inner,

@@ -90,2 +148,6 @@ default: def,

composite: false,
info: { type },
_isStableCompat(other) {
return isStableCompat(this, other);
},
chunks(value) {

@@ -116,9 +178,9 @@ return [this.merkleRoot(value)];

});
export const uint8 = basic(int(1), 0);
export const uint16 = basic(int(2), 0);
export const uint32 = basic(int(4), 0);
export const uint64 = basic(int(8, false), 0n);
export const uint128 = basic(int(16, false), 0n);
export const uint256 = basic(int(32, false), 0n);
export const boolean = basic(P.bool, false);
export const uint8 = basic('uint8', int(1), 0);
export const uint16 = basic('uint16', int(2), 0);
export const uint32 = basic('uint32', int(4), 0);
export const uint64 = basic('uint64', int(8, false), 0n);
export const uint128 = basic('uint128', int(16, false), 0n);
export const uint256 = basic('uint256', int(32, false), 0n);
export const boolean = basic('boolean', P.bool, false);
const array = (len, inner) => {

@@ -168,2 +230,6 @@ checkSSZ(inner);

...array(len, inner),
info: { type: 'vector', N: len, inner },
_isStableCompat(other) {
return isStableCompat(this, other);
},
default: new Array(len).fill(inner.default),

@@ -194,2 +260,6 @@ composite: true,

...coder,
info: { type: 'list', N: maxLen, inner },
_isStableCompat(other) {
return isStableCompat(this, other);
},
composite: true,

@@ -208,2 +278,23 @@ chunkCount: !inner.composite ? Math.ceil((maxLen * inner.size) / BYTES_PER_CHUNK) : maxLen,

};
const wrapPointer = (p) => (p.size === undefined ? P.pointer(P.U32LE, p) : p);
const wrapRawPointer = (p) => (p.size === undefined ? P.U32LE : p);
// TODO: improve, unclear how
const fixOffsets = (r, fields, offsetFields, obj, offset) => {
const offsets = [];
for (const f of offsetFields)
offsets.push(obj[f] + offset);
for (let i = 0; i < offsets.length; i++) {
// TODO: how to merge this with array?
const name = offsetFields[i];
const pos = offsets[i];
const next = i + 1 < offsets.length ? offsets[i + 1] : r.totalBytes;
if (next < pos)
throw r.err('SSZ/container: decreasing offset');
const len = next - pos;
if (r.pos !== pos)
throw r.err('SSZ/container: wrong offset');
obj[name] = fields[name].decode(r.bytes(len));
}
return obj;
};
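For intuition about the layout `fixOffsets` validates: each variable-size field occupies a 4-byte little-endian offset in the fixed section, pointing at its bytes in a heap that follows, and offsets must be non-decreasing and match the reader position exactly. A sketch using this module's combinators; the byte layout follows the SSZ spec:

```ts
const C = container({ a: uint8, b: bytelist(8) });
const bytes = C.encode({ a: 7, b: new Uint8Array([1, 2]) });
// fixed part: [ a = 0x07 ][ offset of b = 5, as u32le ]
// heap:       [ 0x01, 0x02 ]
// => 07 05 00 00 00 01 02
```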
/**

@@ -215,30 +306,15 @@ * Container: Encodes object with multiple fields. P.struct for SSZ.

throw new Error('SSZ/container: no fields');
const ptrCoder = P.struct(Object.fromEntries(Object.entries(fields).map(([k, v]) => [k, v.size === undefined ? P.pointer(P.U32LE, v) : v])));
const fixedCoder = P.struct(Object.fromEntries(Object.entries(fields).map(([k, v]) => [k, v.size === undefined ? P.U32LE : v])));
const ptrCoder = P.struct(Object.fromEntries(Object.entries(fields).map(([k, v]) => [k, wrapPointer(v)])));
const fixedCoder = P.struct(Object.fromEntries(Object.entries(fields).map(([k, v]) => [k, wrapRawPointer(v)])));
const offsetFields = Object.keys(fields).filter((i) => fields[i].size === undefined);
const coder = P.wrap({
encodeStream: ptrCoder.encodeStream,
decodeStream: (r) => {
const fixed = fixedCoder.decodeStream(r);
const offsets = [];
for (const f in fields)
if (fields[f].size === undefined)
offsets.push(fixed[f]);
for (let i = 0; i < offsets.length; i++) {
// TODO: how to merge this with array?
const name = offsetFields[i];
const pos = offsets[i];
const next = i + 1 < offsets.length ? offsets[i + 1] : r.totalBytes;
if (next < pos)
throw r.err('SSZ/container: decreasing offset');
const len = next - pos;
if (r.pos !== pos)
throw r.err('SSZ/container: wrong offset');
fixed[name] = fields[name].decode(r.bytes(len));
}
return fixed;
},
decodeStream: (r) => fixOffsets(r, fields, offsetFields, fixedCoder.decodeStream(r), 0),
});
return {
...coder,
info: { type: 'container', fields },
_isStableCompat(other) {
return isStableCompat(this, other);
},
size: offsetFields.length ? undefined : fixedCoder.size, // structure is fixed size if all fields are fixed size

@@ -287,2 +363,6 @@ default: Object.fromEntries(Object.entries(fields).map(([k, v]) => [k, v.default])),

...coder,
info: { type: 'bitVector', N: len },
_isStableCompat(other) {
return isStableCompat(this, other);
},
default: new Array(len).fill(false),

@@ -327,2 +407,6 @@ composite: true,

...coder,
info: { type: 'bitList', N: maxLen },
_isStableCompat(other) {
return isStableCompat(this, other);
},
size: undefined,

@@ -364,2 +448,5 @@ default: [],

default: { selector: 0, value: types[0] === null ? null : types[0].default },
_isStableCompat(other) {
return isStableCompat(this, other);
},
composite: true,

@@ -391,2 +478,6 @@ chunks({ selector, value }) {

...coder,
info: { type: 'list', N: maxLen, inner: byte },
_isStableCompat(other) {
return isStableCompat(this, other);
},
default: new Uint8Array([]),

@@ -411,2 +502,6 @@ composite: true,

...P.bytes(len),
info: { type: 'vector', N: len, inner: byte },
_isStableCompat(other) {
return isStableCompat(this, other);
},
default: new Uint8Array(len),

@@ -423,2 +518,194 @@ composite: true,

};
/**
* Same as container, but all fields are optional: a bitvector prefix indicates which fields are present
*/
export const stableContainer = (N, fields) => {
const fieldsNames = Object.keys(fields);
const fieldsLen = fieldsNames.length;
if (!fieldsLen)
throw new Error('SSZ/stableContainer: no fields');
if (fieldsLen > N)
throw new Error('SSZ/stableContainer: more fields than N');
const bv = bitvector(N);
const coder = P.wrap({
encodeStream: (w, value) => {
const bsVal = new Array(N).fill(false);
for (let i = 0; i < fieldsLen; i++)
if (value[fieldsNames[i]] !== undefined)
bsVal[i] = true;
bv.encodeStream(w, bsVal);
const activeFields = fieldsNames.filter((_, i) => bsVal[i]);
const ptrCoder = P.struct(Object.fromEntries(activeFields.map((k) => [k, wrapPointer(fields[k])])));
w.bytes(ptrCoder.encode(value));
},
decodeStream: (r) => {
const bsVal = bv.decodeStream(r);
for (let i = fieldsLen; i < bsVal.length; i++) {
if (bsVal[i] !== false)
throw new Error('stableContainer: non-zero padding');
}
const activeFields = fieldsNames.filter((_, i) => bsVal[i]);
const fixedCoder = P.struct(Object.fromEntries(activeFields.map((k) => [k, wrapRawPointer(fields[k])])));
const offsetFields = activeFields.filter((i) => fields[i].size === undefined);
return fixOffsets(r, fields, offsetFields, fixedCoder.decodeStream(r), bv.size);
},
});
return {
...coder,
info: { type: 'stableContainer', N, fields },
size: undefined,
default: Object.fromEntries(Object.entries(fields).map(([k, _v]) => [k, undefined])),
_isStableCompat(other) {
return isStableCompat(this, other);
},
composite: true,
chunkCount: N,
chunks(value) {
const res = Object.entries(fields).map(([k, v]) => value[k] === undefined ? new Uint8Array(32) : v.merkleRoot(value[k]));
while (res.length < N)
res.push(new Uint8Array(32));
return res;
},
merkleRoot(value) {
const bsVal = new Array(N).fill(false);
for (let i = 0; i < fieldsLen; i++)
if (value[fieldsNames[i]] !== undefined)
bsVal[i] = true;
return hash(merkleize(this.chunks(value)), bv.merkleRoot(bsVal));
},
};
};
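A short sketch of what this enables, anticipating the `Shape` example in the comment below: omitted fields are simply absent, and the bitvector prefix records which fields are present:

```ts
const Shape = stableContainer(4, { side: uint16, color: uint8, radius: uint16 });
// radius omitted => its bit in the 4-bit prefix stays 0
const bytes = Shape.encode({ side: 16, color: 2, radius: undefined });
const back = Shape.decode(bytes); // { side: 16, color: 2 } (radius absent)
```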
/**
* Profile - fixed subset of stableContainer.
* - fields and their order are exactly the same as in the underlying stable container
* - some fields may be excluded or required in the profile (all fields in a stable container are always optional)
* - adding new fields to the underlying container won't change profiles constructed on top of it,
* because the full list of optional fields must be provided explicitly
* - a field's type can be changed inside a profile (but we should be very explicit about this) to a type of the same shape
*
* @example
* // class Shape(StableContainer[4]):
* // side: Optional[uint16]
* // color: Optional[uint8]
* // radius: Optional[uint16]
*
* // class Square(Profile[Shape]):
* // side: uint16
* // color: uint8
*
* // class Circle(Profile[Shape]):
* // color: uint8
* // radius: Optional[uint16]
* // ->
* const Shape = SSZ.stableContainer(4, {
* side: SSZ.uint16,
* color: SSZ.uint8,
* radius: SSZ.uint16,
* });
* const Square = profile(Shape, [], ['side', 'color']);
* const Circle = profile(Shape, ['radius'], ['color']);
* const Circle2 = profile(Shape, ['radius'], ['color'], { color: SSZ.byte });
*/
export const profile = (c, optFields, requiredFields = [], replaceType = {}) => {
checkSSZ(c);
if (c.info.type !== 'stableContainer')
throw new Error('profile: expected stableContainer');
const containerFields = new Set(Object.keys(c.info.fields));
if (!Array.isArray(optFields))
throw new Error('profile: optional fields should be array');
const optFS = new Set(optFields);
for (const f of optFS) {
if (!containerFields.has(f))
throw new Error(`profile: unexpected optional field ${f}`);
}
if (!Array.isArray(requiredFields))
throw new Error('profile: required fields should be array');
const reqFS = new Set(requiredFields);
for (const f of reqFS) {
if (!containerFields.has(f))
throw new Error(`profile: unexpected required field ${f}`);
if (optFS.has(f))
throw new Error(`profile: field ${f} is declared both as optional and required`);
}
if (!isObject(replaceType))
throw new Error('profile: replaceType should be object');
for (const k in replaceType) {
if (!containerFields.has(k))
throw new Error(`profile/replaceType: unexpected field ${k}`);
if (!replaceType[k]._isStableCompat(c.info.fields[k]))
throw new Error(`profile/replaceType: incompatible field ${k}`);
}
// Order should be same
const allFields = Object.keys(c.info.fields).filter((i) => optFS.has(i) || reqFS.has(i));
// bv is omitted if all fields are required!
const fieldCoders = { ...c.info.fields, ...replaceType };
let coder;
if (optFS.size === 0) {
// All fields are required, so this is just a container, possibly with a fixed size
coder = container(Object.fromEntries(allFields.map((k) => [k, fieldCoders[k]])));
}
else {
// NOTE: we cannot merge this with stableContainer,
// because some fields are active and some are not (based on required/optional)
const bv = bitvector(optFS.size);
const forFields = (fn) => {
let optPos = 0;
for (const f of allFields) {
const isOpt = optFS.has(f);
fn(f, isOpt ? optPos : undefined);
if (isOpt)
optPos++;
}
};
coder = {
...P.wrap({
encodeStream: (w, value) => {
const bsVal = new Array(optFS.size).fill(false);
const ptrCoder = {};
forFields((f, optPos) => {
const val = value[f];
if (optPos !== undefined && val !== undefined)
bsVal[optPos] = true;
if (optPos === undefined && val === undefined)
throw new Error(`profile.encode: empty required field ${f}`);
if (val !== undefined)
ptrCoder[f] = wrapPointer(fieldCoders[f]);
});
bv.encodeStream(w, bsVal);
w.bytes(P.struct(ptrCoder).encode(value));
},
decodeStream: (r) => {
let bsVal = bv.decodeStream(r);
const fixedCoder = {};
const offsetFields = [];
forFields((f, optPos) => {
if (optPos !== undefined && bsVal[optPos] === false)
return;
if (fieldCoders[f].size === undefined)
offsetFields.push(f);
fixedCoder[f] = wrapRawPointer(fieldCoders[f]);
});
return fixOffsets(r, fieldCoders, offsetFields, P.struct(fixedCoder).decodeStream(r), bv.size);
},
}),
size: undefined,
};
}
return {
...coder,
info: { type: 'profile', container: c },
default: Object.fromEntries(Array.from(reqFS).map((f) => [f, fieldCoders[f].default])),
_isStableCompat(other) {
return isStableCompat(this, other);
},
composite: true,
chunkCount: c.info.N,
chunks(value) {
return c.chunks(value);
},
merkleRoot(value) {
return c.merkleRoot(value);
},
};
};
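Continuing the `Shape` example from the comment above: when no optional fields remain, a profile serializes as a plain container (no bitvector prefix), while merkleization delegates to the underlying stable container, so the roots stay compatible:

```ts
const Shape = stableContainer(4, { side: uint16, color: uint8, radius: uint16 });
const Square = profile(Shape, [], ['side', 'color']); // radius excluded, the rest required
const enc = Square.encode({ side: 16, color: 2 }); // plain container encoding, no prefix
// Square.merkleRoot(v) and Shape.merkleRoot(v) produce the same root,
// since profile.merkleRoot delegates to the stable container
```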
// Aliases

@@ -462,2 +749,12 @@ export const byte = uint8;

const FINALIZED_ROOT_DEPTH = 6;
// Electra
const MAX_COMMITTEES_PER_SLOT = 64;
const PENDING_PARTIAL_WITHDRAWALS_LIMIT = 134217728;
const PENDING_BALANCE_DEPOSITS_LIMIT = 134217728;
const PENDING_CONSOLIDATIONS_LIMIT = 262144;
const MAX_ATTESTER_SLASHINGS_ELECTRA = 1;
const MAX_ATTESTATIONS_ELECTRA = 8;
const MAX_DEPOSIT_REQUESTS_PER_PAYLOAD = 8192;
const MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD = 16;
const MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD = 1;
// We can reduce size if we inline these. But updates for new forks would be hard.

@@ -771,2 +1068,33 @@ const Slot = uint64;

});
// Electra
const DepositRequest = container({
pubkey: BLSPubkey,
withdrawal_credentials: Bytes32,
amount: Gwei,
signature: BLSSignature,
index: uint64,
});
const WithdrawalRequest = container({
source_address: ExecutionAddress,
validator_pubkey: BLSPubkey,
amount: Gwei,
});
const ConsolidationRequest = container({
source_address: ExecutionAddress,
source_pubkey: BLSPubkey,
target_pubkey: BLSPubkey,
});
const PendingBalanceDeposit = container({
index: ValidatorIndex,
amount: Gwei,
});
const PendingPartialWithdrawal = container({
index: ValidatorIndex,
amount: Gwei,
withdrawable_epoch: Epoch,
});
const PendingConsolidation = container({
source_index: ValidatorIndex,
target_index: ValidatorIndex,
});
export const ETH2_TYPES = {

@@ -845,3 +1173,264 @@ Slot,

LightClientFinalityUpdate,
// Electra
DepositRequest,
WithdrawalRequest,
ConsolidationRequest,
PendingBalanceDeposit,
PendingPartialWithdrawal,
PendingConsolidation,
};
// EIP-7688
const MAX_ATTESTATION_FIELDS = 8;
const MAX_INDEXED_ATTESTATION_FIELDS = 8;
const MAX_EXECUTION_PAYLOAD_FIELDS = 64;
const MAX_BEACON_BLOCK_BODY_FIELDS = 64;
const MAX_BEACON_STATE_FIELDS = 128;
const MAX_EXECUTION_REQUESTS_FIELDS = 16;
const StableAttestation = stableContainer(MAX_ATTESTATION_FIELDS, {
aggregation_bits: bitlist(MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT),
data: AttestationData,
signature: BLSSignature,
committee_bits: bitvector(MAX_COMMITTEES_PER_SLOT),
});
const StableIndexedAttestation = stableContainer(MAX_INDEXED_ATTESTATION_FIELDS, {
attesting_indices: list(MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT, ValidatorIndex),
data: AttestationData,
signature: BLSSignature,
});
const StableAttesterSlashing = container({
attestation_1: StableIndexedAttestation,
attestation_2: StableIndexedAttestation,
});
const StableExecutionRequests = stableContainer(MAX_EXECUTION_REQUESTS_FIELDS, {
deposits: list(MAX_DEPOSIT_REQUESTS_PER_PAYLOAD, DepositRequest), // [New in Electra:EIP6110]
withdrawals: list(MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD, WithdrawalRequest), // [New in Electra:EIP7002:EIP7251]
consolidations: list(MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD, ConsolidationRequest), // [New in Electra:EIP7251]
});
const StableExecutionPayload = stableContainer(MAX_EXECUTION_PAYLOAD_FIELDS, {
parent_hash: Hash32,
fee_recipient: ExecutionAddress,
state_root: Bytes32,
receipts_root: Bytes32,
logs_bloom: bytevector(BYTES_PER_LOGS_BLOOM),
prev_randao: Bytes32,
block_number: uint64,
gas_limit: uint64,
gas_used: uint64,
timestamp: uint64,
extra_data: bytelist(MAX_EXTRA_DATA_BYTES),
base_fee_per_gas: uint256,
block_hash: Hash32,
transactions: list(MAX_TRANSACTIONS_PER_PAYLOAD, Transaction),
withdrawals: list(MAX_WITHDRAWALS_PER_PAYLOAD, Withdrawal), // [New in Capella]
blob_gas_used: uint64,
excess_blob_gas: uint64,
deposit_requests: list(MAX_DEPOSIT_REQUESTS_PER_PAYLOAD, DepositRequest), // [New in Electra:EIP6110]
withdrawal_requests: list(MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD, WithdrawalRequest), // [New in Electra:EIP7002:EIP7251]
consolidation_requests: list(MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD, ConsolidationRequest), // [New in Electra:EIP7251]
});
const StableExecutionPayloadHeader = stableContainer(MAX_EXECUTION_PAYLOAD_FIELDS, {
parent_hash: Hash32,
fee_recipient: ExecutionAddress,
state_root: Bytes32,
receipts_root: Bytes32,
logs_bloom: bytevector(BYTES_PER_LOGS_BLOOM),
prev_randao: Bytes32,
block_number: uint64,
gas_limit: uint64,
gas_used: uint64,
timestamp: uint64,
extra_data: bytelist(MAX_EXTRA_DATA_BYTES),
base_fee_per_gas: uint256,
block_hash: Hash32,
transactions_root: Root,
withdrawals_root: Root, // [New in Capella]
blob_gas_used: uint64, // [New in Deneb:EIP4844]
excess_blob_gas: uint64, // [New in Deneb:EIP4844]
deposit_requests_root: Root, // [New in Electra:EIP6110]
withdrawal_requests_root: Root, // [New in Electra:EIP7002:EIP7251]
consolidation_requests_root: Root, // [New in Electra:EIP7251]
});
const StableBeaconBlockBody = stableContainer(MAX_BEACON_BLOCK_BODY_FIELDS, {
randao_reveal: BLSSignature,
eth1_data: Eth1Data,
graffiti: Bytes32,
proposer_slashings: list(MAX_PROPOSER_SLASHINGS, ProposerSlashing),
attester_slashings: list(MAX_ATTESTER_SLASHINGS_ELECTRA, StableAttesterSlashing), // [Modified in Electra:EIP7549]
attestations: list(MAX_ATTESTATIONS_ELECTRA, StableAttestation), // [Modified in Electra:EIP7549]
deposits: list(MAX_DEPOSITS, Deposit),
voluntary_exits: list(MAX_VOLUNTARY_EXITS, SignedVoluntaryExit),
sync_aggregate: SyncAggregate,
execution_payload: StableExecutionPayload,
bls_to_execution_changes: list(MAX_BLS_TO_EXECUTION_CHANGES, SignedBLSToExecutionChange),
blob_kzg_commitments: list(MAX_BLOB_COMMITMENTS_PER_BLOCK, KZGCommitment),
execution_requests: StableExecutionRequests,
});
const StableBeaconState = stableContainer(MAX_BEACON_STATE_FIELDS, {
genesis_time: uint64,
genesis_validators_root: Root,
slot: Slot,
fork: Fork,
latest_block_header: BeaconBlockHeader,
block_roots: vector(SLOTS_PER_HISTORICAL_ROOT, Root),
state_roots: vector(SLOTS_PER_HISTORICAL_ROOT, Root),
historical_roots: list(HISTORICAL_ROOTS_LIMIT, Root),
eth1_data: Eth1Data,
eth1_data_votes: list(EPOCHS_PER_ETH1_VOTING_PERIOD * SLOTS_PER_EPOCH, Eth1Data),
eth1_deposit_index: uint64,
validators: list(VALIDATOR_REGISTRY_LIMIT, Validator),
balances: list(VALIDATOR_REGISTRY_LIMIT, Gwei),
randao_mixes: vector(EPOCHS_PER_HISTORICAL_VECTOR, Bytes32),
slashings: vector(EPOCHS_PER_SLASHINGS_VECTOR, Gwei),
previous_epoch_participation: list(VALIDATOR_REGISTRY_LIMIT, ParticipationFlags),
current_epoch_participation: list(VALIDATOR_REGISTRY_LIMIT, ParticipationFlags),
justification_bits: bitvector(JUSTIFICATION_BITS_LENGTH),
previous_justified_checkpoint: Checkpoint,
current_justified_checkpoint: Checkpoint,
finalized_checkpoint: Checkpoint,
inactivity_scores: list(VALIDATOR_REGISTRY_LIMIT, uint64),
current_sync_committee: SyncCommittee,
next_sync_committee: SyncCommittee,
latest_execution_payload_header: StableExecutionPayloadHeader,
next_withdrawal_index: WithdrawalIndex,
next_withdrawal_validator_index: ValidatorIndex,
historical_summaries: list(HISTORICAL_ROOTS_LIMIT, HistoricalSummary),
deposit_requests_start_index: uint64, // [New in Electra:EIP6110]
deposit_balance_to_consume: Gwei, // [New in Electra:EIP7251]
exit_balance_to_consume: Gwei, // [New in Electra:EIP7251]
earliest_exit_epoch: Epoch, // [New in Electra:EIP7251]
consolidation_balance_to_consume: Gwei, // [New in Electra:EIP7251]
earliest_consolidation_epoch: Epoch, // [New in Electra:EIP7251]
pending_balance_deposits: list(PENDING_BALANCE_DEPOSITS_LIMIT, PendingBalanceDeposit), // [New in Electra:EIP7251]
pending_partial_withdrawals: list(PENDING_PARTIAL_WITHDRAWALS_LIMIT, PendingPartialWithdrawal), // [New in Electra:EIP7251]
pending_consolidations: list(PENDING_CONSOLIDATIONS_LIMIT, PendingConsolidation), // [New in Electra:EIP7251]
});
export const ETH2_CONSENSUS = {
StableAttestation,
StableIndexedAttestation,
StableAttesterSlashing,
StableExecutionPayload,
StableExecutionRequests,
StableExecutionPayloadHeader,
StableBeaconBlockBody,
StableBeaconState,
};
// Tests (electra profiles): https://github.com/ethereum/consensus-specs/pull/3844#issuecomment-2239285376
// NOTE: these differ from EIP-7688 for some reason, but since nothing is merged/completed on the eth side,
// we are just trying to pass these tests for now.
const IndexedAttestationElectra = profile(StableIndexedAttestation, [], ['attesting_indices', 'data', 'signature']);
const AttesterSlashingElectra = container({
attestation_1: IndexedAttestationElectra,
attestation_2: IndexedAttestationElectra,
});
const ExecutionPayloadHeaderElectra = profile(StableExecutionPayloadHeader, [], [
'parent_hash',
'fee_recipient',
'state_root',
'receipts_root',
'logs_bloom',
'prev_randao',
'block_number',
'gas_limit',
'gas_used',
'timestamp',
'extra_data',
'base_fee_per_gas',
'block_hash',
'transactions_root',
'withdrawals_root',
'blob_gas_used',
'excess_blob_gas',
]);
const ExecutionRequests = profile(StableExecutionRequests, [], ['deposits', 'withdrawals', 'consolidations']);
const AttestationElectra = profile(StableAttestation, [], ['aggregation_bits', 'data', 'signature', 'committee_bits']);
const ExecutionPayloadElectra = profile(StableExecutionPayload, [], [
'parent_hash',
'fee_recipient',
'state_root',
'receipts_root',
'logs_bloom',
'prev_randao',
'block_number',
'gas_limit',
'gas_used',
'timestamp',
'extra_data',
'base_fee_per_gas',
'block_hash',
'transactions',
'withdrawals',
'blob_gas_used',
'excess_blob_gas',
]);
export const ETH2_PROFILES = {
electra: {
Attestation: AttestationElectra,
AttesterSlashing: AttesterSlashingElectra,
IndexedAttestation: IndexedAttestationElectra,
ExecutionRequests,
ExecutionPayloadHeader: ExecutionPayloadHeaderElectra,
ExecutionPayload: ExecutionPayloadElectra,
BeaconBlockBody: profile(StableBeaconBlockBody, [], [
'randao_reveal',
'eth1_data',
'graffiti',
'proposer_slashings',
'attester_slashings',
'attestations',
'deposits',
'voluntary_exits',
'sync_aggregate',
'execution_payload',
'bls_to_execution_changes',
'blob_kzg_commitments',
'execution_requests',
], {
attester_slashings: list(MAX_ATTESTER_SLASHINGS_ELECTRA, AttesterSlashingElectra),
attestations: list(MAX_ATTESTATIONS_ELECTRA, AttestationElectra),
execution_payload: ExecutionPayloadElectra,
execution_requests: ExecutionRequests,
}),
BeaconState: profile(StableBeaconState, [], [
'genesis_time',
'genesis_validators_root',
'slot',
'fork',
'latest_block_header',
'block_roots',
'state_roots',
'historical_roots',
'eth1_data',
'eth1_data_votes',
'eth1_deposit_index',
'validators',
'balances',
'randao_mixes',
'slashings',
'previous_epoch_participation',
'current_epoch_participation',
'justification_bits',
'previous_justified_checkpoint',
'current_justified_checkpoint',
'finalized_checkpoint',
'inactivity_scores',
'current_sync_committee',
'next_sync_committee',
'latest_execution_payload_header',
'next_withdrawal_index',
'next_withdrawal_validator_index',
'historical_summaries',
'deposit_requests_start_index',
'deposit_balance_to_consume',
'exit_balance_to_consume',
'earliest_exit_epoch',
'consolidation_balance_to_consume',
'earliest_consolidation_epoch',
'pending_balance_deposits',
'pending_partial_withdrawals',
'pending_consolidations',
], {
latest_execution_payload_header: ExecutionPayloadHeaderElectra,
}),
},
};
//# sourceMappingURL=ssz.js.map

@@ -11,4 +11,2 @@ import * as P from 'micro-packed';

export type TxCoder<T extends TxType> = P.UnwrapCoder<(typeof TxVersions)[T]>;
export type AccessList = [string, string[]][];
export type BytesAccessList = [Uint8Array, Uint8Array[]][];
type VRS = Partial<{

@@ -26,2 +24,23 @@ v: bigint;

export declare const legacySig: P.Coder<VRS, YRS>;
type CoderOutput<F> = F extends P.Coder<any, infer T> ? T : never;
declare const accessListItem: P.Coder<(Uint8Array | Uint8Array[])[], {
address: string;
storageKeys: string[];
}>;
export type AccessList = CoderOutput<typeof accessListItem>[];
export declare const authorizationRequest: P.Coder<Uint8Array[], {
chainId: bigint;
address: string;
nonce: bigint;
}>;
declare const authorizationItem: P.Coder<Uint8Array[], {
chainId: bigint;
address: string;
nonce: bigint;
yParity: number;
r: bigint;
s: bigint;
}>;
export type AuthorizationItem = CoderOutput<typeof authorizationItem>;
export type AuthorizationRequest = CoderOutput<typeof authorizationRequest>;
/**

@@ -40,3 +59,6 @@ * Field types, matching geth. Either u64 or u256.

data: P.Coder<Uint8Array, string>;
accessList: P.Coder<BytesAccessList, AccessList>;
accessList: P.Coder<(Uint8Array | Uint8Array[])[][], {
address: string;
storageKeys: string[];
}[]>;
maxFeePerBlobGas: P.Coder<Uint8Array, bigint>;

@@ -48,2 +70,10 @@ blobVersionedHashes: P.Coder<Uint8Array[], string[]>;

s: P.Coder<Uint8Array, bigint>;
authorizationList: P.Coder<Uint8Array[][], {
chainId: bigint;
address: string;
nonce: bigint;
yParity: number;
r: bigint;
s: bigint;
}[]>;
};

@@ -86,3 +116,6 @@ type Coders = typeof coders;

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
}, {

@@ -102,3 +135,6 @@ yParity: number;

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
}, {

@@ -118,3 +154,6 @@ yParity: number;

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
maxFeePerBlobGas: bigint;

@@ -127,2 +166,28 @@ blobVersionedHashes: string[];

}>>;
eip7702: FieldCoder<OptFields<{
to: string;
data: string;
nonce: bigint;
value: bigint;
chainId: bigint;
maxPriorityFeePerGas: bigint;
maxFeePerGas: bigint;
gasLimit: bigint;
accessList: {
address: string;
storageKeys: string[];
}[];
authorizationList: {
chainId: bigint;
address: string;
nonce: bigint;
yParity: number;
r: bigint;
s: bigint;
}[];
}, {
yParity: number;
r: bigint;
s: bigint;
}>>;
};

@@ -139,3 +204,6 @@ export declare const RawTx: P.CoderType<VersionType<{

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
}, {

@@ -155,3 +223,6 @@ yParity: number;

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
}, {

@@ -171,3 +242,6 @@ yParity: number;

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
maxFeePerBlobGas: bigint;

@@ -180,2 +254,28 @@ blobVersionedHashes: string[];

}>>;
eip7702: FieldCoder<OptFields<{
to: string;
data: string;
nonce: bigint;
value: bigint;
chainId: bigint;
maxPriorityFeePerGas: bigint;
maxFeePerGas: bigint;
gasLimit: bigint;
accessList: {
address: string;
storageKeys: string[];
}[];
authorizationList: {
chainId: bigint;
address: string;
nonce: bigint;
yParity: number;
r: bigint;
s: bigint;
}[];
}, {
yParity: number;
r: bigint;
s: bigint;
}>>;
}>>;

@@ -224,3 +324,6 @@ /**

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
}, {

@@ -240,3 +343,6 @@ yParity: number;

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
}, {

@@ -256,3 +362,6 @@ yParity: number;

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
maxFeePerBlobGas: bigint;

@@ -265,2 +374,28 @@ blobVersionedHashes: string[];

}>>;
eip7702: FieldCoder<OptFields<{
to: string;
data: string;
nonce: bigint;
value: bigint;
chainId: bigint;
maxPriorityFeePerGas: bigint;
maxFeePerGas: bigint;
gasLimit: bigint;
accessList: {
address: string;
storageKeys: string[];
}[];
authorizationList: {
chainId: bigint;
address: string;
nonce: bigint;
yParity: number;
r: bigint;
s: bigint;
}[];
}, {
yParity: number;
r: bigint;
s: bigint;
}>>;
};

@@ -267,0 +402,0 @@ };

import * as P from 'micro-packed';
import { addr } from './address.js';
import { RLP } from './rlp.js';
import { isBytes, amounts, ethHex } from './utils.js';
import { isObject, amounts, ethHex, isBytes } from './utils.js';
const createTxMap = (versions) => {

@@ -56,32 +56,12 @@ const ent = Object.entries(versions);

const addrCoder = ethHex;
function accessListParser(coder, mapper) {
return (data) => {
if (!Array.isArray(data))
throw new Error('access list must be an array');
return data.map((pair) => {
if (!Array.isArray(pair) || pair.length !== 2)
throw new Error('access list must have 2 elements');
return [coder(pair[0]), pair[1].map(mapper)];
});
};
}
// Parses eip4844 blobs:
// ["0x0000000000000000000000000000000000000000000000000000000000000003"...]
function blobParser(fn) {
return (data) => {
if (!Array.isArray(data))
throw new Error('blobVersionedHashes must be an array');
return data.map(fn);
};
}
// Bytes32: VersionedHash, AccessListKey
function ensure32(b) {
if (b.length !== 32)
throw new Error('slot must be 32 bytes');
if (!isBytes(b) || b.length !== 32)
throw new Error('expected 32 bytes');
return b;
}
function ensureBlob(hash) {
if (!isBytes(hash) || hash.length !== 32)
throw new Error('blobVersionedHashes must contain 32-byte Uint8Array-s');
return hash;
}
const Bytes32 = {
encode: (from) => ethHex.encode(ensure32(from)),
decode: (to) => ensure32(ethHex.decode(to)),
};
// Process v as (chainId, yParity) pair. Ethers.js-inspired logic:

@@ -145,2 +125,54 @@ // - v=27/28 -> no chainId (pre eip155)

const U256BE = P.coders.reverse(P.bigint(32, false, false, false));
// Small coder utils
// TODO: seems generic enough for packed? or RLP (seems useful for structured encoding/decoding of RLP stuff)
// Basic array coder
const array = (coder) => ({
encode(from) {
if (!Array.isArray(from))
throw new Error('expected array');
return from.map((i) => coder.encode(i));
},
decode(to) {
if (!Array.isArray(to))
throw new Error('expected array');
return to.map((i) => coder.decode(i));
},
});
// tuple -> struct
const struct = (fields) => ({
encode(from) {
if (!Array.isArray(from))
throw new Error('expected array');
const fNames = Object.keys(fields);
if (from.length !== fNames.length)
throw new Error('wrong array length');
return Object.fromEntries(fNames.map((f, i) => [f, fields[f].encode(from[i])]));
},
decode(to) {
const fNames = Object.keys(fields);
if (!isObject(to))
throw new Error('wrong struct object');
return fNames.map((i) => fields[i].decode(to[i]));
},
});
// U256BE in geth. But it is either 0 or 1. TODO: is this good enough?
const yParityCoder = P.coders.reverse(P.validate(P.int(1, false, false, false), (elm) => {
assertYParityValid(elm);
return elm;
}));
const accessListItem = struct({ address: addrCoder, storageKeys: array(Bytes32) });
export const authorizationRequest = struct({
chainId: U256BE,
address: addrCoder,
nonce: U64BE,
});
// [chain_id, address, nonce, y_parity, r, s]
const authorizationItem = struct({
chainId: U256BE,
address: addrCoder,
nonce: U64BE,
yParity: yParityCoder,
r: U256BE,
s: U256BE,
});
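Note the direction of these P.Coder adapters: `encode` runs on freshly RLP-decoded tuples (bytes to user-facing values) and `decode` runs before RLP encoding (values back to byte tuples). A small sketch with the local `Bytes32` coder defined above:

```ts
const keys = array(Bytes32);
const hex = keys.encode([new Uint8Array(32)]); // ['0x0000...00'] (32 zero bytes to hex)
const raw = keys.decode(hex); // back to [Uint8Array(32)]
```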
/**

@@ -159,19 +191,10 @@ * Field types, matching geth. Either u64 or u256.

data: ethHex,
accessList: {
decode: accessListParser(addrCoder.decode, (k) => ensure32(ethHex.decode(k))),
encode: accessListParser(addrCoder.encode, (k) => ethHex.encode(ensure32(k))),
},
accessList: array(accessListItem),
maxFeePerBlobGas: U256BE,
blobVersionedHashes: {
decode: blobParser((b) => ensureBlob(ethHex.decode(b))),
encode: blobParser((b) => ethHex.encode(ensureBlob(b))),
},
// U256BE in geth. But it is either 0 or 1. TODO: is this good enough?
yParity: P.coders.reverse(P.validate(P.int(1, false, false, false), (elm) => {
assertYParityValid(elm);
return elm;
})),
blobVersionedHashes: array(Bytes32),
yParity: yParityCoder,
v: U256BE,
r: U256BE,
s: U256BE,
authorizationList: array(authorizationItem),
};

@@ -193,7 +216,10 @@ const signatureFields = new Set(['v', 'yParity', 'r', 's']);

const txStruct = (reqf, optf) => {
const allFields = reqf.concat(optf);
// Check that all fields have known coders
reqf.concat(optf).forEach((f) => {
allFields.forEach((f) => {
if (!coders.hasOwnProperty(f))
throw new Error(`coder for field ${f} is not defined`);
});
const reqS = struct(Object.fromEntries(reqf.map((i) => [i, coders[i]])));
const allS = struct(Object.fromEntries(allFields.map((i) => [i, coders[i]])));
// e.g. eip1559 txs have valid lengths of 9 or 12 (unsigned / signed)

@@ -208,9 +234,6 @@ const reql = reqf.length;

encodeStream(w, raw) {
// @ts-ignore TODO: fix type
const values = reqf.map((f) => coders[f].decode(raw[f]));
// If at least one optional key is present, we add whole optional block
if (optf.some((f) => raw.hasOwnProperty(f)))
// @ts-ignore TODO: fix type
optf.forEach((f) => values.push(coders[f].decode(raw[f])));
RLP.encodeStream(w, values);
const hasOptional = optf.some((f) => raw.hasOwnProperty(f));
const sCoder = hasOptional ? allS : reqS;
RLP.encodeStream(w, sCoder.decode(raw));
},

@@ -224,14 +247,7 @@ decodeStream(r) {

throw new Error(`txStruct: wrong inner length=${length}`);
const raw = Object.fromEntries(
// @ts-ignore TODO: fix type
reqf.map((f, i) => [f, coders[f].encode(decoded[i])]));
if (length === optl) {
if (optf.every((_, i) => isEmpty(decoded[optFieldAt(i)])))
throw new Error('all optional fields empty');
const rawSig = Object.fromEntries(
// @ts-ignore TODO: fix type
optf.map((f, i) => [f, coders[f].encode(decoded[optFieldAt(i)])]));
Object.assign(raw, rawSig); // mutate raw
}
return raw;
const sCoder = length === optl ? allS : reqS;
if (length === optl && optf.every((_, i) => isEmpty(decoded[optFieldAt(i)])))
throw new Error('all optional fields empty');
// @ts-ignore TODO: fix type (there can be null in RLP)
return sCoder.encode(decoded);
},

@@ -241,3 +257,3 @@ });

fcoder.optionalFields = optf;
fcoder.setOfAllFields = new Set(reqf.concat(optf, ['type']));
fcoder.setOfAllFields = new Set(allFields.concat(['type']));
return fcoder;

@@ -283,2 +299,7 @@ };

], ['yParity', 'r', 's']);
// prettier-ignore
const eip7702 = txStruct([
'chainId', 'nonce', 'maxPriorityFeePerGas', 'maxFeePerGas', 'gasLimit', 'to', 'value', 'data', 'accessList',
'authorizationList'
], ['yParity', 'r', 's']);
export const TxVersions = {

@@ -289,2 +310,3 @@ legacy, // 0x00 (kinda)

eip4844, // 0x03
eip7702, // 0x04
};

@@ -298,5 +320,10 @@ export const RawTx = P.apply(createTxMap(TxVersions), {

for (const item of data.data.accessList) {
item[0] = addr.addChecksum(item[0]);
item.address = addr.addChecksum(item.address);
}
}
if (data.type === 'eip7702' && data.data.authorizationList) {
for (const item of data.data.authorizationList) {
item.address = addr.addChecksum(item.address);
}
}
return data;

@@ -392,3 +419,3 @@ },

// NOTE: we cannot handle this validation in the coder, since it requires chainId to calculate the correct checksum
for (const [address, _] of list) {
for (const { address } of list) {
if (!addr.isValid(address))

@@ -398,2 +425,13 @@ throw new Error('address checksum does not match');

},
authorizationList(list, opts) {
for (const { address, nonce, chainId } of list) {
if (!addr.isValid(address))
throw new Error('address checksum does not match');
// chainId in authorization list can be zero (==allow any chain)
abig(chainId);
if (opts.strict)
minmax(chainId, 0n, amounts.maxChainId, '>= 0 and <= 2**32-1');
this.nonce(nonce, opts);
}
},
};

@@ -456,3 +494,3 @@ export class AggregatedError extends Error {

'maxFeePerGas', 'maxFeePerBlobGas', 'maxPriorityFeePerGas', 'gasPrice', 'gasLimit',
'accessList', 'blobVersionedHashes', 'chainId', 'data', 'type',
'accessList', 'authorizationList', 'blobVersionedHashes', 'chainId', 'data', 'type',
'r', 's', 'yParity', 'v'

@@ -459,0 +497,0 @@ ];

@@ -78,3 +78,6 @@ import { isBytes as _isBytes, hexToBytes as _hexToBytes, bytesToHex } from '@noble/hashes/utils';

export function cloneDeep(obj) {
if (Array.isArray(obj)) {
if (obj instanceof Uint8Array) {
return Uint8Array.from(obj);
}
else if (Array.isArray(obj)) {
return obj.map(cloneDeep);

@@ -81,0 +84,0 @@ }
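The new `cloneDeep` branch handles typed arrays explicitly: `Array.isArray` is false for `Uint8Array`, so such values previously fell through to the generic path instead of being copied as bytes. A sketch of the behavior after the change:

```ts
const sig = new Uint8Array([1, 2, 3]);
const copy = cloneDeep(sig);
copy instanceof Uint8Array; // true: still a typed array
copy !== sig; // true: a fresh copy, not a shared reference
```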

import { addr } from './address.js';
import { TxType, TxCoder } from './tx.js';
import { TxType, TxCoder, AuthorizationItem, AuthorizationRequest } from './tx.js';
import { weieth, weigwei } from './utils.js';
export { addr, weigwei, weieth };
/**
* Basic message signing & verification. Matches ethers and etherscan behavior.
* TODO: research whether EIP-191 and EIP-712 are popular, add them.
* EIP-7702 Authorizations
*/
export declare const messenger: {
sign(msg: string, privateKey: string, extraEntropy?: undefined): string;
verify(signature: string, msg: string, address: string): boolean;
export declare const authorization: {
_getHash(req: AuthorizationRequest): Uint8Array;
sign(req: AuthorizationRequest, privateKey: string): AuthorizationItem;
getAuthority(item: AuthorizationItem): string;
};
declare const DEFAULTS: {
readonly accessList: readonly [];
readonly authorizationList: readonly [];
readonly chainId: 1n;

@@ -67,4 +68,4 @@ readonly data: "";

setWholeAmount(accountBalance: bigint, burnRemaining?: boolean): Transaction<T>;
static fromRawBytes(bytes: Uint8Array, strict?: boolean): Transaction<"legacy" | "eip2930" | "eip1559" | "eip4844">;
static fromHex(hex: string, strict?: boolean): Transaction<"legacy" | "eip2930" | "eip1559" | "eip4844">;
static fromRawBytes(bytes: Uint8Array, strict?: boolean): Transaction<"legacy" | "eip2930" | "eip1559" | "eip4844" | "eip7702">;
static fromHex(hex: string, strict?: boolean): Transaction<"legacy" | "eip2930" | "eip1559" | "eip4844" | "eip7702">;
private assertIsSigned;

@@ -71,0 +72,0 @@ /**

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Transaction = exports.messenger = exports.weieth = exports.weigwei = exports.addr = void 0;
exports.Transaction = exports.authorization = exports.weieth = exports.weigwei = exports.addr = void 0;
/*! micro-eth-signer - MIT License (c) 2021 Paul Miller (paulmillr.com) */

@@ -12,2 +12,3 @@ const secp256k1_1 = require("@noble/curves/secp256k1");

const tx_js_1 = require("./tx.js");
const rlp_js_1 = require("./rlp.js");
// prettier-ignore

@@ -19,30 +20,20 @@ const utils_js_1 = require("./utils.js");

/**
* Basic message signing & verification. Matches ethers and etherscan behavior.
* TODO: research whether EIP-191 and EIP-712 are popular, add them.
* EIP-7702 Authorizations
*/
exports.messenger = {
sign(msg, privateKey, extraEntropy = undefined) {
(0, utils_js_1.astr)(msg);
exports.authorization = {
_getHash(req) {
const msg = rlp_js_1.RLP.encode(tx_js_1.authorizationRequest.decode(req));
return (0, sha3_1.keccak_256)((0, utils_1.concatBytes)(new Uint8Array([0x05]), msg));
},
sign(req, privateKey) {
(0, utils_js_1.astr)(privateKey);
const hash = (0, sha3_1.keccak_256)(msg);
const sig = secp256k1_1.secp256k1.sign(hash, utils_js_1.ethHex.decode(privateKey), { extraEntropy });
const end = sig.recovery === 0 ? '1b' : '1c';
return (0, utils_js_1.add0x)(sig.toCompactHex() + end);
const sig = secp256k1_1.secp256k1.sign(this._getHash(req), utils_js_1.ethHex.decode(privateKey));
return { ...req, r: sig.r, s: sig.s, yParity: sig.recovery };
},
verify(signature, msg, address) {
(0, utils_js_1.astr)(signature);
(0, utils_js_1.astr)(msg);
(0, utils_js_1.astr)(address);
signature = (0, utils_js_1.strip0x)(signature);
if (signature.length !== 65 * 2)
throw new Error('invalid signature length');
const sigh = signature.slice(0, -2);
const end = signature.slice(-2);
if (!['1b', '1c'].includes(end))
throw new Error('invalid recovery bit');
const sig = secp256k1_1.secp256k1.Signature.fromCompact(sigh).addRecoveryBit(end === '1b' ? 0 : 1);
const hash = (0, sha3_1.keccak_256)(msg);
const pub = sig.recoverPublicKey(hash).toHex(false);
const recoveredAddr = address_js_1.addr.fromPublicKey(pub);
return recoveredAddr === address && secp256k1_1.secp256k1.verify(sig, hash, pub);
getAuthority(item) {
const { r, s, yParity, ...req } = item;
const hash = this._getHash(req);
const sig = new secp256k1_1.secp256k1.Signature(r, s).addRecoveryBit(yParity);
const point = sig.recoverPublicKey(hash);
return address_js_1.addr.fromPublicKey(point.toHex(false));
},

@@ -54,2 +45,3 @@ };

accessList: [], // needs to be .slice()-d to create new reference
authorizationList: [],
chainId: 1n, // mainnet

@@ -88,3 +80,3 @@ data: '',

raw[f] = DEFAULTS[f];
if (f === 'accessList')
if (['accessList', 'authorizationList'].includes(f))
raw[f] = (0, utils_js_1.cloneDeep)(raw[f]);

@@ -91,0 +83,0 @@ }

import { IWeb3Provider, Web3CallArgs } from '../utils.js';
import { TxVersions } from '../tx.js';
import { TxVersions, AccessList } from '../tx.js';
import { ContractInfo } from '../abi/index.js';

@@ -63,3 +63,3 @@ export type BlockInfo = {

hash: string;
accessList?: [string, string[]][];
accessList?: AccessList;
transactionIndex: number;

@@ -196,3 +196,3 @@ type: number;

txInfo(txHash: string, opts?: TxInfoOpts): Promise<{
type: "legacy" | "eip2930" | "eip1559" | "eip4844";
type: "legacy" | "eip2930" | "eip1559" | "eip4844" | "eip7702";
info: any;

@@ -199,0 +199,0 @@ receipt: any;

@@ -104,5 +104,2 @@ "use strict";

}
// Same API as Transaction, so we can re-create easily
if (info.accessList)
info.accessList = info.accessList.map((i) => [i.address, i.storageKeys]);
return info;

@@ -109,0 +106,0 @@ }

{
"name": "micro-eth-signer",
"version": "0.11.0",
"version": "0.12.0",
"description": "Minimal library for Ethereum transactions, addresses and smart contracts",

@@ -25,2 +25,3 @@ "files": [

"@paulmillr/jsbt": "0.2.1",
"@paulmillr/trusted-setups": "0.1.2",
"micro-bmark": "0.3.1",

@@ -27,0 +28,0 @@ "micro-ftch": "0.4.0",

@@ -45,3 +45,3 @@ # micro-eth-signer

- [Send whole account balance](#send-whole-account-balance)
- [Sign and verify messages with EIP-191 and EIP-712](#sign-and-verify-messages-with-eip-191-and-eip-712)
- [Sign and verify messages with EIP-191, EIP-712](#sign-and-verify-messages)
- [Security](#security)

@@ -363,5 +363,5 @@ - [Performance](#performance)

### Sign and verify messages with EIP-191 and EIP-712
### Sign and verify messages
We support EIP-191 and EIP-712.
#### EIP-191

@@ -371,58 +371,73 @@ ```ts

// EIP-191
const sig = typed.personal.sign(message, privateKey);
typed.personal.verify(sig, message, address);
typed.personal.recoverPublicKey(sig, message);
// Example message
const message = "Hello, Ethereum!";
const privateKey = "0x4c0883a69102937d6231471b5dbb6204fe512961708279f1d7b1b8e7e8b1b1e1";
// EIP-712
const typedData = {
types: {
EIP712Domain: [
{ name: 'name', type: 'string' },
{ name: 'version', type: 'string' },
{ name: 'chainId', type: 'uint256' },
{ name: 'verifyingContract', type: 'address' },
],
Person: [
{ name: 'name', type: 'string' },
{ name: 'wallet', type: 'address' },
],
Mail: [
{ name: 'from', type: 'Person' },
{ name: 'to', type: 'Person' },
{ name: 'contents', type: 'string' },
],
// Sign the message
const signature = typed.personal.sign(message, privateKey);
console.log("Signature:", signature);
// Verify the signature
const address = "0xYourEthereumAddress";
const isValid = typed.personal.verify(signature, message, address);
console.log("Is valid:", isValid);
```
#### EIP-712
```ts
import { signTyped, verifyTyped, recoverPublicKeyTyped, EIP712Domain, TypedData } from './typed-data';
const types = {
Person: [
{ name: 'name', type: 'string' },
{ name: 'wallet', type: 'address' }
],
Mail: [
{ name: 'from', type: 'Person' },
{ name: 'to', type: 'Person' },
{ name: 'contents', type: 'string' }
]
};
// Define the domain
const domain: EIP712Domain = {
name: 'Ether Mail',
version: '1',
chainId: 1,
verifyingContract: '0xCcCCccccCCCCcCCCCCCcCcCccCcCCCcCcccccccC',
salt: '0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef'
};
// Define the message
const message = {
from: {
name: 'Alice',
wallet: '0xCcCCccccCCCCcCCCCCCcCcCccCcCCCcCcccccccC'
},
primaryType: 'Mail',
domain: {
name: 'Ether Mail',
version: '1',
chainId: 1,
verifyingContract: '0xCcCCccccCCCCcCCCCCCcCcCccCcCCCcCcccccccC',
to: {
name: 'Bob',
wallet: '0xbBbBBBBbbBBBbbbBbbBbbbbBBbBbbbbBbBbbBBbB'
},
message: {
from: { name: 'Cow', wallet: '0xCD2a3d9F938E13CD947Ec05AbC7FE734Df8DD826' },
to: { name: 'Bob', wallet: '0xbBbBBBBbbBBBbbbBbbBbbbbBBbBbbbbBbBbbBBbB' },
contents: 'Hello, Bob!',
},
contents: 'Hello, Bob!'
};
const privateKey = keccak_256('cow');
const address = addr.fromPrivateKey(privateKey);
// address === '0xCD2a3d9F938E13CD947Ec05AbC7FE734Df8DD826';
// typed.encodeData(typedData) === '0xa0ce...'
);
const e = typed.encoder(typedData.types, typedData.domain);
// e._getHash(typedData.primaryType, typedData.message) == '0xbe60...';
const sig =
'0x4355c47d63924e8a72e509b65029052eb6c299d53a04e167c5775fd466751c9d' +
'07299936d304c153f6443dfa05f40ff007d72911b6f72307f996231605b91562' +
(28).toString(16);
// e.sign(typedData.primaryType, typedData.message, privateKey) === sig;
// e.verify(typedData.primaryType, sig, typedData.message, address) === true;
// e.recoverPublicKey(typedData.primaryType, sig, typedData.message) === address;
// // Utils
// typed.signTyped(typedData, privateKey) === sig;
// typed.verifyTyped(sig, typedData, address) === true;
// typed.recoverPublicKeyTyped(sig, typedData) === address;
// Create the typed data
const typedData: TypedData<typeof types, 'Mail'> = {
types,
primaryType: 'Mail',
domain,
message
};
// Sign the typed data
const privateKey = "0x4c0883a69102937d6231471b5dbb6204fe512961708279f1d7b1b8e7e8b1b1e1";
const signature = signTyped(typedData, privateKey);
console.log("Signature:", signature);
// Verify the signature
const address = "0xYourEthereumAddress";
const isValid = verifyTyped(signature, typedData, address);
// Recover the public key
const publicKey = recoverPublicKeyTyped(signature, typedData);
```

@@ -436,24 +451,30 @@

import { KZG } from 'micro-eth-signer/kzg';
import { trustedSetup } from 'trusted-setups'; // 400kb, 4-sec init
import { trustedSetup as fastSetup } from 'trusted-setups/fast.js'; // 800kb, instant init
// 400kb, 4-sec init
import { trustedSetup } from '@paulmillr/trusted-setups';
// 800kb, instant init
import { trustedSetup as fastSetup } from '@paulmillr/trusted-setups/fast.js';
const kzg = new KZG(trustedSetup);
// kzg.computeProof(blob, z);
// kzg.verifyBlobProof(blobs, commitments, proofs);
```
All methods:
// Example blob and scalar
const blob = '0x1234567890abcdef'; // Add actual blob data
const z = '0x1'; // Add actual scalar
```ts
type Blob = string | string[] | bigint[];
type Scalar = string | bigint;
export declare class KZG {
constructor(setup: SetupData);
computeProof(blob: Blob, z: bigint | string): [string, string];
verifyProof(commitment: string, z: Scalar, y: Scalar, proof: string): boolean;
blobToKzgCommitment(blob: Blob): string;
computeBlobProof(blob: Blob, commitment: string): string;
verifyBlobProof(blob: Blob, commitment: string, proof: string): boolean;
verifyBlobProofBatch(blobs: string[], commitments: string[], proofs: string[]): boolean;
}
// Compute and verify proof
const [proof, y] = kzg.computeProof(blob, z);
console.log('Proof:', proof);
console.log('Y:', y);
const commitment = '0x1234567890abcdef'; // Add actual commitment
const z = '0x1'; // Add actual scalar
// const y = '0x2'; // Add actual y value
const proof = '0x3'; // Add actual proof
const isValid = kzg.verifyProof(commitment, z, y, proof);
console.log('Is valid:', isValid);
// Compute and verify blob proof
const blob = '0x1234567890abcdef'; // Add actual blob data
const commitment = '0x1'; // Add actual commitment
const proof = kzg.computeBlobProof(blob, commitment);
console.log('Blob proof:', proof);
const isValidB = kzg.verifyBlobProof(blob, commitment, proof);
```

@@ -530,3 +551,3 @@

Make sure to use recursive cloning for tests:
Make sure to use recursive cloning for the [eth-vectors](https://github.com/paulmillr/eth-vectors) submodule:

@@ -533,0 +554,0 @@ git clone --recursive https://github.com/paulmillr/micro-eth-signer.git

/*! micro-eth-signer - MIT License (c) 2021 Paul Miller (paulmillr.com) */
import { secp256k1 } from '@noble/curves/secp256k1';
import { keccak_256 } from '@noble/hashes/sha3';
import { bytesToHex } from '@noble/hashes/utils';
import { bytesToHex, concatBytes } from '@noble/hashes/utils';
import { UnwrapCoder } from 'micro-packed';

@@ -11,6 +11,8 @@ import { addr } from './address.js';

decodeLegacyV, removeSig, sortRawData, validateFields,
AuthorizationItem, AuthorizationRequest, authorizationRequest
} from './tx.js';
import { RLP } from './rlp.js';
// prettier-ignore
import {
amounts, astr, add0x, ethHex, ethHexNoLeadingZero, strip0x, weieth, weigwei, cloneDeep,
amounts, astr, ethHex, ethHexNoLeadingZero, strip0x, weieth, weigwei, cloneDeep,
} from './utils.js';

@@ -22,31 +24,22 @@ export { addr, weigwei, weieth };

/**
* Basic message signing & verification. Matches ethers and etherscan behavior.
* TODO: research whether EIP-191 and EIP-712 are popular, add them.
* EIP-7702 Authorizations
*/
export const messenger = {
sign(msg: string, privateKey: string, extraEntropy = undefined) {
astr(msg);
export const authorization = {
_getHash(req: AuthorizationRequest) {
const msg = RLP.encode(authorizationRequest.decode(req));
return keccak_256(concatBytes(new Uint8Array([0x05]), msg));
},
sign(req: AuthorizationRequest, privateKey: string): AuthorizationItem {
astr(privateKey);
const hash = keccak_256(msg);
const sig = secp256k1.sign(hash, ethHex.decode(privateKey), { extraEntropy });
const end = sig.recovery === 0 ? '1b' : '1c';
return add0x(sig.toCompactHex() + end);
const sig = secp256k1.sign(this._getHash(req), ethHex.decode(privateKey));
return { ...req, r: sig.r, s: sig.s, yParity: sig.recovery };
},
verify(signature: string, msg: string, address: string) {
astr(signature);
astr(msg);
astr(address);
signature = strip0x(signature);
if (signature.length !== 65 * 2) throw new Error('invalid signature length');
const sigh = signature.slice(0, -2);
const end = signature.slice(-2);
if (!['1b', '1c'].includes(end)) throw new Error('invalid recovery bit');
const sig = secp256k1.Signature.fromCompact(sigh).addRecoveryBit(end === '1b' ? 0 : 1);
const hash = keccak_256(msg);
const pub = sig.recoverPublicKey(hash).toHex(false);
const recoveredAddr = addr.fromPublicKey(pub);
return recoveredAddr === address && secp256k1.verify(sig, hash, pub);
getAuthority(item: AuthorizationItem) {
const { r, s, yParity, ...req } = item;
const hash = this._getHash(req);
const sig = new secp256k1.Signature(r, s).addRecoveryBit(yParity);
const point = sig.recoverPublicKey(hash);
return addr.fromPublicKey(point.toHex(false));
},
};
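// Usage sketch (hypothetical values, not part of the library source): sign an
// EIP-7702 authorization with a 0x-prefixed private key, then recover the
// checksummed authority address from the signed item.
//   const auth = authorization.sign(
//     { chainId: 1n, address: '0x0000000000000000000000000000000000000000', nonce: 0n },
//     privateKey // assumed: 0x-prefixed 32-byte hex string
//   );
//   const authority = authorization.getAuthority(auth); // === signer's address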
// Transaction-related utils.

@@ -57,2 +50,3 @@

accessList: [], // needs to be .slice()-d to create new reference
authorizationList: [],
chainId: 1n, // mainnet

@@ -134,3 +128,3 @@ data: '',

raw[f] = DEFAULTS[f as DefaultField];
if (f === 'accessList') raw[f] = cloneDeep(raw[f]);
if (['accessList', 'authorizationList'].includes(f)) raw[f] = cloneDeep(raw[f]);
}

@@ -137,0 +131,0 @@ }

import { IWeb3Provider, Web3CallArgs, hexToNumber, amounts } from '../utils.js';
import { Transaction } from '../index.js';
import { TxVersions, legacySig } from '../tx.js';
import { TxVersions, legacySig, AccessList } from '../tx.js';
import { ContractInfo, createContract, events, ERC20, WETH } from '../abi/index.js';

@@ -107,3 +107,3 @@

hash: string;
accessList?: [string, string[]][];
accessList?: AccessList;
transactionIndex: number;

@@ -267,5 +267,2 @@ type: number;

}
// Same API as Transaction, so we can re-create easily
if (info.accessList)
info.accessList = info.accessList.map((i: any) => [i.address, i.storageKeys]);
return info;

@@ -272,0 +269,0 @@ }

import * as P from 'micro-packed';
import { sha256 } from '@noble/hashes/sha2';
import { isBytes } from './utils.js';
import { isBytes, isObject } from './utils.js';
/*

@@ -32,2 +32,3 @@

default: T;
info: { type: string };
  // merkleRoot is calculated differently for composite types (even if they are fixed size)

@@ -39,2 +40,3 @@ composite: boolean;

merkleRoot: (value: T) => Uint8Array;
_isStableCompat: (other: SSZCoder<any>) => boolean;
};

@@ -104,3 +106,58 @@

const basic = <T>(inner: P.CoderType<T>, def: T): SSZCoder<T> => ({
// TODO: improve
const isStableCompat = <T>(a: SSZCoder<T>, b: SSZCoder<any>): boolean => {
if (a === b) return true; // fast path
const _a = a as any;
const _b = b as any;
if (_a.info && _b.info) {
const aI = _a.info;
const bI = _b.info;
// Bitlist[N] / Bitvector[N] field types are compatible if they share the same capacity N.
const bitTypes = ['bitList', 'bitVector'];
if (bitTypes.includes(aI.type) && bitTypes.includes(bI.type) && aI.N === bI.N) return true;
// List[T, N] / Vector[T, N] field types are compatible if T is compatible and if they also share the same capacity N.
const listTypes = ['list', 'vector'];
if (
listTypes.includes(aI.type) &&
listTypes.includes(bI.type) &&
aI.N === bI.N &&
aI.inner._isStableCompat(bI.inner)
) {
return true;
}
// Container / StableContainer[N] field types are compatible if all inner field types are compatible,
// if they also share the same field names in the same order, and for StableContainer[N] if they also
// share the same capacity N.
const contType = ['container', 'stableContainer'];
if (contType.includes(aI.type) && contType.includes(bI.type)) {
// both stable containers, but different capacity
if (aI.N !== undefined && bI.N !== undefined && aI.N !== bI.N) return false;
const kA = Object.keys(aI.fields);
const kB = Object.keys(bI.fields);
if (kA.length !== kB.length) return false;
for (let i = 0; i < kA.length; i++) {
const fA = kA[i];
const fB = kB[i];
if (fA !== fB) return false;
if (!aI.fields[fA]._isStableCompat(bI.fields[fA])) return false;
}
return true;
}
// Profile[X] field types are compatible with StableContainer types compatible with X, and
// are compatible with Profile[Y] where Y is compatible with X if also all inner field types
// are compatible. Differences solely in optionality do not affect merkleization compatibility.
if (aI.type === 'profile' || bI.type === 'profile') {
if (aI.type === 'profile' && bI.type === 'stableContainer')
return aI.container._isStableCompat(b);
if (aI.type === 'stableContainer' && bI.type === 'profile')
return a._isStableCompat(bI.container);
if (aI.type === 'profile' && bI.type === 'profile')
return aI.container._isStableCompat(bI.container);
}
}
return false;
};
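// Examples following the rules above: list(4, uint8) is compatible with
// vector(4, uint8) (same inner type and capacity N), while list(4, uint8) vs
// list(8, uint8) (different N) or bitlist(4) vs list(4, uint8) are not.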
const basic = <T>(type: string, inner: P.CoderType<T>, def: T): SSZCoder<T> => ({
...inner,

@@ -110,2 +167,6 @@ default: def,

composite: false,
info: { type },
_isStableCompat(other) {
return isStableCompat(this, other);
},
chunks(value: T) {

@@ -137,9 +198,9 @@ return [this.merkleRoot(value)];

export const uint8 = basic(int(1), 0);
export const uint16 = basic(int(2), 0);
export const uint32 = basic(int(4), 0);
export const uint64 = basic(int(8, false), 0n);
export const uint128 = basic(int(16, false), 0n);
export const uint256 = basic(int(32, false), 0n);
export const boolean = basic(P.bool, false);
export const uint8 = basic('uint8', int(1), 0);
export const uint16 = basic('uint16', int(2), 0);
export const uint32 = basic('uint32', int(4), 0);
export const uint64 = basic('uint64', int(8, false), 0n);
export const uint128 = basic('uint128', int(16, false), 0n);
export const uint256 = basic('uint256', int(32, false), 0n);
export const boolean = basic('boolean', P.bool, false);

@@ -179,6 +240,7 @@ const array = <T>(len: P.Length, inner: SSZCoder<T>): P.CoderType<T[]> => {

type VectorType<T> = SSZCoder<T[]> & { info: { type: 'vector'; N: number; inner: SSZCoder<T> } };
/**
* Vector: fixed size ('len') array of elements 'inner'
*/
export const vector = <T>(len: number, inner: SSZCoder<T>): SSZCoder<T[]> => {
export const vector = <T>(len: number, inner: SSZCoder<T>): VectorType<T> => {
if (!Number.isSafeInteger(len) || len <= 0)

@@ -188,2 +250,6 @@ throw new Error(`SSZ/vector: wrong length=${len} (should be positive integer)`);

...array(len, inner),
info: { type: 'vector', N: len, inner },
_isStableCompat(other) {
return isStableCompat(this, other);
},
default: new Array(len).fill(inner.default),

@@ -201,6 +267,7 @@ composite: true,

};
type ListType<T> = SSZCoder<T[]> & { info: { type: 'list'; N: number; inner: SSZCoder<T> } };
/**
* List: dynamic array of 'inner' elements with length limit maxLen
*/
export const list = <T>(maxLen: number, inner: SSZCoder<T>): SSZCoder<T[]> => {
export const list = <T>(maxLen: number, inner: SSZCoder<T>): ListType<T> => {
checkSSZ(inner);

@@ -214,2 +281,6 @@ const coder = P.validate(array(null, inner), (value) => {

...coder,
info: { type: 'list', N: maxLen, inner },
_isStableCompat(other) {
return isStableCompat(this, other);
},
composite: true,

@@ -228,5 +299,31 @@ chunkCount: !inner.composite ? Math.ceil((maxLen * inner.size!) / BYTES_PER_CHUNK) : maxLen,

const wrapPointer = <T>(p: P.CoderType<T>) => (p.size === undefined ? P.pointer(P.U32LE, p) : p);
const wrapRawPointer = <T>(p: P.CoderType<T>) => (p.size === undefined ? P.U32LE : p);
// TODO: improve, unclear how
const fixOffsets = (
r: P.Reader,
fields: Record<string, P.CoderType<any>>,
offsetFields: string[],
obj: Record<string, any>,
offset: number
) => {
const offsets = [];
for (const f of offsetFields) offsets.push(obj[f] + offset);
for (let i = 0; i < offsets.length; i++) {
// TODO: how to merge this with array?
const name = offsetFields[i];
const pos = offsets[i];
const next = i + 1 < offsets.length ? offsets[i + 1] : r.totalBytes;
if (next < pos) throw r.err('SSZ/container: decreasing offset');
const len = next - pos;
if (r.pos !== pos) throw r.err('SSZ/container: wrong offset');
obj[name] = fields[name].decode(r.bytes(len));
}
return obj;
};
type ContainerCoder<T extends Record<string, SSZCoder<any>>> = SSZCoder<{
[K in keyof T]: P.UnwrapCoder<T[K]>;
}>;
}> & { info: { type: 'container'; fields: T } };

@@ -241,10 +338,6 @@ /**

const ptrCoder = P.struct(
Object.fromEntries(
Object.entries(fields).map(([k, v]) => [k, v.size === undefined ? P.pointer(P.U32LE, v) : v])
)
Object.fromEntries(Object.entries(fields).map(([k, v]) => [k, wrapPointer(v)]))
) as ContainerCoder<T>;
const fixedCoder = P.struct(
Object.fromEntries(
Object.entries(fields).map(([k, v]) => [k, v.size === undefined ? P.U32LE : v])
)
Object.fromEntries(Object.entries(fields).map(([k, v]) => [k, wrapRawPointer(v)]))
);

@@ -254,21 +347,10 @@ const offsetFields = Object.keys(fields).filter((i) => fields[i].size === undefined);

encodeStream: ptrCoder.encodeStream,
decodeStream: (r) => {
const fixed = fixedCoder.decodeStream(r);
const offsets = [];
for (const f in fields) if (fields[f].size === undefined) offsets.push(fixed[f]);
for (let i = 0; i < offsets.length; i++) {
// TODO: how to merge this with array?
const name = offsetFields[i];
const pos = offsets[i];
const next = i + 1 < offsets.length ? offsets[i + 1] : r.totalBytes;
if (next < pos) throw r.err('SSZ/container: decreasing offset');
const len = next - pos;
if (r.pos !== pos) throw r.err('SSZ/container: wrong offset');
fixed[name] = fields[name].decode(r.bytes(len));
}
return fixed as any;
},
decodeStream: (r) => fixOffsets(r, fields, offsetFields, fixedCoder.decodeStream(r), 0) as any,
}) as ContainerCoder<T>;
return {
...coder,
info: { type: 'container', fields },
_isStableCompat(other) {
return isStableCompat(this, other);
},
    size: offsetFields.length ? undefined : fixedCoder.size, // structure is fixed size if all fields are fixed size

@@ -305,7 +387,7 @@ default: Object.fromEntries(Object.entries(fields).map(([k, v]) => [k, v.default])) as {

});
type BitVectorType = SSZCoder<boolean[]> & { info: { type: 'bitVector'; N: number } };
/**
* BitVector: array of booleans with fixed size
*/
export const bitvector = (len: number): SSZCoder<boolean[]> => {
export const bitvector = (len: number): BitVectorType => {
if (!Number.isSafeInteger(len) || len <= 0)

@@ -317,2 +399,6 @@ throw new Error(`SSZ/bitVector: wrong length=${len} (should be positive integer)`);

...coder,
info: { type: 'bitVector', N: len },
_isStableCompat(other) {
return isStableCompat(this, other);
},
default: new Array(len).fill(false),

@@ -329,7 +415,7 @@ composite: true,

};
type BitListType = SSZCoder<boolean[]> & { info: { type: 'bitList'; N: number } };
/**
* BitList: array of booleans with dynamic size (but maxLen limit)
*/
export const bitlist = (maxLen: number): SSZCoder<boolean[]> => {
export const bitlist = (maxLen: number): BitListType => {
if (!Number.isSafeInteger(maxLen) || maxLen <= 0)

@@ -358,2 +444,6 @@ throw new Error(`SSZ/bitList: wrong max length=${maxLen} (should be positive integer)`);

...coder,
info: { type: 'bitList', N: maxLen },
_isStableCompat(other) {
return isStableCompat(this, other);
},
size: undefined,

@@ -404,2 +494,5 @@ default: [],

default: { selector: 0, value: types[0] === null ? null : types[0].default },
_isStableCompat(other) {
return isStableCompat(this, other);
},
composite: true,

@@ -418,7 +511,9 @@ chunks({ selector, value }) {

};
type ByteListType = SSZCoder<Uint8Array> & {
info: { type: 'list'; N: number; inner: typeof byte };
};
/**
* ByteList: same as List(len, SSZ.byte), but returns Uint8Array
*/
export const bytelist = (maxLen: number): SSZCoder<Uint8Array> => {
export const bytelist = (maxLen: number): ByteListType => {
const coder = P.validate(P.bytes(null), (value) => {

@@ -431,2 +526,6 @@ if (!isBytes(value) || value.length > maxLen)

...coder,
info: { type: 'list', N: maxLen, inner: byte },
_isStableCompat(other) {
return isStableCompat(this, other);
},
default: new Uint8Array([]),

@@ -443,7 +542,9 @@ composite: true,

};
type ByteVectorType = SSZCoder<Uint8Array> & {
info: { type: 'vector'; N: number; inner: typeof byte };
};
/**
* ByteVector: same as Vector(len, SSZ.byte), but returns Uint8Array
*/
export const bytevector = (len: number): SSZCoder<Uint8Array> => {
export const bytevector = (len: number): ByteVectorType => {
if (!Number.isSafeInteger(len) || len <= 0)

@@ -453,2 +554,6 @@ throw new Error(`SSZ/vector: wrong length=${len} (should be positive integer)`);

...P.bytes(len),
info: { type: 'vector', N: len, inner: byte },
_isStableCompat(other) {
return isStableCompat(this, other);
},
default: new Uint8Array(len),

@@ -466,2 +571,216 @@ composite: true,

type StableContainerCoder<T extends Record<string, SSZCoder<any>>> = SSZCoder<{
[K in keyof T]?: P.UnwrapCoder<T[K]>;
}> & { info: { type: 'stableContainer'; N: number; fields: T } };
/**
 * Same as container, but all values are optional: a leading bitvector prefix indicates which fields are active
*/
export const stableContainer = <T extends Record<string, SSZCoder<any>>>(
N: number,
fields: T
): StableContainerCoder<T> => {
const fieldsNames = Object.keys(fields) as (keyof T)[];
const fieldsLen = fieldsNames.length;
if (!fieldsLen) throw new Error('SSZ/stableContainer: no fields');
if (fieldsLen > N) throw new Error('SSZ/stableContainer: more fields than N');
const bv = bitvector(N);
const coder = P.wrap({
encodeStream: (w, value) => {
const bsVal = new Array(N).fill(false);
for (let i = 0; i < fieldsLen; i++) if (value[fieldsNames[i]] !== undefined) bsVal[i] = true;
bv.encodeStream(w, bsVal);
const activeFields = fieldsNames.filter((_, i) => bsVal[i]);
const ptrCoder = P.struct(
Object.fromEntries(activeFields.map((k) => [k, wrapPointer(fields[k])]))
) as StableContainerCoder<T>;
w.bytes(ptrCoder.encode(value));
},
decodeStream: (r) => {
const bsVal = bv.decodeStream(r);
for (let i = fieldsLen; i < bsVal.length; i++) {
if (bsVal[i] !== false) throw new Error('stableContainer: non-zero padding');
}
const activeFields = fieldsNames.filter((_, i) => bsVal[i]);
const fixedCoder = P.struct(
Object.fromEntries(activeFields.map((k) => [k, wrapRawPointer(fields[k])]))
);
const offsetFields = activeFields.filter((i) => fields[i].size === undefined);
return fixOffsets(r, fields, offsetFields as any, fixedCoder.decodeStream(r), bv.size!);
},
}) as StableContainerCoder<T>;
return {
...coder,
info: { type: 'stableContainer', N, fields },
size: undefined,
default: Object.fromEntries(Object.entries(fields).map(([k, _v]) => [k, undefined])) as {
[K in keyof T]: P.UnwrapCoder<T[K]>;
},
_isStableCompat(other) {
return isStableCompat(this, other);
},
composite: true,
chunkCount: N,
chunks(value: P.UnwrapCoder<StableContainerCoder<T>>) {
const res = Object.entries(fields).map(([k, v]) =>
value[k] === undefined ? new Uint8Array(32) : v.merkleRoot(value[k])
);
while (res.length < N) res.push(new Uint8Array(32));
return res;
},
merkleRoot(value: P.UnwrapCoder<StableContainerCoder<T>>) {
const bsVal = new Array(N).fill(false);
for (let i = 0; i < fieldsLen; i++) if (value[fieldsNames[i]] !== undefined) bsVal[i] = true;
return hash(merkleize(this.chunks(value as any)), bv.merkleRoot(bsVal));
},
};
};
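// Usage sketch (illustrative; see the Shape example in the profile docs below):
// absent fields are recorded in the leading bitvector, so partial values round-trip.
//   const Shape = stableContainer(4, { side: uint16, color: uint8, radius: uint16 });
//   const bytes = Shape.encode({ side: 16, color: 2 }); // radius bit is cleared
//   Shape.decode(bytes); // -> { side: 16, color: 2 }, radius absent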
type ProfileCoder<
T extends Record<string, SSZCoder<any>>,
OptK extends keyof T & string,
ReqK extends keyof T & string,
> = SSZCoder<{ [K in ReqK]: P.UnwrapCoder<T[K]> } & { [K in OptK]?: P.UnwrapCoder<T[K]> }> & {
info: { type: 'profile'; container: StableContainerCoder<T> };
};
/**
* Profile - fixed subset of stableContainer.
 * - fields and their order are exactly the same as in the underlying container
 * - fields may be excluded from or required in a profile (all fields of a stable container are optional)
 * - adding new fields to the underlying container won't change profiles constructed on top of it,
 *   because the full list of optional fields must be provided explicitly.
 * - a field's type can be changed inside a profile (but we should be very explicit about this) to a type of the same shape.
*
* @example
* // class Shape(StableContainer[4]):
* // side: Optional[uint16]
* // color: Optional[uint8]
* // radius: Optional[uint16]
*
* // class Square(Profile[Shape]):
* // side: uint16
* // color: uint8
*
* // class Circle(Profile[Shape]):
* // color: uint8
* // radius: Optional[uint16]
* // ->
* const Shape = SSZ.stableContainer(4, {
* side: SSZ.uint16,
* color: SSZ.uint8,
* radius: SSZ.uint16,
* });
* const Square = profile(Shape, [], ['side', 'color']);
* const Circle = profile(Shape, ['radius'], ['color']);
* const Circle2 = profile(Shape, ['radius'], ['color'], { color: SSZ.byte });
*/
export const profile = <
T extends Record<string, SSZCoder<any>>,
OptK extends keyof T & string,
ReqK extends keyof T & string,
>(
c: StableContainerCoder<T>,
optFields: OptK[],
requiredFields: ReqK[] = [],
replaceType: Record<string, any> = {}
): ProfileCoder<T, OptK, ReqK> => {
checkSSZ(c);
if (c.info.type !== 'stableContainer') throw new Error('profile: expected stableContainer');
const containerFields: Set<string> = new Set(Object.keys(c.info.fields));
if (!Array.isArray(optFields)) throw new Error('profile: optional fields should be array');
const optFS: Set<string> = new Set(optFields);
for (const f of optFS) {
if (!containerFields.has(f)) throw new Error(`profile: unexpected optional field ${f}`);
}
if (!Array.isArray(requiredFields)) throw new Error('profile: required fields should be array');
const reqFS: Set<string> = new Set(requiredFields);
for (const f of reqFS) {
if (!containerFields.has(f)) throw new Error(`profile: unexpected required field ${f}`);
if (optFS.has(f as any as OptK))
throw new Error(`profile: field ${f} is declared both as optional and required`);
}
if (!isObject(replaceType)) throw new Error('profile: replaceType should be object');
for (const k in replaceType) {
if (!containerFields.has(k)) throw new Error(`profile/replaceType: unexpected field ${k}`);
if (!replaceType[k]._isStableCompat(c.info.fields[k]))
throw new Error(`profile/replaceType: incompatible field ${k}`);
}
// Order should be same
const allFields = Object.keys(c.info.fields).filter((i) => optFS.has(i) || reqFS.has(i));
// bv is omitted if all fields are required!
const fieldCoders = { ...c.info.fields, ...replaceType };
let coder: ProfileCoder<T, OptK, ReqK>;
if (optFS.size === 0) {
    // All fields are required, so this is just a container, possibly fixed-size
coder = container(
Object.fromEntries(allFields.map((k) => [k, fieldCoders[k]]))
) as any as ProfileCoder<T, OptK, ReqK>;
} else {
// NOTE: we cannot merge this with stable container,
    // because some fields are active and some are not (based on required/non-required)
const bv = bitvector(optFS.size);
const forFields = (fn: (f: string, optPos: number | undefined) => void) => {
let optPos = 0;
for (const f of allFields) {
const isOpt = optFS.has(f);
fn(f, isOpt ? optPos : undefined);
if (isOpt) optPos++;
}
};
coder = {
...P.wrap({
encodeStream: (w, value) => {
const bsVal = new Array(optFS.size).fill(false);
const ptrCoder: any = {};
forFields((f, optPos) => {
const val = (value as any)[f];
if (optPos !== undefined && val !== undefined) bsVal[optPos] = true;
if (optPos === undefined && val === undefined)
throw new Error(`profile.encode: empty required field ${f}`);
if (val !== undefined) ptrCoder[f] = wrapPointer(fieldCoders[f]);
});
bv.encodeStream(w, bsVal);
w.bytes(P.struct(ptrCoder).encode(value));
},
decodeStream: (r) => {
let bsVal = bv.decodeStream(r);
const fixedCoder: any = {};
const offsetFields: string[] = [];
forFields((f, optPos) => {
if (optPos !== undefined && bsVal[optPos] === false) return;
if (fieldCoders[f].size === undefined) offsetFields.push(f);
fixedCoder[f] = wrapRawPointer(fieldCoders[f]);
});
return fixOffsets(
r,
fieldCoders,
offsetFields,
P.struct(fixedCoder).decodeStream(r),
bv.size!
) as any;
},
}),
size: undefined,
} as ProfileCoder<T, OptK, ReqK>;
}
return {
...coder,
info: { type: 'profile', container: c },
default: Object.fromEntries(Array.from(reqFS).map((f) => [f, fieldCoders[f].default])) as {
[K in ReqK]: P.UnwrapCoder<T[K]>;
} & { [K in OptK]?: P.UnwrapCoder<T[K]> },
_isStableCompat(other) {
return isStableCompat(this, other);
},
composite: true,
chunkCount: c.info.N,
chunks(value: P.UnwrapCoder<ProfileCoder<T, OptK, ReqK>>) {
return c.chunks(value);
},
merkleRoot(value: P.UnwrapCoder<ProfileCoder<T, OptK, ReqK>>) {
return c.merkleRoot(value);
},
};
};
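// Usage sketch (continues the Shape/Square/Circle example from the docs above):
//   Square.encode({ side: 16, color: 2 }); // all fields required -> plain container, no bitvector
//   Circle.encode({ color: 2 }); // radius is optional; its bit is cleared in the prefix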
// Aliases

@@ -506,2 +825,12 @@ export const byte = uint8;

const FINALIZED_ROOT_DEPTH = 6;
// Electra
const MAX_COMMITTEES_PER_SLOT = 64;
const PENDING_PARTIAL_WITHDRAWALS_LIMIT = 134217728;
const PENDING_BALANCE_DEPOSITS_LIMIT = 134217728;
const PENDING_CONSOLIDATIONS_LIMIT = 262144;
const MAX_ATTESTER_SLASHINGS_ELECTRA = 1;
const MAX_ATTESTATIONS_ELECTRA = 8;
const MAX_DEPOSIT_REQUESTS_PER_PAYLOAD = 8192;
const MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD = 16;
const MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD = 1;

@@ -818,2 +1147,33 @@ // We can reduce size if we inline these. But updates for new forks would be hard.

});
// Electra
const DepositRequest = container({
pubkey: BLSPubkey,
withdrawal_credentials: Bytes32,
amount: Gwei,
signature: BLSSignature,
index: uint64,
});
const WithdrawalRequest = container({
source_address: ExecutionAddress,
validator_pubkey: BLSPubkey,
amount: Gwei,
});
const ConsolidationRequest = container({
source_address: ExecutionAddress,
source_pubkey: BLSPubkey,
target_pubkey: BLSPubkey,
});
const PendingBalanceDeposit = container({
index: ValidatorIndex,
amount: Gwei,
});
const PendingPartialWithdrawal = container({
index: ValidatorIndex,
amount: Gwei,
withdrawable_epoch: Epoch,
});
const PendingConsolidation = container({
source_index: ValidatorIndex,
target_index: ValidatorIndex,
});

@@ -893,2 +1253,297 @@ export const ETH2_TYPES = {

LightClientFinalityUpdate,
// Electra
DepositRequest,
WithdrawalRequest,
ConsolidationRequest,
PendingBalanceDeposit,
PendingPartialWithdrawal,
PendingConsolidation,
};
// EIP-7688
const MAX_ATTESTATION_FIELDS = 8;
const MAX_INDEXED_ATTESTATION_FIELDS = 8;
const MAX_EXECUTION_PAYLOAD_FIELDS = 64;
const MAX_BEACON_BLOCK_BODY_FIELDS = 64;
const MAX_BEACON_STATE_FIELDS = 128;
const MAX_EXECUTION_REQUESTS_FIELDS = 16;
const StableAttestation = stableContainer(MAX_ATTESTATION_FIELDS, {
aggregation_bits: bitlist(MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT),
data: AttestationData,
signature: BLSSignature,
committee_bits: bitvector(MAX_COMMITTEES_PER_SLOT),
});
const StableIndexedAttestation = stableContainer(MAX_INDEXED_ATTESTATION_FIELDS, {
attesting_indices: list(MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT, ValidatorIndex),
data: AttestationData,
signature: BLSSignature,
});
const StableAttesterSlashing = container({
attestation_1: StableIndexedAttestation,
attestation_2: StableIndexedAttestation,
});
const StableExecutionRequests = stableContainer(MAX_EXECUTION_REQUESTS_FIELDS, {
deposits: list(MAX_DEPOSIT_REQUESTS_PER_PAYLOAD, DepositRequest), // [New in Electra:EIP6110]
withdrawals: list(MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD, WithdrawalRequest), // [New in Electra:EIP7002:EIP7251]
consolidations: list(MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD, ConsolidationRequest), // [New in Electra:EIP7251]
});
const StableExecutionPayload = stableContainer(MAX_EXECUTION_PAYLOAD_FIELDS, {
parent_hash: Hash32,
fee_recipient: ExecutionAddress,
state_root: Bytes32,
receipts_root: Bytes32,
logs_bloom: bytevector(BYTES_PER_LOGS_BLOOM),
prev_randao: Bytes32,
block_number: uint64,
gas_limit: uint64,
gas_used: uint64,
timestamp: uint64,
extra_data: bytelist(MAX_EXTRA_DATA_BYTES),
base_fee_per_gas: uint256,
block_hash: Hash32,
transactions: list(MAX_TRANSACTIONS_PER_PAYLOAD, Transaction),
withdrawals: list(MAX_WITHDRAWALS_PER_PAYLOAD, Withdrawal), // [New in Capella]
blob_gas_used: uint64,
excess_blob_gas: uint64,
deposit_requests: list(MAX_DEPOSIT_REQUESTS_PER_PAYLOAD, DepositRequest), // [New in Electra:EIP6110]
withdrawal_requests: list(MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD, WithdrawalRequest), // [New in Electra:EIP7002:EIP7251]
consolidation_requests: list(MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD, ConsolidationRequest), // [New in Electra:EIP7251]
});
const StableExecutionPayloadHeader = stableContainer(MAX_EXECUTION_PAYLOAD_FIELDS, {
parent_hash: Hash32,
fee_recipient: ExecutionAddress,
state_root: Bytes32,
receipts_root: Bytes32,
logs_bloom: bytevector(BYTES_PER_LOGS_BLOOM),
prev_randao: Bytes32,
block_number: uint64,
gas_limit: uint64,
gas_used: uint64,
timestamp: uint64,
extra_data: bytelist(MAX_EXTRA_DATA_BYTES),
base_fee_per_gas: uint256,
block_hash: Hash32,
transactions_root: Root,
withdrawals_root: Root, // [New in Capella]
blob_gas_used: uint64, // [New in Deneb:EIP4844]
excess_blob_gas: uint64, // [New in Deneb:EIP4844]
deposit_requests_root: Root, // [New in Electra:EIP6110]
withdrawal_requests_root: Root, // [New in Electra:EIP7002:EIP7251]
consolidation_requests_root: Root, // [New in Electra:EIP7251]
});
const StableBeaconBlockBody = stableContainer(MAX_BEACON_BLOCK_BODY_FIELDS, {
randao_reveal: BLSSignature,
eth1_data: Eth1Data,
graffiti: Bytes32,
proposer_slashings: list(MAX_PROPOSER_SLASHINGS, ProposerSlashing),
attester_slashings: list(MAX_ATTESTER_SLASHINGS_ELECTRA, StableAttesterSlashing), // [Modified in Electra:EIP7549]
attestations: list(MAX_ATTESTATIONS_ELECTRA, StableAttestation), // [Modified in Electra:EIP7549]
deposits: list(MAX_DEPOSITS, Deposit),
voluntary_exits: list(MAX_VOLUNTARY_EXITS, SignedVoluntaryExit),
sync_aggregate: SyncAggregate,
execution_payload: StableExecutionPayload,
bls_to_execution_changes: list(MAX_BLS_TO_EXECUTION_CHANGES, SignedBLSToExecutionChange),
blob_kzg_commitments: list(MAX_BLOB_COMMITMENTS_PER_BLOCK, KZGCommitment),
execution_requests: StableExecutionRequests,
});
const StableBeaconState = stableContainer(MAX_BEACON_STATE_FIELDS, {
genesis_time: uint64,
genesis_validators_root: Root,
slot: Slot,
fork: Fork,
latest_block_header: BeaconBlockHeader,
block_roots: vector(SLOTS_PER_HISTORICAL_ROOT, Root),
state_roots: vector(SLOTS_PER_HISTORICAL_ROOT, Root),
historical_roots: list(HISTORICAL_ROOTS_LIMIT, Root),
eth1_data: Eth1Data,
eth1_data_votes: list(EPOCHS_PER_ETH1_VOTING_PERIOD * SLOTS_PER_EPOCH, Eth1Data),
eth1_deposit_index: uint64,
validators: list(VALIDATOR_REGISTRY_LIMIT, Validator),
balances: list(VALIDATOR_REGISTRY_LIMIT, Gwei),
randao_mixes: vector(EPOCHS_PER_HISTORICAL_VECTOR, Bytes32),
slashings: vector(EPOCHS_PER_SLASHINGS_VECTOR, Gwei),
previous_epoch_participation: list(VALIDATOR_REGISTRY_LIMIT, ParticipationFlags),
current_epoch_participation: list(VALIDATOR_REGISTRY_LIMIT, ParticipationFlags),
justification_bits: bitvector(JUSTIFICATION_BITS_LENGTH),
previous_justified_checkpoint: Checkpoint,
current_justified_checkpoint: Checkpoint,
finalized_checkpoint: Checkpoint,
inactivity_scores: list(VALIDATOR_REGISTRY_LIMIT, uint64),
current_sync_committee: SyncCommittee,
next_sync_committee: SyncCommittee,
latest_execution_payload_header: StableExecutionPayloadHeader,
next_withdrawal_index: WithdrawalIndex,
next_withdrawal_validator_index: ValidatorIndex,
historical_summaries: list(HISTORICAL_ROOTS_LIMIT, HistoricalSummary),
deposit_requests_start_index: uint64, // [New in Electra:EIP6110]
deposit_balance_to_consume: Gwei, // [New in Electra:EIP7251]
exit_balance_to_consume: Gwei, // [New in Electra:EIP7251]
earliest_exit_epoch: Epoch, // [New in Electra:EIP7251]
consolidation_balance_to_consume: Gwei, // [New in Electra:EIP7251]
earliest_consolidation_epoch: Epoch, // [New in Electra:EIP7251]
pending_balance_deposits: list(PENDING_BALANCE_DEPOSITS_LIMIT, PendingBalanceDeposit), // [New in Electra:EIP7251]
pending_partial_withdrawals: list(PENDING_PARTIAL_WITHDRAWALS_LIMIT, PendingPartialWithdrawal), // [New in Electra:EIP7251]
pending_consolidations: list(PENDING_CONSOLIDATIONS_LIMIT, PendingConsolidation), // [New in Electra:EIP7251]
});
export const ETH2_CONSENSUS = {
StableAttestation,
StableIndexedAttestation,
StableAttesterSlashing,
StableExecutionPayload,
StableExecutionRequests,
StableExecutionPayloadHeader,
StableBeaconBlockBody,
StableBeaconState,
};
// Tests (electra profiles): https://github.com/ethereum/consensus-specs/pull/3844#issuecomment-2239285376
// NOTE: these differ from EIP-7688 for various reasons, but since nothing is merged/finalized on the eth side,
// we are just trying to pass these tests for now.
const IndexedAttestationElectra = profile(
StableIndexedAttestation,
[],
['attesting_indices', 'data', 'signature']
);
const AttesterSlashingElectra = container({
attestation_1: IndexedAttestationElectra,
attestation_2: IndexedAttestationElectra,
});
const ExecutionPayloadHeaderElectra = profile(
StableExecutionPayloadHeader,
[],
[
'parent_hash',
'fee_recipient',
'state_root',
'receipts_root',
'logs_bloom',
'prev_randao',
'block_number',
'gas_limit',
'gas_used',
'timestamp',
'extra_data',
'base_fee_per_gas',
'block_hash',
'transactions_root',
'withdrawals_root',
'blob_gas_used',
'excess_blob_gas',
]
);
const ExecutionRequests = profile(
StableExecutionRequests,
[],
['deposits', 'withdrawals', 'consolidations']
);
const AttestationElectra = profile(
StableAttestation,
[],
['aggregation_bits', 'data', 'signature', 'committee_bits']
);
const ExecutionPayloadElectra = profile(
StableExecutionPayload,
[],
[
'parent_hash',
'fee_recipient',
'state_root',
'receipts_root',
'logs_bloom',
'prev_randao',
'block_number',
'gas_limit',
'gas_used',
'timestamp',
'extra_data',
'base_fee_per_gas',
'block_hash',
'transactions',
'withdrawals',
'blob_gas_used',
'excess_blob_gas',
]
);
export const ETH2_PROFILES = {
electra: {
Attestation: AttestationElectra,
AttesterSlashing: AttesterSlashingElectra,
IndexedAttestation: IndexedAttestationElectra,
ExecutionRequests,
ExecutionPayloadHeader: ExecutionPayloadHeaderElectra,
ExecutionPayload: ExecutionPayloadElectra,
BeaconBlockBody: profile(
StableBeaconBlockBody,
[],
[
'randao_reveal',
'eth1_data',
'graffiti',
'proposer_slashings',
'attester_slashings',
'attestations',
'deposits',
'voluntary_exits',
'sync_aggregate',
'execution_payload',
'bls_to_execution_changes',
'blob_kzg_commitments',
'execution_requests',
],
{
attester_slashings: list(MAX_ATTESTER_SLASHINGS_ELECTRA, AttesterSlashingElectra),
attestations: list(MAX_ATTESTATIONS_ELECTRA, AttestationElectra),
execution_payload: ExecutionPayloadElectra,
execution_requests: ExecutionRequests,
}
),
BeaconState: profile(
StableBeaconState,
[],
[
'genesis_time',
'genesis_validators_root',
'slot',
'fork',
'latest_block_header',
'block_roots',
'state_roots',
'historical_roots',
'eth1_data',
'eth1_data_votes',
'eth1_deposit_index',
'validators',
'balances',
'randao_mixes',
'slashings',
'previous_epoch_participation',
'current_epoch_participation',
'justification_bits',
'previous_justified_checkpoint',
'current_justified_checkpoint',
'finalized_checkpoint',
'inactivity_scores',
'current_sync_committee',
'next_sync_committee',
'latest_execution_payload_header',
'next_withdrawal_index',
'next_withdrawal_validator_index',
'historical_summaries',
'deposit_requests_start_index',
'deposit_balance_to_consume',
'exit_balance_to_consume',
'earliest_exit_epoch',
'consolidation_balance_to_consume',
'earliest_consolidation_epoch',
'pending_balance_deposits',
'pending_partial_withdrawals',
'pending_consolidations',
],
{
latest_execution_payload_header: ExecutionPayloadHeaderElectra,
}
),
},
};
import * as P from 'micro-packed';
import { addr } from './address.js';
import { RLP } from './rlp.js';
import { isBytes, amounts, ethHex } from './utils.js';
import { isObject, amounts, ethHex, isBytes } from './utils.js';

@@ -67,39 +67,11 @@ // Transaction parsers

const addrCoder = ethHex;
// Parses eip2930 access lists:
// ["0xde0b295669a9fd93d5f28d9ec85e40f4cb697bae", [
// "0x0000000000000000000000000000000000000000000000000000000000000003",
// "0x0000000000000000000000000000000000000000000000000000000000000007"
// ]]
export type AccessList = [string, string[]][];
export type BytesAccessList = [Uint8Array, Uint8Array[]][];
function accessListParser<T, K>(coder: (a: T) => K, mapper: (a: T) => K) {
return (data: [T, T[]][]) => {
if (!Array.isArray(data)) throw new Error('access list must be an array');
return data.map((pair) => {
if (!Array.isArray(pair) || pair.length !== 2)
throw new Error('access list must have 2 elements');
return [coder(pair[0]), pair[1].map(mapper)];
});
};
}
// Parses eip4844 blobs:
// ["0x0000000000000000000000000000000000000000000000000000000000000003"...]
function blobParser<T, K>(fn: (item: T) => K) {
return (data: T[]) => {
if (!Array.isArray(data)) throw new Error('blobVersionedHashes must be an array');
return data.map(fn);
};
}
function ensure32<T>(b: any & { length: number }): T {
if (b.length !== 32) throw new Error('slot must be 32 bytes');
// Bytes32: VersionedHash, AccessListKey
function ensure32(b: Uint8Array): Uint8Array {
if (!isBytes(b) || b.length !== 32) throw new Error('expected 32 bytes');
return b;
}
function ensureBlob(hash: Uint8Array): Uint8Array {
if (!isBytes(hash) || hash.length !== 32)
throw new Error('blobVersionedHashes must contain 32-byte Uint8Array-s');
return hash;
}
const Bytes32: P.Coder<Uint8Array, string> = {
encode: (from) => ethHex.encode(ensure32(from)),
decode: (to) => ensure32(ethHex.decode(to)),
};

@@ -159,2 +131,68 @@ type VRS = Partial<{ v: bigint; r: bigint; s: bigint }>;

const U256BE = P.coders.reverse(P.bigint(32, false, false, false));
// Small coder utils
// TODO: seems generic enough for packed? or RLP (seems useful for structured encoding/decoding of RLP stuff)
// Basic array coder
const array = <F, T>(coder: P.Coder<F, T>): P.Coder<F[], T[]> => ({
encode(from: F[]) {
if (!Array.isArray(from)) throw new Error('expected array');
return from.map((i) => coder.encode(i));
},
decode(to: T[]) {
if (!Array.isArray(to)) throw new Error('expected array');
return to.map((i) => coder.decode(i));
},
});
// tuple -> struct
const struct = <
Fields extends Record<string, P.Coder<any, any>>,
FromTuple extends {
[K in keyof Fields]: Fields[K] extends P.Coder<infer F, any> ? F : never;
}[keyof Fields][],
ToObject extends { [K in keyof Fields]: Fields[K] extends P.Coder<any, infer T> ? T : never },
>(
fields: Fields
): P.Coder<FromTuple, ToObject> => ({
encode(from: FromTuple) {
if (!Array.isArray(from)) throw new Error('expected array');
const fNames = Object.keys(fields);
if (from.length !== fNames.length) throw new Error('wrong array length');
return Object.fromEntries(fNames.map((f, i) => [f, fields[f].encode(from[i])])) as ToObject;
},
decode(to: ToObject): FromTuple {
const fNames = Object.keys(fields);
if (!isObject(to)) throw new Error('wrong struct object');
return fNames.map((i) => fields[i].decode(to[i])) as FromTuple;
},
});
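// Usage sketch (illustrative, hypothetical values): maps a decoded RLP tuple to a
// named object and back, e.g. for the authorization coders defined below.
//   const req = struct({ chainId: U256BE, address: addrCoder, nonce: U64BE });
//   req.encode([rawChainId, rawAddr, rawNonce]); // -> { chainId: 1n, address: '0x...', nonce: 0n }
//   req.decode({ chainId: 1n, address: '0x...', nonce: 0n }); // -> raw tuple, ready for RLP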
// U256BE in geth. But it is either 0 or 1. TODO: is this good enough?
const yParityCoder = P.coders.reverse(
P.validate(P.int(1, false, false, false), (elm) => {
assertYParityValid(elm);
return elm;
})
);
type CoderOutput<F> = F extends P.Coder<any, infer T> ? T : never;
const accessListItem = struct({ address: addrCoder, storageKeys: array(Bytes32) });
export type AccessList = CoderOutput<typeof accessListItem>[];
export const authorizationRequest = struct({
chainId: U256BE,
address: addrCoder,
nonce: U64BE,
});
// [chain_id, address, nonce, y_parity, r, s]
const authorizationItem = struct({
chainId: U256BE,
address: addrCoder,
nonce: U64BE,
yParity: yParityCoder,
r: U256BE,
s: U256BE,
});
export type AuthorizationItem = CoderOutput<typeof authorizationItem>;
export type AuthorizationRequest = CoderOutput<typeof authorizationRequest>;
/**

@@ -173,21 +211,10 @@ * Field types, matching geth. Either u64 or u256.

data: ethHex,
accessList: {
decode: accessListParser(addrCoder.decode, (k) => ensure32(ethHex.decode(k))),
encode: accessListParser(addrCoder.encode, (k) => ethHex.encode(ensure32(k))),
} as P.Coder<BytesAccessList, AccessList>,
accessList: array(accessListItem),
maxFeePerBlobGas: U256BE,
blobVersionedHashes: {
decode: blobParser((b) => ensureBlob(ethHex.decode(b))),
encode: blobParser((b) => ethHex.encode(ensureBlob(b))),
} as P.Coder<Uint8Array[], string[]>,
// U256BE in geth. But it is either 0 or 1. TODO: is this good enough?
yParity: P.coders.reverse(
P.validate(P.int(1, false, false, false), (elm) => {
assertYParityValid(elm);
return elm;
})
),
blobVersionedHashes: array(Bytes32),
yParity: yParityCoder,
v: U256BE,
r: U256BE,
s: U256BE,
authorizationList: array(authorizationItem),
};

@@ -228,6 +255,9 @@ type Coders = typeof coders;

> => {
const allFields = reqf.concat(optf);
// Check that all fields have known coders
reqf.concat(optf).forEach((f) => {
allFields.forEach((f) => {
if (!coders.hasOwnProperty(f)) throw new Error(`coder for field ${f} is not defined`);
});
const reqS = struct(Object.fromEntries(reqf.map((i) => [i, coders[i]])));
const allS = struct(Object.fromEntries(allFields.map((i) => [i, coders[i]])));
// e.g. eip1559 txs have valid lengths of 9 or 12 (unsigned / signed)

@@ -242,9 +272,6 @@ const reql = reqf.length;

encodeStream(w, raw: Record<string, any>) {
// @ts-ignore TODO: fix type
const values = reqf.map((f) => coders[f].decode(raw[f]));
// If at least one optional key is present, we add whole optional block
if (optf.some((f) => raw.hasOwnProperty(f)))
// @ts-ignore TODO: fix type
optf.forEach((f) => values.push(coders[f].decode(raw[f])));
RLP.encodeStream(w, values);
const hasOptional = optf.some((f) => raw.hasOwnProperty(f));
const sCoder = hasOptional ? allS : reqS;
RLP.encodeStream(w, sCoder.decode(raw));
},

@@ -257,16 +284,7 @@ decodeStream(r): Record<string, any> {

throw new Error(`txStruct: wrong inner length=${length}`);
const raw = Object.fromEntries(
// @ts-ignore TODO: fix type
reqf.map((f, i) => [f, coders[f].encode(decoded[i])])
);
if (length === optl) {
if (optf.every((_, i) => isEmpty(decoded[optFieldAt(i)])))
throw new Error('all optional fields empty');
const rawSig = Object.fromEntries(
// @ts-ignore TODO: fix type
optf.map((f, i) => [f, coders[f].encode(decoded[optFieldAt(i)])])
);
Object.assign(raw, rawSig); // mutate raw
}
return raw;
const sCoder = length === optl ? allS : reqS;
if (length === optl && optf.every((_, i) => isEmpty(decoded[optFieldAt(i)])))
throw new Error('all optional fields empty');
// @ts-ignore TODO: fix type (there can be null in RLP)
return sCoder.encode(decoded);
},

@@ -277,3 +295,3 @@ });

fcoder.optionalFields = optf;
fcoder.setOfAllFields = new Set(reqf.concat(optf, ['type'] as any));
fcoder.setOfAllFields = new Set(allFields.concat(['type'] as any));
return fcoder;

@@ -326,2 +344,7 @@ };

['yParity', 'r', 's'] as const);
// prettier-ignore
const eip7702 = txStruct([
'chainId', 'nonce', 'maxPriorityFeePerGas', 'maxFeePerGas', 'gasLimit', 'to', 'value', 'data', 'accessList',
'authorizationList'] as const,
['yParity', 'r', 's'] as const);

@@ -333,2 +356,3 @@ export const TxVersions = {

eip4844, // 0x03
eip7702, // 0x04
};

@@ -343,5 +367,10 @@

for (const item of data.data.accessList) {
item[0] = addr.addChecksum(item[0]);
item.address = addr.addChecksum(item.address);
}
}
if (data.type === 'eip7702' && data.data.authorizationList) {
for (const item of data.data.authorizationList) {
item.address = addr.addChecksum(item.address);
}
}
return data;

@@ -437,8 +466,17 @@ },

},
accessList(list: [string, string[]][]) {
accessList(list: AccessList) {
// NOTE: we cannot handle this validation in coder, since it requires chainId to calculate correct checksum
for (const [address, _] of list) {
for (const { address } of list) {
if (!addr.isValid(address)) throw new Error('address checksum does not match');
}
},
authorizationList(list: AuthorizationItem[], opts: ValidationOpts) {
for (const { address, nonce, chainId } of list) {
if (!addr.isValid(address)) throw new Error('address checksum does not match');
// chainId in authorization list can be zero (==allow any chain)
abig(chainId);
if (opts.strict) minmax(chainId, 0n, amounts.maxChainId, '>= 0 and <= 2**32-1');
this.nonce(nonce, opts);
}
},
};

@@ -508,3 +546,3 @@

'maxFeePerGas', 'maxFeePerBlobGas', 'maxPriorityFeePerGas', 'gasPrice', 'gasLimit',
'accessList', 'blobVersionedHashes', 'chainId', 'data', 'type',
'accessList', 'authorizationList', 'blobVersionedHashes', 'chainId', 'data', 'type',
'r', 's', 'yParity', 'v'

@@ -511,0 +549,0 @@ ] as const;

@@ -103,3 +103,5 @@ import { isBytes as _isBytes, hexToBytes as _hexToBytes, bytesToHex } from '@noble/hashes/utils';

export function cloneDeep<T>(obj: T): T {
if (Array.isArray(obj)) {
if (obj instanceof Uint8Array) {
return Uint8Array.from(obj) as T;
} else if (Array.isArray(obj)) {
return obj.map(cloneDeep) as unknown as T;

@@ -106,0 +108,0 @@ } else if (typeof obj === 'bigint') {

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ETH2_TYPES = exports.bytes = exports.bool = exports.bit = exports.byte = exports.bytevector = exports.bytelist = exports.union = exports.bitlist = exports.bitvector = exports.container = exports.list = exports.vector = exports.boolean = exports.uint256 = exports.uint128 = exports.uint64 = exports.uint32 = exports.uint16 = exports.uint8 = void 0;
exports.ETH2_PROFILES = exports.ETH2_CONSENSUS = exports.ETH2_TYPES = exports.bytes = exports.bool = exports.bit = exports.byte = exports.profile = exports.stableContainer = exports.bytevector = exports.bytelist = exports.union = exports.bitlist = exports.bitvector = exports.container = exports.list = exports.vector = exports.boolean = exports.uint256 = exports.uint128 = exports.uint64 = exports.uint32 = exports.uint16 = exports.uint8 = void 0;
const P = require("micro-packed");

@@ -87,3 +87,61 @@ const sha2_1 = require("@noble/hashes/sha2");

};
const basic = (inner, def) => ({
// TODO: improve
const isStableCompat = (a, b) => {
if (a === b)
return true; // fast path
const _a = a;
const _b = b;
if (_a.info && _b.info) {
const aI = _a.info;
const bI = _b.info;
// Bitlist[N] / Bitvector[N] field types are compatible if they share the same capacity N.
const bitTypes = ['bitList', 'bitVector'];
if (bitTypes.includes(aI.type) && bitTypes.includes(bI.type) && aI.N === bI.N)
return true;
// List[T, N] / Vector[T, N] field types are compatible if T is compatible and if they also share the same capacity N.
const listTypes = ['list', 'vector'];
if (listTypes.includes(aI.type) &&
listTypes.includes(bI.type) &&
aI.N === bI.N &&
aI.inner._isStableCompat(bI.inner)) {
return true;
}
// Container / StableContainer[N] field types are compatible if all inner field types are compatible,
// if they also share the same field names in the same order, and for StableContainer[N] if they also
// share the same capacity N.
const contType = ['container', 'stableContainer'];
if (contType.includes(aI.type) && contType.includes(bI.type)) {
// both stable containers, but different capacity
if (aI.N !== undefined && bI.N !== undefined && aI.N !== bI.N)
return false;
const kA = Object.keys(aI.fields);
const kB = Object.keys(bI.fields);
if (kA.length !== kB.length)
return false;
for (let i = 0; i < kA.length; i++) {
const fA = kA[i];
const fB = kB[i];
if (fA !== fB)
return false;
if (!aI.fields[fA]._isStableCompat(bI.fields[fA]))
return false;
}
return true;
}
// Profile[X] field types are compatible with StableContainer types compatible with X, and
// are compatible with Profile[Y] where Y is compatible with X if also all inner field types
// are compatible. Differences solely in optionality do not affect merkleization compatibility.
if (aI.type === 'profile' || bI.type === 'profile') {
if (aI.type === 'profile' && bI.type === 'stableContainer')
return aI.container._isStableCompat(b);
if (aI.type === 'stableContainer' && bI.type === 'profile')
return a._isStableCompat(bI.container);
if (aI.type === 'profile' && bI.type === 'profile')
return aI.container._isStableCompat(bI.container);
}
}
return false;
};
const basic = (type, inner, def) => ({
...inner,

@@ -93,2 +151,6 @@ default: def,

composite: false,
info: { type },
_isStableCompat(other) {
return isStableCompat(this, other);
},
chunks(value) {

@@ -119,9 +181,9 @@ return [this.merkleRoot(value)];

});
exports.uint8 = basic(int(1), 0);
exports.uint16 = basic(int(2), 0);
exports.uint32 = basic(int(4), 0);
exports.uint64 = basic(int(8, false), 0n);
exports.uint128 = basic(int(16, false), 0n);
exports.uint256 = basic(int(32, false), 0n);
exports.boolean = basic(P.bool, false);
exports.uint8 = basic('uint8', int(1), 0);
exports.uint16 = basic('uint16', int(2), 0);
exports.uint32 = basic('uint32', int(4), 0);
exports.uint64 = basic('uint64', int(8, false), 0n);
exports.uint128 = basic('uint128', int(16, false), 0n);
exports.uint256 = basic('uint256', int(32, false), 0n);
exports.boolean = basic('boolean', P.bool, false);
const array = (len, inner) => {

@@ -171,2 +233,6 @@ checkSSZ(inner);

...array(len, inner),
info: { type: 'vector', N: len, inner },
_isStableCompat(other) {
return isStableCompat(this, other);
},
default: new Array(len).fill(inner.default),

@@ -198,2 +264,6 @@ composite: true,

...coder,
info: { type: 'list', N: maxLen, inner },
_isStableCompat(other) {
return isStableCompat(this, other);
},
composite: true,

@@ -213,2 +283,23 @@ chunkCount: !inner.composite ? Math.ceil((maxLen * inner.size) / BYTES_PER_CHUNK) : maxLen,

exports.list = list;
const wrapPointer = (p) => (p.size === undefined ? P.pointer(P.U32LE, p) : p);
const wrapRawPointer = (p) => (p.size === undefined ? P.U32LE : p);
// TODO: improve, unclear how
const fixOffsets = (r, fields, offsetFields, obj, offset) => {
const offsets = [];
for (const f of offsetFields)
offsets.push(obj[f] + offset);
for (let i = 0; i < offsets.length; i++) {
// TODO: how to merge this with array?
const name = offsetFields[i];
const pos = offsets[i];
const next = i + 1 < offsets.length ? offsets[i + 1] : r.totalBytes;
if (next < pos)
throw r.err('SSZ/container: decreasing offset');
const len = next - pos;
if (r.pos !== pos)
throw r.err('SSZ/container: wrong offset');
obj[name] = fields[name].decode(r.bytes(len));
}
return obj;
};
/**

@@ -220,30 +311,15 @@ * Container: Encodes object with multiple fields. P.struct for SSZ.

throw new Error('SSZ/container: no fields');
const ptrCoder = P.struct(Object.fromEntries(Object.entries(fields).map(([k, v]) => [k, v.size === undefined ? P.pointer(P.U32LE, v) : v])));
const fixedCoder = P.struct(Object.fromEntries(Object.entries(fields).map(([k, v]) => [k, v.size === undefined ? P.U32LE : v])));
const ptrCoder = P.struct(Object.fromEntries(Object.entries(fields).map(([k, v]) => [k, wrapPointer(v)])));
const fixedCoder = P.struct(Object.fromEntries(Object.entries(fields).map(([k, v]) => [k, wrapRawPointer(v)])));
const offsetFields = Object.keys(fields).filter((i) => fields[i].size === undefined);
const coder = P.wrap({
encodeStream: ptrCoder.encodeStream,
decodeStream: (r) => {
const fixed = fixedCoder.decodeStream(r);
const offsets = [];
for (const f in fields)
if (fields[f].size === undefined)
offsets.push(fixed[f]);
for (let i = 0; i < offsets.length; i++) {
// TODO: how to merge this with array?
const name = offsetFields[i];
const pos = offsets[i];
const next = i + 1 < offsets.length ? offsets[i + 1] : r.totalBytes;
if (next < pos)
throw r.err('SSZ/container: decreasing offset');
const len = next - pos;
if (r.pos !== pos)
throw r.err('SSZ/container: wrong offset');
fixed[name] = fields[name].decode(r.bytes(len));
}
return fixed;
},
decodeStream: (r) => fixOffsets(r, fields, offsetFields, fixedCoder.decodeStream(r), 0),
});
return {
...coder,
info: { type: 'container', fields },
_isStableCompat(other) {
return isStableCompat(this, other);
},
        size: offsetFields.length ? undefined : fixedCoder.size, // structure is fixed size if all fields are fixed size

@@ -293,2 +369,6 @@ default: Object.fromEntries(Object.entries(fields).map(([k, v]) => [k, v.default])),

...coder,
info: { type: 'bitVector', N: len },
_isStableCompat(other) {
return isStableCompat(this, other);
},
default: new Array(len).fill(false),

@@ -334,2 +414,6 @@ composite: true,

...coder,
info: { type: 'bitList', N: maxLen },
_isStableCompat(other) {
return isStableCompat(this, other);
},
size: undefined,

@@ -372,2 +456,5 @@ default: [],

default: { selector: 0, value: types[0] === null ? null : types[0].default },
_isStableCompat(other) {
return isStableCompat(this, other);
},
composite: true,

@@ -400,2 +487,6 @@ chunks({ selector, value }) {

...coder,
info: { type: 'list', N: maxLen, inner: exports.byte },
_isStableCompat(other) {
return isStableCompat(this, other);
},
default: new Uint8Array([]),

@@ -421,2 +512,6 @@ composite: true,

...P.bytes(len),
info: { type: 'vector', N: len, inner: exports.byte },
_isStableCompat(other) {
return isStableCompat(this, other);
},
default: new Uint8Array(len),

@@ -434,2 +529,196 @@ composite: true,

exports.bytevector = bytevector;
/**
 * Same as container, but all values are optional: a leading bitvector prefix indicates which fields are active
*/
const stableContainer = (N, fields) => {
const fieldsNames = Object.keys(fields);
const fieldsLen = fieldsNames.length;
if (!fieldsLen)
throw new Error('SSZ/stableContainer: no fields');
if (fieldsLen > N)
throw new Error('SSZ/stableContainer: more fields than N');
const bv = (0, exports.bitvector)(N);
const coder = P.wrap({
encodeStream: (w, value) => {
const bsVal = new Array(N).fill(false);
for (let i = 0; i < fieldsLen; i++)
if (value[fieldsNames[i]] !== undefined)
bsVal[i] = true;
bv.encodeStream(w, bsVal);
const activeFields = fieldsNames.filter((_, i) => bsVal[i]);
const ptrCoder = P.struct(Object.fromEntries(activeFields.map((k) => [k, wrapPointer(fields[k])])));
w.bytes(ptrCoder.encode(value));
},
decodeStream: (r) => {
const bsVal = bv.decodeStream(r);
for (let i = fieldsLen; i < bsVal.length; i++) {
if (bsVal[i] !== false)
throw new Error('stableContainer: non-zero padding');
}
const activeFields = fieldsNames.filter((_, i) => bsVal[i]);
const fixedCoder = P.struct(Object.fromEntries(activeFields.map((k) => [k, wrapRawPointer(fields[k])])));
const offsetFields = activeFields.filter((i) => fields[i].size === undefined);
return fixOffsets(r, fields, offsetFields, fixedCoder.decodeStream(r), bv.size);
},
});
return {
...coder,
info: { type: 'stableContainer', N, fields },
size: undefined,
default: Object.fromEntries(Object.entries(fields).map(([k, _v]) => [k, undefined])),
_isStableCompat(other) {
return isStableCompat(this, other);
},
composite: true,
chunkCount: N,
chunks(value) {
const res = Object.entries(fields).map(([k, v]) => value[k] === undefined ? new Uint8Array(32) : v.merkleRoot(value[k]));
while (res.length < N)
res.push(new Uint8Array(32));
return res;
},
merkleRoot(value) {
const bsVal = new Array(N).fill(false);
for (let i = 0; i < fieldsLen; i++)
if (value[fieldsNames[i]] !== undefined)
bsVal[i] = true;
return hash(merkleize(this.chunks(value)), bv.merkleRoot(bsVal));
},
};
};
exports.stableContainer = stableContainer;
/**
* Profile - fixed subset of stableContainer.
 * - fields and their order are exactly the same as in the underlying container
 * - fields may be excluded from or required in a profile (all fields of a stable container are optional)
 * - adding new fields to the underlying container won't change profiles constructed on top of it,
 *   because the full list of optional fields must be provided explicitly.
 * - a field's type can be changed inside a profile (but we should be very explicit about this) to a type of the same shape.
*
* @example
* // class Shape(StableContainer[4]):
* // side: Optional[uint16]
* // color: Optional[uint8]
* // radius: Optional[uint16]
*
* // class Square(Profile[Shape]):
* // side: uint16
* // color: uint8
*
* // class Circle(Profile[Shape]):
* // color: uint8
* // radius: Optional[uint16]
* // ->
* const Shape = SSZ.stableContainer(4, {
* side: SSZ.uint16,
* color: SSZ.uint8,
* radius: SSZ.uint16,
* });
* const Square = profile(Shape, [], ['side', 'color']);
* const Circle = profile(Shape, ['radius'], ['color']);
* const Circle2 = profile(Shape, ['radius'], ['color'], { color: SSZ.byte });
*/
const profile = (c, optFields, requiredFields = [], replaceType = {}) => {
checkSSZ(c);
if (c.info.type !== 'stableContainer')
throw new Error('profile: expected stableContainer');
const containerFields = new Set(Object.keys(c.info.fields));
if (!Array.isArray(optFields))
throw new Error('profile: optional fields should be array');
const optFS = new Set(optFields);
for (const f of optFS) {
if (!containerFields.has(f))
throw new Error(`profile: unexpected optional field ${f}`);
}
if (!Array.isArray(requiredFields))
throw new Error('profile: required fields should be array');
const reqFS = new Set(requiredFields);
for (const f of reqFS) {
if (!containerFields.has(f))
throw new Error(`profile: unexpected required field ${f}`);
if (optFS.has(f))
throw new Error(`profile: field ${f} is declared both as optional and required`);
}
if (!(0, utils_js_1.isObject)(replaceType))
throw new Error('profile: replaceType should be object');
for (const k in replaceType) {
if (!containerFields.has(k))
throw new Error(`profile/replaceType: unexpected field ${k}`);
if (!replaceType[k]._isStableCompat(c.info.fields[k]))
throw new Error(`profile/replaceType: incompatible field ${k}`);
}
    // Order must be the same as in the underlying container
const allFields = Object.keys(c.info.fields).filter((i) => optFS.has(i) || reqFS.has(i));
// bv is omitted if all fields are required!
const fieldCoders = { ...c.info.fields, ...replaceType };
let coder;
if (optFS.size === 0) {
        // All fields are required, so this is just a container, possibly fixed-size
coder = (0, exports.container)(Object.fromEntries(allFields.map((k) => [k, fieldCoders[k]])));
}
else {
// NOTE: we cannot merge this with stable container,
        // because some fields are active and some are not (based on required/non-required)
const bv = (0, exports.bitvector)(optFS.size);
const forFields = (fn) => {
let optPos = 0;
for (const f of allFields) {
const isOpt = optFS.has(f);
fn(f, isOpt ? optPos : undefined);
if (isOpt)
optPos++;
}
};
coder = {
...P.wrap({
encodeStream: (w, value) => {
const bsVal = new Array(optFS.size).fill(false);
const ptrCoder = {};
forFields((f, optPos) => {
const val = value[f];
if (optPos !== undefined && val !== undefined)
bsVal[optPos] = true;
if (optPos === undefined && val === undefined)
throw new Error(`profile.encode: empty required field ${f}`);
if (val !== undefined)
ptrCoder[f] = wrapPointer(fieldCoders[f]);
});
bv.encodeStream(w, bsVal);
w.bytes(P.struct(ptrCoder).encode(value));
},
decodeStream: (r) => {
let bsVal = bv.decodeStream(r);
const fixedCoder = {};
const offsetFields = [];
forFields((f, optPos) => {
if (optPos !== undefined && bsVal[optPos] === false)
return;
if (fieldCoders[f].size === undefined)
offsetFields.push(f);
fixedCoder[f] = wrapRawPointer(fieldCoders[f]);
});
return fixOffsets(r, fieldCoders, offsetFields, P.struct(fixedCoder).decodeStream(r), bv.size);
},
}),
size: undefined,
};
}
return {
...coder,
info: { type: 'profile', container: c },
default: Object.fromEntries(Array.from(reqFS).map((f) => [f, fieldCoders[f].default])),
_isStableCompat(other) {
return isStableCompat(this, other);
},
composite: true,
chunkCount: c.info.N,
chunks(value) {
return c.chunks(value);
},
merkleRoot(value) {
return c.merkleRoot(value);
},
};
};
exports.profile = profile;
// Aliases

@@ -473,2 +762,12 @@ exports.byte = exports.uint8;

const FINALIZED_ROOT_DEPTH = 6;
// Electra
const MAX_COMMITTEES_PER_SLOT = 64;
const PENDING_PARTIAL_WITHDRAWALS_LIMIT = 134217728;
const PENDING_BALANCE_DEPOSITS_LIMIT = 134217728;
const PENDING_CONSOLIDATIONS_LIMIT = 262144;
const MAX_ATTESTER_SLASHINGS_ELECTRA = 1;
const MAX_ATTESTATIONS_ELECTRA = 8;
const MAX_DEPOSIT_REQUESTS_PER_PAYLOAD = 8192;
const MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD = 16;
const MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD = 1;
// We can reduce size if we inline these. But updates for new forks would be hard.

@@ -782,2 +1081,33 @@ const Slot = exports.uint64;

});
// Electra
const DepositRequest = (0, exports.container)({
pubkey: BLSPubkey,
withdrawal_credentials: Bytes32,
amount: Gwei,
signature: BLSSignature,
index: exports.uint64,
});
const WithdrawalRequest = (0, exports.container)({
source_address: ExecutionAddress,
validator_pubkey: BLSPubkey,
amount: Gwei,
});
const ConsolidationRequest = (0, exports.container)({
source_address: ExecutionAddress,
source_pubkey: BLSPubkey,
target_pubkey: BLSPubkey,
});
const PendingBalanceDeposit = (0, exports.container)({
index: ValidatorIndex,
amount: Gwei,
});
const PendingPartialWithdrawal = (0, exports.container)({
index: ValidatorIndex,
amount: Gwei,
withdrawable_epoch: Epoch,
});
const PendingConsolidation = (0, exports.container)({
source_index: ValidatorIndex,
target_index: ValidatorIndex,
});
exports.ETH2_TYPES = {

@@ -856,3 +1186,264 @@ Slot,

LightClientFinalityUpdate,
// Electra
DepositRequest,
WithdrawalRequest,
ConsolidationRequest,
PendingBalanceDeposit,
PendingPartialWithdrawal,
PendingConsolidation,
};
// EIP-7688
const MAX_ATTESTATION_FIELDS = 8;
const MAX_INDEXED_ATTESTATION_FIELDS = 8;
const MAX_EXECUTION_PAYLOAD_FIELDS = 64;
const MAX_BEACON_BLOCK_BODY_FIELDS = 64;
const MAX_BEACON_STATE_FIELDS = 128;
const MAX_EXECUTION_REQUESTS_FIELDS = 16;
const StableAttestation = (0, exports.stableContainer)(MAX_ATTESTATION_FIELDS, {
aggregation_bits: (0, exports.bitlist)(MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT),
data: AttestationData,
signature: BLSSignature,
committee_bits: (0, exports.bitvector)(MAX_COMMITTEES_PER_SLOT),
});
const StableIndexedAttestation = (0, exports.stableContainer)(MAX_INDEXED_ATTESTATION_FIELDS, {
attesting_indices: (0, exports.list)(MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT, ValidatorIndex),
data: AttestationData,
signature: BLSSignature,
});
const StableAttesterSlashing = (0, exports.container)({
attestation_1: StableIndexedAttestation,
attestation_2: StableIndexedAttestation,
});
const StableExecutionRequests = (0, exports.stableContainer)(MAX_EXECUTION_REQUESTS_FIELDS, {
deposits: (0, exports.list)(MAX_DEPOSIT_REQUESTS_PER_PAYLOAD, DepositRequest), // [New in Electra:EIP6110]
withdrawals: (0, exports.list)(MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD, WithdrawalRequest), // [New in Electra:EIP7002:EIP7251]
consolidations: (0, exports.list)(MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD, ConsolidationRequest), // [New in Electra:EIP7251]
});
const StableExecutionPayload = (0, exports.stableContainer)(MAX_EXECUTION_PAYLOAD_FIELDS, {
parent_hash: Hash32,
fee_recipient: ExecutionAddress,
state_root: Bytes32,
receipts_root: Bytes32,
logs_bloom: (0, exports.bytevector)(BYTES_PER_LOGS_BLOOM),
prev_randao: Bytes32,
block_number: exports.uint64,
gas_limit: exports.uint64,
gas_used: exports.uint64,
timestamp: exports.uint64,
extra_data: (0, exports.bytelist)(MAX_EXTRA_DATA_BYTES),
base_fee_per_gas: exports.uint256,
block_hash: Hash32,
transactions: (0, exports.list)(MAX_TRANSACTIONS_PER_PAYLOAD, Transaction),
withdrawals: (0, exports.list)(MAX_WITHDRAWALS_PER_PAYLOAD, Withdrawal), // [New in Capella]
blob_gas_used: exports.uint64,
excess_blob_gas: exports.uint64,
deposit_requests: (0, exports.list)(MAX_DEPOSIT_REQUESTS_PER_PAYLOAD, DepositRequest), // [New in Electra:EIP6110]
withdrawal_requests: (0, exports.list)(MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD, WithdrawalRequest), // [New in Electra:EIP7002:EIP7251]
consolidation_requests: (0, exports.list)(MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD, ConsolidationRequest), // [New in Electra:EIP7251]
});
const StableExecutionPayloadHeader = (0, exports.stableContainer)(MAX_EXECUTION_PAYLOAD_FIELDS, {
parent_hash: Hash32,
fee_recipient: ExecutionAddress,
state_root: Bytes32,
receipts_root: Bytes32,
logs_bloom: (0, exports.bytevector)(BYTES_PER_LOGS_BLOOM),
prev_randao: Bytes32,
block_number: exports.uint64,
gas_limit: exports.uint64,
gas_used: exports.uint64,
timestamp: exports.uint64,
extra_data: (0, exports.bytelist)(MAX_EXTRA_DATA_BYTES),
base_fee_per_gas: exports.uint256,
block_hash: Hash32,
transactions_root: Root,
withdrawals_root: Root, // [New in Capella]
blob_gas_used: exports.uint64, // [New in Deneb:EIP4844]
excess_blob_gas: exports.uint64, // [New in Deneb:EIP4844]
deposit_requests_root: Root, // [New in Electra:EIP6110]
withdrawal_requests_root: Root, // [New in Electra:EIP7002:EIP7251]
consolidation_requests_root: Root, // [New in Electra:EIP7251]
});
const StableBeaconBlockBody = (0, exports.stableContainer)(MAX_BEACON_BLOCK_BODY_FIELDS, {
randao_reveal: BLSSignature,
eth1_data: Eth1Data,
graffiti: Bytes32,
proposer_slashings: (0, exports.list)(MAX_PROPOSER_SLASHINGS, ProposerSlashing),
attester_slashings: (0, exports.list)(MAX_ATTESTER_SLASHINGS_ELECTRA, StableAttesterSlashing), // [Modified in Electra:EIP7549]
attestations: (0, exports.list)(MAX_ATTESTATIONS_ELECTRA, StableAttestation), // [Modified in Electra:EIP7549]
deposits: (0, exports.list)(MAX_DEPOSITS, Deposit),
voluntary_exits: (0, exports.list)(MAX_VOLUNTARY_EXITS, SignedVoluntaryExit),
sync_aggregate: SyncAggregate,
execution_payload: StableExecutionPayload,
bls_to_execution_changes: (0, exports.list)(MAX_BLS_TO_EXECUTION_CHANGES, SignedBLSToExecutionChange),
blob_kzg_commitments: (0, exports.list)(MAX_BLOB_COMMITMENTS_PER_BLOCK, KZGCommitment),
execution_requests: StableExecutionRequests,
});
const StableBeaconState = (0, exports.stableContainer)(MAX_BEACON_STATE_FIELDS, {
genesis_time: exports.uint64,
genesis_validators_root: Root,
slot: Slot,
fork: Fork,
latest_block_header: BeaconBlockHeader,
block_roots: (0, exports.vector)(SLOTS_PER_HISTORICAL_ROOT, Root),
state_roots: (0, exports.vector)(SLOTS_PER_HISTORICAL_ROOT, Root),
historical_roots: (0, exports.list)(HISTORICAL_ROOTS_LIMIT, Root),
eth1_data: Eth1Data,
eth1_data_votes: (0, exports.list)(EPOCHS_PER_ETH1_VOTING_PERIOD * SLOTS_PER_EPOCH, Eth1Data),
eth1_deposit_index: exports.uint64,
validators: (0, exports.list)(VALIDATOR_REGISTRY_LIMIT, Validator),
balances: (0, exports.list)(VALIDATOR_REGISTRY_LIMIT, Gwei),
randao_mixes: (0, exports.vector)(EPOCHS_PER_HISTORICAL_VECTOR, Bytes32),
slashings: (0, exports.vector)(EPOCHS_PER_SLASHINGS_VECTOR, Gwei),
previous_epoch_participation: (0, exports.list)(VALIDATOR_REGISTRY_LIMIT, ParticipationFlags),
current_epoch_participation: (0, exports.list)(VALIDATOR_REGISTRY_LIMIT, ParticipationFlags),
justification_bits: (0, exports.bitvector)(JUSTIFICATION_BITS_LENGTH),
previous_justified_checkpoint: Checkpoint,
current_justified_checkpoint: Checkpoint,
finalized_checkpoint: Checkpoint,
inactivity_scores: (0, exports.list)(VALIDATOR_REGISTRY_LIMIT, exports.uint64),
current_sync_committee: SyncCommittee,
next_sync_committee: SyncCommittee,
latest_execution_payload_header: StableExecutionPayloadHeader,
next_withdrawal_index: WithdrawalIndex,
next_withdrawal_validator_index: ValidatorIndex,
historical_summaries: (0, exports.list)(HISTORICAL_ROOTS_LIMIT, HistoricalSummary),
deposit_requests_start_index: exports.uint64, // [New in Electra:EIP6110]
deposit_balance_to_consume: Gwei, // [New in Electra:EIP7251]
exit_balance_to_consume: Gwei, // [New in Electra:EIP7251]
earliest_exit_epoch: Epoch, // [New in Electra:EIP7251]
consolidation_balance_to_consume: Gwei, // [New in Electra:EIP7251]
earliest_consolidation_epoch: Epoch, // [New in Electra:EIP7251]
pending_balance_deposits: (0, exports.list)(PENDING_BALANCE_DEPOSITS_LIMIT, PendingBalanceDeposit), // [New in Electra:EIP7251]
pending_partial_withdrawals: (0, exports.list)(PENDING_PARTIAL_WITHDRAWALS_LIMIT, PendingPartialWithdrawal), // [New in Electra:EIP7251]
pending_consolidations: (0, exports.list)(PENDING_CONSOLIDATIONS_LIMIT, PendingConsolidation), // [New in Electra:EIP7251]
});
exports.ETH2_CONSENSUS = {
StableAttestation,
StableIndexedAttestation,
StableAttesterSlashing,
StableExecutionPayload,
StableExecutionRequests,
StableExecutionPayloadHeader,
StableBeaconBlockBody,
StableBeaconState,
};
// Tests (electra profiles): https://github.com/ethereum/consensus-specs/pull/3844#issuecomment-2239285376
// NOTE: these differ from EIP-7688 for various reasons, but since nothing is merged/completed on the eth side, we just try
// to pass these tests for now.
const IndexedAttestationElectra = (0, exports.profile)(StableIndexedAttestation, [], ['attesting_indices', 'data', 'signature']);
const AttesterSlashingElectra = (0, exports.container)({
attestation_1: IndexedAttestationElectra,
attestation_2: IndexedAttestationElectra,
});
const ExecutionPayloadHeaderElectra = (0, exports.profile)(StableExecutionPayloadHeader, [], [
'parent_hash',
'fee_recipient',
'state_root',
'receipts_root',
'logs_bloom',
'prev_randao',
'block_number',
'gas_limit',
'gas_used',
'timestamp',
'extra_data',
'base_fee_per_gas',
'block_hash',
'transactions_root',
'withdrawals_root',
'blob_gas_used',
'excess_blob_gas',
]);
const ExecutionRequests = (0, exports.profile)(StableExecutionRequests, [], ['deposits', 'withdrawals', 'consolidations']);
const AttestationElectra = (0, exports.profile)(StableAttestation, [], ['aggregation_bits', 'data', 'signature', 'committee_bits']);
const ExecutionPayloadElectra = (0, exports.profile)(StableExecutionPayload, [], [
'parent_hash',
'fee_recipient',
'state_root',
'receipts_root',
'logs_bloom',
'prev_randao',
'block_number',
'gas_limit',
'gas_used',
'timestamp',
'extra_data',
'base_fee_per_gas',
'block_hash',
'transactions',
'withdrawals',
'blob_gas_used',
'excess_blob_gas',
]);
exports.ETH2_PROFILES = {
electra: {
Attestation: AttestationElectra,
AttesterSlashing: AttesterSlashingElectra,
IndexedAttestation: IndexedAttestationElectra,
ExecutionRequests,
ExecutionPayloadHeader: ExecutionPayloadHeaderElectra,
ExecutionPayload: ExecutionPayloadElectra,
BeaconBlockBody: (0, exports.profile)(StableBeaconBlockBody, [], [
'randao_reveal',
'eth1_data',
'graffiti',
'proposer_slashings',
'attester_slashings',
'attestations',
'deposits',
'voluntary_exits',
'sync_aggregate',
'execution_payload',
'bls_to_execution_changes',
'blob_kzg_commitments',
'execution_requests',
], {
attester_slashings: (0, exports.list)(MAX_ATTESTER_SLASHINGS_ELECTRA, AttesterSlashingElectra),
attestations: (0, exports.list)(MAX_ATTESTATIONS_ELECTRA, AttestationElectra),
execution_payload: ExecutionPayloadElectra,
execution_requests: ExecutionRequests,
}),
BeaconState: (0, exports.profile)(StableBeaconState, [], [
'genesis_time',
'genesis_validators_root',
'slot',
'fork',
'latest_block_header',
'block_roots',
'state_roots',
'historical_roots',
'eth1_data',
'eth1_data_votes',
'eth1_deposit_index',
'validators',
'balances',
'randao_mixes',
'slashings',
'previous_epoch_participation',
'current_epoch_participation',
'justification_bits',
'previous_justified_checkpoint',
'current_justified_checkpoint',
'finalized_checkpoint',
'inactivity_scores',
'current_sync_committee',
'next_sync_committee',
'latest_execution_payload_header',
'next_withdrawal_index',
'next_withdrawal_validator_index',
'historical_summaries',
'deposit_requests_start_index',
'deposit_balance_to_consume',
'exit_balance_to_consume',
'earliest_exit_epoch',
'consolidation_balance_to_consume',
'earliest_consolidation_epoch',
'pending_balance_deposits',
'pending_partial_withdrawals',
'pending_consolidations',
], {
latest_execution_payload_header: ExecutionPayloadHeaderElectra,
}),
},
};
//# sourceMappingURL=ssz.js.map
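// Usage note (sketch): the Electra profiles above act as plain coders, e.g.
// ETH2_PROFILES.electra.Attestation.encode(att) / .decode(bytes). Since `profile`
// delegates chunks/merkleRoot to its stable container, AttestationElectra and
// StableAttestation merkleize a value to the same root — the forward-compatibility
// property stable containers are designed around.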

@@ -11,4 +11,2 @@ import * as P from 'micro-packed';

export type TxCoder<T extends TxType> = P.UnwrapCoder<(typeof TxVersions)[T]>;
export type AccessList = [string, string[]][];
export type BytesAccessList = [Uint8Array, Uint8Array[]][];
type VRS = Partial<{

@@ -26,2 +24,23 @@ v: bigint;

export declare const legacySig: P.Coder<VRS, YRS>;
type CoderOutput<F> = F extends P.Coder<any, infer T> ? T : never;
declare const accessListItem: P.Coder<(Uint8Array | Uint8Array[])[], {
address: string;
storageKeys: string[];
}>;
export type AccessList = CoderOutput<typeof accessListItem>[];
export declare const authorizationRequest: P.Coder<Uint8Array[], {
chainId: bigint;
address: string;
nonce: bigint;
}>;
declare const authorizationItem: P.Coder<Uint8Array[], {
chainId: bigint;
address: string;
nonce: bigint;
yParity: number;
r: bigint;
s: bigint;
}>;
export type AuthorizationItem = CoderOutput<typeof authorizationItem>;
export type AuthorizationRequest = CoderOutput<typeof authorizationRequest>;
/**

@@ -40,3 +59,6 @@ * Field types, matching geth. Either u64 or u256.

data: P.Coder<Uint8Array, string>;
accessList: P.Coder<BytesAccessList, AccessList>;
accessList: P.Coder<(Uint8Array | Uint8Array[])[][], {
address: string;
storageKeys: string[];
}[]>;
maxFeePerBlobGas: P.Coder<Uint8Array, bigint>;

@@ -48,2 +70,10 @@ blobVersionedHashes: P.Coder<Uint8Array[], string[]>;

s: P.Coder<Uint8Array, bigint>;
authorizationList: P.Coder<Uint8Array[][], {
chainId: bigint;
address: string;
nonce: bigint;
yParity: number;
r: bigint;
s: bigint;
}[]>;
};

@@ -86,3 +116,6 @@ type Coders = typeof coders;

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
}, {

@@ -102,3 +135,6 @@ yParity: number;

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
}, {

@@ -118,3 +154,6 @@ yParity: number;

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
maxFeePerBlobGas: bigint;

@@ -127,2 +166,28 @@ blobVersionedHashes: string[];

}>>;
eip7702: FieldCoder<OptFields<{
to: string;
data: string;
nonce: bigint;
value: bigint;
chainId: bigint;
maxPriorityFeePerGas: bigint;
maxFeePerGas: bigint;
gasLimit: bigint;
accessList: {
address: string;
storageKeys: string[];
}[];
authorizationList: {
chainId: bigint;
address: string;
nonce: bigint;
yParity: number;
r: bigint;
s: bigint;
}[];
}, {
yParity: number;
r: bigint;
s: bigint;
}>>;
};

@@ -139,3 +204,6 @@ export declare const RawTx: P.CoderType<VersionType<{

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
}, {

@@ -155,3 +223,6 @@ yParity: number;

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
}, {

@@ -171,3 +242,6 @@ yParity: number;

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
maxFeePerBlobGas: bigint;

@@ -180,2 +254,28 @@ blobVersionedHashes: string[];

}>>;
eip7702: FieldCoder<OptFields<{
to: string;
data: string;
nonce: bigint;
value: bigint;
chainId: bigint;
maxPriorityFeePerGas: bigint;
maxFeePerGas: bigint;
gasLimit: bigint;
accessList: {
address: string;
storageKeys: string[];
}[];
authorizationList: {
chainId: bigint;
address: string;
nonce: bigint;
yParity: number;
r: bigint;
s: bigint;
}[];
}, {
yParity: number;
r: bigint;
s: bigint;
}>>;
}>>;

@@ -224,3 +324,6 @@ /**

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
}, {

@@ -240,3 +343,6 @@ yParity: number;

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
}, {

@@ -256,3 +362,6 @@ yParity: number;

gasLimit: bigint;
accessList: AccessList;
accessList: {
address: string;
storageKeys: string[];
}[];
maxFeePerBlobGas: bigint;

@@ -265,2 +374,28 @@ blobVersionedHashes: string[];

}>>;
eip7702: FieldCoder<OptFields<{
to: string;
data: string;
nonce: bigint;
value: bigint;
chainId: bigint;
maxPriorityFeePerGas: bigint;
maxFeePerGas: bigint;
gasLimit: bigint;
accessList: {
address: string;
storageKeys: string[];
}[];
authorizationList: {
chainId: bigint;
address: string;
nonce: bigint;
yParity: number;
r: bigint;
s: bigint;
}[];
}, {
yParity: number;
r: bigint;
s: bigint;
}>>;
};

@@ -267,0 +402,0 @@ };

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.__tests = exports.AggregatedError = exports.RlpTx = exports.RawTx = exports.TxVersions = exports.legacySig = void 0;
exports.__tests = exports.AggregatedError = exports.RlpTx = exports.RawTx = exports.TxVersions = exports.authorizationRequest = exports.legacySig = void 0;
exports.removeSig = removeSig;

@@ -63,32 +63,12 @@ exports.validateFields = validateFields;

const addrCoder = utils_js_1.ethHex;
function accessListParser(coder, mapper) {
return (data) => {
if (!Array.isArray(data))
throw new Error('access list must be an array');
return data.map((pair) => {
if (!Array.isArray(pair) || pair.length !== 2)
throw new Error('access list must have 2 elements');
return [coder(pair[0]), pair[1].map(mapper)];
});
};
}
// Parses eip4844 blobs:
// ["0x0000000000000000000000000000000000000000000000000000000000000003"...]
function blobParser(fn) {
return (data) => {
if (!Array.isArray(data))
throw new Error('blobVersionedHashes must be an array');
return data.map(fn);
};
}
// Bytes32: VersionedHash, AccessListKey
function ensure32(b) {
if (b.length !== 32)
throw new Error('slot must be 32 bytes');
if (!(0, utils_js_1.isBytes)(b) || b.length !== 32)
throw new Error('expected 32 bytes');
return b;
}
function ensureBlob(hash) {
if (!(0, utils_js_1.isBytes)(hash) || hash.length !== 32)
throw new Error('blobVersionedHashes must contain 32-byte Uint8Array-s');
return hash;
}
const Bytes32 = {
encode: (from) => utils_js_1.ethHex.encode(ensure32(from)),
decode: (to) => ensure32(utils_js_1.ethHex.decode(to)),
};
// Process v as (chainId, yParity) pair. Ethers.js-inspired logic:

@@ -152,2 +132,54 @@ // - v=27/28 -> no chainId (pre eip155)

const U256BE = P.coders.reverse(P.bigint(32, false, false, false));
// Small coder utils
// TODO: seems generic enough for packed? or RLP (seems useful for structured encoding/decoding of RLP stuff)
// Basic array coder
const array = (coder) => ({
encode(from) {
if (!Array.isArray(from))
throw new Error('expected array');
return from.map((i) => coder.encode(i));
},
decode(to) {
if (!Array.isArray(to))
throw new Error('expected array');
return to.map((i) => coder.decode(i));
},
});
// tuple -> struct
const struct = (fields) => ({
encode(from) {
if (!Array.isArray(from))
throw new Error('expected array');
const fNames = Object.keys(fields);
if (from.length !== fNames.length)
throw new Error('wrong array length');
return Object.fromEntries(fNames.map((f, i) => [f, fields[f].encode(from[i])]));
},
decode(to) {
const fNames = Object.keys(fields);
if (!(0, utils_js_1.isObject)(to))
throw new Error('wrong struct object');
return fNames.map((i) => fields[i].decode(to[i]));
},
});
// U256BE in geth. But it is either 0 or 1. TODO: is this good enough?
const yParityCoder = P.coders.reverse(P.validate(P.int(1, false, false, false), (elm) => {
assertYParityValid(elm);
return elm;
}));
const accessListItem = struct({ address: addrCoder, storageKeys: array(Bytes32) });
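// Direction sketch for these coders — encode: raw RLP tuple -> user value,
// decode: user value -> raw tuple (placeholder zero bytes, for illustration):
// const item = accessListItem.encode([new Uint8Array(20), [new Uint8Array(32)]]);
// // -> { address: '0x00…00' (40 hex chars), storageKeys: ['0x00…00' (64 hex chars)] }
// //    (checksumming happens later, in RawTx post-processing below)
// accessListItem.decode(item); // -> back to the nested Uint8Array tuple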
exports.authorizationRequest = struct({
chainId: U256BE,
address: addrCoder,
nonce: U64BE,
});
// [chain_id, address, nonce, y_parity, r, s]
const authorizationItem = struct({
chainId: U256BE,
address: addrCoder,
nonce: U64BE,
yParity: yParityCoder,
r: U256BE,
s: U256BE,
});
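// Shape sketch (placeholder values): object form <-> RLP-ready nested bytes.
// authorizationItem.decode({
//   chainId: 1n, address: '0x' + '11'.repeat(20), nonce: 0n,
//   yParity: 0, r: 1n, s: 1n,
// }); // -> [chain_id, address, nonce, y_parity, r, s] as Uint8Array-s,
//     //    ready for RLP encoding; authorizationRequest is the unsigned prefix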
/**

@@ -166,19 +198,10 @@ * Field types, matching geth. Either u64 or u256.

data: utils_js_1.ethHex,
accessList: {
decode: accessListParser(addrCoder.decode, (k) => ensure32(utils_js_1.ethHex.decode(k))),
encode: accessListParser(addrCoder.encode, (k) => utils_js_1.ethHex.encode(ensure32(k))),
},
accessList: array(accessListItem),
maxFeePerBlobGas: U256BE,
blobVersionedHashes: {
decode: blobParser((b) => ensureBlob(utils_js_1.ethHex.decode(b))),
encode: blobParser((b) => utils_js_1.ethHex.encode(ensureBlob(b))),
},
// U256BE in geth. But it is either 0 or 1. TODO: is this good enough?
yParity: P.coders.reverse(P.validate(P.int(1, false, false, false), (elm) => {
assertYParityValid(elm);
return elm;
})),
blobVersionedHashes: array(Bytes32),
yParity: yParityCoder,
v: U256BE,
r: U256BE,
s: U256BE,
authorizationList: array(authorizationItem),
};

@@ -200,7 +223,10 @@ const signatureFields = new Set(['v', 'yParity', 'r', 's']);

const txStruct = (reqf, optf) => {
const allFields = reqf.concat(optf);
// Check that all fields have known coders
reqf.concat(optf).forEach((f) => {
allFields.forEach((f) => {
if (!coders.hasOwnProperty(f))
throw new Error(`coder for field ${f} is not defined`);
});
const reqS = struct(Object.fromEntries(reqf.map((i) => [i, coders[i]])));
const allS = struct(Object.fromEntries(allFields.map((i) => [i, coders[i]])));
// e.g. eip1559 txs have valid lengths of 9 or 12 (unsigned / signed)

@@ -215,9 +241,6 @@ const reql = reqf.length;

encodeStream(w, raw) {
// @ts-ignore TODO: fix type
const values = reqf.map((f) => coders[f].decode(raw[f]));
// If at least one optional key is present, we add whole optional block
if (optf.some((f) => raw.hasOwnProperty(f)))
// @ts-ignore TODO: fix type
optf.forEach((f) => values.push(coders[f].decode(raw[f])));
rlp_js_1.RLP.encodeStream(w, values);
const hasOptional = optf.some((f) => raw.hasOwnProperty(f));
const sCoder = hasOptional ? allS : reqS;
rlp_js_1.RLP.encodeStream(w, sCoder.decode(raw));
},

@@ -231,14 +254,7 @@ decodeStream(r) {

throw new Error(`txStruct: wrong inner length=${length}`);
const raw = Object.fromEntries(
// @ts-ignore TODO: fix type
reqf.map((f, i) => [f, coders[f].encode(decoded[i])]));
if (length === optl) {
if (optf.every((_, i) => isEmpty(decoded[optFieldAt(i)])))
throw new Error('all optional fields empty');
const rawSig = Object.fromEntries(
// @ts-ignore TODO: fix type
optf.map((f, i) => [f, coders[f].encode(decoded[optFieldAt(i)])]));
Object.assign(raw, rawSig); // mutate raw
}
return raw;
const sCoder = length === optl ? allS : reqS;
if (length === optl && optf.every((_, i) => isEmpty(decoded[optFieldAt(i)])))
throw new Error('all optional fields empty');
// @ts-ignore TODO: fix type (there can be null in RLP)
return sCoder.encode(decoded);
},

@@ -248,3 +264,3 @@ });

fcoder.optionalFields = optf;
fcoder.setOfAllFields = new Set(reqf.concat(optf, ['type']));
fcoder.setOfAllFields = new Set(allFields.concat(['type']));
return fcoder;

@@ -290,2 +306,7 @@ };

], ['yParity', 'r', 's']);
// prettier-ignore
const eip7702 = txStruct([
'chainId', 'nonce', 'maxPriorityFeePerGas', 'maxFeePerGas', 'gasLimit', 'to', 'value', 'data', 'accessList',
'authorizationList'
], ['yParity', 'r', 's']);
exports.TxVersions = {

@@ -296,2 +317,3 @@ legacy, // 0x00 (kinda)

eip4844, // 0x03
eip7702, // 0x04
};
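// Wire-format sketch for the new type, per the field lists above:
//   unsigned: 0x04 || RLP([chainId, nonce, maxPriorityFeePerGas, maxFeePerGas,
//                          gasLimit, to, value, data, accessList, authorizationList])
//   signed:   the same list with the optional [yParity, r, s] block appended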

@@ -305,5 +327,10 @@ exports.RawTx = P.apply(createTxMap(exports.TxVersions), {

for (const item of data.data.accessList) {
item[0] = address_js_1.addr.addChecksum(item[0]);
item.address = address_js_1.addr.addChecksum(item.address);
}
}
if (data.type === 'eip7702' && data.data.authorizationList) {
for (const item of data.data.authorizationList) {
item.address = address_js_1.addr.addChecksum(item.address);
}
}
return data;

@@ -399,3 +426,3 @@ },

// NOTE: we cannot handle this validation in coder, since it requires chainId to calculate correct checksum
for (const [address, _] of list) {
for (const { address } of list) {
if (!address_js_1.addr.isValid(address))

@@ -405,2 +432,13 @@ throw new Error('address checksum does not match');

},
authorizationList(list, opts) {
for (const { address, nonce, chainId } of list) {
if (!address_js_1.addr.isValid(address))
throw new Error('address checksum does not match');
// chainId in authorization list can be zero (==allow any chain)
abig(chainId);
if (opts.strict)
minmax(chainId, 0n, utils_js_1.amounts.maxChainId, '>= 0 and <= 2**32-1');
this.nonce(nonce, opts);
}
},
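// Sketch of an entry this validator accepts in strict mode — a digits-only
// address is its own EIP-55 checksum, and chainId may be 0n ("any chain"):
// { chainId: 0n, address: '0x1111111111111111111111111111111111111111', nonce: 0n }
// (yParity/r/s are carried alongside but are not checked here)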
};

@@ -464,3 +502,3 @@ class AggregatedError extends Error {

'maxFeePerGas', 'maxFeePerBlobGas', 'maxPriorityFeePerGas', 'gasPrice', 'gasLimit',
'accessList', 'blobVersionedHashes', 'chainId', 'data', 'type',
'accessList', 'authorizationList', 'blobVersionedHashes', 'chainId', 'data', 'type',
'r', 's', 'yParity', 'v'

@@ -467,0 +505,0 @@ ];

@@ -90,3 +90,6 @@ "use strict";

function cloneDeep(obj) {
if (Array.isArray(obj)) {
if (obj instanceof Uint8Array) {
return Uint8Array.from(obj);
}
else if (Array.isArray(obj)) {
return obj.map(cloneDeep);

@@ -93,0 +96,0 @@ }
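// Why the new Uint8Array branch matters: Array.isArray(new Uint8Array(2)) === false,
// so typed arrays previously fell through to the generic object path and were not
// cloned as binary data (an inference from the shape of this diff):
// const copy = cloneDeep(new Uint8Array([1, 2]));
// copy instanceof Uint8Array; // true — a fresh copy, not the same reference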
