Socket
Socket
Sign inDemoInstall

@nomicfoundation/ethereumjs-common

Package Overview
Dependencies
42
Maintainers
2
Versions
12
Alerts
File Explorer

Advanced tools

Install Socket

Detect and block malicious and high-risk dependencies

Install

Comparing version 3.1.1 to 3.1.2

4

dist/chains/goerli.json

@@ -75,6 +75,6 @@ {

{
"//_comment": "The forkHash will remain same as mergeForkIdTransition is post merge, terminal block: https://goerli.etherscan.io/block/7382818",
"//_comment": "The forkHash will remain same as mergeForkIdTransition is post merge",
"name": "merge",
"ttd": "10790000",
"block": 7382819,
"block": null,
"forkHash": "0xb8c6299d"

@@ -81,0 +81,0 @@ },

@@ -91,10 +91,9 @@ {

{
"//_comment": "The forkHash will remain same as mergeForkIdTransition is post merge, terminal block: https://etherscan.io/block/15537393",
"name": "mergeForkIdTransition",
"block": null,
"forkHash": null
},
{
"name": "merge",
"ttd": "58750000000000000000000",
"block": 15537394,
"forkHash": "0xf0afd0e3"
},
{
"name": "mergeForkIdTransition",
"block": null,

@@ -101,0 +100,0 @@ "forkHash": null

@@ -77,6 +77,6 @@ {

{
"//_comment": "The forkHash will remain same as mergeForkIdTransition is post merge, terminal block: https://sepolia.etherscan.io/block/1450408",
"//_comment": "The forkHash will remain same as mergeForkIdTransition is post merge",
"name": "merge",
"ttd": "17000000000000000",
"block": 1450409,
"block": null,
"forkHash": "0xfe3366e7"

@@ -92,4 +92,3 @@ },

"block": null,
"timestamp": "1677557088",
"forkHash": "0xf7f9bc08"
"forkHash": null
}

@@ -96,0 +95,0 @@ ],

@@ -6,3 +6,3 @@ /// <reference types="node" />

import type { ConsensusAlgorithm, ConsensusType } from './enums';
import type { BootstrapNodeConfig, CasperConfig, ChainConfig, ChainsConfig, CliqueConfig, CommonOpts, CustomCommonOpts, EthashConfig, GenesisBlockConfig, GethConfigOpts, HardforkConfig } from './types';
import type { BootstrapNodeConfig, CasperConfig, ChainConfig, ChainsConfig, CliqueConfig, CommonOpts, CustomCommonOpts, EthashConfig, GenesisBlockConfig, HardforkConfig } from './types';
import type { BigIntLike } from '@nomicfoundation/ethereumjs-util';

@@ -23,3 +23,2 @@ /**

private _customChains;
private HARDFORK_CHANGES;
/**

@@ -51,9 +50,2 @@ * Creates a {@link Common} object for a custom chain, based on a standard one.

/**
* Static method to load and set common from a geth genesis json
* @param genesisJson json of geth configuration
* @param { chain, eips, genesisHash, hardfork, mergeForkIdPostMerge } to further configure the common instance
* @returns Common
*/
static fromGethGenesis(genesisJson: any, { chain, eips, genesisHash, hardfork, mergeForkIdPostMerge }: GethConfigOpts): Common;
/**
* Static method to determine if a {@link chainId} is supported as a standard chain

@@ -87,7 +79,6 @@ * @param chainId bigint id (`1`) of a standard chain

* @param blockNumber
* @param td : total difficulty of the parent block (for block hf) OR of the chain latest (for chain hf)
* @param timestamp: timestamp in seconds at which block was/is to be minted
* @param td
* @returns The name of the HF
*/
getHardforkByBlockNumber(blockNumber: BigIntLike, td?: BigIntLike, timestamp?: BigIntLike): string;
getHardforkByBlockNumber(blockNumber: BigIntLike, td?: BigIntLike): string;
/**

@@ -103,6 +94,5 @@ * Sets a new hardfork based on the block number or an optional

* @param td
* @param timestamp
* @returns The name of the HF set
*/
setHardforkByBlockNumber(blockNumber: BigIntLike, td?: BigIntLike, timestamp?: BigIntLike): string;
setHardforkByBlockNumber(blockNumber: BigIntLike, td?: BigIntLike): string;
/**

@@ -122,3 +112,3 @@ * Internal helper function, returns the params for the given hardfork for the chain set

*
* If the parameter is present in an EIP, the EIP always takes precedence.
* If the parameter is present in an EIP, the EIP always takes precendence.
* Otherwise the parameter if taken from the latest applied HF with

@@ -157,3 +147,3 @@ * a change on the respective parameter.

*/
paramByBlock(topic: string, name: string, blockNumber: BigIntLike, td?: BigIntLike, timestamp?: BigIntLike): bigint;
paramByBlock(topic: string, name: string, blockNumber: BigIntLike, td?: BigIntLike): bigint;
/**

@@ -202,3 +192,2 @@ * Checks if an EIP is activated by either being included in the EIPs

hardforkBlock(hardfork?: string | Hardfork): bigint | null;
hardforkTimestamp(hardfork?: string | Hardfork): bigint | null;
/**

@@ -221,3 +210,2 @@ * Returns the hardfork change block for eip

* @returns True if blockNumber is HF block
* @deprecated
*/

@@ -228,10 +216,3 @@ isHardforkBlock(blockNumber: BigIntLike, hardfork?: string | Hardfork): boolean;

* @param hardfork Hardfork name, optional if HF set
* @returns Block timestamp, number or null if not available
*/
nextHardforkBlockOrTimestamp(hardfork?: string | Hardfork): bigint | null;
/**
* Returns the change block for the next hardfork after the hardfork provided or set
* @param hardfork Hardfork name, optional if HF set
* @returns Block number or null if not available
* @deprecated
*/

@@ -244,3 +225,2 @@ nextHardforkBlock(hardfork?: string | Hardfork): bigint | null;

* @returns True if blockNumber is HF block
* @deprecated
*/

@@ -268,8 +248,2 @@ isNextHardforkBlock(blockNumber: BigIntLike, hardfork?: string | Hardfork): boolean;

/**
* Sets any missing forkHashes on the passed-in {@link Common} instance
* @param common The {@link Common} to set the forkHashes for
* @param genesisHash The genesis block hash
*/
setForkHashes(genesisHash: Buffer): void;
/**
* Returns the Genesis parameters of the current chain

@@ -343,5 +317,6 @@ * @returns Genesis dictionary

*
* ethash: empty object
* ethash: -
* clique: period, epoch
* casper: empty object
* aura: -
* casper: -
*

@@ -348,0 +323,0 @@ * Note: This value can update along a Hardfork.

@@ -15,3 +15,2 @@ "use strict";

const hardforks_1 = require("./hardforks");
const utils_1 = require("./utils");
/**

@@ -32,7 +31,2 @@ * Common class to access chain and hardfork parameters and to provide

this.DEFAULT_HARDFORK = this._chainParams.defaultHardfork ?? enums_1.Hardfork.Merge;
// Assign hardfork changes in the sequence of the applied hardforks
this.HARDFORK_CHANGES = this.hardforks().map((hf) => [
hf.name,
hardforks_1.hardforks[hf.name],
]);
this._hardfork = this.DEFAULT_HARDFORK;

@@ -105,9 +99,2 @@ if (opts.hardfork !== undefined) {

}
if (chainParamsOrName === enums_1.CustomChain.ArbitrumOne) {
return Common.custom({
name: enums_1.CustomChain.ArbitrumOne,
chainId: 42161,
networkId: 42161,
}, opts);
}
if (chainParamsOrName === enums_1.CustomChain.xDaiChain) {

@@ -142,21 +129,2 @@ return Common.custom({

/**
* Static method to load and set common from a geth genesis json
* @param genesisJson json of geth configuration
* @param { chain, eips, genesisHash, hardfork, mergeForkIdPostMerge } to further configure the common instance
* @returns Common
*/
static fromGethGenesis(genesisJson, { chain, eips, genesisHash, hardfork, mergeForkIdPostMerge }) {
const genesisParams = (0, utils_1.parseGethGenesis)(genesisJson, chain, mergeForkIdPostMerge);
const common = new Common({
chain: genesisParams.name ?? 'custom',
customChains: [genesisParams],
eips,
hardfork: hardfork ?? genesisParams.hardfork,
});
if (genesisHash !== undefined) {
common.setForkHashes(genesisHash);
}
return common;
}
/**
* Static method to determine if a {@link chainId} is supported as a standard chain

@@ -223,3 +191,3 @@ * @param chainId bigint id (`1`) of a standard chain

let existing = false;
for (const hfChanges of this.HARDFORK_CHANGES) {
for (const hfChanges of hardforks_1.hardforks) {
if (hfChanges[0] === hardfork) {

@@ -246,89 +214,53 @@ if (this._hardfork !== hardfork) {

* @param blockNumber
* @param td : total difficulty of the parent block (for block hf) OR of the chain latest (for chain hf)
* @param timestamp: timestamp in seconds at which block was/is to be minted
* @param td
* @returns The name of the HF
*/
getHardforkByBlockNumber(blockNumber, td, timestamp) {
getHardforkByBlockNumber(blockNumber, td) {
blockNumber = (0, ethereumjs_util_1.toType)(blockNumber, ethereumjs_util_1.TypeOutput.BigInt);
td = (0, ethereumjs_util_1.toType)(td, ethereumjs_util_1.TypeOutput.BigInt);
timestamp = (0, ethereumjs_util_1.toType)(timestamp, ethereumjs_util_1.TypeOutput.Number);
// Filter out hardforks with no block number, no ttd or no timestamp (i.e. unapplied hardforks)
const hfs = this.hardforks().filter((hf) => hf.block !== null || (hf.ttd !== null && hf.ttd !== undefined) || hf.timestamp !== undefined);
const mergeIndex = hfs.findIndex((hf) => hf.ttd !== null && hf.ttd !== undefined);
const doubleTTDHF = hfs
.slice(mergeIndex + 1)
.findIndex((hf) => hf.ttd !== null && hf.ttd !== undefined);
if (doubleTTDHF >= 0) {
throw Error(`More than one merge hardforks found with ttd specified`);
}
// Find the first hardfork that has a block number greater than `blockNumber`
// (skips the merge hardfork since it cannot have a block number specified).
// If timestamp is not provided, it also skips timestamps hardforks to continue
// discovering/checking number hardforks.
let hfIndex = hfs.findIndex((hf) => (hf.block !== null && hf.block > blockNumber) ||
(timestamp !== undefined && Number(hf.timestamp) > timestamp));
if (hfIndex === -1) {
// all hardforks apply, set hfIndex to the last one as that's the candidate
hfIndex = hfs.length;
}
else if (hfIndex === 0) {
// cannot have a case where a block number is before all applied hardforks
// since the chain has to start with a hardfork
throw Error('Must have at least one hardfork at block 0');
}
// If timestamp is not provided, we need to rollback to the last hf with block or ttd
if (timestamp === undefined) {
const stepBack = hfs
.slice(0, hfIndex)
.reverse()
.findIndex((hf) => hf.block !== null || hf.ttd !== undefined);
hfIndex = hfIndex - stepBack;
}
// Move hfIndex one back to arrive at candidate hardfork
hfIndex = hfIndex - 1;
// If the timestamp was not provided, we could have skipped timestamp hardforks to look for number
// hardforks. so it will now be needed to rollback
if (hfs[hfIndex].block === null && hfs[hfIndex].timestamp === undefined) {
// We're on the merge hardfork. Let's check the TTD
if (td === undefined || td === null || BigInt(hfs[hfIndex].ttd) > td) {
// Merge ttd greater than current td so we're on hardfork before merge
hfIndex -= 1;
let hardfork = enums_1.Hardfork.Chainstart;
let minTdHF;
let maxTdHF;
let previousHF;
for (const hf of this.hardforks()) {
// Skip comparison for not applied HFs
if (hf.block === null) {
if (td !== undefined && td !== null && hf.ttd !== undefined && hf.ttd !== null) {
if (td >= BigInt(hf.ttd)) {
return hf.name;
}
}
continue;
}
}
else {
if (mergeIndex >= 0 && td !== undefined && td !== null) {
if (hfIndex >= mergeIndex && BigInt(hfs[mergeIndex].ttd) > td) {
throw Error('Maximum HF determined by total difficulty is lower than the block number HF');
if (blockNumber >= BigInt(hf.block)) {
hardfork = hf.name;
}
if (td && (typeof hf.ttd === 'string' || typeof hf.ttd === 'bigint')) {
if (td >= BigInt(hf.ttd)) {
minTdHF = hf.name;
}
else if (hfIndex < mergeIndex && BigInt(hfs[mergeIndex].ttd) <= td) {
throw Error('HF determined by block number is lower than the minimum total difficulty HF');
else {
maxTdHF = previousHF;
}
}
previousHF = hf.name;
}
const hfStartIndex = hfIndex;
// Move the hfIndex to the end of the hardforks that might be scheduled on the same block/timestamp
// This won't anyway be the case with Merge hfs
for (; hfIndex < hfs.length - 1; hfIndex++) {
// break out if hfIndex + 1 is not scheduled at hfIndex
if (hfs[hfIndex].block !== hfs[hfIndex + 1].block ||
hfs[hfIndex].timestamp !== hfs[hfIndex + 1].timestamp) {
break;
if (td) {
let msgAdd = `block number: ${blockNumber} (-> ${hardfork}), `;
if (minTdHF !== undefined) {
if (!this.hardforkGteHardfork(hardfork, minTdHF)) {
const msg = 'HF determined by block number is lower than the minimum total difficulty HF';
msgAdd += `total difficulty: ${td} (-> ${minTdHF})`;
throw new Error(`${msg}: ${msgAdd}`);
}
}
}
if (timestamp) {
const minTimeStamp = hfs
.slice(0, hfStartIndex)
.reduce((acc, hf) => Math.max(Number(hf.timestamp ?? '0'), acc), 0);
if (minTimeStamp > timestamp) {
throw Error(`Maximum HF determined by timestamp is lower than the block number/ttd HF`);
if (maxTdHF !== undefined) {
if (!this.hardforkGteHardfork(maxTdHF, hardfork)) {
const msg = 'Maximum HF determined by total difficulty is lower than the block number HF';
msgAdd += `total difficulty: ${td} (-> ${maxTdHF})`;
throw new Error(`${msg}: ${msgAdd}`);
}
}
const maxTimeStamp = hfs
.slice(hfIndex + 1)
.reduce((acc, hf) => Math.min(Number(hf.timestamp ?? timestamp), acc), timestamp);
if (maxTimeStamp < timestamp) {
throw Error(`Maximum HF determined by block number/ttd is lower than timestamp HF`);
}
}
const hardfork = hfs[hfIndex];
return hardfork.name;
return hardfork;
}

@@ -345,7 +277,6 @@ /**

* @param td
* @param timestamp
* @returns The name of the HF set
*/
setHardforkByBlockNumber(blockNumber, td, timestamp) {
const hardfork = this.getHardforkByBlockNumber(blockNumber, td, timestamp);
setHardforkByBlockNumber(blockNumber, td) {
const hardfork = this.getHardforkByBlockNumber(blockNumber, td);
this.setHardfork(hardfork);

@@ -393,3 +324,3 @@ return hardfork;

*
* If the parameter is present in an EIP, the EIP always takes precedence.
* If the parameter is present in an EIP, the EIP always takes precendence.
* Otherwise the parameter if taken from the latest applied HF with

@@ -422,3 +353,3 @@ * a change on the respective parameter.

let value = null;
for (const hfChanges of this.HARDFORK_CHANGES) {
for (const hfChanges of hardforks_1.hardforks) {
// EIP-referencing HF file (e.g. berlin.json)

@@ -476,4 +407,4 @@ if ('eips' in hfChanges[1]) {

*/
paramByBlock(topic, name, blockNumber, td, timestamp) {
const hardfork = this.getHardforkByBlockNumber(blockNumber, td, timestamp);
paramByBlock(topic, name, blockNumber, td) {
const hardfork = this.getHardforkByBlockNumber(blockNumber, td);
return this.paramByHardfork(topic, name, hardfork);

@@ -494,3 +425,3 @@ }

}
for (const hfChanges of this.HARDFORK_CHANGES) {
for (const hfChanges of hardforks_1.hardforks) {
const hf = hfChanges[1];

@@ -570,10 +501,2 @@ if (this.gteHardfork(hf['name']) && 'eips' in hf) {

}
hardforkTimestamp(hardfork) {
hardfork = hardfork ?? this._hardfork;
const timestamp = this._getHardfork(hardfork)?.['timestamp'];
if (timestamp === undefined || timestamp === null) {
return null;
}
return BigInt(timestamp);
}
/**

@@ -585,3 +508,3 @@ * Returns the hardfork change block for eip

eipBlock(eip) {
for (const hfChanges of this.HARDFORK_CHANGES) {
for (const hfChanges of hardforks_1.hardforks) {
const hf = hfChanges[1];

@@ -615,3 +538,2 @@ if ('eips' in hf) {

* @returns True if blockNumber is HF block
* @deprecated
*/

@@ -627,60 +549,7 @@ isHardforkBlock(blockNumber, hardfork) {

* @param hardfork Hardfork name, optional if HF set
* @returns Block timestamp, number or null if not available
*/
nextHardforkBlockOrTimestamp(hardfork) {
hardfork = hardfork ?? this._hardfork;
const hfs = this.hardforks();
let hfIndex = hfs.findIndex((hf) => hf.name === hardfork);
// If the current hardfork is merge, go one behind as merge hf is not part of these
// calcs even if the merge hf block is set
if (hardfork === enums_1.Hardfork.Merge) {
hfIndex -= 1;
}
// Hardfork not found
if (hfIndex < 0) {
return null;
}
let currHfTimeOrBlock = hfs[hfIndex].timestamp ?? hfs[hfIndex].block;
currHfTimeOrBlock =
currHfTimeOrBlock !== null && currHfTimeOrBlock !== undefined
? Number(currHfTimeOrBlock)
: null;
const nextHf = hfs.slice(hfIndex + 1).find((hf) => {
let hfTimeOrBlock = hf.timestamp ?? hf.block;
hfTimeOrBlock =
hfTimeOrBlock !== null && hfTimeOrBlock !== undefined ? Number(hfTimeOrBlock) : null;
return (hf.name !== enums_1.Hardfork.Merge &&
hfTimeOrBlock !== null &&
hfTimeOrBlock !== undefined &&
hfTimeOrBlock !== currHfTimeOrBlock);
});
// If no next hf found with valid block or timestamp return null
if (nextHf === undefined) {
return null;
}
const nextHfBlock = nextHf.timestamp ?? nextHf.block;
if (nextHfBlock === null || nextHfBlock === undefined) {
return null;
}
return BigInt(nextHfBlock);
}
/**
* Returns the change block for the next hardfork after the hardfork provided or set
* @param hardfork Hardfork name, optional if HF set
* @returns Block number or null if not available
* @deprecated
*/
nextHardforkBlock(hardfork) {
hardfork = hardfork ?? this._hardfork;
let hfBlock = this.hardforkBlock(hardfork);
// If this is a merge hardfork with block not set, then we fallback to previous hardfork
// to find the nextHardforkBlock
if (hfBlock === null && hardfork === enums_1.Hardfork.Merge) {
const hfs = this.hardforks();
const mergeIndex = hfs.findIndex((hf) => hf.ttd !== null && hf.ttd !== undefined);
if (mergeIndex < 0) {
throw Error(`Merge hardfork should have been found`);
}
hfBlock = this.hardforkBlock(hfs[mergeIndex - 1].name);
}
const hfBlock = this.hardforkBlock(hardfork);
if (hfBlock === null) {

@@ -694,5 +563,3 @@ return null;

const nextHfBlock = this.hardforks().reduce((acc, hf) => {
// We need to ignore the merge block in our next hardfork calc
const block = BigInt(hf.block === null || (hf.ttd !== undefined && hf.ttd !== null) ? 0 : hf.block);
// Typescript can't seem to follow that the hfBlock is not null at this point
const block = BigInt(typeof hf.block !== 'number' ? 0 : hf.block);
return block > hfBlock && acc === null ? block : acc;

@@ -707,3 +574,2 @@ }, null);

* @returns True if blockNumber is HF block
* @deprecated
*/

@@ -724,22 +590,16 @@ isNextHardforkBlock(blockNumber, hardfork) {

let hfBuffer = Buffer.alloc(0);
let prevBlockOrTime = 0;
let prevBlock = 0;
for (const hf of this.hardforks()) {
const { block, timestamp, name } = hf;
// Timestamp to be used for timestamp based hfs even if we may bundle
// block number with them retrospectively
let blockOrTime = timestamp ?? block;
blockOrTime = blockOrTime !== null ? Number(blockOrTime) : null;
const block = hf.block;
// Skip for chainstart (0), not applied HFs (null) and
// when already applied on same blockOrTime HFs
// and on the merge since forkhash doesn't change on merge hf
if (typeof blockOrTime === 'number' &&
blockOrTime !== 0 &&
blockOrTime !== prevBlockOrTime &&
name !== enums_1.Hardfork.Merge) {
const hfBlockBuffer = Buffer.from(blockOrTime.toString(16).padStart(16, '0'), 'hex');
// when already applied on same block number HFs
if (typeof block === 'number' && block !== 0 && block !== prevBlock) {
const hfBlockBuffer = Buffer.from(block.toString(16).padStart(16, '0'), 'hex');
hfBuffer = Buffer.concat([hfBuffer, hfBlockBuffer]);
prevBlockOrTime = blockOrTime;
}
if (hf.name === hardfork)
break;
if (typeof block === 'number') {
prevBlock = block;
}
}

@@ -760,4 +620,3 @@ const inputBuffer = Buffer.concat([genesisHash, hfBuffer]);

const data = this._getHardfork(hardfork);
if (data === null ||
(data?.block === null && data?.timestamp === undefined && data?.ttd === undefined)) {
if (data === null || (data?.block === null && data?.ttd === undefined)) {
const msg = 'No fork hash calculation possible for future hardfork';

@@ -785,16 +644,2 @@ throw new Error(msg);

/**
* Sets any missing forkHashes on the passed-in {@link Common} instance
* @param common The {@link Common} to set the forkHashes for
* @param genesisHash The genesis block hash
*/
setForkHashes(genesisHash) {
for (const hf of this.hardforks()) {
const blockOrTime = hf.timestamp ?? hf.block;
if ((hf.forkHash === null || hf.forkHash === undefined) &&
((blockOrTime !== null && blockOrTime !== undefined) || typeof hf.ttd !== 'undefined')) {
hf.forkHash = this.forkHash(hf.name, genesisHash);
}
}
}
/**
* Returns the Genesis parameters of the current chain

@@ -871,3 +716,3 @@ * @returns Genesis dictionary

let value;
for (const hfChanges of this.HARDFORK_CHANGES) {
for (const hfChanges of hardforks_1.hardforks) {
if ('consensus' in hfChanges[1]) {

@@ -893,3 +738,3 @@ value = hfChanges[1]['consensus']['type'];

let value;
for (const hfChanges of this.HARDFORK_CHANGES) {
for (const hfChanges of hardforks_1.hardforks) {
if ('consensus' in hfChanges[1]) {

@@ -910,5 +755,6 @@ value = hfChanges[1]['consensus']['algorithm'];

*
* ethash: empty object
* ethash: -
* clique: period, epoch
* casper: empty object
* aura: -
* casper: -
*

@@ -920,3 +766,3 @@ * Note: This value can update along a Hardfork.

let value;
for (const hfChanges of this.HARDFORK_CHANGES) {
for (const hfChanges of hardforks_1.hardforks) {
if ('consensus' in hfChanges[1]) {

@@ -929,3 +775,3 @@ // The config parameter is named after the respective consensus algorithm

}
return (value ?? this._chainParams['consensus'][this.consensusAlgorithm()] ?? {});
return value ?? this._chainParams['consensus'][this.consensusAlgorithm()];
}

@@ -932,0 +778,0 @@ /**

@@ -6,3 +6,3 @@ {

"url": "https://eips.ethereum.org/EIPS/eip-3675",
"status": "Final",
"status": "Review",
"minimumHardfork": "london",

@@ -9,0 +9,0 @@ "requiredEIPs": [],

@@ -27,6 +27,4 @@ "use strict";

4399: require('./4399.json'),
4844: require('./4844.json'),
4895: require('./4895.json'),
5133: require('./5133.json'),
};
//# sourceMappingURL=index.js.map

@@ -25,4 +25,3 @@ export declare enum Chain {

Merge = "merge",
Shanghai = "shanghai",
ShardingForkDev = "shardingFork"
Shanghai = "shanghai"
}

@@ -59,8 +58,2 @@ export declare enum ConsensusType {

/**
* Arbitrum One - mainnet for Arbitrum roll-up
*
* - [Documentation](https://developer.offchainlabs.com/public-chains)
*/
ArbitrumOne = "arbitrum-one",
/**
* xDai EVM sidechain with a native stable token

@@ -67,0 +60,0 @@ *

@@ -31,3 +31,2 @@ "use strict";

Hardfork["Shanghai"] = "shanghai";
Hardfork["ShardingForkDev"] = "shardingFork";
})(Hardfork = exports.Hardfork || (exports.Hardfork = {}));

@@ -67,8 +66,2 @@ var ConsensusType;

/**
* Arbitrum One - mainnet for Arbitrum roll-up
*
* - [Documentation](https://developer.offchainlabs.com/public-chains)
*/
CustomChain["ArbitrumOne"] = "arbitrum-one";
/**
* xDai EVM sidechain with a native stable token

@@ -75,0 +68,0 @@ *

@@ -1,21 +0,2 @@

export declare const hardforks: {
chainstart: any;
homestead: any;
dao: any;
tangerineWhistle: any;
spuriousDragon: any;
byzantium: any;
constantinople: any;
petersburg: any;
istanbul: any;
muirGlacier: any;
berlin: any;
london: any;
shanghai: any;
arrowGlacier: any;
grayGlacier: any;
mergeForkIdTransition: any;
merge: any;
shardingFork: any;
};
export declare const hardforks: any[][];
//# sourceMappingURL=index.d.ts.map
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.hardforks = void 0;
exports.hardforks = {
chainstart: require('./chainstart.json'),
homestead: require('./homestead.json'),
dao: require('./dao.json'),
tangerineWhistle: require('./tangerineWhistle.json'),
spuriousDragon: require('./spuriousDragon.json'),
byzantium: require('./byzantium.json'),
constantinople: require('./constantinople.json'),
petersburg: require('./petersburg.json'),
istanbul: require('./istanbul.json'),
muirGlacier: require('./muirGlacier.json'),
berlin: require('./berlin.json'),
london: require('./london.json'),
shanghai: require('./shanghai.json'),
arrowGlacier: require('./arrowGlacier.json'),
grayGlacier: require('./grayGlacier.json'),
mergeForkIdTransition: require('./mergeForkIdTransition.json'),
merge: require('./merge.json'),
shardingFork: require('./sharding.json'),
};
exports.hardforks = [
['chainstart', require('./chainstart.json')],
['homestead', require('./homestead.json')],
['dao', require('./dao.json')],
['tangerineWhistle', require('./tangerineWhistle.json')],
['spuriousDragon', require('./spuriousDragon.json')],
['byzantium', require('./byzantium.json')],
['constantinople', require('./constantinople.json')],
['petersburg', require('./petersburg.json')],
['istanbul', require('./istanbul.json')],
['muirGlacier', require('./muirGlacier.json')],
['berlin', require('./berlin.json')],
['london', require('./london.json')],
['shanghai', require('./shanghai.json')],
['arrowGlacier', require('./arrowGlacier.json')],
['grayGlacier', require('./grayGlacier.json')],
['mergeForkIdTransition', require('./mergeForkIdTransition.json')],
['merge', require('./merge.json')],
];
//# sourceMappingURL=index.js.map

@@ -5,3 +5,3 @@ {

"url": "https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/merge.md",
"status": "Final",
"status": "Draft",
"consensus": {

@@ -8,0 +8,0 @@ "type": "pos",

{
"name": "shanghai",
"comment": "Next feature hardfork after the merge hardfork having withdrawals, warm coinbase, push0, limit/meter initcode",
"url": "https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/shanghai.md",
"status": "Final",
"eips": [3651, 3855, 3860, 4895]
"comment": "Next feature hardfork after the merge hardfork",
"url": "https://github.com/ethereum/pm/issues/356",
"status": "Pre-Draft",
"eips": []
}
export * from './common';
export * from './enums';
export * from './types';
export * from './utils';
//# sourceMappingURL=index.d.ts.map

@@ -20,3 +20,2 @@ "use strict";

__exportStar(require("./types"), exports);
__exportStar(require("./utils"), exports);
//# sourceMappingURL=index.js.map

@@ -1,2 +0,1 @@

/// <reference types="node" />
import type { Chain, ConsensusAlgorithm, ConsensusType, Hardfork } from './enums';

@@ -19,5 +18,5 @@ export interface ChainName {

networkId: number | bigint;
defaultHardfork?: string;
comment?: string;
url?: string;
defaultHardfork: string;
comment: string;
url: string;
genesis: GenesisBlockConfig;

@@ -47,3 +46,2 @@ hardforks: HardforkConfig[];

ttd?: bigint | string;
timestamp?: number | string;
forkHash?: string | null;

@@ -110,8 +108,3 @@ }

}
export interface GethConfigOpts extends BaseOpts {
chain?: string;
genesisHash?: Buffer;
mergeForkIdPostMerge?: boolean;
}
export {};
//# sourceMappingURL=types.d.ts.map
{
"name": "@nomicfoundation/ethereumjs-common",
"version": "3.1.1",
"version": "3.1.2",
"description": "Resources common to all Ethereum implementations",

@@ -46,9 +46,9 @@ "keywords": [

"test:browser": "karma start karma.conf.js",
"test:node": "npm run tape -- ./test/*.spec.ts",
"test:node": "npm run tape -- ./tests/*.spec.ts",
"tsc": "../../config/cli/ts-compile.sh"
},
"dependencies": {
"@nomicfoundation/ethereumjs-util": "8.0.5",
"@nomicfoundation/ethereumjs-util": "8.0.6",
"crc-32": "^1.2.0"
}
}

@@ -130,3 +130,3 @@ # @ethereumjs/common

To get an overview of the different parameters have a look at one of the chain-specific
To get an overview of the different parameters have a look at one of the chain-specifc
files like `mainnet.json` in the `chains` directory, or to the `Chain` type in [./src/types.ts](./src/types.ts).

@@ -199,18 +199,2 @@

#### Initialize using Geth's genesis json
For lots of custom chains (for e.g. devnets and testnets), you might come across a genesis json config which
has both config specification for the chain as well as the genesis state specification. You can derive the
common from such configuration in the following manner:
```typescript
import { Common } from '@ethereumjs/common'
// Load geth genesis json file into lets say `genesisJson` and optional `chain` and `genesisHash`
const common = Common.fromGethGenesis(genesisJson, { chain: 'customChain', genesisHash })
// If you don't have `genesisHash` while initiating common, you can later configure common (for e.g.
// post calculating it via `blockchain`)
common.setForkHashes(genesisHash)
```
### Hardforks

@@ -244,7 +228,6 @@

- `merge` (`Hardfork.Merge`) (`DEFAULT_HARDFORK`) (since `v2.5.0`)
- `shanghai` (`Hardfork.Shanghai`) (since `v3.1.0`)
### Future Hardforks
The next upcoming HF `Hardfork.Cancun` is currently not yet supported by this library.
The next upcoming HF `Hardfork.Shanghai` is currently not yet supported by this library.

@@ -260,3 +243,2 @@ ### Parameter Access

- `pow`
- `sharding`

@@ -287,3 +269,3 @@ See one of the hardfork files like `byzantium.json` in the `hardforks` directory

- [EIP-2929](https://eips.ethereum.org/EIPS/eip-2929): gas cost increases for state access opcodes
- [EIP-2930](https://eips.ethereum.org/EIPS/eip-2930): Optional access list tx type
- [EIP-2930](https://eips.ethereum.org/EIPS/eip-2930): Optional accesss list tx type
- [EIP-3198](https://eips.ethereum.org/EIPS/eip-3198): Base fee Opcode

@@ -295,10 +277,8 @@ - [EIP-3529](https://eips.ethereum.org/EIPS/eip-3529): Reduction in refunds

- [EIP-3607](https://eips.ethereum.org/EIPS/eip-3607): Reject transactions from senders with deployed code
- [EIP-3651](https://eips.ethereum.org/EIPS/eip-3651): Warm COINBASE (Shanghai)
- [EIP-3670](https://eips.ethereum.org/EIPS/eip-3670): EOF - Code Validation (`experimental`)
- [EIP-3675](https://eips.ethereum.org/EIPS/eip-3675): Upgrade consensus to Proof-of-Stake
- [EIP-3855](https://eips.ethereum.org/EIPS/eip-3855): Push0 opcode (Shanghai)
- [EIP-3860](https://eips.ethereum.org/EIPS/eip-3860): Limit and meter initcode (Shanghai)
- [EIP-3675](https://eips.ethereum.org/EIPS/eip-3675): Upgrade consensus to Proof-of-Stake (`experimental`)
- [EIP-3855](https://eips.ethereum.org/EIPS/eip-3855): Push0 opcode (`v2.6.1`+)
- [EIP-3860](https://eips.ethereum.org/EIPS/eip-3855): Limit and meter initcode (`experimental`)
- [EIP-4345](https://eips.ethereum.org/EIPS/eip-4345): Difficulty Bomb Delay to June 2022
- [EIP-4399](https://eips.ethereum.org/EIPS/eip-4399): Supplant DIFFICULTY opcode with PREVRANDAO (Merge) (`experimental`)
- [EIP-4895](https://eips.ethereum.org/EIPS/eip-4895): Beacon chain push withdrawals as operations (Shanghai)

@@ -305,0 +285,0 @@ ### Bootstrap Nodes

@@ -75,6 +75,6 @@ {

{
"//_comment": "The forkHash will remain same as mergeForkIdTransition is post merge, terminal block: https://goerli.etherscan.io/block/7382818",
"//_comment": "The forkHash will remain same as mergeForkIdTransition is post merge",
"name": "merge",
"ttd": "10790000",
"block": 7382819,
"block": null,
"forkHash": "0xb8c6299d"

@@ -81,0 +81,0 @@ },

@@ -91,10 +91,9 @@ {

{
"//_comment": "The forkHash will remain same as mergeForkIdTransition is post merge, terminal block: https://etherscan.io/block/15537393",
"name": "mergeForkIdTransition",
"block": null,
"forkHash": null
},
{
"name": "merge",
"ttd": "58750000000000000000000",
"block": 15537394,
"forkHash": "0xf0afd0e3"
},
{
"name": "mergeForkIdTransition",
"block": null,

@@ -101,0 +100,0 @@ "forkHash": null

@@ -77,6 +77,6 @@ {

{
"//_comment": "The forkHash will remain same as mergeForkIdTransition is post merge, terminal block: https://sepolia.etherscan.io/block/1450408",
"//_comment": "The forkHash will remain same as mergeForkIdTransition is post merge",
"name": "merge",
"ttd": "17000000000000000",
"block": 1450409,
"block": null,
"forkHash": "0xfe3366e7"

@@ -92,4 +92,3 @@ },

"block": null,
"timestamp": "1677557088",
"forkHash": "0xf7f9bc08"
"forkHash": null
}

@@ -96,0 +95,0 @@ ],

@@ -12,4 +12,3 @@ import { TypeOutput, intToBuffer, toType } from '@nomicfoundation/ethereumjs-util'

import { Chain, CustomChain, Hardfork } from './enums'
import { hardforks as HARDFORK_SPECS } from './hardforks'
import { parseGethGenesis } from './utils'
import { hardforks as HARDFORK_CHANGES } from './hardforks'

@@ -28,3 +27,2 @@ import type { ConsensusAlgorithm, ConsensusType } from './enums'

GenesisBlockConfig,
GethConfigOpts,
HardforkConfig,

@@ -34,4 +32,2 @@ } from './types'

type HardforkSpecKeys = keyof typeof HARDFORK_SPECS
type HardforkSpecValues = typeof HARDFORK_SPECS[HardforkSpecKeys]
/**

@@ -53,4 +49,2 @@ * Common class to access chain and hardfork parameters and to provide

private HARDFORK_CHANGES: [HardforkSpecKeys, HardforkSpecValues][]
/**

@@ -127,12 +121,2 @@ * Creates a {@link Common} object for a custom chain, based on a standard one.

}
if (chainParamsOrName === CustomChain.ArbitrumOne) {
return Common.custom(
{
name: CustomChain.ArbitrumOne,
chainId: 42161,
networkId: 42161,
},
opts
)
}
if (chainParamsOrName === CustomChain.xDaiChain) {

@@ -177,25 +161,2 @@ return Common.custom(

/**
 * Static method to load and set common from a geth genesis json.
 * Derives a custom chain configuration via `parseGethGenesis` and wraps it
 * in a freshly constructed {@link Common} instance.
 * @param genesisJson json of geth configuration
 * @param options further configuration for the common instance:
 *   - chain: optional chain name override used while parsing the genesis
 *   - eips: additional EIPs to activate on the resulting instance
 *   - genesisHash: if provided, missing fork hashes are computed from it
 *   - hardfork: hardfork to set; falls back to the one derived from the genesis
 *   - mergeForkIdPostMerge: forwarded to `parseGethGenesis`
 *     (presumably controls ordering of the mergeForkIdTransition fork — TODO confirm)
 * @returns Common
 */
static fromGethGenesis(
genesisJson: any,
{ chain, eips, genesisHash, hardfork, mergeForkIdPostMerge }: GethConfigOpts
): Common {
const genesisParams = parseGethGenesis(genesisJson, chain, mergeForkIdPostMerge)
// Fall back to a generic 'custom' chain name when the genesis provides none
const common = new Common({
chain: genesisParams.name ?? 'custom',
customChains: [genesisParams],
eips,
hardfork: hardfork ?? genesisParams.hardfork,
})
// With the genesis hash available, missing forkHash entries can be filled in
if (genesisHash !== undefined) {
common.setForkHashes(genesisHash)
}
return common
}
/**
* Static method to determine if a {@link chainId} is supported as a standard chain

@@ -238,7 +199,2 @@ * @param chainId bigint id (`1`) of a standard chain

this.DEFAULT_HARDFORK = this._chainParams.defaultHardfork ?? Hardfork.Merge
// Assign hardfork changes in the sequence of the applied hardforks
this.HARDFORK_CHANGES = this.hardforks().map((hf) => [
hf.name as HardforkSpecKeys,
HARDFORK_SPECS[hf.name as HardforkSpecKeys],
])
this._hardfork = this.DEFAULT_HARDFORK

@@ -292,3 +248,3 @@ if (opts.hardfork !== undefined) {

let existing = false
for (const hfChanges of this.HARDFORK_CHANGES) {
for (const hfChanges of HARDFORK_CHANGES) {
if (hfChanges[0] === hardfork) {

@@ -316,109 +272,53 @@ if (this._hardfork !== hardfork) {

* @param blockNumber
* @param td : total difficulty of the parent block (for block hf) OR of the chain latest (for chain hf)
* @param timestamp: timestamp in seconds at which block was/is to be minted
* @param td
* @returns The name of the HF
*/
getHardforkByBlockNumber(
blockNumber: BigIntLike,
td?: BigIntLike,
timestamp?: BigIntLike
): string {
getHardforkByBlockNumber(blockNumber: BigIntLike, td?: BigIntLike): string {
blockNumber = toType(blockNumber, TypeOutput.BigInt)
td = toType(td, TypeOutput.BigInt)
timestamp = toType(timestamp, TypeOutput.Number)
// Filter out hardforks with no block number, no ttd or no timestamp (i.e. unapplied hardforks)
const hfs = this.hardforks().filter(
(hf) =>
hf.block !== null || (hf.ttd !== null && hf.ttd !== undefined) || hf.timestamp !== undefined
)
const mergeIndex = hfs.findIndex((hf) => hf.ttd !== null && hf.ttd !== undefined)
const doubleTTDHF = hfs
.slice(mergeIndex + 1)
.findIndex((hf) => hf.ttd !== null && hf.ttd !== undefined)
if (doubleTTDHF >= 0) {
throw Error(`More than one merge hardforks found with ttd specified`)
}
// Find the first hardfork that has a block number greater than `blockNumber`
// (skips the merge hardfork since it cannot have a block number specified).
// If timestamp is not provided, it also skips timestamps hardforks to continue
// discovering/checking number hardforks.
let hfIndex = hfs.findIndex(
(hf) =>
(hf.block !== null && hf.block > blockNumber) ||
(timestamp !== undefined && Number(hf.timestamp) > timestamp)
)
if (hfIndex === -1) {
// all hardforks apply, set hfIndex to the last one as that's the candidate
hfIndex = hfs.length
} else if (hfIndex === 0) {
// cannot have a case where a block number is before all applied hardforks
// since the chain has to start with a hardfork
throw Error('Must have at least one hardfork at block 0')
}
// If timestamp is not provided, we need to rollback to the last hf with block or ttd
if (timestamp === undefined) {
const stepBack = hfs
.slice(0, hfIndex)
.reverse()
.findIndex((hf) => hf.block !== null || hf.ttd !== undefined)
hfIndex = hfIndex - stepBack
}
// Move hfIndex one back to arrive at candidate hardfork
hfIndex = hfIndex - 1
// If the timestamp was not provided, we could have skipped timestamp hardforks to look for number
// hardforks. so it will now be needed to rollback
if (hfs[hfIndex].block === null && hfs[hfIndex].timestamp === undefined) {
// We're on the merge hardfork. Let's check the TTD
if (td === undefined || td === null || BigInt(hfs[hfIndex].ttd!) > td) {
// Merge ttd greater than current td so we're on hardfork before merge
hfIndex -= 1
let hardfork = Hardfork.Chainstart
let minTdHF
let maxTdHF
let previousHF
for (const hf of this.hardforks()) {
// Skip comparison for not applied HFs
if (hf.block === null) {
if (td !== undefined && td !== null && hf.ttd !== undefined && hf.ttd !== null) {
if (td >= BigInt(hf.ttd)) {
return hf.name
}
}
continue
}
} else {
if (mergeIndex >= 0 && td !== undefined && td !== null) {
if (hfIndex >= mergeIndex && BigInt(hfs[mergeIndex].ttd!) > td) {
throw Error('Maximum HF determined by total difficulty is lower than the block number HF')
} else if (hfIndex < mergeIndex && BigInt(hfs[mergeIndex].ttd!) <= td) {
throw Error('HF determined by block number is lower than the minimum total difficulty HF')
if (blockNumber >= BigInt(hf.block)) {
hardfork = hf.name as Hardfork
}
if (td && (typeof hf.ttd === 'string' || typeof hf.ttd === 'bigint')) {
if (td >= BigInt(hf.ttd)) {
minTdHF = hf.name
} else {
maxTdHF = previousHF
}
}
previousHF = hf.name
}
const hfStartIndex = hfIndex
// Move the hfIndex to the end of the hardforks that might be scheduled on the same block/timestamp
// This won't anyway be the case with Merge hfs
for (; hfIndex < hfs.length - 1; hfIndex++) {
// break out if hfIndex + 1 is not scheduled at hfIndex
if (
hfs[hfIndex].block !== hfs[hfIndex + 1].block ||
hfs[hfIndex].timestamp !== hfs[hfIndex + 1].timestamp
) {
break
if (td) {
let msgAdd = `block number: ${blockNumber} (-> ${hardfork}), `
if (minTdHF !== undefined) {
if (!this.hardforkGteHardfork(hardfork, minTdHF)) {
const msg = 'HF determined by block number is lower than the minimum total difficulty HF'
msgAdd += `total difficulty: ${td} (-> ${minTdHF})`
throw new Error(`${msg}: ${msgAdd}`)
}
}
}
if (timestamp) {
const minTimeStamp = hfs
.slice(0, hfStartIndex)
.reduce((acc: number, hf: HardforkConfig) => Math.max(Number(hf.timestamp ?? '0'), acc), 0)
if (minTimeStamp > timestamp) {
throw Error(`Maximum HF determined by timestamp is lower than the block number/ttd HF`)
if (maxTdHF !== undefined) {
if (!this.hardforkGteHardfork(maxTdHF, hardfork)) {
const msg = 'Maximum HF determined by total difficulty is lower than the block number HF'
msgAdd += `total difficulty: ${td} (-> ${maxTdHF})`
throw new Error(`${msg}: ${msgAdd}`)
}
}
const maxTimeStamp = hfs
.slice(hfIndex + 1)
.reduce(
(acc: number, hf: HardforkConfig) => Math.min(Number(hf.timestamp ?? timestamp), acc),
timestamp
)
if (maxTimeStamp < timestamp) {
throw Error(`Maximum HF determined by block number/ttd is lower than timestamp HF`)
}
}
const hardfork = hfs[hfIndex]
return hardfork.name
return hardfork
}

@@ -436,11 +336,6 @@

* @param td
* @param timestamp
* @returns The name of the HF set
*/
setHardforkByBlockNumber(
blockNumber: BigIntLike,
td?: BigIntLike,
timestamp?: BigIntLike
): string {
const hardfork = this.getHardforkByBlockNumber(blockNumber, td, timestamp)
setHardforkByBlockNumber(blockNumber: BigIntLike, td?: BigIntLike): string {
const hardfork = this.getHardforkByBlockNumber(blockNumber, td)
this.setHardfork(hardfork)

@@ -492,3 +387,3 @@ return hardfork

*
* If the parameter is present in an EIP, the EIP always takes precedence.
* If the parameter is present in an EIP, the EIP always takes precedence.
* Otherwise the parameter if taken from the latest applied HF with

@@ -521,3 +416,3 @@ * a change on the respective parameter.

let value = null
for (const hfChanges of this.HARDFORK_CHANGES) {
for (const hfChanges of HARDFORK_CHANGES) {
// EIP-referencing HF file (e.g. berlin.json)

@@ -576,10 +471,4 @@ if ('eips' in hfChanges[1]) {

*/
paramByBlock(
topic: string,
name: string,
blockNumber: BigIntLike,
td?: BigIntLike,
timestamp?: BigIntLike
): bigint {
const hardfork = this.getHardforkByBlockNumber(blockNumber, td, timestamp)
paramByBlock(topic: string, name: string, blockNumber: BigIntLike, td?: BigIntLike): bigint {
const hardfork = this.getHardforkByBlockNumber(blockNumber, td)
return this.paramByHardfork(topic, name, hardfork)

@@ -601,3 +490,3 @@ }

}
for (const hfChanges of this.HARDFORK_CHANGES) {
for (const hfChanges of HARDFORK_CHANGES) {
const hf = hfChanges[1]

@@ -683,11 +572,2 @@ if (this.gteHardfork(hf['name']) && 'eips' in hf) {

/**
 * Returns the configured activation timestamp of the given hardfork
 * (only set for timestamp-scheduled hardforks).
 * @param hardfork Hardfork name, optional if a hardfork is already set on the instance
 * @returns activation timestamp as bigint, or null if no timestamp is configured
 */
hardforkTimestamp(hardfork?: string | Hardfork): bigint | null {
hardfork = hardfork ?? this._hardfork
const timestamp = this._getHardfork(hardfork)?.['timestamp']
// No timestamp configured (block-number scheduled or unknown hardfork) -> null
if (timestamp === undefined || timestamp === null) {
return null
}
return BigInt(timestamp)
}
/**

@@ -699,3 +579,3 @@ * Returns the hardfork change block for eip

eipBlock(eip: number): bigint | null {
for (const hfChanges of this.HARDFORK_CHANGES) {
for (const hfChanges of HARDFORK_CHANGES) {
const hf = hfChanges[1]

@@ -731,3 +611,2 @@ if ('eips' in hf) {

* @returns True if blockNumber is HF block
* @deprecated
*/

@@ -744,67 +623,7 @@ isHardforkBlock(blockNumber: BigIntLike, hardfork?: string | Hardfork): boolean {

* @param hardfork Hardfork name, optional if HF set
* @returns Block timestamp, number or null if not available
*/
/**
 * Returns the scheduling value (timestamp if set, otherwise block number)
 * of the next hardfork after the hardfork provided or currently set.
 * The merge hardfork is excluded from the search because it is ttd-scheduled
 * rather than block/timestamp-scheduled.
 * @param hardfork Hardfork name, optional if a hardfork is already set on the instance
 * @returns next hardfork's timestamp or block number as bigint, or null if none found
 */
nextHardforkBlockOrTimestamp(hardfork?: string | Hardfork): bigint | null {
hardfork = hardfork ?? this._hardfork
const hfs = this.hardforks()
let hfIndex = hfs.findIndex((hf) => hf.name === hardfork)
// If the current hardfork is merge, go one behind as merge hf is not part of these
// calcs even if the merge hf block is set
if (hardfork === Hardfork.Merge) {
hfIndex -= 1
}
// Hardfork not found
if (hfIndex < 0) {
return null
}
// Normalize the current hardfork's scheduling value (timestamp preferred over block)
// to a number, or null when neither is configured
let currHfTimeOrBlock = hfs[hfIndex].timestamp ?? hfs[hfIndex].block
currHfTimeOrBlock =
currHfTimeOrBlock !== null && currHfTimeOrBlock !== undefined
? Number(currHfTimeOrBlock)
: null
// Find the first later hardfork (skipping merge) with a scheduling value
// different from the current one — equal values mean same-block/time forks
const nextHf = hfs.slice(hfIndex + 1).find((hf) => {
let hfTimeOrBlock = hf.timestamp ?? hf.block
hfTimeOrBlock =
hfTimeOrBlock !== null && hfTimeOrBlock !== undefined ? Number(hfTimeOrBlock) : null
return (
hf.name !== Hardfork.Merge &&
hfTimeOrBlock !== null &&
hfTimeOrBlock !== undefined &&
hfTimeOrBlock !== currHfTimeOrBlock
)
})
// If no next hf found with valid block or timestamp return null
if (nextHf === undefined) {
return null
}
const nextHfBlock = nextHf.timestamp ?? nextHf.block
if (nextHfBlock === null || nextHfBlock === undefined) {
return null
}
return BigInt(nextHfBlock)
}
/**
* Returns the change block for the next hardfork after the hardfork provided or set
* @param hardfork Hardfork name, optional if HF set
* @returns Block number or null if not available
* @deprecated
*/
nextHardforkBlock(hardfork?: string | Hardfork): bigint | null {
hardfork = hardfork ?? this._hardfork
let hfBlock = this.hardforkBlock(hardfork)
// If this is a merge hardfork with block not set, then we fallback to previous hardfork
// to find the nextHardforkBlock
if (hfBlock === null && hardfork === Hardfork.Merge) {
const hfs = this.hardforks()
const mergeIndex = hfs.findIndex((hf) => hf.ttd !== null && hf.ttd !== undefined)
if (mergeIndex < 0) {
throw Error(`Merge hardfork should have been found`)
}
hfBlock = this.hardforkBlock(hfs[mergeIndex - 1].name)
}
const hfBlock = this.hardforkBlock(hardfork)
if (hfBlock === null) {

@@ -818,8 +637,4 @@ return null

const nextHfBlock = this.hardforks().reduce((acc: bigint | null, hf: HardforkConfig) => {
// We need to ignore the merge block in our next hardfork calc
const block = BigInt(
hf.block === null || (hf.ttd !== undefined && hf.ttd !== null) ? 0 : hf.block
)
// Typescript can't seem to follow that the hfBlock is not null at this point
return block > hfBlock! && acc === null ? block : acc
const block = BigInt(typeof hf.block !== 'number' ? 0 : hf.block)
return block > hfBlock && acc === null ? block : acc
}, null)

@@ -834,3 +649,2 @@ return nextHfBlock

* @returns True if blockNumber is HF block
* @deprecated
*/

@@ -853,25 +667,17 @@ isNextHardforkBlock(blockNumber: BigIntLike, hardfork?: string | Hardfork): boolean {

let hfBuffer = Buffer.alloc(0)
let prevBlockOrTime = 0
let prevBlock = 0
for (const hf of this.hardforks()) {
const { block, timestamp, name } = hf
// Timestamp to be used for timestamp based hfs even if we may bundle
// block number with them retrospectively
let blockOrTime = timestamp ?? block
blockOrTime = blockOrTime !== null ? Number(blockOrTime) : null
const block = hf.block
// Skip for chainstart (0), not applied HFs (null) and
// when already applied on same blockOrTime HFs
// and on the merge since forkhash doesn't change on merge hf
if (
typeof blockOrTime === 'number' &&
blockOrTime !== 0 &&
blockOrTime !== prevBlockOrTime &&
name !== Hardfork.Merge
) {
const hfBlockBuffer = Buffer.from(blockOrTime.toString(16).padStart(16, '0'), 'hex')
// when already applied on same block number HFs
if (typeof block === 'number' && block !== 0 && block !== prevBlock) {
const hfBlockBuffer = Buffer.from(block.toString(16).padStart(16, '0'), 'hex')
hfBuffer = Buffer.concat([hfBuffer, hfBlockBuffer])
prevBlockOrTime = blockOrTime
}
if (hf.name === hardfork) break
if (typeof block === 'number') {
prevBlock = block
}
}

@@ -894,6 +700,3 @@ const inputBuffer = Buffer.concat([genesisHash, hfBuffer])

const data = this._getHardfork(hardfork)
if (
data === null ||
(data?.block === null && data?.timestamp === undefined && data?.ttd === undefined)
) {
if (data === null || (data?.block === null && data?.ttd === undefined)) {
const msg = 'No fork hash calculation possible for future hardfork'

@@ -922,19 +725,2 @@ throw new Error(msg)

/**
 * Sets any missing forkHashes on this {@link Common} instance,
 * computed from the given genesis block hash.
 * @param genesisHash The genesis block hash
 */
setForkHashes(genesisHash: Buffer) {
for (const hf of this.hardforks()) {
// A fork hash can only be computed for applied hardforks: those with a
// block number or timestamp configured, or (merge) a ttd configured
const blockOrTime = hf.timestamp ?? hf.block
if (
(hf.forkHash === null || hf.forkHash === undefined) &&
((blockOrTime !== null && blockOrTime !== undefined) || typeof hf.ttd !== 'undefined')
) {
hf.forkHash = this.forkHash(hf.name, genesisHash)
}
}
}
/**
* Returns the Genesis parameters of the current chain

@@ -1021,3 +807,3 @@ * @returns Genesis dictionary

let value
for (const hfChanges of this.HARDFORK_CHANGES) {
for (const hfChanges of HARDFORK_CHANGES) {
if ('consensus' in hfChanges[1]) {

@@ -1044,3 +830,3 @@ value = hfChanges[1]['consensus']['type']

let value
for (const hfChanges of this.HARDFORK_CHANGES) {
for (const hfChanges of HARDFORK_CHANGES) {
if ('consensus' in hfChanges[1]) {

@@ -1061,5 +847,6 @@ value = hfChanges[1]['consensus']['algorithm']

*
* ethash: empty object
* ethash: -
* clique: period, epoch
* casper: empty object
* aura: -
* casper: -
*

@@ -1072,3 +859,3 @@ * Note: This value can update along a Hardfork.

let value
for (const hfChanges of this.HARDFORK_CHANGES) {
for (const hfChanges of HARDFORK_CHANGES) {
if ('consensus' in hfChanges[1]) {

@@ -1080,5 +867,3 @@ // The config parameter is named after the respective consensus algorithm

}
return (
value ?? this._chainParams['consensus'][this.consensusAlgorithm() as ConsensusAlgorithm] ?? {}
)
return value ?? this._chainParams['consensus'][this.consensusAlgorithm() as ConsensusAlgorithm]!
}

@@ -1085,0 +870,0 @@

@@ -6,3 +6,3 @@ {

"url": "https://eips.ethereum.org/EIPS/eip-3675",
"status": "Final",
"status": "Review",
"minimumHardfork": "london",

@@ -9,0 +9,0 @@ "requiredEIPs": [],

@@ -24,5 +24,3 @@ export const EIPs: { [key: number]: any } = {

4399: require('./4399.json'),
4844: require('./4844.json'),
4895: require('./4895.json'),
5133: require('./5133.json'),
}

@@ -27,3 +27,2 @@ export enum Chain {

Shanghai = 'shanghai',
ShardingForkDev = 'shardingFork',
}

@@ -66,9 +65,2 @@

/**
* Arbitrum One - mainnet for Arbitrum roll-up
*
* - [Documentation](https://developer.offchainlabs.com/public-chains)
*/
ArbitrumOne = 'arbitrum-one',
/**
* xDai EVM sidechain with a native stable token

@@ -75,0 +67,0 @@ *

@@ -1,20 +0,19 @@

// Map of hardfork name -> hardfork spec, loaded from the per-fork JSON files.
export const hardforks = {
chainstart: require('./chainstart.json'),
homestead: require('./homestead.json'),
dao: require('./dao.json'),
tangerineWhistle: require('./tangerineWhistle.json'),
spuriousDragon: require('./spuriousDragon.json'),
byzantium: require('./byzantium.json'),
constantinople: require('./constantinople.json'),
petersburg: require('./petersburg.json'),
istanbul: require('./istanbul.json'),
muirGlacier: require('./muirGlacier.json'),
berlin: require('./berlin.json'),
london: require('./london.json'),
shanghai: require('./shanghai.json'),
arrowGlacier: require('./arrowGlacier.json'),
grayGlacier: require('./grayGlacier.json'),
mergeForkIdTransition: require('./mergeForkIdTransition.json'),
merge: require('./merge.json'),
shardingFork: require('./sharding.json'),
}
// Ordered list of [hardfork name, hardfork spec] pairs, loaded from the
// per-fork JSON files.
// NOTE(review): order appears significant — consumers iterate this array to
// apply hardfork changes in activation sequence; keep entries ordered.
export const hardforks = [
['chainstart', require('./chainstart.json')],
['homestead', require('./homestead.json')],
['dao', require('./dao.json')],
['tangerineWhistle', require('./tangerineWhistle.json')],
['spuriousDragon', require('./spuriousDragon.json')],
['byzantium', require('./byzantium.json')],
['constantinople', require('./constantinople.json')],
['petersburg', require('./petersburg.json')],
['istanbul', require('./istanbul.json')],
['muirGlacier', require('./muirGlacier.json')],
['berlin', require('./berlin.json')],
['london', require('./london.json')],
['shanghai', require('./shanghai.json')],
['arrowGlacier', require('./arrowGlacier.json')],
['grayGlacier', require('./grayGlacier.json')],
['mergeForkIdTransition', require('./mergeForkIdTransition.json')],
['merge', require('./merge.json')],
]

@@ -5,3 +5,3 @@ {

"url": "https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/merge.md",
"status": "Final",
"status": "Draft",
"consensus": {

@@ -8,0 +8,0 @@ "type": "pos",

{
"name": "shanghai",
"comment": "Next feature hardfork after the merge hardfork having withdrawals, warm coinbase, push0, limit/meter initcode",
"url": "https://github.com/ethereum/execution-specs/blob/master/network-upgrades/mainnet-upgrades/shanghai.md",
"status": "Final",
"eips": [3651, 3855, 3860, 4895]
"comment": "Next feature hardfork after the merge hardfork",
"url": "https://github.com/ethereum/pm/issues/356",
"status": "Pre-Draft",
"eips": []
}
export * from './common'
export * from './enums'
export * from './types'
export * from './utils'

@@ -22,5 +22,5 @@ import type { Chain, ConsensusAlgorithm, ConsensusType, Hardfork } from './enums'

networkId: number | bigint
defaultHardfork?: string
comment?: string
url?: string
defaultHardfork: string
comment: string
url: string
genesis: GenesisBlockConfig

@@ -52,3 +52,2 @@ hardforks: HardforkConfig[]

ttd?: bigint | string
timestamp?: number | string
forkHash?: string | null

@@ -119,7 +118,1 @@ }

}
/**
 * Options for creating a {@link Common} instance from a Geth genesis json
 * (see `Common.fromGethGenesis`).
 */
export interface GethConfigOpts extends BaseOpts {
/** Optional chain name override applied while parsing the genesis config */
chain?: string
/** Genesis block hash; when provided, missing fork hashes are derived from it */
genesisHash?: Buffer
/**
 * Forwarded to the genesis parser — presumably controls whether the
 * mergeForkIdTransition fork is ordered after the merge; TODO confirm
 */
mergeForkIdPostMerge?: boolean
}

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc