Socket
Socket
Sign inDemoInstall

@ethereumjs/common

Package Overview
Dependencies
Maintainers
3
Versions
33
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@ethereumjs/common - npm Package Compare versions

Comparing version 3.0.1 to 3.0.2

dist/eips/4895.json

25

dist/common.d.ts

@@ -54,3 +54,3 @@ /// <reference types="node" />

*/
static fromGethGenesis(genesisJson: any, { chain, genesisHash, hardfork }: GethConfigOpts): Common;
static fromGethGenesis(genesisJson: any, { chain, genesisHash, hardfork, mergeForkIdPostMerge }: GethConfigOpts): Common;
/**

@@ -86,5 +86,6 @@ * Static method to determine if a {@link chainId} is supported as a standard chain

* @param td : total difficulty of the parent block (for block hf) OR of the chain latest (for chain hf)
* @param timestamp: timestamp in seconds at which block was/is to be minted
* @returns The name of the HF
*/
getHardforkByBlockNumber(blockNumber: BigIntLike, td?: BigIntLike): string;
getHardforkByBlockNumber(blockNumber: BigIntLike, td?: BigIntLike, timestamp?: BigIntLike): string;
/**

@@ -100,5 +101,6 @@ * Sets a new hardfork based on the block number or an optional

* @param td
* @param timestamp
* @returns The name of the HF set
*/
setHardforkByBlockNumber(blockNumber: BigIntLike, td?: BigIntLike): string;
setHardforkByBlockNumber(blockNumber: BigIntLike, td?: BigIntLike, timestamp?: BigIntLike): string;
/**

@@ -152,3 +154,3 @@ * Internal helper function, returns the params for the given hardfork for the chain set

*/
paramByBlock(topic: string, name: string, blockNumber: BigIntLike, td?: BigIntLike): bigint;
paramByBlock(topic: string, name: string, blockNumber: BigIntLike, td?: BigIntLike, timestamp?: BigIntLike): bigint;
/**

@@ -197,2 +199,3 @@ * Checks if an EIP is activated by either being included in the EIPs

hardforkBlock(hardfork?: string | Hardfork): bigint | null;
hardforkTimestamp(hardfork?: string | Hardfork): bigint | null;
/**

@@ -215,2 +218,3 @@ * Returns the hardfork change block for eip

* @returns True if blockNumber is HF block
* @deprecated
*/

@@ -221,3 +225,10 @@ isHardforkBlock(blockNumber: BigIntLike, hardfork?: string | Hardfork): boolean;

* @param hardfork Hardfork name, optional if HF set
* @returns Block timestamp, number or null if not available
*/
nextHardforkBlockOrTimestamp(hardfork?: string | Hardfork): bigint | null;
/**
* Returns the change block for the next hardfork after the hardfork provided or set
* @param hardfork Hardfork name, optional if HF set
* @returns Block number or null if not available
* @deprecated
*/

@@ -230,2 +241,3 @@ nextHardforkBlock(hardfork?: string | Hardfork): bigint | null;

* @returns True if blockNumber is HF block
* @deprecated
*/

@@ -327,6 +339,5 @@ isNextHardforkBlock(blockNumber: BigIntLike, hardfork?: string | Hardfork): boolean;

*
* ethash: -
* ethash: empty object
* clique: period, epoch
* aura: -
* casper: -
* casper: empty object
*

@@ -333,0 +344,0 @@ * Note: This value can update along a Hardfork.

@@ -103,2 +103,9 @@ "use strict";

}
if (chainParamsOrName === enums_1.CustomChain.ArbitrumOne) {
return Common.custom({
name: enums_1.CustomChain.ArbitrumOne,
chainId: 42161,
networkId: 42161,
}, opts);
}
if (chainParamsOrName === enums_1.CustomChain.xDaiChain) {

@@ -138,4 +145,4 @@ return Common.custom({

*/
static fromGethGenesis(genesisJson, { chain, genesisHash, hardfork }) {
const genesisParams = (0, utils_1.parseGethGenesis)(genesisJson, chain);
static fromGethGenesis(genesisJson, { chain, genesisHash, hardfork, mergeForkIdPostMerge }) {
const genesisParams = (0, utils_1.parseGethGenesis)(genesisJson, chain, mergeForkIdPostMerge);
const common = new Common({

@@ -236,9 +243,11 @@ chain: genesisParams.name ?? 'custom',

* @param td : total difficulty of the parent block (for block hf) OR of the chain latest (for chain hf)
* @param timestamp: timestamp in seconds at which block was/is to be minted
* @returns The name of the HF
*/
getHardforkByBlockNumber(blockNumber, td) {
getHardforkByBlockNumber(blockNumber, td, timestamp) {
blockNumber = (0, util_1.toType)(blockNumber, util_1.TypeOutput.BigInt);
td = (0, util_1.toType)(td, util_1.TypeOutput.BigInt);
// Filter out hardforks with no block number and no ttd (i.e. unapplied hardforks)
const hfs = this.hardforks().filter((hf) => hf.block !== null || (hf.ttd !== null && hf.ttd !== undefined));
timestamp = (0, util_1.toType)(timestamp, util_1.TypeOutput.Number);
// Filter out hardforks with no block number, no ttd or no timestamp (i.e. unapplied hardforks)
const hfs = this.hardforks().filter((hf) => hf.block !== null || (hf.ttd !== null && hf.ttd !== undefined) || hf.timestamp !== undefined);
const mergeIndex = hfs.findIndex((hf) => hf.ttd !== null && hf.ttd !== undefined);

@@ -251,9 +260,11 @@ const doubleTTDHF = hfs

}
// Find the first hardfork that has a block number greater than `blockNumber` (skips the merge hardfork since
// it cannot have a block number specified).
let hfIndex = hfs.findIndex((hf) => hf.block !== null && hf.block > blockNumber);
// Move hfIndex one back to arrive at candidate hardfork
// Find the first hardfork that has a block number greater than `blockNumber`
// (skips the merge hardfork since it cannot have a block number specified).
// If timestamp is not provided, it also skips timestamp hardforks to continue
// discovering/checking number hardforks.
let hfIndex = hfs.findIndex((hf) => (hf.block !== null && hf.block > blockNumber) ||
(timestamp !== undefined && Number(hf.timestamp) > timestamp));
if (hfIndex === -1) {
// all hardforks apply, set hfIndex to the last one as that's the candidate
hfIndex = hfs.length - 1;
hfIndex = hfs.length;
}

@@ -265,17 +276,20 @@ else if (hfIndex === 0) {

}
else {
// The previous hardfork is the candidate here
hfIndex = hfIndex - 1;
// If timestamp is not provided, we need to rollback to the last hf with block or ttd
if (timestamp === undefined) {
const stepBack = hfs
.slice(0, hfIndex)
.reverse()
.findIndex((hf) => hf.block !== null || hf.ttd !== undefined);
hfIndex = hfIndex - stepBack;
}
let hardfork;
if (hfs[hfIndex].block === null) {
// Move hfIndex one back to arrive at candidate hardfork
hfIndex = hfIndex - 1;
// If the timestamp was not provided, we could have skipped timestamp hardforks to look for number
// hardforks, so we now need to roll back
if (hfs[hfIndex].block === null && hfs[hfIndex].timestamp === undefined) {
// We're on the merge hardfork. Let's check the TTD
if (td === undefined || td === null || BigInt(hfs[hfIndex].ttd) > td) {
// Merge ttd greater than current td so we're on hardfork before merge
hardfork = hfs[hfIndex - 1];
hfIndex -= 1;
}
else {
// Merge ttd equal or less than current td so we're on merge hardfork
hardfork = hfs[hfIndex];
}
}

@@ -291,4 +305,28 @@ else {

}
hardfork = hfs[hfIndex];
}
const hfStartIndex = hfIndex;
// Move the hfIndex to the end of the hardforks that might be scheduled on the same block/timestamp
// This won't anyway be the case with Merge hfs
for (; hfIndex < hfs.length - 1; hfIndex++) {
// break out if hfIndex + 1 is not scheduled at hfIndex
if (hfs[hfIndex].block !== hfs[hfIndex + 1].block ||
hfs[hfIndex].timestamp !== hfs[hfIndex + 1].timestamp) {
break;
}
}
if (timestamp) {
const minTimeStamp = hfs
.slice(0, hfStartIndex)
.reduce((acc, hf) => Math.max(Number(hf.timestamp ?? '0'), acc), 0);
if (minTimeStamp > timestamp) {
throw Error(`Maximum HF determined by timestamp is lower than the block number/ttd HF`);
}
const maxTimeStamp = hfs
.slice(hfIndex + 1)
.reduce((acc, hf) => Math.min(Number(hf.timestamp ?? timestamp), acc), timestamp);
if (maxTimeStamp < timestamp) {
throw Error(`Maximum HF determined by block number/ttd is lower than timestamp HF`);
}
}
const hardfork = hfs[hfIndex];
return hardfork.name;

@@ -306,6 +344,7 @@ }

* @param td
* @param timestamp
* @returns The name of the HF set
*/
setHardforkByBlockNumber(blockNumber, td) {
const hardfork = this.getHardforkByBlockNumber(blockNumber, td);
setHardforkByBlockNumber(blockNumber, td, timestamp) {
const hardfork = this.getHardforkByBlockNumber(blockNumber, td, timestamp);
this.setHardfork(hardfork);

@@ -434,4 +473,4 @@ return hardfork;

*/
paramByBlock(topic, name, blockNumber, td) {
const hardfork = this.getHardforkByBlockNumber(blockNumber, td);
paramByBlock(topic, name, blockNumber, td, timestamp) {
const hardfork = this.getHardforkByBlockNumber(blockNumber, td, timestamp);
return this.paramByHardfork(topic, name, hardfork);

@@ -527,2 +566,10 @@ }

}
hardforkTimestamp(hardfork) {
hardfork = hardfork ?? this._hardfork;
const timestamp = this._getHardfork(hardfork)?.['timestamp'];
if (timestamp === undefined || timestamp === null) {
return null;
}
return BigInt(timestamp);
}
/**

@@ -563,2 +610,3 @@ * Returns the hardfork change block for eip

* @returns True if blockNumber is HF block
* @deprecated
*/

@@ -574,3 +622,38 @@ isHardforkBlock(blockNumber, hardfork) {

* @param hardfork Hardfork name, optional if HF set
* @returns Block timestamp, number or null if not available
*/
nextHardforkBlockOrTimestamp(hardfork) {
hardfork = hardfork ?? this._hardfork;
const hfs = this.hardforks();
let hfIndex = hfs.findIndex((hf) => hf.name === hardfork);
// If the current hardfork is merge, go one behind as merge hf is not part of these
// calcs even if the merge hf block is set
if (hardfork === enums_1.Hardfork.Merge) {
hfIndex -= 1;
}
// Hardfork not found
if (hfIndex < 0) {
return null;
}
const currHf = hfs[hfIndex];
const nextHf = hfs
.slice(hfIndex + 1)
.find((hf) => hf.name !== enums_1.Hardfork.Merge &&
((hf.block !== null && hf.block !== currHf.block) ||
(hf.timestamp !== undefined && hf.timestamp !== currHf.timestamp)));
// If no next hf found with valid block or timestamp return null
if (nextHf === undefined) {
return null;
}
const nextHfBlock = nextHf.timestamp ?? nextHf.block;
if (nextHfBlock === null || nextHfBlock === undefined) {
return null;
}
return BigInt(nextHfBlock);
}
/**
* Returns the change block for the next hardfork after the hardfork provided or set
* @param hardfork Hardfork name, optional if HF set
* @returns Block number or null if not available
* @deprecated
*/

@@ -610,2 +693,3 @@ nextHardforkBlock(hardfork) {

* @returns True if blockNumber is HF block
* @deprecated
*/

@@ -804,6 +888,5 @@ isNextHardforkBlock(blockNumber, hardfork) {

*
* ethash: -
* ethash: empty object
* clique: period, epoch
* aura: -
* casper: -
* casper: empty object
*

@@ -823,3 +906,3 @@ * Note: This value can update along a Hardfork.

}
return value ?? this._chainParams['consensus'][this.consensusAlgorithm()];
return (value ?? this._chainParams['consensus'][this.consensusAlgorithm()] ?? {});
}

@@ -826,0 +909,0 @@ /**

@@ -27,4 +27,5 @@ "use strict";

4399: require('./4399.json'),
4895: require('./4895.json'),
5133: require('./5133.json'),
};
//# sourceMappingURL=index.js.map

@@ -57,2 +57,8 @@ export declare enum Chain {

/**
* Arbitrum One - mainnet for Arbitrum roll-up
*
* - [Documentation](https://developer.offchainlabs.com/public-chains)
*/
ArbitrumOne = "arbitrum-one",
/**
* xDai EVM sidechain with a native stable token

@@ -59,0 +65,0 @@ *

@@ -65,2 +65,8 @@ "use strict";

/**
* Arbitrum One - mainnet for Arbitrum roll-up
*
* - [Documentation](https://developer.offchainlabs.com/public-chains)
*/
CustomChain["ArbitrumOne"] = "arbitrum-one";
/**
* xDai EVM sidechain with a native stable token

@@ -67,0 +73,0 @@ *

@@ -6,3 +6,3 @@ {

"status": "Pre-Draft",
"eips": []
"eips": [4895]
}

@@ -46,2 +46,3 @@ /// <reference types="node" />

ttd?: bigint | string;
timestamp?: number | string;
forkHash?: string | null;

@@ -112,4 +113,5 @@ }

genesisHash?: Buffer;
mergeForkIdPostMerge?: boolean;
}
export {};
//# sourceMappingURL=types.d.ts.map

@@ -7,3 +7,3 @@ /**

*/
export declare function parseGethGenesis(json: any, name?: string): any;
export declare function parseGethGenesis(json: any, name?: string, mergeForkIdPostMerge?: boolean): any;
//# sourceMappingURL=utils.d.ts.map

@@ -23,7 +23,11 @@ "use strict";

* @param json object representing the Geth genesis file
* @param mergeForkIdPostMerge optional flag which clarifies the placement of the MergeForkIdTransition
* hardfork; defaults to post-merge, as with the merged eth networks, but it could also come
* before the merge, as in the kiln genesis
* @returns genesis parameters in a `CommonOpts` compliant object
*/
function parseGethParams(json) {
function parseGethParams(json, mergeForkIdPostMerge = true) {
const { name, config, difficulty, mixHash, gasLimit, coinbase, baseFeePerGas } = json;
let { extraData, timestamp, nonce } = json;
const genesisTimestamp = Number(timestamp);
const { chainId } = config;

@@ -67,4 +71,7 @@ // geth is not strictly putting empty fields with a 0x prefix

clique: {
period: config.clique.period,
epoch: config.clique.epoch,
// The recent geth genesis seems to be using blockperiodseconds
// and epochlength for clique specification
// see: https://hackmd.io/PqZgMpnkSWCWv5joJoFymQ
period: config.clique.period ?? config.clique.blockperiodseconds,
epoch: config.clique.epoch ?? config.clique.epochlength,
},

@@ -79,27 +86,61 @@ }

const forkMap = {
[enums_1.Hardfork.Homestead]: 'homesteadBlock',
[enums_1.Hardfork.Dao]: 'daoForkBlock',
[enums_1.Hardfork.TangerineWhistle]: 'eip150Block',
[enums_1.Hardfork.SpuriousDragon]: 'eip155Block',
[enums_1.Hardfork.Byzantium]: 'byzantiumBlock',
[enums_1.Hardfork.Constantinople]: 'constantinopleBlock',
[enums_1.Hardfork.Petersburg]: 'petersburgBlock',
[enums_1.Hardfork.Istanbul]: 'istanbulBlock',
[enums_1.Hardfork.MuirGlacier]: 'muirGlacierBlock',
[enums_1.Hardfork.Berlin]: 'berlinBlock',
[enums_1.Hardfork.London]: 'londonBlock',
[enums_1.Hardfork.MergeForkIdTransition]: 'mergeForkBlock',
[enums_1.Hardfork.Homestead]: { name: 'homesteadBlock' },
[enums_1.Hardfork.Dao]: { name: 'daoForkBlock' },
[enums_1.Hardfork.TangerineWhistle]: { name: 'eip150Block' },
[enums_1.Hardfork.SpuriousDragon]: { name: 'eip155Block' },
[enums_1.Hardfork.Byzantium]: { name: 'byzantiumBlock' },
[enums_1.Hardfork.Constantinople]: { name: 'constantinopleBlock' },
[enums_1.Hardfork.Petersburg]: { name: 'petersburgBlock' },
[enums_1.Hardfork.Istanbul]: { name: 'istanbulBlock' },
[enums_1.Hardfork.MuirGlacier]: { name: 'muirGlacierBlock' },
[enums_1.Hardfork.Berlin]: { name: 'berlinBlock' },
[enums_1.Hardfork.London]: { name: 'londonBlock' },
[enums_1.Hardfork.MergeForkIdTransition]: { name: 'mergeForkBlock', postMerge: mergeForkIdPostMerge },
[enums_1.Hardfork.Shanghai]: { name: 'shanghaiTime', postMerge: true, isTimestamp: true },
};
params.hardforks = Object.values(enums_1.Hardfork)
.map((name) => ({
name,
block: name === enums_1.Hardfork.Chainstart ? 0 : config[forkMap[name]] ?? null,
// forkMapRev is the map from config field name to Hardfork
const forkMapRev = Object.keys(forkMap).reduce((acc, elem) => {
acc[forkMap[elem].name] = elem;
return acc;
}, {});
const configHardforkNames = Object.keys(config).filter((key) => forkMapRev[key] !== undefined && config[key] !== undefined && config[key] !== null);
params.hardforks = configHardforkNames
.map((nameBlock) => ({
name: forkMapRev[nameBlock],
block: forkMap[forkMapRev[nameBlock]].isTimestamp === true ? null : config[nameBlock],
timestamp: forkMap[forkMapRev[nameBlock]].isTimestamp === true ? config[nameBlock] : undefined,
}))
.filter((fork) => fork.block !== null);
.filter((fork) => (fork.block !== null && fork.block !== undefined) || fork.timestamp !== undefined);
params.hardforks.sort(function (a, b) {
return (a.block ?? Infinity) - (b.block ?? Infinity);
});
params.hardforks.sort(function (a, b) {
return (a.timestamp ?? genesisTimestamp) - (b.timestamp ?? genesisTimestamp);
});
params.hardforks.unshift({ name: enums_1.Hardfork.Chainstart, block: 0 });
if (config.terminalTotalDifficulty !== undefined) {
params.hardforks.push({
// Following points need to be considered for placement of merge hf
// - Merge hardfork can't be placed at genesis
// - Place merge hf before any hardforks that require CL participation for e.g. withdrawals
// - Merge hardfork has to be placed just after genesis if any of the genesis hardforks make CL
// necessary for e.g. withdrawals
const mergeConfig = {
name: enums_1.Hardfork.Merge,
ttd: config.terminalTotalDifficulty,
block: null,
});
};
// If any of the genesis block require merge, then we need merge just right after genesis
const isMergeJustPostGenesis = params.hardforks
.filter((hf) => hf.block === 0)
.reduce((acc, hf) => acc || forkMap[hf.name]?.postMerge === true, false);
// Merge hardfork has to be placed before first non-zero block hardfork that is dependent
// on merge or first non zero block hardfork if any of genesis hardforks require merge
const postMergeIndex = params.hardforks.findIndex((hf) => (isMergeJustPostGenesis || forkMap[hf.name]?.postMerge === true) &&
(hf.block > 0 || (hf.timestamp ?? 0) > 0));
if (postMergeIndex !== -1) {
params.hardforks.splice(postMergeIndex, 0, mergeConfig);
}
else {
params.hardforks.push(mergeConfig);
}
}

@@ -114,3 +155,3 @@ return params;

*/
function parseGethGenesis(json, name) {
function parseGethGenesis(json, name, mergeForkIdPostMerge) {
try {

@@ -123,3 +164,3 @@ if (['config', 'difficulty', 'gasLimit', 'alloc'].some((field) => !(field in json))) {

}
return parseGethParams(json);
return parseGethParams(json, mergeForkIdPostMerge);
}

@@ -126,0 +167,0 @@ catch (e) {

{
"name": "@ethereumjs/common",
"version": "3.0.1",
"version": "3.0.2",
"description": "Resources common to all Ethereum implementations",

@@ -51,5 +51,5 @@ "keywords": [

"dependencies": {
"@ethereumjs/util": "^8.0.0",
"@ethereumjs/util": "^8.0.3",
"crc-32": "^1.2.0"
}
}

@@ -123,2 +123,12 @@ import { TypeOutput, intToBuffer, toType } from '@ethereumjs/util'

}
if (chainParamsOrName === CustomChain.ArbitrumOne) {
return Common.custom(
{
name: CustomChain.ArbitrumOne,
chainId: 42161,
networkId: 42161,
},
opts
)
}
if (chainParamsOrName === CustomChain.xDaiChain) {

@@ -170,5 +180,5 @@ return Common.custom(

genesisJson: any,
{ chain, genesisHash, hardfork }: GethConfigOpts
{ chain, genesisHash, hardfork, mergeForkIdPostMerge }: GethConfigOpts
): Common {
const genesisParams = parseGethGenesis(genesisJson, chain)
const genesisParams = parseGethGenesis(genesisJson, chain, mergeForkIdPostMerge)
const common = new Common({

@@ -300,11 +310,18 @@ chain: genesisParams.name ?? 'custom',

* @param td : total difficulty of the parent block (for block hf) OR of the chain latest (for chain hf)
* @param timestamp: timestamp in seconds at which block was/is to be minted
* @returns The name of the HF
*/
getHardforkByBlockNumber(blockNumber: BigIntLike, td?: BigIntLike): string {
getHardforkByBlockNumber(
blockNumber: BigIntLike,
td?: BigIntLike,
timestamp?: BigIntLike
): string {
blockNumber = toType(blockNumber, TypeOutput.BigInt)
td = toType(td, TypeOutput.BigInt)
timestamp = toType(timestamp, TypeOutput.Number)
// Filter out hardforks with no block number and no ttd (i.e. unapplied hardforks)
// Filter out hardforks with no block number, no ttd or no timestamp (i.e. unapplied hardforks)
const hfs = this.hardforks().filter(
(hf) => hf.block !== null || (hf.ttd !== null && hf.ttd !== undefined)
(hf) =>
hf.block !== null || (hf.ttd !== null && hf.ttd !== undefined) || hf.timestamp !== undefined
)

@@ -319,10 +336,15 @@ const mergeIndex = hfs.findIndex((hf) => hf.ttd !== null && hf.ttd !== undefined)

// Find the first hardfork that has a block number greater than `blockNumber` (skips the merge hardfork since
// it cannot have a block number specified).
let hfIndex = hfs.findIndex((hf) => hf.block !== null && hf.block > blockNumber)
// Find the first hardfork that has a block number greater than `blockNumber`
// (skips the merge hardfork since it cannot have a block number specified).
// If timestamp is not provided, it also skips timestamp hardforks to continue
// discovering/checking number hardforks.
let hfIndex = hfs.findIndex(
(hf) =>
(hf.block !== null && hf.block > blockNumber) ||
(timestamp !== undefined && Number(hf.timestamp) > timestamp)
)
// Move hfIndex one back to arrive at candidate hardfork
if (hfIndex === -1) {
// all hardforks apply, set hfIndex to the last one as that's the candidate
hfIndex = hfs.length - 1
hfIndex = hfs.length
} else if (hfIndex === 0) {

@@ -332,16 +354,22 @@ // cannot have a case where a block number is before all applied hardforks

throw Error('Must have at least one hardfork at block 0')
} else {
// The previous hardfork is the candidate here
hfIndex = hfIndex - 1
}
let hardfork
if (hfs[hfIndex].block === null) {
// If timestamp is not provided, we need to rollback to the last hf with block or ttd
if (timestamp === undefined) {
const stepBack = hfs
.slice(0, hfIndex)
.reverse()
.findIndex((hf) => hf.block !== null || hf.ttd !== undefined)
hfIndex = hfIndex - stepBack
}
// Move hfIndex one back to arrive at candidate hardfork
hfIndex = hfIndex - 1
// If the timestamp was not provided, we could have skipped timestamp hardforks to look for number
// hardforks, so we now need to roll back
if (hfs[hfIndex].block === null && hfs[hfIndex].timestamp === undefined) {
// We're on the merge hardfork. Let's check the TTD
if (td === undefined || td === null || BigInt(hfs[hfIndex].ttd!) > td) {
// Merge ttd greater than current td so we're on hardfork before merge
hardfork = hfs[hfIndex - 1]
} else {
// Merge ttd equal or less than current td so we're on merge hardfork
hardfork = hfs[hfIndex]
hfIndex -= 1
}

@@ -356,4 +384,36 @@ } else {

}
hardfork = hfs[hfIndex]
}
const hfStartIndex = hfIndex
// Move the hfIndex to the end of the hardforks that might be scheduled on the same block/timestamp
// This won't anyway be the case with Merge hfs
for (; hfIndex < hfs.length - 1; hfIndex++) {
// break out if hfIndex + 1 is not scheduled at hfIndex
if (
hfs[hfIndex].block !== hfs[hfIndex + 1].block ||
hfs[hfIndex].timestamp !== hfs[hfIndex + 1].timestamp
) {
break
}
}
if (timestamp) {
const minTimeStamp = hfs
.slice(0, hfStartIndex)
.reduce((acc: number, hf: HardforkConfig) => Math.max(Number(hf.timestamp ?? '0'), acc), 0)
if (minTimeStamp > timestamp) {
throw Error(`Maximum HF determined by timestamp is lower than the block number/ttd HF`)
}
const maxTimeStamp = hfs
.slice(hfIndex + 1)
.reduce(
(acc: number, hf: HardforkConfig) => Math.min(Number(hf.timestamp ?? timestamp), acc),
timestamp
)
if (maxTimeStamp < timestamp) {
throw Error(`Maximum HF determined by block number/ttd is lower than timestamp HF`)
}
}
const hardfork = hfs[hfIndex]
return hardfork.name

@@ -372,6 +432,11 @@ }

* @param td
* @param timestamp
* @returns The name of the HF set
*/
setHardforkByBlockNumber(blockNumber: BigIntLike, td?: BigIntLike): string {
const hardfork = this.getHardforkByBlockNumber(blockNumber, td)
setHardforkByBlockNumber(
blockNumber: BigIntLike,
td?: BigIntLike,
timestamp?: BigIntLike
): string {
const hardfork = this.getHardforkByBlockNumber(blockNumber, td, timestamp)
this.setHardfork(hardfork)

@@ -505,4 +570,10 @@ return hardfork

*/
paramByBlock(topic: string, name: string, blockNumber: BigIntLike, td?: BigIntLike): bigint {
const hardfork = this.getHardforkByBlockNumber(blockNumber, td)
paramByBlock(
topic: string,
name: string,
blockNumber: BigIntLike,
td?: BigIntLike,
timestamp?: BigIntLike
): bigint {
const hardfork = this.getHardforkByBlockNumber(blockNumber, td, timestamp)
return this.paramByHardfork(topic, name, hardfork)

@@ -605,2 +676,11 @@ }

hardforkTimestamp(hardfork?: string | Hardfork): bigint | null {
hardfork = hardfork ?? this._hardfork
const timestamp = this._getHardfork(hardfork)?.['timestamp']
if (timestamp === undefined || timestamp === null) {
return null
}
return BigInt(timestamp)
}
/**

@@ -643,2 +723,3 @@ * Returns the hardfork change block for eip

* @returns True if blockNumber is HF block
* @deprecated
*/

@@ -655,3 +736,45 @@ isHardforkBlock(blockNumber: BigIntLike, hardfork?: string | Hardfork): boolean {

* @param hardfork Hardfork name, optional if HF set
* @returns Block timestamp, number or null if not available
*/
nextHardforkBlockOrTimestamp(hardfork?: string | Hardfork): bigint | null {
hardfork = hardfork ?? this._hardfork
const hfs = this.hardforks()
let hfIndex = hfs.findIndex((hf) => hf.name === hardfork)
// If the current hardfork is merge, go one behind as merge hf is not part of these
// calcs even if the merge hf block is set
if (hardfork === Hardfork.Merge) {
hfIndex -= 1
}
// Hardfork not found
if (hfIndex < 0) {
return null
}
const currHf = hfs[hfIndex]
const nextHf = hfs
.slice(hfIndex + 1)
.find(
(hf) =>
hf.name !== Hardfork.Merge &&
((hf.block !== null && hf.block !== currHf.block) ||
(hf.timestamp !== undefined && hf.timestamp !== currHf.timestamp))
)
// If no next hf found with valid block or timestamp return null
if (nextHf === undefined) {
return null
}
const nextHfBlock = nextHf.timestamp ?? nextHf.block
if (nextHfBlock === null || nextHfBlock === undefined) {
return null
}
return BigInt(nextHfBlock)
}
/**
* Returns the change block for the next hardfork after the hardfork provided or set
* @param hardfork Hardfork name, optional if HF set
* @returns Block number or null if not available
* @deprecated
*/

@@ -694,2 +817,3 @@ nextHardforkBlock(hardfork?: string | Hardfork): bigint | null {

* @returns True if blockNumber is HF block
* @deprecated
*/

@@ -910,6 +1034,5 @@ isNextHardforkBlock(blockNumber: BigIntLike, hardfork?: string | Hardfork): boolean {

*
* ethash: -
* ethash: empty object
* clique: period, epoch
* aura: -
* casper: -
* casper: empty object
*

@@ -929,3 +1052,5 @@ * Note: This value can update along a Hardfork.

}
return value ?? this._chainParams['consensus'][this.consensusAlgorithm() as ConsensusAlgorithm]!
return (
value ?? this._chainParams['consensus'][this.consensusAlgorithm() as ConsensusAlgorithm] ?? {}
)
}

@@ -932,0 +1057,0 @@

@@ -24,3 +24,4 @@ export const EIPs: { [key: number]: any } = {

4399: require('./4399.json'),
4895: require('./4895.json'),
5133: require('./5133.json'),
}

@@ -64,2 +64,9 @@ export enum Chain {

/**
* Arbitrum One - mainnet for Arbitrum roll-up
*
* - [Documentation](https://developer.offchainlabs.com/public-chains)
*/
ArbitrumOne = 'arbitrum-one',
/**
* xDai EVM sidechain with a native stable token

@@ -66,0 +73,0 @@ *

@@ -6,3 +6,3 @@ {

"status": "Pre-Draft",
"eips": []
"eips": [4895]
}

@@ -51,2 +51,3 @@ import type { Chain, ConsensusAlgorithm, ConsensusType, Hardfork } from './enums'

ttd?: bigint | string
timestamp?: number | string
forkHash?: string | null

@@ -122,2 +123,3 @@ }

genesisHash?: Buffer
mergeForkIdPostMerge?: boolean
}

@@ -5,2 +5,5 @@ import { intToHex, isHexPrefixed, stripHexPrefix } from '@ethereumjs/util'

type ConfigHardfork =
| { name: string; block: null; timestamp: number }
| { name: string; block: number; timestamp?: number }
/**

@@ -24,7 +27,11 @@ * Transforms Geth formatted nonce (i.e. hex string) to 8 byte 0x-prefixed string used internally

* @param json object representing the Geth genesis file
* @param mergeForkIdPostMerge optional flag which clarifies the placement of the MergeForkIdTransition
* hardfork; defaults to post-merge, as with the merged eth networks, but it could also come
* before the merge, as in the kiln genesis
* @returns genesis parameters in a `CommonOpts` compliant object
*/
function parseGethParams(json: any) {
function parseGethParams(json: any, mergeForkIdPostMerge: boolean = true) {
const { name, config, difficulty, mixHash, gasLimit, coinbase, baseFeePerGas } = json
let { extraData, timestamp, nonce } = json
const genesisTimestamp = Number(timestamp)
const { chainId } = config

@@ -74,4 +81,7 @@

clique: {
period: config.clique.period,
epoch: config.clique.epoch,
// The recent geth genesis seems to be using blockperiodseconds
// and epochlength for clique specification
// see: https://hackmd.io/PqZgMpnkSWCWv5joJoFymQ
period: config.clique.period ?? config.clique.blockperiodseconds,
epoch: config.clique.epoch ?? config.clique.epochlength,
},

@@ -86,28 +96,80 @@ }

const forkMap: { [key: string]: string } = {
[Hardfork.Homestead]: 'homesteadBlock',
[Hardfork.Dao]: 'daoForkBlock',
[Hardfork.TangerineWhistle]: 'eip150Block',
[Hardfork.SpuriousDragon]: 'eip155Block',
[Hardfork.Byzantium]: 'byzantiumBlock',
[Hardfork.Constantinople]: 'constantinopleBlock',
[Hardfork.Petersburg]: 'petersburgBlock',
[Hardfork.Istanbul]: 'istanbulBlock',
[Hardfork.MuirGlacier]: 'muirGlacierBlock',
[Hardfork.Berlin]: 'berlinBlock',
[Hardfork.London]: 'londonBlock',
[Hardfork.MergeForkIdTransition]: 'mergeForkBlock',
const forkMap: { [key: string]: { name: string; postMerge?: boolean; isTimestamp?: boolean } } = {
[Hardfork.Homestead]: { name: 'homesteadBlock' },
[Hardfork.Dao]: { name: 'daoForkBlock' },
[Hardfork.TangerineWhistle]: { name: 'eip150Block' },
[Hardfork.SpuriousDragon]: { name: 'eip155Block' },
[Hardfork.Byzantium]: { name: 'byzantiumBlock' },
[Hardfork.Constantinople]: { name: 'constantinopleBlock' },
[Hardfork.Petersburg]: { name: 'petersburgBlock' },
[Hardfork.Istanbul]: { name: 'istanbulBlock' },
[Hardfork.MuirGlacier]: { name: 'muirGlacierBlock' },
[Hardfork.Berlin]: { name: 'berlinBlock' },
[Hardfork.London]: { name: 'londonBlock' },
[Hardfork.MergeForkIdTransition]: { name: 'mergeForkBlock', postMerge: mergeForkIdPostMerge },
[Hardfork.Shanghai]: { name: 'shanghaiTime', postMerge: true, isTimestamp: true },
}
params.hardforks = Object.values(Hardfork)
.map((name) => ({
name,
block: name === Hardfork.Chainstart ? 0 : config[forkMap[name]] ?? null,
// forkMapRev is the map from config field name to Hardfork
const forkMapRev = Object.keys(forkMap).reduce((acc, elem) => {
acc[forkMap[elem].name] = elem
return acc
}, {} as { [key: string]: string })
const configHardforkNames = Object.keys(config).filter(
(key) => forkMapRev[key] !== undefined && config[key] !== undefined && config[key] !== null
)
params.hardforks = configHardforkNames
.map((nameBlock) => ({
name: forkMapRev[nameBlock],
block: forkMap[forkMapRev[nameBlock]].isTimestamp === true ? null : config[nameBlock],
timestamp:
forkMap[forkMapRev[nameBlock]].isTimestamp === true ? config[nameBlock] : undefined,
}))
.filter((fork) => fork.block !== null)
.filter(
(fork) => (fork.block !== null && fork.block !== undefined) || fork.timestamp !== undefined
)
params.hardforks.sort(function (a: ConfigHardfork, b: ConfigHardfork) {
return (a.block ?? Infinity) - (b.block ?? Infinity)
})
params.hardforks.sort(function (a: ConfigHardfork, b: ConfigHardfork) {
return (a.timestamp ?? genesisTimestamp) - (b.timestamp ?? genesisTimestamp)
})
params.hardforks.unshift({ name: Hardfork.Chainstart, block: 0 })
if (config.terminalTotalDifficulty !== undefined) {
params.hardforks.push({
// Following points need to be considered for placement of merge hf
// - Merge hardfork can't be placed at genesis
// - Place merge hf before any hardforks that require CL participation for e.g. withdrawals
// - Merge hardfork has to be placed just after genesis if any of the genesis hardforks make CL
// necessary for e.g. withdrawals
const mergeConfig = {
name: Hardfork.Merge,
ttd: config.terminalTotalDifficulty,
block: null,
})
}
// If any of the genesis block require merge, then we need merge just right after genesis
const isMergeJustPostGenesis: boolean = params.hardforks
.filter((hf: ConfigHardfork) => hf.block === 0)
.reduce(
(acc: boolean, hf: ConfigHardfork) => acc || forkMap[hf.name]?.postMerge === true,
false
)
// Merge hardfork has to be placed before first non-zero block hardfork that is dependent
// on merge or first non zero block hardfork if any of genesis hardforks require merge
const postMergeIndex = params.hardforks.findIndex(
(hf: any) =>
(isMergeJustPostGenesis || forkMap[hf.name]?.postMerge === true) &&
(hf.block > 0 || (hf.timestamp ?? 0) > 0)
)
if (postMergeIndex !== -1) {
params.hardforks.splice(postMergeIndex, 0, mergeConfig)
} else {
params.hardforks.push(mergeConfig)
}
}

@@ -123,3 +185,3 @@ return params

*/
export function parseGethGenesis(json: any, name?: string) {
export function parseGethGenesis(json: any, name?: string, mergeForkIdPostMerge?: boolean) {
try {

@@ -132,3 +194,3 @@ if (['config', 'difficulty', 'gasLimit', 'alloc'].some((field) => !(field in json))) {

}
return parseGethParams(json)
return parseGethParams(json, mergeForkIdPostMerge)
} catch (e: any) {

@@ -135,0 +197,0 @@ throw new Error(`Error parsing parameters file: ${e.message}`)

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc