diff --git a/packages/api/package.json b/packages/api/package.json index 49bcbbc34f5..138964ed946 100644 --- a/packages/api/package.json +++ b/packages/api/package.json @@ -71,7 +71,7 @@ }, "dependencies": { "@chainsafe/persistent-merkle-tree": "^0.8.0", - "@chainsafe/ssz": "^0.17.0", + "@chainsafe/ssz": "^0.17.1", "@lodestar/config": "^1.21.0", "@lodestar/params": "^1.21.0", "@lodestar/types": "^1.21.0", diff --git a/packages/api/src/beacon/client/events.ts b/packages/api/src/beacon/client/events.ts index 2d63925a738..34f14f2e839 100644 --- a/packages/api/src/beacon/client/events.ts +++ b/packages/api/src/beacon/client/events.ts @@ -13,7 +13,7 @@ export type ApiClient = ApiClientMethods; */ export function getClient(config: ChainForkConfig, baseUrl: string): ApiClient { const definitions = getDefinitions(config); - const eventSerdes = getEventSerdes(); + const eventSerdes = getEventSerdes(config); return { eventstream: async ({topics, signal, onEvent, onError, onClose}) => { diff --git a/packages/api/src/beacon/routes/beacon/block.ts b/packages/api/src/beacon/routes/beacon/block.ts index 99fbf7d4564..be6789753e0 100644 --- a/packages/api/src/beacon/routes/beacon/block.ts +++ b/packages/api/src/beacon/routes/beacon/block.ts @@ -14,7 +14,7 @@ import { SignedBlockContents, sszTypesFor, } from "@lodestar/types"; -import {ForkName, ForkPreExecution, isForkBlobs, isForkExecution} from "@lodestar/params"; +import {ForkName, ForkPreElectra, ForkPreExecution, isForkBlobs, isForkExecution} from "@lodestar/params"; import {Endpoint, RequestCodec, RouteDefinitions, Schema} from "../../../utils/index.js"; import {EmptyMeta, EmptyResponseCodec, EmptyResponseData, WithVersion} from "../../../utils/codecs.js"; import { @@ -101,10 +101,22 @@ export type Endpoints = { "GET", BlockArgs, {params: {block_id: string}}, - BeaconBlockBody["attestations"], + BeaconBlockBody["attestations"], ExecutionOptimisticAndFinalizedMeta >; + /** + * Get block attestations + * Retrieves attestation included in requested block. + */ + getBlockAttestationsV2: Endpoint< + "GET", + BlockArgs, + {params: {block_id: string}}, + BeaconBlockBody["attestations"], + ExecutionOptimisticFinalizedAndVersionMeta + >; + /** * Get block header * Retrieves block header for given block id. 
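A minimal consumer-side sketch of the versioned data introduced by getBlockAttestationsV2: the fork version returned in the response meta determines whether the attestations carry the electra committeeBits or the pre-electra per-attestation data.index. isForkPostElectra and isElectraAttestation are the helpers used elsewhere in this change; the function below and its name are illustrative only, not part of the diff.

import {ForkName, isForkPostElectra} from "@lodestar/params";
import {Attestation, isElectraAttestation} from "@lodestar/types";

// Count the committees represented in a block's attestations, using the
// version reported alongside the data to pick the right attestation shape.
function committeesInBlock(version: ForkName, attestations: Attestation[]): number {
  if (isForkPostElectra(version)) {
    // electra.Attestation: participating committees are flagged in committeeBits
    return attestations.reduce(
      (total, att) => total + (isElectraAttestation(att) ? att.committeeBits.getTrueBitIndexes().length : 0),
      0
    );
  }
  // phase0.Attestation: exactly one committee per attestation, identified by data.index
  return attestations.length;
}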
@@ -251,6 +263,15 @@ export function getDefinitions(config: ChainForkConfig): RouteDefinitions ssz[fork].BeaconBlockBody.fields.attestations), + meta: ExecutionOptimisticFinalizedAndVersionCodec, + }, + }, getBlockHeader: { url: "/eth/v1/beacon/headers/{block_id}", method: "GET", diff --git a/packages/api/src/beacon/routes/beacon/pool.ts b/packages/api/src/beacon/routes/beacon/pool.ts index f957390131f..4fe3efd4daf 100644 --- a/packages/api/src/beacon/routes/beacon/pool.ts +++ b/packages/api/src/beacon/routes/beacon/pool.ts @@ -1,7 +1,8 @@ /* eslint-disable @typescript-eslint/naming-convention */ import {ValueOf} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; -import {phase0, capella, CommitteeIndex, Slot, ssz} from "@lodestar/types"; +import {isForkPostElectra} from "@lodestar/params"; +import {phase0, capella, CommitteeIndex, Slot, ssz, electra, AttesterSlashing} from "@lodestar/types"; import {Schema, Endpoint, RouteDefinitions} from "../../../utils/index.js"; import { ArrayOf, @@ -12,19 +13,31 @@ import { EmptyRequest, EmptyResponseCodec, EmptyResponseData, + WithVersion, } from "../../../utils/codecs.js"; +import {MetaHeader, VersionCodec, VersionMeta} from "../../../utils/metadata.js"; +import {toForkName} from "../../../utils/fork.js"; +import {fromHeaders} from "../../../utils/headers.js"; // See /packages/api/src/routes/index.ts for reasoning and instructions to add new routes -const AttestationListType = ArrayOf(ssz.phase0.Attestation); -const AttesterSlashingListType = ArrayOf(ssz.phase0.AttesterSlashing); +const AttestationListTypePhase0 = ArrayOf(ssz.phase0.Attestation); +const AttestationListTypeElectra = ArrayOf(ssz.electra.Attestation); +const AttesterSlashingListTypePhase0 = ArrayOf(ssz.phase0.AttesterSlashing); +const AttesterSlashingListTypeElectra = ArrayOf(ssz.electra.AttesterSlashing); const ProposerSlashingListType = ArrayOf(ssz.phase0.ProposerSlashing); const SignedVoluntaryExitListType = ArrayOf(ssz.phase0.SignedVoluntaryExit); const SignedBLSToExecutionChangeListType = ArrayOf(ssz.capella.SignedBLSToExecutionChange); const SyncCommitteeMessageListType = ArrayOf(ssz.altair.SyncCommitteeMessage); -type AttestationList = ValueOf; -type AttesterSlashingList = ValueOf; +type AttestationListPhase0 = ValueOf; +type AttestationListElectra = ValueOf; +type AttestationList = AttestationListPhase0 | AttestationListElectra; + +type AttesterSlashingListPhase0 = ValueOf; +type AttesterSlashingListElectra = ValueOf; +type AttesterSlashingList = AttesterSlashingListPhase0 | AttesterSlashingListElectra; + type ProposerSlashingList = ValueOf; type SignedVoluntaryExitList = ValueOf; type SignedBLSToExecutionChangeList = ValueOf; @@ -39,10 +52,22 @@ export type Endpoints = { "GET", {slot?: Slot; committeeIndex?: CommitteeIndex}, {query: {slot?: number; committee_index?: number}}, - AttestationList, + AttestationListPhase0, EmptyMeta >; + /** + * Get Attestations from operations pool + * Retrieves attestations known by the node but not necessarily incorporated into any block + */ + getPoolAttestationsV2: Endpoint< + "GET", + {slot?: Slot; committeeIndex?: CommitteeIndex}, + {query: {slot?: number; committee_index?: number}}, + AttestationList, + VersionMeta + >; + /** * Get AttesterSlashings from operations pool * Retrieves attester slashings known by the node but not necessarily incorporated into any block @@ -52,10 +77,23 @@ export type Endpoints = { "GET", EmptyArgs, EmptyRequest, - AttesterSlashingList, + AttesterSlashingListPhase0, EmptyMeta >; + /** + * 
Get AttesterSlashings from operations pool + * Retrieves attester slashings known by the node but not necessarily incorporated into any block + */ + getPoolAttesterSlashingsV2: Endpoint< + // ⏎ + "GET", + EmptyArgs, + EmptyRequest, + AttesterSlashingList, + VersionMeta + >; + /** * Get ProposerSlashings from operations pool * Retrieves proposer slashings known by the node but not necessarily incorporated into any block @@ -105,12 +143,28 @@ export type Endpoints = { */ submitPoolAttestations: Endpoint< "POST", - {signedAttestations: AttestationList}, + {signedAttestations: AttestationListPhase0}, {body: unknown}, EmptyResponseData, EmptyMeta >; + /** + * Submit Attestation objects to node + * Submits Attestation objects to the node. Each attestation in the request body is processed individually. + * + * If an attestation is validated successfully the node MUST publish that attestation on the appropriate subnet. + * + * If one or more attestations fail validation the node MUST return a 400 error with details of which attestations have failed, and why. + */ + submitPoolAttestationsV2: Endpoint< + "POST", + {signedAttestations: AttestationList}, + {body: unknown; headers: {[MetaHeader.Version]: string}}, + EmptyResponseData, + EmptyMeta + >; + /** * Submit AttesterSlashing object to node's pool * Submits AttesterSlashing object to node's pool and if passes validation node MUST broadcast it to network. @@ -123,6 +177,18 @@ export type Endpoints = { EmptyMeta >; + /** + * Submit AttesterSlashing object to node's pool + * Submits AttesterSlashing object to node's pool and if passes validation node MUST broadcast it to network. + */ + submitPoolAttesterSlashingsV2: Endpoint< + "POST", + {attesterSlashing: AttesterSlashing}, + {body: unknown; headers: {[MetaHeader.Version]: string}}, + EmptyResponseData, + EmptyMeta + >; + /** * Submit ProposerSlashing object to node's pool * Submits ProposerSlashing object to node's pool and if passes validation node MUST broadcast it to network. @@ -172,7 +238,7 @@ export type Endpoints = { >; }; -export function getDefinitions(_config: ChainForkConfig): RouteDefinitions { +export function getDefinitions(config: ChainForkConfig): RouteDefinitions { return { getPoolAttestations: { url: "/eth/v1/beacon/pool/attestations", @@ -183,19 +249,43 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({query: {slot, committee_index: committeeIndex}}), + parseReq: ({query}) => ({slot: query.slot, committeeIndex: query.committee_index}), + schema: {query: {slot: Schema.Uint, committee_index: Schema.Uint}}, + }, + resp: { + data: WithVersion((fork) => (isForkPostElectra(fork) ? AttestationListTypeElectra : AttestationListTypePhase0)), + meta: VersionCodec, + }, + }, getPoolAttesterSlashings: { url: "/eth/v1/beacon/pool/attester_slashings", method: "GET", req: EmptyRequestCodec, resp: { - data: AttesterSlashingListType, + data: AttesterSlashingListTypePhase0, meta: EmptyMetaCodec, }, }, + getPoolAttesterSlashingsV2: { + url: "/eth/v2/beacon/pool/attester_slashings", + method: "GET", + req: EmptyRequestCodec, + resp: { + data: WithVersion((fork) => + isForkPostElectra(fork) ? 
AttesterSlashingListTypeElectra : AttesterSlashingListTypePhase0 + ), + meta: VersionCodec, + }, + }, getPoolProposerSlashings: { url: "/eth/v1/beacon/pool/proposer_slashings", method: "GET", @@ -227,16 +317,61 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({body: AttestationListType.toJson(signedAttestations)}), - parseReqJson: ({body}) => ({signedAttestations: AttestationListType.fromJson(body)}), - writeReqSsz: ({signedAttestations}) => ({body: AttestationListType.serialize(signedAttestations)}), - parseReqSsz: ({body}) => ({signedAttestations: AttestationListType.deserialize(body)}), + writeReqJson: ({signedAttestations}) => ({body: AttestationListTypePhase0.toJson(signedAttestations)}), + parseReqJson: ({body}) => ({signedAttestations: AttestationListTypePhase0.fromJson(body)}), + writeReqSsz: ({signedAttestations}) => ({body: AttestationListTypePhase0.serialize(signedAttestations)}), + parseReqSsz: ({body}) => ({signedAttestations: AttestationListTypePhase0.deserialize(body)}), schema: { body: Schema.ObjectArray, }, }, resp: EmptyResponseCodec, }, + submitPoolAttestationsV2: { + url: "/eth/v2/beacon/pool/attestations", + method: "POST", + req: { + writeReqJson: ({signedAttestations}) => { + const fork = config.getForkName(signedAttestations[0]?.data.slot ?? 0); + return { + body: isForkPostElectra(fork) + ? AttestationListTypeElectra.toJson(signedAttestations as AttestationListElectra) + : AttestationListTypePhase0.toJson(signedAttestations as AttestationListPhase0), + headers: {[MetaHeader.Version]: fork}, + }; + }, + parseReqJson: ({body, headers}) => { + const fork = toForkName(fromHeaders(headers, MetaHeader.Version)); + return { + signedAttestations: isForkPostElectra(fork) + ? AttestationListTypeElectra.fromJson(body) + : AttestationListTypePhase0.fromJson(body), + }; + }, + writeReqSsz: ({signedAttestations}) => { + const fork = config.getForkName(signedAttestations[0]?.data.slot ?? 0); + return { + body: isForkPostElectra(fork) + ? AttestationListTypeElectra.serialize(signedAttestations as AttestationListElectra) + : AttestationListTypePhase0.serialize(signedAttestations as AttestationListPhase0), + headers: {[MetaHeader.Version]: fork}, + }; + }, + parseReqSsz: ({body, headers}) => { + const fork = toForkName(fromHeaders(headers, MetaHeader.Version)); + return { + signedAttestations: isForkPostElectra(fork) + ? AttestationListTypeElectra.deserialize(body) + : AttestationListTypePhase0.deserialize(body), + }; + }, + schema: { + body: Schema.ObjectArray, + headers: {[MetaHeader.Version]: Schema.String}, + }, + }, + resp: EmptyResponseCodec, + }, submitPoolAttesterSlashings: { url: "/eth/v1/beacon/pool/attester_slashings", method: "POST", @@ -251,6 +386,51 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions { + const fork = config.getForkName(Number(attesterSlashing.attestation1.data.slot)); + return { + body: isForkPostElectra(fork) + ? ssz.electra.AttesterSlashing.toJson(attesterSlashing) + : ssz.phase0.AttesterSlashing.toJson(attesterSlashing), + headers: {[MetaHeader.Version]: fork}, + }; + }, + parseReqJson: ({body, headers}) => { + const fork = toForkName(fromHeaders(headers, MetaHeader.Version)); + return { + attesterSlashing: isForkPostElectra(fork) + ? 
ssz.electra.AttesterSlashing.fromJson(body) + : ssz.phase0.AttesterSlashing.fromJson(body), + }; + }, + writeReqSsz: ({attesterSlashing}) => { + const fork = config.getForkName(Number(attesterSlashing.attestation1.data.slot)); + return { + body: isForkPostElectra(fork) + ? ssz.electra.AttesterSlashing.serialize(attesterSlashing as electra.AttesterSlashing) + : ssz.phase0.AttesterSlashing.serialize(attesterSlashing as phase0.AttesterSlashing), + headers: {[MetaHeader.Version]: fork}, + }; + }, + parseReqSsz: ({body, headers}) => { + const fork = toForkName(fromHeaders(headers, MetaHeader.Version)); + return { + attesterSlashing: isForkPostElectra(fork) + ? ssz.electra.AttesterSlashing.deserialize(body) + : ssz.phase0.AttesterSlashing.deserialize(body), + }; + }, + schema: { + body: Schema.Object, + headers: {[MetaHeader.Version]: Schema.String}, + }, + }, + resp: EmptyResponseCodec, + }, submitPoolProposerSlashings: { url: "/eth/v1/beacon/pool/proposer_slashings", method: "POST", diff --git a/packages/api/src/beacon/routes/events.ts b/packages/api/src/beacon/routes/events.ts index 23be5e7c228..1f041aa3019 100644 --- a/packages/api/src/beacon/routes/events.ts +++ b/packages/api/src/beacon/routes/events.ts @@ -13,6 +13,9 @@ import { LightClientOptimisticUpdate, LightClientFinalityUpdate, SSEPayloadAttributes, + Attestation, + AttesterSlashing, + sszTypesFor, } from "@lodestar/types"; import {ForkName} from "@lodestar/params"; @@ -104,10 +107,10 @@ export type EventData = { block: RootHex; executionOptimistic: boolean; }; - [EventType.attestation]: phase0.Attestation; + [EventType.attestation]: Attestation; [EventType.voluntaryExit]: phase0.SignedVoluntaryExit; [EventType.proposerSlashing]: phase0.ProposerSlashing; - [EventType.attesterSlashing]: phase0.AttesterSlashing; + [EventType.attesterSlashing]: AttesterSlashing; [EventType.blsToExecutionChange]: capella.SignedBLSToExecutionChange; [EventType.finalizedCheckpoint]: { block: RootHex; @@ -184,7 +187,7 @@ export type TypeJson = { fromJson: (data: unknown) => T; // client }; -export function getTypeByEvent(): {[K in EventType]: TypeJson} { +export function getTypeByEvent(config: ChainForkConfig): {[K in EventType]: TypeJson} { // eslint-disable-next-line @typescript-eslint/naming-convention const WithVersion = (getType: (fork: ForkName) => TypeJson): TypeJson<{data: T; version: ForkName}> => { return { @@ -225,10 +228,28 @@ export function getTypeByEvent(): {[K in EventType]: TypeJson} { {jsonCase: "eth2"} ), - [EventType.attestation]: ssz.phase0.Attestation, + [EventType.attestation]: { + toJson: (attestation) => { + const fork = config.getForkName(attestation.data.slot); + return sszTypesFor(fork).Attestation.toJson(attestation); + }, + fromJson: (attestation) => { + const fork = config.getForkName((attestation as Attestation).data.slot); + return sszTypesFor(fork).Attestation.fromJson(attestation); + }, + }, [EventType.voluntaryExit]: ssz.phase0.SignedVoluntaryExit, [EventType.proposerSlashing]: ssz.phase0.ProposerSlashing, - [EventType.attesterSlashing]: ssz.phase0.AttesterSlashing, + [EventType.attesterSlashing]: { + toJson: (attesterSlashing) => { + const fork = config.getForkName(Number(attesterSlashing.attestation1.data.slot)); + return sszTypesFor(fork).AttesterSlashing.toJson(attesterSlashing); + }, + fromJson: (attesterSlashing) => { + const fork = config.getForkName(Number((attesterSlashing as AttesterSlashing).attestation1.data.slot)); + return sszTypesFor(fork).AttesterSlashing.fromJson(attesterSlashing); + }, + }, 
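    // The same fork-aware lookup, shown in isolation (sketch; assumes `attestation.data.slot`
    // falls in an electra epoch, so the electra SSZ type is selected):
    //   const fork = config.getForkName(attestation.data.slot); // -> ForkName.electra
    //   const json = sszTypesFor(fork).Attestation.toJson(attestation);
    //   const roundTripped = sszTypesFor(fork).Attestation.fromJson(json); // electra.Attestation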
[EventType.blsToExecutionChange]: ssz.capella.SignedBLSToExecutionChange, [EventType.finalizedCheckpoint]: new ContainerType( @@ -269,8 +290,8 @@ export function getTypeByEvent(): {[K in EventType]: TypeJson} { } // eslint-disable-next-line @typescript-eslint/explicit-function-return-type -export function getEventSerdes() { - const typeByEvent = getTypeByEvent(); +export function getEventSerdes(config: ChainForkConfig) { + const typeByEvent = getTypeByEvent(config); return { toJson: (event: BeaconEvent): unknown => { diff --git a/packages/api/src/beacon/routes/validator.ts b/packages/api/src/beacon/routes/validator.ts index 33161ec789e..664caf44a2c 100644 --- a/packages/api/src/beacon/routes/validator.ts +++ b/packages/api/src/beacon/routes/validator.ts @@ -1,7 +1,7 @@ /* eslint-disable @typescript-eslint/naming-convention */ import {ContainerType, fromHexString, toHexString, Type, ValueOf} from "@chainsafe/ssz"; import {ChainForkConfig} from "@lodestar/config"; -import {isForkBlobs} from "@lodestar/params"; +import {isForkBlobs, isForkPostElectra} from "@lodestar/params"; import { altair, BLSSignature, @@ -17,6 +17,8 @@ import { stringType, BeaconBlockOrContents, BlindedBeaconBlock, + Attestation, + sszTypesFor, } from "@lodestar/types"; import {Endpoint, RouteDefinitions, Schema} from "../../utils/index.js"; import {fromGraffitiHex, toBoolean, toGraffitiHex} from "../../utils/serdes.js"; @@ -41,6 +43,7 @@ import { VersionMeta, VersionType, } from "../../utils/metadata.js"; +import {fromHeaders} from "../../utils/headers.js"; // See /packages/api/src/routes/index.ts for reasoning and instructions to add new routes @@ -81,15 +84,6 @@ export type ProduceBlockV3Meta = ValueOf & { executionPayloadSource: ProducedBlockSource; }; -export const BlockContentsType = new ContainerType( - { - block: ssz.deneb.BeaconBlock, - kzgProofs: ssz.deneb.KZGProofs, - blobs: ssz.deneb.Blobs, - }, - {jsonCase: "eth2"} -); - export const AttesterDutyType = new ContainerType( { /** The validator's public key, uniquely identifying them */ @@ -208,7 +202,8 @@ export const ValidatorIndicesType = ArrayOf(ssz.ValidatorIndex); export const AttesterDutyListType = ArrayOf(AttesterDutyType); export const ProposerDutyListType = ArrayOf(ProposerDutyType); export const SyncDutyListType = ArrayOf(SyncDutyType); -export const SignedAggregateAndProofListType = ArrayOf(ssz.phase0.SignedAggregateAndProof); +export const SignedAggregateAndProofListPhase0Type = ArrayOf(ssz.phase0.SignedAggregateAndProof); +export const SignedAggregateAndProofListElectraType = ArrayOf(ssz.electra.SignedAggregateAndProof); export const SignedContributionAndProofListType = ArrayOf(ssz.altair.SignedContributionAndProof); export const BeaconCommitteeSubscriptionListType = ArrayOf(BeaconCommitteeSubscriptionType); export const SyncCommitteeSubscriptionListType = ArrayOf(SyncCommitteeSubscriptionType); @@ -225,7 +220,9 @@ export type ProposerDuty = ValueOf; export type ProposerDutyList = ValueOf; export type SyncDuty = ValueOf; export type SyncDutyList = ValueOf; -export type SignedAggregateAndProofList = ValueOf; +export type SignedAggregateAndProofListPhase0 = ValueOf; +export type SignedAggregateAndProofListElectra = ValueOf; +export type SignedAggregateAndProofList = SignedAggregateAndProofListPhase0 | SignedAggregateAndProofListElectra; export type SignedContributionAndProofList = ValueOf; export type BeaconCommitteeSubscription = ValueOf; export type BeaconCommitteeSubscriptionList = ValueOf; @@ -412,18 +409,48 @@ export type Endpoints = { EmptyMeta 
>; + /** + * Get aggregated attestation + * Aggregates all attestations matching given attestation data root, slot and committee index + * Returns an aggregated `Attestation` object with same `AttestationData` root. + */ + getAggregatedAttestationV2: Endpoint< + "GET", + { + /** HashTreeRoot of AttestationData that validator want's aggregated */ + attestationDataRoot: Root; + slot: Slot; + committeeIndex: number; + }, + {query: {attestation_data_root: string; slot: number; committee_index: number}}, + Attestation, + VersionMeta + >; + /** * Publish multiple aggregate and proofs * Verifies given aggregate and proofs and publishes them on appropriate gossipsub topic. */ publishAggregateAndProofs: Endpoint< "POST", - {signedAggregateAndProofs: SignedAggregateAndProofList}, + {signedAggregateAndProofs: SignedAggregateAndProofListPhase0}, {body: unknown}, EmptyResponseData, EmptyMeta >; + /** + * Publish multiple aggregate and proofs + * Verifies given aggregate and proofs and publishes them on appropriate gossipsub topic. + */ + publishAggregateAndProofsV2: Endpoint< + "POST", + {signedAggregateAndProofs: SignedAggregateAndProofList}, + {body: unknown; headers: {[MetaHeader.Version]: string}}, + EmptyResponseData, + EmptyMeta + >; + publishContributionAndProofs: Endpoint< "POST", {contributionAndProofs: SignedContributionAndProofList}, @@ -536,7 +563,7 @@ export type Endpoints = { >; }; -export function getDefinitions(_config: ChainForkConfig): RouteDefinitions { +export function getDefinitions(config: ChainForkConfig): RouteDefinitions { return { getAttesterDuties: { url: "/eth/v1/validator/duties/attester/{epoch}", @@ -624,7 +651,8 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions (isForkBlobs(fork) ? BlockContentsType : ssz[fork].BeaconBlock) as Type + (fork) => + (isForkBlobs(fork) ? sszTypesFor(fork).BlockContents : ssz[fork].BeaconBlock) as Type ), meta: VersionCodec, }, @@ -687,7 +715,7 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ), meta: { @@ -804,9 +832,15 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({ query: {attestation_data_root: toHexString(attestationDataRoot), slot}, }), - parseReq: ({query}) => ({attestationDataRoot: fromHexString(query.attestation_data_root), slot: query.slot}), + parseReq: ({query}) => ({ + attestationDataRoot: fromHexString(query.attestation_data_root), + slot: query.slot, + }), schema: { - query: {attestation_data_root: Schema.StringRequired, slot: Schema.UintRequired}, + query: { + attestation_data_root: Schema.StringRequired, + slot: Schema.UintRequired, + }, }, }, resp: { @@ -814,24 +848,106 @@ export function getDefinitions(_config: ChainForkConfig): RouteDefinitions ({ + query: {attestation_data_root: toHexString(attestationDataRoot), slot, committee_index: committeeIndex}, + }), + parseReq: ({query}) => ({ + attestationDataRoot: fromHexString(query.attestation_data_root), + slot: query.slot, + committeeIndex: query.committee_index, + }), + schema: { + query: { + attestation_data_root: Schema.StringRequired, + slot: Schema.UintRequired, + committee_index: Schema.UintRequired, + }, + }, + }, + resp: { + data: WithVersion((fork) => (isForkPostElectra(fork) ? 
ssz.electra.Attestation : ssz.phase0.Attestation)), + meta: VersionCodec, + }, + }, publishAggregateAndProofs: { url: "/eth/v1/validator/aggregate_and_proofs", method: "POST", req: { writeReqJson: ({signedAggregateAndProofs}) => ({ - body: SignedAggregateAndProofListType.toJson(signedAggregateAndProofs), + body: SignedAggregateAndProofListPhase0Type.toJson(signedAggregateAndProofs), + }), + parseReqJson: ({body}) => ({ + signedAggregateAndProofs: SignedAggregateAndProofListPhase0Type.fromJson(body), }), - parseReqJson: ({body}) => ({signedAggregateAndProofs: SignedAggregateAndProofListType.fromJson(body)}), writeReqSsz: ({signedAggregateAndProofs}) => ({ - body: SignedAggregateAndProofListType.serialize(signedAggregateAndProofs), + body: SignedAggregateAndProofListPhase0Type.serialize(signedAggregateAndProofs), + }), + parseReqSsz: ({body}) => ({ + signedAggregateAndProofs: SignedAggregateAndProofListPhase0Type.deserialize(body), }), - parseReqSsz: ({body}) => ({signedAggregateAndProofs: SignedAggregateAndProofListType.deserialize(body)}), schema: { body: Schema.ObjectArray, }, }, resp: EmptyResponseCodec, }, + publishAggregateAndProofsV2: { + url: "/eth/v2/validator/aggregate_and_proofs", + method: "POST", + req: { + writeReqJson: ({signedAggregateAndProofs}) => { + const fork = config.getForkName(signedAggregateAndProofs[0]?.message.aggregate.data.slot ?? 0); + return { + body: isForkPostElectra(fork) + ? SignedAggregateAndProofListElectraType.toJson( + signedAggregateAndProofs as SignedAggregateAndProofListElectra + ) + : SignedAggregateAndProofListPhase0Type.toJson( + signedAggregateAndProofs as SignedAggregateAndProofListPhase0 + ), + headers: {[MetaHeader.Version]: fork}, + }; + }, + parseReqJson: ({body, headers}) => { + const fork = toForkName(fromHeaders(headers, MetaHeader.Version)); + return { + signedAggregateAndProofs: isForkPostElectra(fork) + ? SignedAggregateAndProofListElectraType.fromJson(body) + : SignedAggregateAndProofListPhase0Type.fromJson(body), + }; + }, + writeReqSsz: ({signedAggregateAndProofs}) => { + const fork = config.getForkName(signedAggregateAndProofs[0]?.message.aggregate.data.slot ?? 0); + return { + body: isForkPostElectra(fork) + ? SignedAggregateAndProofListElectraType.serialize( + signedAggregateAndProofs as SignedAggregateAndProofListElectra + ) + : SignedAggregateAndProofListPhase0Type.serialize( + signedAggregateAndProofs as SignedAggregateAndProofListPhase0 + ), + headers: {[MetaHeader.Version]: fork}, + }; + }, + parseReqSsz: ({body, headers}) => { + const fork = toForkName(fromHeaders(headers, MetaHeader.Version)); + return { + signedAggregateAndProofs: isForkPostElectra(fork) + ? 
SignedAggregateAndProofListElectraType.deserialize(body) + : SignedAggregateAndProofListPhase0Type.deserialize(body), + }; + }, + schema: { + body: Schema.ObjectArray, + headers: {[MetaHeader.Version]: Schema.String}, + }, + }, + resp: EmptyResponseCodec, + }, publishContributionAndProofs: { url: "/eth/v1/validator/contribution_and_proofs", method: "POST", diff --git a/packages/api/src/beacon/server/events.ts b/packages/api/src/beacon/server/events.ts index cbeae24f690..96212f006d8 100644 --- a/packages/api/src/beacon/server/events.ts +++ b/packages/api/src/beacon/server/events.ts @@ -3,7 +3,7 @@ import {ApiError, ApplicationMethods, FastifyRoutes, createFastifyRoutes} from " import {Endpoints, getDefinitions, eventTypes, getEventSerdes} from "../routes/events.js"; export function getRoutes(config: ChainForkConfig, methods: ApplicationMethods): FastifyRoutes { - const eventSerdes = getEventSerdes(); + const eventSerdes = getEventSerdes(config); const serverRoutes = createFastifyRoutes(getDefinitions(config), methods); return { diff --git a/packages/api/test/unit/beacon/oapiSpec.test.ts b/packages/api/test/unit/beacon/oapiSpec.test.ts index e5d473ab6a5..a84b2cc3676 100644 --- a/packages/api/test/unit/beacon/oapiSpec.test.ts +++ b/packages/api/test/unit/beacon/oapiSpec.test.ts @@ -21,9 +21,9 @@ import {testData as validatorTestData} from "./testData/validator.js"; // eslint-disable-next-line @typescript-eslint/naming-convention const __dirname = path.dirname(fileURLToPath(import.meta.url)); -const version = "v2.5.0"; +const version = "v2.6.0-alpha.1"; const openApiFile: OpenApiFile = { - url: `https://github.com/ethereum/beacon-APIs/releases/download/${version}/beacon-node-oapi.json`, + url: `https://raw.githubusercontent.com/nflaig/beacon-api-spec/main/${version}/beacon-node-oapi.json`, filepath: path.join(__dirname, "../../../oapi-schemas/beacon-node-oapi.json"), version: RegExp(version), }; @@ -108,7 +108,7 @@ describe("eventstream event data", () => { } }); - const eventSerdes = routes.events.getEventSerdes(); + const eventSerdes = routes.events.getEventSerdes(config); const knownTopics = new Set(Object.values(routes.events.eventTypes)); for (const [topic, {value}] of Object.entries(eventstreamExamples ?? 
{}).filter( diff --git a/packages/api/test/unit/beacon/testData/beacon.ts b/packages/api/test/unit/beacon/testData/beacon.ts index 9a89abd68a1..b0e958f4bc5 100644 --- a/packages/api/test/unit/beacon/testData/beacon.ts +++ b/packages/api/test/unit/beacon/testData/beacon.ts @@ -49,6 +49,13 @@ export const testData: GenericServerTestCases = { args: {blockId: "head"}, res: {data: [ssz.phase0.Attestation.defaultValue()], meta: {executionOptimistic: true, finalized: false}}, }, + getBlockAttestationsV2: { + args: {blockId: "head"}, + res: { + data: [ssz.electra.Attestation.defaultValue()], + meta: {executionOptimistic: true, finalized: false, version: ForkName.electra}, + }, + }, getBlockHeader: { args: {blockId: "head"}, res: {data: blockHeaderResponse, meta: {executionOptimistic: true, finalized: false}}, @@ -94,10 +101,18 @@ export const testData: GenericServerTestCases = { args: {slot: 1, committeeIndex: 2}, res: {data: [ssz.phase0.Attestation.defaultValue()]}, }, + getPoolAttestationsV2: { + args: {slot: 1, committeeIndex: 2}, + res: {data: [ssz.electra.Attestation.defaultValue()], meta: {version: ForkName.electra}}, + }, getPoolAttesterSlashings: { args: undefined, res: {data: [ssz.phase0.AttesterSlashing.defaultValue()]}, }, + getPoolAttesterSlashingsV2: { + args: undefined, + res: {data: [ssz.electra.AttesterSlashing.defaultValue()], meta: {version: ForkName.electra}}, + }, getPoolProposerSlashings: { args: undefined, res: {data: [ssz.phase0.ProposerSlashing.defaultValue()]}, @@ -114,10 +129,18 @@ export const testData: GenericServerTestCases = { args: {signedAttestations: [ssz.phase0.Attestation.defaultValue()]}, res: undefined, }, + submitPoolAttestationsV2: { + args: {signedAttestations: [ssz.phase0.Attestation.defaultValue()]}, + res: undefined, + }, submitPoolAttesterSlashings: { args: {attesterSlashing: ssz.phase0.AttesterSlashing.defaultValue()}, res: undefined, }, + submitPoolAttesterSlashingsV2: { + args: {attesterSlashing: ssz.phase0.AttesterSlashing.defaultValue()}, + res: undefined, + }, submitPoolProposerSlashings: { args: {proposerSlashing: ssz.phase0.ProposerSlashing.defaultValue()}, res: undefined, diff --git a/packages/api/test/unit/beacon/testData/validator.ts b/packages/api/test/unit/beacon/testData/validator.ts index 11fd7dd2642..d4fae4bfe29 100644 --- a/packages/api/test/unit/beacon/testData/validator.ts +++ b/packages/api/test/unit/beacon/testData/validator.ts @@ -102,10 +102,18 @@ export const testData: GenericServerTestCases = { args: {attestationDataRoot: ZERO_HASH, slot: 32000}, res: {data: ssz.phase0.Attestation.defaultValue()}, }, + getAggregatedAttestationV2: { + args: {attestationDataRoot: ZERO_HASH, slot: 32000, committeeIndex: 2}, + res: {data: ssz.electra.Attestation.defaultValue(), meta: {version: ForkName.electra}}, + }, publishAggregateAndProofs: { args: {signedAggregateAndProofs: [ssz.phase0.SignedAggregateAndProof.defaultValue()]}, res: undefined, }, + publishAggregateAndProofsV2: { + args: {signedAggregateAndProofs: [ssz.phase0.SignedAggregateAndProof.defaultValue()]}, + res: undefined, + }, publishContributionAndProofs: { args: {contributionAndProofs: [ssz.altair.SignedContributionAndProof.defaultValue()]}, res: undefined, diff --git a/packages/beacon-node/package.json b/packages/beacon-node/package.json index b56e1f1bb3e..b41450f12d3 100644 --- a/packages/beacon-node/package.json +++ b/packages/beacon-node/package.json @@ -103,7 +103,7 @@ "@chainsafe/libp2p-noise": "^15.0.0", "@chainsafe/persistent-merkle-tree": "^0.8.0", 
"@chainsafe/prometheus-gc-stats": "^1.0.0", - "@chainsafe/ssz": "^0.17.0", + "@chainsafe/ssz": "^0.17.1", "@chainsafe/threads": "^1.11.1", "@ethersproject/abi": "^5.7.0", "@fastify/bearer-auth": "^9.0.0", diff --git a/packages/beacon-node/src/api/impl/beacon/blocks/index.ts b/packages/beacon-node/src/api/impl/beacon/blocks/index.ts index 4d0f96d1ea0..65e7b9373a2 100644 --- a/packages/beacon-node/src/api/impl/beacon/blocks/index.ts +++ b/packages/beacon-node/src/api/impl/beacon/blocks/index.ts @@ -1,12 +1,12 @@ import {routes} from "@lodestar/api"; -import {ApplicationMethods} from "@lodestar/api/server"; +import {ApiError, ApplicationMethods} from "@lodestar/api/server"; import { computeEpochAtSlot, computeTimeAtSlot, reconstructFullBlockOrContents, signedBeaconBlockToBlinded, } from "@lodestar/state-transition"; -import {ForkExecution, SLOTS_PER_HISTORICAL_ROOT, isForkExecution} from "@lodestar/params"; +import {ForkExecution, SLOTS_PER_HISTORICAL_ROOT, isForkExecution, isForkPostElectra} from "@lodestar/params"; import {sleep, fromHex, toRootHex} from "@lodestar/utils"; import { deneb, @@ -407,12 +407,29 @@ export function getBeaconBlockApi({ async getBlockAttestations({blockId}) { const {block, executionOptimistic, finalized} = await getBlockResponse(chain, blockId); + const fork = config.getForkName(block.message.slot); + + if (isForkPostElectra(fork)) { + throw new ApiError( + 400, + `Use getBlockAttestationsV2 to retrieve block attestations for post-electra fork=${fork}` + ); + } + return { - data: Array.from(block.message.body.attestations), + data: block.message.body.attestations, meta: {executionOptimistic, finalized}, }; }, + async getBlockAttestationsV2({blockId}) { + const {block, executionOptimistic, finalized} = await getBlockResponse(chain, blockId); + return { + data: block.message.body.attestations, + meta: {executionOptimistic, finalized, version: config.getForkName(block.message.slot)}, + }; + }, + async getBlockRoot({blockId}) { // Fast path: From head state already available in memory get historical blockRoot const slot = typeof blockId === "string" ? 
parseInt(blockId) : blockId; diff --git a/packages/beacon-node/src/api/impl/beacon/pool/index.ts b/packages/beacon-node/src/api/impl/beacon/pool/index.ts index 8372b84db3b..398238aa250 100644 --- a/packages/beacon-node/src/api/impl/beacon/pool/index.ts +++ b/packages/beacon-node/src/api/impl/beacon/pool/index.ts @@ -1,7 +1,7 @@ import {routes} from "@lodestar/api"; import {ApplicationMethods} from "@lodestar/api/server"; -import {Epoch, ssz} from "@lodestar/types"; -import {SYNC_COMMITTEE_SUBNET_SIZE} from "@lodestar/params"; +import {Attestation, Epoch, isElectraAttestation, ssz} from "@lodestar/types"; +import {ForkName, SYNC_COMMITTEE_SUBNET_SIZE, isForkPostElectra} from "@lodestar/params"; import {validateApiAttestation} from "../../../../chain/validation/index.js"; import {validateApiAttesterSlashing} from "../../../../chain/validation/attesterSlashing.js"; import {validateApiProposerSlashing} from "../../../../chain/validation/proposerSlashing.js"; @@ -16,6 +16,7 @@ import { SyncCommitteeError, } from "../../../../chain/errors/index.js"; import {validateGossipFnRetryUnknownRoot} from "../../../../network/processor/gossipHandlers.js"; +import {ApiError} from "../../errors.js"; export function getBeaconPoolApi({ chain, @@ -26,7 +27,15 @@ export function getBeaconPoolApi({ return { async getPoolAttestations({slot, committeeIndex}) { // Already filtered by slot - let attestations = chain.aggregatedAttestationPool.getAll(slot); + let attestations: Attestation[] = chain.aggregatedAttestationPool.getAll(slot); + const fork = chain.config.getForkName(slot ?? chain.clock.currentSlot); + + if (isForkPostElectra(fork)) { + throw new ApiError( + 400, + `Use getPoolAttestationsV2 to retrieve pool attestations for post-electra fork=${fork}` + ); + } if (committeeIndex !== undefined) { attestations = attestations.filter((attestation) => committeeIndex === attestation.data.index); @@ -35,10 +44,32 @@ export function getBeaconPoolApi({ return {data: attestations}; }, + async getPoolAttestationsV2({slot, committeeIndex}) { + // Already filtered by slot + let attestations = chain.aggregatedAttestationPool.getAll(slot); + const fork = chain.config.getForkName(slot ?? attestations[0]?.data.slot ?? chain.clock.currentSlot); + const isPostElectra = isForkPostElectra(fork); + + attestations = attestations.filter((attestation) => + isPostElectra ? 
isElectraAttestation(attestation) : !isElectraAttestation(attestation) + ); + + if (committeeIndex !== undefined) { + attestations = attestations.filter((attestation) => committeeIndex === attestation.data.index); + } + + return {data: attestations, meta: {version: fork}}; + }, + async getPoolAttesterSlashings() { return {data: chain.opPool.getAllAttesterSlashings()}; }, + async getPoolAttesterSlashingsV2() { + // TODO Electra: Determine fork based on data returned by api + return {data: chain.opPool.getAllAttesterSlashings(), meta: {version: ForkName.phase0}}; + }, + async getPoolProposerSlashings() { return {data: chain.opPool.getAllProposerSlashings()}; }, @@ -52,6 +83,10 @@ export function getBeaconPoolApi({ }, async submitPoolAttestations({signedAttestations}) { + await this.submitPoolAttestationsV2({signedAttestations}); + }, + + async submitPoolAttestationsV2({signedAttestations}) { const seenTimestampSec = Date.now() / 1000; const errors: Error[] = []; @@ -65,7 +100,7 @@ export function getBeaconPoolApi({ // when a validator is configured with multiple beacon node urls, this attestation data may come from another beacon node // and the block hasn't been in our forkchoice since we haven't seen / processing that block // see https://github.com/ChainSafe/lodestar/issues/5098 - const {indexedAttestation, subnet, attDataRootHex} = await validateGossipFnRetryUnknownRoot( + const {indexedAttestation, subnet, attDataRootHex, committeeIndex} = await validateGossipFnRetryUnknownRoot( validateFn, network, chain, @@ -74,7 +109,7 @@ export function getBeaconPoolApi({ ); if (network.shouldAggregate(subnet, slot)) { - const insertOutcome = chain.attestationPool.add(attestation, attDataRootHex); + const insertOutcome = chain.attestationPool.add(committeeIndex, attestation, attDataRootHex); metrics?.opPool.attestationPoolInsertOutcome.inc({insertOutcome}); } @@ -114,6 +149,11 @@ export function getBeaconPoolApi({ await network.publishAttesterSlashing(attesterSlashing); }, + async submitPoolAttesterSlashingsV2({attesterSlashing}) { + // TODO Electra: Refactor submitPoolAttesterSlashings and submitPoolAttesterSlashingsV2 + await this.submitPoolAttesterSlashings({attesterSlashing}); + }, + async submitPoolProposerSlashings({proposerSlashing}) { await validateApiProposerSlashing(chain, proposerSlashing); chain.opPool.insertProposerSlashing(proposerSlashing); diff --git a/packages/beacon-node/src/api/impl/config/constants.ts b/packages/beacon-node/src/api/impl/config/constants.ts index 87ffce91b4d..4b239ee4cac 100644 --- a/packages/beacon-node/src/api/impl/config/constants.ts +++ b/packages/beacon-node/src/api/impl/config/constants.ts @@ -36,13 +36,17 @@ import { SYNC_COMMITTEE_SUBNET_COUNT, BLOB_TX_TYPE, VERSIONED_HASH_VERSION_KZG, + COMPOUNDING_WITHDRAWAL_PREFIX, + DOMAIN_CONSOLIDATION, + UNSET_DEPOSIT_REQUESTS_START_INDEX, + FULL_EXIT_REQUEST_AMOUNT, } from "@lodestar/params"; /* eslint-disable @typescript-eslint/naming-convention */ /** * Hand-picked list of constants declared in consensus-spec .md files. 
- * This list is asserted to be up-to-date with the test `test/e2e/api/specConstants.test.ts` + * This list is asserted to be up-to-date with the test `test/e2e/api/impl/config.test.ts` */ export const specConstants = { // phase0/beacon-chain.md @@ -57,6 +61,7 @@ export const specConstants = { // ## Withdrawal prefixes BLS_WITHDRAWAL_PREFIX, ETH1_ADDRESS_WITHDRAWAL_PREFIX, + COMPOUNDING_WITHDRAWAL_PREFIX, // ## Domain types DOMAIN_BEACON_PROPOSER, DOMAIN_BEACON_ATTESTER, @@ -66,6 +71,7 @@ export const specConstants = { DOMAIN_SELECTION_PROOF, DOMAIN_AGGREGATE_AND_PROOF, DOMAIN_APPLICATION_BUILDER, + DOMAIN_CONSOLIDATION, // phase0/validator.md TARGET_AGGREGATORS_PER_COMMITTEE, @@ -100,4 +106,8 @@ export const specConstants = { // Deneb types BLOB_TX_TYPE, VERSIONED_HASH_VERSION_KZG, + + // electra + UNSET_DEPOSIT_REQUESTS_START_INDEX, + FULL_EXIT_REQUEST_AMOUNT, }; diff --git a/packages/beacon-node/src/api/impl/validator/index.ts b/packages/beacon-node/src/api/impl/validator/index.ts index ca0fe1ac95a..a17c1418809 100644 --- a/packages/beacon-node/src/api/impl/validator/index.ts +++ b/packages/beacon-node/src/api/impl/validator/index.ts @@ -21,6 +21,7 @@ import { ForkPreBlobs, ForkBlobs, ForkExecution, + isForkPostElectra, } from "@lodestar/params"; import {MAX_BUILDER_BOOST_FACTOR} from "@lodestar/validator"; import { @@ -814,6 +815,7 @@ export function getValidatorApi( const attEpoch = computeEpochAtSlot(slot); const headBlockRootHex = chain.forkChoice.getHead().blockRoot; const headBlockRoot = fromHex(headBlockRootHex); + const fork = config.getForkName(slot); const beaconBlockRoot = slot >= headSlot @@ -845,7 +847,7 @@ export function getValidatorApi( return { data: { slot, - index: committeeIndex, + index: isForkPostElectra(fork) ? 0 : committeeIndex, beaconBlockRoot, source: attEpochState.currentJustifiedCheckpoint, target: {epoch: attEpoch, root: targetRoot}, @@ -1076,8 +1078,16 @@ export function getValidatorApi( await waitForSlot(slot); // Must never request for a future slot > currentSlot - const dataRootHex = toRootHex(attestationDataRoot); - const aggregate = chain.attestationPool.getAggregate(slot, dataRootHex); + const dataRootHex = toHex(attestationDataRoot); + const aggregate = chain.attestationPool.getAggregate(slot, null, dataRootHex); + const fork = chain.config.getForkName(slot); + + if (isForkPostElectra(fork)) { + throw new ApiError( + 400, + `Use getAggregatedAttestationV2 to retrieve aggregated attestations for post-electra fork=${fork}` + ); + } if (!aggregate) { throw new ApiError(404, `No aggregated attestation for slot=${slot}, dataRoot=${dataRootHex}`); @@ -1090,7 +1100,34 @@ export function getValidatorApi( }; }, + async getAggregatedAttestationV2({attestationDataRoot, slot, committeeIndex}) { + notWhileSyncing(); + + await waitForSlot(slot); // Must never request for a future slot > currentSlot + + const dataRootHex = toRootHex(attestationDataRoot); + const aggregate = chain.attestationPool.getAggregate(slot, committeeIndex, dataRootHex); + + if (!aggregate) { + throw new ApiError( + 404, + `No aggregated attestation for slot=${slot}, committeeIndex=${committeeIndex}, dataRoot=${dataRootHex}` + ); + } + + metrics?.production.producedAggregateParticipants.observe(aggregate.aggregationBits.getTrueBitIndexes().length); + + return { + data: aggregate, + meta: {version: config.getForkName(slot)}, + }; + }, + async publishAggregateAndProofs({signedAggregateAndProofs}) { + await this.publishAggregateAndProofsV2({signedAggregateAndProofs}); + }, + + async 
publishAggregateAndProofsV2({signedAggregateAndProofs}) { notWhileSyncing(); const seenTimestampSec = Date.now() / 1000; diff --git a/packages/beacon-node/src/chain/blocks/importBlock.ts b/packages/beacon-node/src/chain/blocks/importBlock.ts index 360a3f8f5db..de5ecf607d9 100644 --- a/packages/beacon-node/src/chain/blocks/importBlock.ts +++ b/packages/beacon-node/src/chain/blocks/importBlock.ts @@ -69,6 +69,7 @@ export async function importBlock( const prevFinalizedEpoch = this.forkChoice.getFinalizedCheckpoint().epoch; const blockDelaySec = (fullyVerifiedBlock.seenTimestampSec - postState.genesisTime) % this.config.SECONDS_PER_SLOT; const recvToValLatency = Date.now() / 1000 - (opts.seenTimestampSec ?? Date.now() / 1000); + const fork = this.config.getForkSeq(blockSlot); // this is just a type assertion since blockinput with dataPromise type will not end up here if (blockInput.type === BlockInputType.dataPromise) { @@ -120,7 +121,8 @@ export async function importBlock( for (const attestation of attestations) { try { - const indexedAttestation = postState.epochCtx.getIndexedAttestation(attestation); + // TODO Electra: figure out how to reuse the attesting indices computed from state transition + const indexedAttestation = postState.epochCtx.getIndexedAttestation(fork, attestation); const {target, beaconBlockRoot} = attestation.data; const attDataRoot = toRootHex(ssz.phase0.AttestationData.hashTreeRoot(indexedAttestation.data)); diff --git a/packages/beacon-node/src/chain/blocks/types.ts b/packages/beacon-node/src/chain/blocks/types.ts index da573bb7633..8b793932e95 100644 --- a/packages/beacon-node/src/chain/blocks/types.ts +++ b/packages/beacon-node/src/chain/blocks/types.ts @@ -1,7 +1,7 @@ import {CachedBeaconStateAllForks, computeEpochAtSlot} from "@lodestar/state-transition"; import {MaybeValidExecutionStatus, DataAvailabilityStatus} from "@lodestar/fork-choice"; import {deneb, Slot, RootHex, SignedBeaconBlock} from "@lodestar/types"; -import {ForkSeq, ForkName} from "@lodestar/params"; +import {ForkSeq, ForkBlobs} from "@lodestar/params"; import {ChainForkConfig} from "@lodestar/config"; export enum BlockInputType { @@ -36,7 +36,7 @@ export enum GossipedInputType { type BlobsCacheMap = Map; -type ForkBlobsInfo = {fork: ForkName.deneb}; +type ForkBlobsInfo = {fork: ForkBlobs}; type BlobsData = {blobs: deneb.BlobSidecars; blobsBytes: (Uint8Array | null)[]; blobsSource: BlobsSource}; export type BlockInputDataBlobs = ForkBlobsInfo & BlobsData; export type BlockInputData = BlockInputDataBlobs; diff --git a/packages/beacon-node/src/chain/chain.ts b/packages/beacon-node/src/chain/chain.ts index 75608ab33b2..b410a1e8465 100644 --- a/packages/beacon-node/src/chain/chain.ts +++ b/packages/beacon-node/src/chain/chain.ts @@ -133,7 +133,7 @@ export class BeaconChain implements IBeaconChain { // Ops pool readonly attestationPool: AttestationPool; - readonly aggregatedAttestationPool = new AggregatedAttestationPool(); + readonly aggregatedAttestationPool: AggregatedAttestationPool; readonly syncCommitteeMessagePool: SyncCommitteeMessagePool; readonly syncContributionAndProofPool = new SyncContributionAndProofPool(); readonly opPool = new OpPool(); @@ -226,7 +226,13 @@ export class BeaconChain implements IBeaconChain { if (!clock) clock = new Clock({config, genesisTime: this.genesisTime, signal}); const preAggregateCutOffTime = (2 / 3) * this.config.SECONDS_PER_SLOT; - this.attestationPool = new AttestationPool(clock, preAggregateCutOffTime, this.opts?.preaggregateSlotDistance); + 
this.attestationPool = new AttestationPool( + config, + clock, + preAggregateCutOffTime, + this.opts?.preaggregateSlotDistance + ); + this.aggregatedAttestationPool = new AggregatedAttestationPool(this.config); this.syncCommitteeMessagePool = new SyncCommitteeMessagePool( clock, preAggregateCutOffTime, @@ -1027,6 +1033,9 @@ export class BeaconChain implements IBeaconChain { metrics.forkChoice.balancesLength.set(forkChoiceMetrics.balancesLength); metrics.forkChoice.nodes.set(forkChoiceMetrics.nodes); metrics.forkChoice.indices.set(forkChoiceMetrics.indices); + + const headState = this.getHeadState(); + metrics.headState.unfinalizedPubkeyCacheSize.set(headState.epochCtx.unfinalizedPubkey2index.size); } private onClockSlot(slot: Slot): void { @@ -1115,6 +1124,39 @@ export class BeaconChain implements IBeaconChain { if (headState) { this.opPool.pruneAll(headBlock, headState); } + + const cpEpoch = cp.epoch; + + if (headState === null) { + this.logger.verbose("Head state is null"); + } else if (cpEpoch >= this.config.ELECTRA_FORK_EPOCH) { + // Get the validator.length from the state at cpEpoch + // We are confident the last element in the list is from headEpoch + // Thus we query from the end of the list. (cpEpoch - headEpoch - 1) is negative number + const pivotValidatorIndex = headState.epochCtx.getValidatorCountAtEpoch(cpEpoch); + + if (pivotValidatorIndex !== undefined) { + // Note EIP-6914 will break this logic + const newFinalizedValidators = headState.epochCtx.unfinalizedPubkey2index.filter( + (index, _pubkey) => index < pivotValidatorIndex + ); + + // Populate finalized pubkey cache and remove unfinalized pubkey cache + if (!newFinalizedValidators.isEmpty()) { + this.regen.updateUnfinalizedPubkeys(newFinalizedValidators); + } + } + } + + // TODO-Electra: Deprecating eth1Data poll requires a check on a finalized checkpoint state. + // Will resolve this later + // if (cpEpoch >= (this.config.ELECTRA_FORK_EPOCH ?? Infinity)) { + // // finalizedState can be safely casted to Electra state since cp is already post-Electra + // if (finalizedState.eth1DepositIndex >= (finalizedState as CachedBeaconStateElectra).depositRequestsStartIndex) { + // // Signal eth1 to stop polling eth1Data + // this.eth1.stopPollingEth1Data(); + // } + // } } async updateBeaconProposerData(epoch: Epoch, proposers: ProposerPreparationData[]): Promise { diff --git a/packages/beacon-node/src/chain/errors/attestationError.ts b/packages/beacon-node/src/chain/errors/attestationError.ts index fa0635bc011..9f8e86cea1a 100644 --- a/packages/beacon-node/src/chain/errors/attestationError.ts +++ b/packages/beacon-node/src/chain/errors/attestationError.ts @@ -127,6 +127,14 @@ export enum AttestationErrorCode { INVALID_SERIALIZED_BYTES = "ATTESTATION_ERROR_INVALID_SERIALIZED_BYTES", /** Too many skipped slots. */ TOO_MANY_SKIPPED_SLOTS = "ATTESTATION_ERROR_TOO_MANY_SKIPPED_SLOTS", + /** + * Electra: The aggregated attestation does not have exactly one committee bit set. 
+ */ + NOT_EXACTLY_ONE_COMMITTEE_BIT_SET = "ATTESTATION_ERROR_NOT_EXACTLY_ONE_COMMITTEE_BIT_SET", + /** + * Electra: Invalid attestationData index: is non-zero + */ + NON_ZERO_ATTESTATION_DATA_INDEX = "ATTESTATION_ERROR_NON_ZERO_ATTESTATION_DATA_INDEX", } export type AttestationErrorType = @@ -160,7 +168,9 @@ export type AttestationErrorType = | {code: AttestationErrorCode.INVALID_AGGREGATOR} | {code: AttestationErrorCode.INVALID_INDEXED_ATTESTATION} | {code: AttestationErrorCode.INVALID_SERIALIZED_BYTES} - | {code: AttestationErrorCode.TOO_MANY_SKIPPED_SLOTS; headBlockSlot: Slot; attestationSlot: Slot}; + | {code: AttestationErrorCode.TOO_MANY_SKIPPED_SLOTS; headBlockSlot: Slot; attestationSlot: Slot} + | {code: AttestationErrorCode.NOT_EXACTLY_ONE_COMMITTEE_BIT_SET} + | {code: AttestationErrorCode.NON_ZERO_ATTESTATION_DATA_INDEX}; export class AttestationError extends GossipActionError { getMetadata(): Record { diff --git a/packages/beacon-node/src/chain/historicalState/worker.ts b/packages/beacon-node/src/chain/historicalState/worker.ts index 9a9f9cc9cd0..a07207cac5f 100644 --- a/packages/beacon-node/src/chain/historicalState/worker.ts +++ b/packages/beacon-node/src/chain/historicalState/worker.ts @@ -82,6 +82,10 @@ if (metricsRegister) { buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5], labelNames: ["source"], }), + numEffectiveBalanceUpdates: metricsRegister.gauge({ + name: "lodestar_historical_state_stfn_num_effective_balance_updates_count", + help: "Count of effective balance updates in epoch transition", + }), preStateBalancesNodesPopulatedMiss: metricsRegister.gauge<{source: StateCloneSource}>({ name: "lodestar_historical_state_stfn_balances_nodes_populated_miss_total", help: "Total count state.balances nodesPopulated is false on stfn", diff --git a/packages/beacon-node/src/chain/opPools/aggregatedAttestationPool.ts b/packages/beacon-node/src/chain/opPools/aggregatedAttestationPool.ts index 8277732ea1d..9a3e6622d1f 100644 --- a/packages/beacon-node/src/chain/opPools/aggregatedAttestationPool.ts +++ b/packages/beacon-node/src/chain/opPools/aggregatedAttestationPool.ts @@ -1,6 +1,26 @@ -import {aggregateSignatures} from "@chainsafe/blst"; -import {ForkName, ForkSeq, MAX_ATTESTATIONS, MIN_ATTESTATION_INCLUSION_DELAY, SLOTS_PER_EPOCH} from "@lodestar/params"; -import {phase0, Epoch, Slot, ssz, ValidatorIndex, RootHex} from "@lodestar/types"; +import {aggregateSignatures, Signature} from "@chainsafe/blst"; +import {BitArray} from "@chainsafe/ssz"; +import { + ForkName, + ForkSeq, + isForkPostElectra, + MAX_ATTESTATIONS, + MAX_ATTESTATIONS_ELECTRA, + MAX_COMMITTEES_PER_SLOT, + MIN_ATTESTATION_INCLUSION_DELAY, + SLOTS_PER_EPOCH, +} from "@lodestar/params"; +import { + phase0, + Epoch, + Slot, + ssz, + ValidatorIndex, + RootHex, + electra, + isElectraAttestation, + Attestation, +} from "@lodestar/types"; import { CachedBeaconStateAllForks, CachedBeaconStatePhase0, @@ -10,7 +30,8 @@ import { getBlockRootAtSlot, } from "@lodestar/state-transition"; import {IForkChoice, EpochDifference} from "@lodestar/fork-choice"; -import {MapDef, toRootHex} from "@lodestar/utils"; +import {MapDef, toRootHex, assert} from "@lodestar/utils"; +import {ChainForkConfig} from "@lodestar/config"; import {intersectUint8Arrays, IntersectResult} from "../../util/bitArray.js"; import {pruneBySlot, signatureFromBytesNoCheck} from "./utils.js"; import {InsertOutcome} from "./types.js"; @@ -19,13 +40,24 @@ type DataRootHex = string; type CommitteeIndex = number; -type AttestationWithScore = {attestation: phase0.Attestation; 
score: number}; +// for pre-electra +type AttestationWithScore = {attestation: Attestation; score: number}; +/** + * for electra, this is to consolidate aggregated attestations of the same attestation data into a single attestation to be included in block + * note that this is local definition in this file and it's NOT validator consolidation + */ +export type AttestationsConsolidation = { + byCommittee: Map; + attData: phase0.AttestationData; + totalNotSeenCount: number; + score: number; +}; /** - * This function returns not seen participation for a given epoch and committee. + * This function returns not seen participation for a given epoch and slot and committe index. * Return null if all validators are seen or no info to check. */ -type GetNotSeenValidatorsFn = (epoch: Epoch, committee: Uint32Array) => Set | null; +type GetNotSeenValidatorsFn = (epoch: Epoch, slot: Slot, committeeIndex: number) => Set | null; type ValidateAttestationDataFn = (attData: phase0.AttestationData) => boolean; @@ -38,14 +70,21 @@ type ValidateAttestationDataFn = (attData: phase0.AttestationData) => boolean; const MAX_RETAINED_ATTESTATIONS_PER_GROUP = 4; /** - * On mainnet, each slot has 64 committees, and each block has 128 attestations max so in average + * Pre-electra, each slot has 64 committees, and each block has 128 attestations max so in average * we get 2 attestation per groups. * Starting from Jan 2024, we have a performance issue getting attestations for a block. Based on the - * fact that lot of groups will have only 1 attestation since it's full of participation increase this number + * fact that lot of groups will have only 1 full participation attestation, increase this number * a bit higher than average. This also help decrease number of slots to search for attestations. */ const MAX_ATTESTATIONS_PER_GROUP = 3; +/** + * For electra, each block has up to 8 aggregated attestations, assuming there are 3 for the "best" + * attestation data, there are still 5 for other attestation data so this constant is still good. + * We should separate to 2 constant based on conditions of different networks + */ +const MAX_ATTESTATIONS_PER_GROUP_ELECTRA = 3; + /** * Maintain a pool of aggregated attestations. Attestations can be retrieved for inclusion in a block * or api. The returned attestations are aggregated to maximise the number of validators that can be @@ -53,20 +92,27 @@ const MAX_ATTESTATIONS_PER_GROUP = 3; * Note that we want to remove attestations with attesters that were included in the chain. 
*/ export class AggregatedAttestationPool { - private readonly attestationGroupByDataHashByIndexBySlot = new MapDef< + /** + * post electra, different committees could have the same AttData and we have to consolidate attestations of the same + * data to be included in block, so we should group by data before index + * // TODO: make sure it does not affect performance for pre electra forks + */ + private readonly attestationGroupByIndexByDataHexBySlot = new MapDef< Slot, - Map> - >(() => new Map>()); + Map> + >(() => new Map>()); private lowestPermissibleSlot = 0; + constructor(private readonly config: ChainForkConfig) {} + /** For metrics to track size of the pool */ getAttestationCount(): {attestationCount: number; attestationDataCount: number} { let attestationCount = 0; let attestationDataCount = 0; - for (const attestationGroupByDataByIndex of this.attestationGroupByDataHashByIndexBySlot.values()) { - for (const attestationGroupByData of attestationGroupByDataByIndex.values()) { - attestationDataCount += attestationGroupByData.size; - for (const attestationGroup of attestationGroupByData.values()) { + for (const attestationGroupByIndexByDataHex of this.attestationGroupByIndexByDataHexBySlot.values()) { + for (const attestationGroupByIndex of attestationGroupByIndexByDataHex.values()) { + attestationDataCount += attestationGroupByIndex.size; + for (const attestationGroup of attestationGroupByIndex.values()) { attestationCount += attestationGroup.getAttestationCount(); } } @@ -75,7 +121,7 @@ export class AggregatedAttestationPool { } add( - attestation: phase0.Attestation, + attestation: Attestation, dataRootHex: RootHex, attestingIndicesCount: number, committee: Uint32Array @@ -88,16 +134,32 @@ export class AggregatedAttestationPool { return InsertOutcome.Old; } - const attestationGroupByDataHashByIndex = this.attestationGroupByDataHashByIndexBySlot.getOrDefault(slot); - let attestationGroupByDataHash = attestationGroupByDataHashByIndex.get(attestation.data.index); - if (!attestationGroupByDataHash) { - attestationGroupByDataHash = new Map(); - attestationGroupByDataHashByIndex.set(attestation.data.index, attestationGroupByDataHash); + const attestationGroupByIndexByDataHash = this.attestationGroupByIndexByDataHexBySlot.getOrDefault(slot); + let attestationGroupByIndex = attestationGroupByIndexByDataHash.get(dataRootHex); + if (!attestationGroupByIndex) { + attestationGroupByIndex = new Map(); + attestationGroupByIndexByDataHash.set(dataRootHex, attestationGroupByIndex); } - let attestationGroup = attestationGroupByDataHash.get(dataRootHex); + + let committeeIndex; + + if (isForkPostElectra(this.config.getForkName(slot))) { + if (!isElectraAttestation(attestation)) { + throw Error(`Attestation should be type electra.Attestation for slot ${slot}`); + } + committeeIndex = attestation.committeeBits.getSingleTrueBit(); + } else { + if (isElectraAttestation(attestation)) { + throw Error(`Attestation should be type phase0.Attestation for slot ${slot}`); + } + committeeIndex = attestation.data.index; + } + // this should not happen because attestation should be validated before reaching this + assert.notNull(committeeIndex, "Committee index should not be null in aggregated attestation pool"); + let attestationGroup = attestationGroupByIndex.get(committeeIndex); if (!attestationGroup) { attestationGroup = new MatchingDataAttestationGroup(committee, attestation.data); - attestationGroupByDataHash.set(dataRootHex, attestationGroup); + attestationGroupByIndex.set(committeeIndex, attestationGroup); 
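      // The committee-index resolution above, in isolation (sketch; assumes a single-committee
      // electra aggregate as produced by validators):
      //   const committeeBits = BitArray.fromSingleBit(MAX_COMMITTEES_PER_SLOT, 3);
      //   committeeBits.getSingleTrueBit(); // -> 3; null if zero or multiple bits are set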
} return attestationGroup.add({ @@ -109,14 +171,21 @@ export class AggregatedAttestationPool { /** Remove attestations which are too old to be included in a block. */ prune(clockSlot: Slot): void { // Only retain SLOTS_PER_EPOCH slots - pruneBySlot(this.attestationGroupByDataHashByIndexBySlot, clockSlot, SLOTS_PER_EPOCH); + pruneBySlot(this.attestationGroupByIndexByDataHexBySlot, clockSlot, SLOTS_PER_EPOCH); this.lowestPermissibleSlot = Math.max(clockSlot - SLOTS_PER_EPOCH, 0); } + getAttestationsForBlock(fork: ForkName, forkChoice: IForkChoice, state: CachedBeaconStateAllForks): Attestation[] { + const forkSeq = ForkSeq[fork]; + return forkSeq >= ForkSeq.electra + ? this.getAttestationsForBlockElectra(fork, forkChoice, state) + : this.getAttestationsForBlockPreElectra(fork, forkChoice, state); + } + /** - * Get attestations to be included in a block. Returns $MAX_ATTESTATIONS items + * Get attestations to be included in a block pre-electra. Returns up to $MAX_ATTESTATIONS items */ - getAttestationsForBlock( + getAttestationsForBlockPreElectra( fork: ForkName, forkChoice: IForkChoice, state: CachedBeaconStateAllForks @@ -130,14 +199,14 @@ export class AggregatedAttestationPool { const attestationsByScore: AttestationWithScore[] = []; - const slots = Array.from(this.attestationGroupByDataHashByIndexBySlot.keys()).sort((a, b) => b - a); + const slots = Array.from(this.attestationGroupByIndexByDataHexBySlot.keys()).sort((a, b) => b - a); let minScore = Number.MAX_SAFE_INTEGER; let slotCount = 0; slot: for (const slot of slots) { slotCount++; - const attestationGroupByDataHashByIndex = this.attestationGroupByDataHashByIndexBySlot.get(slot); + const attestationGroupByIndexByDataHash = this.attestationGroupByIndexByDataHexBySlot.get(slot); // should not happen - if (!attestationGroupByDataHashByIndex) { + if (!attestationGroupByIndexByDataHash) { throw Error(`No aggregated attestation pool for slot=${slot}`); } @@ -158,35 +227,25 @@ export class AggregatedAttestationPool { } const slotDelta = stateSlot - slot; - const shuffling = state.epochCtx.getShufflingAtEpoch(epoch); - const slotCommittees = shuffling.committees[slot % SLOTS_PER_EPOCH]; - for (const [committeeIndex, attestationGroupByData] of attestationGroupByDataHashByIndex.entries()) { - // all attestations will be validated against the state in next step so we can get committee from the state - // this is an improvement to save the notSeenValidatorsFn call for the same slot/index instead of the same attestation data - if (committeeIndex > slotCommittees.length) { - // invalid index, should not happen - continue; - } - - const committee = slotCommittees[committeeIndex]; - const notSeenAttestingIndices = notSeenValidatorsFn(epoch, committee); - if (notSeenAttestingIndices === null || notSeenAttestingIndices.size === 0) { - continue; - } + for (const attestationGroupByIndex of attestationGroupByIndexByDataHash.values()) { + for (const [committeeIndex, attestationGroup] of attestationGroupByIndex.entries()) { + const notSeenAttestingIndices = notSeenValidatorsFn(epoch, slot, committeeIndex); + if (notSeenAttestingIndices === null || notSeenAttestingIndices.size === 0) { + continue; + } - if ( - slotCount > 2 && - attestationsByScore.length >= MAX_ATTESTATIONS && - notSeenAttestingIndices.size / slotDelta < minScore - ) { - // after 2 slots, there are a good chance that we have 2 * MAX_ATTESTATIONS attestations and break the for loop early - // if not, we may have to scan all slots in the pool - // if we have enough attestations and the max 
possible score is lower than scores of `attestationsByScore`, we should skip - // otherwise it takes time to check attestation, add it and remove it later after the sort by score - continue; - } + if ( + slotCount > 2 && + attestationsByScore.length >= MAX_ATTESTATIONS && + notSeenAttestingIndices.size / slotDelta < minScore + ) { + // after 2 slots, there are a good chance that we have 2 * MAX_ATTESTATIONS attestations and break the for loop early + // if not, we may have to scan all slots in the pool + // if we have enough attestations and the max possible score is lower than scores of `attestationsByScore`, we should skip + // otherwise it takes time to check attestation, add it and remove it later after the sort by score + continue; + } - for (const attestationGroup of attestationGroupByData.values()) { if (!validateAttestationDataFn(attestationGroup.data)) { continue; } @@ -199,6 +258,7 @@ export class AggregatedAttestationPool { // IF they have to be validated, do it only with one attestation per group since same data // The committeeCountPerSlot can be precomputed once per slot for (const {attestation, notSeenAttesterCount} of attestationGroup.getAttestationsForBlock( + fork, notSeenAttestingIndices )) { const score = notSeenAttesterCount / slotDelta; @@ -231,23 +291,138 @@ export class AggregatedAttestationPool { return attestationsForBlock; } + /** + * Get attestations to be included in an electra block. Returns up to $MAX_ATTESTATIONS_ELECTRA items + */ + getAttestationsForBlockElectra( + fork: ForkName, + forkChoice: IForkChoice, + state: CachedBeaconStateAllForks + ): electra.Attestation[] { + const stateSlot = state.slot; + const stateEpoch = state.epochCtx.epoch; + const statePrevEpoch = stateEpoch - 1; + + const notSeenValidatorsFn = getNotSeenValidatorsFn(state); + const validateAttestationDataFn = getValidateAttestationDataFn(forkChoice, state); + + const slots = Array.from(this.attestationGroupByIndexByDataHexBySlot.keys()).sort((a, b) => b - a); + const consolidations: AttestationsConsolidation[] = []; + let minScore = Number.MAX_SAFE_INTEGER; + let slotCount = 0; + slot: for (const slot of slots) { + slotCount++; + const attestationGroupByIndexByDataHash = this.attestationGroupByIndexByDataHexBySlot.get(slot); + // should not happen + if (!attestationGroupByIndexByDataHash) { + throw Error(`No aggregated attestation pool for slot=${slot}`); + } + + const epoch = computeEpochAtSlot(slot); + // validateAttestation condition: Attestation target epoch not in previous or current epoch + if (!(epoch === stateEpoch || epoch === statePrevEpoch)) { + continue; // Invalid attestations + } + // validateAttestation condition: Attestation slot not within inclusion window + if (!(slot + MIN_ATTESTATION_INCLUSION_DELAY <= stateSlot)) { + continue; // Invalid attestations + } + + const slotDelta = stateSlot - slot; + // CommitteeIndex 0 1 2 ... 
Consolidation + // Attestations att00 --- att10 --- att20 --- 0 (att 00 10 20) + // att01 --- - --- att21 --- 1 (att 01 __ 21) + // - --- - --- att22 --- 2 (att __ __ 22) + for (const attestationGroupByIndex of attestationGroupByIndexByDataHash.values()) { + // sameAttDataCons could be up to MAX_ATTESTATIONS_PER_GROUP_ELECTRA + const sameAttDataCons: AttestationsConsolidation[] = []; + for (const [committeeIndex, attestationGroup] of attestationGroupByIndex.entries()) { + const notSeenAttestingIndices = notSeenValidatorsFn(epoch, slot, committeeIndex); + if (notSeenAttestingIndices === null || notSeenAttestingIndices.size === 0) { + continue; + } + + if ( + slotCount > 2 && + consolidations.length >= MAX_ATTESTATIONS_ELECTRA && + notSeenAttestingIndices.size / slotDelta < minScore + ) { + // after 2 slots, there are a good chance that we have 2 * MAX_ATTESTATIONS_ELECTRA attestations and break the for loop early + // if not, we may have to scan all slots in the pool + // if we have enough attestations and the max possible score is lower than scores of `attestationsByScore`, we should skip + // otherwise it takes time to check attestation, add it and remove it later after the sort by score + continue; + } + + if (!validateAttestationDataFn(attestationGroup.data)) { + continue; + } + + // TODO: Is it necessary to validateAttestation for: + // - Attestation committee index not within current committee count + // - Attestation aggregation bits length does not match committee length + // + // These properties should not change after being validate in gossip + // IF they have to be validated, do it only with one attestation per group since same data + // The committeeCountPerSlot can be precomputed once per slot + for (const [i, attestationNonParticipation] of attestationGroup + .getAttestationsForBlock(fork, notSeenAttestingIndices) + .entries()) { + if (sameAttDataCons[i] === undefined) { + sameAttDataCons[i] = { + byCommittee: new Map(), + attData: attestationNonParticipation.attestation.data, + totalNotSeenCount: 0, + // only update score after we have full data + score: 0, + }; + } + sameAttDataCons[i].byCommittee.set(committeeIndex, attestationNonParticipation); + sameAttDataCons[i].totalNotSeenCount += attestationNonParticipation.notSeenAttesterCount; + } + for (const consolidation of sameAttDataCons) { + const score = consolidation.totalNotSeenCount / slotDelta; + if (score < minScore) { + minScore = score; + } + consolidations.push({...consolidation, score}); + // Stop accumulating attestations there are enough that may have good scoring + if (consolidations.length >= MAX_ATTESTATIONS_ELECTRA * 2) { + break slot; + } + } + } + } + } + + const sortedConsolidationsByScore = consolidations + .sort((a, b) => b.score - a.score) + .slice(0, MAX_ATTESTATIONS_ELECTRA); + // on chain aggregation is expensive, only do it after all + return sortedConsolidationsByScore.map(aggregateConsolidation); + } + /** * Get all attestations optionally filtered by `attestation.data.slot` + * Note this function is not fork aware and can potentially return a mix + * of phase0.Attestations and electra.Attestations. + * Caller of this function is expected to filtered result if they desire + * a homogenous array. 
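// Rough sketch of the scoring/selection step above, using a simplified consolidation shape
// (hypothetical, not the exact AttestationsConsolidation type): each consolidation merges
// same-AttestationData aggregates from several committees and is ranked by the number of
// not-yet-seen attesters per slot of inclusion delay, keeping only the best
// MAX_ATTESTATIONS_ELECTRA (8 in the current mainnet preset) before on-chain aggregation.
type ConsolidationSketch = {totalNotSeenCount: number; slotDelta: number};

function selectConsolidations<T extends ConsolidationSketch>(candidates: T[], max: number): T[] {
  return candidates
    .map((c) => ({...c, score: c.totalNotSeenCount / c.slotDelta}))
    .sort((a, b) => b.score - a.score)
    .slice(0, max);
}
// usage sketch: selectConsolidations(consolidations, 8).map(aggregateConsolidation)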
* @param bySlot slot to filter, `bySlot === attestation.data.slot` */ - getAll(bySlot?: Slot): phase0.Attestation[] { - let attestationGroupsArr: Map[]; + getAll(bySlot?: Slot): Attestation[] { + let attestationGroupsArr: Map[]; if (bySlot === undefined) { - attestationGroupsArr = Array.from(this.attestationGroupByDataHashByIndexBySlot.values()).flatMap((byIndex) => + attestationGroupsArr = Array.from(this.attestationGroupByIndexByDataHexBySlot.values()).flatMap((byIndex) => Array.from(byIndex.values()) ); } else { - const attestationGroupsByIndex = this.attestationGroupByDataHashByIndexBySlot.get(bySlot); + const attestationGroupsByIndex = this.attestationGroupByIndexByDataHexBySlot.get(bySlot); if (!attestationGroupsByIndex) throw Error(`No attestations for slot ${bySlot}`); attestationGroupsArr = Array.from(attestationGroupsByIndex.values()); } - const attestations: phase0.Attestation[] = []; + const attestations: Attestation[] = []; for (const attestationGroups of attestationGroupsArr) { for (const attestationGroup of attestationGroups.values()) { attestations.push(...attestationGroup.getAttestations()); @@ -258,12 +433,12 @@ export class AggregatedAttestationPool { } interface AttestationWithIndex { - attestation: phase0.Attestation; + attestation: Attestation; trueBitsCount: number; } type AttestationNonParticipant = { - attestation: phase0.Attestation; + attestation: Attestation; // this is <= attestingIndices.count since some attesters may be seen by the chain // this is only updated and used in removeBySeenValidators function notSeenAttesterCount: number; @@ -345,9 +520,17 @@ export class MatchingDataAttestationGroup { * @param notSeenAttestingIndices not seen attestting indices, i.e. indices in the same committee * @returns an array of AttestationNonParticipant */ - getAttestationsForBlock(notSeenAttestingIndices: Set): AttestationNonParticipant[] { + getAttestationsForBlock(fork: ForkName, notSeenAttestingIndices: Set): AttestationNonParticipant[] { const attestations: AttestationNonParticipant[] = []; + const isPostElectra = isForkPostElectra(fork); for (const {attestation} of this.attestations) { + if ( + (isPostElectra && !isElectraAttestation(attestation)) || + (!isPostElectra && isElectraAttestation(attestation)) + ) { + continue; + } + let notSeenAttesterCount = 0; const {aggregationBits} = attestation; for (const notSeenIndex of notSeenAttestingIndices) { @@ -361,17 +544,16 @@ export class MatchingDataAttestationGroup { } } - if (attestations.length <= MAX_ATTESTATIONS_PER_GROUP) { + const maxAttestation = isPostElectra ? MAX_ATTESTATIONS_PER_GROUP_ELECTRA : MAX_ATTESTATIONS_PER_GROUP; + if (attestations.length <= maxAttestation) { return attestations; } else { - return attestations - .sort((a, b) => b.notSeenAttesterCount - a.notSeenAttesterCount) - .slice(0, MAX_ATTESTATIONS_PER_GROUP); + return attestations.sort((a, b) => b.notSeenAttesterCount - a.notSeenAttesterCount).slice(0, maxAttestation); } } /** Get attestations for API. 
*/ - getAttestations(): phase0.Attestation[] { + getAttestations(): Attestation[] { return this.attestations.map((attestation) => attestation.attestation); } } @@ -385,6 +567,34 @@ export function aggregateInto(attestation1: AttestationWithIndex, attestation2: attestation1.attestation.signature = aggregateSignatures([signature1, signature2]).toBytes(); } +/** + * Electra and after: Block proposer consolidates attestations with the same + * attestation data from different committee into a single attestation + * https://github.com/ethereum/consensus-specs/blob/aba6345776aa876dad368cab27fbbb23fae20455/specs/_features/eip7549/validator.md?plain=1#L39 + */ +export function aggregateConsolidation({byCommittee, attData}: AttestationsConsolidation): electra.Attestation { + const committeeBits = BitArray.fromBitLen(MAX_COMMITTEES_PER_SLOT); + // TODO: can we improve this? + let aggregationBits: boolean[] = []; + const signatures: Signature[] = []; + const sortedCommittees = Array.from(byCommittee.keys()).sort((a, b) => a - b); + for (const committeeIndex of sortedCommittees) { + const attestationNonParticipation = byCommittee.get(committeeIndex); + if (attestationNonParticipation !== undefined) { + const {attestation} = attestationNonParticipation; + committeeBits.set(committeeIndex, true); + aggregationBits = [...aggregationBits, ...attestation.aggregationBits.toBoolArray()]; + signatures.push(signatureFromBytesNoCheck(attestation.signature)); + } + } + return { + aggregationBits: BitArray.fromBoolArray(aggregationBits), + data: attData, + committeeBits, + signature: aggregateSignatures(signatures).toBytes(), + }; +} + /** * Pre-compute participation from a CachedBeaconStateAllForks, for use to check if an attestation's committee * has already attested or not. @@ -407,12 +617,13 @@ export function getNotSeenValidatorsFn(state: CachedBeaconStateAllForks): GetNot state ); - return (epoch: Epoch, committee: Uint32Array) => { + return (epoch: Epoch, slot: Slot, committeeIndex: number) => { const participants = epoch === stateEpoch ? currentEpochParticipants : epoch === stateEpoch - 1 ? previousEpochParticipants : null; if (participants === null) { return null; } + const committee = state.epochCtx.getBeaconCommittee(slot, committeeIndex); const notSeenAttestingIndices = new Set(); for (const [i, validatorIndex] of committee.entries()) { @@ -434,22 +645,32 @@ export function getNotSeenValidatorsFn(state: CachedBeaconStateAllForks): GetNot const previousParticipation = altairState.previousEpochParticipation.getAll(); const currentParticipation = altairState.currentEpochParticipation.getAll(); const stateEpoch = computeEpochAtSlot(state.slot); + // this function could be called multiple times with same slot + committeeIndex + const cachedNotSeenValidators = new Map>(); - return (epoch: Epoch, committee: Uint32Array) => { + return (epoch: Epoch, slot: Slot, committeeIndex: number) => { const participationStatus = epoch === stateEpoch ? currentParticipation : epoch === stateEpoch - 1 ? previousParticipation : null; if (participationStatus === null) { return null; } + const cacheKey = slot + "_" + committeeIndex; + let notSeenAttestingIndices = cachedNotSeenValidators.get(cacheKey); + if (notSeenAttestingIndices != null) { + // if all validators are seen then return null, we don't need to check for any attestations of same committee again + return notSeenAttestingIndices.size === 0 ? 
null : notSeenAttestingIndices; + } - const notSeenAttestingIndices = new Set(); + const committee = state.epochCtx.getBeaconCommittee(slot, committeeIndex); + notSeenAttestingIndices = new Set(); for (const [i, validatorIndex] of committee.entries()) { // no need to check flagIsTimelySource as if validator is not seen, it's participation status is 0 if (participationStatus[validatorIndex] === 0) { notSeenAttestingIndices.add(i); } } + cachedNotSeenValidators.set(cacheKey, notSeenAttestingIndices); // if all validators are seen then return null, we don't need to check for any attestations of same committee again return notSeenAttestingIndices.size === 0 ? null : notSeenAttestingIndices; }; diff --git a/packages/beacon-node/src/chain/opPools/attestationPool.ts b/packages/beacon-node/src/chain/opPools/attestationPool.ts index 2b511598f9a..887448b1e55 100644 --- a/packages/beacon-node/src/chain/opPools/attestationPool.ts +++ b/packages/beacon-node/src/chain/opPools/attestationPool.ts @@ -1,10 +1,12 @@ import {BitArray} from "@chainsafe/ssz"; import {Signature, aggregateSignatures} from "@chainsafe/blst"; -import {phase0, Slot, RootHex} from "@lodestar/types"; -import {MapDef} from "@lodestar/utils"; +import {Slot, RootHex, isElectraAttestation, Attestation} from "@lodestar/types"; +import {MapDef, assert} from "@lodestar/utils"; +import {isForkPostElectra} from "@lodestar/params"; +import {ChainForkConfig} from "@lodestar/config"; import {IClock} from "../../util/clock.js"; import {InsertOutcome, OpPoolError, OpPoolErrorCode} from "./types.js"; -import {pruneBySlot, signatureFromBytesNoCheck} from "./utils.js"; +import {isElectraAggregate, pruneBySlot, signatureFromBytesNoCheck} from "./utils.js"; /** * The number of slots that will be stored in the pool. @@ -22,15 +24,22 @@ const SLOTS_RETAINED = 3; */ const MAX_ATTESTATIONS_PER_SLOT = 16_384; -type AggregateFast = { - data: phase0.Attestation["data"]; +type AggregateFastPhase0 = { + data: Attestation["data"]; aggregationBits: BitArray; signature: Signature; }; +export type AggregateFastElectra = AggregateFastPhase0 & {committeeBits: BitArray}; + +export type AggregateFast = AggregateFastPhase0 | AggregateFastElectra; + /** Hex string of DataRoot `TODO` */ type DataRootHex = string; +/** CommitteeIndex must be null for pre-electra. Must not be null post-electra */ +type CommitteeIndex = number | null; + /** * A pool of `Attestation` that is specially designed to store "unaggregated" attestations from * the native aggregation scheme. @@ -55,12 +64,14 @@ type DataRootHex = string; * receives and it can be triggered manually. 
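// Illustrative shape of the pool layout introduced by the types above (an assumption, kept in
// structural form only): slot -> attDataRootHex -> committeeIndex (null pre-electra) ->
// pre-aggregated attestation. Pre-electra the committee index is already part of
// AttestationData (and therefore of the data root), so `null` is used as the inner key;
// post-electra several committees can share the same data root, hence the extra level.
type PoolLayoutSketch = Map<
  number /* Slot */,
  Map<string /* DataRootHex */, Map<number | null /* CommitteeIndex */, unknown /* AggregateFast */>>
>;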
*/ export class AttestationPool { - private readonly attestationByRootBySlot = new MapDef>( - () => new Map() - ); + private readonly aggregateByIndexByRootBySlot = new MapDef< + Slot, + Map> + >(() => new Map>()); private lowestPermissibleSlot = 0; constructor( + private readonly config: ChainForkConfig, private readonly clock: IClock, private readonly cutOffSecFromSlot: number, private readonly preaggregateSlotDistance = 0 @@ -69,8 +80,10 @@ export class AttestationPool { /** Returns current count of pre-aggregated attestations with unique data */ getAttestationCount(): number { let attestationCount = 0; - for (const attestationByRoot of this.attestationByRootBySlot.values()) { - attestationCount += attestationByRoot.size; + for (const attestationByIndexByRoot of this.aggregateByIndexByRootBySlot.values()) { + for (const attestationByIndex of attestationByIndexByRoot.values()) { + attestationCount += attestationByIndex.size; + } } return attestationCount; } @@ -92,8 +105,9 @@ export class AttestationPool { * - Valid committeeIndex * - Valid data */ - add(attestation: phase0.Attestation, attDataRootHex: RootHex): InsertOutcome { + add(committeeIndex: CommitteeIndex, attestation: Attestation, attDataRootHex: RootHex): InsertOutcome { const slot = attestation.data.slot; + const fork = this.config.getForkName(slot); const lowestPermissibleSlot = this.lowestPermissibleSlot; // Reject any attestations that are too old. @@ -107,19 +121,33 @@ export class AttestationPool { } // Limit object per slot - const aggregateByRoot = this.attestationByRootBySlot.getOrDefault(slot); + const aggregateByRoot = this.aggregateByIndexByRootBySlot.getOrDefault(slot); if (aggregateByRoot.size >= MAX_ATTESTATIONS_PER_SLOT) { throw new OpPoolError({code: OpPoolErrorCode.REACHED_MAX_PER_SLOT}); } + if (isForkPostElectra(fork)) { + // Electra only: this should not happen because attestation should be validated before reaching this + assert.notNull(committeeIndex, "Committee index should not be null in attestation pool post-electra"); + assert.true(isElectraAttestation(attestation), "Attestation should be type electra.Attestation"); + } else { + assert.true(!isElectraAttestation(attestation), "Attestation should be type phase0.Attestation"); + committeeIndex = null; // For pre-electra, committee index info is encoded in attDataRootIndex + } + // Pre-aggregate the contribution with existing items - const aggregate = aggregateByRoot.get(attDataRootHex); + let aggregateByIndex = aggregateByRoot.get(attDataRootHex); + if (aggregateByIndex === undefined) { + aggregateByIndex = new Map(); + aggregateByRoot.set(attDataRootHex, aggregateByIndex); + } + const aggregate = aggregateByIndex.get(committeeIndex); if (aggregate) { // Aggregate mutating return aggregateAttestationInto(aggregate, attestation); } else { // Create new aggregate - aggregateByRoot.set(attDataRootHex, attestationToAggregate(attestation)); + aggregateByIndex.set(committeeIndex, attestationToAggregate(attestation)); return InsertOutcome.NewData; } } @@ -127,13 +155,23 @@ export class AttestationPool { /** * For validator API to get an aggregate */ - getAggregate(slot: Slot, dataRootHex: RootHex): phase0.Attestation | null { - const aggregate = this.attestationByRootBySlot.get(slot)?.get(dataRootHex); + getAggregate(slot: Slot, committeeIndex: CommitteeIndex, dataRootHex: RootHex): Attestation | null { + const fork = this.config.getForkName(slot); + const isPostElectra = isForkPostElectra(fork); + committeeIndex = isPostElectra ? 
committeeIndex : null; + + const aggregate = this.aggregateByIndexByRootBySlot.get(slot)?.get(dataRootHex)?.get(committeeIndex); if (!aggregate) { // TODO: Add metric for missing aggregates return null; } + if (isPostElectra) { + assert.true(isElectraAggregate(aggregate), "Aggregate should be type AggregateFastElectra"); + } else { + assert.true(!isElectraAggregate(aggregate), "Aggregate should be type AggregateFastPhase0"); + } + return fastToAttestation(aggregate); } @@ -142,7 +180,7 @@ export class AttestationPool { * By default, not interested in attestations in old slots, we only preaggregate attestations for the current slot. */ prune(clockSlot: Slot): void { - pruneBySlot(this.attestationByRootBySlot, clockSlot, SLOTS_RETAINED); + pruneBySlot(this.aggregateByIndexByRootBySlot, clockSlot, SLOTS_RETAINED); // by default preaggregateSlotDistance is 0, i.e only accept attestations in the same clock slot. this.lowestPermissibleSlot = Math.max(clockSlot - this.preaggregateSlotDistance, 0); } @@ -151,18 +189,20 @@ export class AttestationPool { * Get all attestations optionally filtered by `attestation.data.slot` * @param bySlot slot to filter, `bySlot === attestation.data.slot` */ - getAll(bySlot?: Slot): phase0.Attestation[] { - const attestations: phase0.Attestation[] = []; + getAll(bySlot?: Slot): Attestation[] { + const attestations: Attestation[] = []; const aggregateByRoots = bySlot === undefined - ? Array.from(this.attestationByRootBySlot.values()) - : [this.attestationByRootBySlot.get(bySlot)]; + ? Array.from(this.aggregateByIndexByRootBySlot.values()) + : [this.aggregateByIndexByRootBySlot.get(bySlot)]; for (const aggregateByRoot of aggregateByRoots) { if (aggregateByRoot) { - for (const aggFast of aggregateByRoot.values()) { - attestations.push(fastToAttestation(aggFast)); + for (const aggFastByIndex of aggregateByRoot.values()) { + for (const aggFast of aggFastByIndex.values()) { + attestations.push(fastToAttestation(aggFast)); + } } } } @@ -175,15 +215,13 @@ export class AttestationPool { // - Insert attestations coming from gossip and API /** - * Aggregate a new contribution into `aggregate` mutating it + * Aggregate a new attestation into `aggregate` mutating it */ -function aggregateAttestationInto(aggregate: AggregateFast, attestation: phase0.Attestation): InsertOutcome { +function aggregateAttestationInto(aggregate: AggregateFast, attestation: Attestation): InsertOutcome { const bitIndex = attestation.aggregationBits.getSingleTrueBit(); // Should never happen, attestations are verified against this exact condition before - if (bitIndex === null) { - throw Error("Invalid attestation not exactly one bit set"); - } + assert.notNull(bitIndex, "Invalid attestation in pool, not exactly one bit set"); if (aggregate.aggregationBits.get(bitIndex) === true) { return InsertOutcome.AlreadyKnown; @@ -197,7 +235,16 @@ function aggregateAttestationInto(aggregate: AggregateFast, attestation: phase0. 
/** * Format `contribution` into an efficient `aggregate` to add more contributions in with aggregateContributionInto() */ -function attestationToAggregate(attestation: phase0.Attestation): AggregateFast { +function attestationToAggregate(attestation: Attestation): AggregateFast { + if (isElectraAttestation(attestation)) { + return { + data: attestation.data, + // clone because it will be mutated + aggregationBits: attestation.aggregationBits.clone(), + committeeBits: attestation.committeeBits, + signature: signatureFromBytesNoCheck(attestation.signature), + }; + } return { data: attestation.data, // clone because it will be mutated @@ -207,12 +254,8 @@ function attestationToAggregate(attestation: phase0.Attestation): AggregateFast } /** - * Unwrap AggregateFast to phase0.Attestation + * Unwrap AggregateFast to Attestation */ -function fastToAttestation(aggFast: AggregateFast): phase0.Attestation { - return { - data: aggFast.data, - aggregationBits: aggFast.aggregationBits, - signature: aggFast.signature.toBytes(), - }; +function fastToAttestation(aggFast: AggregateFast): Attestation { + return {...aggFast, signature: aggFast.signature.toBytes()}; } diff --git a/packages/beacon-node/src/chain/opPools/opPool.ts b/packages/beacon-node/src/chain/opPools/opPool.ts index a991a6e8630..a2180a718fe 100644 --- a/packages/beacon-node/src/chain/opPools/opPool.ts +++ b/packages/beacon-node/src/chain/opPools/opPool.ts @@ -14,9 +14,10 @@ import { BLS_WITHDRAWAL_PREFIX, MAX_ATTESTER_SLASHINGS, ForkSeq, + MAX_ATTESTER_SLASHINGS_ELECTRA, } from "@lodestar/params"; -import {Epoch, phase0, capella, ssz, ValidatorIndex, SignedBeaconBlock} from "@lodestar/types"; import {toRootHex} from "@lodestar/utils"; +import {Epoch, phase0, capella, ssz, ValidatorIndex, SignedBeaconBlock, AttesterSlashing} from "@lodestar/types"; import {IBeaconDb} from "../../db/index.js"; import {SignedBLSToExecutionChangeVersioned} from "../../util/types.js"; import {BlockType} from "../interface.js"; @@ -174,7 +175,7 @@ export class OpPool { blockType: BlockType, metrics: Metrics | null ): [ - phase0.AttesterSlashing[], + AttesterSlashing[], phase0.ProposerSlashing[], phase0.SignedVoluntaryExit[], capella.SignedBLSToExecutionChange[], @@ -208,7 +209,8 @@ export class OpPool { }); const endAttesterSlashings = stepsMetrics?.startTimer(); - const attesterSlashings: phase0.AttesterSlashing[] = []; + const attesterSlashings: AttesterSlashing[] = []; + const maxAttesterSlashings = stateFork >= ForkSeq.electra ? 
MAX_ATTESTER_SLASHINGS_ELECTRA : MAX_ATTESTER_SLASHINGS; attesterSlashing: for (const attesterSlashing of this.attesterSlashings.values()) { /** Indices slashable in this attester slashing */ const slashableIndices = new Set(); @@ -223,7 +225,7 @@ export class OpPool { if (isSlashableAtEpoch(validator, stateEpoch)) { slashableIndices.add(index); } - if (attesterSlashings.length >= MAX_ATTESTER_SLASHINGS) { + if (attesterSlashings.length >= maxAttesterSlashings) { break attesterSlashing; } } @@ -283,6 +285,7 @@ export class OpPool { } /** For beacon pool API */ + // TODO Electra: Update to adapt electra.AttesterSlashing getAllAttesterSlashings(): phase0.AttesterSlashing[] { return Array.from(this.attesterSlashings.values()).map((attesterSlashings) => attesterSlashings.attesterSlashing); } diff --git a/packages/beacon-node/src/chain/opPools/utils.ts b/packages/beacon-node/src/chain/opPools/utils.ts index 039e95af6c9..e136bf1d409 100644 --- a/packages/beacon-node/src/chain/opPools/utils.ts +++ b/packages/beacon-node/src/chain/opPools/utils.ts @@ -2,6 +2,7 @@ import {Signature} from "@chainsafe/blst"; import {BLS_WITHDRAWAL_PREFIX} from "@lodestar/params"; import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; import {Slot, capella} from "@lodestar/types"; +import {AggregateFast, AggregateFastElectra} from "./attestationPool.js"; /** * Prune a Map indexed by slot to keep the most recent slots, up to `slotsRetained` @@ -58,3 +59,7 @@ export function isValidBlsToExecutionChangeForBlockInclusion( return true; } + +export function isElectraAggregate(aggregate: AggregateFast): aggregate is AggregateFastElectra { + return (aggregate as AggregateFastElectra).committeeBits !== undefined; +} diff --git a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts index d1b410610a4..ba560d5a7ff 100644 --- a/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts +++ b/packages/beacon-node/src/chain/produceBlock/produceBlockBody.ts @@ -559,7 +559,9 @@ function preparePayloadAttributes( }; if (ForkSeq[fork] >= ForkSeq.capella) { + // withdrawals logic is now fork aware as it changes on electra fork post capella (payloadAttributes as capella.SSEPayloadAttributes["payloadAttributes"]).withdrawals = getExpectedWithdrawals( + ForkSeq[fork], prepareState as CachedBeaconStateCapella ).withdrawals; } diff --git a/packages/beacon-node/src/chain/regen/queued.ts b/packages/beacon-node/src/chain/regen/queued.ts index 082ffbe271e..57e64bd364e 100644 --- a/packages/beacon-node/src/chain/regen/queued.ts +++ b/packages/beacon-node/src/chain/regen/queued.ts @@ -1,6 +1,6 @@ import {phase0, Slot, RootHex, Epoch, BeaconBlock} from "@lodestar/types"; import {IForkChoice, ProtoBlock} from "@lodestar/fork-choice"; -import {CachedBeaconStateAllForks, computeEpochAtSlot} from "@lodestar/state-transition"; +import {CachedBeaconStateAllForks, UnfinalizedPubkeyIndexMap, computeEpochAtSlot} from "@lodestar/state-transition"; import {Logger, toRootHex} from "@lodestar/utils"; import {routes} from "@lodestar/api"; import {CheckpointHex, toCheckpointHex} from "../stateCache/index.js"; @@ -206,6 +206,54 @@ export class QueuedStateRegenerator implements IStateRegenerator { return this.checkpointStateCache.updatePreComputedCheckpoint(rootHex, epoch); } + /** + * Remove `validators` from all unfinalized cache's epochCtx.UnfinalizedPubkey2Index, + * and add them to epochCtx.pubkey2index and epochCtx.index2pubkey + */ + 
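// Design note as a sketch (hypothetical helper, structural types only): the finalized
// pubkey2index/index2pubkey caches are shared by reference across all cached states, so newly
// finalized pubkeys only need to be added through any single state, while the per-state
// unfinalized caches must each have those pubkeys deleted. The method below implements this
// against the real block/checkpoint state caches.
function promoteFinalizedPubkeysSketch(
  states: Iterable<{
    epochCtx: {addFinalizedPubkeys(v: Map<string, number>): void; deleteUnfinalizedPubkeys(k: string[]): void};
  }>,
  validators: Map<string, number>
): void {
  const all = Array.from(states);
  all[0]?.epochCtx.addFinalizedPubkeys(validators); // shared finalized cache: adding once is enough
  const keys = Array.from(validators.keys());
  for (const s of all) s.epochCtx.deleteUnfinalizedPubkeys(keys); // per-state unfinalized cache: every state
}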
updateUnfinalizedPubkeys(validators: UnfinalizedPubkeyIndexMap): void { + let numStatesUpdated = 0; + const states = this.blockStateCache.getStates(); + const cpStates = this.checkpointStateCache.getStates(); + + // Add finalized pubkeys to all states. + const addTimer = this.metrics?.regenFnAddPubkeyTime.startTimer(); + + // We only need to add pubkeys to any one of the states since the finalized caches is shared globally across all states + const firstState = (states.next().value ?? cpStates.next().value) as CachedBeaconStateAllForks | undefined; + + if (firstState !== undefined) { + firstState.epochCtx.addFinalizedPubkeys(validators, this.metrics?.epochCache ?? undefined); + } else { + this.logger.warn("Attempt to delete finalized pubkey from unfinalized pubkey cache. But no state is available"); + } + + addTimer?.(); + + // Delete finalized pubkeys from unfinalized pubkey cache for all states + const deleteTimer = this.metrics?.regenFnDeletePubkeyTime.startTimer(); + const pubkeysToDelete = Array.from(validators.keys()); + + for (const s of states) { + s.epochCtx.deleteUnfinalizedPubkeys(pubkeysToDelete); + numStatesUpdated++; + } + + for (const s of cpStates) { + s.epochCtx.deleteUnfinalizedPubkeys(pubkeysToDelete); + numStatesUpdated++; + } + + // Since first state is consumed from the iterator. Will need to perform delete explicitly + if (firstState !== undefined) { + firstState?.epochCtx.deleteUnfinalizedPubkeys(pubkeysToDelete); + numStatesUpdated++; + } + + deleteTimer?.(); + + this.metrics?.regenFnNumStatesUpdated.observe(numStatesUpdated); + } + /** * Get the state to run with `block`. * - State after `block.parentRoot` dialed forward to block.slot diff --git a/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts b/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts index 9312f3b517a..a0aa6db3589 100644 --- a/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts +++ b/packages/beacon-node/src/chain/seenCache/seenAttestationData.ts @@ -1,12 +1,21 @@ -import {phase0, RootHex, Slot} from "@lodestar/types"; +import {BitArray} from "@chainsafe/ssz"; +import {CommitteeIndex, phase0, RootHex, Slot} from "@lodestar/types"; import {MapDef} from "@lodestar/utils"; import {Metrics} from "../../metrics/metrics.js"; -import {AttDataBase64} from "../../util/sszBytes.js"; import {InsertOutcome} from "../opPools/types.js"; +export type SeenAttDataKey = AttDataBase64 | AttDataCommitteeBitsBase64; +// pre-electra, AttestationData is used to cache attestations +type AttDataBase64 = string; +// electra, AttestationData + CommitteeBits are used to cache attestations +type AttDataCommitteeBitsBase64 = string; + export type AttestationDataCacheEntry = { // part of shuffling data, so this does not take memory - committeeIndices: Uint32Array; + committeeValidatorIndices: Uint32Array; + // undefined for phase0 Attestation + committeeBits?: BitArray; + committeeIndex: CommitteeIndex; // IndexedAttestationData signing root, 32 bytes signingRoot: Uint8Array; // to be consumed by forkchoice and oppool @@ -38,12 +47,14 @@ const DEFAULT_MAX_CACHE_SIZE_PER_SLOT = 200; const DEFAULT_CACHE_SLOT_DISTANCE = 2; /** + * Cached seen AttestationData to improve gossip validation. For Electra, this still take into account attestationIndex + * even through it is moved outside of AttestationData. * As of April 2023, validating gossip attestation takes ~12% of cpu time for a node subscribing to all subnets on mainnet. 
* Having this cache help saves a lot of cpu time since most of the gossip attestations are on the same slot. */ export class SeenAttestationDatas { - private cacheEntryByAttDataBase64BySlot = new MapDef>( - () => new Map() + private cacheEntryByAttDataBase64BySlot = new MapDef>( + () => new Map() ); private lowestPermissibleSlot = 0; @@ -57,14 +68,14 @@ export class SeenAttestationDatas { } // TODO: Move InsertOutcome type definition to a common place - add(slot: Slot, attDataBase64: AttDataBase64, cacheEntry: AttestationDataCacheEntry): InsertOutcome { + add(slot: Slot, attDataKey: SeenAttDataKey, cacheEntry: AttestationDataCacheEntry): InsertOutcome { if (slot < this.lowestPermissibleSlot) { this.metrics?.seenCache.attestationData.reject.inc({reason: RejectReason.too_old}); return InsertOutcome.Old; } const cacheEntryByAttDataBase64 = this.cacheEntryByAttDataBase64BySlot.getOrDefault(slot); - if (cacheEntryByAttDataBase64.has(attDataBase64)) { + if (cacheEntryByAttDataBase64.has(attDataKey)) { this.metrics?.seenCache.attestationData.reject.inc({reason: RejectReason.already_known}); return InsertOutcome.AlreadyKnown; } @@ -74,11 +85,11 @@ export class SeenAttestationDatas { return InsertOutcome.ReachLimit; } - cacheEntryByAttDataBase64.set(attDataBase64, cacheEntry); + cacheEntryByAttDataBase64.set(attDataKey, cacheEntry); return InsertOutcome.NewData; } - get(slot: Slot, attDataBase64: AttDataBase64): AttestationDataCacheEntry | null { + get(slot: Slot, attDataBase64: SeenAttDataKey): AttestationDataCacheEntry | null { const cacheEntryByAttDataBase64 = this.cacheEntryByAttDataBase64BySlot.get(slot); const cacheEntry = cacheEntryByAttDataBase64?.get(attDataBase64); if (cacheEntry) { diff --git a/packages/beacon-node/src/chain/stateCache/blockStateCacheImpl.ts b/packages/beacon-node/src/chain/stateCache/blockStateCacheImpl.ts index 05073d0e515..1cb67cd6cf0 100644 --- a/packages/beacon-node/src/chain/stateCache/blockStateCacheImpl.ts +++ b/packages/beacon-node/src/chain/stateCache/blockStateCacheImpl.ts @@ -34,7 +34,7 @@ export class BlockStateCacheImpl implements BlockStateCache { this.maxStates = maxStates; this.cache = new MapTracker(metrics?.stateCache); if (metrics) { - this.metrics = metrics.stateCache; + this.metrics = {...metrics.stateCache, ...metrics.epochCache}; metrics.stateCache.size.addCollect(() => metrics.stateCache.size.set(this.cache.size)); } } @@ -137,6 +137,10 @@ export class BlockStateCacheImpl implements BlockStateCache { })); } + getStates(): IterableIterator { + return this.cache.values(); + } + private deleteAllEpochItems(epoch: Epoch): void { for (const rootHex of this.epochIndex.get(epoch) || []) { this.cache.delete(rootHex); diff --git a/packages/beacon-node/src/chain/stateCache/fifoBlockStateCache.ts b/packages/beacon-node/src/chain/stateCache/fifoBlockStateCache.ts index 602b0abaee8..d38fc323174 100644 --- a/packages/beacon-node/src/chain/stateCache/fifoBlockStateCache.ts +++ b/packages/beacon-node/src/chain/stateCache/fifoBlockStateCache.ts @@ -190,6 +190,10 @@ export class FIFOBlockStateCache implements BlockStateCache { })); } + getStates(): IterableIterator { + return this.cache.values(); + } + /** * For unit test only. 
*/ diff --git a/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts index 03cdc84de16..38aeabb9795 100644 --- a/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts +++ b/packages/beacon-node/src/chain/stateCache/inMemoryCheckpointsCache.ts @@ -176,6 +176,10 @@ export class InMemoryCheckpointStateCache implements CheckpointStateCache { })); } + getStates(): IterableIterator { + return this.cache.values(); + } + /** ONLY FOR DEBUGGING PURPOSES. For spec tests on error */ dumpCheckpointKeys(): string[] { return Array.from(this.cache.keys()); diff --git a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts index 5c5901583ad..b315beba46d 100644 --- a/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts +++ b/packages/beacon-node/src/chain/stateCache/persistentCheckpointsCache.ts @@ -594,6 +594,14 @@ export class PersistentCheckpointStateCache implements CheckpointStateCache { }); } + getStates(): IterableIterator { + const items = Array.from(this.cache.values()) + .filter(isInMemoryCacheItem) + .map((item) => item.state); + + return items.values(); + } + /** ONLY FOR DEBUGGING PURPOSES. For spec tests on error */ dumpCheckpointKeys(): string[] { return Array.from(this.cache.keys()); diff --git a/packages/beacon-node/src/chain/stateCache/types.ts b/packages/beacon-node/src/chain/stateCache/types.ts index 41e9b91aaa4..1e8d6bd1bd6 100644 --- a/packages/beacon-node/src/chain/stateCache/types.ts +++ b/packages/beacon-node/src/chain/stateCache/types.ts @@ -33,6 +33,8 @@ export interface BlockStateCache { prune(headStateRootHex: RootHex): void; deleteAllBeforeEpoch(finalizedEpoch: Epoch): void; dumpSummary(): routes.lodestar.StateCacheItem[]; + /** Expose beacon states stored in cache. Use with caution */ + getStates(): IterableIterator; } /** @@ -74,6 +76,8 @@ export interface CheckpointStateCache { processState(blockRootHex: RootHex, state: CachedBeaconStateAllForks): Promise; clear(): void; dumpSummary(): routes.lodestar.StateCacheItem[]; + /** Expose beacon states stored in cache. 
Use with caution */ + getStates(): IterableIterator; } export enum CacheItemType { diff --git a/packages/beacon-node/src/chain/validation/aggregateAndProof.ts b/packages/beacon-node/src/chain/validation/aggregateAndProof.ts index 3c32ffe1a9e..39a3700aacf 100644 --- a/packages/beacon-node/src/chain/validation/aggregateAndProof.ts +++ b/packages/beacon-node/src/chain/validation/aggregateAndProof.ts @@ -1,5 +1,5 @@ -import {ForkName} from "@lodestar/params"; -import {phase0, RootHex, ssz} from "@lodestar/types"; +import {ForkName, ForkSeq} from "@lodestar/params"; +import {electra, phase0, RootHex, ssz, IndexedAttestation, SignedAggregateAndProof} from "@lodestar/types"; import { computeEpochAtSlot, isAggregatorFromCommitteeLength, @@ -9,18 +9,18 @@ import {toRootHex} from "@lodestar/utils"; import {IBeaconChain} from ".."; import {AttestationError, AttestationErrorCode, GossipAction} from "../errors/index.js"; import {RegenCaller} from "../regen/index.js"; -import {getAttDataBase64FromSignedAggregateAndProofSerialized} from "../../util/sszBytes.js"; import {getSelectionProofSignatureSet, getAggregateAndProofSignatureSet} from "./signatureSets/index.js"; import { getAttestationDataSigningRoot, getCommitteeIndices, + getSeenAttDataKeyFromSignedAggregateAndProof, getShufflingForAttestationVerification, verifyHeadBlockAndTargetRoot, verifyPropagationSlotRange, } from "./attestation.js"; export type AggregateAndProofValidationResult = { - indexedAttestation: phase0.IndexedAttestation; + indexedAttestation: IndexedAttestation; committeeIndices: Uint32Array; attDataRootHex: RootHex; }; @@ -28,7 +28,7 @@ export type AggregateAndProofValidationResult = { export async function validateApiAggregateAndProof( fork: ForkName, chain: IBeaconChain, - signedAggregateAndProof: phase0.SignedAggregateAndProof + signedAggregateAndProof: SignedAggregateAndProof ): Promise { const skipValidationKnownAttesters = true; const prioritizeBls = true; @@ -41,7 +41,7 @@ export async function validateApiAggregateAndProof( export async function validateGossipAggregateAndProof( fork: ForkName, chain: IBeaconChain, - signedAggregateAndProof: phase0.SignedAggregateAndProof, + signedAggregateAndProof: SignedAggregateAndProof, serializedData: Uint8Array ): Promise { return validateAggregateAndProof(fork, chain, signedAggregateAndProof, serializedData); @@ -50,7 +50,7 @@ export async function validateGossipAggregateAndProof( async function validateAggregateAndProof( fork: ForkName, chain: IBeaconChain, - signedAggregateAndProof: phase0.SignedAggregateAndProof, + signedAggregateAndProof: SignedAggregateAndProof, serializedData: Uint8Array | null = null, opts: {skipValidationKnownAttesters: boolean; prioritizeBls: boolean} = { skipValidationKnownAttesters: false, @@ -71,10 +71,24 @@ async function validateAggregateAndProof( const attData = aggregate.data; const attSlot = attData.slot; - const attDataBase64 = serializedData ? getAttDataBase64FromSignedAggregateAndProofSerialized(serializedData) : null; - const cachedAttData = attDataBase64 ? chain.seenAttestationDatas.get(attSlot, attDataBase64) : null; + const seenAttDataKey = serializedData ? getSeenAttDataKeyFromSignedAggregateAndProof(fork, serializedData) : null; + const cachedAttData = seenAttDataKey ? 
chain.seenAttestationDatas.get(attSlot, seenAttDataKey) : null; + + let attIndex; + if (ForkSeq[fork] >= ForkSeq.electra) { + attIndex = (aggregate as electra.Attestation).committeeBits.getSingleTrueBit(); + // [REJECT] len(committee_indices) == 1, where committee_indices = get_committee_indices(aggregate) + if (attIndex === null) { + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.NOT_EXACTLY_ONE_COMMITTEE_BIT_SET}); + } + // [REJECT] aggregate.data.index == 0 + if (attData.index !== 0) { + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.NON_ZERO_ATTESTATION_DATA_INDEX}); + } + } else { + attIndex = attData.index; + } - const attIndex = attData.index; const attEpoch = computeEpochAtSlot(attSlot); const attTarget = attData.target; const targetEpoch = attTarget.epoch; @@ -154,7 +168,7 @@ async function validateAggregateAndProof( // [REJECT] The committee index is within the expected range // -- i.e. data.index < get_committee_count_per_slot(state, data.target.epoch) const committeeIndices = cachedAttData - ? cachedAttData.committeeIndices + ? cachedAttData.committeeValidatorIndices : getCommitteeIndices(shuffling, attSlot, attIndex); // [REJECT] The number of aggregation bits matches the committee size @@ -163,11 +177,16 @@ async function validateAggregateAndProof( throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.WRONG_NUMBER_OF_AGGREGATION_BITS}); } const attestingIndices = aggregate.aggregationBits.intersectValues(committeeIndices); - const indexedAttestation: phase0.IndexedAttestation = { + + const indexedAttestationContent: IndexedAttestation = { attestingIndices, data: attData, signature: aggregate.signature, }; + const indexedAttestation = + ForkSeq[fork] >= ForkSeq.electra + ? 
(indexedAttestationContent as electra.IndexedAttestation) + : (indexedAttestationContent as phase0.IndexedAttestation); // TODO: Check this before regen // [REJECT] The attestation has participants -- that is, diff --git a/packages/beacon-node/src/chain/validation/attestation.ts b/packages/beacon-node/src/chain/validation/attestation.ts index 1116e87e1d2..4ceaf64d658 100644 --- a/packages/beacon-node/src/chain/validation/attestation.ts +++ b/packages/beacon-node/src/chain/validation/attestation.ts @@ -1,6 +1,26 @@ -import {phase0, Epoch, Root, Slot, RootHex, ssz} from "@lodestar/types"; +import {BitArray} from "@chainsafe/ssz"; +import { + phase0, + Epoch, + Root, + Slot, + RootHex, + ssz, + electra, + isElectraAttestation, + CommitteeIndex, + Attestation, + IndexedAttestation, +} from "@lodestar/types"; import {ProtoBlock} from "@lodestar/fork-choice"; -import {ATTESTATION_SUBNET_COUNT, SLOTS_PER_EPOCH, ForkName, ForkSeq, DOMAIN_BEACON_ATTESTER} from "@lodestar/params"; +import { + ATTESTATION_SUBNET_COUNT, + SLOTS_PER_EPOCH, + ForkName, + ForkSeq, + DOMAIN_BEACON_ATTESTER, + isForkPostElectra, +} from "@lodestar/params"; import { computeEpochAtSlot, createSingleSignatureSetFromComponents, @@ -17,12 +37,15 @@ import {AttestationError, AttestationErrorCode, GossipAction} from "../errors/in import {MAXIMUM_GOSSIP_CLOCK_DISPARITY_SEC} from "../../constants/index.js"; import {RegenCaller} from "../regen/index.js"; import { - AttDataBase64, getAggregationBitsFromAttestationSerialized, - getAttDataBase64FromAttestationSerialized, + getAttDataFromAttestationSerialized, + getAttDataFromSignedAggregateAndProofElectra, + getCommitteeBitsFromAttestationSerialized, + getCommitteeBitsFromSignedAggregateAndProofElectra, + getAttDataFromSignedAggregateAndProofPhase0, getSignatureFromAttestationSerialized, } from "../../util/sszBytes.js"; -import {AttestationDataCacheEntry} from "../seenCache/seenAttestationData.js"; +import {AttestationDataCacheEntry, SeenAttDataKey} from "../seenCache/seenAttestationData.js"; import {sszDeserializeAttestation} from "../../network/gossip/topic.js"; import {Result, wrapError} from "../../util/wrapError.js"; import {IBeaconChain} from "../interface.js"; @@ -34,16 +57,17 @@ export type BatchResult = { }; export type AttestationValidationResult = { - attestation: phase0.Attestation; - indexedAttestation: phase0.IndexedAttestation; + attestation: Attestation; + indexedAttestation: IndexedAttestation; subnet: number; attDataRootHex: RootHex; + committeeIndex: CommitteeIndex; }; export type AttestationOrBytes = ApiAttestation | GossipAttestation; /** attestation from api */ -export type ApiAttestation = {attestation: phase0.Attestation; serializedData: null}; +export type ApiAttestation = {attestation: Attestation; serializedData: null}; /** attestation from gossip */ export type GossipAttestation = { @@ -51,7 +75,9 @@ export type GossipAttestation = { serializedData: Uint8Array; // available in NetworkProcessor since we check for unknown block root attestations attSlot: Slot; - attDataBase64?: string | null; + // for old LIFO linear gossip queue we don't have attDataBase64 + // for indexed gossip queue we have attDataBase64 + attDataBase64?: SeenAttDataKey | null; }; export type Step0Result = AttestationValidationResult & { @@ -82,7 +108,7 @@ export async function validateGossipAttestation( export async function validateGossipAttestationsSameAttData( fork: ForkName, chain: IBeaconChain, - attestationOrBytesArr: AttestationOrBytes[], + attestationOrBytesArr: 
GossipAttestation[], subnet: number, // for unit test, consumers do not need to pass this step0ValidationFn = validateGossipAttestationNoSignatureCheck @@ -248,19 +274,16 @@ async function validateGossipAttestationNoSignatureCheck( // Run the checks that happen before an indexed attestation is constructed. let attestationOrCache: - | {attestation: phase0.Attestation; cache: null} + | {attestation: Attestation; cache: null} | {attestation: null; cache: AttestationDataCacheEntry; serializedData: Uint8Array}; - let attDataBase64: AttDataBase64 | null = null; + let attDataKey: SeenAttDataKey | null = null; if (attestationOrBytes.serializedData) { // gossip const attSlot = attestationOrBytes.attSlot; - // for old LIFO linear gossip queue we don't have attDataBase64 - // for indexed gossip queue we have attDataBase64 - attDataBase64 = - attestationOrBytes.attDataBase64 ?? getAttDataBase64FromAttestationSerialized(attestationOrBytes.serializedData); - const cachedAttData = attDataBase64 !== null ? chain.seenAttestationDatas.get(attSlot, attDataBase64) : null; + attDataKey = getSeenAttDataKeyFromGossipAttestation(fork, attestationOrBytes); + const cachedAttData = attDataKey !== null ? chain.seenAttestationDatas.get(attSlot, attDataKey) : null; if (cachedAttData === null) { - const attestation = sszDeserializeAttestation(attestationOrBytes.serializedData); + const attestation = sszDeserializeAttestation(fork, attestationOrBytes.serializedData); // only deserialize on the first AttestationData that's not cached attestationOrCache = {attestation, cache: null}; } else { @@ -268,7 +291,7 @@ async function validateGossipAttestationNoSignatureCheck( } } else { // api - attDataBase64 = null; + attDataKey = null; attestationOrCache = {attestation: attestationOrBytes.attestation, cache: null}; } @@ -276,10 +299,37 @@ async function validateGossipAttestationNoSignatureCheck( ? 
attestationOrCache.attestation.data : attestationOrCache.cache.attestationData; const attSlot = attData.slot; - const attIndex = attData.index; const attEpoch = computeEpochAtSlot(attSlot); const attTarget = attData.target; const targetEpoch = attTarget.epoch; + let committeeIndex; + if (attestationOrCache.attestation) { + if (isElectraAttestation(attestationOrCache.attestation)) { + // api or first time validation of a gossip attestation + const {committeeBits} = attestationOrCache.attestation; + // throw in both in case of undefined and null + if (committeeBits == null) { + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.INVALID_SERIALIZED_BYTES}); + } + + committeeIndex = committeeBits.getSingleTrueBit(); + // [REJECT] len(committee_indices) == 1, where committee_indices = get_committee_indices(aggregate) + if (committeeIndex === null) { + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.NOT_EXACTLY_ONE_COMMITTEE_BIT_SET}); + } + + // [REJECT] aggregate.data.index == 0 + if (attData.index !== 0) { + throw new AttestationError(GossipAction.REJECT, {code: AttestationErrorCode.NON_ZERO_ATTESTATION_DATA_INDEX}); + } + } else { + // phase0 attestation + committeeIndex = attData.index; + } + } else { + // found a seen AttestationData + committeeIndex = attestationOrCache.cache.committeeIndex; + } chain.metrics?.gossipAttestation.attestationSlotToClockSlot.observe( {caller: RegenCaller.validateGossipAttestation}, @@ -305,7 +355,7 @@ async function validateGossipAttestationNoSignatureCheck( // > TODO: Do this check **before** getting the target state but don't recompute zipIndexes const aggregationBits = attestationOrCache.attestation ? attestationOrCache.attestation.aggregationBits - : getAggregationBitsFromAttestationSerialized(attestationOrCache.serializedData); + : getAggregationBitsFromAttestationSerialized(fork, attestationOrCache.serializedData); if (aggregationBits === null) { throw new AttestationError(GossipAction.REJECT, { code: AttestationErrorCode.INVALID_SERIALIZED_BYTES, @@ -319,11 +369,11 @@ async function validateGossipAttestationNoSignatureCheck( }); } - let committeeIndices: Uint32Array; + let committeeValidatorIndices: Uint32Array; let getSigningRoot: () => Uint8Array; let expectedSubnet: number; if (attestationOrCache.cache) { - committeeIndices = attestationOrCache.cache.committeeIndices; + committeeValidatorIndices = attestationOrCache.cache.committeeValidatorIndices; const signingRoot = attestationOrCache.cache.signingRoot; getSigningRoot = () => signingRoot; expectedSubnet = attestationOrCache.cache.subnet; @@ -365,17 +415,17 @@ async function validateGossipAttestationNoSignatureCheck( // [REJECT] The committee index is within the expected range // -- i.e. data.index < get_committee_count_per_slot(state, data.target.epoch) - committeeIndices = getCommitteeIndices(shuffling, attSlot, attIndex); + committeeValidatorIndices = getCommitteeIndices(shuffling, attSlot, committeeIndex); getSigningRoot = () => getAttestationDataSigningRoot(chain.config, attData); - expectedSubnet = computeSubnetForSlot(shuffling, attSlot, attIndex); + expectedSubnet = computeSubnetForSlot(shuffling, attSlot, committeeIndex); } - const validatorIndex = committeeIndices[bitIndex]; + const validatorIndex = committeeValidatorIndices[bitIndex]; // [REJECT] The number of aggregation bits matches the committee size // -- i.e. len(attestation.aggregation_bits) == len(get_beacon_committee(state, data.slot, data.index)). // > TODO: Is this necessary? 
Lighthouse does not do this check. - if (aggregationBits.bitLen !== committeeIndices.length) { + if (aggregationBits.bitLen !== committeeValidatorIndices.length) { throw new AttestationError(GossipAction.REJECT, { code: AttestationErrorCode.WRONG_NUMBER_OF_AGGREGATION_BITS, }); @@ -421,6 +471,7 @@ async function validateGossipAttestationNoSignatureCheck( }); } + let committeeBits: BitArray | undefined = undefined; if (attestationOrCache.cache) { // there could be up to 6% of cpu time to compute signing root if we don't clone the signature set signatureSet = createSingleSignatureSetFromComponents( @@ -429,6 +480,7 @@ async function validateGossipAttestationNoSignatureCheck( signature ); attDataRootHex = attestationOrCache.cache.attDataRootHex; + committeeBits = attestationOrCache.cache.committeeBits; } else { signatureSet = createSingleSignatureSetFromComponents( chain.index2pubkey[validatorIndex], @@ -438,9 +490,15 @@ async function validateGossipAttestationNoSignatureCheck( // add cached attestation data before verifying signature attDataRootHex = toRootHex(ssz.phase0.AttestationData.hashTreeRoot(attData)); - if (attDataBase64) { - chain.seenAttestationDatas.add(attSlot, attDataBase64, { - committeeIndices, + // if attestation is phase0 the committeeBits is undefined anyway + committeeBits = isElectraAttestation(attestationOrCache.attestation) + ? attestationOrCache.attestation.committeeBits.clone() + : undefined; + if (attDataKey) { + chain.seenAttestationDatas.add(attSlot, attDataKey, { + committeeValidatorIndices, + committeeBits, + committeeIndex, signingRoot: signatureSet.signingRoot, subnet: expectedSubnet, // precompute this to be used in forkchoice @@ -452,20 +510,31 @@ async function validateGossipAttestationNoSignatureCheck( } // no signature check, leave that for step1 - const indexedAttestation: phase0.IndexedAttestation = { + const indexedAttestationContent = { attestingIndices, data: attData, signature, }; + const indexedAttestation = + ForkSeq[fork] >= ForkSeq.electra + ? (indexedAttestationContent as electra.IndexedAttestation) + : (indexedAttestationContent as phase0.IndexedAttestation); - const attestation: phase0.Attestation = attestationOrCache.attestation - ? attestationOrCache.attestation - : { - aggregationBits, - data: attData, - signature, - }; - return {attestation, indexedAttestation, subnet: expectedSubnet, attDataRootHex, signatureSet, validatorIndex}; + const attestation: Attestation = attestationOrCache.attestation ?? 
{ + aggregationBits, + data: attData, + committeeBits, + signature, + }; + return { + attestation, + indexedAttestation, + subnet: expectedSubnet, + attDataRootHex, + signatureSet, + validatorIndex, + committeeIndex, + }; } /** @@ -698,6 +767,10 @@ function verifyAttestationTargetRoot(headBlock: ProtoBlock, targetRoot: Root, at } } +/** + * Get a list of indices of validators in the given committee + * attestationIndex - Index of the committee in shuffling.committees + */ export function getCommitteeIndices( shuffling: EpochShuffling, attestationSlot: Slot, @@ -723,3 +796,45 @@ export function computeSubnetForSlot(shuffling: EpochShuffling, slot: number, co const committeesSinceEpochStart = shuffling.committeesPerSlot * slotsSinceEpochStart; return (committeesSinceEpochStart + committeeIndex) % ATTESTATION_SUBNET_COUNT; } + +/** + * Return fork-dependent seen attestation key + * - for pre-electra, it's the AttestationData base64 + * - for electra and later, it's the AttestationData base64 + committeeBits base64 + * + * we always have attDataBase64 from the IndexedGossipQueue, getAttDataFromAttestationSerialized() just for backward compatible when beaconAttestationBatchValidation is false + * TODO: remove beaconAttestationBatchValidation flag since the batch attestation is stable + */ +export function getSeenAttDataKeyFromGossipAttestation( + fork: ForkName, + attestation: GossipAttestation +): SeenAttDataKey | null { + const {attDataBase64, serializedData} = attestation; + if (isForkPostElectra(fork)) { + const attData = attDataBase64 ?? getAttDataFromAttestationSerialized(serializedData); + const committeeBits = getCommitteeBitsFromAttestationSerialized(serializedData); + return attData && committeeBits ? attDataBase64 + committeeBits : null; + } + + // pre-electra + return attDataBase64 ?? getAttDataFromAttestationSerialized(serializedData); +} + +/** + * Extract attestation data key from SignedAggregateAndProof Uint8Array to use cached data from SeenAttestationDatas + * - for pre-electra, it's the AttestationData base64 + * - for electra and later, it's the AttestationData base64 + committeeBits base64 + */ +export function getSeenAttDataKeyFromSignedAggregateAndProof( + fork: ForkName, + aggregateAndProof: Uint8Array +): SeenAttDataKey | null { + if (isForkPostElectra(fork)) { + const attData = getAttDataFromSignedAggregateAndProofElectra(aggregateAndProof); + const committeeBits = getCommitteeBitsFromSignedAggregateAndProofElectra(aggregateAndProof); + return attData && committeeBits ? attData + committeeBits : null; + } + + // pre-electra + return getAttDataFromSignedAggregateAndProofPhase0(aggregateAndProof); +} diff --git a/packages/beacon-node/src/chain/validation/attesterSlashing.ts b/packages/beacon-node/src/chain/validation/attesterSlashing.ts index 818812526fb..11a499c9bb5 100644 --- a/packages/beacon-node/src/chain/validation/attesterSlashing.ts +++ b/packages/beacon-node/src/chain/validation/attesterSlashing.ts @@ -9,7 +9,7 @@ import {AttesterSlashingError, AttesterSlashingErrorCode, GossipAction} from ".. 
export async function validateApiAttesterSlashing( chain: IBeaconChain, - attesterSlashing: phase0.AttesterSlashing + attesterSlashing: phase0.AttesterSlashing // TODO Electra: Handle electra.AttesterSlashing ): Promise { const prioritizeBls = true; return validateAttesterSlashing(chain, attesterSlashing, prioritizeBls); diff --git a/packages/beacon-node/src/chain/validation/signatureSets/aggregateAndProof.ts b/packages/beacon-node/src/chain/validation/signatureSets/aggregateAndProof.ts index 59787341cfb..31d93181859 100644 --- a/packages/beacon-node/src/chain/validation/signatureSets/aggregateAndProof.ts +++ b/packages/beacon-node/src/chain/validation/signatureSets/aggregateAndProof.ts @@ -1,7 +1,7 @@ import {PublicKey} from "@chainsafe/blst"; -import {DOMAIN_AGGREGATE_AND_PROOF} from "@lodestar/params"; -import {ssz} from "@lodestar/types"; -import {Epoch, phase0} from "@lodestar/types"; +import {DOMAIN_AGGREGATE_AND_PROOF, ForkSeq} from "@lodestar/params"; +import {ssz, SignedAggregateAndProof} from "@lodestar/types"; +import {Epoch} from "@lodestar/types"; import { computeSigningRoot, computeStartSlotAtEpoch, @@ -13,7 +13,7 @@ import {BeaconConfig} from "@lodestar/config"; export function getAggregateAndProofSigningRoot( config: BeaconConfig, epoch: Epoch, - aggregateAndProof: phase0.SignedAggregateAndProof + aggregateAndProof: SignedAggregateAndProof ): Uint8Array { // previously, we call `const aggregatorDomain = state.config.getDomain(state.slot, DOMAIN_AGGREGATE_AND_PROOF, slot);` // at fork boundary, it's required to dial to target epoch https://github.com/ChainSafe/lodestar/blob/v1.11.3/packages/beacon-node/src/chain/validation/attestation.ts#L573 @@ -21,14 +21,15 @@ export function getAggregateAndProofSigningRoot( const slot = computeStartSlotAtEpoch(epoch); const fork = config.getForkName(slot); const aggregatorDomain = config.getDomainAtFork(fork, DOMAIN_AGGREGATE_AND_PROOF); - return computeSigningRoot(ssz.phase0.AggregateAndProof, aggregateAndProof.message, aggregatorDomain); + const sszType = ForkSeq[fork] >= ForkSeq.electra ? 
ssz.electra.AggregateAndProof : ssz.phase0.AggregateAndProof; + return computeSigningRoot(sszType, aggregateAndProof.message, aggregatorDomain); } export function getAggregateAndProofSignatureSet( config: BeaconConfig, epoch: Epoch, aggregator: PublicKey, - aggregateAndProof: phase0.SignedAggregateAndProof + aggregateAndProof: SignedAggregateAndProof ): ISignatureSet { return createSingleSignatureSetFromComponents( aggregator, diff --git a/packages/beacon-node/src/eth1/eth1DepositDataTracker.ts b/packages/beacon-node/src/eth1/eth1DepositDataTracker.ts index f36f70abbbc..a38b3f9987d 100644 --- a/packages/beacon-node/src/eth1/eth1DepositDataTracker.ts +++ b/packages/beacon-node/src/eth1/eth1DepositDataTracker.ts @@ -1,6 +1,11 @@ import {phase0, ssz} from "@lodestar/types"; import {ChainForkConfig} from "@lodestar/config"; -import {BeaconStateAllForks, becomesNewEth1Data} from "@lodestar/state-transition"; +import { + BeaconStateAllForks, + CachedBeaconStateAllForks, + CachedBeaconStateElectra, + becomesNewEth1Data, +} from "@lodestar/state-transition"; import {ErrorAborted, TimeoutError, fromHex, Logger, isErrorAborted, sleep} from "@lodestar/utils"; import {IBeaconDb} from "../db/index.js"; @@ -67,6 +72,8 @@ export class Eth1DepositDataTracker { /** Dynamically adjusted batch size to fetch deposit logs */ private eth1GetLogsBatchSizeDynamic = MAX_BLOCKS_PER_LOG_QUERY; private readonly forcedEth1DataVote: phase0.Eth1Data | null; + /** To stop `runAutoUpdate()` in addition to AbortSignal */ + private stopPolling: boolean; constructor( opts: Eth1Options, @@ -81,6 +88,8 @@ export class Eth1DepositDataTracker { this.depositsCache = new Eth1DepositsCache(opts, config, db); this.eth1DataCache = new Eth1DataCache(config, db); this.eth1FollowDistance = config.ETH1_FOLLOW_DISTANCE; + // TODO Electra: fix scenario where node starts post-Electra and `stopPolling` will always be false + this.stopPolling = false; this.forcedEth1DataVote = opts.forcedEth1DataVote ? ssz.phase0.Eth1Data.deserialize(fromHex(opts.forcedEth1DataVote)) : null; @@ -109,10 +118,22 @@ export class Eth1DepositDataTracker { } } + // TODO Electra: Figure out an elegant way to stop eth1data polling + stopPollingEth1Data(): void { + this.stopPolling = true; + } + /** * Return eth1Data and deposits ready for block production for a given state */ - async getEth1DataAndDeposits(state: BeaconStateAllForks): Promise { + async getEth1DataAndDeposits(state: CachedBeaconStateAllForks): Promise { + if ( + state.epochCtx.isPostElectra() && + state.eth1DepositIndex >= (state as CachedBeaconStateElectra).depositRequestsStartIndex + ) { + // No need to poll eth1Data since Electra deprecates the mechanism after depositRequestsStartIndex is reached + return {eth1Data: state.eth1Data, deposits: []}; + } const eth1Data = this.forcedEth1DataVote ?? (await this.getEth1Data(state)); const deposits = await this.getDeposits(state, eth1Data); return {eth1Data, deposits}; @@ -141,7 +162,10 @@ export class Eth1DepositDataTracker { * Returns deposits to be included for a given state and eth1Data vote.
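 * Post-Electra the returned list may legitimately be empty once eth1 deposits are fully migrated to deposit requests (EIP-6110); see getEth1DepositCount used below.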
* Requires internal caches to be updated regularly to return good results */ - private async getDeposits(state: BeaconStateAllForks, eth1DataVote: phase0.Eth1Data): Promise { + private async getDeposits( + state: CachedBeaconStateAllForks, + eth1DataVote: phase0.Eth1Data + ): Promise { // No new deposits have to be included, continue if (eth1DataVote.depositCount === state.eth1DepositIndex) { return []; } @@ -162,7 +186,7 @@ export class Eth1DepositDataTracker { private async runAutoUpdate(): Promise { let lastRunMs = 0; - while (!this.signal.aborted) { + while (!this.signal.aborted && !this.stopPolling) { lastRunMs = Date.now(); try { diff --git a/packages/beacon-node/src/eth1/index.ts b/packages/beacon-node/src/eth1/index.ts index 9fdba90258a..a8ba55c5414 100644 --- a/packages/beacon-node/src/eth1/index.ts +++ b/packages/beacon-node/src/eth1/index.ts @@ -106,6 +106,10 @@ export class Eth1ForBlockProduction implements IEth1ForBlockProduction { startPollingMergeBlock(): void { return this.eth1MergeBlockTracker.startPollingMergeBlock(); } + + stopPollingEth1Data(): void { + return this.eth1DepositDataTracker?.stopPollingEth1Data(); + } } /** @@ -140,4 +144,8 @@ export class Eth1ForBlockProductionDisabled implements IEth1ForBlockProduction { startPollingMergeBlock(): void { // Ignore } + + stopPollingEth1Data(): void { + // Ignore + } } diff --git a/packages/beacon-node/src/eth1/interface.ts b/packages/beacon-node/src/eth1/interface.ts index fc9626eb5b8..54fcdd12492 100644 --- a/packages/beacon-node/src/eth1/interface.ts +++ b/packages/beacon-node/src/eth1/interface.ts @@ -62,6 +62,11 @@ export interface IEth1ForBlockProduction { * - head state not isMergeTransitionComplete */ startPollingMergeBlock(): void; + + /** + * Should stop polling eth1Data after an Electra block is finalized AND deposit_requests_start_index is reached + */ + stopPollingEth1Data(): void; } /** Different Eth1Block from phase0.Eth1Block with blockHash */ diff --git a/packages/beacon-node/src/eth1/utils/deposits.ts b/packages/beacon-node/src/eth1/utils/deposits.ts index 24916264e6d..8d0331fc01d 100644 --- a/packages/beacon-node/src/eth1/utils/deposits.ts +++ b/packages/beacon-node/src/eth1/utils/deposits.ts @@ -1,9 +1,9 @@ import {toGindex, Tree} from "@chainsafe/persistent-merkle-tree"; -import {MAX_DEPOSITS} from "@lodestar/params"; -import {BeaconStateAllForks} from "@lodestar/state-transition"; +import {toRootHex} from "@lodestar/utils"; +import {CachedBeaconStateAllForks} from "@lodestar/state-transition"; import {phase0, ssz} from "@lodestar/types"; import {FilterOptions} from "@lodestar/db"; -import {toRootHex} from "@lodestar/utils"; +import {getEth1DepositCount} from "@lodestar/state-transition"; import {Eth1Error, Eth1ErrorCode} from "../errors.js"; import {DepositTree} from "../../db/repositories/depositDataRoot.js"; @@ -11,7 +11,7 @@ export type DepositGetter = (indexRange: FilterOptions, eth1Data: pha export async function getDeposits( // eth1_deposit_index represents the next deposit index to be added - state: BeaconStateAllForks, + state: CachedBeaconStateAllForks, eth1Data: phase0.Eth1Data, depositsGetter: DepositGetter ): Promise { @@ -22,9 +22,11 @@ export async function getDeposits( throw new Eth1Error({code: Eth1ErrorCode.DEPOSIT_INDEX_TOO_HIGH, depositIndex, depositCount}); } - // Spec v0.12.2 - // assert len(body.deposits) == min(MAX_DEPOSITS, state.eth1_data.deposit_count - state.eth1_deposit_index) - const depositsLen = Math.min(MAX_DEPOSITS, depositCount - depositIndex); + const depositsLen =
getEth1DepositCount(state, eth1Data); + + if (depositsLen === 0) { + return []; // If depositsLen === 0, we can return early since no deposits will be returned from depositsGetter + } const indexRange = {gte: depositIndex, lt: depositIndex + depositsLen}; const deposits = await depositsGetter(indexRange, eth1Data); diff --git a/packages/beacon-node/src/execution/engine/http.ts b/packages/beacon-node/src/execution/engine/http.ts index c64a9715589..a69a5b94bd6 100644 --- a/packages/beacon-node/src/execution/engine/http.ts +++ b/packages/beacon-node/src/execution/engine/http.ts @@ -198,11 +198,13 @@ export class ExecutionEngineHttp implements IExecutionEngine { parentBlockRoot?: Root ): Promise { const method = - ForkSeq[fork] >= ForkSeq.deneb - ? "engine_newPayloadV3" - : ForkSeq[fork] >= ForkSeq.capella - ? "engine_newPayloadV2" - : "engine_newPayloadV1"; + ForkSeq[fork] >= ForkSeq.electra + ? "engine_newPayloadV4" + : ForkSeq[fork] >= ForkSeq.deneb + ? "engine_newPayloadV3" + : ForkSeq[fork] >= ForkSeq.capella + ? "engine_newPayloadV2" + : "engine_newPayloadV1"; const serializedExecutionPayload = serializeExecutionPayload(fork, executionPayload); @@ -218,7 +220,7 @@ export class ExecutionEngineHttp implements IExecutionEngine { const serializedVersionedHashes = serializeVersionedHashes(versionedHashes); const parentBeaconBlockRoot = serializeBeaconBlockRoot(parentBlockRoot); - const method = "engine_newPayloadV3"; + const method = ForkSeq[fork] >= ForkSeq.electra ? "engine_newPayloadV4" : "engine_newPayloadV3"; engineRequest = { method, params: [serializedExecutionPayload, serializedVersionedHashes, parentBeaconBlockRoot], @@ -392,11 +394,13 @@ export class ExecutionEngineHttp implements IExecutionEngine { shouldOverrideBuilder?: boolean; }> { const method = - ForkSeq[fork] >= ForkSeq.deneb - ? "engine_getPayloadV3" - : ForkSeq[fork] >= ForkSeq.capella - ? "engine_getPayloadV2" - : "engine_getPayloadV1"; + ForkSeq[fork] >= ForkSeq.electra + ? "engine_getPayloadV4" + : ForkSeq[fork] >= ForkSeq.deneb + ? "engine_getPayloadV3" + : ForkSeq[fork] >= ForkSeq.capella + ? "engine_getPayloadV2" + : "engine_getPayloadV1"; const payloadResponse = await this.rpc.fetchWithRetries< EngineApiRpcReturnTypes[typeof method], EngineApiRpcParamTypes[typeof method] @@ -414,8 +418,9 @@ export class ExecutionEngineHttp implements IExecutionEngine { this.payloadIdCache.prune(); } - async getPayloadBodiesByHash(blockHashes: RootHex[]): Promise<(ExecutionPayloadBody | null)[]> { - const method = "engine_getPayloadBodiesByHashV1"; + async getPayloadBodiesByHash(fork: ForkName, blockHashes: RootHex[]): Promise<(ExecutionPayloadBody | null)[]> { + const method = + ForkSeq[fork] >= ForkSeq.electra ? "engine_getPayloadBodiesByHashV2" : "engine_getPayloadBodiesByHashV1"; assertReqSizeLimit(blockHashes.length, 32); const response = await this.rpc.fetchWithRetries< EngineApiRpcReturnTypes[typeof method], @@ -425,10 +430,12 @@ export class ExecutionEngineHttp implements IExecutionEngine { } async getPayloadBodiesByRange( + fork: ForkName, startBlockNumber: number, blockCount: number ): Promise<(ExecutionPayloadBody | null)[]> { - const method = "engine_getPayloadBodiesByRangeV1"; + const method = + ForkSeq[fork] >= ForkSeq.electra ?
"engine_getPayloadBodiesByRangeV2" : "engine_getPayloadBodiesByRangeV1"; assertReqSizeLimit(blockCount, 32); const start = numToQuantity(startBlockNumber); const count = numToQuantity(blockCount); diff --git a/packages/beacon-node/src/execution/engine/interface.ts b/packages/beacon-node/src/execution/engine/interface.ts index fa1da210cd1..5226a46ac72 100644 --- a/packages/beacon-node/src/execution/engine/interface.ts +++ b/packages/beacon-node/src/execution/engine/interface.ts @@ -3,10 +3,10 @@ import {KZGCommitment, Blob, KZGProof} from "@lodestar/types/deneb"; import {Root, RootHex, capella, Wei, ExecutionPayload} from "@lodestar/types"; import {DATA} from "../../eth1/provider/utils.js"; -import {PayloadIdCache, PayloadId, WithdrawalV1} from "./payloadIdCache.js"; +import {PayloadIdCache, PayloadId, WithdrawalV1, DepositRequestV1} from "./payloadIdCache.js"; import {ExecutionPayloadBody} from "./types.js"; -export {PayloadIdCache, type PayloadId, type WithdrawalV1}; +export {PayloadIdCache, type PayloadId, type WithdrawalV1, type DepositRequestV1}; export enum ExecutionPayloadStatus { /** given payload is valid */ @@ -174,7 +174,7 @@ export interface IExecutionEngine { shouldOverrideBuilder?: boolean; }>; - getPayloadBodiesByHash(blockHash: DATA[]): Promise<(ExecutionPayloadBody | null)[]>; + getPayloadBodiesByHash(fork: ForkName, blockHash: DATA[]): Promise<(ExecutionPayloadBody | null)[]>; - getPayloadBodiesByRange(start: number, count: number): Promise<(ExecutionPayloadBody | null)[]>; + getPayloadBodiesByRange(fork: ForkName, start: number, count: number): Promise<(ExecutionPayloadBody | null)[]>; } diff --git a/packages/beacon-node/src/execution/engine/mock.ts b/packages/beacon-node/src/execution/engine/mock.ts index a99a76508df..9fed781fa52 100644 --- a/packages/beacon-node/src/execution/engine/mock.ts +++ b/packages/beacon-node/src/execution/engine/mock.ts @@ -35,6 +35,7 @@ export type ExecutionEngineMockOpts = { onlyPredefinedResponses?: boolean; capellaForkTimestamp?: number; denebForkTimestamp?: number; + electraForkTimestamp?: number; }; type ExecutionBlock = { @@ -88,15 +89,19 @@ export class ExecutionEngineMockBackend implements JsonRpcBackend { engine_newPayloadV1: this.notifyNewPayload.bind(this), engine_newPayloadV2: this.notifyNewPayload.bind(this), engine_newPayloadV3: this.notifyNewPayload.bind(this), + engine_newPayloadV4: this.notifyNewPayload.bind(this), engine_forkchoiceUpdatedV1: this.notifyForkchoiceUpdate.bind(this), engine_forkchoiceUpdatedV2: this.notifyForkchoiceUpdate.bind(this), engine_forkchoiceUpdatedV3: this.notifyForkchoiceUpdate.bind(this), engine_getPayloadV1: this.getPayload.bind(this), engine_getPayloadV2: this.getPayload.bind(this), engine_getPayloadV3: this.getPayload.bind(this), + engine_getPayloadV4: this.getPayload.bind(this), engine_getPayloadBodiesByHashV1: this.getPayloadBodiesByHash.bind(this), + engine_getPayloadBodiesByHashV2: this.getPayloadBodiesByHash.bind(this), engine_getPayloadBodiesByRangeV1: this.getPayloadBodiesByRange.bind(this), engine_getClientVersionV1: this.getClientVersionV1.bind(this), + engine_getPayloadBodiesByRangeV2: this.getPayloadBodiesByRange.bind(this), }; } @@ -394,6 +399,7 @@ export class ExecutionEngineMockBackend implements JsonRpcBackend { } private timestampToFork(timestamp: number): ForkExecution { + if (timestamp > (this.opts.electraForkTimestamp ?? Infinity)) return ForkName.electra; if (timestamp > (this.opts.denebForkTimestamp ?? 
Infinity)) return ForkName.deneb; if (timestamp > (this.opts.capellaForkTimestamp ?? Infinity)) return ForkName.capella; return ForkName.bellatrix; diff --git a/packages/beacon-node/src/execution/engine/payloadIdCache.ts b/packages/beacon-node/src/execution/engine/payloadIdCache.ts index ea37e0922e9..005a1ef1432 100644 --- a/packages/beacon-node/src/execution/engine/payloadIdCache.ts +++ b/packages/beacon-node/src/execution/engine/payloadIdCache.ts @@ -18,6 +18,26 @@ export type WithdrawalV1 = { amount: QUANTITY; }; +export type DepositRequestV1 = { + pubkey: DATA; + withdrawalCredentials: DATA; + amount: QUANTITY; + signature: DATA; + index: QUANTITY; +}; + +export type WithdrawalRequestV1 = { + sourceAddress: DATA; + validatorPubkey: DATA; + amount: QUANTITY; +}; + +export type ConsolidationRequestV1 = { + sourceAddress: DATA; + sourcePubkey: DATA; + targetPubkey: DATA; +}; + type FcuAttributes = {headBlockHash: DATA; finalizedBlockHash: DATA} & Omit; export class PayloadIdCache { diff --git a/packages/beacon-node/src/execution/engine/types.ts b/packages/beacon-node/src/execution/engine/types.ts index 85f514c953b..63cb4da88b6 100644 --- a/packages/beacon-node/src/execution/engine/types.ts +++ b/packages/beacon-node/src/execution/engine/types.ts @@ -1,4 +1,4 @@ -import {capella, deneb, Wei, bellatrix, Root, ExecutionPayload} from "@lodestar/types"; +import {capella, deneb, electra, Wei, bellatrix, Root, ExecutionPayload} from "@lodestar/types"; import { BYTES_PER_LOGS_BLOOM, FIELD_ELEMENTS_PER_BLOB, @@ -17,7 +17,7 @@ import { quantityToBigint, } from "../../eth1/provider/utils.js"; import {ExecutionPayloadStatus, BlobsBundle, PayloadAttributes, VersionedHashes} from "./interface.js"; -import {WithdrawalV1} from "./payloadIdCache.js"; +import {WithdrawalV1, DepositRequestV1, WithdrawalRequestV1, ConsolidationRequestV1} from "./payloadIdCache.js"; /* eslint-disable @typescript-eslint/naming-convention */ @@ -28,6 +28,7 @@ export type EngineApiRpcParamTypes = { engine_newPayloadV1: [ExecutionPayloadRpc]; engine_newPayloadV2: [ExecutionPayloadRpc]; engine_newPayloadV3: [ExecutionPayloadRpc, VersionedHashesRpc, DATA]; + engine_newPayloadV4: [ExecutionPayloadRpc, VersionedHashesRpc, DATA]; /** * 1. Object - Payload validity status with respect to the consensus rules: * - blockHash: DATA, 32 Bytes - block hash value of the payload @@ -51,11 +52,13 @@ export type EngineApiRpcParamTypes = { engine_getPayloadV1: [QUANTITY]; engine_getPayloadV2: [QUANTITY]; engine_getPayloadV3: [QUANTITY]; + engine_getPayloadV4: [QUANTITY]; /** * 1. Array of DATA - Array of block_hash field values of the ExecutionPayload structure * */ engine_getPayloadBodiesByHashV1: DATA[][]; + engine_getPayloadBodiesByHashV2: DATA[][]; /** * 1. 
start: QUANTITY, 64 bits - Starting block number @@ -67,6 +70,7 @@ export type EngineApiRpcParamTypes = { * Object - Instance of ClientVersion */ engine_getClientVersionV1: [ClientVersionRpc]; + engine_getPayloadBodiesByRangeV2: [start: QUANTITY, count: QUANTITY]; }; export type PayloadStatus = { @@ -83,6 +87,7 @@ export type EngineApiRpcReturnTypes = { engine_newPayloadV1: PayloadStatus; engine_newPayloadV2: PayloadStatus; engine_newPayloadV3: PayloadStatus; + engine_newPayloadV4: PayloadStatus; engine_forkchoiceUpdatedV1: { payloadStatus: PayloadStatus; payloadId: QUANTITY | null; @@ -101,12 +106,15 @@ export type EngineApiRpcReturnTypes = { engine_getPayloadV1: ExecutionPayloadRpc; engine_getPayloadV2: ExecutionPayloadResponse; engine_getPayloadV3: ExecutionPayloadResponse; + engine_getPayloadV4: ExecutionPayloadResponse; engine_getPayloadBodiesByHashV1: (ExecutionPayloadBodyRpc | null)[]; + engine_getPayloadBodiesByHashV2: (ExecutionPayloadBodyRpc | null)[]; engine_getPayloadBodiesByRangeV1: (ExecutionPayloadBodyRpc | null)[]; engine_getClientVersionV1: ClientVersionRpc[]; + engine_getPayloadBodiesByRangeV2: (ExecutionPayloadBodyRpc | null)[]; }; type ExecutionPayloadRpcWithValue = { @@ -118,9 +126,23 @@ type ExecutionPayloadRpcWithValue = { }; type ExecutionPayloadResponse = ExecutionPayloadRpc | ExecutionPayloadRpcWithValue; -export type ExecutionPayloadBodyRpc = {transactions: DATA[]; withdrawals: WithdrawalV1[] | null}; +export type ExecutionPayloadBodyRpc = { + transactions: DATA[]; + withdrawals: WithdrawalV1[] | null | undefined; + // currently there is a discrepancy between EL and CL field name references for deposit requests + // it's likely CL receipt will be renamed to requests + depositRequests: DepositRequestV1[] | null | undefined; + withdrawalRequests: WithdrawalRequestV1[] | null | undefined; + consolidationRequests: ConsolidationRequestV1[] | null | undefined; +}; -export type ExecutionPayloadBody = {transactions: bellatrix.Transaction[]; withdrawals: capella.Withdrawals | null}; +export type ExecutionPayloadBody = { + transactions: bellatrix.Transaction[]; + withdrawals: capella.Withdrawals | null; + depositRequests: electra.DepositRequests | null; + withdrawalRequests: electra.WithdrawalRequests | null; + consolidationRequests: electra.ConsolidationRequests | null; +}; export type ExecutionPayloadRpc = { parentHash: DATA; // 32 bytes @@ -141,6 +163,9 @@ export type ExecutionPayloadRpc = { blobGasUsed?: QUANTITY; // DENEB excessBlobGas?: QUANTITY; // DENEB parentBeaconBlockRoot?: QUANTITY; // DENEB + depositRequests?: DepositRequestRpc[]; // ELECTRA + withdrawalRequests?: WithdrawalRequestRpc[]; // ELECTRA + consolidationRequests?: ConsolidationRequestRpc[]; // ELECTRA }; export type WithdrawalRpc = { @@ -150,6 +175,10 @@ export type WithdrawalRpc = { amount: QUANTITY; }; +export type DepositRequestRpc = DepositRequestV1; +export type WithdrawalRequestRpc = WithdrawalRequestV1; +export type ConsolidationRequestRpc = ConsolidationRequestV1; + export type VersionedHashesRpc = DATA[]; export type PayloadAttributesRpc = { @@ -212,6 +241,14 @@ export function serializeExecutionPayload(fork: ForkName, data: ExecutionPayload payload.excessBlobGas = numToQuantity(excessBlobGas); } + // ELECTRA adds depositRequests/withdrawalRequests/consolidationRequests to the ExecutionPayload + if (ForkSeq[fork] >= ForkSeq.electra) { + const {depositRequests, withdrawalRequests, consolidationRequests} = data as electra.ExecutionPayload; + payload.depositRequests = depositRequests.map(serializeDepositRequest); +
payload.withdrawalRequests = withdrawalRequests.map(serializeWithdrawalRequest); + payload.consolidationRequests = consolidationRequests.map(serializeConsolidationRequest); + } + return payload; } @@ -297,6 +334,35 @@ export function parseExecutionPayload( (executionPayload as deneb.ExecutionPayload).excessBlobGas = quantityToBigint(excessBlobGas); } + if (ForkSeq[fork] >= ForkSeq.electra) { + // electra adds depositRequests/withdrawalRequests/consolidationRequests + const {depositRequests, withdrawalRequests, consolidationRequests} = data; + // Geth can also reply with null + if (depositRequests == null) { + throw Error( + `depositRequests missing for ${fork} >= electra executionPayload number=${executionPayload.blockNumber} hash=${data.blockHash}` + ); + } + (executionPayload as electra.ExecutionPayload).depositRequests = depositRequests.map(deserializeDepositRequest); + + if (withdrawalRequests == null) { + throw Error( + `withdrawalRequests missing for ${fork} >= electra executionPayload number=${executionPayload.blockNumber} hash=${data.blockHash}` + ); + } + (executionPayload as electra.ExecutionPayload).withdrawalRequests = + withdrawalRequests.map(deserializeWithdrawalRequest); + + if (consolidationRequests == null) { + throw Error( + `consolidationRequests missing for ${fork} >= electra executionPayload number=${executionPayload.blockNumber} hash=${data.blockHash}` + ); + } + (executionPayload as electra.ExecutionPayload).consolidationRequests = consolidationRequests.map( + deserializeConsolidationRequest + ); + } + return {executionPayload, executionPayloadValue, blobsBundle, shouldOverrideBuilder}; } @@ -363,11 +429,72 @@ export function deserializeWithdrawal(serialized: WithdrawalRpc): capella.Withdr } as capella.Withdrawal; } +export function serializeDepositRequest(depositRequest: electra.DepositRequest): DepositRequestRpc { + return { + pubkey: bytesToData(depositRequest.pubkey), + withdrawalCredentials: bytesToData(depositRequest.withdrawalCredentials), + amount: numToQuantity(depositRequest.amount), + signature: bytesToData(depositRequest.signature), + index: numToQuantity(depositRequest.index), + }; +} + +export function deserializeDepositRequest(serialized: DepositRequestRpc): electra.DepositRequest { + return { + pubkey: dataToBytes(serialized.pubkey, 48), + withdrawalCredentials: dataToBytes(serialized.withdrawalCredentials, 32), + amount: quantityToNum(serialized.amount), + signature: dataToBytes(serialized.signature, 96), + index: quantityToNum(serialized.index), + } as electra.DepositRequest; +} + +export function serializeWithdrawalRequest(withdrawalRequest: electra.WithdrawalRequest): WithdrawalRequestRpc { + return { + sourceAddress: bytesToData(withdrawalRequest.sourceAddress), + validatorPubkey: bytesToData(withdrawalRequest.validatorPubkey), + amount: numToQuantity(withdrawalRequest.amount), + }; +} + +export function deserializeWithdrawalRequest(withdrawalRequest: WithdrawalRequestRpc): electra.WithdrawalRequest { + return { + sourceAddress: dataToBytes(withdrawalRequest.sourceAddress, 20), + validatorPubkey: dataToBytes(withdrawalRequest.validatorPubkey, 48), + amount: quantityToNum(withdrawalRequest.amount), + }; +} + +export function serializeConsolidationRequest( + consolidationRequest: electra.ConsolidationRequest +): ConsolidationRequestRpc { + return { + sourceAddress: bytesToData(consolidationRequest.sourceAddress), + sourcePubkey: bytesToData(consolidationRequest.sourcePubkey), + targetPubkey: bytesToData(consolidationRequest.targetPubkey), + }; +} + +export function
deserializeConsolidationRequest( + consolidationRequest: ConsolidationRequestRpc +): electra.ConsolidationRequest { + return { + sourceAddress: dataToBytes(consolidationRequest.sourceAddress, 20), + sourcePubkey: dataToBytes(consolidationRequest.sourcePubkey, 48), + targetPubkey: dataToBytes(consolidationRequest.targetPubkey, 48), + }; +} + export function deserializeExecutionPayloadBody(data: ExecutionPayloadBodyRpc | null): ExecutionPayloadBody | null { return data ? { transactions: data.transactions.map((tran) => dataToBytes(tran, null)), withdrawals: data.withdrawals ? data.withdrawals.map(deserializeWithdrawal) : null, + depositRequests: data.depositRequests ? data.depositRequests.map(deserializeDepositRequest) : null, + withdrawalRequests: data.withdrawalRequests ? data.withdrawalRequests.map(deserializeWithdrawalRequest) : null, + consolidationRequests: data.consolidationRequests + ? data.consolidationRequests.map(deserializeConsolidationRequest) + : null, } : null; } @@ -377,6 +504,11 @@ export function serializeExecutionPayloadBody(data: ExecutionPayloadBody | null) ? { transactions: data.transactions.map((tran) => bytesToData(tran)), withdrawals: data.withdrawals ? data.withdrawals.map(serializeWithdrawal) : null, + depositRequests: data.depositRequests ? data.depositRequests.map(serializeDepositRequest) : null, + withdrawalRequests: data.withdrawalRequests ? data.withdrawalRequests.map(serializeWithdrawalRequest) : null, + consolidationRequests: data.consolidationRequests + ? data.consolidationRequests.map(serializeConsolidationRequest) + : null, } : null; } diff --git a/packages/beacon-node/src/metrics/metrics/beacon.ts b/packages/beacon-node/src/metrics/metrics/beacon.ts index 141121de907..96bb6bea417 100644 --- a/packages/beacon-node/src/metrics/metrics/beacon.ts +++ b/packages/beacon-node/src/metrics/metrics/beacon.ts @@ -120,6 +120,13 @@ export function createBeaconMetrics(register: RegistryMetricCreator) { }), }, + headState: { + unfinalizedPubkeyCacheSize: register.gauge({ + name: "head_state_unfinalized_pubkey_cache_size", + help: "Current size of the unfinalizedPubkey2Index cache in the head state", + }), + }, + parentBlockDistance: register.histogram({ name: "beacon_imported_block_parent_distance", help: "Histogram of distance to parent block of valid imported blocks", diff --git a/packages/beacon-node/src/metrics/metrics/lodestar.ts b/packages/beacon-node/src/metrics/metrics/lodestar.ts index f43a3f1cdbe..a0cf0a185c2 100644 --- a/packages/beacon-node/src/metrics/metrics/lodestar.ts +++ b/packages/beacon-node/src/metrics/metrics/lodestar.ts @@ -332,6 +332,10 @@ export function createLodestarMetrics( buckets: [0.05, 0.1, 0.2, 0.5, 1, 1.5], labelNames: ["source"], }), + numEffectiveBalanceUpdates: register.gauge({ + name: "lodestar_stfn_effective_balance_updates_count", + help: "Total count of effective balance updates", + }), preStateBalancesNodesPopulatedMiss: register.gauge<{source: StateCloneSource}>({ name: "lodestar_stfn_balances_nodes_populated_miss_total", help: "Total count state.balances nodesPopulated is false on stfn", @@ -374,6 +378,17 @@ export function createLodestarMetrics( help: "Total count state.validators nodesPopulated is false on stfn for post state", }), + epochCache: { + finalizedPubkeyDuplicateInsert: register.gauge({ + name: "lodestar_epoch_cache_finalized_pubkey_duplicate_insert", + help: "Total count of duplicate insert of finalized pubkeys", + }), + newUnFinalizedPubkey: register.gauge({ + name: "lodestar_epoch_cache_new_unfinalized_pubkey", 
+ help: "Total count of unfinalized pubkeys added", + }), + }, + // BLS verifier thread pool and queue bls: { @@ -1205,6 +1220,11 @@ export function createLodestarMetrics( help: "Histogram of time to serialize state to db", buckets: [0.1, 0.5, 1, 2, 3, 4], }), + numStatesUpdated: register.histogram({ + name: "lodestar_cp_state_cache_state_updated_count", + help: "Histogram of number of state cache items updated every time removing and adding pubkeys to pubkey cache", + buckets: [1, 2, 5, 10, 50, 250], + }), statePruneFromMemoryCount: register.gauge({ name: "lodestar_cp_state_cache_state_prune_from_memory_count", help: "Total number of states pruned from memory", @@ -1373,6 +1393,21 @@ export function createLodestarMetrics( help: "regen function total errors", labelNames: ["entrypoint", "caller"], }), + regenFnAddPubkeyTime: register.histogram({ + name: "lodestar_regen_fn_add_pubkey_time_seconds", + help: "Historgram of time spent on adding pubkeys to all state cache items in seconds", + buckets: [0.01, 0.1, 0.5, 1, 2, 5], + }), + regenFnDeletePubkeyTime: register.histogram({ + name: "lodestar_regen_fn_delete_pubkey_time_seconds", + help: "Histrogram of time spent on deleting pubkeys from all state cache items in seconds", + buckets: [0.01, 0.1, 0.5, 1], + }), + regenFnNumStatesUpdated: register.histogram({ + name: "lodestar_regen_state_cache_state_updated_count", + help: "Histogram of number of state cache items updated every time removing pubkeys from unfinalized cache", + buckets: [1, 2, 5, 10, 50, 250], + }), unhandledPromiseRejections: register.gauge({ name: "lodestar_unhandled_promise_rejections_total", help: "UnhandledPromiseRejection total count", diff --git a/packages/beacon-node/src/metrics/validatorMonitor.ts b/packages/beacon-node/src/metrics/validatorMonitor.ts index 5492f57edc0..34dfa6b72e0 100644 --- a/packages/beacon-node/src/metrics/validatorMonitor.ts +++ b/packages/beacon-node/src/metrics/validatorMonitor.ts @@ -13,7 +13,7 @@ import {BeaconBlock, RootHex, altair, deneb} from "@lodestar/types"; import {ChainConfig, ChainForkConfig} from "@lodestar/config"; import {ForkSeq, INTERVALS_PER_SLOT, MIN_ATTESTATION_INCLUSION_DELAY, SLOTS_PER_EPOCH} from "@lodestar/params"; import {Epoch, Slot, ValidatorIndex} from "@lodestar/types"; -import {IndexedAttestation, SignedAggregateAndProof} from "@lodestar/types/phase0"; +import {IndexedAttestation, SignedAggregateAndProof} from "@lodestar/types"; import {GENESIS_SLOT} from "../constants/constants.js"; import {LodestarMetrics} from "./metrics/lodestar.js"; @@ -644,7 +644,14 @@ export function createValidatorMonitor( } // Compute summaries of previous epoch attestation performance - const prevEpoch = Math.max(0, computeEpochAtSlot(headState.slot) - 1); + const prevEpoch = computeEpochAtSlot(headState.slot) - 1; + + // During the end of first epoch, the prev epoch with be -1 + // Skip this as there is no attestation and block proposal summary in epoch -1 + if (prevEpoch === -1) { + return; + } + const rootCache = new RootHexCache(headState); if (config.getForkSeq(headState.slot) >= ForkSeq.altair) { diff --git a/packages/beacon-node/src/network/gossip/interface.ts b/packages/beacon-node/src/network/gossip/interface.ts index 25a871b4e2a..ab9b8a65978 100644 --- a/packages/beacon-node/src/network/gossip/interface.ts +++ b/packages/beacon-node/src/network/gossip/interface.ts @@ -11,6 +11,8 @@ import { phase0, SignedBeaconBlock, Slot, + Attestation, + SignedAggregateAndProof, } from "@lodestar/types"; import {BeaconConfig} from 
"@lodestar/config"; import {Logger} from "@lodestar/utils"; @@ -80,8 +82,8 @@ export type SSZTypeOfGossipTopic = T extends {type: infer export type GossipTypeMap = { [GossipType.beacon_block]: SignedBeaconBlock; [GossipType.blob_sidecar]: deneb.BlobSidecar; - [GossipType.beacon_aggregate_and_proof]: phase0.SignedAggregateAndProof; - [GossipType.beacon_attestation]: phase0.Attestation; + [GossipType.beacon_aggregate_and_proof]: SignedAggregateAndProof; + [GossipType.beacon_attestation]: Attestation; [GossipType.voluntary_exit]: phase0.SignedVoluntaryExit; [GossipType.proposer_slashing]: phase0.ProposerSlashing; [GossipType.attester_slashing]: phase0.AttesterSlashing; @@ -95,8 +97,8 @@ export type GossipTypeMap = { export type GossipFnByType = { [GossipType.beacon_block]: (signedBlock: SignedBeaconBlock) => Promise | void; [GossipType.blob_sidecar]: (blobSidecar: deneb.BlobSidecar) => Promise | void; - [GossipType.beacon_aggregate_and_proof]: (aggregateAndProof: phase0.SignedAggregateAndProof) => Promise | void; - [GossipType.beacon_attestation]: (attestation: phase0.Attestation) => Promise | void; + [GossipType.beacon_aggregate_and_proof]: (aggregateAndProof: SignedAggregateAndProof) => Promise | void; + [GossipType.beacon_attestation]: (attestation: Attestation) => Promise | void; [GossipType.voluntary_exit]: (voluntaryExit: phase0.SignedVoluntaryExit) => Promise | void; [GossipType.proposer_slashing]: (proposerSlashing: phase0.ProposerSlashing) => Promise | void; [GossipType.attester_slashing]: (attesterSlashing: phase0.AttesterSlashing) => Promise | void; diff --git a/packages/beacon-node/src/network/gossip/topic.ts b/packages/beacon-node/src/network/gossip/topic.ts index 4923b71e688..b7c7425584c 100644 --- a/packages/beacon-node/src/network/gossip/topic.ts +++ b/packages/beacon-node/src/network/gossip/topic.ts @@ -1,4 +1,4 @@ -import {phase0, ssz, sszTypesFor} from "@lodestar/types"; +import {ssz, Attestation, sszTypesFor} from "@lodestar/types"; import {ForkDigestContext} from "@lodestar/config"; import { ATTESTATION_SUBNET_COUNT, @@ -87,13 +87,13 @@ export function getGossipSSZType(topic: GossipTopic) { case GossipType.blob_sidecar: return ssz.deneb.BlobSidecar; case GossipType.beacon_aggregate_and_proof: - return ssz.phase0.SignedAggregateAndProof; + return sszTypesFor(topic.fork).SignedAggregateAndProof; case GossipType.beacon_attestation: - return ssz.phase0.Attestation; + return sszTypesFor(topic.fork).Attestation; case GossipType.proposer_slashing: return ssz.phase0.ProposerSlashing; case GossipType.attester_slashing: - return ssz.phase0.AttesterSlashing; + return sszTypesFor(topic.fork).AttesterSlashing; case GossipType.voluntary_exit: return ssz.phase0.SignedVoluntaryExit; case GossipType.sync_committee_contribution_and_proof: @@ -128,9 +128,9 @@ export function sszDeserialize(topic: T, serializedData: /** * Deserialize a gossip serialized data into an Attestation object. 
*/ -export function sszDeserializeAttestation(serializedData: Uint8Array): phase0.Attestation { +export function sszDeserializeAttestation(fork: ForkName, serializedData: Uint8Array): Attestation { try { - return ssz.phase0.Attestation.deserialize(serializedData); + return sszTypesFor(fork).Attestation.deserialize(serializedData); } catch (e) { throw new GossipActionError(GossipAction.REJECT, {code: GossipErrorCode.INVALID_SERIALIZED_BYTES_ERROR_CODE}); } diff --git a/packages/beacon-node/src/network/interface.ts b/packages/beacon-node/src/network/interface.ts index 5012650e229..8d73379af22 100644 --- a/packages/beacon-node/src/network/interface.ts +++ b/packages/beacon-node/src/network/interface.ts @@ -26,6 +26,7 @@ import { capella, deneb, phase0, + SignedAggregateAndProof, } from "@lodestar/types"; import {PeerIdStr} from "../util/peerId.js"; import {INetworkEventBus} from "./events.js"; @@ -71,7 +72,7 @@ export interface INetwork extends INetworkCorePublic { // Gossip publishBeaconBlock(signedBlock: SignedBeaconBlock): Promise; publishBlobSidecar(blobSidecar: deneb.BlobSidecar): Promise; - publishBeaconAggregateAndProof(aggregateAndProof: phase0.SignedAggregateAndProof): Promise; + publishBeaconAggregateAndProof(aggregateAndProof: SignedAggregateAndProof): Promise; publishBeaconAttestation(attestation: phase0.Attestation, subnet: number): Promise; publishVoluntaryExit(voluntaryExit: phase0.SignedVoluntaryExit): Promise; publishBlsToExecutionChange(blsToExecutionChange: capella.SignedBLSToExecutionChange): Promise; diff --git a/packages/beacon-node/src/network/network.ts b/packages/beacon-node/src/network/network.ts index 52b9d85c006..1b3ccaaaf75 100644 --- a/packages/beacon-node/src/network/network.ts +++ b/packages/beacon-node/src/network/network.ts @@ -17,6 +17,7 @@ import { LightClientFinalityUpdate, LightClientOptimisticUpdate, LightClientUpdate, + SignedAggregateAndProof, } from "@lodestar/types"; import {routes} from "@lodestar/api"; import {ResponseIncoming} from "@lodestar/reqresp"; @@ -316,7 +317,7 @@ export class Network implements INetwork { }); } - async publishBeaconAggregateAndProof(aggregateAndProof: phase0.SignedAggregateAndProof): Promise { + async publishBeaconAggregateAndProof(aggregateAndProof: SignedAggregateAndProof): Promise { const fork = this.config.getForkName(aggregateAndProof.message.aggregate.data.slot); return this.publishGossip( {type: GossipType.beacon_aggregate_and_proof, fork}, diff --git a/packages/beacon-node/src/network/processor/gossipHandlers.ts b/packages/beacon-node/src/network/processor/gossipHandlers.ts index 6f64148b87e..9e6f08a803c 100644 --- a/packages/beacon-node/src/network/processor/gossipHandlers.ts +++ b/packages/beacon-node/src/network/processor/gossipHandlers.ts @@ -1,6 +1,6 @@ import {BeaconConfig, ChainForkConfig} from "@lodestar/config"; import {LogLevel, Logger, prettyBytes, toRootHex} from "@lodestar/utils"; -import {Root, Slot, ssz, deneb, UintNum64, SignedBeaconBlock} from "@lodestar/types"; +import {Root, Slot, ssz, deneb, UintNum64, SignedBeaconBlock, sszTypesFor} from "@lodestar/types"; import {ForkName, ForkSeq} from "@lodestar/params"; import {routes} from "@lodestar/api"; import {computeTimeAtSlot} from "@lodestar/state-transition"; @@ -37,8 +37,8 @@ import { AggregateAndProofValidationResult, validateGossipAttestationsSameAttData, validateGossipAttestation, - AttestationOrBytes, AttestationValidationResult, + GossipAttestation, } from "../../chain/validation/index.js"; import {NetworkEvent, NetworkEventBus} from 
"../events.js"; import {PeerAction} from "../peers/index.js"; @@ -421,7 +421,11 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler validationResult = await validateGossipAggregateAndProof(fork, chain, signedAggregateAndProof, serializedData); } catch (e) { if (e instanceof AttestationError && e.action === GossipAction.REJECT) { - chain.persistInvalidSszValue(ssz.phase0.SignedAggregateAndProof, signedAggregateAndProof, "gossip_reject"); + chain.persistInvalidSszValue( + sszTypesFor(fork).SignedAggregateAndProof, + signedAggregateAndProof, + "gossip_reject" + ); } throw e; } @@ -480,14 +484,14 @@ function getDefaultHandlers(modules: ValidatorFnsModules, options: GossipHandler } // Handler - const {indexedAttestation, attDataRootHex, attestation} = validationResult; + const {indexedAttestation, attDataRootHex, attestation, committeeIndex} = validationResult; metrics?.registerGossipUnaggregatedAttestation(seenTimestampSec, indexedAttestation); try { // Node may be subscribe to extra subnets (long-lived random subnets). For those, validate the messages // but don't add to attestation pool, to save CPU and RAM if (aggregatorTracker.shouldAggregate(subnet, indexedAttestation.data.slot)) { - const insertOutcome = chain.attestationPool.add(attestation, attDataRootHex); + const insertOutcome = chain.attestationPool.add(committeeIndex, attestation, attDataRootHex); metrics?.opPool.attestationPoolInsertOutcome.inc({insertOutcome}); } } catch (e) { @@ -672,7 +676,7 @@ function getBatchHandlers(modules: ValidatorFnsModules, options: GossipHandlerOp serializedData: param.gossipData.serializedData, attSlot: param.gossipData.msgSlot, attDataBase64: param.gossipData.indexed, - })) as AttestationOrBytes[]; + })) as GossipAttestation[]; const {results: validationResults, batchableBls} = await validateGossipAttestationsSameAttData( fork, chain, @@ -688,14 +692,14 @@ function getBatchHandlers(modules: ValidatorFnsModules, options: GossipHandlerOp results.push(null); // Handler - const {indexedAttestation, attDataRootHex, attestation} = validationResult.result; + const {indexedAttestation, attDataRootHex, attestation, committeeIndex} = validationResult.result; metrics?.registerGossipUnaggregatedAttestation(gossipHandlerParams[i].seenTimestampSec, indexedAttestation); try { // Node may be subscribe to extra subnets (long-lived random subnets). 
For those, validate the messages // but don't add to attestation pool, to save CPU and RAM if (aggregatorTracker.shouldAggregate(subnet, indexedAttestation.data.slot)) { - const insertOutcome = chain.attestationPool.add(attestation, attDataRootHex); + const insertOutcome = chain.attestationPool.add(committeeIndex, attestation, attDataRootHex); metrics?.opPool.attestationPoolInsertOutcome.inc({insertOutcome}); } } catch (e) { diff --git a/packages/beacon-node/src/network/processor/gossipQueues/index.ts b/packages/beacon-node/src/network/processor/gossipQueues/index.ts index 366b23b3067..347458c9144 100644 --- a/packages/beacon-node/src/network/processor/gossipQueues/index.ts +++ b/packages/beacon-node/src/network/processor/gossipQueues/index.ts @@ -1,7 +1,7 @@ import {mapValues} from "@lodestar/utils"; import {GossipType} from "../../gossip/interface.js"; import {PendingGossipsubMessage} from "../types.js"; -import {getAttDataBase64FromAttestationSerialized} from "../../../util/sszBytes.js"; +import {getGossipAttestationIndex} from "../../../util/sszBytes.js"; import {LinearGossipQueue} from "./linear.js"; import { DropType, @@ -86,7 +86,10 @@ const indexedGossipQueueOpts: { } = { [GossipType.beacon_attestation]: { maxLength: 24576, - indexFn: (item: PendingGossipsubMessage) => getAttDataBase64FromAttestationSerialized(item.msg.data), + indexFn: (item: PendingGossipsubMessage) => { + // Note indexFn is fork agnostic despite changes introduced in Electra + return getGossipAttestationIndex(item.msg.data); + }, minChunkSize: MIN_SIGNATURE_SETS_TO_BATCH_VERIFY, maxChunkSize: MAX_GOSSIP_ATTESTATION_BATCH_SIZE, }, diff --git a/packages/beacon-node/src/network/processor/gossipQueues/indexed.ts b/packages/beacon-node/src/network/processor/gossipQueues/indexed.ts index 4e29a52173f..8edba7dfaad 100644 --- a/packages/beacon-node/src/network/processor/gossipQueues/indexed.ts +++ b/packages/beacon-node/src/network/processor/gossipQueues/indexed.ts @@ -84,6 +84,7 @@ export class IndexedGossipQueueMinSize= ForkSeq.electra + ? VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE + SIGNATURE_SIZE + COMMITTEE_BITS_SIZE + : VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE + SIGNATURE_SIZE; + + if (data.length < aggregationBitsStartIndex) { return null; } - const {uint8Array, bitLen} = deserializeUint8ArrayBitListFromBytes( - data, - VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE + SIGNATURE_SIZE, - data.length - ); + const {uint8Array, bitLen} = deserializeUint8ArrayBitListFromBytes(data, aggregationBitsStartIndex, data.length); return new BitArray(uint8Array, bitLen); } @@ -91,14 +120,28 @@ export function getAggregationBitsFromAttestationSerialized(data: Uint8Array): B * Return null if data is not long enough to extract signature. */ export function getSignatureFromAttestationSerialized(data: Uint8Array): BLSSignature | null { - if (data.length < VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE + SIGNATURE_SIZE) { + const signatureStartIndex = VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE; + + if (data.length < signatureStartIndex + SIGNATURE_SIZE) { return null; } - return data.subarray( - VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE, - VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE + SIGNATURE_SIZE - ); + return data.subarray(signatureStartIndex, signatureStartIndex + SIGNATURE_SIZE); +} + +/** + * Extract committee bits from Electra attestation serialized bytes. + * Return null if data is not long enough to extract committee bits. 
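+ *
+ * Serialized electra attestation layout assumed by the offsets used below:
+ *   [4-byte offset to aggregationBits][AttestationData (128 bytes)][signature (96 bytes)][committeeBits][aggregationBits (variable)]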
+ */ +export function getCommitteeBitsFromAttestationSerialized(data: Uint8Array): CommitteeBitsBase64 | null { + const committeeBitsStartIndex = VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE + SIGNATURE_SIZE; + + if (data.length < committeeBitsStartIndex + COMMITTEE_BITS_SIZE) { + return null; + } + + committeeBitsDataBuf.set(data.subarray(committeeBitsStartIndex, committeeBitsStartIndex + COMMITTEE_BITS_SIZE)); + return committeeBitsDataBuf.toString("base64"); } // @@ -117,8 +160,9 @@ const SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET = AGGREGATE_OFFSET + VARIABLE_FIELD const SIGNED_AGGREGATE_AND_PROOF_BLOCK_ROOT_OFFSET = SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET + 8 + 8; /** - * Extract slot from signed aggregate and proof serialized bytes. - * Return null if data is not long enough to extract slot. + * Extract slot from signed aggregate and proof serialized bytes + * Return null if data is not long enough to extract slot + * This works for both phase + electra */ export function getSlotFromSignedAggregateAndProofSerialized(data: Uint8Array): Slot | null { if (data.length < SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET + SLOT_SIZE) { @@ -129,8 +173,9 @@ export function getSlotFromSignedAggregateAndProofSerialized(data: Uint8Array): } /** - * Extract block root from signed aggregate and proof serialized bytes. - * Return null if data is not long enough to extract block root. + * Extract block root from signed aggregate and proof serialized bytes + * Return null if data is not long enough to extract block root + * This works for both phase + electra */ export function getBlockRootFromSignedAggregateAndProofSerialized(data: Uint8Array): BlockRootHex | null { if (data.length < SIGNED_AGGREGATE_AND_PROOF_BLOCK_ROOT_OFFSET + ROOT_SIZE) { @@ -146,11 +191,42 @@ export function getBlockRootFromSignedAggregateAndProofSerialized(data: Uint8Arr return "0x" + blockRootBuf.toString("hex"); } +/** + * Extract AttestationData base64 from SignedAggregateAndProof for electra + * Return null if data is not long enough + */ +export function getAttDataFromSignedAggregateAndProofElectra(data: Uint8Array): AttDataBase64 | null { + const startIndex = SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET; + const endIndex = startIndex + ATTESTATION_DATA_SIZE; + + if (data.length < endIndex + SIGNATURE_SIZE + COMMITTEE_BITS_SIZE) { + return null; + } + attDataBuf.set(data.subarray(startIndex, endIndex)); + return attDataBuf.toString("base64"); +} + +/** + * Extract CommitteeBits base64 from SignedAggregateAndProof for electra + * Return null if data is not long enough + */ +export function getCommitteeBitsFromSignedAggregateAndProofElectra(data: Uint8Array): CommitteeBitsBase64 | null { + const startIndex = SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET + ATTESTATION_DATA_SIZE + SIGNATURE_SIZE; + const endIndex = startIndex + COMMITTEE_BITS_SIZE; + + if (data.length < endIndex) { + return null; + } + + committeeBitsDataBuf.set(data.subarray(startIndex, endIndex)); + return committeeBitsDataBuf.toString("base64"); +} + /** * Extract attestation data base64 from signed aggregate and proof serialized bytes. * Return null if data is not long enough to extract attestation data. 
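 * AttestationData starts at the same offset for phase0 and electra aggregates; the electra helper above additionally reads the trailing committee bits.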
*/ -export function getAttDataBase64FromSignedAggregateAndProofSerialized(data: Uint8Array): AttDataBase64 | null { +export function getAttDataFromSignedAggregateAndProofPhase0(data: Uint8Array): AttDataBase64 | null { if (data.length < SIGNED_AGGREGATE_AND_PROOF_SLOT_OFFSET + ATTESTATION_DATA_SIZE) { return null; } diff --git a/packages/beacon-node/test/memory/unfinalizedPubkey2Index.ts b/packages/beacon-node/test/memory/unfinalizedPubkey2Index.ts new file mode 100644 index 00000000000..b37967d16ca --- /dev/null +++ b/packages/beacon-node/test/memory/unfinalizedPubkey2Index.ts @@ -0,0 +1,54 @@ +import crypto from "node:crypto"; +import {Map} from "immutable"; +import {ValidatorIndex} from "@lodestar/types"; +import {toMemoryEfficientHexStr} from "@lodestar/state-transition/src/cache/pubkeyCache.js"; +import {testRunnerMemory} from "./testRunnerMemory.js"; + +// Results in MacOS Nov 2023 +// +// UnfinalizedPubkey2Index 1000 keys - 274956.5 bytes / instance +// UnfinalizedPubkey2Index 10000 keys - 2591129.3 bytes / instance +// UnfinalizedPubkey2Index 100000 keys - 27261443.4 bytes / instance + +testRunnerMemoryBpi([ + { + id: "UnfinalizedPubkey2Index 1000 keys", + getInstance: () => getRandomMap(1000, () => toMemoryEfficientHexStr(crypto.randomBytes(48))), + }, + { + id: "UnfinalizedPubkey2Index 10000 keys", + getInstance: () => getRandomMap(10000, () => toMemoryEfficientHexStr(crypto.randomBytes(48))), + }, + { + id: "UnfinalizedPubkey2Index 100000 keys", + getInstance: () => getRandomMap(100000, () => toMemoryEfficientHexStr(crypto.randomBytes(48))), + }, +]); + +function getRandomMap(n: number, getKey: (i: number) => string): Map { + const map = Map(); + + return map.withMutations((m) => { + for (let i = 0; i < n; i++) { + m.set(getKey(i), i); + } + }); +} + +/** + * Test bytes per instance in different representations of raw binary data + */ +function testRunnerMemoryBpi(testCases: {getInstance: (bytes: number) => unknown; id: string}[]): void { + const longestId = Math.max(...testCases.map(({id}) => id.length)); + + for (const {id, getInstance} of testCases) { + const bpi = testRunnerMemory({ + getInstance, + convergeFactor: 1 / 100, + sampleEvery: 5, + }); + + // eslint-disable-next-line no-console + console.log(`${id.padEnd(longestId)} - ${bpi.toFixed(1)} bytes / instance`); + } +} diff --git a/packages/beacon-node/test/mocks/clock.ts b/packages/beacon-node/test/mocks/clock.ts index c38794bf16d..6f09bd29249 100644 --- a/packages/beacon-node/test/mocks/clock.ts +++ b/packages/beacon-node/test/mocks/clock.ts @@ -74,5 +74,6 @@ export function getMockedClock(): Mocked { }, currentSlotWithGossipDisparity: undefined, isCurrentSlotGivenGossipDisparity: vi.fn(), + secFromSlot: vi.fn(), } as unknown as Mocked; } diff --git a/packages/beacon-node/test/mocks/mockedBeaconChain.ts b/packages/beacon-node/test/mocks/mockedBeaconChain.ts index cc85cfd7d55..addeacf26a8 100644 --- a/packages/beacon-node/test/mocks/mockedBeaconChain.ts +++ b/packages/beacon-node/test/mocks/mockedBeaconChain.ts @@ -124,7 +124,7 @@ vi.mock("../../src/chain/chain.js", async (importActual) => { // @ts-expect-error eth1: new Eth1ForBlockProduction(), opPool: new OpPool(), - aggregatedAttestationPool: new AggregatedAttestationPool(), + aggregatedAttestationPool: new AggregatedAttestationPool(config), // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-expect-error beaconProposerCache: new BeaconProposerCache(), diff --git a/packages/beacon-node/test/perf/chain/opPools/aggregatedAttestationPool.test.ts 
b/packages/beacon-node/test/perf/chain/opPools/aggregatedAttestationPool.test.ts index 60ff6ce4830..63fc4ee2e12 100644 --- a/packages/beacon-node/test/perf/chain/opPools/aggregatedAttestationPool.test.ts +++ b/packages/beacon-node/test/perf/chain/opPools/aggregatedAttestationPool.test.ts @@ -10,6 +10,8 @@ import { import {HISTORICAL_ROOTS_LIMIT, SLOTS_PER_EPOCH} from "@lodestar/params"; import {ExecutionStatus, ForkChoice, IForkChoiceStore, ProtoArray, DataAvailabilityStatus} from "@lodestar/fork-choice"; import {ssz} from "@lodestar/types"; + +import {createChainForkConfig, defaultChainConfig} from "@lodestar/config"; // eslint-disable-next-line import/no-relative-packages import {generatePerfTestCachedStateAltair} from "../../../../../state-transition/test/perf/util.js"; import {AggregatedAttestationPool} from "../../../../src/chain/opPools/aggregatedAttestationPool.js"; @@ -230,7 +232,9 @@ function getAggregatedAttestationPool( numMissedVotes: number, numBadVotes: number ): AggregatedAttestationPool { - const pool = new AggregatedAttestationPool(); + const config = createChainForkConfig(defaultChainConfig); + + const pool = new AggregatedAttestationPool(config); for (let epochSlot = 0; epochSlot < SLOTS_PER_EPOCH; epochSlot++) { const slot = state.slot - 1 - epochSlot; const epoch = computeEpochAtSlot(slot); diff --git a/packages/beacon-node/test/perf/chain/stateCache/updateUnfinalizedPubkeys.test.ts b/packages/beacon-node/test/perf/chain/stateCache/updateUnfinalizedPubkeys.test.ts new file mode 100644 index 00000000000..46659ab3d28 --- /dev/null +++ b/packages/beacon-node/test/perf/chain/stateCache/updateUnfinalizedPubkeys.test.ts @@ -0,0 +1,111 @@ +import {itBench, setBenchOpts} from "@dapplion/benchmark"; +import {Map} from "immutable"; +import {toBufferBE} from "bigint-buffer"; +import {digest} from "@chainsafe/as-sha256"; +import {SecretKey} from "@chainsafe/blst"; +import {ssz} from "@lodestar/types"; +import {type CachedBeaconStateAllForks, PubkeyIndexMap} from "@lodestar/state-transition"; +import {bytesToBigInt, intToBytes} from "@lodestar/utils"; +import {InMemoryCheckpointStateCache, BlockStateCacheImpl} from "../../../../src/chain/stateCache/index.js"; +import {BlockStateCache} from "../../../../src/chain/stateCache/types.js"; +import {generateCachedElectraState} from "../../../utils/state.js"; + +// Benchmark date from Mon Nov 21 2023 - Intel Core i7-9750H @ 2.60Ghz +// ✔ updateUnfinalizedPubkeys - updating 10 pubkeys 1444.173 ops/s 692.4380 us/op - 1057 runs 6.03 s +// ✔ updateUnfinalizedPubkeys - updating 100 pubkeys 189.5965 ops/s 5.274358 ms/op - 57 runs 1.15 s +// ✔ updateUnfinalizedPubkeys - updating 1000 pubkeys 12.90495 ops/s 77.48967 ms/op - 13 runs 1.62 s +describe("updateUnfinalizedPubkeys perf tests", function () { + setBenchOpts({noThreshold: true}); + + const numPubkeysToBeFinalizedCases = [10, 100, 1000]; + const numCheckpointStateCache = 8; + const numStateCache = 3 * 32; + + let checkpointStateCache: InMemoryCheckpointStateCache; + let stateCache: BlockStateCache; + + const unfinalizedPubkey2Index = generatePubkey2Index(0, Math.max.apply(null, numPubkeysToBeFinalizedCases)); + const baseState = generateCachedElectraState(); + + for (const numPubkeysToBeFinalized of numPubkeysToBeFinalizedCases) { + itBench({ + id: `updateUnfinalizedPubkeys - updating ${numPubkeysToBeFinalized} pubkeys`, + beforeEach: async () => { + baseState.epochCtx.unfinalizedPubkey2index = Map(unfinalizedPubkey2Index.map); + baseState.epochCtx.pubkey2index = new PubkeyIndexMap(); + 
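+        // clear the finalized caches (pubkey2index above, index2pubkey below) so every generated pubkey starts out unfinalized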
baseState.epochCtx.index2pubkey = []; + + checkpointStateCache = new InMemoryCheckpointStateCache({}); + stateCache = new BlockStateCacheImpl({}); + + for (let i = 0; i < numCheckpointStateCache; i++) { + const clonedState = baseState.clone(); + const checkpoint = ssz.phase0.Checkpoint.defaultValue(); + + clonedState.slot = i; + checkpoint.epoch = i; // Assigning arbitrary non-duplicate values to ensure checkpointStateCache correctly saves all the states + + checkpointStateCache.add(checkpoint, clonedState); + } + + for (let i = 0; i < numStateCache; i++) { + const clonedState = baseState.clone(); + clonedState.slot = i; + stateCache.add(clonedState); + } + }, + fn: async () => { + const newFinalizedValidators = baseState.epochCtx.unfinalizedPubkey2index.filter( + (index, _pubkey) => index < numPubkeysToBeFinalized + ); + + const states = stateCache.getStates(); + const cpStates = checkpointStateCache.getStates(); + + const firstState = states.next().value as CachedBeaconStateAllForks; + firstState.epochCtx.addFinalizedPubkeys(newFinalizedValidators); + + const pubkeysToDelete = Array.from(newFinalizedValidators.keys()); + + firstState.epochCtx.deleteUnfinalizedPubkeys(pubkeysToDelete); + + for (const s of states) { + s.epochCtx.deleteUnfinalizedPubkeys(pubkeysToDelete); + } + + for (const s of cpStates) { + s.epochCtx.deleteUnfinalizedPubkeys(pubkeysToDelete); + } + }, + }); + } + + function generatePubkey2Index(startIndex: number, endIndex: number): PubkeyIndexMap { + const pubkey2Index = new PubkeyIndexMap(); + const pubkeys = generatePubkeys(endIndex - startIndex); + + for (let i = startIndex; i < endIndex; i++) { + pubkey2Index.set(pubkeys[i], i); + } + + return pubkey2Index; + } + + function generatePubkeys(validatorCount: number): Uint8Array[] { + const keys = []; + + for (let i = 0; i < validatorCount; i++) { + const sk = generatePrivateKey(i); + const pk = sk.toPublicKey().toBytes(); + keys.push(pk); + } + + return keys; + } + + function generatePrivateKey(index: number): SecretKey { + const secretKeyBytes = toBufferBE(bytesToBigInt(digest(intToBytes(index, 32))) % BigInt("38581184513"), 32); + const secret: SecretKey = SecretKey.fromBytes(secretKeyBytes); + return secret; + } +}); diff --git a/packages/beacon-node/test/perf/chain/validation/attestation.test.ts b/packages/beacon-node/test/perf/chain/validation/attestation.test.ts index 5fce9a34250..f285317474d 100644 --- a/packages/beacon-node/test/perf/chain/validation/attestation.test.ts +++ b/packages/beacon-node/test/perf/chain/validation/attestation.test.ts @@ -5,7 +5,7 @@ import {ssz} from "@lodestar/types"; import {generateTestCachedBeaconStateOnlyValidators} from "../../../../../state-transition/test/perf/util.js"; import {validateAttestation, validateGossipAttestationsSameAttData} from "../../../../src/chain/validation/index.js"; import {getAttestationValidData} from "../../../utils/validationData/attestation.js"; -import {getAttDataBase64FromAttestationSerialized} from "../../../../src/util/sszBytes.js"; +import {getAttDataFromAttestationSerialized} from "../../../../src/util/sszBytes.js"; describe("validate gossip attestation", () => { setBenchOpts({ @@ -42,7 +42,7 @@ describe("validate gossip attestation", () => { attestation: null, serializedData, attSlot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData), }, subnet0 ); @@ -67,7 +67,7 @@ describe("validate gossip attestation", () => { attestation: null, serializedData, 
attSlot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData), }; }); diff --git a/packages/beacon-node/test/scripts/el-interop/besu/common-setup.sh b/packages/beacon-node/test/scripts/el-interop/besu/common-setup.sh new file mode 100755 index 00000000000..f211f5d0714 --- /dev/null +++ b/packages/beacon-node/test/scripts/el-interop/besu/common-setup.sh @@ -0,0 +1,19 @@ +#!/bin/bash -x + +echo $TTD +echo $DATA_DIR +echo $EL_BINARY_DIR +echo $JWT_SECRET_HEX +echo $TEMPLATE_FILE + +echo $scriptDir +echo $currentDir + + +env TTD=$TTD envsubst < $scriptDir/$TEMPLATE_FILE > $DATA_DIR/genesis.json +echo "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8" > $DATA_DIR/sk.json +echo "12345678" > $DATA_DIR/password.txt +pubKey="0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" + +# echo a hex encoded 256 bit secret into a file +echo $JWT_SECRET_HEX> $DATA_DIR/jwtsecret diff --git a/packages/beacon-node/test/scripts/el-interop/besu/electra.tmpl b/packages/beacon-node/test/scripts/el-interop/besu/electra.tmpl new file mode 100644 index 00000000000..7a63bfbe36d --- /dev/null +++ b/packages/beacon-node/test/scripts/el-interop/besu/electra.tmpl @@ -0,0 +1,77 @@ +{ + "config": { + "chainId":6110, + "homesteadBlock":0, + "eip150Block":0, + "eip155Block":0, + "eip158Block":0, + "byzantiumBlock":0, + "constantinopleBlock":0, + "petersburgBlock":0, + "istanbulBlock":0, + "muirGlacierBlock":0, + "berlinBlock":0, + "londonBlock":0, + "terminalTotalDifficulty":0, + "cancunTime":0, + "experimentalEipsTime":10, + "clique": { + "period": 5, + "epoch": 30000 + }, + "depositContractAddress": "0x4242424242424242424242424242424242424242" + }, + "nonce":"0x42", + "timestamp":"0x0", + "extraData":"0x0000000000000000000000000000000000000000000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "gasLimit":"0x1C9C380", + "difficulty":"0x400000000", + "mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000", + "coinbase":"0x0000000000000000000000000000000000000000", + "alloc":{ + "0xa4664C40AACeBD82A2Db79f0ea36C06Bc6A19Adb": { + "balance": "1000000000000000000000000000" + }, + "0x4242424242424242424242424242424242424242": { + "balance": "0", + "code": 
"0x60806040526004361061003f5760003560e01c806301ffc9a71461004457806322895118146100a4578063621fd130146101ba578063c5f2892f14610244575b600080fd5b34801561005057600080fd5b506100906004803603602081101561006757600080fd5b50357fffffffff000000000000000000000000000000000000000000000000000000001661026b565b604080519115158252519081900360200190f35b6101b8600480360360808110156100ba57600080fd5b8101906020810181356401000000008111156100d557600080fd5b8201836020820111156100e757600080fd5b8035906020019184600183028401116401000000008311171561010957600080fd5b91939092909160208101903564010000000081111561012757600080fd5b82018360208201111561013957600080fd5b8035906020019184600183028401116401000000008311171561015b57600080fd5b91939092909160208101903564010000000081111561017957600080fd5b82018360208201111561018b57600080fd5b803590602001918460018302840111640100000000831117156101ad57600080fd5b919350915035610304565b005b3480156101c657600080fd5b506101cf6110b5565b6040805160208082528351818301528351919283929083019185019080838360005b838110156102095781810151838201526020016101f1565b50505050905090810190601f1680156102365780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b34801561025057600080fd5b506102596110c7565b60408051918252519081900360200190f35b60007fffffffff0000000000000000000000000000000000000000000000000000000082167f01ffc9a70000000000000000000000000000000000000000000000000000000014806102fe57507fffffffff0000000000000000000000000000000000000000000000000000000082167f8564090700000000000000000000000000000000000000000000000000000000145b92915050565b6030861461035d576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118056026913960400191505060405180910390fd5b602084146103b6576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252603681526020018061179c6036913960400191505060405180910390fd5b6060821461040f576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260298152602001806118786029913960400191505060405180910390fd5b670de0b6b3a7640000341015610470576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118526026913960400191505060405180910390fd5b633b9aca003406156104cd576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260338152602001806117d26033913960400191505060405180910390fd5b633b9aca00340467ffffffffffffffff811115610535576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252602781526020018061182b6027913960400191505060405180910390fd5b6060610540826114ba565b90507f649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c589898989858a8a6105756020546114ba565b6040805160a0808252810189905290819060208201908201606083016080840160c085018e8e80828437600083820152601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690910187810386528c815260200190508c8c808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690920188810386528c5181528c51602091820193918e019250908190849084905b83811015610648578181015183820152602001610630565b50505050905090810190601f1680156106755780820380516001836020036101000a031916815260200191505b5086810383528881526020018989808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0169092018881038452895181528951602091820193918b019250908190849084905b838110156106ef5781810151838201526020016106d7565b
50505050905090810190601f16801561071c5780820380516001836020036101000a031916815260200191505b509d505050505050505050505050505060405180910390a1600060028a8a600060801b604051602001808484808284377fffffffffffffffffffffffffffffffff0000000000000000000000000000000090941691909301908152604080517ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0818403018152601090920190819052815191955093508392506020850191508083835b602083106107fc57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016107bf565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610859573d6000803e3d6000fd5b5050506040513d602081101561086e57600080fd5b5051905060006002806108846040848a8c6116fe565b6040516020018083838082843780830192505050925050506040516020818303038152906040526040518082805190602001908083835b602083106108f857805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016108bb565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610955573d6000803e3d6000fd5b5050506040513d602081101561096a57600080fd5b5051600261097b896040818d6116fe565b60405160009060200180848480828437919091019283525050604080518083038152602092830191829052805190945090925082918401908083835b602083106109f457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016109b7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610a51573d6000803e3d6000fd5b5050506040513d6020811015610a6657600080fd5b5051604080516020818101949094528082019290925280518083038201815260609092019081905281519192909182918401908083835b60208310610ada57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610a9d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610b37573d6000803e3d6000fd5b5050506040513d6020811015610b4c57600080fd5b50516040805160208101858152929350600092600292839287928f928f92018383808284378083019250505093505050506040516020818303038152906040526040518082805190602001908083835b60208310610bd957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610b9c565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610c36573d6000803e3d6000fd5b5050506040513d6020811015610c4b57600080fd5b50516040518651600291889160009188916020918201918291908601908083835b60208310610ca957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610c6c565b6001836020036101000a0380198251168184511680821785525050505050509050018367ffffffffffffffff191667ffffffffffffffff1916815260180182815260200193505050506040516020818303038152906040526040518082805190602001908083835b60208310610d4e57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610d11565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610dab573d6000803e3d6000fd5b5050506040513d6020811015610dc057600080fd5b50516040805160208181019490945280820192909252805180830382018152606
09092019081905281519192909182918401908083835b60208310610e3457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610df7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610e91573d6000803e3d6000fd5b5050506040513d6020811015610ea657600080fd5b50519050858114610f02576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260548152602001806117486054913960600191505060405180910390fd5b60205463ffffffff11610f60576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260218152602001806117276021913960400191505060405180910390fd5b602080546001019081905560005b60208110156110a9578160011660011415610fa0578260008260208110610f9157fe5b0155506110ac95505050505050565b600260008260208110610faf57fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061102557805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610fe8565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015611082573d6000803e3d6000fd5b5050506040513d602081101561109757600080fd5b50519250600282049150600101610f6e565b50fe5b50505050505050565b60606110c26020546114ba565b905090565b6020546000908190815b60208110156112f05781600116600114156111e6576002600082602081106110f557fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061116b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161112e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156111c8573d6000803e3d6000fd5b5050506040513d60208110156111dd57600080fd5b505192506112e2565b600283602183602081106111f657fe5b015460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061126b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161122e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156112c8573d6000803e3d6000fd5b5050506040513d60208110156112dd57600080fd5b505192505b6002820491506001016110d1565b506002826112ff6020546114ba565b600060401b6040516020018084815260200183805190602001908083835b6020831061135a57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161131d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790527fffffffffffffffffffffffffffffffffffffffffffffffff000000000000000095909516920191825250604080518083037ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8018152601890920190819052815191955093508392850191508083835b6020831061143f57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101611402565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa15801561149c573d6000803e3d6000fd5b5050506040513d60208110156114b157600080fd5b50519250505090565b604080516008808252818301909252606091602082018180
36833701905050905060c082901b8060071a60f81b826000815181106114f457fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060061a60f81b8260018151811061153757fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060051a60f81b8260028151811061157a57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060041a60f81b826003815181106115bd57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060031a60f81b8260048151811061160057fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060021a60f81b8260058151811061164357fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060011a60f81b8260068151811061168657fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060001a60f81b826007815181106116c957fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a90535050919050565b6000808585111561170d578182fd5b83861115611719578182fd5b505082019391909203915056fe4465706f736974436f6e74726163743a206d65726b6c6520747265652066756c6c4465706f736974436f6e74726163743a207265636f6e7374727563746564204465706f7369744461746120646f6573206e6f74206d6174636820737570706c696564206465706f7369745f646174615f726f6f744465706f736974436f6e74726163743a20696e76616c6964207769746864726177616c5f63726564656e7469616c73206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c7565206e6f74206d756c7469706c65206f6620677765694465706f736974436f6e74726163743a20696e76616c6964207075626b6579206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f20686967684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f206c6f774465706f736974436f6e74726163743a20696e76616c6964207369676e6174757265206c656e677468a26469706673582212201dd26f37a621703009abf16e77e69c93dc50c79db7f6cc37543e3e0e3decdc9764736f6c634300060b0033", + "storage": { + "0x0000000000000000000000000000000000000000000000000000000000000022": "0xf5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb4b", + "0x0000000000000000000000000000000000000000000000000000000000000023": "0xdb56114e00fdd4c1f85c892bf35ac9a89289aaecb1ebd0a96cde606a748b5d71", + "0x0000000000000000000000000000000000000000000000000000000000000024": "0xc78009fdf07fc56a11f122370658a353aaa542ed63e44c4bc15ff4cd105ab33c", + "0x0000000000000000000000000000000000000000000000000000000000000025": "0x536d98837f2dd165a55d5eeae91485954472d56f246df256bf3cae19352a123c", + "0x0000000000000000000000000000000000000000000000000000000000000026": "0x9efde052aa15429fae05bad4d0b1d7c64da64d03d7a1854a588c2cb8430c0d30", + "0x0000000000000000000000000000000000000000000000000000000000000027": "0xd88ddfeed400a8755596b21942c1497e114c302e6118290f91e6772976041fa1", + "0x0000000000000000000000000000000000000000000000000000000000000028": "0x87eb0ddba57e35f6d286673802a4af5975e22506c7cf4c64bb6be5ee11527f2c", + "0x0000000000000000000000000000000000000000000000000000000000000029": "0x26846476fd5fc54a5d43385167c95144f2643f533cc85bb9d16b782f8d7db193", + "0x000000000000000000000000000000000000000000000000000000000000002a": "0x506d86582d252405b840018792cad2bf1259f1ef5aa5f887e13cb2f0094f51e1", + "0x000000000000000000000000000000000000000000000000000000000000002b": "0xffff0ad7e659772f9534c195c815efc4014ef1e1daed4404c06385d11192e92b", + 
"0x000000000000000000000000000000000000000000000000000000000000002c": "0x6cf04127db05441cd833107a52be852868890e4317e6a02ab47683aa75964220", + "0x000000000000000000000000000000000000000000000000000000000000002d": "0xb7d05f875f140027ef5118a2247bbb84ce8f2f0f1123623085daf7960c329f5f", + "0x000000000000000000000000000000000000000000000000000000000000002e": "0xdf6af5f5bbdb6be9ef8aa618e4bf8073960867171e29676f8b284dea6a08a85e", + "0x000000000000000000000000000000000000000000000000000000000000002f": "0xb58d900f5e182e3c50ef74969ea16c7726c549757cc23523c369587da7293784", + "0x0000000000000000000000000000000000000000000000000000000000000030": "0xd49a7502ffcfb0340b1d7885688500ca308161a7f96b62df9d083b71fcc8f2bb", + "0x0000000000000000000000000000000000000000000000000000000000000031": "0x8fe6b1689256c0d385f42f5bbe2027a22c1996e110ba97c171d3e5948de92beb", + "0x0000000000000000000000000000000000000000000000000000000000000032": "0x8d0d63c39ebade8509e0ae3c9c3876fb5fa112be18f905ecacfecb92057603ab", + "0x0000000000000000000000000000000000000000000000000000000000000033": "0x95eec8b2e541cad4e91de38385f2e046619f54496c2382cb6cacd5b98c26f5a4", + "0x0000000000000000000000000000000000000000000000000000000000000034": "0xf893e908917775b62bff23294dbbe3a1cd8e6cc1c35b4801887b646a6f81f17f", + "0x0000000000000000000000000000000000000000000000000000000000000035": "0xcddba7b592e3133393c16194fac7431abf2f5485ed711db282183c819e08ebaa", + "0x0000000000000000000000000000000000000000000000000000000000000036": "0x8a8d7fe3af8caa085a7639a832001457dfb9128a8061142ad0335629ff23ff9c", + "0x0000000000000000000000000000000000000000000000000000000000000037": "0xfeb3c337d7a51a6fbf00b9e34c52e1c9195c969bd4e7a0bfd51d5c5bed9c1167", + "0x0000000000000000000000000000000000000000000000000000000000000038": "0xe71f0aa83cc32edfbefa9f4d3e0174ca85182eec9f3a09f6a6c0df6377a510d7", + "0x0000000000000000000000000000000000000000000000000000000000000039": "0x31206fa80a50bb6abe29085058f16212212a60eec8f049fecb92d8c8e0a84bc0", + "0x000000000000000000000000000000000000000000000000000000000000003a": "0x21352bfecbeddde993839f614c3dac0a3ee37543f9b412b16199dc158e23b544", + "0x000000000000000000000000000000000000000000000000000000000000003b": "0x619e312724bb6d7c3153ed9de791d764a366b389af13c58bf8a8d90481a46765", + "0x000000000000000000000000000000000000000000000000000000000000003c": "0x7cdd2986268250628d0c10e385c58c6191e6fbe05191bcc04f133f2cea72c1c4", + "0x000000000000000000000000000000000000000000000000000000000000003d": "0x848930bd7ba8cac54661072113fb278869e07bb8587f91392933374d017bcbe1", + "0x000000000000000000000000000000000000000000000000000000000000003e": "0x8869ff2c22b28cc10510d9853292803328be4fb0e80495e8bb8d271f5b889636", + "0x000000000000000000000000000000000000000000000000000000000000003f": "0xb5fe28e79f1b850f8658246ce9b6a1e7b49fc06db7143e8fe0b4f2b0c5523a5c", + "0x0000000000000000000000000000000000000000000000000000000000000040": "0x985e929f70af28d0bdd1a90a808f977f597c7c778c489e98d3bd8910d31ac0f7" + } + } + }, + "number":"0x0", + "gasUsed":"0x0", + "parentHash":"0x0000000000000000000000000000000000000000000000000000000000000000", + "baseFeePerGas":"0x7" +} diff --git a/packages/beacon-node/test/scripts/el-interop/besu/post-merge.sh b/packages/beacon-node/test/scripts/el-interop/besu/post-merge.sh new file mode 100755 index 00000000000..d864814ece7 --- /dev/null +++ b/packages/beacon-node/test/scripts/el-interop/besu/post-merge.sh @@ -0,0 +1,8 @@ +#!/bin/bash -x + +scriptDir=$(dirname $0) +currentDir=$(pwd) + +. 
$scriptDir/common-setup.sh + +$EL_BINARY_DIR/besu --engine-rpc-enabled --rpc-http-enabled --rpc-http-api ADMIN,ETH,MINER,NET --rpc-http-port $ETH_PORT --engine-rpc-port $ENGINE_PORT --engine-jwt-secret $currentDir/$DATA_DIR/jwtsecret --data-path $DATA_DIR --data-storage-format BONSAI --genesis-file $DATA_DIR/genesis.json diff --git a/packages/beacon-node/test/scripts/el-interop/besudocker/common-setup.sh b/packages/beacon-node/test/scripts/el-interop/besudocker/common-setup.sh new file mode 100644 index 00000000000..b3d93190ef2 --- /dev/null +++ b/packages/beacon-node/test/scripts/el-interop/besudocker/common-setup.sh @@ -0,0 +1,22 @@ +#!/bin/bash -x + +echo $TTD +echo $DATA_DIR +echo $EL_BINARY_DIR +echo $JWT_SECRET_HEX +echo $TEMPLATE_FILE + +echo $scriptDir +echo $currentDir + + +env TTD=$TTD envsubst < $scriptDir/$TEMPLATE_FILE > $DATA_DIR/genesis.json +echo "45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8" > $DATA_DIR/sk.json +echo "12345678" > $DATA_DIR/password.txt +pubKey="0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" + +# echo a hex encoded 256 bit secret into a file +echo $JWT_SECRET_HEX> $DATA_DIR/jwtsecret +# clear any previous docker dangling docker run +docker rm -f custom-execution +rm -rf $DATA_DIR/besu diff --git a/packages/beacon-node/test/scripts/el-interop/besudocker/electra.tmpl b/packages/beacon-node/test/scripts/el-interop/besudocker/electra.tmpl new file mode 100644 index 00000000000..7a63bfbe36d --- /dev/null +++ b/packages/beacon-node/test/scripts/el-interop/besudocker/electra.tmpl @@ -0,0 +1,77 @@ +{ + "config": { + "chainId":6110, + "homesteadBlock":0, + "eip150Block":0, + "eip155Block":0, + "eip158Block":0, + "byzantiumBlock":0, + "constantinopleBlock":0, + "petersburgBlock":0, + "istanbulBlock":0, + "muirGlacierBlock":0, + "berlinBlock":0, + "londonBlock":0, + "terminalTotalDifficulty":0, + "cancunTime":0, + "experimentalEipsTime":10, + "clique": { + "period": 5, + "epoch": 30000 + }, + "depositContractAddress": "0x4242424242424242424242424242424242424242" + }, + "nonce":"0x42", + "timestamp":"0x0", + "extraData":"0x0000000000000000000000000000000000000000000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "gasLimit":"0x1C9C380", + "difficulty":"0x400000000", + "mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000", + "coinbase":"0x0000000000000000000000000000000000000000", + "alloc":{ + "0xa4664C40AACeBD82A2Db79f0ea36C06Bc6A19Adb": { + "balance": "1000000000000000000000000000" + }, + "0x4242424242424242424242424242424242424242": { + "balance": "0", + "code": 
"0x60806040526004361061003f5760003560e01c806301ffc9a71461004457806322895118146100a4578063621fd130146101ba578063c5f2892f14610244575b600080fd5b34801561005057600080fd5b506100906004803603602081101561006757600080fd5b50357fffffffff000000000000000000000000000000000000000000000000000000001661026b565b604080519115158252519081900360200190f35b6101b8600480360360808110156100ba57600080fd5b8101906020810181356401000000008111156100d557600080fd5b8201836020820111156100e757600080fd5b8035906020019184600183028401116401000000008311171561010957600080fd5b91939092909160208101903564010000000081111561012757600080fd5b82018360208201111561013957600080fd5b8035906020019184600183028401116401000000008311171561015b57600080fd5b91939092909160208101903564010000000081111561017957600080fd5b82018360208201111561018b57600080fd5b803590602001918460018302840111640100000000831117156101ad57600080fd5b919350915035610304565b005b3480156101c657600080fd5b506101cf6110b5565b6040805160208082528351818301528351919283929083019185019080838360005b838110156102095781810151838201526020016101f1565b50505050905090810190601f1680156102365780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b34801561025057600080fd5b506102596110c7565b60408051918252519081900360200190f35b60007fffffffff0000000000000000000000000000000000000000000000000000000082167f01ffc9a70000000000000000000000000000000000000000000000000000000014806102fe57507fffffffff0000000000000000000000000000000000000000000000000000000082167f8564090700000000000000000000000000000000000000000000000000000000145b92915050565b6030861461035d576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118056026913960400191505060405180910390fd5b602084146103b6576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252603681526020018061179c6036913960400191505060405180910390fd5b6060821461040f576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260298152602001806118786029913960400191505060405180910390fd5b670de0b6b3a7640000341015610470576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118526026913960400191505060405180910390fd5b633b9aca003406156104cd576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260338152602001806117d26033913960400191505060405180910390fd5b633b9aca00340467ffffffffffffffff811115610535576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252602781526020018061182b6027913960400191505060405180910390fd5b6060610540826114ba565b90507f649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c589898989858a8a6105756020546114ba565b6040805160a0808252810189905290819060208201908201606083016080840160c085018e8e80828437600083820152601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690910187810386528c815260200190508c8c808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690920188810386528c5181528c51602091820193918e019250908190849084905b83811015610648578181015183820152602001610630565b50505050905090810190601f1680156106755780820380516001836020036101000a031916815260200191505b5086810383528881526020018989808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0169092018881038452895181528951602091820193918b019250908190849084905b838110156106ef5781810151838201526020016106d7565b
50505050905090810190601f16801561071c5780820380516001836020036101000a031916815260200191505b509d505050505050505050505050505060405180910390a1600060028a8a600060801b604051602001808484808284377fffffffffffffffffffffffffffffffff0000000000000000000000000000000090941691909301908152604080517ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0818403018152601090920190819052815191955093508392506020850191508083835b602083106107fc57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016107bf565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610859573d6000803e3d6000fd5b5050506040513d602081101561086e57600080fd5b5051905060006002806108846040848a8c6116fe565b6040516020018083838082843780830192505050925050506040516020818303038152906040526040518082805190602001908083835b602083106108f857805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016108bb565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610955573d6000803e3d6000fd5b5050506040513d602081101561096a57600080fd5b5051600261097b896040818d6116fe565b60405160009060200180848480828437919091019283525050604080518083038152602092830191829052805190945090925082918401908083835b602083106109f457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016109b7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610a51573d6000803e3d6000fd5b5050506040513d6020811015610a6657600080fd5b5051604080516020818101949094528082019290925280518083038201815260609092019081905281519192909182918401908083835b60208310610ada57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610a9d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610b37573d6000803e3d6000fd5b5050506040513d6020811015610b4c57600080fd5b50516040805160208101858152929350600092600292839287928f928f92018383808284378083019250505093505050506040516020818303038152906040526040518082805190602001908083835b60208310610bd957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610b9c565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610c36573d6000803e3d6000fd5b5050506040513d6020811015610c4b57600080fd5b50516040518651600291889160009188916020918201918291908601908083835b60208310610ca957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610c6c565b6001836020036101000a0380198251168184511680821785525050505050509050018367ffffffffffffffff191667ffffffffffffffff1916815260180182815260200193505050506040516020818303038152906040526040518082805190602001908083835b60208310610d4e57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610d11565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610dab573d6000803e3d6000fd5b5050506040513d6020811015610dc057600080fd5b50516040805160208181019490945280820192909252805180830382018152606
09092019081905281519192909182918401908083835b60208310610e3457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610df7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610e91573d6000803e3d6000fd5b5050506040513d6020811015610ea657600080fd5b50519050858114610f02576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260548152602001806117486054913960600191505060405180910390fd5b60205463ffffffff11610f60576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260218152602001806117276021913960400191505060405180910390fd5b602080546001019081905560005b60208110156110a9578160011660011415610fa0578260008260208110610f9157fe5b0155506110ac95505050505050565b600260008260208110610faf57fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061102557805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610fe8565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015611082573d6000803e3d6000fd5b5050506040513d602081101561109757600080fd5b50519250600282049150600101610f6e565b50fe5b50505050505050565b60606110c26020546114ba565b905090565b6020546000908190815b60208110156112f05781600116600114156111e6576002600082602081106110f557fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061116b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161112e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156111c8573d6000803e3d6000fd5b5050506040513d60208110156111dd57600080fd5b505192506112e2565b600283602183602081106111f657fe5b015460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061126b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161122e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156112c8573d6000803e3d6000fd5b5050506040513d60208110156112dd57600080fd5b505192505b6002820491506001016110d1565b506002826112ff6020546114ba565b600060401b6040516020018084815260200183805190602001908083835b6020831061135a57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161131d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790527fffffffffffffffffffffffffffffffffffffffffffffffff000000000000000095909516920191825250604080518083037ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8018152601890920190819052815191955093508392850191508083835b6020831061143f57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101611402565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa15801561149c573d6000803e3d6000fd5b5050506040513d60208110156114b157600080fd5b50519250505090565b604080516008808252818301909252606091602082018180
36833701905050905060c082901b8060071a60f81b826000815181106114f457fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060061a60f81b8260018151811061153757fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060051a60f81b8260028151811061157a57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060041a60f81b826003815181106115bd57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060031a60f81b8260048151811061160057fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060021a60f81b8260058151811061164357fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060011a60f81b8260068151811061168657fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060001a60f81b826007815181106116c957fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a90535050919050565b6000808585111561170d578182fd5b83861115611719578182fd5b505082019391909203915056fe4465706f736974436f6e74726163743a206d65726b6c6520747265652066756c6c4465706f736974436f6e74726163743a207265636f6e7374727563746564204465706f7369744461746120646f6573206e6f74206d6174636820737570706c696564206465706f7369745f646174615f726f6f744465706f736974436f6e74726163743a20696e76616c6964207769746864726177616c5f63726564656e7469616c73206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c7565206e6f74206d756c7469706c65206f6620677765694465706f736974436f6e74726163743a20696e76616c6964207075626b6579206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f20686967684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f206c6f774465706f736974436f6e74726163743a20696e76616c6964207369676e6174757265206c656e677468a26469706673582212201dd26f37a621703009abf16e77e69c93dc50c79db7f6cc37543e3e0e3decdc9764736f6c634300060b0033", + "storage": { + "0x0000000000000000000000000000000000000000000000000000000000000022": "0xf5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb4b", + "0x0000000000000000000000000000000000000000000000000000000000000023": "0xdb56114e00fdd4c1f85c892bf35ac9a89289aaecb1ebd0a96cde606a748b5d71", + "0x0000000000000000000000000000000000000000000000000000000000000024": "0xc78009fdf07fc56a11f122370658a353aaa542ed63e44c4bc15ff4cd105ab33c", + "0x0000000000000000000000000000000000000000000000000000000000000025": "0x536d98837f2dd165a55d5eeae91485954472d56f246df256bf3cae19352a123c", + "0x0000000000000000000000000000000000000000000000000000000000000026": "0x9efde052aa15429fae05bad4d0b1d7c64da64d03d7a1854a588c2cb8430c0d30", + "0x0000000000000000000000000000000000000000000000000000000000000027": "0xd88ddfeed400a8755596b21942c1497e114c302e6118290f91e6772976041fa1", + "0x0000000000000000000000000000000000000000000000000000000000000028": "0x87eb0ddba57e35f6d286673802a4af5975e22506c7cf4c64bb6be5ee11527f2c", + "0x0000000000000000000000000000000000000000000000000000000000000029": "0x26846476fd5fc54a5d43385167c95144f2643f533cc85bb9d16b782f8d7db193", + "0x000000000000000000000000000000000000000000000000000000000000002a": "0x506d86582d252405b840018792cad2bf1259f1ef5aa5f887e13cb2f0094f51e1", + "0x000000000000000000000000000000000000000000000000000000000000002b": "0xffff0ad7e659772f9534c195c815efc4014ef1e1daed4404c06385d11192e92b", + 
"0x000000000000000000000000000000000000000000000000000000000000002c": "0x6cf04127db05441cd833107a52be852868890e4317e6a02ab47683aa75964220", + "0x000000000000000000000000000000000000000000000000000000000000002d": "0xb7d05f875f140027ef5118a2247bbb84ce8f2f0f1123623085daf7960c329f5f", + "0x000000000000000000000000000000000000000000000000000000000000002e": "0xdf6af5f5bbdb6be9ef8aa618e4bf8073960867171e29676f8b284dea6a08a85e", + "0x000000000000000000000000000000000000000000000000000000000000002f": "0xb58d900f5e182e3c50ef74969ea16c7726c549757cc23523c369587da7293784", + "0x0000000000000000000000000000000000000000000000000000000000000030": "0xd49a7502ffcfb0340b1d7885688500ca308161a7f96b62df9d083b71fcc8f2bb", + "0x0000000000000000000000000000000000000000000000000000000000000031": "0x8fe6b1689256c0d385f42f5bbe2027a22c1996e110ba97c171d3e5948de92beb", + "0x0000000000000000000000000000000000000000000000000000000000000032": "0x8d0d63c39ebade8509e0ae3c9c3876fb5fa112be18f905ecacfecb92057603ab", + "0x0000000000000000000000000000000000000000000000000000000000000033": "0x95eec8b2e541cad4e91de38385f2e046619f54496c2382cb6cacd5b98c26f5a4", + "0x0000000000000000000000000000000000000000000000000000000000000034": "0xf893e908917775b62bff23294dbbe3a1cd8e6cc1c35b4801887b646a6f81f17f", + "0x0000000000000000000000000000000000000000000000000000000000000035": "0xcddba7b592e3133393c16194fac7431abf2f5485ed711db282183c819e08ebaa", + "0x0000000000000000000000000000000000000000000000000000000000000036": "0x8a8d7fe3af8caa085a7639a832001457dfb9128a8061142ad0335629ff23ff9c", + "0x0000000000000000000000000000000000000000000000000000000000000037": "0xfeb3c337d7a51a6fbf00b9e34c52e1c9195c969bd4e7a0bfd51d5c5bed9c1167", + "0x0000000000000000000000000000000000000000000000000000000000000038": "0xe71f0aa83cc32edfbefa9f4d3e0174ca85182eec9f3a09f6a6c0df6377a510d7", + "0x0000000000000000000000000000000000000000000000000000000000000039": "0x31206fa80a50bb6abe29085058f16212212a60eec8f049fecb92d8c8e0a84bc0", + "0x000000000000000000000000000000000000000000000000000000000000003a": "0x21352bfecbeddde993839f614c3dac0a3ee37543f9b412b16199dc158e23b544", + "0x000000000000000000000000000000000000000000000000000000000000003b": "0x619e312724bb6d7c3153ed9de791d764a366b389af13c58bf8a8d90481a46765", + "0x000000000000000000000000000000000000000000000000000000000000003c": "0x7cdd2986268250628d0c10e385c58c6191e6fbe05191bcc04f133f2cea72c1c4", + "0x000000000000000000000000000000000000000000000000000000000000003d": "0x848930bd7ba8cac54661072113fb278869e07bb8587f91392933374d017bcbe1", + "0x000000000000000000000000000000000000000000000000000000000000003e": "0x8869ff2c22b28cc10510d9853292803328be4fb0e80495e8bb8d271f5b889636", + "0x000000000000000000000000000000000000000000000000000000000000003f": "0xb5fe28e79f1b850f8658246ce9b6a1e7b49fc06db7143e8fe0b4f2b0c5523a5c", + "0x0000000000000000000000000000000000000000000000000000000000000040": "0x985e929f70af28d0bdd1a90a808f977f597c7c778c489e98d3bd8910d31ac0f7" + } + } + }, + "number":"0x0", + "gasUsed":"0x0", + "parentHash":"0x0000000000000000000000000000000000000000000000000000000000000000", + "baseFeePerGas":"0x7" +} diff --git a/packages/beacon-node/test/scripts/el-interop/besudocker/post-merge.sh b/packages/beacon-node/test/scripts/el-interop/besudocker/post-merge.sh new file mode 100755 index 00000000000..d26307ee3d2 --- /dev/null +++ b/packages/beacon-node/test/scripts/el-interop/besudocker/post-merge.sh @@ -0,0 +1,8 @@ +#!/bin/bash -x + +scriptDir=$(dirname $0) +currentDir=$(pwd) + +. 
$scriptDir/common-setup.sh + +docker run --rm -u $(id -u ${USER}):$(id -g ${USER}) --name custom-execution -p $ETH_PORT:$ETH_PORT -p $ENGINE_PORT:$ENGINE_PORT -v $currentDir/$DATA_DIR:/data $EL_BINARY_DIR --engine-rpc-enabled --rpc-http-enabled --rpc-http-api ADMIN,ETH,MINER,NET --rpc-http-port $ETH_PORT --engine-rpc-port $ENGINE_PORT --engine-jwt-secret /data/jwtsecret --data-path /data/besu --data-storage-format BONSAI --genesis-file /data/genesis.json diff --git a/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/electra.tmpl b/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/electra.tmpl new file mode 100644 index 00000000000..3a06b75cd00 --- /dev/null +++ b/packages/beacon-node/test/scripts/el-interop/ethereumjsdocker/electra.tmpl @@ -0,0 +1,94 @@ +{ +"config": { +"chainId":1, +"homesteadBlock":0, +"eip150Block":0, +"eip155Block":0, +"eip158Block":0, +"byzantiumBlock":0, +"constantinopleBlock":0, +"petersburgBlock":0, +"istanbulBlock":0, +"muirGlacierBlock":0, +"berlinBlock":0, +"londonBlock":0, +"shanghaiTime":0, +"cancunTime": 0, +"pragueTime": 0, +"clique": { +"blockperiodseconds": 5, +"epochlength": 30000 +}, +"terminalTotalDifficulty":${TTD} +}, +"nonce":"0x42", +"timestamp":"0x0", +"extraData":"0x0000000000000000000000000000000000000000000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", +"gasLimit":"0x1C9C380", +"difficulty":"0x400000000", +"mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000", +"coinbase":"0x0000000000000000000000000000000000000000", +"alloc":{ + "0x610adc49ecd66cbf176a8247ebd59096c031bd9f": { + "balance": "0x6d6172697573766477000000" + }, + "0xa4664C40AACeBD82A2Db79f0ea36C06Bc6A19Adb": { + "balance": "1000000000000000000000000000" + }, + "0x00000000219ab540356cBB839Cbe05303d7705Fa": { + "balance": "0", + "code": 
"0x60806040526004361061003f5760003560e01c806301ffc9a71461004457806322895118146100a4578063621fd130146101ba578063c5f2892f14610244575b600080fd5b34801561005057600080fd5b506100906004803603602081101561006757600080fd5b50357fffffffff000000000000000000000000000000000000000000000000000000001661026b565b604080519115158252519081900360200190f35b6101b8600480360360808110156100ba57600080fd5b8101906020810181356401000000008111156100d557600080fd5b8201836020820111156100e757600080fd5b8035906020019184600183028401116401000000008311171561010957600080fd5b91939092909160208101903564010000000081111561012757600080fd5b82018360208201111561013957600080fd5b8035906020019184600183028401116401000000008311171561015b57600080fd5b91939092909160208101903564010000000081111561017957600080fd5b82018360208201111561018b57600080fd5b803590602001918460018302840111640100000000831117156101ad57600080fd5b919350915035610304565b005b3480156101c657600080fd5b506101cf6110b5565b6040805160208082528351818301528351919283929083019185019080838360005b838110156102095781810151838201526020016101f1565b50505050905090810190601f1680156102365780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b34801561025057600080fd5b506102596110c7565b60408051918252519081900360200190f35b60007fffffffff0000000000000000000000000000000000000000000000000000000082167f01ffc9a70000000000000000000000000000000000000000000000000000000014806102fe57507fffffffff0000000000000000000000000000000000000000000000000000000082167f8564090700000000000000000000000000000000000000000000000000000000145b92915050565b6030861461035d576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118056026913960400191505060405180910390fd5b602084146103b6576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252603681526020018061179c6036913960400191505060405180910390fd5b6060821461040f576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260298152602001806118786029913960400191505060405180910390fd5b670de0b6b3a7640000341015610470576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260268152602001806118526026913960400191505060405180910390fd5b633b9aca003406156104cd576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260338152602001806117d26033913960400191505060405180910390fd5b633b9aca00340467ffffffffffffffff811115610535576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252602781526020018061182b6027913960400191505060405180910390fd5b6060610540826114ba565b90507f649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c589898989858a8a6105756020546114ba565b6040805160a0808252810189905290819060208201908201606083016080840160c085018e8e80828437600083820152601f017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690910187810386528c815260200190508c8c808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe01690920188810386528c5181528c51602091820193918e019250908190849084905b83811015610648578181015183820152602001610630565b50505050905090810190601f1680156106755780820380516001836020036101000a031916815260200191505b5086810383528881526020018989808284376000838201819052601f9091017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0169092018881038452895181528951602091820193918b019250908190849084905b838110156106ef5781810151838201526020016106d7565b
50505050905090810190601f16801561071c5780820380516001836020036101000a031916815260200191505b509d505050505050505050505050505060405180910390a1600060028a8a600060801b604051602001808484808284377fffffffffffffffffffffffffffffffff0000000000000000000000000000000090941691909301908152604080517ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0818403018152601090920190819052815191955093508392506020850191508083835b602083106107fc57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016107bf565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610859573d6000803e3d6000fd5b5050506040513d602081101561086e57600080fd5b5051905060006002806108846040848a8c6116fe565b6040516020018083838082843780830192505050925050506040516020818303038152906040526040518082805190602001908083835b602083106108f857805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016108bb565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610955573d6000803e3d6000fd5b5050506040513d602081101561096a57600080fd5b5051600261097b896040818d6116fe565b60405160009060200180848480828437919091019283525050604080518083038152602092830191829052805190945090925082918401908083835b602083106109f457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016109b7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610a51573d6000803e3d6000fd5b5050506040513d6020811015610a6657600080fd5b5051604080516020818101949094528082019290925280518083038201815260609092019081905281519192909182918401908083835b60208310610ada57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610a9d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610b37573d6000803e3d6000fd5b5050506040513d6020811015610b4c57600080fd5b50516040805160208101858152929350600092600292839287928f928f92018383808284378083019250505093505050506040516020818303038152906040526040518082805190602001908083835b60208310610bd957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610b9c565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610c36573d6000803e3d6000fd5b5050506040513d6020811015610c4b57600080fd5b50516040518651600291889160009188916020918201918291908601908083835b60208310610ca957805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610c6c565b6001836020036101000a0380198251168184511680821785525050505050509050018367ffffffffffffffff191667ffffffffffffffff1916815260180182815260200193505050506040516020818303038152906040526040518082805190602001908083835b60208310610d4e57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610d11565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610dab573d6000803e3d6000fd5b5050506040513d6020811015610dc057600080fd5b50516040805160208181019490945280820192909252805180830382018152606
09092019081905281519192909182918401908083835b60208310610e3457805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610df7565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015610e91573d6000803e3d6000fd5b5050506040513d6020811015610ea657600080fd5b50519050858114610f02576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260548152602001806117486054913960600191505060405180910390fd5b60205463ffffffff11610f60576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260218152602001806117276021913960400191505060405180910390fd5b602080546001019081905560005b60208110156110a9578160011660011415610fa0578260008260208110610f9157fe5b0155506110ac95505050505050565b600260008260208110610faf57fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061102557805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101610fe8565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa158015611082573d6000803e3d6000fd5b5050506040513d602081101561109757600080fd5b50519250600282049150600101610f6e565b50fe5b50505050505050565b60606110c26020546114ba565b905090565b6020546000908190815b60208110156112f05781600116600114156111e6576002600082602081106110f557fe5b01548460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061116b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161112e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156111c8573d6000803e3d6000fd5b5050506040513d60208110156111dd57600080fd5b505192506112e2565b600283602183602081106111f657fe5b015460405160200180838152602001828152602001925050506040516020818303038152906040526040518082805190602001908083835b6020831061126b57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161122e565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa1580156112c8573d6000803e3d6000fd5b5050506040513d60208110156112dd57600080fd5b505192505b6002820491506001016110d1565b506002826112ff6020546114ba565b600060401b6040516020018084815260200183805190602001908083835b6020831061135a57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0909201916020918201910161131d565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790527fffffffffffffffffffffffffffffffffffffffffffffffff000000000000000095909516920191825250604080518083037ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8018152601890920190819052815191955093508392850191508083835b6020831061143f57805182527fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe09092019160209182019101611402565b51815160209384036101000a7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01801990921691161790526040519190930194509192505080830381855afa15801561149c573d6000803e3d6000fd5b5050506040513d60208110156114b157600080fd5b50519250505090565b604080516008808252818301909252606091602082018180
36833701905050905060c082901b8060071a60f81b826000815181106114f457fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060061a60f81b8260018151811061153757fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060051a60f81b8260028151811061157a57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060041a60f81b826003815181106115bd57fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060031a60f81b8260048151811061160057fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060021a60f81b8260058151811061164357fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060011a60f81b8260068151811061168657fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a9053508060001a60f81b826007815181106116c957fe5b60200101907effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff1916908160001a90535050919050565b6000808585111561170d578182fd5b83861115611719578182fd5b505082019391909203915056fe4465706f736974436f6e74726163743a206d65726b6c6520747265652066756c6c4465706f736974436f6e74726163743a207265636f6e7374727563746564204465706f7369744461746120646f6573206e6f74206d6174636820737570706c696564206465706f7369745f646174615f726f6f744465706f736974436f6e74726163743a20696e76616c6964207769746864726177616c5f63726564656e7469616c73206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c7565206e6f74206d756c7469706c65206f6620677765694465706f736974436f6e74726163743a20696e76616c6964207075626b6579206c656e6774684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f20686967684465706f736974436f6e74726163743a206465706f7369742076616c756520746f6f206c6f774465706f736974436f6e74726163743a20696e76616c6964207369676e6174757265206c656e677468a2646970667358221220dceca8706b29e917dacf25fceef95acac8d90d765ac926663ce4096195952b6164736f6c634300060b0033", + "storage": { + "0x0000000000000000000000000000000000000000000000000000000000000022": "0xf5a5fd42d16a20302798ef6ed309979b43003d2320d9f0e8ea9831a92759fb4b", + "0x0000000000000000000000000000000000000000000000000000000000000023": "0xdb56114e00fdd4c1f85c892bf35ac9a89289aaecb1ebd0a96cde606a748b5d71", + "0x0000000000000000000000000000000000000000000000000000000000000024": "0xc78009fdf07fc56a11f122370658a353aaa542ed63e44c4bc15ff4cd105ab33c", + "0x0000000000000000000000000000000000000000000000000000000000000025": "0x536d98837f2dd165a55d5eeae91485954472d56f246df256bf3cae19352a123c", + "0x0000000000000000000000000000000000000000000000000000000000000026": "0x9efde052aa15429fae05bad4d0b1d7c64da64d03d7a1854a588c2cb8430c0d30", + "0x0000000000000000000000000000000000000000000000000000000000000027": "0xd88ddfeed400a8755596b21942c1497e114c302e6118290f91e6772976041fa1", + "0x0000000000000000000000000000000000000000000000000000000000000028": "0x87eb0ddba57e35f6d286673802a4af5975e22506c7cf4c64bb6be5ee11527f2c", + "0x0000000000000000000000000000000000000000000000000000000000000029": "0x26846476fd5fc54a5d43385167c95144f2643f533cc85bb9d16b782f8d7db193", + "0x000000000000000000000000000000000000000000000000000000000000002a": "0x506d86582d252405b840018792cad2bf1259f1ef5aa5f887e13cb2f0094f51e1", + "0x000000000000000000000000000000000000000000000000000000000000002b": "0xffff0ad7e659772f9534c195c815efc4014ef1e1daed4404c06385d11192e92b", + 
"0x000000000000000000000000000000000000000000000000000000000000002c": "0x6cf04127db05441cd833107a52be852868890e4317e6a02ab47683aa75964220", + "0x000000000000000000000000000000000000000000000000000000000000002d": "0xb7d05f875f140027ef5118a2247bbb84ce8f2f0f1123623085daf7960c329f5f", + "0x000000000000000000000000000000000000000000000000000000000000002e": "0xdf6af5f5bbdb6be9ef8aa618e4bf8073960867171e29676f8b284dea6a08a85e", + "0x000000000000000000000000000000000000000000000000000000000000002f": "0xb58d900f5e182e3c50ef74969ea16c7726c549757cc23523c369587da7293784", + "0x0000000000000000000000000000000000000000000000000000000000000030": "0xd49a7502ffcfb0340b1d7885688500ca308161a7f96b62df9d083b71fcc8f2bb", + "0x0000000000000000000000000000000000000000000000000000000000000031": "0x8fe6b1689256c0d385f42f5bbe2027a22c1996e110ba97c171d3e5948de92beb", + "0x0000000000000000000000000000000000000000000000000000000000000032": "0x8d0d63c39ebade8509e0ae3c9c3876fb5fa112be18f905ecacfecb92057603ab", + "0x0000000000000000000000000000000000000000000000000000000000000033": "0x95eec8b2e541cad4e91de38385f2e046619f54496c2382cb6cacd5b98c26f5a4", + "0x0000000000000000000000000000000000000000000000000000000000000034": "0xf893e908917775b62bff23294dbbe3a1cd8e6cc1c35b4801887b646a6f81f17f", + "0x0000000000000000000000000000000000000000000000000000000000000035": "0xcddba7b592e3133393c16194fac7431abf2f5485ed711db282183c819e08ebaa", + "0x0000000000000000000000000000000000000000000000000000000000000036": "0x8a8d7fe3af8caa085a7639a832001457dfb9128a8061142ad0335629ff23ff9c", + "0x0000000000000000000000000000000000000000000000000000000000000037": "0xfeb3c337d7a51a6fbf00b9e34c52e1c9195c969bd4e7a0bfd51d5c5bed9c1167", + "0x0000000000000000000000000000000000000000000000000000000000000038": "0xe71f0aa83cc32edfbefa9f4d3e0174ca85182eec9f3a09f6a6c0df6377a510d7", + "0x0000000000000000000000000000000000000000000000000000000000000039": "0x31206fa80a50bb6abe29085058f16212212a60eec8f049fecb92d8c8e0a84bc0", + "0x000000000000000000000000000000000000000000000000000000000000003a": "0x21352bfecbeddde993839f614c3dac0a3ee37543f9b412b16199dc158e23b544", + "0x000000000000000000000000000000000000000000000000000000000000003b": "0x619e312724bb6d7c3153ed9de791d764a366b389af13c58bf8a8d90481a46765", + "0x000000000000000000000000000000000000000000000000000000000000003c": "0x7cdd2986268250628d0c10e385c58c6191e6fbe05191bcc04f133f2cea72c1c4", + "0x000000000000000000000000000000000000000000000000000000000000003d": "0x848930bd7ba8cac54661072113fb278869e07bb8587f91392933374d017bcbe1", + "0x000000000000000000000000000000000000000000000000000000000000003e": "0x8869ff2c22b28cc10510d9853292803328be4fb0e80495e8bb8d271f5b889636", + "0x000000000000000000000000000000000000000000000000000000000000003f": "0xb5fe28e79f1b850f8658246ce9b6a1e7b49fc06db7143e8fe0b4f2b0c5523a5c", + "0x0000000000000000000000000000000000000000000000000000000000000040": "0x985e929f70af28d0bdd1a90a808f977f597c7c778c489e98d3bd8910d31ac0f7" + } + }, + "0x25a219378dad9b3503c8268c9ca836a52427a4fb": { + "balance": "0", + "nonce": "1", + "code": "0x60203611603157600143035f35116029575f356120000143116029576120005f3506545f5260205ff35b5f5f5260205ff35b5f5ffd00" + }, + "0x00A3ca265EBcb825B45F985A16CEFB49958cE017": { + "balance": "0", + "nonce": "1", + "code": 
"0x3373fffffffffffffffffffffffffffffffffffffffe146090573615156028575f545f5260205ff35b366038141561012e5760115f54600182026001905f5b5f82111560595781019083028483029004916001019190603e565b90939004341061012e57600154600101600155600354806003026004013381556001015f3581556001016020359055600101600355005b6003546002548082038060101160a4575060105b5f5b81811460dd5780604c02838201600302600401805490600101805490600101549160601b83528260140152906034015260010160a6565b910180921460ed579060025560f8565b90505f6002555f6003555b5f548061049d141561010757505f5b60015460028282011161011c5750505f610122565b01600290035b5f555f600155604c025ff35b5f5ffd", + "storage": { + "0x0000000000000000000000000000000000000000000000000000000000000000": + "0x000000000000000000000000000000000000000000000000000000000000049d" + } + } +}, +"number":"0x0", +"gasUsed":"0x0", +"parentHash":"0x0000000000000000000000000000000000000000000000000000000000000000", +"baseFeePerGas":"0x7" +} diff --git a/packages/beacon-node/test/sim/electra-interop.test.ts b/packages/beacon-node/test/sim/electra-interop.test.ts new file mode 100644 index 00000000000..d0c00e75fd5 --- /dev/null +++ b/packages/beacon-node/test/sim/electra-interop.test.ts @@ -0,0 +1,457 @@ +import fs from "node:fs"; +import assert from "node:assert"; +import {describe, it, vi, afterAll, afterEach} from "vitest"; + +import {LogLevel, sleep} from "@lodestar/utils"; +import {ForkName, SLOTS_PER_EPOCH, UNSET_DEPOSIT_REQUESTS_START_INDEX} from "@lodestar/params"; +import {electra, Epoch, Slot} from "@lodestar/types"; +import {ValidatorProposerConfig} from "@lodestar/validator"; + +import {ChainConfig} from "@lodestar/config"; +import {TimestampFormatCode} from "@lodestar/logger"; +import {CachedBeaconStateElectra} from "@lodestar/state-transition"; +import {initializeExecutionEngine} from "../../src/execution/index.js"; +import {ExecutionPayloadStatus, PayloadAttributes} from "../../src/execution/engine/interface.js"; + +import {testLogger, TestLoggerOpts} from "../utils/logger.js"; +import {runEL, ELStartMode, ELClient, sendRawTransactionBig} from "../utils/runEl.js"; +import {defaultExecutionEngineHttpOpts} from "../../src/execution/engine/http.js"; +import {getDevBeaconNode} from "../utils/node/beacon.js"; +import {BeaconRestApiServerOpts} from "../../src/api/index.js"; +import {simTestInfoTracker} from "../utils/node/simTest.js"; +import {getAndInitDevValidators} from "../utils/node/validator.js"; +import {ClockEvent} from "../../src/util/clock.js"; +import {dataToBytes} from "../../src/eth1/provider/utils.js"; +import {bytesToData} from "../../lib/eth1/provider/utils.js"; +import {BeaconNode} from "../../src/index.js"; +import {logFilesDir} from "./params.js"; +import {shell} from "./shell.js"; + +// NOTE: How to run +// DEV_RUN=true EL_BINARY_DIR=ethpandaops/ethereumjs:master-0e06ddf EL_SCRIPT_DIR=ethereumjsdocker yarn vitest --run test/sim/electra-interop.test.ts +// ``` + +/* eslint-disable no-console, @typescript-eslint/naming-convention */ + +const jwtSecretHex = "0xdc6457099f127cf0bac78de8b297df04951281909db4f58b43def7c7151e765d"; +const retries = defaultExecutionEngineHttpOpts.retries; +const retryDelay = defaultExecutionEngineHttpOpts.retryDelay; +describe("executionEngine / ExecutionEngineHttp", function () { + if (!process.env.EL_BINARY_DIR || !process.env.EL_SCRIPT_DIR) { + throw Error( + `EL ENV must be provided, EL_BINARY_DIR: ${process.env.EL_BINARY_DIR}, EL_SCRIPT_DIR: ${process.env.EL_SCRIPT_DIR}` + ); + } + vi.setConfig({testTimeout: 1000 * 60 * 10, hookTimeout: 1000 * 60 * 10}); + 
+ const dataPath = fs.mkdtempSync("lodestar-test-electra"); + const elSetupConfig = { + elScriptDir: process.env.EL_SCRIPT_DIR, + elBinaryDir: process.env.EL_BINARY_DIR, + }; + const elRunOptions = { + dataPath, + jwtSecretHex, + enginePort: parseInt(process.env.ENGINE_PORT ?? "8551"), + ethPort: parseInt(process.env.ETH_PORT ?? "8545"), + }; + + const controller = new AbortController(); + afterAll(async () => { + controller?.abort(); + await shell(`sudo rm -rf ${dataPath}`); + }); + + const afterEachCallbacks: (() => Promise | void)[] = []; + afterEach(async () => { + while (afterEachCallbacks.length > 0) { + const callback = afterEachCallbacks.pop(); + if (callback) await callback(); + } + }); + + it("Send and get payloads with depositRequests to/from EL", async () => { + const {elClient, tearDownCallBack} = await runEL( + {...elSetupConfig, mode: ELStartMode.PostMerge, genesisTemplate: "electra.tmpl"}, + {...elRunOptions, ttd: BigInt(0)}, + controller.signal + ); + afterEachCallbacks.push(() => tearDownCallBack()); + const {genesisBlockHash, engineRpcUrl, ethRpcUrl} = elClient; + console.log({genesisBlockHash}); + + const loggerExecutionEngine = testLogger("executionEngine"); + + const executionEngine = initializeExecutionEngine( + {mode: "http", urls: [engineRpcUrl], jwtSecretHex, retries, retryDelay}, + {signal: controller.signal, logger: loggerExecutionEngine} + ); + + // 1. Prepare payload + const preparePayloadParams: PayloadAttributes = { + // Note: this is created with a pre-defined genesis.json + timestamp: 10, + prevRandao: dataToBytes("0x0000000000000000000000000000000000000000000000000000000000000000", 32), + suggestedFeeRecipient: "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", + withdrawals: [], + parentBeaconBlockRoot: dataToBytes("0x0000000000000000000000000000000000000000000000000000000000000000", 32), + }; + const payloadId = await executionEngine.notifyForkchoiceUpdate( + ForkName.electra, + genesisBlockHash, + //use finalizedBlockHash as safeBlockHash + genesisBlockHash, + genesisBlockHash, + preparePayloadParams + ); + if (!payloadId) throw Error("InvalidPayloadId"); + + // 2. Send raw deposit transaction A and B. 
tx A is to be imported via newPayload, tx B is to be included in payload via getPayload + const depositTransactionA = + "0x02f90213018080648401c9c3809400000000219ab540356cbb839cbe05303d7705fa8901bc16d674ec800000b901a422895118000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000001208cd4e5a69709cf8ee5b1b73d6efbf3f33bcac92fb7e4ce62b2467542fb50a72d0000000000000000000000000000000000000000000000000000000000000030ac842878bb70009552a4cfcad801d6e659c50bd50d7d03306790cb455ce7363c5b6972f0159d170f625a99b2064dbefc000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020010000000000000000000000818ccb1c4eda80270b04d6df822b1e72dd83c3030000000000000000000000000000000000000000000000000000000000000060a747f75c72d0cf0d2b52504c7385b516f0523e2f0842416399f42b4aee5c6384a5674f6426b1cc3d0827886fa9b909e616f5c9f61f986013ed2b9bf37071cbae951136265b549f44e3c8e26233c0433e9124b7fd0dc86e82f9fedfc0a179d769c080a067c9857d27a42f8fde4d5cf2d6c324af94469ac93ec867eacdd9002e1297835fa07927224866e03d51fb1ae94390e7aec453cad8df9e048892e98f945178eab254"; + const depositRequestA = { + amount: 32000000000, + index: 0, + pubkey: dataToBytes( + "0xac842878bb70009552a4cfcad801d6e659c50bd50d7d03306790cb455ce7363c5b6972f0159d170f625a99b2064dbefc", + 48 + ), + signature: dataToBytes( + "0xa747f75c72d0cf0d2b52504c7385b516f0523e2f0842416399f42b4aee5c6384a5674f6426b1cc3d0827886fa9b909e616f5c9f61f986013ed2b9bf37071cbae951136265b549f44e3c8e26233c0433e9124b7fd0dc86e82f9fedfc0a179d769", + 96 + ), + withdrawalCredentials: dataToBytes("0x010000000000000000000000818ccb1c4eda80270b04d6df822b1e72dd83c303", 32), + }; + + const depositTransactionB = + "0x02f90213010180648401c9c3809400000000219ab540356cbb839cbe05303d7705fa8901bc16d674ec800000b901a422895118000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000e00000000000000000000000000000000000000000000000000000000000000120a7ec6a3459bf9389265f62abbdffcd0ef20924bd03e4856d3b964edf565bd8e80000000000000000000000000000000000000000000000000000000000000030a5290ddb9abd6a7fb8bac3414c6c7ff093a18ff297c1eada20464de388b14aafa505bfc98847ca7e6f7ca3aa9d4ca769000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020010000000000000000000000da628fed218cbe3a9e684a9f51c49dd63a229a1d000000000000000000000000000000000000000000000000000000000000006080e12262f94795ce3453f17eea2dd44843ff7977d303b192c1d2a4ce0dbebc8856c398d6445cbf244ba9e99307ead1e30b2544a5e9693cdd5196a33c46e2dd8a8b83afc8278c1ea79cd5c13cac2b96a62257b3636787d0f1e0f881c50a4667ddc080a0b653aad27e504d4fcd19b7c317ffbd2a26a81d6ac14ecea6a891a63dcf7816dfa02953273b4cddc93b2a9ba21aaeb0db988cb1086319dd0b91f79bc101adfe32e4"; + const depositRequestB = { + amount: 32000000000, + index: 1, + pubkey: dataToBytes( + "0xa5290ddb9abd6a7fb8bac3414c6c7ff093a18ff297c1eada20464de388b14aafa505bfc98847ca7e6f7ca3aa9d4ca769", + 48 + ), + signature: dataToBytes( + "0x80e12262f94795ce3453f17eea2dd44843ff7977d303b192c1d2a4ce0dbebc8856c398d6445cbf244ba9e99307ead1e30b2544a5e9693cdd5196a33c46e2dd8a8b83afc8278c1ea79cd5c13cac2b96a62257b3636787d0f1e0f881c50a4667dd", + 96 + ), + withdrawalCredentials: dataToBytes("0x010000000000000000000000da628fed218cbe3a9e684a9f51c49dd63a229a1d", 32), + }; + + sendRawTransactionBig(ethRpcUrl, depositTransactionA, `${dataPath}/deposit.json`).catch((e: Error) => { + 
loggerExecutionEngine.error("Fail to send raw deposit transaction A", undefined, e); + }); + + sendRawTransactionBig(ethRpcUrl, depositTransactionB, `${dataPath}/deposit.json`).catch((e: Error) => { + loggerExecutionEngine.error("Fail to send raw deposit transaction B", undefined, e); + }); + + // 3. Import new payload with tx A and deposit receipt A + const newPayloadBlockHash = "0x4cec1852552239cf78e8bd2db35ff9396acb6b40c3ce486e6e3028bc75c9faec"; + const newPayload = { + parentHash: dataToBytes("0xeb86e5aca89ea5477a6e169a389efbbe7e5a3d5f5c5296bcde3a4b032ea9bae8", 32), + feeRecipient: dataToBytes("0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", 20), + stateRoot: dataToBytes("0x686ce0478cabce79b298712fefee4aefd2fac1ab4a4813936d2c1ccca788bbc3", 32), + logsBloom: dataToBytes( + "0x00000000000000000000400000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000020000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000", + 256 + ), + prevRandao: dataToBytes("0x0000000000000000000000000000000000000000000000000000000000000000", 32), + gasLimit: 30000000, + gasUsed: 84714, + timestamp: 16, + extraData: dataToBytes("0x", 0), + baseFeePerGas: 7n, + excessBlobGas: 0n, + transactions: [dataToBytes(depositTransactionA, null)], + withdrawals: [], + depositRequests: [depositRequestA], + blockNumber: 1, + blockHash: dataToBytes(newPayloadBlockHash, 32), + receiptsRoot: dataToBytes("0x0b67bea29f17eeb290685e01e9a2e4cd77a83471d9985a8ce27997a7ed3ee3f8", 32), + blobGasUsed: 0n, + withdrawalRequests: [], + }; + const parentBeaconBlockRoot = dataToBytes("0x0000000000000000000000000000000000000000000000000000000000000000", 32); + const payloadResult = await executionEngine.notifyNewPayload( + ForkName.electra, + newPayload, + [], + parentBeaconBlockRoot + ); + if (payloadResult.status !== ExecutionPayloadStatus.VALID) { + throw Error("getPayload returned payload that notifyNewPayload deems invalid"); + } + + // 4. Update fork choice + const preparePayloadParams2: PayloadAttributes = { + timestamp: 48, + prevRandao: dataToBytes("0x0000000000000000000000000000000000000000000000000000000000000000", 32), + suggestedFeeRecipient: "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", + withdrawals: [], + parentBeaconBlockRoot: dataToBytes("0x0000000000000000000000000000000000000000000000000000000000000000", 32), + }; + + const payloadId2 = await executionEngine.notifyForkchoiceUpdate( + ForkName.electra, + newPayloadBlockHash, + //use finalizedBlockHash as safeBlockHash + newPayloadBlockHash, + newPayloadBlockHash, + preparePayloadParams2 + ); + if (!payloadId2) throw Error("InvalidPayloadId"); + + // 5. Get the payload. Check depositRequests field contains deposit + // Wait a bit first for besu to pick up tx from the tx pool. + await sleep(1000); + const payloadAndBlockValue = await executionEngine.getPayload(ForkName.electra, payloadId2); + const payload = payloadAndBlockValue.executionPayload as electra.ExecutionPayload; + + if (payload.transactions.length !== 1) { + throw Error(`Number of transactions mismatched. 
Expected: 1, actual: ${payload.transactions.length}`); + } else { + const actualTransaction = bytesToData(payload.transactions[0]); + + if (actualTransaction !== depositTransactionB) { + throw Error(`Transaction mismatched. Expected: ${depositTransactionB}, actual: ${actualTransaction}`); + } + } + + if (payload.depositRequests.length !== 1) { + throw Error(`Number of depositRequests mismatched. Expected: 1, actual: ${payload.depositRequests.length}`); + } + + const actualDepositRequest = payload.depositRequests[0]; + assert.deepStrictEqual( + actualDepositRequest, + depositRequestB, + `Deposit receipts mismatched. Expected: ${JSON.stringify(depositRequestB)}, actual: ${JSON.stringify( + actualDepositRequest + )}` + ); + }); + + // TODO: get this post merge run working + it.skip("Post-merge, run for a few blocks", async function () { + console.log("\n\nPost-merge, run for a few blocks\n\n"); + const {elClient, tearDownCallBack} = await runEL( + {...elSetupConfig, mode: ELStartMode.PostMerge, genesisTemplate: "electra.tmpl"}, + {...elRunOptions, ttd: BigInt(0)}, + controller.signal + ); + afterEachCallbacks.push(() => tearDownCallBack()); + + await runNodeWithEL({ + elClient, + electraEpoch: 0, + testName: "post-merge", + }); + }); + + /** + * Want to test two things: + * 1) Send two raw deposit transactions, and see if two new validators with correct balances show up in the state.validators and unfinalized cache + * 2) Upon state-transition, see if the two new validators move from unfinalized cache to finalized cache + */ + async function runNodeWithEL({ + elClient, + electraEpoch, + testName, + }: { + elClient: ELClient; + electraEpoch: Epoch; + testName: string; + }): Promise { + const {genesisBlockHash, ttd, engineRpcUrl, ethRpcUrl} = elClient; + const validatorClientCount = 1; + const validatorsPerClient = 32; + + const testParams: Pick = { + SECONDS_PER_SLOT: 2, + }; + + // Just enough to have a checkpoint finalized + const expectedEpochsToFinish = 4; + // 1 epoch of margin of error + const epochsOfMargin = 1; + const timeoutSetupMargin = 30 * 1000; // Give extra 30 seconds of margin + + // delay a bit so regular sync sees it's up to date and sync is completed from the beginning + const genesisSlotsDelay = 8; + + const timeout = + ((epochsOfMargin + expectedEpochsToFinish) * SLOTS_PER_EPOCH + genesisSlotsDelay) * + testParams.SECONDS_PER_SLOT * + 1000; + + vi.setConfig({testTimeout: timeout + 2 * timeoutSetupMargin}); + + const genesisTime = Math.floor(Date.now() / 1000) + genesisSlotsDelay * testParams.SECONDS_PER_SLOT; + + const testLoggerOpts: TestLoggerOpts = { + level: LogLevel.info, + file: { + filepath: `${logFilesDir}/mergemock-${testName}.log`, + level: LogLevel.debug, + }, + timestampFormat: { + format: TimestampFormatCode.EpochSlot, + genesisTime, + slotsPerEpoch: SLOTS_PER_EPOCH, + secondsPerSlot: testParams.SECONDS_PER_SLOT, + }, + }; + const loggerNodeA = testLogger("Node-A", testLoggerOpts); + + const bn = await getDevBeaconNode({ + params: { + ...testParams, + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + CAPELLA_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + ELECTRA_FORK_EPOCH: electraEpoch, + TERMINAL_TOTAL_DIFFICULTY: ttd, + }, + options: { + api: {rest: {enabled: true} as BeaconRestApiServerOpts}, + sync: {isSingleNode: true}, + network: {allowPublishToZeroPeers: true, discv5: null}, + // Now eth deposit/merge tracker methods directly available on engine endpoints + eth1: {enabled: false, providerUrls: [engineRpcUrl], jwtSecretHex}, + executionEngine: {urls: 
[engineRpcUrl], jwtSecretHex}, + chain: {suggestedFeeRecipient: "0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb"}, + }, + validatorCount: validatorClientCount * validatorsPerClient, + logger: loggerNodeA, + genesisTime, + eth1BlockHash: dataToBytes(genesisBlockHash, 32), + withEth1Credentials: true, + }); + + afterEachCallbacks.push(async function () { + await bn.close(); + await sleep(1000); + }); + + const stopInfoTracker = simTestInfoTracker(bn, loggerNodeA); + const valProposerConfig = { + defaultConfig: { + feeRecipient: "0xcccccccccccccccccccccccccccccccccccccccc", + }, + } as ValidatorProposerConfig; + + const {validators} = await getAndInitDevValidators({ + node: bn, + logPrefix: "Node-A", + validatorsPerClient, + validatorClientCount, + startIndex: 0, + // At least one sim test must use the REST API for beacon <-> validator comms + useRestApi: true, + testLoggerOpts, + valProposerConfig, + }); + + afterEachCallbacks.push(async function () { + await Promise.all(validators.map((v) => v.close())); + }); + + await waitForSlot(bn, 1); + + // send raw tx at slot 1 + const depositTransaction = + "0x02f90213018080648401c9c3809400000000219ab540356cbb839cbe05303d7705fa8901bc16d674ec800000b901a422895118000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000e000000000000000000000000000000000000000000000000000000000000001208cd4e5a69709cf8ee5b1b73d6efbf3f33bcac92fb7e4ce62b2467542fb50a72d0000000000000000000000000000000000000000000000000000000000000030ac842878bb70009552a4cfcad801d6e659c50bd50d7d03306790cb455ce7363c5b6972f0159d170f625a99b2064dbefc000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020010000000000000000000000818ccb1c4eda80270b04d6df822b1e72dd83c3030000000000000000000000000000000000000000000000000000000000000060a747f75c72d0cf0d2b52504c7385b516f0523e2f0842416399f42b4aee5c6384a5674f6426b1cc3d0827886fa9b909e616f5c9f61f986013ed2b9bf37071cbae951136265b549f44e3c8e26233c0433e9124b7fd0dc86e82f9fedfc0a179d769c080a067c9857d27a42f8fde4d5cf2d6c324af94469ac93ec867eacdd9002e1297835fa07927224866e03d51fb1ae94390e7aec453cad8df9e048892e98f945178eab254"; + sendRawTransactionBig(ethRpcUrl, depositTransaction, `${dataPath}/deposit.json`).catch((e: Error) => { + loggerNodeA.error("Fail to send raw deposit transaction", undefined, e); + }); + + await waitForSlot(bn, 5); + // Expect new validator to be in unfinalized cache, in state.validators and not in finalized cache + let headState = bn.chain.getHeadState(); + let epochCtx = headState.epochCtx; + if (headState.validators.length !== 33 || headState.balances.length !== 33) { + throw Error("New validator is not reflected in the beacon state at slot 5"); + } + if (epochCtx.index2pubkey.length !== 32 || epochCtx.pubkey2index.size !== 32) { + throw Error("Finalized cache is modified."); + } + if (epochCtx.unfinalizedPubkey2index.size !== 1) { + throw Error( + `Unfinalized cache is missing the expected validator. Size: ${epochCtx.unfinalizedPubkey2index.size}` + ); + } + // validator count at epoch 1 should be empty at this point since no epoch transition has happened. 
+ if (epochCtx.getValidatorCountAtEpoch(1) !== undefined) { + throw Error("Historical validator lengths are modified"); + } + + await new Promise<void>((resolve, _reject) => { + bn.chain.clock.on(ClockEvent.epoch, (epoch) => { + // Resolve only if the finalized checkpoint includes execution payload + if (epoch >= expectedEpochsToFinish) { + console.log("\nGot event epoch, stopping validators and nodes\n"); + resolve(); + } + }); + }); + + // Stop chain and unsubscribe events so the execution engine won't update its head + // Allow some time to broadcast finalized events and complete the importBlock routine + await Promise.all(validators.map((v) => v.close())); + await bn.close(); + await sleep(500); + + // Check if new validator is in finalized cache + headState = bn.chain.getHeadState() as CachedBeaconStateElectra; + epochCtx = headState.epochCtx; + + if (headState.validators.length !== 33 || headState.balances.length !== 33) { + throw Error("New validator is not reflected in the beacon state."); + } + if (epochCtx.index2pubkey.length !== 33 || epochCtx.pubkey2index.size !== 33) { + throw Error("New validator is not in finalized cache"); + } + if (!epochCtx.unfinalizedPubkey2index.isEmpty()) { + throw Error("Unfinalized cache still contains new validator"); + } + // After 4 epochs, headState's finalized checkpoint epoch should be 2 + // epochCtx should only have validator counts for epochs 3 and 4. + if (epochCtx.getValidatorCountAtEpoch(4) === undefined || epochCtx.getValidatorCountAtEpoch(3) === undefined) { + throw Error("Missing historical validator length for epoch 3 or 4"); + } + + if (epochCtx.getValidatorCountAtEpoch(4) !== 33 || epochCtx.getValidatorCountAtEpoch(3) !== 33) { + throw Error("Incorrect historical validator length for epoch 3 or 4"); + } + + if (epochCtx.getValidatorCountAtEpoch(2) !== undefined || epochCtx.getValidatorCountAtEpoch(1) !== undefined) { + throw Error("Historical validator length for epoch 1 or 2 is not dropped properly"); + } + + if (headState.depositRequestsStartIndex === UNSET_DEPOSIT_REQUESTS_START_INDEX) { + throw Error("state.depositRequestsStartIndex is not set upon processing new deposit request"); + } + + // wait for 1 slot to print current epoch stats + await sleep(1 * bn.config.SECONDS_PER_SLOT * 1000); + stopInfoTracker(); + console.log("\n\nDone\n\n"); + } +}); + +async function waitForSlot(bn: BeaconNode, targetSlot: Slot): Promise<void> { + await new Promise<void>((resolve, reject) => { + bn.chain.clock.on(ClockEvent.slot, (currentSlot) => { + if (currentSlot === targetSlot) { + resolve(); + return; + } + if (currentSlot > targetSlot) { + reject(Error(`Beacon node has passed target slot ${targetSlot}.
Current slot ${currentSlot}`)); + } + }); + }); +} diff --git a/packages/beacon-node/test/spec/presets/epoch_processing.test.ts b/packages/beacon-node/test/spec/presets/epoch_processing.test.ts index a244762143f..604243400aa 100644 --- a/packages/beacon-node/test/spec/presets/epoch_processing.test.ts +++ b/packages/beacon-node/test/spec/presets/epoch_processing.test.ts @@ -5,6 +5,7 @@ import { EpochTransitionCache, BeaconStateAllForks, beforeProcessEpoch, + CachedBeaconStateAltair, } from "@lodestar/state-transition"; import * as epochFns from "@lodestar/state-transition/epoch"; import {ssz} from "@lodestar/types"; @@ -22,7 +23,10 @@ export type EpochTransitionFn = (state: CachedBeaconStateAllForks, epochTransiti /* eslint-disable @typescript-eslint/naming-convention */ const epochTransitionFns: Record = { - effective_balance_updates: epochFns.processEffectiveBalanceUpdates, + effective_balance_updates: (state, epochTransitionCache) => { + const fork = state.config.getForkSeq(state.slot); + epochFns.processEffectiveBalanceUpdates(fork, state, epochTransitionCache); + }, eth1_data_reset: epochFns.processEth1DataReset, historical_roots_update: epochFns.processHistoricalRootsUpdate, inactivity_updates: epochFns.processInactivityUpdates as EpochTransitionFn, @@ -30,12 +34,20 @@ const epochTransitionFns: Record = { participation_flag_updates: epochFns.processParticipationFlagUpdates as EpochTransitionFn, participation_record_updates: epochFns.processParticipationRecordUpdates as EpochTransitionFn, randao_mixes_reset: epochFns.processRandaoMixesReset, - registry_updates: epochFns.processRegistryUpdates, + registry_updates: (state, epochTransitionCache) => { + const fork = state.config.getForkSeq(state.slot); + epochFns.processRegistryUpdates(fork, state, epochTransitionCache); + }, rewards_and_penalties: epochFns.processRewardsAndPenalties, slashings: epochFns.processSlashings, slashings_reset: epochFns.processSlashingsReset, - sync_committee_updates: epochFns.processSyncCommitteeUpdates as EpochTransitionFn, + sync_committee_updates: (state, _) => { + const fork = state.config.getForkSeq(state.slot); + epochFns.processSyncCommitteeUpdates(fork, state as CachedBeaconStateAltair); + }, historical_summaries_update: epochFns.processHistoricalSummariesUpdate as EpochTransitionFn, + pending_balance_deposits: epochFns.processPendingBalanceDeposits as EpochTransitionFn, + pending_consolidations: epochFns.processPendingConsolidations as EpochTransitionFn, }; /** diff --git a/packages/beacon-node/test/spec/presets/fork.test.ts b/packages/beacon-node/test/spec/presets/fork.test.ts index 228ab6a3893..c121e651fce 100644 --- a/packages/beacon-node/test/spec/presets/fork.test.ts +++ b/packages/beacon-node/test/spec/presets/fork.test.ts @@ -5,6 +5,7 @@ import { CachedBeaconStateAltair, CachedBeaconStatePhase0, CachedBeaconStateCapella, + CachedBeaconStateDeneb, } from "@lodestar/state-transition"; import * as slotFns from "@lodestar/state-transition/slot"; import {phase0, ssz} from "@lodestar/types"; @@ -35,6 +36,8 @@ const fork: TestRunnerFn = (forkNext) => { return slotFns.upgradeStateToCapella(preState as CachedBeaconStateBellatrix); case ForkName.deneb: return slotFns.upgradeStateToDeneb(preState as CachedBeaconStateCapella); + case ForkName.electra: + return slotFns.upgradeStateToElectra(preState as CachedBeaconStateDeneb); } }, options: { diff --git a/packages/beacon-node/test/spec/presets/fork_choice.test.ts b/packages/beacon-node/test/spec/presets/fork_choice.test.ts index 92862c6cb03..7cb6e3c3d69 
100644 --- a/packages/beacon-node/test/spec/presets/fork_choice.test.ts +++ b/packages/beacon-node/test/spec/presets/fork_choice.test.ts @@ -4,7 +4,17 @@ import {toHexString} from "@chainsafe/ssz"; import {BeaconStateAllForks, isExecutionStateType, signedBlockToSignedHeader} from "@lodestar/state-transition"; import {InputType} from "@lodestar/spec-test-util"; import {CheckpointWithHex, ForkChoice} from "@lodestar/fork-choice"; -import {phase0, bellatrix, ssz, RootHex, deneb, BeaconBlock, SignedBeaconBlock} from "@lodestar/types"; +import { + bellatrix, + ssz, + RootHex, + deneb, + BeaconBlock, + SignedBeaconBlock, + sszTypesFor, + Attestation, + AttesterSlashing, +} from "@lodestar/types"; import {bnToNum, fromHex} from "@lodestar/utils"; import {createBeaconConfig} from "@lodestar/config"; import {ACTIVE_PRESET, ForkSeq, isForkBlobs, ForkName} from "@lodestar/params"; @@ -136,8 +146,11 @@ const forkChoiceTest = const attestation = testcase.attestations.get(step.attestation); if (!attestation) throw Error(`No attestation ${step.attestation}`); const headState = chain.getHeadState(); - const attDataRootHex = toHexString(ssz.phase0.AttestationData.hashTreeRoot(attestation.data)); - chain.forkChoice.onAttestation(headState.epochCtx.getIndexedAttestation(attestation), attDataRootHex); + const attDataRootHex = toHexString(sszTypesFor(fork).AttestationData.hashTreeRoot(attestation.data)); + chain.forkChoice.onAttestation( + headState.epochCtx.getIndexedAttestation(ForkSeq[fork], attestation), + attDataRootHex + ); } // attester slashing step @@ -340,16 +353,16 @@ const forkChoiceTest = [BLOCK_FILE_NAME]: ssz[fork].SignedBeaconBlock, [BLOBS_FILE_NAME]: ssz.deneb.Blobs, [POW_BLOCK_FILE_NAME]: ssz.bellatrix.PowBlock, - [ATTESTATION_FILE_NAME]: ssz.phase0.Attestation, - [ATTESTER_SLASHING_FILE_NAME]: ssz.phase0.AttesterSlashing, + [ATTESTATION_FILE_NAME]: sszTypesFor(fork).Attestation, + [ATTESTER_SLASHING_FILE_NAME]: sszTypesFor(fork).AttesterSlashing, }, mapToTestCase: (t: Record) => { // t has input file name as key const blocks = new Map(); const blobs = new Map(); const powBlocks = new Map(); - const attestations = new Map(); - const attesterSlashings = new Map(); + const attestations = new Map(); + const attesterSlashings = new Map(); for (const key in t) { const blockMatch = key.match(BLOCK_FILE_NAME); if (blockMatch) { @@ -492,8 +505,8 @@ type ForkChoiceTestCase = { blocks: Map; blobs: Map; powBlocks: Map; - attestations: Map; - attesterSlashings: Map; + attestations: Map; + attesterSlashings: Map; }; function isTick(step: Step): step is OnTick { diff --git a/packages/beacon-node/test/spec/presets/genesis.test.ts b/packages/beacon-node/test/spec/presets/genesis.test.ts index f03f2595a56..773debe3bb1 100644 --- a/packages/beacon-node/test/spec/presets/genesis.test.ts +++ b/packages/beacon-node/test/spec/presets/genesis.test.ts @@ -60,9 +60,7 @@ const genesisInitialization: TestRunnerFn - ) + executionPayloadHeaderType.toViewDU(testcase["execution_payload_header"]) ); }, // eth1.yaml diff --git a/packages/beacon-node/test/spec/presets/operations.test.ts b/packages/beacon-node/test/spec/presets/operations.test.ts index 4c1c10e0cb6..7e2e9c1e9c5 100644 --- a/packages/beacon-node/test/spec/presets/operations.test.ts +++ b/packages/beacon-node/test/spec/presets/operations.test.ts @@ -4,11 +4,12 @@ import { CachedBeaconStateAllForks, CachedBeaconStateBellatrix, CachedBeaconStateCapella, + CachedBeaconStateElectra, ExecutionPayloadStatus, getBlockRootAtSlot, } from "@lodestar/state-transition"; 
import * as blockFns from "@lodestar/state-transition/block"; -import {ssz, phase0, altair, bellatrix, capella, sszTypesFor} from "@lodestar/types"; +import {ssz, phase0, altair, bellatrix, capella, electra, sszTypesFor} from "@lodestar/types"; import {InputType} from "@lodestar/spec-test-util"; import {ACTIVE_PRESET, ForkName} from "@lodestar/params"; @@ -65,7 +66,8 @@ const operationFns: Record> = sync_aggregate_random: sync_aggregate, voluntary_exit: (state, testCase: {voluntary_exit: phase0.SignedVoluntaryExit}) => { - blockFns.processVoluntaryExit(state, testCase.voluntary_exit); + const fork = state.config.getForkSeq(state.slot); + blockFns.processVoluntaryExit(fork, state, testCase.voluntary_exit); }, execution_payload: (state, testCase: {body: bellatrix.BeaconBlockBody; execution: {execution_valid: boolean}}) => { @@ -82,7 +84,22 @@ const operationFns: Record> = }, withdrawals: (state, testCase: {execution_payload: capella.ExecutionPayload}) => { - blockFns.processWithdrawals(state as CachedBeaconStateCapella, testCase.execution_payload); + const fork = state.config.getForkSeq(state.slot); + blockFns.processWithdrawals(fork, state as CachedBeaconStateCapella, testCase.execution_payload); + }, + + withdrawal_request: (state, testCase: {withdrawal_request: electra.WithdrawalRequest}) => { + const fork = state.config.getForkSeq(state.slot); + blockFns.processWithdrawalRequest(fork, state as CachedBeaconStateElectra, testCase.withdrawal_request); + }, + + deposit_request: (state, testCase: {deposit_request: electra.DepositRequest}) => { + const fork = state.config.getForkSeq(state.slot); + blockFns.processDepositRequest(fork, state as CachedBeaconStateElectra, testCase.deposit_request); + }, + + consolidation_request: (state, testCase: {consolidation_request: electra.ConsolidationRequest}) => { + blockFns.processConsolidationRequest(state as CachedBeaconStateElectra, testCase.consolidation_request); }, }; @@ -116,8 +133,8 @@ const operations: TestRunnerFn = (fork, sszTypes: { pre: ssz[fork].BeaconState, post: ssz[fork].BeaconState, - attestation: ssz.phase0.Attestation, - attester_slashing: ssz.phase0.AttesterSlashing, + attestation: sszTypesFor(fork).Attestation, + attester_slashing: sszTypesFor(fork).AttesterSlashing, block: ssz[fork].BeaconBlock, body: ssz[fork].BeaconBlockBody, deposit: ssz.phase0.Deposit, @@ -132,6 +149,10 @@ const operations: TestRunnerFn = (fork, : ssz.bellatrix.ExecutionPayload, // Capella address_change: ssz.capella.SignedBLSToExecutionChange, + // Electra + withdrawal_request: ssz.electra.WithdrawalRequest, + deposit_request: ssz.electra.DepositRequest, + consolidation_request: ssz.electra.ConsolidationRequest, }, shouldError: (testCase) => testCase.post === undefined, getExpected: (testCase) => testCase.post, diff --git a/packages/beacon-node/test/spec/presets/ssz_static.test.ts b/packages/beacon-node/test/spec/presets/ssz_static.test.ts index d81b9dee009..6e43d851ef6 100644 --- a/packages/beacon-node/test/spec/presets/ssz_static.test.ts +++ b/packages/beacon-node/test/spec/presets/ssz_static.test.ts @@ -1,6 +1,6 @@ import fs from "node:fs"; import path from "node:path"; -import {it, vi} from "vitest"; +import {expect, it, vi} from "vitest"; import {Type} from "@chainsafe/ssz"; import {ssz, sszTypesFor} from "@lodestar/types"; import {ACTIVE_PRESET, ForkName} from "@lodestar/params"; @@ -45,12 +45,20 @@ const sszStatic = /* eslint-disable @typescript-eslint/strict-boolean-expressions */ const sszType = (sszTypesFor(fork) as Types)[typeName] || + (ssz.electra 
as Types)[typeName] || + (ssz.deneb as Types)[typeName] || (ssz.capella as Types)[typeName] || (ssz.bellatrix as Types)[typeName] || (ssz.altair as Types)[typeName] || (ssz.phase0 as Types)[typeName]; + + it(`${fork} - ${typeName} type exists`, function () { + expect(sszType).toEqualWithMessage(expect.any(Type), `SSZ type ${typeName} for fork ${fork} is not defined`); + }); + if (!sszType) { - throw Error(`No type for ${typeName}`); + // Return instead of throwing an error to only skip ssz_static tests associated to missing type + return; } const sszTypeNoUint = replaceUintTypeWithUintBigintType(sszType); diff --git a/packages/beacon-node/test/spec/presets/transition.test.ts b/packages/beacon-node/test/spec/presets/transition.test.ts index d9925f29267..cae7c667b59 100644 --- a/packages/beacon-node/test/spec/presets/transition.test.ts +++ b/packages/beacon-node/test/spec/presets/transition.test.ts @@ -102,6 +102,14 @@ function getTransitionConfig(fork: ForkName, forkEpoch: number): Partial testId.startsWith(skippedPrefix))) { + if (opts?.skippedTestSuites?.some((skippedMatch) => testId.match(skippedMatch))) { displaySkipTest(testId); } else if (fork === undefined) { displayFailTest(testId, `Unknown fork ${forkStr}`); @@ -150,7 +157,11 @@ export function specTestIterator( // Generic testRunner else { const {testFunction, options} = testRunner.fn(fork, testHandler, testSuite); - + if (opts.skippedTests && options.shouldSkip === undefined) { + options.shouldSkip = (_testCase: any, name: string, _index: number): boolean => { + return opts?.skippedTests?.some((skippedMatch) => name.match(skippedMatch)) ?? false; + }; + } describeDirectorySpecTest(testId, testSuiteDirpath, testFunction, options); } } diff --git a/packages/beacon-node/test/unit/chain/lightclient/upgradeLightClientHeader.test.ts b/packages/beacon-node/test/unit/chain/lightclient/upgradeLightClientHeader.test.ts index a9a5edc9ec0..abb520bddaf 100644 --- a/packages/beacon-node/test/unit/chain/lightclient/upgradeLightClientHeader.test.ts +++ b/packages/beacon-node/test/unit/chain/lightclient/upgradeLightClientHeader.test.ts @@ -15,6 +15,7 @@ describe("UpgradeLightClientHeader", function () { BELLATRIX_FORK_EPOCH: 2, CAPELLA_FORK_EPOCH: 3, DENEB_FORK_EPOCH: 4, + ELECTRA_FORK_EPOCH: 5, }); const genesisValidatorsRoot = Buffer.alloc(32, 0xaa); @@ -27,6 +28,7 @@ describe("UpgradeLightClientHeader", function () { capella: ssz.capella.LightClientHeader.defaultValue(), bellatrix: ssz.altair.LightClientHeader.defaultValue(), deneb: ssz.deneb.LightClientHeader.defaultValue(), + electra: ssz.electra.LightClientHeader.defaultValue(), }; testSlots = { @@ -35,6 +37,7 @@ describe("UpgradeLightClientHeader", function () { bellatrix: 17, capella: 25, deneb: 33, + electra: 41, }; }); diff --git a/packages/beacon-node/test/unit/chain/opPools/aggregatedAttestationPool.test.ts b/packages/beacon-node/test/unit/chain/opPools/aggregatedAttestationPool.test.ts index 800984fa84b..f00a300bbe4 100644 --- a/packages/beacon-node/test/unit/chain/opPools/aggregatedAttestationPool.test.ts +++ b/packages/beacon-node/test/unit/chain/opPools/aggregatedAttestationPool.test.ts @@ -1,14 +1,23 @@ import {BitArray, fromHexString, toHexString} from "@chainsafe/ssz"; import {describe, it, expect, beforeEach, beforeAll, afterEach, vi} from "vitest"; -import {SecretKey, Signature, fastAggregateVerify} from "@chainsafe/blst"; +import {SecretKey, Signature, fastAggregateVerify, aggregateSignatures} from "@chainsafe/blst"; import {CachedBeaconStateAllForks, newFilledArray} from 
"@lodestar/state-transition"; -import {FAR_FUTURE_EPOCH, ForkName, MAX_EFFECTIVE_BALANCE, SLOTS_PER_EPOCH} from "@lodestar/params"; +import { + FAR_FUTURE_EPOCH, + ForkName, + MAX_COMMITTEES_PER_SLOT, + MAX_EFFECTIVE_BALANCE, + SLOTS_PER_EPOCH, +} from "@lodestar/params"; import {ssz, phase0} from "@lodestar/types"; import {CachedBeaconStateAltair} from "@lodestar/state-transition/src/types.js"; +import {createChainForkConfig, defaultChainConfig} from "@lodestar/config"; import {MockedForkChoice, getMockedForkChoice} from "../../../mocks/mockedBeaconChain.js"; import { + aggregateConsolidation, AggregatedAttestationPool, aggregateInto, + AttestationsConsolidation, getNotSeenValidatorsFn, MatchingDataAttestationGroup, } from "../../../../src/chain/opPools/aggregatedAttestationPool.js"; @@ -28,6 +37,9 @@ const validSignature = fromHexString( describe("AggregatedAttestationPool", function () { let pool: AggregatedAttestationPool; const fork = ForkName.altair; + const config = createChainForkConfig({ + ...defaultChainConfig, + }); const altairForkEpoch = 2020; const currentEpoch = altairForkEpoch + 10; const currentSlot = SLOTS_PER_EPOCH * currentEpoch; @@ -71,7 +83,7 @@ describe("AggregatedAttestationPool", function () { let forkchoiceStub: MockedForkChoice; beforeEach(() => { - pool = new AggregatedAttestationPool(); + pool = new AggregatedAttestationPool(config); altairState = originalState.clone(); forkchoiceStub = getMockedForkChoice(); }); @@ -80,11 +92,11 @@ describe("AggregatedAttestationPool", function () { vi.clearAllMocks(); }); - it("getParticipationFn", () => { + it("getNotSeenValidatorsFn", () => { // previousEpochParticipation and currentEpochParticipation is created inside generateCachedState // 0 and 1 are fully participated const notSeenValidatorFn = getNotSeenValidatorsFn(altairState); - const participation = notSeenValidatorFn(currentEpoch, committee); + const participation = notSeenValidatorFn(currentEpoch, currentSlot, committeeIndex); // seen attesting indices are 0, 1 => not seen are 2, 3 expect(participation).toEqual( // { @@ -279,6 +291,7 @@ describe("MatchingDataAttestationGroup.getAttestationsForBlock", () => { } } const attestationsForBlock = attestationGroup.getAttestationsForBlock( + ForkName.phase0, // notSeenValidatorIndices, notSeenAttestingIndices ); @@ -320,3 +333,75 @@ describe("MatchingDataAttestationGroup aggregateInto", function () { ); }); }); + +describe("aggregateConsolidation", function () { + const sk0 = SecretKey.fromBytes(Buffer.alloc(32, 1)); + const sk1 = SecretKey.fromBytes(Buffer.alloc(32, 2)); + const sk2 = SecretKey.fromBytes(Buffer.alloc(32, 3)); + const skArr = [sk0, sk1, sk2]; + const testCases: { + name: string; + committeeIndices: number[]; + aggregationBitsArr: Array[]; + expectedAggregationBits: Array; + expectedCommitteeBits: Array; + }[] = [ + // note that bit index starts from the right + { + name: "test case 0", + committeeIndices: [0, 1, 2], + aggregationBitsArr: [[0b111], [0b011], [0b111]], + expectedAggregationBits: [0b11011111, 0b1], + expectedCommitteeBits: [true, true, true, false], + }, + { + name: "test case 1", + committeeIndices: [2, 3, 1], + aggregationBitsArr: [[0b100], [0b010], [0b001]], + expectedAggregationBits: [0b10100001, 0b0], + expectedCommitteeBits: [false, true, true, true], + }, + ]; + for (const { + name, + committeeIndices, + aggregationBitsArr, + expectedAggregationBits, + expectedCommitteeBits, + } of testCases) { + it(name, () => { + const attData = ssz.phase0.AttestationData.defaultValue(); + const 
consolidation: AttestationsConsolidation = { + byCommittee: new Map(), + attData: attData, + totalNotSeenCount: 0, + score: 0, + }; + // to simplify, instead of signing the signingRoot, just sign the attData root + const sigArr = skArr.map((sk) => sk.sign(ssz.phase0.AttestationData.hashTreeRoot(attData))); + const attestationSeed = ssz.electra.Attestation.defaultValue(); + for (let i = 0; i < committeeIndices.length; i++) { + const committeeIndex = committeeIndices[i]; + const commiteeBits = BitArray.fromBoolArray( + Array.from({length: MAX_COMMITTEES_PER_SLOT}, (_, i) => i === committeeIndex) + ); + const aggAttestation = { + ...attestationSeed, + aggregationBits: new BitArray(new Uint8Array(aggregationBitsArr[i]), 3), + committeeBits: commiteeBits, + signature: sigArr[i].toBytes(), + }; + consolidation.byCommittee.set(committeeIndex, { + attestation: aggAttestation, + notSeenAttesterCount: aggregationBitsArr[i].filter((item) => item).length, + }); + } + + const finalAttestation = aggregateConsolidation(consolidation); + expect(finalAttestation.aggregationBits.uint8Array).toEqual(new Uint8Array(expectedAggregationBits)); + expect(finalAttestation.committeeBits.toBoolArray()).toEqual(expectedCommitteeBits); + expect(finalAttestation.data).toEqual(attData); + expect(finalAttestation.signature).toEqual(aggregateSignatures(sigArr).toBytes()); + }); + } +}); diff --git a/packages/beacon-node/test/unit/chain/opPools/attestationPool.test.ts b/packages/beacon-node/test/unit/chain/opPools/attestationPool.test.ts new file mode 100644 index 00000000000..68efd075158 --- /dev/null +++ b/packages/beacon-node/test/unit/chain/opPools/attestationPool.test.ts @@ -0,0 +1,120 @@ +import {fromHexString, toHexString} from "@chainsafe/ssz"; +import {describe, it, expect, beforeEach, vi} from "vitest"; +import {GENESIS_SLOT, SLOTS_PER_EPOCH} from "@lodestar/params"; +import {ssz} from "@lodestar/types"; +import {createChainForkConfig, defaultChainConfig} from "@lodestar/config"; +import {InsertOutcome} from "../../../../src/chain/opPools/types.js"; +import {AttestationPool} from "../../../../src/chain/opPools/attestationPool.js"; +import {getMockedClock} from "../../../mocks/clock.js"; + +/** Valid signature of random data to prevent BLS errors */ +export const validSignature = fromHexString( + "0xb2afb700f6c561ce5e1b4fedaec9d7c06b822d38c720cf588adfda748860a940adf51634b6788f298c552de40183b5a203b2bbe8b7dd147f0bb5bc97080a12efbb631c8888cb31a99cc4706eb3711865b8ea818c10126e4d818b542e9dbf9ae8" +); + +describe("AttestationPool", function () { + /* eslint-disable @typescript-eslint/naming-convention */ + const config = createChainForkConfig({ + ...defaultChainConfig, + ELECTRA_FORK_EPOCH: 5, + DENEB_FORK_EPOCH: 4, + CAPELLA_FORK_EPOCH: 3, + BELLATRIX_FORK_EPOCH: 2, + ALTAIR_FORK_EPOCH: 1, + }); + const clockStub = getMockedClock(); + vi.spyOn(clockStub, "secFromSlot").mockReturnValue(0); + + const cutOffSecFromSlot = (2 / 3) * config.SECONDS_PER_SLOT; + + // Mock attestations + const electraAttestationData = { + ...ssz.phase0.AttestationData.defaultValue(), + slot: config.ELECTRA_FORK_EPOCH * SLOTS_PER_EPOCH, + }; + const electraAttestation = { + ...ssz.electra.Attestation.defaultValue(), + data: electraAttestationData, + signature: validSignature, + }; + const phase0AttestationData = {...ssz.phase0.AttestationData.defaultValue(), slot: GENESIS_SLOT}; + const phase0Attestation = { + ...ssz.phase0.Attestation.defaultValue(), + data: phase0AttestationData, + signature: validSignature, + }; + + let pool: AttestationPool; + 
+ beforeEach(() => { + pool = new AttestationPool(config, clockStub, cutOffSecFromSlot); + }); + + it("add correct electra attestation", () => { + const committeeIndex = 0; + const attDataRootHex = toHexString(ssz.phase0.AttestationData.hashTreeRoot(electraAttestation.data)); + const outcome = pool.add(committeeIndex, electraAttestation, attDataRootHex); + + expect(outcome).equal(InsertOutcome.NewData); + expect(pool.getAggregate(electraAttestationData.slot, committeeIndex, attDataRootHex)).toEqual(electraAttestation); + }); + + it("add correct phase0 attestation", () => { + const committeeIndex = null; + const attDataRootHex = toHexString(ssz.phase0.AttestationData.hashTreeRoot(phase0Attestation.data)); + const outcome = pool.add(committeeIndex, phase0Attestation, attDataRootHex); + + expect(outcome).equal(InsertOutcome.NewData); + expect(pool.getAggregate(phase0AttestationData.slot, committeeIndex, attDataRootHex)).toEqual(phase0Attestation); + expect(pool.getAggregate(phase0AttestationData.slot, 10, attDataRootHex)).toEqual(phase0Attestation); + expect(pool.getAggregate(phase0AttestationData.slot, 42, attDataRootHex)).toEqual(phase0Attestation); + expect(pool.getAggregate(phase0AttestationData.slot, null, attDataRootHex)).toEqual(phase0Attestation); + }); + + it("add electra attestation without committee index", () => { + const committeeIndex = null; + const attDataRootHex = toHexString(ssz.phase0.AttestationData.hashTreeRoot(electraAttestation.data)); + + expect(() => pool.add(committeeIndex, electraAttestation, attDataRootHex)).toThrow(); + expect(pool.getAggregate(electraAttestationData.slot, committeeIndex, attDataRootHex)).toBeNull(); + }); + + it("add phase0 attestation with committee index", () => { + const committeeIndex = 0; + const attDataRootHex = toHexString(ssz.phase0.AttestationData.hashTreeRoot(phase0Attestation.data)); + const outcome = pool.add(committeeIndex, phase0Attestation, attDataRootHex); + + expect(outcome).equal(InsertOutcome.NewData); + expect(pool.getAggregate(phase0AttestationData.slot, committeeIndex, attDataRootHex)).toEqual(phase0Attestation); + expect(pool.getAggregate(phase0AttestationData.slot, 123, attDataRootHex)).toEqual(phase0Attestation); + expect(pool.getAggregate(phase0AttestationData.slot, 456, attDataRootHex)).toEqual(phase0Attestation); + expect(pool.getAggregate(phase0AttestationData.slot, null, attDataRootHex)).toEqual(phase0Attestation); + }); + + it("add electra attestation with phase0 slot", () => { + const electraAttestationDataWithPhase0Slot = {...ssz.phase0.AttestationData.defaultValue(), slot: GENESIS_SLOT}; + const attestation = { + ...ssz.electra.Attestation.defaultValue(), + data: electraAttestationDataWithPhase0Slot, + signature: validSignature, + }; + const attDataRootHex = toHexString(ssz.phase0.AttestationData.hashTreeRoot(electraAttestationDataWithPhase0Slot)); + + expect(() => pool.add(0, attestation, attDataRootHex)).toThrow(); + }); + + it("add phase0 attestation with electra slot", () => { + const phase0AttestationDataWithElectraSlot = { + ...ssz.phase0.AttestationData.defaultValue(), + slot: config.ELECTRA_FORK_EPOCH * SLOTS_PER_EPOCH, + }; + const attestation = { + ...ssz.phase0.Attestation.defaultValue(), + data: phase0AttestationDataWithElectraSlot, + signature: validSignature, + }; + const attDataRootHex = toHexString(ssz.phase0.AttestationData.hashTreeRoot(phase0AttestationDataWithElectraSlot)); + + expect(() => pool.add(0, attestation, attDataRootHex)).toThrow(); + }); +}); diff --git 
a/packages/beacon-node/test/unit/chain/stateCache/fifoBlockStateCache.test.ts b/packages/beacon-node/test/unit/chain/stateCache/fifoBlockStateCache.test.ts index 7d3f34ddac3..b4aac92dd9b 100644 --- a/packages/beacon-node/test/unit/chain/stateCache/fifoBlockStateCache.test.ts +++ b/packages/beacon-node/test/unit/chain/stateCache/fifoBlockStateCache.test.ts @@ -1,8 +1,7 @@ import {describe, it, expect, beforeEach} from "vitest"; import {toHexString} from "@chainsafe/ssz"; -import {EpochShuffling} from "@lodestar/state-transition"; +import {EpochShuffling, CachedBeaconStateAllForks} from "@lodestar/state-transition"; import {SLOTS_PER_EPOCH} from "@lodestar/params"; -import {CachedBeaconStateAllForks} from "@lodestar/state-transition/src/types.js"; import {FIFOBlockStateCache} from "../../../../src/chain/stateCache/index.js"; import {generateCachedState} from "../../../utils/state.js"; diff --git a/packages/beacon-node/test/unit/chain/validation/attestation/validateAttestation.test.ts b/packages/beacon-node/test/unit/chain/validation/attestation/validateAttestation.test.ts index 56aab699f4f..45d293ffbb3 100644 --- a/packages/beacon-node/test/unit/chain/validation/attestation/validateAttestation.test.ts +++ b/packages/beacon-node/test/unit/chain/validation/attestation/validateAttestation.test.ts @@ -1,6 +1,6 @@ import {BitArray} from "@chainsafe/ssz"; -import {describe, it} from "vitest"; -import {SLOTS_PER_EPOCH} from "@lodestar/params"; +import {describe, expect, it} from "vitest"; +import {ForkName, SLOTS_PER_EPOCH} from "@lodestar/params"; import {ssz} from "@lodestar/types"; // eslint-disable-next-line import/no-relative-packages import {generateTestCachedBeaconStateOnlyValidators} from "../../../../../../state-transition/test/perf/util.js"; @@ -9,14 +9,17 @@ import {IBeaconChain} from "../../../../../src/chain/index.js"; import { ApiAttestation, GossipAttestation, + getSeenAttDataKeyFromGossipAttestation, + getSeenAttDataKeyFromSignedAggregateAndProof, validateApiAttestation, validateAttestation, } from "../../../../../src/chain/validation/index.js"; -import {getAttDataBase64FromAttestationSerialized} from "../../../../../src/util/sszBytes.js"; +import {getAttDataFromAttestationSerialized} from "../../../../../src/util/sszBytes.js"; import {memoOnce} from "../../../../utils/cache.js"; import {expectRejectedWithLodestarError} from "../../../../utils/errors.js"; import {AttestationValidDataOpts, getAttestationValidData} from "../../../../utils/validationData/attestation.js"; +// TODO: more tests for electra describe("validateAttestation", () => { const vc = 64; const stateSlot = 100; @@ -72,7 +75,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData), }, subnet, AttestationErrorCode.BAD_TARGET_EPOCH @@ -91,7 +94,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData), }, subnet, AttestationErrorCode.PAST_SLOT @@ -110,7 +113,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData), }, subnet, 
AttestationErrorCode.FUTURE_SLOT @@ -135,7 +138,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData), }, subnet, AttestationErrorCode.NOT_EXACTLY_ONE_AGGREGATION_BIT_SET @@ -155,7 +158,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData), }, subnet, AttestationErrorCode.NOT_EXACTLY_ONE_AGGREGATION_BIT_SET @@ -179,7 +182,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData), }, subnet, AttestationErrorCode.UNKNOWN_OR_PREFINALIZED_BEACON_BLOCK_ROOT @@ -199,7 +202,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData), }, subnet, AttestationErrorCode.INVALID_TARGET_ROOT @@ -226,7 +229,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData), }, subnet, AttestationErrorCode.WRONG_NUMBER_OF_AGGREGATION_BITS @@ -245,7 +248,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData), }, invalidSubnet, AttestationErrorCode.INVALID_SUBNET_ID @@ -265,7 +268,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData), }, subnet, AttestationErrorCode.ATTESTATION_ALREADY_KNOWN @@ -287,7 +290,7 @@ describe("validateAttestation", () => { attestation: null, serializedData, attSlot: attestation.data.slot, - attDataBase64: getAttDataBase64FromAttestationSerialized(serializedData), + attDataBase64: getAttDataFromAttestationSerialized(serializedData), }, subnet, AttestationErrorCode.INVALID_SIGNATURE @@ -314,3 +317,53 @@ describe("validateAttestation", () => { await expectRejectedWithLodestarError(validateAttestation(fork, chain, attestationOrBytes, subnet), errorCode); } }); + +describe("getSeenAttDataKey", () => { + const slot = 100; + const index = 0; + const blockRoot = Buffer.alloc(32, 1); + + it("phase0", () => { + const attestationData = ssz.phase0.AttestationData.defaultValue(); + attestationData.slot = slot; + attestationData.index = index; + attestationData.beaconBlockRoot = blockRoot; + const attestation = ssz.phase0.Attestation.defaultValue(); + attestation.data = attestationData; + const attDataBase64 = Buffer.from(ssz.phase0.AttestationData.serialize(attestationData)).toString("base64"); + const attestationBytes = ssz.phase0.Attestation.serialize(attestation); + const gossipAttestation = {attDataBase64, 
serializedData: attestationBytes, attSlot: slot} as GossipAttestation; + + const signedAggregateAndProof = ssz.phase0.SignedAggregateAndProof.defaultValue(); + signedAggregateAndProof.message.aggregate.data.slot = slot; + signedAggregateAndProof.message.aggregate.data.index = index; + signedAggregateAndProof.message.aggregate.data.beaconBlockRoot = blockRoot; + const aggregateAndProofBytes = ssz.phase0.SignedAggregateAndProof.serialize(signedAggregateAndProof); + + expect(getSeenAttDataKeyFromGossipAttestation(ForkName.phase0, gossipAttestation)).toEqual( + getSeenAttDataKeyFromSignedAggregateAndProof(ForkName.phase0, aggregateAndProofBytes) + ); + }); + + it("electra", () => { + const attestationData = ssz.phase0.AttestationData.defaultValue(); + attestationData.slot = slot; + attestationData.index = index; + attestationData.beaconBlockRoot = blockRoot; + const attestation = ssz.electra.Attestation.defaultValue(); + attestation.data = attestationData; + const attDataBase64 = Buffer.from(ssz.phase0.AttestationData.serialize(attestationData)).toString("base64"); + const attestationBytes = ssz.electra.Attestation.serialize(attestation); + const gossipAttestation = {attDataBase64, serializedData: attestationBytes, attSlot: slot} as GossipAttestation; + + const signedAggregateAndProof = ssz.electra.SignedAggregateAndProof.defaultValue(); + signedAggregateAndProof.message.aggregate.data.slot = slot; + signedAggregateAndProof.message.aggregate.data.index = index; + signedAggregateAndProof.message.aggregate.data.beaconBlockRoot = blockRoot; + const aggregateAndProofBytes = ssz.electra.SignedAggregateAndProof.serialize(signedAggregateAndProof); + + expect(getSeenAttDataKeyFromGossipAttestation(ForkName.electra, gossipAttestation)).toEqual( + getSeenAttDataKeyFromSignedAggregateAndProof(ForkName.electra, aggregateAndProofBytes) + ); + }); +}); diff --git a/packages/beacon-node/test/unit/eth1/utils/deposits.test.ts b/packages/beacon-node/test/unit/eth1/utils/deposits.test.ts index ce0d7fae1fa..316f75efc5c 100644 --- a/packages/beacon-node/test/unit/eth1/utils/deposits.test.ts +++ b/packages/beacon-node/test/unit/eth1/utils/deposits.test.ts @@ -1,13 +1,15 @@ import {describe, it, expect} from "vitest"; import {phase0, ssz} from "@lodestar/types"; -import {MAX_DEPOSITS} from "@lodestar/params"; +import {MAX_DEPOSITS, SLOTS_PER_EPOCH} from "@lodestar/params"; import {verifyMerkleBranch} from "@lodestar/utils"; +import {createChainForkConfig} from "@lodestar/config"; import {filterBy} from "../../../utils/db.js"; import {Eth1ErrorCode} from "../../../../src/eth1/errors.js"; import {generateState} from "../../../utils/state.js"; import {expectRejectedWithLodestarError} from "../../../utils/errors.js"; import {getDeposits, getDepositsWithProofs, DepositGetter} from "../../../../src/eth1/utils/deposits.js"; import {DepositTree} from "../../../../src/db/repositories/depositDataRoot.js"; +import {createCachedBeaconStateTest} from "../../../utils/cachedBeaconState.js"; describe("eth1 / util / deposits", function () { describe("getDeposits", () => { @@ -18,6 +20,7 @@ describe("eth1 / util / deposits", function () { depositIndexes: number[]; expectedReturnedIndexes?: number[]; error?: Eth1ErrorCode; + postElectra?: boolean; }; const testCases: TestCase[] = [ @@ -70,18 +73,59 @@ describe("eth1 / util / deposits", function () { depositIndexes: [], expectedReturnedIndexes: [], }, + { + id: "No deposits to be included post Electra after deposit_requests_start_index", + depositCount: 2030, + eth1DepositIndex: 2025, 
+ depositIndexes: Array.from({length: 2030}, (_, i) => i), + expectedReturnedIndexes: [], + postElectra: true, + }, + { + id: "Should return deposits post Electra before deposit_requests_start_index", + depositCount: 2022, + eth1DepositIndex: 2018, + depositIndexes: Array.from({length: 2022}, (_, i) => i), + expectedReturnedIndexes: [2018, 2019, 2020, 2021], + postElectra: true, + }, + { + id: "Should return deposits less than MAX_DEPOSITS post Electra before deposit_requests_start_index", + depositCount: 10 * MAX_DEPOSITS, + eth1DepositIndex: 0, + depositIndexes: Array.from({length: 10 * MAX_DEPOSITS}, (_, i) => i), + expectedReturnedIndexes: Array.from({length: MAX_DEPOSITS}, (_, i) => i), + postElectra: true, + }, ]; + /* eslint-disable @typescript-eslint/naming-convention */ + const postElectraConfig = createChainForkConfig({ + ALTAIR_FORK_EPOCH: 1, + BELLATRIX_FORK_EPOCH: 2, + CAPELLA_FORK_EPOCH: 3, + DENEB_FORK_EPOCH: 4, + ELECTRA_FORK_EPOCH: 5, + }); + const postElectraSlot = postElectraConfig.ELECTRA_FORK_EPOCH * SLOTS_PER_EPOCH + 1; + for (const testCase of testCases) { - const {id, depositIndexes, eth1DepositIndex, depositCount, expectedReturnedIndexes, error} = testCase; + const {id, depositIndexes, eth1DepositIndex, depositCount, expectedReturnedIndexes, error, postElectra} = + testCase; it(id, async function () { - const state = generateState({eth1DepositIndex}); + const state = postElectra + ? generateState({slot: postElectraSlot, eth1DepositIndex}, postElectraConfig) + : generateState({eth1DepositIndex}); + const cachedState = createCachedBeaconStateTest( + state, + postElectra ? postElectraConfig : createChainForkConfig({}) + ); const eth1Data = generateEth1Data(depositCount); const deposits = depositIndexes.map((index) => generateDepositEvent(index)); const depositsGetter: DepositGetter = async (indexRange) => filterBy(deposits, indexRange, (deposit) => deposit.index); - const resultPromise = getDeposits(state, eth1Data, depositsGetter); + const resultPromise = getDeposits(cachedState, eth1Data, depositsGetter); if (expectedReturnedIndexes) { const result = await resultPromise; diff --git a/packages/beacon-node/test/unit/executionEngine/http.test.ts b/packages/beacon-node/test/unit/executionEngine/http.test.ts index aa33c7dbbc4..5ac4fd4ca67 100644 --- a/packages/beacon-node/test/unit/executionEngine/http.test.ts +++ b/packages/beacon-node/test/unit/executionEngine/http.test.ts @@ -193,6 +193,9 @@ describe("ExecutionEngine / http", () => { amount: "0x7b", }, ], + depositRequests: null, // depositRequests is null pre-electra + withdrawalRequests: null, + consolidationRequests: null, }, null, // null returned for missing blocks { @@ -201,6 +204,9 @@ describe("ExecutionEngine / http", () => { "0xb084c10440f05f5a23a55d1d7ebcb1b3892935fb56f23cdc9a7f42c348eed174", ], withdrawals: null, // withdrawals is null pre-capella + depositRequests: null, // depositRequests is null pre-electra + withdrawalRequests: null, + consolidationRequests: null, }, ], }; @@ -219,7 +225,7 @@ describe("ExecutionEngine / http", () => { returnValue = response; - const res = await executionEngine.getPayloadBodiesByHash(reqBlockHashes); + const res = await executionEngine.getPayloadBodiesByHash(ForkName.bellatrix, reqBlockHashes); expect(reqJsonRpcPayload).toEqual(request); expect(res.map(serializeExecutionPayloadBody)).toEqual(response.result); @@ -248,6 +254,9 @@ describe("ExecutionEngine / http", () => { amount: "0x7b", }, ], + depositRequests: null, // depositRequests is null pre-electra + 
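// Illustrative sketch (not part of this patch): the post-Electra test cases above exercise the
// EIP-6110 rule that Eth1-bridge deposits stop at depositRequestsStartIndex. Assuming plain
// numbers instead of Lodestar's cached state types, the expected deposit count per block is roughly:
function expectedEth1DepositCount(
  eth1DepositIndex: number,
  eth1DataDepositCount: number,
  depositRequestsStartIndex: number, // UNSET_DEPOSIT_REQUESTS_START_INDEX (2**64 - 1) until the first deposit request
  maxDeposits: number
): number {
  // Deposits at or beyond depositRequestsStartIndex arrive via execution-layer deposit requests instead
  const eth1DepositIndexLimit = Math.min(eth1DataDepositCount, depositRequestsStartIndex);
  if (eth1DepositIndex < eth1DepositIndexLimit) {
    return Math.min(maxDeposits, eth1DepositIndexLimit - eth1DepositIndex);
  }
  return 0;
}
// e.g. depositCount=2030, eth1DepositIndex=2025, startIndex=2023 -> 0 deposits expected,
// while depositCount=2022, eth1DepositIndex=2018, startIndex=2023 -> 4 deposits (indexes 2018..2021).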
withdrawalRequests: null, + consolidationRequests: null, }, null, // null returned for missing blocks { @@ -256,6 +265,9 @@ describe("ExecutionEngine / http", () => { "0xb084c10440f05f5a23a55d1d7ebcb1b3892935fb56f23cdc9a7f42c348eed174", ], withdrawals: null, // withdrawals is null pre-capella + depositRequests: null, // depositRequests is null pre-electra + withdrawalRequests: null, + consolidationRequests: null, }, ], }; @@ -268,7 +280,7 @@ describe("ExecutionEngine / http", () => { returnValue = response; - const res = await executionEngine.getPayloadBodiesByRange(startBlockNumber, blockCount); + const res = await executionEngine.getPayloadBodiesByRange(ForkName.bellatrix, startBlockNumber, blockCount); expect(reqJsonRpcPayload).toEqual(request); expect(res.map(serializeExecutionPayloadBody)).toEqual(response.result); diff --git a/packages/beacon-node/test/unit/network/beaconBlocksMaybeBlobsByRange.test.ts b/packages/beacon-node/test/unit/network/beaconBlocksMaybeBlobsByRange.test.ts index 3fb9cb8e1c7..d1dc7ba57fa 100644 --- a/packages/beacon-node/test/unit/network/beaconBlocksMaybeBlobsByRange.test.ts +++ b/packages/beacon-node/test/unit/network/beaconBlocksMaybeBlobsByRange.test.ts @@ -24,6 +24,7 @@ describe("beaconBlocksMaybeBlobsByRange", () => { BELLATRIX_FORK_EPOCH: 0, CAPELLA_FORK_EPOCH: 0, DENEB_FORK_EPOCH: 0, + ELECTRA_FORK_EPOCH: 0, }); const genesisValidatorsRoot = Buffer.alloc(32, 0xaa); const config = createBeaconConfig(chainConfig, genesisValidatorsRoot); @@ -101,7 +102,7 @@ describe("beaconBlocksMaybeBlobsByRange", () => { const expectedResponse = blocksWithBlobs.map(([block, blobSidecars]) => { const blobs = blobSidecars !== undefined ? blobSidecars : []; return getBlockInput.availableData(config, block, BlockSource.byRange, null, { - fork: ForkName.deneb, + fork: ForkName.electra, blobs, blobsSource: BlobsSource.byRange, blobsBytes: blobs.map(() => null), diff --git a/packages/beacon-node/test/unit/network/fork.test.ts b/packages/beacon-node/test/unit/network/fork.test.ts index be748d2e818..bbe1c0870d3 100644 --- a/packages/beacon-node/test/unit/network/fork.test.ts +++ b/packages/beacon-node/test/unit/network/fork.test.ts @@ -9,12 +9,14 @@ function getForkConfig({ bellatrix, capella, deneb, + electra, }: { phase0: number; altair: number; bellatrix: number; capella: number; deneb: number; + electra: number; }): BeaconConfig { const forks: Record = { phase0: { @@ -57,6 +59,14 @@ function getForkConfig({ prevVersion: Buffer.from([0, 0, 0, 3]), prevForkName: ForkName.capella, }, + electra: { + name: ForkName.electra, + seq: ForkSeq.electra, + epoch: electra, + version: Buffer.from([0, 0, 0, 5]), + prevVersion: Buffer.from([0, 0, 0, 4]), + prevForkName: ForkName.deneb, + }, }; const forksAscendingEpochOrder = Object.values(forks); const forksDescendingEpochOrder = Object.values(forks).reverse(); @@ -133,9 +143,10 @@ const testScenarios = [ for (const testScenario of testScenarios) { const {phase0, altair, bellatrix, capella, testCases} = testScenario; const deneb = Infinity; + const electra = Infinity; describe(`network / fork: phase0: ${phase0}, altair: ${altair}, bellatrix: ${bellatrix} capella: ${capella}`, () => { - const forkConfig = getForkConfig({phase0, altair, bellatrix, capella, deneb}); + const forkConfig = getForkConfig({phase0, altair, bellatrix, capella, deneb, electra}); const forks = forkConfig.forks; for (const testCase of testCases) { const {epoch, currentFork, nextFork, activeForks} = testCase; diff --git 
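// Illustrative sketch (not part of this patch): getPayloadBodiesByHash/ByRange now receive the
// fork name, presumably so responses can be handled fork-aware, and the mocked bodies above encode
// the invariant that the new request fields stay null before Electra. A minimal check over a
// hand-rolled body shape (field names mirror the engine-API camelCase used in the test, not
// Lodestar's internal types):
enum ForkSeqSketch {bellatrix = 2, capella = 3, deneb = 4, electra = 5}

type PayloadBodyRpc = {
  transactions: string[];
  withdrawals: unknown[] | null; // null pre-capella
  depositRequests: unknown[] | null; // null pre-electra
  withdrawalRequests: unknown[] | null; // null pre-electra
  consolidationRequests: unknown[] | null; // null pre-electra
};

function assertPayloadBodyMatchesFork(fork: ForkSeqSketch, body: PayloadBodyRpc): void {
  if (fork < ForkSeqSketch.capella && body.withdrawals !== null) {
    throw new Error("withdrawals must be null pre-capella");
  }
  if (
    fork < ForkSeqSketch.electra &&
    (body.depositRequests !== null || body.withdrawalRequests !== null || body.consolidationRequests !== null)
  ) {
    throw new Error("deposit/withdrawal/consolidation requests must be null pre-electra");
  }
}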
a/packages/beacon-node/test/unit/util/sszBytes.test.ts b/packages/beacon-node/test/unit/util/sszBytes.test.ts index 4285f4ca88b..612eea5a438 100644 --- a/packages/beacon-node/test/unit/util/sszBytes.test.ts +++ b/packages/beacon-node/test/unit/util/sszBytes.test.ts @@ -1,10 +1,12 @@ import {describe, it, expect} from "vitest"; -import {deneb, Epoch, phase0, RootHex, Slot, ssz} from "@lodestar/types"; +import {BitArray} from "@chainsafe/ssz"; +import {deneb, electra, Epoch, isElectraAttestation, phase0, RootHex, Slot, ssz} from "@lodestar/types"; import {fromHex, toHex} from "@lodestar/utils"; +import {ForkName, MAX_COMMITTEES_PER_SLOT} from "@lodestar/params"; import { - getAttDataBase64FromAttestationSerialized, - getAttDataBase64FromSignedAggregateAndProofSerialized, - getAggregationBitsFromAttestationSerialized, + getAttDataFromAttestationSerialized, + getAttDataFromSignedAggregateAndProofPhase0, + getAggregationBitsFromAttestationSerialized as getAggregationBitsFromAttestationSerialized, getBlockRootFromAttestationSerialized, getBlockRootFromSignedAggregateAndProofSerialized, getSlotFromAttestationSerialized, @@ -12,10 +14,13 @@ import { getSignatureFromAttestationSerialized, getSlotFromSignedBeaconBlockSerialized, getSlotFromBlobSidecarSerialized, + getCommitteeBitsFromAttestationSerialized, + getCommitteeBitsFromSignedAggregateAndProofElectra, + getAttDataFromSignedAggregateAndProofElectra, } from "../../../src/util/sszBytes.js"; describe("attestation SSZ serialized picking", () => { - const testCases: phase0.Attestation[] = [ + const testCases: (phase0.Attestation | electra.Attestation)[] = [ ssz.phase0.Attestation.defaultValue(), attestationFromValues( 4_000_000, @@ -23,21 +28,45 @@ describe("attestation SSZ serialized picking", () => { 200_00, "eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeffffffffffffffffffffffffffffffff" ), + ssz.electra.Attestation.defaultValue(), + { + ...attestationFromValues( + 4_000_000, + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + 200_00, + "eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeffffffffffffffffffffffffffffffff" + ), + committeeBits: BitArray.fromSingleBit(MAX_COMMITTEES_PER_SLOT, 3), + }, ]; for (const [i, attestation] of testCases.entries()) { it(`attestation ${i}`, () => { - const bytes = ssz.phase0.Attestation.serialize(attestation); + const isElectra = isElectraAttestation(attestation); + const bytes = isElectra + ? 
ssz.electra.Attestation.serialize(attestation) + : ssz.phase0.Attestation.serialize(attestation); expect(getSlotFromAttestationSerialized(bytes)).toBe(attestation.data.slot); expect(getBlockRootFromAttestationSerialized(bytes)).toBe(toHex(attestation.data.beaconBlockRoot)); - expect(getAggregationBitsFromAttestationSerialized(bytes)?.toBoolArray()).toEqual( - attestation.aggregationBits.toBoolArray() - ); - expect(getSignatureFromAttestationSerialized(bytes)).toEqual(attestation.signature); + + if (isElectra) { + expect(getAggregationBitsFromAttestationSerialized(ForkName.electra, bytes)?.toBoolArray()).toEqual( + attestation.aggregationBits.toBoolArray() + ); + expect(getCommitteeBitsFromAttestationSerialized(bytes)).toEqual( + Buffer.from(attestation.committeeBits.uint8Array).toString("base64") + ); + expect(getSignatureFromAttestationSerialized(bytes)).toEqual(attestation.signature); + } else { + expect(getAggregationBitsFromAttestationSerialized(ForkName.phase0, bytes)?.toBoolArray()).toEqual( + attestation.aggregationBits.toBoolArray() + ); + expect(getSignatureFromAttestationSerialized(bytes)).toEqual(attestation.signature); + } const attDataBase64 = ssz.phase0.AttestationData.serialize(attestation.data); - expect(getAttDataBase64FromAttestationSerialized(bytes)).toBe(Buffer.from(attDataBase64).toString("base64")); + expect(getAttDataFromAttestationSerialized(bytes)).toBe(Buffer.from(attDataBase64).toString("base64")); }); } @@ -55,17 +84,18 @@ describe("attestation SSZ serialized picking", () => { } }); - it("getAttDataBase64FromAttestationSerialized - invalid data", () => { + it("getAttDataFromAttestationSerialized - invalid data", () => { const invalidAttDataBase64DataSizes = [0, 4, 100, 128, 131]; for (const size of invalidAttDataBase64DataSizes) { - expect(getAttDataBase64FromAttestationSerialized(Buffer.alloc(size))).toBeNull(); + expect(getAttDataFromAttestationSerialized(Buffer.alloc(size))).toBeNull(); } }); - it("getAggregateionBitsFromAttestationSerialized - invalid data", () => { + it("getAggregationBitsFromAttestationSerialized - invalid data", () => { const invalidAggregationBitsDataSizes = [0, 4, 100, 128, 227]; for (const size of invalidAggregationBitsDataSizes) { - expect(getAggregationBitsFromAttestationSerialized(Buffer.alloc(size))).toBeNull(); + expect(getAggregationBitsFromAttestationSerialized(ForkName.phase0, Buffer.alloc(size))).toBeNull(); + expect(getAggregationBitsFromAttestationSerialized(ForkName.electra, Buffer.alloc(size))).toBeNull(); } }); @@ -73,14 +103,15 @@ describe("attestation SSZ serialized picking", () => { const invalidSignatureDataSizes = [0, 4, 100, 128, 227]; for (const size of invalidSignatureDataSizes) { expect(getSignatureFromAttestationSerialized(Buffer.alloc(size))).toBeNull(); + expect(getSignatureFromAttestationSerialized(Buffer.alloc(size))).toBeNull(); } }); }); -describe("aggregateAndProof SSZ serialized picking", () => { +describe("phase0 SignedAggregateAndProof SSZ serialized picking", () => { const testCases: phase0.SignedAggregateAndProof[] = [ ssz.phase0.SignedAggregateAndProof.defaultValue(), - signedAggregateAndProofFromValues( + phase0SignedAggregateAndProofFromValues( 4_000_000, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", 200_00, @@ -100,8 +131,62 @@ describe("aggregateAndProof SSZ serialized picking", () => { ); const attDataBase64 = ssz.phase0.AttestationData.serialize(signedAggregateAndProof.message.aggregate.data); - expect(getAttDataBase64FromSignedAggregateAndProofSerialized(bytes)).toBe( - 
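// Illustrative sketch (not part of this patch): the byte-picking helpers exercised above rely on
// the fixed SSZ layout of Attestation: a 4-byte offset to the variable-length aggregationBits,
// then the 128-byte AttestationData (slot 8 + index 8 + beaconBlockRoot 32 + source 40 + target 40),
// the 96-byte signature and, in Electra only, the 8-byte committeeBits bitvector
// (MAX_COMMITTEES_PER_SLOT = 64 bits). Offsets below are derived from that layout, not copied from
// src/util/sszBytes.ts:
const VARIABLE_FIELD_OFFSET = 4;
const ATTESTATION_DATA_SIZE = 128;
const SIGNATURE_SIZE = 96;
const COMMITTEE_BITS_SIZE = 8;

function attDataBase64FromAttestationBytes(data: Uint8Array): string | null {
  if (data.length < VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE) return null;
  // Same bytes whether the attestation is phase0 or electra
  return Buffer.from(data.subarray(VARIABLE_FIELD_OFFSET, VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE)).toString(
    "base64"
  );
}

function committeeBitsBase64FromElectraAttestationBytes(data: Uint8Array): string | null {
  const start = VARIABLE_FIELD_OFFSET + ATTESTATION_DATA_SIZE + SIGNATURE_SIZE; // = 228
  if (data.length < start + COMMITTEE_BITS_SIZE) return null;
  return Buffer.from(data.subarray(start, start + COMMITTEE_BITS_SIZE)).toString("base64");
}
// e.g. 131 is listed as an invalid length above: one byte short of the 132-byte minimum (4 + 128).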
Buffer.from(attDataBase64).toString("base64") + expect(getAttDataFromSignedAggregateAndProofPhase0(bytes)).toBe(Buffer.from(attDataBase64).toString("base64")); + }); + } + + it("getSlotFromSignedAggregateAndProofSerialized - invalid data", () => { + const invalidSlotDataSizes = [0, 4, 11]; + for (const size of invalidSlotDataSizes) { + expect(getSlotFromSignedAggregateAndProofSerialized(Buffer.alloc(size))).toBeNull(); + } + }); + + it("getBlockRootFromSignedAggregateAndProofSerialized - invalid data", () => { + const invalidBlockRootDataSizes = [0, 4, 20, 227]; + for (const size of invalidBlockRootDataSizes) { + expect(getBlockRootFromSignedAggregateAndProofSerialized(Buffer.alloc(size))).toBeNull(); + } + }); + + it("getAttDataBase64FromSignedAggregateAndProofSerialized - invalid data", () => { + const invalidAttDataBase64DataSizes = [0, 4, 100, 128, 339]; + for (const size of invalidAttDataBase64DataSizes) { + expect(getAttDataFromSignedAggregateAndProofPhase0(Buffer.alloc(size))).toBeNull(); + } + }); +}); + +describe("electra SignedAggregateAndProof SSZ serialized picking", () => { + const testCases: electra.SignedAggregateAndProof[] = [ + ssz.electra.SignedAggregateAndProof.defaultValue(), + electraSignedAggregateAndProofFromValues( + 4_000_000, + "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", + 200_00, + "eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeffffffffffffffffffffffffffffffff" + ), + ]; + + for (const [i, signedAggregateAndProof] of testCases.entries()) { + it(`signedAggregateAndProof ${i}`, () => { + const bytes = ssz.electra.SignedAggregateAndProof.serialize(signedAggregateAndProof); + + expect(getSlotFromSignedAggregateAndProofSerialized(bytes)).toBe( + signedAggregateAndProof.message.aggregate.data.slot + ); + expect(getBlockRootFromSignedAggregateAndProofSerialized(bytes)).toBe( + toHex(signedAggregateAndProof.message.aggregate.data.beaconBlockRoot) + ); + + const attDataBase64 = ssz.phase0.AttestationData.serialize(signedAggregateAndProof.message.aggregate.data); + const committeeBits = ssz.electra.CommitteeBits.serialize( + signedAggregateAndProof.message.aggregate.committeeBits + ); + + expect(getAttDataFromSignedAggregateAndProofElectra(bytes)).toBe(Buffer.from(attDataBase64).toString("base64")); + expect(getCommitteeBitsFromSignedAggregateAndProofElectra(bytes)).toBe( + Buffer.from(committeeBits).toString("base64") ); }); } @@ -123,7 +208,7 @@ describe("aggregateAndProof SSZ serialized picking", () => { it("getAttDataBase64FromSignedAggregateAndProofSerialized - invalid data", () => { const invalidAttDataBase64DataSizes = [0, 4, 100, 128, 339]; for (const size of invalidAttDataBase64DataSizes) { - expect(getAttDataBase64FromSignedAggregateAndProofSerialized(Buffer.alloc(size))).toBeNull(); + expect(getAttDataFromSignedAggregateAndProofPhase0(Buffer.alloc(size))).toBeNull(); } }); it("getSlotFromSignedAggregateAndProofSerialized - invalid data - large slots", () => { @@ -187,7 +272,7 @@ function attestationFromValues( return attestation; } -function signedAggregateAndProofFromValues( +function phase0SignedAggregateAndProofFromValues( slot: Slot, blockRoot: RootHex, targetEpoch: Epoch, @@ -201,6 +286,21 @@ function signedAggregateAndProofFromValues( return signedAggregateAndProof; } +function electraSignedAggregateAndProofFromValues( + slot: Slot, + blockRoot: RootHex, + targetEpoch: Epoch, + targetRoot: RootHex +): electra.SignedAggregateAndProof { + const signedAggregateAndProof = ssz.electra.SignedAggregateAndProof.defaultValue(); + 
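// Illustrative sketch (not part of this patch): the aggregate helpers above read fixed offsets
// inside a serialized SignedAggregateAndProof. The outer fixed part is 100 bytes (4-byte message
// offset + 96-byte signature), AggregateAndProof's fixed part is 108 bytes (aggregator_index 8 +
// aggregate offset 4 + selection_proof 96), and the embedded Attestation adds its own 4-byte
// offset, so AttestationData sits at bytes 212..340 for both forks and the Electra committeeBits
// at 436..444, after the aggregate's 96-byte signature. Derived from the SSZ layouts, not copied
// from src/util/sszBytes.ts:
const AGG_ATT_DATA_OFFSET = (4 + 96) + (8 + 4 + 96) + 4; // = 212
const AGG_ATT_DATA_END = AGG_ATT_DATA_OFFSET + 128; // = 340
const AGG_COMMITTEE_BITS_OFFSET = AGG_ATT_DATA_END + 96; // = 436, electra only

function attDataBase64FromAggregateBytes(data: Uint8Array): string | null {
  if (data.length < AGG_ATT_DATA_END) return null;
  return Buffer.from(data.subarray(AGG_ATT_DATA_OFFSET, AGG_ATT_DATA_END)).toString("base64");
}

function committeeBitsBase64FromElectraAggregateBytes(data: Uint8Array): string | null {
  if (data.length < AGG_COMMITTEE_BITS_OFFSET + 8) return null;
  return Buffer.from(data.subarray(AGG_COMMITTEE_BITS_OFFSET, AGG_COMMITTEE_BITS_OFFSET + 8)).toString("base64");
}
// Consistent with the invalid-size test above, where 339 is the largest rejected length (one byte short of 340).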
signedAggregateAndProof.message.aggregate.data.slot = slot; + signedAggregateAndProof.message.aggregate.data.beaconBlockRoot = fromHex(blockRoot); + signedAggregateAndProof.message.aggregate.data.target.epoch = targetEpoch; + signedAggregateAndProof.message.aggregate.data.target.root = fromHex(targetRoot); + signedAggregateAndProof.message.aggregate.committeeBits = BitArray.fromSingleBit(MAX_COMMITTEES_PER_SLOT, 1); + return signedAggregateAndProof; +} + function signedBeaconBlockFromValues(slot: Slot): phase0.SignedBeaconBlock { const signedBeaconBlock = ssz.phase0.SignedBeaconBlock.defaultValue(); signedBeaconBlock.message.slot = slot; diff --git a/packages/beacon-node/test/utils/config.ts b/packages/beacon-node/test/utils/config.ts index 54c058d3072..2aad1c14c03 100644 --- a/packages/beacon-node/test/utils/config.ts +++ b/packages/beacon-node/test/utils/config.ts @@ -31,5 +31,13 @@ export function getConfig(fork: ForkName, forkEpoch = 0): ChainForkConfig { CAPELLA_FORK_EPOCH: 0, DENEB_FORK_EPOCH: forkEpoch, }); + case ForkName.electra: + return createChainForkConfig({ + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + CAPELLA_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + ELECTRA_FORK_EPOCH: forkEpoch, + }); } } diff --git a/packages/beacon-node/test/utils/node/validator.ts b/packages/beacon-node/test/utils/node/validator.ts index 4ec60dcc8b4..285fa13fd01 100644 --- a/packages/beacon-node/test/utils/node/validator.ts +++ b/packages/beacon-node/test/utils/node/validator.ts @@ -97,7 +97,7 @@ export function getApiFromServerHandlers(api: BeaconApiMethods): ApiClient { return async (args: unknown) => { try { const apiResponse = new ApiResponse({} as any, null, new Response(null, {status: HttpStatusCode.OK})); - const result = await api(args, {}); + const result = await api.call(apiModule, args, {}); apiResponse.value = () => result.data; apiResponse.meta = () => result.meta; return apiResponse; diff --git a/packages/beacon-node/test/utils/state.ts b/packages/beacon-node/test/utils/state.ts index 1e9f614e809..bb55adca1eb 100644 --- a/packages/beacon-node/test/utils/state.ts +++ b/packages/beacon-node/test/utils/state.ts @@ -7,8 +7,10 @@ import { PubkeyIndexMap, CachedBeaconStateBellatrix, BeaconStateBellatrix, + CachedBeaconStateElectra, + BeaconStateElectra, } from "@lodestar/state-transition"; -import {BeaconState, altair, bellatrix, ssz} from "@lodestar/types"; +import {BeaconState, altair, bellatrix, electra, ssz} from "@lodestar/types"; import {createBeaconConfig, ChainForkConfig} from "@lodestar/config"; import {FAR_FUTURE_EPOCH, ForkName, ForkSeq, MAX_EFFECTIVE_BALANCE, SYNC_COMMITTEE_SIZE} from "@lodestar/params"; @@ -64,6 +66,7 @@ export function generateState( : generateValidators(numValidators, validatorOpts)); state.genesisTime = Math.floor(Date.now() / 1000); + state.slot = stateSlot; state.fork.previousVersion = config.GENESIS_FORK_VERSION; state.fork.currentVersion = config.GENESIS_FORK_VERSION; state.latestBlockHeader.bodyRoot = ssz.phase0.BeaconBlockBody.hashTreeRoot(ssz.phase0.BeaconBlockBody.defaultValue()); @@ -92,6 +95,12 @@ export function generateState( }; } + if (forkSeq >= ForkSeq.electra) { + const stateElectra = state as electra.BeaconState; + stateElectra.depositRequestsStartIndex = 2023n; + stateElectra.latestExecutionPayloadHeader = ssz.electra.ExecutionPayloadHeader.defaultValue(); + } + return config.getForkTypes(stateSlot).BeaconState.toViewDU(state); } @@ -137,6 +146,18 @@ export function generateCachedBellatrixState(opts?: TestBeaconState): CachedBeac }); } +/** 
+ * This generates state with default pubkey + */ +export function generateCachedElectraState(opts?: TestBeaconState): CachedBeaconStateElectra { + const config = getConfig(ForkName.electra); + const state = generateState(opts, config); + return createCachedBeaconState(state as BeaconStateElectra, { + config: createBeaconConfig(config, state.genesisValidatorsRoot), + pubkey2index: new PubkeyIndexMap(), + index2pubkey: [], + }); +} export const zeroProtoBlock: ProtoBlock = { slot: 0, blockRoot: ZERO_HASH_HEX, diff --git a/packages/cli/package.json b/packages/cli/package.json index b14d6d5ccaf..bf8621344dc 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -57,7 +57,7 @@ "@chainsafe/discv5": "^9.0.0", "@chainsafe/enr": "^3.0.0", "@chainsafe/persistent-merkle-tree": "^0.8.0", - "@chainsafe/ssz": "^0.17.0", + "@chainsafe/ssz": "^0.17.1", "@chainsafe/threads": "^1.11.1", "@libp2p/crypto": "^4.1.0", "@libp2p/peer-id": "^4.1.0", diff --git a/packages/config/package.json b/packages/config/package.json index e306920aea4..45c462e1908 100644 --- a/packages/config/package.json +++ b/packages/config/package.json @@ -64,7 +64,7 @@ "blockchain" ], "dependencies": { - "@chainsafe/ssz": "^0.17.0", + "@chainsafe/ssz": "^0.17.1", "@lodestar/params": "^1.21.0", "@lodestar/types": "^1.21.0" } diff --git a/packages/config/src/chainConfig/configs/mainnet.ts b/packages/config/src/chainConfig/configs/mainnet.ts index 883688ca821..741ddc99f8c 100644 --- a/packages/config/src/chainConfig/configs/mainnet.ts +++ b/packages/config/src/chainConfig/configs/mainnet.ts @@ -49,6 +49,10 @@ export const chainConfig: ChainConfig = { DENEB_FORK_VERSION: b("0x04000000"), DENEB_FORK_EPOCH: 269568, // March 13, 2024, 01:55:35pm UTC + // ELECTRA + ELECTRA_FORK_VERSION: b("0x05000000"), + ELECTRA_FORK_EPOCH: Infinity, + // Time parameters // --------------------------------------------------------------- // 12 seconds @@ -98,4 +102,10 @@ export const chainConfig: ChainConfig = { // Deneb // `2**12` (= 4096 epochs, ~18 days) MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS: 4096, + + // Electra + // 2**8 * 10**9 (= 256,000,000,000) + MAX_PER_EPOCH_ACTIVATION_EXIT_CHURN_LIMIT: 256000000000, + // 2*7 * 10**9 (= 128,000,000,000) + MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: 128000000000, }; diff --git a/packages/config/src/chainConfig/configs/minimal.ts b/packages/config/src/chainConfig/configs/minimal.ts index 23cd14e763e..26f49cc3e47 100644 --- a/packages/config/src/chainConfig/configs/minimal.ts +++ b/packages/config/src/chainConfig/configs/minimal.ts @@ -45,6 +45,9 @@ export const chainConfig: ChainConfig = { // Deneb DENEB_FORK_VERSION: b("0x04000001"), DENEB_FORK_EPOCH: Infinity, + // ELECTRA + ELECTRA_FORK_VERSION: b("0x05000001"), + ELECTRA_FORK_EPOCH: Infinity, // Time parameters // --------------------------------------------------------------- @@ -96,4 +99,10 @@ export const chainConfig: ChainConfig = { // Deneb // `2**12` (= 4096 epochs, ~18 days) MIN_EPOCHS_FOR_BLOB_SIDECARS_REQUESTS: 4096, + + // Electra + // 2**7 * 10**9 (= 128,000,000,000) + MAX_PER_EPOCH_ACTIVATION_EXIT_CHURN_LIMIT: 128000000000, + // 2**6 * 10**9 (= 64,000,000,000) + MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: 64000000000, }; diff --git a/packages/config/src/chainConfig/types.ts b/packages/config/src/chainConfig/types.ts index 45f05bfaa72..05fff02f2ea 100644 --- a/packages/config/src/chainConfig/types.ts +++ b/packages/config/src/chainConfig/types.ts @@ -40,6 +40,9 @@ export type ChainConfig = { // DENEB DENEB_FORK_VERSION: Uint8Array; DENEB_FORK_EPOCH: 
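// Illustrative sketch (not part of this patch): the two new mainnet values above feed the
// EIP-7251 balance-based churn, which replaces the validator-count churn for exits and
// consolidations. A spec-shaped derivation using bigint Gwei; names follow the consensus spec,
// while Lodestar's actual helpers live in the epoch context / state-transition utils and may differ:
const EFFECTIVE_BALANCE_INCREMENT = 1_000_000_000n; // 1 ETH in Gwei
const CHURN_LIMIT_QUOTIENT = 65536n; // mainnet
const MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA = 128_000_000_000n; // 2**7 * 10**9 Gwei
const MAX_PER_EPOCH_ACTIVATION_EXIT_CHURN_LIMIT = 256_000_000_000n; // 2**8 * 10**9 Gwei

function getBalanceChurnLimit(totalActiveBalanceGwei: bigint): bigint {
  const quotientChurn = totalActiveBalanceGwei / CHURN_LIMIT_QUOTIENT;
  const churn = quotientChurn > MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA ? quotientChurn : MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA;
  // Round down to a whole effective-balance increment
  return churn - (churn % EFFECTIVE_BALANCE_INCREMENT);
}

function getActivationExitChurnLimit(totalActiveBalanceGwei: bigint): bigint {
  const balanceChurn = getBalanceChurnLimit(totalActiveBalanceGwei);
  return balanceChurn < MAX_PER_EPOCH_ACTIVATION_EXIT_CHURN_LIMIT
    ? balanceChurn
    : MAX_PER_EPOCH_ACTIVATION_EXIT_CHURN_LIMIT;
}

function getConsolidationChurnLimit(totalActiveBalanceGwei: bigint): bigint {
  return getBalanceChurnLimit(totalActiveBalanceGwei) - getActivationExitChurnLimit(totalActiveBalanceGwei);
}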
number; + // ELECTRA + ELECTRA_FORK_VERSION: Uint8Array; + ELECTRA_FORK_EPOCH: number; // Time parameters SECONDS_PER_SLOT: number; @@ -55,6 +58,8 @@ export type ChainConfig = { MIN_PER_EPOCH_CHURN_LIMIT: number; MAX_PER_EPOCH_ACTIVATION_CHURN_LIMIT: number; CHURN_LIMIT_QUOTIENT: number; + MAX_PER_EPOCH_ACTIVATION_EXIT_CHURN_LIMIT: number; + MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: number; // Fork choice PROPOSER_SCORE_BOOST: number; @@ -99,6 +104,9 @@ export const chainConfigTypes: SpecTypes = { // DENEB DENEB_FORK_VERSION: "bytes", DENEB_FORK_EPOCH: "number", + // ELECTRA + ELECTRA_FORK_VERSION: "bytes", + ELECTRA_FORK_EPOCH: "number", // Time parameters SECONDS_PER_SLOT: "number", @@ -114,6 +122,8 @@ export const chainConfigTypes: SpecTypes = { MIN_PER_EPOCH_CHURN_LIMIT: "number", MAX_PER_EPOCH_ACTIVATION_CHURN_LIMIT: "number", CHURN_LIMIT_QUOTIENT: "number", + MAX_PER_EPOCH_ACTIVATION_EXIT_CHURN_LIMIT: "number", + MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: "number", // Fork choice PROPOSER_SCORE_BOOST: "number", diff --git a/packages/config/src/forkConfig/index.ts b/packages/config/src/forkConfig/index.ts index 16d8952548b..513cd7559ee 100644 --- a/packages/config/src/forkConfig/index.ts +++ b/packages/config/src/forkConfig/index.ts @@ -11,7 +11,7 @@ import { ForkLightClient, ForkBlobs, } from "@lodestar/params"; -import {Slot, Version, SSZTypesFor, sszTypesFor} from "@lodestar/types"; +import {Slot, Version, SSZTypesFor, sszTypesFor, Epoch} from "@lodestar/types"; import {ChainConfig} from "../chainConfig/index.js"; import {ForkConfig, ForkInfo} from "./types.js"; @@ -59,10 +59,18 @@ export function createForkConfig(config: ChainConfig): ForkConfig { prevVersion: config.CAPELLA_FORK_VERSION, prevForkName: ForkName.capella, }; + const electra: ForkInfo = { + name: ForkName.electra, + seq: ForkSeq.electra, + epoch: config.ELECTRA_FORK_EPOCH, + version: config.ELECTRA_FORK_VERSION, + prevVersion: config.DENEB_FORK_VERSION, + prevForkName: ForkName.deneb, + }; /** Forks in order order of occurence, `phase0` first */ // Note: Downstream code relies on proper ordering. 
- const forks = {phase0, altair, bellatrix, capella, deneb}; + const forks = {phase0, altair, bellatrix, capella, deneb, electra}; // Prevents allocating an array on every getForkInfo() call const forksAscendingEpochOrder = Object.values(forks); @@ -76,6 +84,9 @@ export function createForkConfig(config: ChainConfig): ForkConfig { // Fork convenience methods getForkInfo(slot: Slot): ForkInfo { const epoch = Math.floor(Math.max(slot, 0) / SLOTS_PER_EPOCH); + return this.getForkInfoAtEpoch(epoch); + }, + getForkInfoAtEpoch(epoch: Epoch): ForkInfo { // NOTE: forks must be sorted by descending epoch, latest fork first for (const fork of forksDescendingEpochOrder) { if (epoch >= fork.epoch) return fork; @@ -88,6 +99,9 @@ export function createForkConfig(config: ChainConfig): ForkConfig { getForkSeq(slot: Slot): ForkSeq { return this.getForkInfo(slot).seq; }, + getForkSeqAtEpoch(epoch: Epoch): ForkSeq { + return this.getForkInfoAtEpoch(epoch).seq; + }, getForkVersion(slot: Slot): Version { return this.getForkInfo(slot).version; }, diff --git a/packages/config/src/forkConfig/types.ts b/packages/config/src/forkConfig/types.ts index 2905e6f03c3..ebb2899a2a2 100644 --- a/packages/config/src/forkConfig/types.ts +++ b/packages/config/src/forkConfig/types.ts @@ -21,11 +21,14 @@ export type ForkConfig = { /** Get the hard-fork info for the active fork at `slot` */ getForkInfo(slot: Slot): ForkInfo; - + /** Get the hard-fork info for the active fork at `epoch` */ + getForkInfoAtEpoch(epoch: Epoch): ForkInfo; /** Get the hard-fork name at a given slot */ getForkName(slot: Slot): ForkName; /** Get the hard-fork sequence number at a given slot */ getForkSeq(slot: Slot): ForkSeq; + /** Get the hard-fork sequence number at a given epoch */ + getForkSeqAtEpoch(epoch: Epoch): ForkSeq; /** Get the hard-fork version at a given slot */ getForkVersion(slot: Slot): Version; /** Get SSZ types by hard-fork */ diff --git a/packages/db/package.json b/packages/db/package.json index a68e4e6f2a8..1a31603b679 100644 --- a/packages/db/package.json +++ b/packages/db/package.json @@ -35,7 +35,7 @@ "check-readme": "typescript-docs-verifier" }, "dependencies": { - "@chainsafe/ssz": "^0.17.0", + "@chainsafe/ssz": "^0.17.1", "@lodestar/config": "^1.21.0", "@lodestar/utils": "^1.21.0", "classic-level": "^1.4.1", diff --git a/packages/flare/src/cmds/selfSlashAttester.ts b/packages/flare/src/cmds/selfSlashAttester.ts index 8b43e6a92cb..e29a956a930 100644 --- a/packages/flare/src/cmds/selfSlashAttester.ts +++ b/packages/flare/src/cmds/selfSlashAttester.ts @@ -1,6 +1,6 @@ import {SecretKey, aggregateSignatures} from "@chainsafe/blst"; import {getClient} from "@lodestar/api"; -import {phase0, ssz} from "@lodestar/types"; +import {AttesterSlashing, phase0, ssz} from "@lodestar/types"; import {config as chainConfig} from "@lodestar/config/default"; import {createBeaconConfig, BeaconConfig} from "@lodestar/config"; import {DOMAIN_BEACON_ATTESTER, MAX_VALIDATORS_PER_COMMITTEE} from "@lodestar/params"; @@ -117,7 +117,7 @@ export async function selfSlashAttesterHandler(args: SelfSlashArgs): Promise 32)); + const balances = new Uint16Array(Array.from({length: opts.initialValidatorCount}, () => 32)); const fcStore: IForkChoiceStore = { currentSlot: genesisSlot, diff --git a/packages/fork-choice/test/unit/forkChoice/forkChoice.test.ts b/packages/fork-choice/test/unit/forkChoice/forkChoice.test.ts index fcb7376cffa..40988a0e4a7 100644 --- a/packages/fork-choice/test/unit/forkChoice/forkChoice.test.ts +++ 
b/packages/fork-choice/test/unit/forkChoice/forkChoice.test.ts @@ -52,16 +52,16 @@ describe("Forkchoice", function () { currentSlot: genesisSlot + 1, justified: { checkpoint: {epoch: genesisEpoch, root: fromHexString(finalizedRoot), rootHex: finalizedRoot}, - balances: new Uint8Array([32]), + balances: new Uint16Array([32]), totalBalance: 32, }, unrealizedJustified: { checkpoint: {epoch: genesisEpoch, root: fromHexString(finalizedRoot), rootHex: finalizedRoot}, - balances: new Uint8Array([32]), + balances: new Uint16Array([32]), }, finalizedCheckpoint: {epoch: genesisEpoch, root: fromHexString(finalizedRoot), rootHex: finalizedRoot}, unrealizedFinalizedCheckpoint: {epoch: genesisEpoch, root: fromHexString(finalizedRoot), rootHex: finalizedRoot}, - justifiedBalancesGetter: () => new Uint8Array([32]), + justifiedBalancesGetter: () => new Uint16Array([32]), equivocatingIndices: new Set(), }; diff --git a/packages/fork-choice/test/unit/forkChoice/getProposerHead.test.ts b/packages/fork-choice/test/unit/forkChoice/getProposerHead.test.ts index cc14b5b57b9..f603a4069b8 100644 --- a/packages/fork-choice/test/unit/forkChoice/getProposerHead.test.ts +++ b/packages/fork-choice/test/unit/forkChoice/getProposerHead.test.ts @@ -102,12 +102,12 @@ describe("Forkchoice / GetProposerHead", function () { currentSlot: genesisSlot + 1, justified: { checkpoint: {epoch: genesisEpoch, root: fromHexString(genesisBlock.blockRoot), rootHex: genesisBlock.blockRoot}, - balances: new Uint8Array(Array(32).fill(150)), + balances: new Uint16Array(Array(32).fill(150)), totalBalance: 32 * 150, }, unrealizedJustified: { checkpoint: {epoch: genesisEpoch, root: fromHexString(genesisBlock.blockRoot), rootHex: genesisBlock.blockRoot}, - balances: new Uint8Array(Array(32).fill(150)), + balances: new Uint16Array(Array(32).fill(150)), }, finalizedCheckpoint: { epoch: genesisEpoch, @@ -119,7 +119,7 @@ describe("Forkchoice / GetProposerHead", function () { root: fromHexString(genesisBlock.blockRoot), rootHex: genesisBlock.blockRoot, }, - justifiedBalancesGetter: () => new Uint8Array(Array(32).fill(150)), + justifiedBalancesGetter: () => new Uint16Array(Array(32).fill(150)), equivocatingIndices: new Set(), }; diff --git a/packages/fork-choice/test/unit/protoArray/computeDeltas.test.ts b/packages/fork-choice/test/unit/protoArray/computeDeltas.test.ts index fde551d43cd..4428807bd13 100644 --- a/packages/fork-choice/test/unit/protoArray/computeDeltas.test.ts +++ b/packages/fork-choice/test/unit/protoArray/computeDeltas.test.ts @@ -253,7 +253,7 @@ describe("computeDeltas", () => { nextEpoch: 0, })); - const balances = new Uint8Array([firstBalance, secondBalance]); + const balances = new Uint16Array([firstBalance, secondBalance]); // 1st validator is part of an attester slashing const equivocatingIndices = new Set([0]); let deltas = computeDeltas(indices.size, votes, balances, balances, equivocatingIndices); diff --git a/packages/light-client/package.json b/packages/light-client/package.json index 0bc1a5529c8..1188732c887 100644 --- a/packages/light-client/package.json +++ b/packages/light-client/package.json @@ -76,7 +76,7 @@ "@chainsafe/bls": "7.1.3", "@chainsafe/blst": "^0.2.0", "@chainsafe/persistent-merkle-tree": "^0.8.0", - "@chainsafe/ssz": "^0.17.0", + "@chainsafe/ssz": "^0.17.1", "@lodestar/api": "^1.21.0", "@lodestar/config": "^1.21.0", "@lodestar/params": "^1.21.0", diff --git a/packages/light-client/src/spec/utils.ts b/packages/light-client/src/spec/utils.ts index 65d6f3e84c5..3e59cb14cfa 100644 --- 
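// Illustrative sketch (not part of this patch): fork choice keeps justified balances as
// effective-balance increments (1 increment = 1 ETH). Pre-Electra the 32 ETH cap fits in a byte;
// with MAX_EFFECTIVE_BALANCE_ELECTRA = 2048 ETH an increment value can reach 2048, which is the
// likely reason for the Uint8Array -> Uint16Array switch in the tests above. Assuming Gwei inputs:
const EFFECTIVE_BALANCE_INCREMENT_GWEI = 1_000_000_000;

function toEffectiveBalanceIncrements(effectiveBalancesGwei: number[]): Uint16Array {
  const increments = new Uint16Array(effectiveBalancesGwei.length);
  for (let i = 0; i < effectiveBalancesGwei.length; i++) {
    // 32 ETH -> 32 (fits in a byte), 2048 ETH -> 2048 (needs 16 bits)
    increments[i] = Math.floor(effectiveBalancesGwei[i] / EFFECTIVE_BALANCE_INCREMENT_GWEI);
  }
  return increments;
}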
a/packages/light-client/src/spec/utils.ts +++ b/packages/light-client/src/spec/utils.ts @@ -112,6 +112,18 @@ export function upgradeLightClientHeader( // Break if no further upgradation is required else fall through if (ForkSeq[targetFork] <= ForkSeq.deneb) break; + + // eslint-disable-next-line no-fallthrough + case ForkName.electra: + (upgradedHeader as LightClientHeader).execution.depositRequestsRoot = + ssz.electra.LightClientHeader.fields.execution.fields.depositRequestsRoot.defaultValue(); + (upgradedHeader as LightClientHeader).execution.withdrawalRequestsRoot = + ssz.electra.LightClientHeader.fields.execution.fields.withdrawalRequestsRoot.defaultValue(); + (upgradedHeader as LightClientHeader).execution.consolidationRequestsRoot = + ssz.electra.LightClientHeader.fields.execution.fields.consolidationRequestsRoot.defaultValue(); + + // Break if no further upgrades is required else fall through + if (ForkSeq[targetFork] <= ForkSeq.electra) break; } return upgradedHeader; } @@ -145,6 +157,15 @@ export function isValidLightClientHeader(config: ChainForkConfig, header: LightC } } + if (epoch < config.ELECTRA_FORK_EPOCH) { + if ( + (header as LightClientHeader).execution.depositRequestsRoot !== undefined || + (header as LightClientHeader).execution.withdrawalRequestsRoot !== undefined + ) { + return false; + } + } + return isValidMerkleBranch( config .getExecutionForkTypes(header.beacon.slot) diff --git a/packages/params/src/forkName.ts b/packages/params/src/forkName.ts index a5f6d49d1ce..42e8917942d 100644 --- a/packages/params/src/forkName.ts +++ b/packages/params/src/forkName.ts @@ -7,6 +7,7 @@ export enum ForkName { bellatrix = "bellatrix", capella = "capella", deneb = "deneb", + electra = "electra", } /** @@ -18,6 +19,7 @@ export enum ForkSeq { bellatrix = 2, capella = 3, deneb = 4, + electra = 5, } function exclude(coll: T[], val: U[]): Exclude[] { @@ -78,3 +80,16 @@ export const forkBlobs = exclude(forkAll, [ForkName.phase0, ForkName.altair, For export function isForkBlobs(fork: ForkName): fork is ForkBlobs { return isForkWithdrawals(fork) && fork !== ForkName.capella; } + +export type ForkPreElectra = ForkPreBlobs | ForkName.deneb; +export type ForkPostElectra = Exclude; +export const forkPostElectra = exclude(forkAll, [ + ForkName.phase0, + ForkName.altair, + ForkName.bellatrix, + ForkName.capella, + ForkName.deneb, +]); +export function isForkPostElectra(fork: ForkName): fork is ForkPostElectra { + return isForkBlobs(fork) && fork !== ForkName.deneb; +} diff --git a/packages/params/src/index.ts b/packages/params/src/index.ts index 6a95e3ca632..e7fd5b97633 100644 --- a/packages/params/src/index.ts +++ b/packages/params/src/index.ts @@ -93,6 +93,21 @@ export const { MAX_BLOB_COMMITMENTS_PER_BLOCK, MAX_BLOBS_PER_BLOCK, KZG_COMMITMENT_INCLUSION_PROOF_DEPTH, + + MAX_EFFECTIVE_BALANCE_ELECTRA, + MIN_ACTIVATION_BALANCE, + PENDING_BALANCE_DEPOSITS_LIMIT, + PENDING_PARTIAL_WITHDRAWALS_LIMIT, + PENDING_CONSOLIDATIONS_LIMIT, + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA, + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD, + + MAX_DEPOSIT_REQUESTS_PER_PAYLOAD, + MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD, + MAX_ATTESTER_SLASHINGS_ELECTRA, + MAX_ATTESTATIONS_ELECTRA, + MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP, + WHISTLEBLOWER_REWARD_QUOTIENT_ELECTRA, } = activePreset; //////////// @@ -114,6 +129,7 @@ export const JUSTIFICATION_BITS_LENGTH = 4; // Since the prefixes are just 1 byte, we define and use them as number export const BLS_WITHDRAWAL_PREFIX = 0; export const ETH1_ADDRESS_WITHDRAWAL_PREFIX = 1; +export 
const COMPOUNDING_WITHDRAWAL_PREFIX = 2; // Domain types @@ -128,7 +144,7 @@ export const DOMAIN_SYNC_COMMITTEE = Uint8Array.from([7, 0, 0, 0]); export const DOMAIN_SYNC_COMMITTEE_SELECTION_PROOF = Uint8Array.from([8, 0, 0, 0]); export const DOMAIN_CONTRIBUTION_AND_PROOF = Uint8Array.from([9, 0, 0, 0]); export const DOMAIN_BLS_TO_EXECUTION_CHANGE = Uint8Array.from([10, 0, 0, 0]); -export const DOMAIN_BLOB_SIDECAR = Uint8Array.from([11, 0, 0, 0]); +export const DOMAIN_CONSOLIDATION = Uint8Array.from([11, 0, 0, 0]); // Application specific domains @@ -244,3 +260,11 @@ export const KZG_COMMITMENT_SUBTREE_INDEX0 = KZG_COMMITMENT_GINDEX0 - 2 ** KZG_C // ssz.deneb.BlobSidecars.elementType.fixedSize export const BLOBSIDECAR_FIXED_SIZE = ACTIVE_PRESET === PresetName.minimal ? 131672 : 131928; + +// Electra Misc +export const UNSET_DEPOSIT_REQUESTS_START_INDEX = 2n ** 64n - 1n; +export const FULL_EXIT_REQUEST_AMOUNT = 0; +export const NEXT_SYNC_COMMITTEE_GINDEX_ELECTRA = 87; +export const NEXT_SYNC_COMMITTEE_DEPTH_ELECTRA = 6; +export const FINALIZED_ROOT_DEPTH_ELECTRA = 7; +export const FINALIZED_ROOT_INDEX_ELECTRA = 169; diff --git a/packages/params/src/presets/mainnet.ts b/packages/params/src/presets/mainnet.ts index 42a705a07f0..a4a88aac1f5 100644 --- a/packages/params/src/presets/mainnet.ts +++ b/packages/params/src/presets/mainnet.ts @@ -118,4 +118,21 @@ export const mainnetPreset: BeaconPreset = { MAX_BLOB_COMMITMENTS_PER_BLOCK: 4096, MAX_BLOBS_PER_BLOCK: 6, KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: 17, + + // ELECTRA + MAX_DEPOSIT_REQUESTS_PER_PAYLOAD: 8192, + MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD: 16, + MAX_ATTESTER_SLASHINGS_ELECTRA: 1, + MAX_ATTESTATIONS_ELECTRA: 8, + MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP: 8, + // 2**11 * 10**9 (= 2,048,000,000,000) Gwei + MAX_EFFECTIVE_BALANCE_ELECTRA: 2048000000000, + // 2**16 (= 65536) + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA: 4096, + MIN_ACTIVATION_BALANCE: 32000000000, + PENDING_BALANCE_DEPOSITS_LIMIT: 134217728, + PENDING_PARTIAL_WITHDRAWALS_LIMIT: 134217728, + PENDING_CONSOLIDATIONS_LIMIT: 262144, + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: 1, + WHISTLEBLOWER_REWARD_QUOTIENT_ELECTRA: 4096, }; diff --git a/packages/params/src/presets/minimal.ts b/packages/params/src/presets/minimal.ts index b940841a042..97eff53cf01 100644 --- a/packages/params/src/presets/minimal.ts +++ b/packages/params/src/presets/minimal.ts @@ -119,4 +119,21 @@ export const minimalPreset: BeaconPreset = { MAX_BLOB_COMMITMENTS_PER_BLOCK: 16, MAX_BLOBS_PER_BLOCK: 6, KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: 9, + + // ELECTRA + MAX_DEPOSIT_REQUESTS_PER_PAYLOAD: 4, + MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD: 2, + MAX_ATTESTER_SLASHINGS_ELECTRA: 1, + MAX_ATTESTATIONS_ELECTRA: 8, + MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP: 1, + // 2**11 * 10**9 (= 2,048,000,000,000) Gwei + MAX_EFFECTIVE_BALANCE_ELECTRA: 2048000000000, + // 2**16 (= 65536) + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA: 4096, + MIN_ACTIVATION_BALANCE: 32000000000, + PENDING_BALANCE_DEPOSITS_LIMIT: 134217728, + PENDING_PARTIAL_WITHDRAWALS_LIMIT: 64, + PENDING_CONSOLIDATIONS_LIMIT: 64, + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: 1, + WHISTLEBLOWER_REWARD_QUOTIENT_ELECTRA: 4096, }; diff --git a/packages/params/src/types.ts b/packages/params/src/types.ts index 3c5ba638113..e867b4a3cf7 100644 --- a/packages/params/src/types.ts +++ b/packages/params/src/types.ts @@ -82,6 +82,21 @@ export type BeaconPreset = { MAX_BLOB_COMMITMENTS_PER_BLOCK: number; MAX_BLOBS_PER_BLOCK: number; KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: number; + + // ELECTRA + 
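// Illustrative sketch (not part of this patch): COMPOUNDING_WITHDRAWAL_PREFIX (0x02) joins the
// existing 0x00 (BLS) and 0x01 (eth1 address) prefixes. The consolidation and deposit changes
// later in this diff import hasExecutionWithdrawalCredential / hasCompoundingWithdrawalCredential;
// the bodies below are reconstructed from the spec predicates, not copied from Lodestar's util:
const ETH1_ADDRESS_WITHDRAWAL_PREFIX = 1;
const COMPOUNDING_WITHDRAWAL_PREFIX = 2;

function hasCompoundingWithdrawalCredential(withdrawalCredentials: Uint8Array): boolean {
  return withdrawalCredentials[0] === COMPOUNDING_WITHDRAWAL_PREFIX;
}

function hasEth1WithdrawalCredential(withdrawalCredentials: Uint8Array): boolean {
  return withdrawalCredentials[0] === ETH1_ADDRESS_WITHDRAWAL_PREFIX;
}

function hasExecutionWithdrawalCredential(withdrawalCredentials: Uint8Array): boolean {
  // Compounding (0x02) and eth1 (0x01) credentials both point at an execution-layer address
  return hasCompoundingWithdrawalCredential(withdrawalCredentials) || hasEth1WithdrawalCredential(withdrawalCredentials);
}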
MAX_DEPOSIT_REQUESTS_PER_PAYLOAD: number; + MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD: number; + MAX_ATTESTER_SLASHINGS_ELECTRA: number; + MAX_ATTESTATIONS_ELECTRA: number; + MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP: number; + MAX_EFFECTIVE_BALANCE_ELECTRA: number; + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA: number; + MIN_ACTIVATION_BALANCE: number; + PENDING_BALANCE_DEPOSITS_LIMIT: number; + PENDING_PARTIAL_WITHDRAWALS_LIMIT: number; + PENDING_CONSOLIDATIONS_LIMIT: number; + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: number; + WHISTLEBLOWER_REWARD_QUOTIENT_ELECTRA: number; }; /** @@ -167,6 +182,21 @@ export const beaconPresetTypes: BeaconPresetTypes = { MAX_BLOB_COMMITMENTS_PER_BLOCK: "number", MAX_BLOBS_PER_BLOCK: "number", KZG_COMMITMENT_INCLUSION_PROOF_DEPTH: "number", + + // ELECTRA + MAX_DEPOSIT_REQUESTS_PER_PAYLOAD: "number", + MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD: "number", + MAX_ATTESTER_SLASHINGS_ELECTRA: "number", + MAX_ATTESTATIONS_ELECTRA: "number", + MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP: "number", + MAX_EFFECTIVE_BALANCE_ELECTRA: "number", + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA: "number", + MIN_ACTIVATION_BALANCE: "number", + PENDING_BALANCE_DEPOSITS_LIMIT: "number", + PENDING_PARTIAL_WITHDRAWALS_LIMIT: "number", + PENDING_CONSOLIDATIONS_LIMIT: "number", + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: "number", + WHISTLEBLOWER_REWARD_QUOTIENT_ELECTRA: "number", }; type BeaconPresetTypes = { diff --git a/packages/params/test/e2e/ensure-config-is-synced.test.ts b/packages/params/test/e2e/ensure-config-is-synced.test.ts index 06fb4bae000..38168fa02ba 100644 --- a/packages/params/test/e2e/ensure-config-is-synced.test.ts +++ b/packages/params/test/e2e/ensure-config-is-synced.test.ts @@ -8,7 +8,7 @@ import {loadConfigYaml} from "../yaml.js"; // Not e2e, but slow. 
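// Illustrative sketch (not part of this patch): MAX_EFFECTIVE_BALANCE_ELECTRA and
// MIN_ACTIVATION_BALANCE added to the preset above are applied per validator: only validators
// with compounding (0x02) withdrawal credentials can hold effective balance above 32 ETH. A
// spec-shaped helper over raw withdrawal credentials rather than Lodestar's tree-backed validator:
const MIN_ACTIVATION_BALANCE = 32_000_000_000; // 32 ETH in Gwei
const MAX_EFFECTIVE_BALANCE_ELECTRA = 2_048_000_000_000; // 2048 ETH in Gwei

function getMaxEffectiveBalance(withdrawalCredentials: Uint8Array): number {
  const COMPOUNDING_WITHDRAWAL_PREFIX = 2;
  return withdrawalCredentials[0] === COMPOUNDING_WITHDRAWAL_PREFIX
    ? MAX_EFFECTIVE_BALANCE_ELECTRA
    : MIN_ACTIVATION_BALANCE;
}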
Run with e2e tests /** https://github.com/ethereum/consensus-specs/releases */ -const specConfigCommit = "v1.4.0-beta.5"; +const specConfigCommit = "v1.5.0-alpha.3"; describe("Ensure config is synced", function () { vi.setConfig({testTimeout: 60 * 1000}); diff --git a/packages/params/test/unit/__snapshots__/forkName.test.ts.snap b/packages/params/test/unit/__snapshots__/forkName.test.ts.snap index 3d7009a7c2e..a54f9dd6913 100644 --- a/packages/params/test/unit/__snapshots__/forkName.test.ts.snap +++ b/packages/params/test/unit/__snapshots__/forkName.test.ts.snap @@ -7,12 +7,14 @@ exports[`forkName > should have valid allForks 1`] = ` "bellatrix", "capella", "deneb", + "electra", ] `; exports[`forkName > should have valid blobs forks 1`] = ` [ "deneb", + "electra", ] `; @@ -21,6 +23,7 @@ exports[`forkName > should have valid execution forks 1`] = ` "bellatrix", "capella", "deneb", + "electra", ] `; @@ -30,6 +33,7 @@ exports[`forkName > should have valid lightclient forks 1`] = ` "bellatrix", "capella", "deneb", + "electra", ] `; @@ -37,5 +41,6 @@ exports[`forkName > should have valid withdrawal forks 1`] = ` [ "capella", "deneb", + "electra", ] `; diff --git a/packages/state-transition/package.json b/packages/state-transition/package.json index 6fcf70f5898..1dd43ef98f8 100644 --- a/packages/state-transition/package.json +++ b/packages/state-transition/package.json @@ -62,12 +62,13 @@ "@chainsafe/blst": "^2.0.3", "@chainsafe/persistent-merkle-tree": "^0.8.0", "@chainsafe/persistent-ts": "^0.19.1", - "@chainsafe/ssz": "^0.17.0", + "@chainsafe/ssz": "^0.17.1", "@lodestar/config": "^1.21.0", "@lodestar/params": "^1.21.0", "@lodestar/types": "^1.21.0", "@lodestar/utils": "^1.21.0", - "bigint-buffer": "^1.1.5" + "bigint-buffer": "^1.1.5", + "immutable": "^4.3.2" }, "keywords": [ "ethereum", diff --git a/packages/state-transition/src/block/index.ts b/packages/state-transition/src/block/index.ts index fdfc9e90351..3857511292c 100644 --- a/packages/state-transition/src/block/index.ts +++ b/packages/state-transition/src/block/index.ts @@ -47,10 +47,12 @@ export function processBlock( // https://github.com/ethereum/consensus-specs/blob/b62c9e877990242d63aa17a2a59a49bc649a2f2e/specs/eip4844/beacon-chain.md#disabling-withdrawals if (fork >= ForkSeq.capella) { processWithdrawals( + fork, state as CachedBeaconStateCapella, fullOrBlindedPayload as capella.FullOrBlindedExecutionPayload ); } + processExecutionPayload(fork, state as CachedBeaconStateBellatrix, block.body, externalData); } diff --git a/packages/state-transition/src/block/initiateValidatorExit.ts b/packages/state-transition/src/block/initiateValidatorExit.ts index e34d4dda700..d1420daef84 100644 --- a/packages/state-transition/src/block/initiateValidatorExit.ts +++ b/packages/state-transition/src/block/initiateValidatorExit.ts @@ -1,7 +1,8 @@ import {CompositeViewDU} from "@chainsafe/ssz"; -import {FAR_FUTURE_EPOCH} from "@lodestar/params"; +import {FAR_FUTURE_EPOCH, ForkSeq} from "@lodestar/params"; import {ssz} from "@lodestar/types"; -import {CachedBeaconStateAllForks} from "../types.js"; +import {CachedBeaconStateAllForks, CachedBeaconStateElectra} from "../types.js"; +import {computeExitEpochAndUpdateChurn} from "../util/epoch.js"; /** * Initiate the exit of the validator with index ``index``. @@ -24,6 +25,7 @@ import {CachedBeaconStateAllForks} from "../types.js"; * Forcing consumers to pass the SubTree of `validator` directly mitigates this issue. 
*/ export function initiateValidatorExit( + fork: ForkSeq, state: CachedBeaconStateAllForks, validator: CompositeViewDU ): void { @@ -34,18 +36,27 @@ export function initiateValidatorExit( return; } - // Limits the number of validators that can exit on each epoch. - // Expects all state.validators to follow this rule, i.e. no validator.exitEpoch is greater than exitQueueEpoch. - // If there the churnLimit is reached at this current exitQueueEpoch, advance epoch and reset churn. - if (epochCtx.exitQueueChurn >= epochCtx.churnLimit) { - epochCtx.exitQueueEpoch += 1; - epochCtx.exitQueueChurn = 1; // = 1 to account for this validator with exitQueueEpoch + if (fork < ForkSeq.electra) { + // Limits the number of validators that can exit on each epoch. + // Expects all state.validators to follow this rule, i.e. no validator.exitEpoch is greater than exitQueueEpoch. + // If there the churnLimit is reached at this current exitQueueEpoch, advance epoch and reset churn. + if (epochCtx.exitQueueChurn >= epochCtx.churnLimit) { + epochCtx.exitQueueEpoch += 1; + epochCtx.exitQueueChurn = 1; // = 1 to account for this validator with exitQueueEpoch + } else { + // Add this validator to the current exitQueueEpoch churn + epochCtx.exitQueueChurn += 1; + } + + // set validator exit epoch + validator.exitEpoch = epochCtx.exitQueueEpoch; } else { - // Add this validator to the current exitQueueEpoch churn - epochCtx.exitQueueChurn += 1; + // set validator exit epoch + // Note we don't use epochCtx.exitQueueChurn and exitQueueEpoch anymore + validator.exitEpoch = computeExitEpochAndUpdateChurn( + state as CachedBeaconStateElectra, + BigInt(validator.effectiveBalance) + ); } - - // set validator exit epoch and withdrawable epoch - validator.exitEpoch = epochCtx.exitQueueEpoch; - validator.withdrawableEpoch = epochCtx.exitQueueEpoch + config.MIN_VALIDATOR_WITHDRAWABILITY_DELAY; + validator.withdrawableEpoch = validator.exitEpoch + config.MIN_VALIDATOR_WITHDRAWABILITY_DELAY; } diff --git a/packages/state-transition/src/block/isValidIndexedAttestation.ts b/packages/state-transition/src/block/isValidIndexedAttestation.ts index e3965b97ee7..33d92a20826 100644 --- a/packages/state-transition/src/block/isValidIndexedAttestation.ts +++ b/packages/state-transition/src/block/isValidIndexedAttestation.ts @@ -1,4 +1,4 @@ -import {MAX_VALIDATORS_PER_COMMITTEE} from "@lodestar/params"; +import {ForkSeq, MAX_COMMITTEES_PER_SLOT, MAX_VALIDATORS_PER_COMMITTEE} from "@lodestar/params"; import {phase0} from "@lodestar/types"; import {CachedBeaconStateAllForks} from "../types.js"; import {verifySignatureSet} from "../util/index.js"; @@ -44,7 +44,11 @@ export function isValidIndexedAttestationBigint( */ export function isValidIndexedAttestationIndices(state: CachedBeaconStateAllForks, indices: number[]): boolean { // verify max number of indices - if (!(indices.length > 0 && indices.length <= MAX_VALIDATORS_PER_COMMITTEE)) { + const maxIndices = + state.config.getForkSeq(state.slot) >= ForkSeq.electra + ? 
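// Illustrative sketch (not part of this patch): post-Electra, initiateValidatorExit above defers
// to computeExitEpochAndUpdateChurn (util/epoch.ts), which rate-limits exits by exited balance
// rather than by validator count. A spec-shaped version over a plain object standing in for the
// Electra state fields earliestExitEpoch / exitBalanceToConsume:
type ExitChurnState = {
  currentEpoch: number;
  earliestExitEpoch: number;
  exitBalanceToConsume: bigint; // Gwei still available in the churn of earliestExitEpoch
  activationExitChurnLimit: bigint; // Gwei of exits allowed per epoch
};

const MAX_SEED_LOOKAHEAD = 4;

function computeExitEpochAndUpdateChurnSketch(state: ExitChurnState, exitBalance: bigint): number {
  // Exits can never be scheduled before the activation-exit lookahead
  let earliestExitEpoch = Math.max(state.earliestExitEpoch, state.currentEpoch + 1 + MAX_SEED_LOOKAHEAD);
  const perEpochChurn = state.activationExitChurnLimit;

  // New epoch for exits: start with a full churn allowance
  let exitBalanceToConsume = state.earliestExitEpoch < earliestExitEpoch ? perEpochChurn : state.exitBalanceToConsume;

  // Exit does not fit in the current earliest epoch: push it out by as many epochs as needed
  if (exitBalance > exitBalanceToConsume) {
    const balanceToProcess = exitBalance - exitBalanceToConsume;
    const additionalEpochs = (balanceToProcess - 1n) / perEpochChurn + 1n;
    earliestExitEpoch += Number(additionalEpochs);
    exitBalanceToConsume += additionalEpochs * perEpochChurn;
  }

  // Consume the balance and persist the updated churn state
  state.exitBalanceToConsume = exitBalanceToConsume - exitBalance;
  state.earliestExitEpoch = earliestExitEpoch;
  return earliestExitEpoch;
}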
MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT + : MAX_VALIDATORS_PER_COMMITTEE; + if (!(indices.length > 0 && indices.length <= maxIndices)) { return false; } diff --git a/packages/state-transition/src/block/processAttestationPhase0.ts b/packages/state-transition/src/block/processAttestationPhase0.ts index 95fc575003e..ba6bc908969 100644 --- a/packages/state-transition/src/block/processAttestationPhase0.ts +++ b/packages/state-transition/src/block/processAttestationPhase0.ts @@ -1,7 +1,7 @@ -import {Slot, phase0, ssz} from "@lodestar/types"; - -import {MIN_ATTESTATION_INCLUSION_DELAY, SLOTS_PER_EPOCH, ForkSeq} from "@lodestar/params"; import {toRootHex} from "@lodestar/utils"; +import {Slot, Attestation, electra, phase0, ssz} from "@lodestar/types"; +import {MIN_ATTESTATION_INCLUSION_DELAY, SLOTS_PER_EPOCH, ForkSeq} from "@lodestar/params"; +import {assert} from "@lodestar/utils"; import {computeEpochAtSlot} from "../util/index.js"; import {CachedBeaconStatePhase0, CachedBeaconStateAllForks} from "../types.js"; import {isValidIndexedAttestation} from "./index.js"; @@ -51,27 +51,18 @@ export function processAttestationPhase0( state.previousEpochAttestations.push(pendingAttestation); } - if (!isValidIndexedAttestation(state, epochCtx.getIndexedAttestation(attestation), verifySignature)) { + if (!isValidIndexedAttestation(state, epochCtx.getIndexedAttestation(ForkSeq.phase0, attestation), verifySignature)) { throw new Error("Attestation is not valid"); } } -export function validateAttestation( - fork: ForkSeq, - state: CachedBeaconStateAllForks, - attestation: phase0.Attestation -): void { +export function validateAttestation(fork: ForkSeq, state: CachedBeaconStateAllForks, attestation: Attestation): void { const {epochCtx} = state; const slot = state.slot; const data = attestation.data; const computedEpoch = computeEpochAtSlot(data.slot); const committeeCount = epochCtx.getCommitteeCountPerSlot(computedEpoch); - if (!(data.index < committeeCount)) { - throw new Error( - "Attestation committee index not within current committee count: " + - `committeeIndex=${data.index} committeeCount=${committeeCount}` - ); - } + if (!(data.target.epoch === epochCtx.previousShuffling.epoch || data.target.epoch === epochCtx.epoch)) { throw new Error( "Attestation target epoch not in previous or current epoch: " + @@ -93,12 +84,47 @@ export function validateAttestation( ); } - const committee = epochCtx.getBeaconCommittee(data.slot, data.index); - if (attestation.aggregationBits.bitLen !== committee.length) { - throw new Error( - "Attestation aggregation bits length does not match committee length: " + - `aggregationBitsLength=${attestation.aggregationBits.bitLen} committeeLength=${committee.length}` + if (fork >= ForkSeq.electra) { + assert.equal(data.index, 0, `AttestationData.index must be zero: index=${data.index}`); + const attestationElectra = attestation as electra.Attestation; + const committeeIndices = attestationElectra.committeeBits.getTrueBitIndexes(); + + if (committeeIndices.length === 0) { + throw Error("Attestation should have at least one committee bit set"); + } else { + const lastCommitteeIndex = committeeIndices[committeeIndices.length - 1]; + if (lastCommitteeIndex >= committeeCount) { + throw new Error( + `Attestation committee index exceeds committee count: lastCommitteeIndex=${lastCommitteeIndex} numCommittees=${committeeCount}` + ); + } + } + + // Get total number of attestation participant of every committee specified + const participantCount = committeeIndices + 
.map((committeeIndex) => epochCtx.getBeaconCommittee(data.slot, committeeIndex).length) + .reduce((acc, committeeSize) => acc + committeeSize, 0); + + assert.equal( + attestationElectra.aggregationBits.bitLen, + participantCount, + `Attestation aggregation bits length does not match total number of committee participant aggregationBitsLength=${attestation.aggregationBits.bitLen} participantCount=${participantCount}` ); + } else { + if (!(data.index < committeeCount)) { + throw new Error( + "Attestation committee index not within current committee count: " + + `committeeIndex=${data.index} committeeCount=${committeeCount}` + ); + } + + const committee = epochCtx.getBeaconCommittee(data.slot, data.index); + if (attestation.aggregationBits.bitLen !== committee.length) { + throw new Error( + "Attestation aggregation bits length does not match committee length: " + + `aggregationBitsLength=${attestation.aggregationBits.bitLen} committeeLength=${committee.length}` + ); + } } } diff --git a/packages/state-transition/src/block/processAttestations.ts b/packages/state-transition/src/block/processAttestations.ts index 2b132fa22e0..844bda76857 100644 --- a/packages/state-transition/src/block/processAttestations.ts +++ b/packages/state-transition/src/block/processAttestations.ts @@ -1,4 +1,4 @@ -import {phase0} from "@lodestar/types"; +import {Attestation} from "@lodestar/types"; import {ForkSeq} from "@lodestar/params"; import {CachedBeaconStateAllForks, CachedBeaconStateAltair, CachedBeaconStatePhase0} from "../types.js"; import {processAttestationPhase0} from "./processAttestationPhase0.js"; @@ -10,7 +10,7 @@ import {processAttestationsAltair} from "./processAttestationsAltair.js"; export function processAttestations( fork: ForkSeq, state: CachedBeaconStateAllForks, - attestations: phase0.Attestation[], + attestations: Attestation[], verifySignatures = true ): void { if (fork === ForkSeq.phase0) { diff --git a/packages/state-transition/src/block/processAttestationsAltair.ts b/packages/state-transition/src/block/processAttestationsAltair.ts index e3762971219..046a23d7dc2 100644 --- a/packages/state-transition/src/block/processAttestationsAltair.ts +++ b/packages/state-transition/src/block/processAttestationsAltair.ts @@ -1,5 +1,5 @@ import {byteArrayEquals} from "@chainsafe/ssz"; -import {Epoch, phase0} from "@lodestar/types"; +import {Epoch, Attestation, phase0} from "@lodestar/types"; import {intSqrt} from "@lodestar/utils"; import { @@ -32,7 +32,7 @@ const SLOTS_PER_EPOCH_SQRT = intSqrt(SLOTS_PER_EPOCH); export function processAttestationsAltair( fork: ForkSeq, state: CachedBeaconStateAltair, - attestations: phase0.Attestation[], + attestations: Attestation[], verifySignature = true ): void { const {epochCtx} = state; @@ -49,8 +49,7 @@ export function processAttestationsAltair( validateAttestation(fork, state, attestation); // Retrieve the validator indices from the attestation participation bitfield - const committeeIndices = epochCtx.getBeaconCommittee(data.slot, data.index); - const attestingIndices = attestation.aggregationBits.intersectValues(committeeIndices); + const attestingIndices = epochCtx.getAttestingIndices(fork, attestation); // this check is done last because its the most expensive (if signature verification is toggled on) // TODO: Why should we verify an indexed attestation that we just created? 
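// Illustrative sketch (not part of this patch): for Electra attestations the aggregationBits no
// longer map to a single committee; they concatenate the members of every committee whose bit is
// set in committeeBits, in committee-index order. The participantCount assertion and
// epochCtx.getAttestingIndices(fork, attestation) above follow this rule. A spec-shaped version
// over plain arrays, where committees[i] holds the validator indices of committee i at the slot:
function getElectraAttestingIndices(
  committees: number[][],
  committeeBits: boolean[],
  aggregationBits: boolean[]
): number[] {
  const attesting: number[] = [];
  let committeeOffset = 0;
  for (let committeeIndex = 0; committeeIndex < committeeBits.length; committeeIndex++) {
    if (!committeeBits[committeeIndex]) continue;
    const committee = committees[committeeIndex];
    for (let i = 0; i < committee.length; i++) {
      if (aggregationBits[committeeOffset + i]) attesting.push(committee[i]);
    }
    committeeOffset += committee.length;
  }
  // aggregationBits.length must equal committeeOffset, mirroring the participantCount check above
  return attesting;
}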
If it's just for the signature @@ -76,6 +75,7 @@ export function processAttestationsAltair( // For each participant, update their participation // In epoch processing, this participation info is used to calculate balance updates let totalBalanceIncrementsWithWeight = 0; + const validators = state.validators; for (const index of attestingIndices) { const flags = epochParticipation.get(index); @@ -105,7 +105,7 @@ export function processAttestationsAltair( // TODO: describe issue. Compute progressive target balances // When processing each attestation, increase the cummulative target balance. Only applies post-altair if ((flagsNewSet & TIMELY_TARGET) === TIMELY_TARGET) { - const validator = state.validators.getReadonly(index); + const validator = validators.getReadonly(index); if (!validator.slashed) { if (inCurrentEpoch) { epochCtx.currentTargetUnslashedBalanceIncrements += effectiveBalanceIncrements[index]; diff --git a/packages/state-transition/src/block/processConsolidationRequest.ts b/packages/state-transition/src/block/processConsolidationRequest.ts new file mode 100644 index 00000000000..71b85e92733 --- /dev/null +++ b/packages/state-transition/src/block/processConsolidationRequest.ts @@ -0,0 +1,73 @@ +import {electra, ssz} from "@lodestar/types"; +import {FAR_FUTURE_EPOCH, MIN_ACTIVATION_BALANCE, PENDING_CONSOLIDATIONS_LIMIT} from "@lodestar/params"; + +import {CachedBeaconStateElectra} from "../types.js"; +import {getConsolidationChurnLimit, isActiveValidator} from "../util/validator.js"; +import {hasExecutionWithdrawalCredential} from "../util/electra.js"; +import {computeConsolidationEpochAndUpdateChurn} from "../util/epoch.js"; + +export function processConsolidationRequest( + state: CachedBeaconStateElectra, + consolidationRequest: electra.ConsolidationRequest +): void { + // If the pending consolidations queue is full, consolidation requests are ignored + if (state.pendingConsolidations.length >= PENDING_CONSOLIDATIONS_LIMIT) { + return; + } + + // If there is too little available consolidation churn limit, consolidation requests are ignored + if (getConsolidationChurnLimit(state.epochCtx) <= MIN_ACTIVATION_BALANCE) { + return; + } + + const {sourcePubkey, targetPubkey} = consolidationRequest; + const sourceIndex = state.epochCtx.getValidatorIndex(sourcePubkey); + const targetIndex = state.epochCtx.getValidatorIndex(targetPubkey); + + if (sourceIndex === undefined || targetIndex === undefined) { + return; + } + + // Verify that source != target, so a consolidation cannot be used as an exit. 
+ if (sourceIndex === targetIndex) { + return; + } + + const sourceValidator = state.validators.get(sourceIndex); + const targetValidator = state.validators.getReadonly(targetIndex); + const sourceWithdrawalAddress = sourceValidator.withdrawalCredentials.subarray(12); + const currentEpoch = state.epochCtx.epoch; + + // Verify withdrawal credentials + if ( + !hasExecutionWithdrawalCredential(sourceValidator.withdrawalCredentials) || + !hasExecutionWithdrawalCredential(targetValidator.withdrawalCredentials) + ) { + return; + } + + if (Buffer.compare(sourceWithdrawalAddress, consolidationRequest.sourceAddress) !== 0) { + return; + } + + // Verify the source and the target are active + if (!isActiveValidator(sourceValidator, currentEpoch) || !isActiveValidator(targetValidator, currentEpoch)) { + return; + } + + // Verify exits for source and target have not been initiated + if (sourceValidator.exitEpoch !== FAR_FUTURE_EPOCH || targetValidator.exitEpoch !== FAR_FUTURE_EPOCH) { + return; + } + + // TODO Electra: See if we can get rid of big int + const exitEpoch = computeConsolidationEpochAndUpdateChurn(state, BigInt(sourceValidator.effectiveBalance)); + sourceValidator.exitEpoch = exitEpoch; + sourceValidator.withdrawableEpoch = exitEpoch + state.config.MIN_VALIDATOR_WITHDRAWABILITY_DELAY; + + const pendingConsolidation = ssz.electra.PendingConsolidation.toViewDU({ + sourceIndex, + targetIndex, + }); + state.pendingConsolidations.push(pendingConsolidation); +} diff --git a/packages/state-transition/src/block/processDeposit.ts b/packages/state-transition/src/block/processDeposit.ts index 7ade79fd773..7e343d7fc33 100644 --- a/packages/state-transition/src/block/processDeposit.ts +++ b/packages/state-transition/src/block/processDeposit.ts @@ -1,5 +1,5 @@ import {PublicKey, Signature, verify} from "@chainsafe/blst"; -import {phase0, ssz} from "@lodestar/types"; +import {BLSPubkey, Bytes32, UintNum64, phase0, ssz} from "@lodestar/types"; import {verifyMerkleBranch} from "@lodestar/utils"; import { @@ -11,9 +11,19 @@ import { MAX_EFFECTIVE_BALANCE, } from "@lodestar/params"; +import {DepositData} from "@lodestar/types/lib/phase0/types.js"; +import {DepositRequest} from "@lodestar/types/lib/electra/types.js"; +import {BeaconConfig} from "@lodestar/config"; import {ZERO_HASH} from "../constants/index.js"; -import {computeDomain, computeSigningRoot, increaseBalance} from "../util/index.js"; -import {CachedBeaconStateAllForks, CachedBeaconStateAltair} from "../types.js"; +import { + computeDomain, + computeSigningRoot, + hasCompoundingWithdrawalCredential, + hasEth1WithdrawalCredential, + increaseBalance, + switchToCompoundingValidator, +} from "../util/index.js"; +import {CachedBeaconStateAllForks, CachedBeaconStateAltair, CachedBeaconStateElectra} from "../types.js"; /** * Process a Deposit operation. Potentially adds a new validator to the registry. Mutates the validators and balances @@ -22,8 +32,6 @@ import {CachedBeaconStateAllForks, CachedBeaconStateAltair} from "../types.js"; * PERF: Work depends on number of Deposit per block. On regular networks the average is 0 / block. 
*/ export function processDeposit(fork: ForkSeq, state: CachedBeaconStateAllForks, deposit: phase0.Deposit): void { - const {config, validators, epochCtx} = state; - // verify the merkle branch if ( !verifyMerkleBranch( @@ -40,68 +48,132 @@ export function processDeposit(fork: ForkSeq, state: CachedBeaconStateAllForks, // deposits must be processed in order state.eth1DepositIndex += 1; - const pubkey = deposit.data.pubkey; // Drop tree - const amount = deposit.data.amount; - const cachedIndex = epochCtx.pubkey2index.get(pubkey); + applyDeposit(fork, state, deposit.data); +} + +/** + * Adds a new validator into the registry, or increases the balance if the validator already exists. + * Follows applyDeposit() in the consensus spec. Used by processDeposit() and processDepositRequest(). + * + */ +export function applyDeposit( + fork: ForkSeq, + state: CachedBeaconStateAllForks, + deposit: DepositData | DepositRequest +): void { + const {config, validators, epochCtx} = state; + const {pubkey, withdrawalCredentials, amount} = deposit; + + const cachedIndex = epochCtx.getValidatorIndex(pubkey); if (cachedIndex === undefined || !Number.isSafeInteger(cachedIndex) || cachedIndex >= validators.length) { - // verify the deposit signature (proof of posession) which is not checked by the deposit contract - const depositMessage = { - pubkey: deposit.data.pubkey, // Retain tree for hashing - withdrawalCredentials: deposit.data.withdrawalCredentials, // Retain tree for hashing - amount: deposit.data.amount, - }; - // fork-agnostic domain since deposits are valid across forks - const domain = computeDomain(DOMAIN_DEPOSIT, config.GENESIS_FORK_VERSION, ZERO_HASH); - const signingRoot = computeSigningRoot(ssz.phase0.DepositMessage, depositMessage, domain); - try { - // Pubkeys must be checked for group + inf.
This must be done only once when the validator deposit is processed - const publicKey = PublicKey.fromBytes(pubkey, true); - const signature = Signature.fromBytes(deposit.data.signature, true); - if (!verify(signingRoot, publicKey, signature)) { - return; + if (isValidDepositSignature(config, pubkey, withdrawalCredentials, amount, deposit.signature)) { + addValidatorToRegistry(fork, state, pubkey, withdrawalCredentials, amount); + } + } else { + if (fork < ForkSeq.electra) { + // increase balance by deposit amount right away pre-electra + increaseBalance(state, cachedIndex, amount); + } else if (fork >= ForkSeq.electra) { + const stateElectra = state as CachedBeaconStateElectra; + const pendingBalanceDeposit = ssz.electra.PendingBalanceDeposit.toViewDU({ + index: cachedIndex, + amount: BigInt(amount), + }); + stateElectra.pendingBalanceDeposits.push(pendingBalanceDeposit); + + if ( + hasCompoundingWithdrawalCredential(withdrawalCredentials) && + hasEth1WithdrawalCredential(validators.getReadonly(cachedIndex).withdrawalCredentials) && + isValidDepositSignature(config, pubkey, withdrawalCredentials, amount, deposit.signature) + ) { + switchToCompoundingValidator(stateElectra, cachedIndex); } - } catch (e) { - return; // Catch all BLS errors: failed key validation, failed signature validation, invalid signature } + } +} - // add validator and balance entries - const effectiveBalance = Math.min(amount - (amount % EFFECTIVE_BALANCE_INCREMENT), MAX_EFFECTIVE_BALANCE); - validators.push( - ssz.phase0.Validator.toViewDU({ - pubkey, - withdrawalCredentials: deposit.data.withdrawalCredentials, - activationEligibilityEpoch: FAR_FUTURE_EPOCH, - activationEpoch: FAR_FUTURE_EPOCH, - exitEpoch: FAR_FUTURE_EPOCH, - withdrawableEpoch: FAR_FUTURE_EPOCH, - effectiveBalance, - slashed: false, - }) - ); - state.balances.push(amount); +function addValidatorToRegistry( + fork: ForkSeq, + state: CachedBeaconStateAllForks, + pubkey: BLSPubkey, + withdrawalCredentials: Bytes32, + amount: UintNum64 +): void { + const {validators, epochCtx} = state; + // add validator and balance entries + const effectiveBalance = + fork < ForkSeq.electra ? 
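// Illustrative sketch (hypothetical values, mirroring the electra branch of applyDeposit() above):
// post-electra, a top-up for an existing validator is queued rather than credited immediately,
// e.g. a 1 ETH (1_000_000_000 Gwei) top-up for validator index 42:
//   const topUp = ssz.electra.PendingBalanceDeposit.toViewDU({index: 42, amount: BigInt(1_000_000_000)});
//   stateElectra.pendingBalanceDeposits.push(topUp);
// The queued amount is credited later by processPendingBalanceDeposits() during epoch processing.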
Math.min(amount - (amount % EFFECTIVE_BALANCE_INCREMENT), MAX_EFFECTIVE_BALANCE) : 0; + validators.push( + ssz.phase0.Validator.toViewDU({ + pubkey, + withdrawalCredentials, + activationEligibilityEpoch: FAR_FUTURE_EPOCH, + activationEpoch: FAR_FUTURE_EPOCH, + exitEpoch: FAR_FUTURE_EPOCH, + withdrawableEpoch: FAR_FUTURE_EPOCH, + effectiveBalance, + slashed: false, + }) + ); - const validatorIndex = validators.length - 1; - // Updating here is better than updating at once on epoch transition - // - Simplify genesis fn applyDeposits(): effectiveBalanceIncrements is populated immediately - // - Keep related code together to reduce risk of breaking this cache - // - Should have equal performance since it sets a value in a flat array - epochCtx.effectiveBalanceIncrementsSet(validatorIndex, effectiveBalance); + const validatorIndex = validators.length - 1; + // TODO Electra: Review this + // Updating here is better than updating at once on epoch transition + // - Simplify genesis fn applyDeposits(): effectiveBalanceIncrements is populated immediately + // - Keep related code together to reduce risk of breaking this cache + // - Should have equal performance since it sets a value in a flat array + epochCtx.effectiveBalanceIncrementsSet(validatorIndex, effectiveBalance); - // now that there is a new validator, update the epoch context with the new pubkey - epochCtx.addPubkey(validatorIndex, pubkey); + // now that there is a new validator, update the epoch context with the new pubkey + epochCtx.addPubkey(validatorIndex, pubkey); - // Only after altair: - if (fork >= ForkSeq.altair) { - const stateAltair = state as CachedBeaconStateAltair; + // Only after altair: + if (fork >= ForkSeq.altair) { + const stateAltair = state as CachedBeaconStateAltair; - stateAltair.inactivityScores.push(0); + stateAltair.inactivityScores.push(0); - // add participation caches - stateAltair.previousEpochParticipation.push(0); - stateAltair.currentEpochParticipation.push(0); - } - } else { - // increase balance by deposit amount - increaseBalance(state, cachedIndex, amount); + // add participation caches + stateAltair.previousEpochParticipation.push(0); + stateAltair.currentEpochParticipation.push(0); + } + + if (fork < ForkSeq.electra) { + state.balances.push(amount); + } else if (fork >= ForkSeq.electra) { + state.balances.push(0); + const stateElectra = state as CachedBeaconStateElectra; + const pendingBalanceDeposit = ssz.electra.PendingBalanceDeposit.toViewDU({ + index: validatorIndex, + amount: BigInt(amount), + }); + stateElectra.pendingBalanceDeposits.push(pendingBalanceDeposit); + } +} + +function isValidDepositSignature( + config: BeaconConfig, + pubkey: Uint8Array, + withdrawalCredentials: Uint8Array, + amount: number, + depositSignature: Uint8Array +): boolean { + // verify the deposit signature (proof of posession) which is not checked by the deposit contract + const depositMessage = { + pubkey, + withdrawalCredentials, + amount, + }; + // fork-agnostic domain since deposits are valid across forks + const domain = computeDomain(DOMAIN_DEPOSIT, config.GENESIS_FORK_VERSION, ZERO_HASH); + const signingRoot = computeSigningRoot(ssz.phase0.DepositMessage, depositMessage, domain); + try { + // Pubkeys must be checked for group + inf. 
This must be done only once when the validator deposit is processed + const publicKey = PublicKey.fromBytes(pubkey, true); + const signature = Signature.fromBytes(depositSignature, true); + + return verify(signingRoot, publicKey, signature); + } catch (e) { + return false; // Catch all BLS errors: failed key validation, failed signature validation, invalid signature } } diff --git a/packages/state-transition/src/block/processDepositRequest.ts b/packages/state-transition/src/block/processDepositRequest.ts new file mode 100644 index 00000000000..e5dd99a40c4 --- /dev/null +++ b/packages/state-transition/src/block/processDepositRequest.ts @@ -0,0 +1,17 @@ +import {electra} from "@lodestar/types"; +import {ForkSeq, UNSET_DEPOSIT_REQUESTS_START_INDEX} from "@lodestar/params"; + +import {CachedBeaconStateElectra} from "../types.js"; +import {applyDeposit} from "./processDeposit.js"; + +export function processDepositRequest( + fork: ForkSeq, + state: CachedBeaconStateElectra, + depositRequest: electra.DepositRequest +): void { + if (state.depositRequestsStartIndex === UNSET_DEPOSIT_REQUESTS_START_INDEX) { + state.depositRequestsStartIndex = BigInt(depositRequest.index); + } + + applyDeposit(fork, state, depositRequest); +} diff --git a/packages/state-transition/src/block/processOperations.ts b/packages/state-transition/src/block/processOperations.ts index 38716bb42a4..6f61e7c242f 100644 --- a/packages/state-transition/src/block/processOperations.ts +++ b/packages/state-transition/src/block/processOperations.ts @@ -1,14 +1,18 @@ -import {BeaconBlockBody, capella} from "@lodestar/types"; -import {ForkSeq, MAX_DEPOSITS} from "@lodestar/params"; +import {BeaconBlockBody, capella, electra} from "@lodestar/types"; +import {ForkSeq} from "@lodestar/params"; -import {CachedBeaconStateAllForks, CachedBeaconStateCapella} from "../types.js"; +import {CachedBeaconStateAllForks, CachedBeaconStateCapella, CachedBeaconStateElectra} from "../types.js"; +import {getEth1DepositCount} from "../util/deposit.js"; import {processAttestations} from "./processAttestations.js"; import {processProposerSlashing} from "./processProposerSlashing.js"; import {processAttesterSlashing} from "./processAttesterSlashing.js"; import {processDeposit} from "./processDeposit.js"; import {processVoluntaryExit} from "./processVoluntaryExit.js"; import {processBlsToExecutionChange} from "./processBlsToExecutionChange.js"; +import {processWithdrawalRequest} from "./processWithdrawalRequest.js"; +import {processDepositRequest} from "./processDepositRequest.js"; import {ProcessBlockOpts} from "./types.js"; +import {processConsolidationRequest} from "./processConsolidationRequest.js"; export { processProposerSlashing, @@ -16,7 +20,10 @@ export { processAttestations, processDeposit, processVoluntaryExit, + processWithdrawalRequest, processBlsToExecutionChange, + processDepositRequest, + processConsolidationRequest, }; export function processOperations( @@ -26,7 +33,7 @@ export function processOperations( opts: ProcessBlockOpts = {verifySignatures: true} ): void { // verify that outstanding deposits are processed up to the maximum number of deposits - const maxDeposits = Math.min(MAX_DEPOSITS, state.eth1Data.depositCount - state.eth1DepositIndex); + const maxDeposits = getEth1DepositCount(state); if (body.deposits.length !== maxDeposits) { throw new Error( `Block contains incorrect number of deposits: depositCount=${body.deposits.length} expected=${maxDeposits}` @@ -45,8 +52,9 @@ export function processOperations( for (const deposit of 
body.deposits) { processDeposit(fork, state, deposit); } + for (const voluntaryExit of body.voluntaryExits) { - processVoluntaryExit(state, voluntaryExit, opts.verifySignatures); + processVoluntaryExit(fork, state, voluntaryExit, opts.verifySignatures); } if (fork >= ForkSeq.capella) { @@ -54,4 +62,21 @@ export function processOperations( processBlsToExecutionChange(state as CachedBeaconStateCapella, blsToExecutionChange); } } + + if (fork >= ForkSeq.electra) { + const stateElectra = state as CachedBeaconStateElectra; + const bodyElectra = body as electra.BeaconBlockBody; + + for (const depositRequest of bodyElectra.executionPayload.depositRequests) { + processDepositRequest(fork, stateElectra, depositRequest); + } + + for (const elWithdrawalRequest of bodyElectra.executionPayload.withdrawalRequests) { + processWithdrawalRequest(fork, stateElectra, elWithdrawalRequest); + } + + for (const elConsolidationRequest of bodyElectra.executionPayload.consolidationRequests) { + processConsolidationRequest(stateElectra, elConsolidationRequest); + } + } } diff --git a/packages/state-transition/src/block/processVoluntaryExit.ts b/packages/state-transition/src/block/processVoluntaryExit.ts index 80982623a44..b08aa780088 100644 --- a/packages/state-transition/src/block/processVoluntaryExit.ts +++ b/packages/state-transition/src/block/processVoluntaryExit.ts @@ -1,7 +1,7 @@ -import {FAR_FUTURE_EPOCH} from "@lodestar/params"; +import {FAR_FUTURE_EPOCH, ForkSeq} from "@lodestar/params"; import {phase0} from "@lodestar/types"; -import {isActiveValidator} from "../util/index.js"; -import {CachedBeaconStateAllForks} from "../types.js"; +import {getPendingBalanceToWithdraw, isActiveValidator} from "../util/index.js"; +import {CachedBeaconStateAllForks, CachedBeaconStateElectra} from "../types.js"; import {verifyVoluntaryExitSignature} from "../signatureSets/index.js"; import {initiateValidatorExit} from "./index.js"; @@ -11,16 +11,21 @@ import {initiateValidatorExit} from "./index.js"; * PERF: Work depends on number of VoluntaryExit per block. On regular networks the average is 0 / block. */ export function processVoluntaryExit( + fork: ForkSeq, state: CachedBeaconStateAllForks, signedVoluntaryExit: phase0.SignedVoluntaryExit, verifySignature = true ): void { - if (!isValidVoluntaryExit(state, signedVoluntaryExit, verifySignature)) { - throw Error("Invalid voluntary exit"); + const isValidExit = + fork >= ForkSeq.electra + ? 
isValidVoluntaryExitElectra(state as CachedBeaconStateElectra, signedVoluntaryExit, verifySignature) + : isValidVoluntaryExit(state, signedVoluntaryExit, verifySignature); + if (!isValidExit) { + throw Error(`Invalid voluntary exit at forkSeq=${fork}`); } const validator = state.validators.get(signedVoluntaryExit.message.validatorIndex); - initiateValidatorExit(state, validator); + initiateValidatorExit(fork, state, validator); } export function isValidVoluntaryExit( @@ -46,3 +51,16 @@ export function isValidVoluntaryExit( (!verifySignature || verifyVoluntaryExitSignature(state, signedVoluntaryExit)) ); } + +function isValidVoluntaryExitElectra( + state: CachedBeaconStateElectra, + signedVoluntaryExit: phase0.SignedVoluntaryExit, + verifySignature = true +): boolean { + // only exit validator if it has no pending withdrawals in the queue (post-Electra only) + if (getPendingBalanceToWithdraw(state, signedVoluntaryExit.message.validatorIndex) === 0) { + return isValidVoluntaryExit(state, signedVoluntaryExit, verifySignature); + } + + return false; +} diff --git a/packages/state-transition/src/block/processWithdrawalRequest.ts b/packages/state-transition/src/block/processWithdrawalRequest.ts new file mode 100644 index 00000000000..cff1ea03bd8 --- /dev/null +++ b/packages/state-transition/src/block/processWithdrawalRequest.ts @@ -0,0 +1,99 @@ +import {toHexString} from "@chainsafe/ssz"; +import {electra, phase0, ssz} from "@lodestar/types"; +import { + FAR_FUTURE_EPOCH, + MIN_ACTIVATION_BALANCE, + PENDING_PARTIAL_WITHDRAWALS_LIMIT, + FULL_EXIT_REQUEST_AMOUNT, + ForkSeq, +} from "@lodestar/params"; + +import {CachedBeaconStateElectra} from "../types.js"; +import {hasCompoundingWithdrawalCredential, hasExecutionWithdrawalCredential} from "../util/electra.js"; +import {getPendingBalanceToWithdraw, isActiveValidator} from "../util/validator.js"; +import {computeExitEpochAndUpdateChurn} from "../util/epoch.js"; +import {initiateValidatorExit} from "./initiateValidatorExit.js"; + +export function processWithdrawalRequest( + fork: ForkSeq, + state: CachedBeaconStateElectra, + withdrawalRequest: electra.WithdrawalRequest +): void { + const amount = Number(withdrawalRequest.amount); + const {pendingPartialWithdrawals, validators, epochCtx} = state; + // no need to use unfinalized pubkey cache from 6110 as validator won't be active anyway + const {pubkey2index, config} = epochCtx; + const isFullExitRequest = amount === FULL_EXIT_REQUEST_AMOUNT; + + // If partial withdrawal queue is full, only full exits are processed + if (pendingPartialWithdrawals.length >= PENDING_PARTIAL_WITHDRAWALS_LIMIT && !isFullExitRequest) { + return; + } + + // bail out if validator is not in beacon state + // note that we don't need to check for 6110 unfinalized vals as they won't be eligible for withdraw/exit anyway + const validatorIndex = pubkey2index.get(withdrawalRequest.validatorPubkey); + if (validatorIndex === undefined) { + return; + } + + const validator = validators.get(validatorIndex); + if (!isValidatorEligibleForWithdrawOrExit(validator, withdrawalRequest.sourceAddress, state)) { + return; + } + + // TODO Electra: Consider caching pendingPartialWithdrawals + const pendingBalanceToWithdraw = getPendingBalanceToWithdraw(state, validatorIndex); + const validatorBalance = state.balances.get(validatorIndex); + + if (isFullExitRequest) { + // only exit validator if it has no pending withdrawals in the queue + if (pendingBalanceToWithdraw === 0) { + initiateValidatorExit(fork, state, validator); + } + return; + } + + // 
partial withdrawal request + const hasSufficientEffectiveBalance = validator.effectiveBalance >= MIN_ACTIVATION_BALANCE; + const hasExcessBalance = validatorBalance > MIN_ACTIVATION_BALANCE + pendingBalanceToWithdraw; + + // Only allow partial withdrawals with compounding withdrawal credentials + if ( + hasCompoundingWithdrawalCredential(validator.withdrawalCredentials) && + hasSufficientEffectiveBalance && + hasExcessBalance + ) { + const amountToWithdraw = BigInt( + Math.min(validatorBalance - MIN_ACTIVATION_BALANCE - pendingBalanceToWithdraw, amount) + ); + const exitQueueEpoch = computeExitEpochAndUpdateChurn(state, amountToWithdraw); + const withdrawableEpoch = exitQueueEpoch + config.MIN_VALIDATOR_WITHDRAWABILITY_DELAY; + + const pendingPartialWithdrawal = ssz.electra.PendingPartialWithdrawal.toViewDU({ + index: validatorIndex, + amount: amountToWithdraw, + withdrawableEpoch, + }); + state.pendingPartialWithdrawals.push(pendingPartialWithdrawal); + } +} + +function isValidatorEligibleForWithdrawOrExit( + validator: phase0.Validator, + sourceAddress: Uint8Array, + state: CachedBeaconStateElectra +): boolean { + const {withdrawalCredentials} = validator; + const addressStr = toHexString(withdrawalCredentials.subarray(12)); + const sourceAddressStr = toHexString(sourceAddress); + const {epoch: currentEpoch, config} = state.epochCtx; + + return ( + hasExecutionWithdrawalCredential(withdrawalCredentials) && + addressStr === sourceAddressStr && + isActiveValidator(validator, currentEpoch) && + validator.exitEpoch === FAR_FUTURE_EPOCH && + currentEpoch >= validator.activationEpoch + config.SHARD_COMMITTEE_PERIOD + ); +} diff --git a/packages/state-transition/src/block/processWithdrawals.ts b/packages/state-transition/src/block/processWithdrawals.ts index 9ae4c570e01..b06209167be 100644 --- a/packages/state-transition/src/block/processWithdrawals.ts +++ b/packages/state-transition/src/block/processWithdrawals.ts @@ -1,20 +1,30 @@ import {byteArrayEquals} from "@chainsafe/ssz"; import {ssz, capella} from "@lodestar/types"; import { - MAX_EFFECTIVE_BALANCE, MAX_WITHDRAWALS_PER_PAYLOAD, MAX_VALIDATORS_PER_WITHDRAWALS_SWEEP, + ForkSeq, + MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP, + FAR_FUTURE_EPOCH, + MIN_ACTIVATION_BALANCE, } from "@lodestar/params"; import {toRootHex} from "@lodestar/utils"; -import {CachedBeaconStateCapella} from "../types.js"; -import {decreaseBalance, hasEth1WithdrawalCredential, isCapellaPayloadHeader} from "../util/index.js"; +import {CachedBeaconStateCapella, CachedBeaconStateElectra} from "../types.js"; +import { + decreaseBalance, + getValidatorMaxEffectiveBalance, + isCapellaPayloadHeader, + isFullyWithdrawableValidator, + isPartiallyWithdrawableValidator, +} from "../util/index.js"; export function processWithdrawals( - state: CachedBeaconStateCapella, + fork: ForkSeq, + state: CachedBeaconStateCapella | CachedBeaconStateElectra, payload: capella.FullOrBlindedExecutionPayload ): void { - const {withdrawals: expectedWithdrawals} = getExpectedWithdrawals(state); + const {withdrawals: expectedWithdrawals, partialWithdrawalsCount} = getExpectedWithdrawals(fork, state); const numWithdrawals = expectedWithdrawals.length; if (isCapellaPayloadHeader(payload)) { @@ -44,6 +54,11 @@ export function processWithdrawals( decreaseBalance(state, withdrawal.validatorIndex, Number(withdrawal.amount)); } + if (fork >= ForkSeq.electra) { + const stateElectra = state as CachedBeaconStateElectra; + stateElectra.pendingPartialWithdrawals = 
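// Worked example (hypothetical values) for the partial-withdrawal branch above:
// amountToWithdraw = min(validatorBalance - MIN_ACTIVATION_BALANCE - pendingBalanceToWithdraw, amount).
// A compounding validator with a 2050 ETH balance, no pending withdrawals and a 10 ETH request
// withdraws min(2050 - 32 - 0, 10) = 10 ETH; a 3000 ETH request would be capped at 2018 ETH.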
stateElectra.pendingPartialWithdrawals.sliceFrom(partialWithdrawalsCount); + } + // Update the nextWithdrawalIndex if (expectedWithdrawals.length > 0) { const latestWithdrawal = expectedWithdrawals[expectedWithdrawals.length - 1]; @@ -63,46 +78,80 @@ } } -export function getExpectedWithdrawals(state: CachedBeaconStateCapella): { +export function getExpectedWithdrawals( + fork: ForkSeq, + state: CachedBeaconStateCapella | CachedBeaconStateElectra +): { withdrawals: capella.Withdrawal[]; sampledValidators: number; + partialWithdrawalsCount: number; } { const epoch = state.epochCtx.epoch; let withdrawalIndex = state.nextWithdrawalIndex; const {validators, balances, nextWithdrawalValidatorIndex} = state; - const bound = Math.min(validators.length, MAX_VALIDATORS_PER_WITHDRAWALS_SWEEP); - - let n = 0; const withdrawals: capella.Withdrawal[] = []; + + if (fork >= ForkSeq.electra) { + const stateElectra = state as CachedBeaconStateElectra; + + for (const withdrawal of stateElectra.pendingPartialWithdrawals.getAllReadonly()) { + if (withdrawal.withdrawableEpoch > epoch || withdrawals.length === MAX_PENDING_PARTIALS_PER_WITHDRAWALS_SWEEP) { + break; + } + + const validator = validators.getReadonly(withdrawal.index); + + if ( + validator.exitEpoch === FAR_FUTURE_EPOCH && + validator.effectiveBalance >= MIN_ACTIVATION_BALANCE && + balances.get(withdrawal.index) > MIN_ACTIVATION_BALANCE + ) { + const balanceOverMinActivationBalance = BigInt(balances.get(withdrawal.index) - MIN_ACTIVATION_BALANCE); + const withdrawableBalance = + balanceOverMinActivationBalance < withdrawal.amount ? balanceOverMinActivationBalance : withdrawal.amount; + withdrawals.push({ + index: withdrawalIndex, + validatorIndex: withdrawal.index, + address: validator.withdrawalCredentials.subarray(12), + amount: withdrawableBalance, + }); + withdrawalIndex++; + } + } + } + + const partialWithdrawalsCount = withdrawals.length; + const bound = Math.min(validators.length, MAX_VALIDATORS_PER_WITHDRAWALS_SWEEP); + let n = 0; // Just run a bounded loop max iterating over all withdrawals // however breaks out once we have MAX_WITHDRAWALS_PER_PAYLOAD for (n = 0; n < bound; n++) { // Get next validator in turn const validatorIndex = (nextWithdrawalValidatorIndex + n) % validators.length; - // It's most likely for validators to not have set eth1 credentials, than having 0 balance const validator = validators.getReadonly(validatorIndex); - if (!hasEth1WithdrawalCredential(validator.withdrawalCredentials)) { + const balance = balances.get(validatorIndex); + // early skip for balance = 0 as it's now more likely that a validator has exited or been slashed with + // balance zero than that it has no withdrawal credentials set + if (balance === 0) { continue; } - const balance = balances.get(validatorIndex); - - if (balance > 0 && validator.withdrawableEpoch <= epoch) { + if (isFullyWithdrawableValidator(fork, validator, balance, epoch)) { withdrawals.push({ index: withdrawalIndex, validatorIndex, - address: validator.withdrawalCredentials.slice(12), + address: validator.withdrawalCredentials.subarray(12), amount: BigInt(balance), }); withdrawalIndex++; - } else if (validator.effectiveBalance === MAX_EFFECTIVE_BALANCE && balance > MAX_EFFECTIVE_BALANCE) { + } else if (isPartiallyWithdrawableValidator(fork, validator, balance)) { withdrawals.push({ index: withdrawalIndex, validatorIndex, - address: validator.withdrawalCredentials.slice(12), - amount: BigInt(balance - MAX_EFFECTIVE_BALANCE), + address:
validator.withdrawalCredentials.subarray(12), + amount: BigInt(balance - getValidatorMaxEffectiveBalance(validator.withdrawalCredentials)), }); withdrawalIndex++; } @@ -113,5 +162,5 @@ export function getExpectedWithdrawals(state: CachedBeaconStateCapella): { } } - return {withdrawals, sampledValidators: n}; + return {withdrawals, sampledValidators: n, partialWithdrawalsCount}; } diff --git a/packages/state-transition/src/block/slashValidator.ts b/packages/state-transition/src/block/slashValidator.ts index 9f3eb294764..c4b7d5f848e 100644 --- a/packages/state-transition/src/block/slashValidator.ts +++ b/packages/state-transition/src/block/slashValidator.ts @@ -6,11 +6,13 @@ import { MIN_SLASHING_PENALTY_QUOTIENT, MIN_SLASHING_PENALTY_QUOTIENT_ALTAIR, MIN_SLASHING_PENALTY_QUOTIENT_BELLATRIX, + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA, PROPOSER_REWARD_QUOTIENT, PROPOSER_WEIGHT, TIMELY_TARGET_FLAG_INDEX, WEIGHT_DENOMINATOR, WHISTLEBLOWER_REWARD_QUOTIENT, + WHISTLEBLOWER_REWARD_QUOTIENT_ELECTRA, } from "@lodestar/params"; import {decreaseBalance, increaseBalance} from "../util/index.js"; @@ -31,7 +33,7 @@ export function slashValidator( const validator = state.validators.get(slashedIndex); // TODO: Bellatrix initiateValidatorExit validators.update() with the one below - initiateValidatorExit(state, validator); + initiateValidatorExit(fork, state, validator); validator.slashed = true; validator.withdrawableEpoch = Math.max(validator.withdrawableEpoch, epoch + EPOCHS_PER_SLASHINGS_VECTOR); @@ -41,7 +43,7 @@ export function slashValidator( // state.slashings is initially a Gwei (BigInt) vector, however since Nov 2023 it's converted to UintNum64 (number) vector in the state transition because: // - state.slashings[nextEpoch % EPOCHS_PER_SLASHINGS_VECTOR] is reset per epoch in processSlashingsReset() // - max slashed validators per epoch is SLOTS_PER_EPOCH * MAX_ATTESTER_SLASHINGS * MAX_VALIDATORS_PER_COMMITTEE which is 32 * 2 * 2048 = 131072 on mainnet - // - with that and 32_000_000_000 MAX_EFFECTIVE_BALANCE, it still fits in a number given that Math.floor(Number.MAX_SAFE_INTEGER / 32_000_000_000) = 281474 + // - with that and 32_000_000_000 MAX_EFFECTIVE_BALANCE or 2048_000_000_000 MAX_EFFECTIVE_BALANCE_ELECTRA, it still fits in a number given that Math.floor(Number.MAX_SAFE_INTEGER / 32_000_000_000) = 281474 // - we don't need to compute the total slashings from state.slashings, it's handled by totalSlashingsByIncrement in EpochCache const slashingIndex = epoch % EPOCHS_PER_SLASHINGS_VECTOR; state.slashings.set(slashingIndex, (state.slashings.get(slashingIndex) ?? 0) + effectiveBalance); @@ -52,11 +54,16 @@ export function slashValidator( ? MIN_SLASHING_PENALTY_QUOTIENT : fork === ForkSeq.altair ? MIN_SLASHING_PENALTY_QUOTIENT_ALTAIR - : MIN_SLASHING_PENALTY_QUOTIENT_BELLATRIX; + : fork < ForkSeq.electra // no change from bellatrix to deneb + ? MIN_SLASHING_PENALTY_QUOTIENT_BELLATRIX + : MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA; decreaseBalance(state, slashedIndex, Math.floor(effectiveBalance / minSlashingPenaltyQuotient)); // apply proposer and whistleblower rewards - const whistleblowerReward = Math.floor(effectiveBalance / WHISTLEBLOWER_REWARD_QUOTIENT); + const whistleblowerReward = + fork < ForkSeq.electra + ? Math.floor(effectiveBalance / WHISTLEBLOWER_REWARD_QUOTIENT) + : Math.floor(effectiveBalance / WHISTLEBLOWER_REWARD_QUOTIENT_ELECTRA); const proposerReward = fork === ForkSeq.phase0 ? 
Math.floor(whistleblowerReward / PROPOSER_REWARD_QUOTIENT) diff --git a/packages/state-transition/src/cache/effectiveBalanceIncrements.ts b/packages/state-transition/src/cache/effectiveBalanceIncrements.ts index a82eb030043..bd72b333a03 100644 --- a/packages/state-transition/src/cache/effectiveBalanceIncrements.ts +++ b/packages/state-transition/src/cache/effectiveBalanceIncrements.ts @@ -3,18 +3,17 @@ import {BeaconStateAllForks} from "../types.js"; /** * Alias to allow easier refactoring. - * TODO: Estimate the risk of future proof of MAX_EFFECTIVE_BALANCE_INCREMENT < 255 */ -export type EffectiveBalanceIncrements = Uint8Array; +export type EffectiveBalanceIncrements = Uint16Array; -/** Helper to prevent re-writting tests downstream if we change Uint8Array to number[] */ +/** Helper to prevent re-writting tests downstream if we change Uint16Array to number[] */ export function getEffectiveBalanceIncrementsZeroed(len: number): EffectiveBalanceIncrements { - return new Uint8Array(len); + return new Uint16Array(len); } /** * effectiveBalanceIncrements length will always be equal or greater than validatorCount. The - * getEffectiveBalanceIncrementsByteLen() modulo is used to reduce the frequency at which its Uint8Array is recreated. + * getEffectiveBalanceIncrementsByteLen() modulo is used to reduce the frequency at which its Uint16Array is recreated. * if effectiveBalanceIncrements has length greater than validatorCount it's not a problem since those values would * never be accessed. */ @@ -22,7 +21,7 @@ export function getEffectiveBalanceIncrementsWithLen(validatorCount: number): Ef // TODO: Research what's the best number to minimize both memory cost and copy costs const byteLen = 1024 * Math.ceil(validatorCount / 1024); - return new Uint8Array(byteLen); + return new Uint16Array(byteLen); } /** @@ -32,7 +31,7 @@ export function getEffectiveBalanceIncrementsWithLen(validatorCount: number): Ef */ export function getEffectiveBalanceIncrements(state: BeaconStateAllForks): EffectiveBalanceIncrements { const validatorsArr = state.validators.getAllReadonlyValues(); - const effectiveBalanceIncrements = new Uint8Array(validatorsArr.length); + const effectiveBalanceIncrements = new Uint16Array(validatorsArr.length); for (let i = 0; i < validatorsArr.length; i++) { effectiveBalanceIncrements[i] = Math.floor(validatorsArr[i].effectiveBalance / EFFECTIVE_BALANCE_INCREMENT); } diff --git a/packages/state-transition/src/cache/epochCache.ts b/packages/state-transition/src/cache/epochCache.ts index 191aa7f3985..af6e976e908 100644 --- a/packages/state-transition/src/cache/epochCache.ts +++ b/packages/state-transition/src/cache/epochCache.ts @@ -1,5 +1,18 @@ import {PublicKey} from "@chainsafe/blst"; -import {BLSSignature, CommitteeIndex, Epoch, Slot, ValidatorIndex, phase0, SyncPeriod} from "@lodestar/types"; +import * as immutable from "immutable"; +import {fromHexString} from "@chainsafe/ssz"; +import { + BLSSignature, + CommitteeIndex, + Epoch, + Slot, + ValidatorIndex, + phase0, + SyncPeriod, + Attestation, + IndexedAttestation, + electra, +} from "@lodestar/types"; import {createBeaconConfig, BeaconConfig, ChainConfig} from "@lodestar/config"; import { ATTESTATION_SUBNET_COUNT, @@ -29,8 +42,17 @@ import {computeEpochShuffling, EpochShuffling, getShufflingDecisionBlock} from " import {computeBaseRewardPerIncrement, computeSyncParticipantReward} from "../util/syncCommittee.js"; import {sumTargetUnslashedBalanceIncrements} from "../util/targetUnslashedBalance.js"; import {getTotalSlashingsByIncrement} 
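// Rationale note (not stated in the PR itself): EffectiveBalanceIncrements moves from Uint8Array to
// Uint16Array because a post-electra effective balance can reach MAX_EFFECTIVE_BALANCE_ELECTRA
// (2048 ETH), i.e. 2048 increments of EFFECTIVE_BALANCE_INCREMENT (1 ETH), which overflows a uint8
// (max 255) but fits in a uint16 (max 65535).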
from "../epoch/processSlashings.js"; +import {EpochCacheMetrics} from "../metrics.js"; import {EffectiveBalanceIncrements, getEffectiveBalanceIncrementsWithLen} from "./effectiveBalanceIncrements.js"; -import {Index2PubkeyCache, PubkeyIndexMap, syncPubkeys} from "./pubkeyCache.js"; +import { + Index2PubkeyCache, + PubkeyIndexMap, + UnfinalizedPubkeyIndexMap, + syncPubkeys, + toMemoryEfficientHexStr, + PubkeyHex, + newUnfinalizedPubkeyIndexMap, +} from "./pubkeyCache.js"; import {BeaconStateAllForks, BeaconStateAltair, ShufflingGetter} from "./types.js"; import { computeSyncCommitteeCache, @@ -82,23 +104,31 @@ type ProposersDeferred = {computed: false; seed: Uint8Array} | {computed: true; export class EpochCache { config: BeaconConfig; /** - * Unique globally shared pubkey registry. There should only exist one for the entire application. + * Unique globally shared finalized pubkey registry. There should only exist one for the entire application. * * TODO: this is a hack, we need a safety mechanism in case a bad eth1 majority vote is in, * or handle non finalized data differently, or use an immutable.js structure for cheap copies - * Warning: may contain pubkeys that do not yet exist in the current state, but do in a later processed state. + * + * New: This would include only validators whose activation_eligibility_epoch != FAR_FUTURE_EPOCH and hence it is + * insert only. Validators could be 1) Active 2) In the activation queue 3) Initialized but pending queued * * $VALIDATOR_COUNT x 192 char String -> Number Map */ pubkey2index: PubkeyIndexMap; /** - * Unique globally shared pubkey registry. There should only exist one for the entire application. + * Unique globally shared finalized pubkey registry. There should only exist one for the entire application. * - * Warning: may contain indices that do not yet exist in the current state, but do in a later processed state. + * New: This would include only validators whose activation_eligibility_epoch != FAR_FUTURE_EPOCH and hence it is + * insert only. Validators could be 1) Active 2) In the activation queue 3) Initialized but pending queued * * $VALIDATOR_COUNT x BLST deserialized pubkey (Jacobian coordinates) */ index2pubkey: Index2PubkeyCache; + /** + * Unique pubkey registry shared in the same fork. There should only exist one for the fork. + */ + unfinalizedPubkey2index: UnfinalizedPubkeyIndexMap; + /** * Indexes of the block proposers for the current epoch. * @@ -161,6 +191,7 @@ export class EpochCache { * initiateValidatorExit(). This value may vary on each fork of the state. * * NOTE: Changes block to block + * NOTE: No longer used by initiateValidatorExit post-electra */ exitQueueEpoch: Epoch; /** @@ -168,6 +199,7 @@ export class EpochCache { * initiateValidatorExit(). This value may vary on each fork of the state. * * NOTE: Changes block to block + * NOTE: No longer used by initiateValidatorExit post-electra */ exitQueueChurn: number; @@ -198,11 +230,21 @@ export class EpochCache { // TODO: Helper stats epoch: Epoch; syncPeriod: SyncPeriod; + /** + * state.validators.length of every state at epoch boundary + * They are saved in increasing order of epoch. + * The first validator length in the list corresponds to the state AFTER the latest finalized checkpoint state. ie. state.finalizedCheckpoint.epoch - 1 + * The last validator length corresponds to the latest epoch state ie. this.epoch + * eg. 
latest epoch = 105, latest finalized cp state epoch = 102 + * then the list will be (in terms of epoch) [103, 104, 105] + */ + historicalValidatorLengths: immutable.List; constructor(data: { config: BeaconConfig; pubkey2index: PubkeyIndexMap; index2pubkey: Index2PubkeyCache; + unfinalizedPubkey2index: UnfinalizedPubkeyIndexMap; proposers: number[]; proposersPrevEpoch: number[] | null; proposersNextEpoch: ProposersDeferred; @@ -225,10 +267,12 @@ export class EpochCache { nextSyncCommitteeIndexed: SyncCommitteeCache; epoch: Epoch; syncPeriod: SyncPeriod; + historialValidatorLengths: immutable.List; }) { this.config = data.config; this.pubkey2index = data.pubkey2index; this.index2pubkey = data.index2pubkey; + this.unfinalizedPubkey2index = data.unfinalizedPubkey2index; this.proposers = data.proposers; this.proposersPrevEpoch = data.proposersPrevEpoch; this.proposersNextEpoch = data.proposersNextEpoch; @@ -251,11 +295,12 @@ export class EpochCache { this.nextSyncCommitteeIndexed = data.nextSyncCommitteeIndexed; this.epoch = data.epoch; this.syncPeriod = data.syncPeriod; + this.historicalValidatorLengths = data.historialValidatorLengths; } /** * Create an epoch cache - * @param validators cached validators that matches `state.validators` + * @param state a finalized beacon state. Passing in unfinalized state may cause unexpected behaviour eg. empty unfinalized cache * * SLOW CODE - 🐢 */ @@ -354,7 +399,12 @@ export class EpochCache { // Allow to create CachedBeaconState for empty states, or no active validators const proposers = currentShuffling.activeIndices.length > 0 - ? computeProposers(currentProposerSeed, currentShuffling, effectiveBalanceIncrements) + ? computeProposers( + config.getForkSeqAtEpoch(currentEpoch), + currentProposerSeed, + currentShuffling, + effectiveBalanceIncrements + ) : []; const proposersNextEpoch: ProposersDeferred = { @@ -431,6 +481,8 @@ export class EpochCache { config, pubkey2index, index2pubkey, + // `createFromFinalizedState()` creates cache with empty unfinalizedPubkey2index. Be cautious to only pass in finalized state + unfinalizedPubkey2index: newUnfinalizedPubkeyIndexMap(), proposers, // On first epoch, set to null to prevent unnecessary work since this is only used for metrics proposersPrevEpoch: null, @@ -454,6 +506,7 @@ export class EpochCache { nextSyncCommitteeIndexed, epoch: currentEpoch, syncPeriod: computeSyncPeriodAtEpoch(currentEpoch), + historialValidatorLengths: immutable.List(), }); } @@ -469,6 +522,8 @@ export class EpochCache { // Common append-only structures shared with all states, no need to clone pubkey2index: this.pubkey2index, index2pubkey: this.index2pubkey, + // No need to clone this reference. 
On each mutation the `unfinalizedPubkey2index` reference is replaced, @see `addPubkey` + unfinalizedPubkey2index: this.unfinalizedPubkey2index, // Immutable data proposers: this.proposers, proposersPrevEpoch: this.proposersPrevEpoch, @@ -495,6 +550,7 @@ export class EpochCache { nextSyncCommitteeIndexed: this.nextSyncCommitteeIndexed, epoch: this.epoch, syncPeriod: this.syncPeriod, + historialValidatorLengths: this.historicalValidatorLengths, }); } @@ -505,6 +561,7 @@ export class EpochCache { afterProcessEpoch( state: BeaconStateAllForks, epochTransitionCache: { + indicesEligibleForActivationQueue: ValidatorIndex[]; nextEpochShufflingActiveValidatorIndices: ValidatorIndex[]; nextEpochShufflingActiveIndicesLength: number; nextEpochTotalActiveBalanceByIncrement: number; @@ -526,7 +583,12 @@ export class EpochCache { this.proposersPrevEpoch = this.proposers; const currentProposerSeed = getSeed(state, this.currentShuffling.epoch, DOMAIN_BEACON_PROPOSER); - this.proposers = computeProposers(currentProposerSeed, this.currentShuffling, this.effectiveBalanceIncrements); + this.proposers = computeProposers( + this.config.getForkSeqAtEpoch(currEpoch), + currentProposerSeed, + this.currentShuffling, + this.effectiveBalanceIncrements + ); // Only pre-compute the seed since it's very cheap. Do the expensive computeProposers() call only on demand. this.proposersNextEpoch = {computed: false, seed: getSeed(state, this.nextShuffling.epoch, DOMAIN_BEACON_PROPOSER)}; @@ -579,27 +641,78 @@ export class EpochCache { // ``` this.epoch = computeEpochAtSlot(state.slot); this.syncPeriod = computeSyncPeriodAtEpoch(this.epoch); + // ELECTRA Only: Add current cpState.validators.length + // Only keep validatorLength for epochs after finalized cpState.epoch + // eg. [100(epoch 1), 102(epoch 2)].push(104(epoch 3)), this.epoch = 3, finalized cp epoch = 1 + // We keep the last (3 - 1) items = [102, 104] + if (currEpoch >= this.config.ELECTRA_FORK_EPOCH) { + this.historicalValidatorLengths = this.historicalValidatorLengths.push(state.validators.length); + + // If number of validatorLengths we want to keep exceeds the current list size, it implies + // finalized checkpoint hasn't advanced, and no need to slice + const hasFinalizedCpAdvanced = + this.epoch - state.finalizedCheckpoint.epoch < this.historicalValidatorLengths.size; + + if (hasFinalizedCpAdvanced) { + // We use finalized cp epoch - this.epoch which is a negative number to keep the last n entries and discard the rest + this.historicalValidatorLengths = this.historicalValidatorLengths.slice( + state.finalizedCheckpoint.epoch - this.epoch + ); + } + } } beforeEpochTransition(): void { // Clone (copy) before being mutated in processEffectiveBalanceUpdates - // NOTE: Force to use Uint8Array.slice (copy) instead of Buffer.call (not copy) - this.effectiveBalanceIncrements = Uint8Array.prototype.slice.call(this.effectiveBalanceIncrements, 0); + // NOTE: Force to use Uint16Array.slice (copy) instead of Buffer.call (not copy) + this.effectiveBalanceIncrements = Uint16Array.prototype.slice.call(this.effectiveBalanceIncrements, 0); } /** * Return the beacon committee at slot for index. 
*/ getBeaconCommittee(slot: Slot, index: CommitteeIndex): Uint32Array { + return this.getBeaconCommittees(slot, [index]); + } + + /** + * Return a single Uint32Array representing concatted committees of indices + */ + getBeaconCommittees(slot: Slot, indices: CommitteeIndex[]): Uint32Array { + if (indices.length === 0) { + throw new Error("Attempt to get committees without providing CommitteeIndex"); + } + const slotCommittees = this.getShufflingAtSlot(slot).committees[slot % SLOTS_PER_EPOCH]; - if (index >= slotCommittees.length) { - throw new EpochCacheError({ - code: EpochCacheErrorCode.COMMITTEE_INDEX_OUT_OF_RANGE, - index, - maxIndex: slotCommittees.length, - }); + const committees = []; + + for (const index of indices) { + if (index >= slotCommittees.length) { + throw new EpochCacheError({ + code: EpochCacheErrorCode.COMMITTEE_INDEX_OUT_OF_RANGE, + index, + maxIndex: slotCommittees.length, + }); + } + committees.push(slotCommittees[index]); } - return slotCommittees[index]; + + // Early return if only one index + if (committees.length === 1) { + return committees[0]; + } + + // Create a new Uint32Array to flatten `committees` + const totalLength = committees.reduce((acc, curr) => acc + curr.length, 0); + const result = new Uint32Array(totalLength); + + let offset = 0; + for (const committee of committees) { + result.set(committee, offset); + offset += committee.length; + } + + return result; } getCommitteeCountPerSlot(epoch: Epoch): number { @@ -672,6 +785,7 @@ export class EpochCache { getBeaconProposersNextEpoch(): ValidatorIndex[] { if (!this.proposersNextEpoch.computed) { const indexes = computeProposers( + this.config.getForkSeqAtEpoch(this.epoch + 1), this.proposersNextEpoch.seed, this.nextShuffling, this.effectiveBalanceIncrements @@ -685,10 +799,9 @@ export class EpochCache { /** * Return the indexed attestation corresponding to ``attestation``. 
*/ - getIndexedAttestation(attestation: phase0.Attestation): phase0.IndexedAttestation { - const {aggregationBits, data} = attestation; - const committeeIndices = this.getBeaconCommittee(data.slot, data.index); - const attestingIndices = aggregationBits.intersectValues(committeeIndices); + getIndexedAttestation(fork: ForkSeq, attestation: Attestation): IndexedAttestation { + const {data} = attestation; + const attestingIndices = this.getAttestingIndices(fork, attestation); // sort in-place attestingIndices.sort((a, b) => a - b); @@ -699,6 +812,31 @@ return { }; } + /** + * Return indices of validators who attested in `attestation` + */ + getAttestingIndices(fork: ForkSeq, attestation: Attestation): number[] { + if (fork < ForkSeq.electra) { + const {aggregationBits, data} = attestation; + const validatorIndices = this.getBeaconCommittee(data.slot, data.index); + + return aggregationBits.intersectValues(validatorIndices); + } else { + const {aggregationBits, committeeBits, data} = attestation as electra.Attestation; + + // There is a naming conflict on the term `committeeIndices` + // In Lodestar it usually means a list of validator indices of participants in a committee + // In the spec it means a list of committee indices according to committeeBits + // This `committeeIndices` refers to the latter + // TODO Electra: resolve the naming conflicts + const committeeIndices = committeeBits.getTrueBitIndexes(); + + const validatorIndices = this.getBeaconCommittees(data.slot, committeeIndices); + + return aggregationBits.intersectValues(validatorIndices); + } + } + getCommitteeAssignments( epoch: Epoch, requestedValidatorIndices: ValidatorIndex[] @@ -766,9 +904,75 @@ return isAggregatorFromCommitteeLength(committee.length, slotSignature); } + /** + * Return the finalized pubkey for the given validator index. + * Only the finalized pubkey is returned as we do not store unfinalized pubkeys; nowhere in the spec + * requires such a lookup + */ + getPubkey(index: ValidatorIndex): PublicKey | undefined { + return this.index2pubkey[index]; + } + + getValidatorIndex(pubkey: Uint8Array | PubkeyHex): ValidatorIndex | undefined { + if (this.isPostElectra()) { + return this.pubkey2index.get(pubkey) ?? this.unfinalizedPubkey2index.get(toMemoryEfficientHexStr(pubkey)); + } else { + return this.pubkey2index.get(pubkey); + } + } + + /** + * + * Add unfinalized pubkeys + * + */ addPubkey(index: ValidatorIndex, pubkey: Uint8Array): void { + if (this.isPostElectra()) { + this.addUnFinalizedPubkey(index, pubkey); + } else { + // the deposit mechanism pre-ELECTRA follows a safe distance, with the assumption + // that deposits are already canonical + this.addFinalizedPubkey(index, pubkey); + } + } + + addUnFinalizedPubkey(index: ValidatorIndex, pubkey: PubkeyHex | Uint8Array, metrics?: EpochCacheMetrics): void { + this.unfinalizedPubkey2index = this.unfinalizedPubkey2index.set(toMemoryEfficientHexStr(pubkey), index); + metrics?.newUnFinalizedPubkey.inc(); + } + + addFinalizedPubkeys(pubkeyMap: UnfinalizedPubkeyIndexMap, metrics?: EpochCacheMetrics): void { + pubkeyMap.forEach((index, pubkey) => this.addFinalizedPubkey(index, pubkey, metrics)); + } + + /** + * Add finalized validator index and pubkey into finalized cache.
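// Worked example (hypothetical sizes) for getAttestingIndices() above: post-electra the
// aggregationBits span all participating committees in committeeBits order. If committeeBits marks
// committees 1 and 3, and getBeaconCommittees(slot, [1, 3]) concatenates 400 + 410 validator
// indices, then aggregationBits.bitLen must be 810 and intersectValues() returns the attesting
// subset of that concatenated index list.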
+ * Since addFinalizedPubkey() primarily takes pubkeys from unfinalized cache, it can take pubkey hex string directly + */ + addFinalizedPubkey(index: ValidatorIndex, pubkey: PubkeyHex | Uint8Array, metrics?: EpochCacheMetrics): void { + const existingIndex = this.pubkey2index.get(pubkey); + + if (existingIndex !== undefined) { + if (existingIndex === index) { + // Repeated insert. + metrics?.finalizedPubkeyDuplicateInsert.inc(); + return; + } else { + // attempt to insert the same pubkey with different index, should never happen. + throw Error("inserted existing pubkey into finalizedPubkey2index cache with a different index"); + } + } + this.pubkey2index.set(pubkey, index); - this.index2pubkey[index] = PublicKey.fromBytes(pubkey); // Optimize for aggregation + const pubkeyBytes = pubkey instanceof Uint8Array ? pubkey : fromHexString(pubkey); + this.index2pubkey[index] = PublicKey.fromBytes(pubkeyBytes); // Optimize for aggregation + } + + /** + * Delete pubkeys from unfinalized cache + */ + deleteUnfinalizedPubkeys(pubkeys: Iterable): void { + this.unfinalizedPubkey2index = this.unfinalizedPubkey2index.deleteAll(pubkeys); } getShufflingAtSlot(slot: Slot): EpochShuffling { @@ -845,15 +1049,53 @@ export class EpochCache { } effectiveBalanceIncrementsSet(index: number, effectiveBalance: number): void { - if (index >= this.effectiveBalanceIncrements.length) { - // Clone and extend effectiveBalanceIncrements + if (this.isPostElectra()) { + // TODO: electra + // getting length and setting getEffectiveBalanceIncrementsByteLen is not fork safe + // so each time we add an index, we should new the Uint8Array to keep it forksafe + // one simple optimization could be to increment the length once per block rather + // on each add/set + // + // there could still be some unused length remaining from the prev ELECTRA padding + const newLength = + index >= this.effectiveBalanceIncrements.length ? 
index + 1 : this.effectiveBalanceIncrements.length; const effectiveBalanceIncrements = this.effectiveBalanceIncrements; - this.effectiveBalanceIncrements = new Uint8Array(getEffectiveBalanceIncrementsByteLen(index + 1)); + this.effectiveBalanceIncrements = new Uint16Array(newLength); this.effectiveBalanceIncrements.set(effectiveBalanceIncrements, 0); + } else { + if (index >= this.effectiveBalanceIncrements.length) { + // Clone and extend effectiveBalanceIncrements + const effectiveBalanceIncrements = this.effectiveBalanceIncrements; + this.effectiveBalanceIncrements = new Uint16Array(getEffectiveBalanceIncrementsByteLen(index + 1)); + this.effectiveBalanceIncrements.set(effectiveBalanceIncrements, 0); + } } this.effectiveBalanceIncrements[index] = Math.floor(effectiveBalance / EFFECTIVE_BALANCE_INCREMENT); } + + isPostElectra(): boolean { + return this.epoch >= this.config.ELECTRA_FORK_EPOCH; + } + + getValidatorCountAtEpoch(targetEpoch: Epoch): number | undefined { + const currentEpoch = this.epoch; + + if (targetEpoch === currentEpoch) { + return this.historicalValidatorLengths.get(-1); + } + + // Attempt to get validator count from future epoch + if (targetEpoch > currentEpoch) { + return undefined; + } + + // targetEpoch is so far back that historicalValidatorLengths doesnt contain such info + if (targetEpoch < currentEpoch - this.historicalValidatorLengths.size + 1) { + return undefined; + } + return this.historicalValidatorLengths.get(targetEpoch - currentEpoch - 1); + } } function getEffectiveBalanceIncrementsByteLen(validatorCount: number): number { diff --git a/packages/state-transition/src/cache/epochTransitionCache.ts b/packages/state-transition/src/cache/epochTransitionCache.ts index e6f84de6c62..6f27ad96d1c 100644 --- a/packages/state-transition/src/cache/epochTransitionCache.ts +++ b/packages/state-transition/src/cache/epochTransitionCache.ts @@ -1,6 +1,6 @@ -import {Epoch, ValidatorIndex} from "@lodestar/types"; +import {Epoch, ValidatorIndex, phase0} from "@lodestar/types"; import {intDiv} from "@lodestar/utils"; -import {EPOCHS_PER_SLASHINGS_VECTOR, FAR_FUTURE_EPOCH, ForkSeq, MAX_EFFECTIVE_BALANCE} from "@lodestar/params"; +import {EPOCHS_PER_SLASHINGS_VECTOR, FAR_FUTURE_EPOCH, ForkSeq, MIN_ACTIVATION_BALANCE} from "@lodestar/params"; import { hasMarkers, @@ -78,7 +78,7 @@ export interface EpochTransitionCache { /** * Indices of validators that just joined and will be eligible for the active queue. * ``` - * v.activationEligibilityEpoch === FAR_FUTURE_EPOCH && v.effectiveBalance === MAX_EFFECTIVE_BALANCE + * v.activationEligibilityEpoch === FAR_FUTURE_EPOCH && v.effectiveBalance >= MAX_EFFECTIVE_BALANCE * ``` * All validators in indicesEligibleForActivationQueue get activationEligibilityEpoch set. So it can only include * validators that have just joined the registry through a valid full deposit(s). @@ -127,6 +127,18 @@ export interface EpochTransitionCache { flags: number[]; + /** + * Validators in the current epoch, should use it for read-only value instead of accessing state.validators directly. + * Note that during epoch processing, validators could be updated so need to use it with care. + */ + validators: phase0.Validator[]; + + /** + * This is for electra only + * Validators that're switched to compounding during processPendingConsolidations(), not available in beforeProcessEpoch() + */ + newCompoundingValidators?: Set; + /** * balances array will be populated by processRewardsAndPenalties() and consumed by processEffectiveBalanceUpdates(). 
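// Worked example (hypothetical values) for EpochCache.getValidatorCountAtEpoch() above: with
// this.epoch = 105 and historicalValidatorLengths = [100, 102, 104] (counts for epochs 103..105),
// getValidatorCountAtEpoch(105) -> get(-1) = 104; getValidatorCountAtEpoch(104) ->
// get(104 - 105 - 1) = get(-2) = 102; getValidatorCountAtEpoch(102) -> undefined (older than the
// retained window). immutable.List.get() accepts negative indices counted from the end.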
* processRewardsAndPenalties() already has a regular Javascript array of balances. @@ -297,12 +309,12 @@ export function beforeProcessEpoch( // def is_eligible_for_activation_queue(validator: Validator) -> bool: // return ( // validator.activation_eligibility_epoch == FAR_FUTURE_EPOCH - // and validator.effective_balance == MAX_EFFECTIVE_BALANCE + // and validator.effective_balance >= MAX_EFFECTIVE_BALANCE # [Modified in Electra] // ) // ``` if ( validator.activationEligibilityEpoch === FAR_FUTURE_EPOCH && - validator.effectiveBalance === MAX_EFFECTIVE_BALANCE + validator.effectiveBalance >= MIN_ACTIVATION_BALANCE ) { indicesEligibleForActivationQueue.push(i); } @@ -481,7 +493,9 @@ export function beforeProcessEpoch( proposerIndices, inclusionDelays, flags, - + validators, + // will be assigned in processPendingConsolidations() + newCompoundingValidators: undefined, // Will be assigned in processRewardsAndPenalties() balances: undefined, }; diff --git a/packages/state-transition/src/cache/pubkeyCache.ts b/packages/state-transition/src/cache/pubkeyCache.ts index b2b45ca09d2..44fe6df4559 100644 --- a/packages/state-transition/src/cache/pubkeyCache.ts +++ b/packages/state-transition/src/cache/pubkeyCache.ts @@ -1,9 +1,17 @@ import {PublicKey} from "@chainsafe/blst"; +import * as immutable from "immutable"; import {ValidatorIndex, phase0} from "@lodestar/types"; export type Index2PubkeyCache = PublicKey[]; +/** + * OrderedMap preserves the order of entries in which they are `set()`. + * We assume `values()` yields validator indices in strictly increasing order + * as new validator indices are assigned in increasing order. + * EIP-6914 will break this assumption. + */ +export type UnfinalizedPubkeyIndexMap = immutable.Map; -type PubkeyHex = string; +export type PubkeyHex = string; /** * toHexString() creates hex strings via string concatenation, which are very memory inefficient. @@ -13,7 +21,7 @@ type PubkeyHex = string; * * See https://github.com/ChainSafe/lodestar/issues/3446 */ -function toMemoryEfficientHexStr(hex: Uint8Array | string): string { +export function toMemoryEfficientHexStr(hex: Uint8Array | string): string { if (typeof hex === "string") { if (hex.startsWith("0x")) { hex = hex.slice(2); @@ -24,6 +32,13 @@ function toMemoryEfficientHexStr(hex: Uint8Array | string): string { return Buffer.from(hex.buffer, hex.byteOffset, hex.byteLength).toString("hex"); } +/** + * A wrapper for calling immutable.js. To abstract the initialization of UnfinalizedPubkeyIndexMap + */ +export function newUnfinalizedPubkeyIndexMap(): UnfinalizedPubkeyIndexMap { + return immutable.Map(); +} + export class PubkeyIndexMap { // We don't really need the full pubkey. 
We could just use the first 20 bytes like an Ethereum address readonly map = new Map(); @@ -39,7 +54,7 @@ export class PubkeyIndexMap { return this.map.get(toMemoryEfficientHexStr(key)); } - set(key: Uint8Array, value: ValidatorIndex): void { + set(key: Uint8Array | PubkeyHex, value: ValidatorIndex): void { this.map.set(toMemoryEfficientHexStr(key), value); } } diff --git a/packages/state-transition/src/cache/stateCache.ts b/packages/state-transition/src/cache/stateCache.ts index f4e637e5d66..0435e8829d2 100644 --- a/packages/state-transition/src/cache/stateCache.ts +++ b/packages/state-transition/src/cache/stateCache.ts @@ -10,6 +10,7 @@ import { BeaconStateBellatrix, BeaconStateCapella, BeaconStateDeneb, + BeaconStateElectra, } from "./types.js"; import {RewardCache, createEmptyRewardCache} from "./rewardCache.js"; @@ -130,11 +131,13 @@ export type CachedBeaconStateAltair = CachedBeaconState; export type CachedBeaconStateBellatrix = CachedBeaconState; export type CachedBeaconStateCapella = CachedBeaconState; export type CachedBeaconStateDeneb = CachedBeaconState; +export type CachedBeaconStateElectra = CachedBeaconState; export type CachedBeaconStateAllForks = CachedBeaconState; export type CachedBeaconStateExecutions = CachedBeaconState; /** * Create CachedBeaconState computing a new EpochCache instance + * TODO ELECTRA: rename this to createFinalizedCachedBeaconState() as it's intended for finalized state only */ export function createCachedBeaconState( state: T, @@ -158,7 +161,7 @@ export function createCachedBeaconState( * Create a CachedBeaconState given a cached seed state and state bytes * This guarantees that the returned state shares the same tree with the seed state * Check loadState() api for more details - * // TODO: rename to loadUnfinalizedCachedBeaconState() due to EIP-6110 + * // TODO: rename to loadUnfinalizedCachedBeaconState() due to ELECTRA */ export function loadCachedBeaconState( cachedSeedState: T, diff --git a/packages/state-transition/src/cache/types.ts b/packages/state-transition/src/cache/types.ts index d6d8a3c3790..b3fe6fc8ed5 100644 --- a/packages/state-transition/src/cache/types.ts +++ b/packages/state-transition/src/cache/types.ts @@ -8,6 +8,7 @@ export type BeaconStateAltair = CompositeViewDU>; export type BeaconStateCapella = CompositeViewDU>; export type BeaconStateDeneb = CompositeViewDU>; +export type BeaconStateElectra = CompositeViewDU>; export type BeaconStateAllForks = CompositeViewDU>; export type BeaconStateExecutions = CompositeViewDU>; diff --git a/packages/state-transition/src/epoch/index.ts b/packages/state-transition/src/epoch/index.ts index b55ebe291fb..bfb415b9ed6 100644 --- a/packages/state-transition/src/epoch/index.ts +++ b/packages/state-transition/src/epoch/index.ts @@ -11,6 +11,7 @@ import { CachedBeaconStateAltair, CachedBeaconStatePhase0, EpochTransitionCache, + CachedBeaconStateElectra, } from "../types.js"; import {BeaconStateTransitionMetrics} from "../metrics.js"; import {processEffectiveBalanceUpdates} from "./processEffectiveBalanceUpdates.js"; @@ -27,6 +28,8 @@ import {processRewardsAndPenalties} from "./processRewardsAndPenalties.js"; import {processSlashings} from "./processSlashings.js"; import {processSlashingsReset} from "./processSlashingsReset.js"; import {processSyncCommitteeUpdates} from "./processSyncCommitteeUpdates.js"; +import {processPendingBalanceDeposits} from "./processPendingBalanceDeposits.js"; +import {processPendingConsolidations} from "./processPendingConsolidations.js"; // For spec tests export 
{getRewardsAndPenalties} from "./processRewardsAndPenalties.js"; @@ -45,6 +48,8 @@ export { processParticipationFlagUpdates, processSyncCommitteeUpdates, processHistoricalSummariesUpdate, + processPendingBalanceDeposits, + processPendingConsolidations, }; export {computeUnrealizedCheckpoints} from "./computeUnrealizedCheckpoints.js"; @@ -65,6 +70,8 @@ export enum EpochTransitionStep { processEffectiveBalanceUpdates = "processEffectiveBalanceUpdates", processParticipationFlagUpdates = "processParticipationFlagUpdates", processSyncCommitteeUpdates = "processSyncCommitteeUpdates", + processPendingBalanceDeposits = "processPendingBalanceDeposits", + processPendingConsolidations = "processPendingConsolidations", } export function processEpoch( @@ -76,7 +83,7 @@ export function processEpoch( // state.slashings is initially a Gwei (BigInt) vector, however since Nov 2023 it's converted to UintNum64 (number) vector in the state transition because: // - state.slashings[nextEpoch % EPOCHS_PER_SLASHINGS_VECTOR] is reset per epoch in processSlashingsReset() // - max slashed validators per epoch is SLOTS_PER_EPOCH * MAX_ATTESTER_SLASHINGS * MAX_VALIDATORS_PER_COMMITTEE which is 32 * 2 * 2048 = 131072 on mainnet - // - with that and 32_000_000_000 MAX_EFFECTIVE_BALANCE, it still fits in a number given that Math.floor(Number.MAX_SAFE_INTEGER / 32_000_000_000) = 281474 + // - with that and 32_000_000_000 MAX_EFFECTIVE_BALANCE or 2048_000_000_000 MAX_EFFECTIVE_BALANCE_ELECTRA, it still fits in a number given that Math.floor(Number.MAX_SAFE_INTEGER / 32_000_000_000) = 281474 if (maxValidatorsPerStateSlashing > maxSafeValidators) { throw new Error("Lodestar does not support this network, parameters don't fit number value inside state.slashings"); } @@ -100,7 +107,7 @@ export function processEpoch( // processRewardsAndPenalties(state, cache); { const timer = metrics?.epochTransitionStepTime.startTimer({step: EpochTransitionStep.processRegistryUpdates}); - processRegistryUpdates(state, cache); + processRegistryUpdates(fork, state, cache); timer?.(); } @@ -120,12 +127,32 @@ export function processEpoch( processEth1DataReset(state, cache); + if (fork >= ForkSeq.electra) { + const stateElectra = state as CachedBeaconStateElectra; + { + const timer = metrics?.epochTransitionStepTime.startTimer({ + step: EpochTransitionStep.processPendingBalanceDeposits, + }); + processPendingBalanceDeposits(stateElectra, cache); + timer?.(); + } + + { + const timer = metrics?.epochTransitionStepTime.startTimer({ + step: EpochTransitionStep.processPendingConsolidations, + }); + processPendingConsolidations(stateElectra, cache); + timer?.(); + } + } + { const timer = metrics?.epochTransitionStepTime.startTimer({ step: EpochTransitionStep.processEffectiveBalanceUpdates, }); - processEffectiveBalanceUpdates(state, cache); + const numUpdate = processEffectiveBalanceUpdates(fork, state, cache); timer?.(); + metrics?.numEffectiveBalanceUpdates.set(numUpdate); } processSlashingsReset(state, cache); @@ -152,7 +179,7 @@ export function processEpoch( const timer = metrics?.epochTransitionStepTime.startTimer({ step: EpochTransitionStep.processSyncCommitteeUpdates, }); - processSyncCommitteeUpdates(state as CachedBeaconStateAltair); + processSyncCommitteeUpdates(fork, state as CachedBeaconStateAltair); timer?.(); } } diff --git a/packages/state-transition/src/epoch/processEffectiveBalanceUpdates.ts b/packages/state-transition/src/epoch/processEffectiveBalanceUpdates.ts index 5f1df35b721..0ea4b49dddf 100644 --- 
a/packages/state-transition/src/epoch/processEffectiveBalanceUpdates.ts +++ b/packages/state-transition/src/epoch/processEffectiveBalanceUpdates.ts @@ -5,9 +5,12 @@ import { HYSTERESIS_QUOTIENT, HYSTERESIS_UPWARD_MULTIPLIER, MAX_EFFECTIVE_BALANCE, + MAX_EFFECTIVE_BALANCE_ELECTRA, + MIN_ACTIVATION_BALANCE, TIMELY_TARGET_FLAG_INDEX, } from "@lodestar/params"; import {EpochTransitionCache, CachedBeaconStateAllForks, BeaconStateAltair} from "../types.js"; +import {hasCompoundingWithdrawalCredential} from "../util/electra.js"; /** Same to https://github.com/ethereum/eth2.0-specs/blob/v1.1.0-alpha.5/specs/altair/beacon-chain.md#has_flag */ const TIMELY_TARGET = 1 << TIMELY_TARGET_FLAG_INDEX; @@ -20,8 +23,14 @@ const TIMELY_TARGET = 1 << TIMELY_TARGET_FLAG_INDEX; * * - On normal mainnet conditions 0 validators change their effective balance * - In case of big innactivity event a medium portion of validators may have their effectiveBalance updated + * + * Return number of validators updated */ -export function processEffectiveBalanceUpdates(state: CachedBeaconStateAllForks, cache: EpochTransitionCache): void { +export function processEffectiveBalanceUpdates( + fork: ForkSeq, + state: CachedBeaconStateAllForks, + cache: EpochTransitionCache +): number { const HYSTERESIS_INCREMENT = EFFECTIVE_BALANCE_INCREMENT / HYSTERESIS_QUOTIENT; const DOWNWARD_THRESHOLD = HYSTERESIS_INCREMENT * HYSTERESIS_DOWNWARD_MULTIPLIER; const UPWARD_THRESHOLD = HYSTERESIS_INCREMENT * HYSTERESIS_UPWARD_MULTIPLIER; @@ -32,10 +41,14 @@ export function processEffectiveBalanceUpdates(state: CachedBeaconStateAllForks, // update effective balances with hysteresis - // epochTransitionCache.balances is set in processRewardsAndPenalties(), so it's recycled here for performance. - // It defaults to `state.balances.getAll()` to make Typescript happy and for spec tests + // epochTransitionCache.balances is initialized in processRewardsAndPenalties() + // and updated in processPendingBalanceDeposits() and processPendingConsolidations() + // so it's recycled here for performance. const balances = cache.balances ?? state.balances.getAll(); + const currentEpochValidators = cache.validators; + const newCompoundingValidators = cache.newCompoundingValidators ?? new Set(); + let numUpdate = 0; for (let i = 0, len = balances.length; i < len; i++) { const balance = balances[i]; @@ -43,16 +56,28 @@ export function processEffectiveBalanceUpdates(state: CachedBeaconStateAllForks, let effectiveBalanceIncrement = effectiveBalanceIncrements[i]; let effectiveBalance = effectiveBalanceIncrement * EFFECTIVE_BALANCE_INCREMENT; + let effectiveBalanceLimit: number; + if (fork < ForkSeq.electra) { + effectiveBalanceLimit = MAX_EFFECTIVE_BALANCE; + } else { + // from electra, effectiveBalanceLimit is per validator + const isCompoundingValidator = + hasCompoundingWithdrawalCredential(currentEpochValidators[i].withdrawalCredentials) || + newCompoundingValidators.has(i); + effectiveBalanceLimit = isCompoundingValidator ? MAX_EFFECTIVE_BALANCE_ELECTRA : MIN_ACTIVATION_BALANCE; + } + if ( // Too big effectiveBalance > balance + DOWNWARD_THRESHOLD || // Too small. 
Check effectiveBalance < MAX_EFFECTIVE_BALANCE to prevent unnecessary updates - (effectiveBalance < MAX_EFFECTIVE_BALANCE && effectiveBalance < balance - UPWARD_THRESHOLD) + (effectiveBalance < effectiveBalanceLimit && effectiveBalance + UPWARD_THRESHOLD < balance) ) { - effectiveBalance = Math.min(balance - (balance % EFFECTIVE_BALANCE_INCREMENT), MAX_EFFECTIVE_BALANCE); // Update the state tree // Should happen rarely, so it's fine to update the tree const validator = validators.get(i); + + effectiveBalance = Math.min(balance - (balance % EFFECTIVE_BALANCE_INCREMENT), effectiveBalanceLimit); validator.effectiveBalance = effectiveBalance; // Also update the fast cached version const newEffectiveBalanceIncrement = Math.floor(effectiveBalance / EFFECTIVE_BALANCE_INCREMENT); @@ -76,6 +101,7 @@ export function processEffectiveBalanceUpdates(state: CachedBeaconStateAllForks, effectiveBalanceIncrement = newEffectiveBalanceIncrement; effectiveBalanceIncrements[i] = effectiveBalanceIncrement; + numUpdate++; } // TODO: Do this in afterEpochTransitionCache, looping a Uint8Array should be very cheap @@ -86,4 +112,5 @@ export function processEffectiveBalanceUpdates(state: CachedBeaconStateAllForks, } cache.nextEpochTotalActiveBalanceByIncrement = nextEpochTotalActiveBalanceByIncrement; + return numUpdate; } diff --git a/packages/state-transition/src/epoch/processPendingBalanceDeposits.ts b/packages/state-transition/src/epoch/processPendingBalanceDeposits.ts new file mode 100644 index 00000000000..bef3ec0b272 --- /dev/null +++ b/packages/state-transition/src/epoch/processPendingBalanceDeposits.ts @@ -0,0 +1,70 @@ +import {FAR_FUTURE_EPOCH} from "@lodestar/params"; +import {CachedBeaconStateElectra, EpochTransitionCache} from "../types.js"; +import {increaseBalance} from "../util/balance.js"; +import {getActivationExitChurnLimit} from "../util/validator.js"; + +/** + * Starting from Electra: + * Process pending balance deposits from state subject to churn limit and depositBalanceToConsume. + * For each eligible `deposit`, call `increaseBalance()`. + * Remove the processed deposits from `state.pendingBalanceDeposits`. + * Update `state.depositBalanceToConsume` for the next epoch. + * + * TODO Electra: Update ssz library to support batch push to `pendingBalanceDeposits` + */ +export function processPendingBalanceDeposits(state: CachedBeaconStateElectra, cache: EpochTransitionCache): void { + const nextEpoch = state.epochCtx.epoch + 1; + const availableForProcessing = state.depositBalanceToConsume + BigInt(getActivationExitChurnLimit(state.epochCtx)); + let processedAmount = 0n; + let nextDepositIndex = 0; + const depositsToPostpone = []; + const validators = state.validators; + const cachedBalances = cache.balances; + + for (const deposit of state.pendingBalanceDeposits.getAllReadonly()) { + const {amount, index: depositIndex} = deposit; + const validator = validators.getReadonly(depositIndex); + + // Validator is exiting, postpone the deposit until after withdrawable epoch + if (validator.exitEpoch < FAR_FUTURE_EPOCH) { + if (nextEpoch <= validator.withdrawableEpoch) { + depositsToPostpone.push(deposit); + } else { + // Deposited balance will never become active. Increase balance but do not consume churn
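The post-Electra hysteresis rule used in processEffectiveBalanceUpdates() above can be restated as a small standalone helper; this is only a sketch of that logic for the Electra case, with mainnet values noted in the comments (0.25 ETH downward and 1.25 ETH upward thresholds), not code from the PR:

import {
  EFFECTIVE_BALANCE_INCREMENT,
  HYSTERESIS_DOWNWARD_MULTIPLIER,
  HYSTERESIS_QUOTIENT,
  HYSTERESIS_UPWARD_MULTIPLIER,
  MAX_EFFECTIVE_BALANCE_ELECTRA,
  MIN_ACTIVATION_BALANCE,
} from "@lodestar/params";

// Returns the new effective balance for one validator, or null if no update is needed
function nextEffectiveBalance(balance: number, effectiveBalance: number, isCompounding: boolean): number | null {
  const hysteresisIncrement = EFFECTIVE_BALANCE_INCREMENT / HYSTERESIS_QUOTIENT;
  const downwardThreshold = hysteresisIncrement * HYSTERESIS_DOWNWARD_MULTIPLIER; // 0.25 ETH on mainnet
  const upwardThreshold = hysteresisIncrement * HYSTERESIS_UPWARD_MULTIPLIER; // 1.25 ETH on mainnet
  // From Electra the cap is per validator: 2048 ETH for compounding (0x02) credentials, else 32 ETH
  const limit = isCompounding ? MAX_EFFECTIVE_BALANCE_ELECTRA : MIN_ACTIVATION_BALANCE;

  if (
    effectiveBalance > balance + downwardThreshold ||
    (effectiveBalance < limit && effectiveBalance + upwardThreshold < balance)
  ) {
    return Math.min(balance - (balance % EFFECTIVE_BALANCE_INCREMENT), limit);
  }
  return null;
}

// e.g. a 0x01-credential validator never moves above MIN_ACTIVATION_BALANCE, while a compounding
// validator whose balance grows to 33.5 ETH steps up from 32 ETH to 33 ETH effective balance.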
+ increaseBalance(state, depositIndex, Number(amount)); + if (cachedBalances) { + cachedBalances[depositIndex] += Number(amount); + } + } + } else { + // Validator is not exiting, attempt to process deposit + if (processedAmount + amount > availableForProcessing) { + // Deposit does not fit in the churn, no more deposit processing in this epoch. + break; + } else { + // Deposit fits in the churn, process it. Increase balance and consume churn. + increaseBalance(state, depositIndex, Number(amount)); + if (cachedBalances) { + cachedBalances[depositIndex] += Number(amount); + } + processedAmount = processedAmount + amount; + } + } + // Regardless of how the deposit was handled, we move on in the queue. + nextDepositIndex++; + } + + const remainingPendingBalanceDeposits = state.pendingBalanceDeposits.sliceFrom(nextDepositIndex); + state.pendingBalanceDeposits = remainingPendingBalanceDeposits; + + if (remainingPendingBalanceDeposits.length === 0) { + state.depositBalanceToConsume = 0n; + } else { + state.depositBalanceToConsume = availableForProcessing - processedAmount; + } + + // TODO Electra: add a function in ListCompositeTreeView to support batch push operation + for (const deposit of depositsToPostpone) { + state.pendingBalanceDeposits.push(deposit); + } +} diff --git a/packages/state-transition/src/epoch/processPendingConsolidations.ts b/packages/state-transition/src/epoch/processPendingConsolidations.ts new file mode 100644 index 00000000000..28178a509bb --- /dev/null +++ b/packages/state-transition/src/epoch/processPendingConsolidations.ts @@ -0,0 +1,56 @@ +import {ValidatorIndex} from "@lodestar/types"; +import {CachedBeaconStateElectra, EpochTransitionCache} from "../types.js"; +import {decreaseBalance, increaseBalance} from "../util/balance.js"; +import {getActiveBalance} from "../util/validator.js"; +import {switchToCompoundingValidator} from "../util/electra.js"; + +/** + * Starting from Electra: + * Process every `pendingConsolidation` in `state.pendingConsolidations`. + * Churn limit was applied when enqueueing so we don't care about the limit here. + * However, we only process consolidations up to the current epoch. + * + * For each valid `pendingConsolidation`, update withdrawal credential of target + * validator to compounding, decrease balance of source validator and increase balance + * of target validator. + * + * Dequeue all processed consolidations from `state.pendingConsolidations` + * + */ +export function processPendingConsolidations(state: CachedBeaconStateElectra, cache: EpochTransitionCache): void { + const nextEpoch = state.epochCtx.epoch + 1; + let nextPendingConsolidation = 0; + const validators = state.validators; + const cachedBalances = cache.balances; + const newCompoundingValidators = new Set(); + + for (const pendingConsolidation of state.pendingConsolidations.getAllReadonly()) { + const {sourceIndex, targetIndex} = pendingConsolidation; + const sourceValidator = validators.getReadonly(sourceIndex); + + if (sourceValidator.slashed) { + nextPendingConsolidation++; + continue; + } + + if (sourceValidator.withdrawableEpoch > nextEpoch) { + break; + } + // Churn any target excess active balance of target and raise its max + switchToCompoundingValidator(state, targetIndex); + newCompoundingValidators.add(targetIndex); + // Move active balance to target. Excess balance is withdrawable.
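A worked example of the churn accounting in processPendingBalanceDeposits() above, with made-up numbers (a 256 ETH activation-exit churn limit and three pending 100 ETH deposits); this is only a sketch of the bookkeeping, not code from the PR:

// All amounts in Gwei
const availableForProcessing = 0n + 256_000_000_000n; // depositBalanceToConsume + churn limit = 256 ETH
const pendingAmounts = [100_000_000_000n, 100_000_000_000n, 100_000_000_000n]; // three 100 ETH deposits

let processedAmount = 0n;
let consumed = 0;
for (const amount of pendingAmounts) {
  if (processedAmount + amount > availableForProcessing) break; // the third deposit does not fit
  processedAmount += amount;
  consumed++;
}
// consumed === 2 and processedAmount === 200 ETH. The third deposit stays in
// state.pendingBalanceDeposits, and depositBalanceToConsume becomes
// availableForProcessing - processedAmount = 56 ETH, carried into the next epoch.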
+ const activeBalance = getActiveBalance(state, sourceIndex); + decreaseBalance(state, sourceIndex, activeBalance); + increaseBalance(state, targetIndex, activeBalance); + if (cachedBalances) { + cachedBalances[sourceIndex] -= activeBalance; + cachedBalances[targetIndex] += activeBalance; + } + + nextPendingConsolidation++; + } + + cache.newCompoundingValidators = newCompoundingValidators; + state.pendingConsolidations = state.pendingConsolidations.sliceFrom(nextPendingConsolidation); +} diff --git a/packages/state-transition/src/epoch/processRegistryUpdates.ts b/packages/state-transition/src/epoch/processRegistryUpdates.ts index 905e7b567c0..d2e93632dab 100644 --- a/packages/state-transition/src/epoch/processRegistryUpdates.ts +++ b/packages/state-transition/src/epoch/processRegistryUpdates.ts @@ -1,3 +1,4 @@ +import {ForkSeq} from "@lodestar/params"; import {computeActivationExitEpoch} from "../util/index.js"; import {initiateValidatorExit} from "../block/index.js"; import {EpochTransitionCache, CachedBeaconStateAllForks} from "../types.js"; @@ -16,7 +17,11 @@ import {EpochTransitionCache, CachedBeaconStateAllForks} from "../types.js"; * - indicesEligibleForActivationQueue: 0 * - indicesToEject: 0 */ -export function processRegistryUpdates(state: CachedBeaconStateAllForks, cache: EpochTransitionCache): void { +export function processRegistryUpdates( + fork: ForkSeq, + state: CachedBeaconStateAllForks, + cache: EpochTransitionCache +): void { const {epochCtx} = state; // Get the validators sub tree once for all the loop @@ -28,7 +33,7 @@ export function processRegistryUpdates(state: CachedBeaconStateAllForks, cache: for (const index of cache.indicesToEject) { // set validator exit epoch and withdrawable epoch // TODO: Figure out a way to quickly set properties on the validators tree - initiateValidatorExit(state, validators.get(index)); + initiateValidatorExit(fork, state, validators.get(index)); } // set new activation eligibilities @@ -38,7 +43,10 @@ export function processRegistryUpdates(state: CachedBeaconStateAllForks, cache: const finalityEpoch = state.finalizedCheckpoint.epoch; // this avoids an array allocation compared to `slice(0, epochCtx.activationChurnLimit)` - const len = Math.min(cache.indicesEligibleForActivation.length, epochCtx.activationChurnLimit); + const len = + fork < ForkSeq.electra + ? Math.min(cache.indicesEligibleForActivation.length, epochCtx.activationChurnLimit) + : cache.indicesEligibleForActivation.length; const activationEpoch = computeActivationExitEpoch(cache.currentEpoch); // dequeue validators for activation up to churn limit for (let i = 0; i < len; i++) { diff --git a/packages/state-transition/src/epoch/processRewardsAndPenalties.ts b/packages/state-transition/src/epoch/processRewardsAndPenalties.ts index 61680b81002..6c5d5aa3cb5 100644 --- a/packages/state-transition/src/epoch/processRewardsAndPenalties.ts +++ b/packages/state-transition/src/epoch/processRewardsAndPenalties.ts @@ -28,7 +28,8 @@ export function processRewardsAndPenalties( const balances = state.balances.getAll(); for (let i = 0, len = rewards.length; i < len; i++) { - balances[i] += rewards[i] - penalties[i] - (slashingPenalties[i] ?? 0); + const result = balances[i] + rewards[i] - penalties[i] - (slashingPenalties[i] ?? 0); + balances[i] = Math.max(result, 0); } // important: do not change state one balance at a time. 
Set them all at once, constructing the tree in one go diff --git a/packages/state-transition/src/epoch/processSyncCommitteeUpdates.ts b/packages/state-transition/src/epoch/processSyncCommitteeUpdates.ts index b3fd9b45053..df4e0364be1 100644 --- a/packages/state-transition/src/epoch/processSyncCommitteeUpdates.ts +++ b/packages/state-transition/src/epoch/processSyncCommitteeUpdates.ts @@ -1,5 +1,5 @@ import {aggregateSerializedPublicKeys} from "@chainsafe/blst"; -import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD} from "@lodestar/params"; +import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD, ForkSeq} from "@lodestar/params"; import {ssz} from "@lodestar/types"; import {getNextSyncCommitteeIndices} from "../util/seed.js"; import {CachedBeaconStateAltair} from "../types.js"; @@ -10,7 +10,7 @@ import {CachedBeaconStateAltair} from "../types.js"; * PERF: Once every `EPOCHS_PER_SYNC_COMMITTEE_PERIOD`, do an expensive operation to compute the next committee. * Calculating the next sync committee has a proportional cost to $VALIDATOR_COUNT */ -export function processSyncCommitteeUpdates(state: CachedBeaconStateAltair): void { +export function processSyncCommitteeUpdates(fork: ForkSeq, state: CachedBeaconStateAltair): void { const nextEpoch = state.epochCtx.epoch + 1; if (nextEpoch % EPOCHS_PER_SYNC_COMMITTEE_PERIOD === 0) { @@ -18,15 +18,15 @@ export function processSyncCommitteeUpdates(state: CachedBeaconStateAltair): voi const {effectiveBalanceIncrements} = state.epochCtx; const nextSyncCommitteeIndices = getNextSyncCommitteeIndices( + fork, state, activeValidatorIndices, effectiveBalanceIncrements ); + const validators = state.validators; // Using the index2pubkey cache is slower because it needs the serialized pubkey. - const nextSyncCommitteePubkeys = nextSyncCommitteeIndices.map( - (index) => state.validators.getReadonly(index).pubkey - ); + const nextSyncCommitteePubkeys = nextSyncCommitteeIndices.map((index) => validators.getReadonly(index).pubkey); // Rotate syncCommittee in state state.currentSyncCommittee = state.nextSyncCommittee; diff --git a/packages/state-transition/src/index.ts b/packages/state-transition/src/index.ts index 0ef460e784a..4ed801e3c49 100644 --- a/packages/state-transition/src/index.ts +++ b/packages/state-transition/src/index.ts @@ -11,6 +11,7 @@ export type { CachedBeaconStateBellatrix, CachedBeaconStateCapella, CachedBeaconStateDeneb, + CachedBeaconStateElectra, CachedBeaconStateAllForks, CachedBeaconStateExecutions, // Non-cached states @@ -19,6 +20,7 @@ export type { BeaconStateBellatrix, BeaconStateCapella, BeaconStateDeneb, + BeaconStateElectra, BeaconStateAllForks, BeaconStateExecutions, } from "./types.js"; @@ -42,7 +44,12 @@ export { export {type EpochTransitionCache, beforeProcessEpoch} from "./cache/epochTransitionCache.js"; // Aux data-structures -export {PubkeyIndexMap, type Index2PubkeyCache} from "./cache/pubkeyCache.js"; +export { + PubkeyIndexMap, + type Index2PubkeyCache, + type UnfinalizedPubkeyIndexMap, + newUnfinalizedPubkeyIndexMap, +} from "./cache/pubkeyCache.js"; export { type EffectiveBalanceIncrements, diff --git a/packages/state-transition/src/metrics.ts b/packages/state-transition/src/metrics.ts index 12cec46d9a4..a5e5463231f 100644 --- a/packages/state-transition/src/metrics.ts +++ b/packages/state-transition/src/metrics.ts @@ -11,6 +11,7 @@ export type BeaconStateTransitionMetrics = { processBlockTime: Histogram; processBlockCommitTime: Histogram; stateHashTreeRootTime: Histogram<{source: StateHashTreeRootSource}>; + numEffectiveBalanceUpdates: Gauge; 
preStateBalancesNodesPopulatedMiss: Gauge<{source: StateCloneSource}>; preStateBalancesNodesPopulatedHit: Gauge<{source: StateCloneSource}>; preStateValidatorsNodesPopulatedMiss: Gauge<{source: StateCloneSource}>; @@ -30,6 +31,11 @@ export type BeaconStateTransitionMetrics = { ) => void; }; +export type EpochCacheMetrics = { + finalizedPubkeyDuplicateInsert: Gauge; + newUnFinalizedPubkey: Gauge; +}; + export function onStateCloneMetrics( state: CachedBeaconStateAllForks, metrics: BeaconStateTransitionMetrics, diff --git a/packages/state-transition/src/signatureSets/attesterSlashings.ts b/packages/state-transition/src/signatureSets/attesterSlashings.ts index f0de50e5d0b..8088c252228 100644 --- a/packages/state-transition/src/signatureSets/attesterSlashings.ts +++ b/packages/state-transition/src/signatureSets/attesterSlashings.ts @@ -1,4 +1,4 @@ -import {SignedBeaconBlock, phase0, ssz} from "@lodestar/types"; +import {SignedBeaconBlock, ssz, AttesterSlashing, IndexedAttestationBigint} from "@lodestar/types"; import {DOMAIN_BEACON_ATTESTER} from "@lodestar/params"; import {computeSigningRoot, computeStartSlotAtEpoch, ISignatureSet, SignatureSetType} from "../util/index.js"; import {CachedBeaconStateAllForks} from "../types.js"; @@ -16,7 +16,7 @@ export function getAttesterSlashingsSignatureSets( /** Get signature sets from a single AttesterSlashing object */ export function getAttesterSlashingSignatureSets( state: CachedBeaconStateAllForks, - attesterSlashing: phase0.AttesterSlashing + attesterSlashing: AttesterSlashing ): ISignatureSet[] { return [attesterSlashing.attestation1, attesterSlashing.attestation2].map((attestation) => getIndexedAttestationBigintSignatureSet(state, attestation) @@ -25,15 +25,14 @@ export function getAttesterSlashingSignatureSets( export function getIndexedAttestationBigintSignatureSet( state: CachedBeaconStateAllForks, - indexedAttestation: phase0.IndexedAttestationBigint + indexedAttestation: IndexedAttestationBigint ): ISignatureSet { - const {index2pubkey} = state.epochCtx; const slot = computeStartSlotAtEpoch(Number(indexedAttestation.data.target.epoch as bigint)); const domain = state.config.getDomain(state.slot, DOMAIN_BEACON_ATTESTER, slot); return { type: SignatureSetType.aggregate, - pubkeys: indexedAttestation.attestingIndices.map((i) => index2pubkey[i]), + pubkeys: indexedAttestation.attestingIndices.map((i) => state.epochCtx.index2pubkey[i]), signingRoot: computeSigningRoot(ssz.phase0.AttestationDataBigint, indexedAttestation.data, domain), signature: indexedAttestation.signature, }; diff --git a/packages/state-transition/src/signatureSets/index.ts b/packages/state-transition/src/signatureSets/index.ts index 983e131e00e..c883bb0587f 100644 --- a/packages/state-transition/src/signatureSets/index.ts +++ b/packages/state-transition/src/signatureSets/index.ts @@ -31,6 +31,9 @@ export function getBlockSignatureSets( skipProposerSignature?: boolean; } ): ISignatureSet[] { + // fork based validations + const fork = state.config.getForkSeq(signedBlock.message.slot); + const signatureSets = [ getRandaoRevealSignatureSet(state, signedBlock.message), ...getProposerSlashingsSignatureSets(state, signedBlock), @@ -43,9 +46,6 @@ export function getBlockSignatureSets( signatureSets.push(getBlockProposerSignatureSet(state, signedBlock)); } - // fork based validations - const fork = state.config.getForkSeq(signedBlock.message.slot); - // Only after altair fork, validate tSyncCommitteeSignature if (fork >= ForkSeq.altair) { const syncCommitteeSignatureSet = 
getSyncCommitteeSignatureSet( diff --git a/packages/state-transition/src/signatureSets/indexedAttestation.ts b/packages/state-transition/src/signatureSets/indexedAttestation.ts index 9ae6627d0b5..86535fece8b 100644 --- a/packages/state-transition/src/signatureSets/indexedAttestation.ts +++ b/packages/state-transition/src/signatureSets/indexedAttestation.ts @@ -41,7 +41,11 @@ export function getAttestationsSignatureSets( state: CachedBeaconStateAllForks, signedBlock: SignedBeaconBlock ): ISignatureSet[] { + // TODO: figure how to get attesting indices of an attestation once per block processing return signedBlock.message.body.attestations.map((attestation) => - getIndexedAttestationSignatureSet(state, state.epochCtx.getIndexedAttestation(attestation)) + getIndexedAttestationSignatureSet( + state, + state.epochCtx.getIndexedAttestation(state.config.getForkSeq(signedBlock.message.slot), attestation) + ) ); } diff --git a/packages/state-transition/src/slot/index.ts b/packages/state-transition/src/slot/index.ts index 6c4add1d123..b05bd7ac93f 100644 --- a/packages/state-transition/src/slot/index.ts +++ b/packages/state-transition/src/slot/index.ts @@ -7,6 +7,7 @@ export {upgradeStateToAltair} from "./upgradeStateToAltair.js"; export {upgradeStateToBellatrix} from "./upgradeStateToBellatrix.js"; export {upgradeStateToCapella} from "./upgradeStateToCapella.js"; export {upgradeStateToDeneb} from "./upgradeStateToDeneb.js"; +export {upgradeStateToElectra} from "./upgradeStateToElectra.js"; /** * Dial state to next slot. Common for all forks diff --git a/packages/state-transition/src/slot/upgradeStateToAltair.ts b/packages/state-transition/src/slot/upgradeStateToAltair.ts index 0afa43930ef..fa7e6fbeba8 100644 --- a/packages/state-transition/src/slot/upgradeStateToAltair.ts +++ b/packages/state-transition/src/slot/upgradeStateToAltair.ts @@ -70,6 +70,7 @@ export function upgradeStateToAltair(statePhase0: CachedBeaconStatePhase0): Cach stateAltair.inactivityScores = ssz.altair.InactivityScores.toViewDU(newZeroedArray(validatorCount)); const {syncCommittee, indices} = getNextSyncCommittee( + ForkSeq.altair, stateAltair, stateAltair.epochCtx.nextShuffling.activeIndices, stateAltair.epochCtx.effectiveBalanceIncrements diff --git a/packages/state-transition/src/slot/upgradeStateToElectra.ts b/packages/state-transition/src/slot/upgradeStateToElectra.ts new file mode 100644 index 00000000000..6600ad98e80 --- /dev/null +++ b/packages/state-transition/src/slot/upgradeStateToElectra.ts @@ -0,0 +1,167 @@ +import {Epoch, ValidatorIndex, ssz} from "@lodestar/types"; +import {FAR_FUTURE_EPOCH, UNSET_DEPOSIT_REQUESTS_START_INDEX} from "@lodestar/params"; +import {CachedBeaconStateDeneb} from "../types.js"; +import {CachedBeaconStateElectra, getCachedBeaconState} from "../cache/stateCache.js"; +import { + hasCompoundingWithdrawalCredential, + queueEntireBalanceAndResetValidator, + queueExcessActiveBalance, +} from "../util/electra.js"; +import {computeActivationExitEpoch} from "../util/epoch.js"; +import {getActivationExitChurnLimit, getConsolidationChurnLimit} from "../util/validator.js"; + +/** + * Upgrade a state from Deneb to Electra. 
+ */ +export function upgradeStateToElectra(stateDeneb: CachedBeaconStateDeneb): CachedBeaconStateElectra { + const {config} = stateDeneb; + + ssz.deneb.BeaconState.commitViewDU(stateDeneb); + const stateElectraCloned = stateDeneb; + + const stateElectraView = ssz.electra.BeaconState.defaultViewDU(); + stateElectraView.genesisTime = stateElectraCloned.genesisTime; + stateElectraView.genesisValidatorsRoot = stateElectraCloned.genesisValidatorsRoot; + stateElectraView.slot = stateElectraCloned.slot; + stateElectraView.fork = ssz.phase0.Fork.toViewDU({ + previousVersion: stateDeneb.fork.currentVersion, + currentVersion: config.ELECTRA_FORK_VERSION, + epoch: stateDeneb.epochCtx.epoch, + }); + stateElectraView.latestBlockHeader = stateElectraCloned.latestBlockHeader; + stateElectraView.blockRoots = stateElectraCloned.blockRoots; + stateElectraView.stateRoots = stateElectraCloned.stateRoots; + stateElectraView.historicalRoots = stateElectraCloned.historicalRoots; + stateElectraView.eth1Data = stateElectraCloned.eth1Data; + stateElectraView.eth1DataVotes = stateElectraCloned.eth1DataVotes; + stateElectraView.eth1DepositIndex = stateElectraCloned.eth1DepositIndex; + stateElectraView.validators = stateElectraCloned.validators; + stateElectraView.balances = stateElectraCloned.balances; + stateElectraView.randaoMixes = stateElectraCloned.randaoMixes; + stateElectraView.slashings = stateElectraCloned.slashings; + stateElectraView.previousEpochParticipation = stateElectraCloned.previousEpochParticipation; + stateElectraView.currentEpochParticipation = stateElectraCloned.currentEpochParticipation; + stateElectraView.justificationBits = stateElectraCloned.justificationBits; + stateElectraView.previousJustifiedCheckpoint = stateElectraCloned.previousJustifiedCheckpoint; + stateElectraView.currentJustifiedCheckpoint = stateElectraCloned.currentJustifiedCheckpoint; + stateElectraView.finalizedCheckpoint = stateElectraCloned.finalizedCheckpoint; + stateElectraView.inactivityScores = stateElectraCloned.inactivityScores; + stateElectraView.currentSyncCommittee = stateElectraCloned.currentSyncCommittee; + stateElectraView.nextSyncCommittee = stateElectraCloned.nextSyncCommittee; + stateElectraView.latestExecutionPayloadHeader = ssz.electra.BeaconState.fields.latestExecutionPayloadHeader.toViewDU({ + ...stateElectraCloned.latestExecutionPayloadHeader.toValue(), + depositRequestsRoot: ssz.Root.defaultValue(), + withdrawalRequestsRoot: ssz.Root.defaultValue(), + consolidationRequestsRoot: ssz.Root.defaultValue(), + }); + stateElectraView.nextWithdrawalIndex = stateDeneb.nextWithdrawalIndex; + stateElectraView.nextWithdrawalValidatorIndex = stateDeneb.nextWithdrawalValidatorIndex; + stateElectraView.historicalSummaries = stateElectraCloned.historicalSummaries; + + // latestExecutionPayloadHeader's depositRequestsRoot and withdrawalRequestsRoot set to zeros by default + // default value of depositRequestsStartIndex is UNSET_DEPOSIT_REQUESTS_START_INDEX + stateElectraView.depositRequestsStartIndex = UNSET_DEPOSIT_REQUESTS_START_INDEX; + stateElectraView.depositBalanceToConsume = BigInt(0); + stateElectraView.exitBalanceToConsume = BigInt(0); + + const validatorsArr = stateElectraView.validators.getAllReadonly(); + const exitEpochs: Epoch[] = []; + + // [EIP-7251]: add validators that are not yet active to pending balance deposits + const preActivation: ValidatorIndex[] = []; + for (let validatorIndex = 0; validatorIndex < validatorsArr.length; validatorIndex++) { + const {activationEpoch, exitEpoch} = 
validatorsArr[validatorIndex]; + if (activationEpoch === FAR_FUTURE_EPOCH) { + preActivation.push(validatorIndex); + } + if (exitEpoch !== FAR_FUTURE_EPOCH) { + exitEpochs.push(exitEpoch); + } + } + + const currentEpochPre = stateDeneb.epochCtx.epoch; + + if (exitEpochs.length === 0) { + exitEpochs.push(currentEpochPre); + } + stateElectraView.earliestExitEpoch = Math.max(...exitEpochs) + 1; + stateElectraView.consolidationBalanceToConsume = BigInt(0); + stateElectraView.earliestConsolidationEpoch = computeActivationExitEpoch(currentEpochPre); + // stateElectraView.pendingBalanceDeposits = ssz.electra.PendingBalanceDeposits.defaultViewDU(); + // pendingBalanceDeposits, pendingPartialWithdrawals, pendingConsolidations are default values + // TODO-electra: can we improve this? + stateElectraView.commit(); + const tmpElectraState = getCachedBeaconState(stateElectraView, stateDeneb); + stateElectraView.exitBalanceToConsume = BigInt(getActivationExitChurnLimit(tmpElectraState.epochCtx)); + stateElectraView.consolidationBalanceToConsume = BigInt(getConsolidationChurnLimit(tmpElectraState.epochCtx)); + + preActivation.sort((i0, i1) => { + const res = validatorsArr[i0].activationEligibilityEpoch - validatorsArr[i1].activationEligibilityEpoch; + return res !== 0 ? res : i0 - i1; + }); + + for (const validatorIndex of preActivation) { + queueEntireBalanceAndResetValidator(stateElectraView as CachedBeaconStateElectra, validatorIndex); + } + + for (let i = 0; i < validatorsArr.length; i++) { + const validator = validatorsArr[i]; + + // [EIP-7251]: Ensure early adopters of compounding credentials go through the activation churn + const withdrawalCredential = validator.withdrawalCredentials; + if (hasCompoundingWithdrawalCredential(withdrawalCredential)) { + queueExcessActiveBalance(stateElectraView as CachedBeaconStateElectra, i); + } + } + + const stateElectra = getCachedBeaconState(stateElectraView, stateDeneb); + // Commit new added fields ViewDU to the root node + stateElectra.commit(); + // Clear cache to ensure the cache of deneb fields is not used by new ELECTRA fields + stateElectra["clearCache"](); + + return stateElectra; +} + +export function upgradeStateToElectraOriginal(stateDeneb: CachedBeaconStateDeneb): CachedBeaconStateElectra { + const {config} = stateDeneb; + + const stateElectraNode = ssz.deneb.BeaconState.commitViewDU(stateDeneb); + const stateElectraView = ssz.electra.BeaconState.getViewDU(stateElectraNode); + + const stateElectra = getCachedBeaconState(stateElectraView, stateDeneb); + + stateElectra.fork = ssz.phase0.Fork.toViewDU({ + previousVersion: stateDeneb.fork.currentVersion, + currentVersion: config.ELECTRA_FORK_VERSION, + epoch: stateDeneb.epochCtx.epoch, + }); + + // latestExecutionPayloadHeader's depositRequestsRoot and withdrawalRequestsRoot set to zeros by default + // default value of depositRequestsStartIndex is UNSET_DEPOSIT_REQUESTS_START_INDEX + stateElectra.depositRequestsStartIndex = UNSET_DEPOSIT_REQUESTS_START_INDEX; + + const validatorsArr = stateElectra.validators.getAllReadonly(); + + for (let i = 0; i < validatorsArr.length; i++) { + const validator = validatorsArr[i]; + + // [EIP-7251]: add validators that are not yet active to pending balance deposits + if (validator.activationEligibilityEpoch === FAR_FUTURE_EPOCH) { + queueEntireBalanceAndResetValidator(stateElectra, i); + } + + // [EIP-7251]: Ensure early adopters of compounding credentials go through the activation churn + const withdrawalCredential = validator.withdrawalCredentials; + if 
(hasCompoundingWithdrawalCredential(withdrawalCredential)) { + queueExcessActiveBalance(stateElectra, i); + } + } + + // Commit new added fields ViewDU to the root node + stateElectra.commit(); + // Clear cache to ensure the cache of deneb fields is not used by new ELECTRA fields + stateElectra["clearCache"](); + + return stateElectra; +} diff --git a/packages/state-transition/src/stateTransition.ts b/packages/state-transition/src/stateTransition.ts index a454bf4b34d..40e87c8d07d 100644 --- a/packages/state-transition/src/stateTransition.ts +++ b/packages/state-transition/src/stateTransition.ts @@ -9,6 +9,7 @@ import { CachedBeaconStateAltair, CachedBeaconStateBellatrix, CachedBeaconStateCapella, + CachedBeaconStateDeneb, } from "./types.js"; import {computeEpochAtSlot} from "./util/index.js"; import {verifyProposerSignature} from "./signatureSets/index.js"; @@ -18,6 +19,7 @@ import { upgradeStateToBellatrix, upgradeStateToCapella, upgradeStateToDeneb, + upgradeStateToElectra, } from "./slot/index.js"; import {processBlock} from "./block/index.js"; import {EpochTransitionStep, processEpoch} from "./epoch/index.js"; @@ -226,19 +228,22 @@ function processSlotsWithTransientCache( epochTransitionTimer?.(); // Upgrade state if exactly at epoch boundary - const stateSlot = computeEpochAtSlot(postState.slot); - if (stateSlot === config.ALTAIR_FORK_EPOCH) { + const stateEpoch = computeEpochAtSlot(postState.slot); + if (stateEpoch === config.ALTAIR_FORK_EPOCH) { postState = upgradeStateToAltair(postState as CachedBeaconStatePhase0) as CachedBeaconStateAllForks; } - if (stateSlot === config.BELLATRIX_FORK_EPOCH) { + if (stateEpoch === config.BELLATRIX_FORK_EPOCH) { postState = upgradeStateToBellatrix(postState as CachedBeaconStateAltair) as CachedBeaconStateAllForks; } - if (stateSlot === config.CAPELLA_FORK_EPOCH) { + if (stateEpoch === config.CAPELLA_FORK_EPOCH) { postState = upgradeStateToCapella(postState as CachedBeaconStateBellatrix) as CachedBeaconStateAllForks; } - if (stateSlot === config.DENEB_FORK_EPOCH) { + if (stateEpoch === config.DENEB_FORK_EPOCH) { postState = upgradeStateToDeneb(postState as CachedBeaconStateCapella) as CachedBeaconStateAllForks; } + if (stateEpoch === config.ELECTRA_FORK_EPOCH) { + postState = upgradeStateToElectra(postState as CachedBeaconStateDeneb) as CachedBeaconStateAllForks; + } } else { postState.slot++; } diff --git a/packages/state-transition/src/types.ts b/packages/state-transition/src/types.ts index 6b6b1f6260b..d3a1ed69a7a 100644 --- a/packages/state-transition/src/types.ts +++ b/packages/state-transition/src/types.ts @@ -9,6 +9,7 @@ export type { CachedBeaconStateBellatrix, CachedBeaconStateCapella, CachedBeaconStateDeneb, + CachedBeaconStateElectra, } from "./cache/stateCache.js"; export type { @@ -19,4 +20,5 @@ export type { BeaconStateBellatrix, BeaconStateCapella, BeaconStateDeneb, + BeaconStateElectra, } from "./cache/types.js"; diff --git a/packages/state-transition/src/util/balance.ts b/packages/state-transition/src/util/balance.ts index e305c745ab7..e9b7a06e413 100644 --- a/packages/state-transition/src/util/balance.ts +++ b/packages/state-transition/src/util/balance.ts @@ -56,8 +56,8 @@ export function getEffectiveBalanceIncrementsZeroInactive( const validatorCount = justifiedState.validators.length; const {effectiveBalanceIncrements} = justifiedState.epochCtx; // Slice up to `validatorCount` since it won't be mutated, nor accessed beyond `validatorCount` - // NOTE: Force to use Uint8Array.slice (copy) instead of Buffer.call (not copy) - const 
effectiveBalanceIncrementsZeroInactive = Uint8Array.prototype.slice.call( + // NOTE: Force to use Uint16Array.slice (copy) instead of Buffer.call (not copy) + const effectiveBalanceIncrementsZeroInactive = Uint16Array.prototype.slice.call( effectiveBalanceIncrements, 0, validatorCount diff --git a/packages/state-transition/src/util/deposit.ts b/packages/state-transition/src/util/deposit.ts new file mode 100644 index 00000000000..e8ef93c515d --- /dev/null +++ b/packages/state-transition/src/util/deposit.ts @@ -0,0 +1,24 @@ +import {ForkSeq, MAX_DEPOSITS} from "@lodestar/params"; +import {UintNum64, phase0} from "@lodestar/types"; +import {CachedBeaconStateAllForks, CachedBeaconStateElectra} from "../types.js"; + +export function getEth1DepositCount(state: CachedBeaconStateAllForks, eth1Data?: phase0.Eth1Data): UintNum64 { + const eth1DataToUse = eth1Data ?? state.eth1Data; + if (state.config.getForkSeq(state.slot) >= ForkSeq.electra) { + const electraState = state as CachedBeaconStateElectra; + // eth1DataIndexLimit = min(UintNum64, UintBn64) can be safely casted as UintNum64 + // since the result lies within upper and lower bound of UintNum64 + const eth1DataIndexLimit: UintNum64 = + eth1DataToUse.depositCount < electraState.depositRequestsStartIndex + ? eth1DataToUse.depositCount + : Number(electraState.depositRequestsStartIndex); + + if (state.eth1DepositIndex < eth1DataIndexLimit) { + return Math.min(MAX_DEPOSITS, eth1DataIndexLimit - state.eth1DepositIndex); + } else { + return 0; + } + } else { + return Math.min(MAX_DEPOSITS, eth1DataToUse.depositCount - state.eth1DepositIndex); + } +} diff --git a/packages/state-transition/src/util/electra.ts b/packages/state-transition/src/util/electra.ts new file mode 100644 index 00000000000..63f74bc96cc --- /dev/null +++ b/packages/state-transition/src/util/electra.ts @@ -0,0 +1,109 @@ +import { + COMPOUNDING_WITHDRAWAL_PREFIX, + FAR_FUTURE_EPOCH, + ForkSeq, + MAX_EFFECTIVE_BALANCE, + MIN_ACTIVATION_BALANCE, +} from "@lodestar/params"; +import {ValidatorIndex, phase0, ssz} from "@lodestar/types"; +import {CachedBeaconStateElectra} from "../types.js"; +import {getValidatorMaxEffectiveBalance} from "./validator.js"; +import {hasEth1WithdrawalCredential} from "./capella.js"; + +type ValidatorInfo = Pick; + +export function hasCompoundingWithdrawalCredential(withdrawalCredentials: Uint8Array): boolean { + return withdrawalCredentials[0] === COMPOUNDING_WITHDRAWAL_PREFIX; +} + +export function hasExecutionWithdrawalCredential(withdrawalCredentials: Uint8Array): boolean { + return ( + hasCompoundingWithdrawalCredential(withdrawalCredentials) || hasEth1WithdrawalCredential(withdrawalCredentials) + ); +} + +export function isFullyWithdrawableValidator( + fork: ForkSeq, + validatorCredential: ValidatorInfo, + balance: number, + epoch: number +): boolean { + const {withdrawableEpoch, withdrawalCredentials} = validatorCredential; + + if (fork < ForkSeq.capella) { + throw new Error(`isFullyWithdrawableValidator not supported at forkSeq=${fork} < ForkSeq.capella`); + } + const hasWithdrawableCredentials = + fork >= ForkSeq.electra + ? 
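A short worked example of the getEth1DepositCount() rule above (the EIP-6110 transition), using made-up numbers and assuming the mainnet MAX_DEPOSITS of 16:

// Pre-Electra, or post-Electra while depositRequestsStartIndex is still UNSET_DEPOSIT_REQUESTS_START_INDEX:
//   depositCount = 1000, eth1DepositIndex = 990  -> min(16, 1000 - 990) = 10 deposits in the next block
// Post-Electra, once an execution-layer deposit request sets depositRequestsStartIndex = 995:
//   eth1DataIndexLimit = min(1000, 995) = 995
//   eth1DepositIndex = 990 -> min(16, 995 - 990) = 5 deposits
//   eth1DepositIndex = 995 -> 0, the Eth1Data deposit queue is drained and only EIP-6110 requests remain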
hasExecutionWithdrawalCredential(withdrawalCredentials) + : hasEth1WithdrawalCredential(withdrawalCredentials); + + return hasWithdrawableCredentials && withdrawableEpoch <= epoch && balance > 0; +} + +export function isPartiallyWithdrawableValidator( + fork: ForkSeq, + validatorCredential: ValidatorInfo, + balance: number +): boolean { + const {effectiveBalance, withdrawalCredentials} = validatorCredential; + + if (fork < ForkSeq.capella) { + throw new Error(`isPartiallyWithdrawableValidator not supported at forkSeq=${fork} < ForkSeq.capella`); + } + const hasWithdrawableCredentials = + fork >= ForkSeq.electra + ? hasExecutionWithdrawalCredential(withdrawalCredentials) + : hasEth1WithdrawalCredential(withdrawalCredentials); + + const validatorMaxEffectiveBalance = + fork >= ForkSeq.electra ? getValidatorMaxEffectiveBalance(withdrawalCredentials) : MAX_EFFECTIVE_BALANCE; + const hasMaxEffectiveBalance = effectiveBalance === validatorMaxEffectiveBalance; + const hasExcessBalance = balance > validatorMaxEffectiveBalance; + + return hasWithdrawableCredentials && hasMaxEffectiveBalance && hasExcessBalance; +} + +export function switchToCompoundingValidator(state: CachedBeaconStateElectra, index: ValidatorIndex): void { + const validator = state.validators.get(index); + + if (hasEth1WithdrawalCredential(validator.withdrawalCredentials)) { + // directly modifying the byte would cause ssz to miss the modification and compute a wrong root; + // the extra slice could be avoided, but this is not a hot path, so it's cleaner to copy the + // credentials and avoid side effects + const newWithdrawalCredentials = validator.withdrawalCredentials.slice(); + newWithdrawalCredentials[0] = COMPOUNDING_WITHDRAWAL_PREFIX; + validator.withdrawalCredentials = newWithdrawalCredentials; + queueExcessActiveBalance(state, index); + } +} + +export function queueExcessActiveBalance(state: CachedBeaconStateElectra, index: ValidatorIndex): void { + const balance = state.balances.get(index); + if (balance > MIN_ACTIVATION_BALANCE) { + const excessBalance = balance - MIN_ACTIVATION_BALANCE; + state.balances.set(index, MIN_ACTIVATION_BALANCE); + + const pendingBalanceDeposit = ssz.electra.PendingBalanceDeposit.toViewDU({ + index, + amount: BigInt(excessBalance), + }); + state.pendingBalanceDeposits.push(pendingBalanceDeposit); + } +} + +export function queueEntireBalanceAndResetValidator(state: CachedBeaconStateElectra, index: ValidatorIndex): void { + const balance = state.balances.get(index); + state.balances.set(index, 0); + + const validator = state.validators.get(index); + validator.effectiveBalance = 0; + state.epochCtx.effectiveBalanceIncrementsSet(index, 0); + validator.activationEligibilityEpoch = FAR_FUTURE_EPOCH; + + const pendingBalanceDeposit = ssz.electra.PendingBalanceDeposit.toViewDU({ + index, + amount: BigInt(balance), + }); + state.pendingBalanceDeposits.push(pendingBalanceDeposit); +} diff --git a/packages/state-transition/src/util/epoch.ts b/packages/state-transition/src/util/epoch.ts index ba182f627de..7fed5e53f1f 100644 --- a/packages/state-transition/src/util/epoch.ts +++ b/packages/state-transition/src/util/epoch.ts @@ -1,5 +1,7 @@ import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD, GENESIS_EPOCH, MAX_SEED_LOOKAHEAD, SLOTS_PER_EPOCH} from "@lodestar/params"; -import {BeaconState, Epoch, Slot, SyncPeriod} from "@lodestar/types"; +import {BeaconState, Epoch, Slot, SyncPeriod, Gwei} from "@lodestar/types"; +import {CachedBeaconStateElectra} from "../types.js"; +import {getActivationExitChurnLimit,
getConsolidationChurnLimit} from "./validator.js"; /** * Return the epoch number at the given slot. @@ -39,6 +41,60 @@ export function computeActivationExitEpoch(epoch: Epoch): Epoch { return epoch + 1 + MAX_SEED_LOOKAHEAD; } +export function computeExitEpochAndUpdateChurn(state: CachedBeaconStateElectra, exitBalance: Gwei): number { + let earliestExitEpoch = Math.max(state.earliestExitEpoch, computeActivationExitEpoch(state.epochCtx.epoch)); + const perEpochChurn = getActivationExitChurnLimit(state.epochCtx); + + // New epoch for exits. + let exitBalanceToConsume = + state.earliestExitEpoch < earliestExitEpoch ? perEpochChurn : Number(state.exitBalanceToConsume); + + // Exit doesn't fit in the current earliest epoch. + if (exitBalance > exitBalanceToConsume) { + const balanceToProcess = Number(exitBalance) - exitBalanceToConsume; + const additionalEpochs = Math.floor((balanceToProcess - 1) / perEpochChurn) + 1; + earliestExitEpoch += additionalEpochs; + exitBalanceToConsume += additionalEpochs * perEpochChurn; + } + + // Consume the balance and update state variables. + state.exitBalanceToConsume = BigInt(exitBalanceToConsume) - exitBalance; + state.earliestExitEpoch = earliestExitEpoch; + + return state.earliestExitEpoch; +} + +export function computeConsolidationEpochAndUpdateChurn( + state: CachedBeaconStateElectra, + consolidationBalance: Gwei +): number { + let earliestConsolidationEpoch = Math.max( + state.earliestConsolidationEpoch, + computeActivationExitEpoch(state.epochCtx.epoch) + ); + const perEpochConsolidationChurn = getConsolidationChurnLimit(state.epochCtx); + + // New epoch for consolidations + let consolidationBalanceToConsume = + state.earliestConsolidationEpoch < earliestConsolidationEpoch + ? perEpochConsolidationChurn + : Number(state.consolidationBalanceToConsume); + + // Consolidation doesn't fit in the current earliest epoch. + if (consolidationBalance > consolidationBalanceToConsume) { + const balanceToProcess = Number(consolidationBalance) - consolidationBalanceToConsume; + const additionalEpochs = Math.floor((balanceToProcess - 1) / perEpochConsolidationChurn) + 1; + earliestConsolidationEpoch += additionalEpochs; + consolidationBalanceToConsume += additionalEpochs * perEpochConsolidationChurn; + } + + // Consume the balance and update state variables. + state.consolidationBalanceToConsume = BigInt(consolidationBalanceToConsume) - consolidationBalance; + state.earliestConsolidationEpoch = earliestConsolidationEpoch; + + return state.earliestConsolidationEpoch; +} + /** * Return the current epoch of the given state. 
*/ diff --git a/packages/state-transition/src/util/execution.ts b/packages/state-transition/src/util/execution.ts index 1c5046354fc..06e654f9f1d 100644 --- a/packages/state-transition/src/util/execution.ts +++ b/packages/state-transition/src/util/execution.ts @@ -2,6 +2,7 @@ import { bellatrix, capella, deneb, + electra, isBlindedBeaconBlockBody, ssz, BeaconBlock, @@ -170,5 +171,14 @@ export function executionPayloadToPayloadHeader(fork: ForkSeq, payload: Executio ).excessBlobGas; } + if (fork >= ForkSeq.electra) { + (bellatrixPayloadFields as electra.ExecutionPayloadHeader).depositRequestsRoot = + ssz.electra.DepositRequests.hashTreeRoot((payload as electra.ExecutionPayload).depositRequests); + (bellatrixPayloadFields as electra.ExecutionPayloadHeader).withdrawalRequestsRoot = + ssz.electra.WithdrawalRequests.hashTreeRoot((payload as electra.ExecutionPayload).withdrawalRequests); + (bellatrixPayloadFields as electra.ExecutionPayloadHeader).consolidationRequestsRoot = + ssz.electra.ConsolidationRequests.hashTreeRoot((payload as electra.ExecutionPayload).consolidationRequests); + } + return bellatrixPayloadFields; } diff --git a/packages/state-transition/src/util/genesis.ts b/packages/state-transition/src/util/genesis.ts index 1041c33d0eb..02bcef00bb5 100644 --- a/packages/state-transition/src/util/genesis.ts +++ b/packages/state-transition/src/util/genesis.ts @@ -4,18 +4,21 @@ import { EFFECTIVE_BALANCE_INCREMENT, EPOCHS_PER_HISTORICAL_VECTOR, ForkName, + ForkSeq, GENESIS_EPOCH, GENESIS_SLOT, MAX_EFFECTIVE_BALANCE, + UNSET_DEPOSIT_REQUESTS_START_INDEX, } from "@lodestar/params"; import {Bytes32, phase0, Root, ssz, TimeSeconds} from "@lodestar/types"; -import {CachedBeaconStateAllForks, BeaconStateAllForks} from "../types.js"; +import {CachedBeaconStateAllForks, BeaconStateAllForks, CachedBeaconStateElectra} from "../types.js"; import {createCachedBeaconState} from "../cache/stateCache.js"; import {EpochCacheImmutableData} from "../cache/epochCache.js"; import {processDeposit} from "../block/processDeposit.js"; +import {increaseBalance} from "../index.js"; import {computeEpochAtSlot} from "./epoch.js"; -import {getActiveValidatorIndices} from "./validator.js"; +import {getActiveValidatorIndices, getValidatorMaxEffectiveBalance} from "./validator.js"; import {getTemporaryBlockHeader} from "./blockRoot.js"; import {newFilledArray} from "./array.js"; import {getNextSyncCommittee} from "./syncCommittee.js"; @@ -131,6 +134,7 @@ export function applyDeposits( newDeposits: phase0.Deposit[], fullDepositDataRootList?: DepositDataRootViewDU ): {activatedValidatorCount: number} { + const fork = config.getForkSeq(state.slot); const depositDataRootList: Root[] = []; const fullDepositDataRootArr = fullDepositDataRootList ? 
fullDepositDataRootList.getAllReadonlyValues() : null; @@ -163,6 +167,16 @@ export function applyDeposits( processDeposit(fork, state, deposit); } + // Process deposit balance updates + if (fork >= ForkSeq.electra) { + const stateElectra = state as CachedBeaconStateElectra; + stateElectra.commit(); + for (const {index: validatorIndex, amount} of stateElectra.pendingBalanceDeposits.getAllReadonly()) { + increaseBalance(state, validatorIndex, Number(amount)); + } + stateElectra.pendingBalanceDeposits = ssz.electra.PendingBalanceDeposits.defaultViewDU(); + } + // Process activations const {epochCtx} = state; const balancesArr = state.balances.getAll(); @@ -179,12 +193,15 @@ export function applyDeposits( } const balance = balancesArr[i]; - const effectiveBalance = Math.min(balance - (balance % EFFECTIVE_BALANCE_INCREMENT), MAX_EFFECTIVE_BALANCE); + const effectiveBalance = Math.min( + balance - (balance % EFFECTIVE_BALANCE_INCREMENT), + getValidatorMaxEffectiveBalance(validator.withdrawalCredentials) + ); validator.effectiveBalance = effectiveBalance; epochCtx.effectiveBalanceIncrementsSet(i, effectiveBalance); - if (validator.effectiveBalance === MAX_EFFECTIVE_BALANCE) { + if (validator.effectiveBalance >= MAX_EFFECTIVE_BALANCE) { validator.activationEligibilityEpoch = GENESIS_EPOCH; validator.activationEpoch = GENESIS_EPOCH; activatedValidatorCount++; @@ -214,6 +231,7 @@ export function initializeBeaconStateFromEth1( | typeof ssz.bellatrix.ExecutionPayloadHeader | typeof ssz.capella.ExecutionPayloadHeader | typeof ssz.deneb.ExecutionPayloadHeader + | typeof ssz.electra.ExecutionPayloadHeader > ): CachedBeaconStateAllForks { const stateView = getGenesisBeaconState( @@ -224,6 +242,8 @@ export function initializeBeaconStateFromEth1( getTemporaryBlockHeader(config, config.getForkTypes(GENESIS_SLOT).BeaconBlock.defaultValue()) ); + const fork = config.getForkSeq(GENESIS_SLOT); + // We need a CachedBeaconState to run processDeposit() which uses various caches. // However at this point the state's syncCommittees are not known. 
// This function can be called by: @@ -244,8 +264,9 @@ export function initializeBeaconStateFromEth1( state.commit(); const activeValidatorIndices = getActiveValidatorIndices(state, computeEpochAtSlot(GENESIS_SLOT)); - if (GENESIS_SLOT >= config.ALTAIR_FORK_EPOCH) { + if (fork >= ForkSeq.altair) { const {syncCommittee} = getNextSyncCommittee( + fork, state, activeValidatorIndices, state.epochCtx.effectiveBalanceIncrements @@ -257,7 +278,7 @@ export function initializeBeaconStateFromEth1( stateAltair.nextSyncCommittee = ssz.altair.SyncCommittee.toViewDU(syncCommittee); } - if (GENESIS_SLOT >= config.BELLATRIX_FORK_EPOCH) { + if (fork >= ForkSeq.bellatrix) { const stateBellatrix = state as CompositeViewDU; stateBellatrix.fork.previousVersion = config.BELLATRIX_FORK_VERSION; stateBellatrix.fork.currentVersion = config.BELLATRIX_FORK_VERSION; @@ -266,7 +287,7 @@ export function initializeBeaconStateFromEth1( ssz.bellatrix.ExecutionPayloadHeader.defaultViewDU(); } - if (GENESIS_SLOT >= config.CAPELLA_FORK_EPOCH) { + if (fork >= ForkSeq.capella) { const stateCapella = state as CompositeViewDU; stateCapella.fork.previousVersion = config.CAPELLA_FORK_VERSION; stateCapella.fork.currentVersion = config.CAPELLA_FORK_VERSION; @@ -275,7 +296,7 @@ export function initializeBeaconStateFromEth1( ssz.capella.ExecutionPayloadHeader.defaultViewDU(); } - if (GENESIS_SLOT >= config.DENEB_FORK_EPOCH) { + if (fork >= ForkSeq.deneb) { const stateDeneb = state as CompositeViewDU; stateDeneb.fork.previousVersion = config.DENEB_FORK_VERSION; stateDeneb.fork.currentVersion = config.DENEB_FORK_VERSION; @@ -284,6 +305,16 @@ export function initializeBeaconStateFromEth1( ssz.deneb.ExecutionPayloadHeader.defaultViewDU(); } + if (fork >= ForkSeq.electra) { + const stateElectra = state as CompositeViewDU; + stateElectra.fork.previousVersion = config.ELECTRA_FORK_VERSION; + stateElectra.fork.currentVersion = config.ELECTRA_FORK_VERSION; + stateElectra.latestExecutionPayloadHeader = + (executionPayloadHeader as CompositeViewDU) ?? 
+ ssz.electra.ExecutionPayloadHeader.defaultViewDU(); + stateElectra.depositRequestsStartIndex = UNSET_DEPOSIT_REQUESTS_START_INDEX; + } + state.commit(); return state; diff --git a/packages/state-transition/src/util/index.ts b/packages/state-transition/src/util/index.ts index 3f2e91da9a7..6a839fbe103 100644 --- a/packages/state-transition/src/util/index.ts +++ b/packages/state-transition/src/util/index.ts @@ -23,3 +23,5 @@ export * from "./slot.js"; export * from "./syncCommittee.js"; export * from "./validator.js"; export * from "./weakSubjectivity.js"; +export * from "./deposit.js"; +export * from "./electra.js"; diff --git a/packages/state-transition/src/util/seed.ts b/packages/state-transition/src/util/seed.ts index cf48fda8bec..a5a0028d6c1 100644 --- a/packages/state-transition/src/util/seed.ts +++ b/packages/state-transition/src/util/seed.ts @@ -5,7 +5,9 @@ import { DOMAIN_SYNC_COMMITTEE, EFFECTIVE_BALANCE_INCREMENT, EPOCHS_PER_HISTORICAL_VECTOR, + ForkSeq, MAX_EFFECTIVE_BALANCE, + MAX_EFFECTIVE_BALANCE_ELECTRA, MIN_SEED_LOOKAHEAD, SHUFFLE_ROUND_COUNT, SLOTS_PER_EPOCH, @@ -20,6 +22,7 @@ import {computeEpochAtSlot} from "./epoch.js"; * Compute proposer indices for an epoch */ export function computeProposers( + fork: ForkSeq, epochSeed: Uint8Array, shuffling: {epoch: Epoch; activeIndices: ArrayLike}, effectiveBalanceIncrements: EffectiveBalanceIncrements @@ -29,6 +32,7 @@ export function computeProposers( for (let slot = startSlot; slot < startSlot + SLOTS_PER_EPOCH; slot++) { proposers.push( computeProposerIndex( + fork, effectiveBalanceIncrements, shuffling.activeIndices, digest(Buffer.concat([epochSeed, intToBytes(slot, 8)])) @@ -44,6 +48,7 @@ export function computeProposers( * SLOW CODE - 🐢 */ export function computeProposerIndex( + fork: ForkSeq, effectiveBalanceIncrements: EffectiveBalanceIncrements, indices: ArrayLike, seed: Uint8Array @@ -54,7 +59,10 @@ export function computeProposerIndex( // TODO: Inline outside this function const MAX_RANDOM_BYTE = 2 ** 8 - 1; - const MAX_EFFECTIVE_BALANCE_INCREMENT = MAX_EFFECTIVE_BALANCE / EFFECTIVE_BALANCE_INCREMENT; + const MAX_EFFECTIVE_BALANCE_INCREMENT = + fork >= ForkSeq.electra + ? MAX_EFFECTIVE_BALANCE_ELECTRA / EFFECTIVE_BALANCE_INCREMENT + : MAX_EFFECTIVE_BALANCE / EFFECTIVE_BALANCE_INCREMENT; let i = 0; /* eslint-disable-next-line no-constant-condition */ @@ -73,9 +81,6 @@ export function computeProposerIndex( return candidateIndex; } i += 1; - if (i === indices.length) { - return -1; - } } } @@ -90,13 +95,17 @@ export function computeProposerIndex( * SLOW CODE - 🐢 */ export function getNextSyncCommitteeIndices( + fork: ForkSeq, state: BeaconStateAllForks, activeValidatorIndices: ArrayLike, effectiveBalanceIncrements: EffectiveBalanceIncrements ): ValidatorIndex[] { // TODO: Bechmark if it's necessary to inline outside of this function const MAX_RANDOM_BYTE = 2 ** 8 - 1; - const MAX_EFFECTIVE_BALANCE_INCREMENT = MAX_EFFECTIVE_BALANCE / EFFECTIVE_BALANCE_INCREMENT; + const MAX_EFFECTIVE_BALANCE_INCREMENT = + fork >= ForkSeq.electra + ? 
MAX_EFFECTIVE_BALANCE_ELECTRA / EFFECTIVE_BALANCE_INCREMENT + : MAX_EFFECTIVE_BALANCE / EFFECTIVE_BALANCE_INCREMENT; const epoch = computeEpochAtSlot(state.slot) + 1; diff --git a/packages/state-transition/src/util/syncCommittee.ts b/packages/state-transition/src/util/syncCommittee.ts index b6de821d540..c1f53632e52 100644 --- a/packages/state-transition/src/util/syncCommittee.ts +++ b/packages/state-transition/src/util/syncCommittee.ts @@ -2,6 +2,7 @@ import {aggregateSerializedPublicKeys} from "@chainsafe/blst"; import { BASE_REWARD_FACTOR, EFFECTIVE_BALANCE_INCREMENT, + ForkSeq, SLOTS_PER_EPOCH, SYNC_COMMITTEE_SIZE, SYNC_REWARD_WEIGHT, @@ -19,11 +20,12 @@ import {getNextSyncCommitteeIndices} from "./seed.js"; * SLOW CODE - 🐢 */ export function getNextSyncCommittee( + fork: ForkSeq, state: BeaconStateAllForks, activeValidatorIndices: ArrayLike, effectiveBalanceIncrements: EffectiveBalanceIncrements ): {indices: ValidatorIndex[]; syncCommittee: altair.SyncCommittee} { - const indices = getNextSyncCommitteeIndices(state, activeValidatorIndices, effectiveBalanceIncrements); + const indices = getNextSyncCommitteeIndices(fork, state, activeValidatorIndices, effectiveBalanceIncrements); // Using the index2pubkey cache is slower because it needs the serialized pubkey. const pubkeys = indices.map((index) => state.validators.getReadonly(index).pubkey); diff --git a/packages/state-transition/src/util/validator.ts b/packages/state-transition/src/util/validator.ts index 99f1e6fa0b1..728f14587fd 100644 --- a/packages/state-transition/src/util/validator.ts +++ b/packages/state-transition/src/util/validator.ts @@ -1,8 +1,14 @@ import {Epoch, phase0, ValidatorIndex} from "@lodestar/types"; import {intDiv} from "@lodestar/utils"; import {ChainForkConfig} from "@lodestar/config"; -import {ForkSeq} from "@lodestar/params"; -import {BeaconStateAllForks} from "../types.js"; +import { + EFFECTIVE_BALANCE_INCREMENT, + ForkSeq, + MAX_EFFECTIVE_BALANCE_ELECTRA, + MIN_ACTIVATION_BALANCE, +} from "@lodestar/params"; +import {BeaconStateAllForks, CachedBeaconStateElectra, EpochCache} from "../types.js"; +import {hasCompoundingWithdrawalCredential} from "./electra.js"; /** * Check if [[validator]] is active @@ -47,3 +53,48 @@ export function getActivationChurnLimit(config: ChainForkConfig, fork: ForkSeq, export function getChurnLimit(config: ChainForkConfig, activeValidatorCount: number): number { return Math.max(config.MIN_PER_EPOCH_CHURN_LIMIT, intDiv(activeValidatorCount, config.CHURN_LIMIT_QUOTIENT)); } + +/** + * Get combined churn limit of activation-exit and consolidation + */ +export function getBalanceChurnLimit(epochCtx: EpochCache): number { + const churnLimitByTotalActiveBalance = Math.floor( + (epochCtx.totalActiveBalanceIncrements / epochCtx.config.CHURN_LIMIT_QUOTIENT) * EFFECTIVE_BALANCE_INCREMENT + ); // TODO Electra: verify calculation + + const churn = Math.max(churnLimitByTotalActiveBalance, epochCtx.config.MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA); + + return churn - (churn % EFFECTIVE_BALANCE_INCREMENT); +} + +export function getActivationExitChurnLimit(epochCtx: EpochCache): number { + return Math.min(epochCtx.config.MAX_PER_EPOCH_ACTIVATION_EXIT_CHURN_LIMIT, getBalanceChurnLimit(epochCtx)); +} + +export function getConsolidationChurnLimit(epochCtx: EpochCache): number { + return getBalanceChurnLimit(epochCtx) - getActivationExitChurnLimit(epochCtx); +} + +export function getValidatorMaxEffectiveBalance(withdrawalCredentials: Uint8Array): number { + // Compounding withdrawal credential only available 
since Electra + if (hasCompoundingWithdrawalCredential(withdrawalCredentials)) { + return MAX_EFFECTIVE_BALANCE_ELECTRA; + } else { + return MIN_ACTIVATION_BALANCE; + } +} + +export function getActiveBalance(state: CachedBeaconStateElectra, validatorIndex: ValidatorIndex): number { + const validatorMaxEffectiveBalance = getValidatorMaxEffectiveBalance( + state.validators.getReadonly(validatorIndex).withdrawalCredentials + ); + + return Math.min(state.balances.get(validatorIndex), validatorMaxEffectiveBalance); +} + +export function getPendingBalanceToWithdraw(state: CachedBeaconStateElectra, validatorIndex: ValidatorIndex): number { + return state.pendingPartialWithdrawals + .getAllReadonly() + .filter((item) => item.index === validatorIndex) + .reduce((total, item) => total + Number(item.amount), 0); +} diff --git a/packages/state-transition/test/memory/effectiveBalanceIncrements.ts b/packages/state-transition/test/memory/effectiveBalanceIncrements.ts deleted file mode 100644 index f1c603b8565..00000000000 --- a/packages/state-transition/test/memory/effectiveBalanceIncrements.ts +++ /dev/null @@ -1,62 +0,0 @@ -import {MutableVector} from "@chainsafe/persistent-ts"; -import {testRunnerMemory} from "@lodestar/beacon-node/test/memory/testRunnerMemory"; -import {newZeroedArray} from "../../src/index.js"; - -// Results in Linux Feb 2022 -// -// EffectiveBalanceIncrements Uint8Array 300000 - 299873.5 bytes / instance -// EffectiveBalanceIncrements array 300000 - 2400093.1 bytes / instance -// EffectiveBalanceIncrements MutableVector 300000 - 4380557.0 bytes / instance -// EffectiveBalanceIncrements MutableVector 300000 cloned 10 - 4399575.0 bytes / instance -// -// With MutableVector, break even at 14 instances of Uint8Array -// 4380557 / 299873 = 14 - -const vc = 300_000; -const cloneTimes = 10; - -testRunnerMemoryBpi([ - { - id: `EffectiveBalanceIncrements Uint8Array ${vc}`, - getInstance: () => new Uint8Array(vc), - }, - { - id: `EffectiveBalanceIncrements array ${vc}`, - getInstance: () => newZeroedArray(vc), - }, - { - id: `EffectiveBalanceIncrements MutableVector ${vc}`, - getInstance: () => MutableVector.from(newZeroedArray(vc)), - }, - { - id: `EffectiveBalanceIncrements MutableVector ${vc} cloned ${cloneTimes}`, - getInstance: () => { - const mv = MutableVector.from(newZeroedArray(vc)); - const mvs = [mv]; - for (let i = 0; i < cloneTimes; i++) { - const mvc = mv.clone(); - mvc.push(0); - mvs.push(mvc); - } - return mvs; - }, - }, -]); - -/** - * Test bytes per instance in different representations of raw binary data - */ -function testRunnerMemoryBpi(testCases: {getInstance: (bytes: number) => unknown; id: string}[]): void { - const longestId = Math.max(...testCases.map(({id}) => id.length)); - - for (const {id, getInstance} of testCases) { - const bpi = testRunnerMemory({ - getInstance, - convergeFactor: 1 / 100, - sampleEvery: 5, - }); - - // eslint-disable-next-line no-console - console.log(`${id.padEnd(longestId)} - ${bpi.toFixed(1)} bytes / instance`); - } -} diff --git a/packages/state-transition/test/perf/analyzeEpochs.ts b/packages/state-transition/test/perf/analyzeEpochs.ts index 6f61bc81abb..deb0861427b 100644 --- a/packages/state-transition/test/perf/analyzeEpochs.ts +++ b/packages/state-transition/test/perf/analyzeEpochs.ts @@ -152,6 +152,9 @@ async function analyzeEpochs(network: NetworkName, fromEpoch?: number): Promise< // processRegistryUpdates: function of registry updates // processSlashingsAllForks: function of process.indicesToSlash // processSlashingsReset: free + 
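// Minimal illustrative sketch (not part of this diff) of how the validator helpers above could be combined:
// computing the balance a validator holds above its effective-balance cap, net of withdrawals already queued.
// `getExcessBalance` is a hypothetical name; `state` is assumed to be a CachedBeaconStateElectra, as in
// getActiveBalance above.
function getExcessBalance(state: CachedBeaconStateElectra, validatorIndex: ValidatorIndex): number {
  const validator = state.validators.getReadonly(validatorIndex);
  // MAX_EFFECTIVE_BALANCE_ELECTRA for compounding credentials, MIN_ACTIVATION_BALANCE otherwise
  const maxEffectiveBalance = getValidatorMaxEffectiveBalance(validator.withdrawalCredentials);
  const balance = state.balances.get(validatorIndex);
  // Amounts already queued in pendingPartialWithdrawals should not be counted twice
  const pending = getPendingBalanceToWithdraw(state, validatorIndex);
  return Math.max(0, balance - maxEffectiveBalance - pending);
}

// The churn helpers are driven purely by the epoch cache, e.g.:
//   const activationExitChurn = getActivationExitChurnLimit(state.epochCtx);
//   const consolidationChurn = getConsolidationChurnLimit(state.epochCtx);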
// -- electra + // processPendingBalanceDeposits: - + // processPendingConsolidations: - // -- altair // processInactivityUpdates: - // processParticipationFlagUpdates: - diff --git a/packages/state-transition/test/perf/block/processWithdrawals.test.ts b/packages/state-transition/test/perf/block/processWithdrawals.test.ts index 997f401d32c..66d624b39bf 100644 --- a/packages/state-transition/test/perf/block/processWithdrawals.test.ts +++ b/packages/state-transition/test/perf/block/processWithdrawals.test.ts @@ -1,4 +1,5 @@ import {itBench} from "@dapplion/benchmark"; +import {ForkSeq} from "@lodestar/params"; import {CachedBeaconStateCapella} from "../../../src/index.js"; import {getExpectedWithdrawals} from "../../../src/block/processWithdrawals.js"; import {numValidators} from "../util.js"; @@ -9,7 +10,7 @@ import {getExpectedWithdrawalsTestData, WithdrawalOpts} from "../../utils/capell // having BLS withdrawal credential prefix as that validator probe is wasted. // // Best case: -// All Validator have balances > MAX_EFFECTIVE_BALANCE and ETH1 withdrawal credential prefix set +// All Validator have balances > MAX_EFFECTIVE_BALANCE and ETH1 withdrawal credential prefix set // TODO Electra: Not true anymore // // Worst case: // All balances are low enough or withdrawal credential not set @@ -69,7 +70,7 @@ describe("getExpectedWithdrawals", () => { return opts.cache ? state : state.clone(true); }, fn: (state) => { - const {sampledValidators} = getExpectedWithdrawals(state); + const {sampledValidators} = getExpectedWithdrawals(ForkSeq.capella, state); // TODO Electra: Do test for electra if (sampledValidators !== opts.sampled) { throw Error(`Wrong sampledValidators ${sampledValidators} != ${opts.sampled}`); } diff --git a/packages/state-transition/test/perf/dataStructures/arrayish.memory.ts b/packages/state-transition/test/perf/dataStructures/arrayish.memory.ts index 1f414191262..7e10f447181 100644 --- a/packages/state-transition/test/perf/dataStructures/arrayish.memory.ts +++ b/packages/state-transition/test/perf/dataStructures/arrayish.memory.ts @@ -1,5 +1,3 @@ -import {MutableVector} from "@chainsafe/persistent-ts"; - const refs: any[] = []; const xs: number[] = []; const arrayBuffersArr: number[] = []; @@ -23,7 +21,6 @@ const size = 100; const testType = TestType.Set; let arrayNumGlobal: number[] | null = null; -let mutableVectorGlobal: MutableVector | null = null; for (let i = 0; i < 1e8; i++) { switch (testType as TestType) { @@ -65,49 +62,6 @@ for (let i = 0; i < 1e8; i++) { break; } - // size | 100 | 1000 | 10000 | - // ---- | ------ | ------ | ------ | - // rssM | 1817.4 | 15518. | 154335 | - case TestType.MutableVector: { - const items = createArray(size); - const mutableVector = MutableVector.from(items); - refs.push(mutableVector); - break; - } - - // size | 100 | 1000 | - // ---- | ------ | ------ | - // rssM | 58.68 | 55.89 | - case TestType.MutableVectorClone: { - if (!mutableVectorGlobal) { - const items = createArray(size); - mutableVectorGlobal = MutableVector.from(items); - } - refs.push(mutableVectorGlobal.clone()); - break; - } - - // Grid of size / changes, all values = rssM in bytes - // | 100 | 1000 | 10000 | - // ----- | ------ | ------ | ------ | - // 1 | 793.45 | 801.53 | 1137.9 | - // 10 | 803.98 | 802.36 | 1144.9 | - // 100 | 1573.2 | 1826.4 | 2172.0 | - // 1000 | - | 11250. | 11886. 
| - // 10000 | - | - | 111365 | - case TestType.MutableVectorCloneAndMutate: { - if (!mutableVectorGlobal) { - const items = createArray(size); - mutableVectorGlobal = MutableVector.from(items); - } - const newArr = mutableVectorGlobal.clone(); - for (let j = 0; j < 10000; j++) { - newArr.set(j, i); - } - refs.push(newArr); - break; - } - // size | 100 | 1000 | // ---- | ------ | ------ | // rssM | 2646.8 | 20855. | @@ -161,14 +115,6 @@ for (let i = 0; i < 1e8; i++) { } } -function createArray(n: number): number[] { - const items: number[] = []; - for (let i = 0; i < n; i++) { - items.push(i); - } - return items; -} - /** * From https://github.com/simple-statistics/simple-statistics/blob/d0d177baf74976a2421638bce98ab028c5afb537/src/linear_regression.js * diff --git a/packages/state-transition/test/perf/dataStructures/arrayish.test.ts b/packages/state-transition/test/perf/dataStructures/arrayish.test.ts index 59162b6eecc..5b6af0d989b 100644 --- a/packages/state-transition/test/perf/dataStructures/arrayish.test.ts +++ b/packages/state-transition/test/perf/dataStructures/arrayish.test.ts @@ -1,6 +1,5 @@ import {itBench, setBenchOpts} from "@dapplion/benchmark"; import {LeafNode, toGindex, Tree, zeroNode} from "@chainsafe/persistent-merkle-tree"; -import {MutableVector} from "@chainsafe/persistent-ts"; // Understand the cost of each array-ish data structure to: // - Get one element @@ -99,48 +98,6 @@ describe("Tree (persistent-merkle-tree)", () => { } }); -describe("MutableVector", () => { - // Don't track regressions in CI - setBenchOpts({noThreshold: true}); - - let items: number[]; - let mutableVector: MutableVector; - - before(function () { - items = createArray(n); - mutableVector = MutableVector.from(items); - }); - - itBench(`MutableVector ${n} create`, () => { - MutableVector.from(items); - }); - - itBench({id: `MutableVector ${n} get(${ih})`, runsFactor}, () => { - for (let i = 0; i < runsFactor; i++) mutableVector.get(ih - i); - }); - - itBench({id: `MutableVector ${n} set(${ih})`, runsFactor}, () => { - for (let i = 0; i < runsFactor; i++) mutableVector.set(ih - i, 10000000); - }); - - itBench(`MutableVector ${n} toArray()`, () => { - mutableVector.toArray(); - }); - - itBench(`MutableVector ${n} iterate all - toArray() + loop`, () => { - const mvArr = mutableVector.toArray(); - for (let i = 0; i < n; i++) { - mvArr[i]; - } - }); - - itBench(`MutableVector ${n} iterate all - get(i)`, () => { - for (let i = 0; i < n; i++) { - mutableVector.get(i); - } - }); -}); - describe("Array", () => { // Don't track regressions in CI setBenchOpts({noThreshold: true}); diff --git a/packages/state-transition/test/perf/dataStructures/effectiveBalanceIncrements.test.ts b/packages/state-transition/test/perf/dataStructures/effectiveBalanceIncrements.test.ts deleted file mode 100644 index 13c2d982e86..00000000000 --- a/packages/state-transition/test/perf/dataStructures/effectiveBalanceIncrements.test.ts +++ /dev/null @@ -1,32 +0,0 @@ -import {itBench, setBenchOpts} from "@dapplion/benchmark"; -import {MutableVector} from "@chainsafe/persistent-ts"; -import {newZeroedArray} from "../../../src/index.js"; - -describe("effectiveBalanceIncrements", () => { - setBenchOpts({noThreshold: true}); - - const vc = 300_000; - const uint8Array = new Uint8Array(vc); - const mv = MutableVector.from(newZeroedArray(vc)); - - itBench(`effectiveBalanceIncrements clone Uint8Array ${vc}`, () => { - uint8Array.slice(0); - }); - - itBench(`effectiveBalanceIncrements clone MutableVector ${vc}`, () => { - mv.clone(); - }); - - 
itBench(`effectiveBalanceIncrements rw all Uint8Array ${vc}`, () => { - for (let i = 0; i < vc; i++) { - uint8Array[i]++; - } - }); - - itBench(`effectiveBalanceIncrements rw all MutableVector ${vc}`, () => { - for (let i = 0; i < vc; i++) { - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - mv.set(i, mv.get(i)! + 1); - } - }); -}); diff --git a/packages/state-transition/test/perf/epoch/epochAltair.test.ts b/packages/state-transition/test/perf/epoch/epochAltair.test.ts index 273353d8632..15cde849ce9 100644 --- a/packages/state-transition/test/perf/epoch/epochAltair.test.ts +++ b/packages/state-transition/test/perf/epoch/epochAltair.test.ts @@ -120,7 +120,7 @@ function benchmarkAltairEpochSteps(stateOg: LazyValue itBench({ id: `${stateId} - altair processRegistryUpdates`, beforeEach: () => stateOg.value.clone(), - fn: (state) => processRegistryUpdates(state, cache.value), + fn: (state) => processRegistryUpdates(ForkSeq.altair, state, cache.value), }); // TODO: Needs a better state to test with, current does not include enough actions: 39.985 us/op @@ -141,7 +141,9 @@ function benchmarkAltairEpochSteps(stateOg: LazyValue itBench({ id: `${stateId} - altair processEffectiveBalanceUpdates`, beforeEach: () => stateOg.value.clone(), - fn: (state) => processEffectiveBalanceUpdates(state, cache.value), + fn: (state) => { + processEffectiveBalanceUpdates(ForkSeq.altair, state, cache.value); + }, }); itBench({ @@ -172,7 +174,7 @@ function benchmarkAltairEpochSteps(stateOg: LazyValue id: `${stateId} - altair processSyncCommitteeUpdates`, convergeFactor: 1 / 100, // Very unstable make it converge faster beforeEach: () => stateOg.value.clone() as CachedBeaconStateAltair, - fn: (state) => processSyncCommitteeUpdates(state), + fn: (state) => processSyncCommitteeUpdates(ForkSeq.altair, state), }); itBench({ diff --git a/packages/state-transition/test/perf/epoch/epochCapella.test.ts b/packages/state-transition/test/perf/epoch/epochCapella.test.ts index eeaf8bfc540..61bfad20b1e 100644 --- a/packages/state-transition/test/perf/epoch/epochCapella.test.ts +++ b/packages/state-transition/test/perf/epoch/epochCapella.test.ts @@ -99,7 +99,7 @@ function benchmarkAltairEpochSteps(stateOg: LazyValue itBench({ id: `${stateId} - capella processRegistryUpdates`, beforeEach: () => stateOg.value.clone(), - fn: (state) => processRegistryUpdates(state, cache.value), + fn: (state) => processRegistryUpdates(ForkSeq.capella, state, cache.value), }); // TODO: Needs a better state to test with, current does not include enough actions: 39.985 us/op @@ -120,7 +120,9 @@ function benchmarkAltairEpochSteps(stateOg: LazyValue itBench({ id: `${stateId} - capella processEffectiveBalanceUpdates`, beforeEach: () => stateOg.value.clone(), - fn: (state) => processEffectiveBalanceUpdates(state, cache.value), + fn: (state) => { + processEffectiveBalanceUpdates(ForkSeq.capella, state, cache.value); + }, }); itBench({ diff --git a/packages/state-transition/test/perf/epoch/epochPhase0.test.ts b/packages/state-transition/test/perf/epoch/epochPhase0.test.ts index 4e43634b166..3af3d4d4a83 100644 --- a/packages/state-transition/test/perf/epoch/epochPhase0.test.ts +++ b/packages/state-transition/test/perf/epoch/epochPhase0.test.ts @@ -102,7 +102,7 @@ function benchmarkPhase0EpochSteps(stateOg: LazyValue itBench({ id: `${stateId} - phase0 processRegistryUpdates`, beforeEach: () => stateOg.value.clone(), - fn: (state) => processRegistryUpdates(state, cache.value), + fn: (state) => processRegistryUpdates(ForkSeq.phase0, state, 
cache.value), }); // TODO: Needs a better state to test with, current does not include enough actions: 39.985 us/op @@ -123,7 +123,9 @@ function benchmarkPhase0EpochSteps(stateOg: LazyValue itBench({ id: `${stateId} - phase0 processEffectiveBalanceUpdates`, beforeEach: () => stateOg.value.clone(), - fn: (state) => processEffectiveBalanceUpdates(state, cache.value), + fn: (state) => { + processEffectiveBalanceUpdates(ForkSeq.phase0, state, cache.value); + }, }); itBench({ diff --git a/packages/state-transition/test/perf/epoch/processEffectiveBalanceUpdates.test.ts b/packages/state-transition/test/perf/epoch/processEffectiveBalanceUpdates.test.ts index 0fb1d448142..19f18df86c2 100644 --- a/packages/state-transition/test/perf/epoch/processEffectiveBalanceUpdates.test.ts +++ b/packages/state-transition/test/perf/epoch/processEffectiveBalanceUpdates.test.ts @@ -1,6 +1,7 @@ import {itBench} from "@dapplion/benchmark"; import {ssz} from "@lodestar/types"; import {config} from "@lodestar/config/default"; +import {ForkSeq} from "@lodestar/params"; import {beforeProcessEpoch, CachedBeaconStateAllForks, EpochTransitionCache} from "../../../src/index.js"; import {processEffectiveBalanceUpdates} from "../../../src/epoch/processEffectiveBalanceUpdates.js"; import {numValidators} from "../util.js"; @@ -35,7 +36,9 @@ describe("phase0 processEffectiveBalanceUpdates", () => { minRuns: 5, // Worst case is very slow before: () => getEffectiveBalanceTestData(vc, changeRatio), beforeEach: ({state, cache}) => ({state: state.clone(), cache}), - fn: ({state, cache}) => processEffectiveBalanceUpdates(state, cache), + fn: ({state, cache}) => { + processEffectiveBalanceUpdates(ForkSeq.phase0, state, cache); + }, }); } }); diff --git a/packages/state-transition/test/perf/epoch/processRegistryUpdates.test.ts b/packages/state-transition/test/perf/epoch/processRegistryUpdates.test.ts index ccfd2405a66..2d57de44f8e 100644 --- a/packages/state-transition/test/perf/epoch/processRegistryUpdates.test.ts +++ b/packages/state-transition/test/perf/epoch/processRegistryUpdates.test.ts @@ -1,4 +1,5 @@ import {itBench} from "@dapplion/benchmark"; +import {ForkSeq} from "@lodestar/params"; import {beforeProcessEpoch, CachedBeaconStateAllForks, EpochTransitionCache} from "../../../src/index.js"; import {processRegistryUpdates} from "../../../src/epoch/processRegistryUpdates.js"; import {generatePerfTestCachedStatePhase0, numValidators} from "../util.js"; @@ -62,7 +63,7 @@ describe("phase0 processRegistryUpdates", () => { noThreshold: notTrack, before: () => getRegistryUpdatesTestData(vc, lengths), beforeEach: async ({state, cache}) => ({state: state.clone(), cache}), - fn: ({state, cache}) => processRegistryUpdates(state, cache), + fn: ({state, cache}) => processRegistryUpdates(ForkSeq.phase0, state, cache), }); } }); diff --git a/packages/state-transition/test/perf/epoch/processSyncCommitteeUpdates.test.ts b/packages/state-transition/test/perf/epoch/processSyncCommitteeUpdates.test.ts index ffde30e1302..4497dc16be0 100644 --- a/packages/state-transition/test/perf/epoch/processSyncCommitteeUpdates.test.ts +++ b/packages/state-transition/test/perf/epoch/processSyncCommitteeUpdates.test.ts @@ -1,5 +1,5 @@ import {itBench} from "@dapplion/benchmark"; -import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD} from "@lodestar/params"; +import {EPOCHS_PER_SYNC_COMMITTEE_PERIOD, ForkSeq} from "@lodestar/params"; import {processSyncCommitteeUpdates} from "../../../src/epoch/processSyncCommitteeUpdates.js"; import {StateAltair} from "../types.js"; import 
{generatePerfTestCachedStateAltair, numValidators} from "../util.js"; @@ -21,7 +21,7 @@ describe("altair processSyncCommitteeUpdates", () => { }, fn: (state) => { const nextSyncCommitteeBefore = state.nextSyncCommittee; - processSyncCommitteeUpdates(state); + processSyncCommitteeUpdates(ForkSeq.altair, state); if (state.nextSyncCommittee === nextSyncCommitteeBefore) { throw Error("nextSyncCommittee instance has not changed"); } diff --git a/packages/state-transition/test/perf/util.ts b/packages/state-transition/test/perf/util.ts index 4b2a7da4a50..f3c2eaef91e 100644 --- a/packages/state-transition/test/perf/util.ts +++ b/packages/state-transition/test/perf/util.ts @@ -7,6 +7,7 @@ import { EPOCHS_PER_ETH1_VOTING_PERIOD, EPOCHS_PER_HISTORICAL_VECTOR, ForkName, + ForkSeq, MAX_ATTESTATIONS, MAX_EFFECTIVE_BALANCE, SLOTS_PER_EPOCH, @@ -273,7 +274,12 @@ export function generatePerformanceStateAltair(pubkeysArg?: Uint8Array[]): Beaco const activeValidatorIndices = getActiveValidatorIndices(altairState, epoch); const effectiveBalanceIncrements = getEffectiveBalanceIncrements(altairState); - const {syncCommittee} = getNextSyncCommittee(altairState, activeValidatorIndices, effectiveBalanceIncrements); + const {syncCommittee} = getNextSyncCommittee( + ForkSeq.altair, + altairState, + activeValidatorIndices, + effectiveBalanceIncrements + ); state.currentSyncCommittee = syncCommittee; state.nextSyncCommittee = syncCommittee; diff --git a/packages/state-transition/test/perf/util/shufflings.test.ts b/packages/state-transition/test/perf/util/shufflings.test.ts index 96c7878a46a..24be96c4676 100644 --- a/packages/state-transition/test/perf/util/shufflings.test.ts +++ b/packages/state-transition/test/perf/util/shufflings.test.ts @@ -28,7 +28,8 @@ describe("epoch shufflings", () => { id: `computeProposers - vc ${numValidators}`, fn: () => { const epochSeed = getSeed(state, state.epochCtx.nextShuffling.epoch, DOMAIN_BEACON_PROPOSER); - computeProposers(epochSeed, state.epochCtx.nextShuffling, state.epochCtx.effectiveBalanceIncrements); + const fork = state.config.getForkSeq(state.slot); + computeProposers(fork, epochSeed, state.epochCtx.nextShuffling, state.epochCtx.effectiveBalanceIncrements); }, }); @@ -43,7 +44,9 @@ describe("epoch shufflings", () => { itBench({ id: `getNextSyncCommittee - vc ${numValidators}`, fn: () => { + const fork = state.config.getForkSeq(state.slot); getNextSyncCommittee( + fork, state, state.epochCtx.nextShuffling.activeIndices, state.epochCtx.effectiveBalanceIncrements diff --git a/packages/state-transition/test/unit/block/processWithdrawals.test.ts b/packages/state-transition/test/unit/block/processWithdrawals.test.ts index 2841da63547..7b708d108a7 100644 --- a/packages/state-transition/test/unit/block/processWithdrawals.test.ts +++ b/packages/state-transition/test/unit/block/processWithdrawals.test.ts @@ -1,4 +1,5 @@ import {describe, it, expect} from "vitest"; +import {ForkSeq} from "@lodestar/params"; import {getExpectedWithdrawals} from "../../../src/block/processWithdrawals.js"; import {numValidators} from "../../perf/util.js"; import {getExpectedWithdrawalsTestData, WithdrawalOpts} from "../../utils/capella.js"; @@ -36,8 +37,9 @@ describe("getExpectedWithdrawals", () => { // Clone true to drop cache const state = beforeValue(() => getExpectedWithdrawalsTestData(vc, opts).clone(true)); + // TODO Electra: Add test for electra it(`getExpectedWithdrawals ${vc} ${caseID}`, () => { - const {sampledValidators, withdrawals} = getExpectedWithdrawals(state.value); + const 
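// Minimal illustrative sketch of the calling convention used throughout the updated tests above: the
// epoch-processing and committee helpers now take the active ForkSeq as their first argument, derived once
// from the chain config rather than inside each function (`state` and `cache` are assumed to be a
// CachedBeaconStateAllForks and its EpochTransitionCache, as in the benchmarks above).
const fork = state.config.getForkSeq(state.slot);
processRegistryUpdates(fork, state, cache);
processEffectiveBalanceUpdates(fork, state, cache);
if (fork >= ForkSeq.altair) {
  processSyncCommitteeUpdates(fork, state as CachedBeaconStateAltair);
}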
{sampledValidators, withdrawals} = getExpectedWithdrawals(ForkSeq.capella, state.value); expect(sampledValidators).toBe(opts.sampled); expect(withdrawals.length).toBe(opts.withdrawals); }); diff --git a/packages/state-transition/test/unit/cachedBeaconState.test.ts b/packages/state-transition/test/unit/cachedBeaconState.test.ts index 2891cd3e621..77c5da7a5f4 100644 --- a/packages/state-transition/test/unit/cachedBeaconState.test.ts +++ b/packages/state-transition/test/unit/cachedBeaconState.test.ts @@ -1,8 +1,9 @@ +import {fromHexString} from "@chainsafe/ssz"; import {describe, it, expect} from "vitest"; import {Epoch, ssz, RootHex} from "@lodestar/types"; import {toHexString} from "@lodestar/utils"; import {config as defaultConfig} from "@lodestar/config/default"; -import {createBeaconConfig} from "@lodestar/config"; +import {createBeaconConfig, createChainForkConfig} from "@lodestar/config"; import {createCachedBeaconStateTest} from "../utils/state.js"; import {PubkeyIndexMap} from "../../src/cache/pubkeyCache.js"; import {createCachedBeaconState, loadCachedBeaconState} from "../../src/cache/stateCache.js"; @@ -28,6 +29,65 @@ describe("CachedBeaconState", () => { expect(state2.epochCtx.epoch).toBe(0); }); + it("Clone and mutate cache pre-Electra", () => { + const stateView = ssz.altair.BeaconState.defaultViewDU(); + const state1 = createCachedBeaconStateTest(stateView); + + const pubkey1 = fromHexString( + "0x84105a985058fc8740a48bf1ede9d223ef09e8c6b1735ba0a55cf4a9ff2ff92376b778798365e488dab07a652eb04576" + ); + const index1 = 123; + const pubkey2 = fromHexString( + "0xa41726266b1d83ef609d759ba7796d54cfe549154e01e4730a3378309bc81a7638140d7e184b33593c072595f23f032d" + ); + const index2 = 456; + + state1.epochCtx.addPubkey(index1, pubkey1); + + const state2 = state1.clone(); + state2.epochCtx.addPubkey(index2, pubkey2); + + expect(state1.epochCtx.getValidatorIndex(pubkey1)).toBe(index1); + expect(state2.epochCtx.getValidatorIndex(pubkey1)).toBe(index1); + expect(state1.epochCtx.getValidatorIndex(pubkey2)).toBe(index2); + expect(state2.epochCtx.getValidatorIndex(pubkey2)).toBe(index2); + }); + + /* eslint-disable @typescript-eslint/naming-convention */ + it("Clone and mutate cache post-Electra", () => { + const stateView = ssz.electra.BeaconState.defaultViewDU(); + const state1 = createCachedBeaconStateTest( + stateView, + createChainForkConfig({ + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + CAPELLA_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + ELECTRA_FORK_EPOCH: 0, + }), + {skipSyncCommitteeCache: true, skipSyncPubkeys: true} + ); + + const pubkey1 = fromHexString( + "0x84105a985058fc8740a48bf1ede9d223ef09e8c6b1735ba0a55cf4a9ff2ff92376b778798365e488dab07a652eb04576" + ); + const index1 = 123; + const pubkey2 = fromHexString( + "0xa41726266b1d83ef609d759ba7796d54cfe549154e01e4730a3378309bc81a7638140d7e184b33593c072595f23f032d" + ); + const index2 = 456; + + state1.epochCtx.addPubkey(index1, pubkey1); + + const state2 = state1.clone(); + state2.epochCtx.addPubkey(index2, pubkey2); + + expect(state1.epochCtx.getValidatorIndex(pubkey1)).toBe(index1); + expect(state2.epochCtx.getValidatorIndex(pubkey1)).toBe(index1); + expect(state1.epochCtx.getValidatorIndex(pubkey2)).toBe(undefined); + expect(state2.epochCtx.getValidatorIndex(pubkey2)).toBe(index2); + }); + it("Auto-commit on hashTreeRoot", () => { // Use Checkpoint instead of BeaconState to speed up the test const cp1 = ssz.phase0.Checkpoint.defaultViewDU(); @@ -71,7 +131,7 @@ describe("CachedBeaconState", () => { const capellaStateType = 
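// One reading of the two clone-and-mutate tests above (inferred from the expectations, not stated in the
// implementation shown here): pre-Electra the pubkey-to-index cache is shared by reference between a state
// and its clone, so an addPubkey() on either side is visible to both; post-Electra the cache of unfinalized
// pubkeys is copied on clone(), so additions made after the clone stay local to the state they were made on:
//   const state2 = state1.clone();
//   state2.epochCtx.addPubkey(index2, pubkey2);
//   state1.epochCtx.getValidatorIndex(pubkey2); // undefined post-Electra, index2 pre-Electra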
ssz.capella.BeaconState; - for (let validatorCountDelta = -numValidator; validatorCountDelta <= numValidator; validatorCountDelta++) { + for (let validatorCountDelta = -numValidator + 1; validatorCountDelta <= numValidator; validatorCountDelta++) { const testName = `loadCachedBeaconState - ${validatorCountDelta > 0 ? "more" : "less"} ${Math.abs( validatorCountDelta )} validators`; diff --git a/packages/state-transition/test/unit/upgradeState.test.ts b/packages/state-transition/test/unit/upgradeState.test.ts index 2ea8eef182a..df9b052542f 100644 --- a/packages/state-transition/test/unit/upgradeState.test.ts +++ b/packages/state-transition/test/unit/upgradeState.test.ts @@ -5,6 +5,7 @@ import {createBeaconConfig, ChainForkConfig, createChainForkConfig} from "@lodes import {config as chainConfig} from "@lodestar/config/default"; import {upgradeStateToDeneb} from "../../src/slot/upgradeStateToDeneb.js"; +import {upgradeStateToElectra} from "../../src/slot/upgradeStateToElectra.js"; import {createCachedBeaconState} from "../../src/cache/stateCache.js"; import {PubkeyIndexMap} from "../../src/cache/pubkeyCache.js"; @@ -24,6 +25,21 @@ describe("upgradeState", () => { const newState = upgradeStateToDeneb(stateView); expect(() => newState.toValue()).not.toThrow(); }); + it("upgradeStateToElectra", () => { + const denebState = ssz.deneb.BeaconState.defaultViewDU(); + const config = getConfig(ForkName.deneb); + const stateView = createCachedBeaconState( + denebState, + { + config: createBeaconConfig(config, denebState.genesisValidatorsRoot), + pubkey2index: new PubkeyIndexMap(), + index2pubkey: [], + }, + {skipSyncCommitteeCache: true} + ); + const newState = upgradeStateToElectra(stateView); + expect(() => newState.toValue()).not.toThrow(); + }); }); const ZERO_HASH = Buffer.alloc(32, 0); @@ -55,5 +71,13 @@ function getConfig(fork: ForkName, forkEpoch = 0): ChainForkConfig { CAPELLA_FORK_EPOCH: 0, DENEB_FORK_EPOCH: forkEpoch, }); + case ForkName.electra: + return createChainForkConfig({ + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + CAPELLA_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + ELECTRA_FORK_EPOCH: forkEpoch, + }); } } diff --git a/packages/state-transition/test/unit/util/deposit.test.ts b/packages/state-transition/test/unit/util/deposit.test.ts new file mode 100644 index 00000000000..3cfa4abb340 --- /dev/null +++ b/packages/state-transition/test/unit/util/deposit.test.ts @@ -0,0 +1,99 @@ +import {describe, it, expect} from "vitest"; +import {ssz} from "@lodestar/types"; +import {createChainForkConfig} from "@lodestar/config"; +import {MAX_DEPOSITS} from "@lodestar/params"; +import {getEth1DepositCount} from "../../../src/index.js"; +import {createCachedBeaconStateTest} from "../../utils/state.js"; + +describe("getEth1DepositCount", () => { + it("Pre Electra", () => { + const stateView = ssz.altair.BeaconState.defaultViewDU(); + const preElectraState = createCachedBeaconStateTest(stateView); + + if (preElectraState.epochCtx.isPostElectra()) { + throw Error("Not a pre-Electra state"); + } + + preElectraState.eth1Data.depositCount = 123; + + // 1. Should get less than MAX_DEPOSIT + preElectraState.eth1DepositIndex = 120; + expect(getEth1DepositCount(preElectraState)).toBe(3); + + // 2. 
Should get MAX_DEPOSIT + preElectraState.eth1DepositIndex = 100; + expect(getEth1DepositCount(preElectraState)).toBe(MAX_DEPOSITS); + }); + it("Post Electra with eth1 deposit", () => { + const stateView = ssz.electra.BeaconState.defaultViewDU(); + const postElectraState = createCachedBeaconStateTest( + stateView, + createChainForkConfig({ + /* eslint-disable @typescript-eslint/naming-convention */ + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + CAPELLA_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + ELECTRA_FORK_EPOCH: 0, + }), + {skipSyncCommitteeCache: true, skipSyncPubkeys: true} + ); + + if (!postElectraState.epochCtx.isPostElectra()) { + throw Error("Not a post-Electra state"); + } + + postElectraState.depositRequestsStartIndex = 1000n; + postElectraState.eth1Data.depositCount = 995; + + // 1. Should get less than MAX_DEPOSIT + postElectraState.eth1DepositIndex = 990; + expect(getEth1DepositCount(postElectraState)).toBe(5); + + // 2. Should get MAX_DEPOSIT + postElectraState.eth1DepositIndex = 100; + expect(getEth1DepositCount(postElectraState)).toBe(MAX_DEPOSITS); + + // 3. Should be 0 + postElectraState.eth1DepositIndex = 1000; + expect(getEth1DepositCount(postElectraState)).toBe(0); + }); + it("Post Electra without eth1 deposit", () => { + const stateView = ssz.electra.BeaconState.defaultViewDU(); + const postElectraState = createCachedBeaconStateTest( + stateView, + createChainForkConfig({ + /* eslint-disable @typescript-eslint/naming-convention */ + ALTAIR_FORK_EPOCH: 0, + BELLATRIX_FORK_EPOCH: 0, + CAPELLA_FORK_EPOCH: 0, + DENEB_FORK_EPOCH: 0, + ELECTRA_FORK_EPOCH: 0, + }), + {skipSyncCommitteeCache: true, skipSyncPubkeys: true} + ); + + if (!postElectraState.epochCtx.isPostElectra()) { + throw Error("Not a post-Electra state"); + } + + postElectraState.depositRequestsStartIndex = 1000n; + postElectraState.eth1Data.depositCount = 1005; + + // Before eth1DepositIndex reaching the start index + // 1. Should get less than MAX_DEPOSIT + postElectraState.eth1DepositIndex = 990; + expect(getEth1DepositCount(postElectraState)).toBe(10); + + // 2. Should get MAX_DEPOSIT + postElectraState.eth1DepositIndex = 983; + expect(getEth1DepositCount(postElectraState)).toBe(MAX_DEPOSITS); + + // After eth1DepositIndex reaching the start index + // 1. 
Should be 0 + postElectraState.eth1DepositIndex = 1000; + expect(getEth1DepositCount(postElectraState)).toBe(0); + postElectraState.eth1DepositIndex = 1003; + expect(getEth1DepositCount(postElectraState)).toBe(0); + }); +}); diff --git a/packages/types/package.json b/packages/types/package.json index 9a36e311def..861dbdbee0e 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -29,6 +29,9 @@ "./deneb": { "import": "./lib/deneb/index.js" }, + "./electra": { + "import": "./lib/electra/index.js" + }, "./phase0": { "import": "./lib/phase0/index.js" } @@ -70,7 +73,7 @@ }, "types": "lib/index.d.ts", "dependencies": { - "@chainsafe/ssz": "^0.17.0", + "@chainsafe/ssz": "^0.17.1", "@lodestar/params": "^1.21.0", "ethereum-cryptography": "^2.0.0" }, diff --git a/packages/types/src/electra/index.ts b/packages/types/src/electra/index.ts new file mode 100644 index 00000000000..981b2015e02 --- /dev/null +++ b/packages/types/src/electra/index.ts @@ -0,0 +1,4 @@ +export * from "./types.js"; +import * as ts from "./types.js"; +import * as ssz from "./sszTypes.js"; +export {ts, ssz}; diff --git a/packages/types/src/electra/sszTypes.ts b/packages/types/src/electra/sszTypes.ts new file mode 100644 index 00000000000..31844aac86c --- /dev/null +++ b/packages/types/src/electra/sszTypes.ts @@ -0,0 +1,440 @@ +import { + BitListType, + BitVectorType, + ContainerType, + ListBasicType, + ListCompositeType, + VectorCompositeType, +} from "@chainsafe/ssz"; +import { + HISTORICAL_ROOTS_LIMIT, + BLOCK_BODY_EXECUTION_PAYLOAD_DEPTH as EXECUTION_PAYLOAD_DEPTH, + EPOCHS_PER_SYNC_COMMITTEE_PERIOD, + SLOTS_PER_EPOCH, + MAX_DEPOSIT_REQUESTS_PER_PAYLOAD, + MAX_VALIDATORS_PER_COMMITTEE, + MAX_COMMITTEES_PER_SLOT, + MAX_ATTESTATIONS_ELECTRA, + MAX_ATTESTER_SLASHINGS_ELECTRA, + MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD, + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD, + PENDING_BALANCE_DEPOSITS_LIMIT, + PENDING_PARTIAL_WITHDRAWALS_LIMIT, + PENDING_CONSOLIDATIONS_LIMIT, + FINALIZED_ROOT_DEPTH_ELECTRA, + NEXT_SYNC_COMMITTEE_DEPTH_ELECTRA, +} from "@lodestar/params"; +import {ssz as primitiveSsz} from "../primitive/index.js"; +import {ssz as phase0Ssz} from "../phase0/index.js"; +import {ssz as altairSsz} from "../altair/index.js"; +import {ssz as bellatrixSsz} from "../bellatrix/index.js"; +import {ssz as capellaSsz} from "../capella/index.js"; +import {ssz as denebSsz} from "../deneb/index.js"; + +const { + Epoch, + Gwei, + UintNum64, + Slot, + Root, + BLSSignature, + UintBn256, + Bytes32, + BLSPubkey, + DepositIndex, + UintBn64, + ExecutionAddress, + ValidatorIndex, +} = primitiveSsz; + +export const AggregationBits = new BitListType(MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT); + +// This CommitteeBits serves a different purpose than CommitteeBits in phase0 +// TODO Electra: Rename phase0.CommitteeBits to ParticipationBits to avoid confusion +export const CommitteeBits = new BitVectorType(MAX_COMMITTEES_PER_SLOT); + +export const AttestingIndices = new ListBasicType( + ValidatorIndex, + MAX_VALIDATORS_PER_COMMITTEE * MAX_COMMITTEES_PER_SLOT +); + +export const Attestation = new ContainerType( + { + aggregationBits: AggregationBits, // Modified in ELECTRA + data: phase0Ssz.AttestationData, + signature: BLSSignature, + committeeBits: CommitteeBits, // New in ELECTRA + }, + {typeName: "Attestation", jsonCase: "eth2"} +); + +export const IndexedAttestation = new ContainerType( + { + attestingIndices: AttestingIndices, // Modified in ELECTRA + data: phase0Ssz.AttestationData, + signature: BLSSignature, + }, + 
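// Minimal sketch of the behaviour the getEth1DepositCount test above pins down (reconstructed from the test
// expectations; the real implementation lives in the state-transition package). Post-Electra (EIP-6110) the
// eth1 deposit flow is only followed up to depositRequestsStartIndex; pre-Electra only MAX_DEPOSITS caps it.
function eth1DepositCountSketch(state: {
  eth1DepositIndex: number;
  eth1DataDepositCount: number;
  depositRequestsStartIndex?: bigint; // only present post-Electra
}): number {
  if (state.depositRequestsStartIndex === undefined) {
    return Math.min(MAX_DEPOSITS, state.eth1DataDepositCount - state.eth1DepositIndex);
  }
  const eth1DepositIndexLimit = Math.min(state.eth1DataDepositCount, Number(state.depositRequestsStartIndex));
  return state.eth1DepositIndex < eth1DepositIndexLimit
    ? Math.min(MAX_DEPOSITS, eth1DepositIndexLimit - state.eth1DepositIndex)
    : 0;
}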
{typeName: "IndexedAttestation", jsonCase: "eth2"} +); + +/** Same as `IndexedAttestation` but epoch, slot and index are not bounded and must be a bigint */ +export const IndexedAttestationBigint = new ContainerType( + { + attestingIndices: AttestingIndices, // Modified in ELECTRA + data: phase0Ssz.AttestationDataBigint, + signature: BLSSignature, + }, + {typeName: "IndexedAttestation", jsonCase: "eth2"} +); + +export const AttesterSlashing = new ContainerType( + { + attestation1: IndexedAttestationBigint, // Modified in ELECTRA + attestation2: IndexedAttestationBigint, // Modified in ELECTRA + }, + {typeName: "AttesterSlashing", jsonCase: "eth2"} +); + +export const AggregateAndProof = new ContainerType( + { + aggregatorIndex: ValidatorIndex, + aggregate: Attestation, // Modified in ELECTRA + selectionProof: BLSSignature, + }, + {typeName: "AggregateAndProof", jsonCase: "eth2", cachePermanentRootStruct: true} +); + +export const SignedAggregateAndProof = new ContainerType( + { + message: AggregateAndProof, // Modified in ELECTRA + signature: BLSSignature, + }, + {typeName: "SignedAggregateAndProof", jsonCase: "eth2"} +); + +export const DepositRequest = new ContainerType( + { + pubkey: BLSPubkey, + withdrawalCredentials: Bytes32, + amount: UintNum64, + signature: BLSSignature, + index: DepositIndex, + }, + {typeName: "DepositRequest", jsonCase: "eth2"} +); + +export const DepositRequests = new ListCompositeType(DepositRequest, MAX_DEPOSIT_REQUESTS_PER_PAYLOAD); + +export const WithdrawalRequest = new ContainerType( + { + sourceAddress: ExecutionAddress, + validatorPubkey: BLSPubkey, + amount: UintNum64, + }, + {typeName: "WithdrawalRequest", jsonCase: "eth2"} +); +export const WithdrawalRequests = new ListCompositeType(WithdrawalRequest, MAX_WITHDRAWAL_REQUESTS_PER_PAYLOAD); +export const ConsolidationRequest = new ContainerType( + { + sourceAddress: ExecutionAddress, + sourcePubkey: BLSPubkey, + targetPubkey: BLSPubkey, + }, + {typeName: "ConsolidationRequest", jsonCase: "eth2"} +); +export const ConsolidationRequests = new ListCompositeType( + ConsolidationRequest, + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD +); + +export const ExecutionPayload = new ContainerType( + { + ...denebSsz.ExecutionPayload.fields, + depositRequests: DepositRequests, // New in ELECTRA + withdrawalRequests: WithdrawalRequests, // New in ELECTRA + consolidationRequests: ConsolidationRequests, // New in ELECTRA + }, + {typeName: "ExecutionPayload", jsonCase: "eth2"} +); + +export const ExecutionPayloadHeader = new ContainerType( + { + ...denebSsz.ExecutionPayloadHeader.fields, + depositRequestsRoot: Root, // New in ELECTRA + withdrawalRequestsRoot: Root, // New in ELECTRA + consolidationRequestsRoot: Root, // New in ELECTRA + }, + {typeName: "ExecutionPayloadHeader", jsonCase: "eth2"} +); + +// We have to preserve Fields ordering while changing the type of ExecutionPayload +export const BeaconBlockBody = new ContainerType( + { + randaoReveal: phase0Ssz.BeaconBlockBody.fields.randaoReveal, + eth1Data: phase0Ssz.BeaconBlockBody.fields.eth1Data, + graffiti: phase0Ssz.BeaconBlockBody.fields.graffiti, + proposerSlashings: phase0Ssz.BeaconBlockBody.fields.proposerSlashings, + attesterSlashings: new ListCompositeType(AttesterSlashing, MAX_ATTESTER_SLASHINGS_ELECTRA), // Modified in ELECTRA + attestations: new ListCompositeType(Attestation, MAX_ATTESTATIONS_ELECTRA), // Modified in ELECTRA + deposits: phase0Ssz.BeaconBlockBody.fields.deposits, + voluntaryExits: phase0Ssz.BeaconBlockBody.fields.voluntaryExits, + 
syncAggregate: altairSsz.BeaconBlockBody.fields.syncAggregate, + executionPayload: ExecutionPayload, // Modified in ELECTRA + blsToExecutionChanges: capellaSsz.BeaconBlockBody.fields.blsToExecutionChanges, + blobKzgCommitments: denebSsz.BeaconBlockBody.fields.blobKzgCommitments, + }, + {typeName: "BeaconBlockBody", jsonCase: "eth2", cachePermanentRootStruct: true} +); + +export const BeaconBlock = new ContainerType( + { + ...denebSsz.BeaconBlock.fields, + body: BeaconBlockBody, // Modified in ELECTRA + }, + {typeName: "BeaconBlock", jsonCase: "eth2", cachePermanentRootStruct: true} +); + +export const SignedBeaconBlock = new ContainerType( + { + message: BeaconBlock, // Modified in ELECTRA + signature: BLSSignature, + }, + {typeName: "SignedBeaconBlock", jsonCase: "eth2"} +); + +export const BlindedBeaconBlockBody = new ContainerType( + { + randaoReveal: phase0Ssz.BeaconBlockBody.fields.randaoReveal, + eth1Data: phase0Ssz.BeaconBlockBody.fields.eth1Data, + graffiti: phase0Ssz.BeaconBlockBody.fields.graffiti, + proposerSlashings: phase0Ssz.BeaconBlockBody.fields.proposerSlashings, + attesterSlashings: new ListCompositeType(AttesterSlashing, MAX_ATTESTER_SLASHINGS_ELECTRA), // Modified in ELECTRA + attestations: new ListCompositeType(Attestation, MAX_ATTESTATIONS_ELECTRA), // Modified in ELECTRA + deposits: phase0Ssz.BeaconBlockBody.fields.deposits, + voluntaryExits: phase0Ssz.BeaconBlockBody.fields.voluntaryExits, + syncAggregate: altairSsz.SyncAggregate, + executionPayloadHeader: ExecutionPayloadHeader, // Modified in ELECTRA + blsToExecutionChanges: capellaSsz.BeaconBlockBody.fields.blsToExecutionChanges, + blobKzgCommitments: denebSsz.BeaconBlockBody.fields.blobKzgCommitments, + }, + {typeName: "BlindedBeaconBlockBody", jsonCase: "eth2", cachePermanentRootStruct: true} +); + +export const BlindedBeaconBlock = new ContainerType( + { + ...denebSsz.BlindedBeaconBlock.fields, + body: BlindedBeaconBlockBody, // Modified in ELECTRA + }, + {typeName: "BlindedBeaconBlock", jsonCase: "eth2", cachePermanentRootStruct: true} +); + +export const SignedBlindedBeaconBlock = new ContainerType( + { + message: BlindedBeaconBlock, // Modified in ELECTRA + signature: BLSSignature, + }, + {typeName: "SignedBlindedBeaconBlock", jsonCase: "eth2"} +); + +export const BuilderBid = new ContainerType( + { + header: ExecutionPayloadHeader, // Modified in ELECTRA + blindedBlobsBundle: denebSsz.BlobKzgCommitments, + value: UintBn256, + pubkey: BLSPubkey, + }, + {typeName: "BuilderBid", jsonCase: "eth2"} +); + +export const SignedBuilderBid = new ContainerType( + { + message: BuilderBid, + signature: BLSSignature, + }, + {typeName: "SignedBuilderBid", jsonCase: "eth2"} +); + +export const ExecutionPayloadAndBlobsBundle = new ContainerType( + { + executionPayload: ExecutionPayload, // Modified in ELECTRA + blobsBundle: denebSsz.BlobsBundle, + }, + {typeName: "ExecutionPayloadAndBlobsBundle", jsonCase: "eth2"} +); + +export const PendingBalanceDeposit = new ContainerType( + { + index: ValidatorIndex, + amount: Gwei, + }, + {typeName: "PendingBalanceDeposit", jsonCase: "eth2"} +); + +export const PendingBalanceDeposits = new ListCompositeType(PendingBalanceDeposit, PENDING_BALANCE_DEPOSITS_LIMIT); + +export const PendingPartialWithdrawal = new ContainerType( + { + index: ValidatorIndex, + amount: Gwei, + withdrawableEpoch: Epoch, + }, + {typeName: "PendingPartialWithdrawal", jsonCase: "eth2"} +); + +export const PendingConsolidation = new ContainerType( + { + sourceIndex: ValidatorIndex, + targetIndex: ValidatorIndex, + 
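// Minimal illustrative sketch: the EIP-7251 queue entries above store `amount` as Gwei (UintBn64), so values
// read back as bigint. Mirroring getPendingBalanceToWithdraw earlier in this diff, a hypothetical helper
// summing queued deposits for one validator could look like this (assumes a CachedBeaconStateElectra):
function getPendingBalanceToDeposit(state: CachedBeaconStateElectra, validatorIndex: ValidatorIndex): number {
  return state.pendingBalanceDeposits
    .getAllReadonly()
    .filter((deposit) => deposit.index === validatorIndex)
    .reduce((total, deposit) => total + Number(deposit.amount), 0);
}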
}, + {typeName: "PendingConsolidation", jsonCase: "eth2"} +); + +// In EIP-7251, we spread deneb fields as new fields are appended at the end +export const BeaconState = new ContainerType( + { + genesisTime: UintNum64, + genesisValidatorsRoot: Root, + slot: primitiveSsz.Slot, + fork: phase0Ssz.Fork, + // History + latestBlockHeader: phase0Ssz.BeaconBlockHeader, + blockRoots: phase0Ssz.HistoricalBlockRoots, + stateRoots: phase0Ssz.HistoricalStateRoots, + // historical_roots Frozen in Capella, replaced by historical_summaries + historicalRoots: new ListCompositeType(Root, HISTORICAL_ROOTS_LIMIT), + // Eth1 + eth1Data: phase0Ssz.Eth1Data, + eth1DataVotes: phase0Ssz.Eth1DataVotes, + eth1DepositIndex: UintNum64, + // Registry + validators: phase0Ssz.Validators, + balances: phase0Ssz.Balances, + randaoMixes: phase0Ssz.RandaoMixes, + // Slashings + slashings: phase0Ssz.Slashings, + // Participation + previousEpochParticipation: altairSsz.EpochParticipation, + currentEpochParticipation: altairSsz.EpochParticipation, + // Finality + justificationBits: phase0Ssz.JustificationBits, + previousJustifiedCheckpoint: phase0Ssz.Checkpoint, + currentJustifiedCheckpoint: phase0Ssz.Checkpoint, + finalizedCheckpoint: phase0Ssz.Checkpoint, + // Inactivity + inactivityScores: altairSsz.InactivityScores, + // Sync + currentSyncCommittee: altairSsz.SyncCommittee, + nextSyncCommittee: altairSsz.SyncCommittee, + // Execution + latestExecutionPayloadHeader: ExecutionPayloadHeader, // Modified in ELECTRA + // Withdrawals + nextWithdrawalIndex: capellaSsz.BeaconState.fields.nextWithdrawalIndex, + nextWithdrawalValidatorIndex: capellaSsz.BeaconState.fields.nextWithdrawalValidatorIndex, + // Deep history valid from Capella onwards + historicalSummaries: capellaSsz.BeaconState.fields.historicalSummaries, + depositRequestsStartIndex: UintBn64, // New in ELECTRA:EIP6110 + depositBalanceToConsume: Gwei, // New in ELECTRA:EIP7251 + exitBalanceToConsume: Gwei, // New in ELECTRA:EIP7251 + earliestExitEpoch: Epoch, // New in ELECTRA:EIP7251 + consolidationBalanceToConsume: Gwei, // New in ELECTRA:EIP7251 + earliestConsolidationEpoch: Epoch, // New in ELECTRA:EIP7251 + pendingBalanceDeposits: PendingBalanceDeposits, // New in ELECTRA:EIP7251 + pendingPartialWithdrawals: new ListCompositeType(PendingPartialWithdrawal, PENDING_PARTIAL_WITHDRAWALS_LIMIT), // New in ELECTRA:EIP7251 + pendingConsolidations: new ListCompositeType(PendingConsolidation, PENDING_CONSOLIDATIONS_LIMIT), // New in ELECTRA:EIP7251 + }, + {typeName: "BeaconState", jsonCase: "eth2"} +); + +export const LightClientHeader = new ContainerType( + { + beacon: phase0Ssz.BeaconBlockHeader, + execution: ExecutionPayloadHeader, // Modified in ELECTRA + executionBranch: new VectorCompositeType(Bytes32, EXECUTION_PAYLOAD_DEPTH), + }, + {typeName: "LightClientHeader", jsonCase: "eth2"} +); + +export const LightClientBootstrap = new ContainerType( + { + header: LightClientHeader, + currentSyncCommittee: altairSsz.SyncCommittee, + currentSyncCommitteeBranch: new VectorCompositeType(Bytes32, NEXT_SYNC_COMMITTEE_DEPTH_ELECTRA), + }, + {typeName: "LightClientBootstrap", jsonCase: "eth2"} +); + +export const LightClientUpdate = new ContainerType( + { + attestedHeader: LightClientHeader, + nextSyncCommittee: altairSsz.SyncCommittee, + nextSyncCommitteeBranch: new VectorCompositeType(Bytes32, NEXT_SYNC_COMMITTEE_DEPTH_ELECTRA), + finalizedHeader: LightClientHeader, + finalityBranch: new VectorCompositeType(Bytes32, FINALIZED_ROOT_DEPTH_ELECTRA), + syncAggregate: 
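// Minimal illustrative sketch of reading the fields appended to the Electra BeaconState above, assuming
// `state` is a ViewDU of that type: UintBn64/Gwei fields come back as bigint, Epoch fields as number.
const startIndex: bigint = state.depositRequestsStartIndex; // New in ELECTRA:EIP6110
const exitBudget: bigint = state.exitBalanceToConsume; // Gwei
const earliestExit: number = state.earliestExitEpoch; // Epoch
const queuedWithdrawals = state.pendingPartialWithdrawals.length;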
altairSsz.SyncAggregate, + signatureSlot: Slot, + }, + {typeName: "LightClientUpdate", jsonCase: "eth2"} +); + +export const LightClientFinalityUpdate = new ContainerType( + { + attestedHeader: LightClientHeader, + finalizedHeader: LightClientHeader, + finalityBranch: new VectorCompositeType(Bytes32, FINALIZED_ROOT_DEPTH_ELECTRA), + syncAggregate: altairSsz.SyncAggregate, + signatureSlot: Slot, + }, + {typeName: "LightClientFinalityUpdate", jsonCase: "eth2"} +); + +export const LightClientOptimisticUpdate = new ContainerType( + { + attestedHeader: LightClientHeader, + syncAggregate: altairSsz.SyncAggregate, + signatureSlot: Slot, + }, + {typeName: "LightClientOptimisticUpdate", jsonCase: "eth2"} +); + +export const LightClientStore = new ContainerType( + { + snapshot: LightClientBootstrap, + validUpdates: new ListCompositeType(LightClientUpdate, EPOCHS_PER_SYNC_COMMITTEE_PERIOD * SLOTS_PER_EPOCH), + }, + {typeName: "LightClientStore", jsonCase: "eth2"} +); + +// PayloadAttributes primarily for SSE event +export const PayloadAttributes = new ContainerType( + { + ...capellaSsz.PayloadAttributes.fields, + parentBeaconBlockRoot: Root, + }, + {typeName: "PayloadAttributes", jsonCase: "eth2"} +); + +export const SSEPayloadAttributes = new ContainerType( + { + ...bellatrixSsz.SSEPayloadAttributesCommon.fields, + payloadAttributes: PayloadAttributes, + }, + {typeName: "SSEPayloadAttributes", jsonCase: "eth2"} +); + +export const BlockContents = new ContainerType( + { + block: BeaconBlock, + kzgProofs: denebSsz.KZGProofs, + blobs: denebSsz.Blobs, + }, + {typeName: "BlockContents", jsonCase: "eth2"} +); + +export const SignedBlockContents = new ContainerType( + { + signedBlock: SignedBeaconBlock, + kzgProofs: denebSsz.KZGProofs, + blobs: denebSsz.Blobs, + }, + {typeName: "BlockContents", jsonCase: "eth2"} +); diff --git a/packages/types/src/electra/types.ts b/packages/types/src/electra/types.ts new file mode 100644 index 00000000000..9a81aec43b5 --- /dev/null +++ b/packages/types/src/electra/types.ts @@ -0,0 +1,54 @@ +import {ValueOf} from "@chainsafe/ssz"; +import * as ssz from "./sszTypes.js"; + +export type Attestation = ValueOf; +export type IndexedAttestation = ValueOf; +export type IndexedAttestationBigint = ValueOf; +export type AttesterSlashing = ValueOf; + +export type AggregateAndProof = ValueOf; +export type SignedAggregateAndProof = ValueOf; + +export type DepositRequest = ValueOf; +export type DepositRequests = ValueOf; + +export type WithdrawalRequest = ValueOf; +export type WithdrawalRequests = ValueOf; + +export type ConsolidationRequest = ValueOf; +export type ConsolidationRequests = ValueOf; + +export type ExecutionPayload = ValueOf; +export type ExecutionPayloadHeader = ValueOf; + +export type ExecutionPayloadAndBlobsBundle = ValueOf; + +export type BeaconBlockBody = ValueOf; +export type BeaconBlock = ValueOf; +export type SignedBeaconBlock = ValueOf; + +export type BeaconState = ValueOf; + +export type BlindedBeaconBlockBody = ValueOf; +export type BlindedBeaconBlock = ValueOf; +export type SignedBlindedBeaconBlock = ValueOf; + +export type FullOrBlindedExecutionPayload = ExecutionPayload | ExecutionPayloadHeader; + +export type BuilderBid = ValueOf; +export type SignedBuilderBid = ValueOf; +export type SSEPayloadAttributes = ValueOf; + +export type LightClientHeader = ValueOf; +export type LightClientBootstrap = ValueOf; +export type LightClientUpdate = ValueOf; +export type LightClientFinalityUpdate = ValueOf; +export type LightClientOptimisticUpdate = ValueOf; +export type 
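// Minimal illustrative sketch: as with the other forks, every type exported from electra/types.ts is the
// ValueOf of the corresponding SSZ container, so a value can be built from the container's default
// (`committeeIndex` is an assumed variable):
const att = ssz.electra.Attestation.defaultValue();
att.committeeBits.set(committeeIndex, true);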
LightClientStore = ValueOf; + +export type PendingBalanceDeposit = ValueOf; +export type PendingPartialWithdrawal = ValueOf; +export type PendingConsolidation = ValueOf; + +export type BlockContents = ValueOf; +export type SignedBlockContents = ValueOf; diff --git a/packages/types/src/phase0/sszTypes.ts b/packages/types/src/phase0/sszTypes.ts index 9eb2a13e5fa..4a04701b789 100644 --- a/packages/types/src/phase0/sszTypes.ts +++ b/packages/types/src/phase0/sszTypes.ts @@ -236,7 +236,7 @@ export const RandaoMixes = new VectorCompositeType(Bytes32, EPOCHS_PER_HISTORICA * This is initially a Gwei (BigInt) vector, however since Nov 2023 it's converted to UintNum64 (number) vector in the state transition because: * - state.slashings[nextEpoch % EPOCHS_PER_SLASHINGS_VECTOR] is reset per epoch in processSlashingsReset() * - max slashed validators per epoch is SLOTS_PER_EPOCH * MAX_ATTESTER_SLASHINGS * MAX_VALIDATORS_PER_COMMITTEE which is 32 * 2 * 2048 = 131072 on mainnet - * - with that and 32_000_000_000 MAX_EFFECTIVE_BALANCE, it still fits in a number given that Math.floor(Number.MAX_SAFE_INTEGER / 32_000_000_000) = 281474 + * - with that and 32_000_000_000 MAX_EFFECTIVE_BALANCE or 2048_000_000_000 MAX_EFFECTIVE_BALANCE_ELECTRA, it still fits in a number given that Math.floor(Number.MAX_SAFE_INTEGER / 32_000_000_000) = 281474 * - we don't need to compute the total slashings from state.slashings, it's handled by totalSlashingsByIncrement in EpochCache */ export const Slashings = new VectorBasicType(UintNum64, EPOCHS_PER_SLASHINGS_VECTOR); diff --git a/packages/types/src/primitive/sszTypes.ts b/packages/types/src/primitive/sszTypes.ts index 068a32e2cc1..376e17c3f1b 100644 --- a/packages/types/src/primitive/sszTypes.ts +++ b/packages/types/src/primitive/sszTypes.ts @@ -50,6 +50,7 @@ export const SubcommitteeIndex = UintNum64; */ export const ValidatorIndex = UintNum64; export const WithdrawalIndex = UintNum64; +export const DepositIndex = UintNum64; export const Gwei = UintBn64; export const Wei = UintBn256; export const Root = new ByteVectorType(32); diff --git a/packages/types/src/sszTypes.ts b/packages/types/src/sszTypes.ts index 60980fa0822..4399904a94b 100644 --- a/packages/types/src/sszTypes.ts +++ b/packages/types/src/sszTypes.ts @@ -5,9 +5,10 @@ import {ssz as altair} from "./altair/index.js"; import {ssz as bellatrix} from "./bellatrix/index.js"; import {ssz as capella} from "./capella/index.js"; import {ssz as deneb} from "./deneb/index.js"; +import {ssz as electra} from "./electra/index.js"; export * from "./primitive/sszTypes.js"; -export {phase0, altair, bellatrix, capella, deneb}; +export {phase0, altair, bellatrix, capella, deneb, electra}; /** * Index the ssz types that differ by fork @@ -19,6 +20,7 @@ const typesByFork = { [ForkName.bellatrix]: {...phase0, ...altair, ...bellatrix}, [ForkName.capella]: {...phase0, ...altair, ...bellatrix, ...capella}, [ForkName.deneb]: {...phase0, ...altair, ...bellatrix, ...capella, ...deneb}, + [ForkName.electra]: {...phase0, ...altair, ...bellatrix, ...capella, ...deneb, ...electra}, }; /** diff --git a/packages/types/src/types.ts b/packages/types/src/types.ts index 3602299ae5e..1071eed79a1 100644 --- a/packages/types/src/types.ts +++ b/packages/types/src/types.ts @@ -4,6 +4,7 @@ import {ts as altair} from "./altair/index.js"; import {ts as bellatrix} from "./bellatrix/index.js"; import {ts as capella} from "./capella/index.js"; import {ts as deneb} from "./deneb/index.js"; +import {ts as electra} from "./electra/index.js"; import {Slot} from 
"./primitive/types.js"; export * from "./primitive/types.js"; @@ -12,6 +13,7 @@ export {ts as altair} from "./altair/index.js"; export {ts as bellatrix} from "./bellatrix/index.js"; export {ts as capella} from "./capella/index.js"; export {ts as deneb} from "./deneb/index.js"; +export {ts as electra} from "./electra/index.js"; /** Common non-spec type to represent roots as strings */ export type RootHex = string; @@ -34,6 +36,12 @@ type TypesByFork = { BeaconState: phase0.BeaconState; SignedBeaconBlock: phase0.SignedBeaconBlock; Metadata: phase0.Metadata; + Attestation: phase0.Attestation; + IndexedAttestation: phase0.IndexedAttestation; + IndexedAttestationBigint: phase0.IndexedAttestationBigint; + AttesterSlashing: phase0.AttesterSlashing; + AggregateAndProof: phase0.AggregateAndProof; + SignedAggregateAndProof: phase0.SignedAggregateAndProof; }; [ForkName.altair]: { BeaconBlockHeader: phase0.BeaconBlockHeader; @@ -51,6 +59,12 @@ type TypesByFork = { LightClientStore: altair.LightClientStore; SyncCommittee: altair.SyncCommittee; SyncAggregate: altair.SyncAggregate; + Attestation: phase0.Attestation; + IndexedAttestation: phase0.IndexedAttestation; + IndexedAttestationBigint: phase0.IndexedAttestationBigint; + AttesterSlashing: phase0.AttesterSlashing; + AggregateAndProof: phase0.AggregateAndProof; + SignedAggregateAndProof: phase0.SignedAggregateAndProof; }; [ForkName.bellatrix]: { BeaconBlockHeader: phase0.BeaconBlockHeader; @@ -76,6 +90,12 @@ type TypesByFork = { SSEPayloadAttributes: bellatrix.SSEPayloadAttributes; SyncCommittee: altair.SyncCommittee; SyncAggregate: altair.SyncAggregate; + Attestation: phase0.Attestation; + IndexedAttestation: phase0.IndexedAttestation; + IndexedAttestationBigint: phase0.IndexedAttestationBigint; + AttesterSlashing: phase0.AttesterSlashing; + AggregateAndProof: phase0.AggregateAndProof; + SignedAggregateAndProof: phase0.SignedAggregateAndProof; }; [ForkName.capella]: { BeaconBlockHeader: phase0.BeaconBlockHeader; @@ -101,6 +121,12 @@ type TypesByFork = { SSEPayloadAttributes: capella.SSEPayloadAttributes; SyncCommittee: altair.SyncCommittee; SyncAggregate: altair.SyncAggregate; + Attestation: phase0.Attestation; + IndexedAttestation: phase0.IndexedAttestation; + IndexedAttestationBigint: phase0.IndexedAttestationBigint; + AttesterSlashing: phase0.AttesterSlashing; + AggregateAndProof: phase0.AggregateAndProof; + SignedAggregateAndProof: phase0.SignedAggregateAndProof; }; [ForkName.deneb]: { BeaconBlockHeader: phase0.BeaconBlockHeader; @@ -131,6 +157,48 @@ type TypesByFork = { Contents: deneb.Contents; SyncCommittee: altair.SyncCommittee; SyncAggregate: altair.SyncAggregate; + Attestation: phase0.Attestation; + IndexedAttestation: phase0.IndexedAttestation; + IndexedAttestationBigint: phase0.IndexedAttestationBigint; + AttesterSlashing: phase0.AttesterSlashing; + AggregateAndProof: phase0.AggregateAndProof; + SignedAggregateAndProof: phase0.SignedAggregateAndProof; + }; + [ForkName.electra]: { + BeaconBlockHeader: phase0.BeaconBlockHeader; + SignedBeaconBlockHeader: phase0.SignedBeaconBlockHeader; + BeaconBlock: electra.BeaconBlock; + BeaconBlockBody: electra.BeaconBlockBody; + BeaconState: electra.BeaconState; + SignedBeaconBlock: electra.SignedBeaconBlock; + Metadata: altair.Metadata; + LightClientHeader: electra.LightClientHeader; + LightClientBootstrap: electra.LightClientBootstrap; + LightClientUpdate: electra.LightClientUpdate; + LightClientFinalityUpdate: electra.LightClientFinalityUpdate; + LightClientOptimisticUpdate: 
electra.LightClientOptimisticUpdate; + LightClientStore: electra.LightClientStore; + BlindedBeaconBlock: electra.BlindedBeaconBlock; + BlindedBeaconBlockBody: electra.BlindedBeaconBlockBody; + SignedBlindedBeaconBlock: electra.SignedBlindedBeaconBlock; + ExecutionPayload: electra.ExecutionPayload; + ExecutionPayloadHeader: electra.ExecutionPayloadHeader; + BuilderBid: electra.BuilderBid; + SignedBuilderBid: electra.SignedBuilderBid; + SSEPayloadAttributes: electra.SSEPayloadAttributes; + BlockContents: electra.BlockContents; + SignedBlockContents: electra.SignedBlockContents; + ExecutionPayloadAndBlobsBundle: deneb.ExecutionPayloadAndBlobsBundle; + BlobsBundle: deneb.BlobsBundle; + Contents: deneb.Contents; + SyncCommittee: altair.SyncCommittee; + SyncAggregate: altair.SyncAggregate; + Attestation: electra.Attestation; + IndexedAttestation: electra.IndexedAttestation; + IndexedAttestationBigint: electra.IndexedAttestationBigint; + AttesterSlashing: electra.AttesterSlashing; + AggregateAndProof: electra.AggregateAndProof; + SignedAggregateAndProof: electra.SignedAggregateAndProof; }; }; @@ -189,3 +257,10 @@ export type Metadata = TypesByFork[F]["Metadata"]; export type BuilderBid = TypesByFork[F]["BuilderBid"]; export type SignedBuilderBid = TypesByFork[F]["SignedBuilderBid"]; export type SSEPayloadAttributes = TypesByFork[F]["SSEPayloadAttributes"]; + +export type Attestation = TypesByFork[F]["Attestation"]; +export type IndexedAttestation = TypesByFork[F]["IndexedAttestation"]; +export type IndexedAttestationBigint = TypesByFork[F]["IndexedAttestationBigint"]; +export type AttesterSlashing = TypesByFork[F]["AttesterSlashing"]; +export type AggregateAndProof = TypesByFork[F]["AggregateAndProof"]; +export type SignedAggregateAndProof = TypesByFork[F]["SignedAggregateAndProof"]; diff --git a/packages/types/src/utils/typeguards.ts b/packages/types/src/utils/typeguards.ts index f006227e03c..72910645f6e 100644 --- a/packages/types/src/utils/typeguards.ts +++ b/packages/types/src/utils/typeguards.ts @@ -1,4 +1,4 @@ -import {ForkBlobs, ForkExecution} from "@lodestar/params"; +import {ForkBlobs, ForkExecution, ForkPostElectra} from "@lodestar/params"; import { BlockContents, SignedBeaconBlock, @@ -13,6 +13,7 @@ import { BlindedBeaconBlockBody, SignedBlockContents, BeaconBlock, + Attestation, } from "../types.js"; export function isExecutionPayload( @@ -66,3 +67,7 @@ export function isSignedBlockContents( ): data is SignedBlockContents { return (data as SignedBlockContents).kzgProofs !== undefined; } + +export function isElectraAttestation(attestation: Attestation): attestation is Attestation { + return (attestation as Attestation).committeeBits !== undefined; +} diff --git a/packages/types/test/unit/blinded.test.ts b/packages/types/test/unit/blinded.test.ts new file mode 100644 index 00000000000..3a4b346d29b --- /dev/null +++ b/packages/types/test/unit/blinded.test.ts @@ -0,0 +1,35 @@ +import {describe, it, expect} from "vitest"; +import {ForkName, isForkExecution} from "@lodestar/params"; +import {ssz} from "../../src/index.js"; + +describe("blinded data structures", function () { + it("should have the same number of fields as non-blinded", () => { + const blindedTypes = [ + {a: "BlindedBeaconBlockBody" as const, b: "BeaconBlockBody" as const}, + {a: "ExecutionPayloadHeader" as const, b: "ExecutionPayload" as const}, + ]; + + for (const {a, b} of blindedTypes) { + for (const fork of Object.keys(ssz.sszTypesFor) as ForkName[]) { + if (!isForkExecution(fork)) { + continue; + } + + const blindedType 
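// Minimal illustrative sketch of consuming the fork-parameterised aliases and the new isElectraAttestation
// guard above: code handling both pre- and post-Electra attestations can narrow on committeeBits instead of
// switching on the fork name (getCommitteeIndexSketch is a hypothetical helper).
function getCommitteeIndexSketch(attestation: Attestation): number {
  if (isElectraAttestation(attestation)) {
    // Post-Electra: AttestationData.index is 0, the committee is encoded in committeeBits
    return attestation.committeeBits.getTrueBitIndexes()[0];
  }
  // Pre-Electra: the committee index still lives in AttestationData
  return attestation.data.index;
}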
+        if (blindedType === undefined) {
+          expect.fail(`fork: ${fork}, type ${a} is undefined`);
+        }
+
+        const type = ssz[fork][b];
+        if (type === undefined) {
+          expect.fail(`fork: ${fork}, type ${b} is undefined`);
+        }
+
+        expect(Object.keys(blindedType.fields).length).toBeWithMessage(
+          Object.keys(type.fields).length,
+          `fork: ${fork}, types ${a} and ${b} have different number of fields`
+        );
+      }
+    }
+  });
+});
diff --git a/packages/utils/src/assert.ts b/packages/utils/src/assert.ts
index aa86161cca4..91612b0e640 100644
--- a/packages/utils/src/assert.ts
+++ b/packages/utils/src/assert.ts
@@ -21,6 +21,18 @@ export const assert = {
     }
   },
 
+  /**
+   * Assert not null
+   * ```
+   * actual !== null
+   * ```
+   */
+  notNull<T>(actual: T | null, message?: string): asserts actual is T {
+    if (!(actual !== null)) {
+      throw new AssertionError(`${message || "Expected value to be not null"}`);
+    }
+  },
+
   /**
    * Assert less than or equal
    * ```js
diff --git a/packages/utils/test/unit/assert.test.ts b/packages/utils/test/unit/assert.test.ts
index 0555bcbd01a..3b413efa11b 100644
--- a/packages/utils/test/unit/assert.test.ts
+++ b/packages/utils/test/unit/assert.test.ts
@@ -20,8 +20,18 @@ describe("assert", () => {
     });
   });
 
+  describe("notNull with custom message", () => {
+    it("Should not throw error with not null value", () => {
+      expect(() => assert.notNull(0)).not.toThrow();
+      expect(() => assert.notNull("")).not.toThrow();
+    });
+    it("Should throw with null value", () => {
+      expect(() => assert.notNull(null, "something must not be null")).toThrow("something must not be null");
+    });
+  });
+
   const cases: {
-    op: keyof Omit<typeof assert, "true">;
+    op: keyof Omit<typeof assert, "true" | "notNull">;
     args: [number, number];
     ok: boolean;
   }[] = [
diff --git a/packages/validator/package.json b/packages/validator/package.json
index 66de444c81b..373e59817d2 100644
--- a/packages/validator/package.json
+++ b/packages/validator/package.json
@@ -46,7 +46,7 @@
   ],
   "dependencies": {
     "@chainsafe/blst": "^2.0.3",
-    "@chainsafe/ssz": "^0.17.0",
+    "@chainsafe/ssz": "^0.17.1",
     "@lodestar/api": "^1.21.0",
     "@lodestar/config": "^1.21.0",
     "@lodestar/db": "^1.21.0",
diff --git a/packages/validator/src/services/attestation.ts b/packages/validator/src/services/attestation.ts
index 9191b251a6d..fc43b603c6b 100644
--- a/packages/validator/src/services/attestation.ts
+++ b/packages/validator/src/services/attestation.ts
@@ -1,8 +1,10 @@
 import {toHexString} from "@chainsafe/ssz";
-import {BLSSignature, phase0, Slot, ssz} from "@lodestar/types";
+import {BLSSignature, phase0, Slot, ssz, Attestation, SignedAggregateAndProof} from "@lodestar/types";
+import {ForkSeq} from "@lodestar/params";
 import {computeEpochAtSlot, isAggregatorFromCommitteeLength} from "@lodestar/state-transition";
 import {sleep} from "@lodestar/utils";
 import {ApiClient, routes} from "@lodestar/api";
+import {ChainForkConfig} from "@lodestar/config";
 import {IClock, LoggerVc} from "../util/index.js";
 import {PubkeyHex} from "../types.js";
 import {Metrics} from "../metrics.js";
@@ -43,6 +45,7 @@ export class AttestationService {
     chainHeadTracker: ChainHeaderTracker,
     syncingStatusTracker: SyncingStatusTracker,
     private readonly metrics: Metrics | null,
+    private readonly config: ChainForkConfig,
     private readonly opts?: AttestationServiceOpts
   ) {
     this.dutiesService = new AttestationDutiesService(
@@ -137,7 +140,7 @@ export class AttestationService {
     // Then download, sign and publish a `SignedAggregateAndProof` for each
     // validator that is elected to aggregate for this `slot` and `committeeIndex`.
-    await this.produceAndPublishAggregates(attestation, dutiesSameCommittee);
+    await this.produceAndPublishAggregates(attestation, index, dutiesSameCommittee);
   }
 
   private async runAttestationTasksGrouped(
@@ -157,13 +160,14 @@ export class AttestationService {
     this.metrics?.attesterStepCallProduceAggregate.observe(this.clock.secFromSlot(slot + 2 / 3));
 
     const dutiesByCommitteeIndex = groupAttDutiesByCommitteeIndex(dutiesAll);
+    const isPostElectra = this.config.getForkSeq(slot) >= ForkSeq.electra;
 
     // Then download, sign and publish a `SignedAggregateAndProof` for each
     // validator that is elected to aggregate for this `slot` and `committeeIndex`.
     await Promise.all(
       Array.from(dutiesByCommitteeIndex.entries()).map(([index, dutiesSameCommittee]) => {
-        const attestationData: phase0.AttestationData = {...attestationNoCommittee, index};
-        return this.produceAndPublishAggregates(attestationData, dutiesSameCommittee);
+        const attestationData: phase0.AttestationData = {...attestationNoCommittee, index: isPostElectra ? 0 : index};
+        return this.produceAndPublishAggregates(attestationData, index, dutiesSameCommittee);
       })
     );
   }
@@ -190,13 +194,14 @@ export class AttestationService {
     attestationNoCommittee: phase0.AttestationData,
     duties: AttDutyAndProof[]
   ): Promise<void> {
-    const signedAttestations: phase0.Attestation[] = [];
+    const signedAttestations: Attestation[] = [];
     const headRootHex = toHexString(attestationNoCommittee.beaconBlockRoot);
 
     const currentEpoch = computeEpochAtSlot(slot);
+    const isPostElectra = currentEpoch >= this.config.ELECTRA_FORK_EPOCH;
 
     await Promise.all(
       duties.map(async ({duty}) => {
-        const index = duty.committeeIndex;
+        const index = isPostElectra ? 0 : duty.committeeIndex;
         const attestationData: phase0.AttestationData = {...attestationNoCommittee, index};
         const logCtxValidator = {slot, index, head: headRootHex, validatorIndex: duty.validatorIndex};
@@ -232,7 +237,11 @@ export class AttestationService {
       ...(this.opts?.disableAttestationGrouping && {index: attestationNoCommittee.index}),
     };
     try {
-      (await this.api.beacon.submitPoolAttestations({signedAttestations})).assertOk();
+      if (isPostElectra) {
+        (await this.api.beacon.submitPoolAttestationsV2({signedAttestations})).assertOk();
+      } else {
+        (await this.api.beacon.submitPoolAttestations({signedAttestations})).assertOk();
+      }
       this.logger.info("Published attestations", {...logCtx, count: signedAttestations.length});
       this.metrics?.publishedAttestations.inc(signedAttestations.length);
     } catch (e) {
@@ -254,9 +263,11 @@ export class AttestationService {
    */
   private async produceAndPublishAggregates(
     attestation: phase0.AttestationData,
+    committeeIndex: number,
     duties: AttDutyAndProof[]
   ): Promise<void> {
-    const logCtx = {slot: attestation.slot, index: attestation.index};
+    const logCtx = {slot: attestation.slot, index: committeeIndex};
+    const isPostElectra = this.config.getForkSeq(attestation.slot) >= ForkSeq.electra;
 
     // No validator is aggregator, skip
     if (duties.every(({selectionProof}) => selectionProof === null)) {
@@ -264,14 +275,20 @@ export class AttestationService {
     }
 
     this.logger.verbose("Aggregating attestations", logCtx);
-    const res = await this.api.validator.getAggregatedAttestation({
-      attestationDataRoot: ssz.phase0.AttestationData.hashTreeRoot(attestation),
-      slot: attestation.slot,
-    });
+    const res = isPostElectra
+      ? await this.api.validator.getAggregatedAttestationV2({
+          attestationDataRoot: ssz.phase0.AttestationData.hashTreeRoot(attestation),
+          slot: attestation.slot,
+          committeeIndex,
+        })
+      : await this.api.validator.getAggregatedAttestation({
+          attestationDataRoot: ssz.phase0.AttestationData.hashTreeRoot(attestation),
+          slot: attestation.slot,
+        });
     const aggregate = res.value();
     this.metrics?.numParticipantsInAggregate.observe(aggregate.aggregationBits.getTrueBitIndexes().length);
 
-    const signedAggregateAndProofs: phase0.SignedAggregateAndProof[] = [];
+    const signedAggregateAndProofs: SignedAggregateAndProof[] = [];
 
     await Promise.all(
       duties.map(async ({duty, selectionProof}) => {
@@ -294,7 +311,11 @@ export class AttestationService {
 
     if (signedAggregateAndProofs.length > 0) {
       try {
-        (await this.api.validator.publishAggregateAndProofs({signedAggregateAndProofs})).assertOk();
+        if (isPostElectra) {
+          (await this.api.validator.publishAggregateAndProofsV2({signedAggregateAndProofs})).assertOk();
+        } else {
+          (await this.api.validator.publishAggregateAndProofs({signedAggregateAndProofs})).assertOk();
+        }
         this.logger.info("Published aggregateAndProofs", {...logCtx, count: signedAggregateAndProofs.length});
         this.metrics?.publishedAggregates.inc(signedAggregateAndProofs.length);
       } catch (e) {
diff --git a/packages/validator/src/services/block.ts b/packages/validator/src/services/block.ts
index a9dd7654a8f..7792ef85be9 100644
--- a/packages/validator/src/services/block.ts
+++ b/packages/validator/src/services/block.ts
@@ -160,7 +160,7 @@ export class BlockProposingService {
       this.logger.debug("Produced block", {...debugLogCtx, ...blockContents.debugLogCtx});
       this.metrics?.blocksProduced.inc();
 
-      const signedBlock = await this.validatorStore.signBlock(pubkey, blockContents.block, slot);
+      const signedBlock = await this.validatorStore.signBlock(pubkey, blockContents.block, slot, this.logger);
       const {broadcastValidation} = this.opts;
       const publishOpts = {broadcastValidation};
diff --git a/packages/validator/src/services/validatorStore.ts b/packages/validator/src/services/validatorStore.ts
index 53299463ad2..fa9d855aa24 100644
--- a/packages/validator/src/services/validatorStore.ts
+++ b/packages/validator/src/services/validatorStore.ts
@@ -19,6 +19,8 @@ import {
   DOMAIN_SYNC_COMMITTEE,
   DOMAIN_SYNC_COMMITTEE_SELECTION_PROOF,
   DOMAIN_APPLICATION_BUILDER,
+  ForkSeq,
+  MAX_COMMITTEES_PER_SLOT,
 } from "@lodestar/params";
 import {
   altair,
@@ -35,6 +37,9 @@ import {
   Slot,
   ssz,
   ValidatorIndex,
+  Attestation,
+  AggregateAndProof,
+  SignedAggregateAndProof,
 } from "@lodestar/types";
 import {routes} from "@lodestar/api";
 import {ISlashingProtection} from "../slashingProtection/index.js";
@@ -493,7 +498,7 @@ export class ValidatorStore {
     duty: routes.validator.AttesterDuty,
     attestationData: phase0.AttestationData,
     currentEpoch: Epoch
-  ): Promise<phase0.Attestation> {
+  ): Promise<Attestation> {
     // Make sure the target epoch is not higher than the current epoch to avoid potential attacks.
     if (attestationData.target.epoch > currentEpoch) {
       throw Error(
@@ -525,21 +530,30 @@ export class ValidatorStore {
       data: attestationData,
     };
 
-    return {
-      aggregationBits: BitArray.fromSingleBit(duty.committeeLength, duty.validatorCommitteeIndex),
-      data: attestationData,
-      signature: await this.getSignature(duty.pubkey, signingRoot, signingSlot, signableMessage),
-    };
+    if (this.config.getForkSeq(duty.slot) >= ForkSeq.electra) {
+      return {
+        aggregationBits: BitArray.fromSingleBit(duty.committeeLength, duty.validatorCommitteeIndex),
+        data: attestationData,
+        signature: await this.getSignature(duty.pubkey, signingRoot, signingSlot, signableMessage),
+        committeeBits: BitArray.fromSingleBit(MAX_COMMITTEES_PER_SLOT, duty.committeeIndex),
+      };
+    } else {
+      return {
+        aggregationBits: BitArray.fromSingleBit(duty.committeeLength, duty.validatorCommitteeIndex),
+        data: attestationData,
+        signature: await this.getSignature(duty.pubkey, signingRoot, signingSlot, signableMessage),
+      } as phase0.Attestation;
+    }
   }
 
   async signAggregateAndProof(
     duty: routes.validator.AttesterDuty,
     selectionProof: BLSSignature,
-    aggregate: phase0.Attestation
-  ): Promise<phase0.SignedAggregateAndProof> {
+    aggregate: Attestation
+  ): Promise<SignedAggregateAndProof> {
     this.validateAttestationDuty(duty, aggregate.data);
 
-    const aggregateAndProof: phase0.AggregateAndProof = {
+    const aggregateAndProof: AggregateAndProof = {
       aggregate,
       aggregatorIndex: duty.validatorIndex,
       selectionProof,
@@ -547,7 +561,10 @@ export class ValidatorStore {
 
     const signingSlot = aggregate.data.slot;
     const domain = this.config.getDomain(signingSlot, DOMAIN_AGGREGATE_AND_PROOF);
-    const signingRoot = computeSigningRoot(ssz.phase0.AggregateAndProof, aggregateAndProof, domain);
+    const signingRoot =
+      this.config.getForkSeq(duty.slot) >= ForkSeq.electra
+        ? computeSigningRoot(ssz.electra.AggregateAndProof, aggregateAndProof, domain)
+        : computeSigningRoot(ssz.phase0.AggregateAndProof, aggregateAndProof, domain);
 
     const signableMessage: SignableMessage = {
       type: SignableMessageType.AGGREGATE_AND_PROOF,
@@ -783,11 +800,16 @@ export class ValidatorStore {
     if (duty.slot !== data.slot) {
       throw Error(`Inconsistent duties during signing: duty.slot ${duty.slot} != att.slot ${data.slot}`);
     }
-    if (duty.committeeIndex != data.index) {
+
+    const isPostElectra = this.config.getForkSeq(duty.slot) >= ForkSeq.electra;
+    if (!isPostElectra && duty.committeeIndex != data.index) {
       throw Error(
         `Inconsistent duties during signing: duty.committeeIndex ${duty.committeeIndex} != att.committeeIndex ${data.index}`
      );
     }
+    if (isPostElectra && data.index !== 0) {
+      throw Error(`Non-zero committee index post-electra during signing: att.committeeIndex ${data.index}`);
+    }
   }
 
   private assertDoppelgangerSafe(pubKey: PubkeyHex | BLSPubkey): void {
diff --git a/packages/validator/src/util/params.ts b/packages/validator/src/util/params.ts
index 8ccaf9fe75b..6d6705f512d 100644
--- a/packages/validator/src/util/params.ts
+++ b/packages/validator/src/util/params.ts
@@ -73,6 +73,7 @@ function getSpecCriticalParams(localConfig: ChainConfig): Record
= {ELECTRA_FORK_EPOCH: 0};
+
+  const testContexts: [string, AttestationServiceOpts, Partial<ChainConfig>][] = [
+    ["With default configuration", {}, {}],
+    ["With default configuration post-electra", {}, electraConfig],
+    ["With attestation grouping disabled", {disableAttestationGrouping: true}, {}],
+    ["With attestation grouping disabled post-electra", {disableAttestationGrouping: true}, electraConfig],
+    ["With distributed aggregation selection enabled", {distributedAggregationSelection: true}, {}],
   ];
 
-  for
(const [title, opts] of testContexts) { + for (const [title, opts, chainConfig] of testContexts) { describe(title, () => { it("Should produce, sign, and publish an attestation + aggregate", async () => { const clock = new ClockMock(); + const config = createChainForkConfig({...defaultConfig, ...chainConfig}); + const isPostElectra = chainConfig.ELECTRA_FORK_EPOCH === 0; const attestationService = new AttestationService( loggerVc, api, @@ -68,11 +78,16 @@ describe("AttestationService", function () { chainHeadTracker, syncingStatusTracker, null, + config, opts ); - const attestation = ssz.phase0.Attestation.defaultValue(); - const aggregate = ssz.phase0.SignedAggregateAndProof.defaultValue(); + const attestation = isPostElectra + ? ssz.electra.Attestation.defaultValue() + : ssz.phase0.Attestation.defaultValue(); + const aggregate = isPostElectra + ? ssz.electra.SignedAggregateAndProof.defaultValue() + : ssz.phase0.SignedAggregateAndProof.defaultValue(); const duties: AttDutyAndProof[] = [ { duty: { @@ -102,10 +117,17 @@ describe("AttestationService", function () { // Mock beacon's attestation and aggregates endpoints api.validator.produceAttestationData.mockResolvedValue(mockApiResponse({data: attestation.data})); - api.validator.getAggregatedAttestation.mockResolvedValue(mockApiResponse({data: attestation})); - - api.beacon.submitPoolAttestations.mockResolvedValue(mockApiResponse({})); - api.validator.publishAggregateAndProofs.mockResolvedValue(mockApiResponse({})); + if (isPostElectra) { + api.validator.getAggregatedAttestationV2.mockResolvedValue( + mockApiResponse({data: attestation, meta: {version: ForkName.electra}}) + ); + api.beacon.submitPoolAttestationsV2.mockResolvedValue(mockApiResponse({})); + api.validator.publishAggregateAndProofsV2.mockResolvedValue(mockApiResponse({})); + } else { + api.validator.getAggregatedAttestation.mockResolvedValue(mockApiResponse({data: attestation})); + api.beacon.submitPoolAttestations.mockResolvedValue(mockApiResponse({})); + api.validator.publishAggregateAndProofs.mockResolvedValue(mockApiResponse({})); + } if (opts.distributedAggregationSelection) { // Mock distributed validator middleware client selections endpoint @@ -146,13 +168,25 @@ describe("AttestationService", function () { expect(api.validator.prepareBeaconCommitteeSubnet).toHaveBeenCalledWith({subscriptions: [subscription]}); } - // Must submit the attestation received through produceAttestationData() - expect(api.beacon.submitPoolAttestations).toHaveBeenCalledOnce(); - expect(api.beacon.submitPoolAttestations).toHaveBeenCalledWith({signedAttestations: [attestation]}); - - // Must submit the aggregate received through getAggregatedAttestation() then createAndSignAggregateAndProof() - expect(api.validator.publishAggregateAndProofs).toHaveBeenCalledOnce(); - expect(api.validator.publishAggregateAndProofs).toHaveBeenCalledWith({signedAggregateAndProofs: [aggregate]}); + if (isPostElectra) { + // Must submit the attestation received through produceAttestationData() + expect(api.beacon.submitPoolAttestationsV2).toHaveBeenCalledOnce(); + expect(api.beacon.submitPoolAttestationsV2).toHaveBeenCalledWith({signedAttestations: [attestation]}); + + // Must submit the aggregate received through getAggregatedAttestationV2() then createAndSignAggregateAndProof() + expect(api.validator.publishAggregateAndProofsV2).toHaveBeenCalledOnce(); + expect(api.validator.publishAggregateAndProofsV2).toHaveBeenCalledWith({ + signedAggregateAndProofs: [aggregate], + }); + } else { + // Must submit the attestation 
received through produceAttestationData() + expect(api.beacon.submitPoolAttestations).toHaveBeenCalledOnce(); + expect(api.beacon.submitPoolAttestations).toHaveBeenCalledWith({signedAttestations: [attestation]}); + + // Must submit the aggregate received through getAggregatedAttestation() then createAndSignAggregateAndProof() + expect(api.validator.publishAggregateAndProofs).toHaveBeenCalledOnce(); + expect(api.validator.publishAggregateAndProofs).toHaveBeenCalledWith({signedAggregateAndProofs: [aggregate]}); + } }); }); } diff --git a/packages/validator/test/unit/utils/interopConfigs.ts b/packages/validator/test/unit/utils/interopConfigs.ts index 2c05203660f..d263fa8c1d8 100644 --- a/packages/validator/test/unit/utils/interopConfigs.ts +++ b/packages/validator/test/unit/utils/interopConfigs.ts @@ -30,6 +30,7 @@ export const lighthouseHoleskyConfig = { EJECTION_BALANCE: "28000000000", MIN_PER_EPOCH_CHURN_LIMIT: "4", MAX_PER_EPOCH_ACTIVATION_CHURN_LIMIT: "8", + MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: "128000000000", CHURN_LIMIT_QUOTIENT: "65536", PROPOSER_SCORE_BOOST: "40", DEPOSIT_CHAIN_ID: "17000", @@ -120,6 +121,14 @@ export const lighthouseHoleskyConfig = { DOMAIN_VOLUNTARY_EXIT: "0x04000000", DOMAIN_CONTRIBUTION_AND_PROOF: "0x09000000", DOMAIN_SYNC_COMMITTEE_SELECTION_PROOF: "0x08000000", + DOMAIN_BLS_TO_EXECUTION_CHANGE: "0x0A000000", + MAX_EFFECTIVE_BALANCE_ELECTRA: "2048000000000", + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA: "65536", + MIN_ACTIVATION_BALANCE: "32000000000", + PENDING_BALANCE_DEPOSITS_LIMIT: "134217728", + PENDING_PARTIAL_WITHDRAWALS_LIMIT: "134217728", + PENDING_CONSOLIDATIONS_LIMIT: "262144", + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: "1", }; export const prysmHoleskyConfig = { @@ -207,10 +216,12 @@ export const prysmHoleskyConfig = { MIN_GENESIS_ACTIVE_VALIDATOR_COUNT: "16384", MIN_GENESIS_TIME: "1695902100", MIN_PER_EPOCH_CHURN_LIMIT: "4", + MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: "128000000000", MIN_SEED_LOOKAHEAD: "1", MIN_SLASHING_PENALTY_QUOTIENT: "128", MIN_SLASHING_PENALTY_QUOTIENT_ALTAIR: "64", MIN_SLASHING_PENALTY_QUOTIENT_BELLATRIX: "32", + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA: "65536", MIN_SYNC_COMMITTEE_PARTICIPANTS: "1", MIN_VALIDATOR_WITHDRAWABILITY_DELAY: "256", NODE_ID_BITS: "256", @@ -252,6 +263,12 @@ export const prysmHoleskyConfig = { VALIDATOR_REGISTRY_LIMIT: "1099511627776", WEIGHT_DENOMINATOR: "64", WHISTLEBLOWER_REWARD_QUOTIENT: "512", + MAX_EFFECTIVE_BALANCE_ELECTRA: "2048000000000", + MIN_ACTIVATION_BALANCE: "32000000000", + PENDING_BALANCE_DEPOSITS_LIMIT: "134217728", + PENDING_PARTIAL_WITHDRAWALS_LIMIT: "134217728", + PENDING_CONSOLIDATIONS_LIMIT: "262144", + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: "1", }; export const tekuHoleskyConfig = { @@ -348,6 +365,8 @@ export const tekuHoleskyConfig = { MAX_BLOB_COMMITMENTS_PER_BLOCK: "4096", DOMAIN_RANDAO: "0x02000000", CAPELLA_FORK_VERSION: "0x04017000", + EPOCHS_PER_RANDOM_SUBNET_SUBSCRIPTION: "256", + MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: "128000000000", MIN_SLASHING_PENALTY_QUOTIENT_ALTAIR: "64", EPOCHS_PER_ETH1_VOTING_PERIOD: "64", MAX_DEPOSIT_RECEIPTS_PER_PAYLOAD: "8192", @@ -383,6 +402,13 @@ export const tekuHoleskyConfig = { DOMAIN_AGGREGATE_AND_PROOF: "0x06000000", CHURN_LIMIT_QUOTIENT: "65536", BLS_WITHDRAWAL_PREFIX: "0x00", + MAX_EFFECTIVE_BALANCE_ELECTRA: "2048000000000", + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA: "65536", + MIN_ACTIVATION_BALANCE: "32000000000", + PENDING_BALANCE_DEPOSITS_LIMIT: "134217728", + PENDING_PARTIAL_WITHDRAWALS_LIMIT: "134217728", + PENDING_CONSOLIDATIONS_LIMIT: "262144", + 
MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: "1", }; export const nimbusHoleskyConfig = { @@ -465,6 +491,7 @@ export const nimbusHoleskyConfig = { INACTIVITY_SCORE_RECOVERY_RATE: "16", EJECTION_BALANCE: "28000000000", MIN_PER_EPOCH_CHURN_LIMIT: "4", + MIN_PER_EPOCH_CHURN_LIMIT_ELECTRA: "128000000000", CHURN_LIMIT_QUOTIENT: "65536", MAX_PER_EPOCH_ACTIVATION_CHURN_LIMIT: "8", PROPOSER_SCORE_BOOST: "40", @@ -518,4 +545,11 @@ export const nimbusHoleskyConfig = { TARGET_AGGREGATORS_PER_COMMITTEE: "16", TARGET_AGGREGATORS_PER_SYNC_SUBCOMMITTEE: "16", SYNC_COMMITTEE_SUBNET_COUNT: "4", + MAX_EFFECTIVE_BALANCE_ELECTRA: "2048000000000", + MIN_SLASHING_PENALTY_QUOTIENT_ELECTRA: "65536", + MIN_ACTIVATION_BALANCE: "32000000000", + PENDING_BALANCE_DEPOSITS_LIMIT: "134217728", + PENDING_PARTIAL_WITHDRAWALS_LIMIT: "134217728", + PENDING_CONSOLIDATIONS_LIMIT: "262144", + MAX_CONSOLIDATION_REQUESTS_PER_PAYLOAD: "1", }; diff --git a/packages/validator/test/utils/apiStub.ts b/packages/validator/test/utils/apiStub.ts index 3c1d80fff75..ef615af0336 100644 --- a/packages/validator/test/utils/apiStub.ts +++ b/packages/validator/test/utils/apiStub.ts @@ -20,6 +20,7 @@ export function getApiClientStub(): ApiClientStub { publishBlockV2: vi.fn(), submitPoolSyncCommitteeSignatures: vi.fn(), submitPoolAttestations: vi.fn(), + submitPoolAttestationsV2: vi.fn(), }, node: { getSyncingStatus: vi.fn(), @@ -36,7 +37,9 @@ export function getApiClientStub(): ApiClientStub { submitSyncCommitteeSelections: vi.fn(), produceAttestationData: vi.fn(), getAggregatedAttestation: vi.fn(), + getAggregatedAttestationV2: vi.fn(), publishAggregateAndProofs: vi.fn(), + publishAggregateAndProofsV2: vi.fn(), submitBeaconCommitteeSelections: vi.fn(), }, httpClient: httpClientStub, diff --git a/vite.base.config.ts b/vite.base.config.ts index 65e1bad0150..f9030d9edb7 100644 --- a/vite.base.config.ts +++ b/vite.base.config.ts @@ -40,6 +40,7 @@ export function getBaseViteConfig( esbuild: { banner, legalComments: "none", + sourcemap: "inline", }, build: { // "modules" refer to ['es2020', 'edge88', 'firefox78', 'chrome87', 'safari14'] diff --git a/yarn.lock b/yarn.lock index d47feb4514f..60e41c276e5 100644 --- a/yarn.lock +++ b/yarn.lock @@ -594,10 +594,10 @@ "@chainsafe/as-sha256" "^0.4.1" "@chainsafe/persistent-merkle-tree" "^0.6.1" -"@chainsafe/ssz@^0.17.0": - version "0.17.0" - resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.17.0.tgz#b154260f6885693fd77bfb2fff43dd8c3fa37edd" - integrity sha512-DEzyH9vF4zz+Zqe2EMZuxXyxV5+7cmmLwljL3VC3ApzmyPORsprGJM7xLaUJu3oMRKMdBpR8UjRNkfB2ROQJzQ== +"@chainsafe/ssz@^0.17.1": + version "0.17.1" + resolved "https://registry.yarnpkg.com/@chainsafe/ssz/-/ssz-0.17.1.tgz#7986afbcad5e6971006d596fdb7dfa34bc195131" + integrity sha512-1ay46QqYcVTBvUnDXTPTi5WTiENu7tIxpZGMDpUWps1/nYBmh/We/UoCF/jO+o/fkcDD3p8xQPlHbcCfy+jyjA== dependencies: "@chainsafe/as-sha256" "0.5.0" "@chainsafe/persistent-merkle-tree" "0.8.0" @@ -7710,6 +7710,11 @@ ignore@^5.0.4, ignore@^5.1.1, ignore@^5.2.0, ignore@^5.2.4: resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.0.tgz#67418ae40d34d6999c95ff56016759c718c82f78" integrity sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg== +immutable@^4.3.2: + version "4.3.5" + resolved "https://registry.yarnpkg.com/immutable/-/immutable-4.3.5.tgz#f8b436e66d59f99760dc577f5c99a4fd2a5cc5a0" + integrity sha512-8eabxkth9gZatlwl5TBuJnCsoTADlL6ftEr7A4qgdaTsPyreilDSnUk57SO+jfKcNtxPa22U5KK6DSeAYhpBJw== + import-fresh@^3.2.1, import-fresh@^3.3.0: version "3.3.0" 
resolved "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz"