Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: refactor and unit test getDataColumnSidecars #6947

Closed
wants to merge 24 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
08884f8
feat: placeholder PR for electra
g11tech Jan 24, 2024
b1481f1
feat: implement peerDAS on electra
g11tech Jan 24, 2024
7dab01e
fix: docker build issue for c-kzg
matthewkeil Jun 21, 2024
ae1a03e
rebase fixes
g11tech Jun 26, 2024
38fc1b4
fix: update c-zkg install workflow
matthewkeil Jun 26, 2024
2b26a4e
feat: add trustedSetupPrecompute cli flag
matthewkeil Jun 26, 2024
33e8ba2
fix: update trusted-setup for testing
matthewkeil Jun 26, 2024
c210176
fix: update c-zkg install workflow to remove sudo
matthewkeil Jun 26, 2024
1a800c7
fix: add rsync to apk deps
matthewkeil Jul 11, 2024
7ea9a60
get various sync mechanisms working with/without sharded data
g11tech Jul 14, 2024
e3f6bf4
some network options to control peering behavior
g11tech Jul 14, 2024
3186c20
allow setting node custody capability via --params
g11tech Jul 15, 2024
467c892
use eip754 names for the peerdas config
g11tech Jul 15, 2024
56b2fe7
refactor to add and use nodeid computation and clear out nodeid tracking
g11tech Jul 16, 2024
99d5c83
refactor: getDataColumnSidecars
matthewkeil Jul 12, 2024
f18db45
test: unit test getDataColumnSidecars with mocks from c-kzg library
matthewkeil Jul 12, 2024
135c991
refactor: use fromHex util
matthewkeil Jul 12, 2024
2c84091
chore: update numbering on mocks
matthewkeil Jul 15, 2024
84861f9
chore: update c-kzg to latest version
matthewkeil Jul 15, 2024
494a683
chore: fix type export syntax
matthewkeil Jul 15, 2024
b878ac3
test: add verification for cells from sidecars
matthewkeil Jul 15, 2024
96026f2
test: add verification to DataColumnSidecars tests
matthewkeil Jul 15, 2024
8f2155e
refactor: getDataColumnSidecars for PR comments
matthewkeil Jul 18, 2024
00a47c3
feat: narrow type and remove unnecessary conditional
matthewkeil Jul 18, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@

# --platform=$BUILDPLATFORM is used to build javascript source with host arch
# Otherwise TS builds on emulated archs and can be extremely slow (+1h)
FROM --platform=${BUILDPLATFORM:-amd64} node:22-alpine as build_src
FROM --platform=${BUILDPLATFORM:-amd64} node:22-alpine AS build_src
ARG COMMIT
WORKDIR /usr/app
RUN apk update && apk add --no-cache g++ make python3 py3-setuptools && rm -rf /var/cache/apk/*
RUN apk update && apk add --no-cache git g++ rsync make python3 py3-setuptools && rm -rf /var/cache/apk/*

COPY . .

Expand All @@ -21,9 +21,9 @@ RUN cd packages/cli && GIT_COMMIT=${COMMIT} yarn write-git-data

# Copy built src + node_modules to build native packages for archs different than host.
# Note: This step is redundant for the host arch
FROM node:22-alpine as build_deps
FROM node:22-alpine AS build_deps
WORKDIR /usr/app
RUN apk update && apk add --no-cache g++ make python3 py3-setuptools && rm -rf /var/cache/apk/*
RUN apk update && apk add --no-cache git g++ rsync make python3 py3-setuptools && rm -rf /var/cache/apk/*

COPY --from=build_src /usr/app .

Expand Down
2 changes: 1 addition & 1 deletion packages/beacon-node/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -133,7 +133,7 @@
"@lodestar/utils": "^1.19.0",
"@lodestar/validator": "^1.19.0",
"@multiformats/multiaddr": "^12.1.3",
"c-kzg": "^2.1.2",
"c-kzg": "matthewkeil/c-kzg-4844#853b22fa416d4eac376678a36bfba0bccb59dd78",
"datastore-core": "^9.1.1",
"datastore-level": "^10.1.1",
"deepmerge": "^4.3.1",
Expand Down
49 changes: 39 additions & 10 deletions packages/beacon-node/src/api/impl/beacon/blocks/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,10 @@ import {
reconstructFullBlockOrContents,
signedBeaconBlockToBlinded,
} from "@lodestar/state-transition";
import {ForkExecution, SLOTS_PER_HISTORICAL_ROOT, isForkExecution} from "@lodestar/params";
import {ForkExecution, SLOTS_PER_HISTORICAL_ROOT, isForkExecution, ForkName} from "@lodestar/params";
import {sleep, fromHex, toHex} from "@lodestar/utils";
import {
electra,
deneb,
isSignedBlockContents,
ProducedBlockSource,
Expand All @@ -23,10 +24,13 @@ import {
BlockInput,
BlobsSource,
BlockInputDataBlobs,
BlockInputDataDataColumns,
DataColumnsSource,
BlockInputData,
} from "../../../../chain/blocks/types.js";
import {promiseAllMaybeAsync} from "../../../../util/promises.js";
import {isOptimisticBlock} from "../../../../util/forkChoice.js";
import {computeBlobSidecars} from "../../../../util/blobs.js";
import {computeBlobSidecars, getDataColumnSidecars} from "../../../../util/blobs.js";
import {BlockError, BlockErrorCode, BlockGossipError} from "../../../../chain/errors/index.js";
import {OpSource} from "../../../../metrics/validatorMonitor.js";
import {NetworkEvent} from "../../../../network/index.js";
Expand Down Expand Up @@ -65,17 +69,40 @@ export function getBeaconBlockApi({
opts: PublishBlockOpts = {}
) => {
const seenTimestampSec = Date.now() / 1000;
let blockForImport: BlockInput, signedBlock: SignedBeaconBlock, blobSidecars: deneb.BlobSidecars;
let blockForImport: BlockInput,
signedBlock: SignedBeaconBlock,
blobSidecars: deneb.BlobSidecars,
dataColumnSidecars: electra.DataColumnSidecars;

if (isSignedBlockContents(signedBlockOrContents)) {
({signedBlock} = signedBlockOrContents);
blobSidecars = computeBlobSidecars(config, signedBlock, signedBlockOrContents);
const blockData = {
fork: config.getForkName(signedBlock.message.slot),
blobs: blobSidecars,
blobsSource: BlobsSource.api,
blobsBytes: blobSidecars.map(() => null),
} as BlockInputDataBlobs;
const fork = config.getForkName(signedBlock.message.slot);
let blockData: BlockInputData;
if (fork === ForkName.electra) {
dataColumnSidecars = getDataColumnSidecars(config, signedBlock, signedBlockOrContents);
blockData = {
fork,
dataColumnsLen: dataColumnSidecars.length,
// custodyColumns is a 1 based index of ith column present in dataColumns[custodyColumns[i-1]]
dataColumnsIndex: new Uint8Array(Array.from({length: dataColumnSidecars.length}, (_, j) => 1 + j)),
dataColumns: dataColumnSidecars,
dataColumnsBytes: dataColumnSidecars.map(() => null),
dataColumnsSource: DataColumnsSource.api,
} as BlockInputDataDataColumns;
blobSidecars = [];
} else if (fork === ForkName.deneb) {
blobSidecars = computeBlobSidecars(config, signedBlock, signedBlockOrContents);
blockData = {
fork,
blobs: blobSidecars,
blobsSource: BlobsSource.api,
blobsBytes: blobSidecars.map(() => null),
} as BlockInputDataBlobs;
dataColumnSidecars = [];
} else {
throw Error(`Invalid data fork=${fork} for publish`);
}

blockForImport = getBlockInput.availableData(
config,
signedBlock,
Expand All @@ -87,6 +114,7 @@ export function getBeaconBlockApi({
} else {
signedBlock = signedBlockOrContents;
blobSidecars = [];
dataColumnSidecars = [];
blockForImport = getBlockInput.preData(config, signedBlock, BlockSource.api, context?.sszBytes ?? null);
}

Expand Down Expand Up @@ -221,6 +249,7 @@ export function getBeaconBlockApi({
// b) they might require more hops to reach recipients in peerDAS kind of setup where
// blobs might need to hop between nodes because of partial subnet subscription
...blobSidecars.map((blobSidecar) => () => network.publishBlobSidecar(blobSidecar)),
...dataColumnSidecars.map((dataColumnSidecar) => () => network.publishDataColumnSidecar(dataColumnSidecar)),
() => network.publishBeaconBlock(signedBlock) as Promise<unknown>,
() =>
// there is no rush to persist block since we published it to gossip anyway
Expand Down
38 changes: 25 additions & 13 deletions packages/beacon-node/src/chain/blocks/importBlock.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,13 @@
import {toHexString} from "@chainsafe/ssz";
import {capella, ssz, altair, BeaconBlock} from "@lodestar/types";
import {ForkLightClient, ForkSeq, INTERVALS_PER_SLOT, MAX_SEED_LOOKAHEAD, SLOTS_PER_EPOCH} from "@lodestar/params";
import {
ForkName,
ForkLightClient,
ForkSeq,
INTERVALS_PER_SLOT,
MAX_SEED_LOOKAHEAD,
SLOTS_PER_EPOCH,
} from "@lodestar/params";
import {
CachedBeaconStateAltair,
computeEpochAtSlot,
Expand Down Expand Up @@ -113,18 +120,23 @@ export async function importBlock(
// out of data range blocks and import then in forkchoice although one would not be able to
// attest and propose with such head similar to optimistic sync
if (blockInput.type === BlockInputType.availableData) {
const {blobsSource, blobs} = blockInput.blockData;

this.metrics?.importBlock.blobsBySource.inc({blobsSource});
for (const blobSidecar of blobs) {
const {index, kzgCommitment} = blobSidecar;
this.emitter.emit(routes.events.EventType.blobSidecar, {
blockRoot: blockRootHex,
slot: blockSlot,
index,
kzgCommitment: toHexString(kzgCommitment),
versionedHash: toHexString(kzgCommitmentToVersionedHash(kzgCommitment)),
});
const {blockData} = blockInput;
if (blockData.fork === ForkName.deneb) {
const {blobsSource, blobs} = blockData;

this.metrics?.importBlock.blobsBySource.inc({blobsSource});
for (const blobSidecar of blobs) {
const {index, kzgCommitment} = blobSidecar;
this.emitter.emit(routes.events.EventType.blobSidecar, {
blockRoot: blockRootHex,
slot: blockSlot,
index,
kzgCommitment: toHexString(kzgCommitment),
versionedHash: toHexString(kzgCommitmentToVersionedHash(kzgCommitment)),
});
}
} else if (blockData.fork === ForkName.electra) {
// TODO peerDAS build and emit the event for the datacolumns
}
}
});
Expand Down
54 changes: 48 additions & 6 deletions packages/beacon-node/src/chain/blocks/types.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import {CachedBeaconStateAllForks, computeEpochAtSlot} from "@lodestar/state-transition";
import {MaybeValidExecutionStatus, DataAvailabilityStatus} from "@lodestar/fork-choice";
import {deneb, Slot, RootHex, SignedBeaconBlock} from "@lodestar/types";
import {deneb, Slot, RootHex, SignedBeaconBlock, electra, ColumnIndex} from "@lodestar/types";
import {ForkSeq, ForkName} from "@lodestar/params";
import {ChainForkConfig} from "@lodestar/config";

Expand Down Expand Up @@ -29,23 +29,45 @@ export enum BlobsSource {
byRoot = "req_resp_by_root",
}

/** Source from which data column sidecars were obtained (parallels `BlobsSource` above). */
export enum DataColumnsSource {
  // received over gossip
  gossip = "gossip",
  // submitted through the beacon API (block publishing)
  api = "api",
  // fetched via req/resp by-range requests
  byRange = "req_resp_by_range",
  // fetched via req/resp by-root requests
  byRoot = "req_resp_by_root",
}

/** Kind of object received via gossip: a block, a blob sidecar, or a data column sidecar. */
export enum GossipedInputType {
  block = "block",
  blob = "blob",
  dataColumn = "dataColumn",
}

type BlobsCacheMap = Map<number, {blobSidecar: deneb.BlobSidecar; blobBytes: Uint8Array | null}>;
export type BlobsCacheMap = Map<number, {blobSidecar: deneb.BlobSidecar; blobBytes: Uint8Array | null}>;
export type DataColumnsCacheMap = Map<
number,
{dataColumnSidecar: electra.DataColumnSidecar; dataColumnBytes: Uint8Array | null}
>;

type ForkBlobsInfo = {fork: ForkName.deneb};
type BlobsData = {blobs: deneb.BlobSidecars; blobsBytes: (Uint8Array | null)[]; blobsSource: BlobsSource};
export type BlockInputDataBlobs = ForkBlobsInfo & BlobsData;
export type BlockInputData = BlockInputDataBlobs;

export type BlockInputBlobs = {blobs: deneb.BlobSidecars; blobsBytes: (Uint8Array | null)[]; blobsSource: BlobsSource};
type Availability<T> = {availabilityPromise: Promise<T>; resolveAvailability: (data: T) => void};
type ForkDataColumnsInfo = {fork: ForkName.electra};
type DataColumnsData = {
// marker that these columns are to be custodied
dataColumnsLen: number;
dataColumnsIndex: Uint8Array;
dataColumns: electra.DataColumnSidecars;
dataColumnsBytes: (Uint8Array | null)[];
dataColumnsSource: DataColumnsSource;
};
export type BlockInputDataDataColumns = ForkDataColumnsInfo & DataColumnsData;
export type BlockInputData = BlockInputDataBlobs | BlockInputDataDataColumns;

type Availability<T> = {availabilityPromise: Promise<T>; resolveAvailability: (data: T) => void};
type CachedBlobs = {blobsCache: BlobsCacheMap} & Availability<BlockInputDataBlobs>;
export type CachedData = ForkBlobsInfo & CachedBlobs;
type CachedDataColumns = {dataColumnsCache: DataColumnsCacheMap} & Availability<BlockInputDataDataColumns>;
export type CachedData = (ForkBlobsInfo & CachedBlobs) | (ForkDataColumnsInfo & CachedDataColumns);

export type BlockInput = {block: SignedBeaconBlock; source: BlockSource; blockBytes: Uint8Array | null} & (
| {type: BlockInputType.preData | BlockInputType.outOfRangeData}
Expand Down Expand Up @@ -161,6 +183,26 @@ export function getBlockInputBlobs(blobsCache: BlobsCacheMap): Omit<BlobsData, "
return {blobs, blobsBytes};
}

/**
 * Pull the data column sidecars (and their cached serialized bytes, when
 * available) for the requested column indexes out of the cache.
 *
 * @param dataColumnsCache cache of sidecar + optional raw bytes, keyed by column index
 * @param columnIndexes column indexes to extract, in the order they should be returned
 * @returns parallel arrays of sidecars and their bytes (null when bytes were not cached)
 * @throws if any requested index has no entry in the cache
 */
export function getBlockInputDataColumns(
  dataColumnsCache: DataColumnsCacheMap,
  columnIndexes: ColumnIndex[]
): Omit<DataColumnsData, "dataColumnsLen" | "dataColumnsIndex" | "dataColumnsSource"> {
  const cachedEntries = columnIndexes.map((columnIndex) => {
    const cached = dataColumnsCache.get(columnIndex);
    if (cached === undefined) {
      // check if the index is correct as per the custody columns
      throw Error(`Missing dataColumnCache at index=${columnIndex}`);
    }
    return cached;
  });

  return {
    dataColumns: cachedEntries.map((entry) => entry.dataColumnSidecar),
    dataColumnsBytes: cachedEntries.map((entry) => entry.dataColumnBytes),
  };
}

export enum AttestationImportOpt {
Skip,
Force,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,19 @@ import {DataAvailabilityStatus} from "@lodestar/fork-choice";
import {ChainForkConfig} from "@lodestar/config";
import {deneb, UintNum64} from "@lodestar/types";
import {Logger} from "@lodestar/utils";
import {ForkName} from "@lodestar/params";
import {BlockError, BlockErrorCode} from "../errors/index.js";
import {validateBlobSidecars} from "../validation/blobSidecar.js";
import {validateDataColumnsSidecars} from "../validation/dataColumnSidecar.js";
import {Metrics} from "../../metrics/metrics.js";
import {BlockInput, BlockInputType, ImportBlockOpts, BlobSidecarValidation, getBlockInput} from "./types.js";
import {
BlockInput,
BlockInputType,
ImportBlockOpts,
BlobSidecarValidation,
getBlockInput,
BlockInputData,
} from "./types.js";

// we can now wait for full 12 seconds because unavailable block sync will try pulling
// the blobs from the network anyway after 500ms of seeing the block
Expand Down Expand Up @@ -88,27 +97,37 @@ async function maybeValidateBlobs(
// run full validation
const {block} = blockInput;
const blockSlot = block.message.slot;

const blobsData =
blockInput.type === BlockInputType.availableData
? blockInput.blockData
: await raceWithCutoff(chain, blockInput, blockInput.cachedData.availabilityPromise);
const {blobs} = blobsData;

const {blobKzgCommitments} = (block as deneb.SignedBeaconBlock).message.body;
const beaconBlockRoot = chain.config.getForkTypes(blockSlot).BeaconBlock.hashTreeRoot(block.message);

// if the blob sidecars have been individually verified then we can skip kzg proof check
// but other checks to match blobs with block data still need to be performed
const skipProofsCheck = opts.validBlobSidecars === BlobSidecarValidation.Individual;
validateBlobSidecars(blockSlot, beaconBlockRoot, blobKzgCommitments, blobs, {skipProofsCheck});
const blockData =
blockInput.type === BlockInputType.availableData
? blockInput.blockData
: await raceWithCutoff(
chain,
blockInput,
blockInput.cachedData.availabilityPromise as Promise<BlockInputData>
);

if (blockData.fork === ForkName.deneb) {
const {blobs} = blockData;

// if the blob sidecars have been individually verified then we can skip kzg proof check
// but other checks to match blobs with block data still need to be performed
const skipProofsCheck = opts.validBlobSidecars === BlobSidecarValidation.Individual;
validateBlobSidecars(blockSlot, beaconBlockRoot, blobKzgCommitments, blobs, {skipProofsCheck});
} else if (blockData.fork === ForkName.electra) {
const {dataColumns} = blockData;
const skipProofsCheck = opts.validBlobSidecars === BlobSidecarValidation.Individual;
// might require numColumns, custodyColumns from blockData as input to below
validateDataColumnsSidecars(blockSlot, beaconBlockRoot, blobKzgCommitments, dataColumns, {skipProofsCheck});
}

const availableBlockInput = getBlockInput.availableData(
chain.config,
blockInput.block,
blockInput.source,
blockInput.blockBytes,
blobsData
blockData
);
return {dataAvailabilityStatus: DataAvailabilityStatus.Available, availableBlockInput: availableBlockInput};
}
Expand Down
Loading
Loading