Skip to content

Commit

Permalink
Merge pull request #32 from Layr-Labs/epociask--eigenda-challenge-test
Browse files Browse the repository at this point in the history
feat: E2E challenge test and loads of bug fixes
  • Loading branch information
ethenotethan authored Aug 23, 2024
2 parents fe3b113 + 480f000 commit 1518a52
Show file tree
Hide file tree
Showing 24 changed files with 580 additions and 448 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ jobs:
echo "GOMEMLIMIT=6GiB" >> "$GITHUB_ENV"
echo "GOGC=80" >> "$GITHUB_ENV"
- name: Spinup eigenda-proxy test dependency
- name: spinup eigenda-proxy
run: ./scripts/start-eigenda-proxy.sh

- name: run tests without race detection
Expand All @@ -166,7 +166,7 @@ jobs:
if: matrix.test-mode == 'challenge'
run: |
packages=`go list ./...`
stdbuf -oL gotestsum --format short-verbose --packages="$packages" --rerun-fails=1 --no-color=false -- ./... -coverprofile=coverage.txt -covermode=atomic -coverpkg=./...,./go-ethereum/... -parallel=8 -tags=challengetest -run=TestChallenge > >(stdbuf -oL tee full.log | grep -vE "INFO|seal")
stdbuf -oL gotestsum --format short-verbose --packages="$packages" --rerun-fails=1 --no-color=false -- ./... -timeout 60m -coverprofile=coverage.txt -covermode=atomic -coverpkg=./...,./go-ethereum/... -parallel=8 -tags=challengetest -run=TestChallenge > >(stdbuf -oL tee full.log | grep -vE "INFO|seal")
- name: run stylus tests
if: matrix.test-mode == 'stylus'
Expand Down
2 changes: 1 addition & 1 deletion .gitmodules
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
[submodule "contracts"]
path = contracts
url = https://github.com/Layr-Labs/nitro-contracts.git
branch = new-osp-fixes-v3.0.3
branch = epociask--length-fixes-v3.0.3
[submodule "nitro-testnode"]
path = nitro-testnode
url = https://github.com/Layr-Labs/nitro-testnode.git
Expand Down
57 changes: 41 additions & 16 deletions arbitrator/prover/src/kzgbn254.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
use crate::utils::append_left_padded_uint32_be;
use crate::{utils::append_left_padded_biguint_be, Bytes32};
use ark_bn254::G2Affine;
use ark_ec::{AffineRepr, CurveGroup};
Expand All @@ -8,33 +9,50 @@ use kzgbn254::{blob::Blob, kzg::Kzg, polynomial::PolynomialFormat};
use num::BigUint;
use sha2::{Digest, Sha256};
use sha3::Keccak256;
use std::env;
use std::io::Write;
use std::path::PathBuf;

lazy_static::lazy_static! {

// note that we are loading 3000 for testing purposes atm, but for production use these values:
// g1 and g2 points from the operator setup guide
// srs_order = 268435456
// srs_points_to_load = 131072 (65536 is enough)

pub static ref KZG: Kzg = Kzg::setup(
"./arbitrator/prover/src/mainnet-files/g1.point.65536",
"./arbitrator/prover/src/mainnet-files/g2.point.65536",
"./arbitrator/prover/src/mainnet-files/g2.point.powerOf2",
pub static ref KZG_BN254_SETTINGS: Kzg = Kzg::setup(
&load_directory_with_prefix("src/mainnet-files/g1.point.65536"),
&load_directory_with_prefix("src/mainnet-files/g2.point.65536"),
&load_directory_with_prefix("src/mainnet-files/g2.point.powerOf2"),
268435456,
65536
).unwrap();
}

// Necessary helper function for understanding if srs is being loaded for normal node operation
// or for challenge testing.
// Resolves an SRS file path relative to the prover crate root.
//
// Challenge tests under `system_tests/` run with a different working
// directory than normal node operation, so the relative prefix must be
// adjusted based on where the process was launched from.
//
// Returns the joined path as a (lossily UTF-8 converted) `String`.
fn load_directory_with_prefix(directory_name: &str) -> String {
    let cwd = env::current_dir().expect("Failed to get current directory");
    // `Path::ends_with` compares whole path components, so this matches a
    // trailing `system_tests` directory, not an arbitrary string suffix.
    let prefix = if cwd.ends_with("system_tests") {
        "../arbitrator/prover/"
    } else {
        "./arbitrator/prover/"
    };
    PathBuf::from(prefix)
        .join(directory_name)
        .to_string_lossy()
        .into_owned()
}

/// Creates a KZG preimage proof consumable by the point evaluation precompile.
pub fn prove_kzg_preimage_bn254(
hash: Bytes32,
preimage: &[u8],
offset: u32,
out: &mut impl Write,
) -> Result<()> {
let mut kzg = KZG.clone();

let mut kzg = KZG_BN254_SETTINGS.clone();
// expand roots of unity
kzg.calculate_roots_of_unity(preimage.len() as u64)?;

Expand All @@ -47,12 +65,15 @@ pub fn prove_kzg_preimage_bn254(

let commitment_x_bigint: BigUint = blob_commitment.x.into();
let commitment_y_bigint: BigUint = blob_commitment.y.into();
let mut commitment_encoded_bytes = Vec::with_capacity(32);
append_left_padded_biguint_be(&mut commitment_encoded_bytes, &commitment_x_bigint);
append_left_padded_biguint_be(&mut commitment_encoded_bytes, &commitment_y_bigint);
let length_uint32: u32 = blob.len() as u32;

let mut commitment_encoded_length_bytes = Vec::with_capacity(68);
append_left_padded_biguint_be(&mut commitment_encoded_length_bytes, &commitment_x_bigint);
append_left_padded_biguint_be(&mut commitment_encoded_length_bytes, &commitment_y_bigint);
append_left_padded_uint32_be(&mut commitment_encoded_length_bytes, &length_uint32);

let mut keccak256_hasher = Keccak256::new();
keccak256_hasher.update(&commitment_encoded_bytes);
keccak256_hasher.update(&commitment_encoded_length_bytes);
let commitment_hash: Bytes32 = keccak256_hasher.finalize().into();

ensure!(
Expand All @@ -68,6 +89,11 @@ pub fn prove_kzg_preimage_bn254(
offset,
);

let mut commitment_encoded_bytes = Vec::with_capacity(64);

append_left_padded_biguint_be(&mut commitment_encoded_bytes, &commitment_x_bigint);
append_left_padded_biguint_be(&mut commitment_encoded_bytes, &commitment_y_bigint);

let mut proving_offset = offset;
let length_usize = preimage.len() as u64;

Expand All @@ -81,8 +107,7 @@ pub fn prove_kzg_preimage_bn254(
proving_offset = 0;
}

// Y = ϕ(offset) --> evaluation point for computing quotient proof
// confirming if this is actually ok ?
// Y = ϕ(offset)
let proven_y_fr = blob_polynomial_evaluation_form
.get_at_index(proving_offset as usize / 32)
.ok_or_else(|| {
Expand Down
2 changes: 1 addition & 1 deletion arbitrator/prover/src/machine.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3031,7 +3031,7 @@ impl Machine {
self.preimage_resolver
.get_const(self.context, preimage_ty, hash)
else {
panic!("Missing requested preimage for hash {}", hash)
panic!("Missing requested preimage for hash when trying to serialize proof {}", hash)
};
data.push(0); // preimage proof type
match preimage_ty {
Expand Down
56 changes: 31 additions & 25 deletions arbitrator/prover/src/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,14 @@

#[cfg(feature = "native")]
use crate::kzg::ETHEREUM_KZG_SETTINGS;
use crate::kzgbn254::KZG_BN254_SETTINGS;
use arbutil::PreimageType;
use ark_serialize::CanonicalSerialize;
#[cfg(feature = "native")]
use c_kzg::{Blob, KzgCommitment};
use digest::Digest;
use eyre::{eyre, Result};
use kzgbn254::{blob::Blob as EigenDABlob, kzg::Kzg as KzgBN254, polynomial::PolynomialFormat};
use kzgbn254::{blob::Blob as EigenDABlob, polynomial::PolynomialFormat};
use num::BigUint;
use serde::{Deserialize, Serialize};
use sha2::Sha256;
Expand Down Expand Up @@ -201,6 +202,33 @@ pub fn append_left_padded_biguint_be(vec: &mut Vec<u8>, biguint: &BigUint) {
vec.extend_from_slice(&bytes);
}

/// Appends the 4-byte big-endian encoding of `uint32` to `vec`.
///
/// A `u32` always serializes to exactly four bytes, so `to_be_bytes`
/// already yields the fixed-width ("left padded") representation.
pub fn append_left_padded_uint32_be(vec: &mut Vec<u8>, uint32: &u32) {
    vec.extend_from_slice(&uint32.to_be_bytes());
}

/// Computes the EigenDA preimage hash for `preimage`.
///
/// The preimage bytes are interpreted as an EigenDA blob, committed to via
/// the BN254 KZG settings, and hashed as keccak256 over the serialized
/// commitment coordinates (x then y, big-endian, left-padded) followed by
/// the blob length as a big-endian u32.
///
/// Returns the 32-byte keccak256 digest, or an error if polynomial
/// conversion or the KZG commitment fails.
pub fn hash_eigenda_preimage(preimage: &[u8]) -> Result<[u8; 32]> {
    let blob = EigenDABlob::from_padded_bytes_unchecked(preimage);
    let polynomial = blob.to_polynomial(PolynomialFormat::InCoefficientForm)?;
    let commitment = KZG_BN254_SETTINGS.commit(&polynomial)?;

    let x_coord: BigUint = commitment.x.into();
    let y_coord: BigUint = commitment.y.into();

    // 32 (x) + 32 (y) + 4 (length) = 68 bytes total.
    let mut encoded = Vec::with_capacity(68);
    append_left_padded_biguint_be(&mut encoded, &x_coord);
    append_left_padded_biguint_be(&mut encoded, &y_coord);
    append_left_padded_uint32_be(&mut encoded, &(blob.len() as u32));

    let mut hasher = Keccak256::new();
    hasher.update(&encoded);
    Ok(hasher.finalize().into())
}

#[cfg(feature = "native")]
pub fn hash_preimage(preimage: &[u8], ty: PreimageType) -> Result<[u8; 32]> {
match ty {
Expand All @@ -216,31 +244,9 @@ pub fn hash_preimage(preimage: &[u8], ty: PreimageType) -> Result<[u8; 32]> {
Ok(commitment_hash)
}
PreimageType::EigenDAHash => {
let kzg_bn254: KzgBN254 = KzgBN254::setup(
"./arbitrator/prover/src/mainnet-files/g1.point.65536",
"./arbitrator/prover/src/mainnet-files/g2.point.65536",
"./arbitrator/prover/src/mainnet-files/g2.point.powerOf2",
268435456,
65536,
)
.unwrap();

let blob = EigenDABlob::from_padded_bytes_unchecked(preimage);

let blob_polynomial = blob.to_polynomial(PolynomialFormat::InCoefficientForm)?;
let blob_commitment = kzg_bn254.commit(&blob_polynomial)?;

let commitment_x_bigint: BigUint = blob_commitment.x.into();
let commitment_y_bigint: BigUint = blob_commitment.y.into();
let mut commitment_encoded_bytes = Vec::with_capacity(32);
append_left_padded_biguint_be(&mut commitment_encoded_bytes, &commitment_x_bigint);
append_left_padded_biguint_be(&mut commitment_encoded_bytes, &commitment_y_bigint);

let mut keccak256_hasher = Keccak256::new();
keccak256_hasher.update(&commitment_encoded_bytes);
let commitment_hash: [u8; 32] = keccak256_hasher.finalize().into();
let hash = hash_eigenda_preimage(preimage)?;

Ok(commitment_hash)
Ok(hash)
}
}
}
2 changes: 1 addition & 1 deletion arbitrator/prover/test-cases/go/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,7 @@ func main() {
}
}
// EIGENDA COMMIT HASH
_, err = wavmio.ResolveTypedPreimage(arbutil.EigenDaPreimageType, common.HexToHash("13bbacb54f9aa9896af97156ca4dfc626e94031c5ed78fea68659e4ec9c9c55a"))
_, err = wavmio.ResolveTypedPreimage(arbutil.EigenDaPreimageType, common.HexToHash("1c303f6af17677aa69367bea000420f4b0ee26bb2c542a8879b9791a4b43d4d0"))
if err != nil {
panic(fmt.Sprintf("failed to resolve eigenda preimage: %v", err))
}
Expand Down
4 changes: 1 addition & 3 deletions arbitrator/prover/test-cases/rust/src/bin/host-io.rs
Original file line number Diff line number Diff line change
Expand Up @@ -109,9 +109,7 @@ fn main() {
let expected_len = 0;

for i in 0..5{
// test-files srs 011e229d75b13559dcb2d757ecae9b66fa579268e28e196789503322115c06e1
// mainnet srs 01605220b6928163676612ca50bbe5e0c595052876796dbedeae8ef597c9fdcf
let eigen_hash = hex!("13bbacb54f9aa9896af97156ca4dfc626e94031c5ed78fea68659e4ec9c9c55a");
let eigen_hash = hex!("1c303f6af17677aa69367bea000420f4b0ee26bb2c542a8879b9791a4b43d4d0");
bytebuffer = Bytes32(eigen_hash);

let actual_len = wavm_read_eigen_da_hash_preimage(bytebuffer.0.as_mut_ptr(), i * 32);
Expand Down
69 changes: 9 additions & 60 deletions arbnode/batch_poster.go
Original file line number Diff line number Diff line change
Expand Up @@ -955,10 +955,10 @@ func (b *BatchPoster) encodeAddBatch(
methodName := sequencerBatchPostMethodName
if use4844 {
methodName = sequencerBatchPostWithBlobsMethodName
}
if useEigenDA {
} else if useEigenDA {
methodName = sequencerBatchPostWithEigendaMethodName
}

method, ok := b.seqInboxABI.Methods[methodName]
if !ok {
return nil, nil, errors.New("failed to find add batch method")
Expand All @@ -981,54 +981,6 @@ func (b *BatchPoster) encodeAddBatch(
)
} else if useEigenDA {

blobVerificationProofType, err := abi.NewType("tuple", "", []abi.ArgumentMarshaling{
{Name: "batchID", Type: "uint32"},
{Name: "blobIndex", Type: "uint32"},
{Name: "batchMetadata", Type: "tuple",
Components: []abi.ArgumentMarshaling{
{Name: "batchHeader", Type: "tuple",
Components: []abi.ArgumentMarshaling{
{Name: "blobHeadersRoot", Type: "bytes32"},
{Name: "quorumNumbers", Type: "bytes"},
{Name: "signedStakeForQuorums", Type: "bytes"},
{Name: "referenceBlockNumber", Type: "uint32"},
},
},
{Name: "signatoryRecordHash", Type: "bytes32"},
{Name: "confirmationBlockNumber", Type: "uint32"},
},
},
{
Name: "inclusionProof",
Type: "bytes",
},
{
Name: "quorumIndices",
Type: "bytes",
},
})

if err != nil {
return nil, nil, err
}

blobHeaderType, err := abi.NewType("tuple", "", []abi.ArgumentMarshaling{
{Name: "commitment", Type: "tuple", Components: []abi.ArgumentMarshaling{
{Name: "X", Type: "uint256"},
{Name: "Y", Type: "uint256"},
}},
{Name: "dataLength", Type: "uint32"},
{Name: "quorumBlobParams", Type: "tuple[]", Components: []abi.ArgumentMarshaling{
{Name: "quorumNumber", Type: "uint8"},
{Name: "adversaryThresholdPercentage", Type: "uint8"},
{Name: "confirmationThresholdPercentage", Type: "uint8"},
{Name: "chunkLength", Type: "uint32"},
}},
})
if err != nil {
return nil, nil, err
}

addressType, err := abi.NewType("address", "", nil)
if err != nil {
return nil, nil, err
Expand All @@ -1042,23 +994,20 @@ func (b *BatchPoster) encodeAddBatch(
// Create ABI arguments
arguments := abi.Arguments{
{Type: uint256Type},
{Type: blobVerificationProofType},
{Type: blobHeaderType},
{Type: eigenda.DACertTypeABI},
{Type: addressType},
{Type: uint256Type},
{Type: uint256Type},
{Type: uint256Type},
}

// define values array
values := make([]interface{}, 7)
values := make([]interface{}, 6)
values[0] = seqNum
values[1] = eigenDaBlobInfo.BlobVerificationProof
values[2] = eigenDaBlobInfo.BlobHeader
values[3] = b.config().gasRefunder
values[4] = new(big.Int).SetUint64(delayedMsg)
values[5] = new(big.Int).SetUint64(uint64(prevMsgNum))
values[6] = new(big.Int).SetUint64(uint64(newMsgNum))
values[1] = eigenDaBlobInfo
values[2] = b.config().gasRefunder
values[3] = new(big.Int).SetUint64(delayedMsg)
values[4] = new(big.Int).SetUint64(uint64(prevMsgNum))
values[5] = new(big.Int).SetUint64(uint64(newMsgNum))

calldata, err = arguments.PackValues(values)

Expand Down
Loading

0 comments on commit 1518a52

Please sign in to comment.