Skip to content

Commit

Permalink
Merge branch 'develop' into pre-eip155
Browse files Browse the repository at this point in the history
  • Loading branch information
DreamWuGit authored Sep 11, 2024
2 parents ef2fa95 + 636c2c1 commit ad10e55
Show file tree
Hide file tree
Showing 38 changed files with 1,144 additions and 781 deletions.
30 changes: 15 additions & 15 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ members = [
resolver = "2"

[workspace.package]
version = "0.12.0"
version = "0.14.0"
edition = "2021"
license = "MIT OR Apache-2.0"

Expand Down
2 changes: 0 additions & 2 deletions aggregator/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -46,5 +46,3 @@ csv = "1.1"
[features]
default = ["revm-precompile/c-kzg"]
print-trace = ["ark-std/print-trace"]
# This feature is useful for unit tests where we check the SAT of pi batch circuit
disable_proof_aggregation = []
94 changes: 18 additions & 76 deletions aggregator/src/aggregation/circuit.rs
Original file line number Diff line number Diff line change
@@ -1,16 +1,4 @@
use crate::{
aggregation::decoder::WORKED_EXAMPLE,
blob::BatchData,
witgen::{zstd_encode, MultiBlockProcessResult},
LOG_DEGREE, PI_CHAIN_ID, PI_CURRENT_BATCH_HASH, PI_CURRENT_STATE_ROOT,
PI_CURRENT_WITHDRAW_ROOT, PI_PARENT_BATCH_HASH, PI_PARENT_STATE_ROOT,
};
use ark_std::{end_timer, start_timer};
use halo2_base::{Context, ContextParams};

#[cfg(not(feature = "disable_proof_aggregation"))]
use halo2_ecc::{ecc::EccChip, fields::fp::FpConfig};

use halo2_proofs::{
circuit::{Layouter, SimpleFloorPlanner, Value},
halo2curves::bn256::{Bn256, Fr, G1Affine},
Expand All @@ -19,34 +7,34 @@ use halo2_proofs::{
};
use itertools::Itertools;
use rand::Rng;
#[cfg(not(feature = "disable_proof_aggregation"))]
use std::rc::Rc;
use std::{env, fs::File};

#[cfg(not(feature = "disable_proof_aggregation"))]
use snark_verifier::loader::halo2::{halo2_ecc::halo2_base::AssignedValue, Halo2Loader};
use snark_verifier::pcs::kzg::KzgSuccinctVerifyingKey;
#[cfg(not(feature = "disable_proof_aggregation"))]
use snark_verifier::{
loader::halo2::halo2_ecc::halo2_base,
pcs::kzg::{Bdfg21, Kzg},
loader::halo2::{
halo2_ecc::{
ecc::EccChip,
fields::fp::FpConfig,
halo2_base::{AssignedValue, Context, ContextParams},
},
Halo2Loader,
},
pcs::kzg::{Bdfg21, Kzg, KzgSuccinctVerifyingKey},
};
#[cfg(not(feature = "disable_proof_aggregation"))]
use snark_verifier_sdk::{aggregate, flatten_accumulator};
use snark_verifier_sdk::{CircuitExt, Snark, SnarkWitness};
use snark_verifier_sdk::{aggregate, flatten_accumulator, CircuitExt, Snark, SnarkWitness};
use std::{env, fs::File, rc::Rc};
use zkevm_circuits::util::Challenges;

use crate::{
aggregation::witgen::process,
aggregation::{decoder::WORKED_EXAMPLE, witgen::process, BatchCircuitConfig},
batch::BatchHash,
blob::BatchData,
constants::{ACC_LEN, DIGEST_LEN},
core::{assign_batch_hashes, extract_proof_and_instances_with_pairing_check},
util::parse_hash_digest_cells,
AssignedBarycentricEvaluationConfig, ConfigParams,
witgen::{zstd_encode, MultiBlockProcessResult},
AssignedBarycentricEvaluationConfig, ConfigParams, LOG_DEGREE, PI_CHAIN_ID,
PI_CURRENT_BATCH_HASH, PI_CURRENT_STATE_ROOT, PI_CURRENT_WITHDRAW_ROOT, PI_PARENT_BATCH_HASH,
PI_PARENT_STATE_ROOT,
};

use super::BatchCircuitConfig;

/// Batch circuit, the chunk aggregation routine below recursion circuit
#[derive(Clone)]
pub struct BatchCircuit<const N_SNARKS: usize> {
Expand Down Expand Up @@ -186,47 +174,10 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> {
.range()
.load_lookup_table(&mut layouter)
.expect("load range lookup table");

// ==============================================
// Step 1: snark aggregation circuit
// ==============================================
#[cfg(feature = "disable_proof_aggregation")]
let barycentric = {
let mut first_pass = halo2_base::SKIP_FIRST_PASS;
layouter.assign_region(
|| "barycentric evaluation",
|region| {
if first_pass {
first_pass = false;
return Ok(AssignedBarycentricEvaluationConfig::default());
}

let mut ctx = Context::new(
region,
ContextParams {
max_rows: config.flex_gate().max_rows,
num_context_ids: 1,
fixed_columns: config.flex_gate().constants.clone(),
},
);

let barycentric = config.barycentric.assign(
&mut ctx,
&self.batch_hash.point_evaluation_assignments.coefficients,
self.batch_hash
.point_evaluation_assignments
.challenge_digest,
self.batch_hash.point_evaluation_assignments.evaluation,
);

config.barycentric.scalar.range.finalize(&mut ctx);
ctx.print_stats(&["barycentric evaluation"]);

Ok(barycentric)
},
)?
};

#[cfg(not(feature = "disable_proof_aggregation"))]
let (accumulator_instances, snark_inputs, barycentric) = {
use halo2_proofs::halo2curves::bn256::Fq;
let mut first_pass = halo2_base::SKIP_FIRST_PASS;
Expand Down Expand Up @@ -375,21 +326,13 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> {
};

// Extract digests
#[cfg(feature = "disable_proof_aggregation")]
let (_batch_hash_digest, _chunk_pi_hash_digests, _potential_batch_data_hash_digest) =
parse_hash_digest_cells::<N_SNARKS>(&assigned_batch_hash.hash_output);

#[cfg(not(feature = "disable_proof_aggregation"))]
let (_batch_hash_digest, chunk_pi_hash_digests, _potential_batch_data_hash_digest) =
parse_hash_digest_cells::<N_SNARKS>(&assigned_batch_hash.hash_output);

// ========================================================================
// step 2.a: check accumulator including public inputs to the snarks
// ========================================================================
#[cfg(not(feature = "disable_proof_aggregation"))]
let mut first_pass = halo2_base::SKIP_FIRST_PASS;

#[cfg(not(feature = "disable_proof_aggregation"))]
layouter.assign_region(
|| "BatchCircuit: Chunk PI",
|mut region| -> Result<(), Error> {
Expand Down Expand Up @@ -424,7 +367,6 @@ impl<const N_SNARKS: usize> Circuit<Fr> for BatchCircuit<N_SNARKS> {
},
)?;

#[cfg(not(feature = "disable_proof_aggregation"))]
{
assert!(accumulator_instances.len() == ACC_LEN);
for (i, v) in accumulator_instances.iter().enumerate() {
Expand Down
41 changes: 34 additions & 7 deletions aggregator/src/tests/aggregation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,19 +16,24 @@ use crate::{
BatchData, ChunkInfo,
};

// See https://github.com/scroll-tech/zkevm-circuits/pull/1311#issuecomment-2139559866
#[test]
fn batch_circuit_raw() {
// Circuit degree: the batch circuit needs 2^21 rows.
let k = 21;

// This set up requires one round of keccak for chunk's data hash
// let circuit: BatchCircuit<MAX_AGG_SNARKS> = build_new_batch_circuit(2, k);
// Build the batch circuit via the helper that skips blob encoding —
// presumably the blob bytes are the raw batch bytes here (no zstd); TODO confirm.
let circuit: BatchCircuit<MAX_AGG_SNARKS> = build_batch_circuit_skip_encoding();
let instance = circuit.instances();
// Run the mock prover over all constraints; unwrap is acceptable in a test.
let mock_prover = MockProver::<Fr>::run(k, &circuit, instance).unwrap();
mock_prover.assert_satisfied_par();
}

#[test]
fn batch_circuit_encode() {
    // Circuit degree: 2^21 rows, matching the raw-path test above.
    let degree = 21;

    // Build a batch circuit from 2 chunks through the standard construction path.
    let circuit: BatchCircuit<MAX_AGG_SNARKS> = build_new_batch_circuit(2, degree);

    // Mock-prove against the circuit's own public instances and assert
    // every constraint is satisfied (parallel check).
    let prover = MockProver::<Fr>::run(degree, &circuit, circuit.instances()).unwrap();
    prover.assert_satisfied_par();
}

#[ignore]
#[test]
fn test_2_snark_batch_circuit() {
Expand Down Expand Up @@ -143,6 +148,19 @@ fn build_new_batch_circuit<const N_SNARKS: usize>(
) -> BatchCircuit<N_SNARKS> {
// inner circuit: Mock circuit
let k0 = 8;
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
pub struct ChunkProof {
pub chunk_info: ChunkInfo,
}
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
struct BatchProvingTask {
pub chunk_proofs: Vec<ChunkProof>,
pub batch_header: BatchHeader<MAX_AGG_SNARKS>,
}
let file = std::fs::File::open("data/batch-task.json").expect("batch-task.json exists");
let reader = std::io::BufReader::new(file);
let batch_proving_task: BatchProvingTask =
serde_json::from_reader(reader).expect("deserialisation should succeed");

let mut rng = test_rng();
let params = gen_srs(k0);
Expand All @@ -163,6 +181,16 @@ fn build_new_batch_circuit<const N_SNARKS: usize>(
let batch_data = BatchData::<N_SNARKS>::new(num_real_chunks, &chunks_with_padding);
let batch_bytes = batch_data.get_batch_data_bytes();
let blob_bytes = get_blob_bytes(&batch_bytes);
let batch_header = BatchHeader::construct_from_chunks(
batch_proving_task.batch_header.version,
batch_proving_task.batch_header.batch_index,
batch_proving_task.batch_header.l1_message_popped,
batch_proving_task.batch_header.total_l1_message_popped,
batch_proving_task.batch_header.parent_batch_hash,
batch_proving_task.batch_header.last_block_timestamp,
&chunks_with_padding,
&blob_bytes,
);

// ==========================
// real chunks
Expand Down Expand Up @@ -190,8 +218,7 @@ fn build_new_batch_circuit<const N_SNARKS: usize>(
// ==========================
// batch
// ==========================
let batch_hash =
BatchHash::construct(&chunks_with_padding, BatchHeader::default(), &blob_bytes);
let batch_hash = BatchHash::construct(&chunks_with_padding, batch_header, &blob_bytes);

BatchCircuit::new(
&params,
Expand Down
Loading

0 comments on commit ad10e55

Please sign in to comment.