libzkp and coordinator updates (preparation for Upgrade4) #1419

Merged: 11 commits, Jul 11, 2024
Changes from 7 commits
286 changes: 257 additions & 29 deletions common/libzkp/impl/Cargo.lock

Large diffs are not rendered by default.

6 changes: 5 additions & 1 deletion common/libzkp/impl/Cargo.toml
@@ -25,7 +25,11 @@ bls12_381 = { git = "https://github.com/scroll-tech/bls12_381", branch = "feat/i
[dependencies]
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.4", default-features = false, features = ["parallel_syn", "scroll"] }

# curie
prover_v3 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.4", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
# darwin
prover_v4 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/agg_recursion", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }

base64 = "0.13.0"
env_logger = "0.9.0"
117 changes: 102 additions & 15 deletions common/libzkp/impl/src/batch.rs
@@ -6,17 +6,20 @@ use crate::{
},
};
use libc::c_char;
use prover::{
aggregator::{Prover, Verifier},
use prover_v3::BatchProof as BatchProofV3;
use prover_v4::{
aggregator::{Prover, Verifier as VerifierV4},
check_chunk_hashes,
consts::AGG_VK_FILENAME,
consts::BATCH_VK_FILENAME,
utils::{chunk_trace_to_witness_block, init_env_and_log},
BatchProof, BatchProvingTask, BlockTrace, ChunkInfo, ChunkProof,
BatchHeader, BatchProof as BatchProofV4, BatchProvingTask, BlockTrace, BundleProof,
BundleProvingTask, ChunkInfo, ChunkProof, MAX_AGG_SNARKS,
};
use snark_verifier_sdk::verify_evm_calldata;
use std::{cell::OnceCell, env, ptr::null};

static mut PROVER: OnceCell<Prover> = OnceCell::new();
static mut VERIFIER: OnceCell<Verifier> = OnceCell::new();
static mut VERIFIER_V4: OnceCell<VerifierV4> = OnceCell::new();

/// # Safety
#[no_mangle]
@@ -30,8 +33,8 @@ pub unsafe extern "C" fn init_batch_prover(params_dir: *const c_char, assets_dir
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);

// The VK file must exist here; in the prover crate itself it is optional and only logged as a warning if missing.
if !file_exists(assets_dir, &AGG_VK_FILENAME) {
panic!("{} must exist in folder {}", *AGG_VK_FILENAME, assets_dir);
if !file_exists(assets_dir, &BATCH_VK_FILENAME) {
panic!("{} must exist in folder {}", *BATCH_VK_FILENAME, assets_dir);
}

let prover = Prover::from_dirs(params_dir, assets_dir);
@@ -49,15 +52,26 @@ pub unsafe extern "C" fn init_batch_verifier(params_dir: *const c_char, assets_d

// TODO: add a setting in scroll-prover.
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
let verifier = Verifier::from_dirs(params_dir, assets_dir);
let verifier_v4 = VerifierV4::from_dirs(params_dir, assets_dir);

VERIFIER.set(verifier).unwrap();
VERIFIER_V4.set(verifier_v4).unwrap();
}

/// # Safety
#[no_mangle]
pub unsafe extern "C" fn get_batch_vk() -> *const c_char {
let vk_result = panic_catch(|| PROVER.get_mut().unwrap().get_vk());
let vk_result = panic_catch(|| PROVER.get_mut().unwrap().get_batch_vk());

vk_result
.ok()
.flatten()
.map_or(null(), |vk| string_to_c_char(base64::encode(vk)))
}

/// # Safety
#[no_mangle]
pub unsafe extern "C" fn get_bundle_vk() -> *const c_char {
let vk_result = panic_catch(|| PROVER.get_mut().unwrap().get_bundle_vk());

vk_result
.ok()
@@ -103,15 +117,19 @@ pub unsafe extern "C" fn check_chunk_proofs(chunk_proofs: *const c_char) -> *con
pub unsafe extern "C" fn gen_batch_proof(
chunk_hashes: *const c_char,
chunk_proofs: *const c_char,
batch_header: *const c_char,
) -> *const c_char {
let proof_result: Result<Vec<u8>, String> = panic_catch(|| {
let chunk_hashes = c_char_to_vec(chunk_hashes);
let chunk_proofs = c_char_to_vec(chunk_proofs);
let batch_header = c_char_to_vec(batch_header);

let chunk_hashes = serde_json::from_slice::<Vec<ChunkInfo>>(&chunk_hashes)
.map_err(|e| format!("failed to deserialize chunk hashes: {e:?}"))?;
let chunk_proofs = serde_json::from_slice::<Vec<ChunkProof>>(&chunk_proofs)
.map_err(|e| format!("failed to deserialize chunk proofs: {e:?}"))?;
let batch_header = serde_json::from_slice::<BatchHeader<MAX_AGG_SNARKS>>(&batch_header)
.map_err(|e| format!("failed to deserialize batch header: {e:?}"))?;

if chunk_hashes.len() != chunk_proofs.len() {
return Err(format!("chunk hashes and chunk proofs lengths mismatch: chunk_hashes.len() = {}, chunk_proofs.len() = {}",
@@ -125,12 +143,13 @@ pub unsafe extern "C" fn gen_batch_proof(
check_chunk_hashes("", &chunk_hashes_proofs).map_err(|e| format!("failed to check chunk info: {e:?}"))?;

let batch = BatchProvingTask {
chunk_proofs
chunk_proofs,
batch_header,
};
let proof = PROVER
.get_mut()
.expect("failed to get mutable reference to PROVER.")
.gen_agg_evm_proof(batch, None, OUTPUT_DIR.as_deref())
.gen_batch_proof(batch, None, OUTPUT_DIR.as_deref())
.map_err(|e| format!("failed to generate proof: {e:?}"))?;

serde_json::to_vec(&proof).map_err(|e| format!("failed to serialize the proof: {e:?}"))
@@ -153,10 +172,78 @@ pub unsafe extern "C" fn gen_batch_proof(

/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_batch_proof(proof: *const c_char) -> c_char {
pub unsafe extern "C" fn verify_batch_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
let proof = c_char_to_vec(proof);
let fork_name_str = c_char_to_str(fork_name);
let fork_id = match fork_name_str {
"curie" => 3,
"darwin" => 4,
_ => {
log::warn!("unexpected fork_name {fork_name_str}, treated as darwin");
4
}
};
let verified = panic_catch(|| {
if fork_id == 3 {
// As of upgrade #3 (Curie), we verify batch proofs on-chain (EVM).
let proof = serde_json::from_slice::<BatchProofV3>(proof.as_slice()).unwrap();
verify_evm_calldata(
include_bytes!("plonk_verifier_0.11.4.bin").to_vec(),
proof.calldata(),
)
} else {
// Post upgrade #4 (Darwin), batch proofs are not EVM-verifiable. Instead they are
// halo2 proofs meant to be bundled recursively.
let proof = serde_json::from_slice::<BatchProofV4>(proof.as_slice()).unwrap();
VERIFIER_V4.get().unwrap().verify_batch_proof(proof)
}
});
verified.unwrap_or(false) as c_char
}

/// # Safety
#[no_mangle]
pub unsafe extern "C" fn gen_bundle_proof(batch_proofs: *const c_char) -> *const c_char {
let proof_result: Result<Vec<u8>, String> = panic_catch(|| {
let batch_proofs = c_char_to_vec(batch_proofs);
let batch_proofs = serde_json::from_slice::<Vec<BatchProofV4>>(&batch_proofs)
.map_err(|e| format!("failed to deserialize batch proofs: {e:?}"))?;

let bundle = BundleProvingTask { batch_proofs };
let proof = PROVER
.get_mut()
.expect("failed to get mutable reference to PROVER.")
.gen_bundle_proof(bundle, None, OUTPUT_DIR.as_deref())
.map_err(|e| format!("failed to generate bundle proof: {e:?}"))?;

serde_json::to_vec(&proof)
.map_err(|e| format!("failed to serialize the bundle proof: {e:?}"))
})
.unwrap_or_else(|e| Err(format!("unwind error: {e:?}")));

let r = match proof_result {
Ok(proof_bytes) => ProofResult {
message: Some(proof_bytes),
error: None,
},
Err(err) => ProofResult {
message: None,
error: Some(err),
},
};

serde_json::to_vec(&r).map_or(null(), vec_to_c_char)
}

/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_bundle_proof(proof: *const c_char) -> c_char {
let proof = c_char_to_vec(proof);
let proof = serde_json::from_slice::<BatchProof>(proof.as_slice()).unwrap();
let verified = panic_catch(|| VERIFIER.get().unwrap().verify_agg_evm_proof(proof));
let proof = serde_json::from_slice::<BundleProof>(proof.as_slice()).unwrap();
let verified = panic_catch(|| VERIFIER_V4.get().unwrap().verify_bundle_proof(proof));
verified.unwrap_or(false) as c_char
}

41 changes: 32 additions & 9 deletions common/libzkp/impl/src/chunk.rs
@@ -6,16 +6,18 @@ use crate::{
},
};
use libc::c_char;
use prover::{
use prover_v3::{zkevm::Verifier as VerifierV3, ChunkProof as ChunkProofV3};
use prover_v4::{
consts::CHUNK_VK_FILENAME,
utils::init_env_and_log,
zkevm::{Prover, Verifier},
BlockTrace, ChunkProof, ChunkProvingTask,
zkevm::{Prover, Verifier as VerifierV4},
BlockTrace, ChunkProof as ChunkProofV4, ChunkProvingTask,
};
use std::{cell::OnceCell, env, ptr::null};

static mut PROVER: OnceCell<Prover> = OnceCell::new();
static mut VERIFIER: OnceCell<Verifier> = OnceCell::new();
static mut VERIFIER_V3: OnceCell<VerifierV3> = OnceCell::new();
static mut VERIFIER_V4: OnceCell<VerifierV4> = OnceCell::new();

/// # Safety
#[no_mangle]
@@ -48,9 +50,11 @@ pub unsafe extern "C" fn init_chunk_verifier(params_dir: *const c_char, assets_d

// TODO: add a setting in scroll-prover.
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
let verifier = Verifier::from_dirs(params_dir, assets_dir);
let verifier_v3 = VerifierV3::from_dirs(params_dir, assets_dir);
let verifier_v4 = VerifierV4::from_dirs(params_dir, assets_dir);

VERIFIER.set(verifier).unwrap();
VERIFIER_V3.set(verifier_v3).unwrap();
VERIFIER_V4.set(verifier_v4).unwrap();
}

/// # Safety
@@ -99,10 +103,29 @@ pub unsafe extern "C" fn gen_chunk_proof(block_traces: *const c_char) -> *const

/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_chunk_proof(proof: *const c_char) -> c_char {
pub unsafe extern "C" fn verify_chunk_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
let proof = c_char_to_vec(proof);
let proof = serde_json::from_slice::<ChunkProof>(proof.as_slice()).unwrap();

let verified = panic_catch(|| VERIFIER.get().unwrap().verify_chunk_proof(proof));
let fork_name_str = c_char_to_str(fork_name);
let fork_id = match fork_name_str {
"curie" => 3,
"darwin" => 4,
_ => {
log::warn!("unexpected fork_name {fork_name_str}, treated as darwin");
4
}
};
let verified = panic_catch(|| {
if fork_id == 3 {
let proof = serde_json::from_slice::<ChunkProofV3>(proof.as_slice()).unwrap();
VERIFIER_V3.get().unwrap().verify_chunk_proof(proof)
} else {
let proof = serde_json::from_slice::<ChunkProofV4>(proof.as_slice()).unwrap();
VERIFIER_V4.get().unwrap().verify_chunk_proof(proof)
}
});
verified.unwrap_or(false) as c_char
}
Binary file removed common/libzkp/impl/src/plonk_verifier_0.10.3.bin
Binary file not shown.
Binary file added common/libzkp/impl/src/plonk_verifier_0.11.4.bin
Binary file not shown.
17 changes: 14 additions & 3 deletions common/libzkp/interface/libzkp.h
@@ -1,15 +1,26 @@
// BatchProver is used to:
// - Batch a list of chunk proofs
// - Bundle a list of batch proofs
void init_batch_prover(char* params_dir, char* assets_dir);
// BatchVerifier is used to:
// - Verify a batch proof
// - Verify a bundle proof
void init_batch_verifier(char* params_dir, char* assets_dir);

char* get_batch_vk();
char* check_chunk_proofs(char* chunk_proofs);
char* gen_batch_proof(char* chunk_hashes, char* chunk_proofs);
char verify_batch_proof(char* proof);
char* gen_batch_proof(char* chunk_hashes, char* chunk_proofs, char* batch_header);
char verify_batch_proof(char* proof, char* fork_name);

char* get_bundle_vk();
char* gen_bundle_proof(char* batch_proofs);
char verify_bundle_proof(char* proof);

void init_chunk_prover(char* params_dir, char* assets_dir);
void init_chunk_verifier(char* params_dir, char* assets_dir);
char* get_chunk_vk();
char* gen_chunk_proof(char* block_traces);
char verify_chunk_proof(char* proof);
char verify_chunk_proof(char* proof, char* fork_name);

char* block_traces_to_chunk_info(char* block_traces);
void free_c_chars(char* ptr);
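
For context, the coordinator consumes this header through cgo. The following is a minimal sketch, under assumed build flags, package and function names (they are illustrative, not the actual coordinator wrappers), of how a Go caller might adapt to the new gen_batch_proof and verify_batch_proof signatures:

package verifier

/*
#cgo LDFLAGS: -lzkp -lm -ldl
#include <stdlib.h>
#include "libzkp.h"
*/
import "C"

import "unsafe"

// GenBatchProof passes the chunk hashes, chunk proofs and the newly added
// batch header (all JSON-encoded) to libzkp and returns the serialized result.
func GenBatchProof(chunkHashesJSON, chunkProofsJSON, batchHeaderJSON []byte) []byte {
    cHashes := C.CString(string(chunkHashesJSON))
    defer C.free(unsafe.Pointer(cHashes))
    cProofs := C.CString(string(chunkProofsJSON))
    defer C.free(unsafe.Pointer(cProofs))
    cHeader := C.CString(string(batchHeaderJSON))
    defer C.free(unsafe.Pointer(cHeader))

    result := C.gen_batch_proof(cHashes, cProofs, cHeader)
    if result == nil {
        return nil
    }
    defer C.free_c_chars(result)
    return []byte(C.GoString(result))
}

// VerifyBatchProof forwards a serialized batch proof together with the
// hard-fork name ("curie" or "darwin") so libzkp can pick the right verifier.
func VerifyBatchProof(proofJSON []byte, forkName string) bool {
    cProof := C.CString(string(proofJSON))
    defer C.free(unsafe.Pointer(cProof))
    cFork := C.CString(forkName)
    defer C.free(unsafe.Pointer(cFork))
    return C.verify_batch_proof(cProof, cFork) != 0
}

verify_chunk_proof gains the same fork_name argument, so a chunk-side wrapper would change in the same way.
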
16 changes: 4 additions & 12 deletions common/types/message/message.go
@@ -52,22 +52,14 @@ type ChunkTaskDetail struct {

// BatchTaskDetail is a type containing BatchTask detail.
type BatchTaskDetail struct {
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []*ChunkProof `json:"chunk_proofs"`
ParentStateRoot common.Hash `json:"parent_state_root"`
ParentBatchHash common.Hash `json:"parent_batch_hash"`
BatchHeader *codecv3.DABatch `json:"batch_header"`
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []*ChunkProof `json:"chunk_proofs"`
BatchHeader *codecv3.DABatch `json:"batch_header"`
}

// BundleTaskDetail consists of all the information required to describe the task to generate a proof for a bundle of batches.
type BundleTaskDetail struct {
ChainID uint64 `json:"chain_id"`
FinalizedBatchHash common.Hash `json:"finalized_batch_hash"`
FinalizedStateRoot common.Hash `json:"finalized_state_root"`
PendingBatchHash common.Hash `json:"pending_batch_hash"`
PendingStateRoot common.Hash `json:"pending_state_root"`
PendingWithdrawRoot common.Hash `json:"pending_withdraw_root"`
BatchProofs []*BatchProof `json:"batch_proofs"`
BatchProofs []*BatchProof `json:"batch_proofs"`
}

// ChunkInfo is for calculating pi_hash for chunk
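
To illustrate the slimmer payload, here is a minimal coordinator-side sketch, with an assumed helper name and assumed import paths, of filling BatchTaskDetail: the parent state root and parent batch hash no longer travel separately because the codecv3 DABatch header already commits to them.

package provertask

import (
    "encoding/json"
    "fmt"

    // NOTE: both import paths below are assumptions for this sketch.
    "scroll-tech/common/types/encoding/codecv3"
    "scroll-tech/common/types/message"
)

// buildBatchTaskPayload is a hypothetical helper that decodes the raw batch
// header and marshals the trimmed-down task detail sent to the prover.
func buildBatchTaskPayload(chunkInfos []*message.ChunkInfo, chunkProofs []*message.ChunkProof, rawBatchHeader []byte) ([]byte, error) {
    batchHeader, err := codecv3.NewDABatchFromBytes(rawBatchHeader)
    if err != nil {
        return nil, fmt.Errorf("failed to decode batch header: %w", err)
    }

    taskDetail := message.BatchTaskDetail{
        ChunkInfos:  chunkInfos,
        ChunkProofs: chunkProofs,
        BatchHeader: batchHeader,
    }

    return json.Marshal(&taskDetail)
}
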
2 changes: 0 additions & 2 deletions coordinator/internal/logic/provertask/batch_prover_task.go
@@ -214,8 +214,6 @@ func (bp *BatchProverTask) formatProverTask(ctx context.Context, task *orm.Prove
}

if hardForkName == "darwin" {
taskDetail.ParentStateRoot = common.HexToHash(chunks[0].ParentChunkStateRoot)
taskDetail.ParentBatchHash = common.HexToHash(batch.ParentBatchHash)
batchHeader, decodeErr := codecv3.NewDABatchFromBytes(batch.BatchHeader)
if decodeErr != nil {
return nil, fmt.Errorf("failed to decode batch header, taskID:%s err:%w", task.TaskID, decodeErr)
10 changes: 1 addition & 9 deletions coordinator/internal/logic/provertask/bundle_prover_task.go
@@ -9,7 +9,6 @@ import (
"github.com/gin-gonic/gin"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params"
"gorm.io/gorm"
@@ -209,15 +208,8 @@ func (bp *BundleProverTask) formatProverTask(ctx context.Context, task *orm.Prov
batchProofs = append(batchProofs, &proof)
}

lastPendingBatch := batches[len(batches)-1]
taskDetail := message.BundleTaskDetail{
ChainID: bp.cfg.L2.ChainID,
FinalizedBatchHash: common.HexToHash(latestFinalizedBatch.Hash),
FinalizedStateRoot: common.HexToHash(latestFinalizedBatch.StateRoot),
PendingBatchHash: common.HexToHash(lastPendingBatch.Hash),
PendingStateRoot: common.HexToHash(lastPendingBatch.StateRoot),
PendingWithdrawRoot: common.HexToHash(lastPendingBatch.WithdrawRoot),
BatchProofs: batchProofs,
BatchProofs: batchProofs,
}

batchProofsBytes, err := json.Marshal(taskDetail)
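
The bundle payload shrinks in the same way; a minimal sketch with an assumed helper name is shown below. The chain ID and the finalized/pending batch fields are dropped, presumably because the bundle recursion derives them from the batch proofs' public inputs.

package provertask

import (
    "encoding/json"

    // NOTE: the import path is an assumption for this sketch.
    "scroll-tech/common/types/message"
)

// buildBundleTaskPayload is a hypothetical helper: the trimmed BundleTaskDetail
// now carries only the batch proofs to be bundled.
func buildBundleTaskPayload(batchProofs []*message.BatchProof) ([]byte, error) {
    taskDetail := message.BundleTaskDetail{
        BatchProofs: batchProofs,
    }
    return json.Marshal(&taskDetail)
}
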
11 changes: 8 additions & 3 deletions coordinator/internal/logic/submitproof/proof_receiver.go
@@ -169,17 +169,22 @@
if getHardForkErr != nil {
return ErrGetHardForkNameFailed
}
// only verify batch proof. chunk proof verifier have been disabled after Bernoulli
// Post-Bernoulli we do not verify chunk proofs.
// Verify batch proof
if message.ProofType(proofParameter.TaskType) == message.ProofTypeBatch {
var batchProof message.BatchProof
if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &batchProof); unmarshalErr != nil {
return unmarshalErr
}
success, verifyErr = m.verifier.VerifyBatchProof(&batchProof, hardForkName)
}

// Verify bundle proof
if message.ProofType(proofParameter.TaskType) == message.ProofTypeBundle {
// TODO add bundle check here
var bundleProof message.BundleProof
if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &bundleProof); unmarshalErr != nil {
return unmarshalErr
}
success, verifyErr = m.verifier.VerifyBundleProof(&bundleProof)

Check failure on line 187 in coordinator/internal/logic/submitproof/proof_receiver.go (GitHub Actions / tests): m.verifier.VerifyBundleProof undefined (type *verifier.Verifier has no field or method VerifyBundleProof)
}

if verifyErr != nil || !success {
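
The CI failure above notes that the coordinator's Go verifier does not yet expose VerifyBundleProof. A hedged sketch of what such a method might look like, assuming the verifier package's existing cgo preamble, config fields and logging (all names here mirror the batch wrapper and are assumptions):

// VerifyBundleProof is a hypothetical method mirroring the batch verifier
// wrapper: it JSON-encodes the bundle proof and hands it to libzkp's
// verify_bundle_proof.
func (v *Verifier) VerifyBundleProof(proof *message.BundleProof) (bool, error) {
    if v.cfg.MockMode {
        log.Info("Mock mode, verify bundle proof always successful")
        return true, nil
    }

    buf, err := json.Marshal(proof)
    if err != nil {
        return false, err
    }

    proofStr := C.CString(string(buf))
    defer C.free(unsafe.Pointer(proofStr))

    verified := C.verify_bundle_proof(proofStr)
    return verified != 0, nil
}
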