
Commit

updates to libzkp and coordinator
roynalnaruto committed Jul 9, 2024
1 parent abe66c6 commit 89509a1
Showing 8 changed files with 435 additions and 81 deletions.
278 changes: 253 additions & 25 deletions common/libzkp/impl/Cargo.lock

Large diffs are not rendered by default.

6 changes: 5 additions & 1 deletion common/libzkp/impl/Cargo.toml
@@ -25,7 +25,11 @@ bls12_381 = { git = "https://github.com/scroll-tech/bls12_381", branch = "feat/i
[dependencies]
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.4", default-features = false, features = ["parallel_syn", "scroll"] }

# curie
prover_v3 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.4", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
# darwin
prover_v4 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/agg_recursion", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }

base64 = "0.13.0"
env_logger = "0.9.0"
103 changes: 84 additions & 19 deletions common/libzkp/impl/src/batch.rs
@@ -6,18 +6,20 @@ use crate::{
},
};
use libc::c_char;
use prover::{
aggregator::{Prover, Verifier},
use prover_v3::BatchProof as BatchProofV3;
use prover_v4::{
aggregator::{Prover, Verifier as VerifierV4},
check_chunk_hashes,
consts::AGG_VK_FILENAME,
consts::BATCH_VK_FILENAME,
utils::{chunk_trace_to_witness_block, init_env_and_log},
BatchProof, BatchProvingTask, BlockTrace, ChunkInfo, ChunkProof,
BatchHeader, BatchProof as BatchProofV4, BatchProvingTask, BlockTrace, BundleProof,
BundleProvingTask, ChunkInfo, ChunkProof,
};
use snark_verifier_sdk::verify_evm_calldata;
use std::{cell::OnceCell, env, ptr::null};

static mut PROVER: OnceCell<Prover> = OnceCell::new();
static mut VERIFIER: OnceCell<Verifier> = OnceCell::new();
static mut VERIFIER_V4: OnceCell<VerifierV4> = OnceCell::new();

/// # Safety
#[no_mangle]
@@ -31,8 +33,8 @@ pub unsafe extern "C" fn init_batch_prover(params_dir: *const c_char, assets_dir
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);

// The VK file must exist here; in the prover itself it is optional and its absence is only logged as a warning.
if !file_exists(assets_dir, &AGG_VK_FILENAME) {
panic!("{} must exist in folder {}", *AGG_VK_FILENAME, assets_dir);
if !file_exists(assets_dir, &BATCH_VK_FILENAME) {
panic!("{} must exist in folder {}", *BATCH_VK_FILENAME, assets_dir);
}

let prover = Prover::from_dirs(params_dir, assets_dir);
@@ -50,15 +52,26 @@ pub unsafe extern "C" fn init_batch_verifier(params_dir: *const c_char, assets_d

// TODO: add a setting in scroll-prover.
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
let verifier = Verifier::from_dirs(params_dir, assets_dir);
let verifier_v4 = VerifierV4::from_dirs(params_dir, assets_dir);

VERIFIER.set(verifier).unwrap();
VERIFIER_V4.set(verifier_v4).unwrap();
}

/// # Safety
#[no_mangle]
pub unsafe extern "C" fn get_batch_vk() -> *const c_char {
let vk_result = panic_catch(|| PROVER.get_mut().unwrap().get_vk());
let vk_result = panic_catch(|| PROVER.get_mut().unwrap().get_batch_vk());

vk_result
.ok()
.flatten()
.map_or(null(), |vk| string_to_c_char(base64::encode(vk)))
}

/// # Safety
#[no_mangle]
pub unsafe extern "C" fn get_bundle_vk() -> *const c_char {
let vk_result = panic_catch(|| PROVER.get_mut().unwrap().get_bundle_vk());

vk_result
.ok()
@@ -104,15 +117,19 @@ pub unsafe extern "C" fn check_chunk_proofs(chunk_proofs: *const c_char) -> *con
pub unsafe extern "C" fn gen_batch_proof(
chunk_hashes: *const c_char,
chunk_proofs: *const c_char,
batch_header: *const c_char,
) -> *const c_char {
let proof_result: Result<Vec<u8>, String> = panic_catch(|| {
let chunk_hashes = c_char_to_vec(chunk_hashes);
let chunk_proofs = c_char_to_vec(chunk_proofs);
let batch_header = c_char_to_vec(batch_header);

let chunk_hashes = serde_json::from_slice::<Vec<ChunkInfo>>(&chunk_hashes)
.map_err(|e| format!("failed to deserialize chunk hashes: {e:?}"))?;
let chunk_proofs = serde_json::from_slice::<Vec<ChunkProof>>(&chunk_proofs)
.map_err(|e| format!("failed to deserialize chunk proofs: {e:?}"))?;
let batch_header = serde_json::from_slice::<BatchHeader>(&batch_header)
.map_err(|e| format!("failed to deserialize batch header: {e:?}"))?;

if chunk_hashes.len() != chunk_proofs.len() {
return Err(format!("chunk hashes and chunk proofs lengths mismatch: chunk_hashes.len() = {}, chunk_proofs.len() = {}",
@@ -126,12 +143,13 @@ pub unsafe extern "C" fn gen_batch_proof(
check_chunk_hashes("", &chunk_hashes_proofs).map_err(|e| format!("failed to check chunk info: {e:?}"))?;

let batch = BatchProvingTask {
chunk_proofs
chunk_proofs,
batch_header,
};
let proof = PROVER
.get_mut()
.expect("failed to get mutable reference to PROVER.")
.gen_agg_evm_proof(batch, None, OUTPUT_DIR.as_deref())
.gen_batch_proof(batch, None, OUTPUT_DIR.as_deref())
.map_err(|e| format!("failed to generate proof: {e:?}"))?;

serde_json::to_vec(&proof).map_err(|e| format!("failed to serialize the proof: {e:?}"))
@@ -159,30 +177,77 @@ pub unsafe extern "C" fn verify_batch_proof(
fork_name: *const c_char,
) -> c_char {
let proof = c_char_to_vec(proof);
let proof = serde_json::from_slice::<BatchProof>(proof.as_slice()).unwrap();
let fork_name_str = c_char_to_str(fork_name);
let fork_id = match fork_name_str {
"bernoulli" => 2,
"curie" => 3,
"darwin" => 4,
_ => {
log::warn!("unexpected fork_name {fork_name_str}, treated as curie");
3
log::warn!("unexpected fork_name {fork_name_str}, treated as darwin");
4
}
};
let verified = panic_catch(|| {
if fork_id == 2 {
// before upgrade#3(DA Compression)
if fork_id == 3 {
// As of upgrade #3 (Curie), we verify batch proofs on-chain (EVM).
let proof = serde_json::from_slice::<BatchProofV3>(proof.as_slice()).unwrap();
verify_evm_calldata(
include_bytes!("plonk_verifier_0.10.3.bin").to_vec(),
// TODO(infra): include_bytes!("plonk_verifier_0.11.4.bin").to_vec(),
proof.calldata(),
)
} else {
VERIFIER.get().unwrap().verify_agg_evm_proof(proof)
// Post upgrade #4 (Darwin), batch proofs are not EVM-verifiable. Instead they are
// halo2 proofs meant to be bundled recursively.
let proof = serde_json::from_slice::<BatchProofV4>(proof.as_slice()).unwrap();
VERIFIER_V4.get().unwrap().verify_batch_proof(proof)
}
});
verified.unwrap_or(false) as c_char
}

/// # Safety
#[no_mangle]
pub unsafe extern "C" fn gen_bundle_proof(batch_proofs: *const c_char) -> *const c_char {
let proof_result: Result<Vec<u8>, String> = panic_catch(|| {
let batch_proofs = c_char_to_vec(batch_proofs);
let batch_proofs = serde_json::from_slice::<Vec<BatchProofV4>>(&batch_proofs)
.map_err(|e| format!("failed to deserialize batch proofs: {e:?}"))?;

let bundle = BundleProvingTask { batch_proofs };
let proof = PROVER
.get_mut()
.expect("failed to get mutable reference to PROVER.")
.gen_bundle_proof(bundle, None, OUTPUT_DIR.as_deref())
.map_err(|e| format!("failed to generate bundle proof: {e:?}"))?;

serde_json::to_vec(&proof)
.map_err(|e| format!("failed to serialize the bundle proof: {e:?}"))
})
.unwrap_or_else(|e| Err(format!("unwind error: {e:?}")));

let r = match proof_result {
Ok(proof_bytes) => ProofResult {
message: Some(proof_bytes),
error: None,
},
Err(err) => ProofResult {
message: None,
error: Some(err),
},
};

serde_json::to_vec(&r).map_or(null(), vec_to_c_char)
}

/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_bundle_proof(proof: *const c_char) -> c_char {
let proof = c_char_to_vec(proof);
let proof = serde_json::from_slice::<BundleProof>(proof.as_slice()).unwrap();
let verified = panic_catch(|| VERIFIER_V4.get().unwrap().verify_bundle_proof(proof));
verified.unwrap_or(false) as c_char
}
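
A note on the flow these new exports imply (not part of the diff): batch proofs are produced per batch from its chunk proofs and batch header, then folded into a single bundle proof, which is what gets verified downstream. The sketch below exercises the prover_v4 API that the FFI above wraps; type and method names are taken from this diff, while the exact signatures, return types, and the assumption that gen_batch_proof yields the BatchProof consumed by BundleProvingTask are not confirmed by the source.

// Sketch only, not part of this commit; assumes the prover_v4 crate pinned in Cargo.toml above.
use prover_v4::{
    aggregator::{Prover, Verifier},
    BatchHeader, BatchProvingTask, BundleProvingTask, ChunkProof,
};

fn prove_and_bundle(
    prover: &mut Prover,
    verifier: &Verifier,
    chunk_proofs: Vec<ChunkProof>,
    batch_header: BatchHeader,
) -> bool {
    // Stage 1: one batch proof over the chunk proofs plus the batch header.
    let batch = BatchProvingTask { chunk_proofs, batch_header };
    let batch_proof = prover
        .gen_batch_proof(batch, None, None)
        .expect("batch proving failed");

    // Stage 2: fold the accumulated batch proofs into a single bundle proof.
    let bundle = BundleProvingTask { batch_proofs: vec![batch_proof] };
    let bundle_proof = prover
        .gen_bundle_proof(bundle, None, None)
        .expect("bundle proving failed");

    // The bundle proof is what verify_bundle_proof above ultimately checks.
    verifier.verify_bundle_proof(bundle_proof)
}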

// This function is only used for debugging on Go side.
/// # Safety
#[no_mangle]
41 changes: 32 additions & 9 deletions common/libzkp/impl/src/chunk.rs
@@ -6,16 +6,18 @@ use crate::{
},
};
use libc::c_char;
use prover::{
use prover_v3::{zkevm::Verifier as VerifierV3, ChunkProof as ChunkProofV3};
use prover_v4::{
consts::CHUNK_VK_FILENAME,
utils::init_env_and_log,
zkevm::{Prover, Verifier},
BlockTrace, ChunkProof, ChunkProvingTask,
zkevm::{Prover, Verifier as VerifierV4},
BlockTrace, ChunkProof as ChunkProofV4, ChunkProvingTask,
};
use std::{cell::OnceCell, env, ptr::null};

static mut PROVER: OnceCell<Prover> = OnceCell::new();
static mut VERIFIER: OnceCell<Verifier> = OnceCell::new();
static mut VERIFIER_V3: OnceCell<VerifierV3> = OnceCell::new();
static mut VERIFIER_V4: OnceCell<VerifierV4> = OnceCell::new();

/// # Safety
#[no_mangle]
@@ -48,9 +50,11 @@ pub unsafe extern "C" fn init_chunk_verifier(params_dir: *const c_char, assets_d

// TODO: add a setting in scroll-prover.
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
let verifier = Verifier::from_dirs(params_dir, assets_dir);
let verifier_v3 = VerifierV3::from_dirs(params_dir, assets_dir);
let verifier_v4 = VerifierV4::from_dirs(params_dir, assets_dir);

VERIFIER.set(verifier).unwrap();
VERIFIER_V3.set(verifier_v3).unwrap();
VERIFIER_V4.set(verifier_v4).unwrap();
}

/// # Safety
@@ -99,10 +103,29 @@ pub unsafe extern "C" fn gen_chunk_proof(block_traces: *const c_char) -> *const

/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_chunk_proof(proof: *const c_char) -> c_char {
pub unsafe extern "C" fn verify_chunk_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
let proof = c_char_to_vec(proof);
let proof = serde_json::from_slice::<ChunkProof>(proof.as_slice()).unwrap();

let verified = panic_catch(|| VERIFIER.get().unwrap().verify_chunk_proof(proof));
let fork_name_str = c_char_to_str(fork_name);
let fork_id = match fork_name_str {
"curie" => 3,
"darwin" => 4,
_ => {
log::warn!("unexpected fork_name {fork_name_str}, treated as darwin");
4
}
};
let verified = panic_catch(|| {
if fork_id == 3 {
let proof = serde_json::from_slice::<ChunkProofV3>(proof.as_slice()).unwrap();
VERIFIER_V3.get().unwrap().verify_chunk_proof(proof)
} else {
let proof = serde_json::from_slice::<ChunkProofV4>(proof.as_slice()).unwrap();
VERIFIER_V4.get().unwrap().verify_chunk_proof(proof)
}
});
verified.unwrap_or(false) as c_char
}
15 changes: 13 additions & 2 deletions common/libzkp/interface/libzkp.h
@@ -1,15 +1,26 @@
// BatchProver is used to:
// - Batch a list of chunk proofs
// - Bundle a list of batch proofs
void init_batch_prover(char* params_dir, char* assets_dir);
// BatchVerifier is used to:
// - Verify a batch proof
// - Verify a bundle proof
void init_batch_verifier(char* params_dir, char* assets_dir);

char* get_batch_vk();
char* check_chunk_proofs(char* chunk_proofs);
char* gen_batch_proof(char* chunk_hashes, char* chunk_proofs);
char* gen_batch_proof(char* chunk_hashes, char* chunk_proofs, char* batch_header);
char verify_batch_proof(char* proof, char* fork_name);

char* get_bundle_vk();
char* gen_bundle_proof(char* batch_proofs);
char verify_bundle_proof(char* proof);

void init_chunk_prover(char* params_dir, char* assets_dir);
void init_chunk_verifier(char* params_dir, char* assets_dir);
char* get_chunk_vk();
char* gen_chunk_proof(char* block_traces);
char verify_chunk_proof(char* proof);
char verify_chunk_proof(char* proof, char* fork_name);

char* block_traces_to_chunk_info(char* block_traces);
void free_c_chars(char* ptr);
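
For orientation (not part of the diff), a minimal sketch of driving the updated interface from a host process over FFI. The extern declarations mirror libzkp.h above; the directory paths, proof JSON, and fork name are placeholders, and the example assumes the binary is linked against libzkp.

// Hypothetical caller. Return convention (1 = verified, 0 = not) follows
// `verified.unwrap_or(false) as c_char` on the Rust side.
use std::ffi::CString;
use std::os::raw::c_char;

extern "C" {
    fn init_batch_verifier(params_dir: *const c_char, assets_dir: *const c_char);
    fn verify_batch_proof(proof: *const c_char, fork_name: *const c_char) -> c_char;
    fn verify_bundle_proof(proof: *const c_char) -> c_char;
}

fn main() {
    let params_dir = CString::new("./params").unwrap();      // placeholder path
    let assets_dir = CString::new("./assets").unwrap();      // placeholder path
    let batch_proof_json = CString::new("{}").unwrap();      // placeholder JSON
    let bundle_proof_json = CString::new("{}").unwrap();     // placeholder JSON
    let fork_name = CString::new("darwin").unwrap();         // routes to the v4 verifier

    unsafe {
        init_batch_verifier(params_dir.as_ptr(), assets_dir.as_ptr());
        let batch_ok = verify_batch_proof(batch_proof_json.as_ptr(), fork_name.as_ptr());
        let bundle_ok = verify_bundle_proof(bundle_proof_json.as_ptr());
        println!("batch verified: {}, bundle verified: {}", batch_ok == 1, bundle_ok == 1);
    }
}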
20 changes: 6 additions & 14 deletions common/types/message/message.go
@@ -52,22 +52,14 @@ type ChunkTaskDetail struct {

// BatchTaskDetail is a type containing BatchTask detail.
type BatchTaskDetail struct {
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []*ChunkProof `json:"chunk_proofs"`
ParentStateRoot common.Hash `json:"parent_state_root"`
ParentBatchHash common.Hash `json:"parent_batch_hash"`
BatchHeader *codecv3.DABatch `json:"batch_header"`
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []*ChunkProof `json:"chunk_proofs"`
BatchHeader *codecv3.DABatch `json:"batch_header"`
}

// BundleTaskDetail consists of all the information required to describe the task to generate a proof for a bundle of batches.
type BundleTaskDetail struct {
ChainID uint64 `json:"chain_id"`
FinalizedBatchHash common.Hash `json:"finalized_batch_hash"`
FinalizedStateRoot common.Hash `json:"finalized_state_root"`
PendingBatchHash common.Hash `json:"pending_batch_hash"`
PendingStateRoot common.Hash `json:"pending_state_root"`
PendingWithdrawRoot common.Hash `json:"pending_withdraw_root"`
BatchProofs []*BatchProof `json:"batch_proofs"`
BatchProofs []*BatchProof `json:"batch_proofs"`
}

// ChunkInfo is for calculating pi_hash for chunk
@@ -107,8 +99,8 @@ type BatchProof struct {
Instances []byte `json:"instances"`
Vk []byte `json:"vk"`
// cross-reference between coordinator computation and prover computation
BatchHash common.Hash `json:"batch_hash"`
GitVersion string `json:"git_version,omitempty"`
BatchHeader *codecv3.DABatch `json:"batch_header"`
GitVersion string `json:"git_version,omitempty"`
}

// SanityCheck checks whether a BatchProof is in a legal format
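
The trimmed structs above define the JSON the coordinator exchanges with provers: a batch task now carries the chunk inputs plus the codecv3 batch header, and a bundle task is reduced to the list of batch proofs. Below is an illustrative sketch of those wire shapes on the consuming side; the field names come from the Go struct tags, while the Rust types are hypothetical (the real prover deserializes into its own ChunkInfo, ChunkProof, and BatchHeader types).

// Illustrative only: nested payloads are kept as raw JSON values.
use serde::Deserialize;
use serde_json::Value;

#[derive(Deserialize)]
struct BatchTaskDetail {
    chunk_infos: Vec<Value>,
    chunk_proofs: Vec<Value>,
    batch_header: Value, // codecv3.DABatch on the Go side
}

#[derive(Deserialize)]
struct BundleTaskDetail {
    batch_proofs: Vec<Value>,
}

fn main() {
    let batch_task = r#"{"chunk_infos": [], "chunk_proofs": [], "batch_header": {}}"#;
    let bundle_task = r#"{"batch_proofs": []}"#;

    let batch: BatchTaskDetail = serde_json::from_str(batch_task).unwrap();
    let bundle: BundleTaskDetail = serde_json::from_str(bundle_task).unwrap();
    println!(
        "batch task: {} chunk infos, {} chunk proofs, header present: {}; bundle task: {} batch proofs",
        batch.chunk_infos.len(),
        batch.chunk_proofs.len(),
        batch.batch_header.is_object(),
        bundle.batch_proofs.len(),
    );
}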
11 changes: 8 additions & 3 deletions coordinator/internal/logic/submitproof/proof_receiver.go
@@ -169,17 +169,22 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofParameter coor
if getHardForkErr != nil {
return ErrGetHardForkNameFailed
}
// only verify batch proof. chunk proof verifier have been disabled after Bernoulli
// Post-Bernoulli we do not verify chunk proofs.
// Verify batch proof
if message.ProofType(proofParameter.TaskType) == message.ProofTypeBatch {
var batchProof message.BatchProof
if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &batchProof); unmarshalErr != nil {
return unmarshalErr
}
success, verifyErr = m.verifier.VerifyBatchProof(&batchProof, hardForkName)
}

// Verify bundle proof
if message.ProofType(proofParameter.TaskType) == message.ProofTypeBundle {
// TODO add bundle check here
var bundleProof message.BundleProof
if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &bundleProof); unmarshalErr != nil {
return unmarshalErr
}
success, verifyErr = m.verifier.VerifyBundleProof(&bundleProof)

[CI annotation] Check failure on line 187 in coordinator/internal/logic/submitproof/proof_receiver.go (GitHub Actions / tests): m.verifier.VerifyBundleProof undefined (type *verifier.Verifier has no field or method VerifyBundleProof)
}

if verifyErr != nil || !success {