Skip to content

Commit

Permalink
wip: data driven batch test
Browse files Browse the repository at this point in the history
  • Loading branch information
dndll committed Feb 5, 2024
1 parent 1e857f5 commit 183b21a
Show file tree
Hide file tree
Showing 9 changed files with 239 additions and 102 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
**/target
/vendor
/build/*
circuits/plonky2x/build

.direnv

Expand Down
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion bin/operator/src/main.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use near_light_clientx::plonky2x::backend::function::Plonky2xFunction;

// Testnet
// Testnet, FIXME: this is error prone, use something else
const NETWORK: usize = 1;

fn main() {
Expand Down
191 changes: 106 additions & 85 deletions circuits/plonky2x/src/circuits/verify.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
use plonky2x::frontend::{
hint::simple::hint::Hint, mapreduce::generator::MapReduceDynamicGenerator,
};
pub use plonky2x::{self, backend::circuit::Circuit, prelude::*};
use plonky2x::{
frontend::{hint::simple::hint::Hint, mapreduce::generator::MapReduceDynamicGenerator},
prelude::plonky2::plonk::config::{AlgebraicHasher, GenericConfig},
};
use serde::{Deserialize, Serialize};

use crate::{
Expand All @@ -13,18 +14,15 @@ use crate::{
},
};

// TODO: improve the way we can lookup the transaction, ideally map
// TransactionOrReceiptId => Proof and map this way, now we are not limited by
// the data transformation
// Output of the map-reduce proof-verification circuit for a batch of size `B`.
//
// `height_indices[i]` holds the block height the i-th proof was verified
// against (taken from `block_header.inner_lite.height` in the map step) and
// `results[i]` holds the corresponding verification outcome from `b.verify`.
// Unused slots are padded with zero / false, and the merge hint treats a
// height of 0 as "empty" when combining two halves.
#[derive(CircuitVariable, Debug, Clone)]
pub struct ProofMapReduceVariable<const B: usize> {
    pub height_indices: ArrayVariable<BlockHeightVariable, B>,
    pub results: ArrayVariable<BoolVariable, B>,
}

#[derive(CircuitVariable, Debug, Clone)]
pub struct ProofMapReduceCtx {
pub zero: BlockHeightVariable,
pub result: BoolVariable,
}

#[derive(Debug, Clone)]
pub struct VerifyCircuit<const N: usize, const B: usize, const NETWORK: usize = 1>;

Expand All @@ -33,75 +31,63 @@ impl<const N: usize, const B: usize, const NETWORK: usize> Circuit
{
fn define<L: PlonkParameters<D>, const D: usize>(b: &mut CircuitBuilder<L, D>)
where
<<L as PlonkParameters<D>>::Config as plonky2::plonk::config::GenericConfig<D>>::Hasher:
plonky2::plonk::config::AlgebraicHasher<<L as PlonkParameters<D>>::Field>,
<<L as PlonkParameters<D>>::Config as GenericConfig<D>>::Hasher:
AlgebraicHasher<<L as PlonkParameters<D>>::Field>,
{
assert!(
N % B == 0,
"Cannot batch by this configuration, must be a power of 2"
);

let trusted_head = b.read::<HeaderVariable>();
let ids = b.read::<ArrayVariable<TransactionOrReceiptIdVariable, N>>();
println!("len of ids: {}", ids.data.len());

let proofs = FetchProofInputs::<N>(NETWORK.into()).fetch(b, &trusted_head, &ids.data);

let output = b
.mapreduce_dynamic::<_, ProofVariable, ProofMapReduceVariable<N>, Self, B, _, _>(
(),
proofs.data,
|ctx, proofs, b| {
let zero = b.zero::<BlockHeightVariable>();
let _false = b._false();

let mut heights = vec![];
let mut results = vec![];
for p in proofs.data {
heights.push(p.block_header.inner_lite.height);
results.push(b.verify(p));
}
b.watch_slice(&heights, "map job -- heights");
b.watch_slice(&results, "map job -- results");

heights.resize(N, zero);
results.resize(N, _false);

let state = ProofMapReduceVariable {
height_indices: heights.into(),
results: results.into(),
};

state
},
|_ctx, left, right, b| MergeProofHint::<N>.merge(b, &left, &right),
);
let output = b.mapreduce_dynamic::<_, _, _, Self, B, _, _>(
(),
proofs.data,
|_, proofs, b| {
let mut heights = vec![];
let mut results = vec![];

// TODO[Optimisation]: could parallelise these
for p in proofs.data {
heights.push(p.block_header.inner_lite.height);
results.push(b.verify(p));
}

b.watch_slice(&heights, "map job -- heights");
b.watch_slice(&results, "map job -- results");

let zero = b.zero::<BlockHeightVariable>();
let _false = b._false();
heights.resize(N, zero);
results.resize(N, _false);

let state = ProofMapReduceVariable::<N> {
height_indices: heights.into(),
results: results.into(),
};

state
},
|_, l, r, b| MergeProofHint::<N>.merge(b, &l, &r),
);
b.write::<ProofMapReduceVariable<N>>(output);
}

fn register_generators<L: PlonkParameters<D>, const D: usize>(registry: &mut HintRegistry<L, D>)
where
<<L as PlonkParameters<D>>::Config as plonky2::plonk::config::GenericConfig<D>>::Hasher:
plonky2::plonk::config::AlgebraicHasher<L::Field>,
<<L as PlonkParameters<D>>::Config as GenericConfig<D>>::Hasher: AlgebraicHasher<L::Field>,
{
registry.register_async_hint::<FetchProofInputs<N>>();
registry.register_hint::<EncodeInner>();

let dynamic_id = MapReduceDynamicGenerator::<L, (), (), (), Self, 1, D>::id();

registry.register_simple::<MapReduceDynamicGenerator<
L,
ProofMapReduceCtx,
ProofVariable,
ProofMapReduceVariable<N>,
Self,
B,
D,
>>(dynamic_id);
registry.register_hint::<MergeProofHint<N>>();

// We hash in verify
registry.register_hint::<EncodeInner>();
}
}

// Hinting for this as it's taking too much effort to do it in a constrained way
// It's probably a security risk that we'd need to fix later since technically
// these can just be changed post-verification
//
// |ctx, mut left, right, b| {
// let mut r_heights = right.height_indices.data;
Expand Down Expand Up @@ -133,8 +119,8 @@ impl<L: PlonkParameters<D>, const D: usize, const N: usize> Hint<L, D> for Merge
.chain(right.height_indices.iter())
.zip(left.results.iter().chain(right.results.iter()))
.filter_map(|(h, r)| if *h != 0 { Some((*h, *r)) } else { None })
.inspect(|(h, r)| log::debug!("heights/results: {:#?}, {:#?}", h, r))
.unzip();

height_indices.resize(N, 0);
results.resize(N, false);

Expand Down Expand Up @@ -170,9 +156,13 @@ mod beefy_tests {
use std::str::FromStr;

use ::test_utils::CryptoHash;
use near_light_client_protocol::prelude::Itertools;
use near_light_client_protocol::{
prelude::{Header, Itertools},
BlockHeaderInnerLite, BlockHeaderInnerLiteView,
};
use near_primitives::types::TransactionOrReceiptId;
use serial_test::serial;
use test_utils::fixture;

use super::*;
use crate::{
Expand All @@ -187,8 +177,9 @@ mod beefy_tests {
fn beefy_test_verify_e2e() {
let (header, _, _) = testnet_state();

const AMT: usize = 8;
const BATCH: usize = 2;
// TODO: test many configs of these
const AMT: usize = 2;
const BATCH: usize = 1;

fn tx(hash: &str, sender: &str) -> TransactionOrReceiptId {
TransactionOrReceiptId::Transaction {
Expand All @@ -213,27 +204,27 @@ mod beefy_tests {
"9cVuYLKYF26QevZ315RLb9ArU3gbcgPc4LDRJfZQyZHo",
"priceoracle.testnet",
),
rx("3UzHjFP8hVR2P6JJHwWchhcXPUV3vuPCDhtdWK7JmTy9", "system"),
tx(
"3V1qYGZe9NBc4EQjg5RzM5CrDiRgxqbQsYaRvMTyU4UR",
"hotwallet.dev-kaiching.testnet",
),
rx(
"CjaBC9EJE2eYg1vAy6sjJWpzgAroMv7tbFkhyz5Nhk3h",
"wallet.dev-kaiching.testnet",
),
tx(
"4VqSnHtFPGsgRJ7f4iz75bibCfbEiqYjnyEdentUyvbr",
"operator_manager.orderly.testnet",
),
tx(
"FTLQF8KxwThbfriNk8jNHJsmNk9mteXwQ71Q6hc7JLbg",
"operator-manager.orderly-qa.testnet",
),
tx(
"4VvKfzUzQVA6zNSSG1CZRbiTe4QRz5rwAzcZadKi1EST",
"operator-manager.orderly-dev.testnet",
),
// rx("3UzHjFP8hVR2P6JJHwWchhcXPUV3vuPCDhtdWK7JmTy9", "system"),
// tx(
// "3V1qYGZe9NBc4EQjg5RzM5CrDiRgxqbQsYaRvMTyU4UR",
// "hotwallet.dev-kaiching.testnet",
// ),
// rx(
// "CjaBC9EJE2eYg1vAy6sjJWpzgAroMv7tbFkhyz5Nhk3h",
// "wallet.dev-kaiching.testnet",
// ),
// tx(
// "4VqSnHtFPGsgRJ7f4iz75bibCfbEiqYjnyEdentUyvbr",
// "operator_manager.orderly.testnet",
// ),
// tx(
// "FTLQF8KxwThbfriNk8jNHJsmNk9mteXwQ71Q6hc7JLbg",
// "operator-manager.orderly-qa.testnet",
// ),
// tx(
// "4VvKfzUzQVA6zNSSG1CZRbiTe4QRz5rwAzcZadKi1EST",
// "operator-manager.orderly-dev.testnet",
// ),
]
.into_iter()
.map(Into::into)
Expand All @@ -253,4 +244,34 @@ mod beefy_tests {
};
builder_suite(define, writer, assertions);
}

// TODO: ignore flag as this test will likely be overkill
#[test]
#[serial]
fn beefy_test_data_driven_verify_e2e() {
let (header, _, _) = testnet_state();

const AMT: usize = 8;
const BATCH: usize = 1;

let ids = fixture::<Vec<TransactionOrReceiptId>>("ids.json")
.into_iter()
.take(AMT)
.map(Into::<TransactionOrReceiptIdVariableValue<GoldilocksField>>::into)
.collect_vec();

assert_eq!(ids.len(), AMT);

let define = |b: &mut B| {
VerifyCircuit::<AMT, BATCH, NETWORK>::define(b);
};
let writer = |input: &mut PI| {
input.write::<HeaderVariable>(header.into());
input.write::<ArrayVariable<TransactionOrReceiptIdVariable, AMT>>(ids.into());
};
let assertions = |mut output: PO| {
println!("{:#?}", output.read::<ProofMapReduceVariable<AMT>>());
};
builder_suite(define, writer, assertions);
}
}
1 change: 1 addition & 0 deletions circuits/plonky2x/src/hint.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ impl FetchNextHeaderInputs {
}
}

// TODO: refactor into some client-like carrier for all hints that is serdeable
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct FetchProofInputs<const B: usize>(pub Network);

Expand Down
1 change: 1 addition & 0 deletions circuits/plonky2x/src/variables.rs
Original file line number Diff line number Diff line change
Expand Up @@ -627,6 +627,7 @@ impl HashBpsInputs {
}
}

// TODO: EVM these, maybe macro?
#[derive(CircuitVariable, Clone, Debug)]
pub struct TransactionOrReceiptIdVariable {
pub is_transaction: BoolVariable,
Expand Down
1 change: 1 addition & 0 deletions crates/rpc/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -34,3 +34,4 @@ hex.workspace = true
pretty_env_logger.workspace = true
rand = "*"
serde_json.workspace = true
tokio.workspace = true
Loading

0 comments on commit 183b21a

Please sign in to comment.