From f33232df434dc889b059e3ab75b87307c47718c2 Mon Sep 17 00:00:00 2001
From: Victor Lopez
Date: Wed, 23 Mar 2022 16:22:22 +0100
Subject: [PATCH 1/4] Use `CryptographicSponge` as generic argument

Resolves #87
---
 Cargo.toml                         |  35 +++---
 src/constraints/ahp.rs             |  18 +--
 src/constraints/data_structures.rs | 116 +++++++++++---------
 src/constraints/snark.rs           | 102 ++++++++++-------
 src/constraints/verifier.rs        |  41 +++----
 src/constraints/verifier_test.rs   |   8 +-
 src/data_structures.rs             |  81 +++++++++-----
 src/fiat_shamir/mod.rs             |   1 +
 src/fiat_shamir/poseidon/mod.rs    |  27 +++++
 src/lib.rs                         | 170 ++++++++++++-----------------
 src/test.rs                        |  20 +++-
 11 files changed, 349 insertions(+), 270 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index 5b86e90..f0e06d8 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -20,34 +20,35 @@ license = "MIT/Apache-2.0"
 edition = "2018"

 [dependencies]
-ark-serialize = { version = "^0.2.0", default-features = false, features = [ "derive" ] }
-ark-ff = { version = "^0.2.0", default-features = false }
-ark-std = { version = "^0.2.0", default-features = false }
-ark-poly = { version = "^0.2.0", default-features = false }
-ark-relations = { version = "^0.2.0", default-features = false }
+ark-serialize = { version = "0.3", default-features = false, features = [ "derive" ] }
+ark-ff = { version = "0.3", default-features = false }
+ark-std = { version = "0.3", default-features = false }
+ark-poly = { version = "0.3", default-features = false }
+ark-relations = { version = "0.3", default-features = false }
 ark-poly-commit = { git = "https://github.com/arkworks-rs/poly-commit", branch = "constraints", default-features = false, features = [ "r1cs" ] }
+ark-sponge = { version = "0.3", default-features = false }

-rand_chacha = { version = "0.2.1", default-features = false }
+rand_chacha = { version = "0.3", default-features = false }
 rayon = { version = "1", optional = true }
 digest = { version = "0.9" }
 derivative = { version = "2", features = ["use_core"] }

-ark-ec = { version = "^0.2.0", default-features = false }
-ark-crypto-primitives = { version = "^0.2.0", default-features = false, features = [ "r1cs" ] }
-ark-r1cs-std = { version = "^0.2.0", default-features = false }
-ark-nonnative-field = { version = "^0.2.0", default-features = false }
-ark-snark = { version = "^0.2.0", default-features = false }
+ark-ec = { version = "0.3", default-features = false }
+ark-crypto-primitives = { version = "0.3", default-features = false, features = [ "r1cs" ] }
+ark-r1cs-std = { version = "0.3", default-features = false }
+ark-nonnative-field = { version = "0.3", default-features = false }
+ark-snark = { version = "0.3", default-features = false }
 hashbrown = "0.9"
 tracing = { version = "0.1", default-features = false, features = [ "attributes" ] }
 tracing-subscriber = { version = "0.2", default-features = false, optional = true }

 [dev-dependencies]
 blake2 = { version = "0.9", default-features = false }
-ark-bls12-381 = { version = "^0.2.0", default-features = false, features = [ "curve" ] }
-ark-mnt4-298 = { version = "^0.2.0", default-features = false, features = ["r1cs", "curve"] }
-ark-mnt6-298 = { version = "^0.2.0", default-features = false, features = ["r1cs"] }
-ark-mnt4-753 = { version = "^0.2.0", default-features = false, features = ["r1cs", "curve"] }
-ark-mnt6-753 = { version = "^0.2.0", default-features = false, features = ["r1cs"] }
+ark-bls12-381 = { version = "0.3", default-features = false, features = [ "curve" ] }
+ark-mnt4-298 = { version = "0.3", default-features = false,
features = ["r1cs", "curve"] } +ark-mnt6-298 = { version = "0.3", default-features = false, features = ["r1cs"] } +ark-mnt4-753 = { version = "0.3", default-features = false, features = ["r1cs", "curve"] } +ark-mnt6-753 = { version = "0.3", default-features = false, features = ["r1cs"] } [profile.release] opt-level = 3 @@ -76,4 +77,4 @@ parallel = [ "std", "ark-ff/parallel", "ark-poly/parallel", "ark-std/parallel", name = "marlin-benches" path = "benches/bench.rs" harness = false -required-features = ["std"] \ No newline at end of file +required-features = ["std"] diff --git a/src/constraints/ahp.rs b/src/constraints/ahp.rs index 7d315ba..f22c698 100644 --- a/src/constraints/ahp.rs +++ b/src/constraints/ahp.rs @@ -23,6 +23,7 @@ use ark_r1cs_std::{ ToBitsGadget, ToConstraintFieldGadget, }; use ark_relations::r1cs::ConstraintSystemRef; +use ark_sponge::CryptographicSponge; use hashbrown::{HashMap, HashSet}; #[derive(Clone)] @@ -57,14 +58,16 @@ pub struct VerifierThirdMsgVar { pub struct AHPForR1CS< F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, + S: CryptographicSponge, + PC: PolynomialCommitment, S>, + PCG: PCCheckVar, PC, CF, S>, > where PCG::VerifierKeyVar: ToConstraintFieldGadget, PCG::CommitmentVar: ToConstraintFieldGadget, { field: PhantomData, constraint_field: PhantomData, + sponge: PhantomData, polynomial_commitment: PhantomData, pc_check: PhantomData, } @@ -72,9 +75,10 @@ pub struct AHPForR1CS< impl< F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, - > AHPForR1CS + S: CryptographicSponge, + PC: PolynomialCommitment, S>, + PCG: PCCheckVar, PC, CF, S>, + > AHPForR1CS where PCG::VerifierKeyVar: ToConstraintFieldGadget, PCG::CommitmentVar: ToConstraintFieldGadget, @@ -529,8 +533,8 @@ where PR: FiatShamirRng, R: FiatShamirRngVar, >( - index_pvk: &PreparedIndexVerifierKeyVar, - proof: &ProofVar, + index_pvk: &PreparedIndexVerifierKeyVar, + proof: &ProofVar, state: &VerifierStateVar, ) -> Result< ( diff --git a/src/constraints/data_structures.rs b/src/constraints/data_structures.rs index ef7d3ec..881bf4a 100644 --- a/src/constraints/data_structures.rs +++ b/src/constraints/data_structures.rs @@ -17,16 +17,19 @@ use ark_r1cs_std::{ R1CSVar, ToBytesGadget, ToConstraintFieldGadget, }; use ark_relations::r1cs::{ConstraintSystemRef, Namespace}; +use ark_sponge::CryptographicSponge; use ark_std::borrow::Borrow; use hashbrown::HashMap; -pub type UniversalSRS = >>::UniversalParams; +pub type UniversalSRS = + , S>>::UniversalParams; pub struct IndexVerifierKeyVar< F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, + S: CryptographicSponge, + PC: PolynomialCommitment, S>, + PCG: PCCheckVar, PC, CF, S>, > { pub cs: ConstraintSystemRef, pub domain_h_size: u64, @@ -40,9 +43,10 @@ pub struct IndexVerifierKeyVar< impl< F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, - > IndexVerifierKeyVar + S: CryptographicSponge, + PC: PolynomialCommitment, S>, + PCG: PCCheckVar, PC, CF, S>, + > IndexVerifierKeyVar { fn cs(&self) -> ConstraintSystemRef { self.cs.clone() @@ -52,9 +56,10 @@ impl< impl< F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, - > AllocVar, CF> for IndexVerifierKeyVar + S: CryptographicSponge, + PC: PolynomialCommitment, S>, + PCG: PCCheckVar, PC, CF, S>, + > AllocVar, CF> for IndexVerifierKeyVar { #[tracing::instrument(target = "r1cs", skip(cs, f))] fn new_variable( @@ -63,7 +68,7 @@ impl< mode: 
AllocationMode, ) -> Result where - T: Borrow>, + T: Borrow>, { let t = f()?; let ivk = t.borrow(); @@ -117,9 +122,10 @@ impl< impl< F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, - > ToBytesGadget for IndexVerifierKeyVar + S: CryptographicSponge, + PC: PolynomialCommitment, S>, + PCG: PCCheckVar, PC, CF, S>, + > ToBytesGadget for IndexVerifierKeyVar { #[tracing::instrument(target = "r1cs", skip(self))] fn to_bytes(&self) -> Result>, SynthesisError> { @@ -140,9 +146,10 @@ impl< impl< F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, - > Clone for IndexVerifierKeyVar + S: CryptographicSponge, + PC: PolynomialCommitment, S>, + PCG: PCCheckVar, PC, CF, S>, + > Clone for IndexVerifierKeyVar { fn clone(&self) -> Self { Self { @@ -160,9 +167,10 @@ impl< impl< F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, - > IndexVerifierKeyVar + S: CryptographicSponge, + PC: PolynomialCommitment, S>, + PCG: PCCheckVar, PC, CF, S>, + > IndexVerifierKeyVar { pub fn iter(&self) -> impl Iterator { self.index_comms.iter() @@ -172,8 +180,9 @@ impl< pub struct PreparedIndexVerifierKeyVar< F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, + S: CryptographicSponge, + PC: PolynomialCommitment, S>, + PCG: PCCheckVar, PC, CF, S>, PR: FiatShamirRng, R: FiatShamirRngVar, > { @@ -192,11 +201,12 @@ pub struct PreparedIndexVerifierKeyVar< impl< F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, + S: CryptographicSponge, + PC: PolynomialCommitment, S>, + PCG: PCCheckVar, PC, CF, S>, PR: FiatShamirRng, R: FiatShamirRngVar, - > Clone for PreparedIndexVerifierKeyVar + > Clone for PreparedIndexVerifierKeyVar { fn clone(&self) -> Self { PreparedIndexVerifierKeyVar { @@ -213,24 +223,26 @@ impl< } } -impl PreparedIndexVerifierKeyVar +impl PreparedIndexVerifierKeyVar where F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, + S: CryptographicSponge, + PC: PolynomialCommitment, S>, + PCG: PCCheckVar, PC, CF, S>, PR: FiatShamirRng, R: FiatShamirRngVar, PCG::VerifierKeyVar: ToConstraintFieldGadget, PCG::CommitmentVar: ToConstraintFieldGadget, { #[tracing::instrument(target = "r1cs", skip(vk))] - pub fn prepare(vk: &IndexVerifierKeyVar) -> Result { + pub fn prepare(vk: &IndexVerifierKeyVar) -> Result { let cs = vk.cs(); let mut fs_rng_raw = PR::new(); - fs_rng_raw - .absorb_bytes(&to_bytes![&MarlinVerifierVar::::PROTOCOL_NAME].unwrap()); + fs_rng_raw.absorb_bytes( + &to_bytes![&MarlinVerifierVar::::PROTOCOL_NAME].unwrap(), + ); let index_vk_hash = { let mut vk_hash_rng = PR::new(); @@ -280,13 +292,14 @@ where } } -impl AllocVar, CF> - for PreparedIndexVerifierKeyVar +impl AllocVar, CF> + for PreparedIndexVerifierKeyVar where F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, + S: CryptographicSponge, + PC: PolynomialCommitment, S>, + PCG: PCCheckVar, PC, CF, S>, PR: FiatShamirRng, R: FiatShamirRngVar, PC::VerifierKey: ToConstraintField, @@ -301,7 +314,7 @@ where mode: AllocationMode, ) -> Result where - T: Borrow>, + T: Borrow>, { let t = f()?; let obj = t.borrow(); @@ -342,8 +355,9 @@ where }; let mut fs_rng_raw = PR::new(); - fs_rng_raw - .absorb_bytes(&to_bytes![&MarlinVerifierVar::::PROTOCOL_NAME].unwrap()); + fs_rng_raw.absorb_bytes( + &to_bytes![&MarlinVerifierVar::::PROTOCOL_NAME].unwrap(), + ); let fs_rng = { let mut fs_rng = R::constant(cs.clone(), &fs_rng_raw); @@ 
-379,8 +393,9 @@ where pub struct ProofVar< F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, + S: CryptographicSponge, + PC: PolynomialCommitment, S>, + PCG: PCCheckVar, PC, CF, S>, > { pub cs: ConstraintSystemRef, pub commitments: Vec>, @@ -392,9 +407,10 @@ pub struct ProofVar< impl< F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, - > ProofVar + S: CryptographicSponge, + PC: PolynomialCommitment, S>, + PCG: PCCheckVar, PC, CF, S>, + > ProofVar { pub fn new( cs: ConstraintSystemRef, @@ -413,12 +429,13 @@ impl< } } -impl AllocVar, CF> for ProofVar +impl AllocVar, CF> for ProofVar where F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, + S: CryptographicSponge, + PC: PolynomialCommitment, S, BatchProof = DensePolynomial>, + PCG: PCCheckVar, PC, CF, S>, PC::VerifierKey: ToConstraintField, PC::Commitment: ToConstraintField, PCG::VerifierKeyVar: ToConstraintFieldGadget, @@ -431,7 +448,7 @@ where mode: AllocationMode, ) -> Result where - T: Borrow>, + T: Borrow>, { let ns = cs.into(); let cs = ns.cs(); @@ -534,9 +551,10 @@ where impl< F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, - > Clone for ProofVar + S: CryptographicSponge, + PC: PolynomialCommitment, S>, + PCG: PCCheckVar, PC, CF, S>, + > Clone for ProofVar { fn clone(&self) -> Self { ProofVar { diff --git a/src/constraints/snark.rs b/src/constraints/snark.rs index 4e9a428..2d8aa68 100644 --- a/src/constraints/snark.rs +++ b/src/constraints/snark.rs @@ -2,7 +2,7 @@ use crate::constraints::{ data_structures::{IndexVerifierKeyVar, PreparedIndexVerifierKeyVar, ProofVar}, verifier::Marlin as MarlinVerifierGadget, }; -use crate::fiat_shamir::{constraints::FiatShamirRngVar, FiatShamirRng}; +use crate::fiat_shamir::{constraints::FiatShamirRngVar, AlgebraicSponge, FiatShamirRng}; use crate::Error::IndexTooLarge; use crate::{ Box, IndexProverKey, IndexVerifierKey, Marlin, MarlinConfig, PreparedIndexVerifierKey, Proof, @@ -21,6 +21,7 @@ use ark_relations::r1cs::{ ConstraintSynthesizer, ConstraintSystemRef, LinearCombination, SynthesisError, Variable, }; use ark_snark::UniversalSetupSNARK; +use ark_sponge::CryptographicSponge; use ark_std::cmp::min; use ark_std::fmt::{Debug, Formatter}; use ark_std::marker::PhantomData; @@ -49,38 +50,41 @@ impl Debug for MarlinBound { pub struct MarlinSNARK< F: PrimeField, FSF: PrimeField, - PC: PolynomialCommitment>, + S: CryptographicSponge + AlgebraicSponge, + PC: PolynomialCommitment, S>, FS: FiatShamirRng, MC: MarlinConfig, > { f_phantom: PhantomData, fsf_phantom: PhantomData, + s_phantom: PhantomData, pc_phantom: PhantomData, fs_phantom: PhantomData, mc_phantom: PhantomData, } -impl SNARK for MarlinSNARK +impl SNARK for MarlinSNARK where F: PrimeField, FSF: PrimeField, - PC: PolynomialCommitment>, + S: CryptographicSponge + AlgebraicSponge, + PC: PolynomialCommitment, S, BatchProof = DensePolynomial>, FS: FiatShamirRng, MC: MarlinConfig, PC::VerifierKey: ToConstraintField, PC::Commitment: ToConstraintField, { - type ProvingKey = IndexProverKey; - type VerifyingKey = IndexVerifierKey; - type ProcessedVerifyingKey = PreparedIndexVerifierKey; - type Proof = Proof; + type ProvingKey = IndexProverKey; + type VerifyingKey = IndexVerifierKey; + type ProcessedVerifyingKey = PreparedIndexVerifierKey; + type Proof = Proof; type Error = Box; fn circuit_specific_setup, R: RngCore + CryptoRng>( circuit: C, rng: &mut R, ) -> Result<(Self::ProvingKey, 
Self::VerifyingKey), Self::Error> { - Ok(Marlin::::circuit_specific_setup(circuit, rng).unwrap()) + Ok(Marlin::::circuit_specific_setup(circuit, rng).unwrap()) } fn prove, R: RngCore>( @@ -88,14 +92,14 @@ where circuit: C, rng: &mut R, ) -> Result { - match Marlin::::prove(&pk, circuit, rng) { + match Marlin::::prove(&pk, circuit, rng) { Ok(res) => Ok(res), Err(e) => Err(Box::new(MarlinError::from(e))), } } fn verify(vk: &Self::VerifyingKey, x: &[F], proof: &Self::Proof) -> Result { - match Marlin::::verify(vk, x, proof) { + match Marlin::::verify(vk, x, proof) { Ok(res) => Ok(res), Err(e) => Err(Box::new(MarlinError::from(e))), } @@ -111,25 +115,26 @@ where x: &[F], proof: &Self::Proof, ) -> Result { - match Marlin::::prepared_verify(pvk, x, proof) { + match Marlin::::prepared_verify(pvk, x, proof) { Ok(res) => Ok(res), Err(e) => Err(Box::new(MarlinError::from(e))), } } } -impl UniversalSetupSNARK for MarlinSNARK +impl UniversalSetupSNARK for MarlinSNARK where F: PrimeField, FSF: PrimeField, - PC: PolynomialCommitment>, + S: CryptographicSponge + AlgebraicSponge, + PC: PolynomialCommitment, S, BatchProof = DensePolynomial>, FS: FiatShamirRng, MC: MarlinConfig, PC::VerifierKey: ToConstraintField, PC::Commitment: ToConstraintField, { type ComputationBound = MarlinBound; - type PublicParameters = (MarlinBound, UniversalSRS); + type PublicParameters = (MarlinBound, UniversalSRS); fn universal_setup( bound: &Self::ComputationBound, @@ -137,7 +142,7 @@ where ) -> Result { let Self::ComputationBound { max_degree } = bound; - match Marlin::::universal_setup(1, 1, (max_degree + 5) / 3, rng) { + match Marlin::::universal_setup(1, 1, (max_degree + 5) / 3, rng) { Ok(res) => Ok((bound.clone(), res)), Err(e) => Err(Box::new(MarlinError::from(e))), } @@ -152,7 +157,7 @@ where (Self::ProvingKey, Self::VerifyingKey), UniversalSetupIndexError, > { - let index_res = Marlin::::index(&crs.1, circuit); + let index_res = Marlin::::index(&crs.1, circuit); match index_res { Ok(res) => Ok(res), Err(err) => match err { @@ -167,18 +172,20 @@ where } } -pub struct MarlinSNARKGadget +pub struct MarlinSNARKGadget where F: PrimeField, FSF: PrimeField, - PC: PolynomialCommitment>, + S: CryptographicSponge, + PC: PolynomialCommitment, S>, FS: FiatShamirRng, MC: MarlinConfig, - PCG: PCCheckVar, PC, FSF>, + PCG: PCCheckVar, PC, FSF, S>, FSG: FiatShamirRngVar, { pub f_phantom: PhantomData, pub fsf_phantom: PhantomData, + pub s_phantom: PhantomData, pub pc_phantom: PhantomData, pub fs_phantom: PhantomData, pub mc_phantom: PhantomData, @@ -186,30 +193,31 @@ where pub fsg_phantom: PhantomData, } -impl SNARKGadget> - for MarlinSNARKGadget +impl SNARKGadget> + for MarlinSNARKGadget where F: PrimeField, FSF: PrimeField, - PC: PolynomialCommitment>, + S: CryptographicSponge + AlgebraicSponge, + PC: PolynomialCommitment, S, BatchProof = DensePolynomial>, FS: FiatShamirRng, MC: MarlinConfig, - PCG: PCCheckVar, PC, FSF>, + PCG: PCCheckVar, PC, FSF, S>, FSG: FiatShamirRngVar, PC::VerifierKey: ToConstraintField, PC::Commitment: ToConstraintField, PCG::VerifierKeyVar: ToConstraintFieldGadget, PCG::CommitmentVar: ToConstraintFieldGadget, { - type ProcessedVerifyingKeyVar = PreparedIndexVerifierKeyVar; - type VerifyingKeyVar = IndexVerifierKeyVar; + type ProcessedVerifyingKeyVar = PreparedIndexVerifierKeyVar; + type VerifyingKeyVar = IndexVerifierKeyVar; type InputVar = NonNativeFieldInputVar; - type ProofVar = ProofVar; + type ProofVar = ProofVar; type VerifierSize = usize; fn verifier_size( - circuit_vk: & as SNARK>::VerifyingKey, + 
circuit_vk: & as SNARK>::VerifyingKey, ) -> Self::VerifierSize { circuit_vk.index_info.num_instance_variables } @@ -221,8 +229,12 @@ where proof: &Self::ProofVar, ) -> Result, SynthesisError> { Ok( - MarlinVerifierGadget::::prepared_verify(&circuit_pvk, &x.val, proof) - .unwrap(), + MarlinVerifierGadget::::prepared_verify( + &circuit_pvk, + &x.val, + proof, + ) + .unwrap(), ) } @@ -233,8 +245,10 @@ where proof: &Self::ProofVar, ) -> Result, SynthesisError> { Ok( - MarlinVerifierGadget::::verify::(circuit_vk, &x.val, proof) - .unwrap(), + MarlinVerifierGadget::::verify::( + circuit_vk, &x.val, proof, + ) + .unwrap(), ) } } @@ -297,16 +311,17 @@ impl ConstraintSynthesizer for MarlinBoundCircuit { } } -impl - UniversalSetupSNARKGadget> - for MarlinSNARKGadget +impl + UniversalSetupSNARKGadget> + for MarlinSNARKGadget where F: PrimeField, FSF: PrimeField, - PC: PolynomialCommitment>, + S: CryptographicSponge + AlgebraicSponge, + PC: PolynomialCommitment, S, BatchProof = DensePolynomial>, FS: FiatShamirRng, MC: MarlinConfig, - PCG: PCCheckVar, PC, FSF>, + PCG: PCCheckVar, PC, FSF, S>, FSG: FiatShamirRngVar, PC::VerifierKey: ToConstraintField, PC::Commitment: ToConstraintField, @@ -449,12 +464,18 @@ mod test { type TestSNARK = MarlinSNARK< MNT4Fr, MNT4Fq, - MarlinKZG10>, + PoseidonSponge, + MarlinKZG10, PoseidonSponge>, FS4, TestMarlinConfig, >; type FS4 = FiatShamirAlgebraicSpongeRng>; - type PCGadget4 = MarlinKZG10Gadget, MNT4PairingVar>; + type PCGadget4 = MarlinKZG10Gadget< + Mnt64298cycle, + DensePolynomial, + MNT4PairingVar, + PoseidonSponge, + >; type FSG4 = FiatShamirAlgebraicSpongeRngVar< MNT4Fr, MNT4Fq, @@ -464,7 +485,8 @@ mod test { type TestSNARKGadget = MarlinSNARKGadget< MNT4Fr, MNT4Fq, - MarlinKZG10>, + PoseidonSponge, + MarlinKZG10, PoseidonSponge>, FS4, TestMarlinConfig, PCGadget4, diff --git a/src/constraints/verifier.rs b/src/constraints/verifier.rs index 4a50414..80d8c47 100644 --- a/src/constraints/verifier.rs +++ b/src/constraints/verifier.rs @@ -1,8 +1,6 @@ use crate::{ - constraints::{ - ahp::AHPForR1CS, - data_structures::{IndexVerifierKeyVar, PreparedIndexVerifierKeyVar, ProofVar}, - }, + constraints::ahp::AHPForR1CS, + constraints::data_structures::{IndexVerifierKeyVar, PreparedIndexVerifierKeyVar, ProofVar}, fiat_shamir::{constraints::FiatShamirRngVar, FiatShamirRng}, Error, PhantomData, PrimeField, String, Vec, }; @@ -12,25 +10,29 @@ use ark_poly::univariate::DensePolynomial; use ark_poly_commit::{PCCheckRandomDataVar, PCCheckVar, PolynomialCommitment}; use ark_r1cs_std::{bits::boolean::Boolean, fields::FieldVar, R1CSVar, ToConstraintFieldGadget}; use ark_relations::ns; +use ark_sponge::CryptographicSponge; pub struct Marlin< F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, + S: CryptographicSponge, + PC: PolynomialCommitment, S>, + PCG: PCCheckVar, PC, CF, S>, >( PhantomData, PhantomData, + PhantomData, PhantomData, PhantomData, ); -impl Marlin +impl Marlin where F: PrimeField, CF: PrimeField, - PC: PolynomialCommitment>, - PCG: PCCheckVar, PC, CF>, + S: CryptographicSponge, + PC: PolynomialCommitment, S>, + PCG: PCCheckVar, PC, CF, S>, PCG::VerifierKeyVar: ToConstraintFieldGadget, PCG::CommitmentVar: ToConstraintFieldGadget, { @@ -39,9 +41,9 @@ where /// verify with an established hashchain initial state #[tracing::instrument(target = "r1cs", skip(index_pvk, proof))] pub fn prepared_verify, R: FiatShamirRngVar>( - index_pvk: &PreparedIndexVerifierKeyVar, + index_pvk: &PreparedIndexVerifierKeyVar, public_input: 
&[NonNativeFieldVar], - proof: &ProofVar, + proof: &ProofVar, ) -> Result, Error> { let cs = index_pvk .cs @@ -55,7 +57,7 @@ where fs_rng.absorb_nonnative_field_elements(&public_input, OptimizationType::Weight)?; - let (_, verifier_state) = AHPForR1CS::::verifier_first_round( + let (_, verifier_state) = AHPForR1CS::::verifier_first_round( index_pvk.domain_h_size, index_pvk.domain_k_size, &mut fs_rng, @@ -63,14 +65,14 @@ where &proof.prover_messages[0].field_elements, )?; - let (_, verifier_state) = AHPForR1CS::::verifier_second_round( + let (_, verifier_state) = AHPForR1CS::::verifier_second_round( verifier_state, &mut fs_rng, &proof.commitments[1], &proof.prover_messages[1].field_elements, )?; - let verifier_state = AHPForR1CS::::verifier_third_round( + let verifier_state = AHPForR1CS::::verifier_third_round( verifier_state, &mut fs_rng, &proof.commitments[2], @@ -82,7 +84,7 @@ where formatted_public_input.push(elem); } - let lc = AHPForR1CS::::verifier_decision( + let lc = AHPForR1CS::::verifier_decision( ns!(cs, "ahp").cs(), &formatted_public_input, &proof.evaluations, @@ -91,7 +93,7 @@ where )?; let (num_opening_challenges, num_batching_rands, comm, query_set, evaluations) = - AHPForR1CS::::verifier_comm_query_eval_set( + AHPForR1CS::::verifier_comm_query_eval_set( &index_pvk, &proof, &verifier_state, @@ -140,11 +142,12 @@ where #[tracing::instrument(target = "r1cs", skip(index_vk, proof))] pub fn verify, R: FiatShamirRngVar>( - index_vk: &IndexVerifierKeyVar, + index_vk: &IndexVerifierKeyVar, public_input: &[NonNativeFieldVar], - proof: &ProofVar, + proof: &ProofVar, ) -> Result, Error> { - let index_pvk = PreparedIndexVerifierKeyVar::::prepare(&index_vk)?; + let index_pvk = + PreparedIndexVerifierKeyVar::::prepare(&index_vk)?; Self::prepared_verify(&index_pvk, public_input, proof) } } diff --git a/src/constraints/verifier_test.rs b/src/constraints/verifier_test.rs index 6589042..461f493 100644 --- a/src/constraints/verifier_test.rs +++ b/src/constraints/verifier_test.rs @@ -42,10 +42,12 @@ mod tests { } type FS = FiatShamirAlgebraicSpongeRng>; - type MultiPC = MarlinKZG10>; - type MarlinNativeInst = MarlinNative; + type MultiPC = MarlinKZG10, PoseidonSponge>; + type MarlinNativeInst = + MarlinNative, MultiPC, FS, MarlinRecursiveConfig>; - type MultiPCVar = MarlinKZG10Gadget, MNT4PairingVar>; + type MultiPCVar = + MarlinKZG10Gadget, MNT4PairingVar, PoseidonSponge>; #[derive(Copy, Clone)] struct Circuit { diff --git a/src/data_structures.rs b/src/data_structures.rs index eded1b5..99b1ca4 100644 --- a/src/data_structures.rs +++ b/src/data_structures.rs @@ -10,6 +10,7 @@ use ark_poly_commit::{ }; use ark_relations::r1cs::SynthesisError; use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; +use ark_sponge::CryptographicSponge; use ark_std::{ format, io::{Read, Write}, @@ -20,7 +21,8 @@ use ark_std::{ /* ************************************************************************* */ /// The universal public parameters for the argument system. -pub type UniversalSRS = >>::UniversalParams; +pub type UniversalSRS = + , S>>::UniversalParams; /* ************************************************************************* */ /* ************************************************************************* */ @@ -28,7 +30,11 @@ pub type UniversalSRS = /// Verification key for a specific index (i.e., R1CS matrices). 
#[derive(CanonicalSerialize, CanonicalDeserialize)] -pub struct IndexVerifierKey>> { +pub struct IndexVerifierKey< + F: PrimeField, + S: CryptographicSponge, + PC: PolynomialCommitment, S>, +> { /// Stores information about the size of the index, as well as its field of /// definition. pub index_info: IndexInfo, @@ -38,8 +44,8 @@ pub struct IndexVerifierKey>> ark_ff::ToBytes - for IndexVerifierKey +impl, S>> + ark_ff::ToBytes for IndexVerifierKey { fn write(&self, mut w: W) -> ark_std::io::Result<()> { self.index_info.write(&mut w)?; @@ -47,8 +53,8 @@ impl>> ark_ff::ToB } } -impl>> Clone - for IndexVerifierKey +impl, S>> + Clone for IndexVerifierKey { fn clone(&self) -> Self { Self { @@ -59,7 +65,9 @@ impl>> Clone } } -impl>> IndexVerifierKey { +impl, S>> + IndexVerifierKey +{ /// Iterate over the commitments to indexed polynomials in `self`. pub fn iter(&self) -> impl Iterator { self.index_comms.iter() @@ -71,8 +79,11 @@ impl>> IndexVerifi /* ************************************************************************* */ /// Verification key, prepared (preprocessed) for use in pairings. -pub struct PreparedIndexVerifierKey>> -{ +pub struct PreparedIndexVerifierKey< + F: PrimeField, + S: CryptographicSponge, + PC: PolynomialCommitment, S>, +> { /// Size of the variable domain. pub domain_h_size: u64, /// Size of the matrix domain. @@ -84,13 +95,14 @@ pub struct PreparedIndexVerifierKey, + pub orig_vk: IndexVerifierKey, } -impl Clone for PreparedIndexVerifierKey +impl Clone for PreparedIndexVerifierKey where F: PrimeField, - PC: PolynomialCommitment>, + S: CryptographicSponge, + PC: PolynomialCommitment, S>, { fn clone(&self) -> Self { PreparedIndexVerifierKey { @@ -103,12 +115,13 @@ where } } -impl PreparedIndexVerifierKey +impl PreparedIndexVerifierKey where F: PrimeField, - PC: PolynomialCommitment>, + S: CryptographicSponge, + PC: PolynomialCommitment, S>, { - pub fn prepare(vk: &IndexVerifierKey) -> Self { + pub fn prepare(vk: &IndexVerifierKey) -> Self { let mut prepared_index_comms = Vec::::new(); for (_, comm) in vk.index_comms.iter().enumerate() { prepared_index_comms.push(PC::PreparedCommitment::prepare(comm)); @@ -142,9 +155,13 @@ where /// Proving key for a specific index (i.e., R1CS matrices). #[derive(CanonicalSerialize, CanonicalDeserialize)] -pub struct IndexProverKey>> { +pub struct IndexProverKey< + F: PrimeField, + S: CryptographicSponge, + PC: PolynomialCommitment, S>, +> { /// The index verifier key. - pub index_vk: IndexVerifierKey, + pub index_vk: IndexVerifierKey, /// The randomness for the index polynomial commitments. pub index_comm_rands: Vec, /// The index itself. @@ -153,7 +170,8 @@ pub struct IndexProverKey>> Clone for IndexProverKey +impl, S>> + Clone for IndexProverKey where PC::Commitment: Clone, { @@ -173,7 +191,11 @@ where /// A zkSNARK proof. #[derive(CanonicalSerialize, CanonicalDeserialize)] -pub struct Proof>> { +pub struct Proof< + F: PrimeField, + S: CryptographicSponge, + PC: PolynomialCommitment, S>, +> { /// Commitments to the polynomials produced by the AHP prover. pub commitments: Vec>, /// Evaluations of these polynomials. @@ -181,16 +203,18 @@ pub struct Proof>> /// The field elements sent by the prover. pub prover_messages: Vec>, /// An evaluation proof from the polynomial commitment. - pub pc_proof: BatchLCProof, PC>, + pub pc_proof: BatchLCProof>, } -impl>> Proof { +impl, S>> + Proof +{ /// Construct a new proof. 
pub fn new( commitments: Vec>, evaluations: Vec, prover_messages: Vec>, - pc_proof: BatchLCProof, PC>, + pc_proof: BatchLCProof>, ) -> Self { Self { commitments, @@ -202,7 +226,7 @@ impl>> Proof>> Proof = self.pc_proof.proof.clone().into(); - let num_proofs = proofs.len(); - for proof in &proofs { - size_bytes_proofs += proof.size_in_bytes(); - } + let num_proofs = 1; + size_bytes_proofs += self.pc_proof.serialized_size(); let num_evals = self.evaluations.len(); let evals_size_in_bytes = num_evals * size_of_fe_in_bytes; @@ -270,7 +291,9 @@ impl>> Proof>> Clone for Proof { +impl, S>> + Clone for Proof +{ fn clone(&self) -> Self { Proof { commitments: self.commitments.clone(), diff --git a/src/fiat_shamir/mod.rs b/src/fiat_shamir/mod.rs index 09bd1af..96df382 100644 --- a/src/fiat_shamir/mod.rs +++ b/src/fiat_shamir/mod.rs @@ -9,6 +9,7 @@ use rand_chacha::ChaChaRng; /// The constraints for Fiat-Shamir pub mod constraints; + /// The Poseidon sponge pub mod poseidon; diff --git a/src/fiat_shamir/poseidon/mod.rs b/src/fiat_shamir/poseidon/mod.rs index 33f6d2a..cdfe483 100644 --- a/src/fiat_shamir/poseidon/mod.rs +++ b/src/fiat_shamir/poseidon/mod.rs @@ -8,6 +8,7 @@ use crate::fiat_shamir::AlgebraicSponge; use crate::Vec; use ark_ff::PrimeField; +use ark_sponge::{Absorb, CryptographicSponge}; use ark_std::rand::SeedableRng; /// constraints for Poseidon @@ -241,3 +242,29 @@ impl AlgebraicSponge for PoseidonSponge { squeezed_elems } } + +impl CryptographicSponge for PoseidonSponge +where + F: PrimeField, +{ + type Parameters = (); + + fn new(_params: &Self::Parameters) -> Self { + >::new() + } + + fn absorb(&mut self, input: &impl Absorb) { + >::absorb( + self, + input.to_sponge_field_elements_as_vec().as_slice(), + ) + } + + fn squeeze_bytes(&mut self, num_bytes: usize) -> Vec { + todo!() + } + + fn squeeze_bits(&mut self, num_bits: usize) -> Vec { + todo!() + } +} diff --git a/src/lib.rs b/src/lib.rs index c130fcf..e643169 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -7,21 +7,25 @@ //! is the same as the number of constraints (i.e., where the constraint //! matrices are square). Furthermore, Marlin only supports instances where the //! public inputs are of size one less than a power of 2 (i.e., 2^n - 1). 
-#![deny(unused_import_braces, unused_qualifications, trivial_casts)]
-#![deny(trivial_numeric_casts, private_in_public)]
-#![deny(stable_features, unreachable_pub, non_shorthand_field_patterns)]
-#![deny(unused_attributes, unused_imports, unused_mut)]
-#![deny(renamed_and_removed_lints, stable_features, unused_allocation)]
-#![deny(unused_comparisons, bare_trait_objects, unused_must_use, const_err)]
+//#![deny(unused_import_braces, unused_qualifications, trivial_casts)]
+//#![deny(trivial_numeric_casts, private_in_public)]
+//#![deny(stable_features, unreachable_pub, non_shorthand_field_patterns)]
+//#![deny(unused_attributes, unused_imports, unused_mut)]
+//#![deny(renamed_and_removed_lints, stable_features, unused_allocation)]
+//#![deny(unused_comparisons, bare_trait_objects, unused_must_use, const_err)]
 #![forbid(unsafe_code)]
 #![allow(clippy::op_ref)]

+use crate::ahp::prover::ProverMsg;
 use ark_ff::{to_bytes, PrimeField, ToConstraintField};
+use ark_nonnative_field::params::OptimizationType;
 use ark_poly::{univariate::DensePolynomial, EvaluationDomain, GeneralEvaluationDomain};
-use ark_poly_commit::Evaluations;
-use ark_poly_commit::LabeledPolynomial;
-use ark_poly_commit::{LabeledCommitment, PCUniversalParams, PolynomialCommitment};
+use ark_poly_commit::{
+    challenge::ChallengeGenerator, Evaluations, LabeledCommitment, LabeledPolynomial,
+    PCUniversalParams, PolynomialCommitment,
+};
 use ark_relations::r1cs::{ConstraintSynthesizer, SynthesisError};
+use ark_sponge::CryptographicSponge;
 use ark_std::rand::RngCore;

 #[macro_use]
@@ -45,7 +49,7 @@ macro_rules! eprintln {
 /// Implements a Fiat-Shamir based Rng that allows one to incrementally update
 /// the seed based on new messages in the proof transcript.
 pub mod fiat_shamir;
-use crate::fiat_shamir::FiatShamirRng;
+pub use fiat_shamir::*;

 mod error;
 pub use error::*;

@@ -57,10 +61,7 @@ pub mod constraints;
 /// Implements an Algebraic Holographic Proof (AHP) for the R1CS indexed relation.
pub mod ahp; -use crate::ahp::prover::ProverMsg; -pub use ahp::AHPForR1CS; -use ahp::EvaluationsProvider; -use ark_nonnative_field::params::OptimizationType; +pub use ahp::{AHPForR1CS, EvaluationsProvider}; #[cfg(test)] mod test; @@ -87,22 +88,25 @@ impl MarlinConfig for MarlinRecursiveConfig { pub struct Marlin< F: PrimeField, FSF: PrimeField, - PC: PolynomialCommitment>, + S: CryptographicSponge + AlgebraicSponge, + PC: PolynomialCommitment, S>, FS: FiatShamirRng, MC: MarlinConfig, >( #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, + #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, ); -fn compute_vk_hash(vk: &IndexVerifierKey) -> Vec +fn compute_vk_hash(vk: &IndexVerifierKey) -> Vec where F: PrimeField, FSF: PrimeField, - PC: PolynomialCommitment>, + S: CryptographicSponge + AlgebraicSponge, + PC: PolynomialCommitment, S>, FS: FiatShamirRng, PC::Commitment: ToConstraintField, { @@ -111,9 +115,10 @@ where vk_hash_rng.squeeze_native_field_elements(1) } -impl Marlin +impl Marlin where - PC: PolynomialCommitment>, + S: CryptographicSponge + AlgebraicSponge, + PC: PolynomialCommitment, S, BatchProof = DensePolynomial>, PC::VerifierKey: ToConstraintField, PC::Commitment: ToConstraintField, FS: FiatShamirRng, @@ -129,7 +134,7 @@ where num_variables: usize, num_non_zero: usize, rng: &mut R, - ) -> Result, Error> { + ) -> Result, Error> { let max_degree = AHPForR1CS::::max_degree(num_constraints, num_variables, num_non_zero)?; let setup_time = start_timer!(|| { format!( @@ -149,7 +154,7 @@ where pub fn circuit_specific_setup, R: RngCore>( c: C, rng: &mut R, - ) -> Result<(IndexProverKey, IndexVerifierKey), Error> { + ) -> Result<(IndexProverKey, IndexVerifierKey), Error> { let index_time = start_timer!(|| "Marlin::Index"); let for_recursion = MC::FOR_RECURSION; @@ -233,9 +238,9 @@ where /// keys. This is a deterministic algorithm that anyone can rerun. #[allow(clippy::type_complexity)] pub fn index>( - srs: &UniversalSRS, + srs: &UniversalSRS, c: C, - ) -> Result<(IndexProverKey, IndexVerifierKey), Error> { + ) -> Result<(IndexProverKey, IndexVerifierKey), Error> { let index_time = start_timer!(|| "Marlin::Index"); let for_recursion = MC::FOR_RECURSION; @@ -314,10 +319,10 @@ where /// Create a zkSNARK asserting that the constraint system is satisfied. pub fn prove, R: RngCore>( - index_pk: &IndexProverKey, + index_pk: &IndexProverKey, c: C, zk_rng: &mut R, - ) -> Result, Error> { + ) -> Result, Error> { let prover_time = start_timer!(|| "Marlin::Prover"); // TODO: Add check that c is in the correct mode. 
@@ -332,7 +337,7 @@ where if for_recursion { fs_rng.absorb_bytes(&to_bytes![&Self::PROTOCOL_NAME].unwrap()); - fs_rng.absorb_native_field_elements(&compute_vk_hash::( + fs_rng.absorb_native_field_elements(&compute_vk_hash::( &index_pk.index_vk, )); fs_rng.absorb_nonnative_field_elements(&public_input, OptimizationType::Weight); @@ -537,57 +542,39 @@ where fs_rng.absorb_bytes(&to_bytes![&evaluations].unwrap()); } - let pc_proof = if for_recursion { - let num_open_challenges: usize = 7; - - let mut opening_challenges = Vec::::new(); - opening_challenges - .append(&mut fs_rng.squeeze_128_bits_nonnative_field_elements(num_open_challenges)); - - let opening_challenges_f = |i| opening_challenges[i as usize]; - - PC::open_combinations_individual_opening_challenges( - &index_pk.committer_key, - &lc_s, - polynomials, - &labeled_comms, - &query_set, - &opening_challenges_f, - &comm_rands, - Some(zk_rng), - ) - .map_err(Error::from_pc_err)? - } else { - let opening_challenge: F = fs_rng.squeeze_128_bits_nonnative_field_elements(1)[0]; - - PC::open_combinations( - &index_pk.committer_key, - &lc_s, - polynomials, - &labeled_comms, - &query_set, - opening_challenge, - &comm_rands, - Some(zk_rng), - ) - .map_err(Error::from_pc_err)? - }; + let sponge = >::new(); + let mut opening_challenges = ChallengeGenerator::::new_multivariate(sponge); + + let pc_proof = PC::open_combinations( + &index_pk.committer_key, + &lc_s, + polynomials, + &labeled_comms, + &query_set, + &mut opening_challenges, + &comm_rands, + Some(zk_rng), + ) + .map_err(Error::from_pc_err)?; // Gather prover messages together. let prover_messages = vec![prover_first_msg, prover_second_msg, prover_third_msg]; let proof = Proof::new(commitments, evaluations, prover_messages, pc_proof); + proof.print_size_info(); + end_timer!(prover_time); + Ok(proof) } /// Verify that a proof for the constrain system defined by `C` asserts that /// all constraints are satisfied. pub fn verify( - index_vk: &IndexVerifierKey, + index_vk: &IndexVerifierKey, public_input: &[F], - proof: &Proof, + proof: &Proof, ) -> Result> { let verifier_time = start_timer!(|| "Marlin::Verify"); @@ -609,7 +596,7 @@ where if for_recursion { fs_rng.absorb_bytes(&to_bytes![&Self::PROTOCOL_NAME].unwrap()); - fs_rng.absorb_native_field_elements(&compute_vk_hash::(index_vk)); + fs_rng.absorb_native_field_elements(&compute_vk_hash::(index_vk)); fs_rng.absorb_nonnative_field_elements(&public_input, OptimizationType::Weight); } else { fs_rng @@ -734,56 +721,37 @@ where for_recursion, )?; - let evaluations_are_correct = if for_recursion { - let num_open_challenges: usize = 7; - - let mut opening_challenges = Vec::::new(); - opening_challenges - .append(&mut fs_rng.squeeze_128_bits_nonnative_field_elements(num_open_challenges)); - - let opening_challenges_f = |i| opening_challenges[i as usize]; - - PC::check_combinations_individual_opening_challenges( - &index_vk.verifier_key, - &lc_s, - &commitments, - &query_set, - &evaluations, - &proof.pc_proof, - &opening_challenges_f, - &mut fs_rng, - ) - .map_err(Error::from_pc_err)? - } else { - let opening_challenge: F = fs_rng.squeeze_128_bits_nonnative_field_elements(1)[0]; - - PC::check_combinations( - &index_vk.verifier_key, - &lc_s, - &commitments, - &query_set, - &evaluations, - &proof.pc_proof, - opening_challenge, - &mut fs_rng, - ) - .map_err(Error::from_pc_err)? 
- }; + let sponge = >::new(); + let mut opening_challenges = ChallengeGenerator::::new_multivariate(sponge); + + let evaluations_are_correct = PC::check_combinations( + &index_vk.verifier_key, + &lc_s, + &commitments, + &query_set, + &evaluations, + &proof.pc_proof, + &mut opening_challenges, + &mut fs_rng, + ) + .map_err(Error::from_pc_err)?; if !evaluations_are_correct { eprintln!("PC::Check failed"); } + end_timer!(verifier_time, || format!( " PC::Check for AHP Verifier linear equations: {}", evaluations_are_correct )); + Ok(evaluations_are_correct) } pub fn prepared_verify( - prepared_vk: &PreparedIndexVerifierKey, + prepared_vk: &PreparedIndexVerifierKey, public_input: &[F], - proof: &Proof, + proof: &Proof, ) -> Result> { Self::verify(&prepared_vk.orig_vk, public_input, proof) } diff --git a/src/test.rs b/src/test.rs index f8d5d1c..6a2285e 100644 --- a/src/test.rs +++ b/src/test.rs @@ -115,7 +115,10 @@ impl ConstraintSynthesizer for OutlineTestCircuit { mod marlin { use super::*; - use crate::{fiat_shamir::FiatShamirChaChaRng, Marlin, MarlinDefaultConfig}; + use crate::{ + fiat_shamir::poseidon::PoseidonSponge, fiat_shamir::FiatShamirChaChaRng, Marlin, + MarlinDefaultConfig, + }; use ark_bls12_381::{Bls12_381, Fq, Fr}; use ark_ff::UniformRand; @@ -124,9 +127,15 @@ mod marlin { use ark_std::ops::MulAssign; use blake2::Blake2s; - type MultiPC = MarlinKZG10>; - type MarlinInst = - Marlin, MarlinDefaultConfig>; + type MultiPC = MarlinKZG10, PoseidonSponge>; + type MarlinInst = Marlin< + Fr, + Fq, + PoseidonSponge, + MultiPC, + FiatShamirChaChaRng, + MarlinDefaultConfig, + >; fn test_circuit(num_constraints: usize, num_variables: usize) { let rng = &mut ark_std::test_rng(); @@ -217,10 +226,11 @@ mod marlin_recursion { use ark_poly_commit::marlin_pc::MarlinKZG10; use core::ops::MulAssign; - type MultiPC = MarlinKZG10>; + type MultiPC = MarlinKZG10, PoseidonSponge>; type MarlinInst = Marlin< Fr, Fq, + PoseidonSponge, MultiPC, FiatShamirAlgebraicSpongeRng>, MarlinRecursiveConfig, From cb859441b832b803fe8a8de7a2f65dfd95002e1c Mon Sep 17 00:00:00 2001 From: Victor Lopez Date: Wed, 23 Mar 2022 18:14:05 +0100 Subject: [PATCH 2/4] Restore lib.rs directives --- src/lib.rs | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index e643169..547cf57 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -7,12 +7,12 @@ //! is the same as the number of constraints (i.e., where the constraint //! matrices are square). Furthermore, Marlin only supports instances where the //! public inputs are of size one less than a power of 2 (i.e., 2^n - 1). 
-//#![deny(unused_import_braces, unused_qualifications, trivial_casts)] -//#![deny(trivial_numeric_casts, private_in_public)] -//#![deny(stable_features, unreachable_pub, non_shorthand_field_patterns)] -//#![deny(unused_attributes, unused_imports, unused_mut)] -//#![deny(renamed_and_removed_lints, stable_features, unused_allocation)] -//#![deny(unused_comparisons, bare_trait_objects, unused_must_use, const_err)] +#![deny(unused_import_braces, unused_qualifications, trivial_casts)] +#![deny(trivial_numeric_casts, private_in_public)] +#![deny(stable_features, unreachable_pub, non_shorthand_field_patterns)] +#![deny(unused_attributes, unused_imports, unused_mut)] +#![deny(renamed_and_removed_lints, stable_features, unused_allocation)] +#![deny(unused_comparisons, bare_trait_objects, unused_must_use, const_err)] #![forbid(unsafe_code)] #![allow(clippy::op_ref)] From f5b62dee25f493f3f11956e29f18b5e3001aa32b Mon Sep 17 00:00:00 2001 From: Victor Lopez Date: Sat, 2 Apr 2022 05:38:21 +0200 Subject: [PATCH 3/4] Use `PC::BatchVar` --- src/constraints/data_structures.rs | 2 +- src/constraints/snark.rs | 8 ++++---- src/constraints/verifier_test.rs | 19 ++++++++++--------- src/data_structures.rs | 4 ++-- src/fiat_shamir/poseidon/mod.rs | 4 ++-- src/lib.rs | 2 +- 6 files changed, 20 insertions(+), 19 deletions(-) diff --git a/src/constraints/data_structures.rs b/src/constraints/data_structures.rs index 881bf4a..9448429 100644 --- a/src/constraints/data_structures.rs +++ b/src/constraints/data_structures.rs @@ -434,7 +434,7 @@ where F: PrimeField, CF: PrimeField, S: CryptographicSponge, - PC: PolynomialCommitment, S, BatchProof = DensePolynomial>, + PC: PolynomialCommitment, S>, PCG: PCCheckVar, PC, CF, S>, PC::VerifierKey: ToConstraintField, PC::Commitment: ToConstraintField, diff --git a/src/constraints/snark.rs b/src/constraints/snark.rs index 2d8aa68..61bdac5 100644 --- a/src/constraints/snark.rs +++ b/src/constraints/snark.rs @@ -68,7 +68,7 @@ where F: PrimeField, FSF: PrimeField, S: CryptographicSponge + AlgebraicSponge, - PC: PolynomialCommitment, S, BatchProof = DensePolynomial>, + PC: PolynomialCommitment, S>, FS: FiatShamirRng, MC: MarlinConfig, PC::VerifierKey: ToConstraintField, @@ -127,7 +127,7 @@ where F: PrimeField, FSF: PrimeField, S: CryptographicSponge + AlgebraicSponge, - PC: PolynomialCommitment, S, BatchProof = DensePolynomial>, + PC: PolynomialCommitment, S>, FS: FiatShamirRng, MC: MarlinConfig, PC::VerifierKey: ToConstraintField, @@ -199,7 +199,7 @@ where F: PrimeField, FSF: PrimeField, S: CryptographicSponge + AlgebraicSponge, - PC: PolynomialCommitment, S, BatchProof = DensePolynomial>, + PC: PolynomialCommitment, S>, FS: FiatShamirRng, MC: MarlinConfig, PCG: PCCheckVar, PC, FSF, S>, @@ -318,7 +318,7 @@ where F: PrimeField, FSF: PrimeField, S: CryptographicSponge + AlgebraicSponge, - PC: PolynomialCommitment, S, BatchProof = DensePolynomial>, + PC: PolynomialCommitment, S>, FS: FiatShamirRng, MC: MarlinConfig, PCG: PCCheckVar, PC, FSF, S>, diff --git a/src/constraints/verifier_test.rs b/src/constraints/verifier_test.rs index 461f493..9af0646 100644 --- a/src/constraints/verifier_test.rs +++ b/src/constraints/verifier_test.rs @@ -123,7 +123,7 @@ mod tests { cs.set_optimization_goal(OptimizationGoal::Weight); // BEGIN: ivk to ivk_gadget - let ivk_gadget: IndexVerifierKeyVar = + let ivk_gadget: IndexVerifierKeyVar, MultiPC, MultiPCVar> = IndexVerifierKeyVar::new_witness(ns!(cs, "alloc#index vk"), || Ok(index_vk)).unwrap(); // END: ivk to ivk_gadget @@ -188,12 +188,13 @@ mod tests { 
}) .collect(); - let pc_batch_proof = - BatchLCProofVar::, MNT4PairingVar>::new_witness( - ns!(cs, "alloc#proof"), - || Ok(pc_proof), - ) - .unwrap(); + let pc_batch_proof = BatchLCProofVar::< + MNT298Cycle, + DensePolynomial, + MNT4PairingVar, + PoseidonSponge, + >::new_witness(ns!(cs, "alloc#proof"), || Ok(pc_proof)) + .unwrap(); let mut evaluation_gadgets = HashMap::>::new(); @@ -214,7 +215,7 @@ mod tests { evaluation_gadgets.insert(s.to_string(), (*eval).clone()); } - let proof_gadget: ProofVar = ProofVar { + let proof_gadget: ProofVar, MultiPC, MultiPCVar> = ProofVar { cs: cs.clone(), commitments: commitment_gadgets, evaluations: evaluation_gadgets, @@ -223,7 +224,7 @@ mod tests { }; // END: proof to proof_gadget - Marlin::::verify::< + Marlin::, MultiPC, MultiPCVar>::verify::< FiatShamirAlgebraicSpongeRng>, FiatShamirAlgebraicSpongeRngVar, PoseidonSpongeVar>, >(&ivk_gadget, &public_input_gadget, &proof_gadget) diff --git a/src/data_structures.rs b/src/data_structures.rs index 99b1ca4..1d79109 100644 --- a/src/data_structures.rs +++ b/src/data_structures.rs @@ -203,7 +203,7 @@ pub struct Proof< /// The field elements sent by the prover. pub prover_messages: Vec>, /// An evaluation proof from the polynomial commitment. - pub pc_proof: BatchLCProof>, + pub pc_proof: BatchLCProof, } impl, S>> @@ -214,7 +214,7 @@ impl>, evaluations: Vec, prover_messages: Vec>, - pc_proof: BatchLCProof>, + pc_proof: BatchLCProof, ) -> Self { Self { commitments, diff --git a/src/fiat_shamir/poseidon/mod.rs b/src/fiat_shamir/poseidon/mod.rs index cdfe483..e9d3318 100644 --- a/src/fiat_shamir/poseidon/mod.rs +++ b/src/fiat_shamir/poseidon/mod.rs @@ -260,11 +260,11 @@ where ) } - fn squeeze_bytes(&mut self, num_bytes: usize) -> Vec { + fn squeeze_bytes(&mut self, _num_bytes: usize) -> Vec { todo!() } - fn squeeze_bits(&mut self, num_bits: usize) -> Vec { + fn squeeze_bits(&mut self, _num_bits: usize) -> Vec { todo!() } } diff --git a/src/lib.rs b/src/lib.rs index 547cf57..233748a 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -118,7 +118,7 @@ where impl Marlin where S: CryptographicSponge + AlgebraicSponge, - PC: PolynomialCommitment, S, BatchProof = DensePolynomial>, + PC: PolynomialCommitment, S>, PC::VerifierKey: ToConstraintField, PC::Commitment: ToConstraintField, FS: FiatShamirRng, From c25470345203ad710cdc5bd497b5f680962e27ec Mon Sep 17 00:00:00 2001 From: Victor Lopez Date: Mon, 25 Apr 2022 18:54:55 +0200 Subject: [PATCH 4/4] Remove `fiat_shamir` mod in favor of `ark-sponge` --- src/ahp/mod.rs | 33 +- src/ahp/prover.rs | 6 +- src/ahp/verifier.rs | 30 +- src/constraints/ahp.rs | 84 ++--- src/constraints/data_structures.rs | 105 +++--- src/constraints/snark.rs | 226 ++++++------ src/constraints/verifier.rs | 47 ++- src/constraints/verifier_test.rs | 4 - src/fiat_shamir/constraints.rs | 460 ------------------------ src/fiat_shamir/mod.rs | 454 ----------------------- src/fiat_shamir/poseidon/constraints.rs | 298 --------------- src/fiat_shamir/poseidon/mod.rs | 270 -------------- src/lib.rs | 159 ++++---- src/test.rs | 16 +- 14 files changed, 344 insertions(+), 1848 deletions(-) delete mode 100644 src/fiat_shamir/constraints.rs delete mode 100644 src/fiat_shamir/mod.rs delete mode 100644 src/fiat_shamir/poseidon/constraints.rs delete mode 100644 src/fiat_shamir/poseidon/mod.rs diff --git a/src/ahp/mod.rs b/src/ahp/mod.rs index aa2879d..26e918d 100644 --- a/src/ahp/mod.rs +++ b/src/ahp/mod.rs @@ -1,9 +1,12 @@ use crate::{String, ToString, Vec}; use ark_ff::{Field, PrimeField}; +use 
ark_nonnative_field::NonNativeFieldVar; use ark_poly::univariate::DensePolynomial; use ark_poly::{EvaluationDomain, GeneralEvaluationDomain}; use ark_poly_commit::{LCTerm, LinearCombination}; -use ark_relations::r1cs::SynthesisError; +use ark_relations::r1cs::{ConstraintSystemRef, SynthesisError}; +use ark_sponge::constraints::CryptographicSpongeVar; +use ark_sponge::CryptographicSponge; use ark_std::{borrow::Borrow, cfg_iter_mut, format, marker::PhantomData, vec}; #[cfg(feature = "parallel")] @@ -20,6 +23,34 @@ pub mod verifier; /// A labeled DensePolynomial with coefficients over `F` pub type LabeledPolynomial = ark_poly_commit::LabeledPolynomial>; +/// The interface for a cryptographic sponge with default parameters +pub trait CryptographicSpongeWithDefault: CryptographicSponge { + /// Default parametes for the cryptographic sponge + /// + /// Replacement for the requirement of S::Parameters: Default to minimize the upwards impact of + /// this implementation + fn default_params() -> Self::Parameters; +} + +/// The interface for a cryptographic sponge constraints on field `F`. +/// A sponge can `absorb` or take in inputs and later `squeeze` or output bytes or field elements. +/// The outputs are dependent on previous `absorb` and `squeeze` calls. +pub trait CryptographicSpongeVarNonNative: + CryptographicSpongeVar +{ + /// Default parametes for the cryptographic sponge var + fn default_params() -> >::Parameters; + + /// Plaintext sponge + fn constant(cs: ConstraintSystemRef) -> Self; + + /// Absorb non native `CF` elements + fn absorb_nonnative( + &mut self, + input: &[NonNativeFieldVar], + ) -> Result<(), SynthesisError>; +} + /// The algebraic holographic proof defined in [CHMMVW19](https://eprint.iacr.org/2019/1047). /// Currently, this AHP only supports inputs of size one /// less than a power of 2 (i.e., of the form 2^n - 1). diff --git a/src/ahp/prover.rs b/src/ahp/prover.rs index 17e1c2e..96f1b35 100644 --- a/src/ahp/prover.rs +++ b/src/ahp/prover.rs @@ -444,10 +444,9 @@ impl AHPForR1CS { } /// Output the second round message and the next state. - pub fn prover_second_round<'a, R: RngCore>( + pub fn prover_second_round<'a>( ver_message: &VerifierFirstMsg, mut state: ProverState<'a, F>, - _r: &mut R, hiding: bool, ) -> (ProverMsg, ProverSecondOracles, ProverState<'a, F>) { let round_time = start_timer!(|| "AHP::Prover::SecondRound"); @@ -598,10 +597,9 @@ impl AHPForR1CS { } /// Output the third round message and the next state. - pub fn prover_third_round<'a, R: RngCore>( + pub fn prover_third_round<'a>( ver_message: &VerifierSecondMsg, prover_state: ProverState<'a, F>, - _r: &mut R, ) -> Result<(ProverMsg, ProverThirdOracles), Error> { let round_time = start_timer!(|| "AHP::Prover::ThirdRound"); diff --git a/src/ahp/verifier.rs b/src/ahp/verifier.rs index b1dfce5..310bd62 100644 --- a/src/ahp/verifier.rs +++ b/src/ahp/verifier.rs @@ -3,11 +3,9 @@ use crate::ahp::indexer::IndexInfo; use crate::ahp::*; -use crate::fiat_shamir::FiatShamirRng; use ark_ff::PrimeField; -use ark_nonnative_field::params::OptimizationType; -use ark_poly::{EvaluationDomain, GeneralEvaluationDomain}; use ark_poly_commit::QuerySet; +use ark_sponge::CryptographicSponge; /// State of the AHP verifier pub struct VerifierState { @@ -42,21 +40,22 @@ pub struct VerifierSecondMsg { impl AHPForR1CS { /// Output the first message and next round state. 
- pub fn verifier_first_round>( + pub fn verifier_first_round( index_info: IndexInfo, - fs_rng: &mut R, + sponge: &mut S, ) -> Result<(VerifierFirstMsg, VerifierState), Error> { if index_info.num_constraints != index_info.num_variables { return Err(Error::NonSquareMatrix); } - let domain_h = GeneralEvaluationDomain::new(index_info.num_constraints) - .ok_or(SynthesisError::PolynomialDegreeTooLarge)?; + let domain_h: GeneralEvaluationDomain = + GeneralEvaluationDomain::new(index_info.num_constraints) + .ok_or(SynthesisError::PolynomialDegreeTooLarge)?; let domain_k = GeneralEvaluationDomain::new(index_info.num_non_zero) .ok_or(SynthesisError::PolynomialDegreeTooLarge)?; - let elems = fs_rng.squeeze_nonnative_field_elements(4, OptimizationType::Weight); + let elems = sponge.squeeze_field_elements(4); let alpha = elems[0]; let eta_a = elems[1]; let eta_b = elems[2]; @@ -82,11 +81,11 @@ impl AHPForR1CS { } /// Output the second message and next round state. - pub fn verifier_second_round>( + pub fn verifier_second_round( mut state: VerifierState, - fs_rng: &mut R, + sponge: &mut S, ) -> (VerifierSecondMsg, VerifierState) { - let elems = fs_rng.squeeze_nonnative_field_elements(1, OptimizationType::Weight); + let elems = sponge.squeeze_field_elements(1); let beta = elems[0]; assert!(!state.domain_h.evaluate_vanishing_polynomial(beta).is_zero()); @@ -97,11 +96,11 @@ impl AHPForR1CS { } /// Output the third message and next round state. - pub fn verifier_third_round>( + pub fn verifier_third_round( mut state: VerifierState, - fs_rng: &mut R, + sponge: &mut S, ) -> VerifierState { - let elems = fs_rng.squeeze_nonnative_field_elements(1, OptimizationType::Weight); + let elems = sponge.squeeze_field_elements(1); let gamma = elems[0]; state.gamma = Some(gamma); @@ -109,9 +108,8 @@ impl AHPForR1CS { } /// Output the query state and next round state. 
- pub fn verifier_query_set<'a, FSF: PrimeField, R: FiatShamirRng>( + pub fn verifier_query_set<'a, FSF: PrimeField>( state: VerifierState, - _: &'a mut R, with_vanishing: bool, ) -> (QuerySet, VerifierState) { let alpha = state.first_round_msg.unwrap().alpha; diff --git a/src/constraints/ahp.rs b/src/constraints/ahp.rs index f22c698..fbd079f 100644 --- a/src/constraints/ahp.rs +++ b/src/constraints/ahp.rs @@ -1,14 +1,12 @@ use crate::{ - ahp::Error, + ahp::{CryptographicSpongeVarNonNative, Error}, constraints::{ data_structures::{PreparedIndexVerifierKeyVar, ProofVar}, lagrange_interpolation::LagrangeInterpolationVar, polynomial::AlgebraForAHP, }, - fiat_shamir::{constraints::FiatShamirRngVar, FiatShamirRng}, PhantomData, PrimeField, String, ToString, Vec, }; -use ark_nonnative_field::params::OptimizationType; use ark_nonnative_field::NonNativeFieldVar; use ark_poly::univariate::DensePolynomial; use ark_poly_commit::{ @@ -23,7 +21,7 @@ use ark_r1cs_std::{ ToBitsGadget, ToConstraintFieldGadget, }; use ark_relations::r1cs::ConstraintSystemRef; -use ark_sponge::CryptographicSponge; +use ark_sponge::{constraints::CryptographicSpongeVar, CryptographicSponge}; use hashbrown::{HashMap, HashSet}; #[derive(Clone)] @@ -59,6 +57,7 @@ pub struct AHPForR1CS< F: PrimeField, CF: PrimeField, S: CryptographicSponge, + SV: CryptographicSpongeVar, PC: PolynomialCommitment, S>, PCG: PCCheckVar, PC, CF, S>, > where @@ -68,6 +67,7 @@ pub struct AHPForR1CS< field: PhantomData, constraint_field: PhantomData, sponge: PhantomData, + sponge_var: PhantomData, polynomial_commitment: PhantomData, pc_check: PhantomData, } @@ -76,24 +76,21 @@ impl< F: PrimeField, CF: PrimeField, S: CryptographicSponge, + SVN: CryptographicSpongeVarNonNative, PC: PolynomialCommitment, S>, PCG: PCCheckVar, PC, CF, S>, - > AHPForR1CS + > AHPForR1CS where PCG::VerifierKeyVar: ToConstraintFieldGadget, PCG::CommitmentVar: ToConstraintFieldGadget, { /// Output the first message and next round state. 
- #[tracing::instrument(target = "r1cs", skip(fs_rng, comms))] + #[tracing::instrument(target = "r1cs", skip(sponge_var, comms))] #[allow(clippy::type_complexity)] - pub fn verifier_first_round< - CommitmentVar: ToConstraintFieldGadget, - PR: FiatShamirRng, - R: FiatShamirRngVar, - >( + pub fn verifier_first_round>( domain_h_size: u64, domain_k_size: u64, - fs_rng: &mut R, + sponge_var: &mut SVN, comms: &[CommitmentVar], message: &[NonNativeFieldVar], ) -> Result<(VerifierFirstMsgVar, VerifierStateVar), Error> { @@ -103,16 +100,16 @@ where comms.iter().for_each(|comm| { elems.append(&mut comm.to_constraint_field().unwrap()); }); - fs_rng.absorb_native_field_elements(&elems)?; - fs_rng.absorb_nonnative_field_elements(&message, OptimizationType::Weight)?; + sponge_var.absorb(&elems)?; + sponge_var.absorb_nonnative(&message)?; } - // obtain four elements from the sponge - let elems = fs_rng.squeeze_field_elements(4)?; - let alpha = elems[0].clone(); - let eta_a = elems[1].clone(); - let eta_b = elems[2].clone(); - let eta_c = elems[3].clone(); + // obtain four elements from the sponge_var + let elems = sponge_var.squeeze_nonnative_field_elements(4)?; + let alpha = elems.0[0].clone(); + let eta_a = elems.0[1].clone(); + let eta_b = elems.0[2].clone(); + let eta_c = elems.0[3].clone(); let msg = VerifierFirstMsgVar { alpha, @@ -132,15 +129,11 @@ where Ok((msg, new_state)) } - #[tracing::instrument(target = "r1cs", skip(state, fs_rng, comms))] + #[tracing::instrument(target = "r1cs", skip(state, sponge_var, comms))] #[allow(clippy::type_complexity)] - pub fn verifier_second_round< - CommitmentVar: ToConstraintFieldGadget, - PR: FiatShamirRng, - R: FiatShamirRngVar, - >( + pub fn verifier_second_round>( state: VerifierStateVar, - fs_rng: &mut R, + sponge_var: &mut SVN, comms: &[CommitmentVar], message: &[NonNativeFieldVar], ) -> Result<(VerifierSecondMsgVar, VerifierStateVar), Error> { @@ -157,13 +150,13 @@ where comms.iter().for_each(|comm| { elems.append(&mut comm.to_constraint_field().unwrap()); }); - fs_rng.absorb_native_field_elements(&elems)?; - fs_rng.absorb_nonnative_field_elements(&message, OptimizationType::Weight)?; + sponge_var.absorb(&elems)?; + sponge_var.absorb_nonnative(&message)?; } - // obtain one element from the sponge - let elems = fs_rng.squeeze_field_elements(1)?; - let beta = elems[0].clone(); + // obtain one element from the sponge_var + let elems = sponge_var.squeeze_nonnative_field_elements(1)?; + let beta = elems.0[0].clone(); let msg = VerifierSecondMsgVar { beta }; @@ -178,14 +171,10 @@ where Ok((msg, new_state)) } - #[tracing::instrument(target = "r1cs", skip(state, fs_rng, comms))] - pub fn verifier_third_round< - CommitmentVar: ToConstraintFieldGadget, - PR: FiatShamirRng, - R: FiatShamirRngVar, - >( + #[tracing::instrument(target = "r1cs", skip(state, sponge_var, comms))] + pub fn verifier_third_round>( state: VerifierStateVar, - fs_rng: &mut R, + sponge_var: &mut SVN, comms: &[CommitmentVar], message: &[NonNativeFieldVar], ) -> Result, Error> { @@ -203,13 +192,13 @@ where comms.iter().for_each(|comm| { elems.append(&mut comm.to_constraint_field().unwrap()); }); - fs_rng.absorb_native_field_elements(&elems)?; - fs_rng.absorb_nonnative_field_elements(&message, OptimizationType::Weight)?; + sponge_var.absorb(&elems)?; + sponge_var.absorb_nonnative(&message)?; } - // obtain one element from the sponge - let elems = fs_rng.squeeze_field_elements(1)?; - let gamma = elems[0].clone(); + // obtain one element from the sponge_var + let elems = 
sponge_var.squeeze_nonnative_field_elements(1)?; + let gamma = elems.0[0].clone(); let new_state = VerifierStateVar { domain_h_size, @@ -529,11 +518,8 @@ where #[tracing::instrument(target = "r1cs", skip(index_pvk, proof, state))] #[allow(clippy::type_complexity)] - pub fn verifier_comm_query_eval_set< - PR: FiatShamirRng, - R: FiatShamirRngVar, - >( - index_pvk: &PreparedIndexVerifierKeyVar, + pub fn verifier_comm_query_eval_set( + index_pvk: &PreparedIndexVerifierKeyVar, proof: &ProofVar, state: &VerifierStateVar, ) -> Result< diff --git a/src/constraints/data_structures.rs b/src/constraints/data_structures.rs index 9448429..2dc6728 100644 --- a/src/constraints/data_structures.rs +++ b/src/constraints/data_structures.rs @@ -1,9 +1,9 @@ use crate::ahp::prover::ProverMsg; +use crate::ahp::{CryptographicSpongeVarNonNative, CryptographicSpongeWithDefault}; use crate::{ constraints::verifier::Marlin as MarlinVerifierVar, data_structures::{IndexVerifierKey, PreparedIndexVerifierKey, Proof}, - fiat_shamir::{constraints::FiatShamirRngVar, FiatShamirRng}, - PhantomData, PrimeField, String, SynthesisError, ToString, Vec, + PrimeField, String, SynthesisError, ToString, Vec, }; use ark_ff::{to_bytes, ToConstraintField}; use ark_nonnative_field::NonNativeFieldVar; @@ -17,7 +17,7 @@ use ark_r1cs_std::{ R1CSVar, ToBytesGadget, ToConstraintFieldGadget, }; use ark_relations::r1cs::{ConstraintSystemRef, Namespace}; -use ark_sponge::CryptographicSponge; +use ark_sponge::{Absorb, CryptographicSponge}; use ark_std::borrow::Borrow; use hashbrown::HashMap; @@ -146,7 +146,7 @@ impl< impl< F: PrimeField, CF: PrimeField, - S: CryptographicSponge, + S: CryptographicSpongeWithDefault, PC: PolynomialCommitment, S>, PCG: PCCheckVar, PC, CF, S>, > Clone for IndexVerifierKeyVar @@ -181,10 +181,9 @@ pub struct PreparedIndexVerifierKeyVar< F: PrimeField, CF: PrimeField, S: CryptographicSponge, + SVN: CryptographicSpongeVarNonNative, PC: PolynomialCommitment, S>, PCG: PCCheckVar, PC, CF, S>, - PR: FiatShamirRng, - R: FiatShamirRngVar, > { pub cs: ConstraintSystemRef, pub domain_h_size: u64, @@ -193,20 +192,17 @@ pub struct PreparedIndexVerifierKeyVar< pub domain_k_size_gadget: FpVar, pub prepared_index_comms: Vec, pub prepared_verifier_key: PCG::PreparedVerifierKeyVar, - pub fs_rng: R, - - pr: PhantomData, + pub sponge_var: SVN, } impl< F: PrimeField, CF: PrimeField, S: CryptographicSponge, + SVN: CryptographicSpongeVarNonNative, PC: PolynomialCommitment, S>, PCG: PCCheckVar, PC, CF, S>, - PR: FiatShamirRng, - R: FiatShamirRngVar, - > Clone for PreparedIndexVerifierKeyVar + > Clone for PreparedIndexVerifierKeyVar { fn clone(&self) -> Self { PreparedIndexVerifierKeyVar { @@ -217,35 +213,29 @@ impl< domain_k_size_gadget: self.domain_k_size_gadget.clone(), prepared_index_comms: self.prepared_index_comms.clone(), prepared_verifier_key: self.prepared_verifier_key.clone(), - fs_rng: self.fs_rng.clone(), - pr: PhantomData, + sponge_var: self.sponge_var.clone(), } } } -impl PreparedIndexVerifierKeyVar +impl PreparedIndexVerifierKeyVar where F: PrimeField, - CF: PrimeField, - S: CryptographicSponge, + CF: PrimeField + Absorb, + S: CryptographicSpongeWithDefault, + SVN: CryptographicSpongeVarNonNative, PC: PolynomialCommitment, S>, PCG: PCCheckVar, PC, CF, S>, - PR: FiatShamirRng, - R: FiatShamirRngVar, PCG::VerifierKeyVar: ToConstraintFieldGadget, PCG::CommitmentVar: ToConstraintFieldGadget, { #[tracing::instrument(target = "r1cs", skip(vk))] pub fn prepare(vk: &IndexVerifierKeyVar) -> Result { let cs = vk.cs(); - - let mut 
fs_rng_raw = PR::new(); - fs_rng_raw.absorb_bytes( - &to_bytes![&MarlinVerifierVar::::PROTOCOL_NAME].unwrap(), - ); + let params = S::default_params(); let index_vk_hash = { - let mut vk_hash_rng = PR::new(); + let mut vk_hash = S::new(¶ms); let mut vk_elems = Vec::::new(); vk.index_comms.iter().for_each(|index_comm| { @@ -258,18 +248,25 @@ where .collect(), ); }); - vk_hash_rng.absorb_native_field_elements(&vk_elems); + + vk_hash.absorb(&vk_elems); + FpVar::::new_witness(ark_relations::ns!(cs, "alloc#vk_hash"), || { - Ok(vk_hash_rng.squeeze_native_field_elements(1)[0]) + Ok(vk_hash.squeeze_field_elements::(1)[0]) }) .unwrap() }; - let fs_rng = { - let mut fs_rng = R::constant(cs, &fs_rng_raw); - fs_rng.absorb_native_field_elements(&[index_vk_hash])?; - fs_rng - }; + let params = S::default_params(); + let mut sponge = S::new(¶ms); + + sponge.absorb(&to_bytes![&MarlinVerifierVar::::PROTOCOL_NAME].unwrap()); + + // FIXME Original call was `R::constant` + let params_var = SVN::default_params(); + let mut sponge_var = SVN::new(cs, ¶ms_var); + + sponge_var.absorb(&index_vk_hash)?; let mut prepared_index_comms = Vec::::new(); for comm in vk.index_comms.iter() { @@ -286,22 +283,20 @@ where domain_k_size_gadget: vk.domain_k_size_gadget.clone(), prepared_index_comms, prepared_verifier_key, - fs_rng, - pr: PhantomData, + sponge_var, }) } } -impl AllocVar, CF> - for PreparedIndexVerifierKeyVar +impl AllocVar, CF> + for PreparedIndexVerifierKeyVar where F: PrimeField, - CF: PrimeField, - S: CryptographicSponge, + CF: PrimeField + Absorb, + S: CryptographicSpongeWithDefault, + SVN: CryptographicSpongeVarNonNative, PC: PolynomialCommitment, S>, PCG: PCCheckVar, PC, CF, S>, - PR: FiatShamirRng, - R: FiatShamirRngVar, PC::VerifierKey: ToConstraintField, PC::Commitment: ToConstraintField, PCG::VerifierKeyVar: ToConstraintFieldGadget, @@ -343,33 +338,36 @@ where }); let index_vk_hash = { - let mut vk_hash_rng = PR::new(); + let params = S::default_params(); + let mut vk_hash_rng = S::new(¶ms); + + vk_hash_rng.absorb(&vk_elems); - vk_hash_rng.absorb_native_field_elements(&vk_elems); FpVar::::new_variable( ark_relations::ns!(cs, "alloc#vk_hash"), - || Ok(vk_hash_rng.squeeze_native_field_elements(1)[0]), + || Ok(vk_hash_rng.squeeze_field_elements::(1)[0]), mode, ) .unwrap() }; - let mut fs_rng_raw = PR::new(); - fs_rng_raw.absorb_bytes( - &to_bytes![&MarlinVerifierVar::::PROTOCOL_NAME].unwrap(), - ); + let params = S::default_params(); + let mut sponge = S::new(¶ms); - let fs_rng = { - let mut fs_rng = R::constant(cs.clone(), &fs_rng_raw); - fs_rng.absorb_native_field_elements(&[index_vk_hash])?; - fs_rng - }; + sponge.absorb(&to_bytes![&MarlinVerifierVar::::PROTOCOL_NAME].unwrap()); + + // FIXME Original call was `R::constant` + let params_var = SVN::default_params(); + let mut sponge_var = SVN::new(cs.clone(), ¶ms_var); + + sponge_var.absorb(&index_vk_hash)?; let domain_h_size_gadget = FpVar::::new_variable( ark_relations::ns!(cs, "domain_h_size"), || Ok(CF::from(obj.domain_h_size as u128)), mode, )?; + let domain_k_size_gadget = FpVar::::new_variable( ark_relations::ns!(cs, "domain_k_size"), || Ok(CF::from(obj.domain_k_size as u128)), @@ -384,8 +382,7 @@ where domain_k_size_gadget, prepared_index_comms, prepared_verifier_key, - fs_rng, - pr: PhantomData, + sponge_var, }) } } diff --git a/src/constraints/snark.rs b/src/constraints/snark.rs index 61bdac5..67724ee 100644 --- a/src/constraints/snark.rs +++ b/src/constraints/snark.rs @@ -1,9 +1,12 @@ -use crate::constraints::{ - 
data_structures::{IndexVerifierKeyVar, PreparedIndexVerifierKeyVar, ProofVar}, - verifier::Marlin as MarlinVerifierGadget, -}; -use crate::fiat_shamir::{constraints::FiatShamirRngVar, AlgebraicSponge, FiatShamirRng}; +use crate::ahp::CryptographicSpongeVarNonNative; use crate::Error::IndexTooLarge; +use crate::{ + ahp::CryptographicSpongeWithDefault, + constraints::{ + data_structures::{IndexVerifierKeyVar, PreparedIndexVerifierKeyVar, ProofVar}, + verifier::Marlin as MarlinVerifierGadget, + }, +}; use crate::{ Box, IndexProverKey, IndexVerifierKey, Marlin, MarlinConfig, PreparedIndexVerifierKey, Proof, String, ToString, UniversalSRS, Vec, @@ -14,14 +17,15 @@ use ark_crypto_primitives::snark::{ }; use ark_ff::{PrimeField, ToConstraintField}; use ark_poly::univariate::DensePolynomial; -use ark_poly_commit::{PCCheckVar, PolynomialCommitment}; +use ark_poly_commit::optional_rng::OptionalRng; +use ark_poly_commit::{LabeledCommitment, PCCheckVar, PolynomialCommitment}; use ark_r1cs_std::{bits::boolean::Boolean, ToConstraintFieldGadget}; use ark_relations::lc; use ark_relations::r1cs::{ ConstraintSynthesizer, ConstraintSystemRef, LinearCombination, SynthesisError, Variable, }; use ark_snark::UniversalSetupSNARK; -use ark_sponge::CryptographicSponge; +use ark_sponge::{Absorb, CryptographicSponge}; use ark_std::cmp::min; use ark_std::fmt::{Debug, Formatter}; use ark_std::marker::PhantomData; @@ -29,6 +33,7 @@ use ark_std::{ rand::{CryptoRng, RngCore}, test_rng, }; +use rand_chacha::ChaChaRng; #[derive(Clone, PartialEq, PartialOrd)] pub struct MarlinBound { @@ -50,29 +55,81 @@ impl Debug for MarlinBound { pub struct MarlinSNARK< F: PrimeField, FSF: PrimeField, - S: CryptographicSponge + AlgebraicSponge, + S: CryptographicSponge, PC: PolynomialCommitment, S>, - FS: FiatShamirRng, MC: MarlinConfig, > { f_phantom: PhantomData, fsf_phantom: PhantomData, s_phantom: PhantomData, pc_phantom: PhantomData, - fs_phantom: PhantomData, mc_phantom: PhantomData, } -impl SNARK for MarlinSNARK +pub struct MarlinError { + pub error_msg: String, +} + +impl From> for MarlinError where - F: PrimeField, - FSF: PrimeField, - S: CryptographicSponge + AlgebraicSponge, + E: ark_std::error::Error, +{ + fn from(e: crate::Error) -> Self { + match e { + IndexTooLarge(v) => Self { + error_msg: format!("index too large, needed degree {}", v), + }, + crate::Error::::AHPError(err) => match err { + crate::ahp::Error::MissingEval(str) => Self { + error_msg: String::from("missing eval: ") + &*str, + }, + crate::ahp::Error::InvalidPublicInputLength => Self { + error_msg: String::from("invalid public input length"), + }, + crate::ahp::Error::InstanceDoesNotMatchIndex => Self { + error_msg: String::from("instance does not match index"), + }, + crate::ahp::Error::NonSquareMatrix => Self { + error_msg: String::from("non-sqaure matrix"), + }, + crate::ahp::Error::ConstraintSystemError(error) => Self { + error_msg: error.to_string(), + }, + }, + crate::Error::::R1CSError(err) => Self { + error_msg: err.to_string(), + }, + crate::Error::::PolynomialCommitmentError(err) => Self { + error_msg: err.to_string(), + }, + } + } +} + +impl Debug for MarlinError { + fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { + write!(f, "{}", self.error_msg) + } +} + +impl core::fmt::Display for MarlinError { + fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { + write!(f, "{}", self.error_msg) + } +} + +impl ark_std::error::Error for MarlinError {} + +impl SNARK for MarlinSNARK +where + F: PrimeField + Absorb, + FSF: PrimeField + Absorb, + 
S: CryptographicSpongeWithDefault, PC: PolynomialCommitment, S>, - FS: FiatShamirRng, MC: MarlinConfig, PC::VerifierKey: ToConstraintField, - PC::Commitment: ToConstraintField, + PC::Commitment: ToConstraintField + Absorb, + LabeledCommitment<, S>>::Commitment>: Absorb, { type ProvingKey = IndexProverKey; type VerifyingKey = IndexVerifierKey; @@ -84,7 +141,7 @@ where circuit: C, rng: &mut R, ) -> Result<(Self::ProvingKey, Self::VerifyingKey), Self::Error> { - Ok(Marlin::::circuit_specific_setup(circuit, rng).unwrap()) + Ok(Marlin::::circuit_specific_setup(circuit, rng).unwrap()) } fn prove, R: RngCore>( @@ -92,14 +149,14 @@ where circuit: C, rng: &mut R, ) -> Result { - match Marlin::::prove(&pk, circuit, rng) { + match Marlin::::prove(&pk, circuit, rng) { Ok(res) => Ok(res), Err(e) => Err(Box::new(MarlinError::from(e))), } } fn verify(vk: &Self::VerifyingKey, x: &[F], proof: &Self::Proof) -> Result { - match Marlin::::verify(vk, x, proof) { + match Marlin::::verify::>(vk, x, proof, None) { Ok(res) => Ok(res), Err(e) => Err(Box::new(MarlinError::from(e))), } @@ -115,23 +172,25 @@ where x: &[F], proof: &Self::Proof, ) -> Result { - match Marlin::::prepared_verify(pvk, x, proof) { + match Marlin::::prepared_verify::>( + pvk, x, proof, None, + ) { Ok(res) => Ok(res), Err(e) => Err(Box::new(MarlinError::from(e))), } } } -impl UniversalSetupSNARK for MarlinSNARK +impl UniversalSetupSNARK for MarlinSNARK where - F: PrimeField, - FSF: PrimeField, - S: CryptographicSponge + AlgebraicSponge, + F: PrimeField + Absorb, + FSF: PrimeField + Absorb, + S: CryptographicSpongeWithDefault, PC: PolynomialCommitment, S>, - FS: FiatShamirRng, MC: MarlinConfig, PC::VerifierKey: ToConstraintField, - PC::Commitment: ToConstraintField, + PC::Commitment: ToConstraintField + Absorb, + LabeledCommitment<, S>>::Commitment>: Absorb, { type ComputationBound = MarlinBound; type PublicParameters = (MarlinBound, UniversalSRS); @@ -142,7 +201,7 @@ where ) -> Result { let Self::ComputationBound { max_degree } = bound; - match Marlin::::universal_setup(1, 1, (max_degree + 5) / 3, rng) { + match Marlin::::universal_setup(1, 1, (max_degree + 5) / 3, rng) { Ok(res) => Ok((bound.clone(), res)), Err(e) => Err(Box::new(MarlinError::from(e))), } @@ -157,7 +216,7 @@ where (Self::ProvingKey, Self::VerifyingKey), UniversalSetupIndexError, > { - let index_res = Marlin::::index(&crs.1, circuit); + let index_res = Marlin::::index(&crs.1, circuit); match index_res { Ok(res) => Ok(res), Err(err) => match err { @@ -172,44 +231,42 @@ where } } -pub struct MarlinSNARKGadget +pub struct MarlinSNARKGadget where F: PrimeField, FSF: PrimeField, - S: CryptographicSponge, + S: CryptographicSpongeWithDefault, + SVN: CryptographicSpongeVarNonNative, PC: PolynomialCommitment, S>, - FS: FiatShamirRng, MC: MarlinConfig, PCG: PCCheckVar, PC, FSF, S>, - FSG: FiatShamirRngVar, { pub f_phantom: PhantomData, pub fsf_phantom: PhantomData, pub s_phantom: PhantomData, + pub svn_phantom: PhantomData, pub pc_phantom: PhantomData, - pub fs_phantom: PhantomData, pub mc_phantom: PhantomData, pub pcg_phantom: PhantomData, - pub fsg_phantom: PhantomData, } -impl SNARKGadget> - for MarlinSNARKGadget +impl SNARKGadget> + for MarlinSNARKGadget where - F: PrimeField, - FSF: PrimeField, - S: CryptographicSponge + AlgebraicSponge, + F: PrimeField + Absorb, + FSF: PrimeField + Absorb, + S: CryptographicSpongeWithDefault, + SVN: CryptographicSpongeVarNonNative, PC: PolynomialCommitment, S>, - FS: FiatShamirRng, MC: MarlinConfig, PCG: PCCheckVar, PC, FSF, S>, - FSG: 
FiatShamirRngVar, PC::VerifierKey: ToConstraintField, - PC::Commitment: ToConstraintField, + PC::Commitment: ToConstraintField + Absorb, PCG::VerifierKeyVar: ToConstraintFieldGadget, PCG::CommitmentVar: ToConstraintFieldGadget, + LabeledCommitment<, S>>::Commitment>: Absorb, { - type ProcessedVerifyingKeyVar = PreparedIndexVerifierKeyVar; + type ProcessedVerifyingKeyVar = PreparedIndexVerifierKeyVar; type VerifyingKeyVar = IndexVerifierKeyVar; type InputVar = NonNativeFieldInputVar; type ProofVar = ProofVar; @@ -217,7 +274,7 @@ where type VerifierSize = usize; fn verifier_size( - circuit_vk: & as SNARK>::VerifyingKey, + circuit_vk: & as SNARK>::VerifyingKey, ) -> Self::VerifierSize { circuit_vk.index_info.num_instance_variables } @@ -245,10 +302,8 @@ where proof: &Self::ProofVar, ) -> Result, SynthesisError> { Ok( - MarlinVerifierGadget::::verify::( - circuit_vk, &x.val, proof, - ) - .unwrap(), + MarlinVerifierGadget::::verify::(circuit_vk, &x.val, proof) + .unwrap(), ) } } @@ -311,80 +366,25 @@ impl ConstraintSynthesizer for MarlinBoundCircuit { } } -impl - UniversalSetupSNARKGadget> - for MarlinSNARKGadget +impl UniversalSetupSNARKGadget> + for MarlinSNARKGadget where - F: PrimeField, - FSF: PrimeField, - S: CryptographicSponge + AlgebraicSponge, + F: PrimeField + Absorb, + FSF: PrimeField + Absorb, + S: CryptographicSpongeWithDefault, + SVN: CryptographicSpongeVarNonNative, PC: PolynomialCommitment, S>, - FS: FiatShamirRng, MC: MarlinConfig, PCG: PCCheckVar, PC, FSF, S>, - FSG: FiatShamirRngVar, PC::VerifierKey: ToConstraintField, - PC::Commitment: ToConstraintField, + PC::Commitment: ToConstraintField + Absorb, PCG::VerifierKeyVar: ToConstraintFieldGadget, PCG::CommitmentVar: ToConstraintFieldGadget, + LabeledCommitment<, S>>::Commitment>: Absorb, { type BoundCircuit = MarlinBoundCircuit; } -pub struct MarlinError { - pub error_msg: String, -} - -impl From> for MarlinError -where - E: ark_std::error::Error, -{ - fn from(e: crate::Error) -> Self { - match e { - IndexTooLarge(v) => Self { - error_msg: format!("index too large, needed degree {}", v), - }, - crate::Error::::AHPError(err) => match err { - crate::ahp::Error::MissingEval(str) => Self { - error_msg: String::from("missing eval: ") + &*str, - }, - crate::ahp::Error::InvalidPublicInputLength => Self { - error_msg: String::from("invalid public input length"), - }, - crate::ahp::Error::InstanceDoesNotMatchIndex => Self { - error_msg: String::from("instance does not match index"), - }, - crate::ahp::Error::NonSquareMatrix => Self { - error_msg: String::from("non-sqaure matrix"), - }, - crate::ahp::Error::ConstraintSystemError(error) => Self { - error_msg: error.to_string(), - }, - }, - crate::Error::::R1CSError(err) => Self { - error_msg: err.to_string(), - }, - crate::Error::::PolynomialCommitmentError(err) => Self { - error_msg: err.to_string(), - }, - } - } -} - -impl Debug for MarlinError { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", self.error_msg) - } -} - -impl core::fmt::Display for MarlinError { - fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", self.error_msg) - } -} - -impl ark_std::error::Error for MarlinError {} - #[cfg(test)] mod test { use crate::MarlinConfig; @@ -406,10 +406,6 @@ mod test { } use crate::constraints::snark::{MarlinSNARK, MarlinSNARKGadget}; - use crate::fiat_shamir::constraints::FiatShamirAlgebraicSpongeRngVar; - use crate::fiat_shamir::poseidon::constraints::PoseidonSpongeVar; - use crate::fiat_shamir::poseidon::PoseidonSponge; - use 
crate::fiat_shamir::FiatShamirAlgebraicSpongeRng; use ark_crypto_primitives::snark::{SNARKGadget, SNARK}; use ark_ec::{CurveCycle, PairingEngine, PairingFriendlyCycle}; use ark_ff::{Field, UniformRand}; diff --git a/src/constraints/verifier.rs b/src/constraints/verifier.rs index 80d8c47..282bc37 100644 --- a/src/constraints/verifier.rs +++ b/src/constraints/verifier.rs @@ -1,16 +1,15 @@ use crate::{ + ahp::{CryptographicSpongeVarNonNative, CryptographicSpongeWithDefault}, constraints::ahp::AHPForR1CS, constraints::data_structures::{IndexVerifierKeyVar, PreparedIndexVerifierKeyVar, ProofVar}, - fiat_shamir::{constraints::FiatShamirRngVar, FiatShamirRng}, Error, PhantomData, PrimeField, String, Vec, }; -use ark_nonnative_field::params::OptimizationType; use ark_nonnative_field::NonNativeFieldVar; use ark_poly::univariate::DensePolynomial; use ark_poly_commit::{PCCheckRandomDataVar, PCCheckVar, PolynomialCommitment}; use ark_r1cs_std::{bits::boolean::Boolean, fields::FieldVar, R1CSVar, ToConstraintFieldGadget}; use ark_relations::ns; -use ark_sponge::CryptographicSponge; +use ark_sponge::{Absorb, CryptographicSponge}; pub struct Marlin< F: PrimeField, @@ -29,8 +28,8 @@ pub struct Marlin< impl Marlin where F: PrimeField, - CF: PrimeField, - S: CryptographicSponge, + CF: PrimeField + Absorb, + S: CryptographicSpongeWithDefault, PC: PolynomialCommitment, S>, PCG: PCCheckVar, PC, CF, S>, PCG::VerifierKeyVar: ToConstraintFieldGadget, @@ -40,8 +39,8 @@ where /// verify with an established hashchain initial state #[tracing::instrument(target = "r1cs", skip(index_pvk, proof))] - pub fn prepared_verify, R: FiatShamirRngVar>( - index_pvk: &PreparedIndexVerifierKeyVar, + pub fn prepared_verify>( + index_pvk: &PreparedIndexVerifierKeyVar, public_input: &[NonNativeFieldVar], proof: &ProofVar, ) -> Result, Error> { @@ -51,30 +50,30 @@ where .or(public_input.cs()) .or(proof.cs.clone()); - let mut fs_rng = index_pvk.fs_rng.clone(); + let mut sponge_var = index_pvk.sponge_var.clone(); eprintln!("before AHP: constraints: {}", cs.num_constraints()); - fs_rng.absorb_nonnative_field_elements(&public_input, OptimizationType::Weight)?; + sponge_var.absorb_nonnative(&public_input)?; - let (_, verifier_state) = AHPForR1CS::::verifier_first_round( + let (_, verifier_state) = AHPForR1CS::::verifier_first_round( index_pvk.domain_h_size, index_pvk.domain_k_size, - &mut fs_rng, + &mut sponge_var, &proof.commitments[0], &proof.prover_messages[0].field_elements, )?; - let (_, verifier_state) = AHPForR1CS::::verifier_second_round( + let (_, verifier_state) = AHPForR1CS::::verifier_second_round( verifier_state, - &mut fs_rng, + &mut sponge_var, &proof.commitments[1], &proof.prover_messages[1].field_elements, )?; - let verifier_state = AHPForR1CS::::verifier_third_round( + let verifier_state = AHPForR1CS::::verifier_third_round( verifier_state, - &mut fs_rng, + &mut sponge_var, &proof.commitments[2], &proof.prover_messages[2].field_elements, )?; @@ -84,7 +83,7 @@ where formatted_public_input.push(elem); } - let lc = AHPForR1CS::::verifier_decision( + let lc = AHPForR1CS::::verifier_decision( ns!(cs, "ahp").cs(), &formatted_public_input, &proof.evaluations, @@ -92,8 +91,8 @@ where &index_pvk.domain_k_size_gadget, )?; - let (num_opening_challenges, num_batching_rands, comm, query_set, evaluations) = - AHPForR1CS::::verifier_comm_query_eval_set( + let (num_opening_challenges, _num_batching_rands, comm, query_set, evaluations) = + AHPForR1CS::::verifier_comm_query_eval_set( &index_pvk, &proof, &verifier_state, @@ -112,12 +111,13 
@@ where } } - fs_rng.absorb_nonnative_field_elements(&evals_vec, OptimizationType::Weight)?; + sponge_var.absorb_nonnative(&evals_vec)?; let (opening_challenges, opening_challenges_bits) = - fs_rng.squeeze_128_bits_field_elements_and_bits(num_opening_challenges)?; + sponge_var.squeeze_nonnative_field_elements(num_opening_challenges)?; + let (batching_rands, batching_rands_bits) = - fs_rng.squeeze_128_bits_field_elements_and_bits(num_batching_rands)?; + sponge_var.squeeze_nonnative_field_elements(num_opening_challenges)?; eprintln!("before PC checks: constraints: {}", cs.num_constraints()); @@ -141,13 +141,12 @@ where } #[tracing::instrument(target = "r1cs", skip(index_vk, proof))] - pub fn verify, R: FiatShamirRngVar>( + pub fn verify>( index_vk: &IndexVerifierKeyVar, public_input: &[NonNativeFieldVar], proof: &ProofVar, ) -> Result, Error> { - let index_pvk = - PreparedIndexVerifierKeyVar::::prepare(&index_vk)?; + let index_pvk = PreparedIndexVerifierKeyVar::::prepare(&index_vk)?; Self::prepared_verify(&index_pvk, public_input, proof) } } diff --git a/src/constraints/verifier_test.rs b/src/constraints/verifier_test.rs index 9af0646..6face2d 100644 --- a/src/constraints/verifier_test.rs +++ b/src/constraints/verifier_test.rs @@ -6,10 +6,6 @@ mod tests { data_structures::{IndexVerifierKeyVar, ProofVar, ProverMsgVar}, verifier::Marlin, }, - fiat_shamir::{ - constraints::FiatShamirAlgebraicSpongeRngVar, poseidon::constraints::PoseidonSpongeVar, - poseidon::PoseidonSponge, FiatShamirAlgebraicSpongeRng, - }, Marlin as MarlinNative, MarlinRecursiveConfig, Proof, }; use ark_ec::{CurveCycle, PairingEngine, PairingFriendlyCycle}; diff --git a/src/fiat_shamir/constraints.rs b/src/fiat_shamir/constraints.rs deleted file mode 100644 index 4b60da8..0000000 --- a/src/fiat_shamir/constraints.rs +++ /dev/null @@ -1,460 +0,0 @@ -use crate::fiat_shamir::{AlgebraicSponge, FiatShamirAlgebraicSpongeRng, FiatShamirRng}; -use crate::{overhead, Vec}; -use ark_ff::PrimeField; -use ark_nonnative_field::params::{get_params, OptimizationType}; -use ark_nonnative_field::{AllocatedNonNativeFieldVar, NonNativeFieldVar}; -use ark_r1cs_std::{ - alloc::AllocVar, - bits::{uint8::UInt8, ToBitsGadget}, - boolean::Boolean, - fields::fp::AllocatedFp, - fields::fp::FpVar, - R1CSVar, -}; -use ark_relations::lc; -use ark_relations::r1cs::{ - ConstraintSystemRef, LinearCombination, OptimizationGoal, SynthesisError, -}; -use core::marker::PhantomData; - -/// Vars for a RNG for use in a Fiat-Shamir transform. -pub trait FiatShamirRngVar>: - Clone -{ - /// Create a new RNG. - fn new(cs: ConstraintSystemRef) -> Self; - - // Instantiate from a plaintext fs_rng. - fn constant(cs: ConstraintSystemRef, pfs: &PFS) -> Self; - - /// Take in field elements. - fn absorb_nonnative_field_elements( - &mut self, - elems: &[NonNativeFieldVar], - ty: OptimizationType, - ) -> Result<(), SynthesisError>; - - /// Take in field elements. - fn absorb_native_field_elements(&mut self, elems: &[FpVar]) -> Result<(), SynthesisError>; - - /// Take in bytes. - fn absorb_bytes(&mut self, elems: &[UInt8]) -> Result<(), SynthesisError>; - - /// Output field elements. - fn squeeze_native_field_elements( - &mut self, - num: usize, - ) -> Result>, SynthesisError>; - - /// Output field elements. - fn squeeze_field_elements( - &mut self, - num: usize, - ) -> Result>, SynthesisError>; - - /// Output field elements and the corresponding bits (this can reduce repeated computation). 
- #[allow(clippy::type_complexity)] - fn squeeze_field_elements_and_bits( - &mut self, - num: usize, - ) -> Result<(Vec>, Vec>>), SynthesisError>; - - /// Output field elements with only 128 bits. - fn squeeze_128_bits_field_elements( - &mut self, - num: usize, - ) -> Result>, SynthesisError>; - - /// Output field elements with only 128 bits, and the corresponding bits (this can reduce - /// repeated computation). - #[allow(clippy::type_complexity)] - fn squeeze_128_bits_field_elements_and_bits( - &mut self, - num: usize, - ) -> Result<(Vec>, Vec>>), SynthesisError>; -} - -/// Trait for an algebraic sponge such as Poseidon. -pub trait AlgebraicSpongeVar>: Clone { - /// Create the new sponge. - fn new(cs: ConstraintSystemRef) -> Self; - - /// Instantiate from a plaintext sponge. - fn constant(cs: ConstraintSystemRef, ps: &PS) -> Self; - - /// Obtain the constraint system. - fn cs(&self) -> ConstraintSystemRef; - - /// Take in field elements. - fn absorb(&mut self, elems: &[FpVar]) -> Result<(), SynthesisError>; - - /// Output field elements. - fn squeeze(&mut self, num: usize) -> Result>, SynthesisError>; -} - -/// Building the Fiat-Shamir sponge's gadget from any algebraic sponge's gadget. -#[derive(Clone)] -pub struct FiatShamirAlgebraicSpongeRngVar< - F: PrimeField, - CF: PrimeField, - PS: AlgebraicSponge, - S: AlgebraicSpongeVar, -> { - pub cs: ConstraintSystemRef, - pub s: S, - #[doc(hidden)] - f_phantom: PhantomData, - cf_phantom: PhantomData, - ps_phantom: PhantomData, -} - -impl, S: AlgebraicSpongeVar> - FiatShamirAlgebraicSpongeRngVar -{ - /// Compress every two elements if possible. Provides a vector of (limb, num_of_additions), - /// both of which are CF. - #[tracing::instrument(target = "r1cs")] - pub fn compress_gadgets( - src_limbs: &[(FpVar, CF)], - ty: OptimizationType, - ) -> Result>, SynthesisError> { - let capacity = CF::size_in_bits() - 1; - let mut dest_limbs = Vec::>::new(); - - if src_limbs.is_empty() { - return Ok(vec![]); - } - - let params = get_params(F::size_in_bits(), CF::size_in_bits(), ty); - - let adjustment_factor_lookup_table = { - let mut table = Vec::::new(); - - let mut cur = CF::one(); - for _ in 1..=capacity { - table.push(cur); - cur.double_in_place(); - } - - table - }; - - let mut i: usize = 0; - let src_len = src_limbs.len(); - while i < src_len { - let first = &src_limbs[i]; - let second = if i + 1 < src_len { - Some(&src_limbs[i + 1]) - } else { - None - }; - - let first_max_bits_per_limb = params.bits_per_limb + overhead!(first.1 + &CF::one()); - let second_max_bits_per_limb = if second.is_some() { - params.bits_per_limb + overhead!(second.unwrap().1 + &CF::one()) - } else { - 0 - }; - - if second.is_some() && first_max_bits_per_limb + second_max_bits_per_limb <= capacity { - let adjustment_factor = &adjustment_factor_lookup_table[second_max_bits_per_limb]; - - dest_limbs.push(&first.0 * *adjustment_factor + &second.unwrap().0); - i += 2; - } else { - dest_limbs.push(first.0.clone()); - i += 1; - } - } - - Ok(dest_limbs) - } - - /// Push gadgets to sponge. 
- #[tracing::instrument(target = "r1cs", skip(sponge))] - pub fn push_gadgets_to_sponge( - sponge: &mut S, - src: &[NonNativeFieldVar], - ty: OptimizationType, - ) -> Result<(), SynthesisError> { - let mut src_limbs: Vec<(FpVar, CF)> = Vec::new(); - - for elem in src.iter() { - match elem { - NonNativeFieldVar::Constant(c) => { - let v = AllocatedNonNativeFieldVar::::new_constant(sponge.cs(), c)?; - - for limb in v.limbs.iter() { - let num_of_additions_over_normal_form = - if v.num_of_additions_over_normal_form == CF::zero() { - CF::one() - } else { - v.num_of_additions_over_normal_form - }; - src_limbs.push((limb.clone(), num_of_additions_over_normal_form)); - } - } - NonNativeFieldVar::Var(v) => { - for limb in v.limbs.iter() { - let num_of_additions_over_normal_form = - if v.num_of_additions_over_normal_form == CF::zero() { - CF::one() - } else { - v.num_of_additions_over_normal_form - }; - src_limbs.push((limb.clone(), num_of_additions_over_normal_form)); - } - } - } - } - - let dest_limbs = Self::compress_gadgets(&src_limbs, ty)?; - sponge.absorb(&dest_limbs)?; - Ok(()) - } - - /// Obtain random bits from hashchain gadget. (Not guaranteed to be uniformly distributed, - /// should only be used in certain situations.) - #[tracing::instrument(target = "r1cs", skip(sponge))] - pub fn get_booleans_from_sponge( - sponge: &mut S, - num_bits: usize, - ) -> Result>, SynthesisError> { - let bits_per_element = CF::size_in_bits() - 1; - let num_elements = (num_bits + bits_per_element - 1) / bits_per_element; - - let src_elements = sponge.squeeze(num_elements)?; - let mut dest_bits = Vec::>::new(); - - for elem in src_elements.iter() { - let elem_bits = elem.to_bits_be()?; - dest_bits.extend_from_slice(&elem_bits[1..]); // discard the highest bit - } - - Ok(dest_bits) - } - - /// Obtain random elements from hashchain gadget. (Not guaranteed to be uniformly distributed, - /// should only be used in certain situations.) - #[tracing::instrument(target = "r1cs", skip(sponge))] - pub fn get_gadgets_from_sponge( - sponge: &mut S, - num_elements: usize, - outputs_short_elements: bool, - ) -> Result>, SynthesisError> { - let (dest_gadgets, _) = - Self::get_gadgets_and_bits_from_sponge(sponge, num_elements, outputs_short_elements)?; - - Ok(dest_gadgets) - } - - /// Obtain random elements, and the corresponding bits, from hashchain gadget. (Not guaranteed - /// to be uniformly distributed, should only be used in certain situations.) 
- #[tracing::instrument(target = "r1cs", skip(sponge))] - #[allow(clippy::type_complexity)] - pub fn get_gadgets_and_bits_from_sponge( - sponge: &mut S, - num_elements: usize, - outputs_short_elements: bool, - ) -> Result<(Vec>, Vec>>), SynthesisError> { - let cs = sponge.cs(); - - let optimization_type = match cs.optimization_goal() { - OptimizationGoal::None => OptimizationType::Constraints, - OptimizationGoal::Constraints => OptimizationType::Constraints, - OptimizationGoal::Weight => OptimizationType::Weight, - }; - - let params = get_params(F::size_in_bits(), CF::size_in_bits(), optimization_type); - - let num_bits_per_nonnative = if outputs_short_elements { - 128 - } else { - F::size_in_bits() - 1 // also omit the highest bit - }; - let bits = Self::get_booleans_from_sponge(sponge, num_bits_per_nonnative * num_elements)?; - - let mut lookup_table = Vec::>::new(); - let mut cur = F::one(); - for _ in 0..num_bits_per_nonnative { - let repr = AllocatedNonNativeFieldVar::::get_limbs_representations( - &cur, - optimization_type, - )?; - lookup_table.push(repr); - cur.double_in_place(); - } - - let mut dest_gadgets = Vec::>::new(); - let mut dest_bits = Vec::>>::new(); - bits.chunks_exact(num_bits_per_nonnative) - .for_each(|per_nonnative_bits| { - let mut val = vec![CF::zero(); params.num_limbs]; - let mut lc = vec![LinearCombination::::zero(); params.num_limbs]; - - let mut per_nonnative_bits_le = per_nonnative_bits.to_vec(); - per_nonnative_bits_le.reverse(); - - dest_bits.push(per_nonnative_bits_le.clone()); - - for (j, bit) in per_nonnative_bits_le.iter().enumerate() { - if bit.value().unwrap_or_default() { - for (k, val) in val.iter_mut().enumerate().take(params.num_limbs) { - *val += &lookup_table[j][k]; - } - } - - #[allow(clippy::needless_range_loop)] - for k in 0..params.num_limbs { - lc[k] = &lc[k] + bit.lc() * lookup_table[j][k]; - } - } - - let mut limbs = Vec::new(); - for k in 0..params.num_limbs { - let gadget = - AllocatedFp::new_witness(ark_relations::ns!(cs, "alloc"), || Ok(val[k])) - .unwrap(); - lc[k] = lc[k].clone() - (CF::one(), gadget.variable); - cs.enforce_constraint(lc!(), lc!(), lc[k].clone()).unwrap(); - limbs.push(FpVar::::from(gadget)); - } - - dest_gadgets.push(NonNativeFieldVar::::Var( - AllocatedNonNativeFieldVar:: { - cs: cs.clone(), - limbs, - num_of_additions_over_normal_form: CF::zero(), - is_in_the_normal_form: true, - target_phantom: Default::default(), - }, - )); - }); - - Ok((dest_gadgets, dest_bits)) - } -} - -impl, S: AlgebraicSpongeVar> - FiatShamirRngVar> - for FiatShamirAlgebraicSpongeRngVar -{ - fn new(cs: ConstraintSystemRef) -> Self { - Self { - cs: cs.clone(), - s: S::new(cs), - f_phantom: PhantomData, - cf_phantom: PhantomData, - ps_phantom: PhantomData, - } - } - - fn constant( - cs: ConstraintSystemRef, - pfs: &FiatShamirAlgebraicSpongeRng, - ) -> Self { - Self { - cs: cs.clone(), - s: S::constant(cs, &pfs.s.clone()), - f_phantom: PhantomData, - cf_phantom: PhantomData, - ps_phantom: PhantomData, - } - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn absorb_nonnative_field_elements( - &mut self, - elems: &[NonNativeFieldVar], - ty: OptimizationType, - ) -> Result<(), SynthesisError> { - Self::push_gadgets_to_sponge(&mut self.s, &elems.to_vec(), ty) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn absorb_native_field_elements(&mut self, elems: &[FpVar]) -> Result<(), SynthesisError> { - self.s.absorb(elems)?; - Ok(()) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn absorb_bytes(&mut self, 
elems: &[UInt8]) -> Result<(), SynthesisError> { - let capacity = CF::size_in_bits() - 1; - let mut bits = Vec::>::new(); - for elem in elems.iter() { - let mut bits_le = elem.to_bits_le()?; // UInt8's to_bits is le, which is an exception in Zexe. - bits_le.reverse(); - bits.extend_from_slice(&bits_le); - } - - let mut adjustment_factors = Vec::::new(); - let mut cur = CF::one(); - for _ in 0..capacity { - adjustment_factors.push(cur); - cur.double_in_place(); - } - - let mut gadgets = Vec::>::new(); - for elem_bits in bits.chunks(capacity) { - let mut elem = CF::zero(); - let mut lc = LinearCombination::zero(); - for (bit, adjustment_factor) in elem_bits.iter().rev().zip(adjustment_factors.iter()) { - if bit.value().unwrap_or_default() { - elem += adjustment_factor; - } - lc = &lc + bit.lc() * *adjustment_factor; - } - - let gadget = - AllocatedFp::new_witness(ark_relations::ns!(self.cs, "gadget"), || Ok(elem))?; - lc = lc.clone() - (CF::one(), gadget.variable); - - gadgets.push(FpVar::from(gadget)); - self.cs.enforce_constraint(lc!(), lc!(), lc)?; - } - - self.s.absorb(&gadgets) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn squeeze_native_field_elements( - &mut self, - num: usize, - ) -> Result>, SynthesisError> { - self.s.squeeze(num) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn squeeze_field_elements( - &mut self, - num: usize, - ) -> Result>, SynthesisError> { - Self::get_gadgets_from_sponge(&mut self.s, num, false) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - #[allow(clippy::type_complexity)] - fn squeeze_field_elements_and_bits( - &mut self, - num: usize, - ) -> Result<(Vec>, Vec>>), SynthesisError> { - Self::get_gadgets_and_bits_from_sponge(&mut self.s, num, false) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn squeeze_128_bits_field_elements( - &mut self, - num: usize, - ) -> Result>, SynthesisError> { - Self::get_gadgets_from_sponge(&mut self.s, num, true) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - #[allow(clippy::type_complexity)] - fn squeeze_128_bits_field_elements_and_bits( - &mut self, - num: usize, - ) -> Result<(Vec>, Vec>>), SynthesisError> { - Self::get_gadgets_and_bits_from_sponge(&mut self.s, num, true) - } -} diff --git a/src/fiat_shamir/mod.rs b/src/fiat_shamir/mod.rs deleted file mode 100644 index 96df382..0000000 --- a/src/fiat_shamir/mod.rs +++ /dev/null @@ -1,454 +0,0 @@ -use crate::Vec; -use ark_ff::{BigInteger, FpParameters, PrimeField, ToConstraintField}; -use ark_nonnative_field::params::{get_params, OptimizationType}; -use ark_nonnative_field::AllocatedNonNativeFieldVar; -use ark_std::marker::PhantomData; -use ark_std::rand::{RngCore, SeedableRng}; -use digest::Digest; -use rand_chacha::ChaChaRng; - -/// The constraints for Fiat-Shamir -pub mod constraints; - -/// The Poseidon sponge -pub mod poseidon; - -/// a macro for computing ceil(log2(x))+1 for a field element x -#[doc(hidden)] -#[macro_export] -macro_rules! 
overhead { - ($x:expr) => {{ - use ark_ff::BigInteger; - let num = $x; - let num_bits = num.into_repr().to_bits_be(); - let mut skipped_bits = 0; - for b in num_bits.iter() { - if *b == false { - skipped_bits += 1; - } else { - break; - } - } - - let mut is_power_of_2 = true; - for b in num_bits.iter().skip(skipped_bits + 1) { - if *b == true { - is_power_of_2 = false; - } - } - - if is_power_of_2 { - num_bits.len() - skipped_bits - } else { - num_bits.len() - skipped_bits + 1 - } - }}; -} - -/// the trait for Fiat-Shamir RNG -pub trait FiatShamirRng: RngCore { - /// initialize the RNG - fn new() -> Self; - - /// take in field elements - fn absorb_nonnative_field_elements(&mut self, elems: &[F], ty: OptimizationType); - /// take in field elements - fn absorb_native_field_elements>(&mut self, elems: &[T]); - /// take in bytes - fn absorb_bytes(&mut self, elems: &[u8]); - - /// take out field elements - fn squeeze_nonnative_field_elements(&mut self, num: usize, ty: OptimizationType) -> Vec; - /// take in field elements - fn squeeze_native_field_elements(&mut self, num: usize) -> Vec; - /// take out field elements of 128 bits - fn squeeze_128_bits_nonnative_field_elements(&mut self, num: usize) -> Vec; -} - -/// the trait for algebraic sponge -pub trait AlgebraicSponge: Clone { - /// initialize the sponge - fn new() -> Self; - /// take in field elements - fn absorb(&mut self, elems: &[CF]); - /// take out field elements - fn squeeze(&mut self, num: usize) -> Vec; -} - -/// use a ChaCha stream cipher to generate the actual pseudorandom bits -/// use a digest funcion to do absorbing -pub struct FiatShamirChaChaRng { - pub r: ChaChaRng, - pub seed: Vec, - #[doc(hidden)] - field: PhantomData, - representation_field: PhantomData, - digest: PhantomData, -} - -impl RngCore for FiatShamirChaChaRng { - fn next_u32(&mut self) -> u32 { - self.r.next_u32() - } - - fn next_u64(&mut self) -> u64 { - self.r.next_u64() - } - - fn fill_bytes(&mut self, dest: &mut [u8]) { - self.r.fill_bytes(dest) - } - - fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), ark_std::rand::Error> { - self.r.try_fill_bytes(dest) - } -} - -impl FiatShamirRng - for FiatShamirChaChaRng -{ - fn new() -> Self { - let seed = [0; 32]; - let r = ChaChaRng::from_seed(seed); - Self { - r, - seed: seed.to_vec(), - field: PhantomData, - representation_field: PhantomData, - digest: PhantomData, - } - } - - fn absorb_nonnative_field_elements(&mut self, elems: &[F], _: OptimizationType) { - let mut bytes = Vec::new(); - for elem in elems { - elem.write(&mut bytes).expect("failed to convert to bytes"); - } - self.absorb_bytes(&bytes); - } - - fn absorb_native_field_elements>(&mut self, src: &[T]) { - let mut elems = Vec::::new(); - for elem in src.iter() { - elems.append(&mut elem.to_field_elements().unwrap()); - } - - let mut bytes = Vec::new(); - for elem in elems.iter() { - elem.write(&mut bytes).expect("failed to convert to bytes"); - } - self.absorb_bytes(&bytes); - } - - fn absorb_bytes(&mut self, elems: &[u8]) { - let mut bytes = elems.to_vec(); - bytes.extend_from_slice(&self.seed); - - let new_seed = D::digest(&bytes); - self.seed = (*new_seed.as_slice()).to_vec(); - - let mut seed = [0u8; 32]; - for (i, byte) in self.seed.as_slice().iter().enumerate() { - seed[i] = *byte; - } - - self.r = ChaChaRng::from_seed(seed); - } - - fn squeeze_nonnative_field_elements(&mut self, num: usize, _: OptimizationType) -> Vec { - let mut res = Vec::::new(); - for _ in 0..num { - res.push(F::rand(&mut self.r)); - } - res - } - - fn 
squeeze_native_field_elements(&mut self, num: usize) -> Vec { - let mut res = Vec::::new(); - for _ in 0..num { - res.push(CF::rand(&mut self.r)); - } - res - } - - fn squeeze_128_bits_nonnative_field_elements(&mut self, num: usize) -> Vec { - let mut res = Vec::::new(); - for _ in 0..num { - let mut x = [0u8; 16]; - self.r.fill_bytes(&mut x); - res.push(F::from_random_bytes(&x).unwrap()); - } - res - } -} - -/// rng from any algebraic sponge -pub struct FiatShamirAlgebraicSpongeRng> { - pub s: S, - #[doc(hidden)] - f_phantom: PhantomData, - cf_phantom: PhantomData, -} - -impl> FiatShamirAlgebraicSpongeRng { - /// compress every two elements if possible. Provides a vector of (limb, num_of_additions), both of which are P::BaseField. - pub fn compress_elements(src_limbs: &[(CF, CF)], ty: OptimizationType) -> Vec { - let capacity = CF::size_in_bits() - 1; - let mut dest_limbs = Vec::::new(); - - let params = get_params(F::size_in_bits(), CF::size_in_bits(), ty); - - let adjustment_factor_lookup_table = { - let mut table = Vec::::new(); - - let mut cur = CF::one(); - for _ in 1..=capacity { - table.push(cur); - cur.double_in_place(); - } - - table - }; - - let mut i = 0; - let src_len = src_limbs.len(); - while i < src_len { - let first = &src_limbs[i]; - let second = if i + 1 < src_len { - Some(&src_limbs[i + 1]) - } else { - None - }; - - let first_max_bits_per_limb = params.bits_per_limb + overhead!(first.1 + &CF::one()); - let second_max_bits_per_limb = if let Some(second) = second { - params.bits_per_limb + overhead!(second.1 + &CF::one()) - } else { - 0 - }; - - if let Some(second) = second { - if first_max_bits_per_limb + second_max_bits_per_limb <= capacity { - let adjustment_factor = - &adjustment_factor_lookup_table[second_max_bits_per_limb]; - - dest_limbs.push(first.0 * adjustment_factor + &second.0); - i += 2; - } else { - dest_limbs.push(first.0); - i += 1; - } - } else { - dest_limbs.push(first.0); - i += 1; - } - } - - dest_limbs - } - - /// push elements to sponge, treated in the non-native field representations. - pub fn push_elements_to_sponge(sponge: &mut S, src: &[F], ty: OptimizationType) { - let mut src_limbs = Vec::<(CF, CF)>::new(); - - for elem in src.iter() { - let limbs = - AllocatedNonNativeFieldVar::::get_limbs_representations(elem, ty).unwrap(); - for limb in limbs.iter() { - src_limbs.push((*limb, CF::one())); - // specifically set to one, since most gadgets in the constraint world would not have zero noise (due to the relatively weak normal form testing in `alloc`) - } - } - - let dest_limbs = Self::compress_elements(&src_limbs, ty); - sponge.absorb(&dest_limbs); - } - - /// obtain random bits from hashchain. - /// not guaranteed to be uniformly distributed, should only be used in certain situations. - pub fn get_bits_from_sponge(sponge: &mut S, num_bits: usize) -> Vec { - let bits_per_element = CF::size_in_bits() - 1; - let num_elements = (num_bits + bits_per_element - 1) / bits_per_element; - - let src_elements = sponge.squeeze(num_elements); - let mut dest_bits = Vec::::new(); - - let skip = (CF::Params::REPR_SHAVE_BITS + 1) as usize; - for elem in src_elements.iter() { - // discard the highest bit - let elem_bits = elem.into_repr().to_bits_be(); - dest_bits.extend_from_slice(&elem_bits[skip..]); - } - - dest_bits - } - - /// obtain random elements from hashchain. - /// not guaranteed to be uniformly distributed, should only be used in certain situations. 
- pub fn get_elements_from_sponge( - sponge: &mut S, - num_elements: usize, - outputs_short_elements: bool, - ) -> Vec { - let num_bits_per_nonnative = if outputs_short_elements { - 128 - } else { - F::size_in_bits() - 1 // also omit the highest bit - }; - let bits = Self::get_bits_from_sponge(sponge, num_bits_per_nonnative * num_elements); - - let mut lookup_table = Vec::::new(); - let mut cur = F::one(); - for _ in 0..num_bits_per_nonnative { - lookup_table.push(cur); - cur.double_in_place(); - } - - let mut dest_elements = Vec::::new(); - bits.chunks_exact(num_bits_per_nonnative) - .for_each(|per_nonnative_bits| { - // technically, this can be done via BigInterger::from_bits; here, we use this method for consistency with the gadget counterpart - let mut res = F::zero(); - - for (i, bit) in per_nonnative_bits.iter().rev().enumerate() { - if *bit { - res += &lookup_table[i]; - } - } - - dest_elements.push(res); - }); - - dest_elements - } -} - -impl> RngCore - for FiatShamirAlgebraicSpongeRng -{ - fn next_u32(&mut self) -> u32 { - assert!( - CF::size_in_bits() > 128, - "The native field of the algebraic sponge is too small." - ); - - let mut dest = [0u8; 4]; - self.fill_bytes(&mut dest); - - u32::from_be_bytes(dest) - } - - fn next_u64(&mut self) -> u64 { - assert!( - CF::size_in_bits() > 128, - "The native field of the algebraic sponge is too small." - ); - - let mut dest = [0u8; 8]; - self.fill_bytes(&mut dest); - - u64::from_be_bytes(dest) - } - - fn fill_bytes(&mut self, dest: &mut [u8]) { - assert!( - CF::size_in_bits() > 128, - "The native field of the algebraic sponge is too small." - ); - - let capacity = CF::size_in_bits() - 128; - let len = dest.len() * 8; - - let num_of_elements = (capacity + len - 1) / len; - let elements = self.s.squeeze(num_of_elements); - - let mut bits = Vec::::new(); - for elem in elements.iter() { - let mut elem_bits = elem.into_repr().to_bits_be(); - elem_bits.reverse(); - bits.extend_from_slice(&elem_bits[0..capacity]); - } - - bits.truncate(len); - bits.chunks_exact(8) - .enumerate() - .for_each(|(i, bits_per_byte)| { - let mut byte = 0; - for (j, bit) in bits_per_byte.iter().enumerate() { - if *bit { - byte += 1 << j; - } - } - dest[i] = byte; - }); - } - - fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), ark_std::rand::Error> { - assert!( - CF::size_in_bits() > 128, - "The native field of the algebraic sponge is too small." 
- ); - - self.fill_bytes(dest); - Ok(()) - } -} - -impl> FiatShamirRng - for FiatShamirAlgebraicSpongeRng -{ - fn new() -> Self { - Self { - s: S::new(), - f_phantom: PhantomData, - cf_phantom: PhantomData, - } - } - - fn absorb_nonnative_field_elements(&mut self, elems: &[F], ty: OptimizationType) { - Self::push_elements_to_sponge(&mut self.s, elems, ty); - } - - fn absorb_native_field_elements>(&mut self, src: &[T]) { - let mut elems = Vec::::new(); - for elem in src.iter() { - elems.append(&mut elem.to_field_elements().unwrap()); - } - self.s.absorb(&elems); - } - - fn absorb_bytes(&mut self, elems: &[u8]) { - let capacity = CF::size_in_bits() - 1; - let mut bits = Vec::::new(); - for elem in elems.iter() { - bits.append(&mut vec![ - elem & 128 != 0, - elem & 64 != 0, - elem & 32 != 0, - elem & 16 != 0, - elem & 8 != 0, - elem & 4 != 0, - elem & 2 != 0, - elem & 1 != 0, - ]); - } - let elements = bits - .chunks(capacity) - .map(|bits| CF::from_repr(CF::BigInt::from_bits_be(bits)).unwrap()) - .collect::>(); - - self.s.absorb(&elements); - } - - fn squeeze_nonnative_field_elements(&mut self, num: usize, _: OptimizationType) -> Vec { - Self::get_elements_from_sponge(&mut self.s, num, false) - } - - fn squeeze_native_field_elements(&mut self, num: usize) -> Vec { - self.s.squeeze(num) - } - - fn squeeze_128_bits_nonnative_field_elements(&mut self, num: usize) -> Vec { - Self::get_elements_from_sponge(&mut self.s, num, true) - } -} diff --git a/src/fiat_shamir/poseidon/constraints.rs b/src/fiat_shamir/poseidon/constraints.rs deleted file mode 100644 index d3bf8d1..0000000 --- a/src/fiat_shamir/poseidon/constraints.rs +++ /dev/null @@ -1,298 +0,0 @@ -/* - * credit: - * This implementation of Poseidon is entirely from Fractal's implementation - * ([COS20]: https://eprint.iacr.org/2019/1076) - * with small syntax changes. - */ - -use crate::fiat_shamir::constraints::AlgebraicSpongeVar; -use crate::fiat_shamir::poseidon::{PoseidonSponge, PoseidonSpongeState}; -use crate::Vec; -use ark_ff::PrimeField; -use ark_r1cs_std::fields::fp::FpVar; -use ark_r1cs_std::prelude::*; -use ark_relations::r1cs::{ConstraintSystemRef, SynthesisError}; -use ark_std::rand::SeedableRng; - -#[derive(Clone)] -/// the gadget for Poseidon sponge -pub struct PoseidonSpongeVar { - /// constraint system - pub cs: ConstraintSystemRef, - /// number of rounds in a full-round operation - pub full_rounds: u32, - /// number of rounds in a partial-round operation - pub partial_rounds: u32, - /// Exponent used in S-boxes - pub alpha: u64, - /// Additive Round keys. These are added before each MDS matrix application to make it an affine shift. - /// They are indexed by ark[round_num][state_element_index] - pub ark: Vec>, - /// Maximally Distance Separating Matrix. 
- pub mds: Vec>, - - /// the sponge's state - pub state: Vec>, - /// the rate - pub rate: usize, - /// the capacity - pub capacity: usize, - /// the mode - mode: PoseidonSpongeState, -} - -impl PoseidonSpongeVar { - #[tracing::instrument(target = "r1cs", skip(self))] - fn apply_s_box( - &self, - state: &mut [FpVar], - is_full_round: bool, - ) -> Result<(), SynthesisError> { - // Full rounds apply the S Box (x^alpha) to every element of state - if is_full_round { - for state_item in state.iter_mut() { - *state_item = state_item.pow_by_constant(&[self.alpha])?; - } - } - // Partial rounds apply the S Box (x^alpha) to just the final element of state - else { - state[state.len() - 1] = state[state.len() - 1].pow_by_constant(&[self.alpha])?; - } - - Ok(()) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn apply_ark(&self, state: &mut [FpVar], round_number: usize) -> Result<(), SynthesisError> { - for (i, state_elem) in state.iter_mut().enumerate() { - *state_elem += self.ark[round_number][i]; - } - Ok(()) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn apply_mds(&self, state: &mut [FpVar]) -> Result<(), SynthesisError> { - let mut new_state = Vec::new(); - let zero = FpVar::::zero(); - for i in 0..state.len() { - let mut cur = zero.clone(); - for (j, state_elem) in state.iter().enumerate() { - let term = state_elem * self.mds[i][j]; - cur += &term; - } - new_state.push(cur); - } - state.clone_from_slice(&new_state[..state.len()]); - Ok(()) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn permute(&mut self) -> Result<(), SynthesisError> { - let full_rounds_over_2 = self.full_rounds / 2; - let mut state = self.state.clone(); - for i in 0..full_rounds_over_2 { - self.apply_ark(&mut state, i as usize)?; - self.apply_s_box(&mut state, true)?; - self.apply_mds(&mut state)?; - } - for i in full_rounds_over_2..(full_rounds_over_2 + self.partial_rounds) { - self.apply_ark(&mut state, i as usize)?; - self.apply_s_box(&mut state, false)?; - self.apply_mds(&mut state)?; - } - - for i in - (full_rounds_over_2 + self.partial_rounds)..(self.partial_rounds + self.full_rounds) - { - self.apply_ark(&mut state, i as usize)?; - self.apply_s_box(&mut state, true)?; - self.apply_mds(&mut state)?; - } - - self.state = state; - Ok(()) - } - - #[tracing::instrument(target = "r1cs", skip(self))] - fn absorb_internal( - &mut self, - rate_start_index: usize, - elements: &[FpVar], - ) -> Result<(), SynthesisError> { - // if we can finish in this call - if rate_start_index + elements.len() <= self.rate { - for (i, element) in elements.iter().enumerate() { - self.state[i + rate_start_index] += element; - } - self.mode = PoseidonSpongeState::Absorbing { - next_absorb_index: rate_start_index + elements.len(), - }; - - return Ok(()); - } - // otherwise absorb (rate - rate_start_index) elements - let num_elements_absorbed = self.rate - rate_start_index; - for (i, element) in elements.iter().enumerate().take(num_elements_absorbed) { - self.state[i + rate_start_index] += element; - } - self.permute()?; - // Tail recurse, with the input elements being truncated by num elements absorbed - self.absorb_internal(0, &elements[num_elements_absorbed..]) - } - - // Squeeze |output| many elements. 
This does not end in a squeeze - #[tracing::instrument(target = "r1cs", skip(self))] - fn squeeze_internal( - &mut self, - rate_start_index: usize, - output: &mut [FpVar], - ) -> Result<(), SynthesisError> { - // if we can finish in this call - if rate_start_index + output.len() <= self.rate { - output - .clone_from_slice(&self.state[rate_start_index..(output.len() + rate_start_index)]); - self.mode = PoseidonSpongeState::Squeezing { - next_squeeze_index: rate_start_index + output.len(), - }; - return Ok(()); - } - // otherwise squeeze (rate - rate_start_index) elements - let num_elements_squeezed = self.rate - rate_start_index; - output[..num_elements_squeezed].clone_from_slice( - &self.state[rate_start_index..(num_elements_squeezed + rate_start_index)], - ); - - // Unless we are done with squeezing in this call, permute. - if output.len() != self.rate { - self.permute()?; - } - // Tail recurse, with the correct change to indices in output happening due to changing the slice - self.squeeze_internal(0, &mut output[num_elements_squeezed..]) - } -} - -impl AlgebraicSpongeVar> for PoseidonSpongeVar { - fn new(cs: ConstraintSystemRef) -> Self { - // Requires F to be Alt_Bn128Fr - let full_rounds = 8; - let partial_rounds = 31; - let alpha = 17; - - let mds = vec![ - vec![F::one(), F::zero(), F::one()], - vec![F::one(), F::one(), F::zero()], - vec![F::zero(), F::one(), F::one()], - ]; - - let mut ark = Vec::new(); - let mut ark_rng = rand_chacha::ChaChaRng::seed_from_u64(123456789u64); - - for _ in 0..(full_rounds + partial_rounds) { - let mut res = Vec::new(); - - for _ in 0..3 { - res.push(F::rand(&mut ark_rng)); - } - ark.push(res); - } - - let rate = 2; - let capacity = 1; - let zero = FpVar::::zero(); - let state = vec![zero; rate + capacity]; - let mode = PoseidonSpongeState::Absorbing { - next_absorb_index: 0, - }; - - Self { - cs, - full_rounds, - partial_rounds, - alpha, - ark, - mds, - - state, - rate, - capacity, - mode, - } - } - - fn constant(cs: ConstraintSystemRef, pfs: &PoseidonSponge) -> Self { - let mut state_gadgets = Vec::new(); - - for state_elem in pfs.state.iter() { - state_gadgets.push( - FpVar::::new_constant(ark_relations::ns!(cs, "alloc_elems"), *state_elem) - .unwrap(), - ); - } - - Self { - cs, - full_rounds: pfs.full_rounds, - partial_rounds: pfs.partial_rounds, - alpha: pfs.alpha, - ark: pfs.ark.clone(), - mds: pfs.mds.clone(), - - state: state_gadgets, - rate: pfs.rate, - capacity: pfs.capacity, - mode: pfs.mode.clone(), - } - } - - fn cs(&self) -> ConstraintSystemRef { - self.cs.clone() - } - - fn absorb(&mut self, elems: &[FpVar]) -> Result<(), SynthesisError> { - if elems.is_empty() { - return Ok(()); - } - - match self.mode { - PoseidonSpongeState::Absorbing { next_absorb_index } => { - let mut absorb_index = next_absorb_index; - if absorb_index == self.rate { - self.permute()?; - absorb_index = 0; - } - self.absorb_internal(absorb_index, elems)?; - } - PoseidonSpongeState::Squeezing { - next_squeeze_index: _, - } => { - self.permute()?; - self.absorb_internal(0, elems)?; - } - }; - - Ok(()) - } - - fn squeeze(&mut self, num: usize) -> Result>, SynthesisError> { - let zero = FpVar::zero(); - let mut squeezed_elems = vec![zero; num]; - match self.mode { - PoseidonSpongeState::Absorbing { - next_absorb_index: _, - } => { - self.permute()?; - self.squeeze_internal(0, &mut squeezed_elems)?; - } - PoseidonSpongeState::Squeezing { next_squeeze_index } => { - let mut squeeze_index = next_squeeze_index; - if squeeze_index == self.rate { - self.permute()?; - 
squeeze_index = 0; - } - self.squeeze_internal(squeeze_index, &mut squeezed_elems)?; - } - }; - - Ok(squeezed_elems) - } -} diff --git a/src/fiat_shamir/poseidon/mod.rs b/src/fiat_shamir/poseidon/mod.rs deleted file mode 100644 index e9d3318..0000000 --- a/src/fiat_shamir/poseidon/mod.rs +++ /dev/null @@ -1,270 +0,0 @@ -/* - * credit: - * This implementation of Poseidon is entirely from Fractal's implementation - * ([COS20]: https://eprint.iacr.org/2019/1076) - * with small syntax changes. - */ - -use crate::fiat_shamir::AlgebraicSponge; -use crate::Vec; -use ark_ff::PrimeField; -use ark_sponge::{Absorb, CryptographicSponge}; -use ark_std::rand::SeedableRng; - -/// constraints for Poseidon -pub mod constraints; - -#[derive(Clone)] -enum PoseidonSpongeState { - Absorbing { next_absorb_index: usize }, - Squeezing { next_squeeze_index: usize }, -} - -#[derive(Clone)] -/// the sponge for Poseidon -pub struct PoseidonSponge { - /// number of rounds in a full-round operation - pub full_rounds: u32, - /// number of rounds in a partial-round operation - pub partial_rounds: u32, - /// Exponent used in S-boxes - pub alpha: u64, - /// Additive Round keys. These are added before each MDS matrix application to make it an affine shift. - /// They are indexed by ark[round_num][state_element_index] - pub ark: Vec>, - /// Maximally Distance Separating Matrix. - pub mds: Vec>, - - /// the sponge's state - pub state: Vec, - /// the rate - pub rate: usize, - /// the capacity - pub capacity: usize, - /// the mode - mode: PoseidonSpongeState, -} - -impl PoseidonSponge { - fn apply_s_box(&self, state: &mut [F], is_full_round: bool) { - // Full rounds apply the S Box (x^alpha) to every element of state - if is_full_round { - for elem in state { - *elem = elem.pow(&[self.alpha]); - } - } - // Partial rounds apply the S Box (x^alpha) to just the final element of state - else { - state[state.len() - 1] = state[state.len() - 1].pow(&[self.alpha]); - } - } - - fn apply_ark(&self, state: &mut [F], round_number: usize) { - for (i, state_elem) in state.iter_mut().enumerate() { - state_elem.add_assign(&self.ark[round_number][i]); - } - } - - fn apply_mds(&self, state: &mut [F]) { - let mut new_state = Vec::new(); - for i in 0..state.len() { - let mut cur = F::zero(); - for (j, state_elem) in state.iter().enumerate() { - let term = state_elem.mul(&self.mds[i][j]); - cur.add_assign(&term); - } - new_state.push(cur); - } - state.clone_from_slice(&new_state[..state.len()]) - } - - fn permute(&mut self) { - let full_rounds_over_2 = self.full_rounds / 2; - let mut state = self.state.clone(); - for i in 0..full_rounds_over_2 { - self.apply_ark(&mut state, i as usize); - self.apply_s_box(&mut state, true); - self.apply_mds(&mut state); - } - - for i in full_rounds_over_2..(full_rounds_over_2 + self.partial_rounds) { - self.apply_ark(&mut state, i as usize); - self.apply_s_box(&mut state, false); - self.apply_mds(&mut state); - } - - for i in - (full_rounds_over_2 + self.partial_rounds)..(self.partial_rounds + self.full_rounds) - { - self.apply_ark(&mut state, i as usize); - self.apply_s_box(&mut state, true); - self.apply_mds(&mut state); - } - self.state = state; - } - - // Absorbs everything in elements, this does not end in an absorbtion. 
- fn absorb_internal(&mut self, rate_start_index: usize, elements: &[F]) { - // if we can finish in this call - if rate_start_index + elements.len() <= self.rate { - for (i, element) in elements.iter().enumerate() { - self.state[i + rate_start_index] += element; - } - self.mode = PoseidonSpongeState::Absorbing { - next_absorb_index: rate_start_index + elements.len(), - }; - - return; - } - // otherwise absorb (rate - rate_start_index) elements - let num_elements_absorbed = self.rate - rate_start_index; - for (i, element) in elements.iter().enumerate().take(num_elements_absorbed) { - self.state[i + rate_start_index] += element; - } - self.permute(); - // Tail recurse, with the input elements being truncated by num elements absorbed - self.absorb_internal(0, &elements[num_elements_absorbed..]); - } - - // Squeeze |output| many elements. This does not end in a squeeze - fn squeeze_internal(&mut self, rate_start_index: usize, output: &mut [F]) { - // if we can finish in this call - if rate_start_index + output.len() <= self.rate { - output - .clone_from_slice(&self.state[rate_start_index..(output.len() + rate_start_index)]); - self.mode = PoseidonSpongeState::Squeezing { - next_squeeze_index: rate_start_index + output.len(), - }; - return; - } - // otherwise squeeze (rate - rate_start_index) elements - let num_elements_squeezed = self.rate - rate_start_index; - output[..num_elements_squeezed].clone_from_slice( - &self.state[rate_start_index..(num_elements_squeezed + rate_start_index)], - ); - - // Unless we are done with squeezing in this call, permute. - if output.len() != self.rate { - self.permute(); - } - // Tail recurse, with the correct change to indices in output happening due to changing the slice - self.squeeze_internal(0, &mut output[num_elements_squeezed..]); - } -} - -impl AlgebraicSponge for PoseidonSponge { - fn new() -> Self { - // Requires F to be Alt_Bn128Fr - let full_rounds = 8; - let partial_rounds = 31; - let alpha = 17; - - let mds = vec![ - vec![F::one(), F::zero(), F::one()], - vec![F::one(), F::one(), F::zero()], - vec![F::zero(), F::one(), F::one()], - ]; - - let mut ark = Vec::new(); - let mut ark_rng = rand_chacha::ChaChaRng::seed_from_u64(123456789u64); - - for _ in 0..(full_rounds + partial_rounds) { - let mut res = Vec::new(); - - for _ in 0..3 { - res.push(F::rand(&mut ark_rng)); - } - ark.push(res); - } - - let rate = 2; - let capacity = 1; - let state = vec![F::zero(); rate + capacity]; - let mode = PoseidonSpongeState::Absorbing { - next_absorb_index: 0, - }; - - PoseidonSponge { - full_rounds, - partial_rounds, - alpha, - ark, - mds, - - state, - rate, - capacity, - mode, - } - } - - fn absorb(&mut self, elems: &[F]) { - if elems.is_empty() { - return; - } - - match self.mode { - PoseidonSpongeState::Absorbing { next_absorb_index } => { - let mut absorb_index = next_absorb_index; - if absorb_index == self.rate { - self.permute(); - absorb_index = 0; - } - self.absorb_internal(absorb_index, elems); - } - PoseidonSpongeState::Squeezing { - next_squeeze_index: _, - } => { - self.permute(); - self.absorb_internal(0, elems); - } - }; - } - - fn squeeze(&mut self, num: usize) -> Vec { - let mut squeezed_elems = vec![F::zero(); num]; - match self.mode { - PoseidonSpongeState::Absorbing { - next_absorb_index: _, - } => { - self.permute(); - self.squeeze_internal(0, &mut squeezed_elems); - } - PoseidonSpongeState::Squeezing { next_squeeze_index } => { - let mut squeeze_index = next_squeeze_index; - if squeeze_index == self.rate { - self.permute(); - squeeze_index = 
0; - } - self.squeeze_internal(squeeze_index, &mut squeezed_elems); - } - }; - squeezed_elems - } -} - -impl CryptographicSponge for PoseidonSponge -where - F: PrimeField, -{ - type Parameters = (); - - fn new(_params: &Self::Parameters) -> Self { - >::new() - } - - fn absorb(&mut self, input: &impl Absorb) { - >::absorb( - self, - input.to_sponge_field_elements_as_vec().as_slice(), - ) - } - - fn squeeze_bytes(&mut self, _num_bytes: usize) -> Vec { - todo!() - } - - fn squeeze_bits(&mut self, _num_bits: usize) -> Vec { - todo!() - } -} diff --git a/src/lib.rs b/src/lib.rs index 233748a..70f74a1 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -17,15 +17,15 @@ #![allow(clippy::op_ref)] use crate::ahp::prover::ProverMsg; +use ahp::CryptographicSpongeWithDefault; use ark_ff::{to_bytes, PrimeField, ToConstraintField}; -use ark_nonnative_field::params::OptimizationType; use ark_poly::{univariate::DensePolynomial, EvaluationDomain, GeneralEvaluationDomain}; use ark_poly_commit::{ challenge::ChallengeGenerator, Evaluations, LabeledCommitment, LabeledPolynomial, PCUniversalParams, PolynomialCommitment, }; use ark_relations::r1cs::{ConstraintSynthesizer, SynthesisError}; -use ark_sponge::CryptographicSponge; +use ark_sponge::{Absorb, CryptographicSponge}; use ark_std::rand::RngCore; #[macro_use] @@ -46,11 +46,6 @@ macro_rules! eprintln { ($($arg: tt)*) => {}; } -/// Implements a Fiat-Shamir based Rng that allows one to incrementally update -/// the seed based on new messages in the proof transcript. -pub mod fiat_shamir; -pub use fiat_shamir::*; - mod error; pub use error::*; @@ -88,40 +83,40 @@ impl MarlinConfig for MarlinRecursiveConfig { pub struct Marlin< F: PrimeField, FSF: PrimeField, - S: CryptographicSponge + AlgebraicSponge, + S: CryptographicSponge, PC: PolynomialCommitment, S>, - FS: FiatShamirRng, MC: MarlinConfig, >( #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, - #[doc(hidden)] PhantomData, #[doc(hidden)] PhantomData, ); -fn compute_vk_hash(vk: &IndexVerifierKey) -> Vec +fn compute_vk_hash(vk: &IndexVerifierKey) -> Vec where F: PrimeField, FSF: PrimeField, - S: CryptographicSponge + AlgebraicSponge, + S: CryptographicSpongeWithDefault, PC: PolynomialCommitment, S>, - FS: FiatShamirRng, - PC::Commitment: ToConstraintField, + PC::Commitment: ToConstraintField + Absorb, { - let mut vk_hash_rng = FS::new(); - vk_hash_rng.absorb_native_field_elements(&vk.index_comms); - vk_hash_rng.squeeze_native_field_elements(1) + let params = S::default_params(); + let mut vk_hash_rng = S::new(¶ms); + vk_hash_rng.absorb(&vk.index_comms); + vk_hash_rng.squeeze_field_elements(1) } -impl Marlin +impl Marlin where - S: CryptographicSponge + AlgebraicSponge, + S: CryptographicSpongeWithDefault, + F: Absorb, + FSF: Absorb, PC: PolynomialCommitment, S>, PC::VerifierKey: ToConstraintField, - PC::Commitment: ToConstraintField, - FS: FiatShamirRng, + PC::Commitment: ToConstraintField + Absorb, + LabeledCommitment<, S>>::Commitment>: Absorb, { /// The personalization string for this protocol. Used to personalize the /// Fiat-Shamir rng. 
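The hunks that follow rewrite `prove` and `verify` to drive Fiat-Shamir directly through `ark_sponge::CryptographicSponge`: the sponge is built from `S::default_params()`, transcript data goes in via `absorb`, and challenges come out via `squeeze_field_elements`. A minimal sketch of that pattern is shown here; the helper name `squeeze_challenge` and its signature are illustrative only and not taken from this patch, and it assumes `S` implements the crate's new `CryptographicSpongeWithDefault` trait.

    use ark_ff::PrimeField;
    use ark_sponge::CryptographicSponge;
    // Crate-local trait introduced by this patch; it supplies `default_params()`.
    use crate::ahp::CryptographicSpongeWithDefault;

    // Illustrative sketch only: mirrors the absorb/squeeze sequence that
    // `prove`/`verify` below apply to the serialized transcript bytes.
    fn squeeze_challenge<F, S>(transcript_bytes: &[u8]) -> F
    where
        F: PrimeField,
        S: CryptographicSpongeWithDefault,
    {
        let params = S::default_params();
        let mut sponge = S::new(&params);
        // Absorb the serialized transcript (protocol name, vk hash, public input, ...).
        sponge.absorb(&transcript_bytes.to_vec());
        // Squeeze one field element to serve as the next verifier challenge.
        sponge.squeeze_field_elements::<F>(1)[0]
    }

This is also why `Marlin` now carries `Absorb` bounds on `F`, `FSF`, and `PC::Commitment`: everything that enters the transcript must be absorbable by the sponge.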
@@ -331,18 +326,17 @@ where let prover_init_state = AHPForR1CS::prover_init(&index_pk.index, c)?; let public_input = prover_init_state.public_input(); - let mut fs_rng = FS::new(); + let params = S::default_params(); + let mut sponge = S::new(¶ms); let hiding = !for_recursion; if for_recursion { - fs_rng.absorb_bytes(&to_bytes![&Self::PROTOCOL_NAME].unwrap()); - fs_rng.absorb_native_field_elements(&compute_vk_hash::( - &index_pk.index_vk, - )); - fs_rng.absorb_nonnative_field_elements(&public_input, OptimizationType::Weight); + sponge.absorb(&to_bytes![&Self::PROTOCOL_NAME].unwrap()); + sponge.absorb(&compute_vk_hash::(&index_pk.index_vk)); + sponge.absorb(&public_input); } else { - fs_rng.absorb_bytes( + sponge.absorb( &to_bytes![&Self::PROTOCOL_NAME, &index_pk.index_vk, &public_input].unwrap(), ); } @@ -363,26 +357,24 @@ where end_timer!(first_round_comm_time); if for_recursion { - fs_rng.absorb_native_field_elements(&first_comms); + sponge.absorb(&first_comms); match prover_first_msg.clone() { ProverMsg::EmptyMessage => (), - ProverMsg::FieldElements(v) => { - fs_rng.absorb_nonnative_field_elements(&v, OptimizationType::Weight) - } + ProverMsg::FieldElements(v) => sponge.absorb(&v), } } else { - fs_rng.absorb_bytes(&to_bytes![first_comms, prover_first_msg].unwrap()); + sponge.absorb(&to_bytes![first_comms, prover_first_msg].unwrap()); } let (verifier_first_msg, verifier_state) = - AHPForR1CS::verifier_first_round(index_pk.index_vk.index_info, &mut fs_rng)?; + AHPForR1CS::verifier_first_round::(index_pk.index_vk.index_info, &mut sponge)?; // -------------------------------------------------------------------- // -------------------------------------------------------------------- // Second round let (prover_second_msg, prover_second_oracles, prover_state) = - AHPForR1CS::prover_second_round(&verifier_first_msg, prover_state, zk_rng, hiding); + AHPForR1CS::prover_second_round(&verifier_first_msg, prover_state, hiding); let second_round_comm_time = start_timer!(|| "Committing to second round polys"); let (second_comms, second_comm_rands) = PC::commit( @@ -394,25 +386,23 @@ where end_timer!(second_round_comm_time); if for_recursion { - fs_rng.absorb_native_field_elements(&second_comms); + sponge.absorb(&second_comms); match prover_second_msg.clone() { ProverMsg::EmptyMessage => (), - ProverMsg::FieldElements(v) => { - fs_rng.absorb_nonnative_field_elements(&v, OptimizationType::Weight) - } + ProverMsg::FieldElements(v) => sponge.absorb(&v), } } else { - fs_rng.absorb_bytes(&to_bytes![second_comms, prover_second_msg].unwrap()); + sponge.absorb(&to_bytes![second_comms, prover_second_msg].unwrap()); } let (verifier_second_msg, verifier_state) = - AHPForR1CS::verifier_second_round(verifier_state, &mut fs_rng); + AHPForR1CS::verifier_second_round::(verifier_state, &mut sponge); // -------------------------------------------------------------------- // -------------------------------------------------------------------- // Third round let (prover_third_msg, prover_third_oracles) = - AHPForR1CS::prover_third_round(&verifier_second_msg, prover_state, zk_rng)?; + AHPForR1CS::prover_third_round(&verifier_second_msg, prover_state)?; let third_round_comm_time = start_timer!(|| "Committing to third round polys"); let (third_comms, third_comm_rands) = PC::commit( @@ -424,18 +414,17 @@ where end_timer!(third_round_comm_time); if for_recursion { - fs_rng.absorb_native_field_elements(&third_comms); + sponge.absorb(&third_comms); match prover_third_msg.clone() { ProverMsg::EmptyMessage => (), - 
ProverMsg::FieldElements(v) => { - fs_rng.absorb_nonnative_field_elements(&v, OptimizationType::Weight) - } + ProverMsg::FieldElements(v) => sponge.absorb(&v), } } else { - fs_rng.absorb_bytes(&to_bytes![third_comms, prover_third_msg].unwrap()); + sponge.absorb(&to_bytes![third_comms, prover_third_msg].unwrap()); } - let verifier_state = AHPForR1CS::verifier_third_round(verifier_state, &mut fs_rng); + let verifier_state = + AHPForR1CS::verifier_third_round::(verifier_state, &mut sponge); // -------------------------------------------------------------------- let vanishing_polys = if for_recursion { @@ -511,7 +500,7 @@ where // Compute the AHP verifier's query set. let (query_set, verifier_state) = - AHPForR1CS::verifier_query_set(verifier_state, &mut fs_rng, for_recursion); + AHPForR1CS::verifier_query_set::(verifier_state, for_recursion); let lc_s = AHPForR1CS::construct_linear_combinations( &public_input, &polynomials, @@ -537,12 +526,14 @@ where end_timer!(eval_time); if for_recursion { - fs_rng.absorb_nonnative_field_elements(&evaluations, OptimizationType::Weight); + sponge.absorb(&evaluations); } else { - fs_rng.absorb_bytes(&to_bytes![&evaluations].unwrap()); + sponge.absorb(&to_bytes![&evaluations].unwrap()); } - let sponge = >::new(); + let params = S::default_params(); + let sponge = S::new(¶ms); + let mut opening_challenges = ChallengeGenerator::::new_multivariate(sponge); let pc_proof = PC::open_combinations( @@ -571,10 +562,11 @@ where /// Verify that a proof for the constrain system defined by `C` asserts that /// all constraints are satisfied. - pub fn verify( + pub fn verify( index_vk: &IndexVerifierKey, public_input: &[F], proof: &Proof, + zk_rng: Option<&mut R>, ) -> Result> { let verifier_time = start_timer!(|| "Marlin::Verify"); @@ -592,34 +584,32 @@ where let for_recursion = MC::FOR_RECURSION; - let mut fs_rng = FS::new(); + let params = S::default_params(); + let mut sponge = S::new(¶ms); if for_recursion { - fs_rng.absorb_bytes(&to_bytes![&Self::PROTOCOL_NAME].unwrap()); - fs_rng.absorb_native_field_elements(&compute_vk_hash::(index_vk)); - fs_rng.absorb_nonnative_field_elements(&public_input, OptimizationType::Weight); + sponge.absorb(&to_bytes![&Self::PROTOCOL_NAME].unwrap()); + sponge.absorb(&compute_vk_hash::(index_vk)); + sponge.absorb(&public_input); } else { - fs_rng - .absorb_bytes(&to_bytes![&Self::PROTOCOL_NAME, &index_vk, &public_input].unwrap()); + sponge.absorb(&to_bytes![&Self::PROTOCOL_NAME, &index_vk, &public_input].unwrap()); } // -------------------------------------------------------------------- // First round let first_comms = &proof.commitments[0]; if for_recursion { - fs_rng.absorb_native_field_elements(&first_comms); + sponge.absorb(&first_comms); match proof.prover_messages[0].clone() { ProverMsg::EmptyMessage => (), - ProverMsg::FieldElements(v) => { - fs_rng.absorb_nonnative_field_elements(&v, OptimizationType::Weight) - } + ProverMsg::FieldElements(v) => sponge.absorb(&v), }; } else { - fs_rng.absorb_bytes(&to_bytes![first_comms, proof.prover_messages[0]].unwrap()); + sponge.absorb(&to_bytes![first_comms, proof.prover_messages[0]].unwrap()); } let (_, verifier_state) = - AHPForR1CS::verifier_first_round(index_vk.index_info, &mut fs_rng)?; + AHPForR1CS::verifier_first_round::(index_vk.index_info, &mut sponge)?; // -------------------------------------------------------------------- // -------------------------------------------------------------------- @@ -627,18 +617,17 @@ where let second_comms = &proof.commitments[1]; if for_recursion { 
- fs_rng.absorb_native_field_elements(&second_comms); + sponge.absorb(&second_comms); match proof.prover_messages[1].clone() { ProverMsg::EmptyMessage => (), - ProverMsg::FieldElements(v) => { - fs_rng.absorb_nonnative_field_elements(&v, OptimizationType::Weight) - } + ProverMsg::FieldElements(v) => sponge.absorb(&v), }; } else { - fs_rng.absorb_bytes(&to_bytes![second_comms, proof.prover_messages[1]].unwrap()); + sponge.absorb(&to_bytes![second_comms, proof.prover_messages[1]].unwrap()); } - let (_, verifier_state) = AHPForR1CS::verifier_second_round(verifier_state, &mut fs_rng); + let (_, verifier_state) = + AHPForR1CS::verifier_second_round::(verifier_state, &mut sponge); // -------------------------------------------------------------------- // -------------------------------------------------------------------- @@ -646,18 +635,17 @@ where let third_comms = &proof.commitments[2]; if for_recursion { - fs_rng.absorb_native_field_elements(&third_comms); + sponge.absorb(&third_comms); match proof.prover_messages[2].clone() { ProverMsg::EmptyMessage => (), - ProverMsg::FieldElements(v) => { - fs_rng.absorb_nonnative_field_elements(&v, OptimizationType::Weight) - } + ProverMsg::FieldElements(v) => sponge.absorb(&v), }; } else { - fs_rng.absorb_bytes(&to_bytes![third_comms, proof.prover_messages[2]].unwrap()); + sponge.absorb(&to_bytes![third_comms, proof.prover_messages[2]].unwrap()); } - let verifier_state = AHPForR1CS::verifier_third_round(verifier_state, &mut fs_rng); + let verifier_state = + AHPForR1CS::verifier_third_round::(verifier_state, &mut sponge); // -------------------------------------------------------------------- // Collect degree bounds for commitments. Indexed polynomials have *no* @@ -690,12 +678,12 @@ where .collect(); let (query_set, verifier_state) = - AHPForR1CS::verifier_query_set(verifier_state, &mut fs_rng, for_recursion); + AHPForR1CS::verifier_query_set::(verifier_state, for_recursion); if for_recursion { - fs_rng.absorb_nonnative_field_elements(&proof.evaluations, OptimizationType::Weight); + sponge.absorb(&proof.evaluations); } else { - fs_rng.absorb_bytes(&to_bytes![&proof.evaluations].unwrap()); + sponge.absorb(&to_bytes![&proof.evaluations].unwrap()); } let mut evaluations = Evaluations::new(); @@ -721,8 +709,8 @@ where for_recursion, )?; - let sponge = >::new(); - let mut opening_challenges = ChallengeGenerator::::new_multivariate(sponge); + let params = S::default_params(); + let mut opening_challenges = ChallengeGenerator::::new_multivariate(S::new(¶ms)); let evaluations_are_correct = PC::check_combinations( &index_vk.verifier_key, @@ -732,7 +720,7 @@ where &evaluations, &proof.pc_proof, &mut opening_challenges, - &mut fs_rng, + zk_rng, ) .map_err(Error::from_pc_err)?; @@ -748,11 +736,12 @@ where Ok(evaluations_are_correct) } - pub fn prepared_verify( + pub fn prepared_verify( prepared_vk: &PreparedIndexVerifierKey, public_input: &[F], proof: &Proof, + zk_rng: Option<&mut R>, ) -> Result> { - Self::verify(&prepared_vk.orig_vk, public_input, proof) + Self::verify(&prepared_vk.orig_vk, public_input, proof, zk_rng) } } diff --git a/src/test.rs b/src/test.rs index 6a2285e..b286ad3 100644 --- a/src/test.rs +++ b/src/test.rs @@ -115,10 +115,7 @@ impl ConstraintSynthesizer for OutlineTestCircuit { mod marlin { use super::*; - use crate::{ - fiat_shamir::poseidon::PoseidonSponge, fiat_shamir::FiatShamirChaChaRng, Marlin, - MarlinDefaultConfig, - }; + use crate::{Marlin, MarlinDefaultConfig}; use ark_bls12_381::{Bls12_381, Fq, Fr}; use ark_ff::UniformRand; @@ 
-213,10 +210,7 @@ mod marlin { mod marlin_recursion { use super::*; - use crate::{ - fiat_shamir::{poseidon::PoseidonSponge, FiatShamirAlgebraicSpongeRng}, - Marlin, MarlinRecursiveConfig, - }; + use crate::{Marlin, MarlinRecursiveConfig}; use ark_ec::{CurveCycle, PairingEngine, PairingFriendlyCycle}; use ark_ff::UniformRand; @@ -348,12 +342,6 @@ mod marlin_recursion { } mod fiat_shamir { - use crate::fiat_shamir::constraints::FiatShamirRngVar; - use crate::fiat_shamir::{ - constraints::FiatShamirAlgebraicSpongeRngVar, - poseidon::{constraints::PoseidonSpongeVar, PoseidonSponge}, - FiatShamirAlgebraicSpongeRng, FiatShamirChaChaRng, FiatShamirRng, - }; use ark_ff::PrimeField; use ark_mnt4_298::{Fq, Fr}; use ark_nonnative_field::params::OptimizationType;
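With the crate-local `fiat_shamir` sponge removed, these test modules need a sponge type from `ark_sponge` itself. A hypothetical round-trip helper is sketched below; the `PoseidonSponge`/`PoseidonParameters` paths are assumed from the `ark-sponge` 0.3 release, and where the Poseidon parameters come from is not shown by this patch.

    use ark_ff::PrimeField;
    use ark_sponge::{
        poseidon::{PoseidonParameters, PoseidonSponge},
        Absorb, CryptographicSponge,
    };

    // Hypothetical test helper (not part of this patch): absorb a slice of field
    // elements into ark-sponge's Poseidon and squeeze a few back out.
    fn absorb_squeeze_roundtrip<F: PrimeField + Absorb>(
        params: &PoseidonParameters<F>,
        input: &[F],
    ) -> Vec<F> {
        let mut sponge = PoseidonSponge::<F>::new(params);
        sponge.absorb(&input.to_vec());
        sponge.squeeze_field_elements::<F>(3)
    }

Wherever the tests name the sponge in `Marlin`'s type parameters, the ordering follows the new struct definition, e.g. `Marlin<Fr, Fq, PoseidonSponge<Fq>, PC, MarlinRecursiveConfig>` for some commitment scheme `PC` over the matching fields; the concrete choice of `PC` is an assumption about the final test setup, not part of this hunk.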