diff --git a/ferveo/src/dkg/pv.rs b/ferveo/src/dkg/pv.rs index ec90673a..75222af3 100644 --- a/ferveo/src/dkg/pv.rs +++ b/ferveo/src/dkg/pv.rs @@ -25,7 +25,7 @@ pub struct PubliclyVerifiableDkg { impl PubliclyVerifiableDkg { /// Create a new DKG context to participate in the DKG /// Every identity in the DKG is linked to an ed25519 public key; - /// `validatorst`: List of validators + /// `validators`: List of validators /// `params` contains the parameters of the DKG such as number of shares /// `me` the validator creating this instance /// `session_keypair` the keypair for `me` diff --git a/ferveo/src/lib.rs b/ferveo/src/lib.rs index 3cf7b621..6def2499 100644 --- a/ferveo/src/lib.rs +++ b/ferveo/src/lib.rs @@ -34,56 +34,95 @@ use measure_time::print_time; #[cfg(test)] mod test_dkg_full { use super::*; + use std::collections::HashMap; use crate::dkg::pv::test_common::*; - use ark_bls12_381::{Bls12_381 as EllipticCurve, Bls12_381, G2Projective}; + use ark_bls12_381::{ + Bls12_381 as E, Bls12_381, Fr, G1Affine, G2Projective, + }; use ark_ec::bls12::G2Affine; use ark_ec::group::Group; use ark_ff::{Fp12, UniformRand}; + use ark_std::test_rng; use ferveo_common::{ExternalValidator, Keypair}; use group_threshold_cryptography as tpke; - use group_threshold_cryptography::Ciphertext; + use group_threshold_cryptography::{Ciphertext, DecryptionShareSimple}; use itertools::{zip_eq, Itertools}; - type E = Bls12_381; + type Fqk = ::Fqk; - #[test] - fn test_dkg_simple_decryption_variant_single_validator() { - let rng = &mut ark_std::test_rng(); - let dkg = setup_dealt_dkg_with_n_validators(1, 1); - - let msg: &[u8] = "abc".as_bytes(); - let aad: &[u8] = "my-aad".as_bytes(); - let public_key = dkg.final_key(); - let g_inv = dkg.pvss_params.g_inv(); - - let ciphertext = tpke::encrypt::<_, E>(msg, aad, &public_key, rng); + fn make_shared_secret_simple_tdec( + dkg: &PubliclyVerifiableDkg, + aad: &[u8], + ciphertext: &Ciphertext, + validator_keypairs: &[Keypair], + ) -> ( + PubliclyVerifiableSS, + Vec>, + Fqk, + ) { + // Make sure validators are in the same order dkg is by comparing their public keys + dkg.validators + .iter() + .zip_eq(validator_keypairs.iter()) + .for_each(|(v, k)| { + assert_eq!(v.validator.public_key, k.public()); + }); - let share_aggregate = aggregate_for_decryption(&dkg); - // Aggregate contains only one set of shares - assert_eq!(share_aggregate, dkg.vss.get(&0).unwrap().shares); + let pvss_aggregated = aggregate(dkg); - let validator_keypairs = gen_n_keypairs(1); - let decryption_shares = make_decryption_shares( - &ciphertext, - &validator_keypairs, - &share_aggregate, - aad, - &g_inv, - ); + let decryption_shares: Vec> = + validator_keypairs + .iter() + .enumerate() + .map(|(validator_index, validator_keypair)| { + pvss_aggregated.make_decryption_share_simple( + ciphertext, + aad, + &validator_keypair.decryption_key, + validator_index, + &dkg.pvss_params.g_inv(), + ) + }) + .collect(); - let shares_x = &dkg + let domain = &dkg .domain .elements() .take(decryption_shares.len()) .collect::>(); - let lagrange_coeffs = tpke::prepare_combine_simple::(shares_x); + assert_eq!(domain.len(), decryption_shares.len()); + + // TODO: Consider refactor this part into tpke::combine_simple and expose it + // as a public API in tpke::api + let lagrange_coeffs = tpke::prepare_combine_simple::(domain); let shared_secret = tpke::share_combine_simple::( &decryption_shares, &lagrange_coeffs, ); + (pvss_aggregated, decryption_shares, shared_secret) + } + + #[test] + fn 
test_dkg_simple_decryption_variant_single_validator() { + let rng = &mut test_rng(); + + let dkg = setup_dealt_dkg_with_n_validators(1, 1); + let msg: &[u8] = "abc".as_bytes(); + let aad: &[u8] = "my-aad".as_bytes(); + let public_key = dkg.final_key(); + let ciphertext = tpke::encrypt::<_, E>(msg, aad, &public_key, rng); + let validator_keypairs = gen_n_keypairs(1); + + let (_, _, shared_secret) = make_shared_secret_simple_tdec( + &dkg, + aad, + &ciphertext, + &validator_keypairs, + ); + let plaintext = tpke::checked_decrypt_with_shared_secret( &ciphertext, aad, @@ -96,47 +135,22 @@ mod test_dkg_full { #[test] fn test_dkg_simple_decryption_variant() { - let rng = &mut ark_std::test_rng(); - let dkg = setup_dealt_dkg_with_n_validators(3, 4); + let rng = &mut test_rng(); + let dkg = setup_dealt_dkg_with_n_validators(3, 4); let msg: &[u8] = "abc".as_bytes(); let aad: &[u8] = "my-aad".as_bytes(); let public_key = dkg.final_key(); let ciphertext = tpke::encrypt::<_, E>(msg, aad, &public_key, rng); - let g_inv = dkg.pvss_params.g_inv(); - - let share_aggregate = aggregate_for_decryption(&dkg); - let validator_keypairs = gen_n_keypairs(4); - // Make sure validators are in the same order dkg is by comparing their public keys - dkg.validators - .iter() - .zip_eq(validator_keypairs.iter()) - .for_each(|(v, k)| { - assert_eq!(v.validator.public_key, k.public()); - }); - let decryption_shares = make_decryption_shares( + + let (_, _, shared_secret) = make_shared_secret_simple_tdec( + &dkg, + aad, &ciphertext, &validator_keypairs, - &share_aggregate, - aad, - &g_inv, - ); - - let shares_x = &dkg - .domain - .elements() - .take(decryption_shares.len()) // TODO: Assert length instead? - .collect::>(); - let lagrange_coeffs = tpke::prepare_combine_simple::(shares_x); - - let shared_secret = tpke::share_combine_simple::( - &decryption_shares, - &lagrange_coeffs, ); - // Combination works, let's decrypt - let plaintext = tpke::checked_decrypt_with_shared_secret( &ciphertext, aad, @@ -145,17 +159,242 @@ mod test_dkg_full { ) .unwrap(); assert_eq!(plaintext, msg); + } + + #[test] + fn test_dkg_simple_decryption_shares_verification() { + let rng = &mut test_rng(); - // Testing green-path decryption share verification - izip!(decryption_shares, share_aggregate, validator_keypairs).for_each( - |(decryption_share, y_i, validator_keypair)| { + let dkg = setup_dealt_dkg_with_n_validators(3, 4); + let msg: &[u8] = "abc".as_bytes(); + let aad: &[u8] = "my-aad".as_bytes(); + let public_key = dkg.final_key(); + let ciphertext = tpke::encrypt::<_, E>(msg, aad, &public_key, rng); + let validator_keypairs = gen_n_keypairs(4); + + let (pvss_aggregated, decryption_shares, _) = + make_shared_secret_simple_tdec( + &dkg, + aad, + &ciphertext, + &validator_keypairs, + ); + + izip!( + &pvss_aggregated.shares, + &validator_keypairs, + &decryption_shares, + ) + .for_each( + |(aggregated_share, validator_keypair, decryption_share)| { assert!(decryption_share.verify( - &y_i, + aggregated_share, &validator_keypair.public().encryption_key, &dkg.pvss_params.h, - &ciphertext + &ciphertext, )); }, ); + + // Testing red-path decryption share verification + let decryption_share = decryption_shares[0].clone(); + + // Should fail because of the bad decryption share + let mut with_bad_decryption_share = decryption_share.clone(); + with_bad_decryption_share.decryption_share = Fqk::zero(); + assert!(!with_bad_decryption_share.verify( + &pvss_aggregated.shares[0], + &validator_keypairs[0].public().encryption_key, + &dkg.pvss_params.h, + 
&ciphertext, + )); + + // Should fail because of the bad checksum + let mut with_bad_checksum = decryption_share; + with_bad_checksum.validator_checksum = G1Affine::zero(); + assert!(!with_bad_checksum.verify( + &pvss_aggregated.shares[0], + &validator_keypairs[0].public().encryption_key, + &dkg.pvss_params.h, + &ciphertext, + )); + } + + #[test] + fn test_dkg_simple_tdec_share_recovery() { + let rng = &mut test_rng(); + + let mut dkg = setup_dealt_dkg_with_n_validators(3, 4); + let msg: &[u8] = "abc".as_bytes(); + let aad: &[u8] = "my-aad".as_bytes(); + let public_key = &dkg.final_key(); + let ciphertext = tpke::encrypt::<_, E>(msg, aad, public_key, rng); + let mut validator_keypairs = gen_n_keypairs(4); + + // Create an initial shared secret + let (_, _, old_shared_secret) = make_shared_secret_simple_tdec( + &dkg, + aad, + &ciphertext, + &validator_keypairs, + ); + + // Now, we're going to recover a new share at a random point and check that the shared secret is still the same + + // Our random point + let x_r = Fr::rand(rng); + + // Remove one participant from the contexts and all nested structure + let removed_validator = dkg.validators.pop().unwrap(); + validator_keypairs.pop(); + // Remember to remove one domain point too + let mut domain_points = dkg.domain.elements().collect::>(); + domain_points.pop().unwrap(); + + // Each participant prepares an update for each other participant + let share_updates = &dkg + .validators + .iter() + .map(|p| { + let deltas_i = tpke::prepare_share_updates_for_recovery::( + &domain_points, + &dkg.pvss_params.h.into_affine(), + &x_r, + dkg.params.security_threshold as usize, + rng, + ); + (p.share_index, deltas_i) + }) + .collect::>(); + + // Participants share updates and update their shares + let pvss_aggregated = aggregate(&dkg); + + // Now, every participant separately: + let updated_shares: Vec<_> = validator_keypairs + .iter() + .enumerate() + .map(|(validator_index, validator_keypair)| { + // Receives updates from other participants + let updates_for_participant: Vec<_> = share_updates + .values() + .map(|updates| *updates.get(validator_index).unwrap()) + .collect(); + + // Creates updated private key shares + pvss_aggregated.update_private_key_share_for_recovery( + &validator_keypair.decryption_key, + validator_index, + &updates_for_participant, + ) + }) + .collect(); + + // Now, we have to combine new share fragments into a new share + let new_private_key_share = + tpke::recover_share_from_updated_private_shares( + &x_r, + &domain_points, + &updated_shares, + ); + + // Get decryption shares from remaining participants + let mut decryption_shares: Vec> = + validator_keypairs + .iter() + .enumerate() + .map(|(validator_index, validator_keypair)| { + pvss_aggregated.make_decryption_share_simple( + &ciphertext, + aad, + &validator_keypair.decryption_key, + validator_index, + &dkg.pvss_params.g_inv(), + ) + }) + .collect(); + + // Create a decryption share from a recovered private key share + let new_validator_decryption_key = Fr::rand(rng); + let validator_index = removed_validator.share_index; + decryption_shares.push( + DecryptionShareSimple::create( + validator_index, + &new_validator_decryption_key, + &new_private_key_share, + &ciphertext, + aad, + &dkg.pvss_params.g_inv(), + ) + .unwrap(), + ); + + let lagrange = tpke::prepare_combine_simple::(&domain_points); + let new_shared_secret = + tpke::share_combine_simple::(&decryption_shares, &lagrange); + + assert_eq!(old_shared_secret, new_shared_secret); + } + + #[test] + fn 
simple_threshold_decryption_with_share_refreshing() { + let rng = &mut test_rng(); + let dkg = setup_dealt_dkg_with_n_validators(3, 4); + + let msg: &[u8] = "abc".as_bytes(); + let aad: &[u8] = "my-aad".as_bytes(); + let public_key = dkg.final_key(); + let ciphertext = tpke::encrypt::<_, E>(msg, aad, &public_key, rng); + + let validator_keypairs = gen_n_keypairs(4); + let pvss_aggregated = aggregate(&dkg); + + // Create an initial shared secret + let (_, _, old_shared_secret) = make_shared_secret_simple_tdec( + &dkg, + aad, + &ciphertext, + &validator_keypairs, + ); + + // Now, we're going to refresh the shares and check that the shared secret is the same + + // Dealer computes a new random polynomial with constant term x_r = 0 + let polynomial = tpke::make_random_polynomial_at::( + dkg.params.security_threshold as usize, + &Fr::zero(), + rng, + ); + + // Dealer shares the polynomial with participants + + // Participants computes new decryption shares + let new_decryption_shares: Vec> = + validator_keypairs + .iter() + .enumerate() + .map(|(validator_index, validator_keypair)| { + pvss_aggregated.refresh_decryption_share( + &ciphertext, + aad, + &validator_keypair.decryption_key, + validator_index, + &polynomial, + &dkg, + ) + }) + .collect(); + + // Create a new shared secret + let domain = &dkg.domain.elements().collect::>(); + // TODO: Combine `tpke::prepare_combine_simple` and `tpke::share_combine_simple` into + // one function and expose it in the tpke::api? + let lagrange_coeffs = tpke::prepare_combine_simple::(domain); + let new_shared_secret = tpke::share_combine_simple::( + &new_decryption_shares, + &lagrange_coeffs, + ); + + assert_eq!(old_shared_secret, new_shared_secret); } } diff --git a/ferveo/src/vss/pvss.rs b/ferveo/src/vss/pvss.rs index 018ca213..0a36f0a4 100644 --- a/ferveo/src/vss/pvss.rs +++ b/ferveo/src/vss/pvss.rs @@ -9,7 +9,8 @@ use ark_ff::UniformRand; use ark_serialize::*; use ferveo_common::{Keypair, PublicKey}; use group_threshold_cryptography::{ - Ciphertext, DecryptionShareSimple, PrivateKeyShare, + refresh_private_key_share, update_share_for_recovery, Ciphertext, + DecryptionShareFast, DecryptionShareSimple, PrivateKeyShare, }; use itertools::{zip_eq, Itertools}; use subproductdomain::fast_multiexp; @@ -33,6 +34,7 @@ impl Aggregate for Aggregated {} /// Type alias for non aggregated PVSS transcripts pub type Pvss = PubliclyVerifiableSS; + /// Type alias for aggregated PVSS transcripts pub type AggregatedPvss = PubliclyVerifiableSS; @@ -170,7 +172,7 @@ impl PubliclyVerifiableSS { } } -/// Extra method available to aggregated PVSS transcripts +/// Extra methods available to aggregated PVSS transcripts impl PubliclyVerifiableSS { /// Verify that this PVSS instance is a valid aggregation of /// the PVSS instances, produced by [`aggregate`], @@ -199,10 +201,98 @@ impl PubliclyVerifiableSS { )) } } + + pub fn decrypt_private_key_share( + &self, + validator_decryption_key: &E::Fr, + validator_index: usize, + ) -> PrivateKeyShare { + // Decrypt private key shares https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares + let private_key_share = self + .shares + .get(validator_index) + .unwrap() + .mul(validator_decryption_key.inverse().unwrap().into_repr()) + .into_affine(); + PrivateKeyShare { private_key_share } + } + + pub fn make_decryption_share_simple( + &self, + ciphertext: &Ciphertext, + aad: &[u8], + validator_decryption_key: &E::Fr, + validator_index: usize, + g_inv: &E::G1Prepared, + ) -> DecryptionShareSimple { + let 
private_key_share = self.decrypt_private_key_share( + validator_decryption_key, + validator_index, + ); + DecryptionShareSimple::create( + validator_index, + validator_decryption_key, + &private_key_share, + ciphertext, + aad, + g_inv, + ) + .unwrap() // TODO: Add proper error handling + } + + pub fn refresh_decryption_share( + &self, + ciphertext: &Ciphertext, + aad: &[u8], + validator_decryption_key: &E::Fr, + validator_index: usize, + polynomial: &DensePolynomial, + dkg: &PubliclyVerifiableDkg, + ) -> DecryptionShareSimple { + let validator_private_key_share = self.decrypt_private_key_share( + validator_decryption_key, + validator_index, + ); + let h = dkg.pvss_params.h; + let g_inv = dkg.pvss_params.g_inv(); + let domain_point = dkg.domain.element(validator_index); + let refreshed_private_key_share = refresh_private_key_share( + &h, + &domain_point, + polynomial, + &validator_private_key_share, + ); + DecryptionShareSimple::create( + validator_index, + validator_decryption_key, + &refreshed_private_key_share, + ciphertext, + aad, + &g_inv, + ) + .unwrap() // TODO: Add proper error handling + } + + pub fn update_private_key_share_for_recovery( + &self, + validator_decryption_key: &E::Fr, + validator_index: usize, + share_updates: &[E::G2Projective], + ) -> PrivateKeyShare { + // Retrieves their private key share + let private_key_share = self.decrypt_private_key_share( + validator_decryption_key, + validator_index, + ); + + // And updates their share + update_share_for_recovery::(&private_key_share, share_updates) + } } /// Aggregate the PVSS instances in `pvss` from DKG session `dkg` /// into a new PVSS instance +/// See: https://nikkolasg.github.io/ferveo/pvss.html?highlight=aggregate#aggregation pub fn aggregate( dkg: &PubliclyVerifiableDkg, ) -> PubliclyVerifiableSS { @@ -238,63 +328,6 @@ pub fn aggregate( } } -pub fn aggregate_for_decryption( - dkg: &PubliclyVerifiableDkg, -) -> Vec> { - // From docs: https://nikkolasg.github.io/ferveo/pvss.html?highlight=aggregate#aggregation - // "Two PVSS instances may be aggregated into a single PVSS instance by adding elementwise each of the corresponding group elements." - let shares = dkg - .vss - .values() - .map(|pvss| pvss.shares.clone()) - .collect::>(); - let first_share = shares.first().unwrap().to_vec(); - shares - .into_iter() - .skip(1) - // We're assuming that in every PVSS instance, the shares are in the same order - .fold(first_share, |acc, shares| { - acc.into_iter() - .zip_eq(shares.into_iter()) - .map(|(a, b)| a + b) - .collect() - }) -} - -pub fn make_decryption_shares( - ciphertext: &Ciphertext, - validator_keypairs: &[Keypair], - aggregate: &[E::G2Affine], - aad: &[u8], - g_inv: &E::G1Prepared, -) -> Vec> { - // TODO: Calculate separately for each validator - aggregate - .iter() - .zip_eq(validator_keypairs.iter()) - .enumerate() - .map(|(decrypter_index, (encrypted_share, keypair))| { - // Decrypt private key shares https://nikkolasg.github.io/ferveo/pvss.html#validator-decryption-of-private-key-shares - let z_i = encrypted_share - .mul(keypair.decryption_key.inverse().unwrap().into_repr()); - // TODO: Consider using "container" structs from `tpke` for other primitives - let private_key_share = PrivateKeyShare { - private_key_share: z_i.into_affine(), - }; - - DecryptionShareSimple::create( - decrypter_index, - &keypair.decryption_key, - &private_key_share, - ciphertext, - aad, - g_inv, - ) - .unwrap() // Unwrapping here only because this is a test method! 
- }) - .collect::>() -} - #[cfg(test)] mod test_pvss { use super::*; diff --git a/tpke/benches/tpke.rs b/tpke/benches/tpke.rs index 872732ab..8206253c 100644 --- a/tpke/benches/tpke.rs +++ b/tpke/benches/tpke.rs @@ -1,6 +1,9 @@ #![allow(clippy::redundant_closure)] use ark_bls12_381::{Fr, G1Affine, G2Affine}; +use ark_ec::AffineCurve; +use ark_ff::Zero; +use std::collections::HashMap; use criterion::{ black_box, criterion_group, criterion_main, BenchmarkId, Criterion, @@ -459,8 +462,7 @@ pub fn bench_decryption_share_validity_checks(c: &mut Criterion) { } pub fn bench_recover_share_at_point(c: &mut Criterion) { - let mut group = c.benchmark_group("Recover Share at Point Benchmark"); - // Set up test conditions + let mut group = c.benchmark_group("RECOVER SHARE"); let rng = &mut StdRng::seed_from_u64(0); let msg_size = MSG_SIZE_CASES[0]; @@ -477,17 +479,55 @@ pub fn bench_recover_share_at_point(c: &mut Criterion) { for p in &mut remaining_participants { p.public_decryption_contexts.pop(); } + let domain_points = &remaining_participants[0] + .public_decryption_contexts + .iter() + .map(|ctxt| ctxt.domain) + .collect::>(); + let h = remaining_participants[0].public_decryption_contexts[0].h; + let share_updates = remaining_participants + .iter() + .map(|p| { + let deltas_i = prepare_share_updates_for_recovery::( + domain_points, + &h, + &x_r, + threshold, + rng, + ); + (p.index, deltas_i) + }) + .collect::>(); + let new_share_fragments: Vec<_> = remaining_participants + .iter() + .map(|p| { + // Current participant receives updates from other participants + let updates_for_participant: Vec<_> = share_updates + .values() + .map(|updates| *updates.get(p.index).unwrap()) + .collect(); + + // And updates their share + update_share_for_recovery::( + &p.private_key_share, + &updates_for_participant, + ) + }) + .collect(); group.bench_function( - BenchmarkId::new("Recover Share at Point", shares_num), + BenchmarkId::new( + "recover_share_from_updated_private_shares", + shares_num, + ), |b| { - let mut rng = rand::rngs::StdRng::seed_from_u64(0); b.iter(|| { - let _ = black_box(recover_share_at_point::( - &remaining_participants[..shares_num - 1], - threshold, - &x_r, - &mut rng, - )); + let _ = black_box( + recover_share_from_updated_private_shares::( + &x_r, + domain_points, + &new_share_fragments, + ), + ); }); }, ); @@ -495,23 +535,25 @@ pub fn bench_recover_share_at_point(c: &mut Criterion) { } pub fn bench_refresh_shares(c: &mut Criterion) { - let mut group = c.benchmark_group("Refresh Shares Benchmark"); - // Set up test conditions + let mut group = c.benchmark_group("REFRESH SHARES"); let rng = &mut StdRng::seed_from_u64(0); let msg_size = MSG_SIZE_CASES[0]; for &shares_num in NUM_SHARES_CASES.iter() { let setup = SetupSimple::new(shares_num, msg_size, rng); let threshold = setup.shared.threshold; + let polynomial = + make_random_polynomial_at::(threshold, &Fr::zero(), rng); + let p = setup.contexts[0].clone(); group.bench_function( - BenchmarkId::new("Refresh Shares", shares_num), + BenchmarkId::new("refresh_private_key_share", shares_num), |b| { - let mut rng = rand::rngs::StdRng::seed_from_u64(0); b.iter(|| { - black_box(refresh_shares::( - &setup.contexts, - threshold, - &mut rng, + black_box(refresh_private_key_share::( + &p.setup_params.h.into_projective(), + &p.public_decryption_contexts[0].domain, + &polynomial, + &p.private_key_share, )); }); }, diff --git a/tpke/src/ciphertext.rs b/tpke/src/ciphertext.rs index 42e27005..e32ba753 100644 --- a/tpke/src/ciphertext.rs +++ 
b/tpke/src/ciphertext.rs @@ -161,10 +161,10 @@ pub fn checked_decrypt_with_shared_secret( ciphertext: &Ciphertext, aad: &[u8], g_inv: &E::G1Prepared, - s: &E::Fqk, + shared_secret: &E::Fqk, ) -> Result> { check_ciphertext_validity(ciphertext, aad, g_inv)?; - Ok(decrypt_with_shared_secret(ciphertext, s)) + Ok(decrypt_with_shared_secret(ciphertext, shared_secret)) } fn sha256(input: &[u8]) -> Vec { diff --git a/tpke/src/combine.rs b/tpke/src/combine.rs index 51bcd3d9..d786b94c 100644 --- a/tpke/src/combine.rs +++ b/tpke/src/combine.rs @@ -1,5 +1,4 @@ #![allow(non_snake_case)] -#![allow(dead_code)] use crate::*; use ark_ec::ProjectiveCurve; @@ -12,7 +11,8 @@ pub fn prepare_combine_fast( let mut n_0 = E::Fr::one(); for d_i in shares.iter() { domain.push(public_decryption_contexts[d_i.decrypter_index].domain); - n_0 *= public_decryption_contexts[d_i.decrypter_index].lagrange_n_0; // n_0_i = 1 * t^1 * t^2 ... + // n_0_i = 1 * t^1 * t^2 ... + n_0 *= public_decryption_contexts[d_i.decrypter_index].lagrange_n_0; } let s = SubproductDomain::::new(domain); let mut lagrange = s.inverse_lagrange_coefficients(); // 1/L_i @@ -104,16 +104,13 @@ pub fn share_combine_simple( decryption_shares: &[DecryptionShareSimple], lagrange_coeffs: &[E::Fr], ) -> E::Fqk { - let mut product_of_shares = E::Fqk::one(); - // Sum of C_i^{L_i}z - for (c_i, alpha_i) in izip!(decryption_shares, lagrange_coeffs) { - // Exponentiation by alpha_i - let ss = c_i.decryption_share.pow(alpha_i.into_repr()); - product_of_shares *= ss; - } - - product_of_shares + izip!(decryption_shares, lagrange_coeffs).fold( + E::Fqk::one(), + |acc, (c_i, alpha_i)| { + acc * c_i.decryption_share.pow(alpha_i.into_repr()) + }, + ) } pub fn share_combine_simple_precomputed( diff --git a/tpke/src/context.rs b/tpke/src/context.rs index db9e6a05..b4d1da67 100644 --- a/tpke/src/context.rs +++ b/tpke/src/context.rs @@ -26,6 +26,7 @@ pub struct SetupParams { pub b_inv: E::Fr, pub g: E::G1Affine, pub g_inv: E::G1Prepared, + pub h_inv: E::G2Prepared, pub h: E::G2Affine, } @@ -35,7 +36,6 @@ pub struct PrivateDecryptionContextFast { pub setup_params: SetupParams, pub private_key_share: PrivateKeyShare, pub public_decryption_contexts: Vec>, - pub scalar_bits: usize, } impl PrivateDecryptionContextFast { diff --git a/tpke/src/decryption.rs b/tpke/src/decryption.rs index 0f899409..bc19c4ec 100644 --- a/tpke/src/decryption.rs +++ b/tpke/src/decryption.rs @@ -47,7 +47,7 @@ pub struct DecryptionShareSimple { impl DecryptionShareSimple { pub fn create( validator_index: usize, - validator_private_key: &E::Fr, + validator_decryption_key: &E::Fr, private_key_share: &PrivateKeyShare, ciphertext: &Ciphertext, aad: &[u8], @@ -55,7 +55,7 @@ impl DecryptionShareSimple { ) -> Result> { check_ciphertext_validity::(ciphertext, aad, g_inv)?; - // C_i = e(U, Z_i) + // D_i = e(U, Z_i) let decryption_share = E::pairing( ciphertext.commitment, private_key_share.private_key_share, @@ -64,7 +64,7 @@ impl DecryptionShareSimple { // C_i = dk_i^{-1} * U let validator_checksum = ciphertext .commitment - .mul(validator_private_key.inverse().unwrap()) + .mul(validator_decryption_key.inverse().unwrap()) .into_affine(); Ok(DecryptionShareSimple { diff --git a/tpke/src/key_share.rs b/tpke/src/key_share.rs index 1ca1473c..ae8ff8fd 100644 --- a/tpke/src/key_share.rs +++ b/tpke/src/key_share.rs @@ -52,7 +52,7 @@ impl BlindedKeyShare { } } -#[derive(Debug, Clone)] +#[derive(Debug, Clone, PartialEq)] pub struct PrivateKeyShare { pub private_key_share: E::G2Affine, } diff --git a/tpke/src/lib.rs 
b/tpke/src/lib.rs index 724602a0..6d8a8fa1 100644 --- a/tpke/src/lib.rs +++ b/tpke/src/lib.rs @@ -9,7 +9,7 @@ use ark_poly::{ use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; use itertools::izip; -use subproductdomain::SubproductDomain; +use subproductdomain::{fast_multiexp, SubproductDomain}; use rand_core::RngCore; use std::usize; @@ -104,29 +104,13 @@ pub fn setup_fast( // `evals` are evaluations of the polynomial f over the domain, omega: f(ω_j) for ω_j in Ω let evals = threshold_poly.evaluate_over_domain_by_ref(fft_domain); - let mut domain_points = Vec::with_capacity(shares_num); - let mut point = E::Fr::one(); - let mut domain_points_inv = Vec::with_capacity(shares_num); - let mut point_inv = E::Fr::one(); - - for _ in 0..shares_num { - domain_points.push(point); // 1, t, t^2, t^3, ...; where t is a scalar generator fft_domain.group_gen - point *= fft_domain.group_gen; - domain_points_inv.push(point_inv); - point_inv *= fft_domain.group_gen_inv; - } - - let scalar_bits = E::Fr::size_in_bits(); - // A - public key shares of participants - let pubkey_shares = - subproductdomain::fast_multiexp(&evals.evals, g.into_projective()); + let pubkey_shares = fast_multiexp(&evals.evals, g.into_projective()); let pubkey_share = g.mul(evals.evals[0]); debug_assert!(pubkey_shares[0] == E::G1Affine::from(pubkey_share)); // Y, but only when b = 1 - private key shares of participants - let privkey_shares = - subproductdomain::fast_multiexp(&evals.evals, h.into_projective()); + let privkey_shares = fast_multiexp(&evals.evals, h.into_projective()); // a_0 let x = threshold_poly.coeffs[0]; @@ -135,6 +119,18 @@ pub fn setup_fast( let pubkey = g.mul(x); let privkey = h.mul(x); + let mut domain_points = Vec::with_capacity(shares_num); + let mut point = E::Fr::one(); + let mut domain_points_inv = Vec::with_capacity(shares_num); + let mut point_inv = E::Fr::one(); + + for _ in 0..shares_num { + domain_points.push(point); // 1, t, t^2, t^3, ...; where t is a scalar generator fft_domain.group_gen + point *= fft_domain.group_gen; + domain_points_inv.push(point_inv); + point_inv *= fft_domain.group_gen_inv; + } + let mut private_contexts = vec![]; let mut public_contexts = vec![]; @@ -159,12 +155,12 @@ pub fn setup_fast( b, b_inv: b.inverse().unwrap(), g, + h_inv: E::G2Prepared::from(-h), g_inv: E::G1Prepared::from(-g), h, }, private_key_share, public_decryption_contexts: vec![], - scalar_bits, }); public_contexts.push(PublicDecryptionContextFast:: { domain: *domain, @@ -208,14 +204,12 @@ pub fn setup_simple( let shares_x = fft_domain.elements().collect::>(); // A - public key shares of participants - let pubkey_shares = - subproductdomain::fast_multiexp(&evals.evals, g.into_projective()); + let pubkey_shares = fast_multiexp(&evals.evals, g.into_projective()); let pubkey_share = g.mul(evals.evals[0]); assert!(pubkey_shares[0] == E::G1Affine::from(pubkey_share)); // Y, but only when b = 1 - private key shares of participants - let privkey_shares = - subproductdomain::fast_multiexp(&evals.evals, h.into_projective()); + let privkey_shares = fast_multiexp(&evals.evals, h.into_projective()); // a_0 let x = threshold_poly.coeffs[0]; @@ -245,6 +239,7 @@ pub fn setup_simple( b, b_inv: b.inverse().unwrap(), g, + h_inv: E::G2Prepared::from(-h), g_inv: E::G1Prepared::from(-g), h, }, @@ -272,12 +267,13 @@ pub fn setup_simple( #[cfg(test)] mod tests { use crate::*; - use ark_bls12_381::{Fr, G1Affine}; + use ark_bls12_381::Fr; use ark_ec::ProjectiveCurve; use ark_ff::BigInteger256; use ark_std::test_rng; use 
itertools::Itertools; use rand::prelude::StdRng; + use std::collections::HashMap; use std::ops::Mul; type E = ark_bls12_381::Bls12_381; @@ -486,15 +482,10 @@ mod tests { .map(|c| c.create_share(&ciphertext, aad).unwrap()) .collect(); - let domain = contexts[0] - .public_decryption_contexts - .iter() - .map(|c| c.domain) - .collect::>(); - let lagrange = prepare_combine_simple::(&domain); - - let shared_secret = - share_combine_simple::(&decryption_shares, &lagrange); + let shared_secret = make_shared_secret( + &contexts[0].public_decryption_contexts, + &decryption_shares, + ); test_ciphertext_validation_fails( msg, @@ -576,7 +567,7 @@ mod tests { assert!(verify_decryption_shares_simple( pub_contexts, &ciphertext, - &decryption_shares + &decryption_shares, )); // Now, let's test that verification fails if we one of the decryption shares is invalid. @@ -606,9 +597,9 @@ mod tests { )); } - #[test] /// Ñ parties (where t <= Ñ <= N) jointly execute a "share recovery" algorithm, and the output is 1 new share. /// The new share is intended to restore a previously existing share, e.g., due to loss or corruption. + #[test] fn simple_threshold_decryption_with_share_recovery_at_selected_point() { let rng = &mut test_rng(); let shares_num = 16; @@ -626,23 +617,66 @@ mod tests { .last() .unwrap() .domain; - let original_y_r = - selected_participant.private_key_share.private_key_share; + let original_private_key_share = selected_participant.private_key_share; - // Now, we have to remove the participant from the contexts and all nested structures + // Remove one participant from the contexts and all nested structures let mut remaining_participants = contexts; for p in &mut remaining_participants { - p.public_decryption_contexts.pop(); + p.public_decryption_contexts.pop().unwrap(); } - // Recover the share - let y_r = recover_share_at_point( - &remaining_participants, - threshold, + // Each participant prepares an update for each other participant + let domain_points = remaining_participants[0] + .public_decryption_contexts + .iter() + .map(|c| c.domain) + .collect::>(); + let h = remaining_participants[0].public_decryption_contexts[0].h; + let share_updates = remaining_participants + .iter() + .map(|p| { + let deltas_i = prepare_share_updates_for_recovery::( + &domain_points, + &h, + &x_r, + threshold, + rng, + ); + (p.index, deltas_i) + }) + .collect::>(); + + // Participants share updates and update their shares + let new_share_fragments: Vec<_> = remaining_participants + .iter() + .map(|p| { + // Current participant receives updates from other participants + let updates_for_participant: Vec<_> = share_updates + .values() + .map(|updates| *updates.get(p.index).unwrap()) + .collect(); + + // And updates their share + update_share_for_recovery::( + &p.private_key_share, + &updates_for_participant, + ) + }) + .collect(); + + // Now, we have to combine new share fragments into a new share + let domain_points = &remaining_participants[0] + .public_decryption_contexts + .iter() + .map(|ctxt| ctxt.domain) + .collect::>(); + let new_private_key_share = recover_share_from_updated_private_shares( &x_r, - rng, + domain_points, + &new_share_fragments, ); - assert_eq!(y_r.into_affine(), original_y_r); + + assert_eq!(new_private_key_share, original_private_key_share); } fn make_shared_secret_from_contexts( @@ -670,19 +704,9 @@ mod tests { share_combine_simple::(decryption_shares, &lagrange) } - fn make_decryption_share( - private_share: &PrivateKeyShare, - ciphertext: &Ciphertext, - ) -> E::Fqk { - let z_i = 
private_share; - let u = ciphertext.commitment; - let z_i = z_i.private_key_share; - E::pairing(u, z_i) - } - - #[test] /// Ñ parties (where t <= Ñ <= N) jointly execute a "share recovery" algorithm, and the output is 1 new share. /// The new share is independent from the previously existing shares. We can use this to on-board a new participant into an existing cohort. + #[test] fn simple_threshold_decryption_with_share_recovery_at_random_point() { let rng = &mut test_rng(); let shares_num = 16; @@ -705,6 +729,9 @@ mod tests { // Now, we're going to recover a new share at a random point and check that the shared secret is still the same + // Our random point + let x_r = Fr::rand(rng); + // Remove one participant from the contexts and all nested structures let mut remaining_participants = contexts.clone(); let removed_participant = remaining_participants.pop().unwrap(); @@ -712,32 +739,77 @@ mod tests { p.public_decryption_contexts.pop().unwrap(); } - // Recover the share - let x_r = Fr::rand(rng); - let y_r = recover_share_at_point( - &remaining_participants, - threshold, + // Each participant prepares an update for each other participant + let domain_points = remaining_participants[0] + .public_decryption_contexts + .iter() + .map(|c| c.domain) + .collect::>(); + let h = remaining_participants[0].public_decryption_contexts[0].h; + let share_updates = remaining_participants + .iter() + .map(|p| { + let deltas_i = prepare_share_updates_for_recovery::( + &domain_points, + &h, + &x_r, + threshold, + rng, + ); + (p.index, deltas_i) + }) + .collect::>(); + + // Participants share updates and update their shares + let new_share_fragments: Vec<_> = remaining_participants + .iter() + .map(|p| { + // Current participant receives updates from other participants + let updates_for_participant: Vec<_> = share_updates + .values() + .map(|updates| *updates.get(p.index).unwrap()) + .collect(); + + // And updates their share + update_share_for_recovery::( + &p.private_key_share, + &updates_for_participant, + ) + }) + .collect(); + + // Now, we have to combine new share fragments into a new share + let domain_points = &remaining_participants[0] + .public_decryption_contexts + .iter() + .map(|ctxt| ctxt.domain) + .collect::>(); + let new_private_key_share = recover_share_from_updated_private_shares( &x_r, - rng, + domain_points, + &new_share_fragments, ); - let recovered_key_share = PrivateKeyShare { - private_key_share: y_r.into_affine(), - }; - // Creating decryption shares + // Get decryption shares from remaining participants let mut decryption_shares: Vec<_> = remaining_participants .iter() .map(|c| c.create_share(&ciphertext, aad).unwrap()) .collect(); - decryption_shares.push(DecryptionShareSimple { - decrypter_index: removed_participant.index, - decryption_share: make_decryption_share( - &recovered_key_share, + + // Create a decryption share from a recovered private key share + let new_validator_decryption_key = Fr::rand(rng); + let validator_index = removed_participant.index; + decryption_shares.push( + DecryptionShareSimple::create( + validator_index, + &new_validator_decryption_key, + &new_private_key_share, &ciphertext, - ), - // TODO: Implement a method to make a proper decryption share after refreshing - validator_checksum: G1Affine::zero(), - }); + aad, + g_inv, + ) + .unwrap(), + ); // Creating a shared secret from remaining shares and the recovered one let new_shared_secret = make_shared_secret( @@ -775,25 +847,33 @@ mod tests { // Now, we're going to refresh the shares and check that the 
shared secret is the same - // Refresh shares - let new_shares = refresh_shares::(&contexts, threshold, rng); + // Dealer computes a new random polynomial with constant term x_r + let polynomial = + make_random_polynomial_at::(threshold, &Fr::zero(), rng); + + // Dealer shares the polynomial with participants - // Creating new decryption shares - let new_decryption_shares: Vec<_> = new_shares + // Participants computes new decryption shares + let new_decryption_shares: Vec<_> = contexts .iter() .enumerate() - .map(|(decrypter_index, private_share)| { - let private_share = PrivateKeyShare { - private_key_share: private_share.into_affine(), - }; - let decryption_share = - make_decryption_share(&private_share, &ciphertext); - DecryptionShareSimple { - decrypter_index, - decryption_share, - // TODO: Implement a method to make a proper decryption share after refreshing - validator_checksum: G1Affine::zero(), - } + .map(|(i, p)| { + // Participant computes share updates and update their private key shares + let private_key_share = refresh_private_key_share::( + &p.setup_params.h.into_projective(), + &p.public_decryption_contexts[i].domain, + &polynomial, + &p.private_key_share, + ); + DecryptionShareSimple::create( + p.index, + &p.validator_private_key, + &private_key_share, + &ciphertext, + aad, + g_inv, + ) + .unwrap() }) .collect(); diff --git a/tpke/src/refresh.rs b/tpke/src/refresh.rs index 66821ea4..fffa88ad 100644 --- a/tpke/src/refresh.rs +++ b/tpke/src/refresh.rs @@ -1,87 +1,63 @@ -use crate::{lagrange_basis_at, PrivateDecryptionContextSimple}; -use ark_ec::{PairingEngine, ProjectiveCurve}; +use crate::{lagrange_basis_at, PrivateKeyShare}; +use ark_ec::{AffineCurve, PairingEngine, ProjectiveCurve}; use ark_ff::{PrimeField, Zero}; use ark_poly::{univariate::DensePolynomial, Polynomial, UVPolynomial}; use itertools::zip_eq; -use rand::prelude::StdRng; use rand_core::RngCore; -use std::collections::HashMap; -use std::usize; - -pub fn recover_share_at_point( - other_participants: &[PrivateDecryptionContextSimple], - threshold: usize, - x_r: &E::Fr, - rng: &mut StdRng, -) -> E::G2Projective { - let share_updates = prepare_share_updates_for_recovery::( - other_participants, - x_r, - threshold, - rng, - ); - - let new_shares_y = - update_shares_for_recovery::(other_participants, &share_updates); - // From the PSS paper, section 4.2.4, (https://link.springer.com/content/pdf/10.1007/3-540-44750-4_27.pdf) - // Interpolate new shares to recover y_r - let shares_x = &other_participants[0] - .public_decryption_contexts - .iter() - .map(|ctxt| ctxt.domain) - .collect::>(); - - // Recover y_r - let lagrange = lagrange_basis_at::(shares_x, x_r); - let prods = - zip_eq(new_shares_y, lagrange).map(|(y_j, l)| y_j.mul(l.into_repr())); - prods.fold(E::G2Projective::zero(), |acc, y_j| acc + y_j) -} +use std::usize; /// From PSS paper, section 4.2.1, (https://link.springer.com/content/pdf/10.1007/3-540-44750-4_27.pdf) -fn prepare_share_updates_for_recovery( - participants: &[PrivateDecryptionContextSimple], +pub fn prepare_share_updates_for_recovery( + domain_points: &[E::Fr], + h: &E::G2Affine, x_r: &E::Fr, threshold: usize, rng: &mut impl RngCore, -) -> HashMap> { - // TODO: Refactor this function so that each participant performs it individually - // Each participant prepares an update for each other participant - participants - .iter() - .map(|p1| { - let i = p1.index; - // Generate a new random polynomial with constant term x_r - let d_i = make_random_polynomial_at::(threshold, x_r, rng); +) -> Vec { + 
// Generate a new random polynomial with constant term x_r + let d_i = make_random_polynomial_at::(threshold, x_r, rng); - // Now, we need to evaluate the polynomial at each of participants' indices - let deltas_i: HashMap<_, _> = - compute_polynomial_deltas::(participants, &d_i); - (i, deltas_i) + // Now, we need to evaluate the polynomial at each of participants' indices + domain_points + .iter() + .map(|x_i| { + let eval = d_i.evaluate(x_i); + h.mul(eval.into_repr()) }) - .collect::>() + .collect() } /// From PSS paper, section 4.2.3, (https://link.springer.com/content/pdf/10.1007/3-540-44750-4_27.pdf) -fn update_shares_for_recovery( - participants: &[PrivateDecryptionContextSimple], - deltas: &HashMap>, -) -> Vec { - // TODO: Refactor this function so that each participant performs it individually - participants +pub fn update_share_for_recovery( + private_key_share: &PrivateKeyShare, + share_updates: &[E::G2Projective], +) -> PrivateKeyShare { + let private_key_share = share_updates .iter() - .map(|p| { - let i = p.index; - let mut new_y = E::G2Projective::from( - p.private_key_share.private_key_share, // y_i - ); - for j in deltas.keys() { - new_y += deltas[j][&i]; - } - new_y - }) - .collect() + .fold( + private_key_share.private_key_share.into_projective(), + |acc, delta| acc + delta, + ) + .into_affine(); + PrivateKeyShare { private_key_share } +} + +/// From the PSS paper, section 4.2.4, (https://link.springer.com/content/pdf/10.1007/3-540-44750-4_27.pdf) +pub fn recover_share_from_updated_private_shares( + x_r: &E::Fr, + domain_points: &[E::Fr], + updated_private_shares: &[PrivateKeyShare], +) -> PrivateKeyShare { + // Interpolate new shares to recover y_r + let lagrange = lagrange_basis_at::(domain_points, x_r); + let prods = zip_eq(updated_private_shares, lagrange) + .map(|(y_j, l)| y_j.private_key_share.mul(l.into_repr())); + let y_r = prods.fold(E::G2Projective::zero(), |acc, y_j| acc + y_j); + + PrivateKeyShare { + private_key_share: y_r.into_affine(), + } } pub fn make_random_polynomial_at( @@ -106,49 +82,20 @@ pub fn make_random_polynomial_at( threshold_poly } -fn prepare_share_updates_for_refreshing( - participants: &[PrivateDecryptionContextSimple], - threshold: usize, - rng: &mut impl RngCore, -) -> HashMap { - let polynomial = - make_random_polynomial_at::(threshold, &E::Fr::zero(), rng); - compute_polynomial_deltas(participants, &polynomial) -} - -fn compute_polynomial_deltas( - participants: &[PrivateDecryptionContextSimple], +// TODO: Expose a method to create a proper decryption share after refreshing +pub fn refresh_private_key_share( + h: &E::G2Projective, + domain_point: &E::Fr, polynomial: &DensePolynomial, -) -> HashMap { - let h_g2 = E::G2Projective::from(participants[0].setup_params.h); - participants - .iter() - .map(|p| { - let i = p.index; - let x_i = p.public_decryption_contexts[i].domain; - let eval = polynomial.evaluate(&x_i); - let eval_g2 = h_g2.mul(eval.into_repr()); - (i, eval_g2) - }) - .collect::>() -} - -pub fn refresh_shares( - participants: &[PrivateDecryptionContextSimple], - threshold: usize, - rng: &mut impl RngCore, -) -> Vec { - let share_updates = - prepare_share_updates_for_refreshing::(participants, threshold, rng); - participants - .iter() - .map(|p| { - let i = p.index; - let mut new_y = E::G2Projective::from( - p.private_key_share.private_key_share, // y_i - ); - new_y += share_updates[&i]; - new_y - }) - .collect() + validator_private_key_share: &PrivateKeyShare, +) -> PrivateKeyShare { + let evaluated_polynomial = 
polynomial.evaluate(domain_point); + let share_update = h.mul(evaluated_polynomial.into_repr()); + let updated_share = validator_private_key_share + .private_key_share + .into_projective() + + share_update; + PrivateKeyShare { + private_key_share: updated_share.into_affine(), + } }
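
For orientation, the sketch below shows how the recovery and refresh primitives introduced in tpke/src/refresh.rs compose end to end, mirroring the test flow in ferveo/src/lib.rs and tpke/src/lib.rs. It is a minimal sketch, not part of the patch: it assumes the signatures shown in this diff (prepare_share_updates_for_recovery, update_share_for_recovery, recover_share_from_updated_private_shares, refresh_private_key_share, make_random_polynomial_at, PrivateKeyShare, all public in group_threshold_cryptography), and the helper names recover_at_point and refresh_all_shares, as well as domain_points, h, threshold and old_shares, are illustrative placeholders.

use std::collections::HashMap;

use ark_bls12_381::{Bls12_381 as E, Fr, G2Affine};
use ark_ec::AffineCurve;
use ark_ff::Zero;
use ark_std::test_rng;
use group_threshold_cryptography::{
    make_random_polynomial_at, prepare_share_updates_for_recovery,
    recover_share_from_updated_private_shares, refresh_private_key_share,
    update_share_for_recovery, PrivateKeyShare,
};

// Recovery: every remaining participant deals updates (PSS, section 4.2.1),
// applies the updates addressed to it (4.2.3), and the new share at `x_r`
// is interpolated from the updated shares (4.2.4).
fn recover_at_point(
    domain_points: &[Fr],
    h: &G2Affine,
    x_r: &Fr,
    threshold: usize,
    old_shares: &[PrivateKeyShare<E>],
) -> PrivateKeyShare<E> {
    // In practice each participant uses its own RNG; test_rng() keeps the sketch short.
    let rng = &mut test_rng();

    // Each participant deals one update vector, keyed by dealer index.
    let share_updates: HashMap<usize, _> = (0..old_shares.len())
        .map(|dealer| {
            let deltas = prepare_share_updates_for_recovery::<E>(
                domain_points, h, x_r, threshold, rng,
            );
            (dealer, deltas)
        })
        .collect();

    // Each participant folds the updates addressed to it into its own share.
    let updated_shares: Vec<_> = old_shares
        .iter()
        .enumerate()
        .map(|(i, share)| {
            let updates_for_i: Vec<_> =
                share_updates.values().map(|deltas| deltas[i]).collect();
            update_share_for_recovery::<E>(share, &updates_for_i)
        })
        .collect();

    // Interpolate the updated shares at x_r to obtain the recovered share.
    recover_share_from_updated_private_shares(x_r, domain_points, &updated_shares)
}

// Refreshing: a dealer samples one polynomial with constant term zero and every
// participant re-randomizes its share locally; the shared secret is unchanged.
fn refresh_all_shares(
    domain_points: &[Fr],
    h: &G2Affine,
    threshold: usize,
    old_shares: &[PrivateKeyShare<E>],
) -> Vec<PrivateKeyShare<E>> {
    let rng = &mut test_rng();

    // One dealer polynomial with constant term zero, shared with all participants.
    let polynomial = make_random_polynomial_at::<E>(threshold, &Fr::zero(), rng);

    old_shares
        .iter()
        .zip(domain_points.iter())
        .map(|(share, domain_point)| {
            refresh_private_key_share::<E>(
                &h.into_projective(),
                domain_point,
                &polynomial,
                share,
            )
        })
        .collect()
}

In the DKG setting, PubliclyVerifiableSS::update_private_key_share_for_recovery and PubliclyVerifiableSS::refresh_decryption_share wrap these same steps after first decrypting the validator's private key share from the aggregated transcript via decrypt_private_key_share.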