Skip to content

Commit

Permalink
address initial low hanging fruit comments
Browse files Browse the repository at this point in the history
  • Loading branch information
kevaundray committed Sep 5, 2024
1 parent e043452 commit e90337c
Show file tree
Hide file tree
Showing 12 changed files with 56 additions and 32 deletions.
4 changes: 3 additions & 1 deletion cryptography/erasure_codes/src/reed_solomon.rs
Original file line number Diff line number Diff line change
Expand Up @@ -75,12 +75,14 @@ impl ReedSolomon {

let num_blocks = evaluation_size / block_size;

let block_size_domain = Domain::new(block_size);

Self {
poly_len,
evaluation_domain,
expansion_factor,
block_size,
block_size_domain: Domain::new(block_size),
block_size_domain,
num_blocks,
}
}
Expand Down
2 changes: 1 addition & 1 deletion cryptography/kzg_multi_open/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

## Overview

This crate provides a Rust API for the [FK20](https://github.com/khovratovich/Kate/blob/master/Kate_amortized.pdf) polynomial commitment scheme. FK20 allows you to commit to a polynomial over some field with prime characteristics, and later on reveal multiple evaluations of that polynomial, along with an (opening) proof that attests to the correctness of those evaluations.
This crate provides a Rust API for the [FK20](https://github.com/khovratovich/Kate/blob/master/Kate_amortized.pdf) polynomial commitment scheme. FK20 allows you to commit to a polynomial over some field with prime order, and later on reveal multiple evaluations of that polynomial, along with an (opening) proof that attests to the correctness of those evaluations.

The API is opinionated; although it is generic, it does not support every use case. It has been made with the Ethereum Data Availability Sampling vision in mind. One can see that for example, we allow evaluations over particular cosets, where the order of the elements in each coset and the order of the cosets themselves are fixed. (Even though we test internally with permutations of the cosets)

Expand Down
10 changes: 9 additions & 1 deletion cryptography/kzg_multi_open/src/fk20/cosets.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,12 @@
use bls12_381::Scalar;
use polynomial::domain::Domain;

/// Reverses the least significant `bits` of the given number `n`.
///
/// `n` - The input number whose bits are to be reversed.
/// `bits` - The number of least significant bits to reverse.
///
/// Returns a new `usize` with the specified number of bits reversed.
pub(crate) fn reverse_bits(n: usize, bits: u32) -> usize {
let mut n = n;
let mut r = 0;
Expand Down Expand Up @@ -38,7 +44,9 @@ pub fn reverse_bit_order<T>(a: &mut [T]) {
/// - num_points denotes how many points we want to open the polynomial at.
/// - num_cosets denotes how many cosets we want to generate, analogously how many proofs we want to produce.
///
/// Setting bit_reversed to true will generate the cosets in bit-reversed order.
/// Returns a `Vec<Scalar>` containing the generated coset elements with length `num_cosets`.
///
/// Note: Setting bit_reversed to true will generate the cosets in bit-reversed order.
pub fn coset_gens(num_points: usize, num_cosets: usize, bit_reversed: bool) -> Vec<Scalar> {
use bls12_381::ff::Field;

Expand Down
8 changes: 4 additions & 4 deletions cryptography/kzg_multi_open/src/fk20/verifier.rs
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ impl FK20Verifier {
// Compute random challenges for batching the opening together.
//
// We compute one challenge `r` using fiat-shamir and the rest are powers of `r`
// This is safe since 1, X, X^2, ..., X^n of a variable X are linearly independent (ie there is no non-trivial linear combination that equals zero)
// This is safe because of the Schwartz-Zippel Lemma.
let r = compute_fiat_shamir_challenge(
&self.opening_key,
deduplicated_commitments,
Expand Down Expand Up @@ -186,10 +186,10 @@ impl FK20Verifier {
// to be `deduplicated_commitments.len()`.
//
// This only panics, if `deduplicated_commitments.len()` != `weights.len()`
let comm_random_sum_commitments = g1_lincomb(deduplicated_commitments, &weights)
let random_sum_commitments = g1_lincomb(deduplicated_commitments, &weights)
.expect("number of row_commitments and number of weights should be the same");

// Compute a random linear combination of the interpolation polynomials
// Linearly combine the interpolation polynomials using the same randomness `r`
let mut random_sum_interpolation_poly = Vec::new();
let coset_evals = bit_reversed_coset_evals.to_vec();
for (k, mut coset_eval) in coset_evals.into_iter().enumerate() {
Expand Down Expand Up @@ -230,7 +230,7 @@ impl FK20Verifier {
.expect("number of proofs and number of weighted_r_powers should be the same");

// This is `rl` in the specs.
let pairing_input_g1 = (comm_random_sum_commitments - comm_random_sum_interpolation_poly)
let pairing_input_g1 = (random_sum_commitments - comm_random_sum_interpolation_poly)
+ random_weighted_sum_proofs;

let normalized_vectors = g1_batch_normalize(&[comm_random_sum_proofs, pairing_input_g1]);
Expand Down
2 changes: 1 addition & 1 deletion cryptography/kzg_multi_open/src/opening_key.rs
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ impl OpeningKey {
pub fn commit_g2(&self, polynomial: &[Scalar]) -> G2Projective {
assert!(self.g2s.len() >= polynomial.len());
g2_lincomb(&self.g2s[0..polynomial.len()], polynomial)
.expect("number of g1 points is equal to the number of coefficients in the polynomial")
.expect("number of g2 points is equal to the number of coefficients in the polynomial")
}

/// Commit to a polynomial in monomial form using the G1 group elements
Expand Down
4 changes: 1 addition & 3 deletions cryptography/polynomial/src/monomial.rs
Original file line number Diff line number Diff line change
Expand Up @@ -53,21 +53,19 @@ pub fn poly_eval(poly: &PolyCoeff, value: &Scalar) -> Scalar {
/// the result of `f(x) * g(x)` and returns the result.
pub fn poly_mul(a: PolyCoeff, b: PolyCoeff) -> PolyCoeff {
let mut result = vec![Scalar::ZERO; a.len() + b.len() - 1];

for (i, a_coeff) in a.iter().enumerate() {
for (j, b_coeff) in b.iter().enumerate() {
result[i + j] += a_coeff * b_coeff;
}
}

result
}

/// Given a list of points, this method will compute the polynomial
/// Z(x) which is equal to zero when evaluated at each point.
///
/// Example: vanishing_poly([1, 2, 3]) = (x - 1)(x - 2)(x - 3)
pub fn vanishing_poly(roots: &[Scalar]) -> Vec<Scalar> {
pub fn vanishing_poly(roots: &[Scalar]) -> PolyCoeff {
let mut poly = vec![Scalar::from(1u64)];
for root in roots {
poly = poly_mul(poly, vec![-root, Scalar::from(1u64)]);
Expand Down
8 changes: 4 additions & 4 deletions eip7594/src/constants.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ pub const BYTES_PER_FIELD_ELEMENT: usize = 32;

/// The number of field elements in a cell.
///
/// Note: This is user defined; modifying this value will change the number of proofs produced,
/// Note: Modifying this value will change the number of proofs produced,
/// the proof generation time and the time it takes to verify a proof.
///
/// Note: This value must be a power of two between 1 and 64. The greatest value is 64 because there
Expand All @@ -29,12 +29,12 @@ pub const BYTES_PER_CELL: usize = FIELD_ELEMENTS_PER_CELL * BYTES_PER_FIELD_ELEM

/// The factor by which we extend a blob.
///
/// Note: This is user defined; modifying this will change the number of proofs produced,
/// Note: Modifying this will change the number of proofs produced,
/// proof generation time and the rate of the reed-solomon code.
pub const EXTENSION_FACTOR: usize = 2;
pub const EXPANSION_FACTOR: usize = 2;

/// The number of field elements needed to represent an extended blob.
pub const FIELD_ELEMENTS_PER_EXT_BLOB: usize = EXTENSION_FACTOR * FIELD_ELEMENTS_PER_BLOB;
pub const FIELD_ELEMENTS_PER_EXT_BLOB: usize = EXPANSION_FACTOR * FIELD_ELEMENTS_PER_BLOB;

/// The number of cells in an extension blob.
///
Expand Down
6 changes: 5 additions & 1 deletion eip7594/src/errors.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
use crate::CellIndex;
use erasure_codes::errors::RSError;

/// Errors that can occur either during proving or verification.
/// Errors that can occur either during proving, verification or serialization.
#[derive(Debug)]
pub enum Error {
Prover(ProverError),
Expand All @@ -10,6 +10,10 @@ pub enum Error {
}

impl Error {
/// Returns true if the reason for the error was due to a proof failing verification.
///
/// Note: In practice, this distinction is not meaningful for the caller and is mainly
/// here due to the specs and spec tests making this distinction.
pub fn invalid_proof(&self) -> bool {
let verifier_error = match self {
Error::Verifier(verifier_err) => verifier_err,
Expand Down
5 changes: 1 addition & 4 deletions eip7594/src/prover.rs
Original file line number Diff line number Diff line change
Expand Up @@ -105,12 +105,9 @@ impl DASContext {
/// Recovers the cells and computes the KZG proofs, given a subset of cells.
///
/// Use erasure decoding to recover the polynomial corresponding to the cells
/// that were generated from KZG multi point prover.
/// that were provided as input.
///
/// The matching function in the specs is: https://github.com/ethereum/consensus-specs/blob/13ac373a2c284dc66b48ddd2ef0a10537e4e0de6/specs/_features/eip7594/polynomial-commitments-sampling.md#recover_cells_and_kzg_proofs
///
// Note: The fact that we recover the polynomial for the bit-reversed version of the blob
// is irrelevant.
pub fn recover_cells_and_kzg_proofs(
&self,
cell_indices: Vec<CellIndex>,
Expand Down
6 changes: 4 additions & 2 deletions eip7594/src/serialization.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ fn deserialize_bytes_to_scalars(bytes: &[u8]) -> Result<Vec<Scalar>, Serializati

let mut scalars = Vec::with_capacity(bytes32s.len());
for bytes32 in bytes32s {
scalars.push(deserialize_scalar(bytes32)?)
scalars.push(deserialize_bytes_to_scalar(bytes32)?)
}
Ok(scalars)
}
Expand All @@ -44,7 +44,9 @@ pub(crate) fn deserialize_cell_to_scalars(
deserialize_bytes_to_scalars(cell_bytes)
}

pub(crate) fn deserialize_scalar(scalar_bytes: &[u8]) -> Result<Scalar, SerializationError> {
pub(crate) fn deserialize_bytes_to_scalar(
scalar_bytes: &[u8],
) -> Result<Scalar, SerializationError> {
let bytes32 = scalar_bytes.try_into().expect("infallible: expected blob chunks to be exactly {SCALAR_SERIALIZED_SIZE} bytes, since blob was a multiple of {SCALAR_SERIALIZED_SIZE}");

// Convert the CtOption into Option
Expand Down
16 changes: 16 additions & 0 deletions eip7594/src/trusted_setup.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,24 @@ const TRUSTED_SETUP_JSON: &str = include_str!("../data/trusted_setup_4096.json")

#[derive(Deserialize, Debug, PartialEq, Eq)]
pub struct TrustedSetup {
/// G1_Monomial represents a list of uncompressed
/// hex encoded group elements in the G1 group on the bls12-381 curve.
///
/// Ethereum has multiple trusted setups, however the one being
/// used currently contains 4096 G1 elements.
pub g1_monomial: Vec<String>,
/// G1_Lagrange represents a list of uncompressed
/// hex encoded group elements in the G1 group on the bls12-381 curve.
///
/// These are related to G1_Monomial in that they are what one
/// would get if we did an inverse FFT on the G1_monomial elements.
///
/// The length of this vector is equal to the length of G1_Monomial.
pub g1_lagrange: Vec<String>,
/// G2_Monomial represents a list of uncompressed hex encoded
/// group elements in the G2 group on the bls12-381 curve.
///
/// The length of this vector is 65.
pub g2_monomial: Vec<String>,
}

Expand Down
17 changes: 7 additions & 10 deletions eip7594/src/verifier.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ pub use crate::errors::VerifierError;

use crate::{
constants::{
CELLS_PER_EXT_BLOB, EXTENSION_FACTOR, FIELD_ELEMENTS_PER_BLOB, FIELD_ELEMENTS_PER_EXT_BLOB,
CELLS_PER_EXT_BLOB, EXPANSION_FACTOR, FIELD_ELEMENTS_PER_BLOB, FIELD_ELEMENTS_PER_EXT_BLOB,
},
errors::Error,
serialization::{deserialize_cells, deserialize_compressed_g1_points},
Expand Down Expand Up @@ -39,7 +39,7 @@ impl VerifierContext {
VerifierContext {
rs: ReedSolomon::new(
FIELD_ELEMENTS_PER_BLOB,
EXTENSION_FACTOR,
EXPANSION_FACTOR,
CELLS_PER_EXT_BLOB,
),
kzg_multipoint_verifier: multipoint_verifier,
Expand Down Expand Up @@ -95,9 +95,6 @@ impl DASContext {
/// The matching function in the specs is: https://github.com/ethereum/consensus-specs/blob/13ac373a2c284dc66b48ddd2ef0a10537e4e0de6/specs/_features/eip7594/polynomial-commitments-sampling.md#verify_cell_kzg_proof_batch
pub fn verify_cell_kzg_proof_batch(
&self,
// This is a deduplicated list of row commitments
// It is not indicative of the total number of commitments in the batch.
// This is what row_indices is used for.
commitments: Vec<Bytes48Ref>,
cell_indices: Vec<CellIndex>,
cells: Vec<CellRef>,
Expand All @@ -123,7 +120,7 @@ impl DASContext {

// Deserialization
//
let row_commitment_ = deserialize_compressed_g1_points(deduplicated_commitments)?;
let row_commitments_ = deserialize_compressed_g1_points(deduplicated_commitments)?;
let proofs_ = deserialize_compressed_g1_points(proofs_bytes)?;
let coset_evals = deserialize_cells(cells)?;

Expand All @@ -133,7 +130,7 @@ impl DASContext {
.verifier_ctx
.kzg_multipoint_verifier
.verify_multi_opening(
&row_commitment_,
&row_commitments_,
&row_indices,
&cell_indices,
&coset_evals,
Expand Down Expand Up @@ -203,7 +200,7 @@ mod validation {
use kzg_multi_open::CommitmentIndex;

use crate::{
constants::{BYTES_PER_CELL, CELLS_PER_EXT_BLOB, EXTENSION_FACTOR},
constants::{BYTES_PER_CELL, CELLS_PER_EXT_BLOB, EXPANSION_FACTOR},
verifier::VerifierError,
Bytes48Ref, CellIndex, CellRef,
};
Expand Down Expand Up @@ -288,10 +285,10 @@ mod validation {
}

// Check that we have enough cells to perform a reconstruction
if cell_indices.len() < CELLS_PER_EXT_BLOB / EXTENSION_FACTOR {
if cell_indices.len() < CELLS_PER_EXT_BLOB / EXPANSION_FACTOR {
return Err(VerifierError::NotEnoughCellsToReconstruct {
num_cells_received: cell_indices.len(),
min_cells_needed: CELLS_PER_EXT_BLOB / EXTENSION_FACTOR,
min_cells_needed: CELLS_PER_EXT_BLOB / EXPANSION_FACTOR,
});
}

Expand Down

0 comments on commit e90337c

Please sign in to comment.