Deserialization fixes #735

Merged: 19 commits, Apr 3, 2022
30 changes: 22 additions & 8 deletions algorithms/src/merkle_tree/merkle_path.rs
@@ -18,7 +18,7 @@ use crate::{
errors::MerkleError,
traits::{MerkleParameters, CRH},
};
use snarkvm_utilities::{FromBytes, FromBytesDeserializer, ToBytes, ToBytesSerializer};
use snarkvm_utilities::{error, FromBytes, FromBytesDeserializer, ToBytes, ToBytesSerializer};

use anyhow::Result;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
@@ -124,17 +124,20 @@ impl<P: MerkleParameters> FromBytes for MerklePath<P> {
// is being introduced until a proper refactor can be discussed and implemented.
// If you are seeing this message, please be proactive in bringing it up :)
let parameters = {
let setup_message_length: u64 = FromBytes::read_le(&mut reader)?;
// Decode the setup message size.
let setup_message_length = u16::read_le(&mut reader)?;

let mut setup_message_bytes = vec![0u8; setup_message_length as usize];
reader.read_exact(&mut setup_message_bytes)?;
let setup_message =
String::from_utf8(setup_message_bytes).expect("Failed to parse setup message for Merkle parameters");
let setup_message = String::from_utf8(setup_message_bytes)
.map_err(|_| error("Failed to parse setup message for Merkle parameters"))?;

Arc::new(P::setup(&setup_message))
};

let path_length: u64 = FromBytes::read_le(&mut reader)?;
// Decode the Merkle path depth.
let path_length: u8 = FromBytes::read_le(&mut reader)?;

let mut path = Vec::with_capacity(path_length as usize);
for _ in 0..path_length {
path.push(FromBytes::read_le(&mut reader)?);
@@ -150,12 +153,23 @@ impl<P: MerkleParameters> ToBytes for MerklePath<P> {
#[inline]
fn write_le<W: Write>(&self, mut writer: W) -> IoResult<()> {
let setup_message_bytes: &[u8] = self.parameters.setup_message().as_bytes();
let setup_message_length: u64 = setup_message_bytes.len() as u64;

setup_message_length.write_le(&mut writer)?;
// Ensure the setup message size is within bounds.
if setup_message_bytes.len() > (u16::MAX as usize) {
return Err(error(format!("Merkle path setup message cannot exceed {} bytes", u16::MAX)));
}

// Encode the setup message.
(setup_message_bytes.len() as u16).write_le(&mut writer)?;
setup_message_bytes.write_le(&mut writer)?;

(self.path.len() as u64).write_le(&mut writer)?;
// Ensure the Merkle path length is within bounds.
if self.path.len() > (u8::MAX as usize) {
return Err(error(format!("Merkle path depth cannot exceed {}", u8::MAX)));
}

// Encode the Merkle path.
(self.path.len() as u8).write_le(&mut writer)?;
self.path.write_le(&mut writer)?;

self.leaf_index.write_le(&mut writer)
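Taken together, this hunk replaces two u64 length prefixes with a u16 (setup message size) and a u8 (path depth), adds bounds checks before the narrowing casts, and turns the UTF-8 `expect` into a recoverable error, trimming 13 bytes (6 + 7) from every serialized Merkle path. The sketch below shows the same pattern in isolation with made-up helper names; it is not the snarkvm_utilities API, just a minimal illustration over std::io.

use std::io::{self, Read, Write};

// Write `message` with a 2-byte (u16) little-endian length prefix, rejecting
// oversized input instead of silently truncating it in the `as u16` cast.
fn write_prefixed_message<W: Write>(message: &str, mut writer: W) -> io::Result<()> {
    let bytes = message.as_bytes();
    if bytes.len() > u16::MAX as usize {
        return Err(io::Error::new(io::ErrorKind::InvalidData, "message exceeds u16::MAX bytes"));
    }
    writer.write_all(&(bytes.len() as u16).to_le_bytes())?;
    writer.write_all(bytes)
}

// Read a message written by `write_prefixed_message`, surfacing invalid UTF-8
// as an error rather than panicking as the old `expect` did.
fn read_prefixed_message<R: Read>(mut reader: R) -> io::Result<String> {
    let mut len = [0u8; 2];
    reader.read_exact(&mut len)?;
    let mut buffer = vec![0u8; u16::from_le_bytes(len) as usize];
    reader.read_exact(&mut buffer)?;
    String::from_utf8(buffer).map_err(|_| io::Error::new(io::ErrorKind::InvalidData, "invalid UTF-8"))
}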
4 changes: 2 additions & 2 deletions dpc/src/block/block.rs
@@ -487,7 +487,7 @@ mod tests {
// Serialize
let expected_string = expected_block.to_string();
let candidate_string = serde_json::to_string(&expected_block).unwrap();
assert_eq!(4899, candidate_string.len(), "Update me if serialization has changed");
assert_eq!(4896, candidate_string.len(), "Update me if serialization has changed");
assert_eq!(expected_string, candidate_string);

// Deserialize
@@ -504,7 +504,7 @@
// Serialize
let expected_bytes = expected_block.to_bytes_le().unwrap();
let candidate_bytes = bincode::serialize(&expected_block).unwrap();
assert_eq!(2515, expected_bytes.len(), "Update me if serialization has changed");
assert_eq!(2505, expected_bytes.len(), "Update me if serialization has changed");
// TODO (howardwu): Serialization - Handle the inconsistency between ToBytes and Serialize (off by a length encoding).
assert_eq!(&expected_bytes[..], &candidate_bytes[8..]);

4 changes: 2 additions & 2 deletions dpc/src/block/template.rs
@@ -285,7 +285,7 @@ mod tests {
// Serialize
let expected_string = expected_template.to_string();
let candidate_string = serde_json::to_string(&expected_template).unwrap();
assert_eq!(3987, candidate_string.len(), "Update me if serialization has changed");
assert_eq!(3984, candidate_string.len(), "Update me if serialization has changed");
assert_eq!(expected_string, candidate_string);

// Deserialize
@@ -311,7 +311,7 @@
// Serialize
let expected_bytes = expected_template.to_bytes_le().unwrap();
let candidate_bytes = bincode::serialize(&expected_template).unwrap();
assert_eq!(1857, expected_bytes.len(), "Update me if serialization has changed");
assert_eq!(1847, expected_bytes.len(), "Update me if serialization has changed");
// TODO (howardwu): Serialization - Handle the inconsistency between ToBytes and Serialize (off by a length encoding).
assert_eq!(&expected_bytes[..], &candidate_bytes[8..]);

4 changes: 2 additions & 2 deletions dpc/src/block/transactions.rs
@@ -301,7 +301,7 @@ mod tests {
// Serialize
let expected_string = expected_transactions.to_string();
let candidate_string = serde_json::to_string(&expected_transactions).unwrap();
assert_eq!(3053, candidate_string.len(), "Update me if serialization has changed");
assert_eq!(3050, candidate_string.len(), "Update me if serialization has changed");
assert_eq!(expected_string, candidate_string);

// Deserialize
@@ -316,7 +316,7 @@
// Serialize
let expected_bytes = expected_transactions.to_bytes_le().unwrap();
let candidate_bytes = bincode::serialize(&expected_transactions).unwrap();
assert_eq!(1523, expected_bytes.len(), "Update me if serialization has changed");
assert_eq!(1513, expected_bytes.len(), "Update me if serialization has changed");
// TODO (howardwu): Serialization - Handle the inconsistency between ToBytes and Serialize (off by a length encoding).
assert_eq!(&expected_bytes[..], &candidate_bytes[8..]);

8 changes: 4 additions & 4 deletions dpc/src/circuits/tests.rs
@@ -90,8 +90,8 @@ fn dpc_execute_circuits_test<N: Network>(
<N as Network>::InputSNARK::setup(&InputCircuit::<N>::blank(), &mut SRS::CircuitSpecific(rng)).unwrap();

// Compute the input circuit proofs.
let mut input_proofs = Vec::with_capacity(N::MAX_NUM_INPUT_RECORDS);
let mut input_public_variables = Vec::with_capacity(N::MAX_NUM_INPUT_RECORDS);
let mut input_proofs = Vec::with_capacity(N::NUM_INPUTS as usize);
let mut input_public_variables = Vec::with_capacity(N::NUM_INPUTS as usize);
for (
((((record, serial_number), ledger_proof), signature), input_value_commitment),
input_value_commitment_randomness,
@@ -169,8 +169,8 @@
<N as Network>::OutputSNARK::setup(&OutputCircuit::<N>::blank(), &mut SRS::CircuitSpecific(rng)).unwrap();

// Compute the output circuit proofs.
let mut output_proofs = Vec::with_capacity(N::MAX_NUM_OUTPUT_RECORDS);
let mut output_public_variables = Vec::with_capacity(N::MAX_NUM_OUTPUT_RECORDS);
let mut output_proofs = Vec::with_capacity(N::NUM_OUTPUTS as usize);
let mut output_public_variables = Vec::with_capacity(N::NUM_OUTPUTS as usize);
for (
(((record, commitment), encryption_randomness), output_value_commitment),
output_value_commitment_randomness,
6 changes: 3 additions & 3 deletions dpc/src/network/testnet1.rs
@@ -79,8 +79,8 @@ impl Network for Testnet1 {
const NUM_TRANSITIONS: u8 = u8::pow(2, Self::TRANSACTION_TREE_DEPTH as u32);
const NUM_EVENTS: u16 = 2;

const MAX_NUM_INPUT_RECORDS: usize = 16;
const MAX_NUM_OUTPUT_RECORDS: usize = 16;
const NUM_INPUTS: u16 = 16;
const NUM_OUTPUTS: u16 = 16;

const BLOCK_HASH_PREFIX: u16 = hrp2!("ab");
const LEDGER_ROOT_PREFIX: u16 = hrp2!("al");
@@ -117,7 +117,7 @@
const HEADER_PROOF_SIZE_IN_BYTES: usize = 796;
const PROGRAM_PROOF_SIZE_IN_BYTES: usize = 193;
const PROGRAM_ID_SIZE_IN_BYTES: usize = 32;
const RECORD_CIPHERTEXT_SIZE_IN_BYTES: usize = 294;
const RECORD_CIPHERTEXT_SIZE_IN_BYTES: usize = 292;
const RECORD_PAYLOAD_SIZE_IN_BYTES: usize = 128;
const RECORD_VIEW_KEY_SIZE_IN_BYTES: usize = 32;
const SIGNATURE_SIZE_IN_BYTES: usize = 128;
6 changes: 3 additions & 3 deletions dpc/src/network/testnet2.rs
@@ -84,8 +84,8 @@ impl Network for Testnet2 {
const NUM_TRANSITIONS: u8 = u8::pow(2, Self::TRANSACTION_TREE_DEPTH as u32);
const NUM_EVENTS: u16 = 2;

const MAX_NUM_INPUT_RECORDS: usize = 16;
const MAX_NUM_OUTPUT_RECORDS: usize = 16;
const NUM_INPUTS: u16 = 16;
const NUM_OUTPUTS: u16 = 16;

const BLOCK_HASH_PREFIX: u16 = hrp2!("ab");
const LEDGER_ROOT_PREFIX: u16 = hrp2!("al");
@@ -122,7 +122,7 @@
const HEADER_PROOF_SIZE_IN_BYTES: usize = 796;
const PROGRAM_PROOF_SIZE_IN_BYTES: usize = 963;
const PROGRAM_ID_SIZE_IN_BYTES: usize = 32;
const RECORD_CIPHERTEXT_SIZE_IN_BYTES: usize = 294;
const RECORD_CIPHERTEXT_SIZE_IN_BYTES: usize = 292;
const RECORD_PAYLOAD_SIZE_IN_BYTES: usize = 128;
const RECORD_VIEW_KEY_SIZE_IN_BYTES: usize = 32;
const SIGNATURE_SIZE_IN_BYTES: usize = 128;
3 changes: 2 additions & 1 deletion dpc/src/posw/proof.rs
@@ -70,7 +70,8 @@ impl<N: Network> PoSWProof<N> {
match self {
Self::NonHiding(proof) => {
// Ensure the proof is valid.
if !<<N as Network>::PoSWSNARK as SNARK>::verify(verifying_key, &inputs.to_vec(), proof).unwrap() {
let check = <<N as Network>::PoSWSNARK as SNARK>::verify(verifying_key, &inputs.to_vec(), proof);
if check.is_err() || !check.unwrap() {
#[cfg(debug_assertions)]
eprintln!("PoSW proof verification failed");
return false;
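The old guard called `.unwrap()` on the verifier's `Result<bool, _>`, so a malformed proof would panic the caller instead of returning `false`; the new code folds `Err` and `Ok(false)` into the same failure path. A match expresses the same intent with no `unwrap` at all. This is a standalone sketch over a generic error type, not the snarkVM SNARK trait:

// Equivalent, unwrap-free shape of the guard above; `check` stands in for the
// result of <<N as Network>::PoSWSNARK as SNARK>::verify(..).
fn proof_is_valid<E>(check: Result<bool, E>) -> bool {
    match check {
        Ok(true) => true,
        // Treat a verifier error and a failed verification the same way.
        Ok(false) | Err(_) => {
            #[cfg(debug_assertions)]
            eprintln!("PoSW proof verification failed");
            false
        }
    }
}

Where no logging is needed, `check.unwrap_or(false)` collapses to the same boolean.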
13 changes: 10 additions & 3 deletions dpc/src/record/ciphertext.rs
@@ -16,7 +16,7 @@

use crate::{DecryptionKey, Network, RecordError, ViewKey};
use snarkvm_algorithms::traits::{EncryptionScheme, CRH};
use snarkvm_utilities::{io::Result as IoResult, to_bytes_le, FromBytes, Read, ToBytes, Write};
use snarkvm_utilities::{error, io::Result as IoResult, to_bytes_le, FromBytes, Read, ToBytes, Write};

use anyhow::{anyhow, Result};
use core::hash::{Hash, Hasher};
@@ -149,7 +149,9 @@ impl<N: Network> FromBytes for Ciphertext<N> {
let ciphertext_randomizer = N::RecordRandomizer::read_le(&mut reader)?;
let record_view_key_commitment = N::RecordViewKeyCommitment::read_le(&mut reader)?;

let num_elements: u32 = FromBytes::read_le(&mut reader)?;
// Decode the number of ciphertext elements.
let num_elements: u16 = FromBytes::read_le(&mut reader)?;

let mut record_elements = Vec::with_capacity(num_elements as usize);
for _ in 0..num_elements {
record_elements.push(FromBytes::read_le(&mut reader)?);
@@ -173,7 +175,12 @@ impl<N: Network> ToBytes for Ciphertext<N> {
self.randomizer.write_le(&mut writer)?;
self.record_view_key_commitment.write_le(&mut writer)?;

(self.record_elements.len() as u32).write_le(&mut writer)?;
// Ensure the number of elements is within bounds.
if self.record_elements.len() > (u16::MAX as usize) {
return Err(error(format!("The number of ciphertext elements cannot exceed {} elements", u16::MAX)));
}

(self.record_elements.len() as u16).write_le(&mut writer)?;
for element in &self.record_elements {
element.write_le(&mut writer)?;
}
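Here the element-count prefix shrinks from u32 (4 bytes) to u16 (2 bytes), which lines up with RECORD_CIPHERTEXT_SIZE_IN_BYTES dropping from 294 to 292 in both testnet configurations above. The explicit `len() > u16::MAX` comparison can also be written with `u16::try_from`, which turns the narrowing failure into a value rather than a branch. A minimal sketch, using io::Error in place of the snarkvm_utilities `error` helper:

use std::io::{self, Write};

// Bounds-checked u16 count prefix via try_from; an alternative to the explicit
// `len() > u16::MAX as usize` comparison used in the diff.
fn write_element_count<W: Write>(count: usize, mut writer: W) -> io::Result<()> {
    let count = u16::try_from(count)
        .map_err(|_| io::Error::new(io::ErrorKind::InvalidData, "element count exceeds u16::MAX"))?;
    writer.write_all(&count.to_le_bytes())
}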
4 changes: 2 additions & 2 deletions dpc/src/traits/network.rs
@@ -101,8 +101,8 @@ pub trait Network: 'static + Copy + Clone + Debug + Default + PartialEq + Eq + S
const NUM_TRANSITIONS: u8;
const NUM_EVENTS: u16;

const MAX_NUM_INPUT_RECORDS: usize;
const MAX_NUM_OUTPUT_RECORDS: usize;
const NUM_INPUTS: u16;
const NUM_OUTPUTS: u16;

const BLOCK_HASH_PREFIX: u16;
const LEDGER_ROOT_PREFIX: u16;
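MAX_NUM_INPUT_RECORDS and MAX_NUM_OUTPUT_RECORDS become NUM_INPUTS and NUM_OUTPUTS and change type from usize to u16, so the constants now have a fixed width independent of the target platform; the cost is an `as usize` cast wherever they feed collection APIs, as the call-site changes in this PR show. A standalone sketch of that trade-off, using a toy trait rather than the real Network trait:

// Toy trait mirroring the shape of the change; `NetworkLike` and `Toynet`
// are illustrative names only.
trait NetworkLike {
    const NUM_INPUTS: u16;
    const NUM_OUTPUTS: u16;
}

struct Toynet;

impl NetworkLike for Toynet {
    const NUM_INPUTS: u16 = 16;
    const NUM_OUTPUTS: u16 = 16;
}

// Collection APIs take `usize`, so the u16 constants are cast at the call site.
fn preallocate<N: NetworkLike>() -> (Vec<[u8; 32]>, Vec<[u8; 32]>) {
    (Vec::with_capacity(N::NUM_INPUTS as usize), Vec::with_capacity(N::NUM_OUTPUTS as usize))
}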
10 changes: 3 additions & 7 deletions dpc/src/transaction/request.rs
@@ -105,7 +105,7 @@ impl<N: Network> Request<N> {
return Err(anyhow!("There must be at least one record consumed."));
}

let mut signatures = Vec::with_capacity(N::MAX_NUM_INPUT_RECORDS);
let mut signatures = Vec::with_capacity(N::NUM_INPUTS as usize);
for record in records.iter() {
// Ensure the caller and record owner match.
if caller_address != record.owner() {
@@ -141,12 +141,8 @@
/// Returns `true` if the request signature is valid.
pub fn is_valid(&self) -> bool {
// Ensure the number of records is correct.
if self.records.len() > N::MAX_NUM_INPUT_RECORDS {
eprintln!(
"Incorrect number of request records. Maximum {}, found {}",
N::MAX_NUM_INPUT_RECORDS,
self.records.len()
);
if self.records.len() > (N::NUM_INPUTS as usize) {
eprintln!("Incorrect number of request records. Maximum {}, found {}", N::NUM_INPUTS, self.records.len());
return false;
}

4 changes: 2 additions & 2 deletions dpc/src/transaction/response.rs
@@ -91,7 +91,7 @@ impl<N: Network> Response<N> {

/// Returns the commitments.
pub fn commitments(&self) -> Vec<N::Commitment> {
self.records.iter().take(N::MAX_NUM_OUTPUT_RECORDS).map(Record::commitment).collect()
self.records.iter().take(N::NUM_OUTPUTS as usize).map(Record::commitment).collect()
}

/// Returns a reference to the records.
@@ -101,7 +101,7 @@

/// Returns the ciphertexts.
pub fn ciphertexts(&self) -> Vec<N::RecordCiphertext> {
self.records.iter().take(N::MAX_NUM_OUTPUT_RECORDS).map(Record::ciphertext).cloned().collect()
self.records.iter().take(N::NUM_OUTPUTS as usize).map(Record::ciphertext).cloned().collect()
}

/// Returns a reference to the encryption randomness.
21 changes: 14 additions & 7 deletions dpc/src/transaction/transaction.rs
@@ -29,6 +29,7 @@ use crate::{
VirtualMachine,
};
use snarkvm_utilities::{
error,
has_duplicates,
io::{Read, Result as IoResult, Write},
FromBytes,
@@ -119,7 +120,7 @@ impl<N: Network> Transaction<N> {
}

// Returns `false` if the number of serial numbers in the transaction is incorrect.
if self.serial_numbers().count() > num_transitions * N::MAX_NUM_INPUT_RECORDS {
if self.serial_numbers().count() > num_transitions * N::NUM_INPUTS as usize {
eprintln!("Transaction contains incorrect number of serial numbers");
return false;
}
@@ -131,7 +132,7 @@
}

// Returns `false` if the number of commitments in the transaction is incorrect.
if self.commitments().count() > num_transitions * N::MAX_NUM_OUTPUT_RECORDS {
if self.commitments().count() > num_transitions * N::NUM_OUTPUTS as usize {
eprintln!("Transaction contains incorrect number of commitments");
return false;
}
@@ -143,7 +144,7 @@
}

// Returns `false` if the number of record ciphertexts in the transaction is incorrect.
if self.ciphertexts().count() > num_transitions * N::MAX_NUM_OUTPUT_RECORDS {
if self.ciphertexts().count() > num_transitions * N::NUM_OUTPUTS as usize {
eprintln!("Transaction contains incorrect number of record ciphertexts");
return false;
}
@@ -348,8 +349,8 @@ impl<N: Network> FromBytes for Transaction<N> {
transitions.push(FromBytes::read_le(&mut reader)?);
}

Ok(Self::from(input_circuit_id, output_circuit_id, ledger_root, transitions)
.expect("Failed to deserialize a transaction"))
Self::from(input_circuit_id, output_circuit_id, ledger_root, transitions)
.map_err(|e| error(format!("Failed to deserialize a transaction: {e}")))
}
}

@@ -359,6 +360,12 @@ impl<N: Network> ToBytes for Transaction<N> {
self.input_circuit_id.write_le(&mut writer)?;
self.output_circuit_id.write_le(&mut writer)?;
self.ledger_root.write_le(&mut writer)?;

// Ensure the number of transitions is within bounds.
if self.transitions.len() > N::NUM_TRANSITIONS as usize {
return Err(error(format!("The number of transitions cannot exceed {}", N::NUM_TRANSITIONS)));
}

(self.transitions.len() as u16).write_le(&mut writer)?;
self.transitions.write_le(&mut writer)
}
@@ -491,7 +498,7 @@ mod tests {
// Serialize
let expected_string = expected_transaction.to_string();
let candidate_string = serde_json::to_string(&expected_transaction).unwrap();
assert_eq!(3022, candidate_string.len(), "Update me if serialization has changed");
assert_eq!(3019, candidate_string.len(), "Update me if serialization has changed");
assert_eq!(expected_string, candidate_string);

// Deserialize
@@ -511,7 +518,7 @@
// Serialize
let expected_bytes = expected_transaction.to_bytes_le().unwrap();
let candidate_bytes = bincode::serialize(&expected_transaction).unwrap();
assert_eq!(1521, expected_bytes.len(), "Update me if serialization has changed");
assert_eq!(1511, expected_bytes.len(), "Update me if serialization has changed");
// TODO (howardwu): Serialization - Handle the inconsistency between ToBytes and Serialize (off by a length encoding).
assert_eq!(&expected_bytes[..], &candidate_bytes[8..]);

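The byte-level assertion above compares against `candidate_bytes[8..]` because `bincode::serialize`, with its default configuration, writes a fixed 8-byte (u64) little-endian length before a byte payload, while `to_bytes_le` writes the raw bytes; the ToBytesSerializer import in merkle_path.rs suggests the non-human-readable Serialize path forwards to ToBytes, hence exactly one extra length encoding. A self-contained illustration of that offset, using a plain Vec<u8> rather than a Transaction:

// Illustration of bincode's default 8-byte length prefix; run with the
// bincode 1.x crate in scope, as the tests in this PR already do.
fn main() {
    let payload: Vec<u8> = vec![1, 2, 3, 4];
    let encoded = bincode::serialize(&payload).unwrap();

    // The first 8 bytes are the u64 length, the rest is the payload itself.
    let prefix = (payload.len() as u64).to_le_bytes();
    assert_eq!(&encoded[..8], &prefix[..]);
    assert_eq!(&encoded[8..], &payload[..]);
}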