Switch to alloy #16

Merged
merged 8 commits on Aug 15, 2024
Changes from 1 commit
4 changes: 2 additions & 2 deletions tree_hash/Cargo.toml
@@ -11,7 +11,7 @@ keywords = ["ethereum"]
categories = ["cryptography::cryptocurrencies"]

[dependencies]
-ethereum-types = "0.14.1"
+alloy-primitives = "0.7.0"
ethereum_hashing = "0.6.0"
smallvec = "1.6.1"

@@ -22,4 +22,4 @@ ethereum_ssz = "0.5"
ethereum_ssz_derive = "0.5"

[features]
arbitrary = ["ethereum-types/arbitrary"]
arbitrary = ["alloy-primitives/arbitrary"]
39 changes: 16 additions & 23 deletions tree_hash/src/impls.rs
@@ -1,5 +1,6 @@
+use alloy_primitives::{Address, B256, U128, U256};
+
use super::*;
-use ethereum_types::{H160, H256, U128, U256};
use std::sync::Arc;

fn int_to_hash256(int: u64) -> Hash256 {
@@ -109,19 +110,15 @@ impl TreeHash for U128 {
}

fn tree_hash_packed_encoding(&self) -> PackedEncoding {
-let mut result = [0; 16];
-self.to_little_endian(&mut result);
-PackedEncoding::from_slice(&result)
+PackedEncoding::from_slice(self.as_le_slice())
}

fn tree_hash_packing_factor() -> usize {
2
}

fn tree_hash_root(&self) -> Hash256 {
-let mut result = [0; HASHSIZE];
-self.to_little_endian(&mut result[0..16]);
-Hash256::from_slice(&result)
+Hash256::right_padding_from(self.as_le_slice())
}
}

@@ -131,30 +128,26 @@ impl TreeHash for U256 {
}

fn tree_hash_packed_encoding(&self) -> PackedEncoding {
-let mut result = [0; 32];
-self.to_little_endian(&mut result);
-PackedEncoding::from_slice(&result)
+PackedEncoding::from_slice(self.as_le_slice())
}

fn tree_hash_packing_factor() -> usize {
1
}

fn tree_hash_root(&self) -> Hash256 {
-let mut result = [0; 32];
-self.to_little_endian(&mut result[..]);
-Hash256::from_slice(&result)
+Hash256::from_slice(self.as_le_slice())
}
}

-impl TreeHash for H160 {
+impl TreeHash for Address {
fn tree_hash_type() -> TreeHashType {
TreeHashType::Vector
}

fn tree_hash_packed_encoding(&self) -> PackedEncoding {
let mut result = [0; 32];
-result[0..20].copy_from_slice(self.as_bytes());
+result[0..20].copy_from_slice(self.as_slice());
PackedEncoding::from_slice(&result)
}

@@ -164,18 +157,18 @@ impl TreeHash for H160 {

fn tree_hash_root(&self) -> Hash256 {
let mut result = [0; 32];
-result[0..20].copy_from_slice(self.as_bytes());
+result[0..20].copy_from_slice(self.as_slice());
Hash256::from_slice(&result)
}
}

-impl TreeHash for H256 {
+impl TreeHash for B256 {
fn tree_hash_type() -> TreeHashType {
TreeHashType::Vector
}

fn tree_hash_packed_encoding(&self) -> PackedEncoding {
-PackedEncoding::from_slice(self.as_bytes())
+PackedEncoding::from_slice(self.as_slice())
}

fn tree_hash_packing_factor() -> usize {
@@ -216,8 +209,8 @@ mod test {

let false_bytes: Vec<u8> = vec![0; 32];

-assert_eq!(true.tree_hash_root().as_bytes(), true_bytes.as_slice());
-assert_eq!(false.tree_hash_root().as_bytes(), false_bytes.as_slice());
+assert_eq!(true.tree_hash_root().as_slice(), true_bytes.as_slice());
+assert_eq!(false.tree_hash_root().as_slice(), false_bytes.as_slice());
}

#[test]
@@ -229,16 +222,16 @@

#[test]
fn int_to_bytes() {
-assert_eq!(int_to_hash256(0).as_bytes(), &[0; 32]);
+assert_eq!(int_to_hash256(0).as_slice(), &[0; 32]);
assert_eq!(
-int_to_hash256(1).as_bytes(),
+int_to_hash256(1).as_slice(),
&[
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0
]
);
assert_eq!(
-int_to_hash256(u64::max_value()).as_bytes(),
+int_to_hash256(u64::max_value()).as_slice(),
&[
255, 255, 255, 255, 255, 255, 255, 255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0
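Note: the byte-level behaviour of the new accessors is easy to sanity-check outside the diff. The sketch below (not part of this PR) assumes alloy-primitives 0.7 on a little-endian target, where `as_le_slice` is available, and mirrors the `U128`/`U256` changes above.

```rust
use alloy_primitives::{B256, U128, U256};

fn main() {
    // `as_le_slice()` exposes the little-endian bytes directly,
    // replacing the old `to_little_endian(&mut buf)` copy.
    let x = U256::from(1u64);
    assert_eq!(x.as_le_slice().len(), 32);
    assert_eq!(x.as_le_slice()[0], 1);

    // For U128 roots, `right_padding_from` places the 16 little-endian
    // bytes first and zero-fills the rest, matching the old manual copy
    // into a `[0; HASHSIZE]` buffer.
    let y = U128::from(1u64);
    let root = B256::right_padding_from(y.as_le_slice());
    assert_eq!(root.as_slice()[0], 1);
    assert_eq!(&root.as_slice()[16..], &[0u8; 16]);
}
```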
10 changes: 5 additions & 5 deletions tree_hash/src/lib.rs
@@ -16,7 +16,7 @@ pub const MERKLE_HASH_CHUNK: usize = 2 * BYTES_PER_CHUNK;
pub const MAX_UNION_SELECTOR: u8 = 127;
pub const SMALLVEC_SIZE: usize = 32;

-pub type Hash256 = ethereum_types::H256;
+pub type Hash256 = alloy_primitives::B256;
pub type PackedEncoding = SmallVec<[u8; SMALLVEC_SIZE]>;

/// Convenience method for `MerkleHasher` which also provides some fast-paths for small trees.
@@ -30,7 +30,7 @@ pub fn merkle_root(bytes: &[u8], minimum_leaf_count: usize) -> Hash256 {

if leaves == 0 {
// If there are no bytes then the hash is always zero.
-Hash256::zero()
+Hash256::ZERO
} else if leaves == 1 {
// If there is only one leaf, the hash is always those leaf bytes padded out to 32-bytes.
let mut hash = [0; HASHSIZE];
@@ -64,7 +64,7 @@ pub fn mix_in_length(root: &Hash256, length: usize) -> Hash256 {
let mut length_bytes = [0; BYTES_PER_CHUNK];
length_bytes[0..usize_len].copy_from_slice(&length.to_le_bytes());

-Hash256::from_slice(&ethereum_hashing::hash32_concat(root.as_bytes(), &length_bytes)[..])
+Hash256::from_slice(&ethereum_hashing::hash32_concat(root.as_slice(), &length_bytes)[..])
}

/// Returns `Some(root)` created by hashing `root` and `selector`, if `selector <=
@@ -88,7 +88,7 @@ pub fn mix_in_selector(root: &Hash256, selector: u8) -> Option<Hash256> {
let mut chunk = [0; BYTES_PER_CHUNK];
chunk[0] = selector;

-let root = ethereum_hashing::hash32_concat(root.as_bytes(), &chunk);
+let root = ethereum_hashing::hash32_concat(root.as_slice(), &chunk);
Some(Hash256::from_slice(&root))
}

@@ -201,7 +201,7 @@ mod test {
};

assert_eq!(
-mix_in_length(&Hash256::from_slice(&[42; BYTES_PER_CHUNK]), 42).as_bytes(),
+mix_in_length(&Hash256::from_slice(&[42; BYTES_PER_CHUNK]), 42).as_slice(),
&hash[..]
);
}
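A minimal downstream sketch of the updated `lib.rs` API (assuming `merkle_root` and the `Hash256` alias stay re-exported as before): the zero root is now the associated constant `Hash256::ZERO` rather than the old `Hash256::zero()` method.

```rust
use tree_hash::{merkle_root, Hash256};

fn main() {
    // With no input bytes and no minimum leaf count, merkle_root
    // short-circuits to the all-zero root.
    assert_eq!(merkle_root(&[], 0), Hash256::ZERO);
}
```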
18 changes: 10 additions & 8 deletions tree_hash/src/merkle_hasher.rs
@@ -280,7 +280,7 @@ impl MerkleHasher {
} else if self.next_leaf == 1 {
// The next_leaf can only be 1 if the tree has a depth of one. If have been no
// leaves supplied, assume a root of zero.
-break Ok(Hash256::zero());
+break Ok(Hash256::ZERO);
} else {
// The only scenario where there are (a) no half nodes and (b) a tree of depth
// two or more is where no leaves have been supplied at all.
@@ -359,6 +359,8 @@ impl MerkleHasher {

#[cfg(test)]
mod test {
+use alloy_primitives::U256;
+
use super::*;
use crate::merkleize_padded;

@@ -376,7 +378,7 @@
fn compare_with_reference(leaves: &[Hash256], depth: usize) {
let reference_bytes = leaves
.iter()
-.flat_map(|hash| hash.as_bytes())
+.flat_map(|hash| hash.as_slice())
.copied()
.collect::<Vec<_>>();

@@ -385,7 +387,7 @@
let merklizer_root_32_bytes = {
let mut m = MerkleHasher::with_depth(depth);
for leaf in leaves.iter() {
-m.write(leaf.as_bytes()).expect("should process leaf");
+m.write(leaf.as_slice()).expect("should process leaf");
}
m.finish().expect("should finish")
};
@@ -426,7 +428,7 @@
/// of leaves and a depth.
fn compare_reference_with_len(leaves: u64, depth: usize) {
let leaves = (0..leaves)
-.map(Hash256::from_low_u64_be)
+.map(|leaf| Hash256::from(U256::from(leaf)))
.collect::<Vec<_>>();
compare_with_reference(&leaves, depth)
}
@@ -435,21 +437,21 @@
/// results.
fn compare_new_with_leaf_count(num_leaves: u64, depth: usize) {
let leaves = (0..num_leaves)
-.map(Hash256::from_low_u64_be)
+.map(|leaf| Hash256::from(U256::from(leaf)))
.collect::<Vec<_>>();

let from_depth = {
let mut m = MerkleHasher::with_depth(depth);
for leaf in leaves.iter() {
-m.write(leaf.as_bytes()).expect("should process leaf");
+m.write(leaf.as_slice()).expect("should process leaf");
}
m.finish()
};

let from_num_leaves = {
let mut m = MerkleHasher::with_leaves(num_leaves as usize);
for leaf in leaves.iter() {
-m.process_leaf(leaf.as_bytes())
+m.process_leaf(leaf.as_slice())
.expect("should process leaf");
}
m.finish()
@@ -495,7 +497,7 @@
#[test]
fn with_0_leaves() {
let hasher = MerkleHasher::with_leaves(0);
-assert_eq!(hasher.finish().unwrap(), Hash256::zero());
+assert_eq!(hasher.finish().unwrap(), Hash256::ZERO);
}

#[test]
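The one non-mechanical test change here is replacing `Hash256::from_low_u64_be`. A sketch of why the `U256` round-trip gives the same bytes (assuming alloy's `From<U256> for B256` converts via the big-endian representation):

```rust
use alloy_primitives::{B256, U256};

fn main() {
    // `H256::from_low_u64_be(7)` wrote 7 into the last 8 bytes, big-endian.
    // Converting through U256 reproduces that layout.
    let root = B256::from(U256::from(7u64));
    let mut expected = [0u8; 32];
    expected[24..].copy_from_slice(&7u64.to_be_bytes());
    assert_eq!(root.as_slice(), &expected[..]);
}
```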
2 changes: 1 addition & 1 deletion tree_hash/src/merkleize_padded.rs
@@ -280,7 +280,7 @@ mod test {
let input = vec![0; 10 * BYTES_PER_CHUNK];
let min_nodes = 2usize.pow(ZERO_HASHES_MAX_INDEX as u32);
assert_eq!(
-merkleize_padded(&input, min_nodes).as_bytes(),
+merkleize_padded(&input, min_nodes).as_slice(),
get_zero_hash(ZERO_HASHES_MAX_INDEX)
);
}
2 changes: 1 addition & 1 deletion tree_hash/tests/tests.rs
@@ -44,7 +44,7 @@ fn mix_in_selector(a: Hash256, selector: u8) -> Hash256 {
let mut b = [0; 32];
b[0] = selector;

-Hash256::from_slice(&ethereum_hashing::hash32_concat(a.as_bytes(), &b))
+Hash256::from_slice(&ethereum_hashing::hash32_concat(a.as_slice(), &b))
}

fn u8_hash_concat(v1: u8, v2: u8) -> Hash256 {
2 changes: 1 addition & 1 deletion tree_hash_derive/src/lib.rs
@@ -162,7 +162,7 @@ fn tree_hash_derive_struct(item: &DeriveInput, struct_data: &DataStruct) -> Toke
let mut hasher = tree_hash::MerkleHasher::with_leaves(#num_leaves);

#(
-hasher.write(self.#idents.tree_hash_root().as_bytes())
+hasher.write(self.#idents.tree_hash_root().as_slice())
.expect("tree hash derive should not apply too many leaves");
)*

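For completeness, a hypothetical downstream struct deriving `TreeHash` is unaffected apart from the accessor rename in the generated code. The sketch below assumes `MerkleHasher` (with public `write`/`finish`) is re-exported at the crate root and that the derive's default container behaviour applies.

```rust
use tree_hash::{Hash256, MerkleHasher, TreeHash};
use tree_hash_derive::TreeHash;

#[derive(TreeHash)]
struct Pair {
    a: u64,
    b: Hash256,
}

fn main() {
    let p = Pair { a: 1, b: Hash256::ZERO };

    // Reproduce what the derived impl now generates: each field root is
    // fed to the hasher via `as_slice()` instead of `as_bytes()`.
    let mut hasher = MerkleHasher::with_leaves(2);
    hasher.write(p.a.tree_hash_root().as_slice()).unwrap();
    hasher.write(p.b.tree_hash_root().as_slice()).unwrap();
    assert_eq!(p.tree_hash_root(), hasher.finish().unwrap());
}
```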