Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

ethhash WIP #807

Draft
wants to merge 12 commits into
base: main
Choose a base branch
from
4 changes: 3 additions & 1 deletion .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -121,7 +121,9 @@ jobs:
~/.cargo/git/db/
target/
key: ${{ needs.build.outputs.cache-key }}
- name: Run tests
- name: Run tests with ethhash enabled
run: cargo test --verbose --features ethhash
- name: Run tests with ethhash disabled
run: cargo test --verbose

examples:
Expand Down
16 changes: 16 additions & 0 deletions coarsetime-metrics-macros/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
# Proc-macro crate providing the `#[measure]` attribute, which wraps a
# function with coarsetime-based duration/count metrics.
[package]
name = "coarsetime-metrics-macros"
version = "0.0.4"
edition = "2024"

[lib]
proc-macro = true

[dependencies]
# NOTE(review): `metrics` looks unused by the macro crate itself — the
# expansion resolves `counter!` at the call site, not here. Confirm and
# consider keeping it as a dev-dependency only.
metrics = "0.24.1"
syn = {version = "2.0.98", features = ["full", "printing"] }
quote = "1.0.9"

[dev-dependencies]
metrics = "0.24.1"
coarsetime = "0.1.35"
30 changes: 30 additions & 0 deletions coarsetime-metrics-macros/src/lib.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
use quote::quote;

use proc_macro::TokenStream;

/// Attribute macro that instruments a function with coarse-grained timing
/// metrics using `coarsetime`.
///
/// For a function named `foo`, each call:
/// 1. starts a `coarsetime::Instant` before the original body runs,
/// 2. increments the `foo_count` counter by 1, and
/// 3. increments the `foo_ms` counter by the elapsed milliseconds
///    (coarsetime's `as_millis` yields a `u64` — TODO confirm — which is
///    what `Counter::increment` expects).
///
/// The expansion calls `counter!` unqualified, so `metrics::counter` must be
/// in scope at the call site.
///
/// NOTE(review): attribute arguments (e.g. `#[measure(name = "xyz")]`) are
/// currently accepted but ignored; metric names always derive from the
/// function identifier.
#[proc_macro_attribute]
pub fn measure(_attr: TokenStream, contents: TokenStream) -> TokenStream {
    // `parse_macro_input!` yields an `ItemFn` directly (on a parse failure it
    // expands to `compile_error!` and returns early) — it is not a `Result`,
    // so the previous `.expect(...)` call did not compile. Both the macro and
    // the type are path-qualified so no extra `use` items are required.
    let mut input = syn::parse_macro_input!(contents as syn::ItemFn);
    let ms_string = input.sig.ident.to_string() + "_ms";
    let count_string = input.sig.ident.to_string() + "_count";

    let timer = quote!(_duration);

    // Wrap the original body in an inner block and bind its value instead of
    // appending statements after it: appending would demote a tail expression
    // to a plain statement and break any annotated function that returns a
    // value. An early `return` still bypasses the metric updates, matching
    // the behavior of the previous append-based expansion.
    let original_body = input.block;
    input.block = Box::new(syn::parse_quote! {{
        let #timer = coarsetime::Instant::now();
        let __measure_result = #original_body;
        let #timer = #timer.elapsed().as_millis();
        counter!(#count_string).increment(1);
        counter!(#ms_string).increment(#timer);
        __measure_result
    }});

    TokenStream::from(quote::quote!(#input))
}
11 changes: 11 additions & 0 deletions coarsetime-metrics-macros/tests/basic.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
mod tests {
use coarsetime_metrics_macros::measure;
// `counter!` must be in scope at the call site because the `#[measure]`
// expansion references it unqualified.
use metrics::counter;

// Smoke test: checks only that a `#[measure]`-annotated function compiles
// and runs; the emitted counters are not asserted on.
// NOTE(review): the `name = "xyz"` argument is ignored by the macro (its
// attr parameter is `_attr`) — confirm whether named metrics are intended.
#[test]
#[measure(name = "xyz")]
fn it_works() {
let result = 2 + 2;
assert_eq!(result, 4);
}
}
7 changes: 6 additions & 1 deletion firewood/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ nightly = []
io-uring = ["storage/io-uring"]
logger = ["storage/logger"]
branch_factor_256 = [ "storage/branch_factor_256" ]
ethhash = [ "storage/ethhash" ]

[dev-dependencies]
criterion = {version = "0.5.1", features = ["async_tokio"]}
Expand All @@ -47,7 +48,11 @@ clap = { version = "4.5.0", features = ['derive'] }
pprof = { version = "0.14.0", features = ["flamegraph"] }
tempfile = "3.12.0"
tokio = { version = "1.36.0", features = ["rt", "sync", "macros", "rt-multi-thread"] }

ethereum-types = "0.15.1"
sha3 = "0.10.8"
plain_hasher = "0.2.3"
hash-db = "0.15.2"
hex-literal = "0.4.1"

[[bench]]
name = "hashops"
Expand Down
107 changes: 96 additions & 11 deletions firewood/src/merkle.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,9 @@ use std::iter::once;
use std::num::NonZeroUsize;
use std::sync::Arc;
use storage::{
BranchNode, Child, Hashable, HashedNodeReader, ImmutableProposal, LeafNode, LinearAddress,
MutableProposal, NibblesIterator, Node, NodeStore, Path, ReadableStorage, SharedNode, TrieHash,
TrieReader, ValueDigest,
BranchNode, Child, HashType, Hashable, HashedNodeReader, ImmutableProposal, LeafNode,
LinearAddress, MutableProposal, NibblesIterator, Node, NodeStore, Path, ReadableStorage,
SharedNode, TrieReader, ValueDigest,
};

use thiserror::Error;
Expand Down Expand Up @@ -184,7 +184,7 @@ impl<T: TrieReader> Merkle<T> {
// No nodes, even the root, are before `key`.
// The root alone proves the non-existence of `key`.
// TODO reduce duplicate code with ProofNode::from<PathIterItem>
let mut child_hashes: [Option<TrieHash>; BranchNode::MAX_CHILDREN] =
let mut child_hashes: [Option<HashType>; BranchNode::MAX_CHILDREN] =
[const { None }; BranchNode::MAX_CHILDREN];
if let Some(branch) = root.as_branch() {
// TODO danlaine: can we avoid indexing?
Expand Down Expand Up @@ -349,7 +349,7 @@ impl<T: HashedNodeReader> Merkle<T> {
pub(crate) fn dump_node(
&self,
addr: LinearAddress,
hash: Option<&TrieHash>,
hash: Option<&HashType>,
seen: &mut HashSet<LinearAddress>,
writer: &mut dyn Write,
) -> Result<(), MerkleError> {
Expand Down Expand Up @@ -397,7 +397,12 @@ impl<T: HashedNodeReader> Merkle<T> {
if let Some((root_addr, root_hash)) = self.nodestore.root_address_and_hash()? {
writeln!(result, " root -> {root_addr}")?;
let mut seen = HashSet::new();
self.dump_node(root_addr, Some(&root_hash), &mut seen, &mut result)?;
self.dump_node(
root_addr,
Some(&root_hash.into()),
&mut seen,
&mut result,
)?;
}
write!(result, "}}")?;

Expand Down Expand Up @@ -1339,7 +1344,7 @@ mod tests {
{
// Test that the proof is invalid when the hash is different
assert!(proof
.verify(key, Some(value), &TrieHash::default())
.verify(key, Some(value), &HashType::default().into())
.is_err());
}
}
Expand Down Expand Up @@ -1698,7 +1703,7 @@ mod tests {
) -> Result<Merkle<NodeStore<MutableProposal, MemStore>>, MerkleError> {
let nodestore = NodeStore::new_empty_proposal(MemStore::new(vec![]).into());
let mut merkle = Merkle::from(nodestore);
for (k, v) in items.iter() {
for (k, v) in items {
merkle.insert(k.as_ref(), Box::from(v.as_ref()))?;
}

Expand All @@ -1721,6 +1726,7 @@ mod tests {
Ok(())
}

#[cfg(not(feature = "ethhash"))]
#[test_case(vec![], None; "empty trie")]
#[test_case(vec![(&[0],&[0])], Some("073615413d814b23383fc2c8d8af13abfffcb371b654b98dbf47dd74b1e4d1b9"); "root")]
#[test_case(vec![(&[0,1],&[0,1])], Some("28e67ae4054c8cdf3506567aa43f122224fe65ef1ab3e7b7899f75448a69a6fd"); "root with partial path")]
Expand Down Expand Up @@ -1749,6 +1755,84 @@ mod tests {
}
}

#[cfg(feature = "ethhash")]
mod ethhasher {
// Keccak-256 hasher shim so the tests can compute Ethereum-compatible
// trie root hashes (via `hash_db::Hasher`) to compare against firewood's.
use ethereum_types::H256;
use hash_db::Hasher;
use plain_hasher::PlainHasher;
use sha3::{Digest, Keccak256};

/// `hash_db::Hasher` implementation backed by Keccak-256, the hash
/// function Ethereum uses for its Merkle-Patricia trie.
#[derive(Default, Debug, Clone, PartialEq, Eq, Hash)]
pub struct KeccakHasher;

impl Hasher for KeccakHasher {
type Out = H256;
// PlainHasher just copies bytes into the hash state; that is fine here
// because the keys it sees are already uniformly-distributed digests.
type StdHasher = PlainHasher;
const LENGTH: usize = 32;

// Hash `x` with Keccak-256 and return the 32-byte digest as an `H256`.
#[inline]
fn hash(x: &[u8]) -> Self::Out {
let mut hasher = Keccak256::new();
hasher.update(x);
let result = hasher.finalize();
H256::from_slice(result.as_slice())
}
}
}

// serialized leaf-bytes: "ca20887265696e64656572"
// serialized leaf-bytes: "ce89376c6573776f72746883636174"
// pass 1 bytes "f6808080808080a06c5112d862dbba220e8a398b3e36e5f290f0e42e9567f743077b913e7c728cc0808080808080808080857075707079"
// full bytes: e4808080808080ce89376c6573776f72746883636174 808080808080808080857075707079
// ce 14 bytes
// pass 2 bytes "e280a0e8f16a506602fad9fc32a127e4ebb7ed23cf6dc4784ec93a32ea02bdb61ee64c"
// pass 1 bytes "f8518080808080a0e1850687f4a8551960aede310f4fa74b99e68bea53f50255c95f88cb0761cd9680a0ff9375f48d0855ff015ad03b306c0af3afb53a28da1a549bf8264ae7243b297e808080808080808080"
// pass 2 bytes "e7850604060f06a0981d4451b3a6c74aa1b323853154ec63b8f4c99213b57f10243bda4ec2cfa960"
// pass 1 bytes "f8518080808080a0e1850687f4a8551960aede310f4fa74b99e68bea53f50255c95f88cb0761cd9680a0ff9375f48d0855ff015ad03b306c0af3afb53a28da1a549bf8264ae7243b297e808080808080808080"
// pass 2 bytes "e7850604060f06a0981d4451b3a6c74aa1b323853154ec63b8f4c99213b57f10243bda4ec2cfa960"

// full bytes: 887265696e64656572
// short node: ca20887265696e64656572
// final: ca20887265696e64656572
// full bytes: 83636174
// short node: ce89376c6573776f72746883636174
// final: ce89376c6573776f72746883636174
// full bytes: e4808080808080ce89376c6573776f72746883636174808080808080808080857075707079
// short node: e4808080808080ce89376c6573776f72746883636174808080808080808080857075707079
// final: 37efd11993cb04a54048c25320e9f29c50a432d28afdf01598b2978ce1ca3068
// full bytes: f83b8080808080ca20887265696e6465657280a037efd11993cb04a54048c25320e9f29c50a432d28afdf01598b2978ce1ca3068808080808080808080
// short node: e5831646f6a0db6ae1fda66890f6693f36560d36b4dca68b4d838f17016b151efe1d4c95c453
// final: 8aad789dff2f538bca5d8ea56e8abe10f4c7ba3a5dea95fea4cd6e7c3a1168d3

//serialized leaf-bytes: "ca20887265696e64656572"
//serialized leaf-bytes: "ce89376c6573776f72746883636174"
//pass 1 bytes "e4808080808080ce89376c6573776f72746883636174808080808080808080857075707079"
//pass 2=bytes "e4808080808080ce89376c6573776f72746883636174808080808080808080857075707079"
//pass 1 bytes "f83b8080808080ca20887265696e6465657280a037efd11993cb04a54048c25320e9f29c50a432d28afdf01598b2978ce1ca3068808080808080808080"
//pass 2 bytes "e5833646f6a0db6ae1fda66890f6693f36560d36b4dca68b4d838f17016b151efe1d4c95c453"
//pass 1 bytes "f83b8080808080ca20887265696e6465657280a037efd11993cb04a54048c25320e9f29c50a432d28afdf01598b2978ce1ca3068808080808080808080"
//pass 2 bytes "e5833 646f6a0db6ae1fda66890f6693f36560d36b4dca68b4d838f17016b151efe1d4c95c453"
// short node: e5831 646f6a0db6ae1fda66890f6693f36560d36b4dca68b4d838f17016b151efe1d4c95c453


// Checks that firewood's root hash under the `ethhash` feature matches the
// reference Ethereum trie root computed by `triehash::trie_root` with the
// Keccak-256 hasher defined in `ethhasher`.
// NOTE(review): `triehash` is not among the dev-dependency additions visible
// in this diff — confirm it is declared in firewood/Cargo.toml.
#[cfg(feature = "ethhash")]
#[test_case(&[("doe", "reindeer")])]
#[test_case(&[("doe", "reindeer"),("dog", "puppy"),("dogglesworth", "cat")])]
#[test_case(&[("doe", "reindeer"),("dog", "puppy"),("dogglesworth", "cacatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatt")])]
#[test_case(&[("dogglesworth", "cacatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatcatt")])]
fn test_root_hash_eth_compatible<T: AsRef<[u8]> + Clone + Ord>(kvs: &[(T, T)]) {
use ethereum_types::H256;
use ethhasher::KeccakHasher;
use triehash::trie_root;

// Build the firewood trie, hash it, and take the resulting root hash
// (default when the trie is empty).
let merkle = merkle_build_test(kvs.to_vec()).unwrap().hash();
let firewood_hash = merkle.nodestore.root_hash().unwrap().unwrap_or_default();
// Reference value: the Ethereum trie root over the same key/value pairs.
let eth_hash = trie_root::<KeccakHasher, _, _, _>(kvs.to_vec());
// Compare both as `H256` so a mismatch prints hex on failure.
let firewood_hash = H256::from_slice(firewood_hash.as_ref());

assert_eq!(firewood_hash, eth_hash);
}

#[test]
fn test_root_hash_fuzz_insertions() -> Result<(), MerkleError> {
use rand::rngs::StdRng;
Expand All @@ -1771,14 +1855,15 @@ mod tests {
key
};

for _ in 0..10 {
for _ in 0..100 {
let mut items = Vec::new();

for _ in 0..10 {
let val: Vec<u8> = (0..8).map(|_| rng.borrow_mut().random()).collect();
for _ in 0..100 {
let val: Vec<u8> = (0..256).map(|_| rng.borrow_mut().random()).collect();
items.push((keygen(), val));
}

test_root_hash_eth_compatible(&items);
merkle_build_test(items)?;
}

Expand Down
26 changes: 16 additions & 10 deletions firewood/src/proof.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
use crate::merkle::MerkleError;
use sha2::{Digest, Sha256};
use storage::{
BranchNode, Hashable, NibblesIterator, PathIterItem, Preimage, TrieHash, ValueDigest,
BranchNode, HashType, Hashable, NibblesIterator, PathIterItem, Preimage, TrieHash, ValueDigest
};
use thiserror::Error;

Expand Down Expand Up @@ -74,22 +74,28 @@ pub struct ProofNode {
/// Otherwise, the node's value or the hash of its value.
pub value_digest: Option<ValueDigest<Box<[u8]>>>,
/// The hash of each child, or None if the child does not exist.
pub child_hashes: [Option<TrieHash>; BranchNode::MAX_CHILDREN],
pub child_hashes: [Option<HashType>; BranchNode::MAX_CHILDREN],
}

impl Hashable for ProofNode {
fn key(&self) -> impl Iterator<Item = u8> + Clone {
self.key.as_ref().iter().copied()
}

fn partial_path(&self) -> impl Iterator<Item = u8> + Clone {
todo!();
#[allow(unreachable_code)]
std::iter::empty()
}

fn value_digest(&self) -> Option<ValueDigest<&[u8]>> {
self.value_digest.as_ref().map(|vd| match vd {
ValueDigest::Value(v) => ValueDigest::Value(v.as_ref()),
ValueDigest::_Hash(h) => ValueDigest::_Hash(h.as_ref()),
ValueDigest::Hash(h) => ValueDigest::Hash(h.as_ref()),
})
}

fn children(&self) -> impl Iterator<Item = (usize, &TrieHash)> + Clone {
fn children(&self) -> impl Iterator<Item = (usize, &HashType)> + Clone {
self.child_hashes
.iter()
.enumerate()
Expand All @@ -99,7 +105,7 @@ impl Hashable for ProofNode {

impl From<PathIterItem> for ProofNode {
fn from(item: PathIterItem) -> Self {
let mut child_hashes: [Option<TrieHash>; BranchNode::MAX_CHILDREN] =
let mut child_hashes: [Option<HashType>; BranchNode::MAX_CHILDREN] =
[const { None }; BranchNode::MAX_CHILDREN];

if let Some(branch) = item.node.as_branch() {
Expand All @@ -121,7 +127,7 @@ impl From<PathIterItem> for ProofNode {
}
}

impl From<&ProofNode> for TrieHash {
impl From<&ProofNode> for HashType {
fn from(node: &ProofNode) -> Self {
node.to_hash()
}
Expand Down Expand Up @@ -162,7 +168,7 @@ impl<T: Hashable> Proof<T> {
return Err(ProofError::ValueMismatch);
}
}
ValueDigest::_Hash(got_hash) => {
ValueDigest::Hash(got_hash) => {
// This proof proves that `key` maps to a value
// whose hash is `got_hash`.
let value_hash = Sha256::digest(expected_value.as_ref());
Expand Down Expand Up @@ -190,11 +196,11 @@ impl<T: Hashable> Proof<T> {
return Err(ProofError::Empty);
};

let mut expected_hash = root_hash;
let mut expected_hash: HashType = root_hash.clone().into();

let mut iter = self.0.iter().peekable();
while let Some(node) = iter.next() {
if node.to_hash() != *expected_hash {
if node.to_hash() != expected_hash {
return Err(ProofError::UnexpectedHash);
}

Expand Down Expand Up @@ -223,7 +229,7 @@ impl<T: Hashable> Proof<T> {
.children()
.find_map(|(i, hash)| {
if i == next_nibble as usize {
Some(hash)
Some(hash.clone())
} else {
None
}
Expand Down
3 changes: 3 additions & 0 deletions storage/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,8 @@ fastrace = { version = "0.7.4" }
io-uring = { version = "0.7.4", optional = true }
triomphe = "0.1.14"
coarsetime = "0.1.35"
rlp = { version = "0.6.1", optional = true }
sha3 = { version = "0.10.8", optional = true }

[dev-dependencies]
rand = "0.9.0"
Expand All @@ -37,6 +39,7 @@ tempfile = "3.12.0"
logger = ["log"]
branch_factor_256 = []
io-uring = ["dep:io-uring"]
ethhash = [ "dep:rlp", "dep:sha3" ]

[[bench]]
name = "serializer"
Expand Down
2 changes: 1 addition & 1 deletion storage/benches/serializer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ fn branch(c: &mut Criterion) {
if i == 0 {
Some(storage::Child::AddressWithHash(
NonZeroU64::new(1).unwrap(),
storage::TrieHash::from([0; 32]),
storage::HashType::from([0; 32]),
))
} else {
None
Expand Down
Loading
Loading