feat(levm): add hive tests comparison LEVM and REVM #1611

Closed
wants to merge 11 commits
9 changes: 9 additions & 0 deletions Cargo.lock


1 change: 1 addition & 0 deletions Cargo.toml
@@ -14,6 +14,7 @@ members = [
"cmd/ef_tests/levm",
"cmd/ethrex_l2",
"cmd/hive_report",
"cmd/hive_comparison",
"crates/vm/levm",
"crates/vm/levm/bench/revm_comparison",
"crates/l2/",
9 changes: 9 additions & 0 deletions cmd/hive_comparison/Cargo.toml
@@ -0,0 +1,9 @@
[package]
name = "hive_comparison"
version.workspace = true
edition.workspace = true

[dependencies]
serde_json.workspace = true
serde.workspace = true
hive_report = { path = "../hive_report" }
86 changes: 86 additions & 0 deletions cmd/hive_comparison/src/main.rs
@@ -0,0 +1,86 @@
use std::fs::{self, File};
use std::io::BufReader;

use hive_report::{HiveResult, JsonFile};

fn main() -> Result<(), Box<dyn std::error::Error>> {
// 1. Clear logs, build image with LEVM and run
// 2. Store results_by_category in a results_levm variable
// 3. Clear logs, build image with REVM and run
// 4. Store results_by_category in a results_revm variable
// 5. Compare results_levm with results_revm (both runs should have executed the same tests).
//    For now we can just compare the number of tests passed in each category and check that it matches.

// Warning: the code below is copy-pasted (from hive_report) just for testing purposes; the comparison itself is not implemented yet :)

let mut results = Vec::new();

for entry in fs::read_dir("hive/workspace/logs")? {
let entry = entry?;
let path = entry.path();

if path.is_file()
&& path.extension().and_then(|s| s.to_str()) == Some("json")
&& path.file_name().and_then(|s| s.to_str()) != Some("hive.json")
{
let file_name = path
.file_name()
.and_then(|s| s.to_str())
.expect("Path should be a valid string");
let file = File::open(&path)?;
let reader = BufReader::new(file);

let json_data: JsonFile = match serde_json::from_reader(reader) {
Ok(data) => data,
Err(_) => {
eprintln!("Error processing file: {}", file_name);
continue;
}
};

let total_tests = json_data.test_cases.len();
let passed_tests = json_data
.test_cases
.values()
.filter(|test_case| test_case.summary_result.pass)
.count();

let result = HiveResult::new(json_data.name, passed_tests, total_tests);
if !result.should_skip() {
results.push(result);
}
}
}

// First by category ascending, then by passed tests descending, then by success percentage descending.
results.sort_by(|a, b| {
a.category
.cmp(&b.category)
.then_with(|| b.passed_tests.cmp(&a.passed_tests))
.then_with(|| {
b.success_percentage
.partial_cmp(&a.success_percentage)
.unwrap()
})
});

dbg!(&results);
let results_by_category = results.chunk_by(|a, b| a.category == b.category);

dbg!(&results_by_category);

// for results in results_by_category {
// // print category
// println!("*{}*", results[0].category);
// for result in results {
// println!("\t{}", result);
// }
// println!();
// }

Ok(())
}

// fn generate_results_by_category() {

// }
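The comparison described in step 5 is not implemented yet in this diff. A minimal sketch (not part of the PR) of what it could look like, assuming both runs have already been collected into `Vec<HiveResult>` values; the `compare_results` helper and its output format are hypothetical:

```rust
use std::collections::HashMap;

use hive_report::HiveResult;

/// Hypothetical helper: compare per-category pass counts between a LEVM run
/// and a REVM run of the same hive simulators.
fn compare_results(results_levm: &[HiveResult], results_revm: &[HiveResult]) {
    // Sum passed tests per category for one run.
    fn totals(results: &[HiveResult]) -> HashMap<String, usize> {
        let mut by_category = HashMap::new();
        for result in results {
            *by_category.entry(result.category.clone()).or_insert(0) += result.passed_tests;
        }
        by_category
    }

    let levm = totals(results_levm);
    let revm = totals(results_revm);

    for (category, levm_passed) in &levm {
        let revm_passed = revm.get(category).copied().unwrap_or(0);
        if *levm_passed == revm_passed {
            println!("{category}: OK ({levm_passed} tests passed on both VMs)");
        } else {
            println!("{category}: MISMATCH (LEVM {levm_passed} vs REVM {revm_passed})");
        }
    }
}
```

Comparing pass counts per category is only the coarse check the comments describe; a follow-up could diff individual test names once both result sets are keyed by test case.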
74 changes: 74 additions & 0 deletions cmd/hive_report/src/lib.rs
@@ -0,0 +1,74 @@
use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TestCase {
pub summary_result: SummaryResult,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SummaryResult {
pub pass: bool,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct JsonFile {
pub name: String,
pub test_cases: std::collections::HashMap<String, TestCase>,
}

#[derive(Debug)]
pub struct HiveResult {
pub category: String,
pub display_name: String,
pub passed_tests: usize,
pub total_tests: usize,
pub success_percentage: f64,
}

impl HiveResult {
pub fn new(suite: String, passed_tests: usize, total_tests: usize) -> Self {
let success_percentage = (passed_tests as f64 / total_tests as f64) * 100.0;

let (category, display_name) = match suite.as_str() {
"engine-api" => ("Engine", "Paris"),
"engine-auth" => ("Engine", "Auth"),
"engine-cancun" => ("Engine", "Cancun"),
"engine-exchange-capabilities" => ("Engine", "Exchange Capabilities"),
"engine-withdrawals" => ("Engine", "Shanghai"),
"discv4" => ("P2P", "Discovery V4"),
"eth" => ("P2P", "Eth capability"),
"snap" => ("P2P", "Snap capability"),
"rpc-compat" => ("RPC", "RPC API Compatibility"),
"sync" => ("Sync", "Node Syncing"),
other => {
eprintln!("Warn: Unknown suite: {}. Skipping", other);
("", "")
}
};

HiveResult {
category: category.to_string(),
display_name: display_name.to_string(),
passed_tests,
total_tests,
success_percentage,
}
}

pub fn should_skip(&self) -> bool {
self.category.is_empty()
}
}

impl std::fmt::Display for HiveResult {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{}: {}/{} ({:.02}%)",
self.display_name, self.passed_tests, self.total_tests, self.success_percentage
)
}
}
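The `rename_all = "camelCase"` attributes mean these structs deserialize hive's JSON fields `testCases` and `summaryResult`. A minimal usage sketch (not part of the PR) of the extracted library; the JSON below is an abbreviated, assumed shape of a suite log:

```rust
use hive_report::{HiveResult, JsonFile};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Abbreviated suite log; real hive logs carry more fields, which serde ignores.
    let raw = r#"{
        "name": "engine-cancun",
        "testCases": {
            "1": { "summaryResult": { "pass": true } },
            "2": { "summaryResult": { "pass": false } }
        }
    }"#;

    let json_data: JsonFile = serde_json::from_str(raw)?;
    let total_tests = json_data.test_cases.len();
    let passed_tests = json_data
        .test_cases
        .values()
        .filter(|test_case| test_case.summary_result.pass)
        .count();

    let result = HiveResult::new(json_data.name, passed_tests, total_tests);
    println!("{result}"); // "Cancun: 1/2 (50.00%)"
    Ok(())
}
```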
73 changes: 1 addition & 72 deletions cmd/hive_report/src/main.rs
@@ -1,78 +1,7 @@
use serde::Deserialize;
use std::fs::{self, File};
use std::io::BufReader;

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct TestCase {
summary_result: SummaryResult,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct SummaryResult {
pass: bool,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
struct JsonFile {
name: String,
test_cases: std::collections::HashMap<String, TestCase>,
}

struct HiveResult {
category: String,
display_name: String,
passed_tests: usize,
total_tests: usize,
success_percentage: f64,
}

impl HiveResult {
fn new(suite: String, passed_tests: usize, total_tests: usize) -> Self {
let success_percentage = (passed_tests as f64 / total_tests as f64) * 100.0;

let (category, display_name) = match suite.as_str() {
"engine-api" => ("Engine", "Paris"),
"engine-auth" => ("Engine", "Auth"),
"engine-cancun" => ("Engine", "Cancun"),
"engine-exchange-capabilities" => ("Engine", "Exchange Capabilities"),
"engine-withdrawals" => ("Engine", "Shanghai"),
"discv4" => ("P2P", "Discovery V4"),
"eth" => ("P2P", "Eth capability"),
"snap" => ("P2P", "Snap capability"),
"rpc-compat" => ("RPC", "RPC API Compatibility"),
"sync" => ("Sync", "Node Syncing"),
other => {
eprintln!("Warn: Unknown suite: {}. Skipping", other);
("", "")
}
};

HiveResult {
category: category.to_string(),
display_name: display_name.to_string(),
passed_tests,
total_tests,
success_percentage,
}
}

fn should_skip(&self) -> bool {
self.category.is_empty()
}
}

impl std::fmt::Display for HiveResult {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{}: {}/{} ({:.02}%)",
self.display_name, self.passed_tests, self.total_tests, self.success_percentage
)
}
}
use hive_report::{HiveResult, JsonFile};

fn main() -> Result<(), Box<dyn std::error::Error>> {
let mut results = Vec::new();
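Moving the shared types into `cmd/hive_report/src/lib.rs` turns `hive_report` into a crate with both a library and a binary target, which is what lets `hive_comparison` pull it in via the path dependency above. The resulting layout, roughly:

```
cmd/
├── hive_report/
│   └── src/
│       ├── lib.rs    # pub: JsonFile, TestCase, SummaryResult, HiveResult
│       └── main.rs   # use hive_report::{HiveResult, JsonFile};
└── hive_comparison/
    ├── Cargo.toml    # hive_report = { path = "../hive_report" }
    └── src/
        └── main.rs   # use hive_report::{HiveResult, JsonFile};
```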
31 changes: 8 additions & 23 deletions crates/blockchain/blockchain.rs
@@ -15,8 +15,8 @@ use ethrex_core::types::{
use ethrex_core::H256;

use ethrex_storage::error::StoreError;
use ethrex_storage::{AccountUpdate, Store};
use ethrex_vm::{evm_state, execute_block, spec_id, EvmState, SpecId};
use ethrex_storage::Store;
use ethrex_vm::{execute_block, spec_id, SpecId};

//TODO: Implement a struct Chain or BlockChain to encapsulate
//functionality and canonical chain state and config
@@ -35,30 +35,15 @@ pub fn add_block(block: &Block, storage: &Store) -> Result<(), ChainError> {
storage.add_pending_block(block.clone())?;
return Err(ChainError::ParentNotFound);
};
let mut state = evm_state(storage.clone(), block.header.parent_hash);

// Validate the block pre-execution
validate_block(block, &parent_header, &state)?;
let (receipts, account_updates): (Vec<Receipt>, Vec<AccountUpdate>) = {
// TODO: Consider refactoring both implementations so that they have the same signature
#[cfg(feature = "levm")]
{
execute_block(block, &mut state)?
}
#[cfg(not(feature = "levm"))]
{
let receipts = execute_block(block, &mut state)?;
let account_updates = ethrex_vm::get_state_transitions(&mut state);
(receipts, account_updates)
}
};
validate_block(block, &parent_header, storage)?;

let (receipts, account_updates) = execute_block(block, storage)?;

validate_gas_used(&receipts, &block.header)?;

// Apply the account updates over the last block's state and compute the new state root
let new_state_root = state
.database()
.ok_or(ChainError::StoreError(StoreError::MissingStore))?
let new_state_root = storage
.apply_account_updates(block.header.parent_hash, &account_updates)?
.ok_or(ChainError::ParentStateNotFound)?;

@@ -149,10 +134,10 @@ pub fn find_parent_header(
pub fn validate_block(
block: &Block,
parent_header: &BlockHeader,
state: &EvmState,
store: &Store,
) -> Result<(), ChainError> {
let spec = spec_id(
&state.chain_config().map_err(ChainError::from)?,
&store.get_chain_config().map_err(ChainError::from)?,
block.header.timestamp,
);

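For reference, a minimal sketch (not part of the diff) of the call shape this refactor converges on: `execute_block(block, store)` now returns the receipts together with the account updates for both the LEVM and REVM backends, so callers drop the `cfg(feature = "levm")` split and the separate `get_state_transitions` call. Error handling is simplified to `expect` here, since the exact error conversions are not shown in the diff:

```rust
use ethrex_core::types::Block;
use ethrex_storage::Store;
use ethrex_vm::execute_block;

// Simplified caller sketch; the real add_block in blockchain.rs additionally
// validates the block pre-execution and checks the gas used against the receipts.
fn apply_block(block: &Block, storage: &Store) {
    // One call, both backends: receipts plus the resulting account updates.
    let (receipts, account_updates) =
        execute_block(block, storage).expect("block execution failed");

    // Apply the updates over the parent state and compute the new state root.
    let new_state_root = storage
        .apply_account_updates(block.header.parent_hash, &account_updates)
        .expect("store error")
        .expect("parent state not found");

    println!(
        "executed {} transactions, new state root: {:#x}",
        receipts.len(),
        new_state_root
    );
}
```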
12 changes: 5 additions & 7 deletions crates/l2/proposer/l1_committer.rs
@@ -20,7 +20,7 @@ use ethrex_l2_sdk::{
eth_client::{eth_sender::Overrides, BlockByNumber, EthClient, WrappedTransaction},
};
use ethrex_storage::{error::StoreError, Store};
use ethrex_vm::{evm_state, execute_block, get_state_transitions};
use ethrex_vm::execute_block;
use keccak_hash::keccak;
use secp256k1::SecretKey;
use std::{collections::HashMap, time::Duration};
@@ -242,15 +242,13 @@
) -> Result<StateDiff, CommitterError> {
info!("Preparing state diff for block {}", block.header.number);

let mut state = evm_state(store.clone(), block.header.parent_hash);
execute_block(block, &mut state).map_err(CommitterError::from)?;
let account_updates = get_state_transitions(&mut state);
let account_updates = execute_block(block, &store)
.map_err(CommitterError::from)?
.1;

let mut modified_accounts = HashMap::new();
for account_update in &account_updates {
let prev_nonce = match state
.database()
.ok_or(CommitterError::FailedToRetrieveDataFromStorage)?
let prev_nonce = match store
// If we want the state_diff of a batch, we will have to change the -1 with the `batch_size`
// and we may have to keep track of the latestCommittedBlock (last block of the batch),
// the batch_size and the latestCommittedBatch in the contract.
6 changes: 2 additions & 4 deletions crates/vm/execution_db.rs
@@ -17,7 +17,7 @@ use revm::{
};
use serde::{Deserialize, Serialize};

use crate::{errors::ExecutionDBError, evm_state, execute_block, get_state_transitions};
use crate::{errors::ExecutionDBError, execute_block};

/// In-memory EVM database for caching execution data.
///
@@ -49,10 +49,8 @@ impl ExecutionDB {
// TODO: perform validation to exit early

// Execute and obtain account updates
let mut state = evm_state(store.clone(), block.header.parent_hash);
let chain_config = store.get_chain_config()?;
execute_block(block, &mut state).map_err(Box::new)?;
let account_updates = get_state_transitions(&mut state);
let account_updates = execute_block(block, store).map_err(Box::new)?.1;

// Store data touched by updates and get all touched storage keys for each account
let mut accounts = HashMap::new();