diff --git a/Cargo.lock b/Cargo.lock index 735b3ac932..1cf941b694 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -962,7 +962,7 @@ dependencies = [ [[package]] name = "bonsai-trie" version = "0.1.0" -source = "git+https://github.com/antiyro/bonsai-trie.git?branch=oss#6be56ff0296cbb3df0693ffc13e73a35329c0a0f" +source = "git+https://github.com/antiyro/bonsai-trie.git?branch=oss#1b9b3f14dfcf96fd872634f2127b218b9ed26c2e" dependencies = [ "bitvec", "derive_more", @@ -5962,7 +5962,6 @@ dependencies = [ "mc-storage", "mockito", "mp-block", - "mp-commitments", "mp-contract", "mp-fee", "mp-felt", @@ -6367,25 +6366,6 @@ dependencies = [ "starknet-ff 0.3.5 (git+https://github.com/jbcaron/starknet-rs.git?branch=classes)", ] -[[package]] -name = "mp-commitments" -version = "0.1.0" -dependencies = [ - "bitvec", - "derive_more", - "frame-support", - "mp-felt", - "mp-hashers", - "mp-transactions", - "parity-scale-codec", - "scale-info", - "serde", - "starknet-core", - "starknet-crypto 0.6.1 (git+https://github.com/jbcaron/starknet-rs.git?branch=classes)", - "starknet-ff 0.3.5 (git+https://github.com/jbcaron/starknet-rs.git?branch=classes)", - "starknet_api", -] - [[package]] name = "mp-contract" version = "0.1.0" @@ -7066,7 +7046,7 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c11e44798ad209ccdd91fc192f0526a369a01234f7373e1b141c96d7cee4f0e" dependencies = [ - "proc-macro-crate 2.0.0", + "proc-macro-crate 1.1.3", "proc-macro2", "quote", "syn 2.0.48", @@ -7334,7 +7314,6 @@ dependencies = [ "log", "mp-block", "mp-chain-id", - "mp-commitments", "mp-contract", "mp-digest-log", "mp-fee", @@ -11336,9 +11315,8 @@ dependencies = [ [[package]] name = "starknet-types-core" -version = "0.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d791c671fecde494f435170a01c6fcb2949d0dd61be0b31b7c410b041609f96" +version = "0.0.9" +source = "git+https://github.com/starknet-io/types-rs?branch=main#eae04aa3e6a3716982ce39ec7ba87a47698096dc" dependencies = [ "bitvec", "lambdaworks-crypto", @@ -12371,7 +12349,7 @@ checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" dependencies = [ "cfg-if", "digest 0.10.7", - "rand 0.8.5", + "rand 0.7.3", "static_assertions", ] diff --git a/Cargo.toml b/Cargo.toml index 14df39cacb..ae0366c1b3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -13,7 +13,6 @@ members = [ "crates/pallets/starknet/runtime_api/", "crates/primitives/block", "crates/primitives/chain-id", - "crates/primitives/commitments", "crates/primitives/contract", "crates/primitives/convert", "crates/primitives/digest-log", @@ -46,7 +45,6 @@ default-members = [ "crates/pallets/starknet/runtime_api/", "crates/primitives/block", "crates/primitives/chain-id", - "crates/primitives/commitments", "crates/primitives/digest-log", "crates/primitives/fee", "crates/primitives/felt", @@ -225,7 +223,6 @@ pallet-starknet-runtime-api = { path = "crates/pallets/starknet/runtime_api", de # Madara primtitives mp-block = { path = "crates/primitives/block", default-features = false } mp-chain-id = { path = "crates/primitives/chain-id", default-features = false } -mp-commitments = { path = "crates/primitives/commitments", default-features = false } mp-contract = { path = "crates/primitives/contract", default-features = false } mp-convert = { path = "crates/primitives/convert", default-features = false } mp-digest-log = { path = "crates/primitives/digest-log", default-features = false } @@ -271,7 +268,7 @@ starknet-crypto = { git = 
"https://github.com/jbcaron/starknet-rs.git", branch = starknet-ff = { git = "https://github.com/jbcaron/starknet-rs.git", branch = "classes", default-features = false } starknet-providers = { git = "https://github.com/jbcaron/starknet-rs.git", branch = "classes", default-features = false } starknet-signers = { git = "https://github.com/jbcaron/starknet-rs.git", branch = "classes", default-features = false } -starknet-types-core = { version = "0.0.7", default-features = false } +starknet-types-core = { git = "https://github.com/starknet-io/types-rs", branch = "main", default-features = false } blockifier = { git = "https://github.com/massalabs/blockifier", branch = "no_std-support-7578442-std", default-features = false, features = [ "parity-scale-codec", diff --git a/crates/client/db/src/bonsai_db.rs b/crates/client/db/src/bonsai_db.rs index 5519a7abe1..3c89dcb319 100644 --- a/crates/client/db/src/bonsai_db.rs +++ b/crates/client/db/src/bonsai_db.rs @@ -1,4 +1,6 @@ +use std::default; use std::marker::PhantomData; +use std::sync::atomic::AtomicU32; use std::sync::Arc; use bonsai_trie::id::Id; @@ -8,12 +10,58 @@ use sp_runtime::traits::Block as BlockT; use crate::error::BonsaiDbError; +#[derive(Debug)] +pub enum TrieColumn { + Class, + Contract, + Storage, +} + +#[derive(Debug)] +pub enum KeyType { + Trie, + Flat, + TrieLog, +} + +impl TrieColumn { + pub fn to_index(&self, key_type: KeyType) -> u32 { + match self { + TrieColumn::Class => match key_type { + KeyType::Trie => crate::columns::TRIE_BONSAI_CLASSES, + KeyType::Flat => crate::columns::FLAT_BONSAI_CLASSES, + KeyType::TrieLog => crate::columns::LOG_BONSAI_CLASSES, + }, + TrieColumn::Contract => match key_type { + KeyType::Trie => crate::columns::TRIE_BONSAI_CONTRACTS, + KeyType::Flat => crate::columns::FLAT_BONSAI_CONTRACTS, + KeyType::TrieLog => crate::columns::LOG_BONSAI_CONTRACTS, + }, + TrieColumn::Storage => match key_type { + KeyType::Trie => crate::columns::TRIE_BONSAI_STORAGE, + KeyType::Flat => crate::columns::FLAT_BONSAI_STORAGE, + KeyType::TrieLog => crate::columns::LOG_BONSAI_STORAGE, + }, + } + } +} + /// Represents a Bonsai database instance parameterized by a block type. pub struct BonsaiDb { /// Database interface for key-value operations. pub(crate) db: Arc, /// PhantomData to mark the block type used. pub(crate) _marker: PhantomData, + /// Set current column to give trie context + pub(crate) current_column: TrieColumn, +} + +pub fn key_type(key: &DatabaseKey) -> KeyType { + match key { + DatabaseKey::Trie(bytes) => return KeyType::Trie, + DatabaseKey::Flat(bytes) => return KeyType::Flat, + DatabaseKey::TrieLog(bytes) => return KeyType::TrieLog, + } } impl BonsaiDatabase for &BonsaiDb { @@ -27,7 +75,8 @@ impl BonsaiDatabase for &BonsaiDb { /// Retrieves a value by its database key. 
fn get(&self, key: &DatabaseKey) -> Result>, Self::DatabaseError> { - let column = crate::columns::BONSAI; + let key_type = key_type(key); + let column = self.current_column.to_index(key_type); let key_slice = key.as_slice(); self.db.get(column, key_slice).map_err(Into::into) } @@ -39,7 +88,9 @@ impl BonsaiDatabase for &BonsaiDb { value: &[u8], batch: Option<&mut Self::Batch>, ) -> Result>, Self::DatabaseError> { - let column = crate::columns::BONSAI; + // println!("Key and keytype: {:?} {:?}", self.current_column, key_type(key)); + let key_type = key_type(key); + let column = self.current_column.to_index(key_type); let key_slice = key.as_slice(); let previous_value = self.db.get(column, key_slice)?; @@ -56,14 +107,16 @@ impl BonsaiDatabase for &BonsaiDb { /// Checks if a key exists in the database. fn contains(&self, key: &DatabaseKey) -> Result { - let column = crate::columns::BONSAI; + let key_type = key_type(key); + let column = self.current_column.to_index(key_type); let key_slice = key.as_slice(); self.db.has_key(column, key_slice).map_err(Into::into) } /// Retrieves all key-value pairs starting with a given prefix. fn get_by_prefix(&self, prefix: &DatabaseKey) -> Result, Vec)>, Self::DatabaseError> { - let column = crate::columns::BONSAI; + let key_type = key_type(prefix); + let column = self.current_column.to_index(key_type); let prefix_slice = prefix.as_slice(); let mut result = Vec::new(); @@ -81,7 +134,8 @@ impl BonsaiDatabase for &BonsaiDb { key: &DatabaseKey, batch: Option<&mut Self::Batch>, ) -> Result>, Self::DatabaseError> { - let column = crate::columns::BONSAI; + let key_type = key_type(key); + let column = self.current_column.to_index(key_type); let key_slice = key.as_slice(); let previous_value = self.db.get(column, key_slice)?; @@ -98,7 +152,8 @@ impl BonsaiDatabase for &BonsaiDb { /// Removes all key-value pairs starting with a given prefix. 
fn remove_by_prefix(&mut self, prefix: &DatabaseKey) -> Result<(), Self::DatabaseError> { - let column = crate::columns::BONSAI; + let key_type = key_type(prefix); + let column = self.current_column.to_index(key_type); let prefix_slice = prefix.as_slice(); let mut transaction = self.create_batch(); transaction.delete_prefix(column, prefix_slice); diff --git a/crates/client/db/src/lib.rs b/crates/client/db/src/lib.rs index 6ba37ab246..0c124c6bc3 100644 --- a/crates/client/db/src/lib.rs +++ b/crates/client/db/src/lib.rs @@ -30,9 +30,10 @@ mod meta_db; use std::marker::PhantomData; use std::path::{Path, PathBuf}; +use std::sync::atomic::AtomicU32; use std::sync::Arc; -use bonsai_db::BonsaiDb; +use bonsai_db::{BonsaiDb, TrieColumn}; use da_db::DaDb; use l1_handler_tx_fee::L1HandlerTxFeeDb; use mapping_db::MappingDb; @@ -56,7 +57,7 @@ pub(crate) mod columns { // ===== /!\ =================================================================================== // MUST BE INCREMENTED WHEN A NEW COLUMN IN ADDED // ===== /!\ =================================================================================== - pub const NUM_COLUMNS: u32 = 10; + pub const NUM_COLUMNS: u32 = 18; pub const META: u32 = 0; pub const BLOCK_MAPPING: u32 = 1; @@ -79,8 +80,23 @@ pub(crate) mod columns { /// This column stores the fee paid on l1 for L1Handler transactions pub const L1_HANDLER_PAID_FEE: u32 = 8; - /// This column contains the bonsai trie keys - pub const BONSAI: u32 = 9; + /// The bonsai columns are triplicated since we need to set a column for + /// + /// const TRIE_LOG_CF: &str = "trie_log"; + /// const TRIE_CF: &str = "trie"; + /// const FLAT_CF: &str = "flat"; + /// as defined in https://github.com/keep-starknet-strange/bonsai-trie/blob/oss/src/databases/rocks_db.rs + /// + /// For each tries CONTRACTS, CLASSES and STORAGE + pub const TRIE_BONSAI_CONTRACTS: u32 = 9; + pub const FLAT_BONSAI_CONTRACTS: u32 = 10; + pub const LOG_BONSAI_CONTRACTS: u32 = 11; + pub const TRIE_BONSAI_CLASSES: u32 = 12; + pub const FLAT_BONSAI_CLASSES: u32 = 13; + pub const LOG_BONSAI_CLASSES: u32 = 14; + pub const TRIE_BONSAI_STORAGE: u32 = 15; + pub const FLAT_BONSAI_STORAGE: u32 = 16; + pub const LOG_BONSAI_STORAGE: u32 = 17; } pub mod static_keys { @@ -89,6 +105,14 @@ pub mod static_keys { pub const LAST_SYNCED_L1_EVENT_BLOCK: &[u8] = b"LAST_SYNCED_L1_EVENT_BLOCK"; } +/// The Bonsai databases backend +#[derive(Clone)] +pub struct BonsaiDbs { + pub contract: Arc>, + pub class: Arc>, + pub storage: Arc>, +} + /// The Madara client database backend /// /// Contains five distinct databases: `meta`, `mapping`, `messaging`, `da` and `bonsai``. @@ -104,7 +128,7 @@ pub struct Backend { messaging: Arc, sierra_classes: Arc, l1_handler_paid_fee: Arc, - bonsai: Arc>, + bonsai: BonsaiDbs, } /// Returns the Starknet database directory. 
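Note on the new column layout: every bonsai key is routed by combining the handle's `current_column` (one per trie) with the key's flavour (`Trie`, `Flat`, `TrieLog`). A minimal sketch of that resolution using only the types and constants introduced in this diff:

// A storage-trie node key ends up in the TRIE_BONSAI_STORAGE column (index 15).
let column = TrieColumn::Storage.to_index(KeyType::Trie);
assert_eq!(column, crate::columns::TRIE_BONSAI_STORAGE);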
@@ -143,6 +167,16 @@ impl Backend { let kvdb: Arc = db.0; let spdb: Arc> = db.1; + let bonsai_dbs = BonsaiDbs { + contract: Arc::new(BonsaiDb { + db: kvdb.clone(), + _marker: PhantomData, + current_column: TrieColumn::Contract, + }), + class: Arc::new(BonsaiDb { db: kvdb.clone(), _marker: PhantomData, current_column: TrieColumn::Class }), + storage: Arc::new(BonsaiDb { db: kvdb, _marker: PhantomData, current_column: TrieColumn::Storage }), + }; + Ok(Self { mapping: Arc::new(MappingDb::new(spdb.clone(), cache_more_things)), meta: Arc::new(MetaDb { db: spdb.clone(), _marker: PhantomData }), @@ -150,7 +184,7 @@ impl Backend { messaging: Arc::new(MessagingDb { db: spdb.clone() }), sierra_classes: Arc::new(SierraClassesDb { db: spdb.clone() }), l1_handler_paid_fee: Arc::new(L1HandlerTxFeeDb { db: spdb.clone() }), - bonsai: Arc::new(BonsaiDb { db: kvdb, _marker: PhantomData }), + bonsai: bonsai_dbs, }) } @@ -179,9 +213,16 @@ impl Backend { &self.sierra_classes } - /// Return the bonsai database manager - pub fn bonsai(&self) -> &Arc> { - &self.bonsai + pub fn bonsai_contract(&self) -> &Arc> { + &self.bonsai.contract + } + + pub fn bonsai_class(&self) -> &Arc> { + &self.bonsai.class + } + + pub fn bonsai_storage(&self) -> &Arc> { + &self.bonsai.storage } /// Return l1 handler tx paid fee database manager diff --git a/crates/client/deoxys/Cargo.toml b/crates/client/deoxys/Cargo.toml index 92f745bc89..7c74843ffb 100644 --- a/crates/client/deoxys/Cargo.toml +++ b/crates/client/deoxys/Cargo.toml @@ -63,7 +63,6 @@ mc-commitment-state-diff = { workspace = true } mc-rpc-core = { workspace = true } mc-storage = { workspace = true } mp-block = { workspace = true } -mp-commitments = { workspace = true } mp-contract = { workspace = true } mp-fee = { workspace = true } mp-felt = { workspace = true } diff --git a/crates/client/deoxys/src/commitments/classes.rs b/crates/client/deoxys/src/commitments/classes.rs new file mode 100644 index 0000000000..21554ea355 --- /dev/null +++ b/crates/client/deoxys/src/commitments/classes.rs @@ -0,0 +1,88 @@ +use std::sync::Arc; + +use bitvec::vec::BitVec; +use bonsai_trie::id::{BasicId, BasicIdBuilder}; +use bonsai_trie::{BonsaiStorage, BonsaiStorageConfig}; +use mc_db::bonsai_db::{BonsaiDb, TrieColumn}; +use mc_db::BonsaiDbError; +use mp_felt::Felt252Wrapper; +use mp_hashers::poseidon::PoseidonHasher; +use mp_hashers::HasherT; +use sp_runtime::traits::Block as BlockT; +use starknet_types_core::hash::Poseidon; + +/// Calculate class commitment trie leaf hash value. +/// +/// See: +/// +/// # Arguments +/// +/// * `compiled_class_hash` - The hash of the compiled class. +/// +/// # Returns +/// +/// The hash of the class commitment trie leaf. +pub fn calculate_class_commitment_leaf_hash(compiled_class_hash: Felt252Wrapper) -> Felt252Wrapper { + let contract_class_hash_version = Felt252Wrapper::try_from("CONTRACT_CLASS_LEAF_V0".as_bytes()).unwrap(); + + let hash = H::compute_hash_on_elements(&[contract_class_hash_version.0, compiled_class_hash.0]); + + hash.into() +} + +/// Update class trie root hash value with the new class definition. +/// +/// The classes trie encodes the information about the existing classes in the state of Starknet. +/// It maps (Cairo 1.0) class hashes to their compiled class hashes +/// +/// # Arguments +/// +/// * `class_hash` - The hash of the class. +/// * `compiled_class_hash` - The hash of the compiled class. 
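These three handles are consumed by the sync loop added in `l2.rs` further down; a short usage sketch of bundling them, assuming a `backend: Arc<Backend<B>>` is already in scope:

// Mirror of the call site in l2.rs::sync — one handle per trie, cloned cheaply via Arc.
let bonsai_dbs = BonsaiDbs {
    contract: Arc::clone(backend.bonsai_contract()),
    class: Arc::clone(backend.bonsai_class()),
    storage: Arc::clone(backend.bonsai_storage()),
};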
+/// * `bonsai_db` - The bonsai database responsible to compute the tries +/// +/// # Returns +/// +/// The class trie root hash represented as a `Felt252Wrapper` or a `BonsaiDbError`. +pub fn update_class_trie( + class_hash: Felt252Wrapper, + compiled_class_hash: Felt252Wrapper, + backend: &Arc>, +) -> Result { + let config = BonsaiStorageConfig::default(); + let bonsai_db = backend.as_ref(); + let mut bonsai_storage = + BonsaiStorage::<_, _, Poseidon>::new(bonsai_db, config).expect("Failed to create bonsai storage"); + + let class_commitment_leaf_hash = calculate_class_commitment_leaf_hash::(compiled_class_hash); + let key = BitVec::from_vec(class_hash.0.to_bytes_be()[..31].to_vec()); + bonsai_storage + .insert(key.as_bitslice(), &class_commitment_leaf_hash.into()) + .expect("Failed to insert into bonsai storage"); + + let mut id_builder = BasicIdBuilder::new(); + let id = id_builder.new_id(); + bonsai_storage.commit(id).expect("Failed to commit to bonsai storage"); + + let root_hash = bonsai_storage.root_hash().expect("Failed to get root hash"); + Ok(Felt252Wrapper::from(root_hash)) +} + +/// Get the actual class trie root hash. +/// +/// # Arguments +/// +/// * `bonsai_db` - The database responsible for storing computing the state tries. +/// +/// # Returns +/// +/// The class trie root hash as a `Felt252Wrapper` or a `BonsaiDbError`. +pub fn get_class_trie_root(backend: &Arc>) -> Result { + let config = BonsaiStorageConfig::default(); + let bonsai_db = backend.as_ref(); + let bonsai_storage: BonsaiStorage, Poseidon> = + BonsaiStorage::<_, _, Poseidon>::new(bonsai_db, config).expect("Failed to create bonsai storage"); + + let root_hash = bonsai_storage.root_hash().expect("Failed to get root hash"); + Ok(Felt252Wrapper::from(root_hash)) +} diff --git a/crates/client/deoxys/src/commitments/contracts.rs b/crates/client/deoxys/src/commitments/contracts.rs new file mode 100644 index 0000000000..c9ba6570d7 --- /dev/null +++ b/crates/client/deoxys/src/commitments/contracts.rs @@ -0,0 +1,158 @@ +use std::sync::Arc; + +use bitvec::prelude::BitVec; +use blockifier::execution::contract_address; +use blockifier::state::cached_state::CommitmentStateDiff; +use bonsai_trie::id::{BasicId, BasicIdBuilder}; +use bonsai_trie::{BonsaiStorage, BonsaiStorageConfig}; +use ethers::addressbook::Contract; +use mc_db::bonsai_db::{BonsaiDb, TrieColumn}; +use mc_db::BonsaiDbError; +use mp_felt::Felt252Wrapper; +use mp_hashers::pedersen::PedersenHasher; +use mp_hashers::HasherT; +use sp_runtime::traits::Block as BlockT; +use starknet_api::api_core::ContractAddress; +use starknet_types_core::hash::Pedersen; + +pub struct ContractLeafParams { + pub class_hash: Felt252Wrapper, + pub storage_root: Felt252Wrapper, + pub nonce: Felt252Wrapper, +} + +/// Calculates the storage root. +/// +/// `storage_root` is the root of another Merkle-Patricia trie of height 251 that is constructed +/// from the contract’s storage. +/// +/// # Arguments +/// +/// +/// # Returns +/// +/// The storage root hash. 
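A hedged usage sketch of the two class-trie helpers above, assuming a declared class whose `class_hash` and `compiled_class_hash` are already known and a `backend: Arc<mc_db::Backend<B>>` (the turbofish is only illustrative of the generic block type):

// Insert the (class_hash -> compiled_class_hash) leaf and obtain the new class trie root.
let class_trie_root = update_class_trie::<B>(class_hash, compiled_class_hash, backend.bonsai_class())?;
// Read the current root back without writing anything.
let current_root = get_class_trie_root::<B>(backend.bonsai_class())?;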
+pub fn update_storage_trie( + contract_address: &ContractAddress, + commitment_state_diff: CommitmentStateDiff, + bonsai_db: &Arc>, +) -> Result { + let config = BonsaiStorageConfig::default(); + let bonsai_db = bonsai_db.as_ref(); + let mut bonsai_storage: BonsaiStorage, Pedersen> = + BonsaiStorage::<_, _, Pedersen>::new(bonsai_db, config).expect("Failed to create bonsai storage"); + + if let Some(updates) = commitment_state_diff.storage_updates.get(contract_address) { + for (storage_key, storage_value) in updates { + let key = BitVec::from_vec(Felt252Wrapper::from(storage_key.0.0).0.to_bytes_be()[..31].to_vec()); + let value = Felt252Wrapper::from(*storage_value); + bonsai_storage.insert(key.as_bitslice(), &value.into()).expect("Failed to insert storage update into trie"); + } + } + + let mut id_builder = BasicIdBuilder::new(); + let id = id_builder.new_id(); + bonsai_storage.commit(id).expect("Failed to commit to bonsai storage"); + + let root_hash = bonsai_storage.root_hash().expect("Failed to get root hash"); + + Ok(Felt252Wrapper::from(root_hash)) +} + +/// Get the storage trie root hash of the actual contract state. +/// +/// # Arguments +/// +/// * `bonsai_db` - The database responsible for storing computing the state tries. +/// +/// # Returns +/// +/// The contract state root hash as a `Felt252Wrapper` or a `BonsaiDbError`. +pub fn get_storage_trie_root(bonsai_db: &Arc>) -> Result { + let config = BonsaiStorageConfig::default(); + let bonsai_db = bonsai_db.as_ref(); + let bonsai_storage: BonsaiStorage, Pedersen> = + BonsaiStorage::<_, _, Pedersen>::new(bonsai_db, config).expect("Failed to create bonsai storage"); + + let root_hash = bonsai_storage.root_hash().expect("Failed to get root hash"); + Ok(Felt252Wrapper::from(root_hash)) +} + +/// Calculates the contract state hash. +/// +/// # Arguments +/// +/// * `hash` - The hash of the contract definition. +/// * `root` - The root of root of another Merkle-Patricia trie of height 251 that is constructed +/// from the contract’s storage. +/// * `nonce` - The current nonce of the contract. +/// +/// # Returns +/// +/// The contract state leaf hash. +pub fn calculate_contract_state_leaf_hash(contract_leaf_params: ContractLeafParams) -> Felt252Wrapper { + // Define the constant for the contract state hash version + const CONTRACT_STATE_HASH_VERSION: Felt252Wrapper = Felt252Wrapper::ZERO; + + // First hash: Combine class_hash and storage_root. + let contract_state_hash = + H::compute_hash_on_elements(&[contract_leaf_params.class_hash.0, contract_leaf_params.storage_root.0]); + let contract_state_hash = H::compute_hash_on_elements(&[contract_state_hash, contract_leaf_params.nonce.0]); + let contract_state_hash = H::compute_hash_on_elements(&[contract_state_hash, CONTRACT_STATE_HASH_VERSION.0]); + + contract_state_hash.into() +} + +/// Update the contract trie with the new contract state. +/// +/// # Arguments +/// +/// * `contract_hash` - The hash of the contract. +/// * `contract_leaf_params` - A struct containing the class hash, storage root and nonce. +/// * `bonsai_db` - The database responsible for storing computing the state tries. +/// +/// # Returns +/// +/// The contract state root hash as a `Felt252Wrapper` or a `BonsaiDbError`. 
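For orientation, the contract leaf computed by `calculate_contract_state_leaf_hash` further down folds the class hash, storage root, nonce and a zero version constant into one Pedersen digest; a short sketch of building one leaf, with illustrative bindings for the values that `update_state_root` normally extracts from the `CommitmentStateDiff`:

// class_hash, storage_root and nonce are Felt252Wrapper values gathered per contract.
let contract_leaf_params = ContractLeafParams { class_hash, storage_root, nonce };
let leaf = calculate_contract_state_leaf_hash::<PedersenHasher>(contract_leaf_params);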
+pub fn update_contract_trie( + contract_hash: Felt252Wrapper, + contract_leaf_params: ContractLeafParams, + bonsai_db: &Arc>, +) -> Result { + let config = BonsaiStorageConfig::default(); + let bonsai_db = bonsai_db.as_ref(); + let mut bonsai_storage = + BonsaiStorage::<_, _, Pedersen>::new(bonsai_db, config).expect("Failed to create bonsai storage"); + + let class_commitment_leaf_hash = calculate_contract_state_leaf_hash::(contract_leaf_params); + let key = BitVec::from_vec(contract_hash.0.to_bytes_be()[..31].to_vec()); + bonsai_storage + .insert(key.as_bitslice(), &class_commitment_leaf_hash.into()) + .expect("Failed to insert into bonsai storage"); + + let mut id_builder = BasicIdBuilder::new(); + let id = id_builder.new_id(); + bonsai_storage.commit(id).expect("Failed to commit to bonsai storage"); + + let root_hash = bonsai_storage.root_hash().expect("Failed to get root hash"); + Ok(Felt252Wrapper::from(root_hash)) +} + +/// Get the actual contract trie hash. +/// +/// # Arguments +/// +/// * `bonsai_db` - The database responsible for storing computing the state tries. +/// +/// # Returns +/// +/// The contract state root hash as a `Felt252Wrapper`or a `BonsaiDbError`. +pub fn get_contract_trie_root(bonsai_db: &Arc>) -> Result { + let config = BonsaiStorageConfig::default(); + let bonsai_db = bonsai_db.as_ref(); + let bonsai_storage: BonsaiStorage, Pedersen> = + BonsaiStorage::<_, _, Pedersen>::new(bonsai_db, config).expect("Failed to create bonsai storage"); + + let root_hash = bonsai_storage.root_hash().expect("Failed to get root hash"); + Ok(Felt252Wrapper::from(root_hash)) +} diff --git a/crates/client/deoxys/src/commitments/events.rs b/crates/client/deoxys/src/commitments/events.rs index 2be64c12a4..add5164d0d 100644 --- a/crates/client/deoxys/src/commitments/events.rs +++ b/crates/client/deoxys/src/commitments/events.rs @@ -2,10 +2,12 @@ use std::sync::Arc; use anyhow::Ok; use bitvec::vec::BitVec; -use bonsai_trie::id::BasicIdBuilder; +use bonsai_trie::databases::HashMapDb; +use bonsai_trie::id::{BasicId, BasicIdBuilder}; use bonsai_trie::{BonsaiStorage, BonsaiStorageConfig}; use mc_db::bonsai_db::BonsaiDb; use mp_felt::Felt252Wrapper; +use mp_hashers::pedersen::PedersenHasher; use mp_hashers::HasherT; use sp_runtime::traits::Block as BlockT; use starknet_api::transaction::Event; @@ -13,10 +15,15 @@ use starknet_ff::FieldElement; use starknet_types_core::felt::Felt; use starknet_types_core::hash::Pedersen; -/// Calculate the hash of an event. +/// Calculate the hash of the event. /// -/// See the [documentation](https://docs.starknet.io/documentation/architecture_and_concepts/Events/starknet-events/#event_hash) -/// for details. +/// # Arguments +/// +/// * `event` - The event we want to calculate the hash of. +/// +/// # Returns +/// +/// The event hash as `FieldElement`. pub fn calculate_event_hash(event: &Event) -> FieldElement { let keys_hash = H::compute_hash_on_elements( &event @@ -39,31 +46,24 @@ pub fn calculate_event_hash(event: &Event) -> FieldElement { H::compute_hash_on_elements(&[from_address, keys_hash, data_hash]) } -/// Calculate event commitment hash value. -/// -/// The event commitment is the root of the Patricia Merkle tree with height 64 -/// constructed by adding the event hash -/// (see https://docs.starknet.io/documentation/architecture_and_concepts/Events/starknet-events/#event_hash) -/// to the tree and computing the root hash. +/// Calculate the event commitment in storage using BonsaiDb (which is less efficient for this +/// usecase). 
/// /// # Arguments /// -/// * `events` - The events to calculate the commitment from. +/// * `events` - The events of the block +/// * `bonsai_db` - The bonsai database responsible to compute the tries /// /// # Returns /// -/// The merkle root of the merkle tree built from the events. -pub(crate) fn event_commitment( +/// The event commitment as `Felt252Wrapper`. +pub fn event_commitment( events: &[Event], - backend: &Arc>, -) -> Result -where - B: BlockT, - H: HasherT, -{ + bonsai_db: &Arc>, +) -> Result { if events.len() > 0 { let config = BonsaiStorageConfig::default(); - let bonsai_db = backend.as_ref(); + let bonsai_db = bonsai_db.as_ref(); let mut bonsai_storage = BonsaiStorage::<_, _, Pedersen>::new(bonsai_db, config).expect("Failed to create bonsai storage"); @@ -73,7 +73,7 @@ where bonsai_storage.commit(zero).expect("Failed to commit to bonsai storage"); for (i, event) in events.iter().enumerate() { - let event_hash = calculate_event_hash::(event); + let event_hash = calculate_event_hash::(event); let key = BitVec::from_vec(i.to_be_bytes().to_vec()); let value = Felt::from(Felt252Wrapper::from(event_hash)); bonsai_storage.insert(key.as_bitslice(), &value).expect("Failed to insert into bonsai storage"); @@ -90,3 +90,38 @@ where Ok(Felt252Wrapper::ZERO) } } + +/// Calculate the event commitment in memory using HashMapDb (which is more efficient for this +/// usecase). +/// +/// # Arguments +/// +/// * `events` - The events of the block +/// +/// # Returns +/// +/// The event commitment as `Felt252Wrapper`. +pub fn memory_event_commitment(events: &[Event]) -> Result { + if !events.is_empty() { + let config = BonsaiStorageConfig::default(); + let bonsai_db = HashMapDb::::default(); + let mut bonsai_storage = + BonsaiStorage::<_, _, Pedersen>::new(bonsai_db, config).expect("Failed to create bonsai storage"); + + for (i, event) in events.iter().enumerate() { + let event_hash = calculate_event_hash::(event); + let key = BitVec::from_vec(i.to_be_bytes().to_vec()); + let value = Felt::from(Felt252Wrapper::from(event_hash)); + bonsai_storage.insert(key.as_bitslice(), &value).expect("Failed to insert into bonsai storage"); + } + + let mut id_builder = BasicIdBuilder::new(); + let id = id_builder.new_id(); + bonsai_storage.commit(id).expect("Failed to commit to bonsai storage"); + + let root_hash = bonsai_storage.root_hash().expect("Failed to get root hash"); + Ok(Felt252Wrapper::from(root_hash)) + } else { + Ok(Felt252Wrapper::ZERO) + } +} diff --git a/crates/client/deoxys/src/commitments/lib.rs b/crates/client/deoxys/src/commitments/lib.rs index c54367d897..3ce2819f7d 100644 --- a/crates/client/deoxys/src/commitments/lib.rs +++ b/crates/client/deoxys/src/commitments/lib.rs @@ -1,52 +1,198 @@ use std::sync::Arc; +use blockifier::state::cached_state::CommitmentStateDiff; +use indexmap::IndexMap; +use mc_db::bonsai_db::BonsaiDb; +use mc_db::{BonsaiDbError, BonsaiDbs}; +use mp_block::state_update::StateUpdateWrapper; use mp_felt::Felt252Wrapper; +use mp_hashers::poseidon::PoseidonHasher; use mp_hashers::HasherT; use mp_transactions::Transaction; use sp_runtime::traits::Block as BlockT; +use starknet_api::api_core::{ClassHash, CompiledClassHash, ContractAddress, Nonce}; +use starknet_api::hash::StarkFelt; +use starknet_api::state::StorageKey; use starknet_api::transaction::Event; -use super::events::event_commitment; -use super::transactions::transaction_commitment; +use super::classes::{get_class_trie_root, update_class_trie}; +use super::contracts::{get_contract_trie_root, 
update_contract_trie, update_storage_trie, ContractLeafParams}; +use super::events::memory_event_commitment; +use super::transactions::memory_transaction_commitment; -/// Calculate the transaction commitment, the event commitment and the event count. +/// Calculate the transaction and event commitment. /// /// # Arguments /// /// * `transactions` - The transactions of the block +/// * `events` - The events of the block +/// * `chain_id` - The current chain id +/// * `block_number` - The current block number /// /// # Returns /// -/// The transaction commitment, the event commitment and the event count. -pub fn calculate_commitments( +/// The transaction and the event commitment as `Felt252Wrapper`. +pub fn calculate_commitments( transactions: &[Transaction], events: &[Event], chain_id: Felt252Wrapper, block_number: u64, - backend: Arc>, ) -> (Felt252Wrapper, Felt252Wrapper) { ( - transaction_commitment::(transactions, chain_id, block_number, &backend.bonsai().clone()) + memory_transaction_commitment(transactions, chain_id, block_number) .expect("Failed to calculate transaction commitment"), - event_commitment::(events, &backend.bonsai().clone()).expect("Failed to calculate event commitment"), + memory_event_commitment(events).expect("Failed to calculate event commitment"), ) } -// /// Calculate the transaction commitment, the event commitment and the event count. -// /// -// /// # Arguments -// /// -// /// * `transactions` - The transactions of the block -// /// -// /// # Returns -// /// -// /// The transaction commitment, the event commitment and the event count. -// pub fn calculate_state_commitments( -// transactions: &[Transaction], -// events: &[Event], -// chain_id: Felt252Wrapper, -// block_number: u64, -// backend: Arc>, -// ) -> Felt252Wrapper { state_commitment::(transactions, chain_id, block_number, -// &backend.bonsai().clone()) .expect("Failed to calculate transaction commitment") -// } +/// Builds a `CommitmentStateDiff` from the `StateUpdateWrapper`. +/// +/// # Arguments +/// +/// * `StateUpdateWrapper` - The last state update fetched and formated. +/// +/// # Returns +/// +/// The commitment state diff as a `CommitmentStateDiff`. 
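Both commitments are now computed purely in memory; a usage sketch mirroring the call that `convert::block` makes later in this diff:

// transactions/events come from the fetched sequencer block; chain_id and block_number likewise.
let (transaction_commitment, event_commitment) =
    calculate_commitments(&transactions, &events, chain_id, block_number);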
+pub fn build_commitment_state_diff(state_update_wrapper: StateUpdateWrapper) -> CommitmentStateDiff { + let mut commitment_state_diff = CommitmentStateDiff { + address_to_class_hash: IndexMap::new(), + address_to_nonce: IndexMap::new(), + storage_updates: IndexMap::new(), + class_hash_to_compiled_class_hash: IndexMap::new(), + }; + + for deployed_contract in state_update_wrapper.state_diff.deployed_contracts.iter() { + let address = ContractAddress::from(deployed_contract.address.clone()); + let class_hash = if address == ContractAddress::from(Felt252Wrapper::ONE) { + // System contracts doesnt have class hashes + ClassHash::from(Felt252Wrapper::ZERO) + } else { + ClassHash::from(deployed_contract.class_hash.clone()) + }; + commitment_state_diff.address_to_class_hash.insert(address, class_hash); + } + + for (address, nonce) in state_update_wrapper.state_diff.nonces.iter() { + let contract_address = ContractAddress::from(address.clone()); + let nonce_value = Nonce::from(nonce.clone()); + commitment_state_diff.address_to_nonce.insert(contract_address, nonce_value); + } + + for (address, storage_diffs) in state_update_wrapper.state_diff.storage_diffs.iter() { + let contract_address = ContractAddress::from(address.clone()); + let mut storage_map = IndexMap::new(); + for storage_diff in storage_diffs.iter() { + let key = StorageKey::from(storage_diff.key.clone()); + let value = StarkFelt::from(storage_diff.value.clone()); + storage_map.insert(key, value); + } + commitment_state_diff.storage_updates.insert(contract_address, storage_map); + } + + for declared_class in state_update_wrapper.state_diff.declared_classes.iter() { + let class_hash = ClassHash::from(declared_class.class_hash.clone()); + let compiled_class_hash = CompiledClassHash::from(declared_class.compiled_class_hash.clone()); + commitment_state_diff.class_hash_to_compiled_class_hash.insert(class_hash, compiled_class_hash); + } + + commitment_state_diff +} + +/// Calculate state commitment hash value. +/// +/// The state commitment is the digest that uniquely (up to hash collisions) encodes the state. +/// It combines the roots of two binary Merkle-Patricia tries of height 251 using Poseidon/Pedersen +/// hashers. +/// +/// # Arguments +/// +/// * `contracts_trie_root` - The root of the contracts trie. +/// * `classes_trie_root` - The root of the classes trie. +/// +/// # Returns +/// +/// The state commitment as a `Felt252Wrapper`. +pub fn calculate_state_root( + contracts_trie_root: Felt252Wrapper, + classes_trie_root: Felt252Wrapper, +) -> Felt252Wrapper +where + H: HasherT, +{ + println!("classes_trie_root: {:?}", classes_trie_root); + println!("contracts_trie_root: {:?}", contracts_trie_root); + let starknet_state_prefix = Felt252Wrapper::try_from("STARKNET_STATE_V0".as_bytes()).unwrap(); + + let state_commitment_hash = + H::compute_hash_on_elements(&[starknet_state_prefix.0, contracts_trie_root.0, classes_trie_root.0]); + + state_commitment_hash.into() +} + +/// Update the state commitment hash value. +/// +/// The state commitment is the digest that uniquely (up to hash collisions) encodes the state. +/// It combines the roots of two binary Merkle-Patricia tries of height 251 using Poseidon/Pedersen +/// hashers. +/// +/// # Arguments +/// +/// * `CommitmentStateDiff` - The commitment state diff inducing unprocessed state changes. +/// * `BonsaiDb` - The database responsible for storing computing the state tries. +/// +/// # Returns +/// +/// The updated state root as a `Felt252Wrapper`. 
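A short sketch of how the two trie roots are finally combined, exactly as `update_state_root` does just below with the `PoseidonHasher` imported at the top of this file:

// Hashes the "STARKNET_STATE_V0" prefix together with both roots.
let state_root = calculate_state_root::<PoseidonHasher>(contract_trie_root, class_trie_root);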
+pub fn update_state_root( + csd: CommitmentStateDiff, + bonsai_dbs: BonsaiDbs, +) -> Result { + let mut contract_trie_root = Felt252Wrapper::default(); + let mut class_trie_root = Felt252Wrapper::default(); + + for (contract_address, class_hash) in csd.address_to_class_hash.iter() { + let storage_root = update_storage_trie(contract_address, csd.clone(), &bonsai_dbs.storage) + .expect("Failed to update storage trie"); + let nonce = csd.address_to_nonce.get(contract_address).unwrap_or(&Felt252Wrapper::default().into()).clone(); + + let contract_leaf_params = + ContractLeafParams { class_hash: class_hash.clone().into(), storage_root, nonce: nonce.into() }; + + contract_trie_root = + update_contract_trie(contract_address.clone().into(), contract_leaf_params, &bonsai_dbs.contract)?; + } + + for (class_hash, compiled_class_hash) in csd.class_hash_to_compiled_class_hash.iter() { + class_trie_root = + update_class_trie(class_hash.clone().into(), compiled_class_hash.clone().into(), &bonsai_dbs.class)?; + } + + let state_root = calculate_state_root::(contract_trie_root, class_trie_root); + + Ok(state_root) +} + +/// Retrieves and compute the actual state root. +/// +/// The state commitment is the digest that uniquely (up to hash collisions) encodes the state. +/// It combines the roots of two binary Merkle-Patricia tries of height 251 using Poseidon/Pedersen +/// hasher. +/// +/// # Arguments +/// +/// * `BonsaiDb` - The database responsible for storing computing the state tries. +/// +/// # Returns +/// +/// The actual state root as a `Felt252Wrapper`. +pub fn state_root(bonsai_db: &Arc>) -> Felt252Wrapper +where + B: BlockT, + H: HasherT, +{ + let contract_trie_root = get_contract_trie_root(bonsai_db).expect("Failed to get contract trie root"); + let class_trie_root = get_class_trie_root(bonsai_db).expect("Failed to get class trie root"); + + calculate_state_root::(contract_trie_root, class_trie_root) +} diff --git a/crates/client/deoxys/src/commitments/mod.rs b/crates/client/deoxys/src/commitments/mod.rs index e8f370b0b8..5d6c8bcc01 100644 --- a/crates/client/deoxys/src/commitments/mod.rs +++ b/crates/client/deoxys/src/commitments/mod.rs @@ -1,4 +1,5 @@ +pub mod classes; +pub mod contracts; pub mod events; pub mod lib; -pub mod state; pub mod transactions; diff --git a/crates/client/deoxys/src/commitments/state.rs b/crates/client/deoxys/src/commitments/state.rs deleted file mode 100644 index 66195679c7..0000000000 --- a/crates/client/deoxys/src/commitments/state.rs +++ /dev/null @@ -1,110 +0,0 @@ -// State trie - -// Contract trie - -// Storage trie - -// Class trie - -use mp_felt::Felt252Wrapper; -use mp_hashers::HasherT; - -/// Hash of the StateCommitment tree -pub type StateCommitment = Felt252Wrapper; - -/// Hash of the leaf of the ClassCommitment tree -pub type ClassCommitmentLeafHash = Felt252Wrapper; - -/// Calculate state commitment hash value. -/// -/// The state commitment is the digest that uniquely (up to hash collisions) encodes the state. -/// It combines the roots of two binary Merkle-Patricia trees of height 251. -/// -/// # Arguments -/// -/// * `contracts_tree_root` - The root of the contracts tree. -/// * `classes_tree_root` - The root of the classes tree. -/// -/// # Returns -/// -/// The state commitment as a `StateCommitment`. 
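Taken together, the helpers above are what `verify_l2` (added in `l2.rs` later in this diff) uses to turn a fetched state update into a locally recomputed state root; a condensed sketch of that flow, with the generic block type written as `B` for illustration:

// state_update: starknet_providers::sequencer::models::StateUpdate fetched from the gateway.
let csd = build_commitment_state_diff(StateUpdateWrapper::from(&state_update));
let state_root = update_state_root::<B>(csd, bonsai_dbs.clone()).expect("Failed to update state root");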
-pub fn calculate_state_commitment( - contracts_tree_root: Felt252Wrapper, - classes_tree_root: Felt252Wrapper, -) -> StateCommitment -where - H: HasherT, -{ - let starknet_state_prefix = Felt252Wrapper::try_from("STARKNET_STATE_V0".as_bytes()).unwrap(); - - let state_commitment_hash = - H::compute_hash_on_elements(&[starknet_state_prefix.0, contracts_tree_root.0, classes_tree_root.0]); - - state_commitment_hash.into() -} - -/// Calculate class commitment tree leaf hash value. -/// -/// See: -/// -/// # Arguments -/// -/// * `compiled_class_hash` - The hash of the compiled class. -/// -/// # Returns -/// -/// The hash of the class commitment tree leaf. -pub fn calculate_class_commitment_leaf_hash( - compiled_class_hash: Felt252Wrapper, -) -> ClassCommitmentLeafHash { - let contract_class_hash_version = Felt252Wrapper::try_from("CONTRACT_CLASS_LEAF_V0".as_bytes()).unwrap(); // Unwrap safu - - let hash = H::compute_hash_on_elements(&[contract_class_hash_version.0, compiled_class_hash.0]); - - hash.into() -} - -/// Calculate class commitment tree root hash value. -/// -/// The classes tree encodes the information about the existing classes in the state of Starknet. -/// It maps (Cairo 1.0) class hashes to their compiled class hashes -/// -/// # Arguments -/// -/// * `classes` - The classes to get the root from. -/// -/// # Returns -/// -/// The merkle root of the merkle tree built from the classes. -pub fn calculate_class_commitment_tree_root_hash(_class_hashes: &[Felt252Wrapper]) -> Felt252Wrapper { - Felt252Wrapper::default() -} - -/// Calculates the contract state hash from its preimage. -/// -/// # Arguments -/// -/// * `hash` - The hash of the contract definition. -/// * `root` - The root of root of another Merkle-Patricia tree of height 251 that is constructed -/// from the contract’s storage. -/// * `nonce` - The current nonce of the contract. -/// -/// # Returns -/// -/// The contract state hash. -pub fn calculate_contract_state_hash( - hash: Felt252Wrapper, - root: Felt252Wrapper, - nonce: Felt252Wrapper, -) -> Felt252Wrapper { - // Define the constant for the contract state hash version, ensure this aligns with StarkNet - // specifications. - const CONTRACT_STATE_HASH_VERSION: Felt252Wrapper = Felt252Wrapper::ZERO; - - // First hash: Combine class_hash and storage_root. - let class_storage_hash = H::compute_hash_on_elements(&[hash.0, root.0]); - let nonce_hash = H::compute_hash_on_elements(&[class_storage_hash, nonce.0]); - let contract_state_hash = H::compute_hash_on_elements(&[nonce_hash, CONTRACT_STATE_HASH_VERSION.0]); - - contract_state_hash.into() -} diff --git a/crates/client/deoxys/src/commitments/transactions.rs b/crates/client/deoxys/src/commitments/transactions.rs index 74dce6e2f6..cd5ab51265 100644 --- a/crates/client/deoxys/src/commitments/transactions.rs +++ b/crates/client/deoxys/src/commitments/transactions.rs @@ -1,11 +1,13 @@ use std::sync::Arc; use bitvec::prelude::*; -use bonsai_trie::id::BasicIdBuilder; +use bonsai_trie::databases::HashMapDb; +use bonsai_trie::id::{BasicId, BasicIdBuilder}; use bonsai_trie::{BonsaiStorage, BonsaiStorageConfig}; use mc_db::bonsai_db::BonsaiDb; use mc_db::BonsaiDbError; use mp_felt::Felt252Wrapper; +use mp_hashers::pedersen::PedersenHasher; use mp_hashers::HasherT; use mp_transactions::compute_hash::ComputeTransactionHash; use mp_transactions::Transaction; @@ -22,13 +24,13 @@ use starknet_types_core::hash::Pedersen; /// /// # Arguments /// -/// * `tx` - The transaction to compute the hash of. 
+/// * `transaction` - The transaction to compute the hash of. /// /// # Returns /// /// The transaction hash with signature. pub fn calculate_transaction_hash_with_signature( - tx: &Transaction, + transaction: &Transaction, chain_id: Felt252Wrapper, block_number: u64, ) -> FieldElement @@ -37,35 +39,46 @@ where { let include_signature = block_number >= 61394; - let signature_hash = if matches!(tx, Transaction::Invoke(_)) || include_signature { + let signature_hash = if matches!(transaction, Transaction::Invoke(_)) || include_signature { // Include signatures for Invoke transactions or for all transactions // starting from block 61394 H::compute_hash_on_elements( - &tx.signature().iter().map(|elt| FieldElement::from(*elt)).collect::>(), + &transaction.signature().iter().map(|elt| FieldElement::from(*elt)).collect::>(), ) } else { // Before block 61394, and for non-Invoke transactions, signatures are not included H::compute_hash_on_elements(&[]) }; - let transaction_hashes = - H::hash_elements(FieldElement::from(tx.compute_hash::(chain_id, false, Some(block_number))), signature_hash); + let transaction_hashes = H::hash_elements( + FieldElement::from(transaction.compute_hash::(chain_id, false, Some(block_number))), + signature_hash, + ); transaction_hashes } -pub(crate) fn transaction_commitment( +/// Calculate the transaction commitment in storage using BonsaiDb (which is less efficient for this +/// usecase). +/// +/// # Arguments +/// +/// * `transactions` - The transactions of the block +/// * `chain_id` - The current chain id +/// * `block_number` - The current block number +/// * `bonsai_db` - The bonsai database responsible to compute the tries +/// +/// # Returns +/// +/// The transaction commitment as `Felt252Wrapper`. +pub fn transaction_commitment( transactions: &[Transaction], chain_id: Felt252Wrapper, block_number: u64, - backend: &Arc>, -) -> Result -where - B: BlockT, - H: HasherT, -{ + bonsai_db: &Arc>, +) -> Result { let config = BonsaiStorageConfig::default(); - let bonsai_db = backend.as_ref(); + let bonsai_db = bonsai_db.as_ref(); let mut bonsai_storage = BonsaiStorage::<_, _, Pedersen>::new(bonsai_db, config).expect("Failed to create bonsai storage"); @@ -75,7 +88,7 @@ where bonsai_storage.commit(zero).expect("Failed to commit to bonsai storage"); for (i, tx) in transactions.iter().enumerate() { - let tx_hash = calculate_transaction_hash_with_signature::(tx, chain_id, block_number); + let tx_hash = calculate_transaction_hash_with_signature::(tx, chain_id, block_number); let key = BitVec::from_vec(i.to_be_bytes().to_vec()); let value = Felt::from(Felt252Wrapper::from(tx_hash)); bonsai_storage.insert(key.as_bitslice(), &value).expect("Failed to insert into bonsai storage"); @@ -89,3 +102,40 @@ where Ok(Felt252Wrapper::from(root_hash)) } + +/// Calculate the transaction commitment in memory using HashMapDb (which is more efficient for this +/// usecase). +/// +/// # Arguments +/// +/// * `transactions` - The transactions of the block +/// * `chain_id` - The current chain id +/// * `block_number` - The current block number +/// +/// # Returns +/// +/// The transaction commitment as `Felt252Wrapper`. 
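The signature-inclusion rule encoded in `calculate_transaction_hash_with_signature` above deserves spelling out; a tiny sketch of the branch it implements (the helper name is hypothetical, the constant is the one from the diff):

// Signatures are folded into the per-transaction hash for Invoke transactions,
// and for every transaction type from block 61394 onwards.
fn includes_signature(is_invoke: bool, block_number: u64) -> bool {
    is_invoke || block_number >= 61394
}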
+pub fn memory_transaction_commitment( + transactions: &[Transaction], + chain_id: Felt252Wrapper, + block_number: u64, +) -> Result { + let config = BonsaiStorageConfig::default(); + let bonsai_db = HashMapDb::::default(); + let mut bonsai_storage = + BonsaiStorage::<_, _, Pedersen>::new(bonsai_db, config).expect("Failed to create bonsai storage"); + + for (i, tx) in transactions.iter().enumerate() { + let tx_hash = calculate_transaction_hash_with_signature::(tx, chain_id, block_number); + let key = BitVec::from_vec(i.to_be_bytes().to_vec()); + let value = Felt::from(Felt252Wrapper::from(tx_hash)); + bonsai_storage.insert(key.as_bitslice(), &value).expect("Failed to insert into bonsai storage"); + } + + let mut id_builder = BasicIdBuilder::new(); + let id = id_builder.new_id(); + bonsai_storage.commit(id).expect("Failed to commit to bonsai storage"); + + let root_hash = bonsai_storage.root_hash().expect("Failed to get root hash"); + Ok(Felt252Wrapper::from(root_hash)) +} diff --git a/crates/client/deoxys/src/l2.rs b/crates/client/deoxys/src/l2.rs index c250f33d86..f47b74b0e8 100644 --- a/crates/client/deoxys/src/l2.rs +++ b/crates/client/deoxys/src/l2.rs @@ -4,6 +4,8 @@ use std::sync::{Arc, Mutex}; use std::time::Duration; use itertools::Itertools; +use mc_db::bonsai_db::BonsaiDb; +use mc_db::BonsaiDbs; use mc_storage::OverrideHandle; use mp_block::state_update::StateUpdateWrapper; use mp_contract::class::{ClassUpdateWrapper, ContractClassData, ContractClassWrapper}; @@ -25,6 +27,7 @@ use starknet_providers::{Provider, SequencerGatewayProvider}; use tokio::sync::mpsc::Sender; use tokio::task::JoinSet; +use crate::commitments::lib::{build_commitment_state_diff, update_state_root}; use crate::utility::{get_block_hash_by_number, update_highest_block_hash_and_number}; use crate::CommandSink; @@ -142,12 +145,19 @@ pub async fn sync( update_config(&config); let SenderConfig { block_sender, state_update_sender, class_sender, command_sink, overrides } = &mut sender_config; let client = SequencerGatewayProvider::new(config.gateway.clone(), config.feeder_gateway.clone(), config.chain_id); - + let bonsai_dbs = BonsaiDbs { + contract: Arc::clone(backend.bonsai_contract()), + class: Arc::clone(backend.bonsai_class()), + storage: Arc::clone(backend.bonsai_storage()), + }; let mut current_block_number = start_at; let mut last_block_hash = None; let mut got_block = false; let mut got_state_update = false; let mut last_update_highest_block = tokio::time::Instant::now() - Duration::from_secs(20); + if current_block_number == 0 { + let _ = fetch_genesis_state_update(&client, bonsai_dbs.clone()).await; + } loop { if last_update_highest_block.elapsed() > Duration::from_secs(20) { last_update_highest_block = tokio::time::Instant::now(); @@ -157,7 +167,7 @@ pub async fn sync( } let (block, state_update) = match (got_block, got_state_update) { (false, false) => { - let block = fetch_block(&client, block_sender, current_block_number, backend.clone()); + let block = fetch_block(&client, block_sender, current_block_number); let state_update = fetch_state_and_class_update( &client, Arc::clone(&overrides), @@ -165,10 +175,11 @@ pub async fn sync( class_sender, current_block_number, rpc_port, + bonsai_dbs.clone(), ); tokio::join!(block, state_update) } - (false, true) => (fetch_block(&client, block_sender, current_block_number, backend.clone()).await, Ok(())), + (false, true) => (fetch_block(&client, block_sender, current_block_number).await, Ok(())), (true, false) => ( Ok(()), fetch_state_and_class_update( @@ -178,6 
+189,7 @@ pub async fn sync( class_sender, current_block_number, rpc_port, + bonsai_dbs.clone(), ) .await, ), @@ -211,39 +223,36 @@ pub async fn sync( } } -async fn fetch_block( +async fn fetch_block( client: &SequencerGatewayProvider, block_sender: &Sender, block_number: u64, - backend: Arc>, ) -> Result<(), String> { let block = client.get_block(BlockId::Number(block_number)).await.map_err(|e| format!("failed to get block: {e}"))?; - block_sender - .send(crate::convert::block(&block, backend)) - .await - .map_err(|e| format!("failed to dispatch block: {e}"))?; + block_sender.send(crate::convert::block(&block)).await.map_err(|e| format!("failed to dispatch block: {e}"))?; Ok(()) } pub async fn fetch_genesis_block(config: FetchConfig) -> Result { let client = SequencerGatewayProvider::new(config.gateway.clone(), config.feeder_gateway.clone(), config.chain_id); - let _block = client.get_block(BlockId::Number(0)).await.map_err(|e| format!("failed to get block: {e}"))?; + let block = client.get_block(BlockId::Number(0)).await.map_err(|e| format!("failed to get block: {e}"))?; - Ok(mp_block::Block::default()) + Ok(crate::convert::block(&block)) } -async fn fetch_state_and_class_update( +async fn fetch_state_and_class_update( provider: &SequencerGatewayProvider, overrides: Arc, OpaqueExtrinsic>>>, state_update_sender: &Sender, class_sender: &Sender, block_number: u64, rpc_port: u16, + bonsai_dbs: BonsaiDbs, ) -> Result<(), String> { - let state_update = fetch_state_update(&provider, block_number).await?; + let state_update = fetch_state_update(&provider, block_number, bonsai_dbs).await?; let class_update = fetch_class_update(&provider, &state_update, overrides, block_number, rpc_port).await?; // Now send state_update, which moves it. This will be received @@ -263,13 +272,29 @@ async fn fetch_state_and_class_update( } /// retrieves state update from Starknet sequencer -async fn fetch_state_update(provider: &SequencerGatewayProvider, block_number: u64) -> Result { +async fn fetch_state_update( + provider: &SequencerGatewayProvider, + block_number: u64, + bonsai_dbs: BonsaiDbs, +) -> Result { let state_update = provider .get_state_update(BlockId::Number(block_number)) .await .map_err(|e| format!("failed to get state update: {e}"))?; - // Verify state update via verify_l2(starket_state_update).await + let _ = verify_l2(block_number, &state_update, bonsai_dbs); + + Ok(state_update) +} + +async fn fetch_genesis_state_update( + provider: &SequencerGatewayProvider, + bonsai_dbs: BonsaiDbs, +) -> Result { + let state_update = + provider.get_state_update(BlockId::Number(0)).await.map_err(|e| format!("failed to get state update: {e}"))?; + + let _ = verify_l2(0, &state_update, bonsai_dbs); Ok(state_update) } @@ -421,16 +446,24 @@ pub fn update_l2(state_update: L2StateUpdate) { } } -/// Verify the L2 state according to the latest state update -pub async fn verify_l2(_state_update: StateUpdateWrapper) -> Result<(), String> { - // 1. Retrieve state diff - // 2. Compute commitments - // state_root = state_commitment(csd) - // 3. Log latest L2 state verified on L2 - // println!("➡️ block_number {:?}, block_hash {:?}, state_root {:?}", block_number, block_hash, - // state_root; - // 4. 
Update hared latest L2 state update verified on L2 - // update_l2({block_number, block_hash, state_commitment}) +/// Verify and update the L2 state according to the latest state update +pub fn verify_l2( + block_number: u64, + state_update: &StateUpdate, + bonsai_dbs: BonsaiDbs, +) -> Result<(), String> { + let state_update_wrapper = StateUpdateWrapper::from(state_update); + let csd = build_commitment_state_diff(state_update_wrapper.clone()); + let state_root = update_state_root(csd, bonsai_dbs).expect("Failed to update state root"); + let block_hash = state_update.block_hash.expect("Block hash not found in state update"); + + update_l2(L2StateUpdate { + block_number, + global_root: state_root.into(), + block_hash: Felt252Wrapper::from(block_hash).into(), + }); + println!("➡️ block_number {:?}, block_hash {:?}, state_root {:?}", block_number, block_hash, state_root); + Ok(()) } diff --git a/crates/client/deoxys/src/utils/convert.rs b/crates/client/deoxys/src/utils/convert.rs index 9a419613bd..eea4337ccb 100644 --- a/crates/client/deoxys/src/utils/convert.rs +++ b/crates/client/deoxys/src/utils/convert.rs @@ -1,22 +1,19 @@ //! Converts types from [`starknet_providers`] to madara's expected types. -use std::sync::Arc; - use mp_fee::ResourcePrice; use mp_felt::Felt252Wrapper; -use sp_runtime::traits::Block as BlockT; use starknet_api::hash::StarkFelt; use starknet_ff::FieldElement; use starknet_providers::sequencer::models as p; use crate::commitments::lib::calculate_commitments; -pub fn block(block: &p::Block, backend: Arc>) -> mp_block::Block { +pub fn block(block: &p::Block) -> mp_block::Block { let transactions = transactions(&block.transactions); let events = events(&block.transaction_receipts); let block_number = block.block_number.expect("no block number provided"); let sequencer_address = block.sequencer_address.map_or(contract_address(FieldElement::ZERO), contract_address); - let (transaction_commitment, event_commitment) = commitments(&transactions, &events, block_number, backend); + let (transaction_commitment, event_commitment) = commitments(&transactions, &events, block_number); let l1_gas_price = resource_price(block.eth_l1_gas_price); let protocol_version = starknet_version(&block.starknet_version); @@ -190,17 +187,14 @@ fn event(event: &p::Event) -> starknet_api::transaction::Event { } } -fn commitments( +fn commitments( transactions: &[mp_transactions::Transaction], events: &[starknet_api::transaction::Event], block_number: u64, - backend: Arc>, ) -> (StarkFelt, StarkFelt) { - use mp_hashers::pedersen::PedersenHasher; - let chain_id = chain_id(); - let (a, b) = calculate_commitments::(transactions, events, chain_id, block_number, backend); + let (a, b) = calculate_commitments(transactions, events, chain_id, block_number); (a.into(), b.into()) } diff --git a/crates/node/src/chain_spec.rs b/crates/node/src/chain_spec.rs index 392ed1639a..25077b71d4 100644 --- a/crates/node/src/chain_spec.rs +++ b/crates/node/src/chain_spec.rs @@ -130,6 +130,7 @@ pub fn deoxys_config(sealing: SealingMode, chain_id: &str) -> Result Result { log::info!("🧪 Fetching genesis block"); let runtime = Runtime::new().unwrap(); diff --git a/crates/node/src/commands/run.rs b/crates/node/src/commands/run.rs index 68f93da8e1..26475c855c 100644 --- a/crates/node/src/commands/run.rs +++ b/crates/node/src/commands/run.rs @@ -2,6 +2,7 @@ use std::path::PathBuf; use std::result::Result as StdResult; use madara_runtime::SealingMode; +use mc_deoxys::l2::fetch_genesis_block; use reqwest::Url; use sc_cli::{Result, 
RpcMethods, RunCmd, SubstrateCli}; use serde::{Deserialize, Serialize}; @@ -169,10 +170,19 @@ pub fn run_node(mut cli: Cli) -> Result<()> { let sealing = cli.run.sealing.map(Into::into).unwrap_or_default(); let cache = cli.run.cache; let mut fetch_block_config = cli.run.network.block_fetch_config(); + let genesis_block = fetch_genesis_block(fetch_block_config.clone()).await.unwrap(); fetch_block_config.sound = cli.run.sound; - service::new_full(config, sealing, cli.run.base.rpc_port.unwrap(), l1_endpoint, cache, fetch_block_config) - .map_err(sc_cli::Error::Service) + service::new_full( + config, + sealing, + cli.run.base.rpc_port.unwrap(), + l1_endpoint, + cache, + fetch_block_config, + genesis_block, + ) + .map_err(sc_cli::Error::Service) }) } diff --git a/crates/node/src/genesis_block.rs b/crates/node/src/genesis_block.rs new file mode 100644 index 0000000000..79f3e58367 --- /dev/null +++ b/crates/node/src/genesis_block.rs @@ -0,0 +1,89 @@ +use std::marker::PhantomData; +use std::sync::Arc; + +use mp_digest_log::{Log, MADARA_ENGINE_ID}; +use mp_hashers::pedersen::PedersenHasher; +use sc_client_api::backend::Backend; +use sc_client_api::BlockImportOperation; +use sc_executor::RuntimeVersionOf; +use sc_service::{resolve_state_version_from_wasm, BuildGenesisBlock}; +use sp_api::Encode; +use sp_core::storage::{StateVersion, Storage}; +use sp_runtime::traits::{Block as BlockT, Hash as HashT, Header as HeaderT, Zero}; +use sp_runtime::{BuildStorage, Digest, DigestItem}; + +/// Custom genesis block builder for Madara. +pub struct MadaraGenesisBlockBuilder { + genesis_storage: Storage, + commit_genesis_state: bool, + backend: Arc, + executor: E, + _phantom: PhantomData, + genesis_block: mp_block::Block, +} + +impl, E: RuntimeVersionOf> MadaraGenesisBlockBuilder { + /// Constructs a new instance of [`MadaraGenesisBlockBuilder`]. + pub fn new( + build_genesis_storage: &dyn BuildStorage, + commit_genesis_state: bool, + backend: Arc, + executor: E, + genesis_block: mp_block::Block, + ) -> sp_blockchain::Result { + let genesis_storage = build_genesis_storage.build_storage().map_err(sp_blockchain::Error::Storage)?; + Ok(Self { + genesis_storage, + commit_genesis_state, + backend, + executor, + _phantom: PhantomData::, + genesis_block, + }) + } +} + +impl, E: RuntimeVersionOf> BuildGenesisBlock + for MadaraGenesisBlockBuilder +{ + type BlockImportOperation = >::BlockImportOperation; + + fn build_genesis_block(self) -> sp_blockchain::Result<(Block, Self::BlockImportOperation)> { + let Self { genesis_storage, commit_genesis_state, backend, executor, _phantom, genesis_block } = self; + + let genesis_state_version = resolve_state_version_from_wasm(&genesis_storage, &executor)?; + let mut op = backend.begin_operation()?; + let state_root = op.set_genesis_state(genesis_storage, commit_genesis_state, genesis_state_version)?; + let genesis_block = construct_genesis_block::(state_root, genesis_state_version, genesis_block); + + Ok((genesis_block, op)) + } +} + +/// Construct genesis block. 
+fn construct_genesis_block( + state_root: Block::Hash, + state_version: StateVersion, + genesis_block: mp_block::Block, +) -> Block { + let extrinsics_root = + <<::Header as HeaderT>::Hashing as HashT>::trie_root(Vec::new(), state_version); + + let mut digest = vec![]; + + // Load first block from genesis folders + // TODO remove unecessary code from madara for genesis build + digest.push(DigestItem::Consensus(MADARA_ENGINE_ID, Log::Block(genesis_block.clone()).encode())); + log::info!("🌱 Genesis block imported correctly {:?}", genesis_block.header().hash::()); + + Block::new( + <::Header as HeaderT>::new( + Zero::zero(), + extrinsics_root, + state_root, + Default::default(), + Digest { logs: digest }, + ), + Default::default(), + ) +} diff --git a/crates/node/src/main.rs b/crates/node/src/main.rs index 52a2ff9186..58bd3ee0ae 100644 --- a/crates/node/src/main.rs +++ b/crates/node/src/main.rs @@ -10,6 +10,7 @@ mod command; mod commands; mod configs; mod constants; +mod genesis_block; mod rpc; mod starknet; diff --git a/crates/node/src/service.rs b/crates/node/src/service.rs index 9fe5a84e22..b21a2ed0cb 100644 --- a/crates/node/src/service.rs +++ b/crates/node/src/service.rs @@ -32,7 +32,7 @@ use sc_consensus_grandpa::{GrandpaBlockImport, SharedVoterState}; use sc_consensus_manual_seal::{ConsensusDataProvider, Error}; pub use sc_executor::NativeElseWasmExecutor; use sc_service::error::Error as ServiceError; -use sc_service::{Configuration, TaskManager, WarpSyncParams}; +use sc_service::{new_db_backend, Configuration, TaskManager, WarpSyncParams}; use sc_telemetry::{Telemetry, TelemetryHandle, TelemetryWorker}; use sc_transaction_pool::FullPool; use sc_transaction_pool_api::OffchainTransactionPoolFactory; @@ -45,6 +45,7 @@ use sp_runtime::testing::Digest; use sp_runtime::traits::Block as BlockT; use sp_runtime::DigestItem; +use crate::genesis_block::MadaraGenesisBlockBuilder; use crate::rpc::StarknetDeps; use crate::starknet::{db_config_dir, MadaraBackend}; // Our native executor instance. 
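For orientation, the genesis block built above is fetched once at startup and threaded through the service constructors; a condensed sketch of the call chain added in `run.rs` and `service.rs` in this diff (argument names abbreviated):

// run.rs: fetch block 0 from the feeder gateway before the node starts.
let genesis_block = fetch_genesis_block(fetch_block_config.clone()).await.unwrap();
// service.rs: new_full forwards it to new_partial, which hands it to MadaraGenesisBlockBuilder.
service::new_full(config, sealing, rpc_port, l1_endpoint, cache, fetch_block_config, genesis_block)
    .map_err(sc_cli::Error::Service)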
@@ -86,6 +87,7 @@ pub fn new_partial(
     config: &Configuration,
     build_import_queue: BIQ,
     cache_more_things: bool,
+    genesis_block: mp_block::Block,
 ) -> Result<
     sc_service::PartialComponents<
         FullClient,
@@ -127,10 +129,28 @@ where
     let executor = sc_service::new_native_or_wasm_executor(config);

-    let (client, backend, keystore_container, task_manager) = sc_service::new_full_parts::<Block, RuntimeApi, _>(
+    let backend = new_db_backend(config.db_config())?;
+
+    let genesis_block_builder = MadaraGenesisBlockBuilder::<Block, _, _>::new(
+        config.chain_spec.as_storage_builder(),
+        true,
+        backend.clone(),
+        executor.clone(),
+        genesis_block,
+    )
+    .unwrap();
+
+    let (client, backend, keystore_container, task_manager) = sc_service::new_full_parts_with_genesis_builder::<
+        Block,
+        RuntimeApi,
+        _,
+        MadaraGenesisBlockBuilder<Block, FullBackend, NativeElseWasmExecutor<ExecutorDispatch>>,
+    >(
         config,
         telemetry.as_ref().map(|(_, telemetry)| telemetry.handle()),
         executor,
+        backend,
+        genesis_block_builder,
     )?;
     let client = Arc::new(client);
@@ -257,6 +277,7 @@ pub fn new_full(
     l1_url: Url,
     cache_more_things: bool,
     fetch_config: mc_deoxys::FetchConfig,
+    genesis_block: mp_block::Block,
 ) -> Result<TaskManager, ServiceError> {
     let build_import_queue =
         if sealing.is_default() { build_aura_grandpa_import_queue } else { build_manual_seal_import_queue };
@@ -270,7 +291,7 @@ pub fn new_full(
         select_chain,
         transaction_pool,
         other: (block_import, grandpa_link, mut telemetry, madara_backend),
-    } = new_partial(&config, build_import_queue, cache_more_things)?;
+    } = new_partial(&config, build_import_queue, cache_more_things, genesis_block)?;

     let mut net_config = sc_network::config::FullNetworkConfiguration::new(&config.network);
@@ -732,6 +753,6 @@ type ChainOpsResult =
 pub fn new_chain_ops(config: &mut Configuration, cache_more_things: bool) -> ChainOpsResult {
     config.keystore = sc_service::config::KeystoreConfig::InMemory;
     let sc_service::PartialComponents { client, backend, import_queue, task_manager, other, ..
} = - new_partial::<_>(config, build_aura_grandpa_import_queue, cache_more_things)?; + new_partial::<_>(config, build_aura_grandpa_import_queue, cache_more_things, mp_block::Block::default())?; Ok((client, backend, import_queue, task_manager, other.3)) } diff --git a/crates/pallets/starknet/Cargo.toml b/crates/pallets/starknet/Cargo.toml index 59936808cc..f8a2a2ce03 100644 --- a/crates/pallets/starknet/Cargo.toml +++ b/crates/pallets/starknet/Cargo.toml @@ -16,7 +16,6 @@ version.workspace = true # Madara primitives mp-block = { workspace = true } mp-chain-id = { workspace = true } -mp-commitments = { workspace = true } mp-contract = { workspace = true } mp-digest-log = { workspace = true } mp-fee = { workspace = true } diff --git a/crates/pallets/starknet/src/lib.rs b/crates/pallets/starknet/src/lib.rs index f2d19e94d3..7312da2bbd 100644 --- a/crates/pallets/starknet/src/lib.rs +++ b/crates/pallets/starknet/src/lib.rs @@ -1064,8 +1064,7 @@ impl Pallet { let sequencer_address = Self::sequencer_address(); let block_timestamp = Self::block_timestamp(); let chain_id = Self::chain_id(); - let (transaction_commitment, event_commitment) = - mp_commitments::calculate_commitments::(&transactions, &events, chain_id, block_number); + let (transaction_commitment, event_commitment) = (Felt252Wrapper::default(), Felt252Wrapper::default()); let protocol_version = T::ProtocolVersion::get(); let extra_data = None; let l1_gas_price = T::L1GasPrice::get(); diff --git a/crates/primitives/block/src/state_update.rs b/crates/primitives/block/src/state_update.rs index d99a9e1b3a..24cc2a4cd0 100644 --- a/crates/primitives/block/src/state_update.rs +++ b/crates/primitives/block/src/state_update.rs @@ -6,7 +6,7 @@ use mp_felt::Felt252Wrapper; pub extern crate alloc; use alloc::vec::Vec; -#[derive(Debug)] +#[derive(Debug, Clone)] #[cfg_attr(feature = "parity-scale-codec", derive(parity_scale_codec::Encode, parity_scale_codec::Decode))] pub struct StateUpdateWrapper { pub block_hash: Option, @@ -15,7 +15,7 @@ pub struct StateUpdateWrapper { pub state_diff: StateDiffWrapper, } -#[derive(Debug)] +#[derive(Debug, Clone)] #[cfg_attr(feature = "parity-scale-codec", derive(parity_scale_codec::Encode, parity_scale_codec::Decode))] pub struct StateDiffWrapper { pub storage_diffs: Vec<(Felt252Wrapper, Vec)>, @@ -26,21 +26,21 @@ pub struct StateDiffWrapper { pub replaced_classes: Vec, } -#[derive(Debug)] +#[derive(Debug, Clone)] #[cfg_attr(feature = "parity-scale-codec", derive(parity_scale_codec::Encode, parity_scale_codec::Decode))] pub struct StorageDiffWrapper { pub key: Felt252Wrapper, pub value: Felt252Wrapper, } -#[derive(Debug)] +#[derive(Debug, Clone)] #[cfg_attr(feature = "parity-scale-codec", derive(parity_scale_codec::Encode, parity_scale_codec::Decode))] pub struct DeployedContractWrapper { pub address: Felt252Wrapper, pub class_hash: Felt252Wrapper, } -#[derive(Debug)] +#[derive(Debug, Clone)] #[cfg_attr(feature = "parity-scale-codec", derive(parity_scale_codec::Encode, parity_scale_codec::Decode))] pub struct DeclaredContractWrapper { pub class_hash: Felt252Wrapper, @@ -70,6 +70,17 @@ pub mod convert { } } + impl From<&starknet_providers::sequencer::models::StateUpdate> for StateUpdateWrapper { + fn from(update: &StateUpdate) -> Self { + StateUpdateWrapper { + block_hash: update.block_hash.as_ref().cloned().map(Felt252Wrapper::from), + new_root: update.new_root.as_ref().cloned().map(Felt252Wrapper::from), + old_root: Felt252Wrapper::from(update.old_root), + state_diff: StateDiffWrapper::from(&update.state_diff), 
+ } + } + } + impl From<&StateDiff> for StateDiffWrapper { fn from(diff: &StateDiff) -> Self { StateDiffWrapper { diff --git a/crates/primitives/commitments/Cargo.toml b/crates/primitives/commitments/Cargo.toml deleted file mode 100644 index 206871a00f..0000000000 --- a/crates/primitives/commitments/Cargo.toml +++ /dev/null @@ -1,47 +0,0 @@ -[package] -name = "mp-commitments" -version.workspace = true -edition.workspace = true -license = "MIT" -description = "Starknet commitment computation logic" -authors = { workspace = true } -repository = { workspace = true } - -[package.metadata.docs.rs] -targets = ["x86_64-unknown-linux-gnu"] - -[dependencies] -bitvec = { workspace = true } -derive_more = { workspace = true, features = ["constructor"] } -frame-support = { workspace = true } -mp-felt = { workspace = true } -mp-hashers = { workspace = true } -mp-transactions = { workspace = true } -starknet-crypto = { workspace = true } -starknet_api = { workspace = true } - -# Optional -parity-scale-codec = { workspace = true, features = [ - "derive", -], optional = true } -scale-info = { workspace = true, features = ["derive"], optional = true } -serde = { workspace = true, features = ["derive"], optional = true } -starknet-ff = { workspace = true, optional = true } - -[dev-dependencies] -starknet-core = { workspace = true } - - -[features] -default = ["std"] -std = [ - "mp-hashers/std", - "mp-transactions/std", - "mp-felt/std", - "starknet-crypto/std", - "bitvec/std", - "starknet_api/std", -] -parity-scale-codec = ["dep:parity-scale-codec", "mp-felt/parity-scale-codec"] -scale-info = ["dep:scale-info", "mp-felt/scale-info"] -serde = ["dep:serde", "dep:starknet-ff", "starknet-ff?/serde", "mp-felt/serde"] diff --git a/crates/primitives/commitments/src/lib.rs b/crates/primitives/commitments/src/lib.rs deleted file mode 100644 index 3050c02ba0..0000000000 --- a/crates/primitives/commitments/src/lib.rs +++ /dev/null @@ -1,367 +0,0 @@ -#![cfg_attr(not(feature = "std"), no_std)] - -#[doc(hidden)] -pub extern crate alloc; - -mod merkle_patricia_tree; - -use alloc::vec::Vec; - -use bitvec::vec::BitVec; -use merkle_patricia_tree::merkle_tree::{MerkleTree, NodesMapping, ProofNode}; -use merkle_patricia_tree::ref_merkle_tree::RefMerkleTree; -use mp_felt::Felt252Wrapper; -use mp_hashers::HasherT; -use mp_transactions::compute_hash::ComputeTransactionHash; -use mp_transactions::Transaction; -use starknet_api::transaction::Event; -use starknet_crypto::FieldElement; - -/// Hash of the StateCommitment tree -pub type StateCommitment = Felt252Wrapper; - -/// Hash of the leaf of the ClassCommitment tree -pub type ClassCommitmentLeafHash = Felt252Wrapper; - -/// A Patricia Merkle tree with height 64 used to compute transaction and event commitments. -/// -/// According to the [documentation](https://docs.starknet.io/documentation/architecture_and_concepts/Blocks/header/) -/// the commitment trees are of height 64, because the key used is the 64 bit representation -/// of the index of the transaction / event within the block. -/// -/// The tree height is 64 in our case since our set operation takes u64 index values. -struct CommitmentTree { - tree: RefMerkleTree, -} - -impl Default for CommitmentTree { - fn default() -> Self { - Self { tree: RefMerkleTree::empty() } - } -} - -impl CommitmentTree { - /// Sets the value of a key in the merkle tree. - /// - /// # Arguments - /// - /// * `index` - The index of the value to set. - /// * `value` - The value to set. 
- pub fn set(&mut self, index: u64, value: FieldElement) { - let key = index.to_be_bytes(); - self.tree.set(&BitVec::from_vec(key.to_vec()), Felt252Wrapper(value)) - } - - /// Get the merkle root of the tree. - pub fn commit(&mut self) -> Felt252Wrapper { - self.tree.commit() - } -} - -/// A Patricia Merkle tree with height 251 used to compute contract and class tree commitments. -/// -/// According to the [documentation](https://docs.starknet.io/documentation/architecture_and_concepts/State/starknet-state/) -/// the commitment trees are of height 251, because the key used is a Field Element. -/// -/// The tree height is 251 in our case since our set operation takes Fieldelement index values. -#[derive(Clone, Debug, PartialEq)] -#[cfg_attr(feature = "parity-scale-codec", derive(parity_scale_codec::Encode, parity_scale_codec::Decode))] -#[cfg_attr(feature = "scale-info", derive(scale_info::TypeInfo))] -pub struct StateCommitmentTree { - tree: MerkleTree, -} - -impl Default for StateCommitmentTree { - fn default() -> Self { - Self { tree: MerkleTree::empty() } - } -} - -impl StateCommitmentTree { - /// Sets the value of a key in the merkle tree. - /// - /// # Arguments - /// - /// * `index` - The index of the value to set. - /// * `value` - The value to set. - pub fn set(&mut self, index: Felt252Wrapper, value: Felt252Wrapper) { - let key = &index.0.to_bytes_be()[..31]; - self.tree.set(&BitVec::from_vec(key.to_vec()), value) - } - - /// Get the merkle root of the tree. - pub fn commit(&mut self) -> Felt252Wrapper { - self.tree.commit() - } - - /// Generates a proof for `key`. See [`MerkleTree::get_proof`]. - pub fn get_proof(&self, key: Felt252Wrapper) -> Vec { - let key = &key.0.to_bytes_be()[..31]; - self.tree.get_proof(&BitVec::from_vec(key.to_vec())) - } - - /// Returns a leaf of the tree stored at key `key` - /// - /// # Arguments - /// - /// * `key` - The key of the value to retrieve. - /// - /// # Returns - /// - /// `Some(value)` - Value stored at the given key. - pub fn get(&self, key: Felt252Wrapper) -> Option { - let key = &key.0.to_bytes_be()[..31]; - self.tree.get(&BitVec::from_vec(key.to_vec())) - } - - /// Returns the tree's nodes - pub fn nodes(&self) -> NodesMapping { - NodesMapping(self.tree.nodes()) - } - - /// Loads tree from root - pub fn load(root: Felt252Wrapper) -> Self { - let merkle_tree = MerkleTree::new(root); - Self { tree: merkle_tree } - } -} - -/// Calculate state commitment hash value. -/// -/// The state commitment is the digest that uniquely (up to hash collisions) encodes the state. -/// It combines the roots of two binary Merkle-Patricia trees of height 251. -/// -/// # Arguments -/// -/// * `contracts_tree_root` - The root of the contracts tree. -/// * `classes_tree_root` - The root of the classes tree. -/// -/// # Returns -/// -/// The state commitment as a `StateCommitment`. -pub fn calculate_state_commitment( - contracts_tree_root: Felt252Wrapper, - classes_tree_root: Felt252Wrapper, -) -> StateCommitment -where - H: HasherT, -{ - let starknet_state_prefix = Felt252Wrapper::try_from("STARKNET_STATE_V0".as_bytes()).unwrap(); - - let state_commitment_hash = - H::compute_hash_on_elements(&[starknet_state_prefix.0, contracts_tree_root.0, classes_tree_root.0]); - - state_commitment_hash.into() -} - -/// Calculate the transaction commitment, the event commitment and the event count. 
-/// -/// # Arguments -/// -/// * `transactions` - The transactions of the block -/// -/// # Returns -/// -/// The transaction commitment, the event commitment and the event count. -pub fn calculate_commitments( - transactions: &[Transaction], - events: &[Event], - chain_id: Felt252Wrapper, - block_number: u64, -) -> (Felt252Wrapper, Felt252Wrapper) { - ( - calculate_transaction_commitment::(transactions, chain_id, block_number), - calculate_event_commitment::(events), - ) -} - -/// Calculate transaction commitment hash value. -/// -/// The transaction commitment is the root of the Patricia Merkle tree with height 64 -/// constructed by adding the (transaction_index, transaction_hash_with_signature) -/// key-value pairs to the tree and computing the root hash. -/// -/// # Arguments -/// -/// * `transactions` - The transactions to get the root from. -/// -/// # Returns -/// -/// The merkle root of the merkle tree built from the transactions. -pub fn calculate_transaction_commitment( - transactions: &[Transaction], - chain_id: Felt252Wrapper, - block_number: u64, -) -> Felt252Wrapper { - let mut tree = CommitmentTree::::default(); - - transactions.iter().enumerate().for_each(|(idx, tx)| { - let idx: u64 = idx.try_into().expect("too many transactions while calculating commitment"); - let final_hash = calculate_transaction_hash_with_signature::(tx, chain_id, block_number); - tree.set(idx, final_hash); - }); - tree.commit() -} - -/// Calculate event commitment hash value. -/// -/// The event commitment is the root of the Patricia Merkle tree with height 64 -/// constructed by adding the event hash -/// (see https://docs.starknet.io/documentation/architecture_and_concepts/Events/starknet-events/#event_hash) -/// to the tree and computing the root hash. -/// -/// # Arguments -/// -/// * `events` - The events to calculate the commitment from. -/// -/// # Returns -/// -/// The merkle root of the merkle tree built from the events. -pub(crate) fn calculate_event_commitment(events: &[Event]) -> Felt252Wrapper { - let mut tree = CommitmentTree::::default(); - events.iter().enumerate().for_each(|(id, event)| { - let final_hash = calculate_event_hash::(event); - tree.set(id as u64, final_hash); - }); - tree.commit() -} - -/// Calculate class commitment tree leaf hash value. -/// -/// See: -/// -/// # Arguments -/// -/// * `compiled_class_hash` - The hash of the compiled class. -/// -/// # Returns -/// -/// The hash of the class commitment tree leaf. -pub fn calculate_class_commitment_leaf_hash( - compiled_class_hash: Felt252Wrapper, -) -> ClassCommitmentLeafHash { - let contract_class_hash_version = Felt252Wrapper::try_from("CONTRACT_CLASS_LEAF_V0".as_bytes()).unwrap(); // Unwrap safu - - let hash = H::compute_hash_on_elements(&[contract_class_hash_version.0, compiled_class_hash.0]); - - hash.into() -} - -/// Calculate class commitment tree root hash value. -/// -/// The classes tree encodes the information about the existing classes in the state of Starknet. -/// It maps (Cairo 1.0) class hashes to their compiled class hashes -/// -/// # Arguments -/// -/// * `classes` - The classes to get the root from. -/// -/// # Returns -/// -/// The merkle root of the merkle tree built from the classes. 
-pub fn calculate_class_commitment_tree_root_hash(class_hashes: &[Felt252Wrapper]) -> Felt252Wrapper { - let mut tree = StateCommitmentTree::::default(); - class_hashes.iter().for_each(|class_hash| { - let final_hash = calculate_class_commitment_leaf_hash::(*class_hash); - tree.set(*class_hash, final_hash); - }); - tree.commit() -} - -/// Calculates the contract state hash from its preimage. -/// -/// # Arguments -/// -/// * `hash` - The hash of the contract definition. -/// * `root` - The root of root of another Merkle-Patricia tree of height 251 that is constructed -/// from the contract’s storage. -/// * `nonce` - The current nonce of the contract. -/// -/// # Returns -/// -/// The contract state hash. -pub fn calculate_contract_state_hash( - hash: Felt252Wrapper, - root: Felt252Wrapper, - nonce: Felt252Wrapper, -) -> Felt252Wrapper { - // Define the constant for the contract state hash version, ensure this aligns with StarkNet - // specifications. - const CONTRACT_STATE_HASH_VERSION: Felt252Wrapper = Felt252Wrapper::ZERO; - - // First hash: Combine class_hash and storage_root. - let class_storage_hash = H::compute_hash_on_elements(&[hash.0, root.0]); - let nonce_hash = H::compute_hash_on_elements(&[class_storage_hash, nonce.0]); - let contract_state_hash = H::compute_hash_on_elements(&[nonce_hash, CONTRACT_STATE_HASH_VERSION.0]); - - contract_state_hash.into() -} - -/// Compute the combined hash of the transaction hash and the signature. -/// -/// Since the transaction hash doesn't take the signature values as its input -/// computing the transaction commitent uses a hash value that combines -/// the transaction hash with the array of signature values. -/// -/// # Arguments -/// -/// * `tx` - The transaction to compute the hash of. -/// -/// # Returns -/// -/// The transaction hash with signature. -pub fn calculate_transaction_hash_with_signature( - tx: &Transaction, - chain_id: Felt252Wrapper, - block_number: u64, -) -> FieldElement -where - H: HasherT, -{ - let include_signature = block_number >= 61394; - - let signature_hash = if matches!(tx, Transaction::Invoke(_)) || include_signature { - // Include signatures for Invoke transactions or for all transactions - // starting from block 61394 - H::compute_hash_on_elements( - &tx.signature().iter().map(|elt| FieldElement::from(*elt)).collect::>(), - ) - } else { - // Before block 61394, and for non-Invoke transactions, signatures are not included - H::compute_hash_on_elements(&[]) - }; - - let transaction_hashes = - H::hash_elements(FieldElement::from(tx.compute_hash::(chain_id, false, Some(block_number))), signature_hash); - - transaction_hashes -} - -/// Calculate the hash of an event. -/// -/// See the [documentation](https://docs.starknet.io/documentation/architecture_and_concepts/Events/starknet-events/#event_hash) -/// for details. 
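For context on what the deleted helpers above computed: calculate_contract_state_hash chains three hashes, H(H(H(class_hash, storage_root), nonce), version), and calculate_transaction_hash_with_signature combines a transaction hash with the hash of its signature values, including signatures for Invoke transactions and, from block 61394 onwards, for every transaction. Below is a minimal, dependency-free sketch of just that chaining; the toy integer hash `h` and the constants are placeholders standing in for Pedersen over field elements, not the removed implementation.

```rust
/// Toy two-to-one hash standing in for Pedersen over field elements. NOT cryptographic.
fn h(a: u64, b: u64) -> u64 {
    a.rotate_left(17) ^ b.wrapping_mul(0x9E37_79B9_7F4A_7C15)
}

/// Loosely mirrors compute_hash_on_elements: fold the elements, then mix in the length.
fn h_on_elements(elements: &[u64]) -> u64 {
    let folded = elements.iter().fold(0u64, |acc, e| h(acc, *e));
    h(folded, elements.len() as u64)
}

const CONTRACT_STATE_HASH_VERSION: u64 = 0;

/// contract_state_hash = H(H(H(class_hash, storage_root), nonce), version)
fn contract_state_hash(class_hash: u64, storage_root: u64, nonce: u64) -> u64 {
    let class_storage_hash = h_on_elements(&[class_hash, storage_root]);
    let nonce_hash = h_on_elements(&[class_storage_hash, nonce]);
    h_on_elements(&[nonce_hash, CONTRACT_STATE_HASH_VERSION])
}

/// Combine a transaction hash with the hash of its signature values:
/// signatures are included for Invoke transactions, and for every transaction
/// from block 61394 onwards; otherwise the hash of an empty list is used.
fn tx_hash_with_signature(tx_hash: u64, signature: &[u64], is_invoke: bool, block_number: u64) -> u64 {
    let include_signature = block_number >= 61394;
    let signature_hash =
        if is_invoke || include_signature { h_on_elements(signature) } else { h_on_elements(&[]) };
    h(tx_hash, signature_hash)
}

fn main() {
    println!("contract state hash: {:#x}", contract_state_hash(1, 2, 3));
    println!("tx hash + signature: {:#x}", tx_hash_with_signature(42, &[7, 8], true, 1));
}
```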
-pub fn calculate_event_hash(event: &Event) -> FieldElement { - let keys_hash = H::compute_hash_on_elements( - &event - .content - .keys - .iter() - .map(|key| FieldElement::from(Felt252Wrapper::from(key.0))) - .collect::>(), - ); - let data_hash = H::compute_hash_on_elements( - &event - .content - .data - .0 - .iter() - .map(|data| FieldElement::from(Felt252Wrapper::from(*data))) - .collect::>(), - ); - let from_address = FieldElement::from(Felt252Wrapper::from(event.from_address.0.0)); - H::compute_hash_on_elements(&[from_address, keys_hash, data_hash]) -} - -#[cfg(test)] -mod tests; diff --git a/crates/primitives/commitments/src/merkle_patricia_tree/merkle_node.rs b/crates/primitives/commitments/src/merkle_patricia_tree/merkle_node.rs deleted file mode 100644 index 8329cfe24f..0000000000 --- a/crates/primitives/commitments/src/merkle_patricia_tree/merkle_node.rs +++ /dev/null @@ -1,303 +0,0 @@ -//! Contains constructs for describing the nodes in a Binary Merkle Patricia Tree -//! used by Starknet. -//! -//! For more information about how these Starknet trees are structured, see -//! [`MerkleTree`](super::merkle_tree::MerkleTree). - -use bitvec::order::Msb0; -use bitvec::prelude::BitVec; -use bitvec::slice::BitSlice; -use bitvec::view::BitView; -use mp_felt::Felt252Wrapper; -use mp_hashers::HasherT; -use starknet_api::stdlib::collections::HashMap; - -/// Id of a Node within the tree -#[derive(Copy, Clone, Debug, PartialEq, Eq, Default, PartialOrd, Ord, Hash)] -#[cfg_attr(feature = "parity-scale-codec", derive(parity_scale_codec::Encode, parity_scale_codec::Decode))] -#[cfg_attr(feature = "scale-info", derive(scale_info::TypeInfo))] -pub struct NodeId(pub u64); - -impl NodeId { - /// Mutates the given NodeId to be the next one and returns it. - pub fn next_id(&mut self) -> NodeId { - self.0 += 1; - NodeId(self.0) - } -} - -/// A node in a Binary Merkle-Patricia Tree graph. -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[cfg_attr(feature = "parity-scale-codec", derive(parity_scale_codec::Encode, parity_scale_codec::Decode))] -#[cfg_attr(feature = "scale-info", derive(scale_info::TypeInfo))] -pub enum Node { - /// A node that has not been fetched from storage yet. - /// - /// As such, all we know is its hash. - Unresolved(Felt252Wrapper), - /// A branch node with exactly two children. - Binary(BinaryNode), - /// Describes a path connecting two other nodes. - Edge(EdgeNode), - /// A leaf node that contains a value. - Leaf(Felt252Wrapper), -} - -/// Describes the [Node::Binary] variant. -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[cfg_attr(feature = "parity-scale-codec", derive(parity_scale_codec::Encode, parity_scale_codec::Decode))] -#[cfg_attr(feature = "scale-info", derive(scale_info::TypeInfo))] -pub struct BinaryNode { - /// The hash of this node. Is [None] if the node - /// has not yet been committed. - pub hash: Option, - /// The height of this node in the tree. - pub height: u64, - /// [Left](Direction::Left) child. - pub left: NodeId, - /// [Right](Direction::Right) child. - pub right: NodeId, -} - -/// Node that is an edge. -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[cfg_attr(feature = "parity-scale-codec", derive(parity_scale_codec::Encode, parity_scale_codec::Decode))] -#[cfg_attr(feature = "scale-info", derive(scale_info::TypeInfo))] -pub struct EdgeNode { - /// The hash of this node. Is [None] if the node - /// has not yet been committed. - pub hash: Option, - /// The starting height of this node in the tree. 
- pub height: u64, - /// The path this edge takes. - pub path: BitVec, - /// The child of this node. - pub child: NodeId, -} - -/// Describes the direction a child of a [BinaryNode] may have. -/// -/// Binary nodes have two children, one left and one right. -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[cfg_attr(feature = "parity-scale-codec", derive(parity_scale_codec::Encode, parity_scale_codec::Decode))] -#[cfg_attr(feature = "scale-info", derive(scale_info::TypeInfo))] -pub enum Direction { - /// Left direction. - Left, - /// Right direction. - Right, -} - -impl Direction { - /// Inverts the [Direction]. - /// - /// [Left] becomes [Right], and [Right] becomes [Left]. - /// - /// [Left]: Direction::Left - /// [Right]: Direction::Right - pub fn invert(self) -> Direction { - match self { - Direction::Left => Direction::Right, - Direction::Right => Direction::Left, - } - } -} - -impl From for Direction { - fn from(tf: bool) -> Self { - match tf { - true => Direction::Right, - false => Direction::Left, - } - } -} - -impl From for bool { - fn from(direction: Direction) -> Self { - match direction { - Direction::Left => false, - Direction::Right => true, - } - } -} - -impl BinaryNode { - /// Maps the key's bit at the binary node's height to a [Direction]. - /// - /// This can be used to check which direction the key describes in the context - /// of this binary node i.e. which direction the child along the key's path would - /// take. - /// - /// # Arguments - /// - /// * `key` - The key to get the direction of. - /// - /// # Returns - /// - /// The direction of the key. - pub fn direction(&self, key: &BitSlice) -> Direction { - key[self.height as usize].into() - } - - /// Returns the [Left] or [Right] child. - /// - /// [Left]: Direction::Left - /// [Right]: Direction::Right - /// - /// # Arguments - /// - /// `direction` - The direction where to get the child from. - /// - /// # Returns - /// - /// The child in the specified direction. - pub fn get_child(&self, direction: Direction) -> NodeId { - match direction { - Direction::Left => self.left, - Direction::Right => self.right, - } - } - - /// If possible, calculates and sets its own hash value. - /// - /// Does nothing if the hash is already [Some]. - /// - /// If either child's hash is [None], then the hash cannot - /// be calculated and it will remain [None]. - pub(crate) fn calculate_hash(&mut self, nodes: &HashMap) { - if self.hash.is_some() { - return; - } - - let left = match nodes.get(&self.left) { - Some(node) => match node.hash() { - Some(hash) => hash, - None => unreachable!("subtrees have to be committed first"), - }, - None => unreachable!("left child not found"), - }; - - let right = match nodes.get(&self.right) { - Some(node) => match node.hash() { - Some(hash) => hash, - None => unreachable!("subtrees have to be committed first"), - }, - None => unreachable!("right child not found"), - }; - - self.hash = Some(Felt252Wrapper(H::hash_elements(left.0, right.0))); - } -} - -impl Node { - /// Convenience function which sets the inner node's hash to [None], if - /// applicable. - /// - /// Used to indicate that this node has been mutated. - pub fn mark_dirty(&mut self) { - match self { - Node::Binary(inner) => inner.hash = None, - Node::Edge(inner) => inner.hash = None, - _ => {} - } - } - - /// Returns true if the node represents an empty node -- this is defined as a node - /// with the [Felt252Wrapper::ZERO]. - /// - /// This can occur for the root node in an empty graph. 
- pub fn is_empty(&self) -> bool { - match self { - Node::Unresolved(hash) => hash == &Felt252Wrapper::ZERO, - _ => false, - } - } - - /// Is the node a binary node. - pub fn is_binary(&self) -> bool { - matches!(self, Node::Binary(..)) - } - - /// Convert to node to binary node type (returns None if it's not a binary node). - pub fn as_binary(&self) -> Option<&BinaryNode> { - match self { - Node::Binary(binary) => Some(binary), - _ => None, - } - } - - /// Convert to node to edge node type (returns None if it's not a edge node). - pub fn as_edge(&self) -> Option<&EdgeNode> { - match self { - Node::Edge(edge) => Some(edge), - _ => None, - } - } - - /// Get the hash of a node. - pub fn hash(&self) -> Option { - match self { - Node::Unresolved(hash) => Some(*hash), - Node::Binary(binary) => binary.hash, - Node::Edge(edge) => edge.hash, - Node::Leaf(value) => Some(*value), - } - } -} - -impl EdgeNode { - /// Returns true if the edge node's path matches the same path given by the key. - /// - /// # Arguments - /// - /// * `key` - The key to check if the path matches with the edge node. - pub fn path_matches(&self, key: &BitSlice) -> bool { - self.path == key[(self.height as usize)..(self.height + self.path.len() as u64) as usize] - } - - /// Returns the common bit prefix between the edge node's path and the given key. - /// - /// This is calculated with the edge's height taken into account. - /// - /// # Arguments - /// - /// * `key` - The key to get the common path from. - pub fn common_path(&self, key: &BitSlice) -> &BitSlice { - let key_path = key.iter().skip(self.height as usize); - let common_length = key_path.zip(self.path.iter()).take_while(|(a, b)| a == b).count(); - - &self.path[..common_length] - } - - /// If possible, calculates and sets its own hash value. - /// - /// Does nothing if the hash is already [Some]. - /// - /// If the child's hash is [None], then the hash cannot - /// be calculated and it will remain [None]. - pub(crate) fn calculate_hash(&mut self, nodes: &HashMap) { - if self.hash.is_some() { - return; - } - - let child = match nodes.get(&self.child) { - Some(node) => match node.hash() { - Some(hash) => hash, - None => unreachable!("subtree has to be committed before"), - }, - None => unreachable!("child node not found"), - }; - - let mut bytes = [0u8; 32]; - bytes.view_bits_mut::()[256 - self.path.len()..].copy_from_bitslice(&self.path); - - let path = Felt252Wrapper::try_from(&bytes).unwrap(); - let mut length = [0; 32]; - // Safe as len() is guaranteed to be <= 251 - length[31] = self.path.len() as u8; - - let length = Felt252Wrapper::try_from(&length).unwrap(); - let hash = Felt252Wrapper(H::hash_elements(child.0, path.0) + length.0); - self.hash = Some(hash); - } -} diff --git a/crates/primitives/commitments/src/merkle_patricia_tree/merkle_tree.rs b/crates/primitives/commitments/src/merkle_patricia_tree/merkle_tree.rs deleted file mode 100644 index 7fea54788b..0000000000 --- a/crates/primitives/commitments/src/merkle_patricia_tree/merkle_tree.rs +++ /dev/null @@ -1,608 +0,0 @@ -//! This is a gigantic copy pasta from Thanks to the equilibrium team and whoever else contributed for the code. 
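The merkle_node.rs being removed above hashes nodes with two rules: a binary node hashes to H(left, right), and an edge node hashes to H(child, path) plus the path length added as a field element. A small stand-in sketch of those two rules follows, using u64 in place of Felt252Wrapper, a toy hash in place of Pedersen, and an integer in place of the bit-packed path; it is illustrative only.

```rust
/// Toy two-to-one hash standing in for Pedersen; NOT cryptographic.
fn h(a: u64, b: u64) -> u64 {
    a.rotate_left(13) ^ b.wrapping_mul(0x9E37_79B9_7F4A_7C15)
}

/// Binary node: hash = H(left_child_hash, right_child_hash).
fn binary_node_hash(left: u64, right: u64) -> u64 {
    h(left, right)
}

/// Edge node: hash = H(child_hash, path_as_felt) + path_length.
/// The bit-packing of the path into a felt is collapsed to a plain integer here,
/// and wrapping_add stands in for field addition.
fn edge_node_hash(child: u64, path_value: u64, path_len: u8) -> u64 {
    h(child, path_value).wrapping_add(path_len as u64)
}

fn main() {
    let left_leaf = 0xAA;
    let right_leaf = 0xBB;
    let parent = binary_node_hash(left_leaf, right_leaf);
    // An edge of length 3 with path bits 0b101 hanging the binary node below it:
    let root = edge_node_hash(parent, 0b101, 3);
    println!("root = {:#x}", root);
}
```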
-use alloc::vec::Vec; -use core::iter::once; -use core::marker::PhantomData; - -use bitvec::prelude::{BitSlice, BitVec, Msb0}; -use derive_more::Constructor; -use mp_felt::Felt252Wrapper; -use mp_hashers::HasherT; -#[cfg(feature = "parity-scale-codec")] -use parity_scale_codec::{Decode, Encode, Error, Input, Output}; -#[cfg(feature = "scale-info")] -use scale_info::{build::Fields, Path, Type, TypeInfo}; -#[cfg(feature = "serde")] -use serde::{ser::SerializeStructVariant, Serialize}; -use starknet_api::stdlib::collections::HashMap; - -use super::merkle_node::{BinaryNode, Direction, EdgeNode, Node, NodeId}; - -/// Wrapper type for a [HashMap] object. (It's not really a wrapper it's a -/// copy of the type but we implement the necessary traits.) -#[derive(Clone, Debug, PartialEq, Eq, Default, Constructor)] -pub struct NodesMapping(pub HashMap); - -/// SCALE trait. -#[cfg(feature = "parity-scale-codec")] -impl Encode for NodesMapping { - fn encode_to(&self, dest: &mut T) { - // Convert the NodesMapping to Vec<(NodeId, Node)> to be - // able to use the Encode trait from this type. We implemented it for NodeId, derived it - // for Node so we can use it for Vec<(NodeId, Node)>. - let val: Vec<(NodeId, Node)> = self.0.clone().into_iter().collect(); - dest.write(&Encode::encode(&val)); - } -} -/// SCALE trait. -#[cfg(feature = "parity-scale-codec")] -impl Decode for NodesMapping { - fn decode(input: &mut I) -> Result { - // Convert the NodesMapping to Vec<(NodeId, Node)> to be - // able to use the Decode trait from this type. We implemented it for NodeId, derived it - // for Node so we can use it for Vec<(NodeId, Node)>. - let val: Vec<(NodeId, Node)> = - Decode::decode(input).map_err(|_| Error::from("Can't get NodesMapping from input buffer."))?; - Ok(NodesMapping(HashMap::from_iter(val))) - } -} - -/// SCALE trait. -#[cfg(feature = "scale-info")] -impl TypeInfo for NodesMapping { - type Identity = Self; - - // The type info is saying that the NodesMapping must be seen as an - // array of bytes. - fn type_info() -> Type { - Type::builder() - .path(Path::new("NodesMapping", module_path!())) - .composite(Fields::unnamed().field(|f| f.ty::<[u8]>().type_name("NodesMapping"))) - } -} - -/// Lightweight representation of [BinaryNode]. Only holds left and right hashes. -#[derive(Debug, Clone, PartialEq)] -#[cfg_attr(feature = "parity-scale-codec", derive(Encode, Decode))] -#[cfg_attr(feature = "scale-info", derive(TypeInfo))] -pub struct BinaryProofNode { - /// Left hash. - pub left_hash: Felt252Wrapper, - /// Right hash. - pub right_hash: Felt252Wrapper, -} - -/// Ligthtweight representation of [EdgeNode]. Only holds its path and its child's hash. -#[derive(Debug, Clone, PartialEq)] -#[cfg_attr(feature = "parity-scale-codec", derive(Encode, Decode))] -#[cfg_attr(feature = "scale-info", derive(TypeInfo))] -pub struct EdgeProofNode { - /// Path of the node. - pub path: BitVec, - /// Hash of the child node. 
- pub child_hash: Felt252Wrapper, -} - -fn get_proof_node(node: &Node, nodes: &HashMap) -> ProofNode { - match node { - Node::Binary(bin) => ProofNode::Binary(BinaryProofNode { - left_hash: nodes.get(&bin.left).unwrap().hash().expect("Node should be committed"), - right_hash: nodes.get(&bin.right).unwrap().hash().expect("Node should be committed"), - }), - Node::Edge(edge) => ProofNode::Edge(EdgeProofNode { - path: edge.path.clone(), - child_hash: nodes.get(&edge.child).unwrap().hash().expect("Node should be committed"), - }), - Node::Leaf(_) => panic!("Leaf nodes should not appear in a proof"), - Node::Unresolved(_) => panic!("Unresolved nodes should not appear in a proof"), - } -} - -/// [ProofNode] s are lightweight versions of their `Node` counterpart. -/// They only consist of [BinaryProofNode] and [EdgeProofNode] because `Leaf` -/// and `Unresolved` nodes should not appear in a proof. -#[derive(Debug, Clone, PartialEq)] -#[cfg_attr(feature = "parity-scale-codec", derive(Encode, Decode))] -#[cfg_attr(feature = "scale-info", derive(TypeInfo))] -pub enum ProofNode { - /// Binary node. - Binary(BinaryProofNode), - /// Edge node. - Edge(EdgeProofNode), -} - -#[cfg(feature = "serde")] -impl Serialize for ProofNode { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - /// Utility struct used for serializing. - #[derive(Debug, Serialize)] - struct PathWrapper { - value: starknet_ff::FieldElement, - len: usize, - } - - match &self { - ProofNode::Binary(bin) => { - let mut state = serializer.serialize_struct_variant("ProofNode", 0, "Binary", 2)?; - state.serialize_field("left", &bin.left_hash)?; - state.serialize_field("right", &bin.right_hash)?; - state.end() - } - ProofNode::Edge(edge) => { - let value = starknet_ff::FieldElement::from_byte_slice_be(edge.path.as_raw_slice()).unwrap(); - let path_wrapper = PathWrapper { value, len: edge.path.len() }; - - let mut state = serializer.serialize_struct_variant("ProofNode", 1, "Edge", 2)?; - state.serialize_field("path", &path_wrapper)?; - state.serialize_field("child", &edge.child_hash)?; - state.end() - } - } - } -} - -/// A Starknet binary Merkle-Patricia tree with a specific root entry-point and storage. -/// -/// This is used to update, mutate and access global Starknet state as well as individual contract -/// states. -/// -/// For more information on how this functions internally, see [here](super::merkle_node). -#[derive(Debug, Clone, PartialEq)] -#[cfg_attr(feature = "parity-scale-codec", derive(Encode, Decode))] -#[cfg_attr(feature = "scale-info", derive(TypeInfo))] -pub struct MerkleTree { - root: NodeId, - nodes: NodesMapping, - latest_node_id: NodeId, - _hasher: PhantomData, -} - -impl MerkleTree { - /// Less visible initialization for `MerkleTree` as the main entry points should be - /// [`MerkleTree::::load`] for persistent trees and [`MerkleTree::empty`] for - /// transient ones. - pub fn new(root: Felt252Wrapper) -> Self { - let root_node = Node::Unresolved(root); - let mut nodes_mapping: HashMap = HashMap::new(); - let root_id = NodeId(0); // Assign the appropriate initial node ID here - nodes_mapping.insert(root_id, root_node); - - Self { root: root_id, nodes: NodesMapping(nodes_mapping), latest_node_id: root_id, _hasher: PhantomData } - } - - /// Empty tree. - pub fn empty() -> Self { - Self::new(Felt252Wrapper::ZERO) - } - - /// Returns the nodes mapping - pub fn nodes(&self) -> HashMap { - self.nodes.0.clone() - } - - /// Persists all changes to storage and returns the new root hash. 
- /// - /// Note that the root is reference counted in storage. Committing the - /// same tree again will therefore increment the count again. - pub fn commit(&mut self) -> Felt252Wrapper { - self.commit_mut() - } - - /// Return the state root. - pub fn commit_mut(&mut self) -> Felt252Wrapper { - // Go through the tree, collect dirty nodes, calculate their hashes, and - // persist them. Take care to increment ref counts of child nodes. Start from - // the root and traverse the tree. - self.commit_subtree(&self.root.clone()); - - // Unwrap is safe as `commit_subtree` will set the hash. - let root_hash = self.nodes.0.get(&self.root).unwrap().hash().unwrap(); - root_hash - } - - /// Persists any changes in this subtree to storage. - /// - /// This necessitates recursively calculating the hash of, and - /// in turn persisting, any changed child nodes. This is necessary - /// as the parent node's hash relies on its children hashes. - /// - /// In effect, the entire subtree gets persisted. - /// - /// # Arguments - /// - /// * `node` - The top node from the subtree to commit. - fn commit_subtree(&mut self, node_id: &NodeId) { - use Node::*; - let mut nodes = self.nodes.0.clone(); - let node = nodes.get_mut(node_id).unwrap(); - match node { - Unresolved(_) => { /* Unresolved nodes are already persisted. */ } - Leaf(_) => { /* storage wouldn't persist these even if we asked. */ } - Binary(binary) if binary.hash.is_some() => { /* not dirty, already persisted */ } - Edge(edge) if edge.hash.is_some() => { /* not dirty, already persisted */ } - - Binary(binary) => { - self.commit_subtree(&binary.left); - self.commit_subtree(&binary.right); - // This will succeed as `commit_subtree` will set the child hashes. - binary.calculate_hash::(&self.nodes.0.clone()); - } - - Edge(edge) => { - self.commit_subtree(&edge.child); - edge.calculate_hash::(&self.nodes.0.clone()); - } - } - - // Update internal nodes mapping - self.nodes.0.insert(*node_id, node.clone()); - } - - /// Sets the value of a key. To delete a key, set the value to [Felt252Wrapper::ZERO]. - /// - /// # Arguments - /// - /// * `key` - The key to set. - /// * `value` - The value to set. - pub fn set(&mut self, key: &BitSlice, value: Felt252Wrapper) { - let mut nodes = self.nodes.0.clone(); - - if value == Felt252Wrapper::ZERO { - return self.delete_leaf(key); - } - - // Changing or inserting a new leaf into the tree will change the hashes - // of all nodes along the path to the leaf. - let path = self.traverse(key); - for node in &path { - nodes.get_mut(node).unwrap().mark_dirty(); - } - - // There are three possibilities. - // - // 1. The leaf exists, in which case we simply change its value. - // - // 2. The tree is empty, we insert the new leaf and the root becomes an edge node connecting to it. - // - // 3. The leaf does not exist, and the tree is not empty. The final node in the traversal will be an - // edge node who's path diverges from our new leaf node's. - // - // This edge must be split into a new subtree containing both the existing edge's child and the - // new leaf. This requires an edge followed by a binary node and then further edges to both the - // current child and the new leaf. Any of these new edges may also end with an empty path in - // which case they should be elided. It depends on the common path length of the current edge - // and the new leaf i.e. 
the split may be at the first bit (in which case there is no leading - // edge), or the split may be in the middle (requires both leading and post edges), or the - // split may be the final bit (no post edge). - use Node::*; - match path.last() { - Some(node) => { - let match_node = self.nodes.0.get(node).unwrap(); - let updated: Node = match match_node { - Edge(edge) => { - let common = edge.common_path(key); - - // Height of the binary node - let branch_height = edge.height as usize + common.len(); - // Height of the binary node's children - let child_height = branch_height + 1; - - // Path from binary node to new leaf - let new_path = key[child_height..].to_bitvec(); - // Path from binary node to existing child - let old_path = edge.path[common.len() + 1..].to_bitvec(); - - // The new leaf branch of the binary node. - // (this may be edge -> leaf, or just leaf depending). - let new_leaf = Node::Leaf(value); - nodes.insert(self.latest_node_id.next_id(), new_leaf); - - let new = if new_path.is_empty() { - self.latest_node_id - } else { - let new_edge = Node::Edge(EdgeNode { - hash: None, - height: child_height as u64, - path: new_path, - child: self.latest_node_id, - }); - nodes.insert(self.latest_node_id.next_id(), new_edge); - self.latest_node_id - }; - - // The existing child branch of the binary node. - let old = if old_path.is_empty() { - edge.child - } else { - let old_edge = Node::Edge(EdgeNode { - hash: None, - height: child_height as u64, - path: old_path, - child: edge.child, - }); - nodes.insert(self.latest_node_id.next_id(), old_edge); - self.latest_node_id - }; - - let new_direction = Direction::from(key[branch_height]); - let (left, right) = match new_direction { - Direction::Left => (new, old), - Direction::Right => (old, new), - }; - - let branch = Node::Binary(BinaryNode { hash: None, height: branch_height as u64, left, right }); - nodes.insert(self.latest_node_id.next_id(), branch.clone()); - - // We may require an edge leading to the binary node. - if common.is_empty() { - branch - } else { - let edge = Node::Edge(EdgeNode { - hash: None, - height: edge.height, - path: common.to_bitvec(), - child: self.latest_node_id, - }); - nodes.insert(self.latest_node_id.next_id(), edge.clone()); - edge - } - } - // Leaf exists, we replace its value. - Leaf(_) => { - let leaf = Node::Leaf(value); - nodes.insert(self.latest_node_id.next_id(), leaf.clone()); - leaf - } - Unresolved(_) | Binary(_) => { - unreachable!("The end of a traversion cannot be unresolved or binary") - } - }; - - // node.swap(&Box::new(updated)); - nodes.insert(*node, updated); - nodes.insert(self.latest_node_id, self.nodes.0.get(node).unwrap().clone()); - } - None => { - // Getting no travel nodes implies that the tree is empty. - // - // Create a new leaf node with the value, and the root becomes - // an edge node connecting to the leaf. - let leaf = Node::Leaf(value); - nodes.insert(self.latest_node_id.next_id(), leaf); - let edge = - Node::Edge(EdgeNode { hash: None, height: 0, path: key.to_bitvec(), child: self.latest_node_id }); - nodes.insert(self.latest_node_id.next_id(), edge); - - self.root = self.latest_node_id; - } - } - - // Updates self nodes mapping - self.nodes.0 = nodes; - } - - /// Deletes a leaf node from the tree. - /// - /// This is not an external facing API; the functionality is instead accessed by calling - /// [`MerkleTree::set`] with value set to [`Felt252Wrapper::ZERO`]. - /// - /// # Arguments - /// - /// * `key` - The key to delete. 
- fn delete_leaf(&mut self, key: &BitSlice) { - let mut nodes = self.nodes.0.clone(); - // Algorithm explanation: - // - // The leaf's parent node is either an edge, or a binary node. - // If it's an edge node, then it must also be deleted. And its parent - // must be a binary node. In either case we end up with a binary node - // who's one child is deleted. This changes the binary to an edge node. - // - // Note that its possible that there is no binary node -- if the resulting tree would be empty. - // - // This new edge node may need to merge with the old binary node's parent node - // and other remaining child node -- if they're also edges. - // - // Then we are done. - let path = self.traverse(key); - - // Do nothing if the leaf does not exist. - match path.last() { - Some(node) => match nodes.get(node).unwrap() { - Node::Leaf(_) => {} - _ => return, - }, - None => return, - } - - // All hashes along the path will become invalid (if they aren't deleted). - for node in &path { - nodes.get_mut(node).unwrap().mark_dirty(); - } - - // Go backwards until we hit a branch node. - let mut node_iter = path.into_iter().rev().skip_while(|node| !self.nodes.0.get(node).unwrap().is_binary()); - - match node_iter.next() { - Some(node) => { - let new_edge = { - let node = nodes.get_mut(&node).unwrap(); - // This node must be a binary node due to the iteration condition. - let binary = node.as_binary().cloned().unwrap(); - // Create an edge node to replace the old binary node - // i.e. with the remaining child (note the direction invert), - // and a path of just a single bit. - let direction = binary.direction(key).invert(); - let child = binary.get_child(direction.clone()); - let path = once(bool::from(direction)).collect::>(); - let mut edge = EdgeNode { hash: None, height: binary.height, path, child }; - - // Merge the remaining child if it's an edge. - self.merge_edges(&mut edge); - - edge - }; - // Replace the old binary node with the new edge node. - // node.swap(&Box::new(Node::Edge(new_edge))); - nodes.insert(node, Node::Edge(new_edge)); - nodes.insert(self.latest_node_id, nodes.get(&node).unwrap().clone()); - } - None => { - // We reached the root without a hitting binary node. The new tree - // must therefore be empty. - self.root = NodeId(0); - return; - } - }; - - // Check the parent of the new edge. If it is also an edge, then they must merge. - if let Some(node) = node_iter.next() { - if let Node::Edge(edge) = nodes.get_mut(&node).unwrap() { - self.merge_edges(edge); - } - } - } - - /// Returns the value stored at key, or `None` if it does not exist. - /// - /// # Arguments - /// - /// * `key` - The key of the value to get. - /// - /// # Returns - /// - /// The value of the key. - pub fn get(&self, key: &BitSlice) -> Option { - self.traverse(key).last().and_then(|node| match self.nodes.0.get(node).unwrap() { - Node::Leaf(value) if !value.eq(&Felt252Wrapper::ZERO) => Some(*value), - _ => None, - }) - } - - /// Generates a merkle-proof for a given `key`. - /// - /// Returns vector of [`ProofNode`] which form a chain from the root to the key, - /// if it exists, or down to the node which proves that the key does not exist. - /// - /// The nodes are returned in order, root first. - /// - /// Verification is performed by confirming that: - /// 1. the chain follows the path of `key`, and - /// 2. the hashes are correct, and - /// 3. the root hash matches the known root - /// - /// # Arguments - /// - /// * `key` - The key to get the merkle proof of. 
- /// - /// # Returns - /// - /// The merkle proof and all the child nodes hashes. - pub fn get_proof(&self, key: &BitSlice) -> Vec { - let mut nodes = self.traverse(key); - - // Return an empty list if tree is empty. - let node = match nodes.last() { - Some(node) => node, - None => return Vec::new(), - }; - - // A leaf node is redundant data as the information for it is already contained in the previous - // node. - if matches!(self.nodes.0.get(node).unwrap(), Node::Leaf(_)) { - nodes.pop(); - } - - nodes - .iter() - .map(|node| match self.nodes.0.get(node).unwrap() { - Node::Binary(bin) => get_proof_node(&Node::Binary(bin.clone()), &self.nodes.0), - Node::Edge(edge) => get_proof_node(&Node::Edge(edge.clone()), &self.nodes.0), - _ => unreachable!(), - }) - .collect() - } - - /// Traverses from the current root towards the destination [Leaf](Node::Leaf) node. - /// Returns the list of nodes along the path. - /// - /// If the destination node exists, it will be the final node in the list. - /// - /// This means that the final node will always be either a the destination [Leaf](Node::Leaf) - /// node, or an [Edge](Node::Edge) node who's path suffix does not match the leaf's path. - /// - /// The final node can __not__ be a [Binary](Node::Binary) node since it would always be - /// possible to continue on towards the destination. Nor can it be an - /// [Unresolved](Node::Unresolved) node since this would be resolved to check if we can - /// travel further. - /// - /// # Arguments - /// - /// * `dst` - The node to get to. - /// - /// # Returns - /// - /// The list of nodes along the path. - fn traverse(&self, dst: &BitSlice) -> Vec { - if self.nodes.0.get(&self.root).unwrap().is_empty() { - return Vec::new(); - } - - let mut current = self.root; - #[allow(unused_variables)] - let mut height = 0; - let mut nodes = Vec::new(); - loop { - use Node::*; - - let current_tmp = self.nodes.0.get(¤t).unwrap().clone(); - - let next = match current_tmp { - Unresolved(_hash) => panic!("Resolve is useless"), - Binary(binary) => { - nodes.push(current); - let next = binary.direction(dst); - let next = binary.get_child(next); - height += 1; - next - } - Edge(edge) if edge.path_matches(dst) => { - nodes.push(current); - height += edge.path.len(); - edge.child - } - Leaf(_) | Edge(_) => { - nodes.push(current); - return nodes; - } - }; - - current = next; - } - } - - /// This is a convenience function which merges the edge node with its child __iff__ it is also - /// an edge. - /// - /// Does nothing if the child is not also an edge node. - /// - /// This can occur when mutating the tree (e.g. deleting a child of a binary node), and is an - /// illegal state (since edge nodes __must be__ maximal subtrees). - /// - /// # Arguments - /// - /// * `parent` - The parent node to merge the child with. 
- fn merge_edges(&self, parent: &mut EdgeNode) { - let resolved_child = match self.nodes.0.get(&parent.child).unwrap() { - Node::Unresolved(_hash) => panic!("Resolve is useless"), - other => other.clone(), - }; - - if let Some(child_edge) = resolved_child.as_edge().cloned() { - parent.path.extend_from_bitslice(&child_edge.path); - parent.child = child_edge.child; - } - } -} diff --git a/crates/primitives/commitments/src/merkle_patricia_tree/mod.rs b/crates/primitives/commitments/src/merkle_patricia_tree/mod.rs deleted file mode 100644 index b6b1078ef0..0000000000 --- a/crates/primitives/commitments/src/merkle_patricia_tree/mod.rs +++ /dev/null @@ -1,6 +0,0 @@ -// Merkle Tree implementation using references -pub mod ref_merkle_node; -pub mod ref_merkle_tree; -// Merkle Tree implementation using nodes mapping -pub mod merkle_node; -pub mod merkle_tree; diff --git a/crates/primitives/commitments/src/merkle_patricia_tree/ref_merkle_node.rs b/crates/primitives/commitments/src/merkle_patricia_tree/ref_merkle_node.rs deleted file mode 100644 index 671a35c074..0000000000 --- a/crates/primitives/commitments/src/merkle_patricia_tree/ref_merkle_node.rs +++ /dev/null @@ -1,272 +0,0 @@ -//! Contains constructs for describing the nodes in a Binary Merkle Patricia Tree -//! used by Starknet. -//! -//! For more information about how these Starknet trees are structured, see -//! [`MerkleTree`](super::ref_merkle_tree::RefMerkleTree). - -use alloc::rc::Rc; -use core::cell::RefCell; - -use bitvec::order::Msb0; -use bitvec::prelude::BitVec; -use bitvec::slice::BitSlice; -use mp_felt::Felt252Wrapper; -use mp_hashers::HasherT; - -/// A node in a Binary Merkle-Patricia Tree graph. -#[derive(Clone, Debug, PartialEq)] -pub enum Node { - /// A node that has not been fetched from storage yet. - /// - /// As such, all we know is its hash. - Unresolved(Felt252Wrapper), - /// A branch node with exactly two children. - Binary(BinaryNode), - /// Describes a path connecting two other nodes. - Edge(EdgeNode), - /// A leaf node that contains a value. - Leaf(Felt252Wrapper), -} - -/// Describes the [Node::Binary] variant. -#[derive(Clone, Debug, PartialEq)] -pub struct BinaryNode { - /// The hash of this node. Is [None] if the node - /// has not yet been committed. - pub hash: Option, - /// The height of this node in the tree. - pub height: usize, - /// [Left](Direction::Left) child. - pub left: Rc>, - /// [Right](Direction::Right) child. - pub right: Rc>, -} - -/// Node that is an edge. -#[derive(Clone, Debug, PartialEq)] -pub struct EdgeNode { - /// The hash of this node. Is [None] if the node - /// has not yet been committed. - pub hash: Option, - /// The starting height of this node in the tree. - pub height: usize, - /// The path this edge takes. - pub path: BitVec, - /// The child of this node. - pub child: Rc>, -} - -/// Describes the direction a child of a [BinaryNode] may have. -/// -/// Binary nodes have two children, one left and one right. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum Direction { - /// Left direction. - Left, - /// Right direction. - Right, -} - -impl Direction { - /// Inverts the [Direction]. - /// - /// [Left] becomes [Right], and [Right] becomes [Left]. 
- /// - /// [Left]: Direction::Left - /// [Right]: Direction::Right - pub fn invert(self) -> Direction { - match self { - Direction::Left => Direction::Right, - Direction::Right => Direction::Left, - } - } -} - -impl From for Direction { - fn from(tf: bool) -> Self { - match tf { - true => Direction::Right, - false => Direction::Left, - } - } -} - -impl From for bool { - fn from(direction: Direction) -> Self { - match direction { - Direction::Left => false, - Direction::Right => true, - } - } -} - -impl BinaryNode { - /// Maps the key's bit at the binary node's height to a [Direction]. - /// - /// This can be used to check which direction the key describes in the context - /// of this binary node i.e. which direction the child along the key's path would - /// take. - /// - /// # Arguments - /// - /// * `key` - The key to get the direction of. - /// - /// # Returns - /// - /// The direction of the key. - pub fn direction(&self, key: &BitSlice) -> Direction { - key[self.height].into() - } - - /// Returns the [Left] or [Right] child. - /// - /// [Left]: Direction::Left - /// [Right]: Direction::Right - /// - /// # Arguments - /// - /// `direction` - The direction where to get the child from. - /// - /// # Returns - /// - /// The child in the specified direction. - pub fn get_child(&self, direction: Direction) -> Rc> { - match direction { - Direction::Left => self.left.clone(), - Direction::Right => self.right.clone(), - } - } - - /// If possible, calculates and sets its own hash value. - /// - /// Does nothing if the hash is already [Some]. - /// - /// If either child's hash is [None], then the hash cannot - /// be calculated and it will remain [None]. - pub(crate) fn calculate_hash(&mut self) { - if self.hash.is_some() { - return; - } - - let left = match self.left.borrow().hash() { - Some(hash) => hash, - None => unreachable!("subtrees have to be committed first"), - }; - - let right = match self.right.borrow().hash() { - Some(hash) => hash, - None => unreachable!("subtrees have to be committed first"), - }; - - self.hash = Some(Felt252Wrapper(H::hash_elements(left.0, right.0))); - } -} - -impl Node { - /// Convenience function which sets the inner node's hash to [None], if - /// applicable. - /// - /// Used to indicate that this node has been mutated. - pub fn mark_dirty(&mut self) { - match self { - Node::Binary(inner) => inner.hash = None, - Node::Edge(inner) => inner.hash = None, - _ => {} - } - } - - /// Returns true if the node represents an empty node -- this is defined as a node - /// with the [Felt252Wrapper::ZERO]. - /// - /// This can occur for the root node in an empty graph. - pub fn is_empty(&self) -> bool { - match self { - Node::Unresolved(hash) => hash == &Felt252Wrapper::ZERO, - _ => false, - } - } - - /// Is the node a binary node. - pub fn is_binary(&self) -> bool { - matches!(self, Node::Binary(..)) - } - - /// Convert to node to binary node type (returns None if it's not a binary node). - pub fn as_binary(&self) -> Option<&BinaryNode> { - match self { - Node::Binary(binary) => Some(binary), - _ => None, - } - } - - /// Convert to node to edge node type (returns None if it's not a edge node). - pub fn as_edge(&self) -> Option<&EdgeNode> { - match self { - Node::Edge(edge) => Some(edge), - _ => None, - } - } - - /// Get the hash of a node. 
- pub fn hash(&self) -> Option { - match self { - Node::Unresolved(hash) => Some(*hash), - Node::Binary(binary) => binary.hash, - Node::Edge(edge) => edge.hash, - Node::Leaf(value) => Some(*value), - } - } -} - -impl EdgeNode { - /// Returns true if the edge node's path matches the same path given by the key. - /// - /// # Arguments - /// - /// * `key` - The key to check if the path matches with the edge node. - pub fn path_matches(&self, key: &BitSlice) -> bool { - self.path == key[self.height..self.height + self.path.len()] - } - - /// Returns the common bit prefix between the edge node's path and the given key. - /// - /// This is calculated with the edge's height taken into account. - /// - /// # Arguments - /// - /// * `key` - The key to get the common path from. - pub fn common_path(&self, key: &BitSlice) -> &BitSlice { - let key_path = key.iter().skip(self.height); - let common_length = key_path.zip(self.path.iter()).take_while(|(a, b)| a == b).count(); - - &self.path[..common_length] - } - - /// If possible, calculates and sets its own hash value. - /// - /// Does nothing if the hash is already [Some]. - /// - /// If the child's hash is [None], then the hash cannot - /// be calculated and it will remain [None]. - pub(crate) fn calculate_hash(&mut self) { - if self.hash.is_some() { - return; - } - - let child = match self.child.borrow().hash() { - Some(hash) => hash, - None => unreachable!("subtree has to be committed before"), - }; - let mut temp_path = self.path.clone(); - temp_path.force_align(); - - let path = Felt252Wrapper::try_from(temp_path.into_vec().as_slice()).unwrap(); - let mut length = [0; 32]; - // Safe as len() is guaranteed to be <= 251 - length[31] = self.path.len() as u8; - - let length = Felt252Wrapper::try_from(&length).unwrap(); - let hash = Felt252Wrapper(H::hash_elements(child.0, path.0) + length.0); - self.hash = Some(hash); - } -} diff --git a/crates/primitives/commitments/src/merkle_patricia_tree/ref_merkle_tree.rs b/crates/primitives/commitments/src/merkle_patricia_tree/ref_merkle_tree.rs deleted file mode 100644 index eae943d9d3..0000000000 --- a/crates/primitives/commitments/src/merkle_patricia_tree/ref_merkle_tree.rs +++ /dev/null @@ -1,497 +0,0 @@ -//! This is a gigantic copy pasta from Thanks to the equilibrium team and whoever else contributed for the code. -use alloc::rc::Rc; -use alloc::vec::Vec; -use core::cell::RefCell; -use core::iter::once; -use core::marker::PhantomData; - -use bitvec::prelude::{BitSlice, BitVec, Msb0}; -use mp_felt::Felt252Wrapper; -use mp_hashers::HasherT; - -use super::ref_merkle_node::{BinaryNode, Direction, EdgeNode, Node}; - -/// Lightweight representation of [BinaryNode]. Only holds left and right -/// hashes. -#[derive(Debug, PartialEq, Eq)] -pub struct BinaryProofNode { - /// Left hash. - pub left_hash: Felt252Wrapper, - /// Right hash. - pub right_hash: Felt252Wrapper, -} - -impl From<&BinaryNode> for ProofNode { - fn from(bin: &BinaryNode) -> Self { - Self::Binary(BinaryProofNode { - left_hash: bin.left.borrow().hash().expect("Node should be committed"), - right_hash: bin.right.borrow().hash().expect("Node should be committed"), - }) - } -} - -/// Ligthtweight representation of [EdgeNode]. Only holds its path and its -/// child's hash. -#[derive(Debug, PartialEq, Eq)] -pub struct EdgeProofNode { - /// Path of the node. - pub path: BitVec, - /// Hash of the child node. 
- pub child_hash: Felt252Wrapper, -} - -impl From<&EdgeNode> for ProofNode { - fn from(edge: &EdgeNode) -> Self { - Self::Edge(EdgeProofNode { - path: edge.path.clone(), - child_hash: edge.child.borrow().hash().expect("Node should be committed"), - }) - } -} - -/// [ProofNode] s are lightweight versions of their `Node` counterpart. -/// They only consist of [BinaryProofNode] and [EdgeProofNode] because `Leaf` -/// and `Unresolved` nodes should not appear in a proof. -#[derive(Debug, PartialEq, Eq)] -pub enum ProofNode { - /// Binary node. - Binary(BinaryProofNode), - /// Edge node. - Edge(EdgeProofNode), -} - -/// A Starknet binary Merkle-Patricia tree with a specific root entry-point and -/// storage. -/// -/// This is used to update, mutate and access global Starknet state as well as -/// individual contract states. -/// -/// For more information on how this functions internally, see -/// [here](super::ref_merkle_tree). -#[derive(Debug, Clone)] -pub struct RefMerkleTree { - root: Rc>, - _hasher: PhantomData, -} - -impl RefMerkleTree { - /// Less visible initialization for `MerkleTree` as the main entry points - /// should be [`MerkleTree::::load`] for persistent trees - /// and [`MerkleTree::empty`] for transient ones. - fn new(root: Felt252Wrapper) -> Self { - let root_node = Rc::new(RefCell::new(Node::Unresolved(root))); - Self { root: root_node, _hasher: PhantomData } - } - - /// Empty tree. - pub fn empty() -> Self { - Self::new(Felt252Wrapper::ZERO) - } - - /// Persists all changes to storage and returns the new root hash. - /// - /// Note that the root is reference counted in storage. Committing the - /// same tree again will therefore increment the count again. - pub fn commit(&mut self) -> Felt252Wrapper { - self.commit_mut() - } - /// Return the state root. - pub fn commit_mut(&mut self) -> Felt252Wrapper { - // Go through tree, collect dirty nodes, calculate their hashes and - // persist them. Take care to increment ref counts of child nodes. So in order - // to do this correctly, will have to start back-to-front. - Self::commit_subtree(&mut self.root.borrow_mut()); - // unwrap is safe as `commit_subtree` will set the hash. - self.root.borrow().hash().unwrap() - } - - /// Persists any changes in this subtree to storage. - /// - /// This necessitates recursively calculating the hash of, and - /// in turn persisting, any changed child nodes. This is necessary - /// as the parent node's hash relies on its children hashes. - /// - /// In effect, the entire subtree gets persisted. - /// - /// # Arguments - /// - /// * `node` - The top node from the subtree to commit. - fn commit_subtree(node: &mut Node) { - use Node::*; - match node { - Unresolved(_) => { /* Unresolved nodes are already persisted. */ } - Leaf(_) => { /* storage wouldn't persist these even if we asked. */ } - Binary(binary) if binary.hash.is_some() => { /* not dirty, already persisted */ } - Edge(edge) if edge.hash.is_some() => { /* not dirty, already persisted */ } - - Binary(binary) => { - Self::commit_subtree(&mut binary.left.borrow_mut()); - Self::commit_subtree(&mut binary.right.borrow_mut()); - // This will succeed as `commit_subtree` will set the child hashes. - binary.calculate_hash::(); - } - - Edge(edge) => { - Self::commit_subtree(&mut edge.child.borrow_mut()); - // This will succeed as `commit_subtree` will set the child's hash. - edge.calculate_hash::(); - } - } - } - - /// Sets the value of a key. To delete a key, set the value to - /// [Felt252Wrapper::ZERO]. 
- /// - /// # Arguments - /// - /// * `key` - The key to set. - /// * `value` - The value to set. - pub fn set(&mut self, key: &BitSlice, value: Felt252Wrapper) { - if value == Felt252Wrapper::ZERO { - return self.delete_leaf(key); - } - - // Changing or inserting a new leaf into the tree will change the hashes - // of all nodes along the path to the leaf. - let path = self.traverse(key); - for node in &path { - node.borrow_mut().mark_dirty(); - } - - // There are three possibilities. - // - // 1. The leaf exists, in which case we simply change its value. - // - // 2. The tree is empty, we insert the new leaf and the root becomes an edge node connecting to it. - // - // 3. The leaf does not exist, and the tree is not empty. The final node in the traversal will be an - // edge node who's path diverges from our new leaf node's. - // - // This edge must be split into a new subtree containing both the existing - // edge's child and the new leaf. This requires an edge followed by a - // binary node and then further edges to both the current child and - // the new leaf. Any of these new edges may also end with an empty path in - // which case they should be elided. It depends on the common path length of - // the current edge and the new leaf i.e. the split may be at the - // first bit (in which case there is no leading edge), or the split - // may be in the middle (requires both leading and post edges), or the - // split may be the final bit (no post edge). - use Node::*; - match path.last() { - Some(node) => { - let updated = match &*node.borrow() { - Edge(edge) => { - let common = edge.common_path(key); - - // Height of the binary node - let branch_height = edge.height + common.len(); - // Height of the binary node's children - let child_height = branch_height + 1; - - // Path from binary node to new leaf - let new_path = key[child_height..].to_bitvec(); - // Path from binary node to existing child - let old_path = edge.path[common.len() + 1..].to_bitvec(); - - // The new leaf branch of the binary node. - // (this may be edge -> leaf, or just leaf depending). - let new_leaf = Node::Leaf(value); - let new = if new_path.is_empty() { - Rc::new(RefCell::new(new_leaf)) - } else { - let new_edge = Node::Edge(EdgeNode { - hash: None, - height: child_height, - path: new_path, - child: Rc::new(RefCell::new(new_leaf)), - }); - Rc::new(RefCell::new(new_edge)) - }; - - // The existing child branch of the binary node. - let old = if old_path.is_empty() { - edge.child.clone() - } else { - let old_edge = Node::Edge(EdgeNode { - hash: None, - height: child_height, - path: old_path, - child: edge.child.clone(), - }); - Rc::new(RefCell::new(old_edge)) - }; - - let new_direction = Direction::from(key[branch_height]); - let (left, right) = match new_direction { - Direction::Left => (new, old), - Direction::Right => (old, new), - }; - - let branch = Node::Binary(BinaryNode { hash: None, height: branch_height, left, right }); - - // We may require an edge leading to the binary node. - if common.is_empty() { - branch - } else { - Node::Edge(EdgeNode { - hash: None, - height: edge.height, - path: common.to_bitvec(), - child: Rc::new(RefCell::new(branch)), - }) - } - } - // Leaf exists, we replace its value. - Leaf(_) => Node::Leaf(value), - Unresolved(_) | Binary(_) => { - unreachable!("The end of a traversion cannot be unresolved or binary") - } - }; - - node.swap(&RefCell::new(updated)); - } - None => { - // Getting no travel nodes implies that the tree is empty. 
- // - // Create a new leaf node with the value, and the root becomes - // an edge node connecting to the leaf. - let leaf = Node::Leaf(value); - let edge = Node::Edge(EdgeNode { - hash: None, - height: 0, - path: key.to_bitvec(), - child: Rc::new(RefCell::new(leaf)), - }); - - self.root = Rc::new(RefCell::new(edge)); - } - } - } - - /// Deletes a leaf node from the tree. - /// - /// This is not an external facing API; the functionality is instead - /// accessed by calling [`MerkleTree::set`] with value set to - /// [`Felt252Wrapper::ZERO`]. - /// - /// # Arguments - /// - /// * `key` - The key to delete. - fn delete_leaf(&mut self, key: &BitSlice) { - // Algorithm explanation: - // - // The leaf's parent node is either an edge, or a binary node. - // If it's an edge node, then it must also be deleted. And its parent - // must be a binary node. In either case we end up with a binary node - // who's one child is deleted. This changes the binary to an edge node. - // - // Note that its possible that there is no binary node -- if the resulting tree - // would be empty. - // - // This new edge node may need to merge with the old binary node's parent node - // and other remaining child node -- if they're also edges. - // - // Then we are done. - let path = self.traverse(key); - - // Do nothing if the leaf does not exist. - match path.last() { - Some(node) => match &*node.borrow() { - Node::Leaf(_) => {} - _ => return, - }, - None => return, - } - - // All hashes along the path will become invalid (if they aren't deleted). - for node in &path { - node.borrow_mut().mark_dirty(); - } - - // Go backwards until we hit a branch node. - let mut node_iter = path.into_iter().rev().skip_while(|node| !node.borrow().is_binary()); - - match node_iter.next() { - Some(node) => { - let new_edge = { - // This node must be a binary node due to the iteration condition. - let binary = node.borrow().as_binary().cloned().unwrap(); - // Create an edge node to replace the old binary node - // i.e. with the remaining child (note the direction invert), - // and a path of just a single bit. - let direction = binary.direction(key).invert(); - let child = binary.get_child(direction); - let path = once(bool::from(direction)).collect::>(); - let mut edge = EdgeNode { hash: None, height: binary.height, path, child }; - - // Merge the remaining child if it's an edge. - self.merge_edges(&mut edge); - - edge - }; - // Replace the old binary node with the new edge node. - node.swap(&RefCell::new(Node::Edge(new_edge))); - } - None => { - // We reached the root without a hitting binary node. The new tree - // must therefore be empty. - self.root = Rc::new(RefCell::new(Node::Unresolved(Felt252Wrapper::ZERO))); - return; - } - }; - - // Check the parent of the new edge. If it is also an edge, then they must - // merge. - if let Some(node) = node_iter.next() { - if let Node::Edge(edge) = &mut *node.borrow_mut() { - self.merge_edges(edge); - } - } - } - - /// Returns the value stored at key, or `None` if it does not exist. - /// - /// # Arguments - /// - /// * `key` - The key of the value to get. - /// - /// # Returns - /// - /// The value of the key. - #[allow(unused)] - pub fn get(&self, key: &BitSlice) -> Option { - self.traverse(key).last().and_then(|node| match &*node.borrow() { - Node::Leaf(value) if !value.eq(&Felt252Wrapper::ZERO) => Some(*value), - _ => None, - }) - } - - /// Generates a merkle-proof for a given `key`. 
- /// - /// Returns vector of [`ProofNode`] which form a chain from the root to the - /// key, if it exists, or down to the node which proves that the key - /// does not exist. - /// - /// The nodes are returned in order, root first. - /// - /// Verification is performed by confirming that: - /// 1. the chain follows the path of `key`, and - /// 2. the hashes are correct, and - /// 3. the root hash matches the known root - /// - /// # Arguments - /// - /// * `key` - The key to get the merkle proof of. - /// - /// # Returns - /// - /// The merkle proof and all the child nodes hashes. - #[allow(unused)] - pub fn get_proof(&self, key: &BitSlice) -> Vec { - let mut nodes = self.traverse(key); - - // Return an empty list if tree is empty. - let node = match nodes.last() { - Some(node) => node, - None => return Vec::new(), - }; - - // A leaf node is redundant data as the information for it is already contained - // in the previous node. - if matches!(&*node.borrow(), Node::Leaf(_)) { - nodes.pop(); - } - - nodes - .iter() - .map(|node| match &*node.borrow() { - Node::Binary(bin) => ProofNode::from(bin), - Node::Edge(edge) => ProofNode::from(edge), - _ => unreachable!(), - }) - .collect() - } - - /// Traverses from the current root towards the destination - /// [Leaf](Node::Leaf) node. Returns the list of nodes along the path. - /// - /// If the destination node exists, it will be the final node in the list. - /// - /// This means that the final node will always be either a the destination - /// [Leaf](Node::Leaf) node, or an [Edge](Node::Edge) node who's path - /// suffix does not match the leaf's path. - /// - /// The final node can __not__ be a [Binary](Node::Binary) node since it - /// would always be possible to continue on towards the destination. Nor - /// can it be an [Unresolved](Node::Unresolved) node since this would be - /// resolved to check if we can travel further. - /// - /// # Arguments - /// - /// * `dst` - The node to get to. - /// - /// # Returns - /// - /// The list of nodes along the path. - fn traverse(&self, dst: &BitSlice) -> Vec>> { - if self.root.borrow().is_empty() { - return Vec::new(); - } - - let mut current = self.root.clone(); - #[allow(unused_variables)] - let mut height = 0; - let mut nodes = Vec::new(); - loop { - use Node::*; - - let current_tmp = current.borrow().clone(); - - let next = match current_tmp { - Unresolved(_hash) => panic!("Resolve is useless"), - Binary(binary) => { - nodes.push(current.clone()); - let next = binary.direction(dst); - let next = binary.get_child(next); - height += 1; - next - } - Edge(edge) if edge.path_matches(dst) => { - nodes.push(current.clone()); - height += edge.path.len(); - edge.child.clone() - } - Leaf(_) | Edge(_) => { - nodes.push(current); - return nodes; - } - }; - - current = next; - } - } - - /// This is a convenience function which merges the edge node with its child - /// __iff__ it is also an edge. - /// - /// Does nothing if the child is not also an edge node. - /// - /// This can occur when mutating the tree (e.g. deleting a child of a binary - /// node), and is an illegal state (since edge nodes __must be__ maximal - /// subtrees). - /// - /// # Arguments - /// - /// * `parent` - The parent node to merge the child with. 
-    fn merge_edges(&self, parent: &mut EdgeNode) {
-        let resolved_child = match &*parent.child.borrow() {
-            Node::Unresolved(_hash) => panic!("Resolve is useless"),
-            other => other.clone(),
-        };
-
-        if let Some(child_edge) = resolved_child.as_edge().cloned() {
-            parent.path.extend_from_bitslice(&child_edge.path);
-            parent.child = child_edge.child;
-        }
-    }
-}
diff --git a/crates/primitives/commitments/src/tests.rs b/crates/primitives/commitments/src/tests.rs
deleted file mode 100644
index 0b715aa63f..0000000000
--- a/crates/primitives/commitments/src/tests.rs
+++ /dev/null
@@ -1,119 +0,0 @@
-use mp_felt::Felt252Wrapper;
-use mp_hashers::pedersen::PedersenHasher;
-use mp_hashers::HasherT;
-use starknet_api::stdlib::collections::HashMap;
-use starknet_core::crypto::compute_hash_on_elements;
-use starknet_crypto::FieldElement;
-
-use super::merkle_patricia_tree::merkle_node::{BinaryNode, Direction, Node, NodeId};
-
-pub const PEDERSEN_ZERO_HASH: &str = "0x49EE3EBA8C1600700EE1B87EB599F16716B0B1022947733551FDE4050CA6804";
-
-#[derive(Default)]
-struct TestHasher;
-
-impl HasherT for TestHasher {
-    fn hash_bytes(_data: &[u8]) -> Felt252Wrapper {
-        unimplemented!()
-    }
-
-    fn compute_hash_on_wrappers(_data: &[Felt252Wrapper]) -> Felt252Wrapper {
-        unimplemented!()
-    }
-
-    fn hash_elements(a: FieldElement, b: FieldElement) -> FieldElement {
-        a + b
-    }
-
-    fn compute_hash_on_elements(elements: &[FieldElement]) -> FieldElement {
-        if elements.is_empty() {
-            FieldElement::ZERO
-        } else {
-            let hash = elements.iter().fold(FieldElement::ZERO, |a, b| a + *b);
-            hash
-        }
-    }
-}
-
-#[test]
-fn test_binary_node_functions() {
-    let mut nodes: HashMap<NodeId, Node> = HashMap::new();
-    nodes.insert(NodeId(0), Node::Leaf(Felt252Wrapper::from(2_u32)));
-    nodes.insert(NodeId(1), Node::Leaf(Felt252Wrapper::from(3_u32)));
-
-    let binary_node =
-        BinaryNode { hash: Some(Felt252Wrapper::from(1_u32)), height: 0, left: NodeId(0), right: NodeId(1) };
-
-    let unresolved_node = Node::Unresolved(Felt252Wrapper::from(6_u32));
-
-    let left_child = binary_node.get_child(Direction::Left);
-    let right_child = binary_node.get_child(Direction::Right);
-
-    assert_eq!(left_child, NodeId(0));
-    assert_eq!(right_child, NodeId(1));
-    assert_eq!(nodes.get(&left_child).unwrap().hash(), Some(Felt252Wrapper::from(2_u32)));
-    assert_eq!(nodes.get(&right_child).unwrap().hash(), Some(Felt252Wrapper::from(3_u32)));
-
-    assert_eq!(binary_node.hash, Some(Felt252Wrapper::from(1_u32)));
-
-    assert!(!unresolved_node.is_empty());
-    assert!(!unresolved_node.is_binary());
-}
-
-#[test]
-fn test_direction_invert() {
-    let left = Direction::Left;
-    let right = Direction::Right;
-
-    assert_eq!(left.invert(), Direction::Right);
-    assert_eq!(right.invert(), Direction::Left);
-}
-
-#[test]
-fn test_binary_node_calculate_hash() {
-    let mut nodes: HashMap<NodeId, Node> = HashMap::new();
-    nodes.insert(NodeId(0), Node::Leaf(Felt252Wrapper::from(2_u32)));
-    nodes.insert(NodeId(1), Node::Leaf(Felt252Wrapper::from(3_u32)));
-
-    let mut binary_node = BinaryNode { hash: None, height: 0, left: NodeId(0), right: NodeId(1) };
-
-    binary_node.calculate_hash::<TestHasher>(&nodes);
-    assert_eq!(binary_node.hash, Some(Felt252Wrapper::from(5_u32)));
-}
-
-#[test]
-fn test_binary_node_implementations() {
-    let mut nodes: HashMap<NodeId, Node> = HashMap::new();
-    nodes.insert(NodeId(0), Node::Leaf(Felt252Wrapper::from(2_u32)));
-    nodes.insert(NodeId(1), Node::Leaf(Felt252Wrapper::from(3_u32)));
-
-    let test_node = BinaryNode { hash: None, height: 0, left: NodeId(0), right: NodeId(1) };
-
-    // Test Display trait implementation
-    let node_string = format!("{:?}", test_node);
-    assert_eq!(node_string, "BinaryNode { hash: None, height: 0, left: NodeId(0), right: NodeId(1) }");
-
-    // Test Debug trait implementation
-    let debug_string = format!("{:?}", test_node);
-    assert_eq!(debug_string, "BinaryNode { hash: None, height: 0, left: NodeId(0), right: NodeId(1) }");
-}
-
-#[test]
-fn test_pedersen_hash_elements_zero() {
-    let elements = vec![Felt252Wrapper::ZERO, Felt252Wrapper::ONE];
-
-    let expected_hash = compute_hash_on_elements(&[FieldElement::ZERO, FieldElement::ONE]);
-    assert_eq!(PedersenHasher::compute_hash_on_wrappers(&elements), expected_hash.into());
-}
-
-#[test]
-fn test_pedersen_hash_elements_empty() {
-    let elements = vec![];
-
-    assert_eq!(
-        PedersenHasher::compute_hash_on_wrappers(&elements),
-        Felt252Wrapper::from_hex_be(PEDERSEN_ZERO_HASH).unwrap()
-    );
-}
-
-// TODO: add tests to poseidon hasher too
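Reviewer note: the deleted `RefMerkleTree` in `crates/primitives/commitments` exposed a small API (`set`, `commit`, `get`, `get_proof`) over `Rc<RefCell<Node>>` nodes, with `commit` hashing dirty subtrees bottom-up. For reference, here is a minimal usage sketch of that removed API. The module path, the `BitVec<u8, Msb0>` key ordering, and `PedersenHasher` as the `HasherT` implementation are assumptions for illustration only and are not part of this diff.

```rust
// Sketch only: import paths and key layout are assumed, mirroring the deleted sources.
use bitvec::prelude::{BitVec, Msb0};
use mp_commitments::merkle_patricia_tree::ref_merkle_tree::RefMerkleTree; // assumed path
use mp_felt::Felt252Wrapper;
use mp_hashers::pedersen::PedersenHasher;

fn commitment_roundtrip() -> Felt252Wrapper {
    // Transient tree starting from a zero root.
    let mut tree = RefMerkleTree::<PedersenHasher>::empty();

    // Keys are MSB-first bit paths (251 bits for Starknet storage keys).
    let key: BitVec<u8, Msb0> = BitVec::repeat(false, 251);
    let value = Felt252Wrapper::from(42_u32);

    // Insert the leaf, then commit to compute every dirty node hash bottom-up.
    tree.set(&key, value);
    let root = tree.commit();

    // Reads and proofs traverse the committed tree.
    assert_eq!(tree.get(&key), Some(value));
    assert!(!tree.get_proof(&key).is_empty());

    // Setting a key back to ZERO deletes the leaf.
    tree.set(&key, Felt252Wrapper::ZERO);

    root
}
```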
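The deleted tests end with a TODO for the Poseidon hasher. A possible shape for such a test is sketched below; it assumes `mp_hashers::poseidon::PoseidonHasher` exists alongside the Pedersen implementation and, since this file gives no reference Poseidon zero-hash constant, it only checks determinism and order-sensitivity rather than a fixed expected value.

```rust
use mp_felt::Felt252Wrapper;
use mp_hashers::poseidon::PoseidonHasher; // assumed module path, mirroring mp_hashers::pedersen
use mp_hashers::HasherT;

#[test]
fn test_poseidon_hash_elements() {
    let elements = vec![Felt252Wrapper::ZERO, Felt252Wrapper::ONE];

    // Hashing the same input twice is deterministic...
    let first = PoseidonHasher::compute_hash_on_wrappers(&elements);
    let second = PoseidonHasher::compute_hash_on_wrappers(&elements);
    assert_eq!(first, second);

    // ...and the hash is sensitive to element order.
    let swapped = PoseidonHasher::compute_hash_on_wrappers(&[Felt252Wrapper::ONE, Felt252Wrapper::ZERO]);
    assert_ne!(first, swapped);
}
```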