From 7346894cd23bff80fa0910ed300beefc0abc4f6d Mon Sep 17 00:00:00 2001
From: Shams Asari
Date: Tue, 19 Nov 2024 16:36:57 +0000
Subject: [PATCH 1/4] feat(rpc): v0.8.0 getMessagesStatus method

---
 CHANGELOG.md                                  |   1 +
 Cargo.lock                                    |   2 +
 crates/client/db/Cargo.toml                   |   1 +
 crates/client/db/src/l1_db.rs                 |  35 ++-
 crates/client/db/src/lib.rs                   |   8 +
 crates/client/eth/src/l1_messaging.rs         | 212 +++++++++---------
 crates/client/mempool/src/inner.rs            |  14 +-
 crates/client/mempool/src/lib.rs              |   5 +-
 crates/client/rpc/Cargo.toml                  |   1 +
 crates/client/rpc/src/RPC.md                  |   2 +-
 crates/client/rpc/src/versions/v0_8_0/api.rs  |   8 +
 .../methods/read/get_messages_status.rs       |  43 ++++
 .../src/versions/v0_8_0/methods/read/lib.rs   |  11 +-
 .../src/versions/v0_8_0/methods/read/mod.rs   |   1 +
 crates/node/src/cli/analytics.rs              |   2 +-
 15 files changed, 218 insertions(+), 128 deletions(-)
 create mode 100644 crates/client/rpc/src/versions/v0_8_0/methods/read/get_messages_status.rs

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 800fa137e..18140441e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,7 @@

 ## Next release

+- feat(rpc): added `getMessagesStatus` method
 - fix: FeePayment conversion
 - fix(block_production): get l2-to-l1 messages recursively from the call tree
 - refactor: replace starknet-rs BlockId with types-rs BlockId and remove redundant mp_block::BlockId
diff --git a/Cargo.lock b/Cargo.lock
index 19fefba59..94fa1267c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -5521,6 +5521,7 @@ dependencies = [
 name = "mc-db"
 version = "0.7.0"
 dependencies = [
+ "alloy",
  "anyhow",
  "bincode 1.3.3",
  "bonsai-trie",
@@ -5789,6 +5790,7 @@ dependencies = [
 name = "mc-rpc"
 version = "0.7.0"
 dependencies = [
+ "alloy",
  "anyhow",
  "blockifier",
  "jsonrpsee",
diff --git a/crates/client/db/Cargo.toml b/crates/client/db/Cargo.toml
index 85ab46bc1..2d5f4a7c8 100644
--- a/crates/client/db/Cargo.toml
+++ b/crates/client/db/Cargo.toml
@@ -33,6 +33,7 @@ starknet-types-core = { workspace = true }
 starknet_api = { workspace = true }

 # Other
+alloy = { workspace = true }
 anyhow.workspace = true
 bincode = { workspace = true }
 rayon = { workspace = true }
diff --git a/crates/client/db/src/l1_db.rs b/crates/client/db/src/l1_db.rs
index 10c235091..ffe45c57d 100644
--- a/crates/client/db/src/l1_db.rs
+++ b/crates/client/db/src/l1_db.rs
@@ -1,9 +1,10 @@
+use crate::error::DbError;
+use crate::{Column, DatabaseExt, MadaraBackend, MadaraStorageError};
+use alloy::primitives::TxHash;
 use rocksdb::WriteOptions;
 use serde::{Deserialize, Serialize};
 use starknet_api::core::Nonce;
-
-use crate::error::DbError;
-use crate::{Column, DatabaseExt, MadaraBackend, MadaraStorageError};
+use starknet_types_core::felt::Felt;

 type Result<T, E = MadaraStorageError> = std::result::Result<T, E>;
@@ -128,4 +129,32 @@ impl MadaraBackend {
         self.db.put_cf_opt(&nonce_column, bincode::serialize(&nonce)?, /* empty value */ [], &writeopts)?;
         Ok(())
     }
+
+    pub fn get_l1_handler_tx_hashes(&self, l1_tx_hash: TxHash) -> Result<Vec<Felt>, DbError> {
+        let l1_l2_mappings_column = self.db.get_column(Column::L1MessagingHandlerTxHashes);
+        let mut l1_handler_tx_hashes = vec![];
+        for kv_bytes in self.db.prefix_iterator_cf(&l1_l2_mappings_column, l1_tx_hash.as_slice()) {
+            let l1_handler_tx_hash = Felt::from_bytes_be_slice(&kv_bytes?.1);
+            l1_handler_tx_hashes.push(l1_handler_tx_hash);
+        }
+        Ok(l1_handler_tx_hashes)
+    }
+
+    /// Store mapping from L1 transaction to L1 handler transaction (on the L2). A unique order
+    /// value is required to ensure the handler transactions are retrieved in the correct order.
+    pub fn add_l1_handler_tx_hash_mapping(
+        &self,
+        l1_tx_hash: TxHash,
+        l1_handler_tx_hash: Felt,
+        order: u64,
+    ) -> Result<(), DbError> {
+        let l1_l2_mappings_column = self.db.get_column(Column::L1MessagingHandlerTxHashes);
+        let mut key = [0u8; 40];
+        key[..32].copy_from_slice(l1_tx_hash.as_slice());
+        key[32..].copy_from_slice(&order.to_be_bytes()); // BE is important for the lexicographic sorting
+        let mut writeopts = WriteOptions::default();
+        writeopts.disable_wal(true);
+        self.db.put_cf_opt(&l1_l2_mappings_column, key, l1_handler_tx_hash.to_bytes_be(), &writeopts)?;
+        Ok(())
+    }
 }
diff --git a/crates/client/db/src/lib.rs b/crates/client/db/src/lib.rs
index 1a592ddf7..1735c7473 100644
--- a/crates/client/db/src/lib.rs
+++ b/crates/client/db/src/lib.rs
@@ -1,5 +1,7 @@
 //! Madara database

+use alloy::primitives::private::alloy_rlp::MaxEncodedLenAssoc;
+use alloy::primitives::TxHash;
 use anyhow::{Context, Result};
 use bonsai_db::{BonsaiDb, DatabaseKeyMapping};
 use bonsai_trie::id::BasicId;
@@ -175,6 +177,7 @@ pub enum Column {
     L1Messaging,
     L1MessagingNonce,
+    L1MessagingHandlerTxHashes,

     /// Devnet: stores the private keys for the devnet predeployed contracts
     Devnet,
@@ -222,6 +225,7 @@ impl Column {
         BonsaiClassesLog,
         L1Messaging,
         L1MessagingNonce,
+        L1MessagingHandlerTxHashes,
         PendingContractToClassHashes,
         PendingContractToNonces,
         PendingContractStorage,
@@ -259,6 +263,7 @@ impl Column {
             ContractStorage => "contract_storage",
             L1Messaging => "l1_messaging",
             L1MessagingNonce => "l1_messaging_nonce",
+            L1MessagingHandlerTxHashes => "l1_messaging_handler_tx_hashes",
             PendingContractToClassHashes => "pending_contract_to_class_hashes",
             PendingContractToNonces => "pending_contract_to_nonces",
             PendingContractStorage => "pending_contract_storage",
@@ -286,6 +291,9 @@ impl Column {
                     contract_db::CONTRACT_NONCES_PREFIX_EXTRACTOR,
                 ));
             }
+            Column::L1MessagingHandlerTxHashes => {
+                opts.set_prefix_extractor(SliceTransform::create_fixed_prefix(TxHash::LEN));
+            }
             _ => {}
         }
         opts
diff --git a/crates/client/eth/src/l1_messaging.rs b/crates/client/eth/src/l1_messaging.rs
index a9e33c664..5cbb65ac8 100644
--- a/crates/client/eth/src/l1_messaging.rs
+++ b/crates/client/eth/src/l1_messaging.rs
@@ -1,10 +1,11 @@
+use crate::client::EthereumClient;
 use crate::client::StarknetCoreContract::LogMessageToL2;
-use crate::client::{EthereumClient, StarknetCoreContract};
 use crate::utils::u256_to_felt;
 use alloy::eips::BlockNumberOrTag;
 use alloy::primitives::{keccak256, FixedBytes, U256};
+use alloy::rpc::types::Log;
 use alloy::sol_types::SolValue;
-use anyhow::Context;
+use anyhow::{anyhow, Context};
 use blockifier::transaction::transaction_execution::Transaction as BlockifierTransation;
 use futures::StreamExt;
 use mc_db::{l1_db::LastSyncedEventBlock, MadaraBackend};
@@ -55,9 +56,10 @@ pub async fn sync(
             return Err(e.into());
         }
     };
-    let event_filter = client.l1_core_contract.event_filter::<StarknetCoreContract::LogMessageToL2>();
-    let mut event_stream = event_filter
+    let mut event_stream = client
+        .l1_core_contract
+        .event_filter::<LogMessageToL2>()
         .from_block(last_synced_event_block.block_number)
         .to_block(BlockNumberOrTag::Finalized)
         .watch()
         .await
         .context(
             "Failed to watch event filter - Ensure you are using an L1 RPC endpoint that points to an archive node",
         )?
         .into_stream();
+
     while let Some(event_result) = channel_wait_or_graceful_shutdown(event_stream.next()).await {
-        if let Ok((event, meta)) = event_result {
-            tracing::info!(
-                "⟠ Processing L1 Message from block: {:?}, transaction_hash: {:?}, log_index: {:?}, fromAddress: {:?}",
-                meta.block_number,
-                meta.transaction_hash,
-                meta.log_index,
-                event.fromAddress
-            );
-
-            // Check if cancellation was initiated
-            let event_hash = get_l1_to_l2_msg_hash(&event)?;
-            tracing::info!("⟠ Checking for cancelation, event hash : {:?}", event_hash);
-            let cancellation_timestamp = client.get_l1_to_l2_message_cancellations(event_hash).await?;
-            if cancellation_timestamp != Felt::ZERO {
-                tracing::info!("⟠ L1 Message was cancelled in block at timestamp : {:?}", cancellation_timestamp);
-                let tx_nonce = Nonce(u256_to_felt(event.nonce)?);
-                // cancelled message nonce should be inserted to avoid reprocessing
-                match backend.has_l1_messaging_nonce(tx_nonce) {
-                    Ok(false) => {
-                        backend.set_l1_messaging_nonce(tx_nonce)?;
-                    }
-                    Ok(true) => {}
-                    Err(e) => {
-                        tracing::error!("⟠ Unexpected DB error: {:?}", e);
-                        return Err(e.into());
-                    }
-                };
-                continue;
-            }
-
-            match process_l1_message(backend, &event, &meta.block_number, &meta.log_index, chain_id, mempool.clone())
-                .await
-            {
-                Ok(Some(tx_hash)) => {
-                    tracing::info!(
-                        "⟠ L1 Message from block: {:?}, transaction_hash: {:?}, log_index: {:?} submitted, \
-                         transaction hash on L2: {:?}",
-                        meta.block_number,
-                        meta.transaction_hash,
-                        meta.log_index,
-                        tx_hash
-                    );
-                }
-                Ok(None) => {}
-                Err(e) => {
-                    tracing::error!(
-                        "⟠ Unexpected error while processing L1 Message from block: {:?}, transaction_hash: {:?}, \
-                         log_index: {:?}, error: {:?}",
-                        meta.block_number,
-                        meta.transaction_hash,
-                        meta.log_index,
-                        e
-                    )
-                }
+        match event_result {
+            Ok((event, log)) => {
+                if let Err(e) = process_l1_to_l2_msg(backend, client, chain_id, &mempool, event, log).await {
+                    tracing::error!("⟠ Unable to process L1 -> L2 message event: {e:?}");
+                };
             }
+            Err(e) => tracing::error!("⟠ Unable to receive L1 -> L2 message event: {e}"),
         }
     }
     Ok(())
 }

-async fn process_l1_message(
+async fn process_l1_to_l2_msg(
     backend: &MadaraBackend,
-    event: &LogMessageToL2,
-    l1_block_number: &Option<u64>,
-    event_index: &Option<u64>,
+    client: &EthereumClient,
     chain_id: &ChainId,
-    mempool: Arc<Mempool>,
-) -> anyhow::Result<Option<Felt>> {
-    let transaction = parse_handle_l1_message_transaction(event)?;
-    let tx_nonce = transaction.nonce;
+    mempool: &Arc<Mempool>,
+    event: LogMessageToL2,
+    log: Log,
+) -> anyhow::Result<()> {
+    tracing::debug!("⟠ Processing L1 -> L2 message event {event:#?}, contract address: {}, block number: {:?}, transaction index: {:?}, transaction hash: {:?}, log index: {:?}",
+        log.address(), log.block_number, log.transaction_index, log.transaction_hash, log.log_index
+    );
+    let tx_nonce = Nonce(u256_to_felt(event.nonce)?);

     // Ensure that L1 message has not been executed
-    match backend.has_l1_messaging_nonce(tx_nonce) {
-        Ok(false) => {
-            backend.set_l1_messaging_nonce(tx_nonce)?;
-        }
-        Ok(true) => {
-            tracing::debug!("⟠ Event already processed: {:?}", transaction);
-            return Ok(None);
-        }
-        Err(e) => {
-            tracing::error!("⟠ Unexpected DB error: {:?}", e);
-            return Err(e.into());
-        }
-    };
+    if backend.has_l1_messaging_nonce(tx_nonce)? {
+        tracing::debug!("⟠ L1 -> L2 event already processed: {tx_nonce:?}");
+        return Ok(());
+    } else {
+        backend.set_l1_messaging_nonce(tx_nonce)?;
+    }
+
+    // Check if cancellation was initiated
+    let event_hash = get_l1_to_l2_msg_hash(&event);
+    let cancellation_timestamp = client.get_l1_to_l2_message_cancellations(event_hash).await?;
+    if cancellation_timestamp != Felt::ZERO {
+        tracing::info!("⟠ L1 message was cancelled at timestamp {:?}", cancellation_timestamp.to_biguint());
+        return Ok(());
+    }
+
+    let l1_handler_transaction = parse_handle_l1_message_transaction(&event)?;

-    let tx_hash = get_transaction_hash(&Transaction::L1Handler(transaction.clone()), chain_id, &transaction.version)?;
+    let tx_hash = get_transaction_hash(
+        &Transaction::L1Handler(l1_handler_transaction.clone()),
+        chain_id,
+        &l1_handler_transaction.version,
+    )?;

     let blockifier_transaction = BlockifierTransation::from_api(
-        Transaction::L1Handler(transaction),
+        Transaction::L1Handler(l1_handler_transaction.clone()),
         tx_hash,
         None,
         Some(Fee(event.fee.try_into()?)),
         None,
         false,
     )?;

-    let res = mempool.accept_l1_handler_tx(blockifier_transaction)?;
+    mempool.accept_l1_handler_tx(blockifier_transaction)?;
+
+    let l1_tx_hash = log.transaction_hash.ok_or_else(|| anyhow!("Missing transaction hash"))?;
+    let block_number = log.block_number.ok_or_else(|| anyhow!("Event missing block number"))?;
+    let log_index = log.log_index.ok_or_else(|| anyhow!("Event missing log index"))?;
+
+    // We use the log index for the order to ensure any L1 txs which have multiple messages are
+    // retrieved in the order they occurred.
+    backend.add_l1_handler_tx_hash_mapping(l1_tx_hash, tx_hash.0, log_index)?;

-    // TODO: remove unwraps
-    // Ques: shall it panic if no block number of event_index?
-    let block_sent = LastSyncedEventBlock::new(l1_block_number.unwrap(), event_index.unwrap());
-    backend.messaging_update_last_synced_l1_block_with_event(block_sent)?;
+    let last_synced_event_block = LastSyncedEventBlock::new(block_number, log_index);
+    backend.messaging_update_last_synced_l1_block_with_event(last_synced_event_block)?;

-    Ok(Some(res.transaction_hash))
+    tracing::info!("⟠ L1 message processed: {:?}, transaction hash: {:?}", l1_handler_transaction, tx_hash);
+    Ok(())
 }

-pub fn parse_handle_l1_message_transaction(event: &LogMessageToL2) -> anyhow::Result<L1HandlerTransaction> {
+fn parse_handle_l1_message_transaction(event: &LogMessageToL2) -> anyhow::Result<L1HandlerTransaction> {
     // L1 from address.
     let from_address = u256_to_felt(event.fromAddress.into_word().into())?;
@@ -206,7 +177,7 @@ pub fn parse_handle_l1_message_transaction(event: &LogMessageToL2) -> anyhow::Re
 }

 /// Computes the message hashed with the given event data
-fn get_l1_to_l2_msg_hash(event: &LogMessageToL2) -> anyhow::Result<FixedBytes<32>> {
+fn get_l1_to_l2_msg_hash(event: &LogMessageToL2) -> FixedBytes<32> {
     let data = (
         [0u8; 12],
         event.fromAddress.0 .0,
@@ -216,14 +187,11 @@ fn get_l1_to_l2_msg_hash(event: &LogMessageToL2) -> anyhow::Result
+            Transaction::L1HandlerTransaction(handler_tx) => (handler_tx.tx, handler_tx.tx_hash.0),
+            Transaction::AccountTransaction(_) => panic!("Expecting L1 handler transaction"),
+        };
+        assert_eq!(handler_tx.nonce, nonce);
+        assert_eq!(
+            handler_tx.contract_address,
+            ContractAddress::try_from(
+                Felt::from_dec_str("3256441166037631918262930812410838598500200462657642943867372734773841898370")
+                    .unwrap()
+            )
+            .unwrap()
+        );
+        assert_eq!(
+            handler_tx.entry_point_selector,
+            EntryPointSelector(
+                Felt::from_dec_str("774397379524139446221206168840917193112228400237242521560346153613428128537")
+                    .unwrap()
+            )
+        );
+        assert_eq!(
+            handler_tx.calldata.0[0],
+            Felt::from_dec_str("993696174272377493693496825928908586134624850969").unwrap()
+        );

-        // TODO : Assert that the tx has been included in the mempool
+        // Assert the L1 -> L2 mapping is stored
+        let l1_handler_tx_hashes = db
+            .backend()
+            .get_l1_handler_tx_hashes(
+                TxHash::from_hex("4961b0fef9f7d7c46fb9095b2b97ea3dc8157fca04e4f2562d1461ac3bb03867").unwrap(),
+            )
+            .expect("Unable to get L1 -> L2 tx hashes mapping from DB");
+        assert_eq!(l1_handler_tx_hashes, vec![handler_tx_hash]);

         // Assert that the event is well stored in db
         let last_block =
             db.backend().messaging_last_synced_l1_block_with_event().expect("failed to retrieve block").unwrap();
         assert_ne!(last_block.block_number, 0);
-        let nonce = Nonce(Felt::from_dec_str("10000000000000000").expect("failed to parse nonce string"));
         assert!(db.backend().has_l1_messaging_nonce(nonce).unwrap());

         // TODO : Assert that the tx was correctly executed
@@ -485,7 +480,7 @@
                 .unwrap()
                 .block_number
         );
-        assert!(logs_contain("Event already processed"));
+        assert!(logs_contain("L1 -> L2 event already processed"));

         worker_handle.abort();
     }
@@ -521,7 +516,7 @@
         let nonce = Nonce(Felt::from_dec_str("10000000000000000").expect("failed to parse nonce string"));
         // cancelled message nonce should be inserted to avoid reprocessing
         assert!(db.backend().has_l1_messaging_nonce(nonce).unwrap());
-        assert!(logs_contain("L1 Message was cancelled in block at timestamp : 0x66b4f105"));
+        assert!(logs_contain("L1 message was cancelled at timestamp 1723134213"));

         worker_handle.abort();
     }
@@ -547,8 +542,7 @@
             ],
             nonce: U256::from(775628),
             fee: U256::ZERO,
-        })
-        .expect("Failed to compute l1 to l2 msg hash");
+        });

         let expected_hash =
             <[u8; 32]>::from_hex("c51a543ef9563ad2545342b390b67edfcddf9886aa36846cf70382362fc5fab3").unwrap();
diff --git a/crates/client/mempool/src/inner.rs b/crates/client/mempool/src/inner.rs
index 837263ec2..5ab052583 100644
--- a/crates/client/mempool/src/inner.rs
+++ b/crates/client/mempool/src/inner.rs
@@ -156,11 +156,11 @@ impl NonceChain {
         match self.transactions.entry(OrderMempoolTransactionByNonce(mempool_tx)) {
             btree_map::Entry::Occupied(entry) => {
                 // duplicate nonce, either it's because the hash is duplicated or nonce conflict with another tx.
-                if entry.key().0.tx_hash() == mempool_tx_hash {
-                    return Err(TxInsersionError::DuplicateTxn);
+                return if entry.key().0.tx_hash() == mempool_tx_hash {
+                    Err(TxInsersionError::DuplicateTxn)
                 } else {
-                    return Err(TxInsersionError::NonceConflict);
-                }
+                    Err(TxInsersionError::NonceConflict)
+                };
             }
             btree_map::Entry::Vacant(entry) => *entry.insert(()),
         }
@@ -169,7 +169,7 @@ impl NonceChain {
         };

         let position = if self.front_nonce >= mempool_tx_nonce {
-            // We insrted at the front here
+            // We inserted at the front here
             let former_head_arrived_at = core::mem::replace(&mut self.front_arrived_at, mempool_tx_arrived_at);
             self.front_nonce = mempool_tx_nonce;
             self.front_tx_hash = mempool_tx_hash;
@@ -307,7 +307,7 @@ impl MempoolInner {
             None
         };

-        let is_replaced = match self.nonce_chains.entry(contract_addr) {
+        let replaced_state = match self.nonce_chains.entry(contract_addr) {
             hash_map::Entry::Occupied(mut entry) => {
                 // Handle nonce collision.
                 let (position, is_replaced) = match entry.get_mut().insert(mempool_tx, force) {
@@ -350,7 +350,7 @@ impl MempoolInner {
             }
         };

-        if is_replaced != ReplacedState::Replaced {
+        if replaced_state != ReplacedState::Replaced {
             if let Some(contract_address) = &deployed_contract_address {
                 self.deployed_contracts.increment(*contract_address)
             }
diff --git a/crates/client/mempool/src/lib.rs b/crates/client/mempool/src/lib.rs
index 28445193e..703866132 100644
--- a/crates/client/mempool/src/lib.rs
+++ b/crates/client/mempool/src/lib.rs
@@ -101,10 +101,7 @@ impl Mempool {
             block
         } else {
             // No current pending block, we'll make an unsaved empty one for the sake of validating this tx.
-            let parent_block_hash = self
-                .backend
-                .get_block_hash(&BlockId::Tag(BlockTag::Latest))?
-                .unwrap_or(/* genesis block's parent hash */ Felt::ZERO);
+            let parent_block_hash = self.backend.get_block_hash(&BlockId::Tag(BlockTag::Latest))?.unwrap_or(Felt::ZERO); // Genesis block's parent hash
             MadaraPendingBlockInfo::new(
                 make_pending_header(parent_block_hash, self.backend.chain_config(), self.l1_data_provider.as_ref()),
                 vec![],
diff --git a/crates/client/rpc/Cargo.toml b/crates/client/rpc/Cargo.toml
index 531a7dae2..d2f0fd546 100644
--- a/crates/client/rpc/Cargo.toml
+++ b/crates/client/rpc/Cargo.toml
@@ -44,6 +44,7 @@ starknet-types-rpc = { workspace = true }
 starknet_api = { workspace = true, default-features = true }

 # Others
+alloy = { workspace = true }
 anyhow = { workspace = true }
 jsonrpsee = { workspace = true, default-features = true, features = [
     "macros",
diff --git a/crates/client/rpc/src/RPC.md b/crates/client/rpc/src/RPC.md
index f10d9504f..126480716 100644
--- a/crates/client/rpc/src/RPC.md
+++ b/crates/client/rpc/src/RPC.md
@@ -14,7 +14,7 @@ methods exist in isolation from `read` methods for example.
 _different versions_ of the same RPC method. This is mostly present for ease of
 development of new RPC versions, but also serves to assure a level of backwards
 compatibility. To select a specific version of an rpc method, you will need to
-append `/rcp/v{version}` to the rpc url you are connecting to.
+append `/rpc/v{version}` to the rpc url you are connecting to.

 **RPC versions are grouped under the `Starknet` struct**.
 This serves as a common point of implementation for all RPC methods across all versions, and is
diff --git a/crates/client/rpc/src/versions/v0_8_0/api.rs b/crates/client/rpc/src/versions/v0_8_0/api.rs
index 40f87b90a..c424cab96 100644
--- a/crates/client/rpc/src/versions/v0_8_0/api.rs
+++ b/crates/client/rpc/src/versions/v0_8_0/api.rs
@@ -1,3 +1,5 @@
+use crate::versions::v0_8_0::methods::read::get_messages_status::MessageStatus;
+use alloy::primitives::TxHash;
 use jsonrpsee::core::RpcResult;
 use m_proc_macros::versioned_rpc;
 use mp_block::BlockId;
@@ -18,4 +20,10 @@ pub trait StarknetReadRpcApi {

     #[method(name = "getCompiledCasm")]
     fn get_compiled_casm(&self, class_hash: Felt) -> RpcResult;
+
+    /// For the given L1 transaction hash, return the associated L1 handler transaction hashes and
+    /// statuses for all L1 -> L2 messages sent by the L1 transaction, ordered by the L1
+    /// transaction sending order.
+    #[method(name = "getMessagesStatus")]
+    fn get_messages_status(&self, transaction_hash: TxHash) -> RpcResult<Vec<MessageStatus>>;
 }
diff --git a/crates/client/rpc/src/versions/v0_8_0/methods/read/get_messages_status.rs b/crates/client/rpc/src/versions/v0_8_0/methods/read/get_messages_status.rs
new file mode 100644
index 000000000..8be63cca4
--- /dev/null
+++ b/crates/client/rpc/src/versions/v0_8_0/methods/read/get_messages_status.rs
@@ -0,0 +1,43 @@
+use crate::utils::ResultExt;
+use crate::versions::v0_7_1::methods::read::get_transaction_status::get_transaction_status;
+use crate::{Starknet, StarknetRpcApiError, StarknetRpcResult};
+use alloy::primitives::TxHash;
+use jsonrpsee::core::Serialize;
+use serde::Deserialize;
+use starknet_core::types::SequencerTransactionStatus;
+use starknet_types_core::felt::Felt;
+
+pub fn get_messages_status(starknet: &Starknet, transaction_hash: TxHash) -> StarknetRpcResult<Vec<MessageStatus>> {
+    let l1_handler_tx_hashes = starknet
+        .backend
+        .get_l1_handler_tx_hashes(transaction_hash)
+        .or_internal_server_error("Retrieving L1 handler transactions from database")?;
+    if l1_handler_tx_hashes.is_empty() {
+        return Err(StarknetRpcApiError::TxnHashNotFound);
+    }
+    let mut message_statuses = vec![];
+    for l1_handler_tx_hash in l1_handler_tx_hashes {
+        let finality_status = match get_transaction_status(starknet, l1_handler_tx_hash) {
+            Ok(tx_status) => tx_status.finality_status(),
+            Err(StarknetRpcApiError::TxnHashNotFound) => {
+                tracing::error!("L1 handler tx {l1_handler_tx_hash:?} for L1 tx {transaction_hash:?} not found");
+                return Err(StarknetRpcApiError::InternalServerError);
+            }
+            Err(e) => return Err(e),
+        };
+        message_statuses.push(MessageStatus {
+            transaction_hash: l1_handler_tx_hash,
+            finality_status,
+            // TODO Update this once get_transaction_status supports rejections
+            failure_reason: None,
+        })
+    }
+    Ok(message_statuses)
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
+pub struct MessageStatus {
+    pub transaction_hash: Felt,
+    pub finality_status: SequencerTransactionStatus,
+    pub failure_reason: Option<String>,
+}
diff --git a/crates/client/rpc/src/versions/v0_8_0/methods/read/lib.rs b/crates/client/rpc/src/versions/v0_8_0/methods/read/lib.rs
index dc3ca5966..d7c043517 100644
--- a/crates/client/rpc/src/versions/v0_8_0/methods/read/lib.rs
+++ b/crates/client/rpc/src/versions/v0_8_0/methods/read/lib.rs
@@ -1,12 +1,13 @@
+use crate::versions::v0_8_0::methods::read::get_messages_status::{get_messages_status, MessageStatus};
+use crate::versions::v0_8_0::StarknetReadRpcApiV0_8_0Server;
+use crate::Starknet;
+use alloy::primitives::TxHash;
 use jsonrpsee::core::{async_trait, RpcResult};
 use mp_chain_config::RpcVersion;
 use starknet_types_core::felt::Felt;

 use super::get_compiled_casm::*;

-use crate::versions::v0_8_0::StarknetReadRpcApiV0_8_0Server;
-use crate::Starknet;
-
 #[async_trait]
 impl StarknetReadRpcApiV0_8_0Server for Starknet {
     fn spec_version(&self) -> RpcResult<String> {
@@ -16,4 +17,8 @@ impl StarknetReadRpcApiV0_8_0Server for Starknet {
     fn get_compiled_casm(&self, class_hash: Felt) -> RpcResult {
         Ok(get_compiled_casm(self, class_hash)?)
     }
+
+    fn get_messages_status(&self, transaction_hash: TxHash) -> RpcResult<Vec<MessageStatus>> {
+        Ok(get_messages_status(self, transaction_hash)?)
+    }
 }
diff --git a/crates/client/rpc/src/versions/v0_8_0/methods/read/mod.rs b/crates/client/rpc/src/versions/v0_8_0/methods/read/mod.rs
index ca920b2ba..4c94236c6 100644
--- a/crates/client/rpc/src/versions/v0_8_0/methods/read/mod.rs
+++ b/crates/client/rpc/src/versions/v0_8_0/methods/read/mod.rs
@@ -1,2 +1,3 @@
 pub mod get_compiled_casm;
+pub mod get_messages_status;
 pub mod lib;
diff --git a/crates/node/src/cli/analytics.rs b/crates/node/src/cli/analytics.rs
index 1f3ee7220..73be9e431 100644
--- a/crates/node/src/cli/analytics.rs
+++ b/crates/node/src/cli/analytics.rs
@@ -15,6 +15,6 @@ pub struct AnalyticsParams {
     pub analytics_log_level: Level,

     /// Endpoint of the analytics server.
-    #[arg(env = "OTEL_EXPORTER_OTLP_ENDPOINT", long, value_parser = parse_url, default_value = None)]
+    #[arg(env = "MADARA_ANALYTICS_COLLECTION_ENDPOINT", long, value_parser = parse_url, default_value = None)]
     pub analytics_collection_endpoint: Option<Url>,
 }

From bbed31d4b4c6af1b793529fa6f4d03c4e3f4a0c5 Mon Sep 17 00:00:00 2001
From: Shams Asari
Date: Wed, 20 Nov 2024 13:45:02 +0000
Subject: [PATCH 2/4] Merge fix

---
 crates/client/db/src/lib.rs             | 2 --
 crates/client/db/src/rocksdb_options.rs | 5 +++++
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/crates/client/db/src/lib.rs b/crates/client/db/src/lib.rs
index 64eda0160..c1b67015b 100644
--- a/crates/client/db/src/lib.rs
+++ b/crates/client/db/src/lib.rs
@@ -1,7 +1,5 @@
 //! Madara database

-use alloy::primitives::private::alloy_rlp::MaxEncodedLenAssoc;
-use alloy::primitives::TxHash;
 use anyhow::{Context, Result};
 use bonsai_db::{BonsaiDb, DatabaseKeyMapping};
 use bonsai_trie::id::BasicId;
diff --git a/crates/client/db/src/rocksdb_options.rs b/crates/client/db/src/rocksdb_options.rs
index 37aebb516..2fdd7248d 100644
--- a/crates/client/db/src/rocksdb_options.rs
+++ b/crates/client/db/src/rocksdb_options.rs
@@ -2,6 +2,8 @@
 #![allow(non_upper_case_globals)] // allow KiB/MiB/GiB names

 use crate::{contract_db, Column};
+use alloy::primitives::private::alloy_rlp::MaxEncodedLenAssoc;
+use alloy::primitives::TxHash;
 use anyhow::{Context, Result};
 use rocksdb::{DBCompressionType, Env, Options, SliceTransform};
@@ -56,6 +58,9 @@ impl Column {
                     contract_db::CONTRACT_NONCES_PREFIX_EXTRACTOR,
                 ));
             }
+            Column::L1MessagingHandlerTxHashes => {
+                options.set_prefix_extractor(SliceTransform::create_fixed_prefix(TxHash::LEN));
+            }
             _ => {}
         }

From 09a5a54d4ea4e40eda4d6e27e55b44d772f179d9 Mon Sep 17 00:00:00 2001
From: Shams Asari
Date: Thu, 21 Nov 2024 10:49:01 +0000
Subject: [PATCH 3/4] Merge fix

---
 crates/client/rpc/src/versions/user/v0_8_0/api.rs     | 2 +-
 .../user/v0_8_0/methods/read/get_messages_status.rs   | 2 +-
 .../rpc/src/versions/user/v0_8_0/methods/read/lib.rs  | 7 ++-----
 3 files changed, 4 insertions(+), 7 deletions(-)

diff --git a/crates/client/rpc/src/versions/user/v0_8_0/api.rs b/crates/client/rpc/src/versions/user/v0_8_0/api.rs
index c424cab96..579d08d9f 100644
--- a/crates/client/rpc/src/versions/user/v0_8_0/api.rs
+++ b/crates/client/rpc/src/versions/user/v0_8_0/api.rs
@@ -1,4 +1,4 @@
-use crate::versions::v0_8_0::methods::read::get_messages_status::MessageStatus;
+use crate::versions::user::v0_8_0::methods::read::get_messages_status::MessageStatus;
 use alloy::primitives::TxHash;
 use jsonrpsee::core::RpcResult;
 use m_proc_macros::versioned_rpc;
diff --git a/crates/client/rpc/src/versions/user/v0_8_0/methods/read/get_messages_status.rs b/crates/client/rpc/src/versions/user/v0_8_0/methods/read/get_messages_status.rs
index 8be63cca4..5e44a8a74 100644
--- a/crates/client/rpc/src/versions/user/v0_8_0/methods/read/get_messages_status.rs
+++ b/crates/client/rpc/src/versions/user/v0_8_0/methods/read/get_messages_status.rs
@@ -1,5 +1,5 @@
 use crate::utils::ResultExt;
-use crate::versions::v0_7_1::methods::read::get_transaction_status::get_transaction_status;
+use crate::versions::user::v0_7_1::methods::read::get_transaction_status::get_transaction_status;
 use crate::{Starknet, StarknetRpcApiError, StarknetRpcResult};
 use alloy::primitives::TxHash;
 use jsonrpsee::core::Serialize;
diff --git a/crates/client/rpc/src/versions/user/v0_8_0/methods/read/lib.rs b/crates/client/rpc/src/versions/user/v0_8_0/methods/read/lib.rs
index 08bc93d0f..a272ce7e3 100644
--- a/crates/client/rpc/src/versions/user/v0_8_0/methods/read/lib.rs
+++ b/crates/client/rpc/src/versions/user/v0_8_0/methods/read/lib.rs
@@ -1,5 +1,5 @@
-use crate::versions::v0_8_0::methods::read::get_messages_status::{get_messages_status, MessageStatus};
-use crate::versions::v0_8_0::StarknetReadRpcApiV0_8_0Server;
+use crate::versions::user::v0_8_0::methods::read::get_messages_status::{get_messages_status, MessageStatus};
+use crate::versions::user::v0_8_0::StarknetReadRpcApiV0_8_0Server;
 use crate::Starknet;
 use alloy::primitives::TxHash;
 use jsonrpsee::core::{async_trait, RpcResult};
@@ -8,9 +8,6 @@ use starknet_types_core::felt::Felt;

 use super::get_compiled_casm::*;

-use crate::versions::user::v0_8_0::StarknetReadRpcApiV0_8_0Server;
-use crate::Starknet;
-
 #[async_trait]
 impl StarknetReadRpcApiV0_8_0Server for Starknet {
     fn spec_version(&self) -> RpcResult<String> {

From 492218865dec7118e481134135b7a05205549978 Mon Sep 17 00:00:00 2001
From: Shams Asari
Date: Mon, 25 Nov 2024 11:16:07 +0000
Subject: [PATCH 4/4] Address review comments

---
 crates/client/db/src/l1_db.rs                       | 10 ++---
 crates/client/eth/src/l1_messaging.rs               |  8 ++--
 .../methods/read/get_messages_status.rs             | 38 ++++++++++---------
 3 files changed, 29 insertions(+), 27 deletions(-)

diff --git a/crates/client/db/src/l1_db.rs b/crates/client/db/src/l1_db.rs
index ffe45c57d..73efca0f6 100644
--- a/crates/client/db/src/l1_db.rs
+++ b/crates/client/db/src/l1_db.rs
@@ -132,11 +132,11 @@ impl MadaraBackend {

     pub fn get_l1_handler_tx_hashes(&self, l1_tx_hash: TxHash) -> Result<Vec<Felt>, DbError> {
         let l1_l2_mappings_column = self.db.get_column(Column::L1MessagingHandlerTxHashes);
-        let mut l1_handler_tx_hashes = vec![];
-        for kv_bytes in self.db.prefix_iterator_cf(&l1_l2_mappings_column, l1_tx_hash.as_slice()) {
-            let l1_handler_tx_hash = Felt::from_bytes_be_slice(&kv_bytes?.1);
-            l1_handler_tx_hashes.push(l1_handler_tx_hash);
-        }
+        let l1_handler_tx_hashes = self
+            .db
+            .prefix_iterator_cf(&l1_l2_mappings_column, l1_tx_hash.as_slice())
+            .map(|kv_bytes| Ok(Felt::from_bytes_be_slice(&kv_bytes?.1)))
+            .collect::<Result<_, DbError>>()?;
         Ok(l1_handler_tx_hashes)
     }

diff --git a/crates/client/eth/src/l1_messaging.rs b/crates/client/eth/src/l1_messaging.rs
index a47a07038..3d487c414 100644
--- a/crates/client/eth/src/l1_messaging.rs
+++ b/crates/client/eth/src/l1_messaging.rs
@@ -5,7 +5,7 @@ use alloy::eips::BlockNumberOrTag;
 use alloy::primitives::{keccak256, FixedBytes, U256};
 use alloy::rpc::types::Log;
 use alloy::sol_types::SolValue;
-use anyhow::{anyhow, Context};
+use anyhow::Context;
 use blockifier::transaction::transaction_execution::Transaction as BlockifierTransation;
 use futures::StreamExt;
 use mc_db::{l1_db::LastSyncedEventBlock, MadaraBackend};
@@ -130,9 +130,9 @@ async fn process_l1_to_l2_msg(
     )?;
     mempool.accept_l1_handler_tx(blockifier_transaction)?;

-    let l1_tx_hash = log.transaction_hash.ok_or_else(|| anyhow!("Missing transaction hash"))?;
-    let block_number = log.block_number.ok_or_else(|| anyhow!("Event missing block number"))?;
-    let log_index = log.log_index.ok_or_else(|| anyhow!("Event missing log index"))?;
+    let l1_tx_hash = log.transaction_hash.context("Missing transaction hash")?;
+    let block_number = log.block_number.context("Event missing block number")?;
+    let log_index = log.log_index.context("Event missing log index")?;

     // We use the log index for the order to ensure any L1 txs which have multiple messages are
     // retrieved in the order they occurred.
diff --git a/crates/client/rpc/src/versions/user/v0_8_0/methods/read/get_messages_status.rs b/crates/client/rpc/src/versions/user/v0_8_0/methods/read/get_messages_status.rs
index 5e44a8a74..e4a8ec273 100644
--- a/crates/client/rpc/src/versions/user/v0_8_0/methods/read/get_messages_status.rs
+++ b/crates/client/rpc/src/versions/user/v0_8_0/methods/read/get_messages_status.rs
@@ -15,24 +15,26 @@ pub fn get_messages_status(starknet: &Starknet, transaction_hash: TxHash) -> Sta
     if l1_handler_tx_hashes.is_empty() {
         return Err(StarknetRpcApiError::TxnHashNotFound);
     }
-    let mut message_statuses = vec![];
-    for l1_handler_tx_hash in l1_handler_tx_hashes {
-        let finality_status = match get_transaction_status(starknet, l1_handler_tx_hash) {
-            Ok(tx_status) => tx_status.finality_status(),
-            Err(StarknetRpcApiError::TxnHashNotFound) => {
-                tracing::error!("L1 handler tx {l1_handler_tx_hash:?} for L1 tx {transaction_hash:?} not found");
-                return Err(StarknetRpcApiError::InternalServerError);
-            }
-            Err(e) => return Err(e),
-        };
-        message_statuses.push(MessageStatus {
-            transaction_hash: l1_handler_tx_hash,
-            finality_status,
-            // TODO Update this once get_transaction_status supports rejections
-            failure_reason: None,
-        })
-    }
-    Ok(message_statuses)
+    l1_handler_tx_hashes.iter().try_fold(
+        Vec::with_capacity(l1_handler_tx_hashes.len()),
+        |mut acc, l1_handler_tx_hash| {
+            let finality_status = match get_transaction_status(starknet, *l1_handler_tx_hash) {
+                Ok(tx_status) => tx_status.finality_status(),
+                Err(StarknetRpcApiError::TxnHashNotFound) => {
+                    tracing::error!("L1 handler tx {l1_handler_tx_hash:?} for L1 tx {transaction_hash:?} not found");
+                    return Err(StarknetRpcApiError::InternalServerError);
+                }
+                Err(e) => return Err(e),
+            };
+            acc.push(MessageStatus {
+                transaction_hash: *l1_handler_tx_hash,
+                finality_status,
+                // TODO Update this once get_transaction_status supports rejections
+                failure_reason: None,
+            });
+            Ok(acc)
+        },
+    )
 }

 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
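Note on the storage layout introduced in this series (this note and the sketch below are not part of the patches themselves): add_l1_handler_tx_hash_mapping keys the L1MessagingHandlerTxHashes column with the 32-byte L1 transaction hash followed by an 8-byte big-endian order value, so the lexicographic prefix iteration in get_l1_handler_tx_hashes returns handler transaction hashes in the order the messages were sent. A minimal standalone sketch of that key layout in plain Rust, using a hypothetical make_key helper instead of the Madara types:

// Illustrative sketch only: mirrors the 32-byte-hash + 8-byte big-endian-order key
// layout used by add_l1_handler_tx_hash_mapping; make_key is a hypothetical helper.
fn make_key(l1_tx_hash: &[u8; 32], order: u64) -> [u8; 40] {
    let mut key = [0u8; 40];
    key[..32].copy_from_slice(l1_tx_hash);
    // Big-endian bytes compare the same way as the numbers they encode,
    // so byte-wise (lexicographic) ordering matches the send order.
    key[32..].copy_from_slice(&order.to_be_bytes());
    key
}

fn main() {
    let tx_hash = [0xab_u8; 32];
    // Insert orders out of sequence, then sort byte-wise like a prefix scan would iterate.
    let mut keys: Vec<[u8; 40]> = [2u64, 0, 1].iter().map(|&o| make_key(&tx_hash, o)).collect();
    keys.sort();
    let orders: Vec<u64> = keys.iter().map(|k| u64::from_be_bytes(k[32..].try_into().unwrap())).collect();
    assert_eq!(orders, vec![0, 1, 2]);
}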