diff --git a/Cargo.lock b/Cargo.lock index 41f1d2707b060..386bb8573bbda 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4347,7 +4347,6 @@ dependencies = [ "aptos-crypto", "aptos-crypto-derive", "aptos-dkg", - "aptos-experimental-runtimes", "aptos-infallible", "aptos-proptest-helpers", "arbitrary", @@ -4359,7 +4358,6 @@ dependencies = [ "ark-relations", "ark-serialize", "ark-std", - "arr_macro", "async-trait", "base64 0.13.1", "bcs 0.1.4", @@ -11789,8 +11787,11 @@ dependencies = [ "move-bytecode-utils", "move-command-line-common", "move-compiler", + "move-compiler-v2", "move-core-types", "move-ir-types", + "move-model", + "move-package", "move-resource-viewer", "move-stdlib", "move-symbol-pool", diff --git a/api/src/context.rs b/api/src/context.rs index 1a77f5d5ee1f4..e1c6095d19307 100644 --- a/api/src/context.rs +++ b/api/src/context.rs @@ -22,7 +22,9 @@ use aptos_gas_schedule::{AptosGasParameters, FromOnChainGasSchedule}; use aptos_logger::{error, info, Schema}; use aptos_mempool::{MempoolClientRequest, MempoolClientSender, SubmissionStatus}; use aptos_storage_interface::{ - state_view::{DbStateView, DbStateViewAtVersion, LatestDbStateCheckpointView}, + state_store::state_view::db_state_view::{ + DbStateView, DbStateViewAtVersion, LatestDbStateCheckpointView, + }, AptosDbError, DbReader, Order, MAX_REQUEST_LIMIT, }; use aptos_types::{ diff --git a/api/test-context/src/test_context.rs b/api/test-context/src/test_context.rs index e8f36acdf997f..99ff148314c0c 100644 --- a/api/test-context/src/test_context.rs +++ b/api/test-context/src/test_context.rs @@ -31,7 +31,9 @@ use aptos_sdk::{ transaction::SignedTransaction, AccountKey, LocalAccount, }, }; -use aptos_storage_interface::{state_view::DbStateView, DbReaderWriter}; +use aptos_storage_interface::{ + state_store::state_view::db_state_view::DbStateView, DbReaderWriter, +}; use aptos_temppath::TempPath; use aptos_types::{ account_address::{create_multisig_account_address, AccountAddress}, diff --git a/aptos-move/aptos-e2e-comparison-testing/src/data_state_view.rs b/aptos-move/aptos-e2e-comparison-testing/src/data_state_view.rs index be1938618686f..47a5a2de72b93 100644 --- a/aptos-move/aptos-e2e-comparison-testing/src/data_state_view.rs +++ b/aptos-move/aptos-e2e-comparison-testing/src/data_state_view.rs @@ -5,7 +5,7 @@ use aptos_language_e2e_tests::data_store::FakeDataStore; use aptos_types::{ state_store::{ state_key::StateKey, state_storage_usage::StateStorageUsage, state_value::StateValue, - Result as StateViewResult, TStateView, + StateViewResult, TStateView, }, transaction::Version, }; diff --git a/aptos-move/aptos-gas-schedule/src/gas_schedule/transaction.rs b/aptos-move/aptos-gas-schedule/src/gas_schedule/transaction.rs index 5c51fa9ee314b..62a09562f1bdf 100644 --- a/aptos-move/aptos-gas-schedule/src/gas_schedule/transaction.rs +++ b/aptos-move/aptos-gas-schedule/src/gas_schedule/transaction.rs @@ -212,7 +212,7 @@ crate::gas_schedule::macros::define_gas_parameters!( [ max_execution_gas_gov: InternalGas, { RELEASE_V1_13.. 
=> "max_execution_gas.gov" }, - 4_000_000_000, + 4_300_000_000, ], [ max_io_gas: InternalGas, diff --git a/aptos-move/aptos-release-builder/src/simulate.rs b/aptos-move/aptos-release-builder/src/simulate.rs index 76456c74801c9..a20a8699ca260 100644 --- a/aptos-move/aptos-release-builder/src/simulate.rs +++ b/aptos-move/aptos-release-builder/src/simulate.rs @@ -36,9 +36,8 @@ use aptos_types::{ account_config::ChainIdResource, on_chain_config::{ApprovedExecutionHashes, Features, GasScheduleV2, OnChainConfig}, state_store::{ - in_memory_state_view::InMemoryStateView, state_key::StateKey, - state_storage_usage::StateStorageUsage, state_value::StateValue, - Result as StateStoreResult, StateView, TStateView, + state_key::StateKey, state_storage_usage::StateStorageUsage, state_value::StateValue, + StateView, StateViewResult as StateStoreResult, TStateView, }, transaction::{ExecutionStatus, Script, TransactionArgument, TransactionStatus}, write_set::{TransactionWrite, WriteSet}, @@ -306,10 +305,6 @@ where fn get_usage(&self) -> StateStoreResult { Ok(StateStorageUsage::Untracked) } - - fn as_in_memory_state_view(&self) -> InMemoryStateView { - panic!("not supported") - } } /*************************************************************************************************** diff --git a/aptos-move/aptos-validator-interface/src/lib.rs b/aptos-move/aptos-validator-interface/src/lib.rs index 9414fb2298476..ba700ab1a4c36 100644 --- a/aptos-move/aptos-validator-interface/src/lib.rs +++ b/aptos-move/aptos-validator-interface/src/lib.rs @@ -12,7 +12,7 @@ use aptos_types::{ account_address::AccountAddress, state_store::{ state_key::StateKey, state_storage_usage::StateStorageUsage, state_value::StateValue, - Result as StateViewResult, StateViewId, TStateView, + StateViewId, StateViewResult, TStateView, }, transaction::{Transaction, TransactionInfo, Version}, }; diff --git a/aptos-move/aptos-vm-environment/src/environment.rs b/aptos-move/aptos-vm-environment/src/environment.rs index debdd654c92ac..0d3474ab64e96 100644 --- a/aptos-move/aptos-vm-environment/src/environment.rs +++ b/aptos-move/aptos-vm-environment/src/environment.rs @@ -181,12 +181,12 @@ impl Environment { // If no chain ID is in storage, we assume we are in a testing environment. let chain_id = fetch_config_and_update_hash::(&mut sha3_256, state_view) .unwrap_or_else(ChainId::test); - let timestamp = + let timestamp_micros = fetch_config_and_update_hash::(&mut sha3_256, state_view) - .map(|config| config.last_reconfiguration_time()) + .map(|config| config.last_reconfiguration_time_micros()) .unwrap_or(0); - let mut timed_features_builder = TimedFeaturesBuilder::new(chain_id, timestamp); + let mut timed_features_builder = TimedFeaturesBuilder::new(chain_id, timestamp_micros); if let Some(profile) = get_timed_feature_override() { // We need to ensure the override is taken into account for the hash. 
let profile_bytes = bcs::to_bytes(&profile) diff --git a/aptos-move/aptos-vm-types/src/resolver.rs b/aptos-move/aptos-vm-types/src/resolver.rs index e71673588eace..99e72df4c8140 100644 --- a/aptos-move/aptos-vm-types/src/resolver.rs +++ b/aptos-move/aptos-vm-types/src/resolver.rs @@ -5,7 +5,7 @@ use aptos_aggregator::resolver::{TAggregatorV1View, TDelayedFieldView}; use aptos_types::{ serde_helper::bcs_utils::size_u32_as_uleb128, state_store::{ - errors::StateviewError, + errors::StateViewError, state_key::StateKey, state_storage_usage::StateStorageUsage, state_value::{StateValue, StateValueMetadata}, @@ -181,9 +181,9 @@ pub trait StateStorageView { fn id(&self) -> StateViewId; /// Reads the state value from the DB. Used to enforce read-before-write for module writes. - fn read_state_value(&self, state_key: &Self::Key) -> Result<(), StateviewError>; + fn read_state_value(&self, state_key: &Self::Key) -> Result<(), StateViewError>; - fn get_usage(&self) -> Result; + fn get_usage(&self) -> Result; } /// A fine-grained view of the state during execution. @@ -289,12 +289,12 @@ where self.id() } - fn read_state_value(&self, state_key: &Self::Key) -> Result<(), StateviewError> { + fn read_state_value(&self, state_key: &Self::Key) -> Result<(), StateViewError> { self.get_state_value(state_key)?; Ok(()) } - fn get_usage(&self) -> Result { + fn get_usage(&self) -> Result { self.get_usage().map_err(Into::into) } } diff --git a/aptos-move/aptos-vm-types/src/resource_group_adapter.rs b/aptos-move/aptos-vm-types/src/resource_group_adapter.rs index 87293388dca28..2c2109d4b2fc5 100644 --- a/aptos-move/aptos-vm-types/src/resource_group_adapter.rs +++ b/aptos-move/aptos-vm-types/src/resource_group_adapter.rs @@ -377,7 +377,7 @@ mod tests { use super::*; use crate::tests::utils::{mock_tag_0, mock_tag_1, mock_tag_2}; use aptos_types::state_store::{ - errors::StateviewError, state_storage_usage::StateStorageUsage, state_value::StateValue, + errors::StateViewError, state_storage_usage::StateStorageUsage, state_value::StateValue, TStateView, }; use claims::{assert_gt, assert_none, assert_ok_eq, assert_some, assert_some_eq}; @@ -443,14 +443,14 @@ mod tests { fn get_state_value( &self, state_key: &Self::Key, - ) -> Result, StateviewError> { + ) -> Result, StateViewError> { Ok(self .group .get(state_key) .map(|entry| StateValue::new_legacy(entry.blob.clone().into()))) } - fn get_usage(&self) -> Result { + fn get_usage(&self) -> Result { unimplemented!(); } } diff --git a/aptos-move/aptos-vm/src/data_cache.rs b/aptos-move/aptos-vm/src/data_cache.rs index d2ae08b06194d..11da1abc1a9ee 100644 --- a/aptos-move/aptos-vm/src/data_cache.rs +++ b/aptos-move/aptos-vm/src/data_cache.rs @@ -17,7 +17,7 @@ use aptos_types::{ error::{PanicError, PanicOr}, on_chain_config::{ConfigStorage, Features, OnChainConfig}, state_store::{ - errors::StateviewError, + errors::StateViewError, state_key::StateKey, state_storage_usage::StateStorageUsage, state_value::{StateValue, StateValueMetadata}, @@ -318,11 +318,11 @@ impl<'e, E: ExecutorView> StateStorageView for StorageAdapter<'e, E> { self.executor_view.id() } - fn read_state_value(&self, state_key: &Self::Key) -> Result<(), StateviewError> { + fn read_state_value(&self, state_key: &Self::Key) -> Result<(), StateViewError> { self.executor_view.read_state_value(state_key) } - fn get_usage(&self) -> Result { + fn get_usage(&self) -> Result { self.executor_view.get_usage() } } diff --git a/aptos-move/aptos-vm/src/move_vm_ext/session/view_with_change_set.rs 
b/aptos-move/aptos-vm/src/move_vm_ext/session/view_with_change_set.rs index e1ccb6f506d91..a6309ce35af70 100644 --- a/aptos-move/aptos-vm/src/move_vm_ext/session/view_with_change_set.rs +++ b/aptos-move/aptos-vm/src/move_vm_ext/session/view_with_change_set.rs @@ -11,7 +11,7 @@ use aptos_aggregator::{ use aptos_types::{ error::{code_invariant_error, expect_ok, PanicError, PanicOr}, state_store::{ - errors::StateviewError, + errors::StateViewError, state_key::StateKey, state_storage_usage::StateStorageUsage, state_value::{StateValue, StateValueMetadata}, @@ -324,12 +324,12 @@ impl<'r> StateStorageView for ExecutorViewWithChangeSet<'r> { self.base_executor_view.id() } - fn read_state_value(&self, state_key: &Self::Key) -> Result<(), StateviewError> { + fn read_state_value(&self, state_key: &Self::Key) -> Result<(), StateViewError> { self.base_executor_view.read_state_value(state_key) } - fn get_usage(&self) -> Result { - Err(StateviewError::Other( + fn get_usage(&self) -> Result { + Err(StateViewError::Other( "Unexpected access to get_usage()".to_string(), )) } diff --git a/aptos-move/aptos-vm/src/sharded_block_executor/aggr_overridden_state_view.rs b/aptos-move/aptos-vm/src/sharded_block_executor/aggr_overridden_state_view.rs index 7872c1495d51b..6676421d4294c 100644 --- a/aptos-move/aptos-vm/src/sharded_block_executor/aggr_overridden_state_view.rs +++ b/aptos-move/aptos-vm/src/sharded_block_executor/aggr_overridden_state_view.rs @@ -3,13 +3,13 @@ use aptos_types::{ state_store::{ - errors::StateviewError, state_key::StateKey, state_storage_usage::StateStorageUsage, + errors::StateViewError, state_key::StateKey, state_storage_usage::StateStorageUsage, state_value::StateValue, StateView, TStateView, }, write_set::TOTAL_SUPPLY_STATE_KEY, }; -type Result = std::result::Result; +type Result = std::result::Result; pub const TOTAL_SUPPLY_AGGR_BASE_VAL: u128 = u128::MAX >> 1; #[derive(Clone)] diff --git a/aptos-move/aptos-vm/src/sharded_block_executor/cross_shard_state_view.rs b/aptos-move/aptos-vm/src/sharded_block_executor/cross_shard_state_view.rs index dfc7e6342641a..2381d6ea63057 100644 --- a/aptos-move/aptos-vm/src/sharded_block_executor/cross_shard_state_view.rs +++ b/aptos-move/aptos-vm/src/sharded_block_executor/cross_shard_state_view.rs @@ -6,7 +6,7 @@ use aptos_logger::trace; use aptos_types::{ block_executor::partitioner::TransactionWithDependencies, state_store::{ - errors::StateviewError, state_key::StateKey, state_storage_usage::StateStorageUsage, + errors::StateViewError, state_key::StateKey, state_storage_usage::StateStorageUsage, state_value::StateValue, StateView, TStateView, }, transaction::analyzed_transaction::AnalyzedTransaction, @@ -74,14 +74,14 @@ impl<'a, S: StateView + Sync + Send> CrossShardStateView<'a, S> { impl<'a, S: StateView + Sync + Send> TStateView for CrossShardStateView<'a, S> { type Key = StateKey; - fn get_state_value(&self, state_key: &StateKey) -> Result, StateviewError> { + fn get_state_value(&self, state_key: &StateKey) -> Result, StateViewError> { if let Some(value) = self.cross_shard_data.get(state_key) { return Ok(value.get_value()); } self.base_view.get_state_value(state_key) } - fn get_usage(&self) -> Result { + fn get_usage(&self) -> Result { Ok(StateStorageUsage::new_untracked()) } } @@ -90,19 +90,27 @@ impl<'a, S: StateView + Sync + Send> TStateView for CrossShardStateView<'a, S> { mod tests { use crate::sharded_block_executor::cross_shard_state_view::CrossShardStateView; use aptos_types::state_store::{ - in_memory_state_view::InMemoryStateView, 
state_key::StateKey, state_value::StateValue, - TStateView, - }; - use once_cell::sync::Lazy; - use std::{ - collections::{HashMap, HashSet}, - sync::Arc, - thread, - time::Duration, + errors::StateViewError, state_key::StateKey, state_storage_usage::StateStorageUsage, + state_value::StateValue, TStateView, }; + use std::{collections::HashSet, sync::Arc, thread, time::Duration}; + + struct EmptyView; - pub static EMPTY_VIEW: Lazy> = - Lazy::new(|| Arc::new(InMemoryStateView::new(HashMap::new()))); + impl TStateView for EmptyView { + type Key = StateKey; + + fn get_state_value( + &self, + _state_key: &StateKey, + ) -> Result, StateViewError> { + Ok(None) + } + + fn get_usage(&self) -> Result { + unreachable!() + } + } #[test] fn test_cross_shard_state_view_get_state_value() { @@ -114,7 +122,7 @@ mod tests { let mut state_keys = HashSet::new(); state_keys.insert(state_key.clone()); - let cross_shard_state_view = Arc::new(CrossShardStateView::new(state_keys, &EMPTY_VIEW)); + let cross_shard_state_view = Arc::new(CrossShardStateView::new(state_keys, &EmptyView)); let cross_shard_state_view_clone = cross_shard_state_view.clone(); let wait_thread = thread::spawn(move || { diff --git a/aptos-move/block-executor/src/proptest_types/types.rs b/aptos-move/block-executor/src/proptest_types/types.rs index ec416785fd80a..d594f134c2829 100644 --- a/aptos-move/block-executor/src/proptest_types/types.rs +++ b/aptos-move/block-executor/src/proptest_types/types.rs @@ -17,7 +17,7 @@ use aptos_types::{ fee_statement::FeeStatement, on_chain_config::CurrentTimeMicroseconds, state_store::{ - errors::StateviewError, + errors::StateViewError, state_storage_usage::StateStorageUsage, state_value::{StateValue, StateValueMetadata}, StateViewId, TStateView, @@ -73,7 +73,7 @@ where type Key = K; // Contains mock storage value with STORAGE_AGGREGATOR_VALUE. - fn get_state_value(&self, _: &K) -> Result, StateviewError> { + fn get_state_value(&self, _: &K) -> Result, StateViewError> { Ok(Some(StateValue::new_legacy( serialize(&STORAGE_AGGREGATOR_VALUE).into(), ))) @@ -83,7 +83,7 @@ where StateViewId::Miscellaneous } - fn get_usage(&self) -> Result { + fn get_usage(&self) -> Result { unreachable!("Not used in tests"); } } @@ -99,7 +99,7 @@ where type Key = K; // Contains mock storage value with a non-empty group (w. value at RESERVED_TAG). 
- fn get_state_value(&self, key: &K) -> Result, StateviewError> { + fn get_state_value(&self, key: &K) -> Result, StateViewError> { if self.group_keys.contains(key) { let group: BTreeMap = BTreeMap::from([(RESERVED_TAG, vec![0].into())]); @@ -117,7 +117,7 @@ where StateViewId::Miscellaneous } - fn get_usage(&self) -> Result { + fn get_usage(&self) -> Result { unreachable!("Not used in tests"); } } diff --git a/aptos-move/block-executor/src/view.rs b/aptos-move/block-executor/src/view.rs index 660faf6142104..7f4b3f3daa053 100644 --- a/aptos-move/block-executor/src/view.rs +++ b/aptos-move/block-executor/src/view.rs @@ -37,7 +37,7 @@ use aptos_types::{ error::{code_invariant_error, expect_ok, PanicError, PanicOr}, executable::{Executable, ModulePath}, state_store::{ - errors::StateviewError, + errors::StateViewError, state_storage_usage::StateStorageUsage, state_value::{StateValue, StateValueMetadata}, StateViewId, TStateView, @@ -1628,12 +1628,12 @@ impl<'a, T: Transaction, S: TStateView, X: Executable> StateStorag self.base_view.id() } - fn read_state_value(&self, state_key: &Self::Key) -> Result<(), StateviewError> { + fn read_state_value(&self, state_key: &Self::Key) -> Result<(), StateViewError> { self.base_view.get_state_value(state_key)?; Ok(()) } - fn get_usage(&self) -> Result { + fn get_usage(&self) -> Result { self.base_view.get_usage() } } diff --git a/aptos-move/e2e-tests/src/data_store.rs b/aptos-move/e2e-tests/src/data_store.rs index de1825b76203a..1d1e81f4aab1e 100644 --- a/aptos-move/e2e-tests/src/data_store.rs +++ b/aptos-move/e2e-tests/src/data_store.rs @@ -10,8 +10,8 @@ use aptos_types::{ chain_id::ChainId, on_chain_config::{Features, OnChainConfig}, state_store::{ - errors::StateviewError, in_memory_state_view::InMemoryStateView, state_key::StateKey, - state_storage_usage::StateStorageUsage, state_value::StateValue, TStateView, + errors::StateViewError, state_key::StateKey, state_storage_usage::StateStorageUsage, + state_value::StateValue, TStateView, }, transaction::ChangeSet, write_set::{TransactionWrite, WriteSet}, @@ -143,21 +143,17 @@ impl FakeDataStore { impl TStateView for FakeDataStore { type Key = StateKey; - fn get_state_value(&self, state_key: &StateKey) -> Result, StateviewError> { + fn get_state_value(&self, state_key: &StateKey) -> Result, StateViewError> { Ok(self.state_data.get(state_key).cloned()) } - fn get_usage(&self) -> Result { + fn get_usage(&self) -> Result { let mut usage = StateStorageUsage::new_untracked(); for (k, v) in self.state_data.iter() { usage.add_item(k.size() + v.size()) } Ok(usage) } - - fn as_in_memory_state_view(&self) -> InMemoryStateView { - InMemoryStateView::new(self.state_data.clone()) - } } #[cfg(test)] diff --git a/aptos-move/framework/aptos-framework/tests/compiler-v2-doc/function_info.md b/aptos-move/framework/aptos-framework/tests/compiler-v2-doc/function_info.md index b7f0a2482ac0a..2984cf918d4c0 100644 --- a/aptos-move/framework/aptos-framework/tests/compiler-v2-doc/function_info.md +++ b/aptos-move/framework/aptos-framework/tests/compiler-v2-doc/function_info.md @@ -147,7 +147,7 @@ Creates a new function info from names. -
public(friend) fun new_function_info_from_address(module_address: address, module_name: string::String, function_name: string::String): function_info::FunctionInfo
+
public fun new_function_info_from_address(module_address: address, module_name: string::String, function_name: string::String): function_info::FunctionInfo
 
@@ -156,7 +156,7 @@ Creates a new function info from names. Implementation -
public(friend) fun new_function_info_from_address(
+
public fun new_function_info_from_address(
     module_address: address,
     module_name: String,
     function_name: String,
@@ -363,7 +363,7 @@ if such module isn't accessed previously in the transaction.
 ### Function `new_function_info_from_address`
 
 
-
public(friend) fun new_function_info_from_address(module_address: address, module_name: string::String, function_name: string::String): function_info::FunctionInfo
+
public fun new_function_info_from_address(module_address: address, module_name: string::String, function_name: string::String): function_info::FunctionInfo
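With this diff, `new_function_info_from_address` becomes `public` rather than `public(friend)`, so any module can construct a `FunctionInfo` directly from an address and names. A minimal caller sketch follows; the address, module name, and function name are illustrative assumptions, not part of this change:

```move
module my_addr::dispatch_example {
    use std::string;
    use aptos_framework::function_info::{Self, FunctionInfo};

    /// Illustrative only: build a FunctionInfo pointing at a hypothetical
    /// `my_addr::callbacks::on_transfer` function. Loading the named module
    /// may incur extra gas if it was not touched earlier in the transaction.
    fun make_callback_info(): FunctionInfo {
        function_info::new_function_info_from_address(
            @my_addr,
            string::utf8(b"callbacks"),
            string::utf8(b"on_transfer")
        )
    }
}
```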
 
diff --git a/aptos-move/framework/aptos-framework/tests/compiler-v2-doc/stake.md b/aptos-move/framework/aptos-framework/tests/compiler-v2-doc/stake.md index 6dbfe59be07b9..48feb9dd3a1c1 100644 --- a/aptos-move/framework/aptos-framework/tests/compiler-v2-doc/stake.md +++ b/aptos-move/framework/aptos-framework/tests/compiler-v2-doc/stake.md @@ -1752,7 +1752,7 @@ Owner capability does not exist at the provided account. -Validator set change temporarily disabled because of in-progress reconfiguration. +Validator set change temporarily disabled because of in-progress reconfiguration. Please retry after 1 minute.
const ERECONFIGURATION_IN_PROGRESS: u64 = 20;
@@ -4566,6 +4566,39 @@ Returns validator's next epoch voting power, including pending_active, active, a
 
 
 
+
+
+
+
fun spec_validator_index_upper_bound(): u64 {
+   len(global<ValidatorPerformance>(@aptos_framework).validators)
+}
+
+ + + + + + + +
fun spec_has_stake_pool(a: address): bool {
+   exists<StakePool>(a)
+}
+
+ + + + + + + +
fun spec_has_validator_config(a: address): bool {
+   exists<ValidatorConfig>(a)
+}
+
+ + + + @@ -5611,39 +5644,6 @@ Returns validator's next epoch voting power, including pending_active, active, a - - - - -
fun spec_validator_index_upper_bound(): u64 {
-   len(global<ValidatorPerformance>(@aptos_framework).validators)
-}
-
- - - - - - - -
fun spec_has_stake_pool(a: address): bool {
-   exists<StakePool>(a)
-}
-
- - - - - - - -
fun spec_has_validator_config(a: address): bool {
-   exists<ValidatorConfig>(a)
-}
-
- - - ### Function `update_stake_pool` diff --git a/aptos-move/framework/aptos-stdlib/tests/compiler-v2-doc/big_vector.md b/aptos-move/framework/aptos-stdlib/tests/compiler-v2-doc/big_vector.md index 2e7a9a100357c..c255994e67731 100644 --- a/aptos-move/framework/aptos-stdlib/tests/compiler-v2-doc/big_vector.md +++ b/aptos-move/framework/aptos-stdlib/tests/compiler-v2-doc/big_vector.md @@ -502,7 +502,7 @@ Aborts if i is out of bounds. if (self.end_index == i) { return last_val }; - // because the lack of mem::swap, here we swap remove the requested value from the bucket + // because the lack of mem::swap, here we swap remove the requested value from the bucket // and append the last_val to the bucket then swap the last bucket val back let bucket = table_with_length::borrow_mut(&mut self.buckets, i / self.bucket_size); let bucket_len = vector::length(bucket); diff --git a/aptos-move/framework/aptos-stdlib/tests/compiler-v2-doc/from_bcs.md b/aptos-move/framework/aptos-stdlib/tests/compiler-v2-doc/from_bcs.md index 5838c78fdd758..91d0e365bcf12 100644 --- a/aptos-move/framework/aptos-stdlib/tests/compiler-v2-doc/from_bcs.md +++ b/aptos-move/framework/aptos-stdlib/tests/compiler-v2-doc/from_bcs.md @@ -5,7 +5,7 @@ This module provides a number of functions to convert _primitive_ types from their representation in std::bcs to values. This is the opposite of bcs::to_bytes. Note that it is not safe to define a generic public from_bytes -function because this can violate implicit struct invariants, therefore only primitive types are offerred. If +function because this can violate implicit struct invariants, therefore only primitive types are offered. If a general conversion back-and-force is needed, consider the aptos_std::Any type which preserves invariants. Example: diff --git a/aptos-move/framework/aptos-token-objects/tests/compiler-v2-doc/collection.md b/aptos-move/framework/aptos-token-objects/tests/compiler-v2-doc/collection.md index 185c3beca8d08..bf3628a4f9afa 100644 --- a/aptos-move/framework/aptos-token-objects/tests/compiler-v2-doc/collection.md +++ b/aptos-move/framework/aptos-token-objects/tests/compiler-v2-doc/collection.md @@ -1722,7 +1722,6 @@ After changing the collection's name, to create tokens - only call functions tha
public fun set_description(mutator_ref: &MutatorRef, description: String) acquires Collection {
     assert!(string::length(&description) <= MAX_DESCRIPTION_LENGTH, error::out_of_range(EDESCRIPTION_TOO_LONG));
     let collection = borrow_mut(mutator_ref);
-    collection.description = description;
     if (std::features::module_event_migration_enabled()) {
         event::emit(Mutation {
             mutated_field_name: string::utf8(b"description"),
@@ -1736,6 +1735,7 @@ After changing the collection's name, to create tokens - only call functions tha
             MutationEvent { mutated_field_name: string::utf8(b"description") },
         );
     };
+    collection.description = description;
 }
 
@@ -1761,7 +1761,6 @@ After changing the collection's name, to create tokens - only call functions tha
public fun set_uri(mutator_ref: &MutatorRef, uri: String) acquires Collection {
     assert!(string::length(&uri) <= MAX_URI_LENGTH, error::out_of_range(EURI_TOO_LONG));
     let collection = borrow_mut(mutator_ref);
-    collection.uri = uri;
     if (std::features::module_event_migration_enabled()) {
         event::emit(Mutation {
             mutated_field_name: string::utf8(b"uri"),
@@ -1775,6 +1774,7 @@ After changing the collection's name, to create tokens - only call functions tha
             MutationEvent { mutated_field_name: string::utf8(b"uri") },
         );
     };
+    collection.uri = uri;
 }
 
diff --git a/aptos-move/framework/aptos-token/doc/token_event_store.md b/aptos-move/framework/aptos-token/doc/token_event_store.md index f54e0f85b7344..8f19a648203c0 100644 --- a/aptos-move/framework/aptos-token/doc/token_event_store.md +++ b/aptos-move/framework/aptos-token/doc/token_event_store.md @@ -11,7 +11,7 @@ This module provides utils to add and emit new token events that are not in toke - [Struct `CollectionUriMutateEvent`](#0x3_token_event_store_CollectionUriMutateEvent) - [Struct `CollectionUriMutate`](#0x3_token_event_store_CollectionUriMutate) - [Struct `CollectionMaxiumMutateEvent`](#0x3_token_event_store_CollectionMaxiumMutateEvent) -- [Struct `CollectionMaxiumMutate`](#0x3_token_event_store_CollectionMaxiumMutate) +- [Struct `CollectionMaximumMutate`](#0x3_token_event_store_CollectionMaximumMutate) - [Struct `OptInTransferEvent`](#0x3_token_event_store_OptInTransferEvent) - [Struct `OptInTransfer`](#0x3_token_event_store_OptInTransfer) - [Struct `UriMutationEvent`](#0x3_token_event_store_UriMutationEvent) @@ -25,6 +25,7 @@ This module provides utils to add and emit new token events that are not in toke - [Struct `MaxiumMutateEvent`](#0x3_token_event_store_MaxiumMutateEvent) - [Struct `MaximumMutate`](#0x3_token_event_store_MaximumMutate) - [Resource `TokenEventStoreV1`](#0x3_token_event_store_TokenEventStoreV1) +- [Struct `CollectionMaxiumMutate`](#0x3_token_event_store_CollectionMaxiumMutate) - [Function `initialize_token_event_store`](#0x3_token_event_store_initialize_token_event_store) - [Function `emit_collection_uri_mutate_event`](#0x3_token_event_store_emit_collection_uri_mutate_event) - [Function `emit_collection_description_mutate_event`](#0x3_token_event_store_emit_collection_description_mutate_event) @@ -292,15 +293,15 @@ Event emitted when the collection maximum is mutated - + -## Struct `CollectionMaxiumMutate` +## Struct `CollectionMaximumMutate` Event emitted when the collection maximum is mutated
#[event]
-struct CollectionMaxiumMutate has drop, store
+struct CollectionMaximumMutate has drop, store
 
@@ -1066,6 +1067,53 @@ Event emitted when the token maximum is mutated + + + + +## Struct `CollectionMaxiumMutate` + + + +
#[event]
+#[deprecated]
+struct CollectionMaxiumMutate has drop, store
+
+ + + +
+Fields + + +
+
+creator_addr: address +
+
+ +
+
+collection_name: string::String +
+
+ +
+
+old_maximum: u64 +
+
+ +
+
+new_maximum: u64 +
+
+ +
+
+ +
@@ -1226,7 +1274,7 @@ Emit the collection maximum mutation event let token_event_store = borrow_global_mut<TokenEventStoreV1>(signer::address_of(creator)); if (std::features::module_event_migration_enabled()) { event::emit( - CollectionMaxiumMutate { + CollectionMaximumMutate { creator_addr: signer::address_of(creator), collection_name: collection, old_maximum, diff --git a/aptos-move/framework/aptos-token/sources/token_event_store.move b/aptos-move/framework/aptos-token/sources/token_event_store.move index a114f955e0746..9f30907969be1 100644 --- a/aptos-move/framework/aptos-token/sources/token_event_store.move +++ b/aptos-move/framework/aptos-token/sources/token_event_store.move @@ -59,7 +59,7 @@ module aptos_token::token_event_store { #[event] /// Event emitted when the collection maximum is mutated - struct CollectionMaxiumMutate has drop, store { + struct CollectionMaximumMutate has drop, store { creator_addr: address, collection_name: String, old_maximum: u64, @@ -295,7 +295,7 @@ module aptos_token::token_event_store { let token_event_store = borrow_global_mut(signer::address_of(creator)); if (std::features::module_event_migration_enabled()) { event::emit( - CollectionMaxiumMutate { + CollectionMaximumMutate { creator_addr: signer::address_of(creator), collection_name: collection, old_maximum, @@ -529,4 +529,13 @@ module aptos_token::token_event_store { ); }; } + + #[deprecated] + #[event] + struct CollectionMaxiumMutate has drop, store { + creator_addr: address, + collection_name: String, + old_maximum: u64, + new_maximum: u64, + } } diff --git a/aptos-move/framework/move-stdlib/tests/compiler-v2-doc/bcs.md b/aptos-move/framework/move-stdlib/tests/compiler-v2-doc/bcs.md index 9bdcf7f45f7d3..0a73d39902341 100644 --- a/aptos-move/framework/move-stdlib/tests/compiler-v2-doc/bcs.md +++ b/aptos-move/framework/move-stdlib/tests/compiler-v2-doc/bcs.md @@ -11,11 +11,13 @@ details on BCS. - [Function `to_bytes`](#0x1_bcs_to_bytes) - [Function `serialized_size`](#0x1_bcs_serialized_size) +- [Function `constant_serialized_size`](#0x1_bcs_constant_serialized_size) - [Specification](#@Specification_0) - [Function `serialized_size`](#@Specification_0_serialized_size) -
+
use 0x1::option;
+
@@ -65,6 +67,38 @@ Aborts with 0x1c5 error code if there is a failure when calculating + + + + +## Function `constant_serialized_size` + +If the type has known constant (always the same, independent of instance) serialized size +in BCS (Binary Canonical Serialization) format, returns it, otherwise returns None. +Aborts with 0x1c5 error code if there is a failure when calculating serialized size. + +Note: +For some types it might not be known that they have constant size, and the function might return None. +For example, signer appears to have constant size, but its size might change. +If this function returned Some() for some type before - it is guaranteed to continue returning Some(). +On the other hand, if the function has returned None for some type, +it might change in the future to return Some() instead, if size becomes "known". + + +
public(friend) fun constant_serialized_size<MoveValue>(): option::Option<u64>
+
+ + + +
+Implementation + + +
native public(friend) fun constant_serialized_size<MoveValue>(): Option<u64>;
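A rough sketch of how this native could be exercised, under two assumptions: the expected values follow from BCS encoding rules (a `u64` always occupies 8 bytes, while a vector's size depends on its length), and the calling module is a declared friend of `std::bcs`, since the function is `public(friend)`:

```move
#[test_only]
module std::bcs_constant_size_examples {
    use std::bcs;
    use std::option;

    #[test]
    fun constant_size_expectations() {
        // A u64 always serializes to 8 bytes under BCS, so a constant size is reported.
        assert!(bcs::constant_serialized_size<u64>() == option::some(8), 0);
        // A vector's serialized size depends on its length, so no constant size is known.
        assert!(bcs::constant_serialized_size<vector<u8>>() == option::none<u64>(), 1);
    }
}
```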
+
+ + +
diff --git a/aptos-move/framework/move-stdlib/tests/compiler-v2-doc/overview.md index 8eb0c67f05113..649873e8ab2f5 100644 --- a/aptos-move/framework/move-stdlib/tests/compiler-v2-doc/overview.md +++ b/aptos-move/framework/move-stdlib/tests/compiler-v2-doc/overview.md @@ -20,6 +20,7 @@ For on overview of the Move language, see the [Move Book][move-book]. - [`0x1::features`](features.md#0x1_features) - [`0x1::fixed_point32`](fixed_point32.md#0x1_fixed_point32) - [`0x1::hash`](hash.md#0x1_hash) +- [`0x1::mem`](mem.md#0x1_mem) - [`0x1::option`](option.md#0x1_option) - [`0x1::signer`](signer.md#0x1_signer) - [`0x1::string`](string.md#0x1_string) diff --git a/aptos-move/framework/move-stdlib/tests/compiler-v2-doc/vector.md index d5e6a7bfa2ecd..b56160dbaf27e 100644 --- a/aptos-move/framework/move-stdlib/tests/compiler-v2-doc/vector.md +++ b/aptos-move/framework/move-stdlib/tests/compiler-v2-doc/vector.md @@ -24,6 +24,7 @@ the return on investment didn't seem worth it for these simple functions. - [Function `pop_back`](#0x1_vector_pop_back) - [Function `destroy_empty`](#0x1_vector_destroy_empty) - [Function `swap`](#0x1_vector_swap) +- [Function `move_range`](#0x1_vector_move_range) - [Function `singleton`](#0x1_vector_singleton) - [Function `reverse`](#0x1_vector_reverse) - [Function `reverse_slice`](#0x1_vector_reverse_slice) @@ -340,6 +341,43 @@ Aborts if i or j is out of bounds. + + + + +## Function `move_range` + +Moves a range of elements [removal_position, removal_position + length) from vector from, +to vector to, inserting them starting at the insert_position. +In the from vector, elements after the selected range are moved left to fill the hole +(i.e. range is removed, while the order of the rest of the elements is kept) +In the to vector, elements after the insert_position are moved to the right to make +space for new elements (i.e. range is inserted, while the order of the rest of the +elements is kept). +Move prevents having two mutable references to the same value, so from and to +vectors are always distinct. + + +
public(friend) fun move_range<T>(from: &mut vector<T>, removal_position: u64, length: u64, to: &mut vector<T>, insert_position: u64)
+
+ + + +
+Implementation + + +
native public(friend) fun move_range<T>(
+    from: &mut vector<T>,
+    removal_position: u64,
+    length: u64,
+    to: &mut vector<T>,
+    insert_position: u64
+);
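A worked example of the removal/insertion semantics described above. This is a sketch only: the module name and values are illustrative, and because `move_range` is `public(friend)` such code would have to live in a module that `std::vector` declares as a friend:

```move
#[test_only]
module std::vector_move_range_example {
    use std::vector;

    #[test]
    fun move_range_semantics() {
        let from = vector[1, 2, 3, 4, 5];
        let to = vector[10, 20];

        // Move the range [1, 1 + 2) -- the elements 2 and 3 -- out of `from`,
        // inserting them into `to` starting at index 1.
        vector::move_range(&mut from, 1, 2, &mut to, 1);

        // `from` closes the hole, keeping the order of the remaining elements.
        assert!(from == vector[1, 4, 5], 0);
        // `to` shifts its tail right to make room for the inserted range.
        assert!(to == vector[10, 2, 3, 20], 1);
    }
}
```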
+
+ + +
diff --git a/aptos-move/vm-genesis/src/genesis_context.rs b/aptos-move/vm-genesis/src/genesis_context.rs index 3a0640c8f7cae..e54b2c319bc7f 100644 --- a/aptos-move/vm-genesis/src/genesis_context.rs +++ b/aptos-move/vm-genesis/src/genesis_context.rs @@ -7,7 +7,7 @@ use aptos_types::{ executable::ModulePath, state_store::{ - errors::StateviewError, state_key::StateKey, state_storage_usage::StateStorageUsage, + errors::StateViewError, state_key::StateKey, state_storage_usage::StateStorageUsage, state_value::StateValue, TStateView, }, write_set::WriteOp, @@ -18,7 +18,7 @@ use claims::assert_some; use move_core_types::language_storage::ModuleId; use std::collections::{BTreeMap, HashMap}; -type Result = std::result::Result; +type Result = std::result::Result; // `StateView` has no data given we are creating the genesis pub(crate) struct GenesisStateView { diff --git a/aptos-node/src/utils.rs b/aptos-node/src/utils.rs index 03b7dcea75296..5a72eb60e988d 100644 --- a/aptos-node/src/utils.rs +++ b/aptos-node/src/utils.rs @@ -3,7 +3,9 @@ use anyhow::anyhow; use aptos_config::config::{NodeConfig, DEFAULT_EXECUTION_CONCURRENCY_LEVEL}; -use aptos_storage_interface::{state_view::LatestDbStateCheckpointView, DbReaderWriter}; +use aptos_storage_interface::{ + state_store::state_view::db_state_view::LatestDbStateCheckpointView, DbReaderWriter, +}; use aptos_types::{ account_config::ChainIdResource, chain_id::ChainId, on_chain_config::OnChainConfig, }; diff --git a/consensus/src/block_storage/sync_manager.rs b/consensus/src/block_storage/sync_manager.rs index 3f9e917479bad..07b20edf8a922 100644 --- a/consensus/src/block_storage/sync_manager.rs +++ b/consensus/src/block_storage/sync_manager.rs @@ -68,13 +68,6 @@ impl BlockStore { && !self.block_exists(li.commit_info().id())) || self.commit_root().round() + 30.max(2 * self.vote_back_pressure_limit) < li.commit_info().round() - // If the LI commit block timestamp is more than 30 secs ahead of self commit block - // timestamp, sync to the ledger info - || li - .commit_info() - .timestamp_usecs() - .saturating_sub(self.commit_root().timestamp_usecs()) - >= Duration::from_secs(30).as_micros() as u64 } /// Checks if quorum certificate can be inserted in block store without RPC diff --git a/crates/aptos/CHANGELOG.md b/crates/aptos/CHANGELOG.md index af13bddb60bfb..e1635f52de462 100644 --- a/crates/aptos/CHANGELOG.md +++ b/crates/aptos/CHANGELOG.md @@ -2,6 +2,11 @@ All notable changes to the Aptos CLI will be captured in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html) and the format set out by [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). +# Unreleased + +- Add `--node-api-key` flag to `aptos move replay` to allow for querying the fullnode with an API key. + + ## [4.5.0] - 2024/11/15 - Determine network from URL to make explorer links better for legacy users - Add support for AIP-80 compliant strings when importing using the CLI arguments or manual input. 
diff --git a/crates/aptos/src/move_tool/mod.rs b/crates/aptos/src/move_tool/mod.rs index a59f28e5c966f..bd0da93576ca6 100644 --- a/crates/aptos/src/move_tool/mod.rs +++ b/crates/aptos/src/move_tool/mod.rs @@ -45,7 +45,7 @@ use aptos_move_debugger::aptos_debugger::AptosDebugger; use aptos_rest_client::{ aptos_api_types::{EntryFunctionId, HexEncodedBytes, IdentifierWrapper, MoveModuleId}, error::RestError, - Client, + AptosBaseUrl, Client, }; use aptos_types::{ account_address::{create_resource_address, AccountAddress}, @@ -2179,6 +2179,11 @@ pub struct Replay { /// If present, skip the comparison against the expected transaction output. #[clap(long)] pub(crate) skip_comparison: bool, + + /// Key to use for ratelimiting purposes with the node API. This value will be used + /// as `Authorization: Bearer ` + #[clap(long)] + pub(crate) node_api_key: Option, } impl FromStr for ReplayNetworkSelection { @@ -2216,10 +2221,20 @@ impl CliCommand for Replay { RestEndpoint(url) => url, }; - let debugger = AptosDebugger::rest_client(Client::new( + // Build the client + let client = Client::builder(AptosBaseUrl::Custom( Url::parse(rest_endpoint) .map_err(|_err| CliError::UnableToParse("url", rest_endpoint.to_string()))?, - ))?; + )); + + // add the node API key if it is provided + let client = if let Some(api_key) = self.node_api_key { + client.api_key(&api_key).unwrap().build() + } else { + client.build() + }; + + let debugger = AptosDebugger::rest_client(client)?; // Fetch the transaction to replay. let (txn, txn_info) = debugger diff --git a/crates/aptos/src/node/mod.rs b/crates/aptos/src/node/mod.rs index c9fe8c9a1bacf..e062e721cc94a 100644 --- a/crates/aptos/src/node/mod.rs +++ b/crates/aptos/src/node/mod.rs @@ -1443,7 +1443,7 @@ async fn get_epoch_info(client: &Client) -> CliTypedResult { let epoch_interval = block_resource.epoch_interval(); let epoch_interval_secs = epoch_interval / SECS_TO_MICROSECS; - let last_reconfig = reconfig_resource.last_reconfiguration_time(); + let last_reconfig = reconfig_resource.last_reconfiguration_time_micros(); Ok(EpochInfo { epoch: reconfig_resource.epoch(), epoch_interval_secs, diff --git a/execution/executor-benchmark/src/db_reliable_submitter.rs b/execution/executor-benchmark/src/db_reliable_submitter.rs index 8ed1ece5e1fb2..18639628921e8 100644 --- a/execution/executor-benchmark/src/db_reliable_submitter.rs +++ b/execution/executor-benchmark/src/db_reliable_submitter.rs @@ -4,7 +4,9 @@ use crate::db_access::DbAccessUtil; use anyhow::{Context, Result}; -use aptos_storage_interface::{state_view::LatestDbStateCheckpointView, DbReaderWriter}; +use aptos_storage_interface::{ + state_store::state_view::db_state_view::LatestDbStateCheckpointView, DbReaderWriter, +}; use aptos_transaction_generator_lib::{CounterState, ReliableTransactionSubmitter}; use aptos_types::{ account_address::AccountAddress, diff --git a/execution/executor-benchmark/src/lib.rs b/execution/executor-benchmark/src/lib.rs index 68285d5348396..88a2267acb89a 100644 --- a/execution/executor-benchmark/src/lib.rs +++ b/execution/executor-benchmark/src/lib.rs @@ -37,7 +37,9 @@ use aptos_jellyfish_merkle::metrics::{ use aptos_logger::{info, warn}; use aptos_metrics_core::Histogram; use aptos_sdk::types::LocalAccount; -use aptos_storage_interface::{state_view::LatestDbStateCheckpointView, DbReader, DbReaderWriter}; +use aptos_storage_interface::{ + state_store::state_view::db_state_view::LatestDbStateCheckpointView, DbReader, DbReaderWriter, +}; use aptos_transaction_generator_lib::{ 
create_txn_generator_creator, AlwaysApproveRootAccountHandle, TransactionGeneratorCreator, TransactionType::{self, CoinTransfer}, diff --git a/execution/executor-benchmark/src/transaction_generator.rs b/execution/executor-benchmark/src/transaction_generator.rs index 4703fa1a905b0..b7fc225dcd79e 100644 --- a/execution/executor-benchmark/src/transaction_generator.rs +++ b/execution/executor-benchmark/src/transaction_generator.rs @@ -12,7 +12,9 @@ use aptos_sdk::{ transaction_builder::{aptos_stdlib, TransactionFactory}, types::LocalAccount, }; -use aptos_storage_interface::{state_view::LatestDbStateCheckpointView, DbReader, DbReaderWriter}; +use aptos_storage_interface::{ + state_store::state_view::db_state_view::LatestDbStateCheckpointView, DbReader, DbReaderWriter, +}; use aptos_types::{ account_address::AccountAddress, account_config::{aptos_test_root_address, AccountResource}, diff --git a/execution/executor-service/src/local_executor_helper.rs b/execution/executor-service/src/local_executor_helper.rs index 4e128086db777..ff49ac4a68f56 100644 --- a/execution/executor-service/src/local_executor_helper.rs +++ b/execution/executor-service/src/local_executor_helper.rs @@ -3,7 +3,7 @@ use aptos_infallible::Mutex; use aptos_logger::info; -use aptos_storage_interface::cached_state_view::CachedStateView; +use aptos_storage_interface::state_store::state_view::cached_state_view::CachedStateView; use aptos_vm::{ sharded_block_executor::{local_executor_shard::LocalExecutorClient, ShardedBlockExecutor}, AptosVM, diff --git a/execution/executor-service/src/remote_executor_client.rs b/execution/executor-service/src/remote_executor_client.rs index 30ff095107a91..b14e5c3f66a4b 100644 --- a/execution/executor-service/src/remote_executor_client.rs +++ b/execution/executor-service/src/remote_executor_client.rs @@ -7,7 +7,7 @@ use crate::{ }; use aptos_logger::{info, trace}; use aptos_secure_net::network_controller::{Message, NetworkController}; -use aptos_storage_interface::cached_state_view::CachedStateView; +use aptos_storage_interface::state_store::state_view::cached_state_view::CachedStateView; use aptos_types::{ block_executor::{ config::BlockExecutorConfigFromOnchain, partitioner::PartitionedTransactions, diff --git a/execution/executor-service/src/remote_state_view.rs b/execution/executor-service/src/remote_state_view.rs index 2cb929c5f8433..e4d7c3232439a 100644 --- a/execution/executor-service/src/remote_state_view.rs +++ b/execution/executor-service/src/remote_state_view.rs @@ -17,7 +17,8 @@ use aptos_logger::trace; use aptos_types::{ block_executor::partitioner::ShardId, state_store::{ - state_storage_usage::StateStorageUsage, state_value::StateValue, Result, TStateView, + state_storage_usage::StateStorageUsage, state_value::StateValue, StateViewResult, + TStateView, }, }; use dashmap::DashMap; @@ -53,7 +54,7 @@ impl RemoteStateView { .or_insert(RemoteStateValue::waiting()); } - pub fn get_state_value(&self, state_key: &StateKey) -> Result> { + pub fn get_state_value(&self, state_key: &StateKey) -> StateViewResult> { if let Some(value) = self.state_values.get(state_key) { let value_clone = value.clone(); // It is possible that the value is not ready yet and the get_value call blocks. 
In that @@ -182,7 +183,7 @@ impl RemoteStateViewClient { impl TStateView for RemoteStateViewClient { type Key = StateKey; - fn get_state_value(&self, state_key: &StateKey) -> Result> { + fn get_state_value(&self, state_key: &StateKey) -> StateViewResult> { let state_view_reader = self.state_view.read().unwrap(); if state_view_reader.has_state_key(state_key) { // If the key is already in the cache then we return it. @@ -202,7 +203,7 @@ impl TStateView for RemoteStateViewClient { state_view_reader.get_state_value(state_key) } - fn get_usage(&self) -> Result { + fn get_usage(&self) -> StateViewResult { unimplemented!("get_usage is not implemented for RemoteStateView") } } diff --git a/execution/executor-test-helpers/src/integration_test_impl.rs b/execution/executor-test-helpers/src/integration_test_impl.rs index 7b0b08ba157be..60464ffe5a017 100644 --- a/execution/executor-test-helpers/src/integration_test_impl.rs +++ b/execution/executor-test-helpers/src/integration_test_impl.rs @@ -16,7 +16,7 @@ use aptos_sdk::{ types::{AccountKey, LocalAccount}, }; use aptos_storage_interface::{ - state_view::{DbStateViewAtVersion, VerifiedStateViewAtVersion}, + state_store::state_view::db_state_view::{DbStateViewAtVersion, VerifiedStateViewAtVersion}, DbReaderWriter, }; use aptos_types::{ diff --git a/execution/executor-types/src/error.rs b/execution/executor-types/src/error.rs index 0e3367d0ec3ca..355ecb8930108 100644 --- a/execution/executor-types/src/error.rs +++ b/execution/executor-types/src/error.rs @@ -4,7 +4,7 @@ use aptos_crypto::HashValue; use aptos_storage_interface::AptosDbError; -use aptos_types::{state_store::errors::StateviewError, transaction::Version}; +use aptos_types::{state_store::errors::StateViewError, transaction::Version}; use serde::{Deserialize, Serialize}; use std::fmt::Display; use thiserror::Error; @@ -59,8 +59,8 @@ impl From for ExecutorError { } } -impl From for ExecutorError { - fn from(error: StateviewError) -> Self { +impl From for ExecutorError { + fn from(error: StateViewError) -> Self { Self::InternalError { error: format!("{}", error), } diff --git a/execution/executor-types/src/execution_output.rs b/execution/executor-types/src/execution_output.rs index d4c04f09f27ac..0708274f60c98 100644 --- a/execution/executor-types/src/execution_output.rs +++ b/execution/executor-types/src/execution_output.rs @@ -9,7 +9,9 @@ use crate::{ transactions_with_output::{TransactionsToKeep, TransactionsWithOutput}, }; use aptos_drop_helper::DropHelper; -use aptos_storage_interface::{cached_state_view::StateCache, state_delta::StateDelta}; +use aptos_storage_interface::state_store::{ + state_delta::StateDelta, state_view::cached_state_view::StateCache, +}; use aptos_types::{ contract_event::ContractEvent, epoch_state::EpochState, diff --git a/execution/executor-types/src/state_checkpoint_output.rs b/execution/executor-types/src/state_checkpoint_output.rs index 0851b3341a3fa..61f5bb597a8d2 100644 --- a/execution/executor-types/src/state_checkpoint_output.rs +++ b/execution/executor-types/src/state_checkpoint_output.rs @@ -5,8 +5,9 @@ use aptos_crypto::HashValue; use aptos_drop_helper::DropHelper; -use aptos_storage_interface::state_delta::StateDelta; -use aptos_types::state_store::ShardedStateUpdates; +use aptos_storage_interface::state_store::{ + sharded_state_updates::ShardedStateUpdates, state_delta::StateDelta, +}; use derive_more::Deref; use std::sync::Arc; diff --git a/execution/executor-types/src/transactions_with_output.rs b/execution/executor-types/src/transactions_with_output.rs 
index cb6390e12a475..f69bcef083453 100644 --- a/execution/executor-types/src/transactions_with_output.rs +++ b/execution/executor-types/src/transactions_with_output.rs @@ -3,7 +3,7 @@ use crate::metrics::TIMER; use aptos_metrics_core::TimerHelper; -use aptos_storage_interface::sharded_state_update_refs::ShardedStateUpdateRefs; +use aptos_storage_interface::state_store::sharded_state_update_refs::ShardedStateUpdateRefs; use aptos_types::transaction::{Transaction, TransactionOutput}; use itertools::izip; use std::{ diff --git a/execution/executor/benches/data_collection.rs b/execution/executor/benches/data_collection.rs index 7adac11a27885..0a76e00bba9cf 100644 --- a/execution/executor/benches/data_collection.rs +++ b/execution/executor/benches/data_collection.rs @@ -1,11 +1,12 @@ // Copyright (c) Aptos Foundation // SPDX-License-Identifier: Apache-2.0 +use aptos_storage_interface::state_store::NUM_STATE_SHARDS; use aptos_types::{ account_address::AccountAddress, account_config::AccountResource, event::{EventHandle, EventKey}, - state_store::{state_key::StateKey, state_value::StateValue, NUM_STATE_SHARDS}, + state_store::{state_key::StateKey, state_value::StateValue}, transaction::Version, write_set::{WriteOp, WriteSet}, }; diff --git a/execution/executor/src/block_executor/mod.rs b/execution/executor/src/block_executor/mod.rs index f60548a94029a..40c90751bcec7 100644 --- a/execution/executor/src/block_executor/mod.rs +++ b/execution/executor/src/block_executor/mod.rs @@ -27,7 +27,10 @@ use aptos_infallible::RwLock; use aptos_logger::prelude::*; use aptos_metrics_core::{IntGaugeHelper, TimerHelper}; use aptos_storage_interface::{ - async_proof_fetcher::AsyncProofFetcher, cached_state_view::CachedStateView, DbReaderWriter, + state_store::state_view::{ + async_proof_fetcher::AsyncProofFetcher, cached_state_view::CachedStateView, + }, + DbReaderWriter, }; use aptos_types::{ block_executor::{ diff --git a/execution/executor/src/chunk_executor/chunk_commit_queue.rs b/execution/executor/src/chunk_executor/chunk_commit_queue.rs index 1af1e28908f00..c5794399e7586 100644 --- a/execution/executor/src/chunk_executor/chunk_commit_queue.rs +++ b/execution/executor/src/chunk_executor/chunk_commit_queue.rs @@ -11,7 +11,7 @@ use crate::{ }, }; use anyhow::{anyhow, ensure, Result}; -use aptos_storage_interface::{state_delta::StateDelta, DbReader, ExecutedTrees}; +use aptos_storage_interface::{state_store::state_delta::StateDelta, DbReader, ExecutedTrees}; use aptos_types::{proof::accumulator::InMemoryTransactionAccumulator, transaction::Version}; use std::{collections::VecDeque, sync::Arc}; diff --git a/execution/executor/src/chunk_executor/mod.rs b/execution/executor/src/chunk_executor/mod.rs index ddb42725791f5..290e630757cf8 100644 --- a/execution/executor/src/chunk_executor/mod.rs +++ b/execution/executor/src/chunk_executor/mod.rs @@ -24,8 +24,11 @@ use aptos_infallible::{Mutex, RwLock}; use aptos_logger::prelude::*; use aptos_metrics_core::{IntGaugeHelper, TimerHelper}; use aptos_storage_interface::{ - async_proof_fetcher::AsyncProofFetcher, cached_state_view::CachedStateView, - state_delta::StateDelta, DbReaderWriter, + state_store::{ + state_delta::StateDelta, + state_view::{async_proof_fetcher::AsyncProofFetcher, cached_state_view::CachedStateView}, + }, + DbReaderWriter, }; use aptos_types::{ block_executor::{ diff --git a/execution/executor/src/chunk_executor/transaction_chunk.rs b/execution/executor/src/chunk_executor/transaction_chunk.rs index 89a3e39ffb285..c1958bfb66647 100644 --- 
a/execution/executor/src/chunk_executor/transaction_chunk.rs +++ b/execution/executor/src/chunk_executor/transaction_chunk.rs @@ -9,7 +9,7 @@ use anyhow::Result; use aptos_executor_types::execution_output::ExecutionOutput; use aptos_experimental_runtimes::thread_manager::optimal_min_len; use aptos_metrics_core::TimerHelper; -use aptos_storage_interface::cached_state_view::CachedStateView; +use aptos_storage_interface::state_store::state_view::cached_state_view::CachedStateView; use aptos_types::{ block_executor::{ config::BlockExecutorConfigFromOnchain, diff --git a/execution/executor/src/db_bootstrapper/mod.rs b/execution/executor/src/db_bootstrapper/mod.rs index db0f85c28cb14..76380637cb834 100644 --- a/execution/executor/src/db_bootstrapper/mod.rs +++ b/execution/executor/src/db_bootstrapper/mod.rs @@ -12,8 +12,10 @@ use anyhow::{anyhow, ensure, format_err, Result}; use aptos_crypto::HashValue; use aptos_logger::prelude::*; use aptos_storage_interface::{ - async_proof_fetcher::AsyncProofFetcher, cached_state_view::CachedStateView, DbReaderWriter, - DbWriter, ExecutedTrees, + state_store::state_view::{ + async_proof_fetcher::AsyncProofFetcher, cached_state_view::CachedStateView, + }, + DbReaderWriter, DbWriter, ExecutedTrees, }; use aptos_types::{ account_config::CORE_CODE_ADDRESS, diff --git a/execution/executor/src/tests/mod.rs b/execution/executor/src/tests/mod.rs index 163cbfda435a7..0a599682eac87 100644 --- a/execution/executor/src/tests/mod.rs +++ b/execution/executor/src/tests/mod.rs @@ -13,7 +13,8 @@ use aptos_executor_types::{ BlockExecutorTrait, ChunkExecutorTrait, TransactionReplayer, VerifyExecutionMode, }; use aptos_storage_interface::{ - async_proof_fetcher::AsyncProofFetcher, DbReaderWriter, ExecutedTrees, Result, + state_store::state_view::async_proof_fetcher::AsyncProofFetcher, DbReaderWriter, ExecutedTrees, + Result, }; use aptos_types::{ account_address::AccountAddress, diff --git a/execution/executor/src/types/in_memory_state_calculator_v2.rs b/execution/executor/src/types/in_memory_state_calculator_v2.rs index b09700336fe8f..95bb07bfcd1b5 100644 --- a/execution/executor/src/types/in_memory_state_calculator_v2.rs +++ b/execution/executor/src/types/in_memory_state_calculator_v2.rs @@ -12,15 +12,15 @@ use aptos_executor_types::{ use aptos_logger::info; use aptos_metrics_core::TimerHelper; use aptos_scratchpad::FrozenSparseMerkleTree; -use aptos_storage_interface::{ - cached_state_view::{ShardedStateCache, StateCache}, +use aptos_storage_interface::state_store::{ sharded_state_update_refs::ShardedStateUpdateRefs, + sharded_state_updates::ShardedStateUpdates, state_delta::StateDelta, + state_view::cached_state_view::{ShardedStateCache, StateCache}, }; use aptos_types::{ state_store::{ state_key::StateKey, state_storage_usage::StateStorageUsage, state_value::StateValue, - ShardedStateUpdates, }, transaction::Version, write_set::WriteSet, diff --git a/execution/executor/src/types/partial_state_compute_result.rs b/execution/executor/src/types/partial_state_compute_result.rs index a3167de3f65c8..2a62efdab6d0c 100644 --- a/execution/executor/src/types/partial_state_compute_result.rs +++ b/execution/executor/src/types/partial_state_compute_result.rs @@ -7,7 +7,7 @@ use aptos_executor_types::{ execution_output::ExecutionOutput, state_checkpoint_output::StateCheckpointOutput, state_compute_result::StateComputeResult, LedgerUpdateOutput, }; -use aptos_storage_interface::state_delta::StateDelta; +use aptos_storage_interface::state_store::state_delta::StateDelta; use 
aptos_types::proof::accumulator::InMemoryTransactionAccumulator; use once_cell::sync::OnceCell; use std::sync::Arc; diff --git a/execution/executor/src/workflow/do_get_execution_output.rs b/execution/executor/src/workflow/do_get_execution_output.rs index a494598a53047..99be96683f624 100644 --- a/execution/executor/src/workflow/do_get_execution_output.rs +++ b/execution/executor/src/workflow/do_get_execution_output.rs @@ -23,7 +23,9 @@ use aptos_executor_types::{ use aptos_experimental_runtimes::thread_manager::THREAD_MANAGER; use aptos_logger::prelude::*; use aptos_metrics_core::TimerHelper; -use aptos_storage_interface::cached_state_view::{CachedStateView, StateCache}; +use aptos_storage_interface::state_store::state_view::cached_state_view::{ + CachedStateView, StateCache, +}; #[cfg(feature = "consensus-only-perf-test")] use aptos_types::transaction::ExecutionStatus; use aptos_types::{ @@ -486,21 +488,21 @@ impl<'a> TStateView for WriteSetStateView<'a> { fn get_state_value( &self, state_key: &Self::Key, - ) -> aptos_types::state_store::Result> { + ) -> aptos_types::state_store::StateViewResult> { Ok(self .write_set .get(state_key) .and_then(|write_op| write_op.as_state_value())) } - fn get_usage(&self) -> aptos_types::state_store::Result { + fn get_usage(&self) -> aptos_types::state_store::StateViewResult { unreachable!("Not supposed to be called on WriteSetStateView.") } } #[cfg(test)] mod tests { use super::Parser; - use aptos_storage_interface::cached_state_view::StateCache; + use aptos_storage_interface::state_store::state_view::cached_state_view::StateCache; use aptos_types::{ contract_event::ContractEvent, transaction::{ diff --git a/execution/executor/src/workflow/do_state_checkpoint.rs b/execution/executor/src/workflow/do_state_checkpoint.rs index a971ca2130a3a..1d4cfae66ec3a 100644 --- a/execution/executor/src/workflow/do_state_checkpoint.rs +++ b/execution/executor/src/workflow/do_state_checkpoint.rs @@ -7,7 +7,7 @@ use aptos_crypto::HashValue; use aptos_executor_types::{ execution_output::ExecutionOutput, state_checkpoint_output::StateCheckpointOutput, }; -use aptos_storage_interface::state_delta::StateDelta; +use aptos_storage_interface::state_store::state_delta::StateDelta; use std::sync::Arc; pub struct DoStateCheckpoint; diff --git a/execution/executor/tests/db_bootstrapper_test.rs b/execution/executor/tests/db_bootstrapper_test.rs index 6ec37d8c30048..96d6647bb898b 100644 --- a/execution/executor/tests/db_bootstrapper_test.rs +++ b/execution/executor/tests/db_bootstrapper_test.rs @@ -15,7 +15,9 @@ use aptos_executor_test_helpers::{ bootstrap_genesis, gen_ledger_info_with_sigs, get_test_signed_transaction, }; use aptos_executor_types::BlockExecutorTrait; -use aptos_storage_interface::{state_view::LatestDbStateCheckpointView, DbReaderWriter}; +use aptos_storage_interface::{ + state_store::state_view::db_state_view::LatestDbStateCheckpointView, DbReaderWriter, +}; use aptos_temppath::TempPath; use aptos_types::{ account_address::AccountAddress, diff --git a/execution/executor/tests/storage_integration_test.rs b/execution/executor/tests/storage_integration_test.rs index 33e077dc06b83..e0c77042d8423 100644 --- a/execution/executor/tests/storage_integration_test.rs +++ b/execution/executor/tests/storage_integration_test.rs @@ -11,7 +11,7 @@ use aptos_executor_test_helpers::{ }, }; use aptos_executor_types::BlockExecutorTrait; -use aptos_storage_interface::state_view::DbStateViewAtVersion; +use 
aptos_storage_interface::state_store::state_view::db_state_view::DbStateViewAtVersion; use aptos_types::{ account_config::{aptos_test_root_address, AccountResource, CORE_CODE_ADDRESS}, block_metadata::BlockMetadata, diff --git a/experimental/execution/ptx-executor/src/state_view.rs b/experimental/execution/ptx-executor/src/state_view.rs index 8cfaa248473d3..3bd7934543768 100644 --- a/experimental/execution/ptx-executor/src/state_view.rs +++ b/experimental/execution/ptx-executor/src/state_view.rs @@ -5,8 +5,8 @@ use crate::common::HashMap; use aptos_types::state_store::{ - state_key::StateKey, state_storage_usage::StateStorageUsage, state_value::StateValue, Result, - StateView, TStateView, + state_key::StateKey, state_storage_usage::StateStorageUsage, state_value::StateValue, + StateView, StateViewResult, TStateView, }; pub struct OverlayedStateView<'view> { base_view: &'view dyn StateView, @@ -33,7 +33,7 @@ impl<'view> OverlayedStateView<'view> { impl<'view> TStateView for OverlayedStateView<'view> { type Key = StateKey; - fn get_state_value(&self, state_key: &Self::Key) -> Result> { + fn get_state_value(&self, state_key: &Self::Key) -> StateViewResult> { // TODO(ptx): reject non-module reads once block_metadata is analyzed for R/W set // TODO(ptx): remove base_view reads once module reads are dealt with self.overlay @@ -43,7 +43,7 @@ impl<'view> TStateView for OverlayedStateView<'view> { .unwrap_or_else(|| self.base_view.get_state_value(state_key)) } - fn get_usage(&self) -> Result { + fn get_usage(&self) -> StateViewResult { // TODO(aldenhu): maybe remove get_usage() from StateView unimplemented!() } diff --git a/mempool/src/shared_mempool/tasks.rs b/mempool/src/shared_mempool/tasks.rs index 71a8c6f392a56..c54f0ba46196e 100644 --- a/mempool/src/shared_mempool/tasks.rs +++ b/mempool/src/shared_mempool/tasks.rs @@ -16,7 +16,7 @@ use crate::{ }, use_case_history::UseCaseHistory, }, - thread_pool::IO_POOL, + thread_pool::{IO_POOL, VALIDATION_POOL}, QuorumStoreRequest, QuorumStoreResponse, SubmissionStatus, }; use anyhow::Result; @@ -28,7 +28,7 @@ use aptos_logger::prelude::*; use aptos_mempool_notifications::CommittedTransaction; use aptos_metrics_core::HistogramTimer; use aptos_network::application::interface::NetworkClientInterface; -use aptos_storage_interface::state_view::LatestDbStateCheckpointView; +use aptos_storage_interface::state_store::state_view::db_state_view::LatestDbStateCheckpointView; use aptos_types::{ account_address::AccountAddress, mempool_status::{MempoolStatus, MempoolStatusCode}, @@ -45,7 +45,6 @@ use std::{ time::{Duration, Instant}, }; use tokio::runtime::Handle; - // ============================== // // broadcast_coordinator tasks // // ============================== // @@ -393,18 +392,20 @@ fn validate_and_add_transactions( let vm_validation_timer = counters::PROCESS_TXN_BREAKDOWN_LATENCY .with_label_values(&[counters::VM_VALIDATION_LABEL]) .start_timer(); - let validation_results = transactions - .par_iter() - .map(|t| { - let result = smp.validator.read().validate_transaction(t.0.clone()); - // Pre-compute the hash and length if the transaction is valid, before locking mempool - if result.is_ok() { - t.0.committed_hash(); - t.0.txn_bytes_len(); - } - result - }) - .collect::>(); + let validation_results = VALIDATION_POOL.install(|| { + transactions + .par_iter() + .map(|t| { + let result = smp.validator.read().validate_transaction(t.0.clone()); + // Pre-compute the hash and length if the transaction is valid, before locking mempool + if result.is_ok() { + 
t.0.committed_hash(); + t.0.txn_bytes_len(); + } + result + }) + .collect::>() + }); vm_validation_timer.stop_and_record(); { let mut mempool = smp.mempool.lock(); diff --git a/mempool/src/thread_pool.rs b/mempool/src/thread_pool.rs index 43c5dc2dc7f2e..7ad58b1c2114a 100644 --- a/mempool/src/thread_pool.rs +++ b/mempool/src/thread_pool.rs @@ -11,3 +11,10 @@ pub(crate) static IO_POOL: Lazy = Lazy::new(|| { .build() .unwrap() }); + +pub(crate) static VALIDATION_POOL: Lazy = Lazy::new(|| { + rayon::ThreadPoolBuilder::new() + .thread_name(|index| format!("mempool_vali_{}", index)) + .build() + .unwrap() +}); diff --git a/state-sync/inter-component/event-notifications/src/lib.rs b/state-sync/inter-component/event-notifications/src/lib.rs index 4ff95eaf18b8d..965a43fedad10 100644 --- a/state-sync/inter-component/event-notifications/src/lib.rs +++ b/state-sync/inter-component/event-notifications/src/lib.rs @@ -7,7 +7,9 @@ use anyhow::{anyhow, Result}; use aptos_channels::{aptos_channel, message_queues::QueueStyle}; use aptos_id_generator::{IdGenerator, U64IdGenerator}; use aptos_infallible::RwLock; -use aptos_storage_interface::{state_view::DbStateViewAtVersion, DbReader, DbReaderWriter}; +use aptos_storage_interface::{ + state_store::state_view::db_state_view::DbStateViewAtVersion, DbReader, DbReaderWriter, +}; use aptos_types::{ contract_event::ContractEvent, event::EventKey, diff --git a/storage/aptosdb/src/backup/restore_utils.rs b/storage/aptosdb/src/backup/restore_utils.rs index 3dd23badae6c2..851c3d1489b6b 100644 --- a/storage/aptosdb/src/backup/restore_utils.rs +++ b/storage/aptosdb/src/backup/restore_utils.rs @@ -18,7 +18,9 @@ use crate::{ }; use aptos_crypto::HashValue; use aptos_schemadb::{SchemaBatch, DB}; -use aptos_storage_interface::{db_ensure as ensure, state_delta::StateDelta, AptosDbError, Result}; +use aptos_storage_interface::{ + db_ensure as ensure, state_store::state_delta::StateDelta, AptosDbError, Result, +}; use aptos_types::{ contract_event::ContractEvent, ledger_info::LedgerInfoWithSignatures, diff --git a/storage/aptosdb/src/db/fake_aptosdb.rs b/storage/aptosdb/src/db/fake_aptosdb.rs index 423dcf8119e7f..2f3477765addb 100644 --- a/storage/aptosdb/src/db/fake_aptosdb.rs +++ b/storage/aptosdb/src/db/fake_aptosdb.rs @@ -16,7 +16,11 @@ use aptos_infallible::Mutex; use aptos_logger::debug; use aptos_scratchpad::SparseMerkleTree; use aptos_storage_interface::{ - cached_state_view::ShardedStateCache, db_ensure as ensure, state_delta::StateDelta, + db_ensure as ensure, + state_store::{ + sharded_state_updates::ShardedStateUpdates, state_delta::StateDelta, + state_view::cached_state_view::ShardedStateCache, + }, AptosDbError, DbReader, DbWriter, ExecutedTrees, MAX_REQUEST_LIMIT, }; use aptos_types::{ @@ -40,7 +44,7 @@ use aptos_types::{ state_key::{prefix::StateKeyPrefix, StateKey}, state_storage_usage::StateStorageUsage, state_value::{StateValue, StateValueChunkWithProof}, - table, ShardedStateUpdates, + table, }, transaction::{ Transaction, TransactionAuxiliaryData, TransactionInfo, TransactionListWithProof, @@ -971,7 +975,9 @@ mod tests { }; use anyhow::{anyhow, ensure, Result}; use aptos_crypto::{hash::CryptoHash, HashValue}; - use aptos_storage_interface::{cached_state_view::ShardedStateCache, DbReader, DbWriter}; + use aptos_storage_interface::{ + state_store::state_view::cached_state_view::ShardedStateCache, DbReader, DbWriter, + }; use aptos_temppath::TempPath; use aptos_types::{ account_address::AccountAddress, diff --git 
a/storage/aptosdb/src/db/include/aptosdb_internal.rs b/storage/aptosdb/src/db/include/aptosdb_internal.rs index d31b38778e1df..48ff86fffecf0 100644 --- a/storage/aptosdb/src/db/include/aptosdb_internal.rs +++ b/storage/aptosdb/src/db/include/aptosdb_internal.rs @@ -129,7 +129,7 @@ impl AptosDB { ); if indexer.next_version() < ledger_next_version { - use aptos_storage_interface::state_view::DbStateViewAtVersion; + use aptos_storage_interface::state_store::state_view::db_state_view::DbStateViewAtVersion; let db: Arc = self.state_store.clone(); let state_view = db.state_view_at_version(Some(ledger_next_version - 1))?; diff --git a/storage/aptosdb/src/db/include/aptosdb_testonly.rs b/storage/aptosdb/src/db/include/aptosdb_testonly.rs index bb617af181f32..77288df003b01 100644 --- a/storage/aptosdb/src/db/include/aptosdb_testonly.rs +++ b/storage/aptosdb/src/db/include/aptosdb_testonly.rs @@ -1,14 +1,13 @@ // Copyright © Aptos Foundation // SPDX-License-Identifier: Apache-2.0 -use aptos_config::config::{ BUFFERED_STATE_TARGET_ITEMS_FOR_TEST, DEFAULT_MAX_NUM_NODES_PER_LRU_CACHE_SHARD}; +use aptos_config::config::{BUFFERED_STATE_TARGET_ITEMS_FOR_TEST, DEFAULT_MAX_NUM_NODES_PER_LRU_CACHE_SHARD}; use std::default::Default; -use aptos_storage_interface::cached_state_view::ShardedStateCache; -use aptos_storage_interface::state_delta::StateDelta; -use aptos_types::state_store::{ShardedStateUpdates}; +use aptos_storage_interface::state_store::state_view::cached_state_view::ShardedStateCache; +use aptos_storage_interface::state_store::state_delta::StateDelta; use aptos_types::transaction::{TransactionStatus, TransactionToCommit}; use aptos_executor_types::transactions_with_output::TransactionsToKeep; -use aptos_storage_interface::sharded_state_update_refs::ShardedStateUpdateRefs; +use aptos_storage_interface::state_store::sharded_state_update_refs::ShardedStateUpdateRefs; impl AptosDB { /// This opens db in non-readonly mode, without the pruner. 
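The mempool hunks above (shared_mempool/tasks.rs and thread_pool.rs) move per-transaction VM validation off rayon's implicit global pool and onto a dedicated, lazily built `VALIDATION_POOL`, scoped via `install`. Below is a minimal, self-contained sketch of that pattern, not the mempool code itself: the pool name `DEMO_POOL`, the `validate` stand-in, and the `rayon`/`once_cell` crate dependencies are assumptions for illustration only.

```rust
use once_cell::sync::Lazy;
use rayon::prelude::*;

// A dedicated pool with named worker threads, built on first use.
static DEMO_POOL: Lazy<rayon::ThreadPool> = Lazy::new(|| {
    rayon::ThreadPoolBuilder::new()
        .thread_name(|index| format!("demo_vali_{}", index))
        .build()
        .expect("failed to build rayon pool")
});

// Stand-in for per-transaction validation work.
fn validate(txn: &u64) -> bool {
    txn % 2 == 0
}

fn main() {
    let txns: Vec<u64> = (0..1_000u64).collect();
    // `install` runs the closure on DEMO_POOL, so the `par_iter` inside it
    // is serviced by this pool's workers rather than the global rayon pool.
    let results: Vec<bool> = DEMO_POOL.install(|| txns.par_iter().map(validate).collect());
    println!("valid: {}", results.iter().filter(|ok| **ok).count());
}
```

Leaving `num_threads` unset, as the new `VALIDATION_POOL` does, lets rayon size the pool to the available parallelism by default; the isolation comes from `install`, which keeps the CPU-heavy validation work from contending with other users of the global rayon pool.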
diff --git a/storage/aptosdb/src/db/mod.rs b/storage/aptosdb/src/db/mod.rs index 7289b035d75ba..04a5e9bc539a5 100644 --- a/storage/aptosdb/src/db/mod.rs +++ b/storage/aptosdb/src/db/mod.rs @@ -40,8 +40,9 @@ use aptos_resource_viewer::AptosValueAnnotator; use aptos_schemadb::SchemaBatch; use aptos_scratchpad::SparseMerkleTree; use aptos_storage_interface::{ - db_ensure as ensure, db_other_bail as bail, AptosDbError, DbReader, DbWriter, ExecutedTrees, - Order, Result, StateSnapshotReceiver, MAX_REQUEST_LIMIT, + db_ensure as ensure, db_other_bail as bail, + state_store::sharded_state_updates::ShardedStateUpdates, AptosDbError, DbReader, DbWriter, + ExecutedTrees, Order, Result, StateSnapshotReceiver, MAX_REQUEST_LIMIT, }; use aptos_types::{ account_address::AccountAddress, diff --git a/storage/aptosdb/src/db/test_helper.rs b/storage/aptosdb/src/db/test_helper.rs index 3e79211014ec0..6de6bce71bb76 100644 --- a/storage/aptosdb/src/db/test_helper.rs +++ b/storage/aptosdb/src/db/test_helper.rs @@ -18,7 +18,7 @@ use aptos_executor_types::ProofReader; use aptos_jellyfish_merkle::node_type::{Node, NodeKey}; #[cfg(test)] use aptos_schemadb::SchemaBatch; -use aptos_storage_interface::{state_delta::StateDelta, DbReader, Order, Result}; +use aptos_storage_interface::{state_store::state_delta::StateDelta, DbReader, Order, Result}; use aptos_temppath::TempPath; #[cfg(test)] use aptos_types::state_store::state_storage_usage::StateStorageUsage; @@ -68,7 +68,8 @@ pub(crate) fn update_store( enable_sharding: bool, ) -> HashValue { use aptos_storage_interface::{ - jmt_update_refs, jmt_updates, sharded_state_update_refs::ShardedStateUpdateRefs, + jmt_update_refs, jmt_updates, + state_store::sharded_state_update_refs::ShardedStateUpdateRefs, }; let mut root_hash = *aptos_crypto::hash::SPARSE_MERKLE_PLACEHOLDER_HASH; diff --git a/storage/aptosdb/src/db_debugger/truncate/mod.rs b/storage/aptosdb/src/db_debugger/truncate/mod.rs index 160f91b3a11fc..548bdf8961bd1 100644 --- a/storage/aptosdb/src/db_debugger/truncate/mod.rs +++ b/storage/aptosdb/src/db_debugger/truncate/mod.rs @@ -177,9 +177,8 @@ mod test { }, utils::truncation_helper::num_frozen_nodes_in_accumulator, }; - use aptos_storage_interface::DbReader; + use aptos_storage_interface::{state_store::NUM_STATE_SHARDS, DbReader}; use aptos_temppath::TempPath; - use aptos_types::state_store::NUM_STATE_SHARDS; use proptest::prelude::*; proptest! 
{ diff --git a/storage/aptosdb/src/pruner/state_merkle_pruner/test.rs b/storage/aptosdb/src/pruner/state_merkle_pruner/test.rs index f65495bf78eb2..ed5e8ffd2e598 100644 --- a/storage/aptosdb/src/pruner/state_merkle_pruner/test.rs +++ b/storage/aptosdb/src/pruner/state_merkle_pruner/test.rs @@ -20,7 +20,9 @@ use aptos_config::config::{LedgerPrunerConfig, StateMerklePrunerConfig}; use aptos_crypto::{hash::CryptoHash, HashValue}; use aptos_schemadb::SchemaBatch; use aptos_storage_interface::{ - jmt_update_refs, jmt_updates, sharded_state_update_refs::ShardedStateUpdateRefs, DbReader, + jmt_update_refs, jmt_updates, + state_store::{sharded_state_update_refs::ShardedStateUpdateRefs, NUM_STATE_SHARDS}, + DbReader, }; use aptos_temppath::TempPath; use aptos_types::{ @@ -28,7 +30,6 @@ use aptos_types::{ state_key::StateKey, state_storage_usage::StateStorageUsage, state_value::{StaleStateValueByKeyHashIndex, StaleStateValueIndex, StateValue}, - NUM_STATE_SHARDS, }, transaction::Version, }; diff --git a/storage/aptosdb/src/rocksdb_property_reporter.rs b/storage/aptosdb/src/rocksdb_property_reporter.rs index a719318224c2f..9d5912b8c3e17 100644 --- a/storage/aptosdb/src/rocksdb_property_reporter.rs +++ b/storage/aptosdb/src/rocksdb_property_reporter.rs @@ -23,7 +23,7 @@ use aptos_infallible::Mutex; use aptos_logger::prelude::*; use aptos_metrics_core::IntGaugeVec; use aptos_schemadb::DB; -use aptos_types::state_store::NUM_STATE_SHARDS; +use aptos_storage_interface::state_store::NUM_STATE_SHARDS; use once_cell::sync::Lazy; use std::{ collections::HashMap, diff --git a/storage/aptosdb/src/state_kv_db.rs b/storage/aptosdb/src/state_kv_db.rs index f0cb7a27c2174..27d28bf7f2696 100644 --- a/storage/aptosdb/src/state_kv_db.rs +++ b/storage/aptosdb/src/state_kv_db.rs @@ -19,9 +19,9 @@ use aptos_experimental_runtimes::thread_manager::THREAD_MANAGER; use aptos_logger::prelude::info; use aptos_rocksdb_options::gen_rocksdb_options; use aptos_schemadb::{ReadOptions, SchemaBatch, DB}; -use aptos_storage_interface::Result; +use aptos_storage_interface::{state_store::NUM_STATE_SHARDS, Result}; use aptos_types::{ - state_store::{state_key::StateKey, state_value::StateValue, NUM_STATE_SHARDS}, + state_store::{state_key::StateKey, state_value::StateValue}, transaction::Version, }; use arr_macro::arr; diff --git a/storage/aptosdb/src/state_merkle_db.rs b/storage/aptosdb/src/state_merkle_db.rs index e61e461aa3453..7d7ad649d0431 100644 --- a/storage/aptosdb/src/state_merkle_db.rs +++ b/storage/aptosdb/src/state_merkle_db.rs @@ -25,11 +25,13 @@ use aptos_rocksdb_options::gen_rocksdb_options; use aptos_schemadb::{SchemaBatch, DB}; #[cfg(test)] use aptos_scratchpad::get_state_shard_id; -use aptos_storage_interface::{db_ensure as ensure, AptosDbError, Result}; +use aptos_storage_interface::{ + db_ensure as ensure, state_store::NUM_STATE_SHARDS, AptosDbError, Result, +}; use aptos_types::{ nibble::{nibble_path::NibblePath, ROOT_NIBBLE_HEIGHT}, proof::{SparseMerkleProofExt, SparseMerkleRangeProof}, - state_store::{state_key::StateKey, NUM_STATE_SHARDS}, + state_store::state_key::StateKey, transaction::Version, }; use arr_macro::arr; diff --git a/storage/aptosdb/src/state_store/buffered_state.rs b/storage/aptosdb/src/state_store/buffered_state.rs index fa398701d166e..89f1e305e6f51 100644 --- a/storage/aptosdb/src/state_store/buffered_state.rs +++ b/storage/aptosdb/src/state_store/buffered_state.rs @@ -10,8 +10,12 @@ use crate::{ use aptos_logger::info; use aptos_metrics_core::TimerHelper; use aptos_scratchpad::SmtAncestors; -use 
aptos_storage_interface::{db_ensure as ensure, state_delta::StateDelta, AptosDbError, Result}; -use aptos_types::state_store::{state_value::StateValue, ShardedStateUpdates}; +use aptos_storage_interface::{ + db_ensure as ensure, + state_store::{sharded_state_updates::ShardedStateUpdates, state_delta::StateDelta}, + AptosDbError, Result, +}; +use aptos_types::state_store::state_value::StateValue; use std::{ sync::{ mpsc, diff --git a/storage/aptosdb/src/state_store/mod.rs b/storage/aptosdb/src/state_store/mod.rs index 4c68a0c0bc6b3..3f32b78f6d482 100644 --- a/storage/aptosdb/src/state_store/mod.rs +++ b/storage/aptosdb/src/state_store/mod.rs @@ -51,11 +51,16 @@ use aptos_metrics_core::TimerHelper; use aptos_schemadb::SchemaBatch; use aptos_scratchpad::{SmtAncestors, SparseMerkleTree}; use aptos_storage_interface::{ - async_proof_fetcher::AsyncProofFetcher, - cached_state_view::{CachedStateView, ShardedStateCache}, db_ensure as ensure, db_other_bail as bail, - sharded_state_update_refs::ShardedStateUpdateRefs, - state_delta::StateDelta, + state_store::{ + sharded_state_update_refs::ShardedStateUpdateRefs, + state_delta::StateDelta, + state_view::{ + async_proof_fetcher::AsyncProofFetcher, + cached_state_view::{CachedStateView, ShardedStateCache}, + }, + NUM_STATE_SHARDS, + }, AptosDbError, DbReader, Result, StateSnapshotReceiver, }; use aptos_types::{ @@ -67,7 +72,7 @@ use aptos_types::{ StaleStateValueByKeyHashIndex, StaleStateValueIndex, StateValue, StateValueChunkWithProof, }, - StateViewId, NUM_STATE_SHARDS, + StateViewId, }, transaction::Version, write_set::WriteSet, diff --git a/storage/aptosdb/src/state_store/state_merkle_batch_committer.rs b/storage/aptosdb/src/state_store/state_merkle_batch_committer.rs index 3c9bce81f10b7..85e5c4dcbfcf3 100644 --- a/storage/aptosdb/src/state_store/state_merkle_batch_committer.rs +++ b/storage/aptosdb/src/state_store/state_merkle_batch_committer.rs @@ -16,7 +16,7 @@ use aptos_logger::{info, trace}; use aptos_metrics_core::TimerHelper; use aptos_schemadb::SchemaBatch; use aptos_scratchpad::SmtAncestors; -use aptos_storage_interface::state_delta::StateDelta; +use aptos_storage_interface::state_store::state_delta::StateDelta; use aptos_types::state_store::state_value::StateValue; use std::sync::{mpsc::Receiver, Arc}; diff --git a/storage/aptosdb/src/state_store/state_snapshot_committer.rs b/storage/aptosdb/src/state_store/state_snapshot_committer.rs index f0668ba250a94..b45c49fa784fd 100644 --- a/storage/aptosdb/src/state_store/state_snapshot_committer.rs +++ b/storage/aptosdb/src/state_store/state_snapshot_committer.rs @@ -15,8 +15,12 @@ use crate::{ use aptos_experimental_runtimes::thread_manager::THREAD_MANAGER; use aptos_logger::trace; use aptos_scratchpad::SmtAncestors; -use aptos_storage_interface::{jmt_update_refs, jmt_updates, state_delta::StateDelta, Result}; -use aptos_types::state_store::{state_value::StateValue, NUM_STATE_SHARDS}; +use aptos_storage_interface::{ + jmt_update_refs, jmt_updates, + state_store::{state_delta::StateDelta, NUM_STATE_SHARDS}, + Result, +}; +use aptos_types::state_store::state_value::StateValue; use rayon::prelude::*; use static_assertions::const_assert; use std::{ diff --git a/storage/aptosdb/src/utils/mod.rs b/storage/aptosdb/src/utils/mod.rs index 239521005712e..ac8d76901ddb9 100644 --- a/storage/aptosdb/src/utils/mod.rs +++ b/storage/aptosdb/src/utils/mod.rs @@ -6,8 +6,8 @@ pub(crate) mod truncation_helper; use crate::schema::db_metadata::{DbMetadataKey, DbMetadataSchema}; use aptos_schemadb::{SchemaBatch, DB}; 
-use aptos_storage_interface::Result; -use aptos_types::{state_store::NUM_STATE_SHARDS, transaction::Version}; +use aptos_storage_interface::{state_store::NUM_STATE_SHARDS, Result}; +use aptos_types::transaction::Version; use arr_macro::arr; pub(crate) type ShardedStateKvSchemaBatch = [SchemaBatch; NUM_STATE_SHARDS]; diff --git a/storage/db-tool/src/replay_on_archive.rs b/storage/db-tool/src/replay_on_archive.rs index d4bd05161a90c..74000e50941ac 100644 --- a/storage/db-tool/src/replay_on_archive.rs +++ b/storage/db-tool/src/replay_on_archive.rs @@ -11,7 +11,9 @@ use aptos_config::config::{ }; use aptos_db::{backup::backup_handler::BackupHandler, AptosDB}; use aptos_logger::{error, info}; -use aptos_storage_interface::{state_view::DbStateViewAtVersion, AptosDbError, DbReader}; +use aptos_storage_interface::{ + state_store::state_view::db_state_view::DbStateViewAtVersion, AptosDbError, DbReader, +}; use aptos_types::{ contract_event::ContractEvent, transaction::{ @@ -30,6 +32,7 @@ use std::{ sync::{atomic::AtomicU64, Arc}, time::Instant, }; + // Replay Verify controller is responsible for providing legit range with start and end versions. #[derive(Parser)] pub struct Opt { diff --git a/storage/indexer/src/db_v2.rs b/storage/indexer/src/db_v2.rs index 01ad40d096251..fae7813789d5a 100644 --- a/storage/indexer/src/db_v2.rs +++ b/storage/indexer/src/db_v2.rs @@ -13,7 +13,8 @@ use aptos_logger::info; use aptos_resource_viewer::{AnnotatedMoveValue, AptosValueAnnotator}; use aptos_schemadb::{SchemaBatch, DB}; use aptos_storage_interface::{ - db_other_bail as bail, state_view::DbStateViewAtVersion, AptosDbError, DbReader, Result, + db_other_bail as bail, state_store::state_view::db_state_view::DbStateViewAtVersion, + AptosDbError, DbReader, Result, }; use aptos_types::{ access_path::Path, diff --git a/storage/indexer/src/lib.rs b/storage/indexer/src/lib.rs index 7a041c5dbbaca..34331dd702c31 100644 --- a/storage/indexer/src/lib.rs +++ b/storage/indexer/src/lib.rs @@ -23,7 +23,8 @@ use aptos_resource_viewer::{AnnotatedMoveValue, AptosValueAnnotator}; use aptos_rocksdb_options::gen_rocksdb_options; use aptos_schemadb::{SchemaBatch, DB}; use aptos_storage_interface::{ - db_ensure, db_other_bail, state_view::DbStateViewAtVersion, AptosDbError, DbReader, Result, + db_ensure, db_other_bail, state_store::state_view::db_state_view::DbStateViewAtVersion, + AptosDbError, DbReader, Result, }; use aptos_types::{ access_path::Path, diff --git a/storage/storage-interface/src/chunk_to_commit.rs b/storage/storage-interface/src/chunk_to_commit.rs index 86b9755b7169b..5148bfd23c709 100644 --- a/storage/storage-interface/src/chunk_to_commit.rs +++ b/storage/storage-interface/src/chunk_to_commit.rs @@ -1,14 +1,11 @@ // Copyright (c) Aptos Foundation // SPDX-License-Identifier: Apache-2.0 -use crate::{ - cached_state_view::ShardedStateCache, sharded_state_update_refs::ShardedStateUpdateRefs, - state_delta::StateDelta, -}; -use aptos_types::{ - state_store::ShardedStateUpdates, - transaction::{Transaction, TransactionInfo, TransactionOutput, Version}, +use crate::state_store::{ + sharded_state_update_refs::ShardedStateUpdateRefs, sharded_state_updates::ShardedStateUpdates, + state_delta::StateDelta, state_view::cached_state_view::ShardedStateCache, }; +use aptos_types::transaction::{Transaction, TransactionInfo, TransactionOutput, Version}; #[derive(Clone)] pub struct ChunkToCommit<'a> { diff --git a/storage/storage-interface/src/errors.rs b/storage/storage-interface/src/errors.rs index 8e380126700fa..7acb9e202024e 
100644 --- a/storage/storage-interface/src/errors.rs +++ b/storage/storage-interface/src/errors.rs @@ -3,7 +3,7 @@ // SPDX-License-Identifier: Apache-2.0 //! This module defines error types used by `AptosDB`. -use aptos_types::state_store::errors::StateviewError; +use aptos_types::state_store::errors::StateViewError; use std::sync::mpsc::RecvError; use thiserror::Error; @@ -65,22 +65,22 @@ impl From for AptosDbError { } } -impl From for StateviewError { +impl From for StateViewError { fn from(error: AptosDbError) -> Self { match error { - AptosDbError::NotFound(msg) => StateviewError::NotFound(msg), - AptosDbError::Other(msg) => StateviewError::Other(msg), - _ => StateviewError::Other(format!("{}", error)), + AptosDbError::NotFound(msg) => StateViewError::NotFound(msg), + AptosDbError::Other(msg) => StateViewError::Other(msg), + _ => StateViewError::Other(format!("{}", error)), } } } -impl From for AptosDbError { - fn from(error: StateviewError) -> Self { +impl From for AptosDbError { + fn from(error: StateViewError) -> Self { match error { - StateviewError::NotFound(msg) => AptosDbError::NotFound(msg), - StateviewError::Other(msg) => AptosDbError::Other(msg), - StateviewError::BcsError(err) => AptosDbError::BcsError(err.to_string()), + StateViewError::NotFound(msg) => AptosDbError::NotFound(msg), + StateViewError::Other(msg) => AptosDbError::Other(msg), + StateViewError::BcsError(err) => AptosDbError::BcsError(err.to_string()), } } } diff --git a/storage/storage-interface/src/executed_trees.rs b/storage/storage-interface/src/executed_trees.rs index 58c6194fa8668..9f3ad490c7f51 100644 --- a/storage/storage-interface/src/executed_trees.rs +++ b/storage/storage-interface/src/executed_trees.rs @@ -2,18 +2,21 @@ // SPDX-License-Identifier: Apache-2.0 use crate::{ - async_proof_fetcher::AsyncProofFetcher, cached_state_view::CachedStateView, - state_delta::StateDelta, DbReader, + state_store::{ + state_delta::StateDelta, + state_view::{async_proof_fetcher::AsyncProofFetcher, cached_state_view::CachedStateView}, + }, + DbReader, }; use aptos_crypto::HashValue; use aptos_types::{ proof::accumulator::{InMemoryAccumulator, InMemoryTransactionAccumulator}, - state_store::{errors::StateviewError, state_storage_usage::StateStorageUsage, StateViewId}, + state_store::{errors::StateViewError, state_storage_usage::StateStorageUsage, StateViewId}, transaction::Version, }; use std::sync::Arc; -type Result = std::result::Result; +type Result = std::result::Result; /// A wrapper of the in-memory state sparse merkle tree and the transaction accumulator that /// represent a specific state collectively. Usually it is a state after executing a block. 
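The storage-interface/src/errors.rs hunk above renames `StateviewError` to `StateViewError` and keeps the bidirectional `From` conversions with `AptosDbError`, while executed_trees.rs re-points its local `Result` alias at the renamed type. The sketch below illustrates that conversion pattern with stand-in enums (`DbError`, `ViewError`) rather than the real Aptos types; the names here are illustrative, though errors.rs itself does use `thiserror` as shown above.

```rust
use thiserror::Error;

#[derive(Debug, Error)]
pub enum DbError {
    #[error("not found: {0}")]
    NotFound(String),
    #[error("{0}")]
    Other(String),
}

#[derive(Debug, Error)]
pub enum ViewError {
    #[error("not found: {0}")]
    NotFound(String),
    #[error("{0}")]
    Other(String),
}

// Map each variant across the boundary, in both directions, so `?` can
// convert errors implicitly whichever side a function returns.
impl From<DbError> for ViewError {
    fn from(e: DbError) -> Self {
        match e {
            DbError::NotFound(msg) => ViewError::NotFound(msg),
            DbError::Other(msg) => ViewError::Other(msg),
        }
    }
}

impl From<ViewError> for DbError {
    fn from(e: ViewError) -> Self {
        match e {
            ViewError::NotFound(msg) => DbError::NotFound(msg),
            ViewError::Other(msg) => DbError::Other(msg),
        }
    }
}

// Local alias, analogous to the Result alias adjusted in executed_trees.rs.
type ViewResult<T> = Result<T, ViewError>;

fn read_from_db(key: &str) -> Result<String, DbError> {
    Err(DbError::NotFound(key.to_string()))
}

fn get_state_value(key: &str) -> ViewResult<String> {
    // The `?` operator applies From<DbError> for ViewError automatically.
    Ok(read_from_db(key)?)
}

fn main() {
    println!("{:?}", get_state_value("state_key"));
}
```

Keeping both `From` impls next to the error definitions means callers on either side of the storage/state-view boundary only ever use `?`, which is why the rename in this diff is mostly mechanical: only the impl headers and the match arms change.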
diff --git a/storage/storage-interface/src/lib.rs b/storage/storage-interface/src/lib.rs index a22b9c4d9bfab..ac92e5e39f663 100644 --- a/storage/storage-interface/src/lib.rs +++ b/storage/storage-interface/src/lib.rs @@ -34,18 +34,14 @@ use serde::{Deserialize, Serialize}; use std::{collections::HashMap, sync::Arc}; use thiserror::Error; -pub mod async_proof_fetcher; pub mod block_info; -pub mod cached_state_view; pub mod chunk_to_commit; pub mod errors; mod executed_trees; mod metrics; #[cfg(any(test, feature = "fuzzing"))] pub mod mock; -pub mod sharded_state_update_refs; -pub mod state_delta; -pub mod state_view; +pub mod state_store; use crate::chunk_to_commit::ChunkToCommit; use aptos_scratchpad::SparseMerkleTree; diff --git a/storage/storage-interface/src/state_store/mod.rs b/storage/storage-interface/src/state_store/mod.rs new file mode 100644 index 0000000000000..e6e30c8654052 --- /dev/null +++ b/storage/storage-interface/src/state_store/mod.rs @@ -0,0 +1,9 @@ +// Copyright (c) Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +pub mod sharded_state_update_refs; +pub mod sharded_state_updates; +pub mod state_delta; +pub mod state_view; + +pub const NUM_STATE_SHARDS: usize = 16; diff --git a/storage/storage-interface/src/sharded_state_update_refs.rs b/storage/storage-interface/src/state_store/sharded_state_update_refs.rs similarity index 93% rename from storage/storage-interface/src/sharded_state_update_refs.rs rename to storage/storage-interface/src/state_store/sharded_state_update_refs.rs index f15824f341792..f98ba4732c28e 100644 --- a/storage/storage-interface/src/sharded_state_update_refs.rs +++ b/storage/storage-interface/src/state_store/sharded_state_update_refs.rs @@ -1,10 +1,10 @@ // Copyright (c) Aptos Foundation // SPDX-License-Identifier: Apache-2.0 -use crate::metrics::TIMER; +use crate::{metrics::TIMER, state_store::NUM_STATE_SHARDS}; use aptos_metrics_core::TimerHelper; use aptos_types::{ - state_store::{state_key::StateKey, state_value::StateValue, NUM_STATE_SHARDS}, + state_store::{state_key::StateKey, state_value::StateValue}, write_set::WriteSet, }; use arr_macro::arr; diff --git a/storage/storage-interface/src/state_store/sharded_state_updates.rs b/storage/storage-interface/src/state_store/sharded_state_updates.rs new file mode 100644 index 0000000000000..4abf2c5e9be9a --- /dev/null +++ b/storage/storage-interface/src/state_store/sharded_state_updates.rs @@ -0,0 +1,62 @@ +// Copyright (c) Aptos Foundation +// SPDX-License-Identifier: Apache-2.0 + +use aptos_experimental_runtimes::thread_manager::THREAD_MANAGER; +use aptos_types::state_store::{state_key::StateKey, state_value::StateValue}; +use arr_macro::arr; +use rayon::iter::{ + IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, + IntoParallelRefMutIterator, ParallelIterator, +}; +use std::collections::HashMap; + +#[derive(Clone, Debug)] +pub struct ShardedStateUpdates { + pub shards: [HashMap>; 16], +} + +impl ShardedStateUpdates { + pub fn new_empty() -> Self { + Self { + shards: arr![HashMap::new(); 16], + } + } + + pub fn all_shards_empty(&self) -> bool { + self.shards.iter().all(|shard| shard.is_empty()) + } + + pub fn total_len(&self) -> usize { + self.shards.iter().map(|shard| shard.len()).sum() + } + + pub fn merge(&mut self, other: Self) { + THREAD_MANAGER.get_exe_cpu_pool().install(|| { + self.shards + .par_iter_mut() + .zip_eq(other.shards.into_par_iter()) + .for_each(|(l, r)| { + l.extend(r); + }) + }) + } + + pub fn clone_merge(&mut self, other: &Self) { + 
THREAD_MANAGER.get_exe_cpu_pool().install(|| { + self.shards + .par_iter_mut() + .zip_eq(other.shards.par_iter()) + .for_each(|(l, r)| { + l.extend(r.clone()); + }) + }) + } + + pub fn insert( + &mut self, + key: StateKey, + value: Option, + ) -> Option> { + self.shards[key.get_shard_id() as usize].insert(key, value) + } +} diff --git a/storage/storage-interface/src/state_delta.rs b/storage/storage-interface/src/state_store/state_delta.rs similarity index 95% rename from storage/storage-interface/src/state_delta.rs rename to storage/storage-interface/src/state_store/state_delta.rs index bffeb6e03ef99..6e5e0a456f3c7 100644 --- a/storage/storage-interface/src/state_delta.rs +++ b/storage/storage-interface/src/state_store/state_delta.rs @@ -1,13 +1,12 @@ // Copyright © Aptos Foundation // SPDX-License-Identifier: Apache-2.0 +use crate::state_store::sharded_state_updates::ShardedStateUpdates; use aptos_crypto::HashValue; use aptos_drop_helper::DropHelper; use aptos_scratchpad::SparseMerkleTree; use aptos_types::{ - state_store::{ - state_storage_usage::StateStorageUsage, state_value::StateValue, ShardedStateUpdates, - }, + state_store::{state_storage_usage::StateStorageUsage, state_value::StateValue}, transaction::Version, }; diff --git a/storage/storage-interface/src/async_proof_fetcher.rs b/storage/storage-interface/src/state_store/state_view/async_proof_fetcher.rs similarity index 100% rename from storage/storage-interface/src/async_proof_fetcher.rs rename to storage/storage-interface/src/state_store/state_view/async_proof_fetcher.rs diff --git a/storage/storage-interface/src/cached_state_view.rs b/storage/storage-interface/src/state_store/state_view/cached_state_view.rs similarity index 97% rename from storage/storage-interface/src/cached_state_view.rs rename to storage/storage-interface/src/state_store/state_view/cached_state_view.rs index cf9571cf10c44..271f395e9dd7c 100644 --- a/storage/storage-interface/src/cached_state_view.rs +++ b/storage/storage-interface/src/state_store/state_view/cached_state_view.rs @@ -2,7 +2,9 @@ // SPDX-License-Identifier: Apache-2.0 use crate::{ - async_proof_fetcher::AsyncProofFetcher, metrics::TIMER, state_view::DbStateView, DbReader, + metrics::TIMER, + state_store::state_view::{async_proof_fetcher::AsyncProofFetcher, db_state_view::DbStateView}, + DbReader, }; use aptos_crypto::{hash::CryptoHash, HashValue}; use aptos_experimental_runtimes::thread_manager::THREAD_MANAGER; @@ -10,7 +12,7 @@ use aptos_scratchpad::{FrozenSparseMerkleTree, SparseMerkleTree, StateStoreStatu use aptos_types::{ proof::SparseMerkleProofExt, state_store::{ - errors::StateviewError, state_key::StateKey, state_storage_usage::StateStorageUsage, + errors::StateViewError, state_key::StateKey, state_storage_usage::StateStorageUsage, state_value::StateValue, StateViewId, TStateView, }, transaction::Version, @@ -35,7 +37,7 @@ static IO_POOL: Lazy = Lazy::new(|| { .unwrap() }); -type Result = std::result::Result; +type Result = std::result::Result; type StateCacheShard = DashMap, Option)>; // Sharded by StateKey.get_shard_id(). 
The version in the value indicates there is an entry on that @@ -163,7 +165,7 @@ impl CachedStateView { let speculative_state = speculative_state.freeze(&base_smt); let snapshot = reader .get_state_snapshot_before(next_version) - .map_err(Into::::into)?; + .map_err(Into::::into)?; Ok(Self::new_impl( id, diff --git a/storage/storage-interface/src/state_view.rs b/storage/storage-interface/src/state_store/state_view/db_state_view.rs similarity index 80% rename from storage/storage-interface/src/state_view.rs rename to storage/storage-interface/src/state_store/state_view/db_state_view.rs index 194cb42b56a69..5756a00db15e4 100644 --- a/storage/storage-interface/src/state_view.rs +++ b/storage/storage-interface/src/state_store/state_view/db_state_view.rs @@ -1,5 +1,4 @@ -// Copyright © Aptos Foundation -// Parts of the project are originally copyright © Meta Platforms, Inc. +// Copyright (c) Aptos Foundation // SPDX-License-Identifier: Apache-2.0 use crate::DbReader; @@ -7,15 +6,13 @@ use aptos_crypto::{hash::CryptoHash, HashValue}; use aptos_types::{ ledger_info::LedgerInfo, state_store::{ - errors::StateviewError, state_key::StateKey, state_storage_usage::StateStorageUsage, - state_value::StateValue, TStateView, + errors::StateViewError, state_key::StateKey, state_storage_usage::StateStorageUsage, + state_value::StateValue, StateViewResult, TStateView, }, transaction::Version, }; use std::sync::Arc; -type Result = std::result::Result; - #[derive(Clone)] pub struct DbStateView { db: Arc, @@ -26,7 +23,7 @@ pub struct DbStateView { } impl DbStateView { - fn get(&self, key: &StateKey) -> Result> { + fn get(&self, key: &StateKey) -> StateViewResult> { if let Some(version) = self.version { if let Some(root_hash) = self.maybe_verify_against_state_root_hash { // DB doesn't support returning proofs for buffered state, so only optionally @@ -49,11 +46,11 @@ impl DbStateView { impl TStateView for DbStateView { type Key = StateKey; - fn get_state_value(&self, state_key: &StateKey) -> Result> { + fn get_state_value(&self, state_key: &StateKey) -> StateViewResult> { self.get(state_key).map_err(Into::into) } - fn get_usage(&self) -> Result { + fn get_usage(&self) -> StateViewResult { self.db .get_state_storage_usage(self.version) .map_err(Into::into) @@ -61,27 +58,27 @@ impl TStateView for DbStateView { } pub trait LatestDbStateCheckpointView { - fn latest_state_checkpoint_view(&self) -> Result; + fn latest_state_checkpoint_view(&self) -> StateViewResult; } impl LatestDbStateCheckpointView for Arc { - fn latest_state_checkpoint_view(&self) -> Result { + fn latest_state_checkpoint_view(&self) -> StateViewResult { Ok(DbStateView { db: self.clone(), version: self .get_latest_state_checkpoint_version() - .map_err(Into::::into)?, + .map_err(Into::::into)?, maybe_verify_against_state_root_hash: None, }) } } pub trait DbStateViewAtVersion { - fn state_view_at_version(&self, version: Option) -> Result; + fn state_view_at_version(&self, version: Option) -> StateViewResult; } impl DbStateViewAtVersion for Arc { - fn state_view_at_version(&self, version: Option) -> Result { + fn state_view_at_version(&self, version: Option) -> StateViewResult { Ok(DbStateView { db: self.clone(), version, @@ -95,7 +92,7 @@ pub trait VerifiedStateViewAtVersion { &self, version: Option, ledger_info: &LedgerInfo, - ) -> Result; + ) -> StateViewResult; } impl VerifiedStateViewAtVersion for Arc { @@ -103,7 +100,7 @@ impl VerifiedStateViewAtVersion for Arc { &self, version: Option, ledger_info: &LedgerInfo, - ) -> Result { + ) -> 
StateViewResult { let db = self.clone(); if let Some(version) = version { @@ -115,7 +112,7 @@ impl VerifiedStateViewAtVersion for Arc { .proof .transaction_info .state_checkpoint_hash() - .ok_or_else(|| StateviewError::NotFound("state_checkpoint_hash".to_string()))?; + .ok_or_else(|| StateViewError::NotFound("state_checkpoint_hash".to_string()))?; Ok(DbStateView { db, diff --git a/storage/storage-interface/src/state_store/state_view/mod.rs b/storage/storage-interface/src/state_store/state_view/mod.rs new file mode 100644 index 0000000000000..8600b7101ed50 --- /dev/null +++ b/storage/storage-interface/src/state_store/state_view/mod.rs @@ -0,0 +1,7 @@ +// Copyright © Aptos Foundation +// Parts of the project are originally copyright © Meta Platforms, Inc. +// SPDX-License-Identifier: Apache-2.0 + +pub mod async_proof_fetcher; +pub mod cached_state_view; +pub mod db_state_view; diff --git a/third_party/move/move-compiler-v2/src/file_format_generator/function_generator.rs b/third_party/move/move-compiler-v2/src/file_format_generator/function_generator.rs index e35c6257716cb..3a61f63bd19e6 100644 --- a/third_party/move/move-compiler-v2/src/file_format_generator/function_generator.rs +++ b/third_party/move/move-compiler-v2/src/file_format_generator/function_generator.rs @@ -159,8 +159,15 @@ impl<'a> FunctionGenerator<'a> { .get_extension::() .expect("Options is available"); if options.experiment_on(Experiment::PEEPHOLE_OPTIMIZATION) { - // TODO: fix source mapping (#14167) - peephole_optimizer::run(&mut code); + let transformed_code_chunk = peephole_optimizer::optimize(&code.code); + // Fix the source map for the optimized code. + fun_gen + .gen + .source_map + .remap_code_map(def_idx, transformed_code_chunk.original_offsets) + .expect(SOURCE_MAP_OK); + // Replace the code with the optimized one. + code.code = transformed_code_chunk.code; } } else { // Write the spec block table back to the environment. diff --git a/third_party/move/move-compiler-v2/src/file_format_generator/peephole_optimizer.rs b/third_party/move/move-compiler-v2/src/file_format_generator/peephole_optimizer.rs index 70919d6dafa7a..812e662f70b3e 100644 --- a/third_party/move/move-compiler-v2/src/file_format_generator/peephole_optimizer.rs +++ b/third_party/move/move-compiler-v2/src/file_format_generator/peephole_optimizer.rs @@ -12,17 +12,18 @@ pub mod reducible_pairs; use inefficient_loads::InefficientLoads; use move_binary_format::{ control_flow_graph::{ControlFlowGraph, VMControlFlowGraph}, - file_format::{Bytecode, CodeOffset, CodeUnit}, + file_format::{Bytecode, CodeOffset}, }; -use optimizers::{BasicBlockOptimizer, WindowProcessor}; +use optimizers::{BasicBlockOptimizer, TransformedCodeChunk, WindowProcessor}; use reducible_pairs::ReduciblePairs; -use std::{collections::BTreeMap, mem}; +use std::collections::BTreeMap; /// Pre-requisite: `code` should not have spec block associations. /// Run peephole optimizers on the given `code`, possibly modifying it. -pub fn run(code: &mut CodeUnit) { - let original_code = mem::take(&mut code.code); - code.code = BasicBlockOptimizerPipeline::default().optimize(original_code); +/// Returns the optimized code, along with mapping to original offsets +/// in `code`. +pub fn optimize(code: &[Bytecode]) -> TransformedCodeChunk { + BasicBlockOptimizerPipeline::default().optimize(code) } /// A pipeline of basic block optimizers. @@ -44,23 +45,24 @@ impl BasicBlockOptimizerPipeline { /// Run the basic block optimization pipeline on the given `code`, /// returning new (possibly optimized) code. 
- pub fn optimize(&self, mut code: Vec) -> Vec { - let mut cfg = VMControlFlowGraph::new(&code); + pub fn optimize(&self, code: &[Bytecode]) -> TransformedCodeChunk { + let mut code_chunk = TransformedCodeChunk::make_from(code); + let mut cfg = VMControlFlowGraph::new(&code_chunk.code); loop { - let optimized_blocks = self.get_optimized_blocks(&code, &cfg); - let optimized_code = Self::flatten_blocks(optimized_blocks); - let optimized_cfg = VMControlFlowGraph::new(&optimized_code); + let optimized_blocks = self.get_optimized_blocks(&code_chunk.code, &cfg); + let optimized_code_chunk = Self::flatten_blocks(optimized_blocks); + let optimized_cfg = VMControlFlowGraph::new(&optimized_code_chunk.code); if optimized_cfg.num_blocks() == cfg.num_blocks() { // Proxy for convergence of basic block optimizations. // This is okay for peephole optimizations that merge basic blocks. // But may need to revisit if we have peephole optimizations that can // split a basic block. - return optimized_code; + return optimized_code_chunk; } else { // Number of basic blocks changed, re-run the basic-block // optimization pipeline again on the new basic blocks. cfg = optimized_cfg; - code = optimized_code; + code_chunk = optimized_code_chunk.remap(code_chunk.original_offsets); } } } @@ -71,14 +73,16 @@ impl BasicBlockOptimizerPipeline { &self, code: &[Bytecode], cfg: &VMControlFlowGraph, - ) -> BTreeMap> { + ) -> BTreeMap { let mut optimized_blocks = BTreeMap::new(); for block_id in cfg.blocks() { let start = cfg.block_start(block_id); let end = cfg.block_end(block_id); // `end` is inclusive - let mut block = code[start as usize..=end as usize].to_vec(); + let mut block = TransformedCodeChunk::make_from(&code[start as usize..=end as usize]); for bb_optimizer in self.optimizers.iter() { - block = bb_optimizer.optimize(&block); + block = bb_optimizer + .optimize(&block.code) + .remap(block.original_offsets); } optimized_blocks.insert(start, block); } @@ -86,14 +90,16 @@ impl BasicBlockOptimizerPipeline { } /// Flatten the individually optimized basic blocks into a single code vector. - fn flatten_blocks(optimized_blocks: BTreeMap>) -> Vec { - let mut optimized_code = vec![]; + fn flatten_blocks( + optimized_blocks: BTreeMap, + ) -> TransformedCodeChunk { + let mut optimized_code = TransformedCodeChunk::empty(); let mut block_mapping = BTreeMap::new(); - for (offset, mut block) in optimized_blocks { - block_mapping.insert(offset, optimized_code.len() as CodeOffset); - optimized_code.append(&mut block); + for (offset, block) in optimized_blocks { + block_mapping.insert(offset, optimized_code.code.len() as CodeOffset); + optimized_code.extend(block, offset); } - Self::remap_branch_targets(&mut optimized_code, &block_mapping); + Self::remap_branch_targets(&mut optimized_code.code, &block_mapping); optimized_code } diff --git a/third_party/move/move-compiler-v2/src/file_format_generator/peephole_optimizer/inefficient_loads.rs b/third_party/move/move-compiler-v2/src/file_format_generator/peephole_optimizer/inefficient_loads.rs index 6aa84b2812c29..16719913cc029 100644 --- a/third_party/move/move-compiler-v2/src/file_format_generator/peephole_optimizer/inefficient_loads.rs +++ b/third_party/move/move-compiler-v2/src/file_format_generator/peephole_optimizer/inefficient_loads.rs @@ -24,8 +24,11 @@ //! cannot use it without a subsequent store. //! So, skipping the store to `u` is safe. 
-use crate::file_format_generator::peephole_optimizer::optimizers::WindowOptimizer; -use move_binary_format::file_format::Bytecode; +use crate::file_format_generator::peephole_optimizer::optimizers::{ + TransformedCodeChunk, WindowOptimizer, +}; +use move_binary_format::file_format::{Bytecode, CodeOffset}; +use std::iter; /// An optimizer for inefficient loads. pub struct InefficientLoads; @@ -37,7 +40,7 @@ impl InefficientLoads { } impl WindowOptimizer for InefficientLoads { - fn optimize_window(&self, window: &[Bytecode]) -> Option<(Vec, usize)> { + fn optimize_window(&self, window: &[Bytecode]) -> Option<(TransformedCodeChunk, usize)> { use Bytecode::*; if window.len() < Self::MIN_WINDOW_SIZE { return None; @@ -61,8 +64,14 @@ impl WindowOptimizer for InefficientLoads { // We have reached the end of the pattern (point 4 in the module documentation). let sequence = &window[2..index + 2]; let load_constant = &window[0..1]; + let transformed_code = [sequence, load_constant].concat(); + // original_offsets are 2..index+2 (representing `sequence`), + // followed by 0 (representing `load_constant`). + let original_offsets = (2..(index + 2) as CodeOffset) + .chain(iter::once(0)) + .collect::>(); return Some(( - [sequence, load_constant].concat(), + TransformedCodeChunk::new(transformed_code, original_offsets), index + Self::MIN_WINDOW_SIZE, )); }, diff --git a/third_party/move/move-compiler-v2/src/file_format_generator/peephole_optimizer/optimizers.rs b/third_party/move/move-compiler-v2/src/file_format_generator/peephole_optimizer/optimizers.rs index 0bcabb5675092..d29335b56b3a5 100644 --- a/third_party/move/move-compiler-v2/src/file_format_generator/peephole_optimizer/optimizers.rs +++ b/third_party/move/move-compiler-v2/src/file_format_generator/peephole_optimizer/optimizers.rs @@ -3,33 +3,91 @@ //! This module contains setup for basic block peephole optimizers. -use move_binary_format::file_format::Bytecode; +use move_binary_format::file_format::{Bytecode, CodeOffset}; + +/// A contiguous chunk of bytecode that may have been transformed from some +/// other "original" contiguous chunk of bytecode. +pub struct TransformedCodeChunk { + /// The transformed bytecode. + pub code: Vec, + /// Mapping to the original offsets. + /// The instruction in `code[i]` corresponds to the instruction at + /// `original_offsets[i]` in the original bytecode. + pub original_offsets: Vec, +} + +impl TransformedCodeChunk { + /// Create an instance of `TransformedCodeChunk` from the given `code` + /// and `original_offsets`. + pub fn new(code: Vec, original_offsets: Vec) -> Self { + debug_assert_eq!(code.len(), original_offsets.len()); + Self { + code, + original_offsets, + } + } + + /// Create an empty chunk. + pub fn empty() -> Self { + Self::new(vec![], vec![]) + } + + /// Extend this chunk with another `other` chunk. + /// The `original_offsets` for the `other` chunk are incremented by `adjust`. + pub fn extend(&mut self, other: TransformedCodeChunk, adjust: CodeOffset) { + self.code.extend(other.code); + self.original_offsets + .extend(other.original_offsets.into_iter().map(|off| off + adjust)); + } + + /// Make a new chunk from the given `code`. + pub fn make_from(code: &[Bytecode]) -> Self { + Self::new(code.to_vec(), Vec::from_iter(0..code.len() as CodeOffset)) + } + + /// Remap the original offsets using the given `previous_offsets`. 
+ pub fn remap(self, previous_offsets: Vec) -> Self { + Self::new( + self.code, + self.original_offsets + .into_iter() + .map(|off| previous_offsets[off as usize]) + .collect(), + ) + } +} /// A basic block optimizer that optimizes a basic block of bytecode. pub trait BasicBlockOptimizer { - /// Given a basic `block`, return its optimized version. - fn optimize(&self, block: &[Bytecode]) -> Vec; + /// Given a basic `block`, return its optimized version [*]. + /// + /// [*] The optimized version returned via `TransformedCodeChunk` maintains a + /// mapping to the original offsets in `block`. + fn optimize(&self, block: &[Bytecode]) -> TransformedCodeChunk; } /// An optimizer for a window of bytecode within a basic block. /// The window is always a suffix of a basic block. pub trait WindowOptimizer { /// Given a `window` of bytecode, return a tuple containing: - /// 1. an optimized version of a non-empty prefix of the `window`. + /// 1. an optimized version of a non-empty prefix of the `window`. [*] /// 2. size of this prefix (should be non-zero). /// If `None` is returned, the `window` is not optimized. - fn optimize_window(&self, window: &[Bytecode]) -> Option<(Vec, usize)>; + /// + /// [*] When an optimized version is returned, the corresponding `TransformedCodeChunk` + /// maintains a mapping to the original offsets of `window`. + fn optimize_window(&self, window: &[Bytecode]) -> Option<(TransformedCodeChunk, usize)>; } /// A processor to perform window optimizations of a particular style on a basic block. pub struct WindowProcessor(T); impl BasicBlockOptimizer for WindowProcessor { - fn optimize(&self, block: &[Bytecode]) -> Vec { - let mut old_block = block.to_vec(); + fn optimize(&self, block: &[Bytecode]) -> TransformedCodeChunk { + let mut old_block = TransformedCodeChunk::make_from(block); // Run single passes until code stops changing. - while let Some(new_block) = self.optimize_single_pass(&old_block) { - old_block = new_block; + while let Some(new_chunk) = self.optimize_single_pass(&old_block.code) { + old_block = new_chunk.remap(old_block.original_offsets); } old_block } @@ -43,19 +101,22 @@ impl WindowProcessor { /// Run a single pass of the window peephole optimization on the given basic `block`. /// If the block cannot be optimized further, return `None`. - fn optimize_single_pass(&self, block: &[Bytecode]) -> Option> { + fn optimize_single_pass(&self, block: &[Bytecode]) -> Option { let mut changed = false; - let mut new_block: Vec = vec![]; + let mut new_block = TransformedCodeChunk::empty(); let mut left = 0; while left < block.len() { let window = &block[left..]; if let Some((optimized_window, consumed)) = self.0.optimize_window(window) { debug_assert!(consumed != 0); - new_block.extend(optimized_window); + new_block.extend(optimized_window, left as CodeOffset); left += consumed; changed = true; } else { - new_block.push(block[left].clone()); + new_block.extend( + TransformedCodeChunk::make_from(&block[left..left + 1]), + left as CodeOffset, + ); left += 1; } } diff --git a/third_party/move/move-compiler-v2/src/file_format_generator/peephole_optimizer/reducible_pairs.rs b/third_party/move/move-compiler-v2/src/file_format_generator/peephole_optimizer/reducible_pairs.rs index ed6813e4196fa..55319dbba1ef6 100644 --- a/third_party/move/move-compiler-v2/src/file_format_generator/peephole_optimizer/reducible_pairs.rs +++ b/third_party/move/move-compiler-v2/src/file_format_generator/peephole_optimizer/reducible_pairs.rs @@ -39,7 +39,9 @@ //! 
Finally, note that fixed window optimizations are performed on windows within a basic //! block, not spanning across multiple basic blocks. -use crate::file_format_generator::peephole_optimizer::optimizers::WindowOptimizer; +use crate::file_format_generator::peephole_optimizer::optimizers::{ + TransformedCodeChunk, WindowOptimizer, +}; use move_binary_format::file_format::Bytecode; pub struct ReduciblePairs; @@ -49,7 +51,7 @@ impl ReduciblePairs { } impl WindowOptimizer for ReduciblePairs { - fn optimize_window(&self, window: &[Bytecode]) -> Option<(Vec, usize)> { + fn optimize_window(&self, window: &[Bytecode]) -> Option<(TransformedCodeChunk, usize)> { use Bytecode::*; if window.len() < Self::WINDOW_SIZE { return None; @@ -59,13 +61,17 @@ impl WindowOptimizer for ReduciblePairs { (StLoc(u), MoveLoc(v)) | (CopyLoc(u), StLoc(v)) | (MoveLoc(u), StLoc(v)) if *u == *v => { - vec![] + TransformedCodeChunk::new(vec![], vec![]) }, - (CopyLoc(_), Pop) => vec![], - (LdTrue, BrTrue(target)) | (LdFalse, BrFalse(target)) => vec![Branch(*target)], - (LdTrue, BrFalse(_)) | (LdFalse, BrTrue(_)) => vec![], - (Not, BrFalse(target)) => vec![BrTrue(*target)], - (Not, BrTrue(target)) => vec![BrFalse(*target)], + (CopyLoc(_), Pop) => TransformedCodeChunk::new(vec![], vec![]), + (LdTrue, BrTrue(target)) | (LdFalse, BrFalse(target)) => { + TransformedCodeChunk::new(vec![Branch(*target)], vec![0]) + }, + (LdTrue, BrFalse(_)) | (LdFalse, BrTrue(_)) => { + TransformedCodeChunk::new(vec![], vec![]) + }, + (Not, BrFalse(target)) => TransformedCodeChunk::new(vec![BrTrue(*target)], vec![0]), + (Not, BrTrue(target)) => TransformedCodeChunk::new(vec![BrFalse(*target)], vec![0]), _ => return None, }; Some((optimized, Self::WINDOW_SIZE)) diff --git a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/module_call_constraints_not_satisfied.exp b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/module_call_constraints_not_satisfied.exp index f6d70f0d9eaaf..bcc27300553be 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/module_call_constraints_not_satisfied.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/module_call_constraints_not_satisfied.exp @@ -1,213 +1,219 @@ Diagnostics: error: type `S` is missing required ability `key` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:28:9 + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:32:9 │ 15 │ fun both(_r: R, _c: C) { │ - declaration of type parameter `R` · -28 │ both(S{}, Coin{}); +32 │ both(S{}, Coin{}); │ ^^^^ │ = required by instantiating type parameter `R:key` of function `both` -error: type `Coin` is missing required ability `copy` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:29:9 - │ -15 │ fun both(_r: R, _c: C) { - │ - declaration of type parameter `C` - · -29 │ both(0, Coin{}) - │ ^^^^ +error: constraint `integer` does not have required ability `key` + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:33:14 │ - = required by instantiating type parameter `C:copy` of function `both` +33 │ both(0, Coin{}) + │ ^ error: type `Box` is missing required ability `key` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:33:9 + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:37:9 │ 15 │ fun both(_r: R, _c: C) { │ - declaration of type parameter `R` · -33 │ both(new_box(), 
new_box()) +37 │ both(new_box(), new_box()) │ ^^^^ │ = required by instantiating type parameter `R:key` of function `both` error: type `Box3` is missing required ability `key` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:37:9 + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:41:9 │ -23 │ fun rsrc(_r: R) { +27 │ fun rsrc(_r: R) { │ - declaration of type parameter `R` · -37 │ rsrc(new_box3()); +41 │ rsrc(new_box3()); │ ^^^^ │ = required by instantiating type parameter `R:key` of function `rsrc` error: type `R` is missing required ability `copy` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:39:9 + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:43:9 │ -19 │ fun cpy(_c: C) { +23 │ fun cpy(_c: C) { │ - declaration of type parameter `C` · -39 │ cpy(new_box3()); +43 │ cpy(new_box3()); │ ^^^ │ = required by instantiating type parameter `T1` of struct `Box3` = required by instantiating type parameter `C:copy` of function `cpy` error: type `C` is missing required ability `copy` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:40:9 + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:44:9 │ -19 │ fun cpy(_c: C) { +23 │ fun cpy(_c: C) { │ - declaration of type parameter `C` · -40 │ cpy(new_box3()); +44 │ cpy(new_box3()); │ ^^^ │ = required by instantiating type parameter `T1` of struct `Box3` = required by instantiating type parameter `C:copy` of function `cpy` error: type `C` is missing required ability `copy` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:41:9 + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:45:9 │ -19 │ fun cpy(_c: C) { +23 │ fun cpy(_c: C) { │ - declaration of type parameter `C` · -41 │ cpy(new_box3()); +45 │ cpy(new_box3()); │ ^^^ │ = required by instantiating type parameter `T1` of struct `Box3` = required by instantiating type parameter `C:copy` of function `cpy` error: type `C` is missing required ability `copy` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:43:9 + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:47:9 │ -19 │ fun cpy(_c: C) { +23 │ fun cpy(_c: C) { │ - declaration of type parameter `C` · -43 │ cpy(new_box3()); +47 │ cpy(new_box3()); │ ^^^ │ = required by instantiating type parameter `T1` of struct `Box3` = required by instantiating type parameter `C:copy` of function `cpy` error: type `R` is missing required ability `copy` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:44:9 + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:48:9 │ -19 │ fun cpy(_c: C) { +23 │ fun cpy(_c: C) { │ - declaration of type parameter `C` · -44 │ cpy(new_box3()); +48 │ cpy(new_box3()); │ ^^^ │ = required by instantiating type parameter `T1` of struct `Box3` = required by instantiating type parameter `C:copy` of function `cpy` error: type `R` is missing required ability `copy` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:45:9 + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:49:9 │ -19 │ fun cpy(_c: C) { +23 │ fun cpy(_c: C) { │ - declaration of type parameter `C` · -45 │ cpy(new_box3()); +49 │ cpy(new_box3()); │ ^^^ │ = required by 
instantiating type parameter `T1` of struct `Box3` = required by instantiating type parameter `C:copy` of function `cpy` error: type `R` is missing required ability `copy` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:47:9 + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:51:9 │ -19 │ fun cpy(_c: C) { +23 │ fun cpy(_c: C) { │ - declaration of type parameter `C` · -47 │ cpy(new_box3()); +51 │ cpy(new_box3()); │ ^^^ │ = required by instantiating type parameter `T1` of struct `Box3` = required by instantiating type parameter `C:copy` of function `cpy` error: type `U` is missing required ability `copy` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:51:9 + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:55:9 │ -19 │ fun cpy(_c: C) { +23 │ fun cpy(_c: C) { │ - declaration of type parameter `C` · -51 │ cpy(new_box3()); +55 │ cpy(new_box3()); │ ^^^ │ = required by instantiating type parameter `T1` of struct `Box3` = required by instantiating type parameter `C:copy` of function `cpy` error: type `C` is missing required ability `copy` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:52:9 + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:56:9 │ -19 │ fun cpy(_c: C) { +23 │ fun cpy(_c: C) { │ - declaration of type parameter `C` · -52 │ cpy(new_box3()); +56 │ cpy(new_box3()); │ ^^^ │ = required by instantiating type parameter `T1` of struct `Box3` = required by instantiating type parameter `C:copy` of function `cpy` error: type `C` is missing required ability `copy` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:53:9 + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:57:9 │ -19 │ fun cpy(_c: C) { +23 │ fun cpy(_c: C) { │ - declaration of type parameter `C` · -53 │ cpy(new_box3()); +57 │ cpy(new_box3()); │ ^^^ │ = required by instantiating type parameter `T1` of struct `Box3` = required by instantiating type parameter `C:copy` of function `cpy` error: type `C` is missing required ability `copy` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:55:9 + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:59:9 │ -19 │ fun cpy(_c: C) { +23 │ fun cpy(_c: C) { │ - declaration of type parameter `C` · -55 │ cpy(new_box3()); +59 │ cpy(new_box3()); │ ^^^ │ = required by instantiating type parameter `T1` of struct `Box3` = required by instantiating type parameter `C:copy` of function `cpy` error: type `U` is missing required ability `copy` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:56:9 + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:60:9 │ -19 │ fun cpy(_c: C) { +23 │ fun cpy(_c: C) { │ - declaration of type parameter `C` · -56 │ cpy(new_box3()); +60 │ cpy(new_box3()); │ ^^^ │ = required by instantiating type parameter `T1` of struct `Box3` = required by instantiating type parameter `C:copy` of function `cpy` error: type `U` is missing required ability `copy` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:57:9 + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:61:9 │ -19 │ fun cpy(_c: C) { +23 │ fun cpy(_c: C) { │ - declaration of type parameter `C` · -57 │ cpy(new_box3()); 
+61 │ cpy(new_box3()); │ ^^^ │ = required by instantiating type parameter `T1` of struct `Box3` = required by instantiating type parameter `C:copy` of function `cpy` error: type `U` is missing required ability `copy` (type was inferred) - ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:59:9 + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:63:9 │ -19 │ fun cpy(_c: C) { +23 │ fun cpy(_c: C) { │ - declaration of type parameter `C` · -59 │ cpy(new_box3()); +63 │ cpy(new_box3()); │ ^^^ │ = required by instantiating type parameter `T1` of struct `Box3` = required by instantiating type parameter `C:copy` of function `cpy` + +error: type `Coin` is missing required ability `copy` (type was inferred) + ┌─ tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move:67:9 + │ +19 │ fun both_1(_r: R, _c: C) { + │ - declaration of type parameter `R` + · +67 │ both_1(Coin{}, 0) + │ ^^^^^^ + │ + = required by instantiating type parameter `R:copy` of function `both_1` diff --git a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move index 22470e4d2fbb4..7120211abf571 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move +++ b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/module_call_constraints_not_satisfied.move @@ -16,6 +16,10 @@ module 0x8675309::M { abort 0 } + fun both_1(_r: R, _c: C) { + abort 0 + } + fun cpy(_c: C) { abort 0 } @@ -58,4 +62,8 @@ module 0x8675309::M { cpy(new_box3()); } + + fun t4() { + both_1(Coin{}, 0) + } } diff --git a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/pack_constraint_not_satisfied.exp b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/pack_constraint_not_satisfied.exp index 4dc6e79fbcfaa..735d68e511b94 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/pack_constraint_not_satisfied.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/pack_constraint_not_satisfied.exp @@ -1,5 +1,11 @@ Diagnostics: +error: constraint `integer` does not have required ability `key` + ┌─ tests/ability-check/v1-typing/pack_constraint_not_satisfied.move:7:27 + │ +7 │ R {r:_ } = R { r: 0 }; + │ ^ + error: type `Coin` is missing required ability `drop` (type was inferred) ┌─ tests/ability-check/v1-typing/pack_constraint_not_satisfied.move:8:9 │ @@ -11,6 +17,17 @@ error: type `Coin` is missing required ability `drop` (type was inferred) │ = required by instantiating type parameter `T:drop` of struct `S` +error: type `R<*error*>` is missing required ability `key` (type was inferred) + ┌─ tests/ability-check/v1-typing/pack_constraint_not_satisfied.move:12:30 + │ + 3 │ struct R { r: T } + │ - declaration of type parameter `T` + · +12 │ R {r: R { r: _ } } = R { r: R { r: 0 }}; + │ ^ + │ + = required by instantiating type parameter `T:key` of struct `R` + error: type `R` is missing required ability `key` (type was inferred) ┌─ tests/ability-check/v1-typing/pack_constraint_not_satisfied.move:12:30 │ @@ -22,6 +39,12 @@ error: type `R` is missing required ability `key` (type was inferred) │ = required by instantiating type parameter `T:key` of struct `R` +error: constraint `integer` does not have required ability `key` + ┌─ tests/ability-check/v1-typing/pack_constraint_not_satisfied.move:12:44 + │ +12 │ R {r: R { 
r: _ } } = R { r: R { r: 0 }}; + │ ^ + error: type `Coin` is missing required ability `drop` (type was inferred) ┌─ tests/ability-check/v1-typing/pack_constraint_not_satisfied.move:13:16 │ diff --git a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/pack_constraint_not_satisfied2.exp b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/pack_constraint_not_satisfied2.exp index 159105ab0cca0..0310e87e78cca 100644 --- a/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/pack_constraint_not_satisfied2.exp +++ b/third_party/move/move-compiler-v2/tests/ability-check/v1-typing/pack_constraint_not_satisfied2.exp @@ -1,7 +1,7 @@ Diagnostics: -bug: unexpected type: _ - ┌─ tests/ability-check/v1-typing/pack_constraint_not_satisfied2.move:6:9 +error: constraint `integer` does not have required ability `key` + ┌─ tests/ability-check/v1-typing/pack_constraint_not_satisfied2.move:7:27 │ -6 │ fun t0() { - │ ^^ +7 │ R {r:_ } = R { r: 0 }; + │ ^ diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14629.exp b/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14629.exp new file mode 100644 index 0000000000000..74202644fcec6 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14629.exp @@ -0,0 +1,77 @@ +// -- Model dump before bytecode pipeline +module 0x8675309::M { + struct R { + r: T, + } + struct X { + r: T, + } + private fun t0() { + { + let y: R> = pack M::R>(pack M::X(0)); + { + let M::R>{ r: _r } = y; + Tuple() + } + } + } + private fun t0_u128() { + { + let y: R> = pack M::R>(pack M::X(18446744073709551616)); + { + let M::R>{ r: _r } = y; + Tuple() + } + } + } +} // end 0x8675309::M + +// -- Sourcified model before bytecode pipeline +module 0x8675309::M { + struct R { + r: T, + } + struct X has drop, key { + r: T, + } + fun t0() { + let y = R>{r: X{r: 0}}; + let R>{r: _r} = y; + } + fun t0_u128() { + let y = R>{r: X{r: 18446744073709551616u128}}; + let R>{r: _r} = y; + } +} + +============ initial bytecode ================ + +[variant baseline] +fun M::t0() { + var $t0: 0x8675309::M::R<0x8675309::M::X> + var $t1: 0x8675309::M::X + var $t2: u64 + var $t3: 0x8675309::M::X + 0: $t2 := 0 + 1: $t1 := pack 0x8675309::M::X($t2) + 2: $t0 := pack 0x8675309::M::R<0x8675309::M::X>($t1) + 3: $t3 := unpack 0x8675309::M::R<0x8675309::M::X>($t0) + 4: return () +} + + +[variant baseline] +fun M::t0_u128() { + var $t0: 0x8675309::M::R<0x8675309::M::X> + var $t1: 0x8675309::M::X + var $t2: u128 + var $t3: 0x8675309::M::X + 0: $t2 := 18446744073709551616 + 1: $t1 := pack 0x8675309::M::X($t2) + 2: $t0 := pack 0x8675309::M::R<0x8675309::M::X>($t1) + 3: $t3 := unpack 0x8675309::M::R<0x8675309::M::X>($t0) + 4: return () +} + + +============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14629.move b/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14629.move new file mode 100644 index 0000000000000..4d521eb2741ec --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14629.move @@ -0,0 +1,17 @@ +module 0x8675309::M { + const MAX_U64: u128 = 18446744073709551615; + struct R { r: T } + struct X has key, drop { + r: T + } + + fun t0() { + let y = R { r: X { r: 0} }; + let R { r: _r } = y; + } + + fun t0_u128() { + let y = R { r: X { r: MAX_U64 + 1} }; + let R { r: _r } = y; + } +} diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14629_fail.exp 
b/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14629_fail.exp new file mode 100644 index 0000000000000..2a80c7c6433bd --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14629_fail.exp @@ -0,0 +1,7 @@ + +Diagnostics: +error: constraint `integer` does not have required ability `key` + ┌─ tests/bytecode-generator/bug_14629_fail.move:6:32 + │ +6 │ let R { r: _ } = R {r: 0}; + │ ^ diff --git a/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14629_fail.move b/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14629_fail.move new file mode 100644 index 0000000000000..839d55f56bf5e --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/bytecode-generator/bug_14629_fail.move @@ -0,0 +1,8 @@ +module 0x8675309::M { + // struct Coin {} + struct R { r: T } + + fun t1() { + let R { r: _ } = R {r: 0}; + } +} diff --git a/third_party/move/move-compiler-v2/tests/checking/typing/global_builtins_invalid.exp b/third_party/move/move-compiler-v2/tests/checking/typing/global_builtins_invalid.exp index 73c27aa098912..8297687e57d65 100644 --- a/third_party/move/move-compiler-v2/tests/checking/typing/global_builtins_invalid.exp +++ b/third_party/move/move-compiler-v2/tests/checking/typing/global_builtins_invalid.exp @@ -42,6 +42,12 @@ error: cannot use `integer` with an operator which expects a value of type `R` 14 │ let () = move_to(a, 0); │ ^ +error: constraint `integer` does not have required ability `key` + ┌─ tests/checking/typing/global_builtins_invalid.move:15:29 + │ +15 │ let () = move_to(a, 0); + │ ^ + error: cannot use `integer` with an operator which expects a value of type `address` ┌─ tests/checking/typing/global_builtins_invalid.move:16:39 │ diff --git a/third_party/move/move-compiler-v2/tests/eager-pushes/move_stdlib_reduced.exp b/third_party/move/move-compiler-v2/tests/eager-pushes/move_stdlib_reduced.exp new file mode 100644 index 0000000000000..5ff4fea83d589 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/eager-pushes/move_stdlib_reduced.exp @@ -0,0 +1,152 @@ +============ initial bytecode ================ + +[variant baseline] +fun m::bar($t0: &mut u64, $t1: u64) { + 0: write_ref($t0, $t1) + 1: return () +} + + +[variant baseline] +public fun m::foo($t0: &mut u64, $t1: u64) { + var $t2: bool + var $t3: u64 + var $t4: &mut u64 + var $t5: u64 + var $t6: u64 + var $t7: u64 + 0: label L0 + 1: $t3 := 0 + 2: $t2 := >($t1, $t3) + 3: if ($t2) goto 4 else goto 12 + 4: label L2 + 5: $t4 := infer($t0) + 6: $t5 := m::one() + 7: m::bar($t4, $t5) + 8: $t7 := 1 + 9: $t6 := -($t1, $t7) + 10: $t1 := infer($t6) + 11: goto 14 + 12: label L3 + 13: goto 16 + 14: label L4 + 15: goto 0 + 16: label L1 + 17: return () +} + + +[variant baseline] +fun m::one(): u64 { + var $t0: u64 + 0: $t0 := 1 + 1: return $t0 +} + +============ after LiveVarAnalysisProcessor: ================ + +[variant baseline] +fun m::bar($t0: &mut u64, $t1: u64) { + # live vars: $t0, $t1 + 0: write_ref($t0, $t1) + # live vars: + 1: return () +} + + +[variant baseline] +public fun m::foo($t0: &mut u64, $t1: u64) { + var $t2: bool + var $t3: u64 + var $t4: &mut u64 + var $t5: u64 [unused] + var $t6: u64 [unused] + var $t7: u64 [unused] + # live vars: $t0, $t1 + 0: label L0 + # live vars: $t0, $t1 + 1: $t3 := 0 + # live vars: $t0, $t1, $t3 + 2: $t2 := >($t1, $t3) + # live vars: $t0, $t1, $t2 + 3: if ($t2) goto 4 else goto 12 + # live vars: $t0, $t1 + 4: label L2 + # live vars: $t0, $t1 + 5: $t4 := copy($t0) + # live vars: $t0, $t1, $t4 + 6: $t3 := m::one() + # 
live vars: $t0, $t1, $t3, $t4 + 7: m::bar($t4, $t3) + # live vars: $t0, $t1 + 8: $t3 := 1 + # live vars: $t0, $t1, $t3 + 9: $t3 := -($t1, $t3) + # live vars: $t0, $t3 + 10: $t1 := move($t3) + # live vars: $t0, $t1 + 11: goto 0 + # live vars: $t0, $t1 + 12: label L3 + # live vars: $t0 + 13: drop($t0) + # live vars: + 14: return () +} + + +[variant baseline] +fun m::one(): u64 { + var $t0: u64 + # live vars: + 0: $t0 := 1 + # live vars: $t0 + 1: return $t0 +} + + +============ disassembled file-format ================== +// Move bytecode v7 +module c0ffee.m { + + +bar(Arg0: &mut u64, Arg1: u64) /* def_idx: 0 */ { +B0: + 0: MoveLoc[1](Arg1: u64) + 1: MoveLoc[0](Arg0: &mut u64) + 2: WriteRef + 3: Ret +} +public foo(Arg0: &mut u64, Arg1: u64) /* def_idx: 1 */ { +L2: loc0: u64 +L3: loc1: &mut u64 +B0: + 0: CopyLoc[1](Arg1: u64) + 1: LdU64(0) + 2: Gt + 3: BrFalse(16) +B1: + 4: CopyLoc[0](Arg0: &mut u64) + 5: StLoc[3](loc1: &mut u64) + 6: Call one(): u64 + 7: StLoc[2](loc0: u64) + 8: MoveLoc[3](loc1: &mut u64) + 9: MoveLoc[2](loc0: u64) + 10: Call bar(&mut u64, u64) + 11: MoveLoc[1](Arg1: u64) + 12: LdU64(1) + 13: Sub + 14: StLoc[1](Arg1: u64) + 15: Branch(0) +B2: + 16: MoveLoc[0](Arg0: &mut u64) + 17: Pop + 18: Ret +} +one(): u64 /* def_idx: 2 */ { +B0: + 0: LdU64(1) + 1: Ret +} +} +============ bytecode verification succeeded ======== diff --git a/third_party/move/move-compiler-v2/tests/eager-pushes/move_stdlib_reduced.move b/third_party/move/move-compiler-v2/tests/eager-pushes/move_stdlib_reduced.move new file mode 100644 index 0000000000000..262b41aa98b94 --- /dev/null +++ b/third_party/move/move-compiler-v2/tests/eager-pushes/move_stdlib_reduced.move @@ -0,0 +1,17 @@ +// Test case to showcase https://github.com/aptos-labs/aptos-core/issues/15339 +module 0xc0ffee::m { + fun one(): u64 { + 1 + } + + fun bar(x: &mut u64, i: u64) { + *x = i; + } + + public fun foo(x: &mut u64, len: u64) { + while (len > 0) { + bar(x, one()); + len = len - 1; + }; + } +} diff --git a/third_party/move/move-compiler-v2/tests/reference-safety/v1-borrow-tests/writeref_borrow_invalid.old.exp b/third_party/move/move-compiler-v2/tests/reference-safety/v1-borrow-tests/writeref_borrow_invalid.old.exp index 781744ed45a72..8b5dcbf17e00a 100644 --- a/third_party/move/move-compiler-v2/tests/reference-safety/v1-borrow-tests/writeref_borrow_invalid.old.exp +++ b/third_party/move/move-compiler-v2/tests/reference-safety/v1-borrow-tests/writeref_borrow_invalid.old.exp @@ -4,7 +4,7 @@ Diagnostics: bug: bytecode verification failed with unexpected status code `WRITEREF_EXISTS_BORROW_ERROR`. This is a compiler bug, consider reporting it. Error message: none - ┌─ tests/reference-safety/v1-borrow-tests/writeref_borrow_invalid.move:10:18 + ┌─ tests/reference-safety/v1-borrow-tests/writeref_borrow_invalid.move:10:9 │ 10 │ *g_mut = G { v: 0 }; - │ ^^^^^^^^^^ + │ ^^^^^^^^^^^^^^^^^^^ diff --git a/third_party/move/move-ir-compiler/move-bytecode-source-map/src/source_map.rs b/third_party/move/move-ir-compiler/move-bytecode-source-map/src/source_map.rs index ec101a84c7001..4f0b3364d55dc 100644 --- a/third_party/move/move-ir-compiler/move-bytecode-source-map/src/source_map.rs +++ b/third_party/move/move-ir-compiler/move-bytecode-source-map/src/source_map.rs @@ -83,7 +83,7 @@ pub struct SourceMap { // A mapping of `StructDefinitionIndex` to source map for each struct/resource. struct_map: BTreeMap, - // A mapping of `FunctionDefinitionIndex` to the soure map for that function. 
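A note on the `# live vars:` annotations in the move_stdlib_reduced dump above: they are the output of a standard backward liveness computation. The sketch below is a minimal, self-contained illustration of that computation over straight-line code; the `Instr` type and the def/use encoding are invented for illustration and are not the compiler's actual LiveVarAnalysisProcessor types.

use std::collections::BTreeSet;

/// Toy instruction: the temporaries it defines and the temporaries it uses.
struct Instr {
    defs: Vec<u32>,
    uses: Vec<u32>,
}

/// Backward liveness over straight-line code:
/// live_before(i) = (live_after(i) - defs(i)) union uses(i).
fn live_before_each(code: &[Instr]) -> Vec<BTreeSet<u32>> {
    let mut live: BTreeSet<u32> = BTreeSet::new();
    let mut result = vec![BTreeSet::new(); code.len()];
    for (i, instr) in code.iter().enumerate().rev() {
        for d in &instr.defs {
            live.remove(d);
        }
        for u in &instr.uses {
            live.insert(*u);
        }
        result[i] = live.clone();
    }
    result
}

fn main() {
    // Mirrors the shape of m::one above: `$t0 := 1; return $t0`.
    let code = vec![
        Instr { defs: vec![0], uses: vec![] }, // 0: $t0 := 1
        Instr { defs: vec![], uses: vec![0] }, // 1: return $t0
    ];
    for (i, set) in live_before_each(&code).iter().enumerate() {
        println!("live vars before {}: {:?}", i, set);
    }
}

The same rule explains why `$t0` (the `&mut u64`) is still live at the loop exit of `foo` above and has to be dropped before the function returns.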
+ // A mapping of `FunctionDefinitionIndex` to the source map for that function. // For scripts, this map has a single element that points to a source map corresponding to the // script's "main" function. function_map: BTreeMap, @@ -202,6 +202,31 @@ impl FunctionSourceMap { }; } + /// Remap the code map based on the given `remap`. + /// If `remap[i] == j`, then the code location associated with code offset `j` + /// will now be associated with code offset `i`. + pub fn remap_code_map(&mut self, remap: Vec) { + let mut prev_loc = None; + let new_code_map = remap + .iter() + .map(|old_offset| self.get_code_location(*old_offset)) + .enumerate() + .filter_map(|(new_offset, loc)| { + if prev_loc == loc { + // optimization: if a series of instructions map to the same location, we only need + // to add code mapping for the first one, because the code map is a segment map. + None + } else if let Some(loc) = loc { + prev_loc = Some(loc); + Some((new_offset as CodeOffset, loc)) + } else { + None + } + }) + .collect(); + self.code_map = new_code_map; + } + /// Record the code offset for an Nop label pub fn add_nop_mapping(&mut self, label: NopLabel, offset: CodeOffset) { assert!(self.nops.insert(label, offset).is_none()) @@ -362,6 +387,22 @@ impl SourceMap { Ok(()) } + /// Remap the code map for the function given by `fdef_idx`, according to `remap`. + /// If `remap[i] == j`, then the code location associated with code offset `j` + /// will now be associated with code offset `i`. + pub fn remap_code_map( + &mut self, + fdef_idx: FunctionDefinitionIndex, + remap: Vec, + ) -> Result<()> { + let func_entry = self + .function_map + .get_mut(&fdef_idx.0) + .ok_or_else(|| format_err!("Tried to remap code map of undefined function index"))?; + func_entry.remap_code_map(remap); + Ok(()) + } + pub fn add_nop_mapping( &mut self, fdef_idx: FunctionDefinitionIndex, diff --git a/third_party/move/move-model/src/builder/exp_builder.rs b/third_party/move/move-model/src/builder/exp_builder.rs index 992dbb07cf603..8f16fa60111de 100644 --- a/third_party/move/move-model/src/builder/exp_builder.rs +++ b/third_party/move/move-model/src/builder/exp_builder.rs @@ -2089,7 +2089,8 @@ impl<'env, 'translator, 'module_translator> ExpTranslator<'env, 'translator, 'mo Pattern::Struct(sid, std, variant, patterns) => { let mut new_inst = vec![]; for ty in &std.inst { - let nty = subs.specialize(ty); + // use `specialize_with_defaults` to get type info from constraints + let nty: Type = subs.specialize_with_defaults(ty); new_inst.push(nty); } let mut new_std = std.clone(); diff --git a/third_party/move/move-model/src/ty.rs b/third_party/move/move-model/src/ty.rs index 2bb9288cbd53b..6e947c60d7442 100644 --- a/third_party/move/move-model/src/ty.rs +++ b/third_party/move/move-model/src/ty.rs @@ -453,12 +453,14 @@ impl Constraint { /// Joins the two constraints. If they are incompatible, produces a type unification error. /// Otherwise, returns true if `self` absorbs the `other` constraint (and waives the `other`). 
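Before turning to the type-constraint changes below, a standalone illustration of the `remap_code_map` rule added above: if `remap[i] == j`, the location previously recorded for code offset `j` becomes the location for offset `i`, and consecutive offsets that resolve to the same location are collapsed because the code map is a segment map. The sketch uses a plain `BTreeMap` with string labels as a stand-in for the real source-map types.

use std::collections::BTreeMap;

type CodeOffset = u16;

/// Simplified stand-in for a code map: a segment map from a starting offset to a location label.
/// A lookup returns the entry with the largest key <= the requested offset.
fn get_code_location(
    map: &BTreeMap<CodeOffset, &'static str>,
    offset: CodeOffset,
) -> Option<&'static str> {
    map.range(..=offset).next_back().map(|(_, loc)| *loc)
}

/// remap[i] == j: the location previously associated with offset j is now associated with offset i.
/// Consecutive identical locations are collapsed, since the map is a segment map.
fn remap_code_map(
    map: &BTreeMap<CodeOffset, &'static str>,
    remap: &[CodeOffset],
) -> BTreeMap<CodeOffset, &'static str> {
    let mut new_map = BTreeMap::new();
    let mut prev: Option<&'static str> = None;
    for (new_offset, old_offset) in remap.iter().enumerate() {
        if let Some(loc) = get_code_location(map, *old_offset) {
            if prev != Some(loc) {
                new_map.insert(new_offset as CodeOffset, loc);
                prev = Some(loc);
            }
        }
    }
    new_map
}

fn main() {
    let mut map = BTreeMap::new();
    map.insert(0, "line 1"); // offsets 0..=1 map to "line 1"
    map.insert(2, "line 2"); // offset 2 maps to "line 2"
    // The instructions are reordered so that old offset 2 comes first.
    let remapped = remap_code_map(&map, &[2, 0, 1]);
    // Prints {0: "line 2", 1: "line 1"}: new offsets 1 and 2 both resolve to
    // "line 1", so only the first entry of that segment is recorded.
    println!("{:?}", remapped);
}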
+ /// ctx_opt is for additional error info pub fn join( &mut self, context: &mut impl UnificationContext, subs: &mut Substitution, loc: &Loc, other: &Constraint, + ctx_opt: Option, ) -> Result { match (&mut *self, other) { (Constraint::SomeNumber(opts1), Constraint::SomeNumber(opts2)) => { @@ -543,6 +545,34 @@ impl Constraint { *a1 = a1.union(*a2); Ok(true) }, + // After the above checks on same type of constraint + // Check compatibility between ability and number + // This check is needed because sometime the concrete integer type is not available + // TODO: check other combination of constraints may be necessary as well. + (Constraint::HasAbilities(a1, _), Constraint::SomeNumber(_)) => { + let unsupported_abilities = a1.setminus(AbilitySet::PRIMITIVES); + if !unsupported_abilities.is_empty() { + return Err(TypeUnificationError::MissingAbilitiesForConstraints( + loc.clone(), + other.clone(), + unsupported_abilities, + ctx_opt, + )); + } + Ok(false) + }, + (Constraint::SomeNumber(_), Constraint::HasAbilities(a1, _)) => { + let unsupported_abilities = a1.setminus(AbilitySet::PRIMITIVES); + if !unsupported_abilities.is_empty() { + return Err(TypeUnificationError::MissingAbilitiesForConstraints( + loc.clone(), + self.clone(), + unsupported_abilities, + ctx_opt, + )); + } + Ok(false) + }, // After the above checks, if one of the constraints is // accumulating, indicate its compatible but cannot be joined. (c1, c2) if c1.accumulating() || c2.accumulating() => Ok(false), @@ -702,6 +732,8 @@ pub enum TypeUnificationError { ), /// The `HasAbilities` constraint failed: `MissingAbilities(loc, ty, missing, ctx)`. MissingAbilities(Loc, Type, AbilitySet, Option), + /// The `HasAbilities` constraint failed: `MissingAbilitiesForConstraints(loc, ctr, missing, ctx)`. + MissingAbilitiesForConstraints(Loc, Constraint, AbilitySet, Option), /// The two constraints are incompatible and cannot be joined. ConstraintsIncompatible(Loc, Constraint, Constraint), /// A cyclic substitution when trying to unify the given types. @@ -1668,7 +1700,7 @@ impl Substitution { let mut absorbed = false; for (_, _, c) in current.iter_mut() { // Join constraints. If join returns true the constraint is absorbed. - absorbed = c.join(context, self, &loc, &ctr)?; + absorbed = c.join(context, self, &loc, &ctr, ctx_opt.clone())?; if absorbed { break; } @@ -3015,6 +3047,27 @@ impl TypeUnificationError { labels, ) }, + TypeUnificationError::MissingAbilitiesForConstraints(_, ctr, missing, ctx_opt) => { + let (note, hints, labels) = ctx_opt + .as_ref() + .map(|ctx| ctx.describe(display_context)) + .unwrap_or_default(); + ( + format!( + "constraint `{}` does not have required {} `{}`{}", + ctr.display(display_context), + pluralize("ability", missing.iter().count()), + missing, + if !note.is_empty() { + format!(" ({})", note) + } else { + "".to_string() + } + ), + hints, + labels, + ) + }, TypeUnificationError::ConstraintUnsatisfied(_, ty, order, constr, ctx_opt) => { let item_name = || match ctx_opt { Some(ConstraintContext { diff --git a/third_party/move/move-prover/boogie-backend/src/bytecode_translator.rs b/third_party/move/move-prover/boogie-backend/src/bytecode_translator.rs index 661c782d8aac8..c6f7d71839415 100644 --- a/third_party/move/move-prover/boogie-backend/src/bytecode_translator.rs +++ b/third_party/move/move-prover/boogie-backend/src/bytecode_translator.rs @@ -445,7 +445,12 @@ impl<'env> StructTranslator<'env> { "", // not inlined! 
&format!("$IsValid'{}'(s: {}): bool", suffix_variant, struct_name), || { - if struct_env.is_intrinsic() || struct_env.get_field_count() == 0 { + if struct_env.is_intrinsic() + || struct_env + .get_fields_of_variant(variant) + .collect_vec() + .is_empty() + { emitln!(writer, "true") } else { let mut sep = ""; diff --git a/third_party/move/move-prover/tests/sources/functional/aborts_if.v2_exp b/third_party/move/move-prover/tests/sources/functional/aborts_if.v2_exp index 8179a66a39bdf..6b904aef88910 100644 --- a/third_party/move/move-prover/tests/sources/functional/aborts_if.v2_exp +++ b/third_party/move/move-prover/tests/sources/functional/aborts_if.v2_exp @@ -102,7 +102,7 @@ error: abort not covered by any of the `aborts_if` clauses ┌─ tests/sources/functional/aborts_if.move:139:5 │ 137 │ if (x == 2 || x == 3) abort 1; - │ ----------------------------- abort happened here with code 0x1 + │ ------- abort happened here with code 0x1 138 │ } 139 │ ╭ spec abort_at_2_or_3_total_incorrect { 140 │ │ // Counter check that we get an error message without the pragma. @@ -114,8 +114,6 @@ error: abort not covered by any of the `aborts_if` clauses = at tests/sources/functional/aborts_if.move:136: abort_at_2_or_3_total_incorrect = x = = at tests/sources/functional/aborts_if.move:137: abort_at_2_or_3_total_incorrect - = at tests/sources/functional/aborts_if.move:136: abort_at_2_or_3_total_incorrect - = at tests/sources/functional/aborts_if.move:137: abort_at_2_or_3_total_incorrect = = = at tests/sources/functional/aborts_if.move:137: abort_at_2_or_3_total_incorrect = ABORTED @@ -129,17 +127,16 @@ error: function does not abort under this condition = at tests/sources/functional/aborts_if.move:145: abort_at_2_or_3_spec_incorrect = x = = at tests/sources/functional/aborts_if.move:146: abort_at_2_or_3_spec_incorrect - = at tests/sources/functional/aborts_if.move:145: abort_at_2_or_3_spec_incorrect - = at tests/sources/functional/aborts_if.move:146: abort_at_2_or_3_spec_incorrect = = = at tests/sources/functional/aborts_if.move:146: abort_at_2_or_3_spec_incorrect + = at tests/sources/functional/aborts_if.move:145: abort_at_2_or_3_spec_incorrect = at tests/sources/functional/aborts_if.move:151: abort_at_2_or_3_spec_incorrect (spec) error: abort not covered by any of the `aborts_if` clauses ┌─ tests/sources/functional/aborts_if.move:157:5 │ 155 │ if (x == 2 || x == 3) abort 1; - │ ----------------------------- abort happened here with code 0x1 + │ ------- abort happened here with code 0x1 156 │ } 157 │ ╭ spec abort_at_2_or_3_strict_incorrect { 158 │ │ // When the strict mode is enabled, no aborts_if clause means aborts_if false. 
@@ -150,8 +147,6 @@ error: abort not covered by any of the `aborts_if` clauses = at tests/sources/functional/aborts_if.move:154: abort_at_2_or_3_strict_incorrect = x = = at tests/sources/functional/aborts_if.move:155: abort_at_2_or_3_strict_incorrect - = at tests/sources/functional/aborts_if.move:154: abort_at_2_or_3_strict_incorrect - = at tests/sources/functional/aborts_if.move:155: abort_at_2_or_3_strict_incorrect = = = at tests/sources/functional/aborts_if.move:155: abort_at_2_or_3_strict_incorrect = ABORTED diff --git a/third_party/move/move-prover/tests/sources/functional/aborts_if_with_code.v2_exp b/third_party/move/move-prover/tests/sources/functional/aborts_if_with_code.v2_exp index 36844a9bf44f1..930280cf0144d 100644 --- a/third_party/move/move-prover/tests/sources/functional/aborts_if_with_code.v2_exp +++ b/third_party/move/move-prover/tests/sources/functional/aborts_if_with_code.v2_exp @@ -2,22 +2,21 @@ Move prover returns: exiting with verification errors error: abort code not covered by any of the `aborts_if` or `aborts_with` clauses ┌─ tests/sources/functional/aborts_if_with_code.move:38:5 │ -30 │ ╭ if (x == 1) { -31 │ │ abort 2 -32 │ │ }; - │ ╰─────────' abort happened here with code 0x2 - · │ -38 │ ╭ spec conditional_abort_invalid { -39 │ │ aborts_if x == 1 with 1; // wrong code -40 │ │ aborts_if y == 2 with 3; -41 │ │ ensures result == x; -42 │ │ } - │ ╰───────^ +31 │ abort 2 + │ ------- abort happened here with code 0x2 + · +38 │ ╭ spec conditional_abort_invalid { +39 │ │ aborts_if x == 1 with 1; // wrong code +40 │ │ aborts_if y == 2 with 3; +41 │ │ ensures result == x; +42 │ │ } + │ ╰─────^ │ = at tests/sources/functional/aborts_if_with_code.move:29: conditional_abort_invalid = x = = y = = at tests/sources/functional/aborts_if_with_code.move:30: conditional_abort_invalid + = at tests/sources/functional/aborts_if_with_code.move:31: conditional_abort_invalid = ABORTED error: abort code not covered by any of the `aborts_if` or `aborts_with` clauses @@ -40,8 +39,8 @@ error: abort code not covered by any of the `aborts_if` or `aborts_with` clauses error: abort code not covered by any of the `aborts_if` or `aborts_with` clauses ┌─ tests/sources/functional/aborts_if_with_code.move:77:5 │ -73 │ if (x == 2) { - │ ------ abort happened here with code 0x2 +74 │ abort(2) + │ -------- abort happened here with code 0x2 · 77 │ ╭ spec aborts_if_with_code_mixed_invalid { 78 │ │ aborts_if x == 1; @@ -52,15 +51,15 @@ error: abort code not covered by any of the `aborts_if` or `aborts_with` clauses = at tests/sources/functional/aborts_if_with_code.move:69: aborts_if_with_code_mixed_invalid = x = = at tests/sources/functional/aborts_if_with_code.move:70: aborts_if_with_code_mixed_invalid - = at tests/sources/functional/aborts_if_with_code.move:71: aborts_if_with_code_mixed_invalid = at tests/sources/functional/aborts_if_with_code.move:73: aborts_if_with_code_mixed_invalid + = at tests/sources/functional/aborts_if_with_code.move:74: aborts_if_with_code_mixed_invalid = ABORTED error: abort code not covered by any of the `aborts_if` or `aborts_with` clauses ┌─ tests/sources/functional/aborts_if_with_code.move:105:5 │ -101 │ if (x == 2) { - │ ------ abort happened here with code 0x2 +102 │ abort(2) + │ -------- abort happened here with code 0x2 · 105 │ ╭ spec aborts_with_invalid { 106 │ │ aborts_with 1,3; @@ -70,15 +69,15 @@ error: abort code not covered by any of the `aborts_if` or `aborts_with` clauses = at tests/sources/functional/aborts_if_with_code.move:97: aborts_with_invalid = x = = at 
tests/sources/functional/aborts_if_with_code.move:98: aborts_with_invalid - = at tests/sources/functional/aborts_if_with_code.move:99: aborts_with_invalid = at tests/sources/functional/aborts_if_with_code.move:101: aborts_with_invalid + = at tests/sources/functional/aborts_if_with_code.move:102: aborts_with_invalid = ABORTED error: abort code not covered by any of the `aborts_if` or `aborts_with` clauses ┌─ tests/sources/functional/aborts_if_with_code.move:131:5 │ -127 │ if (x == 2) { - │ ------ abort happened here with code 0x1 +128 │ abort(1) + │ -------- abort happened here with code 0x1 · 131 │ ╭ spec aborts_with_mixed_invalid { 132 │ │ pragma aborts_if_is_partial = true; @@ -90,6 +89,6 @@ error: abort code not covered by any of the `aborts_if` or `aborts_with` clauses = at tests/sources/functional/aborts_if_with_code.move:123: aborts_with_mixed_invalid = x = = at tests/sources/functional/aborts_if_with_code.move:124: aborts_with_mixed_invalid - = at tests/sources/functional/aborts_if_with_code.move:125: aborts_with_mixed_invalid = at tests/sources/functional/aborts_if_with_code.move:127: aborts_with_mixed_invalid + = at tests/sources/functional/aborts_if_with_code.move:128: aborts_with_mixed_invalid = ABORTED diff --git a/third_party/move/move-prover/tests/sources/functional/bv_aborts.v2_exp b/third_party/move/move-prover/tests/sources/functional/bv_aborts.v2_exp index 2d106370311c2..59c574ebbd267 100644 --- a/third_party/move/move-prover/tests/sources/functional/bv_aborts.v2_exp +++ b/third_party/move/move-prover/tests/sources/functional/bv_aborts.v2_exp @@ -8,24 +8,22 @@ error: function does not abort under this condition = at tests/sources/functional/bv_aborts.move:11: assert_with_spec = x = = at tests/sources/functional/bv_aborts.move:12: assert_with_spec + = at tests/sources/functional/bv_aborts.move:11: assert_with_spec = at tests/sources/functional/bv_aborts.move:16: assert_with_spec (spec) error: abort not covered by any of the `aborts_if` clauses ┌─ tests/sources/functional/bv_aborts.move:14:5 │ -11 │ fun assert_with_spec(x: u64) { - │ ╭──────────────────────────────────' -12 │ │ assert!(x > 815); -13 │ │ } - │ ╰─────' abort happened here -14 │ ╭ spec assert_with_spec { -15 │ │ // This will fail -16 │ │ aborts_if x > 815 with std::error::internal(0) | (0xCA26CBD9BE << 24); -17 │ │ } - │ ╰───────^ +12 │ assert!(x > 815); + │ ------ abort happened here +13 │ } +14 │ ╭ spec assert_with_spec { +15 │ │ // This will fail +16 │ │ aborts_if x > 815 with std::error::internal(0) | (0xCA26CBD9BE << 24); +17 │ │ } + │ ╰─────^ │ = at tests/sources/functional/bv_aborts.move:11: assert_with_spec = x = = at tests/sources/functional/bv_aborts.move:12: assert_with_spec - = at tests/sources/functional/bv_aborts.move:11: assert_with_spec = ABORTED diff --git a/third_party/move/move-prover/tests/sources/functional/choice.v2_exp b/third_party/move/move-prover/tests/sources/functional/choice.v2_exp index a5d5c96f98fbe..8717b9158c33c 100644 --- a/third_party/move/move-prover/tests/sources/functional/choice.v2_exp +++ b/third_party/move/move-prover/tests/sources/functional/choice.v2_exp @@ -48,8 +48,11 @@ error: post-condition does not hold = at tests/sources/functional/choice.move:78: test_not_using_min_incorrect = at tests/sources/functional/choice.move:79: test_not_using_min_incorrect = at tests/sources/functional/choice.move:80: test_not_using_min_incorrect - = v = = at tests/sources/functional/choice.move:81: test_not_using_min_incorrect + = at tests/sources/functional/choice.move:82: 
test_not_using_min_incorrect + = v = + = at tests/sources/functional/choice.move:83: test_not_using_min_incorrect + = at tests/sources/functional/choice.move:75: test_not_using_min_incorrect = result = = at tests/sources/functional/choice.move:84: test_not_using_min_incorrect = at tests/sources/functional/choice.move:87: test_not_using_min_incorrect (spec) diff --git a/third_party/move/move-prover/tests/sources/functional/emits.v2_exp b/third_party/move/move-prover/tests/sources/functional/emits.v2_exp index 2387c17c66674..a3c55d75f67cc 100644 --- a/third_party/move/move-prover/tests/sources/functional/emits.v2_exp +++ b/third_party/move/move-prover/tests/sources/functional/emits.v2_exp @@ -76,7 +76,6 @@ error: function does not emit the expected event = x = = handle = = at tests/sources/functional/emits.move:106: conditional_wrong_condition_incorrect - = at tests/sources/functional/emits.move:107: conditional_wrong_condition_incorrect = handle = = at tests/sources/functional/emits.move:111: conditional_wrong_condition_incorrect (spec) @@ -90,7 +89,6 @@ error: function does not emit the expected event = x = = handle = = at tests/sources/functional/emits.move:115: conditional_missing_condition_incorrect - = at tests/sources/functional/emits.move:116: conditional_missing_condition_incorrect = handle = = at tests/sources/functional/emits.move:120: conditional_missing_condition_incorrect (spec) diff --git a/third_party/move/move-prover/tests/sources/functional/enum.exp b/third_party/move/move-prover/tests/sources/functional/enum.exp index 914ff680e45c2..a4b90881bf502 100644 --- a/third_party/move/move-prover/tests/sources/functional/enum.exp +++ b/third_party/move/move-prover/tests/sources/functional/enum.exp @@ -2,23 +2,29 @@ Move prover returns: exiting with model building errors error: unsupported language construct ┌─ tests/sources/functional/enum.move:3:5 │ -3 │ enum CommonFields has copy, drop { +3 │ enum TestNoField has copy, drop { │ ^^^^ Move 2 language construct is not enabled: enum types error: unsupported language construct - ┌─ tests/sources/functional/enum.move:10:25 + ┌─ tests/sources/functional/enum.move:11:5 │ -10 │ invariant (self is CommonFields::Bar) ==> self.z > 10; +11 │ enum CommonFields has copy, drop { + │ ^^^^ Move 2 language construct is not enabled: enum types + +error: unsupported language construct + ┌─ tests/sources/functional/enum.move:18:25 + │ +18 │ invariant (self is CommonFields::Bar) ==> self.z > 10; │ ^^ Move 2 language construct is not enabled: `is` expression error: unsupported language construct - ┌─ tests/sources/functional/enum.move:39:9 + ┌─ tests/sources/functional/enum.move:47:9 │ -39 │ match (&common) { +47 │ match (&common) { │ ^^^^^ Move 2 language construct is not enabled: match expression error: unsupported language construct - ┌─ tests/sources/functional/enum.move:49:5 + ┌─ tests/sources/functional/enum.move:57:5 │ -49 │ enum CommonFieldsVector has drop { +57 │ enum CommonFieldsVector has drop { │ ^^^^ Move 2 language construct is not enabled: enum types diff --git a/third_party/move/move-prover/tests/sources/functional/enum.move b/third_party/move/move-prover/tests/sources/functional/enum.move index 9ac6876a40186..89d005e08f2a3 100644 --- a/third_party/move/move-prover/tests/sources/functional/enum.move +++ b/third_party/move/move-prover/tests/sources/functional/enum.move @@ -1,5 +1,13 @@ module 0x815::m { + enum TestNoField has copy, drop { + NoField + } + + fun test_no_field(): TestNoField { + TestNoField::NoField + } + enum CommonFields 
has copy, drop { Foo{x: u64, y: u8}, Bar{x: u64, y: u8, z: u32} diff --git a/third_party/move/move-prover/tests/sources/functional/enum.v2_exp b/third_party/move/move-prover/tests/sources/functional/enum.v2_exp index 737366c5714f8..9a4eed455c87b 100644 --- a/third_party/move/move-prover/tests/sources/functional/enum.v2_exp +++ b/third_party/move/move-prover/tests/sources/functional/enum.v2_exp @@ -1,62 +1,63 @@ Move prover returns: exiting with verification errors error: data invariant does not hold - ┌─ tests/sources/functional/enum.move:9:9 - │ -9 │ invariant self.x > 20; - │ ^^^^^^^^^^^^^^^^^^^^^^ - │ - = at tests/sources/functional/enum.move:15: t9_common_field - = at tests/sources/functional/enum.move:16: t9_common_field - = at tests/sources/functional/enum.move:17: t9_common_field - = at tests/sources/functional/enum.move:14: t9_common_field - = at tests/sources/functional/enum.move:9 - = at tests/sources/functional/enum.move:10 - = at tests/sources/functional/enum.move:14: t9_common_field - = common = - = at tests/sources/functional/enum.move:19: t9_common_field - = at tests/sources/functional/enum.move:9 + ┌─ tests/sources/functional/enum.move:17:9 + │ +17 │ invariant self.x > 20; + │ ^^^^^^^^^^^^^^^^^^^^^^ + │ + = at tests/sources/functional/enum.move:23: t9_common_field + = at tests/sources/functional/enum.move:24: t9_common_field + = at tests/sources/functional/enum.move:25: t9_common_field + = at tests/sources/functional/enum.move:22: t9_common_field + = at tests/sources/functional/enum.move:17 + = at tests/sources/functional/enum.move:18 + = at tests/sources/functional/enum.move:22: t9_common_field + = common = + = at tests/sources/functional/enum.move:27: t9_common_field + = at tests/sources/functional/enum.move:17 error: data invariant does not hold - ┌─ tests/sources/functional/enum.move:10:9 + ┌─ tests/sources/functional/enum.move:18:9 │ -10 │ invariant (self is CommonFields::Bar) ==> self.z > 10; +18 │ invariant (self is CommonFields::Bar) ==> self.z > 10; │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ │ - = at tests/sources/functional/enum.move:25: test_data_invariant - = at tests/sources/functional/enum.move:26: test_data_invariant - = at tests/sources/functional/enum.move:27: test_data_invariant - = at tests/sources/functional/enum.move:24: test_data_invariant - = at tests/sources/functional/enum.move:9 - = at tests/sources/functional/enum.move:10 - = at tests/sources/functional/enum.move:24: test_data_invariant + = at tests/sources/functional/enum.move:33: test_data_invariant + = at tests/sources/functional/enum.move:34: test_data_invariant + = at tests/sources/functional/enum.move:35: test_data_invariant + = at tests/sources/functional/enum.move:32: test_data_invariant + = at tests/sources/functional/enum.move:17 + = at tests/sources/functional/enum.move:18 + = at tests/sources/functional/enum.move:32: test_data_invariant = common = - = at tests/sources/functional/enum.move:29: test_data_invariant + = at tests/sources/functional/enum.move:37: test_data_invariant = = = z = - = at tests/sources/functional/enum.move:9 - = at tests/sources/functional/enum.move:10 + = at tests/sources/functional/enum.move:38: test_data_invariant + = at tests/sources/functional/enum.move:17 + = at tests/sources/functional/enum.move:18 error: unknown assertion failed - ┌─ tests/sources/functional/enum.move:68:13 + ┌─ tests/sources/functional/enum.move:76:13 │ -68 │ assert _common_vector_1.x != _common_vector_2.x; // this fails +76 │ assert _common_vector_1.x != _common_vector_2.x; 
// this fails │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ │ - = at tests/sources/functional/enum.move:56: test_enum_vector - = at tests/sources/functional/enum.move:55: test_enum_vector - = _common_vector_1 = - = at tests/sources/functional/enum.move:59: test_enum_vector - = at tests/sources/functional/enum.move:60: test_enum_vector - = at tests/sources/functional/enum.move:61: test_enum_vector - = at tests/sources/functional/enum.move:58: test_enum_vector - = at tests/sources/functional/enum.move:9 - = at tests/sources/functional/enum.move:10 - = at tests/sources/functional/enum.move:58: test_enum_vector - = _common_fields = = at tests/sources/functional/enum.move:64: test_enum_vector - = at tests/sources/functional/enum.move:65: test_enum_vector - = at tests/sources/functional/enum.move:9 - = at tests/sources/functional/enum.move:65: test_enum_vector = at tests/sources/functional/enum.move:63: test_enum_vector - = _common_vector_2 = + = _common_vector_1 = + = at tests/sources/functional/enum.move:67: test_enum_vector = at tests/sources/functional/enum.move:68: test_enum_vector + = at tests/sources/functional/enum.move:69: test_enum_vector + = at tests/sources/functional/enum.move:66: test_enum_vector + = at tests/sources/functional/enum.move:17 + = at tests/sources/functional/enum.move:18 + = at tests/sources/functional/enum.move:66: test_enum_vector + = _common_fields = + = at tests/sources/functional/enum.move:72: test_enum_vector + = at tests/sources/functional/enum.move:73: test_enum_vector + = at tests/sources/functional/enum.move:17 + = at tests/sources/functional/enum.move:73: test_enum_vector + = at tests/sources/functional/enum.move:71: test_enum_vector + = _common_vector_2 = + = at tests/sources/functional/enum.move:76: test_enum_vector diff --git a/third_party/move/move-prover/tests/sources/functional/enum_abort.v2_exp b/third_party/move/move-prover/tests/sources/functional/enum_abort.v2_exp index e913c0054b4b3..d9bd3b8ee1f3b 100644 --- a/third_party/move/move-prover/tests/sources/functional/enum_abort.v2_exp +++ b/third_party/move/move-prover/tests/sources/functional/enum_abort.v2_exp @@ -81,7 +81,7 @@ error: abort not covered by any of the `aborts_if` clauses ┌─ tests/sources/functional/enum_abort.move:129:5 │ 123 │ abort 30 // aborts - │ -- abort happened here with code 0x1E + │ -------- abort happened here with code 0x1E · 129 │ ╭ spec test_match_abort { 130 │ │ aborts_if false; @@ -103,7 +103,6 @@ error: abort not covered by any of the `aborts_if` clauses = = = at tests/sources/functional/enum_abort.move:122: test_match_abort = _z = - = at tests/sources/functional/enum_abort.move:123: test_match_abort = at tests/sources/functional/enum_abort.move:122: test_match_abort = = = at tests/sources/functional/enum_abort.move:122: test_match_abort diff --git a/third_party/move/move-prover/tests/sources/functional/loop_unroll.v2_exp b/third_party/move/move-prover/tests/sources/functional/loop_unroll.v2_exp index c561bd90334e9..10d8fa2f9595e 100644 --- a/third_party/move/move-prover/tests/sources/functional/loop_unroll.v2_exp +++ b/third_party/move/move-prover/tests/sources/functional/loop_unroll.v2_exp @@ -66,7 +66,7 @@ error: abort not covered by any of the `aborts_if` clauses ┌─ tests/sources/functional/loop_unroll.move:128:5 │ 124 │ assert!(i != 5, 0); - │ ------ abort happened here with code 0x0 + │ ------ abort happened here with code 0x0 · 128 │ ╭ spec t7_failure { 129 │ │ pragma unroll = 6; diff --git a/third_party/move/move-prover/tests/sources/functional/mono.v2_exp 
b/third_party/move/move-prover/tests/sources/functional/mono.v2_exp index 573e40d78820e..58c8d24e7a195 100644 --- a/third_party/move/move-prover/tests/sources/functional/mono.v2_exp +++ b/third_party/move/move-prover/tests/sources/functional/mono.v2_exp @@ -64,6 +64,7 @@ error: post-condition does not hold = x = = at tests/sources/functional/mono.move:80: vec_vec = x = + = at tests/sources/functional/mono.move:79: vec_vec = result = = at tests/sources/functional/mono.move:81: vec_vec = at tests/sources/functional/mono.move:82: vec_vec (spec) diff --git a/third_party/move/move-prover/tests/sources/functional/schema_exp.v2_exp b/third_party/move/move-prover/tests/sources/functional/schema_exp.v2_exp index cdc6c3cede166..b96c99a669024 100644 --- a/third_party/move/move-prover/tests/sources/functional/schema_exp.v2_exp +++ b/third_party/move/move-prover/tests/sources/functional/schema_exp.v2_exp @@ -3,7 +3,7 @@ error: abort not covered by any of the `aborts_if` clauses ┌─ tests/sources/functional/schema_exp.move:29:5 │ 26 │ if (!c) abort(1); - │ --- abort happened here with code 0x1 + │ -------- abort happened here with code 0x1 · 29 │ ╭ spec bar_incorrect { 30 │ │ // Once we include a schema with aborts, even conditionally, we need to provide a full spec of the aborts diff --git a/third_party/move/move-prover/tests/sources/functional/verify_custom_table.v2_exp b/third_party/move/move-prover/tests/sources/functional/verify_custom_table.v2_exp index a859dc99038f4..d865f02b759e5 100644 --- a/third_party/move/move-prover/tests/sources/functional/verify_custom_table.v2_exp +++ b/third_party/move/move-prover/tests/sources/functional/verify_custom_table.v2_exp @@ -14,6 +14,7 @@ error: post-condition does not hold = at tests/sources/functional/verify_custom_table.move:72: add_fail = t = = at tests/sources/functional/verify_custom_table.move:73: add_fail + = at tests/sources/functional/verify_custom_table.move:68: add_fail = result = = at tests/sources/functional/verify_custom_table.move:74: add_fail = at tests/sources/functional/verify_custom_table.move:76: add_fail (spec) @@ -29,6 +30,7 @@ error: post-condition does not hold = at tests/sources/functional/verify_custom_table.move:203: create_and_insert_fail_due_to_typed_key_encoding = t = = at tests/sources/functional/verify_custom_table.move:204: create_and_insert_fail_due_to_typed_key_encoding + = at tests/sources/functional/verify_custom_table.move:201: create_and_insert_fail_due_to_typed_key_encoding = result = = at tests/sources/functional/verify_custom_table.move:205: create_and_insert_fail_due_to_typed_key_encoding = at tests/sources/functional/verify_custom_table.move:210: create_and_insert_fail_due_to_typed_key_encoding (spec) @@ -44,6 +46,7 @@ error: post-condition does not hold = at tests/sources/functional/verify_custom_table.move:215: create_and_insert_fail1 = t = = at tests/sources/functional/verify_custom_table.move:216: create_and_insert_fail1 + = at tests/sources/functional/verify_custom_table.move:213: create_and_insert_fail1 = result = = at tests/sources/functional/verify_custom_table.move:217: create_and_insert_fail1 = at tests/sources/functional/verify_custom_table.move:219: create_and_insert_fail1 (spec) @@ -59,6 +62,7 @@ error: post-condition does not hold = at tests/sources/functional/verify_custom_table.move:224: create_and_insert_fail2 = t = = at tests/sources/functional/verify_custom_table.move:225: create_and_insert_fail2 + = at tests/sources/functional/verify_custom_table.move:222: create_and_insert_fail2 = result = = at 
tests/sources/functional/verify_custom_table.move:226: create_and_insert_fail2 = at tests/sources/functional/verify_custom_table.move:228: create_and_insert_fail2 (spec) diff --git a/third_party/move/move-prover/tests/sources/functional/verify_table.v2_exp b/third_party/move/move-prover/tests/sources/functional/verify_table.v2_exp index afae3e87880c6..ba8cc35bd29d7 100644 --- a/third_party/move/move-prover/tests/sources/functional/verify_table.v2_exp +++ b/third_party/move/move-prover/tests/sources/functional/verify_table.v2_exp @@ -14,6 +14,7 @@ error: post-condition does not hold = at tests/sources/functional/verify_table.move:27: add_fail = t = = at tests/sources/functional/verify_table.move:28: add_fail + = at tests/sources/functional/verify_table.move:23: add_fail = result = = at tests/sources/functional/verify_table.move:29: add_fail = at tests/sources/functional/verify_table.move:31: add_fail (spec) diff --git a/third_party/move/tools/move-unit-test/Cargo.toml b/third_party/move/tools/move-unit-test/Cargo.toml index d45644fef2d80..c0e29367cb1a0 100644 --- a/third_party/move/tools/move-unit-test/Cargo.toml +++ b/third_party/move/tools/move-unit-test/Cargo.toml @@ -25,8 +25,11 @@ regex = { workspace = true } move-command-line-common = { path = "../../move-command-line-common" } move-compiler = { path = "../../move-compiler" } +move-compiler-v2 = { path = "../../move-compiler-v2" } move-core-types = { path = "../../move-core/types" } move-ir-types = { path = "../../move-ir/types" } +move-model = { path = "../../move-model" } +move-package = { path = "../move-package" } move-resource-viewer = { path = "../move-resource-viewer" } move-stdlib = { path = "../../move-stdlib", features = ["testing"] } move-symbol-pool = { path = "../../move-symbol-pool" } diff --git a/third_party/move/tools/move-unit-test/src/lib.rs b/third_party/move/tools/move-unit-test/src/lib.rs index 4595059dcdd51..c2f34fb8f15b6 100644 --- a/third_party/move/tools/move-unit-test/src/lib.rs +++ b/third_party/move/tools/move-unit-test/src/lib.rs @@ -8,7 +8,9 @@ pub mod test_runner; use crate::test_runner::TestRunner; use clap::*; -use move_command_line_common::files::verify_and_create_named_address_mapping; +use move_command_line_common::{ + env::get_move_compiler_v2_from_env, files::verify_and_create_named_address_mapping, +}; use move_compiler::{ self, diagnostics::{self, codes::Severity}, @@ -16,7 +18,10 @@ use move_compiler::{ unit_test::{self, TestPlan}, Compiler, Flags, PASS_CFGIR, }; +use move_compiler_v2::plan_builder as plan_builder_v2; use move_core_types::{effects::ChangeSet, language_storage::ModuleId}; +use move_model::metadata::{CompilerVersion, LanguageVersion}; +use move_package::compilation::compiled_package::build_and_report_v2_driver; use move_vm_runtime::native_functions::NativeFunctionTable; use std::{ collections::BTreeMap, @@ -156,37 +161,59 @@ impl UnitTestingConfig { ) -> Option { let addresses = verify_and_create_named_address_mapping(self.named_address_values.clone()).ok()?; - let (files, comments_and_compiler_res) = Compiler::from_files( - source_files, - deps, - addresses, - Flags::testing().set_skip_attribute_checks(false), - KnownAttribute::get_all_attribute_names(), - ) - .run::() - .unwrap(); - let (_, compiler) = - diagnostics::unwrap_or_report_diagnostics(&files, comments_and_compiler_res); - - let (mut compiler, cfgir) = compiler.into_ast(); - let compilation_env = compiler.compilation_env(); - let test_plan = unit_test::plan_builder::construct_test_plan(compilation_env, None, &cfgir); 
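The replacement hunk that follows gates test-plan construction on which compiler the environment selects (`get_move_compiler_v2_from_env`), building with `build_and_report_v2_driver` and `plan_builder_v2::construct_test_plan` in the v2 case and falling back to the existing v1 pipeline otherwise. As a rough sketch of that selection pattern in isolation; the environment-variable name below is a placeholder for illustration, not necessarily the variable the helper actually reads.

use std::env;

/// Illustrative only: choose a compiler path based on an environment variable.
/// The variable name is a placeholder, not necessarily what
/// `get_move_compiler_v2_from_env` reads internally.
fn use_compiler_v2() -> bool {
    env::var("MOVE_COMPILER_V2")
        .map(|v| {
            let v = v.to_lowercase();
            v == "1" || v == "true"
        })
        .unwrap_or(false)
}

fn main() {
    if use_compiler_v2() {
        println!("building test plan with compiler v2");
    } else {
        println!("building test plan with compiler v1");
    }
}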
- - if let Err(diags) = compilation_env.check_diags_at_or_above_severity( - if self.ignore_compile_warnings { - Severity::NonblockingError - } else { - Severity::Warning - }, - ) { - diagnostics::report_diagnostics(&files, diags); - } + let (test_plan, files, units) = if get_move_compiler_v2_from_env() { + let options = move_compiler_v2::Options { + compile_test_code: true, + testing: true, + sources: source_files, + dependencies: deps, + compiler_version: Some(CompilerVersion::latest_stable()), + language_version: Some(LanguageVersion::latest_stable()), + named_address_mapping: addresses + .iter() + .map(|(string, num_addr)| format!("{}={}", string, num_addr)) + .collect(), + ..Default::default() + }; + let (files, units, opt_env) = build_and_report_v2_driver(options).unwrap(); + let env = opt_env.expect("v2 driver should return env"); + let test_plan = plan_builder_v2::construct_test_plan(&env, None); + (test_plan, files, units) + } else { + let (files, comments_and_compiler_res) = Compiler::from_files( + source_files, + deps, + addresses, + Flags::testing().set_skip_attribute_checks(false), + KnownAttribute::get_all_attribute_names(), + ) + .run::() + .unwrap(); + let (_, compiler) = + diagnostics::unwrap_or_report_diagnostics(&files, comments_and_compiler_res); + + let (mut compiler, cfgir) = compiler.into_ast(); + let compilation_env = compiler.compilation_env(); + let test_plan = + unit_test::plan_builder::construct_test_plan(compilation_env, None, &cfgir); + + if let Err(diags) = compilation_env.check_diags_at_or_above_severity( + if self.ignore_compile_warnings { + Severity::NonblockingError + } else { + Severity::Warning + }, + ) { + diagnostics::report_diagnostics(&files, diags); + } - let compilation_result = compiler.at_cfgir(cfgir).build(); + let compilation_result = compiler.at_cfgir(cfgir).build(); - let (units, warnings) = - diagnostics::unwrap_or_report_diagnostics(&files, compilation_result); - diagnostics::report_warnings(&files, warnings); + let (units, warnings) = + diagnostics::unwrap_or_report_diagnostics(&files, compilation_result); + diagnostics::report_warnings(&files, warnings); + (test_plan, files, units) + }; test_plan.map(|tests| TestPlan::new(tests, files, units)) } diff --git a/third_party/move/tools/move-unit-test/tests/test_sources/abort_location.exp b/third_party/move/tools/move-unit-test/tests/test_sources/abort_location.exp new file mode 100644 index 0000000000000..fd6103f388abd --- /dev/null +++ b/third_party/move/tools/move-unit-test/tests/test_sources/abort_location.exp @@ -0,0 +1,29 @@ +Running Move unit tests +[ FAIL ] 0xc0ffee::Example::test_validate_parameters +0xc0ffee::Example::test_validate_parameters +Output: Ok(Changes { accounts: {} }) + +Test failures: + +Failures in 0xc0ffee::Example: + +┌── test_validate_parameters ────── +│ error[E11001]: test failure +│ ┌─ abort_location.move:36:13 +│ │ +│ 9 │ fun validate_parameters( +│ │ ------------------- In this function in 0xc0ffee::Example +│ · +│ 36 │ ╭ assert!( +│ 37 │ │ is_valid_a, +│ 38 │ │ if (is_positive) E_CONDITION_B else E_CONDITION_C +│ 39 │ │ ); +│ │ ╰─────────────^ Test was not expected to error, but it aborted with code 2 originating in the module 0000000000000000000000000000000000000000000000000000000000c0ffee::Example rooted here +│ +│ +│ stack trace +│ Example::test_validate_parameters(tests/test_sources/abort_location.move:86-92) +│ +└────────────────── + +Test result: FAILED. 
Total tests: 1; passed: 0; failed: 1 diff --git a/third_party/move/tools/move-unit-test/tests/test_sources/abort_location.move b/third_party/move/tools/move-unit-test/tests/test_sources/abort_location.move new file mode 100644 index 0000000000000..f9f337e2a6005 --- /dev/null +++ b/third_party/move/tools/move-unit-test/tests/test_sources/abort_location.move @@ -0,0 +1,94 @@ +module 0xc0ffee::Example { + use std::option::{Option}; + + const E_CONDITION_A: u64 = 1; + const E_CONDITION_B: u64 = 2; + const E_CONDITION_C: u64 = 3; + const E_CONDITION_D: u64 = 4; + + fun validate_parameters( + param_a: Option, // Optional first parameter to validate + param_b: Option, // Optional second parameter to validate + reference_value: u64, // Reference value for comparisons + threshold_value: u64, // Threshold for additional validation + flag: bool // Indicates a positive or negative condition + ) { + // Determine the context based on the flag + let is_positive = flag; + + // Check and validate param_a if it exists + if (option::is_some(¶m_a)) { + // Extract the value from the Option + let value_a = option::extract(&mut param_a); + + // Ensure the value is non-zero + assert!(value_a > 0, E_CONDITION_A); + + // Validate based on the condition (is_positive) + let is_valid_a = + if (is_positive) { + value_a > reference_value // For positive condition, value_a must be greater than reference_value + } else { + value_a < reference_value // For negative condition, value_a must be less than reference_value + }; + + // Assert that the validation passed + assert!( + is_valid_a, + if (is_positive) E_CONDITION_B else E_CONDITION_C + ); + }; + + // Check and validate param_b if it exists + if (option::is_some(¶m_b)) { + // Extract the value from the Option + let value_b = option::extract(&mut param_b); + + // Ensure the value is non-zero + assert!(value_b > 0, E_CONDITION_A); + + // Validate based on the condition (is_positive) + let is_valid_b = + if (is_positive) { + value_b < reference_value // For positive condition, value_b must be less than reference_value + } else { + value_b > reference_value // For negative condition, value_b must be greater than reference_value + }; + + // Assert that the validation passed + assert!( + is_valid_b, + if (is_positive) E_CONDITION_C else E_CONDITION_D + ); + + // Additional validation against the threshold value if it exists + if (threshold_value > 0) { + let is_valid_threshold = + if (is_positive) { + value_b > threshold_value // For positive condition, value_b must be greater than threshold_value + } else { + value_b < threshold_value // For negative condition, value_b must be less than threshold_value + }; + + // Assert that the threshold validation passed + assert!(is_valid_threshold, E_CONDITION_A); + } + }; + } + + #[test_only] + use std::option; + + #[test] + fun test_validate_parameters() { + // Passing Invalid param_a for positive condition + // This should throw E_CONDITION_B error + validate_parameters( + option::some(40), // Less than reference_value (60) + option::some(50), + 60, + 30, + true + ); + } +} diff --git a/third_party/move/tools/move-unit-test/tests/test_sources/abort_location.v2_exp b/third_party/move/tools/move-unit-test/tests/test_sources/abort_location.v2_exp new file mode 100644 index 0000000000000..ee9ce750daf17 --- /dev/null +++ b/third_party/move/tools/move-unit-test/tests/test_sources/abort_location.v2_exp @@ -0,0 +1,26 @@ +Running Move unit tests +[ FAIL ] 0xc0ffee::Example::test_validate_parameters +0xc0ffee::Example::test_validate_parameters 
+Output: Ok(Changes { accounts: {} }) + +Test failures: + +Failures in 0xc0ffee::Example: + +┌── test_validate_parameters ────── +│ error[E11001]: test failure +│ ┌─ abort_location.move:36:13 +│ │ +│ 9 │ fun validate_parameters( +│ │ ------------------- In this function in 0xc0ffee::Example +│ · +│ 36 │ assert!( +│ │ ^^^^^^ Test was not expected to error, but it aborted with code 2 originating in the module 0000000000000000000000000000000000000000000000000000000000c0ffee::Example rooted here +│ +│ +│ stack trace +│ Example::test_validate_parameters(tests/test_sources/abort_location.move:86-92) +│ +└────────────────── + +Test result: FAILED. Total tests: 1; passed: 0; failed: 1 diff --git a/third_party/move/tools/move-unit-test/tests/test_sources/missing_data.v2_exp b/third_party/move/tools/move-unit-test/tests/test_sources/missing_data.v2_exp index 2b61f4e0d5a72..9a63c9e43f498 100644 --- a/third_party/move/tools/move-unit-test/tests/test_sources/missing_data.v2_exp +++ b/third_party/move/tools/move-unit-test/tests/test_sources/missing_data.v2_exp @@ -23,7 +23,7 @@ Failures in 0x1::MissingData: │ 5 │ fun missing_data() acquires Missing { │ │ ------------ In this function in 0x1::MissingData │ 6 │ borrow_global(@0x0); -│ │ ^^^^^^^^^^^^^ Test was not expected to error, but it gave a MISSING_DATA (code 4008) error with error message: "Failed to borrow global resource from 0000000000000000000000000000000000000000000000000000000000000000". Error originating in the module 0000000000000000000000000000000000000000000000000000000000000001::MissingData rooted here +│ │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Test was not expected to error, but it gave a MISSING_DATA (code 4008) error with error message: "Failed to borrow global resource from 0000000000000000000000000000000000000000000000000000000000000000". Error originating in the module 0000000000000000000000000000000000000000000000000000000000000001::MissingData rooted here │ │ └────────────────── @@ -36,7 +36,7 @@ Failures in 0x1::MissingData: │ 5 │ fun missing_data() acquires Missing { │ │ ------------ In this function in 0x1::MissingData │ 6 │ borrow_global(@0x0); -│ │ ^^^^^^^^^^^^^ Test was not expected to error, but it gave a MISSING_DATA (code 4008) error with error message: "Failed to borrow global resource from 0000000000000000000000000000000000000000000000000000000000000000". Error originating in the module 0000000000000000000000000000000000000000000000000000000000000001::MissingData rooted here +│ │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Test was not expected to error, but it gave a MISSING_DATA (code 4008) error with error message: "Failed to borrow global resource from 0000000000000000000000000000000000000000000000000000000000000000". Error originating in the module 0000000000000000000000000000000000000000000000000000000000000001::MissingData rooted here │ │ │ stack trace diff --git a/third_party/move/tools/move-unit-test/tests/test_sources/out_of_gas.v2_exp b/third_party/move/tools/move-unit-test/tests/test_sources/out_of_gas.v2_exp index ee93f3921bb8c..f33824c63ed26 100644 --- a/third_party/move/tools/move-unit-test/tests/test_sources/out_of_gas.v2_exp +++ b/third_party/move/tools/move-unit-test/tests/test_sources/out_of_gas.v2_exp @@ -20,12 +20,12 @@ Failures in 0x42::m: ┌── t1 ────── │ error[E11001]: test failure -│ ┌─ out_of_gas.move:10:10 +│ ┌─ out_of_gas.move:10:5 │ │ │ 9 │ fun t1() { │ │ -- In this function in 0x42::m │ 10 │ loop {} -│ │ ^^ Test did not error as expected. 
Expected test to give an arithmetic error originating in the module 0000000000000000000000000000000000000000000000000000000000000042::m but instead it ran out of gas in the module 0000000000000000000000000000000000000000000000000000000000000042::m rooted here +│ │ ^^^^^^^ Test did not error as expected. Expected test to give an arithmetic error originating in the module 0000000000000000000000000000000000000000000000000000000000000042::m but instead it ran out of gas in the module 0000000000000000000000000000000000000000000000000000000000000042::m rooted here │ │ └────────────────── @@ -33,12 +33,12 @@ Failures in 0x42::m: ┌── t2 ────── │ error[E11001]: test failure -│ ┌─ out_of_gas.move:16:7 +│ ┌─ out_of_gas.move:16:5 │ │ │ 15 │ fun t2() { │ │ -- In this function in 0x42::m │ 16 │ 0 - 1; -│ │ ^ Test did not error as expected. Expected test to run out of gas in the module 0000000000000000000000000000000000000000000000000000000000000042::m but instead it gave an arithmetic error with error message: "Subtraction overflow". Error originating in the module 0000000000000000000000000000000000000000000000000000000000000042::m rooted here +│ │ ^^^^^ Test did not error as expected. Expected test to run out of gas in the module 0000000000000000000000000000000000000000000000000000000000000042::m but instead it gave an arithmetic error with error message: "Subtraction overflow". Error originating in the module 0000000000000000000000000000000000000000000000000000000000000042::m rooted here │ │ └────────────────── diff --git a/third_party/move/tools/move-unit-test/tests/test_sources/proposal_test.v2_exp b/third_party/move/tools/move-unit-test/tests/test_sources/proposal_test.v2_exp index 2bfad174e6b53..83b5ab9ae67e3 100644 --- a/third_party/move/tools/move-unit-test/tests/test_sources/proposal_test.v2_exp +++ b/third_party/move/tools/move-unit-test/tests/test_sources/proposal_test.v2_exp @@ -30,7 +30,7 @@ Failures in 0x1::Module: │ │ ------- In this function in 0x1::Module │ · │ 102 │ assert!(d(@0x2, 6), 3); -│ │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^ Test was not expected to error, but it aborted with code 3 originating in the module 0000000000000000000000000000000000000000000000000000000000000001::Module rooted here +│ │ ^^^^^^ Test was not expected to error, but it aborted with code 3 originating in the module 0000000000000000000000000000000000000000000000000000000000000001::Module rooted here │ │ └────────────────── diff --git a/types/Cargo.toml b/types/Cargo.toml index 26e8db6d58771..5296eb1af3eab 100644 --- a/types/Cargo.toml +++ b/types/Cargo.toml @@ -18,7 +18,6 @@ aptos-bitvec = { workspace = true } aptos-crypto = { workspace = true } aptos-crypto-derive = { workspace = true } aptos-dkg = { workspace = true } -aptos-experimental-runtimes = { workspace = true } aptos-infallible = { workspace = true } arbitrary = { workspace = true, features = ["derive"], optional = true } ark-bn254 = { workspace = true } @@ -28,7 +27,6 @@ ark-groth16 = { workspace = true } ark-relations = { workspace = true } ark-serialize = { workspace = true } ark-std = { workspace = true } -arr_macro = { workspace = true } base64 = { workspace = true } bcs = { workspace = true } bytes = { workspace = true } diff --git a/types/src/on_chain_config/mod.rs b/types/src/on_chain_config/mod.rs index 03b5c5f2a409a..4a1bd1d9c023f 100644 --- a/types/src/on_chain_config/mod.rs +++ b/types/src/on_chain_config/mod.rs @@ -238,6 +238,7 @@ pub fn struct_tag_for_config(config_id: ConfigID) -> StructTag { #[derive(Debug, Deserialize, Serialize)] pub struct 
diff --git a/types/src/on_chain_config/mod.rs b/types/src/on_chain_config/mod.rs
index 03b5c5f2a409a..4a1bd1d9c023f 100644
--- a/types/src/on_chain_config/mod.rs
+++ b/types/src/on_chain_config/mod.rs
@@ -238,6 +238,7 @@ pub fn struct_tag_for_config(config_id: ConfigID) -> StructTag {
 #[derive(Debug, Deserialize, Serialize)]
 pub struct ConfigurationResource {
     epoch: u64,
+    /// Unix epoch timestamp (in microseconds) of the last reconfiguration time.
     last_reconfiguration_time: u64,
     events: EventHandle,
 }
@@ -247,7 +248,8 @@ impl ConfigurationResource {
         self.epoch
     }
 
-    pub fn last_reconfiguration_time(&self) -> u64 {
+    /// Return the Unix epoch timestamp (in microseconds) of the last reconfiguration.
+    pub fn last_reconfiguration_time_micros(&self) -> u64 {
         self.last_reconfiguration_time
     }
 
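Note on the rename above: `last_reconfiguration_time_micros()` makes the unit explicit; the on-chain value is microseconds since the Unix epoch. A minimal, illustrative sketch of the conversion a caller might do for logging or comparisons; the helper name and the sample value are assumptions, not part of this patch:

```rust
use std::time::Duration;

/// Illustrative helper: interpret a microsecond Unix-epoch timestamp as a Duration.
fn micros_to_duration(timestamp_micros: u64) -> Duration {
    Duration::from_micros(timestamp_micros)
}

fn main() {
    // Example value: Monday, Jan 1, 2024 00:00:00 UTC, expressed in microseconds.
    let last_reconfig_micros: u64 = 1_704_067_200_000_000;
    // Whole seconds since the Unix epoch.
    assert_eq!(last_reconfig_micros / 1_000_000, 1_704_067_200);
    assert_eq!(micros_to_duration(last_reconfig_micros).as_secs(), 1_704_067_200);
}
```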
diff --git a/types/src/on_chain_config/timed_features.rs b/types/src/on_chain_config/timed_features.rs
index ea7cafa68a7b9..aa590a7026db8 100644
--- a/types/src/on_chain_config/timed_features.rs
+++ b/types/src/on_chain_config/timed_features.rs
@@ -6,10 +6,6 @@ use serde::Serialize;
 use strum::{EnumCount, IntoEnumIterator};
 use strum_macros::{EnumCount as EnumCountMacro, EnumIter};
 
-// A placeholder that can be used to represent activation times that have not been determined.
-const NOT_YET_SPECIFIED: u64 = END_OF_TIME; /* Thursday, December 31, 2099 11:59:59 PM */
-
-pub const END_OF_TIME: u64 = 4102444799000; /* Thursday, December 31, 2099 11:59:59 PM */
 #[derive(Debug, EnumCountMacro, EnumIter, Clone, Copy, Eq, PartialEq)]
 pub enum TimedFeatureFlag {
     DisableInvariantViolationCheckInSwapLoc,
@@ -23,7 +19,8 @@ pub enum TimedFeatureFlag {
 enum TimedFeaturesImpl {
     OnNamedChain {
         named_chain: NamedChain,
-        timestamp: u64,
+        // Unix Epoch timestamp in microseconds.
+        timestamp_micros: u64,
     },
     EnableAll,
 }
@@ -44,7 +41,6 @@ impl TimedFeatureOverride {
             Replay => match flag {
                 LimitTypeTagSize => true,
                 ModuleComplexityCheck => true,
-                EntryCompatibility => true,
                 // Add overrides for replay here.
                 _ => return None,
             },
@@ -57,19 +53,22 @@
 }
 
 impl TimedFeatureFlag {
-    pub const fn activation_time_on(&self, chain_id: &NamedChain) -> u64 {
+    /// Returns the activation time of the feature on the given chain.
+    /// The time is specified as a Unix Epoch timestamp in microseconds.
+    pub const fn activation_time_micros_on(&self, chain_id: &NamedChain) -> u64 {
         use NamedChain::*;
         use TimedFeatureFlag::*;
 
         match (self, chain_id) {
-            (DisableInvariantViolationCheckInSwapLoc, TESTNET) => NOT_YET_SPECIFIED,
-            (DisableInvariantViolationCheckInSwapLoc, MAINNET) => NOT_YET_SPECIFIED,
+            // Enabled from the beginning of time.
+            (DisableInvariantViolationCheckInSwapLoc, TESTNET) => 0,
+            (DisableInvariantViolationCheckInSwapLoc, MAINNET) => 0,
 
-            (ModuleComplexityCheck, TESTNET) => 1719356400000, /* Tuesday, June 21, 2024 16:00:00 AM GMT-07:00 */
-            (ModuleComplexityCheck, MAINNET) => 1720033200000, /* Wednesday, July 3, 2024 12:00:00 AM GMT-07:00 */
+            (ModuleComplexityCheck, TESTNET) => 1_719_356_400_000_000, /* Tuesday, June 25, 2024 16:00:00 GMT-07:00 */
+            (ModuleComplexityCheck, MAINNET) => 1_720_033_200_000_000, /* Wednesday, July 3, 2024 12:00:00 AM GMT-07:00 */
 
-            (EntryCompatibility, TESTNET) => 1730923200000, /* Wednesday, Nov 6, 2024 12:00:00 AM GMT-07:00 */
-            (EntryCompatibility, MAINNET) => 1731441600000, /* Tuesday, Nov 12, 2024 12:00:00 AM GMT-07:00 */
+            (EntryCompatibility, TESTNET) => 1_730_923_200_000_000, /* Wednesday, Nov 6, 2024 12:00:00 AM GMT-07:00 */
+            (EntryCompatibility, MAINNET) => 1_731_441_600_000_000, /* Tuesday, Nov 12, 2024 12:00:00 AM GMT-07:00 */
 
             // If unspecified, a timed feature is considered enabled from the very beginning of time.
             _ => 0,
@@ -84,11 +83,12 @@ pub struct TimedFeaturesBuilder {
 }
 
 impl TimedFeaturesBuilder {
-    pub fn new(chain_id: ChainId, timestamp: u64) -> Self {
+    /// `timestamp_micros` is a Unix Epoch timestamp in microseconds.
+    pub fn new(chain_id: ChainId, timestamp_micros: u64) -> Self {
         let inner = match NamedChain::from_chain_id(&chain_id) {
             Ok(named_chain) => TimedFeaturesImpl::OnNamedChain {
                 named_chain,
-                timestamp,
+                timestamp_micros,
             },
             Err(_) => TimedFeaturesImpl::EnableAll, // Unknown chain => enable all features by default.
         };
@@ -125,8 +125,8 @@ impl TimedFeaturesBuilder {
         match &self.inner {
             OnNamedChain {
                 named_chain,
-                timestamp,
-            } => *timestamp >= flag.activation_time_on(named_chain),
+                timestamp_micros,
+            } => *timestamp_micros >= flag.activation_time_micros_on(named_chain),
             EnableAll => true,
         }
     }
@@ -161,4 +161,86 @@ mod test {
         let testing = assert_ok!(bcs::to_bytes(&TimedFeatureOverride::Testing));
         assert_ne!(replay, testing);
     }
+
+    #[test]
+    fn test_timed_features_activation() {
+        use TimedFeatureFlag::*;
+        // Monday, Jan 01, 2024 12:00:00.000 AM GMT
+        let jan_1_2024_micros: u64 = 1_704_067_200_000_000;
+        // Friday, November 15, 2024 12:00:00 AM GMT
+        let nov_15_2024_micros: u64 = 1_731_628_800_000_000;
+
+        // Check testnet on Jan 1, 2024.
+        let testnet_jan_1_2024 = TimedFeaturesBuilder::new(ChainId::testnet(), jan_1_2024_micros);
+        assert!(
+            testnet_jan_1_2024.is_enabled(DisableInvariantViolationCheckInSwapLoc),
+            "DisableInvariantViolationCheckInSwapLoc should always be enabled"
+        );
+        assert!(
+            testnet_jan_1_2024.is_enabled(LimitTypeTagSize),
+            "LimitTypeTagSize should always be enabled"
+        );
+        assert!(
+            !testnet_jan_1_2024.is_enabled(ModuleComplexityCheck),
+            "ModuleComplexityCheck should be disabled on Jan 1, 2024 on testnet"
+        );
+        assert!(
+            !testnet_jan_1_2024.is_enabled(EntryCompatibility),
+            "EntryCompatibility should be disabled on Jan 1, 2024 on testnet"
+        );
+        // Check testnet on Nov 15, 2024.
+        let testnet_nov_15_2024 = TimedFeaturesBuilder::new(ChainId::testnet(), nov_15_2024_micros);
+        assert!(
+            testnet_nov_15_2024.is_enabled(DisableInvariantViolationCheckInSwapLoc),
+            "DisableInvariantViolationCheckInSwapLoc should always be enabled"
+        );
+        assert!(
+            testnet_nov_15_2024.is_enabled(LimitTypeTagSize),
+            "LimitTypeTagSize should always be enabled"
+        );
+        assert!(
+            testnet_nov_15_2024.is_enabled(ModuleComplexityCheck),
+            "ModuleComplexityCheck should be enabled on Nov 15, 2024 on testnet"
+        );
+        assert!(
+            testnet_nov_15_2024.is_enabled(EntryCompatibility),
+            "EntryCompatibility should be enabled on Nov 15, 2024 on testnet"
+        );
+        // Check mainnet on Jan 1, 2024.
+        let mainnet_jan_1_2024 = TimedFeaturesBuilder::new(ChainId::mainnet(), jan_1_2024_micros);
+        assert!(
+            mainnet_jan_1_2024.is_enabled(DisableInvariantViolationCheckInSwapLoc),
+            "DisableInvariantViolationCheckInSwapLoc should always be enabled"
+        );
+        assert!(
+            mainnet_jan_1_2024.is_enabled(LimitTypeTagSize),
+            "LimitTypeTagSize should always be enabled"
+        );
+        assert!(
+            !mainnet_jan_1_2024.is_enabled(ModuleComplexityCheck),
+            "ModuleComplexityCheck should be disabled on Jan 1, 2024 on mainnet"
+        );
+        assert!(
+            !mainnet_jan_1_2024.is_enabled(EntryCompatibility),
+            "EntryCompatibility should be disabled on Jan 1, 2024 on mainnet"
+        );
+        // Check mainnet on Nov 15, 2024.
+        let mainnet_nov_15_2024 = TimedFeaturesBuilder::new(ChainId::mainnet(), nov_15_2024_micros);
+        assert!(
+            mainnet_nov_15_2024.is_enabled(DisableInvariantViolationCheckInSwapLoc),
+            "DisableInvariantViolationCheckInSwapLoc should always be enabled"
+        );
+        assert!(
+            mainnet_nov_15_2024.is_enabled(LimitTypeTagSize),
+            "LimitTypeTagSize should always be enabled"
+        );
+        assert!(
+            mainnet_nov_15_2024.is_enabled(ModuleComplexityCheck),
+            "ModuleComplexityCheck should be enabled on Nov 15, 2024 on mainnet"
+        );
+        assert!(
+            mainnet_nov_15_2024.is_enabled(EntryCompatibility),
+            "EntryCompatibility should be enabled on Nov 15, 2024 on mainnet"
+        );
+    }
 }
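The activation check in `is_enabled` above is a plain `>=` comparison, so both operands must be in microseconds; a caller passing a millisecond timestamp would see every dated flag as inactive. A small standalone sketch of that pitfall, using the testnet `ModuleComplexityCheck` activation time from the table above (the function and constant names are illustrative, not part of this patch):

```rust
/// Illustrative version of the comparison performed by `TimedFeaturesBuilder::is_enabled`:
/// a flag is on once the chain's last reconfiguration time reaches the flag's activation time.
fn is_activated(last_reconfig_micros: u64, activation_time_micros: u64) -> bool {
    last_reconfig_micros >= activation_time_micros
}

fn main() {
    // Testnet activation time for ModuleComplexityCheck, in microseconds (from the table above).
    const MODULE_COMPLEXITY_CHECK_TESTNET_MICROS: u64 = 1_719_356_400_000_000;

    // Nov 15, 2024 expressed in *milliseconds* compares as far in the past: the flag looks off.
    assert!(!is_activated(1_731_628_800_000, MODULE_COMPLEXITY_CHECK_TESTNET_MICROS));

    // The same instant expressed in microseconds is past the activation time: the flag is on.
    assert!(is_activated(1_731_628_800_000_000, MODULE_COMPLEXITY_CHECK_TESTNET_MICROS));
}
```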
diff --git a/types/src/state_store/errors.rs b/types/src/state_store/errors.rs
index 838d600519244..c0240d67c70fe 100644
--- a/types/src/state_store/errors.rs
+++ b/types/src/state_store/errors.rs
@@ -5,7 +5,7 @@
 use thiserror::Error;
 
 #[derive(Debug, Error)]
-pub enum StateviewError {
+pub enum StateViewError {
     #[error("{0} not found.")]
     NotFound(String),
     /// Other non-classified error.
@@ -15,7 +15,7 @@ pub enum StateviewError {
     BcsError(#[from] bcs::Error),
 }
 
-impl From<anyhow::Error> for StateviewError {
+impl From<anyhow::Error> for StateViewError {
     fn from(error: anyhow::Error) -> Self {
         Self::Other(format!("{}", error))
     }
diff --git a/types/src/state_store/in_memory_state_view.rs b/types/src/state_store/in_memory_state_view.rs
deleted file mode 100644
index a87320868f532..0000000000000
--- a/types/src/state_store/in_memory_state_view.rs
+++ /dev/null
@@ -1,38 +0,0 @@
-// Copyright © Aptos Foundation
-// Parts of the project are originally copyright © Meta Platforms, Inc.
-// SPDX-License-Identifier: Apache-2.0
-
-use crate::state_store::{
-    state_key::StateKey, state_storage_usage::StateStorageUsage, state_value::StateValue, Result,
-    TStateView,
-};
-use serde::{Deserialize, Serialize};
-use std::collections::HashMap;
-
-// A State view backed by in-memory hashmap.
-#[derive(Clone, Debug, Deserialize, Serialize)]
-pub struct InMemoryStateView {
-    state_data: HashMap<StateKey, StateValue>,
-}
-
-impl InMemoryStateView {
-    pub fn new(state_data: HashMap<StateKey, StateValue>) -> Self {
-        Self { state_data }
-    }
-}
-
-impl TStateView for InMemoryStateView {
-    type Key = StateKey;
-
-    fn get_state_value(&self, state_key: &StateKey) -> Result<Option<StateValue>> {
-        Ok(self.state_data.get(state_key).cloned())
-    }
-
-    fn get_usage(&self) -> Result<StateStorageUsage> {
-        Ok(StateStorageUsage::new_untracked())
-    }
-
-    fn as_in_memory_state_view(&self) -> InMemoryStateView {
-        self.clone()
-    }
-}
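With `InMemoryStateView` deleted here and the `as_in_memory_state_view` hook removed from `TStateView` in the next hunk, code that only needs a HashMap-backed read-only view can implement the slimmed-down trait directly (or use `MockStateView` in tests). A sketch against the signatures introduced by this patch, assuming a crate that depends on aptos-types; the type name `HashMapStateView` is illustrative only:

```rust
// Illustrative replacement for the deleted InMemoryStateView: a HashMap-backed,
// read-only view implementing the trimmed-down TStateView trait from this patch.
use aptos_types::state_store::{
    state_key::StateKey, state_storage_usage::StateStorageUsage, state_value::StateValue,
    StateViewResult, TStateView,
};
use std::collections::HashMap;

pub struct HashMapStateView {
    state_data: HashMap<StateKey, StateValue>,
}

impl HashMapStateView {
    pub fn new(state_data: HashMap<StateKey, StateValue>) -> Self {
        Self { state_data }
    }
}

impl TStateView for HashMapStateView {
    type Key = StateKey;

    // Look up a value by key; absent keys are simply `None`, never an error.
    fn get_state_value(&self, state_key: &StateKey) -> StateViewResult<Option<StateValue>> {
        Ok(self.state_data.get(state_key).cloned())
    }

    // This view does not track storage usage.
    fn get_usage(&self) -> StateViewResult<StateStorageUsage> {
        Ok(StateStorageUsage::new_untracked())
    }
}
```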
diff --git a/types/src/state_store/mod.rs b/types/src/state_store/mod.rs
index e85552c84cb86..7456b439d6287 100644
--- a/types/src/state_store/mod.rs
+++ b/types/src/state_store/mod.rs
@@ -4,31 +4,25 @@
 use crate::{
     account_address::AccountAddress,
     state_store::{
-        errors::StateviewError, in_memory_state_view::InMemoryStateView, state_key::StateKey,
-        state_storage_usage::StateStorageUsage, state_value::StateValue,
+        errors::StateViewError, state_key::StateKey, state_storage_usage::StateStorageUsage,
+        state_value::StateValue,
     },
     transaction::Version,
 };
 use aptos_crypto::HashValue;
-use aptos_experimental_runtimes::thread_manager::THREAD_MANAGER;
-use arr_macro::arr;
 use bytes::Bytes;
 use move_core_types::move_resource::MoveResource;
-use rayon::prelude::*;
 #[cfg(any(test, feature = "testing"))]
 use std::hash::Hash;
 use std::{collections::HashMap, ops::Deref};
 
 pub mod errors;
-pub mod in_memory_state_view;
 pub mod state_key;
 pub mod state_storage_usage;
 pub mod state_value;
 pub mod table;
 
-pub const NUM_STATE_SHARDS: usize = 16;
-
-pub type Result<T, E = StateviewError> = std::result::Result<T, E>;
+pub type StateViewResult<T, E = StateViewError> = std::result::Result<T, E>;
 
 /// A trait that defines a read-only snapshot of the global state. It is passed to the VM for
 /// transaction execution, during which the VM is guaranteed to read anything at the given state.
@@ -41,20 +35,16 @@ pub trait TStateView {
     }
 
     /// Gets the state value bytes for a given state key.
-    fn get_state_value_bytes(&self, state_key: &Self::Key) -> Result<Option<Bytes>> {
+    fn get_state_value_bytes(&self, state_key: &Self::Key) -> StateViewResult<Option<Bytes>> {
         let val_opt = self.get_state_value(state_key)?;
         Ok(val_opt.map(|val| val.bytes().clone()))
     }
 
     /// Gets the state value for a given state key.
-    fn get_state_value(&self, state_key: &Self::Key) -> Result<Option<StateValue>>;
+    fn get_state_value(&self, state_key: &Self::Key) -> StateViewResult<Option<StateValue>>;
 
     /// Get state storage usage info at epoch ending.
-    fn get_usage(&self) -> Result<StateStorageUsage>;
-
-    fn as_in_memory_state_view(&self) -> InMemoryStateView {
-        unreachable!("in-memory state view conversion not supported yet")
-    }
+    fn get_usage(&self) -> StateViewResult<StateStorageUsage>;
 }
 
 pub trait StateView: TStateView<Key = StateKey> {}
@@ -91,11 +81,11 @@ where
         self.deref().id()
     }
 
-    fn get_state_value(&self, state_key: &K) -> Result<Option<StateValue>> {
+    fn get_state_value(&self, state_key: &K) -> StateViewResult<Option<StateValue>> {
         self.deref().get_state_value(state_key)
     }
 
-    fn get_usage(&self) -> Result<StateStorageUsage> {
+    fn get_usage(&self) -> StateViewResult<StateStorageUsage> {
         self.deref().get_usage()
     }
 }
@@ -123,69 +113,23 @@ impl<K> MockStateView<K> {
 
 impl<K: Clone + Eq + Hash> TStateView for MockStateView<K> {
     type Key = K;
 
-    fn get_state_value(&self, state_key: &Self::Key) -> Result<Option<StateValue>, StateviewError> {
+    fn get_state_value(
+        &self,
+        state_key: &Self::Key,
+    ) -> StateViewResult<Option<StateValue>, StateViewError> {
         Ok(self.data.get(state_key).cloned())
     }
 
-    fn get_usage(&self) -> std::result::Result<StateStorageUsage, StateviewError> {
+    fn get_usage(&self) -> std::result::Result<StateStorageUsage, StateViewError> {
         unimplemented!("Irrelevant for tests");
     }
 }
 
-#[derive(Clone, Debug)]
-pub struct ShardedStateUpdates {
-    pub shards: [HashMap<StateKey, Option<StateValue>>; 16],
-}
-
-impl ShardedStateUpdates {
-    pub fn new_empty() -> Self {
-        Self {
-            shards: arr![HashMap::new(); 16],
-        }
-    }
-
-    pub fn all_shards_empty(&self) -> bool {
-        self.shards.iter().all(|shard| shard.is_empty())
-    }
-
-    pub fn total_len(&self) -> usize {
-        self.shards.iter().map(|shard| shard.len()).sum()
-    }
-
-    pub fn merge(&mut self, other: Self) {
-        self.shards
-            .par_iter_mut()
-            .zip_eq(other.shards.into_par_iter())
-            .for_each(|(l, r)| {
-                l.extend(r);
-            })
-    }
-
-    pub fn clone_merge(&mut self, other: &Self) {
-        THREAD_MANAGER.get_exe_cpu_pool().install(|| {
-            self.shards
-                .par_iter_mut()
-                .zip_eq(other.shards.par_iter())
-                .for_each(|(l, r)| {
-                    l.extend(r.clone());
-                })
-        })
-    }
-
-    pub fn insert(
-        &mut self,
-        key: StateKey,
-        value: Option<StateValue>,
-    ) -> Option<Option<StateValue>> {
-        self.shards[key.get_shard_id() as usize].insert(key, value)
-    }
-}
-
 pub trait MoveResourceExt: MoveResource {
     fn fetch_move_resource(
         state_view: &dyn StateView,
         address: &AccountAddress,
-    ) -> Result<Option<Self>> {
+    ) -> StateViewResult<Option<Self>> {
         let state_key = StateKey::resource_typed::<Self>(address)?;
         Ok(state_view
             .get_state_value_bytes(&state_key)?
diff --git a/vm-validator/src/unit_tests/vm_validator_test.rs b/vm-validator/src/unit_tests/vm_validator_test.rs
index 62990d9d093dd..f4bc8b915354d 100644
--- a/vm-validator/src/unit_tests/vm_validator_test.rs
+++ b/vm-validator/src/unit_tests/vm_validator_test.rs
@@ -7,7 +7,9 @@ use aptos_cached_packages::aptos_stdlib;
 use aptos_crypto::{ed25519::Ed25519PrivateKey, PrivateKey, Uniform};
 use aptos_db::AptosDB;
 use aptos_gas_schedule::{InitialGasSchedule, TransactionGasParameters};
-use aptos_storage_interface::{state_view::LatestDbStateCheckpointView, DbReaderWriter};
+use aptos_storage_interface::{
+    state_store::state_view::db_state_view::LatestDbStateCheckpointView, DbReaderWriter,
+};
 use aptos_types::{
     account_address, account_config,
     chain_id::ChainId,
diff --git a/vm-validator/src/vm_validator.rs b/vm-validator/src/vm_validator.rs
index 6fff21434fe05..aa3c90e2e8ac3 100644
--- a/vm-validator/src/vm_validator.rs
+++ b/vm-validator/src/vm_validator.rs
@@ -5,8 +5,10 @@
 use anyhow::Result;
 use aptos_logger::info;
 use aptos_storage_interface::{
-    cached_state_view::CachedDbStateView,
-    state_view::{DbStateView, LatestDbStateCheckpointView},
+    state_store::state_view::{
+        cached_state_view::CachedDbStateView,
+        db_state_view::{DbStateView, LatestDbStateCheckpointView},
+    },
     DbReader,
 };
 use aptos_types::{