diff --git a/cli/src/lib.rs b/cli/src/lib.rs
index f3b07c6f282..bb72b92c049 100644
--- a/cli/src/lib.rs
+++ b/cli/src/lib.rs
@@ -137,7 +137,7 @@ impl NetworkRelay {
             tokio::select! {
                 // Receive message from network
                 Some(msg) = receiver.recv() => self.handle_message(msg).await,
-                _ = self.shutdown_notify.notified() => {
+                () = self.shutdown_notify.notified() => {
                     iroha_logger::info!("NetworkRelay is being shut down.");
                     break;
                 }
diff --git a/client/tests/integration/transfer_asset.rs b/client/tests/integration/transfer_asset.rs
index d8087401a8b..b62ba7dc216 100644
--- a/client/tests/integration/transfer_asset.rs
+++ b/client/tests/integration/transfer_asset.rs
@@ -26,9 +26,9 @@ fn simulate_transfer_fixed() {
     )
 }
 
-#[should_panic]
 #[test]
 #[ignore = "long"]
+#[should_panic(expected = "insufficient funds")]
 fn simulate_insufficient_funds() {
     simulate_transfer(
         Fixed::try_from(20_f64).expect("Valid"),
diff --git a/config/base/derive/src/utils.rs b/config/base/derive/src/utils.rs
index 3e8fa7e1f1c..36f79a76384 100644
--- a/config/base/derive/src/utils.rs
+++ b/config/base/derive/src/utils.rs
@@ -357,7 +357,9 @@ pub fn extract_box_generic(box_seg: &mut syn::PathSegment) -> &mut syn::Type {
         generics.args.len() == 1,
         "`Box` should have exactly one generic argument"
     );
-    let syn::GenericArgument::Type(generic_type) = generics.args.first_mut().expect("Can't be empty") else {
+    let syn::GenericArgument::Type(generic_type) =
+        generics.args.first_mut().expect("Can't be empty")
+    else {
         panic!("`Box` should have type as a generic argument")
     };
diff --git a/config/base/tests/simple.rs b/config/base/tests/simple.rs
index 9084f582e30..a33ccb4601e 100644
--- a/config/base/tests/simple.rs
+++ b/config/base/tests/simple.rs
@@ -11,11 +11,11 @@ use serde::{Deserialize, Serialize};
 struct ConfigurationProxy {
     /// Inner structure
     #[config(inner)]
-    optional_inner: Option<InnerConfigurationProxy>,
+    inner: Option<InnerConfigurationProxy>,
     #[config(serde_as_str)]
-    pub optional_string_wrapper: Option<StringWrapper>,
-    pub optional_string: Option<String>,
-    pub optional_data: Option<Data>,
+    pub string_wrapper: Option<StringWrapper>,
+    pub string: Option<String>,
+    pub data: Option<Data>,
 }
 
 #[derive(Clone, Debug, Deserialize, Serialize, Documented)]
@@ -33,13 +33,13 @@ struct Configuration {
 impl ConfigurationProxy {
     fn new_with_placeholders() -> Self {
         Self {
-            optional_inner: Some(InnerConfigurationProxy {
+            inner: Some(InnerConfigurationProxy {
                 a: Some("string".to_owned()),
                 b: Some(42),
             }),
-            optional_string_wrapper: Some(StringWrapper("string".to_owned())),
-            optional_string: Some("cool string".to_owned()),
-            optional_data: Some(Data {
+            string_wrapper: Some(StringWrapper("string".to_owned())),
+            string: Some("cool string".to_owned()),
+            data: Some(Data {
                 key: "key".to_owned(),
                 value: 34,
             }),
@@ -48,10 +48,10 @@ impl ConfigurationProxy {
 
     fn new_with_none() -> Self {
         Self {
-            optional_inner: None,
-            optional_string_wrapper: None,
-            optional_string: None,
-            optional_data: None,
+            inner: None,
+            string_wrapper: None,
+            string: None,
+            data: None,
         }
     }
 }
@@ -157,13 +157,10 @@ fn test_env_factory() -> TestEnv {
 fn test_proxy_load_from_env() {
     let config = ConfigurationProxy::new_with_placeholders();
     let env_config = ConfigurationProxy::from_env(&test_env_factory()).expect("valid env");
-    assert_eq!(&env_config.optional_data, &config.optional_data);
-    assert_eq!(
-        &env_config.optional_string_wrapper,
-        &config.optional_string_wrapper
-    );
-    assert_eq!(&env_config.optional_string, &config.optional_string);
-    assert_eq!(&env_config.optional_inner, &config.optional_inner);
+    assert_eq!(&env_config.data, &config.data);
+    assert_eq!(&env_config.string_wrapper, &config.string_wrapper);
+    assert_eq!(&env_config.string, &config.string);
+    assert_eq!(&env_config.inner, &config.inner);
 }
 
 #[test]
@@ -172,7 +169,7 @@ fn test_can_load_inner_without_the_wrapping_config() {
     env.remove_var("CONF_OPTIONAL_INNER");
     let config = ConfigurationProxy::new_with_placeholders();
     let env_config = ConfigurationProxy::from_env(&env).expect("valid env");
-    assert_eq!(&env_config.optional_inner, &config.optional_inner);
+    assert_eq!(&env_config.inner, &config.inner);
 }
 
 #[test]
@@ -180,7 +177,7 @@ fn test_proxy_combine_does_not_overload_with_none() {
     let config = ConfigurationProxy::new_with_none();
     let env_config = ConfigurationProxy::from_env(&test_env_factory()).expect("valid env");
     let combine_config = env_config.clone().override_with(config);
-    assert_eq!(&env_config.optional_data, &combine_config.optional_data);
+    assert_eq!(&env_config.data, &combine_config.data);
 }
 
 #[test]
diff --git a/config/src/iroha.rs b/config/src/iroha.rs
index cd11b80812e..e5cc9688ba2 100644
--- a/config/src/iroha.rs
+++ b/config/src/iroha.rs
@@ -268,7 +268,7 @@ mod tests {
     }
 
     #[test]
-    #[should_panic]
+    #[should_panic(expected = "Failed to parse Trusted Peers: ")]
     fn parse_trusted_peers_fail_duplicate_peer_id() {
         let trusted_peers_string = r#"[{"address":"127.0.0.1:1337", "public_key": "ed0120954C83A4220FAFFB2C1D23FC5225B3E7952D53ACBB2A065FF30C631E5E1D6B10"}, {"address":"127.0.0.1:1337", "public_key": "ed0120954C83A4220FAFFB2C1D23FC5225B3E7952D53ACBB2A065FF30C631E5E1D6B10"}, {"address":"localhost:1338", "public_key": "ed0120954C83A4220FAFFB2C1D23FC5225B3E7952D53ACBB2A065FF30C631E5E1D6B10"}, {"address": "195.162.0.1:23", "public_key": "ed0120954C83A4220FAFFB2C1D23FC5225B3E7952D53ACBB2A065FF30C631E5E1D6B10"}]"#;
         let _result: TrustedPeers =
diff --git a/configs/peer/executor.wasm b/configs/peer/executor.wasm
index 98e55964961..2ef54f969b6 100644
Binary files a/configs/peer/executor.wasm and b/configs/peer/executor.wasm differ
diff --git a/core/benches/blocks/apply_blocks.rs b/core/benches/blocks/apply_blocks.rs
index 6a996a4d9e1..f255922105c 100644
--- a/core/benches/blocks/apply_blocks.rs
+++ b/core/benches/blocks/apply_blocks.rs
@@ -42,7 +42,7 @@ impl WsvApplyBlocks {
                 .map(|instructions| {
                     let block =
                         create_block(&mut wsv, instructions, account_id.clone(), key_pair.clone());
-                    wsv.apply_without_execution(&block).map(|_| block)
+                    wsv.apply_without_execution(&block).map(|()| block)
                 })
                 .collect::, _>>()?
         };
diff --git a/core/src/block_sync.rs b/core/src/block_sync.rs
index 1f8d8fe1fcb..22adcfc2ef8 100644
--- a/core/src/block_sync.rs
+++ b/core/src/block_sync.rs
@@ -56,7 +56,7 @@ impl BlockSynchronizer {
         loop {
             tokio::select! {
                 _ = gossip_period.tick() => self.request_block().await,
-                _ = self.sumeragi.wsv_updated() => {
+                () = self.sumeragi.wsv_updated() => {
                     let (latest_hash, previous_hash) = self
                         .sumeragi
                         .apply_wsv(|wsv| (wsv.latest_block_hash(), wsv.previous_block_hash()));
diff --git a/core/src/gossiper.rs b/core/src/gossiper.rs
index 7f1ea21b690..5856dfd4a1b 100644
--- a/core/src/gossiper.rs
+++ b/core/src/gossiper.rs
@@ -79,7 +79,7 @@ impl TransactionGossiper {
         loop {
             tokio::select! {
                 _ = gossip_period.tick() => self.gossip_transactions(),
-                _ = self.sumeragi.wsv_updated() => {
+                () = self.sumeragi.wsv_updated() => {
                     self.wsv = self.sumeragi.wsv_clone();
                 }
                 transaction_gossip = message_receiver.recv() => {
@@ -118,7 +118,7 @@ impl TransactionGossiper {
 
             match AcceptedTransaction::accept(tx, transaction_limits) {
                 Ok(tx) => match self.queue.push(tx, &self.wsv) {
-                    Ok(_) => {}
+                    Ok(()) => {}
                     Err(crate::queue::Failure {
                         tx,
                         err: crate::queue::Error::InBlockchain,
diff --git a/core/src/kura.rs b/core/src/kura.rs
index 671b0c958c8..ec0052a76a4 100644
--- a/core/src/kura.rs
+++ b/core/src/kura.rs
@@ -165,7 +165,7 @@ impl Kura {
             let mut block_data_buffer = vec![0_u8; block.length.try_into()?];
 
             match block_store.read_block_data(block.start, &mut block_data_buffer) {
-                Ok(_) => match SignedBlock::decode_all_versioned(&block_data_buffer) {
+                Ok(()) => match SignedBlock::decode_all_versioned(&block_data_buffer) {
                     Ok(decoded_block) => {
                         if previous_block_hash != decoded_block.payload().header.previous_block_hash
                         {
@@ -416,7 +416,7 @@ impl BlockStore {
             .map_err(|e| Error::MkDir(e, store_path.to_path_buf()))
         {
             Err(e) => Err(e),
-            Ok(_) => {
+            Ok(()) => {
                 if let Err(e) = fs::File::options()
                     .read(true)
                     .write(true)
@@ -560,7 +560,7 @@ impl BlockStore {
                 hashes_file
                     .read_exact(&mut buffer)
                     .add_err_context(&path)
-                    .and_then(|_| HashOf::decode_all(&mut buffer.as_slice()).map_err(Error::Codec))
+                    .and_then(|()| HashOf::decode_all(&mut buffer.as_slice()).map_err(Error::Codec))
             })
             .collect()
     }
@@ -1036,7 +1036,7 @@ mod tests {
     }
 
     #[test]
-    #[should_panic]
+    #[should_panic(expected = "Kura must be able to lock the blockstore: ")]
    fn concurrent_lock() {
         let dir = tempfile::tempdir().unwrap();
         let _store = BlockStore::new(dir.path(), LockStatus::Unlocked);
diff --git a/core/src/query/cursor.rs b/core/src/query/cursor.rs
index b1ef3393c8f..c7e903de4dc 100644
--- a/core/src/query/cursor.rs
+++ b/core/src/query/cursor.rs
@@ -62,11 +62,7 @@ impl> Batched {
         self.cursor = if let Some(cursor) = self.cursor {
             if batch_size >= self.batch_size.get() {
-                let batch_size = self
-                    .batch_size
-                    .get()
-                    .try_into()
-                    .expect("usize should fit in u64");
+                let batch_size = self.batch_size.get().into();
                 Some(
                     cursor
                         .checked_add(batch_size)
@@ -76,12 +72,7 @@ impl> Batched {
                 None
             }
         } else if batch_size >= self.batch_size.get() {
-            Some(
-                self.batch_size
-                    .get()
-                    .try_into()
-                    .expect("usize should fit in u64"),
-            )
+            Some(self.batch_size.get().into())
         } else {
             None
         };
diff --git a/core/src/query/store.rs b/core/src/query/store.rs
index 432f3ab142c..92684de1e09 100644
--- a/core/src/query/store.rs
+++ b/core/src/query/store.rs
@@ -326,13 +326,19 @@ mod tests {
             .handle_query_output(query_output, &sorting, pagination, fetch_size)
             .unwrap()
             .into();
-        let Value::Vec(v) = batch else { panic!("not expected result") };
+        let Value::Vec(v) = batch else {
+            panic!("not expected result")
+        };
         counter += v.len();
 
         while cursor.cursor.is_some() {
-            let Ok(batched) = query_store_handle.handle_query_cursor(cursor) else { break };
+            let Ok(batched) = query_store_handle.handle_query_cursor(cursor) else {
+                break;
+            };
             let (batch, new_cursor) = batched.into();
-            let Value::Vec(v) = batch else { panic!("not expected result") };
+            let Value::Vec(v) = batch else {
+                panic!("not expected result")
+            };
             counter += v.len();
             cursor = new_cursor;
diff --git a/core/src/queue.rs b/core/src/queue.rs
index 9a3365750d5..bc3d860cd1f 100644
--- a/core/src/queue.rs
+++ b/core/src/queue.rs
@@ -46,9 +46,9 @@ impl AcceptedTransaction {
 #[derive(Debug)]
 pub struct Queue {
     /// The queue for transactions
-    queue: ArrayQueue>,
+    tx_hashes: ArrayQueue>,
     /// [`AcceptedTransaction`]s addressed by `Hash`
-    txs: DashMap, AcceptedTransaction>,
+    accepted_txs: DashMap, AcceptedTransaction>,
     /// Amount of transactions per user in the queue
     txs_per_user: DashMap,
     /// The maximum number of transactions in the queue
@@ -99,8 +99,8 @@ impl Queue {
     /// Makes queue from configuration
     pub fn from_configuration(cfg: &Configuration) -> Self {
         Self {
-            queue: ArrayQueue::new(cfg.max_transactions_in_queue as usize),
-            txs: DashMap::new(),
+            tx_hashes: ArrayQueue::new(cfg.max_transactions_in_queue as usize),
+            accepted_txs: DashMap::new(),
             txs_per_user: DashMap::new(),
             max_txs: cfg.max_transactions_in_queue as usize,
             max_txs_per_user: cfg.max_transactions_in_queue_per_user as usize,
@@ -140,7 +140,7 @@ impl Queue {
         &'wsv self,
         wsv: &'wsv WorldStateView,
     ) -> impl Iterator + 'wsv {
-        self.txs.iter().filter_map(|tx| {
+        self.accepted_txs.iter().filter_map(|tx| {
             if self.is_pending(tx.value(), wsv) {
                 return Some(tx.value().clone());
             }
@@ -151,7 +151,7 @@ impl Queue {
     /// Returns `n` randomly selected transaction from the queue.
     pub fn n_random_transactions(&self, n: u32, wsv: &WorldStateView) -> Vec {
-        self.txs
+        self.accepted_txs
             .iter()
             .filter(|e| self.is_pending(e.value(), wsv))
             .map(|e| e.value().clone())
@@ -193,9 +193,9 @@ impl Queue {
         }
 
         // Get `txs_len` before entry to avoid deadlock
-        let txs_len = self.txs.len();
+        let txs_len = self.accepted_txs.len();
         let hash = tx.payload().hash();
-        let entry = match self.txs.entry(hash) {
+        let entry = match self.accepted_txs.entry(hash) {
             Entry::Occupied(mut old_tx) => {
                 // MST case
                 let signatures_amount_before = old_tx.get().signatures().len();
@@ -226,10 +226,10 @@ impl Queue {
         // Insert entry first so that the `tx` popped from `queue` will always have a `(hash, tx)` record in `txs`.
         entry.insert(tx);
 
-        self.queue.push(hash).map_err(|err_hash| {
+        self.tx_hashes.push(hash).map_err(|err_hash| {
             warn!("Queue is full");
             let (_, err_tx) = self
-                .txs
+                .accepted_txs
                 .remove(&err_hash)
                 .expect("Inserted just before match");
             self.decrease_per_user_tx_count(&err_tx.payload().authority);
@@ -238,7 +238,7 @@ impl Queue {
                 err: Error::Full,
             }
         })?;
-        trace!("Transaction queue length = {}", self.queue.len(),);
+        trace!("Transaction queue length = {}", self.tx_hashes.len(),);
         Ok(())
     }
 
@@ -250,10 +250,10 @@ impl Queue {
         expired_transactions: &mut Vec,
     ) -> Option {
         loop {
-            let Some(hash) = self.queue.pop() else {
+            let Some(hash) = self.tx_hashes.pop() else {
                 return None;
             };
-            let entry = match self.txs.entry(hash) {
+            let entry = match self.accepted_txs.entry(hash) {
                 Entry::Occupied(entry) => entry,
                 // FIXME: Reachable under high load. Investigate, see if it's a problem.
                 // As practice shows this code is not `unreachable!()`.
@@ -288,7 +288,7 @@ impl Queue {
 
     /// Return the number of transactions in the queue.
     pub fn tx_len(&self) -> usize {
-        self.txs.len()
+        self.accepted_txs.len()
     }
 
     /// Gets transactions till they fill whole block or till the end of queue.
@@ -335,7 +335,7 @@ impl Queue {
 
             seen_queue
                 .into_iter()
-                .try_for_each(|hash| self.queue.push(hash))
+                .try_for_each(|hash| self.tx_hashes.push(hash))
                 .expect("Exceeded the number of transactions pending");
             expired_transactions.extend(expired_transactions_queue);
         }
@@ -623,7 +623,7 @@ mod tests {
                 ..
            })
        ));
-        assert_eq!(queue.txs.len(), 0);
+        assert_eq!(queue.accepted_txs.len(), 0);
    }
 
    #[test]
@@ -653,7 +653,7 @@ mod tests {
                .len(),
            0
        );
-        assert_eq!(queue.txs.len(), 0);
+        assert_eq!(queue.accepted_txs.len(), 0);
    }
 
    #[test]
@@ -850,11 +850,11 @@ mod tests {
        get_txs_handle.join().unwrap();
 
        // Validate the queue state.
-        let array_queue: Vec<_> = core::iter::from_fn(|| queue.queue.pop()).collect();
+        let array_queue: Vec<_> = core::iter::from_fn(|| queue.tx_hashes.pop()).collect();
 
-        assert_eq!(array_queue.len(), queue.txs.len());
+        assert_eq!(array_queue.len(), queue.accepted_txs.len());
        for tx in array_queue {
-            assert!(queue.txs.contains_key(&tx));
+            assert!(queue.accepted_txs.contains_key(&tx));
        }
    }
 
@@ -889,7 +889,7 @@ mod tests {
                ..
            })
        ));
-        assert_eq!(queue.txs.len(), 1);
+        assert_eq!(queue.accepted_txs.len(), 1);
    }
 
    #[test]
diff --git a/core/src/smartcontracts/isi/account.rs b/core/src/smartcontracts/isi/account.rs
index 508772dbd6b..2c1e67ca25c 100644
--- a/core/src/smartcontracts/isi/account.rs
+++ b/core/src/smartcontracts/isi/account.rs
@@ -48,9 +48,7 @@ pub mod isi {
 
             match wsv.asset(&asset_id) {
                 Err(err) => match err {
-                    QueryExecutionFail::Find(find_err)
-                        if matches!(find_err, FindError::Asset(_)) =>
-                    {
+                    QueryExecutionFail::Find(FindError::Asset(_)) => {
                         assert_can_register(&asset_id.definition_id, wsv, &self.object.value)?;
                         let asset = wsv
                             .asset_or_insert(asset_id.clone(), self.object.value)
diff --git a/core/src/smartcontracts/isi/triggers/set.rs b/core/src/smartcontracts/isi/triggers/set.rs
index 3cd20738837..624fc2a6acd 100644
--- a/core/src/smartcontracts/isi/triggers/set.rs
+++ b/core/src/smartcontracts/isi/triggers/set.rs
@@ -175,7 +175,7 @@ impl Serialize for TriggersWithContext<'_, F> {
         S: Serializer,
     {
         let mut map = serializer.serialize_map(Some(self.triggers.len()))?;
-        for (id, action) in self.triggers.iter() {
+        for (id, action) in self.triggers {
             let action = self.set.get_original_action(action.clone());
             map.serialize_entry(&id, &action)?;
         }
diff --git a/core/src/smartcontracts/wasm.rs b/core/src/smartcontracts/wasm.rs
index 014960c147f..0d0116dba2f 100644
--- a/core/src/smartcontracts/wasm.rs
+++ b/core/src/smartcontracts/wasm.rs
@@ -1103,7 +1103,7 @@ impl<'wrld> import::traits::GetExecutorPayloads import::traits::GetExecutorPayloads import::traits::GetExecutorPayloads {
+                () = self.sumeragi.finalized_wsv_updated() => {
                     self.sumeragi.apply_finalized_wsv(|finalized_wsv| self.new_wsv_available = finalized_wsv.height() > 0);
                 }
                 _ = message_receiver.recv() => {
diff --git a/core/src/wsv.rs b/core/src/wsv.rs
index ba80418e408..13a899845e9 100644
--- a/core/src/wsv.rs
+++ b/core/src/wsv.rs
@@ -561,7 +561,7 @@ impl WorldStateView {
             }
             let wsv = self.clone();
             let event = match self.process_trigger(&id, &action, event) {
-                Ok(_) => {
+                Ok(()) => {
                     succeed.push(id.clone());
                     TriggerCompletedEvent::new(id, TriggerCompletedOutcome::Success)
                 }
diff --git a/crypto/src/lib.rs b/crypto/src/lib.rs
index eb11d4bf977..5ae39a89ea2 100755
--- a/crypto/src/lib.rs
+++ b/crypto/src/lib.rs
@@ -367,10 +367,7 @@ impl FromStr for PublicKey {
 #[cfg(not(feature = "ffi_import"))]
 impl PublicKey {
     fn normalize(&self) -> String {
-        let multihash: &multihash::Multihash = &self
-            .clone()
-            .try_into()
-            .expect("Failed to get multihash representation.");
+        let multihash: &multihash::Multihash = &self.clone().into();
         let bytes = Vec::try_from(multihash).expect("Failed to convert multihash to bytes.");
 
         let mut bytes_iter = bytes.into_iter();
diff --git a/crypto/src/signature/bls/implementation.rs b/crypto/src/signature/bls/implementation.rs
index c3dff70bc9f..d7084aa82c7 100644
--- a/crypto/src/signature/bls/implementation.rs
+++ b/crypto/src/signature/bls/implementation.rs
@@ -81,7 +81,7 @@ impl PublicKey {
     pub fn from_bytes(bytes: &[u8]) -> Result {
         Ok(Self(
-            C::Generator::from_bytes(bytes).map_err(|e| Error::Parse(format!("{:?}", e)))?,
+            C::Generator::from_bytes(bytes).map_err(|e| Error::Parse(format!("{e:?}")))?,
         ))
     }
 }
@@ -129,7 +129,7 @@ impl Signature {
     pub fn from_bytes(bytes: &[u8]) -> Result {
         Ok(Signature(
-            C::SignatureGroup::from_bytes(bytes).map_err(|e| Error::Parse(format!("{:?}", e)))?,
+            C::SignatureGroup::from_bytes(bytes).map_err(|e| Error::Parse(format!("{e:?}")))?,
         ))
     }
 }
@@ -140,13 +140,13 @@ impl BlsImpl {
     fn parse_public_key(pk: &IrohaPublicKey) -> Result, Error> {
         assert_eq!(pk.digest_function, C::ALGORITHM);
         PublicKey::from_bytes(&pk.payload)
-            .map_err(|e| Error::Parse(format!("Failed to parse public key: {}", e)))
+            .map_err(|e| Error::Parse(format!("Failed to parse public key: {e}")))
     }
 
     fn parse_private_key(sk: &IrohaPrivateKey) -> Result {
         assert_eq!(sk.digest_function, C::ALGORITHM);
         PrivateKey::from_bytes(&sk.payload)
-            .map_err(|e| Error::Parse(format!("Failed to parse private key: {}", e)))
+            .map_err(|e| Error::Parse(format!("Failed to parse private key: {e}")))
     }
 
     // the names are from an RFC, not a good idea to change them
@@ -165,7 +165,7 @@ impl BlsImpl {
             let mut okm = [0u8; PRIVATE_KEY_SIZE];
             let h = hkdf::Hkdf::::new(Some(&salt[..]), &ikm);
             h.expand(&info[..], &mut okm).map_err(|err| {
-                Error::KeyGen(format!("Failed to generate keypair: {}", err))
+                Error::KeyGen(format!("Failed to generate keypair: {err}"))
             })?;
             let private_key: PrivateKey = PrivateKey::from(&okm);
             (
diff --git a/crypto/src/signature/ed25519.rs b/crypto/src/signature/ed25519.rs
index 6f462ee9b75..0312bff8c12 100644
--- a/crypto/src/signature/ed25519.rs
+++ b/crypto/src/signature/ed25519.rs
@@ -2,10 +2,6 @@ use std::convert::TryFrom;
 
 use arrayref::array_ref;
 use ed25519_dalek::{Signature, SigningKey, VerifyingKey as PK};
-pub use ed25519_dalek::{
-    EXPANDED_SECRET_KEY_LENGTH as PRIVATE_KEY_SIZE, PUBLIC_KEY_LENGTH as PUBLIC_KEY_SIZE,
-    SIGNATURE_LENGTH as SIGNATURE_SIZE,
-};
 use iroha_primitives::const_vec::ConstVec;
 use rand::{rngs::OsRng, SeedableRng};
 use rand_chacha::ChaChaRng;
@@ -146,7 +142,7 @@ mod test {
         assert!(result.is_ok());
         assert!(result.unwrap());
 
-        assert_eq!(sig.len(), SIGNATURE_SIZE);
+        assert_eq!(sig.len(), ed25519_dalek::SIGNATURE_LENGTH);
         assert_eq!(hex::encode(sig.as_slice()), SIGNATURE_1);
 
         //Check if libsodium signs the message and this module still can verify it
diff --git a/crypto/src/signature/secp256k1.rs b/crypto/src/signature/secp256k1.rs
index 8cca69a1fd5..4d7ec814141 100644
--- a/crypto/src/signature/secp256k1.rs
+++ b/crypto/src/signature/secp256k1.rs
@@ -81,7 +81,7 @@ mod ecdsa_secp256k1 {
         pub fn sign(message: &[u8], sk: &PrivateKey) -> Result, Error> {
             assert_eq!(sk.digest_function, ALGORITHM);
             let signing_key = k256::SecretKey::from_slice(&sk.payload[..])
-                .map_err(|e| Error::Signing(format!("{:?}", e)))?;
+                .map_err(|e| Error::Signing(format!("{e:?}")))?;
             let signing_key = k256::ecdsa::SigningKey::from(signing_key);
 
             let signature: k256::ecdsa::Signature = signing_key.sign(message);
@@ -91,9 +91,9 @@ mod ecdsa_secp256k1 {
         pub fn verify(message: &[u8], signature: &[u8], pk: &PublicKey) -> Result {
             let compressed_pk = Self::public_key_compressed(pk);
             let verifying_key = k256::PublicKey::from_sec1_bytes(&compressed_pk)
-                .map_err(|e| Error::Signing(format!("{:?}", e)))?;
+                .map_err(|e| Error::Signing(format!("{e:?}")))?;
             let signature = k256::ecdsa::Signature::from_slice(signature)
-                .map_err(|e| Error::Signing(format!("{:?}", e)))?;
+                .map_err(|e| Error::Signing(format!("{e:?}")))?;
 
             let verifying_key = k256::ecdsa::VerifyingKey::from(verifying_key);
diff --git a/data_model/derive/tests/partial_tagged_serde_self.rs b/data_model/derive/tests/partial_tagged_serde_self.rs
index 1600b04453a..fcb0b173db0 100644
--- a/data_model/derive/tests/partial_tagged_serde_self.rs
+++ b/data_model/derive/tests/partial_tagged_serde_self.rs
@@ -18,7 +18,7 @@ fn partially_tagged_serde() {
         Negate(Box::new(Atom(42))),
         Negate(Box::new(Negate(Box::new(Atom(42))))),
     ];
-    let serialized_values = [r#"42"#, r#"{"Negate":42}"#, r#"{"Negate":{"Negate":42}}"#];
+    let serialized_values = [r"42", r#"{"Negate":42}"#, r#"{"Negate":{"Negate":42}}"#];
 
     for (value, serialized_value) in values.iter().zip(serialized_values.iter()) {
         let serialized = serde_json::to_string(value)
diff --git a/data_model/src/events/pipeline.rs b/data_model/src/events/pipeline.rs
index 27dbb58ac59..62a208fb0d9 100644
--- a/data_model/src/events/pipeline.rs
+++ b/data_model/src/events/pipeline.rs
@@ -276,10 +276,10 @@ mod tests {
             ],
             events
                 .iter()
-                .cloned()
-                .filter(|event| PipelineEventFilter::new()
+                .filter(|&event| PipelineEventFilter::new()
                     .hash(Hash::prehashed([0_u8; Hash::LENGTH]))
                     .matches(event))
+                .cloned()
                 .collect::>()
         );
         assert_eq!(
@@ -290,10 +290,10 @@ mod tests {
             }],
             events
                 .iter()
-                .cloned()
-                .filter(|event| PipelineEventFilter::new()
+                .filter(|&event| PipelineEventFilter::new()
                     .entity_kind(PipelineEntityKind::Block)
                     .matches(event))
+                .cloned()
                 .collect::>()
         );
         assert_eq!(
@@ -304,19 +304,19 @@ mod tests {
             }],
             events
                 .iter()
-                .cloned()
-                .filter(|event| PipelineEventFilter::new()
+                .filter(|&event| PipelineEventFilter::new()
                     .entity_kind(PipelineEntityKind::Transaction)
                     .hash(Hash::prehashed([2_u8; Hash::LENGTH]))
                     .matches(event))
+                .cloned()
                 .collect::>()
         );
         assert_eq!(
             events,
             events
                 .iter()
+                .filter(|&event| PipelineEventFilter::new().matches(event))
                 .cloned()
-                .filter(|event| PipelineEventFilter::new().matches(event))
                 .collect::>()
         )
     }
diff --git a/data_model/src/lib.rs b/data_model/src/lib.rs
index 8bb1f0d1a98..683d52b6a1b 100644
--- a/data_model/src/lib.rs
+++ b/data_model/src/lib.rs
@@ -1633,6 +1633,7 @@ where
     ///     }
     /// }
     ///
+    /// ```no_run
     /// #[test]
     /// fn test() {
     ///     let good = Check::Good;
diff --git a/data_model/src/predicate.rs b/data_model/src/predicate.rs
index 119625f6293..4e5d72d8c0c 100644
--- a/data_model/src/predicate.rs
+++ b/data_model/src/predicate.rs
@@ -1293,7 +1293,7 @@ pub mod ip_addr {
             self.0
                 .iter()
                 .copied()
-                .zip(input.into_iter())
+                .zip(input)
                 .all(|(myself, other)| myself.applies(other))
         }
     }
diff --git a/data_model/src/smart_contract.rs b/data_model/src/smart_contract.rs
index 0800fbc27df..6e363f07218 100644
--- a/data_model/src/smart_contract.rs
+++ b/data_model/src/smart_contract.rs
@@ -49,7 +49,7 @@ pub mod payloads {
         /// Height of the latest block in the blockchain
         pub block_height: u64,
         /// Operation to be validated
-        pub to_validate: T,
+        pub target: T,
     }
 }
diff --git a/ffi/src/lib.rs b/ffi/src/lib.rs
index 7d84fe3bec5..dcdd52f0d9a 100644
--- a/ffi/src/lib.rs
+++ b/ffi/src/lib.rs
@@ -329,6 +329,7 @@ macro_rules! ffi_type {
         type Target = $target;
 
         #[inline]
+        #[allow(clippy::redundant_closure_call)]
         unsafe fn is_valid(target: &Self::Target) -> bool {
             $validity_fn(target)
         }
     }
diff --git a/ffi/src/repr_c.rs b/ffi/src/repr_c.rs
index b7896a7273f..621b160b173 100644
--- a/ffi/src/repr_c.rs
+++ b/ffi/src/repr_c.rs
@@ -606,11 +606,11 @@ impl CTypeConvert<'_, Robust, R> for R {
     type RustStore = ();
     type FfiStore = ();
 
-    fn into_repr_c(self, _: &mut ()) -> R {
+    fn into_repr_c(self, (): &mut ()) -> R {
         self
     }
 
-    unsafe fn try_from_repr_c(source: R, _: &mut ()) -> Result {
+    unsafe fn try_from_repr_c(source: R, (): &mut ()) -> Result {
         Ok(source)
     }
 }
@@ -622,7 +622,7 @@ impl CTypeConvert<'_, Robust, *mut [R; N]> for [R; N]
         store.insert(self)
     }
 
-    unsafe fn try_from_repr_c(source: *mut [R; N], _: &mut ()) -> Result {
+    unsafe fn try_from_repr_c(source: *mut [R; N], (): &mut ()) -> Result {
         if source.is_null() {
             return Err(FfiReturn::ArgIsNull);
         }
@@ -660,7 +660,7 @@ impl CTypeConvert<'_, Box, *mut R> for Box {
         &mut **store.insert(self)
     }
 
-    unsafe fn try_from_repr_c(source: *mut R, _: &mut ()) -> Result {
+    unsafe fn try_from_repr_c(source: *mut R, (): &mut ()) -> Result {
         if source.is_null() {
             return Err(FfiReturn::ArgIsNull);
         }
@@ -699,7 +699,7 @@ impl CTypeConvert<'_, Box<[Robust]>, SliceMut> for Box<[R]> {
         SliceMut::from_slice(Some(store.as_mut()))
     }
 
-    unsafe fn try_from_repr_c(source: SliceMut, _: &mut ()) -> Result {
+    unsafe fn try_from_repr_c(source: SliceMut, (): &mut ()) -> Result {
         source
             .into_rust()
             .ok_or(FfiReturn::ArgIsNull)
@@ -741,11 +741,11 @@ impl CTypeConvert<'_, &[Robust], SliceRef> for &[R] {
     type RustStore = ();
     type FfiStore = ();
 
-    fn into_repr_c(self, _: &mut ()) -> SliceRef {
+    fn into_repr_c(self, (): &mut ()) -> SliceRef {
         SliceRef::from_slice(Some(self))
     }
 
-    unsafe fn try_from_repr_c(source: SliceRef, _: &mut ()) -> Result {
+    unsafe fn try_from_repr_c(source: SliceRef, (): &mut ()) -> Result {
         source.into_rust().ok_or(FfiReturn::ArgIsNull)
     }
 }
@@ -779,11 +779,11 @@ impl CTypeConvert<'_, &mut [Robust], SliceMut> for &mut [R] {
     type RustStore = ();
     type FfiStore = ();
 
-    fn into_repr_c(self, _: &mut ()) -> SliceMut {
+    fn into_repr_c(self, (): &mut ()) -> SliceMut {
         SliceMut::from_slice(Some(self))
     }
 
-    unsafe fn try_from_repr_c(source: SliceMut, _: &mut ()) -> Result {
+    unsafe fn try_from_repr_c(source: SliceMut, (): &mut ()) -> Result {
         source.into_rust().ok_or(FfiReturn::ArgIsNull)
     }
 }
@@ -814,7 +814,7 @@ impl CTypeConvert<'_, Vec, SliceMut> for Vec {
         SliceMut::from_slice(Some(store))
     }
 
-    unsafe fn try_from_repr_c(source: SliceMut, _: &mut ()) -> Result {
+    unsafe fn try_from_repr_c(source: SliceMut, (): &mut ()) -> Result {
         source
             .into_rust()
             .ok_or(FfiReturn::ArgIsNull)
@@ -865,10 +865,10 @@ impl CTypeConvert<'_, Opaque, *mut R> for R {
     type RustStore = ();
     type FfiStore = ();
 
-    fn into_repr_c(self, _: &mut ()) -> *mut R {
+    fn into_repr_c(self, (): &mut ()) -> *mut R {
         Box::into_raw(Box::new(self))
     }
-    unsafe fn try_from_repr_c(source: *mut R, _: &mut ()) -> Result {
+    unsafe fn try_from_repr_c(source: *mut R, (): &mut ()) -> Result {
         if source.is_null() {
             return Err(FfiReturn::ArgIsNull);
         }
@@ -893,11 +893,11 @@ impl CTypeConvert<'_, Box, *mut R> for Box {
     type RustStore = ();
     type FfiStore = ();
 
-    fn into_repr_c(self, _: &mut ()) -> *mut R {
+    fn into_repr_c(self, (): &mut ()) -> *mut R {
         Box::into_raw(self)
     }
 
-    unsafe fn try_from_repr_c(source: *mut R, _: &mut ()) -> Result {
+    unsafe fn try_from_repr_c(source: *mut R, (): &mut ()) -> Result {
         if source.is_null() {
             return Err(FfiReturn::ArgIsNull);
         }
@@ -932,7 +932,7 @@ impl CTypeConvert<'_, Box<[Opaque]>, SliceMut<*mut R>> for Box<[R]> {
         SliceMut::from_slice(Some(store))
     }
 
-    unsafe fn try_from_repr_c(source: SliceMut<*mut R>, _: &mut ()) -> Result {
+    unsafe fn try_from_repr_c(source: SliceMut<*mut R>, (): &mut ()) -> Result {
         source
             .into_rust()
             .ok_or(FfiReturn::ArgIsNull)?
@@ -1064,7 +1064,7 @@ impl CTypeConvert<'_, Vec, SliceMut<*mut R>> for Vec {
         SliceMut::from_slice(Some(store))
     }
 
-    unsafe fn try_from_repr_c(source: SliceMut<*mut R>, _: &mut ()) -> Result {
+    unsafe fn try_from_repr_c(source: SliceMut<*mut R>, (): &mut ()) -> Result {
         source
             .into_rust()
             .ok_or(FfiReturn::ArgIsNull)?
@@ -1098,7 +1098,7 @@ impl CTypeConvert<'_, [Opaque; N], [*mut R; N]> for [R; N] {
     type RustStore = ();
     type FfiStore = ();
 
-    fn into_repr_c(self, _: &mut Self::RustStore) -> [*mut R; N] {
+    fn into_repr_c(self, (): &mut Self::RustStore) -> [*mut R; N] {
         let array = self
             .into_iter()
             .map(Box::new)
@@ -1110,7 +1110,7 @@ impl CTypeConvert<'_, [Opaque; N], [*mut R; N]> for [R; N] {
         unsafe { array.unwrap_unchecked() }
     }
 
-    unsafe fn try_from_repr_c(source: [*mut R; N], _: &mut ()) -> Result {
+    unsafe fn try_from_repr_c(source: [*mut R; N], (): &mut ()) -> Result {
         Ok(source
             .into_iter()
             .map(|item| {
@@ -1133,7 +1133,7 @@ impl CTypeConvert<'_, [Opaque; N], *mut [*mut R; N]> for [R;
         store.insert(self.into_repr_c(&mut ()))
     }
 
-    unsafe fn try_from_repr_c(source: *mut [*mut R; N], _: &mut ()) -> Result {
+    unsafe fn try_from_repr_c(source: *mut [*mut R; N], (): &mut ()) -> Result {
         if source.is_null() {
             return Err(FfiReturn::ArgIsNull);
         }
@@ -1229,11 +1229,11 @@ impl CTypeConvert<'_, Box, *mut Extern> for Box {
     type RustStore = ();
     type FfiStore = ();
 
-    fn into_repr_c(self, _: &mut ()) -> *mut Extern {
+    fn into_repr_c(self, (): &mut ()) -> *mut Extern {
         ManuallyDrop::new(*self).as_extern_ptr_mut()
     }
 
-    unsafe fn try_from_repr_c(source: *mut Extern, _: &mut ()) -> Result {
+    unsafe fn try_from_repr_c(source: *mut Extern, (): &mut ()) -> Result {
         if source.is_null() {
             return Err(FfiReturn::ArgIsNull);
         }
diff --git a/ffi/src/slice.rs b/ffi/src/slice.rs
index d66cf3d0200..8c3edfc9b8e 100644
--- a/ffi/src/slice.rs
+++ b/ffi/src/slice.rs
@@ -29,19 +29,19 @@ pub struct OutBoxedSlice(*mut C, usize);
 
 impl Copy for SliceRef {}
 impl Clone for SliceRef {
     fn clone(&self) -> Self {
-        Self(self.0, self.1)
+        *self
     }
 }
 impl Copy for SliceMut {}
 impl Clone for SliceMut {
     fn clone(&self) -> Self {
-        Self(self.0, self.1)
+        *self
     }
 }
 impl Copy for OutBoxedSlice {}
 impl Clone for OutBoxedSlice {
     fn clone(&self) -> Self {
-        Self(self.0, self.1)
+        *self
     }
 }
diff --git a/genesis/src/lib.rs b/genesis/src/lib.rs
index 5fd2f1fc56a..0428a2240ac 100644
--- a/genesis/src/lib.rs
+++ b/genesis/src/lib.rs
@@ -63,7 +63,7 @@ impl GenesisNetwork {
             let transactions_iter = std::iter::once(GenesisTransactionBuilder {
                 isi: vec![UpgradeExpr::new(Executor::try_from(raw_block.executor)?).into()],
             })
-            .chain(raw_block.transactions.into_iter());
+            .chain(raw_block.transactions);
 
             #[cfg(test)]
             let transactions_iter = raw_block.transactions.into_iter();
diff --git a/primitives/src/cmpext.rs b/primitives/src/cmpext.rs
index 0de287e3f3c..ea4d40239e2 100644
--- a/primitives/src/cmpext.rs
+++ b/primitives/src/cmpext.rs
@@ -92,7 +92,7 @@ macro_rules! impl_as_dyn_key {
         impl PartialOrd for dyn $trait + '_ {
             fn partial_cmp(&self, other: &Self) -> Option<::core::cmp::Ordering> {
-                self.as_key().partial_cmp(&other.as_key())
+                Some(self.cmp(other))
             }
         }
diff --git a/primitives/src/unique_vec.rs b/primitives/src/unique_vec.rs
index 4448aef397f..728d920370e 100644
--- a/primitives/src/unique_vec.rs
+++ b/primitives/src/unique_vec.rs
@@ -335,7 +335,7 @@ mod tests {
     }
 
     #[test]
-    #[should_panic]
+    #[should_panic(expected = "removal index (is 3) should be < len (is 3)")]
     fn remove_out_of_bounds_panics() {
         let mut unique_vec = unique_vec![1, 2, 3];
         unique_vec.remove(3);
diff --git a/smart_contract/executor/derive/src/entrypoint.rs b/smart_contract/executor/derive/src/entrypoint.rs
index 3e4e6daa601..60b6cd9ad65 100644
--- a/smart_contract/executor/derive/src/entrypoint.rs
+++ b/smart_contract/executor/derive/src/entrypoint.rs
@@ -112,7 +112,7 @@ fn impl_validate_entrypoint(
         unsafe extern "C" fn #generated_entrypoint_ident() -> *const u8 {
             let payload = ::iroha_executor::#get_validation_payload_fn_ident();
             let verdict: ::iroha_executor::data_model::executor::Result =
-                #fn_name(payload.authority, payload.to_validate, payload.block_height);
+                #fn_name(payload.authority, payload.target, payload.block_height);
 
             let bytes_box = ::core::mem::ManuallyDrop::new(::iroha_executor::utils::encode_with_length_prefix(&verdict));
             bytes_box.as_ptr()
diff --git a/smart_contract/executor/src/lib.rs b/smart_contract/executor/src/lib.rs
index cdbb7fe199c..8b9a7403a52 100644
--- a/smart_contract/executor/src/lib.rs
+++ b/smart_contract/executor/src/lib.rs
@@ -227,7 +227,6 @@ pub mod prelude {
     pub use super::{
         data_model::{
             executor::{MigrationError, MigrationResult, Result},
-            prelude::*,
             visit::Visit,
             ValidationFail,
         },
diff --git a/smart_contract/src/lib.rs b/smart_contract/src/lib.rs
index 5ec0acd2083..45232f9cdfb 100644
--- a/smart_contract/src/lib.rs
+++ b/smart_contract/src/lib.rs
@@ -269,7 +269,7 @@ impl QueryOutputCursor {
     /// May fail due to the same reasons [`QueryOutputCursorIterator`] can fail to iterate.
     pub fn collect(self) -> Result>> {
         let Value::Vec(v) = self.batch else {
-            return Ok(self.batch)
+            return Ok(self.batch);
         };
 
         // Making sure we received all values
diff --git a/telemetry/src/ws.rs b/telemetry/src/ws.rs
index 67579104b91..700860f2de8 100644
--- a/telemetry/src/ws.rs
+++ b/telemetry/src/ws.rs
@@ -139,7 +139,7 @@ where
     async fn send_message(&mut self, msg: Message) {
         if let Some(sink) = self.sink.as_mut() {
             match sink.send(msg).await {
-                Ok(_) => {}
+                Ok(()) => {}
                 Err(Error::AlreadyClosed | Error::ConnectionClosed) => {
                     iroha_logger::debug!("Closed connection to telemetry");
                     self.sink = None;
@@ -305,7 +305,7 @@ mod tests {
             let this = Pin::into_inner(self);
             match this.sender.poll_ready(cx) {
                 Poll::Ready(r) => {
-                    let result = (this.before_send)().map(|_| r.expect("failed to send"));
+                    let result = (this.before_send)().map(|()| r.expect("failed to send"));
                     Poll::Ready(result)
                 }
                 Poll::Pending => Poll::Pending,
diff --git a/tools/kagami/src/genesis.rs b/tools/kagami/src/genesis.rs
index 41ff4c71237..48caeb74337 100644
--- a/tools/kagami/src/genesis.rs
+++ b/tools/kagami/src/genesis.rs
@@ -15,9 +15,9 @@ use serde_json::json;
 
 use super::*;
 
-const INLINED_EXECUTOR_WARNING: &str = r#"WARN: You're using genesis with inlined executor.
+const INLINED_EXECUTOR_WARNING: &str = r"WARN: You're using genesis with inlined executor.
 Consider specifying a separate executor file using `--executor-path-in-genesis` instead.
 
-Use `--help` for more information."#;
+Use `--help` for more information.";
 
 #[derive(Parser, Debug, Clone)]
 #[clap(group = ArgGroup::new("executor").required(true))]
diff --git a/tools/wasm_test_runner/src/main.rs b/tools/wasm_test_runner/src/main.rs
index a2d3b5ca91f..951889169e8 100644
--- a/tools/wasm_test_runner/src/main.rs
+++ b/tools/wasm_test_runner/src/main.rs
@@ -23,7 +23,7 @@ fn main() -> Result {
     };
     // Modules can be compiled through either the text or binary format
     let engine = Engine::default();
-    let module = Module::from_file(&engine, &file)?;
+    let module = Module::from_file(&engine, file)?;
     let mut tests = Vec::new();
     for export in module.exports() {
         if let Some(name) = export.name().strip_prefix("$webassembly-test$") {