From 3fdbddff81259c24ed563f2480034014c679ad7d Mon Sep 17 00:00:00 2001 From: DJO <790521+Alenar@users.noreply.github.com> Date: Fri, 11 Oct 2024 11:46:03 +0200 Subject: [PATCH 01/12] refactor(aggregator): simplify `mockall::automock` imports Use the full path instead of a `use` gated by `cfg(test)`, which adds noise among the other imports. --- mithril-aggregator/src/artifact_builder/interface.rs | 5 +---- .../src/database/repository/signer_store.rs | 4 +--- mithril-aggregator/src/multi_signer.rs | 5 +---- mithril-aggregator/src/runtime/runner.rs | 5 +---- mithril-aggregator/src/services/message.rs | 5 +---- mithril-aggregator/src/services/signed_entity.rs | 5 +---- mithril-aggregator/src/services/stake_distribution.rs | 5 +---- mithril-aggregator/src/signer_registerer.rs | 8 +++----- .../src/snapshot_uploaders/snapshot_uploader.rs | 5 +---- mithril-aggregator/src/store/verification_key_store.rs | 5 +---- mithril-aggregator/src/tools/remote_file_uploader.rs | 5 +---- mithril-aggregator/src/tools/signer_importer.rs | 6 ++---- 12 files changed, 15 insertions(+), 48 deletions(-) diff --git a/mithril-aggregator/src/artifact_builder/interface.rs b/mithril-aggregator/src/artifact_builder/interface.rs index 28d8a669432..2db853b2a83 100644 --- a/mithril-aggregator/src/artifact_builder/interface.rs +++ b/mithril-aggregator/src/artifact_builder/interface.rs @@ -5,11 +5,8 @@ use mithril_common::{ StdResult, }; -#[cfg(test)] -use mockall::automock; - /// ArtifactBuilder is trait for building an artifact -#[cfg_attr(test, automock)] +#[cfg_attr(test, mockall::automock)] #[async_trait] pub trait ArtifactBuilder: Send + Sync where diff --git a/mithril-aggregator/src/database/repository/signer_store.rs b/mithril-aggregator/src/database/repository/signer_store.rs index 1a0d73fae1a..ac20553c090 100644 --- a/mithril-aggregator/src/database/repository/signer_store.rs +++ b/mithril-aggregator/src/database/repository/signer_store.rs @@ -3,8 +3,6 @@ use std::sync::Arc; use async_trait::async_trait; use chrono::Utc; -#[cfg(test)] -use mockall::automock; use mithril_common::StdResult; use mithril_persistence::sqlite::{ConnectionExtensions, SqliteConnection}; @@ -16,7 +14,7 @@ use crate::database::record::SignerRecord; use crate::SignerRecorder; /// Service to get [SignerRecord]. -#[cfg_attr(test, automock)] +#[cfg_attr(test, mockall::automock)] #[async_trait] pub trait SignerGetter: Sync + Send { /// Return all stored records.
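The pattern adopted across these files boils down to the sketch below. The `SnapshotLister` trait and its test are hypothetical (not part of the codebase) and assume `mockall` is declared as a dev-dependency; the point is that the macro is referenced by its full path inside `cfg_attr`, so no `#[cfg(test)] use mockall::automock;` line is needed among the imports.

```rust
/// Hypothetical trait, used only to illustrate the import-free `automock` pattern.
#[cfg_attr(test, mockall::automock)]
pub trait SnapshotLister: Sync + Send {
    /// Return the identifiers of the stored snapshots.
    fn list(&self) -> Vec<String>;
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn mock_is_still_generated() {
        // `automock` still generates `MockSnapshotLister` in test builds,
        // exactly as with the previous `#[cfg(test)] use mockall::automock;` form.
        let mut mock = MockSnapshotLister::new();
        mock.expect_list()
            .returning(|| vec!["snapshot-1".to_string()]);

        assert_eq!(vec!["snapshot-1".to_string()], mock.list());
    }
}
```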
diff --git a/mithril-aggregator/src/multi_signer.rs b/mithril-aggregator/src/multi_signer.rs index 5f7baaa9bd3..865cb5e5ac8 100644 --- a/mithril-aggregator/src/multi_signer.rs +++ b/mithril-aggregator/src/multi_signer.rs @@ -12,11 +12,8 @@ use mithril_common::{ use crate::dependency_injection::EpochServiceWrapper; use crate::entities::OpenMessage; -#[cfg(test)] -use mockall::automock; - /// MultiSigner is the cryptographic engine in charge of producing multi signatures from individual signatures -#[cfg_attr(test, automock)] +#[cfg_attr(test, mockall::automock)] #[async_trait] pub trait MultiSigner: Sync + Send { /// Verify a single signature diff --git a/mithril-aggregator/src/runtime/runner.rs b/mithril-aggregator/src/runtime/runner.rs index ddce241007a..fce0016216b 100644 --- a/mithril-aggregator/src/runtime/runner.rs +++ b/mithril-aggregator/src/runtime/runner.rs @@ -13,9 +13,6 @@ use mithril_persistence::store::StakeStorer; use crate::entities::OpenMessage; use crate::DependencyContainer; -#[cfg(test)] -use mockall::automock; - /// Configuration structure dedicated to the AggregatorRuntime. #[derive(Debug, Clone)] pub struct AggregatorConfig { @@ -165,7 +162,7 @@ impl AggregatorRunner { } } -#[cfg_attr(test, automock)] +#[cfg_attr(test, mockall::automock)] #[async_trait] impl AggregatorRunnerTrait for AggregatorRunner { /// Return the current time point from the chain diff --git a/mithril-aggregator/src/services/message.rs b/mithril-aggregator/src/services/message.rs index e40c1aac014..4bcd035d7cc 100644 --- a/mithril-aggregator/src/services/message.rs +++ b/mithril-aggregator/src/services/message.rs @@ -18,9 +18,6 @@ use mithril_common::{ use crate::database::repository::{CertificateRepository, SignedEntityStorer}; -#[cfg(test)] -use mockall::automock; - /// Error related to the [MessageService] #[derive(Debug, Error)] pub enum MessageServiceError { @@ -29,7 +26,7 @@ pub enum MessageServiceError { PendingCertificateDoesNotExist, } /// HTTP Message service trait. -#[cfg_attr(test, automock)] +#[cfg_attr(test, mockall::automock)] #[async_trait] pub trait MessageService: Sync + Send { /// Return the message representation of a certificate if it exists. diff --git a/mithril-aggregator/src/services/signed_entity.rs b/mithril-aggregator/src/services/signed_entity.rs index 2482be8c592..eca3a5bf55b 100644 --- a/mithril-aggregator/src/services/signed_entity.rs +++ b/mithril-aggregator/src/services/signed_entity.rs @@ -25,11 +25,8 @@ use crate::{ database::{record::SignedEntityRecord, repository::SignedEntityStorer}, }; -#[cfg(test)] -use mockall::automock; - /// ArtifactBuilder Service trait -#[cfg_attr(test, automock)] +#[cfg_attr(test, mockall::automock)] #[async_trait] pub trait SignedEntityService: Send + Sync { /// Create artifact for a signed entity type and a certificate diff --git a/mithril-aggregator/src/services/stake_distribution.rs b/mithril-aggregator/src/services/stake_distribution.rs index f6958fb8720..38a560e9513 100644 --- a/mithril-aggregator/src/services/stake_distribution.rs +++ b/mithril-aggregator/src/services/stake_distribution.rs @@ -17,9 +17,6 @@ use mithril_persistence::store::StakeStorer; use crate::database::repository::StakePoolStore; -#[cfg(test)] -use mockall::automock; - /// Errors related to the [StakeDistributionService]. 
#[derive(Debug)] pub enum StakePoolDistributionServiceError { @@ -89,7 +86,7 @@ impl Display for StakePoolDistributionServiceError { impl std::error::Error for StakePoolDistributionServiceError {} /// Responsible of synchronizing with Cardano stake distribution. -#[cfg_attr(test, automock)] +#[cfg_attr(test, mockall::automock)] #[async_trait] pub trait StakeDistributionService: Sync + Send { /// Return the stake distribution fot the given epoch. diff --git a/mithril-aggregator/src/signer_registerer.rs b/mithril-aggregator/src/signer_registerer.rs index 8830160c7f2..e314950b180 100644 --- a/mithril-aggregator/src/signer_registerer.rs +++ b/mithril-aggregator/src/signer_registerer.rs @@ -14,8 +14,6 @@ use mithril_common::{ use crate::VerificationKeyStorer; use mithril_common::chain_observer::ChainObserverError; -#[cfg(test)] -use mockall::automock; /// Error type for signer registerer service. #[derive(Error, Debug)] @@ -74,7 +72,7 @@ impl SignerRegistrationRound { } /// Trait to register a signer -#[cfg_attr(test, automock)] +#[cfg_attr(test, mockall::automock)] #[async_trait] pub trait SignerRegisterer: Sync + Send { /// Register a signer @@ -89,7 +87,7 @@ pub trait SignerRegisterer: Sync + Send { } /// Trait to open a signer registration round -#[cfg_attr(test, automock)] +#[cfg_attr(test, mockall::automock)] #[async_trait] pub trait SignerRegistrationRoundOpener: Sync + Send { /// Open a signer registration round @@ -104,7 +102,7 @@ pub trait SignerRegistrationRoundOpener: Sync + Send { } /// Signer recorder trait -#[cfg_attr(test, automock)] +#[cfg_attr(test, mockall::automock)] #[async_trait] pub trait SignerRecorder: Sync + Send { /// Record a signer registration diff --git a/mithril-aggregator/src/snapshot_uploaders/snapshot_uploader.rs b/mithril-aggregator/src/snapshot_uploaders/snapshot_uploader.rs index 7459e13d967..d76b4a746e4 100644 --- a/mithril-aggregator/src/snapshot_uploaders/snapshot_uploader.rs +++ b/mithril-aggregator/src/snapshot_uploaders/snapshot_uploader.rs @@ -2,13 +2,10 @@ use async_trait::async_trait; use mithril_common::StdResult; use std::path::Path; -#[cfg(test)] -use mockall::automock; - pub type SnapshotLocation = String; /// SnapshotUploader represents a snapshot uploader interactor -#[cfg_attr(test, automock)] +#[cfg_attr(test, mockall::automock)] #[async_trait] pub trait SnapshotUploader: Sync + Send { /// Upload a snapshot diff --git a/mithril-aggregator/src/store/verification_key_store.rs b/mithril-aggregator/src/store/verification_key_store.rs index 85499092941..dd8968f9668 100644 --- a/mithril-aggregator/src/store/verification_key_store.rs +++ b/mithril-aggregator/src/store/verification_key_store.rs @@ -7,16 +7,13 @@ use tokio::sync::RwLock; use mithril_common::entities::{Epoch, PartyId, Signer, SignerWithStake}; use mithril_persistence::store::adapter::StoreAdapter; -#[cfg(test)] -use mockall::automock; - type Adapter = Box>>; /// Store and get signers verification keys for given epoch. /// /// Important note: This store works on the **recording** epoch, the epoch at which the signers /// are signed into a certificate so they can sign single signatures at the next epoch. 
-#[cfg_attr(test, automock)] +#[cfg_attr(test, mockall::automock)] #[async_trait] pub trait VerificationKeyStorer: Sync + Send { /// Save the verification key, for the given [Signer] for the given [Epoch], returns the diff --git a/mithril-aggregator/src/tools/remote_file_uploader.rs b/mithril-aggregator/src/tools/remote_file_uploader.rs index 19c06b5ac05..2e1594ef7b9 100644 --- a/mithril-aggregator/src/tools/remote_file_uploader.rs +++ b/mithril-aggregator/src/tools/remote_file_uploader.rs @@ -9,11 +9,8 @@ use slog_scope::info; use std::{env, path::Path}; use tokio_util::{codec::BytesCodec, codec::FramedRead}; -#[cfg(test)] -use mockall::automock; - /// RemoteFileUploader represents a remote file uploader interactor -#[cfg_attr(test, automock)] +#[cfg_attr(test, mockall::automock)] #[async_trait] pub trait RemoteFileUploader: Sync + Send { /// Upload a snapshot diff --git a/mithril-aggregator/src/tools/signer_importer.rs b/mithril-aggregator/src/tools/signer_importer.rs index eec1a1c045f..cedaa01ebec 100644 --- a/mithril-aggregator/src/tools/signer_importer.rs +++ b/mithril-aggregator/src/tools/signer_importer.rs @@ -10,8 +10,6 @@ use std::time::Duration; use crate::database::repository::SignerStore; -#[cfg(test)] -use mockall::automock; use slog_scope::{info, warn}; pub type PoolTicker = String; @@ -66,7 +64,7 @@ impl SignersImporter { } /// Trait that define how a [SignersImporter] retrieve the signers to import. -#[cfg_attr(test, automock)] +#[cfg_attr(test, mockall::automock)] #[async_trait] pub trait SignersImporterRetriever: Sync + Send { /// Retrieve the signers list. @@ -74,7 +72,7 @@ pub trait SignersImporterRetriever: Sync + Send { /// Trait that define how a [SignersImporter] persist the retrieved signers. -#[cfg_attr(test, automock)] +#[cfg_attr(test, mockall::automock)] #[async_trait] pub trait SignersImporterPersister: Sync + Send { /// Persist the given list of signers. From d40dd6bf7b21731dd38e64336e4e2645471a1e6a Mon Sep 17 00:00:00 2001 From: DJO <790521+Alenar@users.noreply.github.com> Date: Fri, 11 Oct 2024 11:00:04 +0200 Subject: [PATCH 02/12] feat(common): new logger extension to create logger with given name To create a child logger when there's no component to hold it (such as the http server in the aggregator). --- mithril-common/src/logging.rs | 27 ++++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/mithril-common/src/logging.rs b/mithril-common/src/logging.rs index 9b4ae29baa0..60fc5a7a083 100644 --- a/mithril-common/src/logging.rs +++ b/mithril-common/src/logging.rs @@ -6,11 +6,18 @@ use slog::Logger; pub trait LoggerExtensions { /// Create a new child logger with a `src` key containing the component name. fn new_with_component_name(&self) -> Self; + + /// Create a new child logger with a `src` key containing the provided name.
+ fn new_with_name(&self, name: &str) -> Self; } impl LoggerExtensions for Logger { fn new_with_component_name(&self) -> Self { - self.new(slog::o!("src" => component_name::())) + self.new_with_name(component_name::()) + } + + fn new_with_name(&self, name: &str) -> Self { + self.new(slog::o!("src" => name.to_owned())) } } @@ -78,4 +85,22 @@ mod tests { "log should contain `src` key for `TestStruct` as component name was provided, logs:\n{logs}" ); } + + #[test] + fn logger_extension_new_with_name() { + let expected_name = "my name"; + let log_path = + TempDir::create("common_logging", "logger_extension_new_with_name").join("test.log"); + { + let root_logger = TestLogger::file(&log_path); + let child_logger = root_logger.new_with_name(expected_name); + info!(child_logger, "Child log"); + } + + let logs = std::fs::read_to_string(&log_path).unwrap(); + assert!( + logs.contains("src") && logs.contains(expected_name), + "log should contain `src` key for `{expected_name}` as a name was provided, logs:\n{logs}" + ); + } } From b153b5ff88294da5f95e81244852a4ccd2d0885b Mon Sep 17 00:00:00 2001 From: DJO <790521+Alenar@users.noreply.github.com> Date: Fri, 11 Oct 2024 11:24:31 +0200 Subject: [PATCH 03/12] refactor(aggregator): add a child logger to all components that use logs * Tagged with the component name. * Existing logs are still using `slog_scope` but this prepares its removal. --- .../cardano_immutable_files_full.rs | 15 ++++- .../src/commands/tools_command.rs | 1 + .../src/dependency_injection/builder.rs | 35 ++++++++--- .../src/dependency_injection/containers.rs | 4 ++ mithril-aggregator/src/event_store/runner.rs | 13 ++-- .../src/event_store/transmitter_service.rs | 14 +++-- .../src/http_server/routes/middlewares.rs | 11 +++- .../src/http_server/routes/mod.rs | 5 ++ mithril-aggregator/src/multi_signer.rs | 61 +++++++++++-------- mithril-aggregator/src/runtime/runner.rs | 9 ++- .../src/runtime/state_machine.rs | 14 +++-- .../services/cardano_transactions_importer.rs | 3 +- .../services/certifier/buffered_certifier.rs | 3 +- .../services/certifier/certifier_service.rs | 10 +-- .../src/services/epoch_service.rs | 7 +++ mithril-aggregator/src/services/prover.rs | 8 ++- .../src/services/signed_entity.rs | 8 +++ mithril-aggregator/src/services/upkeep.rs | 3 +- .../local_snapshot_uploader.rs | 28 ++++++--- .../remote_snapshot_uploader.rs | 27 ++++++-- mithril-aggregator/src/snapshotter.rs | 16 ++++- .../src/tools/certificates_hash_migrator.rs | 8 +++ .../src/tools/remote_file_uploader.rs | 13 +++- .../src/tools/signer_importer.rs | 60 ++++++++++++++---- .../tools/single_signature_authenticator.rs | 3 +- 25 files changed, 289 insertions(+), 90 deletions(-) diff --git a/mithril-aggregator/src/artifact_builder/cardano_immutable_files_full.rs b/mithril-aggregator/src/artifact_builder/cardano_immutable_files_full.rs index e8104acbe6e..63e0413a18d 100644 --- a/mithril-aggregator/src/artifact_builder/cardano_immutable_files_full.rs +++ b/mithril-aggregator/src/artifact_builder/cardano_immutable_files_full.rs @@ -1,6 +1,7 @@ use anyhow::Context; use async_trait::async_trait; use semver::Version; +use slog::Logger; use slog_scope::{debug, warn}; use std::sync::Arc; use thiserror::Error; @@ -11,6 +12,7 @@ use crate::{ }; use super::ArtifactBuilder; +use mithril_common::logging::LoggerExtensions; use mithril_common::{ entities::{ CardanoDbBeacon, Certificate, CompressionAlgorithm, ProtocolMessagePartKey, Snapshot, @@ -33,6 +35,7 @@ pub struct CardanoImmutableFilesFullArtifactBuilder { snapshotter: Arc,
snapshot_uploader: Arc, compression_algorithm: CompressionAlgorithm, + logger: Logger, } impl CardanoImmutableFilesFullArtifactBuilder { @@ -42,12 +45,14 @@ impl CardanoImmutableFilesFullArtifactBuilder { snapshotter: Arc, snapshot_uploader: Arc, compression_algorithm: CompressionAlgorithm, + logger: Logger, ) -> Self { Self { cardano_node_version: cardano_node_version.clone(), snapshotter, snapshot_uploader, compression_algorithm, + logger: logger.new_with_component_name::(), } } @@ -167,7 +172,10 @@ mod tests { use super::*; - use crate::{snapshot_uploaders::MockSnapshotUploader, DumbSnapshotUploader, DumbSnapshotter}; + use crate::{ + snapshot_uploaders::MockSnapshotUploader, test_tools::TestLogger, DumbSnapshotUploader, + DumbSnapshotter, + }; #[tokio::test] async fn should_compute_valid_artifact() { @@ -187,6 +195,7 @@ mod tests { dumb_snapshotter.clone(), dumb_snapshot_uploader.clone(), CompressionAlgorithm::Zstandard, + TestLogger::stdout(), ); let artifact = cardano_immutable_files_full_artifact_builder .compute_artifact(beacon.clone(), &certificate) @@ -224,6 +233,7 @@ mod tests { Arc::new(DumbSnapshotter::new()), Arc::new(DumbSnapshotUploader::new()), CompressionAlgorithm::default(), + TestLogger::stdout(), ); cardano_immutable_files_full_artifact_builder @@ -248,6 +258,7 @@ mod tests { Arc::new(DumbSnapshotter::new()), Arc::new(DumbSnapshotUploader::new()), CompressionAlgorithm::Gzip, + TestLogger::stdout(), ); let ongoing_snapshot = cardano_immutable_files_full_artifact_builder @@ -275,6 +286,7 @@ mod tests { Arc::new(DumbSnapshotter::new()), Arc::new(DumbSnapshotUploader::new()), algorithm, + TestLogger::stdout(), ); let ongoing_snapshot = cardano_immutable_files_full_artifact_builder @@ -316,6 +328,7 @@ mod tests { Arc::new(DumbSnapshotter::new()), Arc::new(snapshot_uploader), CompressionAlgorithm::default(), + TestLogger::stdout(), ); cardano_immutable_files_full_artifact_builder diff --git a/mithril-aggregator/src/commands/tools_command.rs b/mithril-aggregator/src/commands/tools_command.rs index 4ba49759a48..5f0faba0ece 100644 --- a/mithril-aggregator/src/commands/tools_command.rs +++ b/mithril-aggregator/src/commands/tools_command.rs @@ -67,6 +67,7 @@ impl RecomputeCertificatesHashCommand { let migrator = CertificatesHashMigrator::new( CertificateRepository::new(connection.clone()), Arc::new(SignedEntityStore::new(connection.clone())), + dependencies_builder.get_logger()?, ); migrator diff --git a/mithril-aggregator/src/dependency_injection/builder.rs b/mithril-aggregator/src/dependency_injection/builder.rs index 01d0d7d0898..ce70cd0e66a 100644 --- a/mithril-aggregator/src/dependency_injection/builder.rs +++ b/mithril-aggregator/src/dependency_injection/builder.rs @@ -417,6 +417,7 @@ impl DependenciesBuilder { } async fn build_snapshot_uploader(&mut self) -> Result> { + let logger = self.get_logger()?; if self.configuration.environment == ExecutionEnvironment::Production { match self.configuration.snapshot_uploader_type { SnapshotUploaderType::Gcp => { @@ -431,14 +432,16 @@ impl DependenciesBuilder { })?; Ok(Arc::new(RemoteSnapshotUploader::new( - Box::new(GcpFileUploader::new(bucket.clone())), + Box::new(GcpFileUploader::new(bucket.clone(), logger.clone())), bucket, self.configuration.snapshot_use_cdn_domain, + logger, ))) } SnapshotUploaderType::Local => Ok(Arc::new(LocalSnapshotUploader::new( self.configuration.get_server_url(), &self.configuration.snapshot_directory, + logger, ))), } } else { @@ -456,7 +459,8 @@ impl DependenciesBuilder { } async fn 
build_multi_signer(&mut self) -> Result> { - let multi_signer = MultiSignerImpl::new(self.get_epoch_service().await?); + let multi_signer = + MultiSignerImpl::new(self.get_epoch_service().await?, self.get_logger()?); Ok(Arc::new(multi_signer)) } @@ -823,6 +827,7 @@ impl DependenciesBuilder { self.configuration.db_directory.clone(), ongoing_snapshot_directory, algorithm, + self.get_logger()?, )?) } _ => Arc::new(DumbSnapshotter::new()), @@ -1025,7 +1030,7 @@ impl DependenciesBuilder { async fn build_event_transmitter(&mut self) -> Result>> { let sender = self.get_event_transmitter_sender().await?; - let event_transmitter = Arc::new(TransmitterService::new(sender)); + let event_transmitter = Arc::new(TransmitterService::new(sender, self.get_logger()?)); Ok(event_transmitter) } @@ -1157,6 +1162,7 @@ impl DependenciesBuilder { } async fn build_signed_entity_service(&mut self) -> Result> { + let logger = self.get_logger()?; let signed_entity_storer = self.build_signed_entity_storer().await?; let epoch_service = self.get_epoch_service().await?; let mithril_stake_distribution_artifact_builder = Arc::new( @@ -1172,6 +1178,7 @@ impl DependenciesBuilder { snapshotter, snapshot_uploader, self.configuration.snapshot_compression_algorithm, + logger.clone(), )); let prover_service = self.get_prover_service().await?; let cardano_transactions_artifact_builder = Arc::new( @@ -1187,6 +1194,7 @@ impl DependenciesBuilder { cardano_transactions_artifact_builder, self.get_signed_entity_lock().await?, cardano_stake_distribution_artifact_builder, + logger, )); // Compute the cache pool for prover service @@ -1226,6 +1234,7 @@ impl DependenciesBuilder { verification_key_store, network, allowed_discriminants, + self.get_logger()?, ))); Ok(epoch_service) @@ -1344,6 +1353,7 @@ impl DependenciesBuilder { let dependency_manager = DependencyContainer { config: self.configuration.clone(), allowed_discriminants: self.get_allowed_signed_entity_types_discriminants()?, + root_logger: self.get_logger()?, sqlite_connection: self.get_sqlite_connection().await?, sqlite_connection_cardano_transaction_pool: self .get_sqlite_connection_cardano_transaction_pool() @@ -1391,7 +1401,10 @@ impl DependenciesBuilder { /// Create dependencies for the [EventStore] task. pub async fn create_event_store(&mut self) -> Result { - let event_store = EventStore::new(self.get_event_transmitter_receiver().await?); + let event_store = EventStore::new( + self.get_event_transmitter_receiver().await?, + self.get_logger()?, + ); Ok(event_store) } @@ -1405,6 +1418,7 @@ impl DependenciesBuilder { config, None, Arc::new(AggregatorRunner::new(dependency_container)), + self.get_logger()?, ) .await .map_err(|e| DependenciesBuilderError::Initialization { @@ -1472,11 +1486,18 @@ impl DependenciesBuilder { &mut self, cexplorer_pools_url: &str, ) -> Result { - let retriever = - CExplorerSignerRetriever::new(cexplorer_pools_url, Some(Duration::from_secs(30)))?; + let retriever = CExplorerSignerRetriever::new( + cexplorer_pools_url, + Some(Duration::from_secs(30)), + self.get_logger()?, + )?; let persister = self.get_signer_store().await?; - Ok(SignersImporter::new(Arc::new(retriever), persister)) + Ok(SignersImporter::new( + Arc::new(retriever), + persister, + self.get_logger()?, + )) } /// Create [TickerService] instance. 
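The builder changes above all thread a single root logger into the constructors; each component then derives a child logger tagged with its own name, using the extension added in the previous commit. A minimal sketch of the consumer side, with a hypothetical `SnapshotJanitor` component (not part of the codebase):

```rust
use slog::{info, Logger};

use mithril_common::logging::LoggerExtensions;

/// Hypothetical component, used only to illustrate the logging convention.
pub struct SnapshotJanitor {
    logger: Logger,
}

impl SnapshotJanitor {
    /// Receive the parent logger and derive a child logger whose `src` key
    /// is set to this component's name.
    pub fn new(logger: Logger) -> Self {
        Self {
            logger: logger.new_with_component_name::<Self>(),
        }
    }

    pub fn run(&self) {
        // Records emitted here carry the `src` entry, so they can be filtered per component.
        info!(self.logger, "Cleaning up leftover snapshots");
    }
}
```

Where no component type exists to derive a name from, `new_with_name` plays the same role, as the HTTP server routes further down do with `logger.new_with_name("http_server")`.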
diff --git a/mithril-aggregator/src/dependency_injection/containers.rs b/mithril-aggregator/src/dependency_injection/containers.rs index c23ca6d58e2..5a8efa3c616 100644 --- a/mithril-aggregator/src/dependency_injection/containers.rs +++ b/mithril-aggregator/src/dependency_injection/containers.rs @@ -1,3 +1,4 @@ +use slog::Logger; use std::{collections::BTreeSet, sync::Arc}; use tokio::sync::RwLock; @@ -53,6 +54,9 @@ pub struct DependencyContainer { /// List of signed entity discriminants that are allowed to be processed pub allowed_discriminants: BTreeSet, + /// Application root logger + pub root_logger: Logger, + /// SQLite database connection /// /// This is not a real service, but it is needed to instantiate all store diff --git a/mithril-aggregator/src/event_store/runner.rs b/mithril-aggregator/src/event_store/runner.rs index d19cb66b40c..a5636e67047 100644 --- a/mithril-aggregator/src/event_store/runner.rs +++ b/mithril-aggregator/src/event_store/runner.rs @@ -1,22 +1,27 @@ use anyhow::Context; +use mithril_common::logging::LoggerExtensions; +use mithril_common::StdResult; +use slog::Logger; use slog_scope::{debug, info}; use sqlite::Connection; use std::{path::PathBuf, sync::Arc}; use tokio::sync::mpsc::UnboundedReceiver; -use mithril_common::StdResult; - use super::{EventMessage, EventPersister}; /// EventMessage receiver service. pub struct EventStore { receiver: UnboundedReceiver, + logger: Logger, } impl EventStore { /// Instantiate the EventMessage receiver service. - pub fn new(receiver: UnboundedReceiver) -> Self { - Self { receiver } + pub fn new(receiver: UnboundedReceiver, logger: Logger) -> Self { + Self { + receiver, + logger: logger.new_with_component_name::(), + } } /// Launch the service. It runs until all the transmitters are gone and all diff --git a/mithril-aggregator/src/event_store/transmitter_service.rs b/mithril-aggregator/src/event_store/transmitter_service.rs index 9b5df027a1c..2af94a39aa5 100644 --- a/mithril-aggregator/src/event_store/transmitter_service.rs +++ b/mithril-aggregator/src/event_store/transmitter_service.rs @@ -1,9 +1,11 @@ -use std::fmt::Debug; - use serde::Serialize; +use slog::Logger; use slog_scope::warn; +use std::fmt::Debug; use tokio::sync::mpsc::UnboundedSender; +use mithril_common::logging::LoggerExtensions; + use super::EventMessage; /// The transmitter service is used to allow inter process channel @@ -13,6 +15,7 @@ where MSG: Debug + Sync + Send, { transmitter: UnboundedSender, + logger: Logger, } impl TransmitterService @@ -20,8 +23,11 @@ where MSG: Debug + Sync + Send, { /// Instantiate a new Service by passing a MPSC transmitter. - pub fn new(transmitter: UnboundedSender) -> Self { - Self { transmitter } + pub fn new(transmitter: UnboundedSender, logger: Logger) -> Self { + Self { + transmitter, + logger: logger.new_with_component_name::(), + } } /// Clone the internal transmitter and return it. 
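The reworked tests in this commit build their loggers through `test_tools::TestLogger::stdout()`, whose implementation is not shown in this patch. Assuming it follows the usual slog setup (with the `slog_term` and `slog_async` drains), a stand-in could look roughly like the sketch below; the name and drain choices are illustrative, not the actual `TestLogger`.

```rust
use slog::{Drain, Logger};

/// Illustrative stand-in for a stdout test logger: builds a root logger that
/// writes human-readable records to the terminal so test output stays easy to read.
pub fn stdout_test_logger() -> Logger {
    let decorator = slog_term::TermDecorator::new().build();
    let drain = slog_term::CompactFormat::new(decorator).build().fuse();
    let drain = slog_async::Async::new(drain).build().fuse();

    Logger::root(drain, slog::o!())
}
```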
diff --git a/mithril-aggregator/src/http_server/routes/middlewares.rs b/mithril-aggregator/src/http_server/routes/middlewares.rs index 267a410d90d..995746b49de 100644 --- a/mithril-aggregator/src/http_server/routes/middlewares.rs +++ b/mithril-aggregator/src/http_server/routes/middlewares.rs @@ -1,7 +1,7 @@ +use slog::Logger; use std::collections::BTreeSet; use std::convert::Infallible; use std::sync::Arc; - use warp::Filter; use mithril_common::api_version::APIVersionProvider; @@ -10,12 +10,21 @@ use mithril_common::entities::SignedEntityTypeDiscriminants; use crate::database::repository::SignerGetter; use crate::dependency_injection::EpochServiceWrapper; use crate::event_store::{EventMessage, TransmitterService}; +use crate::http_server::routes::http_server_child_logger; use crate::services::{CertifierService, MessageService, ProverService, SignedEntityService}; use crate::{ CertificatePendingStore, Configuration, DependencyContainer, SignerRegisterer, SingleSignatureAuthenticator, VerificationKeyStorer, }; +/// With logger middleware +pub(crate) fn with_logger( + dependency_manager: &DependencyContainer, +) -> impl Filter + Clone { + let logger = http_server_child_logger(&dependency_manager.root_logger); + warp::any().map(move || logger.clone()) +} + /// With certificate pending store pub(crate) fn with_certificate_pending_store( dependency_manager: &DependencyContainer, diff --git a/mithril-aggregator/src/http_server/routes/mod.rs b/mithril-aggregator/src/http_server/routes/mod.rs index d806afe4326..a4a6eca6338 100644 --- a/mithril-aggregator/src/http_server/routes/mod.rs +++ b/mithril-aggregator/src/http_server/routes/mod.rs @@ -26,3 +26,8 @@ macro_rules! unwrap_to_internal_server_error { } }; } + +pub(crate) fn http_server_child_logger(logger: &slog::Logger) -> slog::Logger { + use mithril_common::logging::LoggerExtensions; + logger.new_with_name("http_server") +} diff --git a/mithril-aggregator/src/multi_signer.rs b/mithril-aggregator/src/multi_signer.rs index 865cb5e5ac8..1fe8f9a3a98 100644 --- a/mithril-aggregator/src/multi_signer.rs +++ b/mithril-aggregator/src/multi_signer.rs @@ -1,10 +1,12 @@ use anyhow::{anyhow, Context}; use async_trait::async_trait; +use slog::Logger; use slog_scope::{debug, warn}; use mithril_common::{ crypto_helper::{ProtocolAggregationError, ProtocolMultiSignature}, entities::{self}, + logging::LoggerExtensions, protocol::MultiSigner as ProtocolMultiSigner, StdResult, }; @@ -40,13 +42,17 @@ pub trait MultiSigner: Sync + Send { /// MultiSignerImpl is an implementation of the MultiSigner pub struct MultiSignerImpl { epoch_service: EpochServiceWrapper, + logger: Logger, } impl MultiSignerImpl { /// MultiSignerImpl factory - pub fn new(epoch_service: EpochServiceWrapper) -> Self { + pub fn new(epoch_service: EpochServiceWrapper, logger: Logger) -> Self { debug!("New MultiSignerImpl created"); - Self { epoch_service } + Self { + epoch_service, + logger: logger.new_with_component_name::(), + } } fn run_verify_single_signature( @@ -141,6 +147,7 @@ mod tests { use crate::entities::AggregatorEpochSettings; use crate::services::{FakeEpochService, FakeEpochServiceBuilder}; + use crate::test_tools::TestLogger; use super::*; @@ -169,26 +176,29 @@ mod tests { let epoch = Epoch(5); let fixture = MithrilFixtureBuilder::default().with_signers(5).build(); let next_fixture = MithrilFixtureBuilder::default().with_signers(4).build(); - let multi_signer = MultiSignerImpl::new(Arc::new(RwLock::new( - FakeEpochServiceBuilder { - epoch_settings: AggregatorEpochSettings { - 
protocol_parameters: fixture.protocol_parameters(), - ..AggregatorEpochSettings::dummy() - }, - next_epoch_settings: AggregatorEpochSettings { - protocol_parameters: next_fixture.protocol_parameters(), - ..AggregatorEpochSettings::dummy() - }, - upcoming_epoch_settings: AggregatorEpochSettings { - protocol_parameters: next_fixture.protocol_parameters(), - ..AggregatorEpochSettings::dummy() - }, - current_signers_with_stake: fixture.signers_with_stake(), - next_signers_with_stake: next_fixture.signers_with_stake(), - ..FakeEpochServiceBuilder::dummy(epoch) - } - .build(), - ))); + let multi_signer = MultiSignerImpl::new( + Arc::new(RwLock::new( + FakeEpochServiceBuilder { + epoch_settings: AggregatorEpochSettings { + protocol_parameters: fixture.protocol_parameters(), + ..AggregatorEpochSettings::dummy() + }, + next_epoch_settings: AggregatorEpochSettings { + protocol_parameters: next_fixture.protocol_parameters(), + ..AggregatorEpochSettings::dummy() + }, + upcoming_epoch_settings: AggregatorEpochSettings { + protocol_parameters: next_fixture.protocol_parameters(), + ..AggregatorEpochSettings::dummy() + }, + current_signers_with_stake: fixture.signers_with_stake(), + next_signers_with_stake: next_fixture.signers_with_stake(), + ..FakeEpochServiceBuilder::dummy(epoch) + } + .build(), + )), + TestLogger::stdout(), + ); { let message = setup_message(); @@ -226,9 +236,10 @@ mod tests { let epoch = Epoch(5); let fixture = MithrilFixtureBuilder::default().with_signers(5).build(); let protocol_parameters = fixture.protocol_parameters(); - let multi_signer = MultiSignerImpl::new(Arc::new(RwLock::new( - FakeEpochService::from_fixture(epoch, &fixture), - ))); + let multi_signer = MultiSignerImpl::new( + Arc::new(RwLock::new(FakeEpochService::from_fixture(epoch, &fixture))), + TestLogger::stdout(), + ); let message = setup_message(); diff --git a/mithril-aggregator/src/runtime/runner.rs b/mithril-aggregator/src/runtime/runner.rs index fce0016216b..772fdddb3ef 100644 --- a/mithril-aggregator/src/runtime/runner.rs +++ b/mithril-aggregator/src/runtime/runner.rs @@ -1,5 +1,6 @@ use anyhow::{anyhow, Context}; use async_trait::async_trait; +use slog::Logger; use slog_scope::{debug, warn}; use std::sync::Arc; use std::time::Duration; @@ -7,6 +8,7 @@ use std::time::Duration; use mithril_common::entities::{ Certificate, CertificatePending, Epoch, ProtocolMessage, SignedEntityType, Signer, TimePoint, }; +use mithril_common::logging::LoggerExtensions; use mithril_common::StdResult; use mithril_persistence::store::StakeStorer; @@ -133,12 +135,17 @@ pub trait AggregatorRunnerTrait: Sync + Send { /// holds services and configuration. pub struct AggregatorRunner { dependencies: Arc, + logger: Logger, } impl AggregatorRunner { /// Create a new instance of the Aggregator Runner. 
pub fn new(dependencies: Arc) -> Self { - Self { dependencies } + let logger = dependencies.root_logger.new_with_component_name::(); + Self { + dependencies, + logger, + } } async fn list_available_signed_entity_types( diff --git a/mithril-aggregator/src/runtime/state_machine.rs b/mithril-aggregator/src/runtime/state_machine.rs index 19f8d04721a..133a147ff68 100644 --- a/mithril-aggregator/src/runtime/state_machine.rs +++ b/mithril-aggregator/src/runtime/state_machine.rs @@ -6,6 +6,8 @@ use crate::{ use anyhow::Context; use mithril_common::entities::TimePoint; +use mithril_common::logging::LoggerExtensions; +use slog::Logger; use slog_scope::{crit, info, trace, warn}; use std::fmt::Display; use std::sync::Arc; @@ -57,14 +59,10 @@ impl Display for AggregatorState { /// [documentation](https://mithril.network/doc/mithril/mithril-network/aggregator#under-the-hood) /// for more explanations about the Aggregator state machine. pub struct AggregatorRuntime { - /// Configuration config: AggregatorConfig, - - /// the internal state of the automate state: AggregatorState, - - /// specific runner for this state machine runner: Arc, + logger: Logger, } impl AggregatorRuntime { @@ -73,7 +71,9 @@ impl AggregatorRuntime { aggregator_config: AggregatorConfig, init_state: Option, runner: Arc, + logger: Logger, ) -> Result { + let logger = logger.new_with_component_name::(); info!("initializing runtime"); let state = if let Some(init_state) = init_state { @@ -90,6 +90,7 @@ impl AggregatorRuntime { config: aggregator_config, state, runner, + logger, }) } @@ -398,6 +399,8 @@ mod tests { use mithril_common::test_utils::fake_data; + use crate::test_tools::TestLogger; + use super::super::runner::MockAggregatorRunner; use super::*; @@ -409,6 +412,7 @@ mod tests { AggregatorConfig::new(Duration::from_millis(20)), init_state, Arc::new(runner), + TestLogger::stdout(), ) .await .unwrap() diff --git a/mithril-aggregator/src/services/cardano_transactions_importer.rs b/mithril-aggregator/src/services/cardano_transactions_importer.rs index 8413fea87e0..9c71b8a792a 100644 --- a/mithril-aggregator/src/services/cardano_transactions_importer.rs +++ b/mithril-aggregator/src/services/cardano_transactions_importer.rs @@ -12,6 +12,7 @@ use mithril_common::crypto_helper::{MKTree, MKTreeNode, MKTreeStoreInMemory}; use mithril_common::entities::{ BlockNumber, BlockRange, CardanoTransaction, ChainPoint, SlotNumber, }; +use mithril_common::logging::LoggerExtensions; use mithril_common::signable_builder::TransactionsImporter; use mithril_common::StdResult; @@ -68,7 +69,7 @@ impl CardanoTransactionsImporter { Self { block_scanner, transaction_store, - logger, + logger: logger.new_with_component_name::(), } } diff --git a/mithril-aggregator/src/services/certifier/buffered_certifier.rs b/mithril-aggregator/src/services/certifier/buffered_certifier.rs index 7edf5538798..9c8cf1025e4 100644 --- a/mithril-aggregator/src/services/certifier/buffered_certifier.rs +++ b/mithril-aggregator/src/services/certifier/buffered_certifier.rs @@ -6,6 +6,7 @@ use mithril_common::entities::{ Certificate, Epoch, ProtocolMessage, SignedEntityType, SignedEntityTypeDiscriminants, SingleSignatures, }; +use mithril_common::logging::LoggerExtensions; use mithril_common::StdResult; use crate::entities::OpenMessage; @@ -35,7 +36,7 @@ impl BufferedCertifierService { Self { certifier_service, buffered_single_signature_store, - logger, + logger: logger.new_with_component_name::(), } } diff --git a/mithril-aggregator/src/services/certifier/certifier_service.rs 
b/mithril-aggregator/src/services/certifier/certifier_service.rs index a2c26097cfc..99b3d368ac8 100644 --- a/mithril-aggregator/src/services/certifier/certifier_service.rs +++ b/mithril-aggregator/src/services/certifier/certifier_service.rs @@ -11,6 +11,7 @@ use mithril_common::entities::{ Certificate, CertificateMetadata, CertificateSignature, Epoch, ProtocolMessage, SignedEntityType, SingleSignatures, StakeDistributionParty, }; +use mithril_common::logging::LoggerExtensions; use mithril_common::protocol::ToMessage; use mithril_common::{CardanoNetwork, StdResult, TickerService}; @@ -35,7 +36,7 @@ pub struct MithrilCertifierService { // todo: should be removed after removing immutable file number from the certificate metadata ticker_service: Arc, epoch_service: EpochServiceWrapper, - _logger: Logger, + logger: Logger, } impl MithrilCertifierService { @@ -63,7 +64,7 @@ impl MithrilCertifierService { genesis_verifier, ticker_service, epoch_service, - _logger: logger, + logger: logger.new_with_component_name::(), } } @@ -378,7 +379,7 @@ impl CertifierService for MithrilCertifierService { mod tests { use crate::{ dependency_injection::DependenciesBuilder, multi_signer::MockMultiSigner, - services::FakeEpochService, Configuration, + services::FakeEpochService, test_tools::TestLogger, Configuration, }; use chrono::{DateTime, Days}; use mithril_common::{ @@ -404,7 +405,6 @@ mod tests { let multi_signer = dependency_builder.get_multi_signer().await.unwrap(); let ticker_service = dependency_builder.get_ticker_service().await.unwrap(); let epoch_service = dependency_builder.get_epoch_service().await.unwrap(); - let logger = dependency_builder.get_logger().unwrap(); Self::new( network, @@ -416,7 +416,7 @@ mod tests { multi_signer, ticker_service, epoch_service, - logger, + TestLogger::stdout(), ) } } diff --git a/mithril-aggregator/src/services/epoch_service.rs b/mithril-aggregator/src/services/epoch_service.rs index 8183e6bbb41..8f9ca7f949c 100644 --- a/mithril-aggregator/src/services/epoch_service.rs +++ b/mithril-aggregator/src/services/epoch_service.rs @@ -1,5 +1,6 @@ use anyhow::Context; use async_trait::async_trait; +use slog::Logger; use slog_scope::debug; use std::collections::BTreeSet; use std::sync::Arc; @@ -10,6 +11,7 @@ use mithril_common::entities::{ CardanoTransactionsSigningConfig, Epoch, ProtocolParameters, SignedEntityConfig, SignedEntityTypeDiscriminants, Signer, SignerWithStake, }; +use mithril_common::logging::LoggerExtensions; use mithril_common::protocol::{MultiSigner as ProtocolMultiSigner, SignerBuilder}; use mithril_common::{CardanoNetwork, StdResult}; @@ -130,6 +132,7 @@ pub struct MithrilEpochService { verification_key_store: Arc, network: CardanoNetwork, allowed_signed_entity_discriminants: BTreeSet, + logger: Logger, } impl MithrilEpochService { @@ -140,6 +143,7 @@ impl MithrilEpochService { verification_key_store: Arc, network: CardanoNetwork, allowed_discriminants: BTreeSet, + logger: Logger, ) -> Self { Self { future_epoch_settings, @@ -149,6 +153,7 @@ impl MithrilEpochService { verification_key_store, network, allowed_signed_entity_discriminants: allowed_discriminants, + logger: logger.new_with_component_name::(), } } @@ -648,6 +653,7 @@ mod tests { use std::collections::{BTreeSet, HashMap}; use crate::store::FakeEpochSettingsStorer; + use crate::test_tools::TestLogger; use crate::VerificationKeyStore; use super::*; @@ -801,6 +807,7 @@ mod tests { Arc::new(vkey_store), self.network, self.allowed_discriminants, + TestLogger::stdout(), ) } } diff --git 
a/mithril-aggregator/src/services/prover.rs b/mithril-aggregator/src/services/prover.rs index 02464f3ac2f..34d86b4920b 100644 --- a/mithril-aggregator/src/services/prover.rs +++ b/mithril-aggregator/src/services/prover.rs @@ -12,6 +12,7 @@ use mithril_common::{ entities::{ BlockNumber, BlockRange, CardanoTransaction, CardanoTransactionsSetProof, TransactionHash, }, + logging::LoggerExtensions, resource_pool::ResourcePool, signable_builder::BlockRangeRootRetriever, StdResult, @@ -70,7 +71,7 @@ impl MithrilProverService { transaction_retriever, block_range_root_retriever, mk_map_pool: ResourcePool::new(mk_map_pool_size, vec![]), - logger, + logger: logger.new_with_component_name::(), } } @@ -219,6 +220,8 @@ mod tests { use mockall::mock; use mockall::predicate::eq; + use crate::test_tools::TestLogger; + use super::*; mock! { @@ -366,13 +369,12 @@ mod tests { let mut block_range_root_retriever = MockBlockRangeRootRetrieverImpl::new(); block_range_root_retriever_mock_config(&mut block_range_root_retriever); let mk_map_pool_size = 1; - let logger = slog_scope::logger(); MithrilProverService::new( Arc::new(transaction_retriever), Arc::new(block_range_root_retriever), mk_map_pool_size, - logger, + TestLogger::stdout(), ) } diff --git a/mithril-aggregator/src/services/signed_entity.rs b/mithril-aggregator/src/services/signed_entity.rs index eca3a5bf55b..481497ea3ed 100644 --- a/mithril-aggregator/src/services/signed_entity.rs +++ b/mithril-aggregator/src/services/signed_entity.rs @@ -5,6 +5,7 @@ use anyhow::{anyhow, Context}; use async_trait::async_trait; use chrono::Utc; +use slog::Logger; use slog_scope::info; use std::sync::Arc; use tokio::task::JoinHandle; @@ -15,6 +16,7 @@ use mithril_common::{ Certificate, Epoch, MithrilStakeDistribution, SignedEntity, SignedEntityType, SignedEntityTypeDiscriminants, Snapshot, }, + logging::LoggerExtensions, signable_builder::Artifact, signed_entity_type_lock::SignedEntityTypeLock, StdResult, @@ -87,6 +89,7 @@ pub struct MithrilSignedEntityService { signed_entity_type_lock: Arc, cardano_stake_distribution_artifact_builder: Arc>, + logger: Logger, } impl MithrilSignedEntityService { @@ -106,6 +109,7 @@ impl MithrilSignedEntityService { cardano_stake_distribution_artifact_builder: Arc< dyn ArtifactBuilder, >, + logger: Logger, ) -> Self { Self { signed_entity_storer, @@ -114,6 +118,7 @@ impl MithrilSignedEntityService { cardano_transactions_artifact_builder, signed_entity_type_lock, cardano_stake_distribution_artifact_builder, + logger: logger.new_with_component_name::(), } } @@ -397,6 +402,7 @@ mod tests { use crate::artifact_builder::MockArtifactBuilder; use crate::database::repository::MockSignedEntityStorer; + use crate::test_tools::TestLogger; use super::*; @@ -470,6 +476,7 @@ mod tests { Arc::new(self.mock_cardano_transactions_artifact_builder), Arc::new(SignedEntityTypeLock::default()), Arc::new(self.mock_cardano_stake_distribution_artifact_builder), + TestLogger::stdout(), ) } @@ -521,6 +528,7 @@ mod tests { Arc::new(self.mock_cardano_transactions_artifact_builder), Arc::new(SignedEntityTypeLock::default()), Arc::new(self.mock_cardano_stake_distribution_artifact_builder), + TestLogger::stdout(), ) } diff --git a/mithril-aggregator/src/services/upkeep.rs b/mithril-aggregator/src/services/upkeep.rs index c3e59434c3c..533e672e618 100644 --- a/mithril-aggregator/src/services/upkeep.rs +++ b/mithril-aggregator/src/services/upkeep.rs @@ -11,6 +11,7 @@ use anyhow::Context; use async_trait::async_trait; use slog::{info, Logger}; +use 
mithril_common::logging::LoggerExtensions; use mithril_common::signed_entity_type_lock::SignedEntityTypeLock; use mithril_common::StdResult; use mithril_persistence::sqlite::{ @@ -48,7 +49,7 @@ impl AggregatorUpkeepService { main_db_connection, cardano_tx_connection_pool, signed_entity_type_lock, - logger, + logger: logger.new_with_component_name::(), } } diff --git a/mithril-aggregator/src/snapshot_uploaders/local_snapshot_uploader.rs b/mithril-aggregator/src/snapshot_uploaders/local_snapshot_uploader.rs index 384e427a315..31f52ccf72d 100644 --- a/mithril-aggregator/src/snapshot_uploaders/local_snapshot_uploader.rs +++ b/mithril-aggregator/src/snapshot_uploaders/local_snapshot_uploader.rs @@ -1,9 +1,12 @@ use anyhow::Context; use async_trait::async_trait; -use mithril_common::StdResult; +use slog::Logger; use slog_scope::debug; use std::path::{Path, PathBuf}; +use mithril_common::logging::LoggerExtensions; +use mithril_common::StdResult; + use crate::http_server; use crate::snapshot_uploaders::{SnapshotLocation, SnapshotUploader}; use crate::tools; @@ -15,15 +18,18 @@ pub struct LocalSnapshotUploader { /// Target folder where to store snapshots archive target_location: PathBuf, + + logger: Logger, } impl LocalSnapshotUploader { /// LocalSnapshotUploader factory - pub(crate) fn new(snapshot_server_url: String, target_location: &Path) -> Self { + pub(crate) fn new(snapshot_server_url: String, target_location: &Path, logger: Logger) -> Self { debug!("New LocalSnapshotUploader created"; "snapshot_server_url" => &snapshot_server_url); Self { snapshot_server_url, target_location: target_location.to_path_buf(), + logger: logger.new_with_component_name::(), } } } @@ -51,14 +57,17 @@ impl SnapshotUploader for LocalSnapshotUploader { #[cfg(test)] mod tests { - use super::LocalSnapshotUploader; - use crate::http_server; - use crate::snapshot_uploaders::SnapshotUploader; use std::fs::File; use std::io::Write; use std::path::{Path, PathBuf}; use tempfile::tempdir; + use crate::http_server; + use crate::snapshot_uploaders::SnapshotUploader; + use crate::test_tools::TestLogger; + + use super::LocalSnapshotUploader; + fn create_fake_archive(dir: &Path, digest: &str) -> PathBuf { let file_path = dir.join(format!("test.{digest}.tar.gz")); let mut file = File::create(&file_path).unwrap(); @@ -84,7 +93,7 @@ mod tests { http_server::SERVER_BASE_PATH, &digest ); - let uploader = LocalSnapshotUploader::new(url, target_dir.path()); + let uploader = LocalSnapshotUploader::new(url, target_dir.path(), TestLogger::stdout()); let location = uploader .upload_snapshot(&archive) @@ -100,8 +109,11 @@ mod tests { let target_dir = tempdir().unwrap(); let digest = "41e27b9ed5a32531b95b2b7ff3c0757591a06a337efaf19a524a998e348028e7"; let archive = create_fake_archive(source_dir.path(), digest); - let uploader = - LocalSnapshotUploader::new("http://test.com:8080/".to_string(), target_dir.path()); + let uploader = LocalSnapshotUploader::new( + "http://test.com:8080/".to_string(), + target_dir.path(), + TestLogger::stdout(), + ); uploader.upload_snapshot(&archive).await.unwrap(); assert!(target_dir diff --git a/mithril-aggregator/src/snapshot_uploaders/remote_snapshot_uploader.rs b/mithril-aggregator/src/snapshot_uploaders/remote_snapshot_uploader.rs index 47170ad2eab..036a2996480 100644 --- a/mithril-aggregator/src/snapshot_uploaders/remote_snapshot_uploader.rs +++ b/mithril-aggregator/src/snapshot_uploaders/remote_snapshot_uploader.rs @@ -1,8 +1,11 @@ use async_trait::async_trait; -use mithril_common::StdResult; +use 
slog::Logger; use slog_scope::debug; use std::path::Path; +use mithril_common::logging::LoggerExtensions; +use mithril_common::StdResult; + use crate::snapshot_uploaders::{SnapshotLocation, SnapshotUploader}; use crate::tools::RemoteFileUploader; @@ -11,6 +14,7 @@ pub struct RemoteSnapshotUploader { bucket: String, file_uploader: Box, use_cdn_domain: bool, + logger: Logger, } impl RemoteSnapshotUploader { @@ -19,12 +23,14 @@ impl RemoteSnapshotUploader { file_uploader: Box, bucket: String, use_cdn_domain: bool, + logger: Logger, ) -> Self { debug!("New GCPSnapshotUploader created"); Self { bucket, file_uploader, use_cdn_domain, + logger: logger.new_with_component_name::(), } } } @@ -50,12 +56,15 @@ impl SnapshotUploader for RemoteSnapshotUploader { #[cfg(test)] mod tests { - use super::RemoteSnapshotUploader; - use crate::snapshot_uploaders::SnapshotUploader; - use crate::tools::MockRemoteFileUploader; use anyhow::anyhow; use std::path::Path; + use crate::snapshot_uploaders::SnapshotUploader; + use crate::test_tools::TestLogger; + use crate::tools::MockRemoteFileUploader; + + use super::RemoteSnapshotUploader; + #[tokio::test] async fn test_upload_snapshot_not_using_cdn_domain_ok() { let use_cdn_domain = false; @@ -65,6 +74,7 @@ mod tests { Box::new(file_uploader), "cardano-testnet".to_string(), use_cdn_domain, + TestLogger::stdout(), ); let snapshot_filepath = Path::new("test/snapshot.xxx.tar.gz"); let expected_location = @@ -87,6 +97,7 @@ mod tests { Box::new(file_uploader), "cdn.mithril.network".to_string(), use_cdn_domain, + TestLogger::stdout(), ); let snapshot_filepath = Path::new("test/snapshot.xxx.tar.gz"); let expected_location = "https://cdn.mithril.network/snapshot.xxx.tar.gz".to_string(); @@ -105,8 +116,12 @@ mod tests { file_uploader .expect_upload_file() .returning(|_| Err(anyhow!("unexpected error"))); - let snapshot_uploader = - RemoteSnapshotUploader::new(Box::new(file_uploader), "".to_string(), false); + let snapshot_uploader = RemoteSnapshotUploader::new( + Box::new(file_uploader), + "".to_string(), + false, + TestLogger::stdout(), + ); let snapshot_filepath = Path::new("test/snapshot.xxx.tar.gz"); let result = snapshot_uploader diff --git a/mithril-aggregator/src/snapshotter.rs b/mithril-aggregator/src/snapshotter.rs index 9965794e6e4..d9f288dbbad 100644 --- a/mithril-aggregator/src/snapshotter.rs +++ b/mithril-aggregator/src/snapshotter.rs @@ -1,7 +1,7 @@ use anyhow::{anyhow, Context}; use flate2::Compression; use flate2::{read::GzDecoder, write::GzEncoder}; -use mithril_common::StdResult; +use slog::Logger; use slog_scope::{info, warn}; use std::fs::{self, File}; use std::io::{self, Read, Seek, SeekFrom}; @@ -11,6 +11,9 @@ use tar::{Archive, Entry, EntryType}; use thiserror::Error; use zstd::{Decoder, Encoder}; +use mithril_common::logging::LoggerExtensions; +use mithril_common::StdResult; + use crate::dependency_injection::DependenciesBuilderError; use crate::ZstandardCompressionParameters; @@ -45,6 +48,8 @@ pub struct CompressedArchiveSnapshotter { /// Compression algorithm used for the archive compression_algorithm: SnapshotterCompressionAlgorithm, + + logger: Logger, } #[derive(Debug, Clone, PartialEq, Eq)] @@ -119,6 +124,7 @@ impl CompressedArchiveSnapshotter { db_directory: PathBuf, ongoing_snapshot_directory: PathBuf, compression_algorithm: SnapshotterCompressionAlgorithm, + logger: Logger, ) -> StdResult { if ongoing_snapshot_directory.exists() { fs::remove_dir_all(&ongoing_snapshot_directory).with_context(|| { @@ -143,6 +149,7 @@ impl 
CompressedArchiveSnapshotter { db_directory, ongoing_snapshot_directory, compression_algorithm, + logger: logger.new_with_component_name::(), }) } @@ -391,6 +398,8 @@ mod tests { use mithril_common::digesters::DummyImmutablesDbBuilder; use mithril_common::test_utils::TempDir; + use crate::test_tools::TestLogger; + use super::*; fn get_test_directory(dir_name: &str) -> PathBuf { @@ -427,6 +436,7 @@ mod tests { db_directory, pending_snapshot_directory.clone(), SnapshotterCompressionAlgorithm::Gzip, + TestLogger::stdout(), ) .unwrap(), ); @@ -450,6 +460,7 @@ mod tests { db_directory, pending_snapshot_directory.clone(), SnapshotterCompressionAlgorithm::Gzip, + TestLogger::stdout(), ) .unwrap(), ); @@ -470,6 +481,7 @@ mod tests { db_directory, pending_snapshot_directory.clone(), SnapshotterCompressionAlgorithm::Gzip, + TestLogger::stdout(), ) .unwrap(), ); @@ -505,6 +517,7 @@ mod tests { db_directory, pending_snapshot_directory.clone(), SnapshotterCompressionAlgorithm::Gzip, + TestLogger::stdout(), ) .unwrap(), ); @@ -543,6 +556,7 @@ mod tests { db_directory, pending_snapshot_directory.clone(), ZstandardCompressionParameters::default().into(), + TestLogger::stdout(), ) .unwrap(), ); diff --git a/mithril-aggregator/src/tools/certificates_hash_migrator.rs b/mithril-aggregator/src/tools/certificates_hash_migrator.rs index 29f76735a4c..a370df737a0 100644 --- a/mithril-aggregator/src/tools/certificates_hash_migrator.rs +++ b/mithril-aggregator/src/tools/certificates_hash_migrator.rs @@ -1,8 +1,10 @@ use std::{collections::HashMap, sync::Arc}; use anyhow::{anyhow, Context}; +use slog::Logger; use slog_scope::{debug, info, trace}; +use mithril_common::logging::LoggerExtensions; use mithril_common::{entities::Certificate, StdResult}; use crate::database::repository::{CertificateRepository, SignedEntityStorer}; @@ -11,6 +13,7 @@ use crate::database::repository::{CertificateRepository, SignedEntityStorer}; pub struct CertificatesHashMigrator { certificate_repository: CertificateRepository, signed_entity_storer: Arc, + logger: Logger, } impl CertificatesHashMigrator { @@ -18,10 +21,12 @@ impl CertificatesHashMigrator { pub fn new( certificate_repository: CertificateRepository, signed_entity_storer: Arc, + logger: Logger, ) -> Self { Self { certificate_repository, signed_entity_storer, + logger: logger.new_with_component_name::(), } } @@ -198,6 +203,7 @@ mod test { use crate::database::record::{CertificateRecord, SignedEntityRecord}; use crate::database::repository::SignedEntityStore; + use crate::test_tools::TestLogger; use super::*; @@ -449,6 +455,7 @@ mod test { let migrator = CertificatesHashMigrator::new( CertificateRepository::new(sqlite_connection.clone()), Arc::new(SignedEntityStore::new(sqlite_connection.clone())), + TestLogger::stdout(), ); migrator .migrate() @@ -613,6 +620,7 @@ mod test { let migrator = CertificatesHashMigrator::new( CertificateRepository::new(connection.clone()), Arc::new(SignedEntityStore::new(connection.clone())), + TestLogger::stdout(), ); migrator .migrate() diff --git a/mithril-aggregator/src/tools/remote_file_uploader.rs b/mithril-aggregator/src/tools/remote_file_uploader.rs index 2e1594ef7b9..7b305f60648 100644 --- a/mithril-aggregator/src/tools/remote_file_uploader.rs +++ b/mithril-aggregator/src/tools/remote_file_uploader.rs @@ -4,11 +4,14 @@ use cloud_storage::{ bucket::Entity, bucket_access_control::Role, object_access_control::NewObjectAccessControl, Client, }; -use mithril_common::StdResult; +use slog::Logger; use slog_scope::info; use std::{env, path::Path}; use 
tokio_util::{codec::BytesCodec, codec::FramedRead}; +use mithril_common::logging::LoggerExtensions; +use mithril_common::StdResult; + /// RemoteFileUploader represents a remote file uploader interactor #[cfg_attr(test, mockall::automock)] #[async_trait] @@ -20,12 +23,16 @@ pub trait RemoteFileUploader: Sync + Send { /// GcpFileUploader represents a Google Cloud Platform file uploader interactor pub struct GcpFileUploader { bucket: String, + logger: Logger, } impl GcpFileUploader { /// GcpFileUploader factory - pub fn new(bucket: String) -> Self { - Self { bucket } + pub fn new(bucket: String, logger: Logger) -> Self { + Self { + bucket, + logger: logger.new_with_component_name::(), + } } } diff --git a/mithril-aggregator/src/tools/signer_importer.rs b/mithril-aggregator/src/tools/signer_importer.rs index cedaa01ebec..34ed8489eb9 100644 --- a/mithril-aggregator/src/tools/signer_importer.rs +++ b/mithril-aggregator/src/tools/signer_importer.rs @@ -1,16 +1,18 @@ use anyhow::Context; use async_trait::async_trait; -use mithril_common::{entities::PartyId, StdResult}; use reqwest::{IntoUrl, Url}; use serde::{Deserialize, Serialize}; +use slog::Logger; +use slog_scope::{info, warn}; use std::collections::HashMap; use std::ops::Not; use std::sync::Arc; use std::time::Duration; -use crate::database::repository::SignerStore; +use mithril_common::logging::LoggerExtensions; +use mithril_common::{entities::PartyId, StdResult}; -use slog_scope::{info, warn}; +use crate::database::repository::SignerStore; pub type PoolTicker = String; @@ -18,6 +20,7 @@ pub type PoolTicker = String; pub struct SignersImporter { retriever: Arc, persister: Arc, + logger: Logger, } impl SignersImporter { @@ -25,10 +28,12 @@ impl SignersImporter { pub fn new( retriever: Arc, persister: Arc, + logger: Logger, ) -> Self { Self { retriever, persister, + logger: logger.new_with_component_name::(), } } @@ -97,11 +102,16 @@ pub struct CExplorerSignerRetriever { /// Url from which a SPO list using the CExplorer format will be fetch. source_url: Url, client: reqwest::Client, + logger: Logger, } impl CExplorerSignerRetriever { /// Create a new [CExplorerSignerRetriever] that will fetch data from the given url. 
- pub(crate) fn new(source_url: T, timeout: Option) -> StdResult { + pub(crate) fn new( + source_url: T, + timeout: Option, + logger: Logger, + ) -> StdResult { let source_url = source_url .into_url() .with_context(|| "Given `source_url` is not a valid Url")?; @@ -113,7 +123,11 @@ impl CExplorerSignerRetriever { .build() .with_context(|| "Http Client build failed")?; - Ok(Self { source_url, client }) + Ok(Self { + source_url, + client, + logger, + }) } } @@ -190,6 +204,7 @@ mod tests { use crate::database::repository::{SignerGetter, SignerStore}; use crate::database::test_helper::main_db_connection; use crate::http_server::routes::reply; + use crate::test_tools::TestLogger; use super::*; @@ -270,8 +285,12 @@ mod tests { }"# })); - let retriever = - CExplorerSignerRetriever::new(format!("{}/list", server.url()), None).unwrap(); + let retriever = CExplorerSignerRetriever::new( + format!("{}/list", server.url()), + None, + TestLogger::stdout(), + ) + .unwrap(); let result = retriever .retrieve() .await @@ -292,8 +311,12 @@ mod tests { let server = test_http_server(warp::path("list").map(|| reply::internal_server_error("whatever"))); - let retriever = - CExplorerSignerRetriever::new(format!("{}/list", server.url()), None).unwrap(); + let retriever = CExplorerSignerRetriever::new( + format!("{}/list", server.url()), + None, + TestLogger::stdout(), + ) + .unwrap(); retriever .retrieve() .await @@ -304,8 +327,12 @@ mod tests { async fn retriever_yield_error_when_json_is_malformed() { let server = test_http_server(warp::path("list").map(|| r#"{ "data": [ {"pool_" ] }"#)); - let retriever = - CExplorerSignerRetriever::new(format!("{}/list", server.url()), None).unwrap(); + let retriever = CExplorerSignerRetriever::new( + format!("{}/list", server.url()), + None, + TestLogger::stdout(), + ) + .unwrap(); retriever .retrieve() .await @@ -322,6 +349,7 @@ mod tests { let retriever = CExplorerSignerRetriever::new( format!("{}/list", server.url()), Some(Duration::from_millis(10)), + TestLogger::stdout(), ) .unwrap(); retriever @@ -344,6 +372,7 @@ mod tests { let importer = SignersImporter::new( Arc::new(retriever), Arc::new(SignerStore::new(connection.clone())), + TestLogger::stdout(), ); importer .run() @@ -388,6 +417,7 @@ mod tests { let importer = SignersImporter::new( Arc::new(retriever), Arc::new(SignerStore::new(connection.clone())), + TestLogger::stdout(), ); importer .run() @@ -436,9 +466,15 @@ mod tests { let importer = SignersImporter::new( Arc::new( - CExplorerSignerRetriever::new(format!("{}/list", server.url()), None).unwrap(), + CExplorerSignerRetriever::new( + format!("{}/list", server.url()), + None, + TestLogger::stdout(), + ) + .unwrap(), ), Arc::new(SignerStore::new(connection.clone())), + TestLogger::stdout(), ); importer .run() diff --git a/mithril-aggregator/src/tools/single_signature_authenticator.rs b/mithril-aggregator/src/tools/single_signature_authenticator.rs index 768d13705eb..80a5c64f07c 100644 --- a/mithril-aggregator/src/tools/single_signature_authenticator.rs +++ b/mithril-aggregator/src/tools/single_signature_authenticator.rs @@ -2,6 +2,7 @@ use slog::{debug, Logger}; use std::sync::Arc; use mithril_common::entities::{SingleSignatureAuthenticationStatus, SingleSignatures}; +use mithril_common::logging::LoggerExtensions; use mithril_common::StdResult; use crate::MultiSigner; @@ -17,7 +18,7 @@ impl SingleSignatureAuthenticator { pub fn new(multi_signer: Arc, logger: Logger) -> Self { Self { multi_signer, - logger, + logger: logger.new_with_component_name::(), } } From 
00c1e1adc11383d50a0a44ac33bd18fd2052aaf8 Mon Sep 17 00:00:00 2001 From: DJO <790521+Alenar@users.noreply.github.com> Date: Fri, 11 Oct 2024 17:10:04 +0200 Subject: [PATCH 04/12] refactor(aggregator): use dedicated logger instead of `slog_scope` in http server To tag the logs with a source so they may be easily filtered. --- .../artifact_routes/cardano_transaction.rs | 16 +++-- .../mithril_stake_distribution.rs | 16 +++-- .../routes/artifact_routes/snapshot.rs | 29 ++++++---- .../http_server/routes/certificate_routes.rs | 22 ++++--- .../src/http_server/routes/epoch_routes.rs | 6 +- .../src/http_server/routes/mod.rs | 4 +- .../src/http_server/routes/proof_routes.rs | 13 +++-- .../src/http_server/routes/root_routes.rs | 8 ++- .../src/http_server/routes/router.rs | 23 +++++--- .../http_server/routes/signatures_routes.rs | 20 ++++--- .../src/http_server/routes/signer_routes.rs | 58 ++++++++++++------- 11 files changed, 134 insertions(+), 81 deletions(-) diff --git a/mithril-aggregator/src/http_server/routes/artifact_routes/cardano_transaction.rs b/mithril-aggregator/src/http_server/routes/artifact_routes/cardano_transaction.rs index 203f72abaaf..fbd83e47adc 100644 --- a/mithril-aggregator/src/http_server/routes/artifact_routes/cardano_transaction.rs +++ b/mithril-aggregator/src/http_server/routes/artifact_routes/cardano_transaction.rs @@ -15,6 +15,7 @@ fn artifact_cardano_transactions( ) -> impl Filter + Clone { warp::path!("artifact" / "cardano-transactions") .and(warp::get()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_http_message_service(dependency_manager)) .and_then(handlers::list_artifacts) } @@ -25,6 +26,7 @@ fn artifact_cardano_transaction_by_id( ) -> impl Filter + Clone { warp::path!("artifact" / "cardano-transaction" / String) .and(warp::get()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_http_message_service(dependency_manager)) .and_then(handlers::get_artifact_by_signed_entity_id) } @@ -33,7 +35,7 @@ pub mod handlers { use crate::http_server::routes::reply; use crate::services::MessageService; - use slog_scope::{debug, warn}; + use slog::{debug, warn, Logger}; use std::convert::Infallible; use std::sync::Arc; use warp::http::StatusCode; @@ -42,9 +44,10 @@ pub mod handlers { /// List Cardano Transactions set artifacts pub async fn list_artifacts( + logger: Logger, http_message_service: Arc, ) -> Result { - debug!("⇄ HTTP SERVER: artifacts"); + debug!(logger, "⇄ HTTP SERVER: artifacts"); match http_message_service .get_cardano_transaction_list_message(LIST_MAX_ITEMS) @@ -52,7 +55,7 @@ pub mod handlers { { Ok(message) => Ok(reply::json(&message, StatusCode::OK)), Err(err) => { - warn!("list_artifacts_cardano_transactions"; "error" => ?err); + warn!(logger, "list_artifacts_cardano_transactions"; "error" => ?err); Ok(reply::server_error(err)) } @@ -62,9 +65,10 @@ pub mod handlers { /// Get Artifact by signed entity id pub async fn get_artifact_by_signed_entity_id( signed_entity_id: String, + logger: Logger, http_message_service: Arc, ) -> Result { - debug!("⇄ HTTP SERVER: artifact/{signed_entity_id}"); + debug!(logger, "⇄ HTTP SERVER: artifact/{signed_entity_id}"); match http_message_service .get_cardano_transaction_message(&signed_entity_id) @@ -72,11 +76,11 @@ pub mod handlers { { Ok(Some(message)) => Ok(reply::json(&message, StatusCode::OK)), Ok(None) => { - warn!("get_cardano_transaction_details::not_found"); + warn!(logger, "get_cardano_transaction_details::not_found"); Ok(reply::empty(StatusCode::NOT_FOUND)) } Err(err) 
=> { - warn!("get_cardano_transaction_details::error"; "error" => ?err); + warn!(logger, "get_cardano_transaction_details::error"; "error" => ?err); Ok(reply::server_error(err)) } } diff --git a/mithril-aggregator/src/http_server/routes/artifact_routes/mithril_stake_distribution.rs b/mithril-aggregator/src/http_server/routes/artifact_routes/mithril_stake_distribution.rs index 9f6ce379e46..b101705ae1b 100644 --- a/mithril-aggregator/src/http_server/routes/artifact_routes/mithril_stake_distribution.rs +++ b/mithril-aggregator/src/http_server/routes/artifact_routes/mithril_stake_distribution.rs @@ -16,6 +16,7 @@ fn artifact_mithril_stake_distributions( ) -> impl Filter + Clone { warp::path!("artifact" / "mithril-stake-distributions") .and(warp::get()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_http_message_service(dependency_manager)) .and_then(handlers::list_artifacts) } @@ -26,6 +27,7 @@ fn artifact_mithril_stake_distribution_by_id( ) -> impl Filter + Clone { warp::path!("artifact" / "mithril-stake-distribution" / String) .and(warp::get()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_http_message_service(dependency_manager)) .and_then(handlers::get_artifact_by_signed_entity_id) } @@ -34,7 +36,7 @@ pub mod handlers { use crate::http_server::routes::reply; use crate::services::MessageService; - use slog_scope::{debug, warn}; + use slog::{debug, warn, Logger}; use std::convert::Infallible; use std::sync::Arc; use warp::http::StatusCode; @@ -43,9 +45,10 @@ pub mod handlers { /// List MithrilStakeDistribution artifacts pub async fn list_artifacts( + logger: Logger, http_message_service: Arc, ) -> Result { - debug!("⇄ HTTP SERVER: artifacts"); + debug!(logger, "⇄ HTTP SERVER: artifacts"); match http_message_service .get_mithril_stake_distribution_list_message(LIST_MAX_ITEMS) @@ -53,7 +56,7 @@ pub mod handlers { { Ok(message) => Ok(reply::json(&message, StatusCode::OK)), Err(err) => { - warn!("list_artifacts_mithril_stake_distribution"; "error" => ?err); + warn!(logger,"list_artifacts_mithril_stake_distribution"; "error" => ?err); Ok(reply::server_error(err)) } } @@ -62,9 +65,10 @@ pub mod handlers { /// Get Artifact by signed entity id pub async fn get_artifact_by_signed_entity_id( signed_entity_id: String, + logger: Logger, http_message_service: Arc, ) -> Result { - debug!("⇄ HTTP SERVER: artifact/{signed_entity_id}"); + debug!(logger, "⇄ HTTP SERVER: artifact/{signed_entity_id}"); match http_message_service .get_mithril_stake_distribution_message(&signed_entity_id) @@ -72,11 +76,11 @@ pub mod handlers { { Ok(Some(message)) => Ok(reply::json(&message, StatusCode::OK)), Ok(None) => { - warn!("get_mithril_stake_distribution_details::not_found"); + warn!(logger, "get_mithril_stake_distribution_details::not_found"); Ok(reply::empty(StatusCode::NOT_FOUND)) } Err(err) => { - warn!("get_mithril_stake_distribution_details::error"; "error" => ?err); + warn!(logger,"get_mithril_stake_distribution_details::error"; "error" => ?err); Ok(reply::server_error(err)) } } diff --git a/mithril-aggregator/src/http_server/routes/artifact_routes/snapshot.rs b/mithril-aggregator/src/http_server/routes/artifact_routes/snapshot.rs index 3c77e05e6bb..45d7ddd6f87 100644 --- a/mithril-aggregator/src/http_server/routes/artifact_routes/snapshot.rs +++ b/mithril-aggregator/src/http_server/routes/artifact_routes/snapshot.rs @@ -23,6 +23,7 @@ fn artifact_cardano_full_immutable_snapshots( ) -> impl Filter + Clone { warp::path!("artifact" / "snapshots") 
.and(warp::get()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_http_message_service(dependency_manager)) .and_then(handlers::list_artifacts) } @@ -33,6 +34,7 @@ fn artifact_cardano_full_immutable_snapshot_by_id( ) -> impl Filter + Clone { warp::path!("artifact" / "snapshot" / String) .and(warp::get()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_http_message_service(dependency_manager)) .and_then(handlers::get_artifact_by_signed_entity_id) } @@ -43,6 +45,7 @@ fn snapshot_download( ) -> impl Filter + Clone { warp::path!("artifact" / "snapshot" / String / "download") .and(warp::get().or(warp::head()).unify()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_config(dependency_manager)) .and(middlewares::with_signed_entity_service(dependency_manager)) .and_then(handlers::snapshot_download) @@ -55,6 +58,7 @@ fn serve_snapshots_dir( warp::path("snapshot_download") .and(warp::fs::dir(config.snapshot_directory)) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_signed_entity_service(dependency_manager)) .and_then(handlers::ensure_downloaded_file_is_a_snapshot) } @@ -92,7 +96,7 @@ mod handlers { use crate::http_server::SERVER_BASE_PATH; use crate::services::MessageService; use crate::{services::SignedEntityService, Configuration}; - use slog_scope::{debug, warn}; + use slog::{debug, warn, Logger}; use std::convert::Infallible; use std::str::FromStr; use std::sync::Arc; @@ -102,9 +106,10 @@ mod handlers { /// List Snapshot artifacts pub async fn list_artifacts( + logger: Logger, http_message_service: Arc, ) -> Result { - debug!("⇄ HTTP SERVER: artifacts"); + debug!(logger, "⇄ HTTP SERVER: artifacts"); match http_message_service .get_snapshot_list_message(LIST_MAX_ITEMS) @@ -112,7 +117,7 @@ mod handlers { { Ok(message) => Ok(reply::json(&message, StatusCode::OK)), Err(err) => { - warn!("list_artifacts_snapshot"; "error" => ?err); + warn!(logger,"list_artifacts_snapshot"; "error" => ?err); Ok(reply::server_error(err)) } } @@ -121,20 +126,21 @@ mod handlers { /// Get Artifact by signed entity id pub async fn get_artifact_by_signed_entity_id( signed_entity_id: String, + logger: Logger, http_message_service: Arc, ) -> Result { - debug!("⇄ HTTP SERVER: artifact/{signed_entity_id}"); + debug!(logger, "⇄ HTTP SERVER: artifact/{signed_entity_id}"); match http_message_service .get_snapshot_message(&signed_entity_id) .await { Ok(Some(signed_entity)) => Ok(reply::json(&signed_entity, StatusCode::OK)), Ok(None) => { - warn!("snapshot_details::not_found"); + warn!(logger, "snapshot_details::not_found"); Ok(reply::empty(StatusCode::NOT_FOUND)) } Err(err) => { - warn!("snapshot_details::error"; "error" => ?err); + warn!(logger,"snapshot_details::error"; "error" => ?err); Ok(reply::server_error(err)) } } @@ -143,10 +149,12 @@ mod handlers { /// Download a file if and only if it's a snapshot archive pub async fn ensure_downloaded_file_is_a_snapshot( reply: warp::fs::File, + logger: Logger, signed_entity_service: Arc, ) -> Result { let filepath = reply.path().to_path_buf(); debug!( + logger, "⇄ HTTP SERVER: ensure_downloaded_file_is_a_snapshot / file: `{}`", filepath.display() ); @@ -167,7 +175,7 @@ mod handlers { _ => Ok(reply::empty(StatusCode::NOT_FOUND)), }, Err(err) => { - warn!("ensure_downloaded_file_is_a_snapshot::error"; "error" => ?err); + warn!(logger,"ensure_downloaded_file_is_a_snapshot::error"; "error" => ?err); Ok(reply::empty(StatusCode::NOT_FOUND)) } } @@ -176,10 +184,11 @@ mod handlers { 
/// Snapshot download pub async fn snapshot_download( digest: String, + logger: Logger, config: Configuration, signed_entity_service: Arc, ) -> Result { - debug!("⇄ HTTP SERVER: snapshot_download/{}", digest); + debug!(logger, "⇄ HTTP SERVER: snapshot_download/{}", digest); match signed_entity_service .get_signed_snapshot_by_id(&digest) @@ -206,11 +215,11 @@ mod handlers { Ok(Box::new(warp::redirect::found(snapshot_uri)) as Box) } Ok(None) => { - warn!("snapshot_download::not_found"); + warn!(logger, "snapshot_download::not_found"); Ok(reply::empty(StatusCode::NOT_FOUND)) } Err(err) => { - warn!("snapshot_download::error"; "error" => ?err); + warn!(logger,"snapshot_download::error"; "error" => ?err); Ok(reply::server_error(err)) } } diff --git a/mithril-aggregator/src/http_server/routes/certificate_routes.rs b/mithril-aggregator/src/http_server/routes/certificate_routes.rs index f0ec6f1f8cd..d6b75864dd3 100644 --- a/mithril-aggregator/src/http_server/routes/certificate_routes.rs +++ b/mithril-aggregator/src/http_server/routes/certificate_routes.rs @@ -16,6 +16,7 @@ fn certificate_pending( ) -> impl Filter + Clone { warp::path!("certificate-pending") .and(warp::get()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_certificate_pending_store( dependency_manager, )) @@ -28,6 +29,7 @@ fn certificate_certificates( ) -> impl Filter + Clone { warp::path!("certificates") .and(warp::get()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_http_message_service(dependency_manager)) .and_then(handlers::certificate_certificates) } @@ -38,6 +40,7 @@ fn certificate_certificate_hash( ) -> impl Filter + Clone { warp::path!("certificate" / String) .and(warp::get()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_http_message_service(dependency_manager)) .and_then(handlers::certificate_certificate_hash) } @@ -48,7 +51,7 @@ mod handlers { ToCertificatePendingMessageAdapter, }; - use slog_scope::{debug, warn}; + use slog::{debug, warn, Logger}; use std::convert::Infallible; use std::sync::Arc; use warp::http::StatusCode; @@ -57,9 +60,10 @@ mod handlers { /// Certificate Pending pub async fn certificate_pending( + logger: Logger, certificate_pending_store: Arc, ) -> Result { - debug!("⇄ HTTP SERVER: certificate_pending"); + debug!(logger, "⇄ HTTP SERVER: certificate_pending"); match certificate_pending_store.get().await { Ok(Some(certificate_pending)) => Ok(reply::json( @@ -68,7 +72,7 @@ mod handlers { )), Ok(None) => Ok(reply::empty(StatusCode::NO_CONTENT)), Err(err) => { - warn!("certificate_pending::error"; "error" => ?err); + warn!(logger,"certificate_pending::error"; "error" => ?err); Ok(reply::server_error(err)) } } @@ -76,9 +80,10 @@ mod handlers { /// List all Certificates pub async fn certificate_certificates( + logger: Logger, http_message_service: Arc, ) -> Result { - debug!("⇄ HTTP SERVER: certificate_certificates",); + debug!(logger, "⇄ HTTP SERVER: certificate_certificates",); match http_message_service .get_certificate_list_message(LIST_MAX_ITEMS) @@ -86,7 +91,7 @@ mod handlers { { Ok(certificates) => Ok(reply::json(&certificates, StatusCode::OK)), Err(err) => { - warn!("certificate_certificates::error"; "error" => ?err); + warn!(logger,"certificate_certificates::error"; "error" => ?err); Ok(reply::server_error(err)) } } @@ -95,11 +100,12 @@ mod handlers { /// Certificate by certificate hash pub async fn certificate_certificate_hash( certificate_hash: String, + logger: Logger, http_message_service: Arc, ) -> Result 
{ debug!( - "⇄ HTTP SERVER: certificate_certificate_hash/{}", - certificate_hash + logger, + "⇄ HTTP SERVER: certificate_certificate_hash/{}", certificate_hash ); match http_message_service @@ -109,7 +115,7 @@ mod handlers { Ok(Some(certificate)) => Ok(reply::json(&certificate, StatusCode::OK)), Ok(None) => Ok(reply::empty(StatusCode::NOT_FOUND)), Err(err) => { - warn!("certificate_certificate_hash::error"; "error" => ?err); + warn!(logger,"certificate_certificate_hash::error"; "error" => ?err); Ok(reply::server_error(err)) } } diff --git a/mithril-aggregator/src/http_server/routes/epoch_routes.rs b/mithril-aggregator/src/http_server/routes/epoch_routes.rs index d0bb49d0806..39d221625e6 100644 --- a/mithril-aggregator/src/http_server/routes/epoch_routes.rs +++ b/mithril-aggregator/src/http_server/routes/epoch_routes.rs @@ -23,6 +23,7 @@ fn epoch_settings( ) -> impl Filter + Clone { warp::path!("epoch-settings") .and(warp::get()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_epoch_service(dependency_manager)) .and(middlewares::with_allowed_signed_entity_type_discriminants( dependency_manager, @@ -68,7 +69,7 @@ async fn get_epoch_settings_message( } mod handlers { - use slog_scope::debug; + use slog::{debug, Logger}; use std::collections::BTreeSet; use std::convert::Infallible; use warp::http::StatusCode; @@ -81,10 +82,11 @@ mod handlers { /// Epoch Settings pub async fn epoch_settings( + logger: Logger, epoch_service: EpochServiceWrapper, allowed_discriminants: BTreeSet, ) -> Result { - debug!("⇄ HTTP SERVER: epoch_settings"); + debug!(logger, "⇄ HTTP SERVER: epoch_settings"); let epoch_settings_message = get_epoch_settings_message(epoch_service, allowed_discriminants).await; diff --git a/mithril-aggregator/src/http_server/routes/mod.rs b/mithril-aggregator/src/http_server/routes/mod.rs index a4a6eca6338..23fb5ae5957 100644 --- a/mithril-aggregator/src/http_server/routes/mod.rs +++ b/mithril-aggregator/src/http_server/routes/mod.rs @@ -14,11 +14,11 @@ mod statistics_routes; /// if it was an Error. Else return the unwrapped value. #[macro_export] macro_rules! 
unwrap_to_internal_server_error { - ($code:expr, $($warn_comment:tt)*) => { + ($code:expr, $logger:expr => $($warn_comment:tt)*) => { match $code { Ok(res) => res, Err(err) => { - warn!($($warn_comment)*; "error" => ?err); + slog::warn!($logger, $($warn_comment)*; "error" => ?err); return Ok($crate::http_server::routes::reply::server_error( err, )); diff --git a/mithril-aggregator/src/http_server/routes/proof_routes.rs b/mithril-aggregator/src/http_server/routes/proof_routes.rs index ec34c5e73e6..79917773e69 100644 --- a/mithril-aggregator/src/http_server/routes/proof_routes.rs +++ b/mithril-aggregator/src/http_server/routes/proof_routes.rs @@ -38,6 +38,7 @@ fn proof_cardano_transaction( warp::path!("proof" / "cardano-transaction") .and(warp::get()) .and(warp::query::()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_signed_entity_service(dependency_manager)) .and(middlewares::validators::with_prover_transactions_hash_validator(dependency_manager)) .and(middlewares::with_prover_service(dependency_manager)) @@ -50,7 +51,7 @@ mod handlers { messages::CardanoTransactionsProofsMessage, StdResult, }; - use slog_scope::{debug, warn}; + use slog::{debug, warn, Logger}; use std::{convert::Infallible, sync::Arc}; use warp::http::StatusCode; @@ -65,18 +66,20 @@ mod handlers { pub async fn proof_cardano_transaction( transaction_parameters: CardanoTransactionProofQueryParams, + logger: Logger, signed_entity_service: Arc, validator: ProverTransactionsHashValidator, prover_service: Arc, ) -> Result { let transaction_hashes = transaction_parameters.split_transactions_hashes(); debug!( + logger, "⇄ HTTP SERVER: proof_cardano_transaction?transaction_hashes={}", transaction_parameters.transaction_hashes ); if let Err(error) = validator.validate(&transaction_hashes) { - warn!("proof_cardano_transaction::bad_request"); + warn!(logger, "proof_cardano_transaction::bad_request"); return Ok(reply::bad_request(error.label, error.message)); } @@ -86,17 +89,17 @@ mod handlers { signed_entity_service .get_last_cardano_transaction_snapshot() .await, - "proof_cardano_transaction::error" + logger => "proof_cardano_transaction::error" ) { Some(signed_entity) => { let message = unwrap_to_internal_server_error!( build_response_message(prover_service, signed_entity, sanitized_hashes).await, - "proof_cardano_transaction" + logger => "proof_cardano_transaction" ); Ok(reply::json(&message, StatusCode::OK)) } None => { - warn!("proof_cardano_transaction::not_found"); + warn!(logger, "proof_cardano_transaction::not_found"); Ok(reply::empty(StatusCode::NOT_FOUND)) } } diff --git a/mithril-aggregator/src/http_server/routes/root_routes.rs b/mithril-aggregator/src/http_server/routes/root_routes.rs index 5db5a59cc01..f02b93745ac 100644 --- a/mithril-aggregator/src/http_server/routes/root_routes.rs +++ b/mithril-aggregator/src/http_server/routes/root_routes.rs @@ -14,6 +14,7 @@ fn root( dependency_manager: &DependencyContainer, ) -> impl Filter + Clone { warp::path::end() + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_api_version_provider(dependency_manager)) .and(middlewares::with_allowed_signed_entity_type_discriminants( dependency_manager, @@ -26,7 +27,7 @@ mod handlers { use std::collections::BTreeSet; use std::{convert::Infallible, sync::Arc}; - use slog_scope::{debug, warn}; + use slog::{debug, Logger}; use warp::http::StatusCode; use mithril_common::api_version::APIVersionProvider; @@ -40,15 +41,16 @@ mod handlers { /// Root pub async fn root( + logger: Logger, 
api_version_provider: Arc, allowed_signed_entity_type_discriminants: BTreeSet, configuration: Configuration, ) -> Result { - debug!("⇄ HTTP SERVER: root"); + debug!(logger, "⇄ HTTP SERVER: root"); let open_api_version = unwrap_to_internal_server_error!( api_version_provider.compute_current_version(), - "root::error" + logger => "root::error" ); let mut capabilities = AggregatorCapabilities { diff --git a/mithril-aggregator/src/http_server/routes/router.rs b/mithril-aggregator/src/http_server/routes/router.rs index 3b3b4eb93fb..7c153fa2fc5 100644 --- a/mithril-aggregator/src/http_server/routes/router.rs +++ b/mithril-aggregator/src/http_server/routes/router.rs @@ -1,6 +1,6 @@ use crate::http_server::routes::{ - artifact_routes, certificate_routes, epoch_routes, root_routes, signatures_routes, - signer_routes, statistics_routes, + artifact_routes, certificate_routes, epoch_routes, http_server_child_logger, root_routes, + signatures_routes, signer_routes, statistics_routes, }; use crate::http_server::SERVER_BASE_PATH; use crate::DependencyContainer; @@ -8,7 +8,7 @@ use crate::DependencyContainer; use mithril_common::api_version::APIVersionProvider; use mithril_common::MITHRIL_API_VERSION_HEADER; -use slog_scope::warn; +use slog::{warn, Logger}; use std::sync::Arc; use warp::http::Method; use warp::http::StatusCode; @@ -39,6 +39,7 @@ pub fn routes( warp::any() .and(header_must_be( dependency_manager.api_version_provider.clone(), + http_server_child_logger(&dependency_manager.root_logger), )) .and(warp::path(SERVER_BASE_PATH)) .and( @@ -78,11 +79,13 @@ pub fn routes( /// API Version verification fn header_must_be( api_version_provider: Arc, + logger: Logger, ) -> impl Filter + Clone { warp::header::optional(MITHRIL_API_VERSION_HEADER) .and(warp::any().map(move || api_version_provider.clone())) + .and(warp::any().map(move || logger.clone())) .and_then( - move |maybe_header: Option, api_version_provider: Arc| async move { + move |maybe_header: Option, api_version_provider: Arc, logger: Logger| async move { match maybe_header { None => Ok(()), Some(version) => match semver::Version::parse(&version) { @@ -96,7 +99,7 @@ fn header_must_be( } Ok(_version) => Err(warp::reject::custom(VersionMismatchError)), Err(err) => { - warn!("⇄ HTTP SERVER::api_version_check::parse_error"; "error" => ?err); + warn!(logger, "⇄ HTTP SERVER::api_version_check::parse_error"; "error" => ?err); Err(warp::reject::custom(VersionParseError)) } }, @@ -124,13 +127,15 @@ mod tests { era::{EraChecker, SupportedEra}, }; + use crate::test_tools::TestLogger; + use super::*; #[tokio::test] async fn test_no_version() { let era_checker = EraChecker::new(SupportedEra::dummy(), Epoch(1)); let api_version_provider = Arc::new(APIVersionProvider::new(Arc::new(era_checker))); - let filters = header_must_be(api_version_provider); + let filters = header_must_be(api_version_provider, TestLogger::stdout()); warp::test::request() .path("/aggregator/whatever") .filter(&filters) @@ -142,7 +147,7 @@ mod tests { async fn test_parse_version_error() { let era_checker = EraChecker::new(SupportedEra::dummy(), Epoch(1)); let api_version_provider = Arc::new(APIVersionProvider::new(Arc::new(era_checker))); - let filters = header_must_be(api_version_provider); + let filters = header_must_be(api_version_provider, TestLogger::stdout()); warp::test::request() .header(MITHRIL_API_VERSION_HEADER, "not_a_version") .path("/aggregator/whatever") @@ -161,7 +166,7 @@ mod tests { open_api_versions.insert("openapi.yaml".to_string(), Version::new(1, 0, 0)); 
version_provider.update_open_api_versions(open_api_versions); let api_version_provider = Arc::new(version_provider); - let filters = header_must_be(api_version_provider); + let filters = header_must_be(api_version_provider, TestLogger::stdout()); warp::test::request() .header(MITHRIL_API_VERSION_HEADER, "0.0.999") .path("/aggregator/whatever") @@ -178,7 +183,7 @@ mod tests { open_api_versions.insert("openapi.yaml".to_string(), Version::new(0, 1, 0)); version_provider.update_open_api_versions(open_api_versions); let api_version_provider = Arc::new(version_provider); - let filters = header_must_be(api_version_provider); + let filters = header_must_be(api_version_provider, TestLogger::stdout()); warp::test::request() .header(MITHRIL_API_VERSION_HEADER, "0.1.2") .path("/aggregator/whatever") diff --git a/mithril-aggregator/src/http_server/routes/signatures_routes.rs b/mithril-aggregator/src/http_server/routes/signatures_routes.rs index 3a7589ecace..c1cd99b0af7 100644 --- a/mithril-aggregator/src/http_server/routes/signatures_routes.rs +++ b/mithril-aggregator/src/http_server/routes/signatures_routes.rs @@ -15,6 +15,7 @@ fn register_signatures( warp::path!("register-signatures") .and(warp::post()) .and(warp::body::json()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_certifier_service(dependency_manager)) .and(middlewares::with_single_signature_authenticator( dependency_manager, @@ -23,7 +24,7 @@ fn register_signatures( } mod handlers { - use slog_scope::{debug, trace, warn}; + use slog::{debug, trace, warn, Logger}; use std::convert::Infallible; use std::sync::Arc; use warp::http::StatusCode; @@ -40,11 +41,12 @@ mod handlers { /// Register Signatures pub async fn register_signatures( message: RegisterSignatureMessage, + logger: Logger, certifier_service: Arc, single_signer_authenticator: Arc, ) -> Result { - debug!("⇄ HTTP SERVER: register_signatures/{:?}", message); - trace!("⇄ HTTP SERVER: register_signatures"; "complete_message" => #?message ); + debug!(logger, "⇄ HTTP SERVER: register_signatures/{:?}", message); + trace!(logger,"⇄ HTTP SERVER: register_signatures"; "complete_message" => #?message ); let signed_entity_type = message.signed_entity_type.clone(); let signed_message = message.signed_message.clone(); @@ -52,7 +54,7 @@ mod handlers { let mut signatures = match FromRegisterSingleSignatureAdapter::try_adapt(message) { Ok(signature) => signature, Err(err) => { - warn!("register_signatures::payload decoding error"; "error" => ?err); + warn!(logger,"register_signatures::payload decoding error"; "error" => ?err); return Ok(reply::bad_request( "Could not decode signature payload".to_string(), @@ -66,11 +68,11 @@ mod handlers { single_signer_authenticator .authenticate(&mut signatures, &signed_message) .await, - "single_signer_authenticator::error" + logger => "single_signer_authenticator::error" ); if !signatures.is_authenticated() { - debug!("register_signatures::unauthenticated_signature"); + debug!(logger, "register_signatures::unauthenticated_signature"); return Ok(reply::bad_request( "Could not authenticate signature".to_string(), "Signature could not be authenticated".to_string(), @@ -84,15 +86,15 @@ mod handlers { { Err(err) => match err.downcast_ref::() { Some(CertifierServiceError::AlreadyCertified(signed_entity_type)) => { - debug!("register_signatures::open_message_already_certified"; "signed_entity_type" => ?signed_entity_type); + debug!(logger,"register_signatures::open_message_already_certified"; "signed_entity_type" => ?signed_entity_type); 
Ok(reply::empty(StatusCode::GONE)) } Some(CertifierServiceError::NotFound(signed_entity_type)) => { - debug!("register_signatures::not_found"; "signed_entity_type" => ?signed_entity_type); + debug!(logger,"register_signatures::not_found"; "signed_entity_type" => ?signed_entity_type); Ok(reply::empty(StatusCode::NOT_FOUND)) } Some(_) | None => { - warn!("register_signatures::error"; "error" => ?err); + warn!(logger,"register_signatures::error"; "error" => ?err); Ok(reply::server_error(err)) } }, diff --git a/mithril-aggregator/src/http_server/routes/signer_routes.rs b/mithril-aggregator/src/http_server/routes/signer_routes.rs index cff94a30cff..3ab00a820f8 100644 --- a/mithril-aggregator/src/http_server/routes/signer_routes.rs +++ b/mithril-aggregator/src/http_server/routes/signer_routes.rs @@ -1,4 +1,4 @@ -use slog_scope::warn; +use slog::warn; use warp::Filter; use crate::dependency_injection::EpochServiceWrapper; @@ -25,6 +25,7 @@ fn register_signer( MITHRIL_SIGNER_VERSION_HEADER, )) .and(warp::body::json()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_signer_registerer(dependency_manager)) .and(middlewares::with_event_transmitter(dependency_manager)) .and(middlewares::with_epoch_service(dependency_manager)) @@ -37,6 +38,7 @@ fn signers_tickers( ) -> impl Filter + Clone { warp::path!("signers" / "tickers") .and(warp::get()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_config(dependency_manager)) .and(middlewares::with_signer_getter(dependency_manager)) .and_then(handlers::signers_tickers) @@ -48,15 +50,22 @@ fn registered_signers( ) -> impl Filter + Clone { warp::path!("signers" / "registered" / String) .and(warp::get()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_verification_key_store(dependency_manager)) .and_then(handlers::registered_signers) } -async fn fetch_epoch_header_value(epoch_service: EpochServiceWrapper) -> String { +async fn fetch_epoch_header_value( + epoch_service: EpochServiceWrapper, + logger: &slog::Logger, +) -> String { match epoch_service.read().await.epoch_of_current_data() { Ok(epoch) => format!("{epoch}"), Err(e) => { - warn!("Could not fetch epoch header value from Epoch service: {e}"); + warn!( + logger, + "Could not fetch epoch header value from Epoch service: {e}" + ); String::new() } } @@ -76,7 +85,7 @@ mod handlers { use crate::{FromRegisterSignerAdapter, VerificationKeyStorer}; use mithril_common::entities::Epoch; use mithril_common::messages::{RegisterSignerMessage, TryFromMessageAdapter}; - use slog_scope::{debug, trace, warn}; + use slog::{debug, trace, warn, Logger}; use std::convert::Infallible; use std::sync::Arc; use warp::http::StatusCode; @@ -85,15 +94,16 @@ mod handlers { pub async fn register_signer( signer_node_version: Option, register_signer_message: RegisterSignerMessage, + logger: Logger, signer_registerer: Arc, event_transmitter: Arc>, epoch_service: EpochServiceWrapper, ) -> Result { debug!( - "⇄ HTTP SERVER: register_signer/{:?}", - register_signer_message + logger, + "⇄ HTTP SERVER: register_signer/{register_signer_message:?}" ); - trace!( + trace!(logger, "⇄ HTTP SERVER: register_signer"; "complete_message" => #?register_signer_message ); @@ -103,7 +113,7 @@ mod handlers { let signer = match FromRegisterSignerAdapter::try_adapt(register_signer_message) { Ok(signer) => signer, Err(err) => { - warn!("register_signer::payload decoding error"; "error" => ?err); + warn!(logger,"register_signer::payload decoding error"; "error" => ?err); return 
Ok(reply::bad_request( "Could not decode signer payload".to_string(), err.to_string(), @@ -116,7 +126,7 @@ mod handlers { None => Vec::new(), }; - let epoch_str = fetch_epoch_header_value(epoch_service).await; + let epoch_str = fetch_epoch_header_value(epoch_service, &logger).await; if !epoch_str.is_empty() { headers.push(("epoch", epoch_str.as_str())); } @@ -136,7 +146,7 @@ mod handlers { Ok(reply::empty(StatusCode::CREATED)) } Err(SignerRegistrationError::ExistingSigner(signer_with_stake)) => { - debug!("register_signer::already_registered"); + debug!(logger, "register_signer::already_registered"); let _ = event_transmitter.send_event_message( "HTTP::signer_register", "register_signer", @@ -146,20 +156,20 @@ mod handlers { Ok(reply::empty(StatusCode::CREATED)) } Err(SignerRegistrationError::FailedSignerRegistration(err)) => { - warn!("register_signer::failed_signer_registration"; "error" => ?err); + warn!(logger,"register_signer::failed_signer_registration"; "error" => ?err); Ok(reply::bad_request( "failed_signer_registration".to_string(), err.to_string(), )) } Err(SignerRegistrationError::RegistrationRoundNotYetOpened) => { - warn!("register_signer::registration_round_not_yed_opened"); + warn!(logger, "register_signer::registration_round_not_yed_opened"); Ok(reply::service_unavailable( SignerRegistrationError::RegistrationRoundNotYetOpened.to_string(), )) } Err(err) => { - warn!("register_signer::error"; "error" => ?err); + warn!(logger,"register_signer::error"; "error" => ?err); Ok(reply::server_error(err)) } } @@ -168,14 +178,18 @@ mod handlers { /// Get Registered Signers for a given epoch pub async fn registered_signers( registered_at: String, + logger: Logger, verification_key_store: Arc, ) -> Result { - debug!("⇄ HTTP SERVER: signers/registered/{:?}", registered_at); + debug!( + logger, + "⇄ HTTP SERVER: signers/registered/{:?}", registered_at + ); let registered_at = match registered_at.parse::() { Ok(epoch) => Epoch(epoch), Err(err) => { - warn!("registered_signers::invalid_epoch"; "error" => ?err); + warn!(logger,"registered_signers::invalid_epoch"; "error" => ?err); return Ok(reply::bad_request( "invalid_epoch".to_string(), err.to_string(), @@ -194,21 +208,22 @@ mod handlers { Ok(reply::json(&message, StatusCode::OK)) } Ok(None) => { - warn!("registered_signers::not_found"); + warn!(logger, "registered_signers::not_found"); Ok(reply::empty(StatusCode::NOT_FOUND)) } Err(err) => { - warn!("registered_signers::error"; "error" => ?err); + warn!(logger,"registered_signers::error"; "error" => ?err); Ok(reply::server_error(err)) } } } pub async fn signers_tickers( + logger: Logger, configuration: Configuration, signer_getter: Arc, ) -> Result { - debug!("⇄ HTTP SERVER: signers/tickers"); + debug!(logger, "⇄ HTTP SERVER: signers/tickers"); let network = configuration.network; match signer_getter.get_all().await { @@ -227,7 +242,7 @@ mod handlers { )) } Err(err) => { - warn!("registered_signers::error"; "error" => ?err); + warn!(logger,"registered_signers::error"; "error" => ?err); Ok(reply::server_error(err)) } } @@ -262,6 +277,7 @@ mod tests { services::FakeEpochService, signer_registerer::MockSignerRegisterer, store::MockVerificationKeyStorer, + test_tools::TestLogger, SignerRegistrationError, }; @@ -664,7 +680,7 @@ mod tests { &fixture, ))); - let epoch_str = fetch_epoch_header_value(epoch_service).await; + let epoch_str = fetch_epoch_header_value(epoch_service, &TestLogger::stdout()).await; assert_eq!(epoch_str, "84".to_string()); } @@ -673,7 +689,7 @@ mod tests { async fn 
test_fetch_epoch_header_value_when_epoch_service_error_return_empty_string() { let epoch_service = Arc::new(RwLock::new(FakeEpochService::without_data())); - let epoch_str = fetch_epoch_header_value(epoch_service).await; + let epoch_str = fetch_epoch_header_value(epoch_service, &TestLogger::stdout()).await; assert_eq!(epoch_str, "".to_string()); } From 24f7381c535cc5626439d3ee4420b43c33726dd1 Mon Sep 17 00:00:00 2001 From: DJO <790521+Alenar@users.noreply.github.com> Date: Fri, 11 Oct 2024 12:48:07 +0200 Subject: [PATCH 05/12] refactor(aggregator): use dedicated logger instead of `slog_scope` in services & tools To tag the logs with a source so they may be easily filtered. --- .../cardano_immutable_files_full.rs | 24 ++++--- .../src/dependency_injection/builder.rs | 13 ++-- mithril-aggregator/src/event_store/runner.rs | 14 ++-- .../src/event_store/transmitter_service.rs | 7 +- .../cardano_stake_distribution.rs | 29 +++++--- mithril-aggregator/src/multi_signer.rs | 23 +++++-- mithril-aggregator/src/runtime/runner.rs | 42 ++++++------ .../src/runtime/state_machine.rs | 67 +++++++++++++------ .../services/certifier/certifier_service.rs | 55 +++++++++------ .../src/services/epoch_service.rs | 12 ++-- .../src/services/signed_entity.rs | 5 +- .../local_snapshot_uploader.rs | 8 +-- .../remote_snapshot_uploader.rs | 8 +-- mithril-aggregator/src/snapshotter.rs | 8 +-- .../src/store/epoch_settings_storer.rs | 2 - .../src/tools/certificates_hash_migrator.rs | 48 +++++++++---- mithril-aggregator/src/tools/genesis.rs | 6 +- .../src/tools/remote_file_uploader.rs | 13 ++-- .../src/tools/signer_importer.rs | 22 +++--- 19 files changed, 246 insertions(+), 160 deletions(-) diff --git a/mithril-aggregator/src/artifact_builder/cardano_immutable_files_full.rs b/mithril-aggregator/src/artifact_builder/cardano_immutable_files_full.rs index 63e0413a18d..c5643a72267 100644 --- a/mithril-aggregator/src/artifact_builder/cardano_immutable_files_full.rs +++ b/mithril-aggregator/src/artifact_builder/cardano_immutable_files_full.rs @@ -1,8 +1,7 @@ use anyhow::Context; use async_trait::async_trait; use semver::Version; -use slog::Logger; -use slog_scope::{debug, warn}; +use slog::{debug, warn, Logger}; use std::sync::Arc; use thiserror::Error; @@ -61,7 +60,10 @@ impl CardanoImmutableFilesFullArtifactBuilder { beacon: &CardanoDbBeacon, snapshot_digest: &str, ) -> StdResult { - debug!("CardanoImmutableFilesFullArtifactBuilder: create snapshot archive"); + debug!( + self.logger, + "CardanoImmutableFilesFullArtifactBuilder: create snapshot archive" + ); let snapshotter = self.snapshotter.clone(); let snapshot_name = format!( @@ -79,7 +81,7 @@ impl CardanoImmutableFilesFullArtifactBuilder { }) .await??; - debug!(" > snapshot created: '{:?}'", ongoing_snapshot); + debug!(self.logger, " > snapshot created: '{ongoing_snapshot:?}'"); Ok(ongoing_snapshot) } @@ -88,7 +90,10 @@ impl CardanoImmutableFilesFullArtifactBuilder { &self, ongoing_snapshot: &OngoingSnapshot, ) -> StdResult> { - debug!("CardanoImmutableFilesFullArtifactBuilder: upload snapshot archive"); + debug!( + self.logger, + "CardanoImmutableFilesFullArtifactBuilder: upload snapshot archive" + ); let location = self .snapshot_uploader .upload_snapshot(ongoing_snapshot.get_file_path()) @@ -96,8 +101,8 @@ impl CardanoImmutableFilesFullArtifactBuilder { if let Err(error) = tokio::fs::remove_file(ongoing_snapshot.get_file_path()).await { warn!( - " > Post upload ongoing snapshot file removal failure: {}", - error + self.logger, + " > Post upload ongoing snapshot file 
removal failure: {error}" ); } @@ -111,7 +116,10 @@ impl CardanoImmutableFilesFullArtifactBuilder { snapshot_digest: String, remote_locations: Vec, ) -> StdResult { - debug!("CardanoImmutableFilesFullArtifactBuilder: create snapshot"); + debug!( + self.logger, + "CardanoImmutableFilesFullArtifactBuilder: create snapshot" + ); let snapshot = Snapshot::new( snapshot_digest, diff --git a/mithril-aggregator/src/dependency_injection/builder.rs b/mithril-aggregator/src/dependency_injection/builder.rs index ce70cd0e66a..78ed10196e9 100644 --- a/mithril-aggregator/src/dependency_injection/builder.rs +++ b/mithril-aggregator/src/dependency_injection/builder.rs @@ -1,6 +1,6 @@ use anyhow::Context; use semver::Version; -use slog::Logger; +use slog::{debug, Logger}; use std::{collections::BTreeSet, sync::Arc}; use tokio::{ sync::{ @@ -560,6 +560,7 @@ impl DependenciesBuilder { } async fn build_epoch_settings_storer(&mut self) -> Result> { + let logger = self.get_logger()?; let epoch_settings_store = EpochSettingsStore::new( self.get_sqlite_connection().await?, self.configuration.safe_epoch_retention_limit(), @@ -591,11 +592,13 @@ impl DependenciesBuilder { .replace_cardano_signing_config_empty_values(cardano_signing_config)?; } + let epoch_settings_configuration = self.get_epoch_settings_configuration()?; + debug!( + logger, + "Handle discrepancies at startup of epoch settings store, will record epoch settings from the configuration for epoch {current_epoch}: {epoch_settings_configuration:?}" + ); epoch_settings_store - .handle_discrepancies_at_startup( - current_epoch, - &self.get_epoch_settings_configuration()?, - ) + .handle_discrepancies_at_startup(current_epoch, &epoch_settings_configuration) .await .map_err(|e| DependenciesBuilderError::Initialization { message: "can not create aggregator runner".to_string(), diff --git a/mithril-aggregator/src/event_store/runner.rs b/mithril-aggregator/src/event_store/runner.rs index a5636e67047..bb31db410ba 100644 --- a/mithril-aggregator/src/event_store/runner.rs +++ b/mithril-aggregator/src/event_store/runner.rs @@ -1,8 +1,7 @@ use anyhow::Context; use mithril_common::logging::LoggerExtensions; use mithril_common::StdResult; -use slog::Logger; -use slog_scope::{debug, info}; +use slog::{debug, info, Logger}; use sqlite::Connection; use std::{path::PathBuf, sync::Arc}; use tokio::sync::mpsc::UnboundedReceiver; @@ -36,16 +35,19 @@ impl EventStore { Arc::new(connection) }; let persister = EventPersister::new(connection); - info!("monitoring: starting event loop to log messages."); + info!( + self.logger, + "monitoring: starting event loop to log messages." 
+ ); loop { if let Some(message) = self.receiver.recv().await { - debug!("Event received: {message:?}"); + debug!(self.logger, "Event received: {message:?}"); let event = persister .persist(message) .with_context(|| "event persist failure")?; - debug!("event ID={} created", event.event_id); + debug!(self.logger, "event ID={} created", event.event_id); } else { - info!("No more events to proceed, quitting…"); + info!(self.logger, "No more events to proceed, quitting…"); break; } } diff --git a/mithril-aggregator/src/event_store/transmitter_service.rs b/mithril-aggregator/src/event_store/transmitter_service.rs index 2af94a39aa5..303acf63328 100644 --- a/mithril-aggregator/src/event_store/transmitter_service.rs +++ b/mithril-aggregator/src/event_store/transmitter_service.rs @@ -1,6 +1,5 @@ use serde::Serialize; -use slog::Logger; -use slog_scope::warn; +use slog::{warn, Logger}; use std::fmt::Debug; use tokio::sync::mpsc::UnboundedSender; @@ -53,7 +52,7 @@ impl TransmitterService { { let content = serde_json::to_string(content).map_err(|e| { let error_msg = format!("Serialization error while forging event message: {e}"); - warn!("Event message error => «{error_msg}»"); + warn!(self.logger, "Event message error => «{error_msg}»"); error_msg })?; @@ -69,7 +68,7 @@ impl TransmitterService { self.get_transmitter().send(message.clone()).map_err(|e| { let error_msg = format!("An error occurred when sending message {message:?} to monitoring: '{e}'."); - warn!("Event message error => «{error_msg}»"); + warn!(self.logger, "Event message error => «{error_msg}»"); error_msg }) diff --git a/mithril-aggregator/src/http_server/routes/artifact_routes/cardano_stake_distribution.rs b/mithril-aggregator/src/http_server/routes/artifact_routes/cardano_stake_distribution.rs index b65408b9e69..6f2a1709c8d 100644 --- a/mithril-aggregator/src/http_server/routes/artifact_routes/cardano_stake_distribution.rs +++ b/mithril-aggregator/src/http_server/routes/artifact_routes/cardano_stake_distribution.rs @@ -20,6 +20,7 @@ fn artifact_cardano_stake_distributions( ) -> impl Filter + Clone { warp::path!("artifact" / "cardano-stake-distributions") .and(warp::get()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_http_message_service(dependency_manager)) .and_then(handlers::list_artifacts) } @@ -30,6 +31,7 @@ fn artifact_cardano_stake_distribution_by_id( ) -> impl Filter + Clone { warp::path!("artifact" / "cardano-stake-distribution" / String) .and(warp::get()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_http_message_service(dependency_manager)) .and_then(handlers::get_artifact_by_signed_entity_id) } @@ -40,6 +42,7 @@ fn artifact_cardano_stake_distribution_by_epoch( ) -> impl Filter + Clone { warp::path!("artifact" / "cardano-stake-distribution" / "epoch" / String) .and(warp::get()) + .and(middlewares::with_logger(dependency_manager)) .and(middlewares::with_http_message_service(dependency_manager)) .and_then(handlers::get_artifact_by_epoch) } @@ -49,7 +52,7 @@ pub mod handlers { use crate::services::MessageService; use mithril_common::entities::Epoch; - use slog_scope::{debug, warn}; + use slog::{debug, warn, Logger}; use std::convert::Infallible; use std::sync::Arc; use warp::http::StatusCode; @@ -58,9 +61,10 @@ pub mod handlers { /// List CardanoStakeDistribution artifacts pub async fn list_artifacts( + logger: Logger, http_message_service: Arc, ) -> Result { - debug!("⇄ HTTP SERVER: artifacts"); + debug!(logger, "⇄ HTTP SERVER: artifacts"); match 
http_message_service .get_cardano_stake_distribution_list_message(LIST_MAX_ITEMS) @@ -68,7 +72,7 @@ pub mod handlers { { Ok(message) => Ok(reply::json(&message, StatusCode::OK)), Err(err) => { - warn!("list_artifacts_cardano_stake_distribution"; "error" => ?err); + warn!(logger, "list_artifacts_cardano_stake_distribution"; "error" => ?err); Ok(reply::server_error(err)) } } @@ -77,9 +81,10 @@ pub mod handlers { /// Get Artifact by signed entity id pub async fn get_artifact_by_signed_entity_id( signed_entity_id: String, + logger: Logger, http_message_service: Arc, ) -> Result { - debug!("⇄ HTTP SERVER: artifact/{signed_entity_id}"); + debug!(logger, "⇄ HTTP SERVER: artifact/{signed_entity_id}"); match http_message_service .get_cardano_stake_distribution_message(&signed_entity_id) @@ -87,11 +92,11 @@ pub mod handlers { { Ok(Some(message)) => Ok(reply::json(&message, StatusCode::OK)), Ok(None) => { - warn!("get_cardano_stake_distribution_details::not_found"); + warn!(logger, "get_cardano_stake_distribution_details::not_found"); Ok(reply::empty(StatusCode::NOT_FOUND)) } Err(err) => { - warn!("get_cardano_stake_distribution_details::error"; "error" => ?err); + warn!(logger, "get_cardano_stake_distribution_details::error"; "error" => ?err); Ok(reply::server_error(err)) } } @@ -100,14 +105,15 @@ pub mod handlers { /// Get Artifact by epoch pub async fn get_artifact_by_epoch( epoch: String, + logger: Logger, http_message_service: Arc, ) -> Result { - debug!("⇄ HTTP SERVER: artifact/epoch/{epoch}"); + debug!(logger, "⇄ HTTP SERVER: artifact/epoch/{epoch}"); let artifact_epoch = match epoch.parse::() { Ok(epoch) => Epoch(epoch), Err(err) => { - warn!("get_artifact_by_epoch::invalid_epoch"; "error" => ?err); + warn!(logger, "get_artifact_by_epoch::invalid_epoch"; "error" => ?err); return Ok(reply::bad_request( "invalid_epoch".to_string(), err.to_string(), @@ -121,11 +127,14 @@ pub mod handlers { { Ok(Some(message)) => Ok(reply::json(&message, StatusCode::OK)), Ok(None) => { - warn!("get_cardano_stake_distribution_details_by_epoch::not_found"); + warn!( + logger, + "get_cardano_stake_distribution_details_by_epoch::not_found" + ); Ok(reply::empty(StatusCode::NOT_FOUND)) } Err(err) => { - warn!("get_cardano_stake_distribution_details_by_epoch::error"; "error" => ?err); + warn!(logger, "get_cardano_stake_distribution_details_by_epoch::error"; "error" => ?err); Ok(reply::server_error(err)) } } diff --git a/mithril-aggregator/src/multi_signer.rs b/mithril-aggregator/src/multi_signer.rs index 1fe8f9a3a98..2d0ba920a83 100644 --- a/mithril-aggregator/src/multi_signer.rs +++ b/mithril-aggregator/src/multi_signer.rs @@ -1,7 +1,6 @@ use anyhow::{anyhow, Context}; use async_trait::async_trait; -use slog::Logger; -use slog_scope::{debug, warn}; +use slog::{debug, warn, Logger}; use mithril_common::{ crypto_helper::{ProtocolAggregationError, ProtocolMultiSignature}, @@ -48,10 +47,11 @@ pub struct MultiSignerImpl { impl MultiSignerImpl { /// MultiSignerImpl factory pub fn new(epoch_service: EpochServiceWrapper, logger: Logger) -> Self { - debug!("New MultiSignerImpl created"); + let logger = logger.new_with_component_name::(); + debug!(logger, "New MultiSignerImpl created"); Self { epoch_service, - logger: logger.new_with_component_name::(), + logger, } } @@ -62,8 +62,11 @@ impl MultiSignerImpl { protocol_multi_signer: &ProtocolMultiSigner, ) -> StdResult<()> { debug!( + self.logger, "Verify single signature from {} at indexes {:?} for message {:?}", - single_signature.party_id, single_signature.won_indexes, message 
+ single_signature.party_id, + single_signature.won_indexes, + message ); protocol_multi_signer @@ -111,7 +114,10 @@ impl MultiSigner for MultiSignerImpl { &self, open_message: &OpenMessage, ) -> StdResult> { - debug!("MultiSigner:create_multi_signature({open_message:?})"); + debug!( + self.logger, + "MultiSigner:create_multi_signature({open_message:?})" + ); let epoch_service = self.epoch_service.read().await; let protocol_multi_signer = epoch_service.protocol_multi_signer().with_context(|| { @@ -124,7 +130,10 @@ impl MultiSigner for MultiSignerImpl { ) { Ok(multi_signature) => Ok(Some(multi_signature)), Err(ProtocolAggregationError::NotEnoughSignatures(actual, expected)) => { - warn!("Could not compute multi-signature: Not enough signatures. Got only {} out of {}.", actual, expected); + warn!( + self.logger, + "Could not compute multi-signature: Not enough signatures. Got only {actual} out of {expected}." + ); Ok(None) } Err(err) => Err(anyhow!(err).context(format!( diff --git a/mithril-aggregator/src/runtime/runner.rs b/mithril-aggregator/src/runtime/runner.rs index 772fdddb3ef..b1af5cce612 100644 --- a/mithril-aggregator/src/runtime/runner.rs +++ b/mithril-aggregator/src/runtime/runner.rs @@ -1,7 +1,6 @@ use anyhow::{anyhow, Context}; use async_trait::async_trait; -use slog::Logger; -use slog_scope::{debug, warn}; +use slog::{debug, warn, Logger}; use std::sync::Arc; use std::time::Duration; @@ -174,7 +173,7 @@ impl AggregatorRunner { impl AggregatorRunnerTrait for AggregatorRunner { /// Return the current time point from the chain async fn get_time_point_from_chain(&self) -> StdResult { - debug!("RUNNER: get time point from chain"); + debug!(self.logger, "RUNNER: get time point from chain"); let time_point = self .dependencies .ticker_service @@ -188,7 +187,7 @@ impl AggregatorRunnerTrait for AggregatorRunner { &self, signed_entity_type: &SignedEntityType, ) -> StdResult> { - debug!("RUNNER: get_current_open_message_for_signed_entity_type"; "signed_entity_type" => ?signed_entity_type); + debug!(self.logger,"RUNNER: get_current_open_message_for_signed_entity_type"; "signed_entity_type" => ?signed_entity_type); self.mark_open_message_if_expired(signed_entity_type) .await?; @@ -204,7 +203,7 @@ impl AggregatorRunnerTrait for AggregatorRunner { &self, current_time_point: &TimePoint, ) -> StdResult> { - debug!("RUNNER: get_current_non_certified_open_message"; "time_point" => #?current_time_point); + debug!(self.logger,"RUNNER: get_current_non_certified_open_message"; "time_point" => #?current_time_point); let signed_entity_types = self .list_available_signed_entity_types(current_time_point) .await?; @@ -233,7 +232,7 @@ impl AggregatorRunnerTrait for AggregatorRunner { } async fn is_certificate_chain_valid(&self, time_point: &TimePoint) -> StdResult<()> { - debug!("RUNNER: is_certificate_chain_valid"); + debug!(self.logger, "RUNNER: is_certificate_chain_valid"); self.dependencies .certifier_service .verify_certificate_chain(time_point.epoch) @@ -243,7 +242,7 @@ impl AggregatorRunnerTrait for AggregatorRunner { } async fn update_stake_distribution(&self, new_time_point: &TimePoint) -> StdResult<()> { - debug!("RUNNER: update stake distribution"; "time_point" => #?new_time_point); + debug!(self.logger,"RUNNER: update stake distribution"; "time_point" => #?new_time_point); self.dependencies .stake_distribution_service .update_stake_distribution() @@ -252,7 +251,7 @@ impl AggregatorRunnerTrait for AggregatorRunner { } async fn open_signer_registration_round(&self, new_time_point: &TimePoint) -> 
StdResult<()> { - debug!("RUNNER: open signer registration round"; "time_point" => #?new_time_point); + debug!(self.logger,"RUNNER: open signer registration round"; "time_point" => #?new_time_point); let registration_epoch = new_time_point.epoch.offset_to_recording_epoch(); let stakes = self @@ -269,7 +268,7 @@ impl AggregatorRunnerTrait for AggregatorRunner { } async fn close_signer_registration_round(&self) -> StdResult<()> { - debug!("RUNNER: close signer registration round"); + debug!(self.logger, "RUNNER: close signer registration round"); self.dependencies .signer_registration_round_opener @@ -290,7 +289,7 @@ impl AggregatorRunnerTrait for AggregatorRunner { &self, signed_entity_type: &SignedEntityType, ) -> StdResult { - debug!("RUNNER: compute protocol message"); + debug!(self.logger, "RUNNER: compute protocol message"); let protocol_message = self .dependencies .signable_builder_service @@ -305,7 +304,7 @@ impl AggregatorRunnerTrait for AggregatorRunner { &self, signed_entity_type: &SignedEntityType, ) -> StdResult> { - debug!("RUNNER: mark expired open message"); + debug!(self.logger, "RUNNER: mark expired open message"); let expired_open_message = self .dependencies @@ -315,8 +314,8 @@ impl AggregatorRunnerTrait for AggregatorRunner { .with_context(|| "CertifierService can not mark expired open message")?; debug!( - "RUNNER: marked expired open messages: {:#?}", - expired_open_message + self.logger, + "RUNNER: marked expired open messages: {:#?}", expired_open_message ); Ok(expired_open_message) @@ -327,7 +326,7 @@ impl AggregatorRunnerTrait for AggregatorRunner { time_point: TimePoint, signed_entity_type: &SignedEntityType, ) -> StdResult { - debug!("RUNNER: create new pending certificate"); + debug!(self.logger, "RUNNER: create new pending certificate"); let epoch_service = self.dependencies.epoch_service.read().await; let signers = epoch_service.current_signers_with_stake()?; @@ -359,7 +358,7 @@ impl AggregatorRunnerTrait for AggregatorRunner { &self, pending_certificate: CertificatePending, ) -> StdResult<()> { - debug!("RUNNER: saving pending certificate"); + debug!(self.logger, "RUNNER: saving pending certificate"); let signed_entity_type = pending_certificate.signed_entity_type.clone(); self.dependencies @@ -371,11 +370,11 @@ impl AggregatorRunnerTrait for AggregatorRunner { } async fn drop_pending_certificate(&self) -> StdResult> { - debug!("RUNNER: drop pending certificate"); + debug!(self.logger, "RUNNER: drop pending certificate"); let certificate_pending = self.dependencies.certificate_pending_store.remove().await?; if certificate_pending.is_none() { - warn!(" > drop_pending_certificate::no certificate pending in store, did the previous loop crashed ?"); + warn!(self.logger," > drop_pending_certificate::no certificate pending in store, did the previous loop crashed ?"); } Ok(certificate_pending) @@ -385,7 +384,7 @@ impl AggregatorRunnerTrait for AggregatorRunner { &self, signed_entity_type: &SignedEntityType, ) -> StdResult> { - debug!("RUNNER: create_certificate"); + debug!(self.logger, "RUNNER: create_certificate"); self.dependencies .certifier_service @@ -403,7 +402,7 @@ impl AggregatorRunnerTrait for AggregatorRunner { signed_entity_type: &SignedEntityType, certificate: &Certificate, ) -> StdResult<()> { - debug!("RUNNER: create artifact"); + debug!(self.logger, "RUNNER: create artifact"); self.dependencies .signed_entity_service .create_artifact(signed_entity_type.to_owned(), certificate) @@ -438,6 +437,7 @@ impl AggregatorRunnerTrait for AggregatorRunner { 
.era_checker .change_era(current_era, token.get_current_epoch()); debug!( + self.logger, "Current Era is {} (Epoch {}).", current_era, token.get_current_epoch() @@ -445,7 +445,7 @@ impl AggregatorRunnerTrait for AggregatorRunner { if token.get_next_supported_era().is_err() { let era_name = &token.get_next_era_marker().unwrap().name; - warn!("Upcoming Era '{era_name}' is not supported by this version of the software. Please update!"); + warn!(self.logger,"Upcoming Era '{era_name}' is not supported by this version of the software. Please update!"); } Ok(()) @@ -479,7 +479,7 @@ impl AggregatorRunnerTrait for AggregatorRunner { } async fn upkeep(&self) -> StdResult<()> { - debug!("RUNNER: upkeep"); + debug!(self.logger, "RUNNER: upkeep"); self.dependencies.upkeep_service.run().await } diff --git a/mithril-aggregator/src/runtime/state_machine.rs b/mithril-aggregator/src/runtime/state_machine.rs index 133a147ff68..8904376136a 100644 --- a/mithril-aggregator/src/runtime/state_machine.rs +++ b/mithril-aggregator/src/runtime/state_machine.rs @@ -7,8 +7,7 @@ use crate::{ use anyhow::Context; use mithril_common::entities::TimePoint; use mithril_common::logging::LoggerExtensions; -use slog::Logger; -use slog_scope::{crit, info, trace, warn}; +use slog::{crit, info, trace, warn, Logger}; use std::fmt::Display; use std::sync::Arc; use tokio::time::sleep; @@ -74,13 +73,13 @@ impl AggregatorRuntime { logger: Logger, ) -> Result { let logger = logger.new_with_component_name::(); - info!("initializing runtime"); + info!(logger, "initializing runtime"); let state = if let Some(init_state) = init_state { - trace!("got initial state from caller"); + trace!(logger, "got initial state from caller"); init_state } else { - trace!("idle state, no current time point"); + trace!(logger, "idle state, no current time point"); AggregatorState::Idle(IdleState { current_time_point: None, }) @@ -105,18 +104,21 @@ impl AggregatorRuntime { /// Launches an infinite loop ticking the state machine. pub async fn run(&mut self) -> Result<(), RuntimeError> { - info!("STATE MACHINE: launching"); + info!(self.logger, "STATE MACHINE: launching"); loop { if let Err(e) = self.cycle().await { - warn!("State machine issued an error: {e}"); + warn!(self.logger, "State machine issued an error: {e}"); match &e { RuntimeError::Critical { message: _, nested_error: _, } => { - crit!("state machine: a critical error occurred: {e:?}"); + crit!( + self.logger, + "state machine: a critical error occurred: {e:?}" + ); return Err(e); } @@ -125,6 +127,7 @@ impl AggregatorRuntime { nested_error, } => { warn!( + self.logger, "KeepState Error: {message}. Nested error: «{}».", nested_error .as_ref() @@ -137,6 +140,7 @@ impl AggregatorRuntime { nested_error, } => { warn!( + self.logger, "ReInit Error: {message}. Nested error: «{}».", nested_error .as_ref() @@ -151,6 +155,7 @@ impl AggregatorRuntime { } info!( + self.logger, "… Cycle finished, Sleeping for {} ms", self.config.interval.as_millis() ); @@ -160,8 +165,11 @@ impl AggregatorRuntime { /// Perform one tick of the state machine. 
pub async fn cycle(&mut self) -> Result<(), RuntimeError> { - info!("================================================================================"); - info!("STATE MACHINE: new cycle: {}", self.state); + info!( + self.logger, + "================================================================================" + ); + info!(self.logger, "STATE MACHINE: new cycle: {}", self.state); match self.state.clone() { AggregatorState::Idle(state) => { @@ -169,7 +177,7 @@ impl AggregatorRuntime { || "AggregatorRuntime in the state IDLE can not get current time point from chain", )?; - info!("→ trying to transition to READY"; "last_time_point" => ?last_time_point); + info!(self.logger, "→ trying to transition to READY"; "last_time_point" => ?last_time_point); self.try_transition_from_idle_to_ready( state.current_time_point, @@ -191,7 +199,7 @@ impl AggregatorRuntime { if state.current_time_point.epoch < last_time_point.epoch { // transition READY > IDLE - info!("→ Epoch has changed, transitioning to IDLE"; "last_time_point" => ?last_time_point); + info!(self.logger, "→ Epoch has changed, transitioning to IDLE"; "last_time_point" => ?last_time_point); self.state = AggregatorState::Idle(IdleState { current_time_point: Some(state.current_time_point), }); @@ -202,7 +210,7 @@ impl AggregatorRuntime { .with_context(|| "AggregatorRuntime can not get the current open message")? { // transition READY > SIGNING - info!("→ transitioning to SIGNING"); + info!(self.logger, "→ transitioning to SIGNING"); let new_state = self .transition_from_ready_to_signing(last_time_point.clone(), open_message.clone()) .await.with_context(|| format!("AggregatorRuntime can not perform a transition from READY state to SIGNING with entity_type: '{:?}'", open_message.signed_entity_type))?; @@ -210,7 +218,7 @@ impl AggregatorRuntime { } else { // READY > READY info!( - " ⋅ no open message to certify, waiting…"; + self.logger, " ⋅ no open message to certify, waiting…"; "time_point" => ?state.current_time_point ); self.state = AggregatorState::Ready(ReadyState { @@ -234,12 +242,15 @@ impl AggregatorRuntime { if state.current_time_point.epoch < last_time_point.epoch { // SIGNING > IDLE - info!("→ Epoch changed, transitioning to IDLE"); + info!(self.logger, "→ Epoch changed, transitioning to IDLE"); let new_state = self.transition_from_signing_to_idle(state).await?; self.state = AggregatorState::Idle(new_state); } else if is_outdated { // SIGNING > READY - info!("→ Open message changed, transitioning to READY"); + info!( + self.logger, + "→ Open message changed, transitioning to READY" + ); let new_state = self .transition_from_signing_to_ready_new_open_message(state) .await?; @@ -249,7 +260,7 @@ impl AggregatorRuntime { let new_state = self .transition_from_signing_to_ready_multisignature(state) .await?; - info!("→ a multi-signature has been created, build an artifact & a certificate and transitioning back to READY"); + info!(self.logger, "→ a multi-signature has been created, build an artifact & a certificate and transitioning back to READY"); self.state = AggregatorState::Ready(new_state); } } @@ -264,7 +275,7 @@ impl AggregatorRuntime { maybe_current_time_point: Option, new_time_point: TimePoint, ) -> Result<(), RuntimeError> { - trace!("trying transition from IDLE to READY state"); + trace!(self.logger, "trying transition from IDLE to READY state"); if maybe_current_time_point.is_none() || maybe_current_time_point.unwrap().epoch < new_time_point.epoch @@ -303,7 +314,10 @@ impl AggregatorRuntime { &self, state: SigningState, ) -> 
Result { - trace!("launching transition from SIGNING to READY state"); + trace!( + self.logger, + "launching transition from SIGNING to READY state" + ); let certificate = self .runner .create_certificate(&state.open_message.signed_entity_type) @@ -340,7 +354,10 @@ impl AggregatorRuntime { &self, state: SigningState, ) -> Result { - trace!("launching transition from SIGNING to IDLE state"); + trace!( + self.logger, + "launching transition from SIGNING to IDLE state" + ); self.runner.drop_pending_certificate().await?; Ok(IdleState { @@ -354,7 +371,10 @@ impl AggregatorRuntime { &self, state: SigningState, ) -> Result { - trace!("launching transition from SIGNING to READY state"); + trace!( + self.logger, + "launching transition from SIGNING to READY state" + ); self.runner.drop_pending_certificate().await?; Ok(ReadyState { @@ -369,7 +389,10 @@ impl AggregatorRuntime { new_time_point: TimePoint, open_message: OpenMessage, ) -> Result { - trace!("launching transition from READY to SIGNING state"); + trace!( + self.logger, + "launching transition from READY to SIGNING state" + ); let certificate_pending = self .runner diff --git a/mithril-aggregator/src/services/certifier/certifier_service.rs b/mithril-aggregator/src/services/certifier/certifier_service.rs index 99b3d368ac8..91747b712e3 100644 --- a/mithril-aggregator/src/services/certifier/certifier_service.rs +++ b/mithril-aggregator/src/services/certifier/certifier_service.rs @@ -1,8 +1,7 @@ use anyhow::Context; use async_trait::async_trait; use chrono::Utc; -use slog::Logger; -use slog_scope::{debug, info, trace, warn}; +use slog::{debug, info, trace, warn, Logger}; use std::sync::Arc; use mithril_common::certificate_chain::CertificateVerifier; @@ -73,6 +72,7 @@ impl MithrilCertifierService { signed_entity_type: &SignedEntityType, ) -> StdResult> { debug!( + self.logger, "CertifierService::get_open_message_record(signed_entity_type: {signed_entity_type:?})" ); @@ -89,7 +89,10 @@ impl MithrilCertifierService { #[async_trait] impl CertifierService for MithrilCertifierService { async fn inform_epoch(&self, epoch: Epoch) -> StdResult<()> { - debug!("CertifierService::inform_epoch(epoch: {epoch:?})"); + debug!( + self.logger, + "CertifierService::inform_epoch(epoch: {epoch:?})" + ); let nb = self .open_message_repository .clean_epoch(epoch) @@ -97,7 +100,7 @@ impl CertifierService for MithrilCertifierService { .with_context(|| { format!("Certifier can not clean open messages from epoch '{epoch}'") })?; - info!("MithrilCertifierService: Informed of a new Epoch: {epoch:?}. Cleaned {nb} open messages along with their single signatures."); + info!(self.logger, "MithrilCertifierService: Informed of a new Epoch: {epoch:?}. 
Cleaned {nb} open messages along with their single signatures."); Ok(()) } @@ -107,25 +110,25 @@ impl CertifierService for MithrilCertifierService { signed_entity_type: &SignedEntityType, signature: &SingleSignatures, ) -> StdResult { - debug!("CertifierService::register_single_signature(signed_entity_type: {signed_entity_type:?}, single_signatures: {signature:?}"); - trace!("CertifierService::register_single_signature"; "complete_single_signatures" => #?signature); + debug!(self.logger, "CertifierService::register_single_signature(signed_entity_type: {signed_entity_type:?}, single_signatures: {signature:?}"); + trace!(self.logger, "CertifierService::register_single_signature"; "complete_single_signatures" => #?signature); let open_message = self .get_open_message_record(signed_entity_type) .await.with_context(|| format!("CertifierService can not get open message record for signed_entity_type: '{signed_entity_type}'"))? .ok_or_else(|| { - warn!("CertifierService::register_single_signature: OpenMessage not found for type {signed_entity_type:?}."); + warn!(self.logger, "CertifierService::register_single_signature: OpenMessage not found for type {signed_entity_type:?}."); CertifierServiceError::NotFound(signed_entity_type.clone()) })?; if open_message.is_certified { - warn!("CertifierService::register_single_signature: open message {signed_entity_type:?} is already certified, cannot register single signature."); + warn!(self.logger, "CertifierService::register_single_signature: open message {signed_entity_type:?} is already certified, cannot register single signature."); return Err(CertifierServiceError::AlreadyCertified(signed_entity_type.clone()).into()); } if open_message.is_expired { - warn!("CertifierService::register_single_signature: open message {signed_entity_type:?} has expired, cannot register single signature."); + warn!(self.logger, "CertifierService::register_single_signature: open message {signed_entity_type:?} has expired, cannot register single signature."); return Err(CertifierServiceError::Expired(signed_entity_type.clone()).into()); } @@ -141,8 +144,8 @@ impl CertifierService for MithrilCertifierService { .single_signature_repository .create_single_signature(signature, &open_message.clone().into()) .await.with_context(|| format!("Certifier can not create the single signature from single_signature: '{signature:?}', open_message: '{open_message:?}'"))?; - info!("CertifierService::register_single_signature: created pool '{}' single signature for {signed_entity_type:?}.", single_signature.signer_id); - debug!("CertifierService::register_single_signature: created single signature for open message ID='{}'.", single_signature.open_message_id); + info!(self.logger, "CertifierService::register_single_signature: created pool '{}' single signature for {signed_entity_type:?}.", single_signature.signer_id); + debug!(self.logger, "CertifierService::register_single_signature: created single signature for open message ID='{}'.", single_signature.open_message_id); Ok(SignatureRegistrationStatus::Registered) } @@ -152,7 +155,7 @@ impl CertifierService for MithrilCertifierService { signed_entity_type: &SignedEntityType, protocol_message: &ProtocolMessage, ) -> StdResult { - debug!("CertifierService::create_open_message(signed_entity_type: {signed_entity_type:?}, protocol_message: {protocol_message:?})"); + debug!(self.logger, "CertifierService::create_open_message(signed_entity_type: {signed_entity_type:?}, protocol_message: {protocol_message:?})"); let open_message = self 
.open_message_repository .create_open_message( @@ -168,8 +171,9 @@ impl CertifierService for MithrilCertifierService { signed_entity_type.get_epoch_when_signed_entity_type_is_signed() ) })?; - info!("CertifierService::create_open_message: created open message for {signed_entity_type:?}"); + info!(self.logger, "CertifierService::create_open_message: created open message for {signed_entity_type:?}"); debug!( + self.logger, "CertifierService::create_open_message: created open message ID='{}'", open_message.open_message_id ); @@ -181,7 +185,10 @@ impl CertifierService for MithrilCertifierService { &self, signed_entity_type: &SignedEntityType, ) -> StdResult> { - debug!("CertifierService::get_open_message(signed_entity_type: {signed_entity_type:?})"); + debug!( + self.logger, + "CertifierService::get_open_message(signed_entity_type: {signed_entity_type:?})" + ); let open_message = self .open_message_repository @@ -197,7 +204,10 @@ impl CertifierService for MithrilCertifierService { &self, signed_entity_type: &SignedEntityType, ) -> StdResult> { - debug!("CertifierService::mark_open_message_if_expired"); + debug!( + self.logger, + "CertifierService::mark_open_message_if_expired" + ); let mut open_message_record = self .open_message_repository @@ -219,24 +229,27 @@ impl CertifierService for MithrilCertifierService { &self, signed_entity_type: &SignedEntityType, ) -> StdResult> { - debug!("CertifierService::create_certificate(signed_entity_type: {signed_entity_type:?})"); + debug!( + self.logger, + "CertifierService::create_certificate(signed_entity_type: {signed_entity_type:?})" + ); let open_message_record = self .get_open_message_record(signed_entity_type) .await? .ok_or_else(|| { - warn!("CertifierService::create_certificate: OpenMessage not found for type {signed_entity_type:?}."); + warn!(self.logger, "CertifierService::create_certificate: OpenMessage not found for type {signed_entity_type:?}."); CertifierServiceError::NotFound(signed_entity_type.clone()) })?; let open_message: OpenMessage = open_message_record.clone().into(); if open_message.is_certified { - warn!("CertifierService::create_certificate: open message {signed_entity_type:?} is already certified, cannot create certificate."); + warn!(self.logger, "CertifierService::create_certificate: open message {signed_entity_type:?} is already certified, cannot create certificate."); return Err(CertifierServiceError::AlreadyCertified(signed_entity_type.clone()).into()); } if open_message.is_expired { - warn!("CertifierService::create_certificate: open message {signed_entity_type:?} is expired, cannot create certificate."); + warn!(self.logger, "CertifierService::create_certificate: open message {signed_entity_type:?} is expired, cannot create certificate."); return Err(CertifierServiceError::Expired(signed_entity_type.clone()).into()); } @@ -247,11 +260,11 @@ impl CertifierService for MithrilCertifierService { .await? 
{ None => { - debug!("CertifierService::create_certificate: No multi-signature could be created for open message {signed_entity_type:?}"); + debug!(self.logger, "CertifierService::create_certificate: No multi-signature could be created for open message {signed_entity_type:?}"); return Ok(None); } Some(signature) => { - info!("CertifierService::create_certificate: multi-signature created for open message {signed_entity_type:?}"); + info!(self.logger, "CertifierService::create_certificate: multi-signature created for open message {signed_entity_type:?}"); signature } }; diff --git a/mithril-aggregator/src/services/epoch_service.rs b/mithril-aggregator/src/services/epoch_service.rs index 8f9ca7f949c..c6de2fec6d5 100644 --- a/mithril-aggregator/src/services/epoch_service.rs +++ b/mithril-aggregator/src/services/epoch_service.rs @@ -1,7 +1,6 @@ use anyhow::Context; use async_trait::async_trait; -use slog::Logger; -use slog_scope::debug; +use slog::{debug, Logger}; use std::collections::BTreeSet; use std::sync::Arc; use thiserror::Error; @@ -189,8 +188,7 @@ impl MithrilEpochService { let recording_epoch = actual_epoch.offset_to_epoch_settings_recording_epoch(); debug!( - "EpochService: inserting epoch settings in epoch {}", - recording_epoch; + self.logger, "EpochService: inserting epoch settings in epoch {recording_epoch}"; "epoch_settings" => ?self.future_epoch_settings ); @@ -222,7 +220,7 @@ impl MithrilEpochService { #[async_trait] impl EpochService for MithrilEpochService { async fn inform_epoch(&mut self, epoch: Epoch) -> StdResult<()> { - debug!("EpochService::inform_epoch(epoch: {epoch:?})"); + debug!(self.logger, "EpochService::inform_epoch(epoch: {epoch:?})"); let signer_retrieval_epoch = epoch.offset_to_signer_retrieval_epoch().with_context(|| { @@ -279,7 +277,7 @@ impl EpochService for MithrilEpochService { } async fn update_epoch_settings(&mut self) -> StdResult<()> { - debug!("EpochService::update_epoch_settings"); + debug!(self.logger, "EpochService::update_epoch_settings"); let data = self.unwrap_data().with_context(|| { "can't update epoch settings if inform_epoch has not been called first" @@ -289,7 +287,7 @@ impl EpochService for MithrilEpochService { } async fn precompute_epoch_data(&mut self) -> StdResult<()> { - debug!("EpochService::precompute_epoch_data"); + debug!(self.logger, "EpochService::precompute_epoch_data"); let data = self.unwrap_data().with_context(|| { "can't precompute epoch data if inform_epoch has not been called first" diff --git a/mithril-aggregator/src/services/signed_entity.rs b/mithril-aggregator/src/services/signed_entity.rs index 481497ea3ed..caf5296289e 100644 --- a/mithril-aggregator/src/services/signed_entity.rs +++ b/mithril-aggregator/src/services/signed_entity.rs @@ -5,8 +5,7 @@ use anyhow::{anyhow, Context}; use async_trait::async_trait; use chrono::Utc; -use slog::Logger; -use slog_scope::info; +use slog::{info, Logger}; use std::sync::Arc; use tokio::task::JoinHandle; @@ -128,7 +127,7 @@ impl MithrilSignedEntityService { certificate: &Certificate, ) -> StdResult<()> { info!( - "MithrilSignedEntityService::create_artifact"; + self.logger, "MithrilSignedEntityService::create_artifact"; "signed_entity_type" => ?signed_entity_type, "certificate_hash" => &certificate.hash ); diff --git a/mithril-aggregator/src/snapshot_uploaders/local_snapshot_uploader.rs b/mithril-aggregator/src/snapshot_uploaders/local_snapshot_uploader.rs index 31f52ccf72d..c2f898de0c9 100644 --- a/mithril-aggregator/src/snapshot_uploaders/local_snapshot_uploader.rs +++ 
b/mithril-aggregator/src/snapshot_uploaders/local_snapshot_uploader.rs @@ -1,7 +1,6 @@ use anyhow::Context; use async_trait::async_trait; -use slog::Logger; -use slog_scope::debug; +use slog::{debug, Logger}; use std::path::{Path, PathBuf}; use mithril_common::logging::LoggerExtensions; @@ -25,11 +24,12 @@ pub struct LocalSnapshotUploader { impl LocalSnapshotUploader { /// LocalSnapshotUploader factory pub(crate) fn new(snapshot_server_url: String, target_location: &Path, logger: Logger) -> Self { - debug!("New LocalSnapshotUploader created"; "snapshot_server_url" => &snapshot_server_url); + let logger = logger.new_with_component_name::(); + debug!(logger, "New LocalSnapshotUploader created"; "snapshot_server_url" => &snapshot_server_url); Self { snapshot_server_url, target_location: target_location.to_path_buf(), - logger: logger.new_with_component_name::(), + logger, } } } diff --git a/mithril-aggregator/src/snapshot_uploaders/remote_snapshot_uploader.rs b/mithril-aggregator/src/snapshot_uploaders/remote_snapshot_uploader.rs index 036a2996480..e99fb97a076 100644 --- a/mithril-aggregator/src/snapshot_uploaders/remote_snapshot_uploader.rs +++ b/mithril-aggregator/src/snapshot_uploaders/remote_snapshot_uploader.rs @@ -1,6 +1,5 @@ use async_trait::async_trait; -use slog::Logger; -use slog_scope::debug; +use slog::{debug, Logger}; use std::path::Path; use mithril_common::logging::LoggerExtensions; @@ -25,12 +24,13 @@ impl RemoteSnapshotUploader { use_cdn_domain: bool, logger: Logger, ) -> Self { - debug!("New GCPSnapshotUploader created"); + let logger = logger.new_with_component_name::(); + debug!(logger, "New GCPSnapshotUploader created"); Self { bucket, file_uploader, use_cdn_domain, - logger: logger.new_with_component_name::(), + logger, } } } diff --git a/mithril-aggregator/src/snapshotter.rs b/mithril-aggregator/src/snapshotter.rs index d9f288dbbad..9aaeb1da3f3 100644 --- a/mithril-aggregator/src/snapshotter.rs +++ b/mithril-aggregator/src/snapshotter.rs @@ -1,8 +1,7 @@ use anyhow::{anyhow, Context}; use flate2::Compression; use flate2::{read::GzDecoder, write::GzEncoder}; -use slog::Logger; -use slog_scope::{info, warn}; +use slog::{info, warn, Logger}; use std::fs::{self, File}; use std::io::{self, Read, Seek, SeekFrom}; use std::path::{Path, PathBuf}; @@ -102,7 +101,7 @@ impl Snapshotter for CompressedArchiveSnapshotter { let filesize = self.create_and_verify_archive(&archive_path).inspect_err(|_err| { if archive_path.exists() { if let Err(remove_error) = fs::remove_file(&archive_path) { - warn!( + warn!(self.logger, " > Post snapshotter.snapshot failure, could not remove temporary archive at path: path:{}, err: {}", archive_path.display(), remove_error @@ -162,6 +161,7 @@ impl CompressedArchiveSnapshotter { fn create_archive(&self, archive_path: &Path) -> StdResult { info!( + self.logger, "compressing {} into {}", self.db_directory.display(), archive_path.display() @@ -247,7 +247,7 @@ impl CompressedArchiveSnapshotter { // Verify if an archive is corrupted (i.e. 
at least one entry is invalid) fn verify_archive(&self, archive_path: &Path) -> StdResult<()> { - info!("verifying archive: {}", archive_path.display()); + info!(self.logger, "verifying archive: {}", archive_path.display()); let mut snapshot_file_tar = File::open(archive_path) .map_err(|e| SnapshotError::InvalidArchiveError(e.to_string()))?; diff --git a/mithril-aggregator/src/store/epoch_settings_storer.rs b/mithril-aggregator/src/store/epoch_settings_storer.rs index d90c4a67374..504c055ddb6 100644 --- a/mithril-aggregator/src/store/epoch_settings_storer.rs +++ b/mithril-aggregator/src/store/epoch_settings_storer.rs @@ -2,7 +2,6 @@ use std::collections::HashMap; use async_trait::async_trait; use mithril_common::StdResult; -use slog_scope::debug; use tokio::sync::RwLock; use mithril_common::entities::{Epoch, ProtocolParameters}; @@ -37,7 +36,6 @@ pub trait EpochSettingsStorer: Sync + Send { for epoch_offset in 0..=2 { let epoch = current_epoch + epoch_offset; if self.get_epoch_settings(epoch).await?.is_none() { - debug!("Handle discrepancies at startup of epoch settings store, will record epoch settings from the configuration for epoch {epoch}: {epoch_settings_configuration:?}"); self.save_epoch_settings(epoch, epoch_settings_configuration.clone()) .await?; } diff --git a/mithril-aggregator/src/tools/certificates_hash_migrator.rs b/mithril-aggregator/src/tools/certificates_hash_migrator.rs index a370df737a0..b9a1e18adee 100644 --- a/mithril-aggregator/src/tools/certificates_hash_migrator.rs +++ b/mithril-aggregator/src/tools/certificates_hash_migrator.rs @@ -1,8 +1,7 @@ use std::{collections::HashMap, sync::Arc}; use anyhow::{anyhow, Context}; -use slog::Logger; -use slog_scope::{debug, info, trace}; +use slog::{debug, info, trace, Logger}; use mithril_common::logging::LoggerExtensions; use mithril_common::{entities::Certificate, StdResult}; @@ -32,7 +31,7 @@ impl CertificatesHashMigrator { /// Recompute all the certificates hashes the database. pub async fn migrate(&self) -> StdResult<()> { - info!("🔧 Certificate Hash Migrator: starting"); + info!(self.logger, "🔧 Certificate Hash Migrator: starting"); let (old_certificates, old_and_new_hashes) = self.create_certificates_with_updated_hash().await?; @@ -41,7 +40,10 @@ impl CertificatesHashMigrator { self.cleanup(old_certificates).await?; - info!("🔧 Certificate Hash Migrator: all certificates have been migrated successfully"); + info!( + self.logger, + "🔧 Certificate Hash Migrator: all certificates have been migrated successfully" + ); Ok(()) } @@ -50,7 +52,10 @@ impl CertificatesHashMigrator { async fn create_certificates_with_updated_hash( &self, ) -> StdResult<(Vec, HashMap)> { - info!("🔧 Certificate Hash Migrator: recomputing all certificates hash"); + info!( + self.logger, + "🔧 Certificate Hash Migrator: recomputing all certificates hash" + ); let old_certificates = self .certificate_repository // arbitrary high value to get all existing certificates @@ -65,7 +70,10 @@ impl CertificatesHashMigrator { // Note: get_latest_certificates retrieve certificates from the earliest to the older, // in order to have a strong guarantee that when inserting a certificate in the db its // previous_hash exist we have to work in the reverse order. 
- debug!("🔧 Certificate Hash Migrator: computing new hash for all certificates"); + debug!( + self.logger, + "🔧 Certificate Hash Migrator: computing new hash for all certificates" + ); for mut certificate in old_certificates.into_iter().rev() { let old_previous_hash = if certificate.is_genesis() { certificate.previous_hash.clone() @@ -90,14 +98,14 @@ impl CertificatesHashMigrator { if certificate.is_genesis() { trace!( - "🔧 Certificate Hash Migrator: new hash computed for genesis certificate {:?}", + self.logger, "🔧 Certificate Hash Migrator: new hash computed for genesis certificate {:?}", certificate.signed_entity_type(); "old_hash" => &certificate.hash, "new_hash" => &new_hash, ); } else { trace!( - "🔧 Certificate Hash Migrator: new hash computed for certificate {:?}", + self.logger, "🔧 Certificate Hash Migrator: new hash computed for certificate {:?}", certificate.signed_entity_type(); "old_hash" => &certificate.hash, "new_hash" => &new_hash, @@ -116,7 +124,10 @@ impl CertificatesHashMigrator { // 2 - Certificates migrated, we can insert them in the db // (we do this by chunks in order to avoid reaching the limit of 32766 variables in a single query) - debug!("🔧 Certificate Hash Migrator: inserting migrated certificates in the database"); + debug!( + self.logger, + "🔧 Certificate Hash Migrator: inserting migrated certificates in the database" + ); let migrated_certificates_chunk_size = 250; for migrated_certificates_chunk in migrated_certificates.chunks(migrated_certificates_chunk_size) @@ -136,7 +147,10 @@ impl CertificatesHashMigrator { &self, old_and_new_certificate_hashes: HashMap, ) -> StdResult<()> { - info!("🔧 Certificate Hash Migrator: updating signed entities certificate ids"); + info!( + self.logger, + "🔧 Certificate Hash Migrator: updating signed entities certificate ids" + ); let old_hashes: Vec<&str> = old_and_new_certificate_hashes .keys() .map(|k| k.as_str()) @@ -151,7 +165,7 @@ impl CertificatesHashMigrator { ) )?; - debug!("🔧 Certificate Hash Migrator: updating signed entities certificate_ids to new computed hash"); + debug!(self.logger,"🔧 Certificate Hash Migrator: updating signed entities certificate_ids to new computed hash"); for signed_entity_record in records_to_migrate.iter_mut() { let new_certificate_hash = old_and_new_certificate_hashes @@ -163,7 +177,7 @@ impl CertificatesHashMigrator { .to_owned(); trace!( - "🔧 Certificate Hash Migrator: migrating signed entity {} certificate hash computed for certificate", + self.logger, "🔧 Certificate Hash Migrator: migrating signed entity {} certificate hash computed for certificate", signed_entity_record.signed_entity_id; "old_certificate_hash" => &signed_entity_record.certificate_id, "new_certificate_hash" => &new_certificate_hash @@ -171,7 +185,10 @@ impl CertificatesHashMigrator { signed_entity_record.certificate_id = new_certificate_hash; } - debug!("🔧 Certificate Hash Migrator: updating migrated signed entities in the database"); + debug!( + self.logger, + "🔧 Certificate Hash Migrator: updating migrated signed entities in the database" + ); self.signed_entity_storer .update_signed_entities(records_to_migrate) .await @@ -181,7 +198,10 @@ impl CertificatesHashMigrator { } async fn cleanup(&self, old_certificates: Vec) -> StdResult<()> { - info!("🔧 Certificate Hash Migrator: deleting old certificates in the database"); + info!( + self.logger, + "🔧 Certificate Hash Migrator: deleting old certificates in the database" + ); self.certificate_repository .delete_certificates(&old_certificates.iter().collect::>()) .await diff 
--git a/mithril-aggregator/src/tools/genesis.rs b/mithril-aggregator/src/tools/genesis.rs index de28c9a8f59..0ea2ae0370e 100644 --- a/mithril-aggregator/src/tools/genesis.rs +++ b/mithril-aggregator/src/tools/genesis.rs @@ -206,7 +206,6 @@ impl GenesisTools { #[cfg(test)] mod tests { - use crate::database::test_helper::main_db_connection; use mithril_common::{ certificate_chain::MithrilCertificateVerifier, crypto_helper::ProtocolGenesisSigner, @@ -214,6 +213,9 @@ mod tests { }; use std::path::PathBuf; + use crate::database::test_helper::main_db_connection; + use crate::test_tools::TestLogger; + use super::*; fn get_temp_dir(dir_name: &str) -> PathBuf { @@ -237,7 +239,7 @@ mod tests { let connection = main_db_connection().unwrap(); let certificate_store = Arc::new(CertificateRepository::new(Arc::new(connection))); let certificate_verifier = Arc::new(MithrilCertificateVerifier::new( - slog_scope::logger(), + TestLogger::stdout(), certificate_store.clone(), )); let genesis_avk = create_fake_genesis_avk(); diff --git a/mithril-aggregator/src/tools/remote_file_uploader.rs b/mithril-aggregator/src/tools/remote_file_uploader.rs index 7b305f60648..9c386fb22a0 100644 --- a/mithril-aggregator/src/tools/remote_file_uploader.rs +++ b/mithril-aggregator/src/tools/remote_file_uploader.rs @@ -4,8 +4,7 @@ use cloud_storage::{ bucket::Entity, bucket_access_control::Role, object_access_control::NewObjectAccessControl, Client, }; -use slog::Logger; -use slog_scope::info; +use slog::{info, Logger}; use std::{env, path::Path}; use tokio_util::{codec::BytesCodec, codec::FramedRead}; @@ -47,7 +46,7 @@ impl RemoteFileUploader for GcpFileUploader { let filename = filepath.file_name().unwrap().to_str().unwrap(); - info!("uploading {}", filename); + info!(self.logger, "uploading {filename}"); let client = Client::default(); let file = tokio::fs::File::open(filepath).await.unwrap(); let stream = FramedRead::new(file, BytesCodec::new()); @@ -63,7 +62,7 @@ impl RemoteFileUploader for GcpFileUploader { .await .with_context(|| "remote uploading failure")?; - info!("uploaded {}", filename); + info!(self.logger, "uploaded {filename}"); // ensure the uploaded file as public read access // when a file is uploaded to Google cloud storage its permissions are overwritten so @@ -74,8 +73,8 @@ impl RemoteFileUploader for GcpFileUploader { }; info!( - "updating acl for {}: {:?}", - filename, new_bucket_access_control + self.logger, + "updating acl for {filename}: {new_bucket_access_control:?}" ); client @@ -84,7 +83,7 @@ impl RemoteFileUploader for GcpFileUploader { .await .with_context(|| "updating acl failure")?; - info!("updated acl for {} ", filename); + info!(self.logger, "updated acl for {filename}"); Ok(()) } diff --git a/mithril-aggregator/src/tools/signer_importer.rs b/mithril-aggregator/src/tools/signer_importer.rs index 34ed8489eb9..c5d97ec7d53 100644 --- a/mithril-aggregator/src/tools/signer_importer.rs +++ b/mithril-aggregator/src/tools/signer_importer.rs @@ -2,8 +2,7 @@ use anyhow::Context; use async_trait::async_trait; use reqwest::{IntoUrl, Url}; use serde::{Deserialize, Serialize}; -use slog::Logger; -use slog_scope::{info, warn}; +use slog::{info, warn, Logger}; use std::collections::HashMap; use std::ops::Not; use std::sync::Arc; @@ -39,12 +38,17 @@ impl SignersImporter { /// Import and persist the signers pub async fn run(&self) -> StdResult<()> { - info!("🔧 Signer Importer: starting"); + info!(self.logger, "🔧 Signer Importer: starting"); let items = self .retriever .retrieve() .await .with_context(|| "Failed 
to retrieve signers from remote service")?; + + info!(self.logger, + "🔧 Signer Importer: persisting retrieved data in the database"; + "number_of_signer_to_insert" => items.len() + ); self.persister .persist(items) .await @@ -58,9 +62,13 @@ impl SignersImporter { loop { interval.tick().await; if let Err(error) = self.run().await { - warn!("Signer retriever failed: Error: «{:?}».", error); + warn!( + self.logger, + "Signer retriever failed: Error: «{:?}».", error + ); } info!( + self.logger, "🔧 Signer Importer: Cycle finished, Sleeping for {} min", run_interval.as_secs() / 60 ); @@ -87,10 +95,6 @@ pub trait SignersImporterPersister: Sync + Send { #[async_trait] impl SignersImporterPersister for SignerStore { async fn persist(&self, signers: HashMap>) -> StdResult<()> { - info!( - "🔧 Signer Importer: persisting retrieved data in the database"; - "number_of_signer_to_insert" => signers.len() - ); self.import_many_signers(signers).await?; Ok(()) @@ -135,7 +139,7 @@ impl CExplorerSignerRetriever { impl SignersImporterRetriever for CExplorerSignerRetriever { async fn retrieve(&self) -> StdResult>> { info!( - "🔧 Signer Importer: retrieving data from source"; + self.logger, "🔧 Signer Importer: retrieving data from source"; "source_url" => &self.source_url.as_str() ); let response = self From 358126139f8135f712bd9f839f942736c49fe493 Mon Sep 17 00:00:00 2001 From: DJO <790521+Alenar@users.noreply.github.com> Date: Fri, 11 Oct 2024 18:48:06 +0200 Subject: [PATCH 06/12] refactor(aggregator): create & dispatch a root logger in commands --- .../src/commands/era_command.rs | 38 ++++++++---- .../src/commands/genesis_command.rs | 58 ++++++++++++++----- mithril-aggregator/src/commands/mod.rs | 23 ++++---- .../src/commands/serve_command.rs | 18 ++++-- .../src/commands/tools_command.rs | 30 +++++++--- mithril-aggregator/src/main.rs | 4 +- 6 files changed, 119 insertions(+), 52 deletions(-) diff --git a/mithril-aggregator/src/commands/era_command.rs b/mithril-aggregator/src/commands/era_command.rs index fd9b2bae468..dfede5f9ad5 100644 --- a/mithril-aggregator/src/commands/era_command.rs +++ b/mithril-aggregator/src/commands/era_command.rs @@ -8,7 +8,7 @@ use mithril_common::{ entities::{Epoch, HexEncodedEraMarkersSecretKey}, StdResult, }; -use slog_scope::debug; +use slog::{debug, Logger}; use crate::tools::EraTools; @@ -21,8 +21,14 @@ pub struct EraCommand { } impl EraCommand { - pub async fn execute(&self, config_builder: ConfigBuilder) -> StdResult<()> { - self.era_subcommand.execute(config_builder).await + pub async fn execute( + &self, + root_logger: Logger, + config_builder: ConfigBuilder, + ) -> StdResult<()> { + self.era_subcommand + .execute(root_logger, config_builder) + .await } } @@ -37,10 +43,14 @@ pub enum EraSubCommand { } impl EraSubCommand { - pub async fn execute(&self, config_builder: ConfigBuilder) -> StdResult<()> { + pub async fn execute( + &self, + root_logger: Logger, + config_builder: ConfigBuilder, + ) -> StdResult<()> { match self { - Self::List(cmd) => cmd.execute(config_builder).await, - Self::GenerateTxDatum(cmd) => cmd.execute(config_builder).await, + Self::List(cmd) => cmd.execute(root_logger, config_builder).await, + Self::GenerateTxDatum(cmd) => cmd.execute(root_logger, config_builder).await, } } } @@ -54,8 +64,12 @@ pub struct ListEraSubCommand { } impl ListEraSubCommand { - pub async fn execute(&self, _config_builder: ConfigBuilder) -> StdResult<()> { - debug!("LIST ERA command"); + pub async fn execute( + &self, + root_logger: Logger, + _config_builder: ConfigBuilder, + ) 
-> StdResult<()> { + debug!(root_logger, "LIST ERA command"); let era_tools = EraTools::new(); let eras = era_tools.get_supported_eras_list()?; @@ -91,8 +105,12 @@ pub struct GenerateTxDatumEraSubCommand { } impl GenerateTxDatumEraSubCommand { - pub async fn execute(&self, _config_builder: ConfigBuilder) -> StdResult<()> { - debug!("GENERATETXDATUM ERA command"); + pub async fn execute( + &self, + root_logger: Logger, + _config_builder: ConfigBuilder, + ) -> StdResult<()> { + debug!(root_logger, "GENERATETXDATUM ERA command"); let era_tools = EraTools::new(); let era_markers_secret_key = diff --git a/mithril-aggregator/src/commands/genesis_command.rs b/mithril-aggregator/src/commands/genesis_command.rs index c5b2a182793..d25fbef7a34 100644 --- a/mithril-aggregator/src/commands/genesis_command.rs +++ b/mithril-aggregator/src/commands/genesis_command.rs @@ -6,7 +6,7 @@ use mithril_common::{ entities::HexEncodedGenesisSecretKey, StdResult, }; -use slog_scope::debug; +use slog::{debug, Logger}; use std::path::PathBuf; use crate::{dependency_injection::DependenciesBuilder, tools::GenesisTools, Configuration}; @@ -20,8 +20,14 @@ pub struct GenesisCommand { } impl GenesisCommand { - pub async fn execute(&self, config_builder: ConfigBuilder) -> StdResult<()> { - self.genesis_subcommand.execute(config_builder).await + pub async fn execute( + &self, + root_logger: Logger, + config_builder: ConfigBuilder, + ) -> StdResult<()> { + self.genesis_subcommand + .execute(root_logger, config_builder) + .await } } @@ -42,12 +48,16 @@ pub enum GenesisSubCommand { } impl GenesisSubCommand { - pub async fn execute(&self, config_builder: ConfigBuilder) -> StdResult<()> { + pub async fn execute( + &self, + root_logger: Logger, + config_builder: ConfigBuilder, + ) -> StdResult<()> { match self { - Self::Bootstrap(cmd) => cmd.execute(config_builder).await, - Self::Export(cmd) => cmd.execute(config_builder).await, - Self::Import(cmd) => cmd.execute(config_builder).await, - Self::Sign(cmd) => cmd.execute(config_builder).await, + Self::Bootstrap(cmd) => cmd.execute(root_logger, config_builder).await, + Self::Export(cmd) => cmd.execute(root_logger, config_builder).await, + Self::Import(cmd) => cmd.execute(root_logger, config_builder).await, + Self::Sign(cmd) => cmd.execute(root_logger, config_builder).await, } } } @@ -61,13 +71,17 @@ pub struct ExportGenesisSubCommand { } impl ExportGenesisSubCommand { - pub async fn execute(&self, config_builder: ConfigBuilder) -> StdResult<()> { + pub async fn execute( + &self, + root_logger: Logger, + config_builder: ConfigBuilder, + ) -> StdResult<()> { let config: Configuration = config_builder .build() .with_context(|| "configuration build error")? .try_deserialize() .with_context(|| "configuration deserialize error")?; - debug!("EXPORT GENESIS command"; "config" => format!("{config:?}")); + debug!(root_logger, "EXPORT GENESIS command"; "config" => format!("{config:?}")); println!( "Genesis export payload to sign to {}", self.target_path.display() @@ -98,13 +112,17 @@ pub struct ImportGenesisSubCommand { } impl ImportGenesisSubCommand { - pub async fn execute(&self, config_builder: ConfigBuilder) -> StdResult<()> { + pub async fn execute( + &self, + root_logger: Logger, + config_builder: ConfigBuilder, + ) -> StdResult<()> { let config: Configuration = config_builder .build() .with_context(|| "configuration build error")? 
.try_deserialize() .with_context(|| "configuration deserialize error")?; - debug!("IMPORT GENESIS command"; "config" => format!("{config:?}")); + debug!(root_logger, "IMPORT GENESIS command"; "config" => format!("{config:?}")); println!( "Genesis import signed payload from {}", self.signed_payload_path.to_string_lossy() @@ -144,8 +162,12 @@ pub struct SignGenesisSubCommand { } impl SignGenesisSubCommand { - pub async fn execute(&self, _config_builder: ConfigBuilder) -> StdResult<()> { - debug!("SIGN GENESIS command"); + pub async fn execute( + &self, + root_logger: Logger, + _config_builder: ConfigBuilder, + ) -> StdResult<()> { + debug!(root_logger, "SIGN GENESIS command"); println!( "Genesis sign payload from {} to {}", self.to_sign_payload_path.to_string_lossy(), @@ -171,13 +193,17 @@ pub struct BootstrapGenesisSubCommand { } impl BootstrapGenesisSubCommand { - pub async fn execute(&self, config_builder: ConfigBuilder) -> StdResult<()> { + pub async fn execute( + &self, + root_logger: Logger, + config_builder: ConfigBuilder, + ) -> StdResult<()> { let config: Configuration = config_builder .build() .with_context(|| "configuration build error")? .try_deserialize() .with_context(|| "configuration deserialize error")?; - debug!("BOOTSTRAP GENESIS command"; "config" => format!("{config:?}")); + debug!(root_logger, "BOOTSTRAP GENESIS command"; "config" => format!("{config:?}")); println!("Genesis bootstrap for test only!"); let mut dependencies_builder = DependenciesBuilder::new(config.clone()); let dependencies = dependencies_builder diff --git a/mithril-aggregator/src/commands/mod.rs b/mithril-aggregator/src/commands/mod.rs index b993cc4e804..0cdac03d933 100644 --- a/mithril-aggregator/src/commands/mod.rs +++ b/mithril-aggregator/src/commands/mod.rs @@ -8,8 +8,7 @@ use clap::{CommandFactory, Parser, Subcommand}; use config::{builder::DefaultState, ConfigBuilder, Map, Source, Value, ValueKind}; use mithril_common::StdResult; use mithril_doc::{Documenter, DocumenterDefault, StructDoc}; -use slog::Level; -use slog_scope::debug; +use slog::{debug, Level, Logger}; use std::path::PathBuf; use crate::{Configuration, DefaultConfiguration}; @@ -35,12 +34,16 @@ pub enum CommandType { } impl MainCommand { - pub async fn execute(&self, config_builder: ConfigBuilder) -> StdResult<()> { + pub async fn execute( + &self, + root_logger: Logger, + config_builder: ConfigBuilder, + ) -> StdResult<()> { match self { - Self::Genesis(cmd) => cmd.execute(config_builder).await, - Self::Era(cmd) => cmd.execute(config_builder).await, - Self::Serve(cmd) => cmd.execute(config_builder).await, - Self::Tools(cmd) => cmd.execute(config_builder).await, + Self::Genesis(cmd) => cmd.execute(root_logger, config_builder).await, + Self::Era(cmd) => cmd.execute(root_logger, config_builder).await, + Self::Serve(cmd) => cmd.execute(root_logger, config_builder).await, + Self::Tools(cmd) => cmd.execute(root_logger, config_builder).await, Self::GenerateDoc(cmd) => { let config_infos = vec![Configuration::extract(), DefaultConfiguration::extract()]; cmd.execute_with_configurations(&mut MainOpts::command(), &config_infos) @@ -111,7 +114,7 @@ impl Source for MainOpts { impl MainOpts { /// execute command - pub async fn execute(&self) -> StdResult<()> { + pub async fn execute(&self, root_logger: Logger) -> StdResult<()> { let config_file_path = self .config_directory .join(format!("{}.json", self.run_mode)); @@ -122,9 +125,9 @@ impl MainOpts { ) .add_source(config::Environment::default().separator("__")) .add_source(self.clone()); - 
debug!("Started"; "run_mode" => &self.run_mode, "node_version" => env!("CARGO_PKG_VERSION")); + debug!(root_logger, "Started"; "run_mode" => &self.run_mode, "node_version" => env!("CARGO_PKG_VERSION")); - self.command.execute(config_builder).await + self.command.execute(root_logger, config_builder).await } /// get log level from parameters diff --git a/mithril-aggregator/src/commands/serve_command.rs b/mithril-aggregator/src/commands/serve_command.rs index fcb8aad12bd..c17efc175fc 100644 --- a/mithril-aggregator/src/commands/serve_command.rs +++ b/mithril-aggregator/src/commands/serve_command.rs @@ -2,7 +2,7 @@ use anyhow::Context; use clap::Parser; use config::{builder::DefaultState, ConfigBuilder, Map, Source, Value, ValueKind}; use mithril_common::StdResult; -use slog_scope::{crit, debug, info, warn}; +use slog::{crit, debug, info, warn, Logger}; use std::time::Duration; use std::{net::IpAddr, path::PathBuf}; use tokio::{sync::oneshot, task::JoinSet}; @@ -80,14 +80,18 @@ impl Source for ServeCommand { } impl ServeCommand { - pub async fn execute(&self, mut config_builder: ConfigBuilder) -> StdResult<()> { + pub async fn execute( + &self, + root_logger: Logger, + mut config_builder: ConfigBuilder, + ) -> StdResult<()> { config_builder = config_builder.add_source(self.clone()); let config: Configuration = config_builder .build() .with_context(|| "configuration build error")? .try_deserialize() .with_context(|| "configuration deserialize error")?; - debug!("SERVE command"; "config" => format!("{config:?}")); + debug!(root_logger, "SERVE command"; "config" => format!("{config:?}")); let mut dependencies_builder = DependenciesBuilder::new(config.clone()); // start servers @@ -172,8 +176,10 @@ impl ServeCommand { } Err(error) => { warn!( + root_logger, "Failed to build the `SignersImporter`:\n url to import `{}`\n Error: {:?}", - cexplorer_pools_url, error + cexplorer_pools_url, + error ); } } @@ -183,7 +189,7 @@ impl ServeCommand { dependencies_builder.vanish().await; if let Err(e) = join_set.join_next().await.unwrap()? 
{ - crit!("A critical error occurred: {e}"); + crit!(root_logger, "A critical error occurred: {e}"); } // stop servers @@ -194,7 +200,7 @@ impl ServeCommand { preload_task.abort(); } - info!("Event store is finishing..."); + info!(root_logger, "Event store is finishing..."); event_store_thread.await.unwrap(); println!("Services stopped, exiting."); diff --git a/mithril-aggregator/src/commands/tools_command.rs b/mithril-aggregator/src/commands/tools_command.rs index 5f0faba0ece..959191917ae 100644 --- a/mithril-aggregator/src/commands/tools_command.rs +++ b/mithril-aggregator/src/commands/tools_command.rs @@ -3,7 +3,7 @@ use clap::{Parser, Subcommand}; use config::{builder::DefaultState, ConfigBuilder}; use mithril_common::StdResult; use mithril_persistence::sqlite::{SqliteCleaner, SqliteCleaningTask}; -use slog_scope::debug; +use slog::{debug, Logger}; use std::sync::Arc; use crate::{ @@ -22,8 +22,14 @@ pub struct ToolsCommand { } impl ToolsCommand { - pub async fn execute(&self, config_builder: ConfigBuilder) -> StdResult<()> { - self.genesis_subcommand.execute(config_builder).await + pub async fn execute( + &self, + root_logger: Logger, + config_builder: ConfigBuilder, + ) -> StdResult<()> { + self.genesis_subcommand + .execute(root_logger, config_builder) + .await } } @@ -39,9 +45,13 @@ pub enum ToolsSubCommand { } impl ToolsSubCommand { - pub async fn execute(&self, config_builder: ConfigBuilder) -> StdResult<()> { + pub async fn execute( + &self, + root_logger: Logger, + config_builder: ConfigBuilder, + ) -> StdResult<()> { match self { - Self::RecomputeCertificatesHash(cmd) => cmd.execute(config_builder).await, + Self::RecomputeCertificatesHash(cmd) => cmd.execute(root_logger, config_builder).await, } } } @@ -51,13 +61,17 @@ impl ToolsSubCommand { pub struct RecomputeCertificatesHashCommand {} impl RecomputeCertificatesHashCommand { - pub async fn execute(&self, config_builder: ConfigBuilder) -> StdResult<()> { + pub async fn execute( + &self, + root_logger: Logger, + config_builder: ConfigBuilder, + ) -> StdResult<()> { let config: Configuration = config_builder .build() .with_context(|| "configuration build error")? 
.try_deserialize() .with_context(|| "configuration deserialize error")?; - debug!("RECOMPUTE CERTIFICATES HASH command"; "config" => format!("{config:?}")); + debug!(root_logger, "RECOMPUTE CERTIFICATES HASH command"; "config" => format!("{config:?}")); println!("Recomputing all certificate hash",); let mut dependencies_builder = DependenciesBuilder::new(config.clone()); let connection = dependencies_builder @@ -67,7 +81,7 @@ impl RecomputeCertificatesHashCommand { let migrator = CertificatesHashMigrator::new( CertificateRepository::new(connection.clone()), Arc::new(SignedEntityStore::new(connection.clone())), - dependencies_builder.get_logger()?, + root_logger, ); migrator diff --git a/mithril-aggregator/src/main.rs b/mithril-aggregator/src/main.rs index cfb18b86bc1..3d9c1e8b371 100644 --- a/mithril-aggregator/src/main.rs +++ b/mithril-aggregator/src/main.rs @@ -28,10 +28,10 @@ pub fn build_logger(args: &MainOpts) -> Logger { async fn main() -> StdResult<()> { // Load args let args = MainOpts::parse(); - let _guard = slog_scope::set_global_logger(build_logger(&args)); + let root_logger = build_logger(&args); #[cfg(feature = "bundle_openssl")] openssl_probe::init_ssl_cert_env_vars(); - args.execute().await + args.execute(root_logger).await } From 1111de0d482b749b8f9ad6f0e1254ed076f7f3fd Mon Sep 17 00:00:00 2001 From: DJO <790521+Alenar@users.noreply.github.com> Date: Fri, 11 Oct 2024 18:52:02 +0200 Subject: [PATCH 07/12] refactor(aggregator): dispatch root logger when building dependencies --- .../src/commands/genesis_command.rs | 9 ++- .../src/commands/serve_command.rs | 3 +- .../src/commands/tools_command.rs | 3 +- .../repository/certificate_repository.rs | 30 +++---- .../src/dependency_injection/builder.rs | 80 ++++++++++--------- .../src/dependency_injection/containers.rs | 2 +- .../src/http_server/routes/proof_routes.rs | 10 +-- .../src/http_server/routes/root_routes.rs | 4 +- .../http_server/routes/statistics_routes.rs | 2 +- .../services/certifier/certifier_service.rs | 2 +- mithril-aggregator/src/services/message.rs | 34 ++++---- .../src/services/stake_distribution.rs | 3 +- .../tests/test_extensions/runtime_tester.rs | 11 +-- 13 files changed, 103 insertions(+), 90 deletions(-) diff --git a/mithril-aggregator/src/commands/genesis_command.rs b/mithril-aggregator/src/commands/genesis_command.rs index d25fbef7a34..381814e9d7a 100644 --- a/mithril-aggregator/src/commands/genesis_command.rs +++ b/mithril-aggregator/src/commands/genesis_command.rs @@ -86,7 +86,8 @@ impl ExportGenesisSubCommand { "Genesis export payload to sign to {}", self.target_path.display() ); - let mut dependencies_builder = DependenciesBuilder::new(config.clone()); + let mut dependencies_builder = + DependenciesBuilder::new(root_logger.clone(), config.clone()); let dependencies = dependencies_builder .create_genesis_container() .await @@ -127,7 +128,8 @@ impl ImportGenesisSubCommand { "Genesis import signed payload from {}", self.signed_payload_path.to_string_lossy() ); - let mut dependencies_builder = DependenciesBuilder::new(config.clone()); + let mut dependencies_builder = + DependenciesBuilder::new(root_logger.clone(), config.clone()); let dependencies = dependencies_builder .create_genesis_container() .await @@ -205,7 +207,8 @@ impl BootstrapGenesisSubCommand { .with_context(|| "configuration deserialize error")?; debug!(root_logger, "BOOTSTRAP GENESIS command"; "config" => format!("{config:?}")); println!("Genesis bootstrap for test only!"); - let mut dependencies_builder = 
DependenciesBuilder::new(config.clone()); + let mut dependencies_builder = + DependenciesBuilder::new(root_logger.clone(), config.clone()); let dependencies = dependencies_builder .create_genesis_container() .await diff --git a/mithril-aggregator/src/commands/serve_command.rs b/mithril-aggregator/src/commands/serve_command.rs index c17efc175fc..cccd6e48114 100644 --- a/mithril-aggregator/src/commands/serve_command.rs +++ b/mithril-aggregator/src/commands/serve_command.rs @@ -92,7 +92,8 @@ impl ServeCommand { .try_deserialize() .with_context(|| "configuration deserialize error")?; debug!(root_logger, "SERVE command"; "config" => format!("{config:?}")); - let mut dependencies_builder = DependenciesBuilder::new(config.clone()); + let mut dependencies_builder = + DependenciesBuilder::new(root_logger.clone(), config.clone()); // start servers println!("Starting server..."); diff --git a/mithril-aggregator/src/commands/tools_command.rs b/mithril-aggregator/src/commands/tools_command.rs index 959191917ae..e36ced14030 100644 --- a/mithril-aggregator/src/commands/tools_command.rs +++ b/mithril-aggregator/src/commands/tools_command.rs @@ -73,7 +73,8 @@ impl RecomputeCertificatesHashCommand { .with_context(|| "configuration deserialize error")?; debug!(root_logger, "RECOMPUTE CERTIFICATES HASH command"; "config" => format!("{config:?}")); println!("Recomputing all certificate hash",); - let mut dependencies_builder = DependenciesBuilder::new(config.clone()); + let mut dependencies_builder = + DependenciesBuilder::new(root_logger.clone(), config.clone()); let connection = dependencies_builder .get_sqlite_connection() .await diff --git a/mithril-aggregator/src/database/repository/certificate_repository.rs b/mithril-aggregator/src/database/repository/certificate_repository.rs index 53390170a68..3cea511ca87 100644 --- a/mithril-aggregator/src/database/repository/certificate_repository.rs +++ b/mithril-aggregator/src/database/repository/certificate_repository.rs @@ -239,7 +239,7 @@ mod tests { async fn repository_get_certificate() { let (certificates, _) = setup_certificate_chain(5, 2); let expected_hash = certificates[0].hash.clone(); - let mut deps = DependenciesBuilder::new(Configuration::new_sample()); + let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); let connection = deps.get_sqlite_connection().await.unwrap(); insert_certificate_records(&connection, certificates.clone()); @@ -262,7 +262,7 @@ mod tests { #[tokio::test] async fn repository_get_latest_certificates() { let (certificates, _) = setup_certificate_chain(5, 2); - let mut deps = DependenciesBuilder::new(Configuration::new_sample()); + let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); let connection = deps.get_sqlite_connection().await.unwrap(); insert_certificate_records(&connection, certificates.clone()); @@ -278,7 +278,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_no_certificate_recorded_returns_none() { - let mut deps = DependenciesBuilder::new(Configuration::new_sample()); + let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); let connection = deps.get_sqlite_connection().await.unwrap(); let repository: CertificateRepository = CertificateRepository::new(connection); @@ -292,7 +292,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_one_cert_in_current_epoch_recorded_returns_that_one() { - let mut deps = DependenciesBuilder::new(Configuration::new_sample()); + let mut deps = 
DependenciesBuilder::new_for_test(Configuration::new_sample()); let connection = deps.get_sqlite_connection().await.unwrap(); let certificate = CertificateRecord::dummy_genesis("1", Epoch(1), 1); let expected_certificate: Certificate = certificate.clone().into(); @@ -311,7 +311,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_multiple_cert_in_current_epoch_returns_first_of_current_epoch() { - let mut deps = DependenciesBuilder::new(Configuration::new_sample()); + let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); let connection = deps.get_sqlite_connection().await.unwrap(); let certificates = vec![ CertificateRecord::dummy_genesis("1", Epoch(1), 1), @@ -334,7 +334,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_multiple_cert_in_previous_epoch_none_in_the_current_returns_first_of_previous_epoch( ) { - let mut deps = DependenciesBuilder::new(Configuration::new_sample()); + let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); let connection = deps.get_sqlite_connection().await.unwrap(); let certificates = vec![ CertificateRecord::dummy_genesis("1", Epoch(1), 1), @@ -357,7 +357,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_multiple_cert_in_previous_one_cert_in_current_epoch_returns_one_in_current_epoch( ) { - let mut deps = DependenciesBuilder::new(Configuration::new_sample()); + let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); let connection = deps.get_sqlite_connection().await.unwrap(); let certificates = vec![ CertificateRecord::dummy_genesis("1", Epoch(1), 1), @@ -381,7 +381,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_multiple_cert_in_previous_multiple_in_current_epoch_returns_first_of_current_epoch( ) { - let mut deps = DependenciesBuilder::new(Configuration::new_sample()); + let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); let connection = deps.get_sqlite_connection().await.unwrap(); let certificates = vec![ CertificateRecord::dummy_genesis("1", Epoch(1), 1), @@ -406,7 +406,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_multiple_cert_in_penultimate_epoch_none_in_previous_returns_none( ) { - let mut deps = DependenciesBuilder::new(Configuration::new_sample()); + let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); let connection = deps.get_sqlite_connection().await.unwrap(); let certificates = vec![ CertificateRecord::dummy_genesis("1", Epoch(1), 1), @@ -427,7 +427,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_second_genesis_after_multiple_cert_in_current_epoch_returns_last_genesis( ) { - let mut deps = DependenciesBuilder::new(Configuration::new_sample()); + let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); let connection = deps.get_sqlite_connection().await.unwrap(); let certificates = vec![ CertificateRecord::dummy_genesis("1", Epoch(1), 1), @@ -451,7 +451,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_second_genesis_after_multiple_cert_in_multiple_epochs_returns_last_genesis( ) { - let mut deps = DependenciesBuilder::new(Configuration::new_sample()); + let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); let connection = deps.get_sqlite_connection().await.unwrap(); let certificates = vec![ CertificateRecord::dummy_genesis("1", Epoch(1), 1), @@ -477,7 +477,7 @@ mod tests { #[tokio::test] async fn 
get_master_certificate_new_genesis_after_multiple_cert_in_previous_epoch_returns_last_genesis( ) { - let mut deps = DependenciesBuilder::new(Configuration::new_sample()); + let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); let connection = deps.get_sqlite_connection().await.unwrap(); let certificates = vec![ CertificateRecord::dummy_genesis("1", Epoch(1), 1), @@ -503,7 +503,7 @@ mod tests { let (certificates, _) = setup_certificate_chain(3, 1); let expected_certificate_id = &certificates[2].hash; let epoch = &certificates[2].epoch; - let mut deps = DependenciesBuilder::new(Configuration::new_sample()); + let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); let connection = deps.get_sqlite_connection().await.unwrap(); insert_certificate_records(&connection, certificates.clone()); @@ -520,7 +520,7 @@ mod tests { #[tokio::test] async fn save_certificate() { let (certificates, _) = setup_certificate_chain(5, 3); - let mut deps = DependenciesBuilder::new(Configuration::new_sample()); + let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); let connection = deps.get_sqlite_connection().await.unwrap(); let repository: CertificateRepository = CertificateRepository::new(connection); let certificate = repository @@ -544,7 +544,7 @@ mod tests { #[tokio::test] async fn delete_only_given_certificates() { - let mut deps = DependenciesBuilder::new(Configuration::new_sample()); + let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); let connection = deps.get_sqlite_connection().await.unwrap(); let repository = CertificateRepository::new(connection.clone()); let records = vec![ diff --git a/mithril-aggregator/src/dependency_injection/builder.rs b/mithril-aggregator/src/dependency_injection/builder.rs index 78ed10196e9..310eb15c6c8 100644 --- a/mithril-aggregator/src/dependency_injection/builder.rs +++ b/mithril-aggregator/src/dependency_injection/builder.rs @@ -98,6 +98,9 @@ pub struct DependenciesBuilder { /// Configuration parameters pub configuration: Configuration, + /// Application root logger + pub root_logger: Logger, + /// SQLite database connection pub sqlite_connection: Option>, @@ -237,9 +240,10 @@ pub struct DependenciesBuilder { impl DependenciesBuilder { /// Create a new clean dependency builder - pub fn new(configuration: Configuration) -> Self { + pub fn new(root_logger: Logger, configuration: Configuration) -> Self { Self { configuration, + root_logger, sqlite_connection: None, sqlite_connection_cardano_transaction_pool: None, stake_store: None, @@ -303,7 +307,7 @@ impl DependenciesBuilder { sqlite_file_name: &str, migrations: Vec, ) -> Result { - let logger = self.get_logger()?; + let logger = self.root_logger(); let connection_builder = match self.configuration.environment { ExecutionEnvironment::Test if self.configuration.data_stores_directory.to_string_lossy() == ":memory:" => @@ -417,7 +421,7 @@ impl DependenciesBuilder { } async fn build_snapshot_uploader(&mut self) -> Result> { - let logger = self.get_logger()?; + let logger = self.root_logger(); if self.configuration.environment == ExecutionEnvironment::Production { match self.configuration.snapshot_uploader_type { SnapshotUploaderType::Gcp => { @@ -460,7 +464,7 @@ impl DependenciesBuilder { async fn build_multi_signer(&mut self) -> Result> { let multi_signer = - MultiSignerImpl::new(self.get_epoch_service().await?, self.get_logger()?); + MultiSignerImpl::new(self.get_epoch_service().await?, self.root_logger()); 
Ok(Arc::new(multi_signer)) } @@ -560,7 +564,7 @@ impl DependenciesBuilder { } async fn build_epoch_settings_storer(&mut self) -> Result> { - let logger = self.get_logger()?; + let logger = self.root_logger(); let epoch_settings_store = EpochSettingsStore::new( self.get_sqlite_connection().await?, self.configuration.safe_epoch_retention_limit(), @@ -702,7 +706,7 @@ impl DependenciesBuilder { &self.configuration.data_stores_directory, &format!("immutables_digests_{}.json", self.configuration.network), ) - .with_logger(self.get_logger()?) + .with_logger(self.root_logger()) .should_reset_digests_cache(self.configuration.reset_digests_cache) .build() .await?; @@ -721,14 +725,9 @@ impl DependenciesBuilder { Ok(self.immutable_cache_provider.as_ref().cloned().unwrap()) } - fn create_logger(&self) -> Result { - Ok(slog_scope::logger()) - } - - /// This method does not cache the logger since it is managed internally by - /// its own crate. - pub fn get_logger(&self) -> Result { - self.create_logger() + /// Return a copy of the root logger. + pub fn root_logger(&self) -> Logger { + self.root_logger.clone() } async fn build_transaction_repository(&mut self) -> Result> { @@ -755,7 +754,7 @@ impl DependenciesBuilder { let chain_block_reader = PallasChainReader::new( &self.configuration.cardano_node_socket_path, self.configuration.get_network()?, - self.get_logger()?, + self.root_logger(), ); Ok(Arc::new(Mutex::new(chain_block_reader))) @@ -775,7 +774,7 @@ impl DependenciesBuilder { self.get_chain_block_reader().await?, self.configuration .cardano_transactions_block_streamer_max_roll_forwards_per_poll, - self.get_logger()?, + self.root_logger(), ); Ok(Arc::new(block_scanner)) @@ -795,7 +794,7 @@ impl DependenciesBuilder { ExecutionEnvironment::Production => Some(self.get_immutable_cache_provider().await?), _ => None, }; - let digester = CardanoImmutableDigester::new(immutable_digester_cache, self.get_logger()?); + let digester = CardanoImmutableDigester::new(immutable_digester_cache, self.root_logger()); Ok(Arc::new(digester)) } @@ -830,7 +829,7 @@ impl DependenciesBuilder { self.configuration.db_directory.clone(), ongoing_snapshot_directory, algorithm, - self.get_logger()?, + self.root_logger(), )?) 
} _ => Arc::new(DumbSnapshotter::new()), @@ -850,7 +849,7 @@ impl DependenciesBuilder { async fn build_certificate_verifier(&mut self) -> Result> { let verifier = Arc::new(MithrilCertificateVerifier::new( - self.get_logger()?, + self.root_logger(), self.get_certificate_repository().await?, )); @@ -1033,7 +1032,7 @@ impl DependenciesBuilder { async fn build_event_transmitter(&mut self) -> Result>> { let sender = self.get_event_transmitter_sender().await?; - let event_transmitter = Arc::new(TransmitterService::new(sender, self.get_logger()?)); + let event_transmitter = Arc::new(TransmitterService::new(sender, self.root_logger())); Ok(event_transmitter) } @@ -1109,7 +1108,7 @@ impl DependenciesBuilder { let immutable_signable_builder = Arc::new(CardanoImmutableFilesFullSignableBuilder::new( self.get_immutable_digester().await?, &self.configuration.db_directory, - self.get_logger()?, + self.root_logger(), )); let transactions_importer = self.get_transactions_importer().await?; let block_range_root_retriever = self.get_transaction_repository().await?; @@ -1118,7 +1117,7 @@ impl DependenciesBuilder { >::new( transactions_importer, block_range_root_retriever, - self.get_logger()?, + self.root_logger(), )); let cardano_stake_distribution_builder = Arc::new( CardanoStakeDistributionSignableBuilder::new(self.get_stake_store().await?), @@ -1165,7 +1164,7 @@ impl DependenciesBuilder { } async fn build_signed_entity_service(&mut self) -> Result> { - let logger = self.get_logger()?; + let logger = self.root_logger(); let signed_entity_storer = self.build_signed_entity_storer().await?; let epoch_service = self.get_epoch_service().await?; let mithril_stake_distribution_artifact_builder = Arc::new( @@ -1237,7 +1236,7 @@ impl DependenciesBuilder { verification_key_store, network, allowed_discriminants, - self.get_logger()?, + self.root_logger(), ))); Ok(epoch_service) @@ -1285,7 +1284,7 @@ impl DependenciesBuilder { let transactions_importer = Arc::new(CardanoTransactionsImporter::new( self.get_block_scanner().await?, self.get_transaction_repository().await?, - self.get_logger()?, + self.root_logger(), )); Ok(transactions_importer) @@ -1305,7 +1304,7 @@ impl DependenciesBuilder { self.get_sqlite_connection_cardano_transaction_pool() .await?, self.get_signed_entity_lock().await?, - self.get_logger()?, + self.root_logger(), )); Ok(upkeep_service) @@ -1323,7 +1322,7 @@ impl DependenciesBuilder { &mut self, ) -> Result> { let authenticator = - SingleSignatureAuthenticator::new(self.get_multi_signer().await?, self.get_logger()?); + SingleSignatureAuthenticator::new(self.get_multi_signer().await?, self.root_logger()); Ok(Arc::new(authenticator)) } @@ -1356,7 +1355,7 @@ impl DependenciesBuilder { let dependency_manager = DependencyContainer { config: self.configuration.clone(), allowed_discriminants: self.get_allowed_signed_entity_types_discriminants()?, - root_logger: self.get_logger()?, + root_logger: self.root_logger(), sqlite_connection: self.get_sqlite_connection().await?, sqlite_connection_cardano_transaction_pool: self .get_sqlite_connection_cardano_transaction_pool() @@ -1406,7 +1405,7 @@ impl DependenciesBuilder { pub async fn create_event_store(&mut self) -> Result { let event_store = EventStore::new( self.get_event_transmitter_receiver().await?, - self.get_logger()?, + self.root_logger(), ); Ok(event_store) @@ -1421,7 +1420,7 @@ impl DependenciesBuilder { config, None, Arc::new(AggregatorRunner::new(dependency_container)), - self.get_logger()?, + self.root_logger(), ) .await .map_err(|e| 
DependenciesBuilderError::Initialization { @@ -1455,7 +1454,7 @@ impl DependenciesBuilder { .cardano_transactions_signing_config .security_parameter, self.get_chain_observer().await?, - self.get_logger()?, + self.root_logger(), Arc::new(CardanoTransactionsPreloaderActivation::new(activation)), ); @@ -1492,14 +1491,14 @@ impl DependenciesBuilder { let retriever = CExplorerSignerRetriever::new( cexplorer_pools_url, Some(Duration::from_secs(30)), - self.get_logger()?, + self.root_logger(), )?; let persister = self.get_signer_store().await?; Ok(SignersImporter::new( Arc::new(retriever), persister, - self.get_logger()?, + self.root_logger(), )) } @@ -1538,7 +1537,7 @@ impl DependenciesBuilder { let multi_signer = self.get_multi_signer().await?; let ticker_service = self.get_ticker_service().await?; let epoch_service = self.get_epoch_service().await?; - let logger = self.get_logger()?; + let logger = self.root_logger(); let certifier = Arc::new(MithrilCertifierService::new( cardano_network, @@ -1556,7 +1555,7 @@ impl DependenciesBuilder { Ok(Arc::new(BufferedCertifierService::new( certifier, Arc::new(BufferedSingleSignatureRepository::new(sqlite_connection)), - self.get_logger()?, + self.root_logger(), ))) } @@ -1596,7 +1595,7 @@ impl DependenciesBuilder { .cardano_transactions_prover_cache_pool_size; let transaction_retriever = self.get_transaction_repository().await?; let block_range_root_retriever = self.get_transaction_repository().await?; - let logger = self.get_logger()?; + let logger = self.root_logger(); let prover_service = MithrilProverService::::new( transaction_retriever, block_range_root_retriever, @@ -1622,6 +1621,13 @@ impl DependenciesBuilder { } } +#[cfg(test)] +impl DependenciesBuilder { + pub(crate) fn new_with_stdout_logger(configuration: Configuration) -> Self { + Self::new(crate::test_tools::TestLogger::stdout(), configuration) + } +} + #[cfg(test)] mod tests { use mithril_common::entities::SignedEntityTypeDiscriminants; @@ -1651,7 +1657,7 @@ mod tests { signed_entity_types: Some(signed_entity_types), ..Configuration::new_sample() }; - let mut dep_builder = DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let cardano_transactions_preloader = dep_builder .create_cardano_transactions_preloader() diff --git a/mithril-aggregator/src/dependency_injection/containers.rs b/mithril-aggregator/src/dependency_injection/containers.rs index 5a8efa3c616..f4d1a43724b 100644 --- a/mithril-aggregator/src/dependency_injection/containers.rs +++ b/mithril-aggregator/src/dependency_injection/containers.rs @@ -304,7 +304,7 @@ pub mod tests { pub async fn initialize_dependencies() -> DependencyContainer { let config = Configuration::new_sample(); - let mut builder = DependenciesBuilder::new(config); + let mut builder = DependenciesBuilder::new_with_stdout_logger(config); builder.build_dependency_container().await.unwrap() } diff --git a/mithril-aggregator/src/http_server/routes/proof_routes.rs b/mithril-aggregator/src/http_server/routes/proof_routes.rs index 79917773e69..e013dddfc53 100644 --- a/mithril-aggregator/src/http_server/routes/proof_routes.rs +++ b/mithril-aggregator/src/http_server/routes/proof_routes.rs @@ -198,7 +198,7 @@ mod tests { #[tokio::test] async fn proof_cardano_transaction_ok() { let config = Configuration::new_sample(); - let mut builder = DependenciesBuilder::new(config); + let mut builder = DependenciesBuilder::new_with_stdout_logger(config); let mut dependency_manager = 
builder.build_dependency_container().await.unwrap(); let mut mock_signed_entity_service = MockSignedEntityService::new(); mock_signed_entity_service @@ -240,7 +240,7 @@ mod tests { #[tokio::test] async fn proof_cardano_transaction_not_found() { let config = Configuration::new_sample(); - let mut builder = DependenciesBuilder::new(config); + let mut builder = DependenciesBuilder::new_with_stdout_logger(config); let dependency_manager = builder.build_dependency_container().await.unwrap(); let method = Method::GET.as_str(); @@ -271,7 +271,7 @@ mod tests { #[tokio::test] async fn proof_cardano_transaction_ko() { let config = Configuration::new_sample(); - let mut builder = DependenciesBuilder::new(config); + let mut builder = DependenciesBuilder::new_with_stdout_logger(config); let mut dependency_manager = builder.build_dependency_container().await.unwrap(); let mut mock_signed_entity_service = MockSignedEntityService::new(); mock_signed_entity_service @@ -307,7 +307,7 @@ mod tests { #[tokio::test] async fn proof_cardano_transaction_return_bad_request_with_invalid_hashes() { let config = Configuration::new_sample(); - let mut builder = DependenciesBuilder::new(config); + let mut builder = DependenciesBuilder::new_with_stdout_logger(config); let dependency_manager = builder.build_dependency_container().await.unwrap(); let method = Method::GET.as_str(); @@ -337,7 +337,7 @@ mod tests { async fn proof_cardano_transaction_route_deduplicate_hashes() { let tx = fake_data::transaction_hashes()[0].to_string(); let config = Configuration::new_sample(); - let mut builder = DependenciesBuilder::new(config); + let mut builder = DependenciesBuilder::new_with_stdout_logger(config); let mut dependency_manager = builder.build_dependency_container().await.unwrap(); let mut mock_signed_entity_service = MockSignedEntityService::new(); mock_signed_entity_service diff --git a/mithril-aggregator/src/http_server/routes/root_routes.rs b/mithril-aggregator/src/http_server/routes/root_routes.rs index f02b93745ac..1cb96128cb8 100644 --- a/mithril-aggregator/src/http_server/routes/root_routes.rs +++ b/mithril-aggregator/src/http_server/routes/root_routes.rs @@ -132,7 +132,7 @@ mod tests { )), ..Configuration::new_sample() }; - let mut builder = DependenciesBuilder::new(config); + let mut builder = DependenciesBuilder::new_with_stdout_logger(config); let dependency_manager = builder.build_dependency_container().await.unwrap(); let expected_open_api_version = dependency_manager @@ -193,7 +193,7 @@ mod tests { )), ..Configuration::new_sample() }; - let mut builder = DependenciesBuilder::new(config); + let mut builder = DependenciesBuilder::new_with_stdout_logger(config); let mut dependency_manager = builder.build_dependency_container().await.unwrap(); dependency_manager .config diff --git a/mithril-aggregator/src/http_server/routes/statistics_routes.rs b/mithril-aggregator/src/http_server/routes/statistics_routes.rs index eefd6b6c9bd..ba566057b4e 100644 --- a/mithril-aggregator/src/http_server/routes/statistics_routes.rs +++ b/mithril-aggregator/src/http_server/routes/statistics_routes.rs @@ -80,7 +80,7 @@ mod tests { #[tokio::test] async fn post_statistics_ok() { let config = Configuration::new_sample(); - let mut builder = DependenciesBuilder::new(config); + let mut builder = DependenciesBuilder::new_with_stdout_logger(config); let mut rx = builder.get_event_transmitter_receiver().await.unwrap(); let dependency_manager = builder.build_dependency_container().await.unwrap(); let snapshot_download_message = 
SnapshotDownloadMessage::dummy(); diff --git a/mithril-aggregator/src/services/certifier/certifier_service.rs b/mithril-aggregator/src/services/certifier/certifier_service.rs index 91747b712e3..5bddc253518 100644 --- a/mithril-aggregator/src/services/certifier/certifier_service.rs +++ b/mithril-aggregator/src/services/certifier/certifier_service.rs @@ -442,7 +442,7 @@ mod tests { current_epoch: Option, ) -> MithrilCertifierService { let configuration = Configuration::new_sample(); - let mut dependency_builder = DependenciesBuilder::new(configuration); + let mut dependency_builder = DependenciesBuilder::new_with_stdout_logger(configuration); if let Some(epoch) = current_epoch { dependency_builder.epoch_service = Some(Arc::new(RwLock::new( diff --git a/mithril-aggregator/src/services/message.rs b/mithril-aggregator/src/services/message.rs index 4bcd035d7cc..a0ea8c04811 100644 --- a/mithril-aggregator/src/services/message.rs +++ b/mithril-aggregator/src/services/message.rs @@ -267,7 +267,7 @@ mod tests { async fn get_no_certificate() { // setup let configuration = Configuration::new_sample(); - let mut dep_builder = DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let service = dep_builder.get_message_service().await.unwrap(); // test @@ -283,7 +283,7 @@ mod tests { async fn get_certificate() { // setup let configuration = Configuration::new_sample(); - let mut dep_builder = DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let repository = dep_builder.get_certificate_repository().await.unwrap(); let service = dep_builder.get_message_service().await.unwrap(); let fixture = MithrilFixtureBuilder::default().with_signers(3).build(); @@ -305,7 +305,7 @@ mod tests { #[tokio::test] async fn get_last_certificates() { let configuration = Configuration::new_sample(); - let mut dep_builder = DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let repository = dep_builder.get_certificate_repository().await.unwrap(); let service = dep_builder.get_message_service().await.unwrap(); let fixture = MithrilFixtureBuilder::default().with_signers(3).build(); @@ -330,7 +330,7 @@ mod tests { #[tokio::test] async fn get_snapshot_not_exist() { let configuration = Configuration::new_sample(); - let mut dep_builder = DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let service = dep_builder.get_message_service().await.unwrap(); let snapshot = service.get_snapshot_message("whatever").await.unwrap(); @@ -351,7 +351,7 @@ mod tests { // setup let configuration = Configuration::new_sample(); - let mut dep_builder = DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let mut storer = MockSignedEntityStorer::new(); storer .expect_get_signed_entity() @@ -383,7 +383,7 @@ mod tests { // setup let configuration = Configuration::new_sample(); - let mut dep_builder = DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let mut storer = MockSignedEntityStorer::new(); storer .expect_get_last_signed_entities_by_type() @@ -408,7 +408,7 @@ mod tests { }; let message = ToMithrilStakeDistributionMessageAdapter::adapt(entity); let configuration = Configuration::new_sample(); - let mut dep_builder = 
DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let mut storer = MockSignedEntityStorer::new(); storer .expect_get_signed_entity() @@ -428,7 +428,7 @@ mod tests { #[tokio::test] async fn get_mithril_stake_distribution_not_exist() { let configuration = Configuration::new_sample(); - let mut dep_builder = DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let mut storer = MockSignedEntityStorer::new(); storer .expect_get_signed_entity() @@ -456,7 +456,7 @@ mod tests { }]; let message = ToMithrilStakeDistributionListMessageAdapter::adapt(vec![entity]); let configuration = Configuration::new_sample(); - let mut dep_builder = DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let mut storer = MockSignedEntityStorer::new(); storer .expect_get_last_signed_entities_by_type() @@ -487,7 +487,7 @@ mod tests { }; let message = ToCardanoTransactionMessageAdapter::adapt(entity); let configuration = Configuration::new_sample(); - let mut dep_builder = DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let mut storer = MockSignedEntityStorer::new(); storer .expect_get_signed_entity() @@ -507,7 +507,7 @@ mod tests { #[tokio::test] async fn get_cardano_transaction_not_exist() { let configuration = Configuration::new_sample(); - let mut dep_builder = DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let mut storer = MockSignedEntityStorer::new(); storer .expect_get_signed_entity() @@ -538,7 +538,7 @@ mod tests { }]; let message = ToCardanoTransactionListMessageAdapter::adapt(vec![entity]); let configuration = Configuration::new_sample(); - let mut dep_builder = DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let mut storer = MockSignedEntityStorer::new(); storer .expect_get_last_signed_entities_by_type() @@ -566,7 +566,7 @@ mod tests { }; let message = ToCardanoStakeDistributionMessageAdapter::adapt(entity); let configuration = Configuration::new_sample(); - let mut dep_builder = DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let mut storer = MockSignedEntityStorer::new(); storer .expect_get_signed_entity() @@ -586,7 +586,7 @@ mod tests { #[tokio::test] async fn get_cardano_stake_distribution_not_exist() { let configuration = Configuration::new_sample(); - let mut dep_builder = DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let mut storer = MockSignedEntityStorer::new(); storer .expect_get_signed_entity() @@ -614,7 +614,7 @@ mod tests { }; let message = ToCardanoStakeDistributionMessageAdapter::adapt(entity.clone()); let configuration = Configuration::new_sample(); - let mut dep_builder = DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let mut storer = MockSignedEntityStorer::new(); storer .expect_get_cardano_stake_distribution_signed_entity_by_epoch() @@ -634,7 +634,7 @@ mod tests { #[tokio::test] async fn get_cardano_stake_distribution_by_epoch_not_exist() { let configuration = Configuration::new_sample(); - let mut dep_builder = 
DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let mut storer = MockSignedEntityStorer::new(); storer .expect_get_cardano_stake_distribution_signed_entity_by_epoch() @@ -662,7 +662,7 @@ mod tests { }]; let message = ToCardanoStakeDistributionListMessageAdapter::adapt(vec![entity]); let configuration = Configuration::new_sample(); - let mut dep_builder = DependenciesBuilder::new(configuration); + let mut dep_builder = DependenciesBuilder::new_with_stdout_logger(configuration); let mut storer = MockSignedEntityStorer::new(); storer .expect_get_last_signed_entities_by_type() diff --git a/mithril-aggregator/src/services/stake_distribution.rs b/mithril-aggregator/src/services/stake_distribution.rs index 38a560e9513..d97a66a39f3 100644 --- a/mithril-aggregator/src/services/stake_distribution.rs +++ b/mithril-aggregator/src/services/stake_distribution.rs @@ -230,7 +230,8 @@ mod tests { use super::*; async fn get_service(chain_observer: MockChainObserver) -> MithrilStakeDistributionService { - let mut builder = DependenciesBuilder::new(crate::Configuration::new_sample()); + let mut builder = + DependenciesBuilder::new_with_stdout_logger(crate::Configuration::new_sample()); let stake_service = MithrilStakeDistributionService::new( builder.get_stake_store().await.unwrap(), Arc::new(chain_observer), diff --git a/mithril-aggregator/tests/test_extensions/runtime_tester.rs b/mithril-aggregator/tests/test_extensions/runtime_tester.rs index 40b403f9fd0..8f0c2d02add 100644 --- a/mithril-aggregator/tests/test_extensions/runtime_tester.rs +++ b/mithril-aggregator/tests/test_extensions/runtime_tester.rs @@ -81,19 +81,20 @@ pub struct RuntimeTester { pub observer: Arc, pub open_message_repository: Arc, pub block_scanner: Arc, - _logs_guard: slog_scope::GlobalLoggerGuard, + _global_logger_guard: slog_scope::GlobalLoggerGuard, } -fn build_logger() -> slog_scope::GlobalLoggerGuard { +fn build_logger() -> slog::Logger { let decorator = slog_term::PlainDecorator::new(slog_term::TestStdoutWriter); let drain = slog_term::CompactFormat::new(decorator).build().fuse(); let drain = slog_async::Async::new(drain).build().fuse(); - slog_scope::set_global_logger(slog::Logger::root(Arc::new(drain), slog::o!())) + slog::Logger::root(Arc::new(drain), slog::o!()) } impl RuntimeTester { pub async fn build(start_time_point: TimePoint, configuration: Configuration) -> Self { let logger = build_logger(); + let global_logger = slog_scope::set_global_logger(logger.clone()); let network = configuration.network.clone(); let snapshot_uploader = Arc::new(DumbSnapshotUploader::new()); let immutable_file_observer = Arc::new(DumbImmutableFileObserver::new()); @@ -110,7 +111,7 @@ impl RuntimeTester { Some(Epoch(0)), )])); let block_scanner = Arc::new(DumbBlockScanner::new()); - let mut deps_builder = DependenciesBuilder::new(configuration); + let mut deps_builder = DependenciesBuilder::new(logger.clone(), configuration); deps_builder.snapshot_uploader = Some(snapshot_uploader.clone()); deps_builder.chain_observer = Some(chain_observer.clone()); deps_builder.immutable_file_observer = Some(immutable_file_observer.clone()); @@ -140,7 +141,7 @@ impl RuntimeTester { observer, open_message_repository, block_scanner, - _logs_guard: logger, + _global_logger_guard: global_logger, } } From e71d93093dc270d2d27aee59f2c419bc8af7d123 Mon Sep 17 00:00:00 2001 From: DJO <790521+Alenar@users.noreply.github.com> Date: Fri, 11 Oct 2024 18:56:18 +0200 Subject: [PATCH 08/12] 
test(aggregator): use simpler method to build sqlite connection in `certificate_repository` --- .../repository/certificate_repository.rs | 50 ++++++------------- 1 file changed, 16 insertions(+), 34 deletions(-) diff --git a/mithril-aggregator/src/database/repository/certificate_repository.rs b/mithril-aggregator/src/database/repository/certificate_repository.rs index 3cea511ca87..84885667390 100644 --- a/mithril-aggregator/src/database/repository/certificate_repository.rs +++ b/mithril-aggregator/src/database/repository/certificate_repository.rs @@ -130,8 +130,6 @@ mod tests { use mithril_common::crypto_helper::tests_setup::setup_certificate_chain; use crate::database::test_helper::{insert_certificate_records, main_db_connection}; - use crate::dependency_injection::DependenciesBuilder; - use crate::Configuration; use super::*; @@ -239,8 +237,7 @@ mod tests { async fn repository_get_certificate() { let (certificates, _) = setup_certificate_chain(5, 2); let expected_hash = certificates[0].hash.clone(); - let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); - let connection = deps.get_sqlite_connection().await.unwrap(); + let connection = Arc::new(main_db_connection().unwrap()); insert_certificate_records(&connection, certificates.clone()); let repository: CertificateRepository = CertificateRepository::new(connection); @@ -262,8 +259,7 @@ mod tests { #[tokio::test] async fn repository_get_latest_certificates() { let (certificates, _) = setup_certificate_chain(5, 2); - let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); - let connection = deps.get_sqlite_connection().await.unwrap(); + let connection = Arc::new(main_db_connection().unwrap()); insert_certificate_records(&connection, certificates.clone()); let repository = CertificateRepository::new(connection); @@ -278,8 +274,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_no_certificate_recorded_returns_none() { - let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); - let connection = deps.get_sqlite_connection().await.unwrap(); + let connection = Arc::new(main_db_connection().unwrap()); let repository: CertificateRepository = CertificateRepository::new(connection); let certificate = repository @@ -292,8 +287,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_one_cert_in_current_epoch_recorded_returns_that_one() { - let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); - let connection = deps.get_sqlite_connection().await.unwrap(); + let connection = Arc::new(main_db_connection().unwrap()); let certificate = CertificateRecord::dummy_genesis("1", Epoch(1), 1); let expected_certificate: Certificate = certificate.clone().into(); insert_certificate_records(&connection, vec![certificate]); @@ -311,8 +305,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_multiple_cert_in_current_epoch_returns_first_of_current_epoch() { - let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); - let connection = deps.get_sqlite_connection().await.unwrap(); + let connection = Arc::new(main_db_connection().unwrap()); let certificates = vec![ CertificateRecord::dummy_genesis("1", Epoch(1), 1), CertificateRecord::dummy_db_snapshot("2", "1", Epoch(1), 2), @@ -334,8 +327,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_multiple_cert_in_previous_epoch_none_in_the_current_returns_first_of_previous_epoch( ) { - let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); - let 
connection = deps.get_sqlite_connection().await.unwrap(); + let connection = Arc::new(main_db_connection().unwrap()); let certificates = vec![ CertificateRecord::dummy_genesis("1", Epoch(1), 1), CertificateRecord::dummy_db_snapshot("2", "1", Epoch(1), 2), @@ -357,8 +349,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_multiple_cert_in_previous_one_cert_in_current_epoch_returns_one_in_current_epoch( ) { - let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); - let connection = deps.get_sqlite_connection().await.unwrap(); + let connection = Arc::new(main_db_connection().unwrap()); let certificates = vec![ CertificateRecord::dummy_genesis("1", Epoch(1), 1), CertificateRecord::dummy_db_snapshot("2", "1", Epoch(1), 2), @@ -381,8 +372,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_multiple_cert_in_previous_multiple_in_current_epoch_returns_first_of_current_epoch( ) { - let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); - let connection = deps.get_sqlite_connection().await.unwrap(); + let connection = Arc::new(main_db_connection().unwrap()); let certificates = vec![ CertificateRecord::dummy_genesis("1", Epoch(1), 1), CertificateRecord::dummy_db_snapshot("2", "1", Epoch(1), 2), @@ -406,8 +396,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_multiple_cert_in_penultimate_epoch_none_in_previous_returns_none( ) { - let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); - let connection = deps.get_sqlite_connection().await.unwrap(); + let connection = Arc::new(main_db_connection().unwrap()); let certificates = vec![ CertificateRecord::dummy_genesis("1", Epoch(1), 1), CertificateRecord::dummy_db_snapshot("2", "1", Epoch(1), 2), @@ -427,8 +416,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_second_genesis_after_multiple_cert_in_current_epoch_returns_last_genesis( ) { - let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); - let connection = deps.get_sqlite_connection().await.unwrap(); + let connection = Arc::new(main_db_connection().unwrap()); let certificates = vec![ CertificateRecord::dummy_genesis("1", Epoch(1), 1), CertificateRecord::dummy_db_snapshot("2", "1", Epoch(1), 2), @@ -451,8 +439,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_second_genesis_after_multiple_cert_in_multiple_epochs_returns_last_genesis( ) { - let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); - let connection = deps.get_sqlite_connection().await.unwrap(); + let connection = Arc::new(main_db_connection().unwrap()); let certificates = vec![ CertificateRecord::dummy_genesis("1", Epoch(1), 1), CertificateRecord::dummy_db_snapshot("2", "1", Epoch(1), 2), @@ -477,8 +464,7 @@ mod tests { #[tokio::test] async fn get_master_certificate_new_genesis_after_multiple_cert_in_previous_epoch_returns_last_genesis( ) { - let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); - let connection = deps.get_sqlite_connection().await.unwrap(); + let connection = Arc::new(main_db_connection().unwrap()); let certificates = vec![ CertificateRecord::dummy_genesis("1", Epoch(1), 1), CertificateRecord::dummy_db_snapshot("2", "1", Epoch(1), 2), @@ -503,8 +489,7 @@ mod tests { let (certificates, _) = setup_certificate_chain(3, 1); let expected_certificate_id = &certificates[2].hash; let epoch = &certificates[2].epoch; - let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); - let connection = 
deps.get_sqlite_connection().await.unwrap(); + let connection = Arc::new(main_db_connection().unwrap()); insert_certificate_records(&connection, certificates.clone()); let repository: CertificateRepository = CertificateRepository::new(connection); @@ -520,9 +505,8 @@ mod tests { #[tokio::test] async fn save_certificate() { let (certificates, _) = setup_certificate_chain(5, 3); - let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); - let connection = deps.get_sqlite_connection().await.unwrap(); - let repository: CertificateRepository = CertificateRepository::new(connection); + let connection = Arc::new(main_db_connection().unwrap()); + let repository: CertificateRepository = CertificateRepository::new(connection.clone()); let certificate = repository .create_certificate(certificates[4].clone()) .await @@ -530,7 +514,6 @@ mod tests { assert_eq!(certificates[4].hash, certificate.hash); { - let connection = deps.get_sqlite_connection().await.unwrap(); let cert = connection .fetch_first(GetCertificateRecordQuery::by_certificate_id( &certificates[4].hash, @@ -544,8 +527,7 @@ mod tests { #[tokio::test] async fn delete_only_given_certificates() { - let mut deps = DependenciesBuilder::new_for_test(Configuration::new_sample()); - let connection = deps.get_sqlite_connection().await.unwrap(); + let connection = Arc::new(main_db_connection().unwrap()); let repository = CertificateRepository::new(connection.clone()); let records = vec![ CertificateRecord::dummy_genesis("1", Epoch(1), 1), From a2fc819126f724ed1ca99397b192541ead6210ee Mon Sep 17 00:00:00 2001 From: DJO <790521+Alenar@users.noreply.github.com> Date: Fri, 11 Oct 2024 19:01:56 +0200 Subject: [PATCH 09/12] feat(aggregator): add logs to snapshot uploaders --- .../src/snapshot_uploaders/local_snapshot_uploader.rs | 1 + .../src/snapshot_uploaders/remote_snapshot_uploader.rs | 2 ++ 2 files changed, 3 insertions(+) diff --git a/mithril-aggregator/src/snapshot_uploaders/local_snapshot_uploader.rs b/mithril-aggregator/src/snapshot_uploaders/local_snapshot_uploader.rs index c2f898de0c9..461ac0dda4d 100644 --- a/mithril-aggregator/src/snapshot_uploaders/local_snapshot_uploader.rs +++ b/mithril-aggregator/src/snapshot_uploaders/local_snapshot_uploader.rs @@ -51,6 +51,7 @@ impl SnapshotUploader for LocalSnapshotUploader { digest.unwrap() ); + debug!(self.logger, "Snapshot 'uploaded' to local storage"; "location" => &location); Ok(location) } } diff --git a/mithril-aggregator/src/snapshot_uploaders/remote_snapshot_uploader.rs b/mithril-aggregator/src/snapshot_uploaders/remote_snapshot_uploader.rs index e99fb97a076..be0345bd1d3 100644 --- a/mithril-aggregator/src/snapshot_uploaders/remote_snapshot_uploader.rs +++ b/mithril-aggregator/src/snapshot_uploaders/remote_snapshot_uploader.rs @@ -48,7 +48,9 @@ impl SnapshotUploader for RemoteSnapshotUploader { ) }; + debug!(self.logger, "Uploading snapshot to remote storage"; "location" => &location); self.file_uploader.upload_file(snapshot_filepath).await?; + debug!(self.logger, "Snapshot upload to remote storage completed"; "location" => &location); Ok(location) } From 1c06fede17ad0640b80b887c7d68b987df7e3da9 Mon Sep 17 00:00:00 2001 From: DJO <790521+Alenar@users.noreply.github.com> Date: Fri, 11 Oct 2024 19:03:30 +0200 Subject: [PATCH 10/12] chore: remove `slog_scope` from aggregator main dependencies It's still a `dev-dependencies` as it is useful for integration tests. 
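Illustration (not part of this patch): with `slog-scope` kept as a dev-dependency, an integration test can still install a process-wide logger, roughly as `runtime_tester.rs` does after this series. The drain stack below mirrors that helper; the test function itself is hypothetical, and the guard returned by `set_global_logger` must stay alive for the whole test.

    use std::sync::Arc;

    use slog::Drain;

    fn build_test_logger() -> slog::Logger {
        // Plain text output to the test stdout, same stack as the integration test helper.
        let decorator = slog_term::PlainDecorator::new(slog_term::TestStdoutWriter);
        let drain = slog_term::CompactFormat::new(decorator).build().fuse();
        let drain = slog_async::Async::new(drain).build().fuse();
        slog::Logger::root(Arc::new(drain), slog::o!())
    }

    #[test]
    fn some_integration_test() {
        let logger = build_test_logger();
        // Dropping the guard resets the global logger, so bind it for the test's lifetime.
        let _global_logger_guard = slog_scope::set_global_logger(logger.clone());

        // ... test body can use both `logger` and `slog_scope::logger()` ...
    }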
---
 mithril-aggregator/Cargo.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mithril-aggregator/Cargo.toml b/mithril-aggregator/Cargo.toml
index 24c888d08d4..9d3ade0000c 100644
--- a/mithril-aggregator/Cargo.toml
+++ b/mithril-aggregator/Cargo.toml
@@ -44,7 +44,6 @@ slog = { version = "2.7.0", features = [
 ] }
 slog-async = "2.8.0"
 slog-bunyan = "2.5.0"
-slog-scope = "4.4.0"
 sqlite = { version = "0.36.1", features = ["bundled"] }
 tar = "0.4.41"
 thiserror = "1.0.63"
@@ -70,6 +69,7 @@ mithril-common = { path = "../mithril-common", features = [
     "test_tools",
 ] }
 mockall = "0.13.0"
+slog-scope = "4.4.0"
 slog-term = "2.9.1"
 tempfile = "3.12.0"

From d1a1ee59124cc420d7e13647747afe4bed7624e3 Mon Sep 17 00:00:00 2001
From: DJO <790521+Alenar@users.noreply.github.com>
Date: Fri, 11 Oct 2024 19:06:58 +0200
Subject: [PATCH 11/12] style(aggregator): set application name in log to `mithril-aggregator`

Instead of the default `slog-rs`.
---
 mithril-aggregator/src/main.rs | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/mithril-aggregator/src/main.rs b/mithril-aggregator/src/main.rs
index 3d9c1e8b371..03f88b31232 100644
--- a/mithril-aggregator/src/main.rs
+++ b/mithril-aggregator/src/main.rs
@@ -1,17 +1,21 @@
 #![doc = include_str!("../README.md")]

 use clap::Parser;
-use mithril_aggregator::{CommandType, MainOpts};
-use mithril_common::StdResult;
 use slog::{Drain, Fuse, Level, Logger};
 use slog_async::Async;
 use std::sync::Arc;

+use mithril_aggregator::{CommandType, MainOpts};
+use mithril_common::StdResult;
+
 fn build_io_logger(log_level: Level, io: W) -> Fuse {
-    let drain = slog_bunyan::new(io).set_pretty(false).build().fuse();
+    let drain = slog_bunyan::with_name("mithril-aggregator", io)
+        .set_pretty(false)
+        .build()
+        .fuse();
     let drain = slog::LevelFilter::new(drain, log_level).fuse();

-    slog_async::Async::new(drain).build().fuse()
+    Async::new(drain).build().fuse()
 }

 /// Build a logger from args.
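Illustration (not part of this patch): a minimal sketch of the renamed drain in use, relying on the same crates as `main.rs` above. Every JSON record emitted through a drain built with `slog_bunyan::with_name` should carry the application name in its `name` field rather than the `slog-rs` default mentioned in the commit message.

    use slog::{info, o, Drain, Logger};

    fn main() {
        // Same drain construction as `build_io_logger`, here writing to stdout.
        let drain = slog_bunyan::with_name("mithril-aggregator", std::io::stdout())
            .set_pretty(false)
            .build()
            .fuse();
        let drain = slog_async::Async::new(drain).build().fuse();
        let logger = Logger::root(drain, o!());

        // The record for this entry is expected to be tagged with "name": "mithril-aggregator".
        info!(logger, "Aggregator started");
    }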
From 01bf63f383fe9d2c5aee51d906da43a90724fb4d Mon Sep 17 00:00:00 2001
From: DJO <790521+Alenar@users.noreply.github.com>
Date: Mon, 14 Oct 2024 12:40:16 +0200
Subject: [PATCH 12/12] chore: upgrade crate versions

* mithril-aggregator from `0.5.80` to `0.5.81`
* mithril-common from `0.4.67` to `0.4.68`
---
 Cargo.lock                    | 4 ++--
 mithril-aggregator/Cargo.toml | 2 +-
 mithril-common/Cargo.toml     | 2 +-
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index d75f9f66211..dce9e239c12 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3403,7 +3403,7 @@ dependencies = [

 [[package]]
 name = "mithril-aggregator"
-version = "0.5.80"
+version = "0.5.81"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -3559,7 +3559,7 @@ dependencies = [

 [[package]]
 name = "mithril-common"
-version = "0.4.67"
+version = "0.4.68"
 dependencies = [
  "anyhow",
  "async-trait",
diff --git a/mithril-aggregator/Cargo.toml b/mithril-aggregator/Cargo.toml
index 9d3ade0000c..2fb7be2e5c1 100644
--- a/mithril-aggregator/Cargo.toml
+++ b/mithril-aggregator/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "mithril-aggregator"
-version = "0.5.80"
+version = "0.5.81"
 description = "A Mithril Aggregator server"
 authors = { workspace = true }
 edition = { workspace = true }
diff --git a/mithril-common/Cargo.toml b/mithril-common/Cargo.toml
index 4ea4e38c8bc..99f12801e4f 100644
--- a/mithril-common/Cargo.toml
+++ b/mithril-common/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "mithril-common"
-version = "0.4.67"
+version = "0.4.68"
 description = "Common types, interfaces, and utilities for Mithril nodes."
 authors = { workspace = true }
 edition = { workspace = true }
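Illustration (not part of the series): taken together, these patches make the aggregator pass its root logger explicitly instead of pulling it from `slog_scope`. The sketch below shows the resulting wiring; `DependenciesBuilder::new`, `Configuration::new_sample()` and `build_dependency_container()` are the items shown in the diffs above, while the wrapper function and the exact import paths are assumptions based on the crate layout.

    // Import paths assumed from the source tree layout.
    use mithril_aggregator::dependency_injection::{DependenciesBuilder, DependencyContainer};
    use mithril_aggregator::Configuration;

    // Hypothetical helper: build the whole dependency graph from an explicit root logger.
    async fn initialize_dependencies(root_logger: slog::Logger) -> DependencyContainer {
        // The builder now receives the logger up front and hands it out via `root_logger()`.
        let mut builder = DependenciesBuilder::new(root_logger, Configuration::new_sample());
        builder.build_dependency_container().await.unwrap()
    }

    // In the aggregator's own unit tests, the crate-private helper added by this series
    // avoids the boilerplate:
    //     let mut builder = DependenciesBuilder::new_with_stdout_logger(Configuration::new_sample());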