[Bug fix] Use global sync dag store (#4347)
* use global sync dag store

* add timeout while waiting for parents to be ready
jackzhhuang authored Dec 13, 2024
1 parent d69e48b commit 33523f0
Showing 9 changed files with 61 additions and 27 deletions.
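
All nine files move in the same direction: the SyncDagStore that each sync run used to construct for itself becomes a single store created up front and shared as Arc<SyncDagStore>. The sketch below shows the resulting ownership shape under simplifying assumptions — the store is a placeholder unit struct rather than the real RocksDB-backed type, and the consumer structs are trimmed to the one field this commit touches.

use std::sync::Arc;

// Placeholder for the real SyncDagStore (a RocksDB-backed store kept under
// the node's sync directory).
struct SyncDagStore;

// Consumers such as BlockCollector, DagBlockSender and InnerSyncTask now hold
// Arc<SyncDagStore> instead of owning a store of their own.
struct BlockCollector {
    sync_dag_store: Arc<SyncDagStore>,
}

struct DagBlockSender {
    sync_dag_store: Arc<SyncDagStore>,
}

fn main() {
    // One store, created once, handed out as cheap Arc clones.
    let sync_dag_store = Arc::new(SyncDagStore);
    let collector = BlockCollector {
        sync_dag_store: sync_dag_store.clone(),
    };
    let sender = DagBlockSender {
        sync_dag_store: sync_dag_store.clone(),
    };
    // Three strong handles (the original plus the two consumers), one store.
    assert_eq!(Arc::strong_count(&collector.sync_dag_store), 3);
    assert_eq!(Arc::strong_count(&sender.sync_dag_store), 3);
}
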
33 changes: 31 additions & 2 deletions sync/src/parallel/executor.rs
@@ -14,6 +14,8 @@ use tokio::{
task::JoinHandle,
};

const MAX_TOTAL_WAITING_TIME: u64 = 3600000; // an hour

#[derive(Debug)]
pub enum ExecuteState {
Executing(HashValue),
@@ -96,6 +98,8 @@ impl DagBlockExecutor {
block.header().id()
);

let mut total_waiting_time: u64 = 0;
let waiting_per_time: u64 = 100;
loop {
match Self::waiting_for_parents(
&self.dag,
@@ -104,9 +108,34 @@
) {
Ok(true) => break,
Ok(false) => {
if total_waiting_time >= MAX_TOTAL_WAITING_TIME {
error!(
"failed to check parents: {:?}, for reason: timeout",
header
);
match self
.sender
.send(ExecuteState::Error(Box::new(header.clone())))
.await
{
Ok(_) => (),
Err(e) => {
error!(
"failed to send error state: {:?}, for reason: {:?}",
header, e
);
return;
}
}
return;
}
tokio::task::yield_now().await;
tokio::time::sleep(tokio::time::Duration::from_millis(100))
.await
tokio::time::sleep(tokio::time::Duration::from_millis(
waiting_per_time,
))
.await;
total_waiting_time =
total_waiting_time.saturating_add(waiting_per_time);
}
Err(e) => {
error!(
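
The hunk above is the commit's second bullet: instead of sleeping between parent-readiness checks forever, the executor now sleeps in waiting_per_time (100 ms) slices, accumulates the total, and gives up with an error once MAX_TOTAL_WAITING_TIME (an hour) is reached. Below is a self-contained sketch of that bounded-wait loop; wait_until_parents_ready and the parents_ready closure are illustrative stand-ins, not the real DagBlockExecutor::waiting_for_parents API.

use tokio::time::{sleep, Duration};

const MAX_TOTAL_WAITING_TIME: u64 = 3_600_000; // one hour, in milliseconds

// Poll a readiness check, sleeping a fixed slice per round, and give up once
// the accumulated waiting time crosses MAX_TOTAL_WAITING_TIME.
async fn wait_until_parents_ready(mut parents_ready: impl FnMut() -> bool) -> Result<(), String> {
    let mut total_waiting_time: u64 = 0;
    let waiting_per_time: u64 = 100; // sleep slice, in milliseconds

    loop {
        if parents_ready() {
            return Ok(());
        }
        if total_waiting_time >= MAX_TOTAL_WAITING_TIME {
            // The real executor sends ExecuteState::Error back to its sender
            // at this point; here we just surface an error.
            return Err("timeout while waiting for parent blocks".to_string());
        }
        tokio::task::yield_now().await;
        sleep(Duration::from_millis(waiting_per_time)).await;
        total_waiting_time = total_waiting_time.saturating_add(waiting_per_time);
    }
}

#[tokio::main]
async fn main() {
    let mut polls = 0;
    // Pretend the parents become ready after a few polls.
    let result = wait_until_parents_ready(|| {
        polls += 1;
        polls > 3
    })
    .await;
    assert!(result.is_ok());
}
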
4 changes: 2 additions & 2 deletions sync/src/parallel/sender.rs
@@ -26,7 +26,7 @@ struct DagBlockWorker {
}

pub struct DagBlockSender<'a> {
sync_dag_store: SyncDagStore,
sync_dag_store: Arc<SyncDagStore>,
executors: Vec<DagBlockWorker>,
queue_size: usize,
time_service: Arc<dyn TimeService>,
@@ -38,7 +38,7 @@ pub struct DagBlockSender<'a> {

impl<'a> DagBlockSender<'a> {
pub fn new(
sync_dag_store: SyncDagStore,
sync_dag_store: Arc<SyncDagStore>,
queue_size: usize,
time_service: Arc<dyn TimeService>,
storage: Arc<dyn Store>,
17 changes: 10 additions & 7 deletions sync/src/sync.rs
@@ -62,6 +62,7 @@ pub struct SyncService {
stage: SyncStage,
config: Arc<NodeConfig>,
storage: Arc<Storage>,
sync_dag_store: Arc<SyncDagStore>,
metrics: Option<SyncMetrics>,
peer_score_metrics: Option<PeerScoreMetrics>,
vm_metrics: Option<VMMetrics>,
@@ -83,6 +84,13 @@ impl SyncService {
let head_block_info = storage
.get_block_info(head_block_hash)?
.ok_or_else(|| format_err!("can't get block info by hash {}", head_block_hash))?;
let sync_dag_store = Arc::new(SyncDagStore::create_from_path(
config.storage.sync_dir(),
SyncDagStoreConfig::create_with_params(
config.storage.cache_size(),
RocksdbConfig::default(),
),
)?);
//TODO bail PrometheusError after use custom metrics registry.
let metrics = config
.metrics
@@ -97,6 +105,7 @@ impl SyncService {
stage: SyncStage::NotStart,
config,
storage,
sync_dag_store,
metrics,
peer_score_metrics,
vm_metrics,
@@ -226,13 +235,7 @@ impl SyncService {
let sync_metrics = self.metrics.clone();
let vm_metrics = self.vm_metrics.clone();
let dag = ctx.get_shared::<BlockDAG>()?;
let sync_dag_store = SyncDagStore::create_from_path(
config.storage.sync_dir(),
SyncDagStoreConfig::create_with_params(
config.storage.cache_size(),
RocksdbConfig::default(),
),
)?;
let sync_dag_store = self.sync_dag_store.clone();
let fut = async move {
let startup_info = storage
.get_startup_info()?
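
The sync.rs hunks move the construction site: SyncService::new now builds the store once from the node's storage settings, and the code that launches a sync round clones the shared handle instead of calling SyncDagStore::create_from_path again. A rough sketch of that shape — SyncDagStore, SyncDagStoreConfig and RocksdbConfig are simplified stand-ins for the real types, and start_sync is a hypothetical name for the launching path.

#![allow(dead_code)]

use std::path::PathBuf;
use std::sync::Arc;

#[derive(Default)]
struct RocksdbConfig;

// Simplified stand-in for the real store configuration.
struct SyncDagStoreConfig {
    cache_size: usize,
    rocksdb: RocksdbConfig,
}

// Simplified stand-in for the RocksDB-backed store.
struct SyncDagStore {
    path: PathBuf,
    config: SyncDagStoreConfig,
}

impl SyncDagStore {
    fn create_from_path(path: PathBuf, config: SyncDagStoreConfig) -> Result<Self, String> {
        Ok(Self { path, config })
    }
}

struct SyncService {
    sync_dag_store: Arc<SyncDagStore>,
}

impl SyncService {
    // Built exactly once, when the service itself is constructed.
    fn new(sync_dir: PathBuf, cache_size: usize) -> Result<Self, String> {
        let sync_dag_store = Arc::new(SyncDagStore::create_from_path(
            sync_dir,
            SyncDagStoreConfig {
                cache_size,
                rocksdb: RocksdbConfig::default(),
            },
        )?);
        Ok(Self { sync_dag_store })
    }

    // Hypothetical stand-in for the sync-start path: no create_from_path here,
    // just a clone of the already-shared store.
    fn start_sync(&self) -> Arc<SyncDagStore> {
        self.sync_dag_store.clone()
    }
}

fn main() {
    let service = SyncService::new(PathBuf::from("./sync"), 10_000).expect("create sync service");
    let for_task = service.start_sync();
    assert_eq!(Arc::strong_count(&for_task), 2);
}
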
4 changes: 2 additions & 2 deletions sync/src/tasks/block_sync_task.rs
@@ -214,7 +214,7 @@ pub struct BlockCollector<N, H> {
local_store: Arc<dyn Store>,
fetcher: Arc<dyn BlockFetcher>,
latest_block_id: HashValue,
sync_dag_store: SyncDagStore,
sync_dag_store: Arc<SyncDagStore>,
}

impl<N, H> ContinueChainOperator for BlockCollector<N, H>
@@ -264,7 +264,7 @@ where
skip_pow_verify: bool,
local_store: Arc<dyn Store>,
fetcher: Arc<dyn BlockFetcher>,
sync_dag_store: SyncDagStore,
sync_dag_store: Arc<SyncDagStore>,
) -> Self {
let latest_block_id = chain.current_header().id();
Self {
4 changes: 2 additions & 2 deletions sync/src/tasks/continue_execute_absent_block.rs
@@ -19,14 +19,14 @@ pub trait ContinueChainOperator {
pub struct ContinueExecuteAbsentBlock<'a> {
operator: &'a mut dyn ContinueChainOperator,
local_store: Arc<dyn Store>,
sync_dag_store: SyncDagStore,
sync_dag_store: Arc<SyncDagStore>,
}

impl<'a> ContinueExecuteAbsentBlock<'a> {
pub fn new(
operator: &'a mut dyn ContinueChainOperator,
local_store: Arc<dyn Store>,
sync_dag_store: SyncDagStore,
sync_dag_store: Arc<SyncDagStore>,
) -> anyhow::Result<ContinueExecuteAbsentBlock<'a>> {
anyhow::Result::Ok(ContinueExecuteAbsentBlock {
operator,
4 changes: 2 additions & 2 deletions sync/src/tasks/inner_sync_task.rs
@@ -38,7 +38,7 @@ where
peer_provider: N,
custom_error_handle: Arc<dyn CustomErrorHandle>,
dag: BlockDAG,
sync_dag_store: SyncDagStore,
sync_dag_store: Arc<SyncDagStore>,
}

impl<H, F, N> InnerSyncTask<H, F, N>
@@ -58,7 +58,7 @@ where
peer_provider: N,
custom_error_handle: Arc<dyn CustomErrorHandle>,
dag: BlockDAG,
sync_dag_store: SyncDagStore,
sync_dag_store: Arc<SyncDagStore>,
) -> Self {
Self {
ancestor,
16 changes: 9 additions & 7 deletions sync/src/tasks/mock.rs
@@ -135,7 +135,7 @@ pub struct SyncNodeMocker {
pub peer_id: PeerId,
pub chain_mocker: MockChain,
pub err_mocker: ErrorMocker,
pub sync_dag_store: SyncDagStore,
pub sync_dag_store: Arc<SyncDagStore>,
peer_selector: PeerSelector,
}

@@ -155,8 +155,10 @@ impl SyncNodeMocker {
None,
);
let peer_selector = PeerSelector::new(vec![peer_info], PeerStrategy::default(), None);
let sync_dag_store = SyncDagStore::create_for_testing()
.context("Failed to create SyncDagStore for testing")?;
let sync_dag_store = Arc::new(
SyncDagStore::create_for_testing()
.context("Failed to create SyncDagStore for testing")?,
);
Ok(Self::new_inner(
peer_id,
chain,
@@ -186,7 +188,7 @@ impl SyncNodeMocker {
None,
);
let peer_selector = PeerSelector::new(vec![peer_info], PeerStrategy::default(), None);
let sync_dag_store = SyncDagStore::create_for_testing()?;
let sync_dag_store = Arc::new(SyncDagStore::create_for_testing()?);
Ok(Self::new_inner(
peer_id,
chain,
@@ -206,7 +208,7 @@ impl SyncNodeMocker {
let peer_id = PeerId::random();
let peer_info = PeerInfo::new(peer_id.clone(), chain.chain_info(), vec![], vec![], None);
let peer_selector = PeerSelector::new(vec![peer_info], PeerStrategy::default(), None);
let sync_dag_store = SyncDagStore::create_for_testing()?;
let sync_dag_store = Arc::new(SyncDagStore::create_for_testing()?);
Ok(Self::new_inner(
peer_id,
chain,
@@ -223,7 +225,7 @@ impl SyncNodeMocker {
delay_milliseconds: u64,
random_error_percent: u32,
peer_selector: PeerSelector,
sync_dag_store: SyncDagStore,
sync_dag_store: Arc<SyncDagStore>,
) -> Self {
Self::new_inner(
peer_id,
@@ -241,7 +243,7 @@ impl SyncNodeMocker {
error_strategy: ErrorStrategy,
random_error_percent: u32,
peer_selector: PeerSelector,
sync_dag_store: SyncDagStore,
sync_dag_store: Arc<SyncDagStore>,
) -> Self {
Self {
peer_id: peer_id.clone(),
2 changes: 1 addition & 1 deletion sync/src/tasks/mod.rs
@@ -622,7 +622,7 @@ pub fn full_sync_task<H, A, F, N>(
sync_metrics: Option<SyncMetrics>,
vm_metrics: Option<VMMetrics>,
dag: BlockDAG,
sync_dag_store: SyncDagStore,
sync_dag_store: Arc<SyncDagStore>,
) -> Result<(
BoxFuture<'static, Result<BlockChain, TaskError>>,
TaskHandle,
4 changes: 2 additions & 2 deletions sync/src/tasks/tests.rs
@@ -54,7 +54,7 @@ which is no longer suitable for the dag"]
pub async fn test_failed_block() -> Result<()> {
let net = ChainNetwork::new_builtin(BuiltinNetworkID::Halley);
let (storage, chain_info, _, dag) = Genesis::init_storage_for_test(&net)?;
let sync_dag_store = SyncDagStore::create_for_testing()?;
let sync_dag_store = Arc::new(SyncDagStore::create_for_testing()?);

let chain = BlockChain::new(
net.time_service(),
@@ -917,7 +917,7 @@ async fn test_sync_target() {
300,
0,
peer_selector,
SyncDagStore::create_for_testing().expect("failed to create the sync dag store"),
Arc::new(SyncDagStore::create_for_testing().expect("failed to create the sync dag store")),
));
let full_target = node2
.get_best_target(genesis_chain_info.total_difficulty())
