Commit

A0-4215: Make extender synchronous (#436)
* Make extender synchronous

* Compute order extension last

Probably doesn't actually impact sending delays, but it also doesn't hurt.
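
As a rough illustration of the shape this gives the extender: the caller drives a plain struct directly and handles whatever batches each call finalizes, instead of feeding a separate async task over channels. The minimal sketch below uses made-up names and a trivial stand-in batching rule; it is not the crate's actual extension API.

// Toy synchronous extender: units are pushed in and any newly ordered batches
// come straight back to the caller; no task, channel, or awaiting involved.
// Illustrative only; the real ordering logic lives in the extension module.
struct Extender<U> {
    pending: Vec<U>,
}

impl<U> Extender<U> {
    fn new() -> Self {
        Extender { pending: Vec::new() }
    }

    /// Add one unit and return every batch whose order became decided by it
    /// (here a stand-in rule: emit a batch for every three units received).
    fn add_unit(&mut self, unit: U) -> Vec<Vec<U>> {
        self.pending.push(unit);
        if self.pending.len() == 3 {
            vec![std::mem::take(&mut self.pending)]
        } else {
            Vec::new()
        }
    }
}

fn main() {
    let mut extender = Extender::new();
    for unit in 0..7u32 {
        for batch in extender.add_unit(unit) {
            println!("ordered batch: {batch:?}");
        }
    }
}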
timorleph authored Apr 15, 2024
1 parent 3a2bafc commit eebbf2b
Showing 13 changed files with 344 additions and 547 deletions.
2 changes: 1 addition & 1 deletion Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion consensus/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "aleph-bft"
-version = "0.36.4"
+version = "0.36.5"
 edition = "2021"
 authors = ["Cardinal Cryptography"]
 categories = ["algorithms", "data-structures", "cryptography", "database"]
28 changes: 11 additions & 17 deletions consensus/src/dag/reconstruction/dag.rs
@@ -1,20 +1,14 @@
-use crate::{
-    dag::reconstruction::ReconstructedUnit,
-    units::{HashFor, Unit},
-};
+use crate::units::{HashFor, UnitWithParents};
 use std::collections::{HashMap, HashSet, VecDeque};
 
-struct OrphanedUnit<U: Unit> {
-    unit: ReconstructedUnit<U>,
+struct OrphanedUnit<U: UnitWithParents> {
+    unit: U,
     missing_parents: HashSet<HashFor<U>>,
 }
 
-impl<U: Unit> OrphanedUnit<U> {
+impl<U: UnitWithParents> OrphanedUnit<U> {
     /// If there are no missing parents then returns just the internal unit.
-    pub fn new(
-        unit: ReconstructedUnit<U>,
-        missing_parents: HashSet<HashFor<U>>,
-    ) -> Result<Self, ReconstructedUnit<U>> {
+    pub fn new(unit: U, missing_parents: HashSet<HashFor<U>>) -> Result<Self, U> {
         match missing_parents.is_empty() {
             true => Err(unit),
             false => Ok(OrphanedUnit {
@@ -24,8 +18,8 @@ impl<U: Unit> OrphanedUnit<U> {
         }
     }
 
-    /// If this was the last missing parent return the reconstructed unit.
-    pub fn resolve_parent(self, parent: HashFor<U>) -> Result<ReconstructedUnit<U>, Self> {
+    /// If this was the last missing parent return the unit.
+    pub fn resolve_parent(self, parent: HashFor<U>) -> Result<U, Self> {
         let OrphanedUnit {
             unit,
             mut missing_parents,
@@ -53,13 +47,13 @@ impl<U: Unit> OrphanedUnit<U> {
 
 /// A structure ensuring that units added to it are output in an order
 /// in agreement with the DAG order.
-pub struct Dag<U: Unit> {
+pub struct Dag<U: UnitWithParents> {
     orphaned_units: HashMap<HashFor<U>, OrphanedUnit<U>>,
     waiting_for: HashMap<HashFor<U>, Vec<HashFor<U>>>,
     dag_units: HashSet<HashFor<U>>,
 }
 
-impl<U: Unit> Dag<U> {
+impl<U: UnitWithParents> Dag<U> {
     /// Create a new empty DAG.
     pub fn new() -> Self {
         Dag {
@@ -69,7 +63,7 @@ impl<U: Unit> Dag<U> {
         }
     }
 
-    fn move_to_dag(&mut self, unit: ReconstructedUnit<U>) -> Vec<ReconstructedUnit<U>> {
+    fn move_to_dag(&mut self, unit: U) -> Vec<U> {
         let mut result = Vec::new();
         let mut ready_units = VecDeque::from([unit]);
         while let Some(unit) = ready_units.pop_front() {
@@ -95,7 +89,7 @@ impl<U: Unit> Dag<U> {
 
     /// Add a unit to the Dag. Returns all the units that now have all their parents in the Dag,
     /// in an order agreeing with the Dag structure.
-    pub fn add_unit(&mut self, unit: ReconstructedUnit<U>) -> Vec<ReconstructedUnit<U>> {
+    pub fn add_unit(&mut self, unit: U) -> Vec<U> {
         if self.dag_units.contains(&unit.hash()) {
             // Deduplicate.
             return Vec::new();
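To make the buffering pattern above easier to follow in isolation, here is a self-contained toy sketch of the same idea, generic over any unit type that can report its parents: units whose parents are not all known yet are parked and released once the last missing parent arrives. The trait, the u64 hashes, and all names below are simplified stand-ins for the crate's UnitWithParents and HashFor, not its real definitions.

use std::collections::{HashMap, HashSet, VecDeque};

// Stand-in for the crate's UnitWithParents trait: a unit knows its own hash
// and the hashes of its parents.
trait UnitWithParents {
    fn hash(&self) -> u64;
    fn parents(&self) -> &[u64];
}

// Units with missing parents are parked in `orphaned` and released once the
// last missing parent arrives, so output order always agrees with the DAG.
struct Dag<U: UnitWithParents> {
    orphaned: HashMap<u64, (U, HashSet<u64>)>,
    waiting_for: HashMap<u64, Vec<u64>>,
    in_dag: HashSet<u64>,
}

impl<U: UnitWithParents> Dag<U> {
    fn new() -> Self {
        Dag {
            orphaned: HashMap::new(),
            waiting_for: HashMap::new(),
            in_dag: HashSet::new(),
        }
    }

    /// Add a unit; return all units that now have all their parents in the
    /// Dag, parents always before children.
    fn add_unit(&mut self, unit: U) -> Vec<U> {
        if self.in_dag.contains(&unit.hash()) {
            return Vec::new(); // deduplicate
        }
        let missing: HashSet<u64> = unit
            .parents()
            .iter()
            .copied()
            .filter(|parent| !self.in_dag.contains(parent))
            .collect();
        if !missing.is_empty() {
            // Park the unit until all of its missing parents have arrived.
            for parent in &missing {
                self.waiting_for.entry(*parent).or_default().push(unit.hash());
            }
            self.orphaned.insert(unit.hash(), (unit, missing));
            return Vec::new();
        }
        // The unit is ready; adding it may in turn release parked units.
        let mut result = Vec::new();
        let mut ready = VecDeque::from([unit]);
        while let Some(unit) = ready.pop_front() {
            let hash = unit.hash();
            self.in_dag.insert(hash);
            for child in self.waiting_for.remove(&hash).unwrap_or_default() {
                if let Some((child_unit, mut still_missing)) = self.orphaned.remove(&child) {
                    still_missing.remove(&hash);
                    if still_missing.is_empty() {
                        ready.push_back(child_unit);
                    } else {
                        self.orphaned.insert(child, (child_unit, still_missing));
                    }
                }
            }
            result.push(unit);
        }
        result
    }
}

struct TestUnit {
    hash: u64,
    parents: Vec<u64>,
}

impl UnitWithParents for TestUnit {
    fn hash(&self) -> u64 {
        self.hash
    }

    fn parents(&self) -> &[u64] {
        &self.parents
    }
}

fn main() {
    let mut dag = Dag::new();
    // A child arriving before its parent is held back...
    assert!(dag.add_unit(TestUnit { hash: 2, parents: vec![1] }).is_empty());
    // ...and released right after its parent once the parent shows up.
    let released = dag.add_unit(TestUnit { hash: 1, parents: vec![] });
    let order: Vec<u64> = released.iter().map(|unit| unit.hash()).collect();
    assert_eq!(order, vec![1, 2]);
    println!("release order: {order:?}");
}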
34 changes: 14 additions & 20 deletions consensus/src/dag/reconstruction/mod.rs
@@ -1,9 +1,8 @@
 use std::collections::HashMap;
 
 use crate::{
-    extension::ExtenderUnit,
-    units::{ControlHash, HashFor, Unit, UnitCoord, WrappedUnit},
-    Hasher, NodeMap,
+    units::{ControlHash, HashFor, Unit, UnitCoord, UnitWithParents, WrappedUnit},
+    Hasher, NodeMap, SessionId,
 };
 
 mod dag;
@@ -41,21 +40,6 @@ impl<U: Unit> ReconstructedUnit<U> {
             parents: NodeMap::with_size(n_members),
         }
     }
-
-    /// The reconstructed parents, guaranteed to be correct.
-    pub fn parents(&self) -> &NodeMap<HashFor<U>> {
-        &self.parents
-    }
-
-    /// Create an extender unit from this one.
-    pub fn extender_unit(&self) -> ExtenderUnit<U::Hasher> {
-        ExtenderUnit::new(
-            self.unit.creator(),
-            self.unit.round(),
-            self.hash(),
-            self.parents.clone(),
-        )
-    }
 }
 
 impl<U: Unit> Unit for ReconstructedUnit<U> {
@@ -72,6 +56,10 @@ impl<U: Unit> Unit for ReconstructedUnit<U> {
     fn control_hash(&self) -> &ControlHash<Self::Hasher> {
         self.unit.control_hash()
     }
+
+    fn session_id(&self) -> SessionId {
+        self.unit.session_id()
+    }
 }
 
 impl<U: Unit> WrappedUnit<U::Hasher> for ReconstructedUnit<U> {
@@ -82,6 +70,12 @@ impl<U: Unit> WrappedUnit<U::Hasher> for ReconstructedUnit<U> {
     }
 }
 
+impl<U: Unit> UnitWithParents for ReconstructedUnit<U> {
+    fn parents(&self) -> &NodeMap<HashFor<Self>> {
+        &self.parents
+    }
+}
+
 /// What we need to request to reconstruct units.
 #[derive(Debug, PartialEq, Eq)]
 pub enum Request<H: Hasher> {
@@ -148,7 +142,7 @@ impl<U: Unit> ReconstructionResult<U> {
 /// it eventually outputs versions with explicit parents in an order conforming to the Dag order.
 pub struct Reconstruction<U: Unit> {
     parents: ParentReconstruction<U>,
-    dag: Dag<U>,
+    dag: Dag<ReconstructedUnit<U>>,
 }
 
 impl<U: Unit> Reconstruction<U> {
@@ -194,7 +188,7 @@ mod test {
 
     use crate::{
         dag::reconstruction::{ReconstructedUnit, Reconstruction, ReconstructionResult, Request},
-        units::{random_full_parent_units_up_to, Unit, UnitCoord},
+        units::{random_full_parent_units_up_to, Unit, UnitCoord, UnitWithParents},
        NodeCount, NodeIndex,
     };
 
2 changes: 1 addition & 1 deletion consensus/src/dag/reconstruction/parents.rs
@@ -221,7 +221,7 @@ mod test {
         dag::reconstruction::{
             parents::Reconstruction, ReconstructedUnit, ReconstructionResult, Request,
         },
-        units::{random_full_parent_units_up_to, Unit, UnitCoord},
+        units::{random_full_parent_units_up_to, Unit, UnitCoord, UnitWithParents},
         NodeCount, NodeIndex,
     };
 