Bump hashbrown dependency to 0.13
While this isn't expected to materially improve performance, it
does get us ahash 0.8, which allows us to reduce fuzzing
randomness, making our fuzzers much happier.

Sadly, by default `ahash` no longer tries to autodetect a
randomness source, so we cannot simply rely on `hashbrown` to do
randomization for us, but rather have to also explicitly depend on
`ahash`.
TheBlueMatt committed Jan 2, 2024 (1 parent b35ab51, commit 727887f)
Showing 8 changed files with 72 additions and 25 deletions.
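
Since `ahash` 0.8 no longer autodetects a randomness source on its own, the change wires `ahash::RandomState` in explicitly wherever `hashbrown` tables are built. Below is a minimal, standalone sketch of that pattern, not LDK's actual code; it assumes `hashbrown` 0.13 and `ahash` 0.8 with the `runtime-rng` feature, as in the Cargo.toml changes that follow:

use ahash::RandomState; // seeded from OS entropy when "runtime-rng" is enabled
use hashbrown::HashMap;

// Build a hashbrown map with an explicitly randomized hasher, since the
// default hashbrown hasher no longer randomizes for us.
fn new_randomized_map<K: core::hash::Hash + Eq, V>() -> HashMap<K, V, RandomState> {
	HashMap::with_hasher(RandomState::new())
}

fn main() {
	let mut map = new_randomized_map::<u32, &'static str>();
	map.insert(1, "one");
	assert_eq!(map.get(&1), Some(&"one"));
}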

fuzz/Cargo.toml: 1 addition & 1 deletion
@@ -22,7 +22,7 @@ lightning = { path = "../lightning", features = ["regex", "hashbrown", "_test_ut
 lightning-rapid-gossip-sync = { path = "../lightning-rapid-gossip-sync" }
 bitcoin = { version = "0.30.2", features = ["secp-lowmemory"] }
 hex = { package = "hex-conservative", version = "0.1.1", default-features = false }
-hashbrown = "0.8"
+hashbrown = "0.13"
 
 afl = { version = "0.12", optional = true }
 honggfuzz = { version = "0.5", optional = true, default-features = false }

lightning-invoice/Cargo.toml: 1 addition & 1 deletion
@@ -24,7 +24,7 @@ bech32 = { version = "0.9.0", default-features = false }
 lightning = { version = "0.0.119", path = "../lightning", default-features = false }
 secp256k1 = { version = "0.27.0", default-features = false, features = ["recovery", "alloc"] }
 num-traits = { version = "0.2.8", default-features = false }
-hashbrown = { version = "0.8", optional = true }
+hashbrown = { version = "0.13", optional = true }
 serde = { version = "1.0.118", optional = true }
 bitcoin = { version = "0.30.2", default-features = false }
 

lightning/Cargo.toml: 3 additions & 2 deletions
@@ -31,7 +31,7 @@ unsafe_revoked_tx_signing = []
 # Override signing to not include randomness when generating signatures for test vectors.
 _test_vectors = []
 
-no-std = ["hashbrown", "bitcoin/no-std", "core2/alloc", "libm"]
+no-std = ["hashbrown", "ahash", "bitcoin/no-std", "core2/alloc", "libm"]
 std = ["bitcoin/std"]
 
 # Generates low-r bitcoin signatures, which saves 1 byte in 50% of the cases
@@ -42,7 +42,8 @@ default = ["std", "grind_signatures"]
 [dependencies]
 bitcoin = { version = "0.30.2", default-features = false, features = ["secp-recovery"] }
 
-hashbrown = { version = "0.8", optional = true }
+hashbrown = { version = "0.13", optional = true }
+ahash = { version = "0.8", optional = true, default-features = false, features = ["runtime-rng"] }
 hex = { package = "hex-conservative", version = "0.1.1", default-features = false }
 regex = { version = "1.5.6", optional = true }
 backtrace = { version = "0.3", optional = true }

lightning/src/chain/onchaintx.rs: 1 addition & 1 deletion
@@ -686,7 +686,7 @@ impl<ChannelSigner: WriteableEcdsaChannelSigner> OnchainTxHandler<ChannelSigner>
 		if let Some(claim_id) = claim_id {
 			if let Some(claim) = self.pending_claim_requests.remove(&claim_id) {
 				for outpoint in claim.outpoints() {
-					self.claimable_outpoints.remove(&outpoint);
+					self.claimable_outpoints.remove(outpoint);
 				}
 			}
 		} else {
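
Most of the remaining hunks in this commit are the same mechanical tweak as the one above: lookups that used to pass `&x` now pass `x` directly, because the value at hand is already a reference. A minimal std-only illustration of the surviving form (hypothetical key/value types; the remark about hashbrown 0.13 is an inference from this diff, since its lookups are bounded by an `Equivalent` trait rather than std's `Borrow`):

use std::collections::HashMap;

fn main() {
	let mut claims: HashMap<u32, &'static str> = HashMap::new();
	claims.insert(42, "claim");

	// Iterating a collection typically yields references, so the key is
	// already a `&u32` and can be passed straight through.
	let outpoint: &u32 = &42;
	assert_eq!(claims.remove(outpoint), Some("claim"));
	// Writing `claims.remove(&outpoint)` would pass a `&&u32`; hashbrown
	// 0.13's stricter `Equivalent`-based bounds reject that extra layer of
	// indirection, which is presumably why these call sites drop the `&`.
}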

lightning/src/lib.rs: 58 additions & 12 deletions
@@ -165,9 +165,19 @@ mod io_extras {
 mod prelude {
 	#[cfg(feature = "hashbrown")]
 	extern crate hashbrown;
+	#[cfg(feature = "ahash")]
+	extern crate ahash;
 
 	pub use alloc::{vec, vec::Vec, string::String, collections::VecDeque, boxed::Box};
 
+	pub use alloc::borrow::ToOwned;
+	pub use alloc::string::ToString;
+
+	// For no-std builds, we need to use hashbrown, however, by default, it doesn't randomize the
+	// hashing and is vulnerable to HashDoS attacks. Thus, when not fuzzing, we use its default
+	// ahash hashing algorithm but randomize, opting to not randomize when fuzzing to avoid false
+	// positive branch coverage.
+
 	#[cfg(not(feature = "hashbrown"))]
 	mod std_hashtables {
 		pub(crate) use std::collections::{HashMap, HashSet, hash_map};
@@ -181,29 +191,65 @@ mod prelude {
 	pub(crate) use std_hashtables::*;
 
 	#[cfg(feature = "hashbrown")]
-	mod hashbrown_tables {
-		pub(crate) use hashbrown::{HashMap, HashSet, hash_map};
+	pub(crate) use self::hashbrown::hash_map;
+
+	#[cfg(all(feature = "hashbrown", fuzzing))]
+	mod nonrandomized_hashbrown {
+		pub(crate) use self::hashbrown::{HashMap, HashSet};
 
 		pub(crate) type OccupiedHashMapEntry<'a, K, V> =
 			hashbrown::hash_map::OccupiedEntry<'a, K, V, hash_map::DefaultHashBuilder>;
 		pub(crate) type VacantHashMapEntry<'a, K, V> =
 			hashbrown::hash_map::VacantEntry<'a, K, V, hash_map::DefaultHashBuilder>;
 	}
-	#[cfg(feature = "hashbrown")]
-	pub(crate) use hashbrown_tables::*;
+	#[cfg(all(feature = "hashbrown", fuzzing))]
+	pub(crate) use nonrandomized_hashbrown::*;
+
 
+	#[cfg(all(feature = "hashbrown", not(fuzzing)))]
+	mod randomized_hashtables {
+		use super::*;
+		use ahash::RandomState;
+
+		pub(crate) type HashMap<K, V> = hashbrown::HashMap<K, V, RandomState>;
+		pub(crate) type HashSet<K> = hashbrown::HashSet<K, RandomState>;
+
+		pub(crate) type OccupiedHashMapEntry<'a, K, V> =
+			hashbrown::hash_map::OccupiedEntry<'a, K, V, RandomState>;
+		pub(crate) type VacantHashMapEntry<'a, K, V> =
+			hashbrown::hash_map::VacantEntry<'a, K, V, RandomState>;
+
-	pub(crate) fn new_hash_map<K: core::hash::Hash + Eq, V>() -> HashMap<K, V> { HashMap::new() }
-	pub(crate) fn hash_map_with_capacity<K: core::hash::Hash + Eq, V>(cap: usize) -> HashMap<K, V> {
-		HashMap::with_capacity(cap)
+		pub(crate) fn new_hash_map<K, V>() -> HashMap<K, V> {
+			HashMap::with_hasher(RandomState::new())
+		}
+		pub(crate) fn hash_map_with_capacity<K, V>(cap: usize) -> HashMap<K, V> {
+			HashMap::with_capacity_and_hasher(cap, RandomState::new())
+		}
+
+		pub(crate) fn new_hash_set<K>() -> HashSet<K> {
+			HashSet::with_hasher(RandomState::new())
+		}
+		pub(crate) fn hash_set_with_capacity<K>(cap: usize) -> HashSet<K> {
+			HashSet::with_capacity_and_hasher(cap, RandomState::new())
+		}
 	}
 
-	pub(crate) fn new_hash_set<K: core::hash::Hash + Eq>() -> HashSet<K> { HashSet::new() }
-	pub(crate) fn hash_set_with_capacity<K: core::hash::Hash + Eq>(cap: usize) -> HashSet<K> {
-		HashSet::with_capacity(cap)
+	#[cfg(any(not(feature = "hashbrown"), fuzzing))]
+	mod randomized_hashtables {
+		use super::*;
+
+		pub(crate) fn new_hash_map<K, V>() -> HashMap<K, V> { HashMap::new() }
+		pub(crate) fn hash_map_with_capacity<K, V>(cap: usize) -> HashMap<K, V> {
+			HashMap::with_capacity(cap)
+		}
+
+		pub(crate) fn new_hash_set<K>() -> HashSet<K> { HashSet::new() }
+		pub(crate) fn hash_set_with_capacity<K>(cap: usize) -> HashSet<K> {
+			HashSet::with_capacity(cap)
+		}
 	}
 
-	pub use alloc::borrow::ToOwned;
-	pub use alloc::string::ToString;
+	pub(crate) use randomized_hashtables::*;
 }
 
 #[cfg(all(not(ldk_bench), feature = "backtrace", feature = "std", test))]
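
With randomization concentrated in the `randomized_hashtables` modules, call sites construct tables through the prelude helpers rather than naming a hasher. A hypothetical, self-contained illustration of why the indirection pays off (the helper bodies here mirror the std branch above; LDK's real helpers live in the prelude and are cfg-switched):

use std::collections::HashMap;

// Stand-ins for the prelude helpers: callers never mention the hasher, so
// std/hashbrown/ahash can be swapped behind cfg flags in one place.
fn new_hash_map<K, V>() -> HashMap<K, V> { HashMap::new() }
fn hash_map_with_capacity<K, V>(cap: usize) -> HashMap<K, V> {
	HashMap::with_capacity(cap)
}

fn main() {
	let mut peers: HashMap<u64, &'static str> = new_hash_map();
	peers.insert(1, "node_a");
	let scratch: HashMap<u64, u64> = hash_map_with_capacity(128);
	assert!(scratch.is_empty() && peers.len() == 1);
}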

lightning/src/ln/channelmanager.rs: 2 additions & 2 deletions
@@ -5914,7 +5914,7 @@ where
 				// TODO: Once we can rely on the counterparty_node_id from the
 				// monitor event, this and the outpoint_to_peer map should be removed.
 				let outpoint_to_peer = self.outpoint_to_peer.lock().unwrap();
-				match outpoint_to_peer.get(&funding_txo) {
+				match outpoint_to_peer.get(funding_txo) {
 					Some(cp_id) => cp_id.clone(),
 					None => return,
 				}
@@ -10988,7 +10988,7 @@ where
 							downstream_counterparty_and_funding_outpoint:
 								Some((blocked_node_id, blocked_channel_outpoint, blocking_action)), ..
 						} = action {
-							if let Some(blocked_peer_state) = per_peer_state.get(&blocked_node_id) {
+							if let Some(blocked_peer_state) = per_peer_state.get(blocked_node_id) {
 								log_trace!(logger,
 									"Holding the next revoke_and_ack from {} until the preimage is durably persisted in the inbound edge's ChannelMonitor",
 									blocked_channel_outpoint.to_channel_id());

lightning/src/routing/router.rs: 4 additions & 4 deletions
@@ -2512,9 +2512,9 @@ where L::Target: Logger {
 			let mut aggregate_path_contribution_msat = path_value_msat;
 
 			for (idx, (hop, prev_hop_id)) in hop_iter.zip(prev_hop_iter).enumerate() {
-				let target = private_hop_key_cache.get(&prev_hop_id).unwrap();
+				let target = private_hop_key_cache.get(prev_hop_id).unwrap();
 
-				if let Some(first_channels) = first_hop_targets.get(&target) {
+				if let Some(first_channels) = first_hop_targets.get(target) {
 					if first_channels.iter().any(|d| d.outbound_scid_alias == Some(hop.short_channel_id)) {
 						log_trace!(logger, "Ignoring route hint with SCID {} (and any previous) due to it being a direct channel of ours.",
 							hop.short_channel_id);
@@ -2524,7 +2524,7 @@
 
 				let candidate = network_channels
 					.get(&hop.short_channel_id)
-					.and_then(|channel| channel.as_directed_to(&target))
+					.and_then(|channel| channel.as_directed_to(target))
 					.map(|(info, _)| CandidateRouteHop::PublicHop {
 						info,
 						short_channel_id: hop.short_channel_id,
@@ -2565,7 +2565,7 @@
 					.saturating_add(1);
 
 				// Searching for a direct channel between last checked hop and first_hop_targets
-				if let Some(first_channels) = first_hop_targets.get_mut(&target) {
+				if let Some(first_channels) = first_hop_targets.get_mut(target) {
 					sort_first_hop_channels(first_channels, &used_liquidities,
 						recommended_value_msat, our_node_pubkey);
 					for details in first_channels {

lightning/src/routing/scoring.rs: 2 additions & 2 deletions
@@ -1326,7 +1326,7 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref> ScoreLookUp for Probabilistic
 			_ => return 0,
 		};
 		let source = candidate.source();
-		if let Some(penalty) = score_params.manual_node_penalties.get(&target) {
+		if let Some(penalty) = score_params.manual_node_penalties.get(target) {
 			return *penalty;
 		}
 
@@ -1356,7 +1356,7 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref> ScoreLookUp for Probabilistic
 		let amount_msat = usage.amount_msat.saturating_add(usage.inflight_htlc_msat);
 		let capacity_msat = usage.effective_capacity.as_msat();
 		self.channel_liquidities
-			.get(&scid)
+			.get(scid)
 			.unwrap_or(&ChannelLiquidity::new(Duration::ZERO))
 			.as_directed(&source, &target, capacity_msat)
 			.penalty_msat(amount_msat, score_params)
