Merge pull request #2848 from TheBlueMatt/2024-01-121-bindings
0.0.121 bindings changes
TheBlueMatt authored Jan 25, 2024
2 parents 4bab9c8 + e01edf6 commit c57cd65
Showing 33 changed files with 284 additions and 244 deletions.
3 changes: 2 additions & 1 deletion fuzz/src/chanmon_consistency.rs
@@ -41,7 +41,8 @@ use lightning::sign::{KeyMaterial, InMemorySigner, Recipient, EntropySource, Nod
use lightning::events;
use lightning::events::MessageSendEventsProvider;
use lightning::ln::{PaymentHash, PaymentPreimage, PaymentSecret};
use lightning::ln::channelmanager::{ChainParameters, ChannelDetails, ChannelManager, PaymentSendFailure, ChannelManagerReadArgs, PaymentId, RecipientOnionFields};
use lightning::ln::channelmanager::{ChainParameters, ChannelDetails, ChannelManager, ChannelManagerReadArgs, PaymentId};
use lightning::ln::outbound_payment::{RecipientOnionFields, PaymentSendFailure};
use lightning::ln::channel::FEE_SPIKE_BUFFER_FEE_INCREASE_MULTIPLE;
use lightning::ln::msgs::{self, CommitmentUpdate, ChannelMessageHandler, DecodeError, UpdateAddHTLC, Init};
use lightning::ln::script::ShutdownScript;
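The only substantive change here is where the types come from: `RecipientOnionFields` and `PaymentSendFailure` are imported from `lightning::ln::outbound_payment` rather than via `channelmanager` (the same relocation appears in `full_stack.rs`, `lightning-invoice/src/payment.rs` and `lightning-invoice/src/utils.rs` below). A hedged sketch of a call site after the move, with `channel_manager`, `payment_hash`, `payment_secret` and `route_params` assumed to already exist in scope:

    use lightning::ln::channelmanager::PaymentId;
    use lightning::ln::outbound_payment::{RecipientOnionFields, Retry};

    // Only the import paths change; the send_payment call itself is untouched.
    let recipient_onion = RecipientOnionFields::secret_only(payment_secret);
    channel_manager.send_payment(
        payment_hash, recipient_onion, PaymentId(payment_hash.0), route_params,
        Retry::Attempts(3),
    ).expect("failed to queue payment");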
3 changes: 2 additions & 1 deletion fuzz/src/full_stack.rs
@@ -38,7 +38,8 @@ use lightning::chain::transaction::OutPoint
use lightning::sign::{InMemorySigner, Recipient, KeyMaterial, EntropySource, NodeSigner, SignerProvider};
use lightning::events::Event;
use lightning::ln::{ChannelId, PaymentHash, PaymentPreimage, PaymentSecret};
use lightning::ln::channelmanager::{ChainParameters, ChannelDetails, ChannelManager, PaymentId, RecipientOnionFields, Retry};
use lightning::ln::channelmanager::{ChainParameters, ChannelDetails, ChannelManager, PaymentId};
use lightning::ln::outbound_payment::{RecipientOnionFields, Retry};
use lightning::ln::peer_handler::{MessageHandler,PeerManager,SocketDescriptor,IgnoringMessageHandler};
use lightning::ln::msgs::{self, DecodeError};
use lightning::ln::script::ShutdownScript;
45 changes: 19 additions & 26 deletions lightning-background-processor/src/lib.rs
@@ -694,7 +694,10 @@ where
persister, chain_monitor,
chain_monitor.process_pending_events_async(async_event_handler).await,
channel_manager, channel_manager.process_pending_events_async(async_event_handler).await,
peer_manager, process_onion_message_handler_events_async(&peer_manager, async_event_handler).await,
peer_manager,
for event in onion_message_handler_events(peer_manager) {
handler(event).await
},
gossip_sync, logger, scorer, should_break, {
let fut = Selector {
a: channel_manager.get_event_or_persistence_needed_future(),
@@ -719,23 +722,11 @@
)
}

#[cfg(feature = "futures")]
async fn process_onion_message_handler_events_async<
EventHandlerFuture: core::future::Future<Output = ()>,
EventHandler: Fn(Event) -> EventHandlerFuture,
PM: 'static + Deref + Send + Sync,
>(
peer_manager: &PM, handler: EventHandler
)
where
PM::Target: APeerManager + Send + Sync,
{
let events = core::cell::RefCell::new(Vec::new());
peer_manager.onion_message_handler().process_pending_events(&|e| events.borrow_mut().push(e));

for event in events.into_inner() {
handler(event).await
}
fn onion_message_handler_events<PM: 'static + Deref + Send + Sync>(
peer_manager: &PM
) -> impl Iterator<Item=Event> where PM::Target: APeerManager + Send + Sync {
peer_manager.onion_message_handler().get_and_clear_connections_needed()
.into_iter().map(|(node_id, addresses)| Event::ConnectionNeeded { node_id, addresses })
}

#[cfg(feature = "std")]
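Both the async loop above and the sync `BackgroundProcessor` below now drain the onion message handler through this iterator: `get_and_clear_connections_needed` yields `(node_id, addresses)` pairs, which are surfaced as `Event::ConnectionNeeded` for the user's event handler. A minimal sketch of what a downstream handler does with the one event kind produced here (the connect logic is a placeholder, not part of this commit):

    use lightning::events::Event;

    fn handle_background_event(event: Event) {
        if let Event::ConnectionNeeded { node_id, addresses } = event {
            // The onion messenger has messages queued for `node_id` but no open
            // connection; try the advertised addresses until one succeeds.
            println!("need a connection to {} at {:?}", node_id, addresses);
        }
    }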
@@ -851,7 +842,9 @@ impl BackgroundProcessor {
persister, chain_monitor, chain_monitor.process_pending_events(&event_handler),
channel_manager, channel_manager.process_pending_events(&event_handler),
peer_manager,
peer_manager.onion_message_handler().process_pending_events(&event_handler),
for event in onion_message_handler_events(&peer_manager) {
event_handler.handle_event(event);
},
gossip_sync, logger, scorer, stop_thread.load(Ordering::Acquire),
{ Sleeper::from_two_futures(
channel_manager.get_event_or_persistence_needed_future(),
@@ -984,9 +977,8 @@ mod tests {
Arc<DefaultRouter<
Arc<NetworkGraph<Arc<test_utils::TestLogger>>>,
Arc<test_utils::TestLogger>,
Arc<LockingWrapper<TestScorer>>,
(),
TestScorer>
Arc<KeysManager>,
Arc<LockingWrapper<TestScorer>>>
>,
Arc<test_utils::TestLogger>>;

@@ -1143,9 +1135,10 @@
}

impl ScoreLookUp for TestScorer {
#[cfg(not(c_bindings))]
type ScoreParams = ();
fn channel_penalty_msat(
&self, _candidate: &CandidateRouteHop, _usage: ChannelUsage, _score_params: &Self::ScoreParams
&self, _candidate: &CandidateRouteHop, _usage: ChannelUsage, _score_params: &lightning::routing::scoring::ProbabilisticScoringFeeParameters
) -> u64 { unimplemented!(); }
}

@@ -1263,12 +1256,12 @@
let genesis_block = genesis_block(network);
let network_graph = Arc::new(NetworkGraph::new(network, logger.clone()));
let scorer = Arc::new(LockingWrapper::new(TestScorer::new()));
let now = Duration::from_secs(genesis_block.header.time as u64);
let seed = [i as u8; 32];
let router = Arc::new(DefaultRouter::new(network_graph.clone(), logger.clone(), seed, scorer.clone(), Default::default()));
let keys_manager = Arc::new(KeysManager::new(&seed, now.as_secs(), now.subsec_nanos()));
let router = Arc::new(DefaultRouter::new(network_graph.clone(), logger.clone(), Arc::clone(&keys_manager), scorer.clone(), Default::default()));
let chain_source = Arc::new(test_utils::TestChainSource::new(Network::Bitcoin));
let kv_store = Arc::new(FilesystemStore::new(format!("{}_persister_{}", &persist_dir, i).into()));
let now = Duration::from_secs(genesis_block.header.time as u64);
let keys_manager = Arc::new(KeysManager::new(&seed, now.as_secs(), now.subsec_nanos()));
let chain_monitor = Arc::new(chainmonitor::ChainMonitor::new(Some(chain_source.clone()), tx_broadcaster.clone(), logger.clone(), fee_estimator.clone(), kv_store.clone()));
let best_block = BestBlock::from_network(network);
let params = ChainParameters { network, best_block };
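In this tree `DefaultRouter::new` takes the entropy source (here the `Arc<KeysManager>`) where it previously took a raw 32-byte seed, which is why the keys-manager construction moves above the router in the hunk above. A hedged construction sketch, assuming `seed`, `now`, `network_graph`, `logger` and `scorer` are set up as in the test:

    // The keys manager now has to exist before the router, since the router
    // takes it as its entropy source.
    let keys_manager = Arc::new(KeysManager::new(&seed, now.as_secs(), now.subsec_nanos()));
    let router = Arc::new(DefaultRouter::new(
        network_graph.clone(),
        logger.clone(),
        Arc::clone(&keys_manager), // previously a raw [u8; 32] seed
        scorer.clone(),
        Default::default(),        // default scoring parameters
    ));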
5 changes: 3 additions & 2 deletions lightning-invoice/src/payment.rs
@@ -13,7 +13,7 @@ use crate::Bolt11Invoice
use bitcoin::hashes::Hash;

use lightning::ln::PaymentHash;
use lightning::ln::channelmanager::RecipientOnionFields;
use lightning::ln::outbound_payment::RecipientOnionFields;
use lightning::routing::router::{PaymentParameters, RouteParameters};

/// Builds the necessary parameters to pay or pre-flight probe the given zero-amount
@@ -170,7 +170,8 @@ mod tests {
#[cfg(feature = "std")]
fn payment_metadata_end_to_end() {
use lightning::events::Event;
use lightning::ln::channelmanager::{Retry, PaymentId};
use lightning::ln::channelmanager::PaymentId;
use lightning::ln::outbound_payment::Retry;
use lightning::ln::msgs::ChannelMessageHandler;
use lightning::ln::functional_test_utils::*;
// Test that a payment metadata read from an invoice passed to `pay_invoice` makes it all
3 changes: 2 additions & 1 deletion lightning-invoice/src/utils.rs
@@ -825,7 +825,8 @@ mod test {
use lightning::ln::PaymentHash;
#[cfg(feature = "std")]
use lightning::ln::PaymentPreimage;
use lightning::ln::channelmanager::{PhantomRouteHints, MIN_FINAL_CLTV_EXPIRY_DELTA, PaymentId, RecipientOnionFields, Retry};
use lightning::ln::channelmanager::{PhantomRouteHints, MIN_FINAL_CLTV_EXPIRY_DELTA, PaymentId};
use lightning::ln::outbound_payment::{RecipientOnionFields, Retry};
use lightning::ln::functional_test_utils::*;
use lightning::ln::msgs::ChannelMessageHandler;
use lightning::routing::router::{PaymentParameters, RouteParameters};
40 changes: 0 additions & 40 deletions lightning-rapid-gossip-sync/src/error.rs

This file was deleted.

39 changes: 32 additions & 7 deletions lightning-rapid-gossip-sync/src/lib.rs
@@ -74,17 +74,42 @@ use core::ops::Deref;
use core::sync::atomic::{AtomicBool, Ordering};

use lightning::io;
use lightning::ln::msgs::{DecodeError, LightningError};
use lightning::routing::gossip::NetworkGraph;
use lightning::util::logger::Logger;

pub use crate::error::GraphSyncError;

/// Error types that these functions can return
mod error;

/// Core functionality of this crate
mod processing;

/// All-encompassing standard error type that processing can return
#[derive(Debug)]
pub enum GraphSyncError {
/// Error trying to read the update data, typically due to an erroneous data length indication
/// that is greater than the actual amount of data provided
DecodeError(DecodeError),
/// Error applying the patch to the network graph, usually the result of updates that are too
/// old or missing prerequisite data to the application of updates out of order
LightningError(LightningError),
}

impl From<lightning::io::Error> for GraphSyncError {
fn from(error: lightning::io::Error) -> Self {
Self::DecodeError(DecodeError::Io(error.kind()))
}
}

impl From<DecodeError> for GraphSyncError {
fn from(error: DecodeError) -> Self {
Self::DecodeError(error)
}
}

impl From<LightningError> for GraphSyncError {
fn from(error: LightningError) -> Self {
Self::LightningError(error)
}
}

/// The main Rapid Gossip Sync object.
///
/// See [crate-level documentation] for usage.
@@ -167,7 +192,7 @@ mod tests {
use lightning::ln::msgs::DecodeError;
use lightning::routing::gossip::NetworkGraph;
use lightning::util::test_utils::TestLogger;
use crate::RapidGossipSync;
use crate::{GraphSyncError, RapidGossipSync};

#[test]
fn test_sync_from_file() {
@@ -265,7 +290,7 @@ mod tests {
let start = std::time::Instant::now();
let sync_result = rapid_sync
.sync_network_graph_with_file_path("./res/full_graph.lngossip");
if let Err(crate::error::GraphSyncError::DecodeError(DecodeError::Io(io_error))) = &sync_result {
if let Err(GraphSyncError::DecodeError(DecodeError::Io(io_error))) = &sync_result {
let error_string = format!("Input file lightning-rapid-gossip-sync/res/full_graph.lngossip is missing! Download it from https://bitcoin.ninja/ldk-compressed_graph-285cb27df79-2022-07-21.bin\n\n{:?}", io_error);
#[cfg(not(require_route_graph_test))]
{
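With the `error` module deleted, `GraphSyncError` now lives at the crate root, so downstream code imports it directly from `lightning_rapid_gossip_sync` and keeps matching on the same two variants. A hedged sketch of a caller, assuming `rapid_sync` is a `RapidGossipSync` wrapping an existing network graph and logger, and with an illustrative snapshot path:

    use lightning_rapid_gossip_sync::{GraphSyncError, RapidGossipSync};

    // The path is a placeholder; std feature assumed for the file-path helper.
    match rapid_sync.sync_network_graph_with_file_path("./rgs-snapshot.bin") {
        Ok(timestamp) => println!("graph synced up to timestamp {}", timestamp),
        Err(GraphSyncError::DecodeError(e)) => eprintln!("malformed snapshot: {:?}", e),
        Err(GraphSyncError::LightningError(e)) => eprintln!("could not apply update: {:?}", e),
    }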
6 changes: 2 additions & 4 deletions lightning-rapid-gossip-sync/src/processing.rs
@@ -14,8 +14,7 @@ use lightning::{log_debug, log_warn, log_trace, log_given_level, log_gossip};
use lightning::util::ser::{BigSize, Readable};
use lightning::io;

use crate::error::GraphSyncError;
use crate::RapidGossipSync;
use crate::{GraphSyncError, RapidGossipSync};

#[cfg(all(feature = "std", not(test)))]
use std::time::{SystemTime, UNIX_EPOCH};
@@ -269,9 +268,8 @@ mod tests {
use lightning::routing::gossip::NetworkGraph;
use lightning::util::test_utils::TestLogger;

use crate::error::GraphSyncError;
use crate::processing::STALE_RGS_UPDATE_AGE_LIMIT_SECS;
use crate::RapidGossipSync;
use crate::{GraphSyncError, RapidGossipSync};

const VALID_RGS_BINARY: [u8; 300] = [
76, 68, 75, 1, 111, 226, 140, 10, 182, 241, 179, 114, 193, 166, 162, 70, 174, 99, 247,
8 changes: 4 additions & 4 deletions lightning/src/blinded_path/mod.rs
@@ -92,7 +92,7 @@ impl BlindedPath {
// be in relation to a specific channel.
let htlc_maximum_msat = u64::max_value();
Self::new_for_payment(
&[], payee_node_id, payee_tlvs, htlc_maximum_msat, entropy_source, secp_ctx
Vec::new(), payee_node_id, payee_tlvs, htlc_maximum_msat, entropy_source, secp_ctx
)
}

@@ -106,19 +106,19 @@
/// [`ForwardTlvs`]: crate::blinded_path::payment::ForwardTlvs
// TODO: make all payloads the same size with padding + add dummy hops
pub fn new_for_payment<ES: EntropySource + ?Sized, T: secp256k1::Signing + secp256k1::Verification>(
intermediate_nodes: &[payment::ForwardNode], payee_node_id: PublicKey,
intermediate_nodes: Vec<payment::ForwardNode>, payee_node_id: PublicKey,
payee_tlvs: payment::ReceiveTlvs, htlc_maximum_msat: u64, entropy_source: &ES,
secp_ctx: &Secp256k1<T>
) -> Result<(BlindedPayInfo, Self), ()> {
let blinding_secret_bytes = entropy_source.get_secure_random_bytes();
let blinding_secret = SecretKey::from_slice(&blinding_secret_bytes[..]).expect("RNG is busted");

let blinded_payinfo = payment::compute_payinfo(intermediate_nodes, &payee_tlvs, htlc_maximum_msat)?;
let blinded_payinfo = payment::compute_payinfo(&intermediate_nodes, &payee_tlvs, htlc_maximum_msat)?;
Ok((blinded_payinfo, BlindedPath {
introduction_node_id: intermediate_nodes.first().map_or(payee_node_id, |n| n.node_id),
blinding_point: PublicKey::from_secret_key(secp_ctx, &blinding_secret),
blinded_hops: payment::blinded_hops(
secp_ctx, intermediate_nodes, payee_node_id, payee_tlvs, &blinding_secret
secp_ctx, &intermediate_nodes, payee_node_id, payee_tlvs, &blinding_secret
).map_err(|_| ())?,
}))
}
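`new_for_payment` now takes the intermediate nodes as an owned `Vec<payment::ForwardNode>` (borrowed internally where needed), so callers that previously passed a slice pass a `Vec` instead, and the one-hop constructor above simply passes `Vec::new()`. A sketch of a direct call, with `payee_node_id`, `payee_tlvs`, `entropy_source` and `secp_ctx` assumed to be in scope:

    // An empty Vec gives a path containing only the payee, matching the
    // one-hop helper above.
    let (payinfo, blinded_path) = BlindedPath::new_for_payment(
        Vec::new(),       // was `&[]` before this change
        payee_node_id,
        payee_tlvs,
        u64::max_value(), // htlc_maximum_msat: no channel-specific cap here
        &entropy_source,
        &secp_ctx,
    )?;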
6 changes: 3 additions & 3 deletions lightning/src/chain/channelmonitor.rs
@@ -753,7 +753,7 @@ pub struct ChannelMonitor<Signer: WriteableEcdsaChannelSigner> {
pub(super) inner: Mutex<ChannelMonitorImpl<Signer>>,
}

impl<Signer: WriteableEcdsaChannelSigner> Clone for ChannelMonitor<Signer> where Signer: Clone {
impl<Signer: WriteableEcdsaChannelSigner> Clone for ChannelMonitor<Signer> {
fn clone(&self) -> Self {
let inner = self.inner.lock().unwrap().clone();
ChannelMonitor::from_impl(inner)
@@ -4338,8 +4338,8 @@ where

const MAX_ALLOC_SIZE: usize = 64*1024;

impl<'a, 'b, ES: EntropySource, SP: SignerProvider> ReadableArgs<(&'a ES, &'b SP)>
for (BlockHash, ChannelMonitor<SP::EcdsaSigner>) {
impl<'a, 'b, ES: EntropySource, Signer: WriteableEcdsaChannelSigner, SP: SignerProvider<EcdsaSigner=Signer>> ReadableArgs<(&'a ES, &'b SP)>
for (BlockHash, ChannelMonitor<Signer>) {
fn read<R: io::Read>(reader: &mut R, args: (&'a ES, &'b SP)) -> Result<Self, DecodeError> {
macro_rules! unwrap_obj {
($key: expr) => {
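The `ReadableArgs` impl now names the signer explicitly through `SP::EcdsaSigner = Signer`, and the `Clone` impl drops the redundant `Signer: Clone` bound; how a monitor is deserialized does not change. A hedged sketch, assuming `serialized` holds a previously persisted monitor and `keys_manager` is an LDK `KeysManager` (which implements both `EntropySource` and `SignerProvider`):

    use bitcoin::BlockHash;
    use lightning::chain::channelmonitor::ChannelMonitor;
    use lightning::sign::InMemorySigner;
    use lightning::util::ser::ReadableArgs;
    use std::io::Cursor;

    let mut cursor = Cursor::new(&serialized[..]);
    let (_block_hash, monitor): (BlockHash, ChannelMonitor<InMemorySigner>) =
        ReadableArgs::read(&mut cursor, (&*keys_manager, &*keys_manager))
            .expect("failed to deserialize ChannelMonitor");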
4 changes: 4 additions & 0 deletions lightning/src/events/bump_transaction.rs
@@ -265,6 +265,8 @@ impl Utxo {
}

/// Returns a `Utxo` with the `satisfaction_weight` estimate for a P2WPKH nested in P2SH output.
///
/// This is not exported to bindings users as WPubkeyHash is not yet exported
pub fn new_nested_p2wpkh(outpoint: OutPoint, value: u64, pubkey_hash: &WPubkeyHash) -> Self {
let script_sig_size = 1 /* script_sig length */ +
1 /* OP_0 */ +
@@ -281,6 +283,8 @@
}

/// Returns a `Utxo` with the `satisfaction_weight` estimate for a SegWit v0 P2WPKH output.
///
/// This is not exported to bindings users as WPubkeyHash is not yet exported
pub fn new_v0_p2wpkh(outpoint: OutPoint, value: u64, pubkey_hash: &WPubkeyHash) -> Self {
Self {
outpoint,
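These two hunks only add bindings-export notes to the docs; the constructors themselves still derive `satisfaction_weight` from the fixed P2WPKH and P2SH-nested-P2WPKH input sizes. A small usage sketch, with `outpoint`, `value_sats` and `wpubkey_hash` assumed to come from the caller's wallet:

    use lightning::events::bump_transaction::Utxo;

    // A native SegWit v0 P2WPKH coin the wallet could contribute to a fee bump.
    let utxo = Utxo::new_v0_p2wpkh(outpoint, value_sats, &wpubkey_hash);
    // satisfaction_weight already covers the empty script_sig plus the
    // signature and pubkey witness items, so coin selection can use it as-is.
    assert!(utxo.satisfaction_weight > 0);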
4 changes: 2 additions & 2 deletions lightning/src/ln/channel.rs
@@ -5604,7 +5604,7 @@ impl<SP: Deref> Channel<SP> where
return None;
}
};
let our_node_sig = match node_signer.sign_gossip_message(msgs::UnsignedGossipMessage::ChannelAnnouncement(&announcement)) {
let our_node_sig = match node_signer.sign_gossip_message(msgs::UnsignedGossipMessage::ChannelAnnouncement(announcement.clone())) {
Err(_) => {
log_error!(logger, "Failed to generate node signature for channel_announcement. Channel will not be announced!");
return None;
@@ -5650,7 +5650,7 @@
.map_err(|_| ChannelError::Ignore("Signer failed to retrieve own public key".to_owned()))?);
let were_node_one = announcement.node_id_1 == our_node_key;

let our_node_sig = node_signer.sign_gossip_message(msgs::UnsignedGossipMessage::ChannelAnnouncement(&announcement))
let our_node_sig = node_signer.sign_gossip_message(msgs::UnsignedGossipMessage::ChannelAnnouncement(announcement.clone()))
.map_err(|_| ChannelError::Ignore("Failed to generate node signature for channel_announcement".to_owned()))?;
match &self.context.holder_signer {
ChannelSignerType::Ecdsa(ecdsa) => {
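In this tree `sign_gossip_message` takes the `UnsignedGossipMessage` by value rather than by reference, so the channel code clones the announcement before wrapping it. A sketch of the call pattern used in both hunks above, with the signer and unsigned announcement assumed to be in scope:

    use lightning::ln::msgs::UnsignedGossipMessage;

    // The unsigned announcement is cloned so the original can still be embedded
    // in the final signed channel_announcement once the signature comes back.
    let our_node_sig = node_signer
        .sign_gossip_message(UnsignedGossipMessage::ChannelAnnouncement(announcement.clone()))
        .map_err(|_| ())?;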