Remove Outpoint::to_channel_id method
To avoid confusion and to stay accurate going forward, we remove this
method, as it is inconsistent with the channel IDs generated during V2
channel establishment. If a V1, funding outpoint-based channel ID is
needed, `ChannelId::v1_from_funding_outpoint` should be used instead.
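
A minimal sketch of the migration, assuming a `funding_outpoint` belonging to a channel opened via V1 establishment (the helper name and its setup are illustrative, not part of the patch):

```rust
use lightning::chain::transaction::OutPoint;
use lightning::ln::ChannelId;

// Hypothetical helper showing the replacement call.
fn v1_channel_id(funding_outpoint: OutPoint) -> ChannelId {
	// Previously: funding_outpoint.to_channel_id()
	// Now the V1, funding outpoint-based ID is derived explicitly:
	ChannelId::v1_from_funding_outpoint(funding_outpoint)
}
```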

Much of the library has assumed that having the funding outpoint is
enough to derive the channel ID. That is no longer the case, so the
channel ID must now be passed along where appropriate. Any channel that
could have been persisted up to this point can only have used V1
establishment, so structures that do not store a channel ID for such a
channel can safely fall back to the funding outpoint-based version.
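
As a sketch of that fallback, assuming a hypothetical persisted record whose on-disk data may predate a stored channel ID (the struct and field names are illustrative, not taken from the library):

```rust
use lightning::chain::transaction::OutPoint;
use lightning::ln::ChannelId;

// Illustrative record: data written before this change only stored the
// funding outpoint, never a channel ID.
struct PersistedChannelRecord {
	funding_outpoint: OutPoint,
	channel_id: Option<ChannelId>,
}

impl PersistedChannelRecord {
	// Records written before this change can only refer to V1-established
	// channels, so the funding outpoint-based ID is a safe fallback.
	fn channel_id(&self) -> ChannelId {
		self.channel_id.unwrap_or_else(||
			ChannelId::v1_from_funding_outpoint(self.funding_outpoint))
	}
}
```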
dunxen committed Jan 15, 2024
1 parent 4b70921 commit 175de40
Showing 27 changed files with 453 additions and 349 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -13,3 +13,4 @@ lightning-rapid-gossip-sync/res/full_graph.lngossip
lightning-custom-message/target
lightning-transaction-sync/target
no-std-check/target
+msrv-no-dev-deps-check/target
57 changes: 30 additions & 27 deletions fuzz/src/chanmon_consistency.rs
@@ -40,7 +40,7 @@ use lightning::chain::chaininterface::{BroadcasterInterface, ConfirmationTarget,
use lightning::sign::{KeyMaterial, InMemorySigner, Recipient, EntropySource, NodeSigner, SignerProvider};
use lightning::events;
use lightning::events::MessageSendEventsProvider;
-use lightning::ln::{PaymentHash, PaymentPreimage, PaymentSecret};
+use lightning::ln::{ChannelId, PaymentHash, PaymentPreimage, PaymentSecret};
use lightning::ln::channelmanager::{ChainParameters, ChannelDetails, ChannelManager, PaymentSendFailure, ChannelManagerReadArgs, PaymentId, RecipientOnionFields};
use lightning::ln::channel::FEE_SPIKE_BUFFER_FEE_INCREASE_MULTIPLE;
use lightning::ln::msgs::{self, CommitmentUpdate, ChannelMessageHandler, DecodeError, UpdateAddHTLC, Init};
@@ -167,16 +167,16 @@ impl TestChainMonitor {
}
}
impl chain::Watch<TestChannelSigner> for TestChainMonitor {
-fn watch_channel(&self, funding_txo: OutPoint, monitor: channelmonitor::ChannelMonitor<TestChannelSigner>) -> Result<chain::ChannelMonitorUpdateStatus, ()> {
+fn watch_channel(&self, funding_txo: OutPoint, channel_id: ChannelId, monitor: channelmonitor::ChannelMonitor<TestChannelSigner>) -> Result<chain::ChannelMonitorUpdateStatus, ()> {
let mut ser = VecWriter(Vec::new());
monitor.write(&mut ser).unwrap();
if let Some(_) = self.latest_monitors.lock().unwrap().insert(funding_txo, (monitor.get_latest_update_id(), ser.0)) {
panic!("Already had monitor pre-watch_channel");
}
-self.chain_monitor.watch_channel(funding_txo, monitor)
+self.chain_monitor.watch_channel(funding_txo, channel_id, monitor)
}

-fn update_channel(&self, funding_txo: OutPoint, update: &channelmonitor::ChannelMonitorUpdate) -> chain::ChannelMonitorUpdateStatus {
+fn update_channel(&self, funding_txo: OutPoint, channel_id: ChannelId, update: &channelmonitor::ChannelMonitorUpdate) -> chain::ChannelMonitorUpdateStatus {
let mut map_lock = self.latest_monitors.lock().unwrap();
let mut map_entry = match map_lock.entry(funding_txo) {
hash_map::Entry::Occupied(entry) => entry,
@@ -188,10 +188,10 @@ impl chain::Watch<TestChannelSigner> for TestChainMonitor {
let mut ser = VecWriter(Vec::new());
deserialized_monitor.write(&mut ser).unwrap();
map_entry.insert((update.update_id, ser.0));
-self.chain_monitor.update_channel(funding_txo, update)
+self.chain_monitor.update_channel(funding_txo, channel_id, update)
}

-fn release_pending_monitor_events(&self) -> Vec<(OutPoint, Vec<MonitorEvent>, Option<PublicKey>)> {
+fn release_pending_monitor_events(&self) -> Vec<(OutPoint, ChannelId, Vec<MonitorEvent>, Option<PublicKey>)> {
return self.chain_monitor.release_pending_monitor_events();
}
}
@@ -539,7 +539,8 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {

let res = (<(BlockHash, ChanMan)>::read(&mut Cursor::new(&$ser.0), read_args).expect("Failed to read manager").1, chain_monitor.clone());
for (funding_txo, mon) in monitors.drain() {
-assert_eq!(chain_monitor.chain_monitor.watch_channel(funding_txo, mon),
+let channel_id = mon.get_channel_id();
+assert_eq!(chain_monitor.chain_monitor.watch_channel(funding_txo, channel_id, mon),
Ok(ChannelMonitorUpdateStatus::Completed));
}
res
@@ -704,7 +705,9 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
lock_fundings!(nodes);

let chan_a = nodes[0].list_usable_channels()[0].short_channel_id.unwrap();
+let chan_1_id = nodes[0].list_usable_channels()[0].channel_id;
let chan_b = nodes[2].list_usable_channels()[0].short_channel_id.unwrap();
+let chan_2_id = nodes[2].list_usable_channels()[0].channel_id;

let mut payment_id: u8 = 0;
let mut payment_idx: u64 = 0;
@@ -1060,25 +1063,25 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {

0x08 => {
if let Some((id, _)) = monitor_a.latest_monitors.lock().unwrap().get(&chan_1_funding) {
-monitor_a.chain_monitor.force_channel_monitor_updated(chan_1_funding, *id);
+monitor_a.chain_monitor.force_channel_monitor_updated(chan_1_funding, chan_1_id, *id);
nodes[0].process_monitor_events();
}
},
0x09 => {
if let Some((id, _)) = monitor_b.latest_monitors.lock().unwrap().get(&chan_1_funding) {
-monitor_b.chain_monitor.force_channel_monitor_updated(chan_1_funding, *id);
+monitor_b.chain_monitor.force_channel_monitor_updated(chan_1_funding, chan_1_id, *id);
nodes[1].process_monitor_events();
}
},
0x0a => {
if let Some((id, _)) = monitor_b.latest_monitors.lock().unwrap().get(&chan_2_funding) {
-monitor_b.chain_monitor.force_channel_monitor_updated(chan_2_funding, *id);
+monitor_b.chain_monitor.force_channel_monitor_updated(chan_2_funding, chan_2_id, *id);
nodes[1].process_monitor_events();
}
},
0x0b => {
if let Some((id, _)) = monitor_c.latest_monitors.lock().unwrap().get(&chan_2_funding) {
-monitor_c.chain_monitor.force_channel_monitor_updated(chan_2_funding, *id);
+monitor_c.chain_monitor.force_channel_monitor_updated(chan_2_funding, chan_2_id, *id);
nodes[2].process_monitor_events();
}
},
@@ -1292,87 +1295,87 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
0xf0 => {
let pending_updates = monitor_a.chain_monitor.list_pending_monitor_updates().remove(&chan_1_funding).unwrap();
if let Some(id) = pending_updates.get(0) {
-monitor_a.chain_monitor.channel_monitor_updated(chan_1_funding, *id).unwrap();
+monitor_a.chain_monitor.channel_monitor_updated(chan_1_funding, chan_1_id, *id).unwrap();
}
nodes[0].process_monitor_events();
}
0xf1 => {
let pending_updates = monitor_a.chain_monitor.list_pending_monitor_updates().remove(&chan_1_funding).unwrap();
if let Some(id) = pending_updates.get(1) {
-monitor_a.chain_monitor.channel_monitor_updated(chan_1_funding, *id).unwrap();
+monitor_a.chain_monitor.channel_monitor_updated(chan_1_funding, chan_1_id, *id).unwrap();
}
nodes[0].process_monitor_events();
}
0xf2 => {
let pending_updates = monitor_a.chain_monitor.list_pending_monitor_updates().remove(&chan_1_funding).unwrap();
if let Some(id) = pending_updates.last() {
-monitor_a.chain_monitor.channel_monitor_updated(chan_1_funding, *id).unwrap();
+monitor_a.chain_monitor.channel_monitor_updated(chan_1_funding, chan_1_id, *id).unwrap();
}
nodes[0].process_monitor_events();
}

0xf4 => {
let pending_updates = monitor_b.chain_monitor.list_pending_monitor_updates().remove(&chan_1_funding).unwrap();
if let Some(id) = pending_updates.get(0) {
-monitor_b.chain_monitor.channel_monitor_updated(chan_1_funding, *id).unwrap();
+monitor_b.chain_monitor.channel_monitor_updated(chan_1_funding, chan_1_id, *id).unwrap();
}
nodes[1].process_monitor_events();
}
0xf5 => {
let pending_updates = monitor_b.chain_monitor.list_pending_monitor_updates().remove(&chan_1_funding).unwrap();
if let Some(id) = pending_updates.get(1) {
-monitor_b.chain_monitor.channel_monitor_updated(chan_1_funding, *id).unwrap();
+monitor_b.chain_monitor.channel_monitor_updated(chan_1_funding, chan_1_id, *id).unwrap();
}
nodes[1].process_monitor_events();
}
0xf6 => {
let pending_updates = monitor_b.chain_monitor.list_pending_monitor_updates().remove(&chan_1_funding).unwrap();
if let Some(id) = pending_updates.last() {
-monitor_b.chain_monitor.channel_monitor_updated(chan_1_funding, *id).unwrap();
+monitor_b.chain_monitor.channel_monitor_updated(chan_1_funding, chan_1_id, *id).unwrap();
}
nodes[1].process_monitor_events();
}

0xf8 => {
let pending_updates = monitor_b.chain_monitor.list_pending_monitor_updates().remove(&chan_2_funding).unwrap();
if let Some(id) = pending_updates.get(0) {
-monitor_b.chain_monitor.channel_monitor_updated(chan_2_funding, *id).unwrap();
+monitor_b.chain_monitor.channel_monitor_updated(chan_2_funding, chan_2_id, *id).unwrap();
}
nodes[1].process_monitor_events();
}
0xf9 => {
let pending_updates = monitor_b.chain_monitor.list_pending_monitor_updates().remove(&chan_2_funding).unwrap();
if let Some(id) = pending_updates.get(1) {
-monitor_b.chain_monitor.channel_monitor_updated(chan_2_funding, *id).unwrap();
+monitor_b.chain_monitor.channel_monitor_updated(chan_2_funding, chan_2_id, *id).unwrap();
}
nodes[1].process_monitor_events();
}
0xfa => {
let pending_updates = monitor_b.chain_monitor.list_pending_monitor_updates().remove(&chan_2_funding).unwrap();
if let Some(id) = pending_updates.last() {
-monitor_b.chain_monitor.channel_monitor_updated(chan_2_funding, *id).unwrap();
+monitor_b.chain_monitor.channel_monitor_updated(chan_2_funding, chan_2_id, *id).unwrap();
}
nodes[1].process_monitor_events();
}

0xfc => {
let pending_updates = monitor_c.chain_monitor.list_pending_monitor_updates().remove(&chan_2_funding).unwrap();
if let Some(id) = pending_updates.get(0) {
-monitor_c.chain_monitor.channel_monitor_updated(chan_2_funding, *id).unwrap();
+monitor_c.chain_monitor.channel_monitor_updated(chan_2_funding, chan_2_id, *id).unwrap();
}
nodes[2].process_monitor_events();
}
0xfd => {
let pending_updates = monitor_c.chain_monitor.list_pending_monitor_updates().remove(&chan_2_funding).unwrap();
if let Some(id) = pending_updates.get(1) {
-monitor_c.chain_monitor.channel_monitor_updated(chan_2_funding, *id).unwrap();
+monitor_c.chain_monitor.channel_monitor_updated(chan_2_funding, chan_2_id, *id).unwrap();
}
nodes[2].process_monitor_events();
}
0xfe => {
let pending_updates = monitor_c.chain_monitor.list_pending_monitor_updates().remove(&chan_2_funding).unwrap();
if let Some(id) = pending_updates.last() {
-monitor_c.chain_monitor.channel_monitor_updated(chan_2_funding, *id).unwrap();
+monitor_c.chain_monitor.channel_monitor_updated(chan_2_funding, chan_2_id, *id).unwrap();
}
nodes[2].process_monitor_events();
}
@@ -1387,19 +1390,19 @@ pub fn do_test<Out: Output>(data: &[u8], underlying_out: Out, anchors: bool) {
*monitor_c.persister.update_ret.lock().unwrap() = ChannelMonitorUpdateStatus::Completed;

if let Some((id, _)) = monitor_a.latest_monitors.lock().unwrap().get(&chan_1_funding) {
-monitor_a.chain_monitor.force_channel_monitor_updated(chan_1_funding, *id);
+monitor_a.chain_monitor.force_channel_monitor_updated(chan_1_funding, chan_1_id, *id);
nodes[0].process_monitor_events();
}
if let Some((id, _)) = monitor_b.latest_monitors.lock().unwrap().get(&chan_1_funding) {
-monitor_b.chain_monitor.force_channel_monitor_updated(chan_1_funding, *id);
+monitor_b.chain_monitor.force_channel_monitor_updated(chan_1_funding, chan_1_id, *id);
nodes[1].process_monitor_events();
}
if let Some((id, _)) = monitor_b.latest_monitors.lock().unwrap().get(&chan_2_funding) {
-monitor_b.chain_monitor.force_channel_monitor_updated(chan_2_funding, *id);
+monitor_b.chain_monitor.force_channel_monitor_updated(chan_2_funding, chan_2_id, *id);
nodes[1].process_monitor_events();
}
if let Some((id, _)) = monitor_c.latest_monitors.lock().unwrap().get(&chan_2_funding) {
-monitor_c.chain_monitor.force_channel_monitor_updated(chan_2_funding, *id);
+monitor_c.chain_monitor.force_channel_monitor_updated(chan_2_funding, chan_2_id, *id);
nodes[2].process_monitor_events();
}

5 changes: 2 additions & 3 deletions fuzz/src/full_stack.rs
@@ -22,8 +22,7 @@ use bitcoin::consensus::encode::deserialize;
use bitcoin::network::constants::Network;

use bitcoin::hashes::hex::FromHex;
-use bitcoin::hashes::Hash as TraitImport;
-use bitcoin::hashes::HashEngine as TraitImportEngine;
+use bitcoin::hashes::Hash as _;
use bitcoin::hashes::sha256::Hash as Sha256;
use bitcoin::hashes::sha256d::Hash as Sha256dHash;
use bitcoin::hash_types::{Txid, BlockHash, WPubkeyHash};
@@ -651,7 +650,7 @@ pub fn do_test(data: &[u8], logger: &Arc<dyn Logger>) {
if let None = loss_detector.txids_confirmed.get(&funding_txid) {
let outpoint = OutPoint { txid: funding_txid, index: 0 };
for chan in channelmanager.list_channels() {
-if chan.channel_id == outpoint.to_channel_id() {
+if chan.funding_txo == Some(outpoint) {
tx.version += 1;
continue 'search_loop;
}
5 changes: 3 additions & 2 deletions fuzz/src/utils/test_persister.rs
@@ -2,6 +2,7 @@ use lightning::chain;
use lightning::chain::{chainmonitor, channelmonitor};
use lightning::chain::chainmonitor::MonitorUpdateId;
use lightning::chain::transaction::OutPoint;
+use lightning::ln::ChannelId;
use lightning::util::test_channel_signer::TestChannelSigner;

use std::sync::Mutex;
@@ -10,11 +11,11 @@ pub struct TestPersister {
pub update_ret: Mutex<chain::ChannelMonitorUpdateStatus>,
}
impl chainmonitor::Persist<TestChannelSigner> for TestPersister {
-fn persist_new_channel(&self, _funding_txo: OutPoint, _data: &channelmonitor::ChannelMonitor<TestChannelSigner>, _update_id: MonitorUpdateId) -> chain::ChannelMonitorUpdateStatus {
+fn persist_new_channel(&self, _funding_txo: OutPoint, _channel_id: ChannelId, _data: &channelmonitor::ChannelMonitor<TestChannelSigner>, _update_id: MonitorUpdateId) -> chain::ChannelMonitorUpdateStatus {
self.update_ret.lock().unwrap().clone()
}

-fn update_persisted_channel(&self, _funding_txo: OutPoint, _update: Option<&channelmonitor::ChannelMonitorUpdate>, _data: &channelmonitor::ChannelMonitor<TestChannelSigner>, _update_id: MonitorUpdateId) -> chain::ChannelMonitorUpdateStatus {
+fn update_persisted_channel(&self, _funding_txo: OutPoint, _channel_id: ChannelId, _update: Option<&channelmonitor::ChannelMonitorUpdate>, _data: &channelmonitor::ChannelMonitor<TestChannelSigner>, _update_id: MonitorUpdateId) -> chain::ChannelMonitorUpdateStatus {
self.update_ret.lock().unwrap().clone()
}
}
4 changes: 2 additions & 2 deletions lightning-background-processor/src/lib.rs
@@ -929,7 +929,7 @@ mod tests {
use lightning::chain::transaction::OutPoint;
use lightning::events::{Event, PathFailure, MessageSendEventsProvider, MessageSendEvent};
use lightning::{get_event_msg, get_event};
-use lightning::ln::PaymentHash;
+use lightning::ln::{PaymentHash, ChannelId};
use lightning::ln::channelmanager;
use lightning::ln::channelmanager::{BREAKDOWN_TIMEOUT, ChainParameters, MIN_CLTV_EXPIRY_DELTA, PaymentId};
use lightning::ln::features::{ChannelFeatures, NodeFeatures};
@@ -1414,7 +1414,7 @@ mod tests {
}

// Force-close the channel.
-nodes[0].node.force_close_broadcasting_latest_txn(&OutPoint { txid: tx.txid(), index: 0 }.to_channel_id(), &nodes[1].node.get_our_node_id()).unwrap();
+nodes[0].node.force_close_broadcasting_latest_txn(&ChannelId::v1_from_funding_outpoint(OutPoint { txid: tx.txid(), index: 0 }), &nodes[1].node.get_our_node_id()).unwrap();

// Check that the force-close updates are persisted.
check_persisted_data!(nodes[0].node, filepath.clone());
5 changes: 4 additions & 1 deletion lightning-block-sync/src/init.rs
@@ -49,6 +49,7 @@ BlockSourceResult<ValidatedBlockHeader> where B::Target: BlockSource {
/// use lightning::chain::chaininterface::FeeEstimator;
/// use lightning::sign;
/// use lightning::sign::{EntropySource, NodeSigner, SignerProvider};
+/// use lightning::ln::ChannelId;
/// use lightning::ln::channelmanager::{ChannelManager, ChannelManagerReadArgs};
/// use lightning::routing::router::Router;
/// use lightning::util::config::UserConfig;
@@ -119,7 +120,9 @@ BlockSourceResult<ValidatedBlockHeader> where B::Target: BlockSource {
///
/// // Allow the chain monitor to watch any channels.
/// let monitor = monitor_listener.0;
-/// chain_monitor.watch_channel(monitor.get_funding_txo().0, monitor);
+/// let funding_outpoint = monitor.get_funding_txo().0;
+/// let channel_id = monitor.get_channel_id();
+/// chain_monitor.watch_channel(funding_outpoint, channel_id, monitor);
///
/// // Create an SPV client to notify the chain monitor and channel manager of block events.
/// let chain_poller = poll::ChainPoller::new(block_source, Network::Bitcoin);
11 changes: 7 additions & 4 deletions lightning-persister/src/fs_store.rs
@@ -450,7 +450,8 @@ mod tests {
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
let update_map = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap();
-let update_id = update_map.get(&added_monitors[0].0.to_channel_id()).unwrap();
+let channel_id = &added_monitors[0].1.get_channel_id();
+let update_id = update_map.get(&channel_id).unwrap();

// Set the store's directory to read-only, which should result in
// returning an unrecoverable failure when we then attempt to persist a
@@ -464,7 +465,7 @@
txid: Txid::from_str("8984484a580b825b9972d7adb15050b3ab624ccd731946b3eeddb92f4e7ef6be").unwrap(),
index: 0
};
-match store.persist_new_channel(test_txo, &added_monitors[0].1, update_id.2) {
+match store.persist_new_channel(test_txo, *channel_id, &added_monitors[0].1, update_id.2) {
ChannelMonitorUpdateStatus::UnrecoverableError => {},
_ => panic!("unexpected result from persisting new channel")
}
@@ -489,7 +490,9 @@
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
let update_map = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap();
-let update_id = update_map.get(&added_monitors[0].0.to_channel_id()).unwrap();
+let funding_outpoint = &added_monitors[0].0;
+let channel_id = &added_monitors[0].1.get_channel_id();
+let update_id = update_map.get(channel_id).unwrap();

// Create the store with an invalid directory name and test that the
// channel fails to open because the directories fail to be created. There
@@ -501,7 +504,7 @@
txid: Txid::from_str("8984484a580b825b9972d7adb15050b3ab624ccd731946b3eeddb92f4e7ef6be").unwrap(),
index: 0
};
-match store.persist_new_channel(test_txo, &added_monitors[0].1, update_id.2) {
+match store.persist_new_channel(test_txo, *channel_id, &added_monitors[0].1, update_id.2) {
ChannelMonitorUpdateStatus::UnrecoverableError => {},
_ => panic!("unexpected result from persisting new channel")
}
