Updated ClosureReason::HolderForceClosed with whether txn was broadcasted. #3107

Merged
4 changes: 2 additions & 2 deletions lightning-persister/src/fs_store.rs
@@ -447,7 +447,7 @@ mod tests {
let chan = create_announced_chan_between_nodes(&nodes, 0, 1);
let error_message = "Channel force-closed";
nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[0].node.get_our_node_id()], 100000);
let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();

// Set the store's directory to read-only, which should result in
@@ -485,7 +485,7 @@ mod tests {
let chan = create_announced_chan_between_nodes(&nodes, 0, 1);
let error_message = "Channel force-closed";
nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[0].node.get_our_node_id()], 100000);
let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
let update_map = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap();
let update_id = update_map.get(&added_monitors[0].1.channel_id()).unwrap();
2 changes: 1 addition & 1 deletion lightning-persister/src/test_utils.rs
@@ -106,7 +106,7 @@ pub(crate) fn do_test_store<K: KVStore>(store_0: &K, store_1: &K) {
// updates.
let error_message = "Channel force-closed";
nodes[0].node.force_close_broadcasting_latest_txn(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
check_closed_broadcast!(nodes[0], true);
check_added_monitors!(nodes[0], 1);

2 changes: 1 addition & 1 deletion lightning/src/chain/channelmonitor.rs
@@ -2873,7 +2873,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
F::Target: FeeEstimator,
L::Target: Logger,
{
let (claimable_outpoints, _) = self.generate_claimable_outpoints_and_watch_outputs(ClosureReason::HolderForceClosed);
let (claimable_outpoints, _) = self.generate_claimable_outpoints_and_watch_outputs(ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) });
self.onchain_tx_handler.update_claims_view_from_requests(
claimable_outpoints, self.best_block.height, self.best_block.height, broadcaster,
fee_estimator, logger
27 changes: 24 additions & 3 deletions lightning/src/events/mod.rs
@@ -278,7 +278,21 @@ pub enum ClosureReason {
/// Closure generated from [`ChannelManager::force_close_channel`], called by the user.
///
/// [`ChannelManager::force_close_channel`]: crate::ln::channelmanager::ChannelManager::force_close_channel.
HolderForceClosed,
HolderForceClosed {
/// Whether or not the latest transaction was broadcasted when the channel was force
/// closed.
///
/// Channels closed using [`ChannelManager::force_close_broadcasting_latest_txn`] will have
/// this field set to true, whereas channels closed using [`ChannelManager::force_close_without_broadcasting_txn`]
/// or force-closed prior to being funded will have this field set to false.
///
/// This will be `None` for objects generated or written by LDK 0.0.123 and
/// earlier.
///
/// [`ChannelManager::force_close_broadcasting_latest_txn`]: crate::ln::channelmanager::ChannelManager::force_close_broadcasting_latest_txn.
/// [`ChannelManager::force_close_without_broadcasting_txn`]: crate::ln::channelmanager::ChannelManager::force_close_without_broadcasting_txn.
broadcasted_latest_txn: Option<bool>
},
/// The channel was closed after negotiating a cooperative close and we've now broadcasted
/// the cooperative close transaction. Note the shutdown may have been initiated by us.
///
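For context, a minimal consumer-side sketch (not part of this PR; the helper function name and log strings are illustrative) of how the new struct variant might be matched when handling `Event::ChannelClosed`:

```rust
use lightning::events::{ClosureReason, Event};

// Hypothetical helper (not part of this PR) showing how an event consumer
// might interpret the new `broadcasted_latest_txn` field.
fn log_channel_closure(event: &Event) {
	if let Event::ChannelClosed { reason, .. } = event {
		match reason {
			ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) } =>
				println!("we force-closed and broadcast the latest commitment transaction"),
			ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) } =>
				println!("we force-closed without broadcasting a commitment transaction"),
			// `None` means the reason was generated or written by LDK 0.0.123 or earlier.
			ClosureReason::HolderForceClosed { broadcasted_latest_txn: None } =>
				println!("we force-closed; broadcast status unknown (pre-0.0.124 data)"),
			other => println!("channel closed: {}", other),
		}
	}
}
```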
@@ -340,7 +354,14 @@ impl core::fmt::Display for ClosureReason {
ClosureReason::CounterpartyForceClosed { peer_msg } => {
f.write_fmt(format_args!("counterparty force-closed with message: {}", peer_msg))
},
ClosureReason::HolderForceClosed => f.write_str("user force-closed the channel"),
ClosureReason::HolderForceClosed { broadcasted_latest_txn } => {
f.write_str("user force-closed the channel")?;
if let Some(brodcasted) = broadcasted_latest_txn {
write!(f, " and {} the latest transaction", if *brodcasted { "broadcasted" } else { "did not broadcast" })
Review comment (Contributor): nit: did not broadcast -> was not broadcasted. "did not" implies we tried to broadcast it.
} else {
Ok(())
}
},
ClosureReason::LegacyCooperativeClosure => f.write_str("the channel was cooperatively closed"),
ClosureReason::CounterpartyInitiatedCooperativeClosure => f.write_str("the channel was cooperatively closed by our peer"),
ClosureReason::LocallyInitiatedCooperativeClosure => f.write_str("the channel was cooperatively closed by us"),
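For reference, the resulting display strings, shown as a standalone sketch that mirrors the formatting logic above (the free function and asserts are illustrative only, not LDK code):

```rust
// Standalone sketch mirroring the Display logic in the hunk above.
fn holder_force_closed_msg(broadcasted_latest_txn: Option<bool>) -> String {
	let mut s = String::from("user force-closed the channel");
	if let Some(broadcasted) = broadcasted_latest_txn {
		s.push_str(if broadcasted {
			" and broadcasted the latest transaction"
		} else {
			" and did not broadcast the latest transaction"
		});
	}
	s
}

fn main() {
	assert_eq!(holder_force_closed_msg(Some(true)),
		"user force-closed the channel and broadcasted the latest transaction");
	assert_eq!(holder_force_closed_msg(Some(false)),
		"user force-closed the channel and did not broadcast the latest transaction");
	assert_eq!(holder_force_closed_msg(None), "user force-closed the channel");
}
```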
@@ -362,7 +383,7 @@ impl core::fmt::Display for ClosureReason {
impl_writeable_tlv_based_enum_upgradable!(ClosureReason,
(0, CounterpartyForceClosed) => { (1, peer_msg, required) },
(1, FundingTimedOut) => {},
(2, HolderForceClosed) => {},
(2, HolderForceClosed) => { (1, broadcasted_latest_txn, option) },
(6, CommitmentTxConfirmed) => {},
(4, LegacyCooperativeClosure) => {},
(8, ProcessingError) => { (1, err, required) },
2 changes: 1 addition & 1 deletion lightning/src/ln/async_signer_tests.rs
@@ -373,7 +373,7 @@ fn do_test_async_holder_signatures(anchors: bool, remote_commitment: bool) {
nodes[1].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
check_added_monitors(&nodes[1], 1);
check_closed_broadcast(&nodes[1], 1, true);
check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[0].node.get_our_node_id()], 100_000);
check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[0].node.get_our_node_id()], 100_000);
} else {
// We'll connect blocks until the sender has to go onchain to time out the HTLC.
connect_blocks(&nodes[0], TEST_FINAL_CLTV + LATENCY_GRACE_PERIOD_BLOCKS + 1);
10 changes: 5 additions & 5 deletions lightning/src/ln/chanmon_update_fail_tests.rs
@@ -216,7 +216,7 @@ fn do_test_simple_monitor_temporary_update_fail(disconnect: bool) {
// PaymentPathFailed event

assert_eq!(nodes[0].node.list_channels().len(), 0);
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
}

#[test]
@@ -3232,13 +3232,13 @@ fn do_test_durable_preimages_on_closed_channel(close_chans_before_reload: bool,
chanmon_cfgs[1].persister.set_update_ret(ChannelMonitorUpdateStatus::InProgress);
nodes[1].node.force_close_broadcasting_latest_txn(&chan_id_bc, &nodes[2].node.get_our_node_id(), error_message.to_string()).unwrap();
check_closed_broadcast(&nodes[1], 1, true);
check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[2].node.get_our_node_id()], 100000);
check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[2].node.get_our_node_id()], 100000);
}

chanmon_cfgs[1].persister.set_update_ret(ChannelMonitorUpdateStatus::InProgress);
nodes[1].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
check_closed_broadcast(&nodes[1], 1, true);
check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[0].node.get_our_node_id()], 100000);
check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[0].node.get_our_node_id()], 100000);
}

// Now reload node B
@@ -3260,7 +3260,7 @@ fn do_test_durable_preimages_on_closed_channel(close_chans_before_reload: bool,
let error_message = "Channel force-closed";

nodes[0].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100000);
check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[1].node.get_our_node_id()], 100000);
let as_closing_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
assert_eq!(as_closing_tx.len(), 1);

@@ -3403,7 +3403,7 @@ fn do_test_reload_mon_update_completion_actions(close_during_reload: bool) {
nodes[0].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
check_added_monitors!(nodes[0], 1);
check_closed_broadcast!(nodes[0], true);
check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100_000);
check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[1].node.get_our_node_id()], 100_000);
let as_closing_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
mine_transaction_without_consistency_checks(&nodes[1], &as_closing_tx[0]);
}
14 changes: 7 additions & 7 deletions lightning/src/ln/channelmanager.rs
@@ -3153,7 +3153,7 @@ where
}
} else {
let mut chan_phase = remove_channel_phase!(self, chan_phase_entry);
shutdown_result = Some(chan_phase.context_mut().force_shutdown(false, ClosureReason::HolderForceClosed));
shutdown_result = Some(chan_phase.context_mut().force_shutdown(false, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) }));
}
},
hash_map::Entry::Vacant(_) => {
@@ -3322,7 +3322,7 @@ where
let closure_reason = if let Some(peer_msg) = peer_msg {
ClosureReason::CounterpartyForceClosed { peer_msg: UntrustedString(peer_msg.to_string()) }
} else {
ClosureReason::HolderForceClosed
ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(broadcast) }
};
let logger = WithContext::from(&self.logger, Some(*peer_node_id), Some(*channel_id), None);
if let hash_map::Entry::Occupied(chan_phase_entry) = peer_state.channel_by_id.entry(channel_id.clone()) {
@@ -5497,7 +5497,7 @@ where
log_error!(logger,
"Force-closing pending channel with ID {} for not establishing in a timely manner", chan_id);
update_maps_on_chan_removal!(self, &context);
shutdown_channels.push(context.force_shutdown(false, ClosureReason::HolderForceClosed));
shutdown_channels.push(context.force_shutdown(false, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) }));
pending_msg_events.push(MessageSendEvent::HandleError {
node_id: counterparty_node_id,
action: msgs::ErrorAction::SendErrorMessage {
@@ -7984,7 +7984,7 @@ where
let reason = if let MonitorEvent::HolderForceClosedWithInfo { reason, .. } = monitor_event {
reason
} else {
ClosureReason::HolderForceClosed
ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }
};
failed_channels.push(chan.context.force_shutdown(false, reason.clone()));
if let Ok(update) = self.get_channel_update_for_broadcast(&chan) {
@@ -12458,7 +12458,7 @@ mod tests {

nodes[0].node.force_close_channel_with_peer(&chan.2, &nodes[1].node.get_our_node_id(), None, true).unwrap();
check_added_monitors!(nodes[0], 1);
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);

// Confirm that the channel_update was not sent immediately to node[1] but was cached.
let node_1_events = nodes[1].node.get_and_clear_pending_msg_events();
@@ -12517,7 +12517,7 @@ mod tests {
nodes[0].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
check_closed_broadcast!(nodes[0], true);
check_added_monitors!(nodes[0], 1);
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);

{
// Assert that nodes[1] is awaiting removal for nodes[0] once nodes[1] has been
@@ -13254,7 +13254,7 @@ mod tests {
nodes[0].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
check_closed_broadcast(&nodes[0], 1, true);
check_added_monitors(&nodes[0], 1);
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
{
let txn = nodes[0].tx_broadcaster.txn_broadcast();
assert_eq!(txn.len(), 1);