Skip to content

Commit 97c13b0

Browse files
committed
Parameterize disconnection and error message on manual force close
1 parent 7326334 commit 97c13b0

12 files changed

+75
-59
lines changed

lightning-persister/src/fs_store.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -445,7 +445,7 @@ mod tests {
445445
let node_chanmgrs = create_node_chanmgrs(2, &node_cfgs, &[None, None]);
446446
let nodes = create_network(2, &node_cfgs, &node_chanmgrs);
447447
let chan = create_announced_chan_between_nodes(&nodes, 0, 1);
448-
nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id()).unwrap();
448+
nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id(), true, None).unwrap();
449449
check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
450450
let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
451451

lightning-persister/src/test_utils.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -104,7 +104,7 @@ pub(crate) fn do_test_store<K: KVStore>(store_0: &K, store_1: &K) {
104104

105105
// Force close because cooperative close doesn't result in any persisted
106106
// updates.
107-
nodes[0].node.force_close_broadcasting_latest_txn(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id()).unwrap();
107+
nodes[0].node.force_close_broadcasting_latest_txn(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id(), true, None).unwrap();
108108
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
109109
check_closed_broadcast!(nodes[0], true);
110110
check_added_monitors!(nodes[0], 1);

lightning/src/ln/blinded_payment_tests.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -498,7 +498,7 @@ fn do_forward_fail_in_process_pending_htlc_fwds(check: ProcessPendingHTLCsCheck,
498498
ProcessPendingHTLCsCheck::FwdChannelClosed => {
499499
// Force close the next-hop channel so when we go to forward in process_pending_htlc_forwards,
500500
// the intro node will error backwards.
501-
$curr_node.node.force_close_broadcasting_latest_txn(&$failed_chan_id, &$next_node.node.get_our_node_id()).unwrap();
501+
$curr_node.node.force_close_broadcasting_latest_txn(&$failed_chan_id, &$next_node.node.get_our_node_id(), true, None).unwrap();
502502
let events = $curr_node.node.get_and_clear_pending_events();
503503
match events[0] {
504504
crate::events::Event::PendingHTLCsForwardable { .. } => {},

lightning/src/ln/chanmon_update_fail_tests.rs

+5-5
Original file line numberDiff line numberDiff line change
@@ -207,7 +207,7 @@ fn do_test_simple_monitor_temporary_update_fail(disconnect: bool) {
207207
}
208208

209209
// ...and make sure we can force-close a frozen channel
210-
nodes[0].node.force_close_broadcasting_latest_txn(&channel_id, &nodes[1].node.get_our_node_id()).unwrap();
210+
nodes[0].node.force_close_broadcasting_latest_txn(&channel_id, &nodes[1].node.get_our_node_id(), true, None).unwrap();
211211
check_added_monitors!(nodes[0], 1);
212212
check_closed_broadcast!(nodes[0], true);
213213

@@ -3228,13 +3228,13 @@ fn do_test_durable_preimages_on_closed_channel(close_chans_before_reload: bool,
32283228
if close_chans_before_reload {
32293229
if !close_only_a {
32303230
chanmon_cfgs[1].persister.set_update_ret(ChannelMonitorUpdateStatus::InProgress);
3231-
nodes[1].node.force_close_broadcasting_latest_txn(&chan_id_bc, &nodes[2].node.get_our_node_id()).unwrap();
3231+
nodes[1].node.force_close_broadcasting_latest_txn(&chan_id_bc, &nodes[2].node.get_our_node_id(), true, None).unwrap();
32323232
check_closed_broadcast(&nodes[1], 1, true);
32333233
check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[2].node.get_our_node_id()], 100000);
32343234
}
32353235

32363236
chanmon_cfgs[1].persister.set_update_ret(ChannelMonitorUpdateStatus::InProgress);
3237-
nodes[1].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[0].node.get_our_node_id()).unwrap();
3237+
nodes[1].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[0].node.get_our_node_id(), true, None).unwrap();
32383238
check_closed_broadcast(&nodes[1], 1, true);
32393239
check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[0].node.get_our_node_id()], 100000);
32403240
}
@@ -3256,7 +3256,7 @@ fn do_test_durable_preimages_on_closed_channel(close_chans_before_reload: bool,
32563256
}
32573257
}
32583258

3259-
nodes[0].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[1].node.get_our_node_id()).unwrap();
3259+
nodes[0].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[1].node.get_our_node_id(), true, None).unwrap();
32603260
check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100000);
32613261
let as_closing_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
32623262
assert_eq!(as_closing_tx.len(), 1);
@@ -3396,7 +3396,7 @@ fn do_test_reload_mon_update_completion_actions(close_during_reload: bool) {
33963396
if close_during_reload {
33973397
// Test that we still free the B<->C channel if the A<->B channel closed while we reloaded
33983398
// (as learned about during the on-reload block connection).
3399-
nodes[0].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[1].node.get_our_node_id()).unwrap();
3399+
nodes[0].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[1].node.get_our_node_id(), true, None).unwrap();
34003400
check_added_monitors!(nodes[0], 1);
34013401
check_closed_broadcast!(nodes[0], true);
34023402
check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100_000);

lightning/src/ln/channelmanager.rs

+37-21
Original file line numberDiff line numberDiff line change
@@ -1360,7 +1360,8 @@ where
13601360
/// Event::OpenChannelRequest { temporary_channel_id, counterparty_node_id, .. } => {
13611361
/// if !is_trusted(counterparty_node_id) {
13621362
/// match channel_manager.force_close_without_broadcasting_txn(
1363-
/// &temporary_channel_id, &counterparty_node_id
1363+
/// &temporary_channel_id, &counterparty_node_id, false, Some("Untrusted
1364+
/// counterparty node id")
13641365
/// ) {
13651366
/// Ok(()) => println!("Rejecting channel {}", temporary_channel_id),
13661367
/// Err(e) => println!("Error rejecting channel {}: {:?}", temporary_channel_id, e),
@@ -3700,19 +3701,28 @@ where
37003701
Ok(counterparty_node_id)
37013702
}
37023703

3703-
fn force_close_sending_error(&self, channel_id: &ChannelId, counterparty_node_id: &PublicKey, broadcast: bool) -> Result<(), APIError> {
3704+
fn force_close_sending_error(
3705+
&self, channel_id: &ChannelId, counterparty_node_id: &PublicKey, broadcast: bool, disconnect_peer: bool, err_msg: Option<&str>
3706+
) -> Result<(), APIError> {
37043707
let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(self);
37053708
match self.force_close_channel_with_peer(channel_id, counterparty_node_id, None, broadcast) {
37063709
Ok(counterparty_node_id) => {
37073710
let per_peer_state = self.per_peer_state.read().unwrap();
37083711
if let Some(peer_state_mutex) = per_peer_state.get(&counterparty_node_id) {
37093712
let mut peer_state = peer_state_mutex.lock().unwrap();
3713+
let msg = msgs::ErrorMessage {
3714+
channel_id: *channel_id,
3715+
data: err_msg.unwrap_or("Channel force-closed").to_owned()
3716+
};
3717+
let action = if disconnect_peer {
3718+
msgs::ErrorAction::DisconnectPeer { msg: Some(msg) }
3719+
} else {
3720+
msgs::ErrorAction::SendErrorMessage { msg }
3721+
};
37103722
peer_state.pending_msg_events.push(
37113723
events::MessageSendEvent::HandleError {
37123724
node_id: counterparty_node_id,
3713-
action: msgs::ErrorAction::DisconnectPeer {
3714-
msg: Some(msgs::ErrorMessage { channel_id: *channel_id, data: "Channel force-closed".to_owned() })
3715-
},
3725+
action,
37163726
}
37173727
);
37183728
}
@@ -3726,35 +3736,41 @@ where
37263736
/// rejecting new HTLCs on the given channel. Fails if `channel_id` is unknown to
37273737
/// the manager, or if the `counterparty_node_id` isn't the counterparty of the corresponding
37283738
/// channel.
3729-
pub fn force_close_broadcasting_latest_txn(&self, channel_id: &ChannelId, counterparty_node_id: &PublicKey)
3730-
-> Result<(), APIError> {
3731-
self.force_close_sending_error(channel_id, counterparty_node_id, true)
3739+
///
3740+
/// By default, this will send a generic force close error message to the peer if `err_msg`
3741+
/// is not specified. If `disconnect_peer` is set to true, the peer will also be disconnected.
3742+
pub fn force_close_broadcasting_latest_txn(
3743+
&self, channel_id: &ChannelId, counterparty_node_id: &PublicKey, disconnect_peer: bool, err_msg: Option<&str>
3744+
) -> Result<(), APIError> {
3745+
self.force_close_sending_error(channel_id, counterparty_node_id, true, disconnect_peer, err_msg)
37323746
}
37333747

37343748
/// Force closes a channel, rejecting new HTLCs on the given channel but skips broadcasting
37353749
/// the latest local transaction(s). Fails if `channel_id` is unknown to the manager, or if the
37363750
/// `counterparty_node_id` isn't the counterparty of the corresponding channel.
37373751
///
37383752
/// You can always broadcast the latest local transaction(s) via
3739-
/// [`ChannelMonitor::broadcast_latest_holder_commitment_txn`].
3740-
pub fn force_close_without_broadcasting_txn(&self, channel_id: &ChannelId, counterparty_node_id: &PublicKey)
3741-
-> Result<(), APIError> {
3742-
self.force_close_sending_error(channel_id, counterparty_node_id, false)
3753+
/// [`ChannelMonitor::broadcast_latest_holder_commitment_txn`]. Similarly, you can specify
3754+
/// whether to disconnect the peer, as well as a custom error message to send to the peer.
3755+
pub fn force_close_without_broadcasting_txn(
3756+
&self, channel_id: &ChannelId, counterparty_node_id: &PublicKey, disconnect_peer: bool, err_msg: Option<&str>
3757+
) -> Result<(), APIError> {
3758+
self.force_close_sending_error(channel_id, counterparty_node_id, false, disconnect_peer, err_msg)
37433759
}
37443760

37453761
/// Force close all channels, immediately broadcasting the latest local commitment transaction
37463762
/// for each to the chain and rejecting new HTLCs on each.
37473763
pub fn force_close_all_channels_broadcasting_latest_txn(&self) {
37483764
for chan in self.list_channels() {
3749-
let _ = self.force_close_broadcasting_latest_txn(&chan.channel_id, &chan.counterparty.node_id);
3765+
let _ = self.force_close_broadcasting_latest_txn(&chan.channel_id, &chan.counterparty.node_id, true, None);
37503766
}
37513767
}
37523768

37533769
/// Force close all channels rejecting new HTLCs on each but without broadcasting the latest
37543770
/// local transaction(s).
37553771
pub fn force_close_all_channels_without_broadcasting_txn(&self) {
37563772
for chan in self.list_channels() {
3757-
let _ = self.force_close_without_broadcasting_txn(&chan.channel_id, &chan.counterparty.node_id);
3773+
let _ = self.force_close_without_broadcasting_txn(&chan.channel_id, &chan.counterparty.node_id, true, None);
37583774
}
37593775
}
37603776

@@ -12970,7 +12986,7 @@ mod tests {
1297012986
nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id());
1297112987
nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id());
1297212988

12973-
nodes[0].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[1].node.get_our_node_id()).unwrap();
12989+
nodes[0].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[1].node.get_our_node_id(), true, None).unwrap();
1297412990
check_closed_broadcast!(nodes[0], true);
1297512991
check_added_monitors!(nodes[0], 1);
1297612992
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
@@ -13195,9 +13211,9 @@ mod tests {
1319513211

1319613212
check_unkown_peer_error(nodes[0].node.close_channel(&channel_id, &unkown_public_key), unkown_public_key);
1319713213

13198-
check_unkown_peer_error(nodes[0].node.force_close_broadcasting_latest_txn(&channel_id, &unkown_public_key), unkown_public_key);
13214+
check_unkown_peer_error(nodes[0].node.force_close_broadcasting_latest_txn(&channel_id, &unkown_public_key, true, None), unkown_public_key);
1319913215

13200-
check_unkown_peer_error(nodes[0].node.force_close_without_broadcasting_txn(&channel_id, &unkown_public_key), unkown_public_key);
13216+
check_unkown_peer_error(nodes[0].node.force_close_without_broadcasting_txn(&channel_id, &unkown_public_key, true, None), unkown_public_key);
1320113217

1320213218
check_unkown_peer_error(nodes[0].node.forward_intercepted_htlc(intercept_id, &channel_id, unkown_public_key, 1_000_000), unkown_public_key);
1320313219

@@ -13225,9 +13241,9 @@ mod tests {
1322513241

1322613242
check_channel_unavailable_error(nodes[0].node.close_channel(&channel_id, &counterparty_node_id), channel_id, counterparty_node_id);
1322713243

13228-
check_channel_unavailable_error(nodes[0].node.force_close_broadcasting_latest_txn(&channel_id, &counterparty_node_id), channel_id, counterparty_node_id);
13244+
check_channel_unavailable_error(nodes[0].node.force_close_broadcasting_latest_txn(&channel_id, &counterparty_node_id, true, None), channel_id, counterparty_node_id);
1322913245

13230-
check_channel_unavailable_error(nodes[0].node.force_close_without_broadcasting_txn(&channel_id, &counterparty_node_id), channel_id, counterparty_node_id);
13246+
check_channel_unavailable_error(nodes[0].node.force_close_without_broadcasting_txn(&channel_id, &counterparty_node_id, true, None), channel_id, counterparty_node_id);
1323113247

1323213248
check_channel_unavailable_error(nodes[0].node.forward_intercepted_htlc(InterceptId([0; 32]), &channel_id, counterparty_node_id, 1_000_000), channel_id, counterparty_node_id);
1323313249

@@ -13590,7 +13606,7 @@ mod tests {
1359013606
let events = nodes[1].node.get_and_clear_pending_events();
1359113607
match events[0] {
1359213608
Event::OpenChannelRequest { temporary_channel_id, .. } => {
13593-
nodes[1].node.force_close_broadcasting_latest_txn(&temporary_channel_id, &nodes[0].node.get_our_node_id()).unwrap();
13609+
nodes[1].node.force_close_broadcasting_latest_txn(&temporary_channel_id, &nodes[0].node.get_our_node_id(), true, None).unwrap();
1359413610
}
1359513611
_ => panic!("Unexpected event"),
1359613612
}
@@ -13703,7 +13719,7 @@ mod tests {
1370313719
let (_, _, chan_id, funding_tx) = create_announced_chan_between_nodes(&nodes, 0, 1);
1370413720
nodes[0].node.peer_disconnected(&nodes[1].node.get_our_node_id());
1370513721
nodes[1].node.peer_disconnected(&nodes[0].node.get_our_node_id());
13706-
nodes[0].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[1].node.get_our_node_id()).unwrap();
13722+
nodes[0].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[1].node.get_our_node_id(), true, None).unwrap();
1370713723
check_closed_broadcast(&nodes[0], 1, true);
1370813724
check_added_monitors(&nodes[0], 1);
1370913725
check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);

0 commit comments

Comments (0)