Commit 1bc8529

Updated ClosureReason::HolderForceClosed with broadcasted txn.
1 parent 2701bc5 commit 1bc8529

14 files changed: +61, -50 lines

lightning-persister/src/fs_store.rs (+2, -2)

@@ -447,7 +447,7 @@ mod tests {
 let chan = create_announced_chan_between_nodes(&nodes, 0, 1);
 let error_message = "Channel force-closed";
 nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
-check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
+check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[0].node.get_our_node_id()], 100000);
 let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();

 // Set the store's directory to read-only, which should result in
@@ -485,7 +485,7 @@
 let chan = create_announced_chan_between_nodes(&nodes, 0, 1);
 let error_message = "Channel force-closed";
 nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
-check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
+check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[0].node.get_our_node_id()], 100000);
 let mut added_monitors = nodes[1].chain_monitor.added_monitors.lock().unwrap();
 let update_map = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap();
 let update_id = update_map.get(&added_monitors[0].1.channel_id()).unwrap();

lightning-persister/src/test_utils.rs (+1, -1)

@@ -106,7 +106,7 @@ pub(crate) fn do_test_store<K: KVStore>(store_0: &K, store_1: &K) {
 // updates.
 let error_message = "Channel force-closed";
 nodes[0].node.force_close_broadcasting_latest_txn(&nodes[0].node.list_channels()[0].channel_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
-check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
 check_closed_broadcast!(nodes[0], true);
 check_added_monitors!(nodes[0], 1);

lightning/src/chain/channelmonitor.rs (+1, -1)

@@ -2873,7 +2873,7 @@ impl<Signer: EcdsaChannelSigner> ChannelMonitorImpl<Signer> {
 F::Target: FeeEstimator,
 L::Target: Logger,
 {
-let (claimable_outpoints, _) = self.generate_claimable_outpoints_and_watch_outputs(ClosureReason::HolderForceClosed);
+let (claimable_outpoints, _) = self.generate_claimable_outpoints_and_watch_outputs(ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) });
 self.onchain_tx_handler.update_claims_view_from_requests(
 claimable_outpoints, self.best_block.height, self.best_block.height, broadcaster,
 fee_estimator, logger

lightning/src/events/mod.rs (+14, -3)

@@ -278,7 +278,11 @@ pub enum ClosureReason {
 /// Closure generated from [`ChannelManager::force_close_channel`], called by the user.
 ///
 /// [`ChannelManager::force_close_channel`]: crate::ln::channelmanager::ChannelManager::force_close_channel.
-HolderForceClosed,
+HolderForceClosed {
+	/// Whether or not the latest transaction was broadcasted when the channel was force
+	/// closed.
+	broadcasted_latest_txn: Option<bool>
+},
 /// The channel was closed after negotiating a cooperative close and we've now broadcasted
 /// the cooperative close transaction. Note the shutdown may have been initiated by us.
 ///
@@ -340,7 +344,14 @@ impl core::fmt::Display for ClosureReason {
 ClosureReason::CounterpartyForceClosed { peer_msg } => {
 f.write_fmt(format_args!("counterparty force-closed with message: {}", peer_msg))
 },
-ClosureReason::HolderForceClosed => f.write_str("user force-closed the channel"),
+ClosureReason::HolderForceClosed { broadcasted_latest_txn } => {
+	f.write_str("user force-closed the channel")?;
+	if let Some(broadcasted) = broadcasted_latest_txn {
+		write!(f, " and {} the latest transaction", if *broadcasted { "broadcasted" } else { "did not broadcast" })
+	} else {
+		Ok(())
+	}
+},
 ClosureReason::LegacyCooperativeClosure => f.write_str("the channel was cooperatively closed"),
 ClosureReason::CounterpartyInitiatedCooperativeClosure => f.write_str("the channel was cooperatively closed by our peer"),
 ClosureReason::LocallyInitiatedCooperativeClosure => f.write_str("the channel was cooperatively closed by us"),
@@ -362,7 +373,7 @@ impl core::fmt::Display for ClosureReason {
 impl_writeable_tlv_based_enum_upgradable!(ClosureReason,
 (0, CounterpartyForceClosed) => { (1, peer_msg, required) },
 (1, FundingTimedOut) => {},
-(2, HolderForceClosed) => {},
+(2, HolderForceClosed) => { (1, broadcasted_latest_txn, option) },
 (6, CommitmentTxConfirmed) => {},
 (4, LegacyCooperativeClosure) => {},
 (8, ProcessingError) => { (1, err, required) },
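
The user-visible effect of these hunks is that ClosureReason::HolderForceClosed is now a struct variant. A minimal sketch of how downstream event-handling code might branch on the new field, assuming the lightning::events::ClosureReason path and the variant shape shown above; the helper name and message strings are illustrative, not part of this commit:

use lightning::events::ClosureReason;

fn describe_closure(reason: &ClosureReason) -> String {
	match reason {
		// The new field says whether our latest commitment transaction was actually
		// broadcast when the channel was force-closed.
		ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) } =>
			"we force-closed and broadcast our latest commitment transaction".to_string(),
		ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) } =>
			"we force-closed without broadcasting a commitment transaction".to_string(),
		// `None` is what a reason written by an older LDK deserializes to, since the
		// field is stored as the optional TLV added above.
		ClosureReason::HolderForceClosed { broadcasted_latest_txn: None } =>
			"we force-closed (broadcast status unknown; recorded by an older version)".to_string(),
		// All other reasons already implement Display, as updated above.
		other => format!("channel closed: {}", other),
	}
}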

lightning/src/ln/chanmon_update_fail_tests.rs (+5, -5)

@@ -216,7 +216,7 @@ fn do_test_simple_monitor_temporary_update_fail(disconnect: bool) {
 // PaymentPathFailed event

 assert_eq!(nodes[0].node.list_channels().len(), 0);
-check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
 }

 #[test]
@@ -3232,13 +3232,13 @@ fn do_test_durable_preimages_on_closed_channel(close_chans_before_reload: bool,
 chanmon_cfgs[1].persister.set_update_ret(ChannelMonitorUpdateStatus::InProgress);
 nodes[1].node.force_close_broadcasting_latest_txn(&chan_id_bc, &nodes[2].node.get_our_node_id(), error_message.to_string()).unwrap();
 check_closed_broadcast(&nodes[1], 1, true);
-check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[2].node.get_our_node_id()], 100000);
+check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[2].node.get_our_node_id()], 100000);
 }

 chanmon_cfgs[1].persister.set_update_ret(ChannelMonitorUpdateStatus::InProgress);
 nodes[1].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
 check_closed_broadcast(&nodes[1], 1, true);
-check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[0].node.get_our_node_id()], 100000);
+check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[0].node.get_our_node_id()], 100000);
 }

 // Now reload node B
@@ -3260,7 +3260,7 @@
 let error_message = "Channel force-closed";

 nodes[0].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
-check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100000);
+check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[1].node.get_our_node_id()], 100000);
 let as_closing_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
 assert_eq!(as_closing_tx.len(), 1);

@@ -3403,7 +3403,7 @@ fn do_test_reload_mon_update_completion_actions(close_during_reload: bool) {
 nodes[0].node.force_close_broadcasting_latest_txn(&chan_id_ab, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
 check_added_monitors!(nodes[0], 1);
 check_closed_broadcast!(nodes[0], true);
-check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100_000);
+check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[nodes[1].node.get_our_node_id()], 100_000);
 let as_closing_tx = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
 mine_transaction_without_consistency_checks(&nodes[1], &as_closing_tx[0]);
 }

lightning/src/ln/channelmanager.rs (+7, -7)

@@ -3153,7 +3153,7 @@ where
 }
 } else {
 let mut chan_phase = remove_channel_phase!(self, chan_phase_entry);
-shutdown_result = Some(chan_phase.context_mut().force_shutdown(false, ClosureReason::HolderForceClosed));
+shutdown_result = Some(chan_phase.context_mut().force_shutdown(false, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) }));
 }
 },
 hash_map::Entry::Vacant(_) => {
@@ -3322,7 +3322,7 @@ where
 let closure_reason = if let Some(peer_msg) = peer_msg {
 ClosureReason::CounterpartyForceClosed { peer_msg: UntrustedString(peer_msg.to_string()) }
 } else {
-ClosureReason::HolderForceClosed
+ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(broadcast) }
 };
 let logger = WithContext::from(&self.logger, Some(*peer_node_id), Some(*channel_id), None);
 if let hash_map::Entry::Occupied(chan_phase_entry) = peer_state.channel_by_id.entry(channel_id.clone()) {
@@ -5497,7 +5497,7 @@ where
 log_error!(logger,
 "Force-closing pending channel with ID {} for not establishing in a timely manner", chan_id);
 update_maps_on_chan_removal!(self, &context);
-shutdown_channels.push(context.force_shutdown(false, ClosureReason::HolderForceClosed));
+shutdown_channels.push(context.force_shutdown(false, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) }));
 pending_msg_events.push(MessageSendEvent::HandleError {
 node_id: counterparty_node_id,
 action: msgs::ErrorAction::SendErrorMessage {
@@ -7984,7 +7984,7 @@ where
 let reason = if let MonitorEvent::HolderForceClosedWithInfo { reason, .. } = monitor_event {
 reason
 } else {
-ClosureReason::HolderForceClosed
+ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }
 };
 failed_channels.push(chan.context.force_shutdown(false, reason.clone()));
 if let Ok(update) = self.get_channel_update_for_broadcast(&chan) {
@@ -12458,7 +12458,7 @@ mod tests {

 nodes[0].node.force_close_channel_with_peer(&chan.2, &nodes[1].node.get_our_node_id(), None, true).unwrap();
 check_added_monitors!(nodes[0], 1);
-check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);

 // Confirm that the channel_update was not sent immediately to node[1] but was cached.
 let node_1_events = nodes[1].node.get_and_clear_pending_msg_events();
@@ -12517,7 +12517,7 @@ mod tests {
 nodes[0].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
 check_closed_broadcast!(nodes[0], true);
 check_added_monitors!(nodes[0], 1);
-check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);

 {
 // Assert that nodes[1] is awaiting removal for nodes[0] once nodes[1] has been
@@ -13254,7 +13254,7 @@ mod tests {
 nodes[0].node.force_close_broadcasting_latest_txn(&chan_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
 check_closed_broadcast(&nodes[0], 1, true);
 check_added_monitors(&nodes[0], 1);
-check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
 {
 let txn = nodes[0].tx_broadcaster.txn_broadcast();
 assert_eq!(txn.len(), 1);
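
Taken together, these channelmanager.rs hunks give the field its meaning: paths that broadcast the holder's latest commitment transaction (force_close_broadcasting_latest_txn, monitor-driven force-closes) now report Some(true), while paths that only drop an unfunded or never-established channel report Some(false). A minimal sketch, again assuming the lightning::events::ClosureReason path, of how a consumer might use that distinction to decide whether an on-chain sweep should follow; the helper and its policy are illustrative assumptions, not LDK API:

use lightning::events::ClosureReason;

/// Returns true when the closure implies a commitment transaction was (or will be)
/// confirmed on-chain, so delayed outputs will later need to be claimed.
fn expects_onchain_sweep(reason: &ClosureReason) -> bool {
	match reason {
		ClosureReason::HolderForceClosed { broadcasted_latest_txn } => {
			// Treat the legacy `None` case conservatively as "may have broadcast".
			broadcasted_latest_txn.unwrap_or(true)
		},
		// A confirmed commitment transaction clearly went on-chain.
		ClosureReason::CommitmentTxConfirmed => true,
		// Other reasons are deliberately simplified for this sketch.
		_ => false,
	}
}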

lightning/src/ln/functional_tests.rs (+13, -13)

@@ -2307,7 +2307,7 @@ fn channel_monitor_network_test() {
 nodes[1].node.force_close_broadcasting_latest_txn(&chan_1.2, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
 check_added_monitors!(nodes[1], 1);
 check_closed_broadcast!(nodes[1], true);
-check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
+check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[0].node.get_our_node_id()], 100000);
 {
 let mut node_txn = test_txn_broadcast(&nodes[1], &chan_1, None, HTLCType::NONE);
 assert_eq!(node_txn.len(), 1);
@@ -2345,7 +2345,7 @@
 check_closed_broadcast!(nodes[2], true);
 assert_eq!(nodes[1].node.list_channels().len(), 0);
 assert_eq!(nodes[2].node.list_channels().len(), 1);
-check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[2].node.get_our_node_id()], 100000);
+check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[2].node.get_our_node_id()], 100000);
 check_closed_event!(nodes[2], 1, ClosureReason::CommitmentTxConfirmed, [nodes[1].node.get_our_node_id()], 100000);

 macro_rules! claim_funds {
@@ -2391,7 +2391,7 @@
 check_closed_broadcast!(nodes[3], true);
 assert_eq!(nodes[2].node.list_channels().len(), 0);
 assert_eq!(nodes[3].node.list_channels().len(), 1);
-check_closed_event!(nodes[2], 1, ClosureReason::HolderForceClosed, [nodes[3].node.get_our_node_id()], 100000);
+check_closed_event!(nodes[2], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[3].node.get_our_node_id()], 100000);
 check_closed_event!(nodes[3], 1, ClosureReason::CommitmentTxConfirmed, [nodes[2].node.get_our_node_id()], 100000);

 // Drop the ChannelMonitor for the previous channel to avoid it broadcasting transactions and
@@ -3605,7 +3605,7 @@ fn test_htlc_ignore_latest_remote_commitment() {
 connect_blocks(&nodes[0], TEST_FINAL_CLTV + LATENCY_GRACE_PERIOD_BLOCKS + 1);
 check_closed_broadcast!(nodes[0], true);
 check_added_monitors!(nodes[0], 1);
-check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);

 let node_txn = nodes[0].tx_broadcaster.unique_txn_broadcast();
 assert_eq!(node_txn.len(), 2);
@@ -3668,7 +3668,7 @@ fn test_force_close_fail_back() {
 nodes[2].node.force_close_broadcasting_latest_txn(&payment_event.commitment_msg.channel_id, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
 check_closed_broadcast!(nodes[2], true);
 check_added_monitors!(nodes[2], 1);
-check_closed_event!(nodes[2], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+check_closed_event!(nodes[2], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);
 let commitment_tx = {
 let mut node_txn = nodes[2].tx_broadcaster.txn_broadcasted.lock().unwrap();
 // Note that we don't bother broadcasting the HTLC-Success transaction here as we don't
@@ -4550,7 +4550,7 @@ fn test_claim_sizeable_push_msat() {
 nodes[1].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[0].node.get_our_node_id(), error_message.to_string()).unwrap();
 check_closed_broadcast!(nodes[1], true);
 check_added_monitors!(nodes[1], 1);
-check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[0].node.get_our_node_id()], 100000);
+check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[0].node.get_our_node_id()], 100000);
 let node_txn = nodes[1].tx_broadcaster.txn_broadcasted.lock().unwrap().clone();
 assert_eq!(node_txn.len(), 1);
 check_spends!(node_txn[0], chan.3);
@@ -4580,7 +4580,7 @@ fn test_claim_on_remote_sizeable_push_msat() {
 nodes[0].node.force_close_broadcasting_latest_txn(&chan.2, &nodes[1].node.get_our_node_id(), error_message.to_string()).unwrap();
 check_closed_broadcast!(nodes[0], true);
 check_added_monitors!(nodes[0], 1);
-check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[1].node.get_our_node_id()], 100000);

 let node_txn = nodes[0].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
 assert_eq!(node_txn.len(), 1);
@@ -8856,7 +8856,7 @@ fn do_test_onchain_htlc_settlement_after_close(broadcast_alice: bool, go_onchain
 nodes[force_closing_node].node.force_close_broadcasting_latest_txn(&chan_ab.2, &nodes[counterparty_node].node.get_our_node_id(), error_message.to_string()).unwrap();
 check_closed_broadcast!(nodes[force_closing_node], true);
 check_added_monitors!(nodes[force_closing_node], 1);
-check_closed_event!(nodes[force_closing_node], 1, ClosureReason::HolderForceClosed, [nodes[counterparty_node].node.get_our_node_id()], 100000);
+check_closed_event!(nodes[force_closing_node], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[counterparty_node].node.get_our_node_id()], 100000);
 if go_onchain_before_fulfill {
 let txn_to_broadcast = match broadcast_alice {
 true => alice_txn.clone(),
@@ -9589,7 +9589,7 @@ fn do_test_tx_confirmed_skipping_blocks_immediate_broadcast(test_height_before_t
 let error_message = "Channel force-closed";
 nodes[1].node.force_close_broadcasting_latest_txn(&channel_id, &nodes[2].node.get_our_node_id(), error_message.to_string()).unwrap();
 check_closed_broadcast!(nodes[1], true);
-check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed, [nodes[2].node.get_our_node_id()], 100000);
+check_closed_event!(nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, [nodes[2].node.get_our_node_id()], 100000);
 check_added_monitors!(nodes[1], 1);
 let node_txn = nodes[1].tx_broadcaster.txn_broadcasted.lock().unwrap().split_off(0);
 assert_eq!(node_txn.len(), 1);
@@ -10647,7 +10647,7 @@ fn test_remove_expired_outbound_unfunded_channels() {
 },
 _ => panic!("Unexpected event"),
 }
-check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed, false, &[nodes[1].node.get_our_node_id()], 100000);
+check_closed_event(&nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) }, false, &[nodes[1].node.get_our_node_id()], 100000);
 }

 #[test]
@@ -10698,7 +10698,7 @@ fn test_remove_expired_inbound_unfunded_channels() {
 },
 _ => panic!("Unexpected event"),
 }
-check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed, false, &[nodes[0].node.get_our_node_id()], 100000);
+check_closed_event(&nodes[1], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) }, false, &[nodes[0].node.get_our_node_id()], 100000);
 }

 #[test]
@@ -10732,7 +10732,7 @@ fn test_channel_close_when_not_timely_accepted() {

 // Since we disconnected from peer and did not connect back within time,
 // we should have forced-closed the channel by now.
-check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed, [nodes[1].node.get_our_node_id()], 100000);
+check_closed_event!(nodes[0], 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(false) }, [nodes[1].node.get_our_node_id()], 100000);
 assert_eq!(nodes[0].node.list_channels().len(), 0);

 {
@@ -11140,7 +11140,7 @@ fn do_test_funding_and_commitment_tx_confirm_same_block(confirm_remote_commitmen
 _ => panic!("Unexpected event"),
 }
 check_added_monitors(closing_node, 1);
-check_closed_event(closing_node, 1, ClosureReason::HolderForceClosed, false, &[other_node.node.get_our_node_id()], 1_000_000);
+check_closed_event(closing_node, 1, ClosureReason::HolderForceClosed { broadcasted_latest_txn: Some(true) }, false, &[other_node.node.get_our_node_id()], 1_000_000);

 let commitment_tx = {
 let mut txn = closing_node.tx_broadcaster.txn_broadcast();
