Skip to content

Commit 8f6fd1e

Browse files
committed
fixup: cover missed logging sites plus some clean-up
1 parent 4cbaa80 commit 8f6fd1e

File tree

1 file changed

+35
-17
lines changed

1 file changed

+35
-17
lines changed

lightning/src/ln/channelmanager.rs

Lines changed: 35 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -1884,7 +1884,10 @@ macro_rules! handle_error {
18841884
}
18851885
}
18861886

1887-
log_error!(WithContext::from(&$self.logger, Some($counterparty_node_id), chan_id.map(|(cid, _)| cid)), "{}", err.err);
1887+
let logger = WithContext::from(
1888+
&$self.logger, Some($counterparty_node_id), chan_id.map(|(chan_id, _)| chan_id)
1889+
);
1890+
log_error!(logger, "{}", err.err);
18881891
if let msgs::ErrorAction::IgnoreError = err.action {
18891892
} else {
18901893
msg_events.push(events::MessageSendEvent::HandleError {
@@ -2836,7 +2839,7 @@ where
28362839
} else {
28372840
ClosureReason::HolderForceClosed
28382841
};
2839-
let logger = WithContext::from(&self.logger, Some(*peer_node_id), Some(channel_id.clone()));
2842+
let logger = WithContext::from(&self.logger, Some(*peer_node_id), Some(*channel_id));
28402843
if let hash_map::Entry::Occupied(chan_phase_entry) = peer_state.channel_by_id.entry(channel_id.clone()) {
28412844
log_error!(logger, "Force-closing channel {}", channel_id);
28422845
self.issue_channel_close_events(&chan_phase_entry.get().context(), closure_reason);
@@ -3157,7 +3160,8 @@ where
31573160
if chan.context.get_short_channel_id().is_none() {
31583161
return Err(LightningError{err: "Channel not yet established".to_owned(), action: msgs::ErrorAction::IgnoreError});
31593162
}
3160-
log_trace!(WithChannelContext::from(&self.logger, &chan.context), "Attempting to generate broadcast channel update for channel {}", &chan.context.channel_id());
3163+
let logger = WithChannelContext::from(&self.logger, &chan.context);
3164+
log_trace!(logger, "Attempting to generate broadcast channel update for channel {}", &chan.context.channel_id());
31613165
self.get_channel_update_for_unicast(chan)
31623166
}
31633167

@@ -3173,7 +3177,8 @@ where
31733177
/// [`channel_update`]: msgs::ChannelUpdate
31743178
/// [`internal_closing_signed`]: Self::internal_closing_signed
31753179
fn get_channel_update_for_unicast(&self, chan: &Channel<SP>) -> Result<msgs::ChannelUpdate, LightningError> {
3176-
log_trace!(self.logger, "Attempting to generate channel update for channel {}", log_bytes!(chan.context.channel_id().0));
3180+
let logger = WithChannelContext::from(&self.logger, &chan.context);
3181+
log_trace!(logger, "Attempting to generate channel update for channel {}", log_bytes!(chan.context.channel_id().0));
31773182
let short_channel_id = match chan.context.get_short_channel_id().or(chan.context.latest_inbound_scid_alias()) {
31783183
None => return Err(LightningError{err: "Channel not yet established".to_owned(), action: msgs::ErrorAction::IgnoreError}),
31793184
Some(id) => id,
@@ -3183,7 +3188,8 @@ where
31833188
}
31843189

31853190
fn get_channel_update_for_onion(&self, short_channel_id: u64, chan: &Channel<SP>) -> Result<msgs::ChannelUpdate, LightningError> {
3186-
log_trace!(self.logger, "Generating channel update for channel {}", log_bytes!(chan.context.channel_id().0));
3191+
let logger = WithChannelContext::from(&self.logger, &chan.context);
3192+
log_trace!(logger, "Generating channel update for channel {}", log_bytes!(chan.context.channel_id().0));
31873193
let were_node_one = self.our_network_pubkey.serialize()[..] < chan.context.get_counterparty_node_id().serialize()[..];
31883194

31893195
let enabled = chan.context.is_usable() && match chan.channel_update_status() {
@@ -4716,7 +4722,8 @@ where
47164722
| {
47174723
context.maybe_expire_prev_config();
47184724
if unfunded_context.should_expire_unfunded_channel() {
4719-
log_error!(self.logger,
4725+
let logger = WithChannelContext::from(&self.logger, context);
4726+
log_error!(logger,
47204727
"Force-closing pending channel with ID {} for not establishing in a timely manner", chan_id);
47214728
update_maps_on_chan_removal!(self, &context);
47224729
self.issue_channel_close_events(&context, ClosureReason::HolderForceClosed);
@@ -4830,7 +4837,8 @@ where
48304837

48314838
for (chan_id, req) in peer_state.inbound_channel_request_by_id.iter_mut() {
48324839
if { req.ticks_remaining -= 1 ; req.ticks_remaining } <= 0 {
4833-
log_error!(self.logger, "Force-closing unaccepted inbound channel {} for not accepting in a timely manner", &chan_id);
4840+
let logger = WithContext::from(&self.logger, Some(counterparty_node_id), Some(*chan_id));
4841+
log_error!(logger, "Force-closing unaccepted inbound channel {} for not accepting in a timely manner", &chan_id);
48344842
peer_state.pending_msg_events.push(
48354843
events::MessageSendEvent::HandleError {
48364844
node_id: counterparty_node_id,
@@ -5742,7 +5750,8 @@ where
57425750
pending.retain(|upd| upd.update_id > highest_applied_update_id);
57435751
pending.len()
57445752
} else { 0 };
5745-
log_trace!(WithChannelContext::from(&self.logger, &channel.context), "ChannelMonitor updated to {}. Current highest is {}. {} pending in-flight updates.",
5753+
let logger = WithChannelContext::from(&self.logger, &channel.context);
5754+
log_trace!(logger, "ChannelMonitor updated to {}. Current highest is {}. {} pending in-flight updates.",
57465755
highest_applied_update_id, channel.context.get_latest_monitor_update_id(),
57475756
remaining_in_flight);
57485757
if !channel.is_awaiting_monitor_update() || channel.context.get_latest_monitor_update_id() != highest_applied_update_id {
@@ -6129,7 +6138,8 @@ where
61296138
}
61306139
Ok(())
61316140
} else {
6132-
log_error!(self.logger, "Persisting initial ChannelMonitor failed, implying the funding outpoint was duplicated");
6141+
let logger = WithChannelContext::from(&self.logger, &chan.context);
6142+
log_error!(logger, "Persisting initial ChannelMonitor failed, implying the funding outpoint was duplicated");
61336143
let channel_id = match funding_msg_opt {
61346144
Some(msg) => msg.channel_id,
61356145
None => chan.context.channel_id(),
@@ -6441,7 +6451,8 @@ where
64416451
if let ChannelPhase::Funded(chan) = chan_phase_entry.get_mut() {
64426452
let res = try_chan_phase_entry!(self, chan.update_fulfill_htlc(&msg), chan_phase_entry);
64436453
if let HTLCSource::PreviousHopData(prev_hop) = &res.0 {
6444-
log_trace!(self.logger,
6454+
let logger = WithChannelContext::from(&self.logger, &chan.context);
6455+
log_trace!(logger,
64456456
"Holding the next revoke_and_ack from {} until the preimage is durably persisted in the inbound edge's ChannelMonitor",
64466457
msg.channel_id);
64476458
peer_state.actions_blocking_raa_monitor_updates.entry(msg.channel_id)
@@ -6593,7 +6604,8 @@ where
65936604
prev_short_channel_id, prev_funding_outpoint, prev_htlc_id, prev_user_channel_id, forward_info });
65946605
},
65956606
hash_map::Entry::Occupied(_) => {
6596-
log_info!(WithContext::from(&self.logger, None, Some(prev_funding_outpoint.to_channel_id())), "Failed to forward incoming HTLC: detected duplicate intercepted payment over short channel id {}", scid);
6607+
let logger = WithContext::from(&self.logger, None, Some(prev_funding_outpoint.to_channel_id()));
6608+
log_info!(logger, "Failed to forward incoming HTLC: detected duplicate intercepted payment over short channel id {}", scid);
65976609
let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData {
65986610
short_channel_id: prev_short_channel_id,
65996611
user_channel_id: Some(prev_user_channel_id),
@@ -7748,12 +7760,14 @@ where
77487760
/// operation. It will double-check that nothing *else* is also blocking the same channel from
77497761
/// making progress and then let any blocked [`ChannelMonitorUpdate`]s fly.
77507762
fn handle_monitor_update_release(&self, counterparty_node_id: PublicKey, channel_funding_outpoint: OutPoint, mut completed_blocker: Option<RAAMonitorUpdateBlockingAction>) {
7763+
let logger = WithContext::from(
7764+
&self.logger, Some(counterparty_node_id), Some(channel_funding_outpoint.to_channel_id())
7765+
);
77517766
loop {
77527767
let per_peer_state = self.per_peer_state.read().unwrap();
77537768
if let Some(peer_state_mtx) = per_peer_state.get(&counterparty_node_id) {
77547769
let mut peer_state_lck = peer_state_mtx.lock().unwrap();
77557770
let peer_state = &mut *peer_state_lck;
7756-
let logger = WithContext::from(&self.logger, Some(counterparty_node_id), Some(channel_funding_outpoint.to_channel_id()));
77577771
if let Some(blocker) = completed_blocker.take() {
77587772
// Only do this on the first iteration of the loop.
77597773
if let Some(blockers) = peer_state.actions_blocking_raa_monitor_updates
@@ -7775,7 +7789,6 @@ where
77757789

77767790
if let hash_map::Entry::Occupied(mut chan_phase_entry) = peer_state.channel_by_id.entry(channel_funding_outpoint.to_channel_id()) {
77777791
if let ChannelPhase::Funded(chan) = chan_phase_entry.get_mut() {
7778-
let logger = WithChannelContext::from(&self.logger, &chan.context);
77797792
debug_assert_eq!(chan.context.get_funding_txo().unwrap(), channel_funding_outpoint);
77807793
if let Some((monitor_update, further_update_exists)) = chan.unblock_next_blocked_monitor_update() {
77817794
log_debug!(logger, "Unlocking monitor updating for channel {} and updating monitor",
@@ -7794,7 +7807,6 @@ where
77947807
}
77957808
}
77967809
} else {
7797-
let logger = WithContext::from(&self.logger, Some(counterparty_node_id), Some(channel_funding_outpoint.to_channel_id()));
77987810
log_debug!(logger,
77997811
"Got a release post-RAA monitor update for peer {} but the channel is gone",
78007812
log_pubkey!(counterparty_node_id));
@@ -8552,7 +8564,10 @@ where
85528564
timed_out_htlcs.push((prev_hop_data, htlc.forward_info.payment_hash,
85538565
HTLCFailReason::from_failure_code(0x2000 | 2),
85548566
HTLCDestination::InvalidForward { requested_forward_scid }));
8555-
log_trace!(self.logger, "Timing out intercepted HTLC with requested forward scid {}", requested_forward_scid);
8567+
let logger = WithContext::from(
8568+
&self.logger, None, Some(htlc.prev_funding_outpoint.to_channel_id())
8569+
);
8570+
log_trace!(logger, "Timing out intercepted HTLC with requested forward scid {}", requested_forward_scid);
85568571
false
85578572
} else { true }
85588573
});
@@ -8859,8 +8874,11 @@ where
88598874
let mut failed_channels = Vec::new();
88608875
let mut per_peer_state = self.per_peer_state.write().unwrap();
88618876
let remove_peer = {
8862-
log_debug!(WithContext::from(&self.logger, Some(*counterparty_node_id), None), "Marking channels with {} disconnected and generating channel_updates.",
8863-
log_pubkey!(counterparty_node_id));
8877+
log_debug!(
8878+
WithContext::from(&self.logger, Some(*counterparty_node_id), None),
8879+
"Marking channels with {} disconnected and generating channel_updates.",
8880+
log_pubkey!(counterparty_node_id)
8881+
);
88648882
if let Some(peer_state_mutex) = per_peer_state.get(counterparty_node_id) {
88658883
let mut peer_state_lock = peer_state_mutex.lock().unwrap();
88668884
let peer_state = &mut *peer_state_lock;

0 commit comments

Comments
 (0)