Commit 7091aeb

Add PaymentHash to Record
Add the `payment_hash` to `Record` so we can track it in logs.
1 parent 6d5c952 commit 7091aeb
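
The `Record` type itself is defined in `lightning/src/util/logger.rs`, one of the six changed files but not shown in this excerpt. Based on the commit title and the `record.payment_hash = self.payment_hash;` assignment in `channel.rs` below, the new field presumably sits alongside the existing peer/channel context fields. The following is a hedged sketch of the struct's assumed shape after this commit (crate-internal code, imports elided; field list simplified), not the file's actual contents:

    /// Sketch of the assumed `Record` shape after this commit; only
    /// `payment_hash` is new, the other fields already exist upstream.
    pub struct Record<'a> {
        /// The verbosity level of the message.
        pub level: Level,
        /// The node id of the peer pertaining to the logged record, if any.
        pub peer_id: Option<PublicKey>,
        /// The channel id of the channel pertaining to the logged record, if any.
        pub channel_id: Option<ChannelId>,
        /// The payment hash, if any, relating to the logged record.
        /// This is the field added by this commit.
        pub payment_hash: Option<PaymentHash>,
        /// The message body.
        pub args: fmt::Arguments<'a>,
        /// The module path of the message.
        pub module_path: &'static str,
        /// The source file containing the message.
        pub file: &'static str,
        /// The line containing the message.
        pub line: u32,
    }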

6 files changed: +94, -78 lines

lightning/src/chain/chainmonitor.rs

Lines changed: 1 addition & 1 deletion

@@ -790,7 +790,7 @@ where C::Target: chain::Filter,
 let monitors = self.monitors.read().unwrap();
 match monitors.get(&funding_txo) {
 None => {
-let logger = WithContext::from(&self.logger, update.counterparty_node_id, Some(channel_id));
+let logger = WithContext::from(&self.logger, update.counterparty_node_id, Some(channel_id), None);
 log_error!(logger, "Failed to update channel monitor: no such monitor registered");

 // We should never ever trigger this from within ChannelManager. Technically a
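
The trailing `None` in the call above (and in the non-HTLC call sites further down) is the new payment-hash argument. `WithContext::from` is defined in `lightning/src/util/logger.rs`, which this excerpt does not show; judging from the call sites in this diff and the `WithChannelContext` changes in `channel.rs` below, its updated signature presumably looks like the following sketch (crate-internal code, not the file's actual contents):

    // Assumed shape of the updated constructor in lightning/src/util/logger.rs
    // (part of this commit, but not shown in this excerpt).
    impl<'a, L: Deref> WithContext<'a, L> where L::Target: Logger {
        /// Wraps the given logger, tagging every forwarded `Record` with the
        /// optional peer id, channel id and, now, payment hash.
        pub fn from(
            logger: &'a L, peer_id: Option<PublicKey>, channel_id: Option<ChannelId>,
            payment_hash: Option<PaymentHash>,
        ) -> Self {
            WithContext { logger, peer_id, channel_id, payment_hash }
        }
    }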

lightning/src/ln/channel.rs

Lines changed: 5 additions & 2 deletions

@@ -896,12 +896,14 @@ pub(super) struct WithChannelContext<'a, L: Deref> where L::Target: Logger {
 pub logger: &'a L,
 pub peer_id: Option<PublicKey>,
 pub channel_id: Option<ChannelId>,
+pub payment_hash: Option<PaymentHash>,
 }

 impl<'a, L: Deref> Logger for WithChannelContext<'a, L> where L::Target: Logger {
 fn log(&self, mut record: Record) {
 record.peer_id = self.peer_id;
 record.channel_id = self.channel_id;
+record.payment_hash = self.payment_hash;
 self.logger.log(record)
 }
 }
@@ -915,6 +917,7 @@ where L::Target: Logger {
 logger,
 peer_id: Some(context.counterparty_node_id),
 channel_id: Some(context.channel_id),
+payment_hash: None
 }
 }
 }
@@ -1540,7 +1543,7 @@ impl<SP: Deref> ChannelContext<SP> where SP::Target: SignerProvider {
 L::Target: Logger,
 SP::Target: SignerProvider,
 {
-let logger = WithContext::from(logger, Some(counterparty_node_id), Some(open_channel_fields.temporary_channel_id));
+let logger = WithContext::from(logger, Some(counterparty_node_id), Some(open_channel_fields.temporary_channel_id), None);
 let announced_channel = if (open_channel_fields.channel_flags & 1) == 1 { true } else { false };

 let channel_value_satoshis = our_funding_satoshis.saturating_add(open_channel_fields.funding_satoshis);
@@ -7703,7 +7706,7 @@ impl<SP: Deref> InboundV1Channel<SP> where SP::Target: SignerProvider {
 F::Target: FeeEstimator,
 L::Target: Logger,
 {
-let logger = WithContext::from(logger, Some(counterparty_node_id), Some(msg.common_fields.temporary_channel_id));
+let logger = WithContext::from(logger, Some(counterparty_node_id), Some(msg.common_fields.temporary_channel_id), None);

 // First check the channel type is known, failing before we do anything else if we don't
 // support this channel type.

lightning/src/ln/channelmanager.rs

Lines changed: 28 additions & 27 deletions

@@ -2008,7 +2008,7 @@ macro_rules! handle_error {
 let counterparty_node_id = shutdown_res.counterparty_node_id;
 let channel_id = shutdown_res.channel_id;
 let logger = WithContext::from(
-&$self.logger, Some(counterparty_node_id), Some(channel_id),
+&$self.logger, Some(counterparty_node_id), Some(channel_id), None
 );
 log_error!(logger, "Force-closing channel: {}", err.err);

@@ -2888,7 +2888,7 @@ where
 }

 let logger = WithContext::from(
-&self.logger, Some(shutdown_res.counterparty_node_id), Some(shutdown_res.channel_id),
+&self.logger, Some(shutdown_res.counterparty_node_id), Some(shutdown_res.channel_id), None
 );

 log_debug!(logger, "Finishing closure of channel due to {} with {} HTLCs to fail",
@@ -2964,7 +2964,7 @@ where
 } else {
 ClosureReason::HolderForceClosed
 };
-let logger = WithContext::from(&self.logger, Some(*peer_node_id), Some(*channel_id));
+let logger = WithContext::from(&self.logger, Some(*peer_node_id), Some(*channel_id), None);
 if let hash_map::Entry::Occupied(chan_phase_entry) = peer_state.channel_by_id.entry(channel_id.clone()) {
 log_error!(logger, "Force-closing channel {}", channel_id);
 let mut chan_phase = remove_channel_phase!(self, chan_phase_entry);
@@ -3095,7 +3095,7 @@ where
 ($msg: expr, $err_code: expr, $data: expr) => {
 {
 log_info!(
-WithContext::from(&self.logger, Some(*counterparty_node_id), Some(msg.channel_id)),
+WithContext::from(&self.logger, Some(*counterparty_node_id), Some(msg.channel_id), Some(msg.payment_hash)),
 "Failed to accept/forward incoming HTLC: {}", $msg
 );
 // If `msg.blinding_point` is set, we must always fail with malformed.
@@ -3259,7 +3259,7 @@ where
 macro_rules! return_err {
 ($msg: expr, $err_code: expr, $data: expr) => {
 {
-let logger = WithContext::from(&self.logger, Some(*counterparty_node_id), Some(msg.channel_id));
+let logger = WithContext::from(&self.logger, Some(*counterparty_node_id), Some(msg.channel_id), Some(msg.payment_hash));
 log_info!(logger, "Failed to accept/forward incoming HTLC: {}", $msg);
 if msg.blinding_point.is_some() {
 return PendingHTLCStatus::Fail(HTLCFailureMsg::Malformed(
@@ -3414,22 +3414,22 @@ where
 &self.secp_ctx, &path, &session_priv, total_value, recipient_onion, cur_height,
 payment_hash, keysend_preimage, prng_seed
 ).map_err(|e| {
-let logger = WithContext::from(&self.logger, Some(path.hops.first().unwrap().pubkey), None);
+let logger = WithContext::from(&self.logger, Some(path.hops.first().unwrap().pubkey), None, Some(*payment_hash));
 log_error!(logger, "Failed to build an onion for path for payment hash {}", payment_hash);
 e
 })?;

 let err: Result<(), _> = loop {
 let (counterparty_node_id, id) = match self.short_to_chan_info.read().unwrap().get(&path.hops.first().unwrap().short_channel_id) {
 None => {
-let logger = WithContext::from(&self.logger, Some(path.hops.first().unwrap().pubkey), None);
+let logger = WithContext::from(&self.logger, Some(path.hops.first().unwrap().pubkey), None, Some(*payment_hash));
 log_error!(logger, "Failed to find first-hop for payment hash {}", payment_hash);
 return Err(APIError::ChannelUnavailable{err: "No channel available with first hop!".to_owned()})
 },
 Some((cp_id, chan_id)) => (cp_id.clone(), chan_id.clone()),
 };

-let logger = WithContext::from(&self.logger, Some(counterparty_node_id), Some(id));
+let logger = WithContext::from(&self.logger, Some(counterparty_node_id), Some(id), Some(*payment_hash));
 log_trace!(logger,
 "Attempting to send payment with payment hash {} along path with next hop {}",
 payment_hash, path.hops.first().unwrap().short_channel_id);
@@ -4201,7 +4201,7 @@ where
 None => {
 let error = format!("Channel with id {} not found for the passed counterparty node_id {}",
 next_hop_channel_id, next_node_id);
-let logger = WithContext::from(&self.logger, Some(next_node_id), Some(*next_hop_channel_id));
+let logger = WithContext::from(&self.logger, Some(next_node_id), Some(*next_hop_channel_id), None);
 log_error!(logger, "{} when attempting to forward intercepted HTLC", error);
 return Err(APIError::ChannelUnavailable {
 err: error
@@ -4306,7 +4306,7 @@ where
 }) => {
 macro_rules! failure_handler {
 ($msg: expr, $err_code: expr, $err_data: expr, $phantom_ss: expr, $next_hop_unknown: expr) => {
-let logger = WithContext::from(&self.logger, forwarding_counterparty, Some(prev_channel_id));
+let logger = WithContext::from(&self.logger, forwarding_counterparty, Some(prev_channel_id), Some(payment_hash));
 log_info!(logger, "Failed to accept/forward incoming HTLC: {}", $msg);

 let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData {
@@ -5075,7 +5075,7 @@ where

 for (chan_id, req) in peer_state.inbound_channel_request_by_id.iter_mut() {
 if { req.ticks_remaining -= 1 ; req.ticks_remaining } <= 0 {
-let logger = WithContext::from(&self.logger, Some(counterparty_node_id), Some(*chan_id));
+let logger = WithContext::from(&self.logger, Some(counterparty_node_id), Some(*chan_id), None);
 log_error!(logger, "Force-closing unaccepted inbound channel {} for not accepting in a timely manner", &chan_id);
 peer_state.pending_msg_events.push(
 events::MessageSendEvent::HandleError {
@@ -5356,7 +5356,7 @@ where
 ref phantom_shared_secret, outpoint: _, ref blinded_failure, ref channel_id, ..
 }) => {
 log_trace!(
-WithContext::from(&self.logger, None, Some(*channel_id)),
+WithContext::from(&self.logger, None, Some(*channel_id), Some(*payment_hash)),
 "Failing {}HTLC with payment_hash {} backwards from us: {:?}",
 if blinded_failure.is_some() { "blinded " } else { "" }, &payment_hash, onion_error
 );
@@ -5552,7 +5552,7 @@ where
 if let msgs::ErrorAction::IgnoreError = err.err.action {
 // We got a temporary failure updating monitor, but will claim the
 // HTLC when the monitor updating is restored (or on chain).
-let logger = WithContext::from(&self.logger, None, Some(prev_hop_chan_id));
+let logger = WithContext::from(&self.logger, None, Some(prev_hop_chan_id), Some(payment_hash));
 log_error!(logger, "Temporary failure claiming HTLC, treating as success: {}", err.err.err);
 } else { errs.push((pk, err)); }
 }
@@ -5704,7 +5704,7 @@ where
 // with a preimage we *must* somehow manage to propagate it to the upstream
 // channel, or we must have an ability to receive the same event and try
 // again on restart.
-log_error!(WithContext::from(&self.logger, None, Some(prev_hop.channel_id)),
+log_error!(WithContext::from(&self.logger, None, Some(prev_hop.channel_id), None),
 "Critical error: failed to update channel monitor with preimage {:?}: {:?}",
 payment_preimage, update_res);
 }
@@ -6084,7 +6084,7 @@ where

 fn do_accept_inbound_channel(&self, temporary_channel_id: &ChannelId, counterparty_node_id: &PublicKey, accept_0conf: bool, user_channel_id: u128) -> Result<(), APIError> {

-let logger = WithContext::from(&self.logger, Some(*counterparty_node_id), Some(*temporary_channel_id));
+let logger = WithContext::from(&self.logger, Some(*counterparty_node_id), Some(*temporary_channel_id), None);
 let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(self);

 let peers_without_funded_channels =
@@ -6511,7 +6511,8 @@ where
 let logger = WithContext::from(
 &self.logger,
 Some(chan.context.get_counterparty_node_id()),
-Some(chan.context.channel_id())
+Some(chan.context.channel_id()),
+None
 );
 let res =
 chan.funding_signed(&msg, best_block, &self.signer_provider, &&logger);
@@ -6722,7 +6723,7 @@ where
 };
 if let Some(broadcast_tx) = tx {
 let channel_id = chan_option.as_ref().map(|channel| channel.context().channel_id());
-log_info!(WithContext::from(&self.logger, Some(*counterparty_node_id), channel_id), "Broadcasting {}", log_tx!(broadcast_tx));
+log_info!(WithContext::from(&self.logger, Some(*counterparty_node_id), channel_id, None), "Broadcasting {}", log_tx!(broadcast_tx));
 self.tx_broadcaster.broadcast_transactions(&[&broadcast_tx]);
 }
 if let Some(ChannelPhase::Funded(chan)) = chan_option {
@@ -6995,7 +6996,7 @@ where
 prev_short_channel_id, prev_funding_outpoint, prev_channel_id, prev_htlc_id, prev_user_channel_id, forward_info });
 },
 hash_map::Entry::Occupied(_) => {
-let logger = WithContext::from(&self.logger, None, Some(prev_channel_id));
+let logger = WithContext::from(&self.logger, None, Some(prev_channel_id), Some(forward_info.payment_hash));
 log_info!(logger, "Failed to forward incoming HTLC: detected duplicate intercepted payment over short channel id {}", scid);
 let htlc_source = HTLCSource::PreviousHopData(HTLCPreviousHopData {
 short_channel_id: prev_short_channel_id,
@@ -7259,7 +7260,7 @@ where
 msg.channel_id
 )
 })?;
-let logger = WithContext::from(&self.logger, Some(*counterparty_node_id), Some(msg.channel_id));
+let logger = WithContext::from(&self.logger, Some(*counterparty_node_id), Some(msg.channel_id), None);
 let mut peer_state_lock = peer_state_mutex.lock().unwrap();
 let peer_state = &mut *peer_state_lock;
 match peer_state.channel_by_id.entry(msg.channel_id) {
@@ -7361,7 +7362,7 @@ where
 for monitor_event in monitor_events.drain(..) {
 match monitor_event {
 MonitorEvent::HTLCEvent(htlc_update) => {
-let logger = WithContext::from(&self.logger, counterparty_node_id, Some(channel_id));
+let logger = WithContext::from(&self.logger, counterparty_node_id, Some(channel_id), Some(htlc_update.payment_hash));
 if let Some(preimage) = htlc_update.payment_preimage {
 log_trace!(logger, "Claiming HTLC with preimage {} from our monitor", preimage);
 self.claim_funds_internal(htlc_update.source, preimage,
@@ -8225,7 +8226,7 @@ where
 mut completed_blocker: Option<RAAMonitorUpdateBlockingAction>) {

 let logger = WithContext::from(
-&self.logger, Some(counterparty_node_id), Some(channel_id),
+&self.logger, Some(counterparty_node_id), Some(channel_id), None
 );
 loop {
 let per_peer_state = self.per_peer_state.read().unwrap();
@@ -8697,7 +8698,7 @@ where
 HTLCFailReason::from_failure_code(0x2000 | 2),
 HTLCDestination::InvalidForward { requested_forward_scid }));
 let logger = WithContext::from(
-&self.logger, None, Some(htlc.prev_channel_id)
+&self.logger, None, Some(htlc.prev_channel_id), Some(htlc.forward_info.payment_hash)
 );
 log_trace!(logger, "Timing out intercepted HTLC with requested forward scid {}", requested_forward_scid);
 false
@@ -9007,7 +9008,7 @@ where
 let mut per_peer_state = self.per_peer_state.write().unwrap();
 let remove_peer = {
 log_debug!(
-WithContext::from(&self.logger, Some(*counterparty_node_id), None),
+WithContext::from(&self.logger, Some(*counterparty_node_id), None, None),
 "Marking channels with {} disconnected and generating channel_updates.",
 log_pubkey!(counterparty_node_id)
 );
@@ -9115,7 +9116,7 @@ where
 }

 fn peer_connected(&self, counterparty_node_id: &PublicKey, init_msg: &msgs::Init, inbound: bool) -> Result<(), ()> {
-let logger = WithContext::from(&self.logger, Some(*counterparty_node_id), None);
+let logger = WithContext::from(&self.logger, Some(*counterparty_node_id), None, None);
 if !init_msg.features.supports_static_remote_key() {
 log_debug!(logger, "Peer {} does not support static remote key, disconnecting", log_pubkey!(counterparty_node_id));
 return Err(());
@@ -10904,7 +10905,7 @@ where
 if let Some(in_flight_upds) = in_flight_monitor_updates {
 for ((counterparty_id, funding_txo), mut chan_in_flight_updates) in in_flight_upds {
 let channel_id = funding_txo_to_channel_id.get(&funding_txo).copied();
-let logger = WithContext::from(&args.logger, Some(counterparty_id), channel_id);
+let logger = WithContext::from(&args.logger, Some(counterparty_id), channel_id, None);
 if let Some(monitor) = args.channel_monitors.get(&funding_txo) {
 // Now that we've removed all the in-flight monitor updates for channels that are
 // still open, we need to replay any monitor updates that are for closed channels,
@@ -11268,7 +11269,7 @@ where
 for (node_id, monitor_update_blocked_actions) in monitor_update_blocked_actions_per_peer.unwrap() {
 if let Some(peer_state) = per_peer_state.get(&node_id) {
 for (channel_id, actions) in monitor_update_blocked_actions.iter() {
-let logger = WithContext::from(&args.logger, Some(node_id), Some(*channel_id));
+let logger = WithContext::from(&args.logger, Some(node_id), Some(*channel_id), None);
 for action in actions.iter() {
 if let MonitorUpdateCompletionAction::EmitEventAndFreeOtherChannel {
 downstream_counterparty_and_funding_outpoint:
@@ -11296,7 +11297,7 @@ where
 }
 peer_state.lock().unwrap().monitor_update_blocked_actions = monitor_update_blocked_actions;
 } else {
-log_error!(WithContext::from(&args.logger, Some(node_id), None), "Got blocked actions without a per-peer-state for {}", node_id);
+log_error!(WithContext::from(&args.logger, Some(node_id), None, None), "Got blocked actions without a per-peer-state for {}", node_id);
 return Err(DecodeError::InvalidValue);
 }
 }
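
With the HTLC-related call sites above passing `Some(payment_hash)`, a downstream `Logger` implementation can surface the hash and filter log lines by payment. A minimal sketch, assuming `Record` carries an `Option<PaymentHash>` field as outlined earlier; `PaymentAwareLogger` and its output format are illustrative and not part of this commit:

    use lightning::util::logger::{Logger, Record};

    struct PaymentAwareLogger;

    impl Logger for PaymentAwareLogger {
        fn log(&self, record: Record) {
            // Render the payment hash as hex when present, "-" otherwise, so all
            // log lines for a single payment can be grepped by its hash.
            let payment_hash = record.payment_hash
                .map(|h| h.0.iter().map(|b| format!("{:02x}", b)).collect::<String>())
                .unwrap_or_else(|| "-".to_owned());
            println!("{} [{}:{}] payment_hash={} {}",
                record.level, record.module_path, record.line, payment_hash, record.args);
        }
    }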
