Skip to content

Commit cca68c3

Browse files
committed
Calc decayed buckets to decide if we have valid historical points
When calculating whether the historical data tracker still has enough data to assign a score once the not-yet-applied decays are taken into account, we previously computed the decayed points while walking the buckets, since we don't otherwise use the decayed buckets (to avoid losing precision). That was fine, except that, as written, it decayed individual buckets additional times. Instead, here we actually calculate the full set of decayed buckets and use those to decide whether we have valid points. This adds some additional stack space and may in fact be slower, but it will be useful in the next commit and shouldn't be a huge change.
1 parent 869b71d commit cca68c3

File tree

1 file changed

+31
-14
lines changed

1 file changed

+31
-14
lines changed

lightning/src/routing/scoring.rs

Lines changed: 31 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -597,7 +597,26 @@ struct HistoricalMinMaxBuckets<'a> {
597597

598598
impl HistoricalMinMaxBuckets<'_> {
599599
#[inline]
600-
fn calculate_success_probability_times_billion(&self, required_decays: u32, payment_amt_64th_bucket: u8) -> Option<u64> {
600+
fn get_decayed_buckets<T: Time>(&self, now: T, last_updated: T, half_life: Duration)
601+
-> ([u16; 8], [u16; 8], u32) {
602+
let required_decays = now.duration_since(last_updated).as_secs()
603+
.checked_div(half_life.as_secs())
604+
.map_or(u32::max_value(), |decays| cmp::min(decays, u32::max_value() as u64) as u32);
605+
let mut min_buckets = [0; 8];
606+
for (idx, bucket) in self.min_liquidity_offset_history.buckets.iter().enumerate() {
607+
min_buckets[idx] = (*bucket).checked_shr(required_decays).unwrap_or(0);
608+
}
609+
let mut max_buckets = [0; 8];
610+
for (idx, bucket) in self.max_liquidity_offset_history.buckets.iter().enumerate() {
611+
max_buckets[idx] = (*bucket).checked_shr(required_decays).unwrap_or(0);
612+
}
613+
(min_buckets, max_buckets, required_decays)
614+
}
615+
616+
#[inline]
617+
fn calculate_success_probability_times_billion<T: Time>(
618+
&self, now: T, last_updated: T, half_life: Duration, payment_amt_64th_bucket: u8)
619+
-> Option<u64> {
601620
// If historical penalties are enabled, calculate the penalty by walking the set of
602621
// historical liquidity bucket (min, max) combinations (where min_idx < max_idx) and, for
603622
// each, calculate the probability of success given our payment amount, then total the
@@ -619,23 +638,22 @@ impl HistoricalMinMaxBuckets<'_> {
619638
// less than 1/16th of a channel's capacity, or 1/8th if we used the top of the bucket.
620639
let mut total_valid_points_tracked = 0;
621640

622-
// Rather than actually decaying the individual buckets, which would lose precision, we
623-
// simply track whether all buckets would be decayed to zero, in which case we treat it as
624-
// if we had no data.
625-
let mut is_fully_decayed = true;
626-
let mut check_track_bucket_contains_undecayed_points =
627-
|bucket_val: u16| if bucket_val.checked_shr(required_decays).unwrap_or(0) > 0 { is_fully_decayed = false; };
641+
// Check if all our buckets are zero, once decayed and treat it as if we had no data. We
642+
// don't actually use the decayed buckets, though, as that would lose precision.
643+
let (decayed_min_buckets, decayed_max_buckets, required_decays) =
644+
self.get_decayed_buckets(now, last_updated, half_life);
645+
if decayed_min_buckets.iter().all(|v| *v == 0) || decayed_max_buckets.iter().all(|v| *v == 0) {
646+
return None;
647+
}
628648

629649
for (min_idx, min_bucket) in self.min_liquidity_offset_history.buckets.iter().enumerate() {
630-
check_track_bucket_contains_undecayed_points(*min_bucket);
631650
for max_bucket in self.max_liquidity_offset_history.buckets.iter().take(8 - min_idx) {
632651
total_valid_points_tracked += (*min_bucket as u64) * (*max_bucket as u64);
633-
check_track_bucket_contains_undecayed_points(*max_bucket);
634652
}
635653
}
636654
// If the total valid points is smaller than 1.0 (i.e. 32 in our fixed-point scheme), treat
637655
// it as if we were fully decayed.
638-
if total_valid_points_tracked.checked_shr(required_decays).unwrap_or(0) < 32*32 || is_fully_decayed {
656+
if total_valid_points_tracked.checked_shr(required_decays).unwrap_or(0) < 32*32 {
639657
return None;
640658
}
641659

@@ -942,9 +960,6 @@ impl<L: Deref<Target = u64>, BRT: Deref<Target = HistoricalBucketRangeTracker>,
942960

943961
if params.historical_liquidity_penalty_multiplier_msat != 0 ||
944962
params.historical_liquidity_penalty_amount_multiplier_msat != 0 {
945-
let required_decays = self.now.duration_since(*self.last_updated).as_secs()
946-
.checked_div(params.historical_no_updates_half_life.as_secs())
947-
.map_or(u32::max_value(), |decays| cmp::min(decays, u32::max_value() as u64) as u32);
948963
let payment_amt_64th_bucket = amount_msat * 64 / self.capacity_msat;
949964
debug_assert!(payment_amt_64th_bucket <= 64);
950965
if payment_amt_64th_bucket > 64 { return res; }
@@ -954,7 +969,9 @@ impl<L: Deref<Target = u64>, BRT: Deref<Target = HistoricalBucketRangeTracker>,
954969
max_liquidity_offset_history: &self.max_liquidity_offset_history,
955970
};
956971
if let Some(cumulative_success_prob_times_billion) = buckets
957-
.calculate_success_probability_times_billion(required_decays, payment_amt_64th_bucket as u8) {
972+
.calculate_success_probability_times_billion(self.now, *self.last_updated,
973+
params.historical_no_updates_half_life, payment_amt_64th_bucket as u8)
974+
{
958975
let historical_negative_log10_times_2048 = approx::negative_log10_times_2048(cumulative_success_prob_times_billion + 1, 1024 * 1024 * 1024);
959976
res = res.saturating_add(Self::combined_penalty_msat(amount_msat,
960977
historical_negative_log10_times_2048, params.historical_liquidity_penalty_multiplier_msat,

0 commit comments

Comments
 (0)