Commit e106dad
Decay historical_estimated_channel_liquidity_* result to None
`historical_estimated_channel_liquidity_probabilities` previously decayed to `Some(([0; 8], [0; 8]))`. This was thought to be useful in that it allowed identification of cases where data was previously available but has since decayed away, versus cases where data was never available. However, with the introduction of `historical_estimated_payment_success_probability` (which uses the existing scoring routines and thus decays to `None`), this is unnecessarily confusing.

Given data which has decayed to zero will not be used anyway, there's little reason to keep the old behavior, and we now decay to `None`.
1 parent 5a31e5a commit e106dad
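For callers, the practical effect is that `None` now covers both "no data was ever collected" and "all data has decayed away". A minimal sketch of how downstream code might treat the result; the `scorer`, `scid`, and `target` bindings and the `log_histogram` helper are assumed or hypothetical here, only the match shape is the point:

// Hedged sketch: `scorer` is a ProbabilisticScorer, `scid` a short channel id and
// `target` a NodeId, all assumed to be in scope; `log_histogram` is a hypothetical helper.
match scorer.historical_estimated_channel_liquidity_probabilities(scid, &target) {
	Some((min_buckets, max_buckets)) => {
		// 32 min- and 32 max-liquidity-offset buckets with not-yet-decayed datapoints.
		log_histogram(&min_buckets, &max_buckets);
	},
	None => {
		// Either no data was ever collected for this channel, or every datapoint
		// has fully decayed; the two cases are no longer distinguishable.
	},
}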

File tree: 1 file changed, +34 -26 lines

lightning/src/routing/scoring.rs

Lines changed: 34 additions & 26 deletions
@@ -706,10 +706,17 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
 				let amt = directed_info.effective_capacity().as_msat();
 				let dir_liq = liq.as_directed(source, target, 0, amt, self.decay_params);
 
-				let (min_buckets, max_buckets, _) = dir_liq.liquidity_history
+				let decayed_buckets = dir_liq.liquidity_history
 					.get_decayed_buckets(now, *dir_liq.last_updated,
 						self.decay_params.historical_no_updates_half_life);
 
+				let (min_buckets, max_buckets, _, _) =
+					if let Some(buckets) = decayed_buckets { buckets } else {
+						// If the buckets, once decayed, end up being zero, print them out
+						// as zeros.
+						([0; 32], [0; 32], 0, 0)
+					};
+
 				log_debug!(self.logger, core::concat!(
 					"Liquidity from {} to {} via {} is in the range ({}, {}).\n",
 					"\tHistorical min liquidity bucket relative probabilities:\n",
@@ -788,7 +795,7 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
 	/// in the top and bottom bucket, and roughly with similar (recent) frequency.
 	///
 	/// Because the datapoints are decayed slowly over time, values will eventually return to
-	/// `Some(([0; 32], [0; 32]))`.
+	/// `Some(([1; 32], [1; 32]))` and then to `None` once no datapoints remain.
 	///
 	/// In order to fetch a single success probability from the buckets provided here, as used in
 	/// the scoring model, see [`Self::historical_estimated_payment_success_probability`].
@@ -802,9 +809,12 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
 				let amt = directed_info.effective_capacity().as_msat();
 				let dir_liq = liq.as_directed(source, target, 0, amt, self.decay_params);
 
-				let (min_buckets, mut max_buckets, _) = dir_liq.liquidity_history
-					.get_decayed_buckets(dir_liq.now, *dir_liq.last_updated,
-						self.decay_params.historical_no_updates_half_life);
+				let (min_buckets, mut max_buckets, valid_points, required_decays) =
+					dir_liq.liquidity_history.get_decayed_buckets(
+						dir_liq.now, *dir_liq.last_updated,
+						self.decay_params.historical_no_updates_half_life
+					)?;
+
 				// Note that the liquidity buckets are an offset from the edge, so we inverse
 				// the max order to get the probabilities from zero.
 				max_buckets.reverse();
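The `?` added to the `get_decayed_buckets` call above is what turns a fully decayed history into a `None` return from the public accessor. A toy, self-contained illustration of that propagation (stand-in function names, not LDK code, just the Option-`?` mechanics):

// Stand-ins for the real helper and accessor, only to show `?` short-circuiting.
fn get_decayed_buckets() -> Option<([u16; 32], [u16; 32])> { None } // fully decayed

fn historical_probabilities() -> Option<([u16; 32], [u16; 32])> {
	let (min_buckets, max_buckets) = get_decayed_buckets()?; // returns None right here
	Some((min_buckets, max_buckets))
}

fn main() {
	assert_eq!(historical_probabilities(), None);
}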
@@ -1735,15 +1745,29 @@ mod bucketed_history {
 	impl<D: Deref<Target = HistoricalBucketRangeTracker>> HistoricalMinMaxBuckets<D> {
 		#[inline]
 		pub(super) fn get_decayed_buckets<T: Time>(&self, now: T, last_updated: T, half_life: Duration)
-		-> ([u16; 32], [u16; 32], u32) {
+		-> Option<([u16; 32], [u16; 32], u64, u32)> {
 			let required_decays = now.duration_since(last_updated).as_secs()
 				.checked_div(half_life.as_secs())
 				.map_or(u32::max_value(), |decays| cmp::min(decays, u32::max_value() as u64) as u32);
+
+			let mut total_valid_points_tracked = 0;
+			for (min_idx, min_bucket) in self.min_liquidity_offset_history.buckets.iter().enumerate() {
+				for max_bucket in self.max_liquidity_offset_history.buckets.iter().take(32 - min_idx) {
+					total_valid_points_tracked += (*min_bucket as u64) * (*max_bucket as u64);
+				}
+			}
+
+			// If the total valid points is smaller than 1.0 (i.e. 32 in our fixed-point scheme),
+			// treat it as if we were fully decayed.
+			if total_valid_points_tracked.checked_shr(required_decays).unwrap_or(0) < 32*32 {
+				return None;
+			}
+
 			let mut min_buckets = *self.min_liquidity_offset_history;
 			min_buckets.time_decay_data(required_decays);
 			let mut max_buckets = *self.max_liquidity_offset_history;
 			max_buckets.time_decay_data(required_decays);
-			(min_buckets.buckets, max_buckets.buckets, required_decays)
+			Some((min_buckets.buckets, max_buckets.buckets, total_valid_points_tracked, required_decays))
 		}
 
 		#[inline]
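The early return above relies on the tracker's fixed-point convention: each bucket value carries a factor of 32, so the product of a min and a max bucket carries 32*32, and `required_decays` halvings are applied as a right shift. A standalone sketch of just that arithmetic, with invented numbers for illustration:

// Mirrors only the threshold arithmetic from get_decayed_buckets above.
fn treat_as_fully_decayed(total_valid_points_tracked: u64, required_decays: u32) -> bool {
	// Each decay halves the tracked points; anything that lands below 32 * 32
	// (less than one full point in the fixed-point scheme) counts as no data.
	total_valid_points_tracked.checked_shr(required_decays).unwrap_or(0) < 32 * 32
}

fn main() {
	assert!(!treat_as_fully_decayed(4096, 2)); // 4096 >> 2 == 1024, exactly one point left
	assert!(treat_as_fully_decayed(4096, 3));  // 4096 >> 3 == 512, below one point
	assert!(treat_as_fully_decayed(u64::MAX, 64)); // shift of 64+ overflows, treated as zero
}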
@@ -1755,29 +1779,13 @@ mod bucketed_history {
 			// having a minimum above our maximum is an invalid state). For each combination,
 			// calculate the probability of success given our payment amount, then total the
 			// weighted average probability of success.
-			let mut total_valid_points_tracked = 0;
-
 			let payment_pos = amount_to_pos(amount_msat, capacity_msat);
 			if payment_pos >= POSITION_TICKS { return None; }
 
 			// Check if all our buckets are zero, once decayed and treat it as if we had no data. We
 			// don't actually use the decayed buckets, though, as that would lose precision.
-			let (decayed_min_buckets, decayed_max_buckets, required_decays) =
-				self.get_decayed_buckets(now, last_updated, half_life);
-			if decayed_min_buckets.iter().all(|v| *v == 0) || decayed_max_buckets.iter().all(|v| *v == 0) {
-				return None;
-			}
-
-			for (min_idx, min_bucket) in self.min_liquidity_offset_history.buckets.iter().enumerate() {
-				for max_bucket in self.max_liquidity_offset_history.buckets.iter().take(32 - min_idx) {
-					total_valid_points_tracked += (*min_bucket as u64) * (*max_bucket as u64);
-				}
-			}
-			// If the total valid points is smaller than 1.0 (i.e. 32 in our fixed-point scheme), treat
-			// it as if we were fully decayed.
-			if total_valid_points_tracked.checked_shr(required_decays).unwrap_or(0) < 32*32 {
-				return None;
-			}
+			let (decayed_min_buckets, decayed_max_buckets, total_valid_points_tracked, required_decays)
+				= self.get_decayed_buckets(now, last_updated, half_life)?;
 
 			let mut cumulative_success_prob_times_billion = 0;
 			// Special-case the 0th min bucket - it generally means we failed a payment, so only
@@ -3100,7 +3108,7 @@ mod tests {
 		// Once fully decayed we still have data, but its all-0s. In the future we may remove the
 		// data entirely instead.
 		assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
-			Some(([0; 32], [0; 32])));
+			None);
 		assert_eq!(scorer.historical_estimated_payment_success_probability(42, &target, 1), None);
 
 		let mut usage = ChannelUsage {
