Skip to content

Commit f7f524f

Browse files
committed
Decay historical_estimated_channel_liquidity_* result to None
`historical_estimated_channel_liquidity_probabilities` previously decayed to `Some(([0; 8], [0; 8]))`. This was thought to be useful in that it allowed identification of cases where data was previously available but is now decayed away vs cases where data was never available. However, with the introduction of `historical_estimated_payment_success_probability` (which uses the existing scoring routines so will decay to `None`) this is unnecessarily confusing. Given data which has decayed to zero will also not be used anyway, there's little reason to keep the old behavior, and we now decay to `None`. We also take this opportunity to split the overloaded `get_decayed_buckets`, removing unnecessary code during scoring.
1 parent b7d1e5f commit f7f524f

File tree

1 file changed

+39
-31
lines changed

1 file changed

+39
-31
lines changed

lightning/src/routing/scoring.rs

Lines changed: 39 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -706,9 +706,10 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
706706
let amt = directed_info.effective_capacity().as_msat();
707707
let dir_liq = liq.as_directed(source, target, 0, amt, self.decay_params);
708708

709-
let (min_buckets, max_buckets, _) = dir_liq.liquidity_history
709+
let (min_buckets, max_buckets) = dir_liq.liquidity_history
710710
.get_decayed_buckets(now, *dir_liq.last_updated,
711-
self.decay_params.historical_no_updates_half_life);
711+
self.decay_params.historical_no_updates_half_life)
712+
.unwrap_or(([0; 32], [0; 32]));
712713

713714
log_debug!(self.logger, core::concat!(
714715
"Liquidity from {} to {} via {} is in the range ({}, {}).\n",
@@ -787,7 +788,7 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
787788
/// in the top and bottom bucket, and roughly with similar (recent) frequency.
788789
///
789790
/// Because the datapoints are decayed slowly over time, values will eventually return to
790-
/// `Some(([0; 32], [0; 32]))`.
791+
/// `Some(([1; 32], [1; 32]))` and then to `None` once no datapoints remain.
791792
///
792793
/// In order to fetch a single success probability from the buckets provided here, as used in
793794
/// the scoring model, see [`Self::historical_estimated_payment_success_probability`].
@@ -801,9 +802,12 @@ impl<G: Deref<Target = NetworkGraph<L>>, L: Deref, T: Time> ProbabilisticScorerU
801802
let amt = directed_info.effective_capacity().as_msat();
802803
let dir_liq = liq.as_directed(source, target, 0, amt, self.decay_params);
803804

804-
let (min_buckets, mut max_buckets, _) = dir_liq.liquidity_history
805-
.get_decayed_buckets(dir_liq.now, *dir_liq.last_updated,
806-
self.decay_params.historical_no_updates_half_life);
805+
let (min_buckets, mut max_buckets) =
806+
dir_liq.liquidity_history.get_decayed_buckets(
807+
dir_liq.now, *dir_liq.last_updated,
808+
self.decay_params.historical_no_updates_half_life
809+
)?;
810+
807811
// Note that the liquidity buckets are an offset from the edge, so we inverse
808812
// the max order to get the probabilities from zero.
809813
max_buckets.reverse();
@@ -1738,17 +1742,37 @@ mod bucketed_history {
17381742
}
17391743

17401744
impl<D: Deref<Target = HistoricalBucketRangeTracker>> HistoricalMinMaxBuckets<D> {
1741-
#[inline]
17421745
pub(super) fn get_decayed_buckets<T: Time>(&self, now: T, last_updated: T, half_life: Duration)
1743-
-> ([u16; 32], [u16; 32], u32) {
1744-
let required_decays = now.duration_since(last_updated).as_secs()
1745-
.checked_div(half_life.as_secs())
1746-
.map_or(u32::max_value(), |decays| cmp::min(decays, u32::max_value() as u64) as u32);
1746+
-> Option<([u16; 32], [u16; 32])> {
1747+
let (_, required_decays) = self.get_total_valid_points(now, last_updated, half_life)?;
1748+
17471749
let mut min_buckets = *self.min_liquidity_offset_history;
17481750
min_buckets.time_decay_data(required_decays);
17491751
let mut max_buckets = *self.max_liquidity_offset_history;
17501752
max_buckets.time_decay_data(required_decays);
1751-
(min_buckets.buckets, max_buckets.buckets, required_decays)
1753+
Some((min_buckets.buckets, max_buckets.buckets))
1754+
}
1755+
#[inline]
1756+
pub(super) fn get_total_valid_points<T: Time>(&self, now: T, last_updated: T, half_life: Duration)
1757+
-> Option<(u64, u32)> {
1758+
let required_decays = now.duration_since(last_updated).as_secs()
1759+
.checked_div(half_life.as_secs())
1760+
.map_or(u32::max_value(), |decays| cmp::min(decays, u32::max_value() as u64) as u32);
1761+
1762+
let mut total_valid_points_tracked = 0;
1763+
for (min_idx, min_bucket) in self.min_liquidity_offset_history.buckets.iter().enumerate() {
1764+
for max_bucket in self.max_liquidity_offset_history.buckets.iter().take(32 - min_idx) {
1765+
total_valid_points_tracked += (*min_bucket as u64) * (*max_bucket as u64);
1766+
}
1767+
}
1768+
1769+
// If the total valid points is smaller than 1.0 (i.e. 32 in our fixed-point scheme),
1770+
// treat it as if we were fully decayed.
1771+
if total_valid_points_tracked.checked_shr(required_decays).unwrap_or(0) < 32*32 {
1772+
return None;
1773+
}
1774+
1775+
Some((total_valid_points_tracked, required_decays))
17521776
}
17531777

17541778
#[inline]
@@ -1762,29 +1786,13 @@ mod bucketed_history {
17621786
// state). For each pair, we calculate the probability as if the bucket's corresponding
17631787
// min- and max- liquidity bounds were our current liquidity bounds and then multiply
17641788
// that probability by the weight of the selected buckets.
1765-
let mut total_valid_points_tracked = 0;
1766-
17671789
let payment_pos = amount_to_pos(amount_msat, capacity_msat);
17681790
if payment_pos >= POSITION_TICKS { return None; }
17691791

17701792
// Check if all our buckets are zero, once decayed and treat it as if we had no data. We
17711793
// don't actually use the decayed buckets, though, as that would lose precision.
1772-
let (decayed_min_buckets, decayed_max_buckets, required_decays) =
1773-
self.get_decayed_buckets(now, last_updated, half_life);
1774-
if decayed_min_buckets.iter().all(|v| *v == 0) || decayed_max_buckets.iter().all(|v| *v == 0) {
1775-
return None;
1776-
}
1777-
1778-
for (min_idx, min_bucket) in self.min_liquidity_offset_history.buckets.iter().enumerate() {
1779-
for max_bucket in self.max_liquidity_offset_history.buckets.iter().take(32 - min_idx) {
1780-
total_valid_points_tracked += (*min_bucket as u64) * (*max_bucket as u64);
1781-
}
1782-
}
1783-
// If the total valid points is smaller than 1.0 (i.e. 32 in our fixed-point scheme), treat
1784-
// it as if we were fully decayed.
1785-
if total_valid_points_tracked.checked_shr(required_decays).unwrap_or(0) < 32*32 {
1786-
return None;
1787-
}
1794+
let (total_valid_points_tracked, _)
1795+
= self.get_total_valid_points(now, last_updated, half_life)?;
17881796

17891797
let mut cumulative_success_prob_times_billion = 0;
17901798
// Special-case the 0th min bucket - it generally means we failed a payment, so only
@@ -3107,7 +3115,7 @@ mod tests {
31073115
// Once fully decayed we no longer have usable data, so the historical
31083116
// liquidity APIs now return `None` rather than all-zero buckets.
31093117
assert_eq!(scorer.historical_estimated_channel_liquidity_probabilities(42, &target),
3110-
Some(([0; 32], [0; 32])));
3118+
None);
31113119
assert_eq!(scorer.historical_estimated_payment_success_probability(42, &target, 1), None);
31123120

31133121
let mut usage = ChannelUsage {

0 commit comments

Comments
 (0)