Skip to content

Commit 77172ee

Browse files
committed
Store pending claims awaiting monitor update in a separate map
In the next commits we'll move to generating `PaymentClaimed` events while handling `ChannelMonitorUpdate`s rather than directly in line. Thus, as a prerequisite, here we move to storing the info required to generate the `PaymentClaimed` event in a separate map. Note that while this does introduce a new map which is written as an even TLV type (and thus one which users cannot opt out of when downgrading), the map is only filled in when users use asynchronous `ChannelMonitor` updates, and only after a future PR. As those are still considered beta, breaking downgrades for such users is considered acceptable in that future PR (which will likely land one LDK version later).
1 parent 52edb35 commit 77172ee

File tree

1 file changed

+163
-105
lines changed

1 file changed

+163
-105
lines changed

lightning/src/ln/channelmanager.rs

Lines changed: 163 additions & 105 deletions
Original file line numberDiff line numberDiff line change
@@ -424,6 +424,18 @@ pub(super) enum RAACommitmentOrder {
424424
RevokeAndACKFirst,
425425
}
426426

427+
/// Information about a payment which is currently being claimed.
428+
struct PendingClaimingPayment {
429+
amount_msat: u64,
430+
payment_purpose: events::PaymentPurpose,
431+
receiver_node_id: PublicKey,
432+
}
433+
impl_writeable_tlv_based!(PendingClaimingPayment, {
434+
(0, amount_msat, required),
435+
(2, payment_purpose, required),
436+
(4, receiver_node_id, required),
437+
});
438+
427439
// Note this is only exposed in cfg(test):
428440
pub(super) struct ChannelHolder<Signer: Sign> {
429441
pub(super) by_id: HashMap<[u8; 32], Channel<Signer>>,
@@ -796,6 +808,13 @@ pub struct ChannelManager<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
796808
/// See `ChannelManager` struct-level documentation for lock order requirements.
797809
claimable_htlcs: Mutex<HashMap<PaymentHash, (events::PaymentPurpose, Vec<ClaimableHTLC>)>>,
798810

811+
/// Map from payment hash to the payment data for HTLCs which we have begun claiming, but which
812+
/// are waiting on a [`ChannelMonitorUpdate`] to complete in order to be surfaced to the user
813+
/// as an [`events::Event::PaymentClaimed`].
814+
///
815+
/// See `ChannelManager` struct-level documentation for lock order requirements.
816+
pending_claimed_payments: Mutex<HashMap<PaymentHash, PendingClaimingPayment>>,
817+
799818
/// The set of outbound SCID aliases across all our channels, including unconfirmed channels
800819
/// and some closed channels which reached a usable state prior to being closed. This is used
801820
/// only to avoid duplicates, and is not persisted explicitly to disk, but rebuilt from the
@@ -1602,6 +1621,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
16021621
forward_htlcs: Mutex::new(HashMap::new()),
16031622
claimable_htlcs: Mutex::new(HashMap::new()),
16041623
pending_intercepted_htlcs: Mutex::new(HashMap::new()),
1624+
pending_claimed_payments: Mutex::new(HashMap::new()),
16051625
id_to_peer: Mutex::new(HashMap::new()),
16061626
short_to_chan_info: FairRwLock::new(HashMap::new()),
16071627

@@ -3484,6 +3504,10 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
34843504
}
34853505
};
34863506
let mut claimable_htlcs = self.claimable_htlcs.lock().unwrap();
3507+
if self.pending_claimed_payments.lock().unwrap().contains_key(&payment_hash) {
3508+
fail_htlc!(claimable_htlc, payment_hash);
3509+
continue
3510+
}
34873511
let (_, htlcs) = claimable_htlcs.entry(payment_hash)
34883512
.or_insert_with(|| (purpose(), Vec::new()));
34893513
if htlcs.len() == 1 {
@@ -3556,7 +3580,12 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
35563580
check_total_value!(payment_data, payment_preimage);
35573581
},
35583582
OnionPayload::Spontaneous(preimage) => {
3559-
match self.claimable_htlcs.lock().unwrap().entry(payment_hash) {
3583+
let mut claimable_htlcs = self.claimable_htlcs.lock().unwrap();
3584+
if self.pending_claimed_payments.lock().unwrap().contains_key(&payment_hash) {
3585+
fail_htlc!(claimable_htlc, payment_hash);
3586+
continue
3587+
}
3588+
match claimable_htlcs.entry(payment_hash) {
35603589
hash_map::Entry::Vacant(e) => {
35613590
let purpose = events::PaymentPurpose::SpontaneousPayment(preimage);
35623591
e.insert((purpose.clone(), vec![claimable_htlc]));
@@ -4209,127 +4238,142 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
42094238

42104239
let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(&self.total_consistency_lock, &self.persistence_notifier);
42114240

4212-
let removed_source = self.claimable_htlcs.lock().unwrap().remove(&payment_hash);
4213-
if let Some((payment_purpose, mut sources)) = removed_source {
4214-
assert!(!sources.is_empty());
4215-
4216-
// If we are claiming an MPP payment, we have to take special care to ensure that each
4217-
// channel exists before claiming all of the payments (inside one lock).
4218-
// Note that channel existence is sufficient as we should always get a monitor update
4219-
// which will take care of the real HTLC claim enforcement.
4220-
//
4221-
// If we find an HTLC which we would need to claim but for which we do not have a
4222-
// channel, we will fail all parts of the MPP payment. While we could wait and see if
4223-
// the sender retries the already-failed path(s), it should be a pretty rare case where
4224-
// we got all the HTLCs and then a channel closed while we were waiting for the user to
4225-
// provide the preimage, so worrying too much about the optimal handling isn't worth
4226-
// it.
4227-
let mut claimable_amt_msat = 0;
4228-
let mut expected_amt_msat = None;
4229-
let mut valid_mpp = true;
4230-
let mut errs = Vec::new();
4231-
let mut claimed_any_htlcs = false;
4232-
let mut channel_state_lock = self.channel_state.lock().unwrap();
4233-
let channel_state = &mut *channel_state_lock;
4234-
let mut receiver_node_id = Some(self.our_network_pubkey);
4235-
for htlc in sources.iter() {
4236-
let chan_id = match self.short_to_chan_info.read().unwrap().get(&htlc.prev_hop.short_channel_id) {
4237-
Some((_cp_id, chan_id)) => chan_id.clone(),
4238-
None => {
4239-
valid_mpp = false;
4241+
let mut sources = {
4242+
if let Some((payment_purpose, sources)) = self.claimable_htlcs.lock().unwrap().remove(&payment_hash) {
4243+
let mut receiver_node_id = self.our_network_pubkey;
4244+
for htlc in sources.iter() {
4245+
if htlc.prev_hop.phantom_shared_secret.is_some() {
4246+
let phantom_pubkey = self.keys_manager.get_node_id(Recipient::PhantomNode)
4247+
.expect("Failed to get node_id for phantom node recipient");
4248+
receiver_node_id = phantom_pubkey;
42404249
break;
42414250
}
4242-
};
4251+
}
42434252

4244-
if let None = channel_state.by_id.get(&chan_id) {
4245-
valid_mpp = false;
4246-
break;
4253+
let dup_purpose = self.pending_claimed_payments.lock().unwrap().insert(payment_hash,
4254+
PendingClaimingPayment { amount_msat: sources.iter().map(|source| source.value).sum(),
4255+
payment_purpose, receiver_node_id,
4256+
});
4257+
if dup_purpose.is_some() {
4258+
debug_assert!(false, "Shouldn't get a duplicate pending claim event ever");
4259+
log_error!(self.logger, "Got a duplicate pending claimable event on payment hash {}! Please report this bug",
4260+
log_bytes!(payment_hash.0));
42474261
}
4262+
sources
4263+
} else { return; }
4264+
};
4265+
debug_assert!(!sources.is_empty());
42484266

4249-
if expected_amt_msat.is_some() && expected_amt_msat != Some(htlc.total_msat) {
4250-
log_error!(self.logger, "Somehow ended up with an MPP payment with different total amounts - this should not be reachable!");
4251-
debug_assert!(false);
4267+
// If we are claiming an MPP payment, we have to take special care to ensure that each
4268+
// channel exists before claiming all of the payments (inside one lock).
4269+
// Note that channel existence is sufficient as we should always get a monitor update
4270+
// which will take care of the real HTLC claim enforcement.
4271+
//
4272+
// If we find an HTLC which we would need to claim but for which we do not have a
4273+
// channel, we will fail all parts of the MPP payment. While we could wait and see if
4274+
// the sender retries the already-failed path(s), it should be a pretty rare case where
4275+
// we got all the HTLCs and then a channel closed while we were waiting for the user to
4276+
// provide the preimage, so worrying too much about the optimal handling isn't worth
4277+
// it.
4278+
let mut claimable_amt_msat = 0;
4279+
let mut expected_amt_msat = None;
4280+
let mut valid_mpp = true;
4281+
let mut errs = Vec::new();
4282+
let mut claimed_any_htlcs = false;
4283+
let mut channel_state_lock = self.channel_state.lock().unwrap();
4284+
let channel_state = &mut *channel_state_lock;
4285+
for htlc in sources.iter() {
4286+
let chan_id = match self.short_to_chan_info.read().unwrap().get(&htlc.prev_hop.short_channel_id) {
4287+
Some((_cp_id, chan_id)) => chan_id.clone(),
4288+
None => {
42524289
valid_mpp = false;
42534290
break;
42544291
}
4255-
expected_amt_msat = Some(htlc.total_msat);
4256-
if let OnionPayload::Spontaneous(_) = &htlc.onion_payload {
4257-
// We don't currently support MPP for spontaneous payments, so just check
4258-
// that there's one payment here and move on.
4259-
if sources.len() != 1 {
4260-
log_error!(self.logger, "Somehow ended up with an MPP spontaneous payment - this should not be reachable!");
4261-
debug_assert!(false);
4262-
valid_mpp = false;
4263-
break;
4264-
}
4265-
}
4266-
let phantom_shared_secret = htlc.prev_hop.phantom_shared_secret;
4267-
if phantom_shared_secret.is_some() {
4268-
let phantom_pubkey = self.keys_manager.get_node_id(Recipient::PhantomNode)
4269-
.expect("Failed to get node_id for phantom node recipient");
4270-
receiver_node_id = Some(phantom_pubkey)
4271-
}
4292+
};
42724293

4273-
claimable_amt_msat += htlc.value;
4274-
}
4275-
if sources.is_empty() || expected_amt_msat.is_none() {
4276-
log_info!(self.logger, "Attempted to claim an incomplete payment which no longer had any available HTLCs!");
4277-
return;
4294+
if let None = channel_state.by_id.get(&chan_id) {
4295+
valid_mpp = false;
4296+
break;
42784297
}
4279-
if claimable_amt_msat != expected_amt_msat.unwrap() {
4280-
log_info!(self.logger, "Attempted to claim an incomplete payment, expected {} msat, had {} available to claim.",
4281-
expected_amt_msat.unwrap(), claimable_amt_msat);
4282-
return;
4298+
4299+
if expected_amt_msat.is_some() && expected_amt_msat != Some(htlc.total_msat) {
4300+
log_error!(self.logger, "Somehow ended up with an MPP payment with different total amounts - this should not be reachable!");
4301+
debug_assert!(false);
4302+
valid_mpp = false;
4303+
break;
42834304
}
4284-
if valid_mpp {
4285-
for htlc in sources.drain(..) {
4286-
match self.claim_funds_from_hop(&mut channel_state_lock, htlc.prev_hop, payment_preimage) {
4287-
ClaimFundsFromHop::MonitorUpdateFail(pk, err, _) => {
4288-
if let msgs::ErrorAction::IgnoreError = err.err.action {
4289-
// We got a temporary failure updating monitor, but will claim the
4290-
// HTLC when the monitor updating is restored (or on chain).
4291-
log_error!(self.logger, "Temporary failure claiming HTLC, treating as success: {}", err.err.err);
4292-
claimed_any_htlcs = true;
4293-
} else { errs.push((pk, err)); }
4294-
},
4295-
ClaimFundsFromHop::PrevHopForceClosed => unreachable!("We already checked for channel existence, we can't fail here!"),
4296-
ClaimFundsFromHop::DuplicateClaim => {
4297-
// While we should never get here in most cases, if we do, it likely
4298-
// indicates that the HTLC was timed out some time ago and is no longer
4299-
// available to be claimed. Thus, it does not make sense to set
4300-
// `claimed_any_htlcs`.
4301-
},
4302-
ClaimFundsFromHop::Success(_) => claimed_any_htlcs = true,
4303-
}
4305+
expected_amt_msat = Some(htlc.total_msat);
4306+
if let OnionPayload::Spontaneous(_) = &htlc.onion_payload {
4307+
// We don't currently support MPP for spontaneous payments, so just check
4308+
// that there's one payment here and move on.
4309+
if sources.len() != 1 {
4310+
log_error!(self.logger, "Somehow ended up with an MPP spontaneous payment - this should not be reachable!");
4311+
debug_assert!(false);
4312+
valid_mpp = false;
4313+
break;
43044314
}
43054315
}
4306-
mem::drop(channel_state_lock);
4307-
if !valid_mpp {
4308-
for htlc in sources.drain(..) {
4309-
let mut htlc_msat_height_data = byte_utils::be64_to_array(htlc.value).to_vec();
4310-
htlc_msat_height_data.extend_from_slice(&byte_utils::be32_to_array(
4311-
self.best_block.read().unwrap().height()));
4312-
let source = HTLCSource::PreviousHopData(htlc.prev_hop);
4313-
let reason = HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data);
4314-
let receiver = HTLCDestination::FailedPayment { payment_hash };
4315-
self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
4316+
4317+
claimable_amt_msat += htlc.value;
4318+
}
4319+
if sources.is_empty() || expected_amt_msat.is_none() {
4320+
self.pending_claimed_payments.lock().unwrap().remove(&payment_hash);
4321+
log_info!(self.logger, "Attempted to claim an incomplete payment which no longer had any available HTLCs!");
4322+
return;
4323+
}
4324+
if claimable_amt_msat != expected_amt_msat.unwrap() {
4325+
self.pending_claimed_payments.lock().unwrap().remove(&payment_hash);
4326+
log_info!(self.logger, "Attempted to claim an incomplete payment, expected {} msat, had {} available to claim.",
4327+
expected_amt_msat.unwrap(), claimable_amt_msat);
4328+
return;
4329+
}
4330+
if valid_mpp {
4331+
for htlc in sources.drain(..) {
4332+
match self.claim_funds_from_hop(&mut channel_state_lock, htlc.prev_hop, payment_preimage) {
4333+
ClaimFundsFromHop::MonitorUpdateFail(pk, err, _) => {
4334+
if let msgs::ErrorAction::IgnoreError = err.err.action {
4335+
// We got a temporary failure updating monitor, but will claim the
4336+
// HTLC when the monitor updating is restored (or on chain).
4337+
log_error!(self.logger, "Temporary failure claiming HTLC, treating as success: {}", err.err.err);
4338+
claimed_any_htlcs = true;
4339+
} else { errs.push((pk, err)); }
4340+
},
4341+
ClaimFundsFromHop::PrevHopForceClosed => unreachable!("We already checked for channel existence, we can't fail here!"),
4342+
ClaimFundsFromHop::DuplicateClaim => {
4343+
// While we should never get here in most cases, if we do, it likely
4344+
// indicates that the HTLC was timed out some time ago and is no longer
4345+
// available to be claimed. Thus, it does not make sense to set
4346+
// `claimed_any_htlcs`.
4347+
},
4348+
ClaimFundsFromHop::Success(_) => claimed_any_htlcs = true,
43164349
}
43174350
}
4318-
4319-
if claimed_any_htlcs {
4320-
self.pending_events.lock().unwrap().push(events::Event::PaymentClaimed {
4321-
receiver_node_id,
4322-
payment_hash,
4323-
purpose: payment_purpose,
4324-
amount_msat: claimable_amt_msat,
4325-
});
4351+
}
4352+
mem::drop(channel_state_lock);
4353+
if !valid_mpp {
4354+
for htlc in sources.drain(..) {
4355+
let mut htlc_msat_height_data = byte_utils::be64_to_array(htlc.value).to_vec();
4356+
htlc_msat_height_data.extend_from_slice(&byte_utils::be32_to_array(
4357+
self.best_block.read().unwrap().height()));
4358+
let source = HTLCSource::PreviousHopData(htlc.prev_hop);
4359+
let reason = HTLCFailReason::reason(0x4000 | 15, htlc_msat_height_data);
4360+
let receiver = HTLCDestination::FailedPayment { payment_hash };
4361+
self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
43264362
}
4363+
}
43274364

4328-
// Now we can handle any errors which were generated.
4329-
for (counterparty_node_id, err) in errs.drain(..) {
4330-
let res: Result<(), _> = Err(err);
4331-
let _ = handle_error!(self, res, counterparty_node_id);
4332-
}
4365+
let PendingClaimingPayment { amount_msat, payment_purpose: purpose, receiver_node_id } =
4366+
self.pending_claimed_payments.lock().unwrap().remove(&payment_hash).unwrap();
4367+
if claimed_any_htlcs {
4368+
self.pending_events.lock().unwrap().push(events::Event::PaymentClaimed {
4369+
payment_hash, purpose, amount_msat, receiver_node_id: Some(receiver_node_id),
4370+
});
4371+
}
4372+
4373+
// Now we can handle any errors which were generated.
4374+
for (counterparty_node_id, err) in errs.drain(..) {
4375+
let res: Result<(), _> = Err(err);
4376+
let _ = handle_error!(self, res, counterparty_node_id);
43334377
}
43344378
}
43354379

@@ -7231,10 +7275,21 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> Writeable for ChannelMana
72317275
if our_pending_intercepts.len() != 0 {
72327276
pending_intercepted_htlcs = Some(our_pending_intercepts);
72337277
}
7278+
7279+
let mut pending_claimed_payments = Some(self.pending_claimed_payments.lock().unwrap());
7280+
if pending_claimed_payments.as_ref().unwrap().is_empty() {
7281+
// LDK versions prior to 0.0.113 do not know how to read the pending claimed payments
7282+
// map. Thus, if there are no entries we skip writing a TLV for it.
7283+
pending_claimed_payments = None;
7284+
} else {
7285+
debug_assert!(false, "While we have code to serialize pending_claimed_payments, the map should always be empty until a later PR");
7286+
}
7287+
72347288
write_tlv_fields!(writer, {
72357289
(1, pending_outbound_payments_no_retry, required),
72367290
(2, pending_intercepted_htlcs, option),
72377291
(3, pending_outbound_payments, required),
7292+
(4, pending_claimed_payments, option),
72387293
(5, self.our_network_pubkey, required),
72397294
(7, self.fake_scid_rand_bytes, required),
72407295
(9, htlc_purposes, vec_type),
@@ -7552,10 +7607,12 @@ impl<'a, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
75527607
let mut fake_scid_rand_bytes: Option<[u8; 32]> = None;
75537608
let mut probing_cookie_secret: Option<[u8; 32]> = None;
75547609
let mut claimable_htlc_purposes = None;
7610+
let mut pending_claimed_payments = Some(HashMap::new());
75557611
read_tlv_fields!(reader, {
75567612
(1, pending_outbound_payments_no_retry, option),
75577613
(2, pending_intercepted_htlcs, option),
75587614
(3, pending_outbound_payments, option),
7615+
(4, pending_claimed_payments, option),
75597616
(5, received_network_pubkey, option),
75607617
(7, fake_scid_rand_bytes, option),
75617618
(9, claimable_htlc_purposes, vec_type),
@@ -7781,6 +7838,7 @@ impl<'a, M: Deref, T: Deref, K: Deref, F: Deref, L: Deref>
77817838

77827839
forward_htlcs: Mutex::new(forward_htlcs),
77837840
claimable_htlcs: Mutex::new(claimable_htlcs),
7841+
pending_claimed_payments: Mutex::new(pending_claimed_payments.unwrap()),
77847842
outbound_scid_aliases: Mutex::new(outbound_scid_aliases),
77857843
id_to_peer: Mutex::new(id_to_peer),
77867844
short_to_chan_info: FairRwLock::new(short_to_chan_info),

0 commit comments

Comments
 (0)