Skip to content

Commit 1feb459

Browse files
committed
Handle claim result event generation in claim_funds_from_hop
Currently `claim_funds` and `claim_funds_internal` call `claim_funds_from_hop` and then surface an `Event` to the user informing them of the forwarded/claimed payment based on its result. In both places we assume that a claim "completed" even if a monitor update is being done async. Instead, here we push that event generation through a `MonitorUpdateCompletionAction` and a call to `handle_monitor_update_completion_action`. This will allow us to hold the event(s) until async monitor updates complete in the future.
1 parent def193d commit 1feb459

File tree

1 file changed

+43
-50
lines changed

1 file changed

+43
-50
lines changed

lightning/src/ln/channelmanager.rs

Lines changed: 43 additions & 50 deletions
Original file line numberDiff line numberDiff line change
@@ -4300,7 +4300,6 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
43004300
let mut expected_amt_msat = None;
43014301
let mut valid_mpp = true;
43024302
let mut errs = Vec::new();
4303-
let mut claimed_any_htlcs = false;
43044303
let mut channel_state = Some(self.channel_state.lock().unwrap());
43054304
for htlc in sources.iter() {
43064305
let chan_id = match self.short_to_chan_info.read().unwrap().get(&htlc.prev_hop.short_channel_id) {
@@ -4352,13 +4351,14 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
43524351
if valid_mpp {
43534352
for htlc in sources.drain(..) {
43544353
if channel_state.is_none() { channel_state = Some(self.channel_state.lock().unwrap()); }
4355-
match self.claim_funds_from_hop(channel_state.take().unwrap(), htlc.prev_hop, payment_preimage) {
4354+
match self.claim_funds_from_hop(channel_state.take().unwrap(), htlc.prev_hop, payment_preimage,
4355+
|_| Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash }))
4356+
{
43564357
ClaimFundsFromHop::MonitorUpdateFail(pk, err, _) => {
43574358
if let msgs::ErrorAction::IgnoreError = err.err.action {
43584359
// We got a temporary failure updating monitor, but will claim the
43594360
// HTLC when the monitor updating is restored (or on chain).
43604361
log_error!(self.logger, "Temporary failure claiming HTLC, treating as success: {}", err.err.err);
4361-
claimed_any_htlcs = true;
43624362
} else { errs.push((pk, err)); }
43634363
},
43644364
ClaimFundsFromHop::PrevHopForceClosed => {
@@ -4373,7 +4373,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
43734373
// available to be claimed. Thus, it does not make sense to set
43744374
// `claimed_any_htlcs`.
43754375
},
4376-
ClaimFundsFromHop::Success(_) => claimed_any_htlcs = true,
4376+
ClaimFundsFromHop::Success(_) => {},
43774377
}
43784378
}
43794379
}
@@ -4387,14 +4387,7 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
43874387
let receiver = HTLCDestination::FailedPayment { payment_hash };
43884388
self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
43894389
}
4390-
}
4391-
4392-
let ClaimingPayment { amount_msat, payment_purpose: purpose, receiver_node_id } =
4393-
self.claimable_payments.lock().unwrap().pending_claiming_payments.remove(&payment_hash).unwrap();
4394-
if claimed_any_htlcs {
4395-
self.pending_events.lock().unwrap().push(events::Event::PaymentClaimed {
4396-
payment_hash, purpose, amount_msat, receiver_node_id: Some(receiver_node_id),
4397-
});
4390+
self.claimable_payments.lock().unwrap().pending_claiming_payments.remove(&payment_hash);
43984391
}
43994392

44004393
// Now we can handle any errors which were generated.
@@ -4404,12 +4397,16 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
44044397
}
44054398
}
44064399

4407-
fn claim_funds_from_hop(&self, mut channel_state_lock: MutexGuard<ChannelHolder<<K::Target as KeysInterface>::Signer>>, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage) -> ClaimFundsFromHop {
4400+
fn claim_funds_from_hop<ComplFunc: FnOnce(Option<u64>) -> Option<MonitorUpdateCompletionAction>>(&self,
4401+
mut channel_state_lock: MutexGuard<ChannelHolder<<K::Target as KeysInterface>::Signer>>,
4402+
prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage, completion_action: ComplFunc)
4403+
-> ClaimFundsFromHop {
44084404
//TODO: Delay the claimed_funds relaying just like we do outbound relay!
44094405

44104406
let chan_id = prev_hop.outpoint.to_channel_id();
44114407
let channel_state = &mut *channel_state_lock;
44124408
if let hash_map::Entry::Occupied(mut chan) = channel_state.by_id.entry(chan_id) {
4409+
let counterparty_node_id = chan.get().get_counterparty_node_id();
44134410
match chan.get_mut().get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, &self.logger) {
44144411
Ok(msgs_monitor_option) => {
44154412
if let UpdateFulfillCommitFetch::NewClaim { msgs, htlc_value_msat, monitor_update } = msgs_monitor_option {
@@ -4419,10 +4416,11 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
44194416
log_given_level!(self.logger, if e == ChannelMonitorUpdateStatus::PermanentFailure { Level::Error } else { Level::Debug },
44204417
"Failed to update channel monitor with preimage {:?}: {:?}",
44214418
payment_preimage, e);
4419+
let err = handle_monitor_update_res!(self, e, chan, RAACommitmentOrder::CommitmentFirst, false, msgs.is_some()).unwrap_err();
4420+
mem::drop(channel_state_lock);
4421+
self.handle_monitor_update_completion_actions(completion_action(Some(htlc_value_msat)));
44224422
return ClaimFundsFromHop::MonitorUpdateFail(
4423-
chan.get().get_counterparty_node_id(),
4424-
handle_monitor_update_res!(self, e, chan, RAACommitmentOrder::CommitmentFirst, false, msgs.is_some()).unwrap_err(),
4425-
Some(htlc_value_msat)
4423+
counterparty_node_id, err, Some(htlc_value_msat)
44264424
);
44274425
}
44284426
}
@@ -4441,6 +4439,8 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
44414439
}
44424440
});
44434441
}
4442+
mem::drop(channel_state_lock);
4443+
self.handle_monitor_update_completion_actions(completion_action(Some(htlc_value_msat)));
44444444
return ClaimFundsFromHop::Success(htlc_value_msat);
44454445
} else {
44464446
return ClaimFundsFromHop::DuplicateClaim;
@@ -4455,11 +4455,12 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
44554455
payment_preimage, e);
44564456
},
44574457
}
4458-
let counterparty_node_id = chan.get().get_counterparty_node_id();
44594458
let (drop, res) = convert_chan_err!(self, e, chan.get_mut(), &chan_id);
44604459
if drop {
44614460
chan.remove_entry();
44624461
}
4462+
mem::drop(channel_state_lock);
4463+
self.handle_monitor_update_completion_actions(completion_action(None));
44634464
return ClaimFundsFromHop::MonitorUpdateFail(counterparty_node_id, res, None);
44644465
},
44654466
}
@@ -4481,6 +4482,13 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
44814482
log_error!(self.logger, "Critical error: failed to update channel monitor with preimage {:?}: {:?}",
44824483
payment_preimage, update_res);
44834484
}
4485+
mem::drop(channel_state_lock);
4486+
// Note that we do process the completion action here. This totally could be a
4487+
// duplicate claim, but we have no way of knowing without interrogating the
4488+
// `ChannelMonitor` we've provided the above update to. Instead, note that `Event`s are
4489+
// generally always allowed to be duplicative (and it's specifically noted in
4490+
// `PaymentForwarded`).
4491+
self.handle_monitor_update_completion_actions(completion_action(None));
44844492
return ClaimFundsFromHop::PrevHopForceClosed
44854493
}
44864494
}
@@ -4555,43 +4563,28 @@ impl<M: Deref, T: Deref, K: Deref, F: Deref, L: Deref> ChannelManager<M, T, K, F
45554563
},
45564564
HTLCSource::PreviousHopData(hop_data) => {
45574565
let prev_outpoint = hop_data.outpoint;
4558-
let res = self.claim_funds_from_hop(channel_state_lock, hop_data, payment_preimage);
4559-
let claimed_htlc = if let ClaimFundsFromHop::DuplicateClaim = res { false } else { true };
4560-
let htlc_claim_value_msat = match res {
4561-
ClaimFundsFromHop::MonitorUpdateFail(_, _, amt_opt) => amt_opt,
4562-
ClaimFundsFromHop::Success(amt) => Some(amt),
4563-
_ => None,
4564-
};
4565-
if let ClaimFundsFromHop::PrevHopForceClosed = res {
4566-
// Note that we do *not* set `claimed_htlc` to false here. In fact, this
4567-
// totally could be a duplicate claim, but we have no way of knowing
4568-
// without interrogating the `ChannelMonitor` we've provided the above
4569-
// update to. Instead, we simply document in `PaymentForwarded` that this
4570-
// can happen.
4571-
}
4566+
let res = self.claim_funds_from_hop(channel_state_lock, hop_data, payment_preimage,
4567+
|htlc_claim_value_msat| {
4568+
if let Some(forwarded_htlc_value) = forwarded_htlc_value_msat {
4569+
let fee_earned_msat = if let Some(claimed_htlc_value) = htlc_claim_value_msat {
4570+
Some(claimed_htlc_value - forwarded_htlc_value)
4571+
} else { None };
4572+
4573+
let prev_channel_id = Some(prev_outpoint.to_channel_id());
4574+
let next_channel_id = Some(next_channel_id);
4575+
4576+
Some(MonitorUpdateCompletionAction::EmitEvent { event: events::Event::PaymentForwarded {
4577+
fee_earned_msat,
4578+
claim_from_onchain_tx: from_onchain,
4579+
prev_channel_id,
4580+
next_channel_id,
4581+
}})
4582+
} else { None }
4583+
});
45724584
if let ClaimFundsFromHop::MonitorUpdateFail(pk, err, _) = res {
45734585
let result: Result<(), _> = Err(err);
45744586
let _ = handle_error!(self, result, pk);
45754587
}
4576-
4577-
if claimed_htlc {
4578-
if let Some(forwarded_htlc_value) = forwarded_htlc_value_msat {
4579-
let fee_earned_msat = if let Some(claimed_htlc_value) = htlc_claim_value_msat {
4580-
Some(claimed_htlc_value - forwarded_htlc_value)
4581-
} else { None };
4582-
4583-
let mut pending_events = self.pending_events.lock().unwrap();
4584-
let prev_channel_id = Some(prev_outpoint.to_channel_id());
4585-
let next_channel_id = Some(next_channel_id);
4586-
4587-
pending_events.push(events::Event::PaymentForwarded {
4588-
fee_earned_msat,
4589-
claim_from_onchain_tx: from_onchain,
4590-
prev_channel_id,
4591-
next_channel_id,
4592-
});
4593-
}
4594-
}
45954588
},
45964589
}
45974590
}

0 commit comments

Comments
 (0)