Skip to content

Commit 15e9a1a

Browse files
committed
Move ChannelClosed generation into finish_close_channel
Currently the channel shutdown sequence has a number of steps which all the shutdown callsites have to call. Because many shutdown cases are rare error cases, it's relatively easy to miss a call and leave users without `Event`s or miss some important cleanup. One of those steps, calling `issue_channel_close_events`, is rather easy to remove, as it only generates two events, which can simply be moved to another shutdown step. Here we remove `issue_channel_close_events` by moving `ChannelClosed` event generation into `finish_close_channel`.
1 parent adbc1e3 commit 15e9a1a

File tree

3 files changed

+55
-67
lines changed

3 files changed

+55
-67
lines changed

lightning/src/ln/channel.rs

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -814,6 +814,7 @@ pub(super) struct ReestablishResponses {
814814
/// The result of a shutdown that should be handled.
815815
#[must_use]
816816
pub(crate) struct ShutdownResult {
817+
pub(crate) closure_reason: ClosureReason,
817818
/// A channel monitor update to apply.
818819
pub(crate) monitor_update: Option<(PublicKey, OutPoint, ChannelMonitorUpdate)>,
819820
/// A list of dropped outbound HTLCs that can safely be failed backwards immediately.
@@ -822,6 +823,8 @@ pub(crate) struct ShutdownResult {
822823
/// propagated to the remainder of the batch.
823824
pub(crate) unbroadcasted_batch_funding_txid: Option<Txid>,
824825
pub(crate) channel_id: ChannelId,
826+
pub(crate) user_channel_id: u128,
827+
pub(crate) channel_capacity_satoshis: u64,
825828
pub(crate) counterparty_node_id: PublicKey,
826829
pub(crate) unbroadcasted_funding_tx: Option<Transaction>,
827830
}
@@ -2359,7 +2362,7 @@ impl<SP: Deref> ChannelContext<SP> where SP::Target: SignerProvider {
23592362
/// those explicitly stated to be allowed after shutdown completes, eg some simple getters).
23602363
/// Also returns the list of payment_hashes for channels which we can safely fail backwards
23612364
/// immediately (others we will have to allow to time out).
2362-
pub fn force_shutdown(&mut self, should_broadcast: bool) -> ShutdownResult {
2365+
pub fn force_shutdown(&mut self, should_broadcast: bool, closure_reason: ClosureReason) -> ShutdownResult {
23632366
// Note that we MUST only generate a monitor update that indicates force-closure - we're
23642367
// called during initialization prior to the chain_monitor in the encompassing ChannelManager
23652368
// being fully configured in some cases. Thus, its likely any monitor events we generate will
@@ -2405,10 +2408,13 @@ impl<SP: Deref> ChannelContext<SP> where SP::Target: SignerProvider {
24052408
self.channel_state = ChannelState::ShutdownComplete;
24062409
self.update_time_counter += 1;
24072410
ShutdownResult {
2411+
closure_reason,
24082412
monitor_update,
24092413
dropped_outbound_htlcs,
24102414
unbroadcasted_batch_funding_txid,
24112415
channel_id: self.channel_id,
2416+
user_channel_id: self.user_id,
2417+
channel_capacity_satoshis: self.channel_value_satoshis,
24122418
counterparty_node_id: self.counterparty_node_id,
24132419
unbroadcasted_funding_tx,
24142420
}
@@ -4938,10 +4944,13 @@ impl<SP: Deref> Channel<SP> where
49384944
if let Some((last_fee, sig)) = self.context.last_sent_closing_fee {
49394945
if last_fee == msg.fee_satoshis {
49404946
let shutdown_result = ShutdownResult {
4947+
closure_reason: ClosureReason::CooperativeClosure,
49414948
monitor_update: None,
49424949
dropped_outbound_htlcs: Vec::new(),
49434950
unbroadcasted_batch_funding_txid: self.context.unbroadcasted_batch_funding_txid(),
49444951
channel_id: self.context.channel_id,
4952+
user_channel_id: self.context.user_id,
4953+
channel_capacity_satoshis: self.context.channel_value_satoshis,
49454954
counterparty_node_id: self.context.counterparty_node_id,
49464955
unbroadcasted_funding_tx: self.context.unbroadcasted_funding(),
49474956
};
@@ -4969,10 +4978,13 @@ impl<SP: Deref> Channel<SP> where
49694978
.map_err(|_| ChannelError::Close("External signer refused to sign closing transaction".to_owned()))?;
49704979
let (signed_tx, shutdown_result) = if $new_fee == msg.fee_satoshis {
49714980
let shutdown_result = ShutdownResult {
4981+
closure_reason: ClosureReason::CooperativeClosure,
49724982
monitor_update: None,
49734983
dropped_outbound_htlcs: Vec::new(),
49744984
unbroadcasted_batch_funding_txid: self.context.unbroadcasted_batch_funding_txid(),
49754985
channel_id: self.context.channel_id,
4986+
user_channel_id: self.context.user_id,
4987+
channel_capacity_satoshis: self.context.channel_value_satoshis,
49764988
counterparty_node_id: self.context.counterparty_node_id,
49774989
unbroadcasted_funding_tx: self.context.unbroadcasted_funding(),
49784990
};

lightning/src/ln/channelmanager.rs

Lines changed: 27 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -1966,14 +1966,6 @@ macro_rules! handle_error {
19661966
msg: update
19671967
});
19681968
}
1969-
if let Some((channel_id, user_channel_id)) = chan_id {
1970-
$self.pending_events.lock().unwrap().push_back((events::Event::ChannelClosed {
1971-
channel_id, user_channel_id,
1972-
reason: ClosureReason::ProcessingError { err: err.err.clone() },
1973-
counterparty_node_id: Some($counterparty_node_id),
1974-
channel_capacity_sats: channel_capacity,
1975-
}, None));
1976-
}
19771969
}
19781970

19791971
let logger = WithContext::from(
@@ -2039,7 +2031,8 @@ macro_rules! convert_chan_phase_err {
20392031
let logger = WithChannelContext::from(&$self.logger, &$channel.context);
20402032
log_error!(logger, "Closing channel {} due to close-required error: {}", $channel_id, msg);
20412033
update_maps_on_chan_removal!($self, $channel.context);
2042-
let shutdown_res = $channel.context.force_shutdown(true);
2034+
let reason = ClosureReason::ProcessingError { err: msg.clone() };
2035+
let shutdown_res = $channel.context.force_shutdown(true, reason);
20432036
let user_id = $channel.context.get_user_id();
20442037
let channel_capacity_satoshis = $channel.context.get_value_satoshis();
20452038

@@ -2701,18 +2694,6 @@ where
27012694
.collect()
27022695
}
27032696

2704-
/// Helper function that issues the channel close events
2705-
fn issue_channel_close_events(&self, context: &ChannelContext<SP>, closure_reason: ClosureReason) {
2706-
let mut pending_events_lock = self.pending_events.lock().unwrap();
2707-
pending_events_lock.push_back((events::Event::ChannelClosed {
2708-
channel_id: context.channel_id(),
2709-
user_channel_id: context.get_user_id(),
2710-
reason: closure_reason,
2711-
counterparty_node_id: Some(context.get_counterparty_node_id()),
2712-
channel_capacity_sats: Some(context.get_value_satoshis()),
2713-
}, None));
2714-
}
2715-
27162697
fn close_channel_internal(&self, channel_id: &ChannelId, counterparty_node_id: &PublicKey, target_feerate_sats_per_1000_weight: Option<u32>, override_shutdown_script: Option<ShutdownScript>) -> Result<(), APIError> {
27172698
let _persistence_guard = PersistenceNotifierGuard::notify_on_drop(self);
27182699

@@ -2754,9 +2735,8 @@ where
27542735
peer_state_lock, peer_state, per_peer_state, chan);
27552736
}
27562737
} else {
2757-
self.issue_channel_close_events(chan_phase_entry.get().context(), ClosureReason::HolderForceClosed);
27582738
let mut chan_phase = remove_channel_phase!(self, chan_phase_entry);
2759-
shutdown_result = Some(chan_phase.context_mut().force_shutdown(false));
2739+
shutdown_result = Some(chan_phase.context_mut().force_shutdown(false, ClosureReason::HolderForceClosed));
27602740
}
27612741
},
27622742
hash_map::Entry::Vacant(_) => {
@@ -2853,6 +2833,7 @@ where
28532833
let logger = WithContext::from(
28542834
&self.logger, Some(shutdown_res.counterparty_node_id), Some(shutdown_res.channel_id),
28552835
);
2836+
28562837
log_debug!(logger, "Finishing closure of channel with {} HTLCs to fail", shutdown_res.dropped_outbound_htlcs.len());
28572838
for htlc_source in shutdown_res.dropped_outbound_htlcs.drain(..) {
28582839
let (source, payment_hash, counterparty_node_id, channel_id) = htlc_source;
@@ -2878,8 +2859,7 @@ where
28782859
let mut peer_state = peer_state_mutex.lock().unwrap();
28792860
if let Some(mut chan) = peer_state.channel_by_id.remove(&channel_id) {
28802861
update_maps_on_chan_removal!(self, &chan.context());
2881-
self.issue_channel_close_events(&chan.context(), ClosureReason::FundingBatchClosure);
2882-
shutdown_results.push(chan.context_mut().force_shutdown(false));
2862+
shutdown_results.push(chan.context_mut().force_shutdown(false, ClosureReason::FundingBatchClosure));
28832863
}
28842864
}
28852865
has_uncompleted_channel = Some(has_uncompleted_channel.map_or(!state, |v| v || !state));
@@ -2892,6 +2872,14 @@ where
28922872

28932873
{
28942874
let mut pending_events = self.pending_events.lock().unwrap();
2875+
pending_events.push_back((events::Event::ChannelClosed {
2876+
channel_id: shutdown_res.channel_id,
2877+
user_channel_id: shutdown_res.user_channel_id,
2878+
reason: shutdown_res.closure_reason,
2879+
counterparty_node_id: Some(shutdown_res.counterparty_node_id),
2880+
channel_capacity_sats: Some(shutdown_res.channel_capacity_satoshis),
2881+
}, None));
2882+
28952883
if let Some(transaction) = shutdown_res.unbroadcasted_funding_tx {
28962884
pending_events.push_back((events::Event::DiscardFunding {
28972885
channel_id: shutdown_res.channel_id, transaction
@@ -2920,17 +2908,16 @@ where
29202908
let logger = WithContext::from(&self.logger, Some(*peer_node_id), Some(*channel_id));
29212909
if let hash_map::Entry::Occupied(chan_phase_entry) = peer_state.channel_by_id.entry(channel_id.clone()) {
29222910
log_error!(logger, "Force-closing channel {}", channel_id);
2923-
self.issue_channel_close_events(&chan_phase_entry.get().context(), closure_reason);
29242911
let mut chan_phase = remove_channel_phase!(self, chan_phase_entry);
29252912
mem::drop(peer_state);
29262913
mem::drop(per_peer_state);
29272914
match chan_phase {
29282915
ChannelPhase::Funded(mut chan) => {
2929-
self.finish_close_channel(chan.context.force_shutdown(broadcast));
2916+
self.finish_close_channel(chan.context.force_shutdown(broadcast, closure_reason));
29302917
(self.get_channel_update_for_broadcast(&chan).ok(), chan.context.get_counterparty_node_id())
29312918
},
29322919
ChannelPhase::UnfundedOutboundV1(_) | ChannelPhase::UnfundedInboundV1(_) => {
2933-
self.finish_close_channel(chan_phase.context_mut().force_shutdown(false));
2920+
self.finish_close_channel(chan_phase.context_mut().force_shutdown(false, closure_reason));
29342921
// Unfunded channel has no update
29352922
(None, chan_phase.context().get_counterparty_node_id())
29362923
},
@@ -3760,7 +3747,8 @@ where
37603747
.map_err(|(mut chan, e)| if let ChannelError::Close(msg) = e {
37613748
let channel_id = chan.context.channel_id();
37623749
let user_id = chan.context.get_user_id();
3763-
let shutdown_res = chan.context.force_shutdown(false);
3750+
let reason = ClosureReason::ProcessingError { err: msg.clone() };
3751+
let shutdown_res = chan.context.force_shutdown(false, reason);
37643752
let channel_capacity = chan.context.get_value_satoshis();
37653753
(chan, MsgHandleErrInternal::from_finish_shutdown(msg, channel_id, user_id, shutdown_res, None, channel_capacity))
37663754
} else { unreachable!(); });
@@ -3967,8 +3955,8 @@ where
39673955
.and_then(|mut peer_state| peer_state.channel_by_id.remove(&channel_id))
39683956
.map(|mut chan| {
39693957
update_maps_on_chan_removal!(self, &chan.context());
3970-
self.issue_channel_close_events(&chan.context(), ClosureReason::ProcessingError { err: e.clone() });
3971-
shutdown_results.push(chan.context_mut().force_shutdown(false));
3958+
let closure_reason = ClosureReason::ProcessingError { err: e.clone() };
3959+
shutdown_results.push(chan.context_mut().force_shutdown(false, closure_reason));
39723960
});
39733961
}
39743962
}
@@ -4890,8 +4878,7 @@ where
48904878
log_error!(logger,
48914879
"Force-closing pending channel with ID {} for not establishing in a timely manner", chan_id);
48924880
update_maps_on_chan_removal!(self, &context);
4893-
self.issue_channel_close_events(&context, ClosureReason::HolderForceClosed);
4894-
shutdown_channels.push(context.force_shutdown(false));
4881+
shutdown_channels.push(context.force_shutdown(false, ClosureReason::HolderForceClosed));
48954882
pending_msg_events.push(MessageSendEvent::HandleError {
48964883
node_id: counterparty_node_id,
48974884
action: msgs::ErrorAction::SendErrorMessage {
@@ -6511,9 +6498,8 @@ where
65116498
let context = phase.context_mut();
65126499
let logger = WithChannelContext::from(&self.logger, context);
65136500
log_error!(logger, "Immediately closing unfunded channel {} as peer asked to cooperatively shut it down (which is unnecessary)", &msg.channel_id);
6514-
self.issue_channel_close_events(&context, ClosureReason::CounterpartyCoopClosedUnfundedChannel);
65156501
let mut chan = remove_channel_phase!(self, chan_phase_entry);
6516-
finish_shutdown = Some(chan.context_mut().force_shutdown(false));
6502+
finish_shutdown = Some(chan.context_mut().force_shutdown(false, ClosureReason::CounterpartyCoopClosedUnfundedChannel));
65176503
},
65186504
}
65196505
} else {
@@ -6582,7 +6568,6 @@ where
65826568
msg: update
65836569
});
65846570
}
6585-
self.issue_channel_close_events(&chan.context, ClosureReason::CooperativeClosure);
65866571
}
65876572
mem::drop(per_peer_state);
65886573
if let Some(shutdown_result) = shutdown_result {
@@ -7234,13 +7219,12 @@ where
72347219
let pending_msg_events = &mut peer_state.pending_msg_events;
72357220
if let hash_map::Entry::Occupied(chan_phase_entry) = peer_state.channel_by_id.entry(funding_outpoint.to_channel_id()) {
72367221
if let ChannelPhase::Funded(mut chan) = remove_channel_phase!(self, chan_phase_entry) {
7237-
failed_channels.push(chan.context.force_shutdown(false));
7222+
failed_channels.push(chan.context.force_shutdown(false, ClosureReason::HolderForceClosed));
72387223
if let Ok(update) = self.get_channel_update_for_broadcast(&chan) {
72397224
pending_msg_events.push(events::MessageSendEvent::BroadcastChannelUpdate {
72407225
msg: update
72417226
});
72427227
}
7243-
self.issue_channel_close_events(&chan.context, ClosureReason::HolderForceClosed);
72447228
pending_msg_events.push(events::MessageSendEvent::HandleError {
72457229
node_id: chan.context.get_counterparty_node_id(),
72467230
action: msgs::ErrorAction::DisconnectPeer {
@@ -7427,8 +7411,6 @@ where
74277411
});
74287412
}
74297413

7430-
self.issue_channel_close_events(&chan.context, ClosureReason::CooperativeClosure);
7431-
74327414
log_info!(logger, "Broadcasting {}", log_tx!(tx));
74337415
self.tx_broadcaster.broadcast_transactions(&[&tx]);
74347416
update_maps_on_chan_removal!(self, &chan.context);
@@ -8441,14 +8423,13 @@ where
84418423
update_maps_on_chan_removal!(self, &channel.context);
84428424
// It looks like our counterparty went on-chain or funding transaction was
84438425
// reorged out of the main chain. Close the channel.
8444-
failed_channels.push(channel.context.force_shutdown(true));
8426+
let reason_message = format!("{}", reason);
8427+
failed_channels.push(channel.context.force_shutdown(true, reason));
84458428
if let Ok(update) = self.get_channel_update_for_broadcast(&channel) {
84468429
pending_msg_events.push(events::MessageSendEvent::BroadcastChannelUpdate {
84478430
msg: update
84488431
});
84498432
}
8450-
let reason_message = format!("{}", reason);
8451-
self.issue_channel_close_events(&channel.context, reason);
84528433
pending_msg_events.push(events::MessageSendEvent::HandleError {
84538434
node_id: channel.context.get_counterparty_node_id(),
84548435
action: msgs::ErrorAction::DisconnectPeer {
@@ -8846,8 +8827,7 @@ where
88468827
};
88478828
// Clean up for removal.
88488829
update_maps_on_chan_removal!(self, &context);
8849-
self.issue_channel_close_events(&context, ClosureReason::DisconnectedPeer);
8850-
failed_channels.push(context.force_shutdown(false));
8830+
failed_channels.push(context.force_shutdown(false, ClosureReason::DisconnectedPeer));
88518831
false
88528832
});
88538833
// Note that we don't bother generating any events for pre-accept channels -
@@ -10287,7 +10267,7 @@ where
1028710267
log_error!(logger, " The ChannelMonitor for channel {} is at counterparty commitment transaction number {} but the ChannelManager is at counterparty commitment transaction number {}.",
1028810268
&channel.context.channel_id(), monitor.get_cur_counterparty_commitment_number(), channel.get_cur_counterparty_commitment_transaction_number());
1028910269
}
10290-
let mut shutdown_result = channel.context.force_shutdown(true);
10270+
let mut shutdown_result = channel.context.force_shutdown(true, ClosureReason::OutdatedChannelManager);
1029110271
if shutdown_result.unbroadcasted_batch_funding_txid.is_some() {
1029210272
return Err(DecodeError::InvalidValue);
1029310273
}
@@ -10349,7 +10329,7 @@ where
1034910329
// If we were persisted and shut down while the initial ChannelMonitor persistence
1035010330
// was in-progress, we never broadcasted the funding transaction and can still
1035110331
// safely discard the channel.
10352-
let _ = channel.context.force_shutdown(false);
10332+
let _ = channel.context.force_shutdown(false, ClosureReason::DisconnectedPeer);
1035310333
channel_closures.push_back((events::Event::ChannelClosed {
1035410334
channel_id: channel.context.channel_id(),
1035510335
user_channel_id: channel.context.get_user_id(),

lightning/src/ln/functional_tests.rs

Lines changed: 15 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -3328,22 +3328,18 @@ fn do_test_commitment_revoked_fail_backward_exhaustive(deliver_bs_raa: bool, use
33283328

33293329
let events = nodes[1].node.get_and_clear_pending_events();
33303330
assert_eq!(events.len(), if deliver_bs_raa { 3 + nodes.len() - 1 } else { 4 + nodes.len() });
3331-
match events[0] {
3332-
Event::ChannelClosed { reason: ClosureReason::CommitmentTxConfirmed, .. } => { },
3333-
_ => panic!("Unexepected event"),
3334-
}
3335-
match events[1] {
3336-
Event::PaymentPathFailed { ref payment_hash, .. } => {
3337-
assert_eq!(*payment_hash, fourth_payment_hash);
3338-
},
3339-
_ => panic!("Unexpected event"),
3340-
}
3341-
match events[2] {
3342-
Event::PaymentFailed { ref payment_hash, .. } => {
3343-
assert_eq!(*payment_hash, fourth_payment_hash);
3344-
},
3345-
_ => panic!("Unexpected event"),
3346-
}
3331+
assert!(events.iter().any(|ev| matches!(
3332+
ev,
3333+
Event::ChannelClosed { reason: ClosureReason::CommitmentTxConfirmed, .. }
3334+
)));
3335+
assert!(events.iter().any(|ev| matches!(
3336+
ev,
3337+
Event::PaymentPathFailed { ref payment_hash, .. } if *payment_hash == fourth_payment_hash
3338+
)));
3339+
assert!(events.iter().any(|ev| matches!(
3340+
ev,
3341+
Event::PaymentFailed { ref payment_hash, .. } if *payment_hash == fourth_payment_hash
3342+
)));
33473343

33483344
nodes[1].node.process_pending_htlc_forwards();
33493345
check_added_monitors!(nodes[1], 1);
@@ -9131,16 +9127,16 @@ fn test_duplicate_chan_id() {
91319127
chan.get_funding_created(tx.clone(), funding_outpoint, false, &&logger).map_err(|_| ()).unwrap()
91329128
},
91339129
_ => panic!("Unexpected ChannelPhase variant"),
9134-
}
9130+
}.unwrap()
91359131
};
91369132
check_added_monitors!(nodes[0], 0);
9137-
nodes[1].node.handle_funding_created(&nodes[0].node.get_our_node_id(), &funding_created.unwrap());
9133+
nodes[1].node.handle_funding_created(&nodes[0].node.get_our_node_id(), &funding_created);
91389134
// At this point we'll look up if the channel_id is present and immediately fail the channel
91399135
// without trying to persist the `ChannelMonitor`.
91409136
check_added_monitors!(nodes[1], 0);
91419137

91429138
check_closed_events(&nodes[1], &[
9143-
ExpectedCloseEvent::from_id_reason(channel_id, false, ClosureReason::ProcessingError {
9139+
ExpectedCloseEvent::from_id_reason(funding_created.temporary_channel_id, false, ClosureReason::ProcessingError {
91449140
err: "Already had channel with the new channel_id".to_owned()
91459141
})
91469142
]);

0 commit comments

Comments
 (0)