
Commit 2755d0b

Add missing pending channel maps checks in ChannelManager
This is one of a series of follow-up commits addressing issues found in PR 2077, where we split channels into different maps and structs depending on the phase of their life.
1 parent d327c23

File tree: 1 file changed (+45, -32 lines)
lightning/src/ln/channelmanager.rs

@@ -691,6 +691,18 @@ impl <Signer: ChannelSigner> PeerState<Signer> {
         self.outbound_v1_channel_by_id.contains_key(channel_id) ||
             self.inbound_v1_channel_by_id.contains_key(channel_id)
     }
+
+    /// Returns a bool indicating if the given `channel_id` matches a channel we have with this peer
+    /// that is in one of our pending (unfunded) channel maps.
+    ///
+    /// NOTE: Although V1 established channels will always have a `temporary_channel_id` if they're
+    /// in `(outbound/inbound)_v1_channel_by_id`, we use the more general `channel_id` as V2
+    /// established channels will have a fixed `channel_id` already after the `accept_channel2`
+    /// message is sent/received.
+    fn has_pending_channel(&self, channel_id: &[u8; 32]) -> bool {
+        self.outbound_v1_channel_by_id.contains_key(channel_id) ||
+            self.inbound_v1_channel_by_id.contains_key(channel_id)
+    }
 }

 /// Stores a PaymentSecret and any other data we may need to validate an inbound payment is
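The new helper mirrors the existing funded-map check just above it. As a rough, self-contained illustration of how it composes, here is a sketch with toy types (plain HashMaps rather than the real `PeerState`, `Channel`, and unfunded-channel structs), including a hypothetical `has_any_channel` wrapper a caller might use to detect any id collision, funded or not:

    use std::collections::HashMap;

    // Toy stand-ins: only the maps relevant to the check are modeled.
    struct PeerState {
        channel_by_id: HashMap<[u8; 32], ()>,             // funded channels
        outbound_v1_channel_by_id: HashMap<[u8; 32], ()>, // pending, we opened
        inbound_v1_channel_by_id: HashMap<[u8; 32], ()>,  // pending, peer opened
    }

    impl PeerState {
        // Same shape as the helper added above: a channel is "pending" if its
        // id appears in either unfunded map.
        fn has_pending_channel(&self, channel_id: &[u8; 32]) -> bool {
            self.outbound_v1_channel_by_id.contains_key(channel_id) ||
                self.inbound_v1_channel_by_id.contains_key(channel_id)
        }

        // Hypothetical wrapper (not part of this commit): a channel id is taken
        // if it appears in the funded map or either pending map.
        fn has_any_channel(&self, channel_id: &[u8; 32]) -> bool {
            self.channel_by_id.contains_key(channel_id) || self.has_pending_channel(channel_id)
        }
    }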
@@ -2190,6 +2202,7 @@ where
         for (_cp_id, peer_state_mutex) in per_peer_state.iter() {
             let mut peer_state_lock = peer_state_mutex.lock().unwrap();
             let peer_state = &mut *peer_state_lock;
+            // Only `Channels` in the channel_by_id map can be considered funded.
             for (_channel_id, channel) in peer_state.channel_by_id.iter().filter(f) {
                 let details = ChannelDetails::from_channel_context(&channel.context, best_block_height,
                     peer_state.latest_features.clone());
@@ -2216,20 +2229,18 @@ where
         for (_cp_id, peer_state_mutex) in per_peer_state.iter() {
             let mut peer_state_lock = peer_state_mutex.lock().unwrap();
             let peer_state = &mut *peer_state_lock;
-            for (_channel_id, channel) in peer_state.channel_by_id.iter() {
-                let details = ChannelDetails::from_channel_context(&channel.context, best_block_height,
-                    peer_state.latest_features.clone());
-                res.push(details);
+            let chan_context_to_details = |context| {
+                ChannelDetails::from_channel_context(context, best_block_height,
+                    peer_state.latest_features.clone())
+            };
+            for (_, channel) in peer_state.channel_by_id.iter() {
+                res.push(chan_context_to_details(&channel.context));
             }
-            for (_channel_id, channel) in peer_state.inbound_v1_channel_by_id.iter() {
-                let details = ChannelDetails::from_channel_context(&channel.context, best_block_height,
-                    peer_state.latest_features.clone());
-                res.push(details);
+            for (_, channel) in peer_state.inbound_v1_channel_by_id.iter() {
+                res.push(chan_context_to_details(&channel.context));
             }
-            for (_channel_id, channel) in peer_state.outbound_v1_channel_by_id.iter() {
-                let details = ChannelDetails::from_channel_context(&channel.context, best_block_height,
-                    peer_state.latest_features.clone());
-                res.push(details);
+            for (_, channel) in peer_state.outbound_v1_channel_by_id.iter() {
+                res.push(chan_context_to_details(&channel.context));
             }
         }
     }
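Two things are going on in this hunk: the per-map loops now also cover the unfunded maps (per the comment added above, only `channel_by_id` holds funded channels), and the repeated `ChannelDetails` construction is hoisted into a local closure. A minimal sketch of the same pattern, assuming toy `Context`/`Details` types in place of LDK's `ChannelContext` and `ChannelDetails`:

    use std::collections::HashMap;

    struct Context { value_sat: u64 }
    struct Details { value_sat: u64, peer_alias: String }

    fn collect_details(
        funded: &HashMap<u32, Context>,
        inbound_pending: &HashMap<u32, Context>,
        outbound_pending: &HashMap<u32, Context>,
        peer_alias: &str,
    ) -> Vec<Details> {
        // The conversion is written once; each loop differs only in which map
        // it walks, so a missing map is easier to spot in review.
        let context_to_details = |context: &Context| Details {
            value_sat: context.value_sat,
            peer_alias: peer_alias.to_owned(),
        };
        let mut res = Vec::new();
        for context in funded.values() { res.push(context_to_details(context)); }
        for context in inbound_pending.values() { res.push(context_to_details(context)); }
        for context in outbound_pending.values() { res.push(context_to_details(context)); }
        res
    }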
@@ -2258,10 +2269,14 @@ where
             let mut peer_state_lock = peer_state_mutex.lock().unwrap();
             let peer_state = &mut *peer_state_lock;
             let features = &peer_state.latest_features;
+            let chan_context_to_details = |context| {
+                ChannelDetails::from_channel_context(context, best_block_height, features.clone())
+            };
             return peer_state.channel_by_id
                 .iter()
-                .map(|(_, channel)|
-                    ChannelDetails::from_channel_context(&channel.context, best_block_height, features.clone()))
+                .map(|(_, channel)| chan_context_to_details(&channel.context))
+                .chain(peer_state.outbound_v1_channel_by_id.iter().map(|(_, channel)| chan_context_to_details(&channel.context)))
+                .chain(peer_state.inbound_v1_channel_by_id.iter().map(|(_, channel)| chan_context_to_details(&channel.context)))
                 .collect();
         }
         vec![]
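The single-peer variant gets the same treatment, but as one iterator pipeline: the two pending maps are chained onto `channel_by_id` so the returned list covers every phase. A minimal sketch of the `Iterator::chain` shape, assuming toy maps rather than LDK types:

    use std::collections::HashMap;

    // Chaining walks each map in turn within a single pass and a single
    // collect, instead of pushing into a Vec from three separate loops.
    fn all_channel_values(
        funded: &HashMap<u32, u64>,
        outbound_pending: &HashMap<u32, u64>,
        inbound_pending: &HashMap<u32, u64>,
    ) -> Vec<u64> {
        funded.values()
            .chain(outbound_pending.values())
            .chain(inbound_pending.values())
            .copied()
            .collect()
    }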
@@ -7097,23 +7112,22 @@ where
             let mut peer_state_lock = peer_state_mutex.lock().unwrap();
             let peer_state = &mut *peer_state_lock;
             let pending_msg_events = &mut peer_state.pending_msg_events;
-            peer_state.channel_by_id.retain(|_, chan| {
-                let retain = if chan.context.get_counterparty_node_id() == *counterparty_node_id {
-                    if !chan.context.have_received_message() {
-                        // If we created this (outbound) channel while we were disconnected from the
-                        // peer we probably failed to send the open_channel message, which is now
-                        // lost. We can't have had anything pending related to this channel, so we just
-                        // drop it.
-                        false
-                    } else {
-                        pending_msg_events.push(events::MessageSendEvent::SendChannelReestablish {
-                            node_id: chan.context.get_counterparty_node_id(),
-                            msg: chan.get_channel_reestablish(&self.logger),
-                        });
-                        true
-                    }
-                } else { true };
-                if retain && chan.context.get_counterparty_node_id() != *counterparty_node_id {
+            peer_state.outbound_v1_channel_by_id.retain(|_, chan| {
+                if chan.context.get_counterparty_node_id() == *counterparty_node_id {
+                    // If we created this (outbound) channel while we were disconnected from the
+                    // peer we probably failed to send the open_channel message, which is now
+                    // lost. We can't have had anything pending related to this channel, so we just
+                    // drop it.
+                    chan.context.have_received_message()
+                } else { true }
+            });
+            peer_state.channel_by_id.iter_mut().for_each(|(_, chan)| {
+                if chan.context.get_counterparty_node_id() == *counterparty_node_id {
+                    pending_msg_events.push(events::MessageSendEvent::SendChannelReestablish {
+                        node_id: chan.context.get_counterparty_node_id(),
+                        msg: chan.get_channel_reestablish(&self.logger),
+                    });
+                } else {
                     if let Some(msg) = chan.get_signed_channel_announcement(&self.node_signer, self.genesis_hash.clone(), self.best_block.read().unwrap().height(), &self.default_configuration) {
                         if let Ok(update_msg) = self.get_channel_update_for_broadcast(chan) {
                             pending_msg_events.push(events::MessageSendEvent::SendChannelAnnouncement {
@@ -7123,7 +7137,6 @@ where
                     }
                 }
             }
-            retain
         });
     }
     //TODO: Also re-broadcast announcement_signatures
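The old code did everything in one `retain` over `channel_by_id`: it dropped never-established channels, queued `channel_reestablish`, and re-announced other peers' channels, threading a `retain` flag through to the end. The new code splits the concerns: a `retain` over the unfunded outbound map (the only map that can lose entries here) and a plain mutable iteration over the funded map (whose entries are never dropped on reconnect), which is why the trailing `retain` value disappears in the final hunk. A simplified sketch of that split, assuming toy channel types and string messages in place of LDK's structs and `MessageSendEvent`s:

    use std::collections::HashMap;

    // Toy stand-ins for the two channel phases (not the real LDK structs).
    struct PendingChan { counterparty: u32, have_received_message: bool }
    struct FundedChan { counterparty: u32 }

    fn on_peer_connected(
        pending_outbound: &mut HashMap<u32, PendingChan>,
        funded: &mut HashMap<u32, FundedChan>,
        counterparty_node_id: u32,
        pending_msg_events: &mut Vec<String>,
    ) {
        // Pass 1: prune unfunded outbound channels whose open_channel was
        // presumably lost while the peer was disconnected. Only this map can
        // lose entries, so only it uses retain.
        pending_outbound.retain(|_, chan| {
            if chan.counterparty == counterparty_node_id {
                chan.have_received_message
            } else { true }
        });
        // Pass 2: funded channels are never dropped on reconnect, so a plain
        // mutable iteration makes that invariant explicit.
        for (_, chan) in funded.iter_mut() {
            if chan.counterparty == counterparty_node_id {
                pending_msg_events.push(format!("channel_reestablish -> peer {}", chan.counterparty));
            }
        }
    }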
