Skip to content

Commit 3c25995

Browse files
committed
Drop the remaining direct uses of locked_close_channel
While most of our closure logic lives in `locked_close_channel`, some important steps happen in `convert_channel_err` and `handle_error` (mostly broadcasting a channel closure `ChannelUpdate` and sending a relevant error message to our peer). In a previous commit we removed most instances of `locked_close_channel` in our closure pipeline; however, it was still called in cases where we did a cooperative close. Here we remove the remaining direct references to it, always using `convert_channel_err` instead (now with a new `COOP_CLOSED` macro variant).
1 parent 9d7a7e0 commit 3c25995

File tree

1 file changed

+50
-58
lines changed

1 file changed

+50
-58
lines changed

lightning/src/ln/channelmanager.rs

Lines changed: 50 additions & 58 deletions
Original file line numberDiff line numberDiff line change
@@ -3245,6 +3245,23 @@ macro_rules! convert_channel_err {
32453245
},
32463246
}
32473247
};
3248+
($self: ident, $peer_state: expr, $shutdown_result: expr, $funded_channel: expr, $channel_id: expr, COOP_CLOSED) => { {
3249+
let reason = ChannelError::Close(("Coop Closed".to_owned(), $shutdown_result.closure_reason.clone()));
3250+
let do_close = |_| {
3251+
(
3252+
$shutdown_result,
3253+
$self.get_channel_update_for_broadcast(&$funded_channel).ok(),
3254+
)
3255+
};
3256+
let mut locked_close = |shutdown_res_mut: &mut ShutdownResult, funded_channel: &mut FundedChannel<_>| {
3257+
locked_close_channel!($self, $peer_state, funded_channel, shutdown_res_mut, FUNDED);
3258+
};
3259+
let (close, mut err) =
3260+
convert_channel_err!($self, $peer_state, reason, $funded_channel, do_close, locked_close, $channel_id, _internal);
3261+
err.dont_send_error_message();
3262+
debug_assert!(close);
3263+
(close, err)
3264+
} };
32483265
($self: ident, $peer_state: expr, $err: expr, $funded_channel: expr, $channel_id: expr, FUNDED_CHANNEL) => { {
32493266
let mut do_close = |reason| {
32503267
(
@@ -9726,13 +9743,14 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
97269743
msg.channel_id,
97279744
)
97289745
})?;
9729-
let (tx, chan_option, shutdown_result) = {
9746+
let logger;
9747+
let tx_err: Option<(_, Result<Infallible, _>)> = {
97309748
let mut peer_state_lock = peer_state_mutex.lock().unwrap();
97319749
let peer_state = &mut *peer_state_lock;
97329750
match peer_state.channel_by_id.entry(msg.channel_id.clone()) {
97339751
hash_map::Entry::Occupied(mut chan_entry) => {
97349752
if let Some(chan) = chan_entry.get_mut().as_funded_mut() {
9735-
let logger = WithChannelContext::from(&self.logger, &chan.context, None);
9753+
logger = WithChannelContext::from(&self.logger, &chan.context, None);
97369754
let res = chan.closing_signed(&self.fee_estimator, &msg, &&logger);
97379755
let (closing_signed, tx_shutdown_result) =
97389756
try_channel_entry!(self, peer_state, res, chan_entry);
@@ -9743,16 +9761,17 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
97439761
msg,
97449762
});
97459763
}
9746-
if let Some((tx, mut close_res)) = tx_shutdown_result {
9764+
if let Some((tx, close_res)) = tx_shutdown_result {
97479765
// We're done with this channel, we've got a signed closing transaction and
97489766
// will send the closing_signed back to the remote peer upon return. This
97499767
// also implies there are no pending HTLCs left on the channel, so we can
97509768
// fully delete it from tracking (the channel monitor is still around to
97519769
// watch for old state broadcasts)!
9752-
locked_close_channel!(self, peer_state, chan, close_res, FUNDED);
9753-
(Some(tx), Some(chan_entry.remove()), Some(close_res))
9770+
let (_, err) = convert_channel_err!(self, peer_state, close_res, chan, &msg.channel_id, COOP_CLOSED);
9771+
chan_entry.remove();
9772+
Some((tx, Err(err)))
97549773
} else {
9755-
(None, None, None)
9774+
None
97569775
}
97579776
} else {
97589777
return try_channel_entry!(self, peer_state, Err(ChannelError::close(
@@ -9762,26 +9781,11 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
97629781
hash_map::Entry::Vacant(_) => return Err(MsgHandleErrInternal::send_err_msg_no_close(format!("Got a message for a channel from the wrong node! No such channel for the passed counterparty_node_id {}", counterparty_node_id), msg.channel_id))
97639782
}
97649783
};
9765-
if let Some(broadcast_tx) = tx {
9766-
let channel_id = chan_option.as_ref().map(|channel| channel.context().channel_id());
9767-
log_info!(
9768-
WithContext::from(&self.logger, Some(*counterparty_node_id), channel_id, None),
9769-
"Broadcasting {}",
9770-
log_tx!(broadcast_tx)
9771-
);
9772-
self.tx_broadcaster.broadcast_transactions(&[&broadcast_tx]);
9773-
}
9774-
if let Some(chan) = chan_option.as_ref().and_then(Channel::as_funded) {
9775-
if let Ok(update) = self.get_channel_update_for_broadcast(chan) {
9776-
let mut pending_broadcast_messages =
9777-
self.pending_broadcast_messages.lock().unwrap();
9778-
pending_broadcast_messages
9779-
.push(MessageSendEvent::BroadcastChannelUpdate { msg: update });
9780-
}
9781-
}
97829784
mem::drop(per_peer_state);
9783-
if let Some(shutdown_result) = shutdown_result {
9784-
self.finish_close_channel(shutdown_result);
9785+
if let Some((broadcast_tx, err)) = tx_err {
9786+
log_info!(logger, "Broadcasting {}", log_tx!(broadcast_tx));
9787+
self.tx_broadcaster.broadcast_transactions(&[&broadcast_tx]);
9788+
let _ = handle_error!(self, err, *counterparty_node_id);
97859789
}
97869790
Ok(())
97879791
}
@@ -11021,12 +11025,6 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
1102111025
if let Some(broadcast_tx) = msgs.signed_closing_tx {
1102211026
log_info!(logger, "Broadcasting closing tx {}", log_tx!(broadcast_tx));
1102311027
self.tx_broadcaster.broadcast_transactions(&[&broadcast_tx]);
11024-
11025-
if let Ok(update) = self.get_channel_update_for_broadcast(&funded_chan) {
11026-
pending_msg_events.push(MessageSendEvent::BroadcastChannelUpdate {
11027-
msg: update
11028-
});
11029-
}
1103011028
}
1103111029
} else {
1103211030
// We don't know how to handle a channel_ready or signed_closing_tx for a
@@ -11040,40 +11038,45 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
1104011038
}
1104111039
};
1104211040

11043-
let mut shutdown_results = Vec::new();
11041+
let mut shutdown_results: Vec<(Result<Infallible, _>, _)> = Vec::new();
1104411042
let per_peer_state = self.per_peer_state.read().unwrap();
1104511043
let per_peer_state_iter = per_peer_state.iter().filter(|(cp_id, _)| {
1104611044
if let Some((counterparty_node_id, _)) = channel_opt {
1104711045
**cp_id == counterparty_node_id
1104811046
} else { true }
1104911047
});
11050-
for (_cp_id, peer_state_mutex) in per_peer_state_iter {
11048+
for (cp_id, peer_state_mutex) in per_peer_state_iter {
1105111049
let mut peer_state_lock = peer_state_mutex.lock().unwrap();
1105211050
let peer_state = &mut *peer_state_lock;
1105311051
peer_state.channel_by_id.retain(|_, chan| {
1105411052
let shutdown_result = match channel_opt {
1105511053
Some((_, channel_id)) if chan.context().channel_id() != channel_id => None,
1105611054
_ => unblock_chan(chan, &mut peer_state.pending_msg_events),
1105711055
};
11058-
if let Some(mut shutdown_result) = shutdown_result {
11056+
if let Some(shutdown_res) = shutdown_result {
1105911057
let context = chan.context();
1106011058
let logger = WithChannelContext::from(&self.logger, context, None);
11061-
log_trace!(logger, "Removing channel {} now that the signer is unblocked", context.channel_id());
11062-
if let Some(funded_channel) = chan.as_funded_mut() {
11063-
locked_close_channel!(self, peer_state, funded_channel, shutdown_result, FUNDED);
11059+
let chan_id = context.channel_id();
11060+
log_trace!(logger, "Removing channel {} now that the signer is unblocked", chan_id);
11061+
let (remove, err) = if let Some(funded_channel) = chan.as_funded_mut() {
11062+
convert_channel_err!(self, peer_state, shutdown_res, funded_channel, &chan_id, COOP_CLOSED)
1106411063
} else {
11065-
locked_close_channel!(self, chan.context(), UNFUNDED);
11066-
}
11067-
shutdown_results.push(shutdown_result);
11064+
debug_assert!(false);
11065+
let reason = shutdown_res.closure_reason.clone();
11066+
let err = ChannelError::Close((reason.to_string(), reason));
11067+
convert_channel_err!(self, peer_state, err, chan, &chan_id, UNFUNDED_CHANNEL)
11068+
};
11069+
debug_assert!(remove);
11070+
shutdown_results.push((Err(err), *cp_id));
1106811071
false
1106911072
} else {
1107011073
true
1107111074
}
1107211075
});
1107311076
}
1107411077
drop(per_peer_state);
11075-
for shutdown_result in shutdown_results.drain(..) {
11076-
self.finish_close_channel(shutdown_result);
11078+
for (err, counterparty_node_id) in shutdown_results {
11079+
let _ = handle_error!(self, err, counterparty_node_id);
1107711080
}
1107811081
}
1107911082

@@ -11084,11 +11087,10 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
1108411087
fn maybe_generate_initial_closing_signed(&self) -> bool {
1108511088
let mut handle_errors: Vec<(PublicKey, Result<(), _>)> = Vec::new();
1108611089
let mut has_update = false;
11087-
let mut shutdown_results = Vec::new();
1108811090
{
1108911091
let per_peer_state = self.per_peer_state.read().unwrap();
1109011092

11091-
for (_cp_id, peer_state_mutex) in per_peer_state.iter() {
11093+
for (cp_id, peer_state_mutex) in per_peer_state.iter() {
1109211094
let mut peer_state_lock = peer_state_mutex.lock().unwrap();
1109311095
let peer_state = &mut *peer_state_lock;
1109411096
let pending_msg_events = &mut peer_state.pending_msg_events;
@@ -11105,17 +11107,11 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
1110511107
});
1110611108
}
1110711109
debug_assert_eq!(tx_shutdown_result_opt.is_some(), funded_chan.is_shutdown());
11108-
if let Some((tx, mut shutdown_res)) = tx_shutdown_result_opt {
11109-
locked_close_channel!(self, peer_state, funded_chan, shutdown_res, FUNDED);
11110-
shutdown_results.push(shutdown_res);
11110+
if let Some((tx, shutdown_res)) = tx_shutdown_result_opt {
1111111111
// We're done with this channel. We got a closing_signed and sent back
1111211112
// a closing_signed with a closing transaction to broadcast.
11113-
if let Ok(update) = self.get_channel_update_for_broadcast(&funded_chan) {
11114-
let mut pending_broadcast_messages = self.pending_broadcast_messages.lock().unwrap();
11115-
pending_broadcast_messages.push(MessageSendEvent::BroadcastChannelUpdate {
11116-
msg: update
11117-
});
11118-
}
11113+
let (_, err) = convert_channel_err!(self, peer_state, shutdown_res, funded_chan, channel_id, COOP_CLOSED);
11114+
handle_errors.push((*cp_id, Err(err)));
1111911115

1112011116
log_info!(logger, "Broadcasting {}", log_tx!(tx));
1112111117
self.tx_broadcaster.broadcast_transactions(&[&tx]);
@@ -11136,14 +11132,10 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
1113611132
}
1113711133
}
1113811134

11139-
for (counterparty_node_id, err) in handle_errors.drain(..) {
11135+
for (counterparty_node_id, err) in handle_errors {
1114011136
let _ = handle_error!(self, err, counterparty_node_id);
1114111137
}
1114211138

11143-
for shutdown_result in shutdown_results.drain(..) {
11144-
self.finish_close_channel(shutdown_result);
11145-
}
11146-
1114711139
has_update
1114811140
}
1114911141

0 commit comments

Comments
 (0)