
Commit 9f81f1f

ln+events+liquidity/refactor: rename NextHopChannel to ForwardFailed

Standardize naming within the HTLCHandlingType enum to make its API terminology more consistent.
1 parent 02c13b2 · commit 9f81f1f

14 files changed: +73 −73 lines changed
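For API consumers this is a source-level rename only: code that matches on the old variant name needs a one-line update, while the variant's fields are unchanged. A minimal before/after sketch of a downstream handler; the handler function itself is hypothetical, and the field types (`Option<PublicKey>`, `ChannelId`) are assumed from the docs in the hunks below:

use lightning::events::HTLCHandlingType;

// Hypothetical downstream handler; not part of this commit.
fn on_htlc_handling_failed(handling_type: &HTLCHandlingType) {
    match handling_type {
        // Before this commit, this arm read:
        // HTLCHandlingType::NextHopChannel { node_id, channel_id } => ...
        HTLCHandlingType::ForwardFailed { node_id, channel_id } => {
            // `node_id` is optional: per the variant's docs, versions prior
            // to 0.0.110 may not have recorded counterparty information.
            eprintln!("failed to forward over {:?} toward {:?}", channel_id, node_id);
        },
        _ => {},
    }
}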

lightning-liquidity/src/lsps2/service.rs (+1 −1)

@@ -883,7 +883,7 @@ where
     ///
     /// [`Event::HTLCHandlingFailed`]: lightning::events::Event::HTLCHandlingFailed
     pub fn htlc_handling_failed(&self, handling_type: HTLCHandlingType) -> Result<(), APIError> {
-        if let HTLCHandlingType::NextHopChannel { channel_id, .. } = handling_type {
+        if let HTLCHandlingType::ForwardFailed { channel_id, .. } = handling_type {
             let peer_by_channel_id = self.peer_by_channel_id.read().unwrap();
             if let Some(counterparty_node_id) = peer_by_channel_id.get(&channel_id) {
                 let outer_state_lock = self.per_peer_state.read().unwrap();
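Per the `if let` above, the service only acts on forward failures and ignores every other handling type. A caller-side sketch, assuming `lsps2_service` is the LSPS2 service object exposing `htlc_handling_failed` (its concrete type is outside this hunk) and that the failure details come from an `Event::HTLCHandlingFailed`:

// Relay a forward failure into the LSPS2 service so it can clean up any
// intercepted-HTLC state tied to that channel. Surrounding error handling
// is illustrative; the snippet assumes a caller that returns Result.
let handling_type = HTLCHandlingType::ForwardFailed {
    node_id: Some(counterparty_node_id),
    channel_id,
};
lsps2_service.htlc_handling_failed(handling_type)?;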

lightning/src/events/mod.rs (+2 −2)

@@ -471,7 +471,7 @@ impl_writeable_tlv_based_enum_upgradable!(ClosureReason,
 pub enum HTLCHandlingType {
     /// We tried forwarding to a channel but failed to do so. An example of such an instance is when
     /// there is insufficient capacity in our outbound channel.
-    NextHopChannel {
+    ForwardFailed {
         /// The `node_id` of the next node. For backwards compatibility, this field is
         /// marked as optional, versions prior to 0.0.110 may not always be able to provide
         /// counterparty node information.

@@ -509,7 +509,7 @@ pub enum HTLCHandlingType {
 }

 impl_writeable_tlv_based_enum_upgradable!(HTLCHandlingType,
-    (0, NextHopChannel) => {
+    (0, ForwardFailed) => {
         (0, node_id, required),
         (2, channel_id, required),
     },
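Note that only the Rust-level name changes: the variant keeps TLV tag 0 and the same `node_id`/`channel_id` field layout in `impl_writeable_tlv_based_enum_upgradable!`, so the serialized encoding is unchanged and data written before this commit reads back correctly. A reconstruction of the renamed variant from the hunks above; the field types are assumed, since they fall outside the diff context:

pub enum HTLCHandlingType {
    /// We tried forwarding to a channel but failed to do so, e.g. because
    /// our outbound channel had insufficient capacity.
    ForwardFailed {
        /// The `node_id` of the next node; optional because versions prior
        /// to 0.0.110 may not have recorded counterparty information.
        node_id: Option<PublicKey>,
        /// The channel we attempted to forward over (type assumed).
        channel_id: ChannelId,
    },
    // ... other variants (e.g. InvalidOnion, seen in the tests below) elided ...
}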

lightning/src/ln/blinded_payment_tests.rs (+4 −4)

@@ -428,7 +428,7 @@ fn do_forward_checks_failure(check: ForwardCheckFail, intro_fails: bool) {
         ForwardCheckFail::InboundOnionCheck => HTLCHandlingType::InvalidOnion,
         ForwardCheckFail::ForwardPayloadEncodedAsReceive => HTLCHandlingType::InvalidOnion,
         ForwardCheckFail::OutboundChannelCheck =>
-            HTLCHandlingType::NextHopChannel { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_1_2.2 },
+            HTLCHandlingType::ForwardFailed { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_1_2.2 },
     };
     expect_htlc_handling_failed_destinations!(
         nodes[1].node.get_and_clear_pending_events(), &[failed_destination.clone()]

@@ -459,7 +459,7 @@ fn do_forward_checks_failure(check: ForwardCheckFail, intro_fails: bool) {
     let failed_destination = match check {
         ForwardCheckFail::InboundOnionCheck|ForwardCheckFail::ForwardPayloadEncodedAsReceive => HTLCHandlingType::InvalidOnion,
         ForwardCheckFail::OutboundChannelCheck =>
-            HTLCHandlingType::NextHopChannel { node_id: Some(nodes[3].node.get_our_node_id()), channel_id: chan_2_3.2 },
+            HTLCHandlingType::ForwardFailed { node_id: Some(nodes[3].node.get_our_node_id()), channel_id: chan_2_3.2 },
     };
     expect_htlc_handling_failed_destinations!(
         nodes[2].node.get_and_clear_pending_events(), &[failed_destination.clone()]

@@ -606,7 +606,7 @@ fn do_forward_fail_in_process_pending_htlc_fwds(check: ProcessPendingHTLCsCheck,
             $curr_node.node.peer_disconnected($next_node.node.get_our_node_id());
             expect_pending_htlcs_forwardable!($curr_node);
             expect_htlc_handling_failed_destinations!($curr_node.node.get_and_clear_pending_events(),
-                vec![HTLCHandlingType::NextHopChannel { node_id: Some($next_node.node.get_our_node_id()), channel_id: $failed_chan_id }]);
+                vec![HTLCHandlingType::ForwardFailed { node_id: Some($next_node.node.get_our_node_id()), channel_id: $failed_chan_id }]);
         },
         ProcessPendingHTLCsCheck::FwdChannelClosed => {
             // Force close the next-hop channel so when we go to forward in process_pending_htlc_forwards,

@@ -1243,7 +1243,7 @@ fn min_htlc() {
     expect_pending_htlcs_forwardable!(nodes[1]);
     expect_htlc_handling_failed_destinations!(
         nodes[1].node.get_and_clear_pending_events(),
-        &[HTLCHandlingType::NextHopChannel { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_1_2.2 }]
+        &[HTLCHandlingType::ForwardFailed { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_1_2.2 }]
     );
     check_added_monitors(&nodes[1], 1);
     let mut updates = get_htlc_update_msgs!(nodes[1], nodes[0].node.get_our_node_id());

lightning/src/ln/chanmon_update_fail_tests.rs (+4 −4)

@@ -906,7 +906,7 @@ fn do_test_monitor_update_fail_raa(test_ignore_second_cs: bool) {
     let (latest_update, _) = nodes[1].chain_monitor.latest_monitor_update_id.lock().unwrap().get(&chan_2.2).unwrap().clone();
     nodes[1].chain_monitor.chain_monitor.force_channel_monitor_updated(chan_2.2, latest_update);
     check_added_monitors!(nodes[1], 0);
-    expect_pending_htlcs_forwardable_and_htlc_handling_failed!(nodes[1], vec![HTLCHandlingType::NextHopChannel { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_2.2 }]);
+    expect_pending_htlcs_forwardable_and_htlc_handling_failed!(nodes[1], vec![HTLCHandlingType::ForwardFailed { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_2.2 }]);
     check_added_monitors!(nodes[1], 1);

     let mut events_3 = nodes[1].node.get_and_clear_pending_msg_events();

@@ -1753,7 +1753,7 @@ fn test_monitor_update_on_pending_forwards() {
     commitment_signed_dance!(nodes[1], nodes[2], payment_event.commitment_msg, false);

     chanmon_cfgs[1].persister.set_update_ret(ChannelMonitorUpdateStatus::InProgress);
-    expect_pending_htlcs_forwardable_and_htlc_handling_failed!(nodes[1], vec![HTLCHandlingType::NextHopChannel { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_2.2 }]);
+    expect_pending_htlcs_forwardable_and_htlc_handling_failed!(nodes[1], vec![HTLCHandlingType::ForwardFailed { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_2.2 }]);
     check_added_monitors!(nodes[1], 1);

     chanmon_cfgs[1].persister.set_update_ret(ChannelMonitorUpdateStatus::Completed);

@@ -2160,7 +2160,7 @@ fn test_fail_htlc_on_broadcast_after_claim() {
     check_closed_broadcast!(nodes[1], true);
     connect_blocks(&nodes[1], ANTI_REORG_DELAY - 1);
     check_added_monitors!(nodes[1], 1);
-    expect_pending_htlcs_forwardable_and_htlc_handling_failed!(nodes[1], vec![HTLCHandlingType::NextHopChannel { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_id_2 }]);
+    expect_pending_htlcs_forwardable_and_htlc_handling_failed!(nodes[1], vec![HTLCHandlingType::ForwardFailed { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_id_2 }]);

     nodes[0].node.handle_update_fulfill_htlc(nodes[1].node.get_our_node_id(), &bs_updates.update_fulfill_htlcs[0]);
     expect_payment_sent(&nodes[0], payment_preimage, None, false, false);

@@ -2550,7 +2550,7 @@ fn do_test_reconnect_dup_htlc_claims(htlc_status: HTLCStatusAtDupClaim, second_f
         let mut reconnect_args = ReconnectArgs::new(&nodes[1], &nodes[2]);
         reconnect_args.pending_htlc_fails.0 = 1;
         reconnect_nodes(reconnect_args);
-        expect_pending_htlcs_forwardable_and_htlc_handling_failed!(nodes[1], vec![HTLCHandlingType::NextHopChannel { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_id_2 }]);
+        expect_pending_htlcs_forwardable_and_htlc_handling_failed!(nodes[1], vec![HTLCHandlingType::ForwardFailed { node_id: Some(nodes[2].node.get_our_node_id()), channel_id: chan_id_2 }]);
     } else {
         let mut reconnect_args = ReconnectArgs::new(&nodes[1], &nodes[2]);
         reconnect_args.pending_htlc_claims.0 = 1;

lightning/src/ln/channelmanager.rs (+10 −10)

@@ -3298,7 +3298,7 @@ macro_rules! handle_monitor_update_completion {
     }
     $self.finalize_claims(updates.finalized_claimed_htlcs);
     for failure in updates.failed_htlcs.drain(..) {
-        let receiver = HTLCHandlingType::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
+        let receiver = HTLCHandlingType::ForwardFailed { node_id: Some(counterparty_node_id), channel_id };
         $self.fail_htlc_backwards_internal(&failure.0, &failure.1, &failure.2, receiver);
     }
 } }

@@ -3924,7 +3924,7 @@ where
     for htlc_source in failed_htlcs.drain(..) {
         let failure_reason = LocalHTLCFailureReason::ChannelClosed;
         let reason = HTLCFailReason::from_failure_code(failure_reason);
-        let receiver = HTLCHandlingType::NextHopChannel { node_id: Some(*counterparty_node_id), channel_id: *channel_id };
+        let receiver = HTLCHandlingType::ForwardFailed { node_id: Some(*counterparty_node_id), channel_id: *channel_id };
         self.fail_htlc_backwards_internal(&htlc_source.0, &htlc_source.1, &reason, receiver);
     }

@@ -4048,7 +4048,7 @@ where
     let (source, payment_hash, counterparty_node_id, channel_id) = htlc_source;
     let failure_reason = LocalHTLCFailureReason::ChannelClosed;
     let reason = HTLCFailReason::from_failure_code(failure_reason);
-    let receiver = HTLCHandlingType::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
+    let receiver = HTLCHandlingType::ForwardFailed { node_id: Some(counterparty_node_id), channel_id };
     self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
     }
     if let Some((_, funding_txo, _channel_id, monitor_update)) = shutdown_res.monitor_update {

@@ -5748,7 +5748,7 @@ where
     if let Some(outgoing_scid) = outgoing_scid_opt {
         match self.short_to_chan_info.read().unwrap().get(&outgoing_scid) {
             Some((outgoing_counterparty_node_id, outgoing_channel_id)) =>
-                HTLCHandlingType::NextHopChannel {
+                HTLCHandlingType::ForwardFailed {
                     node_id: Some(*outgoing_counterparty_node_id),
                     channel_id: *outgoing_channel_id,
                 },

@@ -6116,7 +6116,7 @@ where
     let data = self.get_htlc_inbound_temp_fail_data(reason);
     failed_forwards.push((htlc_source, payment_hash,
         HTLCFailReason::reason(reason, data),
-        HTLCHandlingType::NextHopChannel { node_id: Some(chan.context.get_counterparty_node_id()), channel_id: forward_chan_id }
+        HTLCHandlingType::ForwardFailed { node_id: Some(chan.context.get_counterparty_node_id()), channel_id: forward_chan_id }
     ));
     } else {
     forwarding_channel_not_found!(core::iter::once(forward_info).chain(draining_pending_forwards));

@@ -6969,7 +6969,7 @@ where

     for (htlc_src, payment_hash) in htlcs_to_fail.drain(..) {
         let reason = HTLCFailReason::reason(failure_reason, onion_failure_data.clone());
-        let receiver = HTLCHandlingType::NextHopChannel { node_id: Some(counterparty_node_id.clone()), channel_id };
+        let receiver = HTLCHandlingType::ForwardFailed { node_id: Some(counterparty_node_id.clone()), channel_id };
         self.fail_htlc_backwards_internal(&htlc_src, &payment_hash, &reason, receiver);
     }
     }

@@ -8755,7 +8755,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
     }
     }
     for htlc_source in dropped_htlcs.drain(..) {
-        let receiver = HTLCHandlingType::NextHopChannel { node_id: Some(counterparty_node_id.clone()), channel_id: msg.channel_id };
+        let receiver = HTLCHandlingType::ForwardFailed { node_id: Some(counterparty_node_id.clone()), channel_id: msg.channel_id };
         let reason = HTLCFailReason::from_failure_code(LocalHTLCFailureReason::ChannelClosed);
         self.fail_htlc_backwards_internal(&htlc_source.0, &htlc_source.1, &reason, receiver);
     }

@@ -9613,7 +9613,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
     } else {
         log_trace!(logger, "Failing HTLC with hash {} from our monitor", &htlc_update.payment_hash);
         let failure_reason = LocalHTLCFailureReason::OnChainTimeout;
-        let receiver = HTLCHandlingType::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
+        let receiver = HTLCHandlingType::ForwardFailed { node_id: Some(counterparty_node_id), channel_id };
         let reason = HTLCFailReason::from_failure_code(failure_reason);
         self.fail_htlc_backwards_internal(&htlc_update.source, &htlc_update.payment_hash, &reason, receiver);
     }

@@ -11692,7 +11692,7 @@ where
     let reason = LocalHTLCFailureReason::CLTVExpiryTooSoon;
     let data = self.get_htlc_inbound_temp_fail_data(reason);
     timed_out_htlcs.push((source, payment_hash, HTLCFailReason::reason(reason, data),
-        HTLCHandlingType::NextHopChannel { node_id: Some(funded_channel.context.get_counterparty_node_id()), channel_id: funded_channel.context.channel_id() }));
+        HTLCHandlingType::ForwardFailed { node_id: Some(funded_channel.context.get_counterparty_node_id()), channel_id: funded_channel.context.channel_id() }));
     }
     let logger = WithChannelContext::from(&self.logger, &funded_channel.context, None);
     if let Some(channel_ready) = channel_ready_opt {

@@ -14932,7 +14932,7 @@ where
     for htlc_source in failed_htlcs.drain(..) {
         let (source, payment_hash, counterparty_node_id, channel_id) = htlc_source;
         let failure_reason = LocalHTLCFailureReason::ChannelClosed;
-        let receiver = HTLCHandlingType::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
+        let receiver = HTLCHandlingType::ForwardFailed { node_id: Some(counterparty_node_id), channel_id };
         let reason = HTLCFailReason::from_failure_code(failure_reason);
         channel_manager.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
     }

lightning/src/ln/functional_test_utils.rs (+2 −2)

@@ -2153,7 +2153,7 @@ pub fn do_commitment_signed_dance(node_a: &Node<'_, '_, '_>, node_b: &Node<'_, '

     if fail_backwards {
         expect_pending_htlcs_forwardable_and_htlc_handling_failed!(node_a,
-            vec![crate::events::HTLCHandlingType::NextHopChannel{ node_id: Some(node_b.node.get_our_node_id()), channel_id }]);
+            vec![crate::events::HTLCHandlingType::ForwardFailed{ node_id: Some(node_b.node.get_our_node_id()), channel_id }]);
         check_added_monitors!(node_a, 1);

         let node_a_per_peer_state = node_a.node.per_peer_state.read().unwrap();

@@ -3225,7 +3225,7 @@ pub fn pass_failed_payment_back<'a, 'b, 'c>(origin_node: &Node<'a, 'b, 'c>, expe
     node.node.handle_update_fail_htlc(prev_node.node.get_our_node_id(), &next_msgs.as_ref().unwrap().0);
     commitment_signed_dance!(node, prev_node, next_msgs.as_ref().unwrap().1, update_next_node);
     if !update_next_node {
-        expect_pending_htlcs_forwardable_and_htlc_handling_failed!(node, vec![HTLCHandlingType::NextHopChannel { node_id: Some(prev_node.node.get_our_node_id()), channel_id: next_msgs.as_ref().unwrap().0.channel_id }]);
+        expect_pending_htlcs_forwardable_and_htlc_handling_failed!(node, vec![HTLCHandlingType::ForwardFailed { node_id: Some(prev_node.node.get_our_node_id()), channel_id: next_msgs.as_ref().unwrap().0.channel_id }]);
     }
     }
     let events = node.node.get_and_clear_pending_msg_events();
