Skip to content

Commit 8315a95

Browse files
committed
ln: expand LocalHTLCFailureReason to un-erase BOLT04 error codes
Sometimes the error codes that we return to the sender intentionally obscure information about the payment to prevent probing/leaking of information. This commit updates our internal representation to surface some of these failures, which will be converted to their corresponding BOLT 04 codes when they're sent over the wire.
1 parent 96efbc3 commit 8315a95

File tree

4 files changed

+141
-47
lines changed

4 files changed

+141
-47
lines changed

lightning/src/ln/channel.rs

+4-4
Original file line numberDiff line numberDiff line change
@@ -7887,7 +7887,7 @@ impl<SP: Deref> FundedChannel<SP> where
78877887
L::Target: Logger
78887888
{
78897889
if self.context.channel_state.is_local_shutdown_sent() {
7890-
return Err(("Shutdown was already sent", LocalHTLCFailureReason::PermanentChannelFailure))
7890+
return Err(("Shutdown was already sent", LocalHTLCFailureReason::ChannelClosed))
78917891
}
78927892

78937893
let dust_exposure_limiting_feerate = self.context.get_dust_exposure_limiting_feerate(&fee_estimator);
@@ -7899,7 +7899,7 @@ impl<SP: Deref> FundedChannel<SP> where
78997899
log_info!(logger, "Cannot accept value that would put our total dust exposure at {} over the limit {} on counterparty commitment tx",
79007900
on_counterparty_tx_dust_htlc_exposure_msat, max_dust_htlc_exposure_msat);
79017901
return Err(("Exceeded our total dust exposure limit on counterparty commitment tx",
7902-
LocalHTLCFailureReason::TemporaryChannelFailure))
7902+
LocalHTLCFailureReason::DustLimitCounterparty))
79037903
}
79047904
let htlc_success_dust_limit = if self.funding.get_channel_type().supports_anchors_zero_fee_htlc_tx() {
79057905
0
@@ -7914,7 +7914,7 @@ impl<SP: Deref> FundedChannel<SP> where
79147914
log_info!(logger, "Cannot accept value that would put our exposure to dust HTLCs at {} over the limit {} on holder commitment tx",
79157915
on_holder_tx_dust_htlc_exposure_msat, max_dust_htlc_exposure_msat);
79167916
return Err(("Exceeded our dust exposure limit on holder commitment tx",
7917-
LocalHTLCFailureReason::TemporaryChannelFailure))
7917+
LocalHTLCFailureReason::DustLimitHolder))
79187918
}
79197919
}
79207920

@@ -7952,7 +7952,7 @@ impl<SP: Deref> FundedChannel<SP> where
79527952
}
79537953
if pending_remote_value_msat.saturating_sub(self.funding.holder_selected_channel_reserve_satoshis * 1000).saturating_sub(anchor_outputs_value_msat) < remote_fee_cost_incl_stuck_buffer_msat {
79547954
log_info!(logger, "Attempting to fail HTLC due to fee spike buffer violation in channel {}. Rebalancing is required.", &self.context.channel_id());
7955-
return Err(("Fee spike buffer violation", LocalHTLCFailureReason::TemporaryChannelFailure));
7955+
return Err(("Fee spike buffer violation", LocalHTLCFailureReason::FeeSpikeBuffer));
79567956
}
79577957
}
79587958

lightning/src/ln/channelmanager.rs

+12-12
Original file line numberDiff line numberDiff line change
@@ -3922,7 +3922,7 @@ where
39223922
}
39233923

39243924
for htlc_source in failed_htlcs.drain(..) {
3925-
let failure_reason = LocalHTLCFailureReason::PermanentChannelFailure;
3925+
let failure_reason = LocalHTLCFailureReason::ChannelClosed;
39263926
let reason = HTLCFailReason::from_failure_code(failure_reason);
39273927
let receiver = HTLCDestination::NextHopChannel { node_id: Some(*counterparty_node_id), channel_id: *channel_id };
39283928
self.fail_htlc_backwards_internal(&htlc_source.0, &htlc_source.1, &reason, receiver);
@@ -4046,7 +4046,7 @@ where
40464046
shutdown_res.closure_reason, shutdown_res.dropped_outbound_htlcs.len());
40474047
for htlc_source in shutdown_res.dropped_outbound_htlcs.drain(..) {
40484048
let (source, payment_hash, counterparty_node_id, channel_id) = htlc_source;
4049-
let failure_reason = LocalHTLCFailureReason::PermanentChannelFailure;
4049+
let failure_reason = LocalHTLCFailureReason::ChannelClosed;
40504050
let reason = HTLCFailReason::from_failure_code(failure_reason);
40514051
let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
40524052
self.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);
@@ -4349,19 +4349,19 @@ where
43494349
// should NOT reveal the existence or non-existence of a private channel if
43504350
// we don't allow forwards outbound over them.
43514351
return Err(("Refusing to forward to a private channel based on our config.",
4352-
LocalHTLCFailureReason::UnknownNextPeer));
4352+
LocalHTLCFailureReason::PrivateChannelForward));
43534353
}
43544354
if let HopConnector::ShortChannelId(outgoing_scid) = next_packet.outgoing_connector {
43554355
if chan.funding.get_channel_type().supports_scid_privacy() && outgoing_scid != chan.context.outbound_scid_alias() {
43564356
// `option_scid_alias` (referred to in LDK as `scid_privacy`) means
43574357
// "refuse to forward unless the SCID alias was used", so we pretend
43584358
// we don't have the channel here.
43594359
return Err(("Refusing to forward over real channel SCID as our counterparty requested.",
4360-
LocalHTLCFailureReason::UnknownNextPeer));
4360+
LocalHTLCFailureReason::RealSCIDForward));
43614361
}
43624362
} else {
43634363
return Err(("Cannot forward by Node ID without SCID.",
4364-
LocalHTLCFailureReason::UnknownNextPeer));
4364+
LocalHTLCFailureReason::InvalidTrampolineForward));
43654365
}
43664366

43674367
// Note that we could technically not return an error yet here and just hope
@@ -4375,7 +4375,7 @@ where
43754375
LocalHTLCFailureReason::ChannelDisabled));
43764376
} else {
43774377
return Err(("Forwarding channel is not in a ready state.",
4378-
LocalHTLCFailureReason::TemporaryChannelFailure));
4378+
LocalHTLCFailureReason::ChannelNotReady));
43794379
}
43804380
}
43814381
if next_packet.outgoing_amt_msat < chan.context.get_counterparty_htlc_minimum_msat() {
@@ -4416,7 +4416,7 @@ where
44164416
HopConnector::ShortChannelId(scid) => scid,
44174417
HopConnector::Trampoline(_) => {
44184418
return Err(("Cannot forward by Node ID without SCID.",
4419-
LocalHTLCFailureReason::UnknownNextPeer));
4419+
LocalHTLCFailureReason::InvalidTrampolineForward));
44204420
}
44214421
};
44224422
match self.do_funded_channel_callback(outgoing_scid, |chan: &mut FundedChannel<SP>| {
@@ -8772,7 +8772,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
87728772
}
87738773
for htlc_source in dropped_htlcs.drain(..) {
87748774
let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id.clone()), channel_id: msg.channel_id };
8775-
let reason = HTLCFailReason::from_failure_code(LocalHTLCFailureReason::PermanentChannelFailure);
8775+
let reason = HTLCFailReason::from_failure_code(LocalHTLCFailureReason::ChannelClosed);
87768776
self.fail_htlc_backwards_internal(&htlc_source.0, &htlc_source.1, &reason, receiver);
87778777
}
87788778
if let Some(shutdown_res) = finish_shutdown {
@@ -9628,7 +9628,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
96289628
);
96299629
} else {
96309630
log_trace!(logger, "Failing HTLC with hash {} from our monitor", &htlc_update.payment_hash);
9631-
let failure_reason = LocalHTLCFailureReason::PermanentChannelFailure;
9631+
let failure_reason = LocalHTLCFailureReason::OnChainTimeout;
96329632
let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
96339633
let reason = HTLCFailReason::from_failure_code(failure_reason);
96349634
self.fail_htlc_backwards_internal(&htlc_update.source, &htlc_update.payment_hash, &reason, receiver);
@@ -11827,7 +11827,7 @@ where
1182711827
// number of blocks we generally consider it to take to do a commitment update,
1182811828
// just give up on it and fail the HTLC.
1182911829
if height >= htlc.cltv_expiry - HTLC_FAIL_BACK_BUFFER {
11830-
let reason = LocalHTLCFailureReason::IncorrectPaymentDetails;
11830+
let reason = LocalHTLCFailureReason::PaymentClaimBuffer;
1183111831
timed_out_htlcs.push((HTLCSource::PreviousHopData(htlc.prev_hop.clone()), payment_hash.clone(),
1183211832
HTLCFailReason::reason(reason, invalid_payment_err_data(htlc.value, height)),
1183311833
HTLCDestination::FailedPayment { payment_hash: payment_hash.clone() }));
@@ -11858,7 +11858,7 @@ where
1185811858
_ => unreachable!(),
1185911859
};
1186011860
timed_out_htlcs.push((prev_hop_data, htlc.forward_info.payment_hash,
11861-
HTLCFailReason::from_failure_code(LocalHTLCFailureReason::TemporaryNodeFailure),
11861+
HTLCFailReason::from_failure_code(LocalHTLCFailureReason::ForwardExpiryBuffer),
1186211862
HTLCDestination::InvalidForward { requested_forward_scid }));
1186311863
let logger = WithContext::from(
1186411864
&self.logger, None, Some(htlc.prev_channel_id), Some(htlc.forward_info.payment_hash)
@@ -14947,7 +14947,7 @@ where
1494714947

1494814948
for htlc_source in failed_htlcs.drain(..) {
1494914949
let (source, payment_hash, counterparty_node_id, channel_id) = htlc_source;
14950-
let failure_reason = LocalHTLCFailureReason::PermanentChannelFailure;
14950+
let failure_reason = LocalHTLCFailureReason::ChannelClosed;
1495114951
let receiver = HTLCDestination::NextHopChannel { node_id: Some(counterparty_node_id), channel_id };
1495214952
let reason = HTLCFailReason::from_failure_code(failure_reason);
1495314953
channel_manager.fail_htlc_backwards_internal(&source, &payment_hash, &reason, receiver);

lightning/src/ln/onion_payment.rs

+5-5
Original file line numberDiff line numberDiff line change
@@ -199,7 +199,7 @@ pub(super) fn create_fwd_pending_htlc_info(
199199
Some(Ok(pubkey)) => pubkey,
200200
_ => return Err(InboundHTLCErr {
201201
msg: "Missing next Trampoline hop pubkey from intermediate Trampoline forwarding data",
202-
reason: LocalHTLCFailureReason::InvalidOnionPayload,
202+
reason: LocalHTLCFailureReason::InvalidTrampolinePayload,
203203
err_data: Vec::new(),
204204
}),
205205
};
@@ -342,7 +342,7 @@ pub(super) fn create_recv_pending_htlc_info(
342342
// channel closure (see HTLC_FAIL_BACK_BUFFER rationale).
343343
if cltv_expiry <= current_height + HTLC_FAIL_BACK_BUFFER + 1 {
344344
return Err(InboundHTLCErr {
345-
reason: LocalHTLCFailureReason::IncorrectPaymentDetails,
345+
reason: LocalHTLCFailureReason::PaymentClaimBuffer,
346346
err_data: invalid_payment_err_data(amt_msat, current_height),
347347
msg: "The final CLTV expiry is too soon to handle",
348348
});
@@ -367,7 +367,7 @@ pub(super) fn create_recv_pending_htlc_info(
367367
let hashed_preimage = PaymentHash(Sha256::hash(&payment_preimage.0).to_byte_array());
368368
if hashed_preimage != payment_hash {
369369
return Err(InboundHTLCErr {
370-
reason: LocalHTLCFailureReason::IncorrectPaymentDetails,
370+
reason: LocalHTLCFailureReason::InvalidKeysendPreimage,
371371
err_data: invalid_payment_err_data(amt_msat, current_height),
372372
msg: "Payment preimage didn't match payment hash",
373373
});
@@ -395,7 +395,7 @@ pub(super) fn create_recv_pending_htlc_info(
395395
}
396396
} else {
397397
return Err(InboundHTLCErr {
398-
reason: LocalHTLCFailureReason::RequiredNodeFeature,
398+
reason: LocalHTLCFailureReason::PaymentSecretRequired,
399399
err_data: Vec::new(),
400400
msg: "We require payment_secrets",
401401
});
@@ -624,7 +624,7 @@ pub(super) fn check_incoming_htlc_cltv(
624624
// but there is no need to do that, and since we're a bit conservative with our
625625
// risk threshold it just results in failing to forward payments.
626626
if (outgoing_cltv_value) as u64 <= (cur_height + LATENCY_GRACE_PERIOD_BLOCKS) as u64 {
627-
return Err(("Outgoing CLTV value is too soon", LocalHTLCFailureReason::CLTVExpiryTooSoon));
627+
return Err(("Outgoing CLTV value is too soon", LocalHTLCFailureReason::OutgoingCLTVTooSoon));
628628
}
629629

630630
Ok(())

0 commit comments

Comments
 (0)