@@ -56,14 +56,15 @@ use crate::events::{
 	InboundChannelFunds, PaymentFailureReason, ReplayEvent,
 };
 use crate::events::{FundingInfo, PaidBolt12Invoice};
+use crate::ln::onion_utils::process_onion_success;
 // Since this struct is returned in `list_channels` methods, expose it here in case users want to
 // construct one themselves.
-use crate::ln::channel::PendingV2Channel;
 use crate::ln::channel::{
 	self, Channel, ChannelError, ChannelUpdateStatus, FundedChannel, InboundV1Channel,
 	OutboundV1Channel, ReconnectionMsg, ShutdownResult, UpdateFulfillCommitFetch,
 	WithChannelContext,
 };
+use crate::ln::channel::{duration_since_epoch, PendingV2Channel};
 use crate::ln::channel_state::ChannelDetails;
 use crate::ln::inbound_payment;
 use crate::ln::msgs;
@@ -76,6 +77,7 @@ use crate::ln::onion_payment::{
 	decode_incoming_update_add_htlc_onion, invalid_payment_err_data, HopConnector, InboundHTLCErr,
 	NextPacketDetails,
 };
+use crate::ln::onion_utils::AttributionData;
 use crate::ln::onion_utils::{self};
 use crate::ln::onion_utils::{HTLCFailReason, LocalHTLCFailureReason};
 use crate::ln::our_peer_storage::EncryptedOurPeerStorage;
@@ -7625,8 +7627,16 @@ where
 					pending_claim: PendingMPPClaimPointer(Arc::clone(pending_claim)),
 				}
 			});
+
+			// Create new attribution data as the final hop. Always report a zero hold time, because reporting a
+			// non-zero value will not make a difference in the penalty that may be applied by the sender.
+			let mut attribution_data = AttributionData::new();
+			attribution_data.update(&[], &htlc.prev_hop.incoming_packet_shared_secret, 0);
+			attribution_data.crypt(&htlc.prev_hop.incoming_packet_shared_secret);
+
 			self.claim_funds_from_hop(
 				htlc.prev_hop, payment_preimage, payment_info.clone(),
+				attribution_data,
 				|_, definitely_duplicate| {
 					debug_assert!(!definitely_duplicate, "We shouldn't claim duplicatively from a payment");
 					(Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash, pending_mpp_claim: this_mpp_claim }), raa_blocker)
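The three added calls above are the entire final-hop flow: create empty attribution data, record a zero hold time keyed to the incoming onion shared secret, then encrypt it in place. A minimal sketch of that sequence, assuming the LDK-internal `AttributionData` API exactly as it appears in the hunk (the free-function wrapper and its name are illustrative, not part of the crate):

```rust
// Hypothetical wrapper around the LDK-internal `AttributionData` API used in
// the hunk above; `new`/`update`/`crypt` mirror the calls made there.
fn final_hop_attribution_data(incoming_shared_secret: &[u8; 32]) -> AttributionData {
	let mut attribution_data = AttributionData::new();
	// `&[]`: the final hop has no downstream attribution payload to fold in.
	// Hold time 0: a non-zero value would not change any sender-side penalty
	// for the final hop, so there is nothing to gain by reporting one.
	attribution_data.update(&[], incoming_shared_secret, 0);
	// Encrypt keyed to the same onion shared secret, which the sender also
	// derives, so the sender can decode this hop's slot on the way back.
	attribution_data.crypt(incoming_shared_secret);
	attribution_data
}
```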
@@ -7658,7 +7668,8 @@ where
 		) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>),
 	>(
 		&self, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage,
-		payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
+		payment_info: Option<PaymentClaimDetails>, attribution_data: AttributionData,
+		completion_action: ComplFunc,
 	) {
 		let counterparty_node_id = prev_hop.counterparty_node_id.or_else(|| {
 			let short_to_chan_info = self.short_to_chan_info.read().unwrap();
@@ -7671,15 +7682,21 @@ where
 			channel_id: prev_hop.channel_id,
 			htlc_id: prev_hop.htlc_id,
 		};
-		self.claim_mpp_part(htlc_source, payment_preimage, payment_info, completion_action)
+		self.claim_mpp_part(
+			htlc_source,
+			payment_preimage,
+			payment_info,
+			attribution_data,
+			completion_action,
+		)
 	}

 	#[rustfmt::skip]
 	fn claim_mpp_part<
 		ComplFunc: FnOnce(Option<u64>, bool) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>)
 	>(
 		&self, prev_hop: HTLCClaimSource, payment_preimage: PaymentPreimage,
-		payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
+		payment_info: Option<PaymentClaimDetails>, attribution_data: AttributionData, completion_action: ComplFunc,
 	) {
 		//TODO: Delay the claimed_funds relaying just like we do outbound relay!

@@ -7712,7 +7729,7 @@ where
 		if let hash_map::Entry::Occupied(mut chan_entry) = peer_state.channel_by_id.entry(chan_id) {
 			if let Some(chan) = chan_entry.get_mut().as_funded_mut() {
 				let logger = WithChannelContext::from(&self.logger, &chan.context, None);
-				let fulfill_res = chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, payment_info, &&logger);
+				let fulfill_res = chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, payment_info, attribution_data, &&logger);

 				match fulfill_res {
 					UpdateFulfillCommitFetch::NewClaim { htlc_value_msat, monitor_update } => {
@@ -7865,9 +7882,16 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 		forwarded_htlc_value_msat: Option<u64>, skimmed_fee_msat: Option<u64>, from_onchain: bool,
 		startup_replay: bool, next_channel_counterparty_node_id: PublicKey,
 		next_channel_outpoint: OutPoint, next_channel_id: ChannelId, next_user_channel_id: Option<u128>,
+		attribution_data: Option<&AttributionData>, send_timestamp: Option<Duration>,
 	) {
 		match source {
 			HTLCSource::OutboundRoute { session_priv, payment_id, path, bolt12_invoice, .. } => {
+				// Extract the hold times for this fulfilled HTLC, if available.
+				if let Some(attribution_data) = attribution_data {
+					let _ = process_onion_success(&self.secp_ctx, &self.logger, &path,
+						&session_priv, attribution_data.clone());
+				}
+
 				debug_assert!(self.background_events_processed_since_startup.load(Ordering::Acquire),
 					"We don't support claim_htlc claims during startup - monitors may not be available yet");
 				debug_assert_eq!(next_channel_counterparty_node_id, path.hops[0].pubkey);
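On the sender side, this is where hold times are actually recovered: `process_onion_success` (imported at the top of this diff) peels the attribution data against the payment path using the session key. The call is deliberately fire-and-forget. An annotated restatement of the added block (the comments are interpretation; how the recovered times are surfaced internally, e.g. via the logger, is not shown in this diff):

```rust
// Hold-time extraction is best-effort diagnostics: the claim must succeed
// even if the attribution data is missing or malformed, hence the `Option`
// guard and the discarded `Result`.
if let Some(attribution_data) = attribution_data {
	let _ = process_onion_success(
		&self.secp_ctx,           // secp context to re-derive per-hop shared secrets
		&self.logger,             // for reporting what was recovered
		&path,                    // the hops the payment traversed, outermost first
		&session_priv,            // session key the onion was built from
		attribution_data.clone(), // decoded layer by layer, one per hop
	);
}
```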
@@ -7884,7 +7908,31 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 			let prev_user_channel_id = hop_data.user_channel_id;
 			let prev_node_id = hop_data.counterparty_node_id;
 			let completed_blocker = RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
-			self.claim_funds_from_hop(hop_data, payment_preimage, None,
+
+			// If attribution data was received from downstream, we shift it and get it ready for adding our hold
+			// time.
+			let mut attribution_data = attribution_data
+				.map_or(AttributionData::new(), |attribution_data| {
+					let mut attribution_data = attribution_data.clone();
+
+					attribution_data.shift_right();
+
+					attribution_data
+				});
+
+			// Obtain hold time, if available.
+			let now = duration_since_epoch();
+			let hold_time = if let (Some(timestamp), Some(now)) = (send_timestamp, now) {
+				u32::try_from(now.saturating_sub(timestamp).as_millis()).unwrap_or(u32::MAX)
+			} else {
+				0
+			};
+
+			// Finish attribution data by adding our hold time and crypting it.
+			attribution_data.update(&[], &hop_data.incoming_packet_shared_secret, hold_time);
+			attribution_data.crypt(&hop_data.incoming_packet_shared_secret);
+
+			self.claim_funds_from_hop(hop_data, payment_preimage, None, attribution_data,
 				|htlc_claim_value_msat, definitely_duplicate| {
 					let chan_to_release = Some(EventUnblockedChannel {
 						counterparty_node_id: next_channel_counterparty_node_id,
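The hold-time arithmetic in this hunk stands alone well. A runnable sketch under the assumption, visible in the code above, that both timestamps are `Option<Duration>` (`duration_since_epoch()` returns `None` when no clock is available, e.g. under `no-std`), in which case a zero hold time is reported:

```rust
use std::time::Duration;

// Standalone sketch of the hold-time computation: the elapsed time between
// the HTLC's recorded send timestamp and "now", in milliseconds, saturated
// into a u32.
fn hold_time_ms(send_timestamp: Option<Duration>, now: Option<Duration>) -> u32 {
	if let (Some(sent), Some(now)) = (send_timestamp, now) {
		u32::try_from(now.saturating_sub(sent).as_millis()).unwrap_or(u32::MAX)
	} else {
		0
	}
}

fn main() {
	let sent = Some(Duration::from_millis(1_000));
	let now = Some(Duration::from_millis(1_250));
	assert_eq!(hold_time_ms(sent, now), 250); // HTLC held for 250ms
	assert_eq!(hold_time_ms(None, now), 0);   // no timestamp recorded -> report 0
}
```

`saturating_sub` guards against clock skew producing a "negative" duration, and the `unwrap_or(u32::MAX)` clamp handles holds longer than `u32` milliseconds can express (~49.7 days).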
@@ -9428,7 +9476,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 	fn internal_update_fulfill_htlc(&self, counterparty_node_id: &PublicKey, msg: &msgs::UpdateFulfillHTLC) -> Result<(), MsgHandleErrInternal> {
 		let funding_txo;
 		let next_user_channel_id;
-		let (htlc_source, forwarded_htlc_value, skimmed_fee_msat) = {
+		let (htlc_source, forwarded_htlc_value, skimmed_fee_msat, send_timestamp) = {
 			let per_peer_state = self.per_peer_state.read().unwrap();
 			let peer_state_mutex = per_peer_state.get(counterparty_node_id)
 				.ok_or_else(|| {
@@ -9469,7 +9517,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 		};
 		self.claim_funds_internal(htlc_source, msg.payment_preimage.clone(),
 			Some(forwarded_htlc_value), skimmed_fee_msat, false, false, *counterparty_node_id,
-			funding_txo, msg.channel_id, Some(next_user_channel_id),
+			funding_txo, msg.channel_id, Some(next_user_channel_id), msg.attribution_data.as_ref(),
+			send_timestamp,
 		);

 		Ok(())
@@ -10283,10 +10332,13 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 				let logger = WithContext::from(&self.logger, Some(counterparty_node_id), Some(channel_id), Some(htlc_update.payment_hash));
 				if let Some(preimage) = htlc_update.payment_preimage {
 					log_trace!(logger, "Claiming HTLC with preimage {} from our monitor", preimage);
+					// Claim the funds from the previous hop, if there is one. Because this is in response to a
+					// chain event, no attribution data is available.
 					self.claim_funds_internal(
 						htlc_update.source, preimage,
 						htlc_update.htlc_value_satoshis.map(|v| v * 1000), None, true,
 						false, counterparty_node_id, funding_outpoint, channel_id, None,
+						None, None,
 					);
 				} else {
 					log_trace!(logger, "Failing HTLC with hash {} from our monitor", &htlc_update.payment_hash);
@@ -15668,8 +15720,11 @@ where
 					// Note that we don't need to pass the `payment_info` here - its
 					// already (clearly) durably on disk in the `ChannelMonitor` so there's
 					// no need to worry about getting it into others.
+					//
+					// We don't encode any attribution data, because the required onion shared secret isn't
+					// available here.
 					channel_manager.claim_mpp_part(
-						part.into(), payment_preimage, None,
+						part.into(), payment_preimage, None, AttributionData::new(),
 						|_, _|
 							(Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash, pending_mpp_claim }), pending_claim_ptr)
 					);
@@ -15773,9 +15828,10 @@ where
 				// We use `downstream_closed` in place of `from_onchain` here just as a guess - we
 				// don't remember in the `ChannelMonitor` where we got a preimage from, but if the
 				// channel is closed we just assume that it probably came from an on-chain claim.
+				// The same holds for attribution data. We don't have any, so we pass an empty one.
 				channel_manager.claim_funds_internal(source, preimage, Some(downstream_value), None,
 					downstream_closed, true, downstream_node_id, downstream_funding,
-					downstream_channel_id, None
+					downstream_channel_id, None, None, None,
 				);
 			}
