Commit 72c0f06

Add hold times to update_fulfill_htlc
1 parent 85d3502 commit 72c0f06

3 files changed: +206 -32 lines changed

lightning/src/ln/channel.rs

Lines changed: 24 additions & 20 deletions
@@ -138,7 +138,7 @@ enum FeeUpdateState {
 enum InboundHTLCRemovalReason {
 	FailRelay(msgs::OnionErrorPacket),
 	FailMalformed(([u8; 32], u16)),
-	Fulfill(PaymentPreimage),
+	Fulfill(PaymentPreimage, Option<AttributionData>),
 }
 
 /// Represents the resolution status of an inbound HTLC.
@@ -234,7 +234,7 @@ impl From<&InboundHTLCState> for Option<InboundHTLCStateDetails> {
 				Some(InboundHTLCStateDetails::AwaitingRemoteRevokeToRemoveFail),
 			InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::FailMalformed(_)) =>
 				Some(InboundHTLCStateDetails::AwaitingRemoteRevokeToRemoveFail),
-			InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(_)) =>
+			InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(_, _)) =>
 				Some(InboundHTLCStateDetails::AwaitingRemoteRevokeToRemoveFulfill),
 		}
 	}
@@ -266,7 +266,7 @@ impl InboundHTLCState {
 
 	fn preimage(&self) -> Option<PaymentPreimage> {
 		match self {
-			InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(preimage)) => {
+			InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(preimage, _)) => {
 				Some(*preimage)
 			},
 			_ => None,
@@ -466,6 +466,7 @@ enum HTLCUpdateAwaitingACK {
 	},
 	ClaimHTLC {
 		payment_preimage: PaymentPreimage,
+		attribution_data: AttributionData,
 		htlc_id: u64,
 	},
 	FailHTLC {
@@ -6212,7 +6213,7 @@ where
 		// (see equivalent if condition there).
 		assert!(!self.context.channel_state.can_generate_new_commitment());
 		let mon_update_id = self.context.latest_monitor_update_id; // Forget the ChannelMonitor update
-		let fulfill_resp = self.get_update_fulfill_htlc(htlc_id_arg, payment_preimage_arg, None, logger);
+		let fulfill_resp = self.get_update_fulfill_htlc(htlc_id_arg, payment_preimage_arg, None, AttributionData::new(), logger);
 		self.context.latest_monitor_update_id = mon_update_id;
 		if let UpdateFulfillFetch::NewClaim { update_blocked, .. } = fulfill_resp {
 			assert!(update_blocked); // The HTLC must have ended up in the holding cell.
@@ -6222,7 +6223,7 @@
 	#[rustfmt::skip]
 	fn get_update_fulfill_htlc<L: Deref>(
 		&mut self, htlc_id_arg: u64, payment_preimage_arg: PaymentPreimage,
-		payment_info: Option<PaymentClaimDetails>, logger: &L,
+		payment_info: Option<PaymentClaimDetails>, attribution_data: AttributionData, logger: &L,
 	) -> UpdateFulfillFetch where L::Target: Logger {
 		// Either ChannelReady got set (which means it won't be unset) or there is no way any
 		// caller thought we could have something claimed (cause we wouldn't have accepted in an
@@ -6246,7 +6247,7 @@
 			match htlc.state {
 				InboundHTLCState::Committed => {},
 				InboundHTLCState::LocalRemoved(ref reason) => {
-					if let &InboundHTLCRemovalReason::Fulfill(_) = reason {
+					if let &InboundHTLCRemovalReason::Fulfill(_, _) = reason {
 					} else {
 						log_warn!(logger, "Have preimage and want to fulfill HTLC with payment hash {} we already failed against channel {}", &htlc.payment_hash, &self.context.channel_id());
 						debug_assert!(false, "Tried to fulfill an HTLC that was already failed");
@@ -6312,6 +6313,7 @@
 			log_trace!(logger, "Adding HTLC claim to holding_cell in channel {}! Current state: {}", &self.context.channel_id(), self.context.channel_state.to_u32());
 			self.context.holding_cell_htlc_updates.push(HTLCUpdateAwaitingACK::ClaimHTLC {
 				payment_preimage: payment_preimage_arg, htlc_id: htlc_id_arg,
+				attribution_data,
 			});
 			return UpdateFulfillFetch::NewClaim { monitor_update, htlc_value_msat, update_blocked: true };
 		}
@@ -6324,7 +6326,7 @@
 				return UpdateFulfillFetch::NewClaim { monitor_update, htlc_value_msat, update_blocked: true };
 			}
 			log_trace!(logger, "Upgrading HTLC {} to LocalRemoved with a Fulfill in channel {}!", &htlc.payment_hash, &self.context.channel_id);
-			htlc.state = InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(payment_preimage_arg.clone()));
+			htlc.state = InboundHTLCState::LocalRemoved(InboundHTLCRemovalReason::Fulfill(payment_preimage_arg.clone(), Some(attribution_data)));
 		}
 
 		UpdateFulfillFetch::NewClaim {
@@ -6337,10 +6339,10 @@
 	#[rustfmt::skip]
 	pub fn get_update_fulfill_htlc_and_commit<L: Deref>(
 		&mut self, htlc_id: u64, payment_preimage: PaymentPreimage,
-		payment_info: Option<PaymentClaimDetails>, logger: &L,
+		payment_info: Option<PaymentClaimDetails>, attribution_data: AttributionData, logger: &L,
 	) -> UpdateFulfillCommitFetch where L::Target: Logger {
 		let release_cs_monitor = self.context.blocked_monitor_updates.is_empty();
-		match self.get_update_fulfill_htlc(htlc_id, payment_preimage, payment_info, logger) {
+		match self.get_update_fulfill_htlc(htlc_id, payment_preimage, payment_info, attribution_data, logger) {
 			UpdateFulfillFetch::NewClaim { mut monitor_update, htlc_value_msat, update_blocked } => {
 				// Even if we aren't supposed to let new monitor updates with commitment state
 				// updates run, we still need to push the preimage ChannelMonitorUpdateStep no
@@ -6658,7 +6660,7 @@
 	}
 
 	#[rustfmt::skip]
-	pub fn update_fulfill_htlc(&mut self, msg: &msgs::UpdateFulfillHTLC) -> Result<(HTLCSource, u64, Option<u64>), ChannelError> {
+	pub fn update_fulfill_htlc(&mut self, msg: &msgs::UpdateFulfillHTLC) -> Result<(HTLCSource, u64, Option<u64>, Option<Duration>), ChannelError> {
 		if self.context.channel_state.is_remote_stfu_sent() || self.context.channel_state.is_quiescent() {
 			return Err(ChannelError::WarnAndDisconnect("Got fulfill HTLC message while quiescent".to_owned()));
 		}
@@ -6669,7 +6671,7 @@
 			return Err(ChannelError::close("Peer sent update_fulfill_htlc when we needed a channel_reestablish".to_owned()));
 		}
 
-		self.mark_outbound_htlc_removed(msg.htlc_id, OutboundHTLCOutcome::Success(msg.payment_preimage)).map(|htlc| (htlc.source.clone(), htlc.amount_msat, htlc.skimmed_fee_msat))
+		self.mark_outbound_htlc_removed(msg.htlc_id, OutboundHTLCOutcome::Success(msg.payment_preimage)).map(|htlc| (htlc.source.clone(), htlc.amount_msat, htlc.skimmed_fee_msat, htlc.send_timestamp))
 	}
 
 	#[rustfmt::skip]
@@ -7160,7 +7162,7 @@
 					}
 					None
 				},
-				&HTLCUpdateAwaitingACK::ClaimHTLC { ref payment_preimage, htlc_id, .. } => {
+				&HTLCUpdateAwaitingACK::ClaimHTLC { ref payment_preimage, htlc_id, ref attribution_data } => {
 					// If an HTLC claim was previously added to the holding cell (via
 					// `get_update_fulfill_htlc`, then generating the claim message itself must
 					// not fail - any in between attempts to claim the HTLC will have resulted
@@ -7173,7 +7175,7 @@
 					// We do not bother to track and include `payment_info` here, however.
 					let mut additional_monitor_update =
 						if let UpdateFulfillFetch::NewClaim { monitor_update, .. } =
-							self.get_update_fulfill_htlc(htlc_id, *payment_preimage, None, logger)
+							self.get_update_fulfill_htlc(htlc_id, *payment_preimage, None, attribution_data.clone(), logger)
 						{ monitor_update } else { unreachable!() };
 					update_fulfill_count += 1;
 					monitor_update.updates.append(&mut additional_monitor_update.updates);
@@ -7376,7 +7378,7 @@
 		pending_inbound_htlcs.retain(|htlc| {
 			if let &InboundHTLCState::LocalRemoved(ref reason) = &htlc.state {
 				log_trace!(logger, " ...removing inbound LocalRemoved {}", &htlc.payment_hash);
-				if let &InboundHTLCRemovalReason::Fulfill(_) = reason {
+				if let &InboundHTLCRemovalReason::Fulfill(_, _) = reason {
 					value_to_self_msat_diff += htlc.amount_msat as i64;
 				}
 				*expecting_peer_commitment_signed = true;
@@ -8242,12 +8244,12 @@
 						failure_code: failure_code.clone(),
 					});
 				},
-				&InboundHTLCRemovalReason::Fulfill(ref payment_preimage) => {
+				&InboundHTLCRemovalReason::Fulfill(ref payment_preimage, ref attribution_data) => {
 					update_fulfill_htlcs.push(msgs::UpdateFulfillHTLC {
 						channel_id: self.context.channel_id(),
 						htlc_id: htlc.htlc_id,
 						payment_preimage: payment_preimage.clone(),
-						attribution_data: None,
+						attribution_data: attribution_data.clone(),
 					});
 				},
 			}
@@ -12339,7 +12341,7 @@
 				1u8.write(writer)?;
 				(hash, code).write(writer)?;
 			},
-			InboundHTLCRemovalReason::Fulfill(preimage) => {
+			InboundHTLCRemovalReason::Fulfill(preimage, _) => { // TODO: Persistence
 				2u8.write(writer)?;
 				preimage.write(writer)?;
 			},
@@ -12418,7 +12420,7 @@
 				holding_cell_skimmed_fees.push(skimmed_fee_msat);
 				holding_cell_blinding_points.push(blinding_point);
 			},
-			&HTLCUpdateAwaitingACK::ClaimHTLC { ref payment_preimage, ref htlc_id } => {
+			&HTLCUpdateAwaitingACK::ClaimHTLC { ref payment_preimage, ref htlc_id, .. } => {
 				1u8.write(writer)?;
 				payment_preimage.write(writer)?;
 				htlc_id.write(writer)?;
@@ -12705,7 +12707,7 @@
 					attribution_data: None,
 				}),
 				1 => InboundHTLCRemovalReason::FailMalformed(Readable::read(reader)?),
-				2 => InboundHTLCRemovalReason::Fulfill(Readable::read(reader)?),
+				2 => InboundHTLCRemovalReason::Fulfill(Readable::read(reader)?, None), // TODO: Persistence
 				_ => return Err(DecodeError::InvalidValue),
 			};
 			InboundHTLCState::LocalRemoved(reason)
@@ -12778,6 +12780,7 @@
 			1 => HTLCUpdateAwaitingACK::ClaimHTLC {
 				payment_preimage: Readable::read(reader)?,
 				htlc_id: Readable::read(reader)?,
+				attribution_data: AttributionData::new(), // TODO: Persistence
 			},
 			2 => HTLCUpdateAwaitingACK::FailHTLC {
 				htlc_id: Readable::read(reader)?,
@@ -13283,7 +13286,7 @@
 	}
 }
 
-fn duration_since_epoch() -> Option<Duration> {
+pub(crate) fn duration_since_epoch() -> Option<Duration> {
 	#[cfg(not(feature = "std"))]
 	let now = None;
 
@@ -14004,6 +14007,7 @@ mod tests {
 		let dummy_holding_cell_claim_htlc = HTLCUpdateAwaitingACK::ClaimHTLC {
 			payment_preimage: PaymentPreimage([42; 32]),
 			htlc_id: 0,
+			attribution_data: AttributionData::new(),
 		};
 		let dummy_holding_cell_failed_htlc = |htlc_id| HTLCUpdateAwaitingACK::FailHTLC {
 			htlc_id, err_packet: msgs::OnionErrorPacket { data: vec![42], attribution_data: Some(AttributionData::new()) }
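
The extra `Option<Duration>` returned by `update_fulfill_htlc` above is the HTLC's `send_timestamp`; the `channelmanager.rs` changes below turn it into a hold time in milliseconds before folding it into the attribution data. A minimal sketch of that conversion, mirroring the logic added to `claim_funds_internal` (the free function here is illustrative, not part of LDK's API):

use core::time::Duration;

/// Hold time in milliseconds between the HTLC's `send_timestamp` and `now`, both expressed as
/// durations since the Unix epoch. Saturates to `u32::MAX` on overflow and falls back to 0 when
/// either value is unavailable (for example on no-std builds, where `duration_since_epoch()`
/// returns `None`).
fn hold_time_ms(send_timestamp: Option<Duration>, now: Option<Duration>) -> u32 {
	match (send_timestamp, now) {
		(Some(sent), Some(now)) =>
			u32::try_from(now.saturating_sub(sent).as_millis()).unwrap_or(u32::MAX),
		_ => 0,
	}
}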

lightning/src/ln/channelmanager.rs

Lines changed: 66 additions & 10 deletions
@@ -56,14 +56,15 @@ use crate::events::{
 	InboundChannelFunds, PaymentFailureReason, ReplayEvent,
 };
 use crate::events::{FundingInfo, PaidBolt12Invoice};
+use crate::ln::onion_utils::process_onion_success;
 // Since this struct is returned in `list_channels` methods, expose it here in case users want to
 // construct one themselves.
-use crate::ln::channel::PendingV2Channel;
 use crate::ln::channel::{
 	self, Channel, ChannelError, ChannelUpdateStatus, FundedChannel, InboundV1Channel,
 	OutboundV1Channel, ReconnectionMsg, ShutdownResult, UpdateFulfillCommitFetch,
 	WithChannelContext,
 };
+use crate::ln::channel::{duration_since_epoch, PendingV2Channel};
 use crate::ln::channel_state::ChannelDetails;
 use crate::ln::inbound_payment;
 use crate::ln::msgs;
@@ -76,6 +77,7 @@ use crate::ln::onion_payment::{
 	decode_incoming_update_add_htlc_onion, invalid_payment_err_data, HopConnector, InboundHTLCErr,
 	NextPacketDetails,
 };
+use crate::ln::onion_utils::AttributionData;
 use crate::ln::onion_utils::{self};
 use crate::ln::onion_utils::{HTLCFailReason, LocalHTLCFailureReason};
 use crate::ln::our_peer_storage::EncryptedOurPeerStorage;
@@ -7625,8 +7627,16 @@
 				pending_claim: PendingMPPClaimPointer(Arc::clone(pending_claim)),
 			}
 		});
+
+		// Create new attribution data as the final hop. Always report a zero hold time, because reporting a
+		// non-zero value will not make a difference in the penalty that may be applied by the sender.
+		let mut attribution_data = AttributionData::new();
+		attribution_data.update(&[], &htlc.prev_hop.incoming_packet_shared_secret, 0);
+		attribution_data.crypt(&htlc.prev_hop.incoming_packet_shared_secret);
+
 		self.claim_funds_from_hop(
 			htlc.prev_hop, payment_preimage, payment_info.clone(),
+			attribution_data,
 			|_, definitely_duplicate| {
 				debug_assert!(!definitely_duplicate, "We shouldn't claim duplicatively from a payment");
@@ -7658,7 +7668,8 @@
 		) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>),
 	>(
 		&self, prev_hop: HTLCPreviousHopData, payment_preimage: PaymentPreimage,
-		payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
+		payment_info: Option<PaymentClaimDetails>, attribution_data: AttributionData,
+		completion_action: ComplFunc,
 	) {
 		let counterparty_node_id = prev_hop.counterparty_node_id.or_else(|| {
 			let short_to_chan_info = self.short_to_chan_info.read().unwrap();
@@ -7671,15 +7682,21 @@
 			channel_id: prev_hop.channel_id,
 			htlc_id: prev_hop.htlc_id,
 		};
-		self.claim_mpp_part(htlc_source, payment_preimage, payment_info, completion_action)
+		self.claim_mpp_part(
+			htlc_source,
+			payment_preimage,
+			payment_info,
+			attribution_data,
+			completion_action,
+		)
 	}
 
 	#[rustfmt::skip]
 	fn claim_mpp_part<
 		ComplFunc: FnOnce(Option<u64>, bool) -> (Option<MonitorUpdateCompletionAction>, Option<RAAMonitorUpdateBlockingAction>)
 	>(
 		&self, prev_hop: HTLCClaimSource, payment_preimage: PaymentPreimage,
-		payment_info: Option<PaymentClaimDetails>, completion_action: ComplFunc,
+		payment_info: Option<PaymentClaimDetails>, attribution_data: AttributionData, completion_action: ComplFunc,
 	) {
 		//TODO: Delay the claimed_funds relaying just like we do outbound relay!
 
@@ -7712,7 +7729,7 @@
 		if let hash_map::Entry::Occupied(mut chan_entry) = peer_state.channel_by_id.entry(chan_id) {
 			if let Some(chan) = chan_entry.get_mut().as_funded_mut() {
 				let logger = WithChannelContext::from(&self.logger, &chan.context, None);
-				let fulfill_res = chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, payment_info, &&logger);
+				let fulfill_res = chan.get_update_fulfill_htlc_and_commit(prev_hop.htlc_id, payment_preimage, payment_info, attribution_data, &&logger);
 
 				match fulfill_res {
 					UpdateFulfillCommitFetch::NewClaim { htlc_value_msat, monitor_update } => {
@@ -7865,9 +7882,16 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 		forwarded_htlc_value_msat: Option<u64>, skimmed_fee_msat: Option<u64>, from_onchain: bool,
 		startup_replay: bool, next_channel_counterparty_node_id: PublicKey,
 		next_channel_outpoint: OutPoint, next_channel_id: ChannelId, next_user_channel_id: Option<u128>,
+		attribution_data: Option<&AttributionData>, send_timestamp: Option<Duration>,
 	) {
 		match source {
 			HTLCSource::OutboundRoute { session_priv, payment_id, path, bolt12_invoice, .. } => {
+				// Extract the hold times for this fulfilled HTLC, if available.
+				if let Some(attribution_data) = attribution_data {
+					let _ = process_onion_success(&self.secp_ctx, &self.logger, &path,
+						&session_priv, attribution_data.clone());
+				}
+
 				debug_assert!(self.background_events_processed_since_startup.load(Ordering::Acquire),
 					"We don't support claim_htlc claims during startup - monitors may not be available yet");
 				debug_assert_eq!(next_channel_counterparty_node_id, path.hops[0].pubkey);
@@ -7884,7 +7908,31 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 				let prev_user_channel_id = hop_data.user_channel_id;
 				let prev_node_id = hop_data.counterparty_node_id;
 				let completed_blocker = RAAMonitorUpdateBlockingAction::from_prev_hop_data(&hop_data);
-				self.claim_funds_from_hop(hop_data, payment_preimage, None,
+
+				// If attribution data was received from downstream, we shift it and get it ready for adding our hold
+				// time.
+				let mut attribution_data = attribution_data
+					.map_or(AttributionData::new(), |attribution_data| {
+						let mut attribution_data = attribution_data.clone();
+
+						attribution_data.shift_right();
+
+						attribution_data
+					});
+
+				// Obtain hold time, if available.
+				let now = duration_since_epoch();
+				let hold_time = if let (Some(timestamp), Some(now)) = (send_timestamp, now) {
+					u32::try_from(now.saturating_sub(timestamp).as_millis()).unwrap_or(u32::MAX)
+				} else {
+					0
+				};
+
+				// Finish attribution data by adding our hold time and crypting it.
+				attribution_data.update(&[], &hop_data.incoming_packet_shared_secret, hold_time);
+				attribution_data.crypt(&hop_data.incoming_packet_shared_secret);
+
+				self.claim_funds_from_hop(hop_data, payment_preimage, None, attribution_data,
					|htlc_claim_value_msat, definitely_duplicate| {
 						let chan_to_release = Some(EventUnblockedChannel {
 							counterparty_node_id: next_channel_counterparty_node_id,
@@ -9428,7 +9476,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 	fn internal_update_fulfill_htlc(&self, counterparty_node_id: &PublicKey, msg: &msgs::UpdateFulfillHTLC) -> Result<(), MsgHandleErrInternal> {
 		let funding_txo;
 		let next_user_channel_id;
-		let (htlc_source, forwarded_htlc_value, skimmed_fee_msat) = {
+		let (htlc_source, forwarded_htlc_value, skimmed_fee_msat, send_timestamp) = {
 			let per_peer_state = self.per_peer_state.read().unwrap();
 			let peer_state_mutex = per_peer_state.get(counterparty_node_id)
 				.ok_or_else(|| {
@@ -9469,7 +9517,8 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 		};
 		self.claim_funds_internal(htlc_source, msg.payment_preimage.clone(),
 			Some(forwarded_htlc_value), skimmed_fee_msat, false, false, *counterparty_node_id,
-			funding_txo, msg.channel_id, Some(next_user_channel_id),
+			funding_txo, msg.channel_id, Some(next_user_channel_id), msg.attribution_data.as_ref(),
+			send_timestamp,
 		);
 
 		Ok(())
@@ -10283,10 +10332,13 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 				let logger = WithContext::from(&self.logger, Some(counterparty_node_id), Some(channel_id), Some(htlc_update.payment_hash));
 				if let Some(preimage) = htlc_update.payment_preimage {
 					log_trace!(logger, "Claiming HTLC with preimage {} from our monitor", preimage);
+					// Claim the funds from the previous hop, if there is one. Because this is in response to a
+					// chain event, no attribution data is available.
 					self.claim_funds_internal(
 						htlc_update.source, preimage,
 						htlc_update.htlc_value_satoshis.map(|v| v * 1000), None, true,
 						false, counterparty_node_id, funding_outpoint, channel_id, None,
+						None, None,
 					);
 				} else {
 					log_trace!(logger, "Failing HTLC with hash {} from our monitor", &htlc_update.payment_hash);
@@ -15668,8 +15720,11 @@ where
 							// Note that we don't need to pass the `payment_info` here - its
 							// already (clearly) durably on disk in the `ChannelMonitor` so there's
 							// no need to worry about getting it into others.
+							//
+							// We don't encode any attribution data, because the required onion shared secret isn't
+							// available here.
 							channel_manager.claim_mpp_part(
-								part.into(), payment_preimage, None,
+								part.into(), payment_preimage, None, AttributionData::new(),
 								|_, _|
 									(Some(MonitorUpdateCompletionAction::PaymentClaimed { payment_hash, pending_mpp_claim }), pending_claim_ptr)
 							);
@@ -15773,9 +15828,10 @@ where
 			// We use `downstream_closed` in place of `from_onchain` here just as a guess - we
 			// don't remember in the `ChannelMonitor` where we got a preimage from, but if the
 			// channel is closed we just assume that it probably came from an on-chain claim.
+			// The same holds for attribution data. We don't have any, so we pass an empty one.
 			channel_manager.claim_funds_internal(source, preimage, Some(downstream_value), None,
 				downstream_closed, true, downstream_node_id, downstream_funding,
-				downstream_channel_id, None
+				downstream_channel_id, None, None, None,
 			);
 		}
 
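
Taken together, these changes give each hop the same pattern when a fulfill travels back upstream: reuse the attribution data supplied by the downstream peer if any, shift it to make room for this hop, record this hop's hold time, and re-encrypt it with the hop's onion shared secret before relaying. The final hop starts from fresh data and always reports a zero hold time, and the original sender recovers the per-hop hold times via `process_onion_success`. A rough sketch of the per-hop flow, written against a stand-in trait that only mirrors how `AttributionData` is used at the call sites in this diff (the real type and its method signatures live in `lightning::ln::onion_utils` and may differ):

/// Stand-in for the operations this commit relies on; it is not part of LDK's API.
trait HoldTimeData: Clone {
	/// Fresh attribution data, used by the final hop and wherever no downstream data exists.
	fn new() -> Self;
	/// Make room for this hop's slot before our own data is added (diff: `shift_right`).
	fn shift_right(&mut self);
	/// Record `hold_time_ms`, keyed by this hop's onion shared secret (diff: `update`).
	fn update(&mut self, payload: &[u8], shared_secret: &[u8], hold_time_ms: u32);
	/// Re-encrypt the accumulated data with this hop's shared secret (diff: `crypt`).
	fn crypt(&mut self, shared_secret: &[u8]);
}

/// Per-hop handling when relaying a fulfill upstream, mirroring the flow added to
/// `claim_funds_internal`: start from the downstream data (or fresh data), shift, add our
/// hold time, then encrypt.
fn prepare_upstream_attribution<D: HoldTimeData>(
	downstream: Option<&D>, shared_secret: &[u8], hold_time_ms: u32,
) -> D {
	let mut data = match downstream {
		Some(d) => {
			let mut d = d.clone();
			d.shift_right();
			d
		},
		None => D::new(),
	};
	data.update(&[], shared_secret, hold_time_ms);
	data.crypt(shared_secret);
	data
}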