@@ -3633,14 +3633,23 @@ impl TrustedChannelFeatures {
 struct ClaimCompletionActionParams {
 	definitely_duplicate: bool,
 	inbound_htlc_value_msat: Option<u64>,
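+	/// Whether the channel over which we received the HTLC being claimed has since been
+	/// closed, i.e. there is no longer an open inbound edge to claim the HTLC back over.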
+	inbound_edge_closed: bool,
 }
 
 impl ClaimCompletionActionParams {
 	fn new_claim(inbound_htlc_value_msat: u64) -> Self {
-		Self { definitely_duplicate: false, inbound_htlc_value_msat: Some(inbound_htlc_value_msat) }
+		Self {
+			definitely_duplicate: false,
+			inbound_htlc_value_msat: Some(inbound_htlc_value_msat),
+			inbound_edge_closed: false,
+		}
 	}
 	fn duplicate_claim() -> Self {
-		Self { definitely_duplicate: true, inbound_htlc_value_msat: None }
+		Self {
+			definitely_duplicate: true,
+			inbound_htlc_value_msat: None,
+			inbound_edge_closed: false,
+		}
 	}
 }
 
@@ -9649,16 +9658,56 @@ impl<
 		monitor_event_id
 			.map(|event_id| MonitorEventSource { event_id, channel_id: next_channel_id }),
 		|claim_completion_action_params| {
-			let ClaimCompletionActionParams { definitely_duplicate, inbound_htlc_value_msat } =
-				claim_completion_action_params;
+			let ClaimCompletionActionParams {
+				definitely_duplicate,
+				inbound_htlc_value_msat,
+				inbound_edge_closed,
+			} = claim_completion_action_params;
 			let chan_to_release = EventUnblockedChannel {
 				counterparty_node_id: next_channel_counterparty_node_id,
 				funding_txo: next_channel_outpoint,
 				channel_id: next_channel_id,
 				blocking_action: completed_blocker,
 			};
 
-			if definitely_duplicate && startup_replay {
+			if self.persistent_monitor_events {
+				let monitor_event_source = monitor_event_id.map(|event_id| {
+					MonitorEventSource { event_id, channel_id: next_channel_id }
+				});
+				// If persistent_monitor_events is enabled, the MonitorEvent for this HTLC claim
+				// will be re-provided to us until we explicitly ack it.
+				// * If the inbound edge is closed, we can ack it once we know the preimage is
+				//   durably persisted there and the user has processed a `PaymentForwarded` event.
+				// * If the inbound edge is open, we'll ack the monitor event once the HTLC has been
+				//   irrevocably removed via revoke_and_ack. This prevents forgetting to claim the
+				//   HTLC backwards if we lose the off-chain HTLC from the holding cell after a
+				//   restart.
+				if definitely_duplicate {
+					if inbound_edge_closed {
+						if let Some(id) = monitor_event_source {
+							self.chain_monitor.ack_monitor_event(id);
+						}
+					}
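+					// Duplicate claims never require a new completion action; if the inbound edge
+					// is still open we simply leave the event un-acked so it keeps being re-provided.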
+					(None, None)
+				} else if let Some(event) =
+					make_payment_forwarded_event(inbound_htlc_value_msat)
+				{
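+					// Defer acking the monitor event until the user has handled the resulting
+					// `PaymentForwarded` event, and only ack at all if the inbound edge is closed.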
+					let preimage_update_action =
+						MonitorUpdateCompletionAction::EmitForwardEvent {
+							event,
+							post_event_ackable_monitor_event: inbound_edge_closed
+								.then_some(monitor_event_source)
+								.flatten(),
+						};
+					(Some(preimage_update_action), None)
+				} else if inbound_edge_closed {
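+					// No event to emit, but the inbound edge is closed: ack the monitor event as
+					// soon as the preimage-carrying monitor update completes.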
+					let preimage_update_action = monitor_event_source.map(|src| {
+						MonitorUpdateCompletionAction::AckMonitorEvents { event_ids: vec![src] }
+					});
+					(preimage_update_action, None)
+				} else {
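+					// The inbound edge is still open: leave the event un-acked until the HTLC is
+					// irrevocably removed via revoke_and_ack.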
+					(None, None)
+				}
+			} else if definitely_duplicate && startup_replay {
 				// On startup we may get redundant claims which are related to
 				// monitor updates still in flight. In that case, we shouldn't
 				// immediately free, but instead let that monitor update complete
@@ -9991,6 +10040,7 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 		let (action_opt, raa_blocker_opt) = completion_action(ClaimCompletionActionParams {
 			definitely_duplicate: false,
 			inbound_htlc_value_msat: None,
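+			// The channel the HTLC arrived on is already closed by the time we get here, hence
+			// `inbound_edge_closed` is set.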
+			inbound_edge_closed: true,
 		});
 
 		if let Some(raa_blocker) = raa_blocker_opt {
@@ -12712,23 +12762,32 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 					chan.update_fulfill_htlc(&msg),
 					chan_entry
 				);
-				let prev_hops = match &res.0 {
-					HTLCSource::PreviousHopData(prev_hop) => vec![prev_hop],
-					HTLCSource::TrampolineForward { previous_hop_data, .. } => {
-						previous_hop_data.iter().collect()
-					},
-					_ => vec![],
-				};
-				let logger = WithChannelContext::from(&self.logger, &chan.context, None);
-				for prev_hop in prev_hops {
-					log_trace!(logger,
-						"Holding the next revoke_and_ack until the preimage is durably persisted in the inbound edge's ChannelMonitor",
-					);
-					peer_state
-						.actions_blocking_raa_monitor_updates
-						.entry(msg.channel_id)
-						.or_insert_with(Vec::new)
-						.push(RAAMonitorUpdateBlockingAction::from_prev_hop_data(prev_hop));
+				// If persistent_monitor_events is enabled, we don't need to block preimage-removing
+				// monitor updates because we'll get the preimage from monitor events (that are
+				// guaranteed to be re-provided until they are explicitly acked) rather than from
+				// polling the monitor's internal state.
+				if !self.persistent_monitor_events {
+					let prev_hops = match &res.0 {
+						HTLCSource::PreviousHopData(prev_hop) => vec![prev_hop],
+						HTLCSource::TrampolineForward { previous_hop_data, .. } => {
+							previous_hop_data.iter().collect()
+						},
+						_ => vec![],
+					};
+					let logger =
+						WithChannelContext::from(&self.logger, &chan.context, None);
+					for prev_hop in prev_hops {
+						log_trace!(logger,
+							"Holding the next revoke_and_ack until the preimage is durably persisted in the inbound edge's ChannelMonitor",
+						);
+						peer_state
+							.actions_blocking_raa_monitor_updates
+							.entry(msg.channel_id)
+							.or_insert_with(Vec::new)
+							.push(RAAMonitorUpdateBlockingAction::from_prev_hop_data(
+								prev_hop,
+							));
+					}
 				}
 
 				// Note that we do not need to push an `actions_blocking_raa_monitor_updates`
@@ -13730,29 +13789,22 @@ This indicates a bug inside LDK. Please report this error at https://github.com/
 						.channel_by_id
 						.contains_key(&channel_id)
 				});
-				let we_are_sender =
-					matches!(htlc_update.source, HTLCSource::OutboundRoute { .. });
-				if from_onchain | we_are_sender {
-					// Claim the funds from the previous hop, if there is one. In the future we can
-					// store attribution data in the `ChannelMonitor` and provide it here.
-					self.claim_funds_internal(
-						htlc_update.source,
-						preimage,
-						htlc_update.htlc_value_msat,
-						None,
-						from_onchain,
-						counterparty_node_id,
-						funding_outpoint,
-						channel_id,
-						htlc_update.user_channel_id,
-						None,
-						None,
-						Some(event_id),
-					);
-				}
-				if !we_are_sender {
-					self.chain_monitor.ack_monitor_event(monitor_event_source);
-				}
+				// Claim the funds from the previous hop, if there is one. In the future we can
+				// store attribution data in the `ChannelMonitor` and provide it here.
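+				// We pass the monitor event's `event_id` through so the event is only acked once
+				// the claim has been fully handled, rather than acking it immediately here.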
+				self.claim_funds_internal(
+					htlc_update.source,
+					preimage,
+					htlc_update.htlc_value_msat,
+					None,
+					from_onchain,
+					counterparty_node_id,
+					funding_outpoint,
+					channel_id,
+					htlc_update.user_channel_id,
+					None,
+					None,
+					Some(event_id),
+				);
 			} else {
 				log_trace!(logger, "Failing HTLC from our monitor");
 				let failure_reason = LocalHTLCFailureReason::OnChainTimeout;
@@ -20679,6 +20731,12 @@ impl<
 				downstream_user_channel_id,
 			) in pending_claims_to_replay
 			{
+				// If persistent_monitor_events is enabled, we don't need to explicitly reclaim HTLCs
+				// on startup because we can just wait for the relevant MonitorEvents to be
+				// re-provided to us during runtime.
+				if channel_manager.persistent_monitor_events {
+					continue;
+				}
 				// We use `downstream_closed` in place of `from_onchain` here just as a guess - we
 				// don't remember in the `ChannelMonitor` where we got a preimage from, but if the
 				// channel is closed we just assume that it probably came from an on-chain claim.