lightning_signer/
monitor.rs

1use alloc::collections::BTreeSet as Set;
2
3use bitcoin::absolute::LockTime;
4use bitcoin::blockdata::block::Header as BlockHeader;
5use bitcoin::secp256k1::Secp256k1;
6use bitcoin::{BlockHash, OutPoint, Transaction, TxIn, TxOut, Txid};
7use log::*;
8use push_decoder::Listener as _;
9use serde_derive::{Deserialize, Serialize};
10use serde_with::serde_as;
11
12use crate::chain::tracker::ChainListener;
13use crate::channel::ChannelId;
14use crate::policy::validator::ChainState;
15use crate::prelude::*;
16use crate::util::transaction_utils::{decode_commitment_number, decode_commitment_tx};
17use crate::{Arc, CommitmentPointProvider};
18
// the depth at which we consider a channel to be done
// (100 blocks, matching the coinbase maturity interval)
const MIN_DEPTH: u32 = 100;

// the maximum depth we will watch for HTLC sweeps on closed channels
// (2016 blocks, one difficulty period - about two weeks)
const MAX_CLOSING_DEPTH: u32 = 2016;
24
// Keep track of closing transaction outpoints.
// These include the to-us output (if it exists) and all HTLC outputs.
// For each output, we keep track of whether it has been spent yet.
#[derive(Clone, Debug, Serialize, Deserialize)]
struct ClosingOutpoints {
    // txid of the unilateral closing (commitment) transaction
    txid: Txid,
    // to-us output index paired with its spent flag, if such an output exists
    our_output: Option<(u32, bool)>,
    // HTLC output indexes
    htlc_outputs: Vec<u32>,
    // spent flags, parallel to htlc_outputs
    htlc_spents: Vec<bool>,
}
35
36impl ClosingOutpoints {
37    // construct a new ClosingOutpoints with all spent flags false
38    fn new(txid: Txid, our_output_index: Option<u32>, htlc_output_indexes: Vec<u32>) -> Self {
39        let v = vec![false; htlc_output_indexes.len()];
40        ClosingOutpoints {
41            txid,
42            our_output: our_output_index.map(|i| (i, false)),
43            htlc_outputs: htlc_output_indexes,
44            htlc_spents: v,
45        }
46    }
47
48    // does this closing tx's to-us output match this outpoint?
49    fn includes_our_output(&self, outpoint: &OutPoint) -> bool {
50        self.txid == outpoint.txid && self.our_output.map(|(i, _)| i) == Some(outpoint.vout)
51    }
52
53    // does this closing tx include an HTLC outpoint that matches?
54    fn includes_htlc_output(&self, outpoint: &OutPoint) -> bool {
55        self.txid == outpoint.txid && self.htlc_outputs.contains(&(outpoint.vout))
56    }
57
58    fn set_our_output_spent(&mut self, vout: u32, spent: bool) {
59        // safe due to PushListener logic
60        let p = self.our_output.as_mut().unwrap();
61        assert_eq!(p.0, vout);
62        p.1 = spent;
63    }
64
65    fn set_htlc_output_spent(&mut self, vout: u32, spent: bool) {
66        // safe due to PushListener logic
67        let i = self.htlc_outputs.iter().position(|&x| x == vout).unwrap();
68        self.htlc_spents[i] = spent;
69    }
70
71    // are all outputs spent?
72    fn is_all_spent(&self) -> bool {
73        self.our_output.as_ref().map(|(_, b)| *b).unwrap_or(true)
74            && self.htlc_spents.iter().all(|b| *b)
75    }
76}
77
/// Chain-tracking state for one channel: funding confirmation, closing
/// transactions, and sweeps of closing outputs.  Persisted with the monitor.
#[serde_as]
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct State {
    // Chain height
    height: u32,
    // funding txids
    funding_txids: Vec<Txid>,
    // the funding output index for each funding tx
    funding_vouts: Vec<u32>,
    // inputs derived from funding_txs for convenience
    funding_inputs: Set<OutPoint>,
    // The height where the funding transaction was confirmed
    funding_height: Option<u32>,
    // The actual funding outpoint on-chain
    funding_outpoint: Option<OutPoint>,
    // The height of a transaction that double-spends a funding input
    funding_double_spent_height: Option<u32>,
    // The height of a mutual-close transaction
    mutual_closing_height: Option<u32>,
    // The height of a unilateral-close transaction
    unilateral_closing_height: Option<u32>,
    // Unilateral closing transaction outpoints to watch
    closing_outpoints: Option<ClosingOutpoints>,
    // Unilateral closing transaction swept height
    closing_swept_height: Option<u32>,
    // Our commitment transaction output swept height
    our_output_swept_height: Option<u32>,
    // Whether we saw a block yet - used for sanity check
    #[serde(default)]
    saw_block: bool,
    // Whether the node has forgotten this channel
    #[serde(default)]
    saw_forget_channel: bool,
    // The associated channel_id for logging and debugging.
    // Not persisted, but explicitly populated by new_from_persistence
    #[serde(skip)]
    channel_id: Option<ChannelId>,
}
117
// A push decoder listener.
// We need this temporary struct so that the commitment point provider
// is easily accessible during push event handling.
struct PushListener<'a> {
    // used to decode commitment transactions when a closing tx is seen
    commitment_point_provider: &'a dyn CommitmentPointProvider,
    // intra-block decode state; changes staged here are applied at block end
    decode_state: &'a mut BlockDecodeState,
    // whether the monitor has ever seen a block start (sanity check)
    saw_block: bool,
}
126
// A state change detected in a block, to be applied to the monitor `State`.
#[derive(Clone, Debug, Serialize, Deserialize)]
enum StateChange {
    // A funding transaction was confirmed.  The funding outpoint is provided.
    FundingConfirmed(OutPoint),
    // A funding input was spent, either by the actual funding transaction
    // or by a double-spend.  The output is provided.
    FundingInputSpent(OutPoint),
    // A unilateral closing transaction was confirmed.
    // The closing txid, the funding outpoint, our output index and HTLC
    // output indexes are provided.
    UnilateralCloseConfirmed(Txid, OutPoint, Option<u32>, Vec<u32>),
    // A mutual close transaction was confirmed.
    // The closing txid and the funding outpoint are provided.
    MutualCloseConfirmed(Txid, OutPoint),
    // Our commitment output was spent (the vout in the closing tx is provided)
    OurOutputSpent(u32),
    // An HTLC commitment output was spent (the vout in the closing tx is provided)
    HTLCOutputSpent(u32),
}
145
// Keep track of the state of a block push-decoder parse state
#[derive(Clone, Debug)]
struct BlockDecodeState {
    // The changes detected in the current block
    changes: Vec<StateChange>,
    // The version of the current transaction
    version: i32,
    // The input number in the current transaction
    input_num: u32,
    // The output number in the current transaction
    output_num: u32,
    // The closing transaction, if we detect one
    // (gathered incrementally as inputs/outputs stream in)
    closing_tx: Option<Transaction>,
    // The block hash (set on on_block_start; None until then)
    block_hash: Option<BlockHash>,
    // A temporary copy of the current state, for keeping track
    // of state changes intra-block, without changing the actual state
    state: State,
}
165
166impl BlockDecodeState {
167    fn new(state: &State) -> Self {
168        BlockDecodeState {
169            changes: Vec::new(),
170            version: 0,
171            input_num: 0,
172            output_num: 0,
173            closing_tx: None,
174            block_hash: None,
175            state: state.clone(),
176        }
177    }
178
179    fn new_with_block_hash(state: &State, block_hash: &BlockHash) -> Self {
180        BlockDecodeState {
181            changes: Vec::new(),
182            version: 0,
183            input_num: 0,
184            output_num: 0,
185            closing_tx: None,
186            block_hash: Some(*block_hash),
187            state: state.clone(),
188        }
189    }
190
191    // Add a state change for the current block.
192    // This also updates the temporary monitor state, so that intra-block
193    // processing can be done.  For example, this is needed if a closing transaction
194    // is confirmed, and then swept in the same block.
195    fn add_change(&mut self, change: StateChange) {
196        self.changes.push(change.clone());
197        let mut adds = Vec::new();
198        let mut removes = Vec::new();
199        self.state.apply_forward_change(&mut adds, &mut removes, change);
200    }
201}
202
// sanity bound on the number of outputs in a commitment transaction
// (483 HTLCs each way is the protocol maximum, plus balance/anchor outputs)
const MAX_COMMITMENT_OUTPUTS: u32 = 600;
204
205impl<'a> PushListener<'a> {
206    // Check if we ever saw the beginning of a block.  If not, we might get
207    // a partial set of push events from a block right after we got created,
208    // which we must ignore.
209    fn is_not_ready_for_push(&self) -> bool {
210        if self.saw_block {
211            // if we ever saw a block, then we must have seen the block start
212            // for the current block
213            assert!(self.decode_state.block_hash.is_some(), "saw block but no decode state");
214            false
215        } else {
216            // if we never saw a block, then we must not have seen the block start
217            assert!(
218                self.decode_state.block_hash.is_none(),
219                "never saw a block but decode state is present"
220            );
221            true
222        }
223    }
224}
225
// Streaming listener: watches decoded block data for funding confirmations,
// closing transactions, and sweeps of closing outputs for this channel.
// Changes are staged in the BlockDecodeState and applied to the persistent
// State only when the enclosing AddBlock/RemoveBlock message arrives.
impl<'a> push_decoder::Listener for PushListener<'a> {
    fn on_block_start(&mut self, header: &BlockHeader) {
        // we shouldn't get more than one block start per decode state lifetime
        // (which is the lifetime of a block stream)
        assert!(self.decode_state.block_hash.is_none(), "saw more than one on_block_start");
        self.decode_state.block_hash = Some(header.block_hash());
        self.saw_block = true;
    }

    fn on_transaction_start(&mut self, version: i32) {
        if self.is_not_ready_for_push() {
            return;
        }
        // reset the per-transaction portion of the decode state
        let state = &mut self.decode_state;
        state.version = version;
        state.input_num = 0;
        state.output_num = 0;
        state.closing_tx = None;
    }

    fn on_transaction_input(&mut self, input: &TxIn) {
        if self.is_not_ready_for_push() {
            return;
        }

        let decode_state = &mut self.decode_state;

        if decode_state.state.funding_inputs.contains(&input.previous_output) {
            // A funding input was spent
            decode_state.add_change(StateChange::FundingInputSpent(input.previous_output));
        }

        if Some(input.previous_output) == decode_state.state.funding_outpoint {
            // The funding outpoint was spent - this is a closing transaction.
            // Start gathering it.  It will be processed in on_transaction_end.
            // It may be either mutual or unilateral.
            // The real lock time is unknown until on_transaction_end, so a
            // placeholder is used here.
            let tx = Transaction {
                version: decode_state.version,
                lock_time: LockTime::ZERO,
                input: vec![input.clone()],
                output: vec![],
            };
            decode_state.closing_tx = Some(tx);
        }

        // Check if an output of a unilateral closing transaction was spent.
        // split into two blocks for borrow checker
        let closing_change = if let Some(ref c) = decode_state.state.closing_outpoints {
            if c.includes_our_output(&input.previous_output) {
                // We spent our output of a closing transaction
                Some(StateChange::OurOutputSpent(input.previous_output.vout))
            } else if c.includes_htlc_output(&input.previous_output) {
                // We spent an HTLC output of a closing transaction
                Some(StateChange::HTLCOutputSpent(input.previous_output.vout))
            } else {
                None
            }
        } else {
            None
        };

        closing_change.map(|c| decode_state.add_change(c));

        if decode_state.closing_tx.is_some() {
            // a commitment / closing tx spends exactly one input:
            // the funding outpoint
            assert_eq!(decode_state.input_num, 0, "closing tx must have only one input");
        }
        decode_state.input_num += 1;
    }

    fn on_transaction_output(&mut self, output: &TxOut) {
        if self.is_not_ready_for_push() {
            return;
        }

        let decode_state = &mut self.decode_state;
        if let Some(closing_tx) = &mut decode_state.closing_tx {
            // accumulate the outputs of the potential closing tx, with a
            // sanity bound on the output count
            closing_tx.output.push(output.clone());
            assert!(
                decode_state.output_num < MAX_COMMITMENT_OUTPUTS,
                "more than {} commitment outputs",
                MAX_COMMITMENT_OUTPUTS
            );
        }

        decode_state.output_num += 1;
    }

    fn on_transaction_end(&mut self, lock_time: LockTime, txid: Txid) {
        if self.is_not_ready_for_push() {
            return;
        }

        let decode_state = &mut self.decode_state;

        if let Some(ind) = decode_state.state.funding_txids.iter().position(|i| *i == txid) {
            let vout = decode_state.state.funding_vouts[ind];
            // This was a funding transaction, which just confirmed
            assert!(
                vout < decode_state.output_num,
                "tx {} doesn't have funding output index {}",
                txid,
                vout
            );
            let outpoint = OutPoint { txid: txid.clone(), vout };
            decode_state.add_change(StateChange::FundingConfirmed(outpoint));
        }

        // complete handling of closing tx, if this was one
        if let Some(mut closing_tx) = decode_state.closing_tx.take() {
            // the lock time is only known now, at transaction end
            closing_tx.lock_time = lock_time;
            // closing tx
            assert_eq!(closing_tx.input.len(), 1);
            let provider = self.commitment_point_provider;
            let parameters = provider.get_transaction_parameters();

            // check that the closing tx is a commitment tx, otherwise it was a mutual close
            let commitment_number_opt = decode_commitment_number(&closing_tx, &parameters);
            if let Some(commitment_number) = commitment_number_opt {
                let secp_ctx = Secp256k1::new();
                info!("unilateral close {} at commitment {} confirmed", txid, commitment_number);
                let holder_per_commitment = provider.get_holder_commitment_point(commitment_number);
                let cp_per_commitment =
                    provider.get_counterparty_commitment_point(commitment_number);
                // locate our to-us output and the HTLC outputs so their
                // sweeps can be watched
                let (our_output_index, htlc_indices) = decode_commitment_tx(
                    &closing_tx,
                    &holder_per_commitment,
                    &cp_per_commitment,
                    &parameters,
                    &secp_ctx,
                );
                info!("our_output_index: {:?}, htlc_indices: {:?}", our_output_index, htlc_indices);
                decode_state.add_change(StateChange::UnilateralCloseConfirmed(
                    txid,
                    closing_tx.input[0].previous_output,
                    our_output_index,
                    htlc_indices,
                ));
            } else {
                decode_state.add_change(StateChange::MutualCloseConfirmed(
                    txid,
                    closing_tx.input[0].previous_output,
                ));
                info!("mutual close {} confirmed", txid);
            }
        }
    }

    fn on_block_end(&mut self) {
        // we need to wait until we get the following `AddBlock` or `RemoveBlock`
        // message before actually updating ourselves
    }
}
378
379impl State {
380    fn channel_id(&self) -> &ChannelId {
381        // safe because populated by new_from_persistence
382        self.channel_id.as_ref().expect("missing associated channel_id in monitor::State")
383    }
384
385    fn depth_of(&self, other_height: Option<u32>) -> u32 {
386        (self.height + 1).saturating_sub(other_height.unwrap_or(self.height + 1))
387    }
388
389    fn deep_enough_and_saw_node_forget(&self, other_height: Option<u32>, limit: u32) -> bool {
390        // If the event depth is less than MIN_DEPTH we never prune.
391        // If the event depth is greater we prune if saw_forget_channel is true.
392        let depth = self.depth_of(other_height);
393        if depth < limit {
394            // Not deep enough, we aren't done
395            false
396        } else if self.saw_forget_channel {
397            // Deep enough and the node thinks it's done too
398            true
399        } else {
400            // Deep enough, but we haven't heard from the node
401            warn!(
402                "expected forget_channel for {} overdue by {} blocks",
403                self.channel_id(),
404                depth - limit
405            );
406            false
407        }
408    }
409
410    fn diagnostic(&self, is_closed: bool) -> String {
411        if self.funding_height.is_none() {
412            format!("UNCOMFIRMED hold till funding doublespent + {}", MIN_DEPTH)
413        } else if let Some(height) = self.funding_double_spent_height {
414            format!("AGING_FUNDING_DOUBLESPENT at {} until {}", height, height + MIN_DEPTH)
415        } else if let Some(height) = self.mutual_closing_height {
416            format!("AGING_MUTUALLY_CLOSED at {} until {}", height, height + MIN_DEPTH)
417        } else if let Some(height) = self.closing_swept_height {
418            format!("AGING_CLOSING_SWEPT at {} until {}", height, height + MIN_DEPTH)
419        } else if let Some(height) = self.our_output_swept_height {
420            format!("AGING_OUR_OUTPUT_SWEPT at {} until {}", height, height + MAX_CLOSING_DEPTH)
421        } else if is_closed {
422            "CLOSING".into()
423        } else {
424            "ACTIVE".into()
425        }
426    }
427
428    fn is_done(&self) -> bool {
429        // we are done if:
430        // - funding was double spent
431        // - mutual closed
432        // - unilateral closed, and our output, as well as all HTLCs were swept
433        // and, the last confirmation is buried
434        //
435        // TODO(472) check 2nd level HTLCs
436        // TODO(472) disregard received HTLCs that we can't claim (we don't have the preimage)
437
438        if self.deep_enough_and_saw_node_forget(self.funding_double_spent_height, MIN_DEPTH) {
439            debug!(
440                "{} is_done because funding double spent {} blocks ago",
441                self.channel_id(),
442                MIN_DEPTH
443            );
444            return true;
445        }
446
447        if self.deep_enough_and_saw_node_forget(self.mutual_closing_height, MIN_DEPTH) {
448            debug!("{} is_done because mutual closed {} blocks ago", self.channel_id(), MIN_DEPTH);
449            return true;
450        }
451
452        if self.deep_enough_and_saw_node_forget(self.closing_swept_height, MIN_DEPTH) {
453            debug!("{} is_done because closing swept {} blocks ago", self.channel_id(), MIN_DEPTH);
454            return true;
455        }
456
457        // since we don't yet have the logic to tell which HTLCs we can claim,
458        // time out watching them after MAX_CLOSING_DEPTH
459        if self.deep_enough_and_saw_node_forget(self.our_output_swept_height, MAX_CLOSING_DEPTH) {
460            debug!(
461                "{} is_done because closing output swept {} blocks ago",
462                self.channel_id(),
463                MAX_CLOSING_DEPTH
464            );
465            return true;
466        }
467
468        return false;
469    }
470
471    fn on_add_block_end(
472        &mut self,
473        block_hash: &BlockHash,
474        decode_state: &mut BlockDecodeState,
475    ) -> (Vec<OutPoint>, Vec<OutPoint>) {
476        assert_eq!(decode_state.block_hash.as_ref(), Some(block_hash));
477
478        self.saw_block = true;
479        self.height += 1;
480
481        let closing_was_swept = self.is_closing_swept();
482        let our_output_was_swept = self.is_our_output_swept();
483
484        let mut adds = Vec::new();
485        let mut removes = Vec::new();
486
487        let changed = !decode_state.changes.is_empty();
488
489        if changed {
490            debug!(
491                "{} detected add-changes at height {}: {:?}",
492                self.channel_id(),
493                self.height,
494                decode_state.changes
495            );
496        }
497
498        // apply changes
499        for change in decode_state.changes.drain(..) {
500            self.apply_forward_change(&mut adds, &mut removes, change);
501        }
502
503        let closing_is_swept = self.is_closing_swept();
504        let our_output_is_swept = self.is_our_output_swept();
505
506        if !closing_was_swept && closing_is_swept {
507            info!("{} closing tx was swept at height {}", self.channel_id(), self.height);
508            self.closing_swept_height = Some(self.height);
509        }
510
511        if !our_output_was_swept && our_output_is_swept {
512            info!("{} our output was swept at height {}", self.channel_id(), self.height);
513            self.our_output_swept_height = Some(self.height);
514        }
515
516        if self.is_done() {
517            info!("{} done at height {}", self.channel_id(), self.height);
518        }
519
520        if changed {
521            #[cfg(not(feature = "log_pretty_print"))]
522            info!("on_add_block_end state changed: {:?}", self);
523            #[cfg(feature = "log_pretty_print")]
524            info!("on_add_block_end state changed: {:#?}", self);
525        }
526
527        (adds, removes)
528    }
529
530    fn on_remove_block_end(
531        &mut self,
532        block_hash: &BlockHash,
533        decode_state: &mut BlockDecodeState,
534    ) -> (Vec<OutPoint>, Vec<OutPoint>) {
535        assert_eq!(decode_state.block_hash.as_ref(), Some(block_hash));
536
537        let closing_was_swept = self.is_closing_swept();
538        let our_output_was_swept = self.is_our_output_swept();
539
540        let mut adds = Vec::new();
541        let mut removes = Vec::new();
542
543        let changed = !decode_state.changes.is_empty();
544
545        if changed {
546            debug!(
547                "{} detected remove-changes at height {}: {:?}",
548                self.channel_id(),
549                self.height,
550                decode_state.changes
551            );
552        }
553
554        for change in decode_state.changes.drain(..) {
555            self.apply_backward_change(&mut adds, &mut removes, change);
556        }
557
558        let closing_is_swept = self.is_closing_swept();
559        let our_output_is_swept = self.is_our_output_swept();
560
561        if closing_was_swept && !closing_is_swept {
562            info!("{} closing tx was un-swept at height {}", self.channel_id(), self.height);
563            self.closing_swept_height = None;
564        }
565
566        if our_output_was_swept && !our_output_is_swept {
567            info!("{} our output was un-swept at height {}", self.channel_id(), self.height);
568            self.our_output_swept_height = None;
569        }
570
571        self.height -= 1;
572
573        if changed {
574            #[cfg(not(feature = "log_pretty_print"))]
575            info!("on_remove_block_end state changed: {:?}", self);
576            #[cfg(feature = "log_pretty_print")]
577            info!("on_remove_block_end state changed: {:#?}", self);
578        }
579
580        // note that the caller will remove the adds and add the removes
581        (adds, removes)
582    }
583
584    // whether the unilateral closing tx was fully swept
585    fn is_closing_swept(&self) -> bool {
586        self.closing_outpoints.as_ref().map(|o| o.is_all_spent()).unwrap_or(false)
587    }
588
589    // whether our output was swept, or does not exist
590    fn is_our_output_swept(&self) -> bool {
591        self.closing_outpoints
592            .as_ref()
593            .map(|o| o.our_output.map(|(_, s)| s).unwrap_or(true))
594            .unwrap_or(false)
595    }
596
597    fn apply_forward_change(
598        &mut self,
599        adds: &mut Vec<OutPoint>,
600        removes: &mut Vec<OutPoint>,
601        change: StateChange,
602    ) {
603        // unwraps below on self.closing_outpoints are safe due to PushListener logic
604        match change {
605            StateChange::FundingConfirmed(outpoint) => {
606                self.funding_height = Some(self.height);
607                self.funding_outpoint = Some(outpoint);
608                // we may have thought we had a double-spend, but now we know we don't
609                self.funding_double_spent_height = None;
610                adds.push(outpoint);
611            }
612            StateChange::FundingInputSpent(outpoint) => {
613                // A funding input was double-spent, or funding was confirmed
614                // (in which case we'll see FundingConfirmed later on in this
615                // change list).
616                // we may have seen some other funding input double-spent, so
617                // don't overwrite the depth if it exists
618                self.funding_double_spent_height.get_or_insert(self.height);
619                // no matter whether funding, or double-spend, we want to stop watching this outpoint
620                removes.push(outpoint);
621            }
622            StateChange::UnilateralCloseConfirmed(
623                txid,
624                funding_outpoint,
625                our_output_index,
626                htlcs_indices,
627            ) => {
628                self.unilateral_closing_height = Some(self.height);
629                removes.push(funding_outpoint);
630                our_output_index.map(|i| adds.push(OutPoint { txid: txid.clone(), vout: i }));
631                for i in htlcs_indices.iter() {
632                    adds.push(OutPoint { txid: txid.clone(), vout: *i });
633                }
634                self.closing_outpoints =
635                    Some(ClosingOutpoints::new(txid, our_output_index, htlcs_indices));
636            }
637            StateChange::OurOutputSpent(vout) => {
638                let outpoints = self.closing_outpoints.as_mut().unwrap();
639                outpoints.set_our_output_spent(vout, true);
640                let outpoint = OutPoint { txid: outpoints.txid, vout };
641                removes.push(outpoint);
642            }
643            StateChange::HTLCOutputSpent(vout) => {
644                let outpoints = self.closing_outpoints.as_mut().unwrap();
645                outpoints.set_htlc_output_spent(vout, true);
646                let outpoint = OutPoint { txid: outpoints.txid, vout };
647                removes.push(outpoint);
648            }
649            StateChange::MutualCloseConfirmed(_txid, funding_outpoint) => {
650                self.mutual_closing_height = Some(self.height);
651                removes.push(funding_outpoint);
652            }
653        }
654    }
655
656    // Note that in the logic below, we are mimicking the logic of
657    // apply_forward_change, but the caller will remove the adds and add the
658    // removes.
659    fn apply_backward_change(
660        &mut self,
661        adds: &mut Vec<OutPoint>,
662        removes: &mut Vec<OutPoint>,
663        change: StateChange,
664    ) {
665        match change {
666            StateChange::FundingConfirmed(outpoint) => {
667                // A funding tx was reorged-out
668                assert_eq!(self.funding_height, Some(self.height));
669                self.funding_height = None;
670                self.funding_outpoint = None;
671                adds.push(outpoint);
672            }
673            StateChange::FundingInputSpent(outpoint) => {
674                // A funding double-spent was reorged-out, or funding confirmation
675                // was reorged-out (in which case we'll see FundingConfirmed later
676                // on in this change list).
677                // We may have seen some other funding input double-spent, so
678                // clear out the height only if it is the current height.
679                if self.funding_double_spent_height == Some(self.height) {
680                    self.funding_double_spent_height = None
681                }
682                // no matter whether funding, or double-spend, we want to re-start watching this outpoint
683                removes.push(outpoint);
684            }
685            StateChange::UnilateralCloseConfirmed(
686                txid,
687                funding_outpoint,
688                our_output_index,
689                htlcs_indices,
690            ) => {
691                // A closing tx was reorged-out
692                assert_eq!(self.unilateral_closing_height, Some(self.height));
693                self.unilateral_closing_height = None;
694                self.closing_outpoints = None;
695                our_output_index.map(|i| adds.push(OutPoint { txid: txid.clone(), vout: i }));
696                for i in htlcs_indices {
697                    adds.push(OutPoint { txid: txid.clone(), vout: i });
698                }
699                removes.push(funding_outpoint)
700            }
701            StateChange::OurOutputSpent(vout) => {
702                let outpoints = self.closing_outpoints.as_mut().unwrap();
703                outpoints.set_our_output_spent(vout, false);
704                let outpoint = OutPoint { txid: outpoints.txid, vout };
705                removes.push(outpoint);
706            }
707            StateChange::HTLCOutputSpent(vout) => {
708                let outpoints = self.closing_outpoints.as_mut().unwrap();
709                outpoints.set_htlc_output_spent(vout, false);
710                let outpoint = OutPoint { txid: outpoints.txid, vout };
711                removes.push(outpoint);
712            }
713            StateChange::MutualCloseConfirmed(_txid, funding_outpoint) => {
714                self.mutual_closing_height = None;
715                removes.push(funding_outpoint);
716            }
717        }
718    }
719}
720
/// This is a pre-cursor to [`ChainMonitor`], before the [`CommitmentPointProvider`] is available.
#[derive(Clone)]
pub struct ChainMonitorBase {
    // the first funding outpoint, used to identify the channel / channel monitor
    pub(crate) funding_outpoint: OutPoint,
    // the monitor state, shared with any ChainMonitor created via as_monitor
    state: Arc<Mutex<State>>,
}
729
730impl ChainMonitorBase {
731    /// Create a new chain monitor.
732    /// Use add_funding to really start monitoring.
733    pub fn new(funding_outpoint: OutPoint, height: u32, chan_id: &ChannelId) -> Self {
734        let state = State {
735            height,
736            funding_txids: Vec::new(),
737            funding_vouts: Vec::new(),
738            funding_inputs: OrderedSet::new(),
739            funding_height: None,
740            funding_outpoint: None,
741            funding_double_spent_height: None,
742            mutual_closing_height: None,
743            unilateral_closing_height: None,
744            closing_outpoints: None,
745            closing_swept_height: None,
746            our_output_swept_height: None,
747            saw_block: false,
748            saw_forget_channel: false,
749            channel_id: Some(chan_id.clone()),
750        };
751
752        Self { funding_outpoint, state: Arc::new(Mutex::new(state)) }
753    }
754
755    /// recreate this monitor after restoring from persistence
756    pub fn new_from_persistence(
757        funding_outpoint: OutPoint,
758        state: State,
759        channel_id: &ChannelId,
760    ) -> Self {
761        let state = Arc::new(Mutex::new(state));
762        state.lock().unwrap().channel_id = Some(channel_id.clone());
763        Self { funding_outpoint, state }
764    }
765
    /// Get the ChainMonitor, attaching the commitment point provider needed
    /// to decode closing (commitment) transactions.
    pub fn as_monitor(
        &self,
        commitment_point_provider: Box<dyn CommitmentPointProvider>,
    ) -> ChainMonitor {
        // the decode state starts empty and is populated when block
        // streaming begins; the monitor state itself is shared
        ChainMonitor {
            funding_outpoint: self.funding_outpoint,
            state: self.state.clone(),
            decode_state: Arc::new(Mutex::new(None)),
            commitment_point_provider,
        }
    }
778
779    /// Add a funding transaction to keep track of
780    /// For single-funding
781    pub fn add_funding_outpoint(&self, outpoint: &OutPoint) {
782        let mut state = self.get_state();
783        assert!(state.funding_txids.is_empty(), "only a single funding tx currently supported");
784        assert_eq!(state.funding_txids.len(), state.funding_vouts.len());
785        state.funding_txids.push(outpoint.txid);
786        state.funding_vouts.push(outpoint.vout);
787    }
788
789    /// Add a funding input
790    /// For single-funding
791    pub fn add_funding_inputs(&self, tx: &Transaction) {
792        let mut state = self.get_state();
793        state.funding_inputs.extend(tx.input.iter().map(|i| i.previous_output));
794    }
795
796    /// Convert to a ChainState, to be used for validation
797    pub fn as_chain_state(&self) -> ChainState {
798        let state = self.get_state();
799        ChainState {
800            current_height: state.height,
801            funding_depth: state.funding_height.map(|h| state.height + 1 - h).unwrap_or(0),
802            funding_double_spent_depth: state
803                .funding_double_spent_height
804                .map(|h| state.height + 1 - h)
805                .unwrap_or(0),
806            closing_depth: state
807                .mutual_closing_height
808                .or(state.unilateral_closing_height)
809                .map(|h| state.height + 1 - h)
810                .unwrap_or(0),
811        }
812    }
813
814    /// Whether this channel can be forgotten
815    pub fn is_done(&self) -> bool {
816        self.get_state().is_done()
817    }
818
819    /// Called when the node tells us it forgot the channel
820    pub fn forget_channel(&self) {
821        let mut state = self.get_state();
822        state.saw_forget_channel = true;
823    }
824
825    /// Returns the actual funding outpoint on-chain
826    pub fn funding_outpoint(&self) -> Option<OutPoint> {
827        self.get_state().funding_outpoint
828    }
829
830    /// Return whether forget_channel was seen
831    pub fn forget_seen(&self) -> bool {
832        self.get_state().saw_forget_channel
833    }
834
835    /// Return string describing the state
836    pub fn diagnostic(&self, is_closed: bool) -> String {
837        self.get_state().diagnostic(is_closed)
838    }
839
840    // Add this getter method
841    fn get_state(&self) -> MutexGuard<State> {
842        self.state.lock().expect("lock")
843    }
844}
845
/// Keep track of channel on-chain events.
/// Note that this object has refcounted state, so is lightweight to clone.
#[derive(Clone)]
pub struct ChainMonitor {
    /// the first funding outpoint, used to identify the channel / channel monitor
    pub funding_outpoint: OutPoint,
    /// the monitor state, shared with the ChainMonitorBase this was derived from
    pub state: Arc<Mutex<State>>,
    // Block decode state, only while in progress
    // Lock order: after `self.state`
    decode_state: Arc<Mutex<Option<BlockDecodeState>>>,
    // the commitment point provider, helps with decoding transactions
    commitment_point_provider: Box<dyn CommitmentPointProvider>,
}
860
861impl ChainMonitor {
862    /// Get the base
863    pub fn as_base(&self) -> ChainMonitorBase {
864        ChainMonitorBase { funding_outpoint: self.funding_outpoint, state: self.state.clone() }
865    }
866
867    /// Get the locked state
868    pub fn get_state(&self) -> MutexGuard<State> {
869        self.state.lock().expect("lock")
870    }
871
872    /// Add a funding transaction to keep track of
873    /// For dual-funding
874    pub fn add_funding(&self, tx: &Transaction, vout: u32) {
875        let mut state = self.get_state();
876        assert!(state.funding_txids.is_empty(), "only a single funding tx currently supported");
877        assert_eq!(state.funding_txids.len(), state.funding_vouts.len());
878        state.funding_txids.push(tx.txid());
879        state.funding_vouts.push(vout);
880        state.funding_inputs.extend(tx.input.iter().map(|i| i.previous_output));
881    }
882
883    /// Returns the number of confirmations of the funding transaction, or zero
884    /// if it wasn't confirmed yet.
885    pub fn funding_depth(&self) -> u32 {
886        let state = self.get_state();
887        state.depth_of(state.funding_height)
888    }
889
890    /// Returns the number of confirmations of a double-spend of the funding transaction
891    /// or zero if it wasn't double-spent.
892    pub fn funding_double_spent_depth(&self) -> u32 {
893        let state = self.get_state();
894        state.depth_of(state.funding_double_spent_height)
895    }
896
897    /// Returns the number of confirmations of the closing transaction, or zero
898    pub fn closing_depth(&self) -> u32 {
899        let state = self.get_state();
900        let closing_height = state.unilateral_closing_height.or(state.mutual_closing_height);
901        state.depth_of(closing_height)
902    }
903
904    /// Whether this channel can be forgotten:
905    /// - mutual close is confirmed
906    /// - unilateral close is swept
907    /// - funding transaction is double-spent
908    /// and enough confirmations have passed
909    pub fn is_done(&self) -> bool {
910        self.get_state().is_done()
911    }
912
913    // push compact proof transactions through, simulating a streamed block
914    fn push_transactions(&self, block_hash: &BlockHash, txs: &[Transaction]) -> BlockDecodeState {
915        let mut state = self.get_state();
916
917        // we are synced if we see a compact proof
918        state.saw_block = true;
919
920        let mut decode_state = BlockDecodeState::new_with_block_hash(&*state, block_hash);
921
922        let mut listener = PushListener {
923            commitment_point_provider: &*self.commitment_point_provider,
924            decode_state: &mut decode_state,
925            saw_block: true,
926        };
927
928        // stream the transactions to the state
929        for tx in txs {
930            listener.on_transaction_start(tx.version);
931            for input in tx.input.iter() {
932                listener.on_transaction_input(input);
933            }
934
935            for output in tx.output.iter() {
936                listener.on_transaction_output(output);
937            }
938            listener.on_transaction_end(tx.lock_time, tx.txid());
939        }
940
941        decode_state
942    }
943}
944
impl ChainListener for ChainMonitor {
    type Key = OutPoint;

    fn key(&self) -> &Self::Key {
        // the initial funding outpoint uniquely identifies this listener
        &self.funding_outpoint
    }

    // compact-proof path: replay the txs as a simulated stream, then finalize
    fn on_add_block(
        &self,
        txs: &[Transaction],
        block_hash: &BlockHash,
    ) -> (Vec<OutPoint>, Vec<OutPoint>) {
        debug!("on_add_block for {}", self.funding_outpoint);
        let mut decode_state = self.push_transactions(block_hash, txs);

        let mut state = self.get_state();
        state.on_add_block_end(block_hash, &mut decode_state)
    }

    // streamed path: finalize the decode state accumulated via `on_push`
    fn on_add_streamed_block_end(&self, block_hash: &BlockHash) -> (Vec<OutPoint>, Vec<OutPoint>) {
        let mut state = self.get_state();
        // take() always clears the in-progress decode state, so a partial
        // stream is discarded even when we bail below
        let mut decode_state = self.decode_state.lock().expect("lock").take();
        if !state.saw_block {
            // not ready yet, bail
            return (Vec::new(), Vec::new());
        }
        // safe because `on_push` must have been called first
        state.on_add_block_end(block_hash, decode_state.as_mut().unwrap())
    }

    // compact-proof reorg path, mirror of `on_add_block`
    fn on_remove_block(
        &self,
        txs: &[Transaction],
        block_hash: &BlockHash,
    ) -> (Vec<OutPoint>, Vec<OutPoint>) {
        debug!("on_remove_block for {}", self.funding_outpoint);
        let mut decode_state = self.push_transactions(block_hash, txs);

        let mut state = self.get_state();
        state.on_remove_block_end(block_hash, &mut decode_state)
    }

    // streamed reorg path, mirror of `on_add_streamed_block_end`
    fn on_remove_streamed_block_end(
        &self,
        block_hash: &BlockHash,
    ) -> (Vec<OutPoint>, Vec<OutPoint>) {
        let mut state = self.get_state();
        let mut decode_state = self.decode_state.lock().expect("lock").take();
        if !state.saw_block {
            // not ready yet, bail
            return (Vec::new(), Vec::new());
        }
        // safe because `on_push` must have been called first
        state.on_remove_block_end(block_hash, decode_state.as_mut().unwrap())
    }

    // feed a chunk of streamed block data to the decoder listener
    fn on_push<F>(&self, f: F)
    where
        F: FnOnce(&mut dyn push_decoder::Listener),
    {
        // lock order: `self.state` before `self.decode_state`, matching the
        // order declared on the struct
        let mut state = self.get_state();
        let saw_block = state.saw_block;

        let mut decode_state_lock = self.decode_state.lock().expect("lock");

        // lazily create the decode state, e.g. when this monitor was created
        // mid-stream
        let decode_state = decode_state_lock.get_or_insert_with(|| BlockDecodeState::new(&*state));

        let mut listener = PushListener {
            commitment_point_provider: &*self.commitment_point_provider,
            decode_state,
            saw_block,
        };
        f(&mut listener);

        // update the saw_block flag, in case the listener saw a block start event
        state.saw_block = listener.saw_block;
    }
}
1023
1024impl SendSync for ChainMonitor {}
1025
#[cfg(test)]
mod tests {
    use crate::channel::{
        ChannelBase, ChannelCommitmentPointProvider, ChannelId, ChannelSetup, CommitmentType,
    };
    use crate::node::Node;
    use crate::util::test_utils::key::{make_test_counterparty_points, make_test_pubkey};
    use crate::util::test_utils::*;
    use bitcoin::block::Version;
    use bitcoin::hash_types::TxMerkleNode;
    use bitcoin::hashes::Hash;
    use bitcoin::CompactTarget;
    use lightning::ln::chan_utils::HTLCOutputInCommitment;
    use lightning::ln::PaymentHash;
    use test_log::test;

    use super::*;

    // funding depth tracks block adds and unwinds cleanly on removes
    #[test]
    fn test_funding() {
        let tx = make_tx(vec![make_txin(1), make_txin(2)]);
        let outpoint = OutPoint::new(tx.txid(), 0);
        let cpp = Box::new(DummyCommitmentPointProvider {});
        let chan_id = ChannelId::new(&[33u8; 32]);
        let monitor = ChainMonitorBase::new(outpoint, 0, &chan_id).as_monitor(cpp);
        let block_hash = BlockHash::all_zeros();
        monitor.add_funding(&tx, 0);
        monitor.on_add_block(&[], &block_hash);
        monitor.on_add_block(&[tx.clone()], &block_hash);
        assert_eq!(monitor.funding_depth(), 1);
        assert_eq!(monitor.funding_double_spent_depth(), 0);
        monitor.on_add_block(&[], &block_hash);
        assert_eq!(monitor.funding_depth(), 2);
        monitor.on_remove_block(&[], &block_hash);
        assert_eq!(monitor.funding_depth(), 1);
        monitor.on_remove_block(&[tx], &block_hash);
        assert_eq!(monitor.funding_depth(), 0);
        monitor.on_remove_block(&[], &block_hash);
        assert_eq!(monitor.funding_depth(), 0);
    }

    // a tx spending a funding input (but not the funding tx itself) counts
    // as a double-spend of the funding
    #[test]
    fn test_funding_double_spent() {
        let tx = make_tx(vec![make_txin(1), make_txin(2)]);
        let tx2 = make_tx(vec![make_txin(2)]);
        let outpoint = OutPoint::new(tx.txid(), 0);
        let cpp = Box::new(DummyCommitmentPointProvider {});
        let chan_id = ChannelId::new(&[33u8; 32]);
        let monitor = ChainMonitorBase::new(outpoint, 0, &chan_id).as_monitor(cpp);
        let block_hash = BlockHash::all_zeros();
        monitor.add_funding(&tx, 0);
        monitor.on_add_block(&[], &block_hash);
        monitor.on_add_block(&[tx2.clone()], &block_hash);
        assert_eq!(monitor.funding_depth(), 0);
        assert_eq!(monitor.funding_double_spent_depth(), 1);
        monitor.on_add_block(&[], &block_hash);
        assert_eq!(monitor.funding_depth(), 0);
        assert_eq!(monitor.funding_double_spent_depth(), 2);
        monitor.on_remove_block(&[], &block_hash);
        assert_eq!(monitor.funding_double_spent_depth(), 1);
        monitor.on_remove_block(&[tx2], &block_hash);
        assert_eq!(monitor.funding_double_spent_depth(), 0);
        monitor.on_remove_block(&[], &block_hash);
        assert_eq!(monitor.funding_double_spent_depth(), 0);
    }

    // streamed-block push interface: pushes before a block start are ignored,
    // and the decode state resets between blocks
    #[test]
    fn test_stream() {
        let outpoint = OutPoint::new(Txid::from_slice(&[1; 32]).unwrap(), 0);
        let cpp = Box::new(DummyCommitmentPointProvider {});
        let chan_id = ChannelId::new(&[33u8; 32]);
        let monitor = ChainMonitorBase::new(outpoint, 0, &chan_id).as_monitor(cpp);
        let header = BlockHeader {
            version: Version::from_consensus(0),
            prev_blockhash: BlockHash::all_zeros(),
            merkle_root: TxMerkleNode::all_zeros(),
            time: 0,
            bits: CompactTarget::from_consensus(0),
            nonce: 0,
        };
        let tx = make_tx(vec![make_txin(1), make_txin(2)]);

        // test a push when not ready (simulates creation during a stream)
        monitor.on_push(|listener| {
            listener.on_transaction_input(&tx.input[1]);
            listener.on_transaction_output(&tx.output[0]);
            listener.on_transaction_end(tx.lock_time, tx.txid());
            listener.on_block_end();
        });

        assert!(!monitor.state.lock().unwrap().saw_block);

        // test a block push
        monitor.on_push(|listener| {
            listener.on_block_start(&header);
            listener.on_transaction_start(2);
            listener.on_transaction_input(&tx.input[0]);
            listener.on_transaction_input(&tx.input[1]);
            listener.on_transaction_output(&tx.output[0]);
            listener.on_transaction_end(tx.lock_time, tx.txid());
            listener.on_block_end();
        });
        monitor.on_add_streamed_block_end(&header.block_hash());

        assert!(monitor.state.lock().unwrap().saw_block);

        // test another block push to ensure the state is reset
        monitor.on_push(|listener| {
            listener.on_block_start(&header);
            listener.on_transaction_start(2);
            listener.on_transaction_input(&tx.input[0]);
            listener.on_transaction_input(&tx.input[1]);
            listener.on_transaction_output(&tx.output[0]);
            listener.on_transaction_end(tx.lock_time, tx.txid());
            listener.on_block_end();
        });
        monitor.on_add_streamed_block_end(&header.block_hash());

        assert!(monitor.state.lock().unwrap().saw_block);
    }

    #[test]
    fn test_mutual_close() {
        let block_hash = BlockHash::all_zeros();
        let (node, channel_id, monitor, funding_txid) = setup_funded_channel();

        // channel should exist after a heartbeat
        node.get_heartbeat();
        assert!(node.get_channel(&channel_id).is_ok());
        assert_eq!(node.get_tracker().listeners.len(), 1);

        let close_tx = make_tx(vec![TxIn {
            previous_output: OutPoint::new(funding_txid, 0),
            script_sig: Default::default(),
            sequence: Default::default(),
            witness: Default::default(),
        }]);
        monitor.on_add_block(&[close_tx.clone()], &block_hash);
        assert_eq!(monitor.closing_depth(), 1);
        assert!(!monitor.is_done());

        // channel should exist after a heartbeat
        node.get_heartbeat();
        assert!(node.get_channel(&channel_id).is_ok());
        assert_eq!(node.get_tracker().listeners.len(), 1);

        for _ in 1..MIN_DEPTH - 1 {
            monitor.on_add_block(&[], &block_hash);
        }
        assert!(!monitor.is_done());
        node.forget_channel(&channel_id).unwrap();
        monitor.on_add_block(&[], &block_hash);
        assert!(monitor.is_done());

        // channel should still be there until the heartbeat
        assert!(node.get_channel(&channel_id).is_ok());

        // channel should be pruned after a heartbeat
        node.get_heartbeat();
        assert!(node.get_channel(&channel_id).is_err());
        assert_eq!(node.get_tracker().listeners.len(), 0);
    }

    #[test]
    fn test_mutual_close_with_forget_channel() {
        let block_hash = BlockHash::all_zeros();
        let (node, channel_id, monitor, funding_txid) = setup_funded_channel();

        // channel should exist after a heartbeat
        node.get_heartbeat();
        assert!(node.get_channel(&channel_id).is_ok());
        assert_eq!(node.get_tracker().listeners.len(), 1);

        let close_tx = make_tx(vec![TxIn {
            previous_output: OutPoint::new(funding_txid, 0),
            script_sig: Default::default(),
            sequence: Default::default(),
            witness: Default::default(),
        }]);
        monitor.on_add_block(&[close_tx.clone()], &block_hash);
        assert_eq!(monitor.closing_depth(), 1);
        assert!(!monitor.is_done());

        // channel should exist after a heartbeat
        node.get_heartbeat();
        assert!(node.get_channel(&channel_id).is_ok());
        assert_eq!(node.get_tracker().listeners.len(), 1);

        for _ in 1..MIN_DEPTH - 1 {
            monitor.on_add_block(&[], &block_hash);
        }
        assert!(!monitor.is_done());
        monitor.on_add_block(&[], &block_hash);
        assert!(!monitor.is_done());

        // channel should still be there until the forget_channel
        assert!(node.get_channel(&channel_id).is_ok());
        node.forget_channel(&channel_id).unwrap();

        // need a heartbeat to do the pruning
        assert!(node.get_channel(&channel_id).is_ok());
        node.get_heartbeat();
        assert!(node.get_channel(&channel_id).is_err());
        assert_eq!(node.get_tracker().listeners.len(), 0);
    }

    #[test]
    fn test_mutual_close_with_missing_forget_channel() {
        let block_hash = BlockHash::all_zeros();
        let (node, channel_id, monitor, funding_txid) = setup_funded_channel();

        // channel should exist after a heartbeat
        node.get_heartbeat();
        assert!(node.get_channel(&channel_id).is_ok());
        assert_eq!(node.get_tracker().listeners.len(), 1);

        let close_tx = make_tx(vec![TxIn {
            previous_output: OutPoint::new(funding_txid, 0),
            script_sig: Default::default(),
            sequence: Default::default(),
            witness: Default::default(),
        }]);
        monitor.on_add_block(&[close_tx.clone()], &block_hash);
        assert_eq!(monitor.closing_depth(), 1);
        assert!(!monitor.is_done());

        // channel should exist after a heartbeat
        node.get_heartbeat();
        assert!(node.get_channel(&channel_id).is_ok());
        assert_eq!(node.get_tracker().listeners.len(), 1);

        for _ in 1..MIN_DEPTH - 1 {
            monitor.on_add_block(&[], &block_hash);
        }
        assert!(!monitor.is_done());
        monitor.on_add_block(&[], &block_hash);

        // we're not done because no forget_channel seen
        assert!(!monitor.is_done());
        assert!(node.get_channel(&channel_id).is_ok());

        // channel should still be there after heartbeat
        node.get_heartbeat();
        assert!(node.get_channel(&channel_id).is_ok());

        // wait a long time
        for _ in 0..MAX_CLOSING_DEPTH - 1 {
            monitor.on_add_block(&[], &block_hash);
        }
        assert!(!monitor.is_done());

        // we still don't forget the channel if the node hasn't said forget
        monitor.on_add_block(&[], &block_hash);
        assert!(!monitor.is_done());

        // channel should still be there
        assert!(node.get_channel(&channel_id).is_ok());

        // channel should not be pruned after a heartbeat
        node.get_heartbeat();
        assert!(node.get_channel(&channel_id).is_ok());
    }

    #[test]
    fn test_unilateral_holder_close() {
        let block_hash = BlockHash::all_zeros();
        let (node, channel_id, monitor, _funding_txid) = setup_funded_channel();

        let commit_num = 23;
        let feerate_per_kw = 1000;
        let to_holder = 100000;
        let to_cp = 200000;
        let htlcs = Vec::new();
        let closing_commitment_tx = node
            .with_channel(&channel_id, |chan| {
                chan.set_next_holder_commit_num_for_testing(commit_num);
                let per_commitment_point = chan.get_per_commitment_point(commit_num)?;
                let txkeys = chan.make_holder_tx_keys(&per_commitment_point);

                Ok(chan.make_holder_commitment_tx(
                    commit_num,
                    &txkeys,
                    feerate_per_kw,
                    to_holder,
                    to_cp,
                    htlcs.clone(),
                ))
            })
            .expect("make_holder_commitment_tx failed");
        let closing_tx = closing_commitment_tx.trust().built_transaction().transaction.clone();
        let closing_txid = closing_tx.txid();
        let holder_output_index =
            closing_tx.output.iter().position(|out| out.value == to_holder).unwrap() as u32;
        monitor.on_add_block(&[closing_tx.clone()], &block_hash);
        assert_eq!(monitor.closing_depth(), 1);
        assert!(!monitor.is_done());
        // we never forget the channel if we didn't sweep our output
        for _ in 1..MAX_CLOSING_DEPTH {
            monitor.on_add_block(&[], &block_hash);
        }
        assert!(!monitor.is_done());
        // sweep the *other* (counterparty) output - ours is still unswept
        let sweep_cp_tx = make_tx(vec![make_txin2(closing_txid, 1 - holder_output_index)]);
        monitor.on_add_block(&[sweep_cp_tx], &block_hash);
        // we still never forget the channel
        for _ in 1..MAX_CLOSING_DEPTH {
            monitor.on_add_block(&[], &block_hash);
        }
        assert!(!monitor.is_done());
        let sweep_holder_tx = make_tx(vec![make_txin2(closing_txid, holder_output_index)]);
        monitor.on_add_block(&[sweep_holder_tx], &block_hash);
        // once we sweep our output, we forget the channel
        for _ in 1..MIN_DEPTH {
            monitor.on_add_block(&[], &block_hash);
        }
        node.forget_channel(&channel_id).unwrap();
        assert!(monitor.is_done());
    }

    #[test]
    fn test_unilateral_cp_and_htlcs_close() {
        let block_hash = BlockHash::all_zeros();
        let (node, channel_id, monitor, _funding_txid) = setup_funded_channel();

        let commit_num = 23;
        let feerate_per_kw = 1000;
        let to_holder = 100000;
        let to_cp = 200000;
        let htlcs = vec![HTLCOutputInCommitment {
            offered: false,
            amount_msat: 10000,
            cltv_expiry: 0,
            payment_hash: PaymentHash([0; 32]),
            transaction_output_index: None,
        }];
        let closing_commitment_tx = node
            .with_channel(&channel_id, |chan| {
                let per_commitment_point = make_test_pubkey(12);
                chan.set_next_counterparty_commit_num_for_testing(
                    commit_num,
                    per_commitment_point.clone(),
                );
                Ok(chan.make_counterparty_commitment_tx(
                    &per_commitment_point,
                    commit_num,
                    feerate_per_kw,
                    to_holder,
                    to_cp,
                    htlcs.clone(),
                ))
            })
            .expect("make_counterparty_commitment_tx failed");
        let closing_tx = closing_commitment_tx.trust().built_transaction().transaction.clone();
        let closing_txid = closing_tx.txid();
        let holder_output_index =
            closing_tx.output.iter().position(|out| out.value == to_holder).unwrap() as u32;
        let cp_output_index =
            closing_tx.output.iter().position(|out| out.value == to_cp).unwrap() as u32;
        let htlc_output_index = closing_tx
            .output
            .iter()
            .position(|out| out.value == htlcs[0].amount_msat / 1000)
            .unwrap() as u32;
        monitor.on_add_block(&[closing_tx.clone()], &block_hash);
        assert_eq!(monitor.closing_depth(), 1);
        assert!(!monitor.is_done());
        // we never forget the channel if we didn't sweep our output
        for _ in 1..MAX_CLOSING_DEPTH {
            monitor.on_add_block(&[], &block_hash);
        }
        assert!(!monitor.is_done());
        let sweep_cp_tx = make_tx(vec![make_txin2(closing_txid, cp_output_index)]);
        monitor.on_add_block(&[sweep_cp_tx], &block_hash);
        // we still never forget the channel
        for _ in 1..MAX_CLOSING_DEPTH {
            monitor.on_add_block(&[], &block_hash);
        }
        assert!(!monitor.is_done());
        let sweep_holder_tx = make_tx(vec![make_txin2(closing_txid, holder_output_index)]);
        monitor.on_add_block(&[sweep_holder_tx], &block_hash);

        let monitor1 = monitor.clone();

        // TIMELINE 1 - HTLC output not swept
        // even with our output swept and MAX_CLOSING_DEPTH blocks passed,
        // we are not done without the node's forget_channel
        for _ in 1..MAX_CLOSING_DEPTH - 1 {
            monitor.on_add_block(&[], &block_hash);
        }
        assert!(!monitor.is_done());
        monitor.on_add_block(&[], &block_hash);
        assert!(!monitor.is_done());

        // TIMELINE 2 - HTLC output swept
        let sweep_htlc_tx = make_tx(vec![make_txin2(closing_txid, htlc_output_index)]);
        monitor1.on_add_block(&[sweep_htlc_tx], &block_hash);

        for _ in 1..MIN_DEPTH {
            monitor1.on_add_block(&[], &block_hash);
        }
        // still not done, need forget from node
        assert!(!monitor1.is_done());

        // once the node forgets we can forget all of the above
        node.forget_channel(&channel_id).unwrap();
        assert!(monitor.is_done());
        assert!(monitor1.is_done());
    }

    // create a node with one channel whose funding tx is confirmed at depth 1
    fn setup_funded_channel() -> (Arc<Node>, ChannelId, ChainMonitor, Txid) {
        let funding_tx = make_tx(vec![make_txin(1), make_txin(2)]);
        let funding_outpoint = OutPoint::new(funding_tx.txid(), 0);
        let setup = make_channel_setup(funding_outpoint);

        let (node, channel_id) =
            init_node_and_channel(TEST_NODE_CONFIG, TEST_SEED[1], setup.clone());
        let channel = node.get_channel(&channel_id).unwrap();
        let cpp = Box::new(ChannelCommitmentPointProvider::new(channel.clone()));
        let monitor = node
            .with_channel(&channel_id, |chan| Ok(chan.monitor.clone().as_monitor(cpp.clone())))
            .unwrap();
        let block_hash = BlockHash::all_zeros();
        monitor.on_add_block(&[], &block_hash);
        monitor.on_add_block(&[funding_tx.clone()], &block_hash);
        assert_eq!(monitor.funding_depth(), 1);
        (node, channel_id, monitor, funding_tx.txid())
    }

    // build a default TxIn spending the given previous outpoint
    fn make_txin2(prev_txid: Txid, prevout: u32) -> TxIn {
        TxIn {
            previous_output: OutPoint::new(prev_txid, prevout),
            script_sig: Default::default(),
            sequence: Default::default(),
            witness: Default::default(),
        }
    }

    // minimal channel setup for the tests above
    fn make_channel_setup(funding_outpoint: OutPoint) -> ChannelSetup {
        ChannelSetup {
            is_outbound: true,
            channel_value_sat: 3_000_000,
            push_value_msat: 0,
            funding_outpoint,
            holder_selected_contest_delay: 6,
            holder_shutdown_script: None,
            counterparty_points: make_test_counterparty_points(),
            counterparty_selected_contest_delay: 7,
            counterparty_shutdown_script: None,
            commitment_type: CommitmentType::StaticRemoteKey,
        }
    }
}