use bitcoind_client::dummy::DummyPersistentTxooSource;
use lightning_signer::txoo::source::Source;
use std::collections::HashSet;
use std::path::PathBuf;
use std::sync::{Arc, Mutex};
use std::time::Duration;
use tokio::{task, time};
use url::Url;

use lightning_signer::bitcoin::{BlockHash, FilterHeader, Network};
use log::info;

use crate::{chain_follower::ChainFollower, ChainTrack, ChainTrackDirectory};

/// Factory for TXOO sources
pub struct SourceFactory {
    datadir: PathBuf,
    network: Network,
}

impl SourceFactory {
    /// Create a new SourceFactory
    pub fn new<P: Into<PathBuf>>(datadir: P, network: Network) -> Self {
        Self { datadir: datadir.into(), network }
    }

    /// Get a new TXOO source
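    ///
    /// A minimal sketch of a call site (placeholder values, not compiled here):
    ///
    /// ```ignore
    /// // `block_hash` and `filter_header` describe a known checkpoint.
    /// let factory = SourceFactory::new("/tmp/txoo", Network::Regtest);
    /// let source = factory.get_source(0, block_hash, filter_header);
    /// ```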
    // TODO use real TxooSource
    pub fn get_source(
        &self,
        start_block: u32,
        block_hash: BlockHash,
        filter_header: FilterHeader,
    ) -> Box<dyn Source> {
        Box::new(DummyPersistentTxooSource::from_checkpoint(
            self.datadir.clone(),
            self.network,
            start_block,
            block_hash,
            filter_header,
        ))
    }
}

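/// Periodically polls the [`ChainTrackDirectory`] for trackers and starts a
/// [`ChainFollower`] for each new one.
///
/// # Example
///
/// A minimal sketch, marked `ignore` because `MyDirectory` is a hypothetical
/// [`ChainTrackDirectory`] implementation and the datadir and RPC URL are
/// placeholders:
///
/// ```ignore
/// let directory: Arc<dyn ChainTrackDirectory> = Arc::new(MyDirectory::new());
/// let source_factory = Arc::new(SourceFactory::new("/tmp/txoo", Network::Regtest));
/// let rpc_url = Url::parse("http://user:pass@127.0.0.1:18443").unwrap();
/// let frontend = Frontend::new(directory, source_factory, rpc_url);
/// frontend.start(); // spawns the polling loop on the current tokio runtime
/// ```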
#[derive(Clone)]
pub struct Frontend {
    directory: Arc<dyn ChainTrackDirectory>,
    rpc_url: Url,
    tracker_ids: Arc<Mutex<HashSet<Vec<u8>>>>,
    source_factory: Arc<SourceFactory>,
}

impl Frontend {
    /// Create a new Frontend
    pub fn new(
        signer: Arc<dyn ChainTrackDirectory>,
        source_factory: Arc<SourceFactory>,
        rpc_url: Url,
    ) -> Frontend {
        let tracker_ids = Arc::new(Mutex::new(HashSet::new()));
        Frontend { directory: signer, source_factory, rpc_url, tracker_ids }
    }

    /// The chain track directory
    pub fn directory(&self) -> Arc<dyn ChainTrackDirectory> {
        Arc::clone(&self.directory)
    }

    /// Start a task which periodically polls the directory and creates a
    /// chain follower for each tracker it finds
    pub fn start(&self) {
        let s = self.clone();
        task::spawn(async move {
            s.start_loop().await;
        });
        info!("frontend started");
    }

    async fn start_loop(&self) {
        let mut interval = time::interval(Duration::from_secs(1));
        loop {
            interval.tick().await;
            self.handle_new_trackers().await;
        }
    }

    /// Start a follower for any tracker in the directory that has not been seen yet
    async fn handle_new_trackers(&self) {
        for tracker in self.directory.trackers().await {
            let tracker_id = tracker.id().await;
            {
                let mut lock = self.tracker_ids.lock().unwrap();
                if lock.contains(&tracker_id) {
                    continue;
                }
                lock.insert(tracker_id);
            }
            self.start_follower(tracker).await;
        }
    }

    /// Start a chain follower for a specific tracker
    pub async fn start_follower(&self, tracker: Arc<dyn ChainTrack>) {
        assert_eq!(tracker.network(), self.source_factory.network);
        let cf_arc = ChainFollower::new(tracker, &self.source_factory, &self.rpc_url).await;
        ChainFollower::start(cf_arc).await;
    }
}