//! Use [`HeaderParser`] to read from your node's `blocks` directory.
//!
//! You will get back Vec<[`ParsedHeader`]> that identifies every block.  You can take a slice or
//! even reorder these headers before you pass them into [`crate::BlockParser::parse`].

use anyhow::bail;
use anyhow::Result;
use bitcoin::block::Header;
use bitcoin::consensus::Decodable;
use bitcoin::hashes::Hash;
use bitcoin::BlockHash;
use rustc_hash::FxHashMap;
use std::fs;
use std::fs::File;
use std::io::{BufReader, Read};
use std::path::{Path, PathBuf};
use std::sync::mpsc;
use threadpool::ThreadPool;

/// Before the header are 4 magic bytes and 4 bytes that indicate the block size
/// (the size bytes are decoded as little-endian in `parse_headers_file`)
const PRE_HEADER_SIZE: usize = 8;

/// Points to the on-disk location where a block starts (and the header ends)
#[derive(Clone, Debug)]
pub struct ParsedHeader {
    /// Consensus parsed `bitcoin::Header`
    pub inner: Header,
    /// Byte offset from the beginning of the file to the end of this header,
    /// i.e. where the block body begins
    pub offset: usize,
    /// This header's block hash (computed once when the header is parsed)
    pub hash: BlockHash,
    /// Path of the BLK file containing this block
    pub path: PathBuf,
}

/// Fast parser of [`ParsedHeader`] from the blocks directory
pub struct HeaderParser;
impl HeaderParser {
    /// Parses the headers from the `blocks_dir` returning the `ParsedHeader` in height order,
    /// starting from the genesis block.  Takes a few seconds to run.
    ///
    /// # Errors
    /// Fails if `blocks_dir` cannot be read, contains no BLK files, or a BLK file
    /// cannot be opened or read.
    pub fn parse(blocks_dir: &str) -> Result<Vec<ParsedHeader>> {
        let (tx, rx) = mpsc::channel();
        // Header parsing is I/O bound, so oversubscribe well beyond the core count
        let pool = ThreadPool::new(100);

        // Read headers from every BLK file in a new thread
        for path in Self::blk_files(blocks_dir)? {
            let tx = tx.clone();
            pool.execute(move || {
                // Errors travel through the channel so the receive loop can surface them
                let _ = tx.send(Self::parse_headers_file(path));
            });
        }
        // Drop the original sender so `rx` disconnects once all workers finish
        drop(tx);

        // Receive all the headers from spawned threads, keyed by prev_blockhash.
        // Two headers sharing a prev_blockhash indicate a fork (reorg); stash the
        // displaced header for resolution below.
        let mut locations = FxHashMap::default();
        let mut collisions: Vec<ParsedHeader> = vec![];
        for received in rx {
            for header in received? {
                if let Some(collision) = locations.insert(header.inner.prev_blockhash, header) {
                    collisions.push(collision);
                }
            }
        }

        // Resolve reorgs and order the headers by block height
        for collision in collisions {
            Self::resolve_collisions(&mut locations, collision);
        }
        Ok(Self::order_headers(locations))
    }

    /// Parses every header in a BLK file, recording where each block body
    /// starts on disk.
    fn parse_headers_file(path: PathBuf) -> Result<Vec<ParsedHeader>> {
        // Only buffer what we actually read per block: pre-header + header
        let buffer_size = PRE_HEADER_SIZE + Header::SIZE;
        let mut reader = BufReader::with_capacity(buffer_size, File::open(&path)?);
        let mut offset = 0;
        // First 8 bytes are 4 magic bytes and 4 bytes that indicate the block size
        let mut buffer = vec![0; PRE_HEADER_SIZE];
        let mut headers = vec![];

        // Stops at EOF, when fewer than PRE_HEADER_SIZE bytes remain
        while reader.read_exact(&mut buffer).is_ok() {
            offset += buffer.len();
            if let Ok(header) = Header::consensus_decode(&mut reader) {
                headers.push(ParsedHeader {
                    inner: header,
                    // Offset to the end of the header, i.e. where the block body begins
                    offset: offset + Header::SIZE,
                    hash: header.block_hash(),
                    path: path.clone(),
                });
                // Bytes 4..8 of the pre-header hold the block size (little-endian)
                let size = u32::from_le_bytes(buffer[4..].try_into()?) as usize;
                // Seek to the next block, subtracting the block header bytes we parsed
                reader.seek_relative(size.saturating_sub(Header::SIZE) as i64)?;
                offset += size;
            }
        }
        Ok(headers)
    }

    /// Returns the list of all BLK files in the dir
    fn blk_files(dir: &str) -> Result<Vec<PathBuf>> {
        let mut files = vec![];

        for entry in fs::read_dir(Path::new(dir))? {
            let entry = entry?;
            // Compare lossily so a non-UTF8 filename is skipped instead of panicking
            if entry.file_name().to_string_lossy().starts_with("blk") {
                files.push(entry.path());
            }
        }

        if files.is_empty() {
            bail!("No BLK files found in dir {:?}", dir);
        }

        Ok(files)
    }

    /// In case of reorgs we need to resolve to the longest chain
    ///
    /// `headers` maps prev_blockhash -> header, so `get(&hash)` yields the block
    /// built on top of `hash`.  Walk both competing chains forward in lockstep;
    /// whichever still has a successor when the other runs out is the longer chain.
    fn resolve_collisions(
        headers: &mut FxHashMap<BlockHash, ParsedHeader>,
        collision: ParsedHeader,
    ) {
        let existing = headers
            .get(&collision.inner.prev_blockhash)
            .expect("exists");
        let mut e_hash = &existing.hash;
        let mut c_hash = &collision.hash;

        // Advance both chains until at least one has no successor
        while let (Some(e), Some(c)) = (headers.get(e_hash), headers.get(c_hash)) {
            e_hash = &e.hash;
            c_hash = &c.hash;
        }

        // In case collision is the longest, update the blocks map
        if headers.contains_key(c_hash) {
            headers.insert(collision.inner.prev_blockhash, collision);
        }
    }

    /// Puts the headers into the correct order by block height (using the hashes)
    fn order_headers(mut headers: FxHashMap<BlockHash, ParsedHeader>) -> Vec<ParsedHeader> {
        let mut ordered_headers = vec![];
        // Genesis block starts with prev = all_zeros
        let mut next_hash = BlockHash::all_zeros();

        // Follow prev_blockhash -> successor links until the chain tip is reached
        while let Some(header) = headers.remove(&next_hash) {
            next_hash = header.hash;
            ordered_headers.push(header);
        }

        ordered_headers
    }
}