kaspa_consensus/processes/
difficulty.rs

1use crate::model::stores::{
2    block_window_cache::BlockWindowHeap,
3    ghostdag::{GhostdagData, GhostdagStoreReader},
4    headers::HeaderStoreReader,
5};
6use kaspa_consensus_core::{
7    config::params::MIN_DIFFICULTY_WINDOW_LEN,
8    errors::difficulty::{DifficultyError, DifficultyResult},
9    BlockHashSet, BlueWorkType,
10};
11use kaspa_math::{Uint256, Uint320};
12use std::{
13    cmp::{max, Ordering},
14    iter::once_with,
15    ops::Deref,
16    sync::Arc,
17};
18
19use super::ghostdag::ordering::SortableBlock;
20use itertools::Itertools;
21
/// Logic shared by [`FullDifficultyManager`] and [`SampledDifficultyManager`],
/// abstracted only over access to the headers store.
trait DifficultyManagerExtension {
    /// The headers store used to resolve per-block header data (DAA score, timestamp, bits).
    fn headers_store(&self) -> &dyn HeaderStoreReader;

    /// Computes a block's DAA score: the selected parent's DAA score plus the
    /// number of mergeset blocks which *do* count for DAA (i.e., the mergeset
    /// size minus the size of the non-DAA set).
    #[inline]
    #[must_use]
    fn internal_calc_daa_score(&self, ghostdag_data: &GhostdagData, mergeset_non_daa: &BlockHashSet) -> u64 {
        let sp_daa_score = self.headers_store().get_daa_score(ghostdag_data.selected_parent).unwrap();
        sp_daa_score + (ghostdag_data.mergeset_size() - mergeset_non_daa.len()) as u64
    }

    /// Resolves every window entry into a [`DifficultyBlock`] by fetching its
    /// compact header data (timestamp and difficulty bits) from the headers store.
    fn get_difficulty_blocks(&self, window: &BlockWindowHeap) -> Vec<DifficultyBlock> {
        window
            .iter()
            .map(|item| {
                let data = self.headers_store().get_compact_header_data(item.0.hash).unwrap();
                DifficultyBlock { timestamp: data.timestamp, bits: data.bits, sortable_block: item.0.clone() }
            })
            .collect()
    }

    /// Estimates the network's hash rate over the window: the blue-work range
    /// spanned by the window divided by the window's timespan in seconds.
    ///
    /// # Errors
    /// - `UnderMinWindowSizeAllowed` if the window holds fewer than `MIN_WINDOW_SIZE` blocks
    /// - `EmptyTimestampRange` if all window timestamps are identical
    ///
    /// Returns `Ok(0)` when the timespan is positive but shorter than one second.
    fn internal_estimate_network_hashes_per_second(&self, window: &BlockWindowHeap) -> DifficultyResult<u64> {
        // TODO: perhaps move this const
        const MIN_WINDOW_SIZE: usize = 1000;
        let window_size = window.len();
        if window_size < MIN_WINDOW_SIZE {
            return Err(DifficultyError::UnderMinWindowSizeAllowed(window_size, MIN_WINDOW_SIZE));
        }
        let difficulty_blocks = self.get_difficulty_blocks(window);
        // Window was verified non-empty above, so minmax is well defined
        let (min_ts, max_ts) = difficulty_blocks.iter().map(|x| x.timestamp).minmax().into_option().unwrap();
        if min_ts == max_ts {
            return Err(DifficultyError::EmptyTimestampRange);
        }
        let window_duration = (max_ts - min_ts) / 1000; // Divided by 1000 to convert milliseconds to seconds
        if window_duration == 0 {
            return Ok(0);
        }

        let (min_blue_work, max_blue_work) =
            difficulty_blocks.iter().map(|x| x.sortable_block.blue_work).minmax().into_option().unwrap();

        Ok(((max_blue_work - min_blue_work) / window_duration).as_u64())
    }

    /// Asserts the construction-time invariant
    /// `MIN_DIFFICULTY_WINDOW_LEN <= min_difficulty_window_len <= difficulty_window_size`.
    /// Panics (with a descriptive message) when the invariant is violated.
    #[inline]
    fn check_min_difficulty_window_len(difficulty_window_size: usize, min_difficulty_window_len: usize) {
        assert!(
            MIN_DIFFICULTY_WINDOW_LEN <= min_difficulty_window_len && min_difficulty_window_len <= difficulty_window_size,
            "min_difficulty_window_len {} is expected to fit within {}..={}",
            min_difficulty_window_len,
            MIN_DIFFICULTY_WINDOW_LEN,
            difficulty_window_size
        );
    }
}
76
/// A difficulty manager conforming to the legacy golang implementation
/// based on full, hence un-sampled, windows
#[derive(Clone)]
pub struct FullDifficultyManager<T: HeaderStoreReader> {
    headers_store: Arc<T>,
    /// Difficulty bits used while the window is too short for a real calculation
    genesis_bits: u32,
    /// Upper bound (i.e., easiest allowed target) for any computed target
    max_difficulty_target: Uint320,
    difficulty_window_size: usize,
    /// Minimum number of window blocks required before difficulty is recalculated
    min_difficulty_window_len: usize,
    /// Expected time per block — presumably in milliseconds, matching header timestamps (TODO: confirm)
    target_time_per_block: u64,
}
88
impl<T: HeaderStoreReader> FullDifficultyManager<T> {
    /// Constructs a new full-window difficulty manager.
    ///
    /// # Panics
    /// If `min_difficulty_window_len` does not fit within
    /// `MIN_DIFFICULTY_WINDOW_LEN..=difficulty_window_size`.
    pub fn new(
        headers_store: Arc<T>,
        genesis_bits: u32,
        max_difficulty_target: Uint256,
        difficulty_window_size: usize,
        min_difficulty_window_len: usize,
        target_time_per_block: u64,
    ) -> Self {
        Self::check_min_difficulty_window_len(difficulty_window_size, min_difficulty_window_len);
        Self {
            headers_store,
            genesis_bits,
            // Widened to Uint320 to match the overflow-safe arithmetic in `calculate_difficulty_bits`
            max_difficulty_target: max_difficulty_target.into(),
            difficulty_window_size,
            min_difficulty_window_len,
            target_time_per_block,
        }
    }

    /// Computes the block's DAA score together with the subset of its mergeset
    /// which does *not* count for DAA: all mergeset blocks (selected parent
    /// included) ordered strictly below the lowest block of the difficulty window.
    pub fn calc_daa_score_and_mergeset_non_daa_blocks<'a>(
        &'a self,
        window: &BlockWindowHeap,
        ghostdag_data: &GhostdagData,
        store: &'a (impl GhostdagStoreReader + ?Sized),
    ) -> (u64, BlockHashSet) {
        // If the window is empty, all the mergeset goes in the non-DAA set, hence a default lowest block with maximum blue work.
        let default_lowest_block = SortableBlock { hash: Default::default(), blue_work: BlueWorkType::MAX };
        let window_lowest_block = window.peek().map(|x| &x.0).unwrap_or_else(|| &default_lowest_block);
        let mergeset_non_daa: BlockHashSet = ghostdag_data
            .ascending_mergeset_without_selected_parent(store)
            // The selected parent is chained last — the take_while below relies on
            // the full sequence being ascending (NOTE(review): assumes the selected
            // parent has the highest blue work in the mergeset)
            .chain(once_with(|| {
                let selected_parent_hash = ghostdag_data.selected_parent;
                SortableBlock { hash: selected_parent_hash, blue_work: store.get_blue_work(selected_parent_hash).unwrap_or_default() }
            }))
            .take_while(|sortable_block| sortable_block < window_lowest_block)
            .map(|sortable_block| sortable_block.hash)
            .collect();

        (self.internal_calc_daa_score(ghostdag_data, &mergeset_non_daa), mergeset_non_daa)
    }

    /// Computes the difficulty bits for a block whose difficulty window is `window`:
    /// the window's average target scaled by the ratio of the measured window
    /// duration to the expected duration, clamped to `max_difficulty_target`.
    /// Returns `genesis_bits` while the window is shorter than `min_difficulty_window_len`.
    pub fn calculate_difficulty_bits(&self, window: &BlockWindowHeap) -> u32 {
        let mut difficulty_blocks = self.get_difficulty_blocks(window);

        // Until there are enough blocks for a valid calculation the difficulty should remain constant.
        if difficulty_blocks.len() < self.min_difficulty_window_len {
            return self.genesis_bits;
        }

        let (min_ts_index, max_ts_index) = difficulty_blocks.iter().position_minmax().into_option().unwrap();

        // Both timestamps are read *before* the swap_remove below invalidates indices
        let min_ts = difficulty_blocks[min_ts_index].timestamp;
        let max_ts = difficulty_blocks[max_ts_index].timestamp;

        // We remove the minimal block because we want the average target for the internal window.
        difficulty_blocks.swap_remove(min_ts_index);

        // We need Uint320 to avoid overflow when summing and multiplying by the window size.
        let difficulty_blocks_len = difficulty_blocks.len() as u64;
        let targets_sum: Uint320 =
            difficulty_blocks.into_iter().map(|diff_block| Uint320::from(Uint256::from_compact_target_bits(diff_block.bits))).sum();
        let average_target = targets_sum / (difficulty_blocks_len);
        // max(.., 1) guards against a zero duration when all timestamps are equal
        let new_target = average_target * max(max_ts - min_ts, 1) / (self.target_time_per_block * difficulty_blocks_len);
        Uint256::try_from(new_target.min(self.max_difficulty_target)).expect("max target < Uint256::MAX").compact_target_bits()
    }

    /// Estimates the network hash rate over `window`; see
    /// `internal_estimate_network_hashes_per_second` for semantics and errors.
    pub fn estimate_network_hashes_per_second(&self, window: &BlockWindowHeap) -> DifficultyResult<u64> {
        self.internal_estimate_network_hashes_per_second(window)
    }
}
160
161impl<T: HeaderStoreReader> DifficultyManagerExtension for FullDifficultyManager<T> {
162    fn headers_store(&self) -> &dyn HeaderStoreReader {
163        self.headers_store.deref()
164    }
165}
166
/// A difficulty manager implementing [KIP-0004](https://github.com/kaspanet/kips/blob/master/kip-0004.md),
/// so based on sampled windows
#[derive(Clone)]
pub struct SampledDifficultyManager<T: HeaderStoreReader> {
    headers_store: Arc<T>,
    /// Difficulty bits used while the window is too short for a real calculation
    genesis_bits: u32,
    /// Upper bound (i.e., easiest allowed target) for any computed target
    max_difficulty_target: Uint320,
    /// Size of the *sampled* window; full coverage is `difficulty_window_size * difficulty_sample_rate`
    difficulty_window_size: usize,
    /// Minimum number of window blocks required before difficulty is recalculated
    min_difficulty_window_len: usize,
    /// One of every `difficulty_sample_rate` blocks is sampled into the window
    difficulty_sample_rate: u64,
    /// Expected time per block — presumably in milliseconds, matching header timestamps (TODO: confirm)
    target_time_per_block: u64,
}
179
impl<T: HeaderStoreReader> SampledDifficultyManager<T> {
    /// Constructs a new sampled-window difficulty manager.
    ///
    /// # Panics
    /// If `min_difficulty_window_len` does not fit within
    /// `MIN_DIFFICULTY_WINDOW_LEN..=difficulty_window_size`.
    pub fn new(
        headers_store: Arc<T>,
        genesis_bits: u32,
        max_difficulty_target: Uint256,
        difficulty_window_size: usize,
        min_difficulty_window_len: usize,
        difficulty_sample_rate: u64,
        target_time_per_block: u64,
    ) -> Self {
        Self::check_min_difficulty_window_len(difficulty_window_size, min_difficulty_window_len);
        Self {
            headers_store,
            genesis_bits,
            // Widened to Uint320 to match the overflow-safe arithmetic in `calculate_difficulty_bits`
            max_difficulty_target: max_difficulty_target.into(),
            difficulty_window_size,
            min_difficulty_window_len,
            difficulty_sample_rate,
            target_time_per_block,
        }
    }

    /// The number of blocks effectively covered by the sampled window
    /// (`difficulty_window_size * difficulty_sample_rate`).
    #[inline]
    #[must_use]
    pub fn difficulty_full_window_size(&self) -> u64 {
        self.difficulty_window_size as u64 * self.difficulty_sample_rate
    }

    /// Returns the DAA window lowest accepted blue score
    #[inline]
    #[must_use]
    pub fn lowest_daa_blue_score(&self, ghostdag_data: &GhostdagData) -> u64 {
        let difficulty_full_window_size = self.difficulty_full_window_size();
        // Saturating subtraction: blue scores below the full window size map to 0
        ghostdag_data.blue_score.max(difficulty_full_window_size) - difficulty_full_window_size
    }

    /// Computes the block's DAA score given its precomputed non-DAA mergeset.
    #[inline]
    #[must_use]
    pub fn calc_daa_score(&self, ghostdag_data: &GhostdagData, mergeset_non_daa: &BlockHashSet) -> u64 {
        self.internal_calc_daa_score(ghostdag_data, mergeset_non_daa)
    }

    /// Computes the block's DAA score together with the subset of its mergeset
    /// which does *not* count for DAA: all mergeset blocks whose blue score is
    /// below the DAA window's lowest accepted blue score.
    pub fn calc_daa_score_and_mergeset_non_daa_blocks(
        &self,
        ghostdag_data: &GhostdagData,
        store: &(impl GhostdagStoreReader + ?Sized),
    ) -> (u64, BlockHashSet) {
        let lowest_daa_blue_score = self.lowest_daa_blue_score(ghostdag_data);
        let mergeset_non_daa: BlockHashSet =
            ghostdag_data.unordered_mergeset().filter(|hash| store.get_blue_score(*hash).unwrap() < lowest_daa_blue_score).collect();
        (self.internal_calc_daa_score(ghostdag_data, &mergeset_non_daa), mergeset_non_daa)
    }

    /// Computes the difficulty bits for a block whose (sampled) difficulty window is
    /// `window`: the window's average target scaled by the ratio of the measured
    /// duration to the expected duration (which accounts for the sample rate),
    /// clamped to `max_difficulty_target`. Returns `genesis_bits` while the window
    /// is shorter than `min_difficulty_window_len`.
    pub fn calculate_difficulty_bits(&self, window: &BlockWindowHeap) -> u32 {
        // Note: this fn is duplicated (almost, see `* self.difficulty_sample_rate`) in Full and Sampled structs
        // so some alternate calculation can be investigated here.
        let mut difficulty_blocks = self.get_difficulty_blocks(window);

        // Until there are enough blocks for a valid calculation the difficulty should remain constant.
        if difficulty_blocks.len() < self.min_difficulty_window_len {
            return self.genesis_bits;
        }

        let (min_ts_index, max_ts_index) = difficulty_blocks.iter().position_minmax().into_option().unwrap();

        // Both timestamps are read *before* the swap_remove below invalidates indices
        let min_ts = difficulty_blocks[min_ts_index].timestamp;
        let max_ts = difficulty_blocks[max_ts_index].timestamp;

        // We remove the minimal block because we want the average target for the internal window.
        difficulty_blocks.swap_remove(min_ts_index);

        // We need Uint320 to avoid overflow when summing and multiplying by the window size.
        let difficulty_blocks_len = difficulty_blocks.len() as u64;
        let targets_sum: Uint320 =
            difficulty_blocks.into_iter().map(|diff_block| Uint320::from(Uint256::from_compact_target_bits(diff_block.bits))).sum();
        let average_target = targets_sum / difficulty_blocks_len;
        // max(.., 1) guards against a zero duration when all timestamps are equal
        let measured_duration = max(max_ts - min_ts, 1);
        let expected_duration = self.target_time_per_block * self.difficulty_sample_rate * difficulty_blocks_len; // This does differ from FullDifficultyManager version
        let new_target = average_target * measured_duration / expected_duration;
        Uint256::try_from(new_target.min(self.max_difficulty_target)).expect("max target < Uint256::MAX").compact_target_bits()
    }

    /// Estimates the network hash rate over `window`; see
    /// `internal_estimate_network_hashes_per_second` for semantics and errors.
    pub fn estimate_network_hashes_per_second(&self, window: &BlockWindowHeap) -> DifficultyResult<u64> {
        self.internal_estimate_network_hashes_per_second(window)
    }
}
266
267impl<T: HeaderStoreReader> DifficultyManagerExtension for SampledDifficultyManager<T> {
268    fn headers_store(&self) -> &dyn HeaderStoreReader {
269        self.headers_store.deref()
270    }
271}
272
273pub fn calc_work(bits: u32) -> BlueWorkType {
274    let target = Uint256::from_compact_target_bits(bits);
275    // Source: https://github.com/bitcoin/bitcoin/blob/2e34374bf3e12b37b0c66824a6c998073cdfab01/src/chain.cpp#L131
276    // We need to compute 2**256 / (bnTarget+1), but we can't represent 2**256
277    // as it's too large for an arith_uint256. However, as 2**256 is at least as large
278    // as bnTarget+1, it is equal to ((2**256 - bnTarget - 1) / (bnTarget+1)) + 1,
279    // or ~bnTarget / (bnTarget+1) + 1.
280
281    let res = (!target / (target + 1)) + 1;
282    res.try_into().expect("Work should not exceed 2**192")
283}
284
/// A window block's difficulty-relevant data: its header timestamp (milliseconds),
/// its compact difficulty target (`bits`), and its hash/blue-work pair used for ordering.
#[derive(Eq)]
struct DifficultyBlock {
    timestamp: u64,
    bits: u32,
    sortable_block: SortableBlock,
}
291
292impl PartialEq for DifficultyBlock {
293    fn eq(&self, other: &Self) -> bool {
294        // If the sortable blocks are equal the timestamps and bits that are associated with the block are equal for sure.
295        self.sortable_block == other.sortable_block
296    }
297}
298
299impl PartialOrd for DifficultyBlock {
300    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
301        Some(self.cmp(other))
302    }
303}
304
305impl Ord for DifficultyBlock {
306    fn cmp(&self, other: &Self) -> Ordering {
307        self.timestamp.cmp(&other.timestamp).then_with(|| self.sortable_block.cmp(&other.sortable_block))
308    }
309}