Skip to main content

tycho_block_util/block/
block_stuff.rs

1use std::mem::ManuallyDrop;
2use std::sync::{Arc, OnceLock};
3use std::time::Duration;
4
5use anyhow::Result;
6use bytes::Bytes;
7use tycho_types::models::*;
8use tycho_types::prelude::*;
9use tycho_util::FastHashMap;
10use tycho_util::mem::Reclaimer;
11
12use crate::archive::WithArchiveData;
13
/// A [`BlockStuff`] paired with its raw archive data (see [`WithArchiveData`]).
pub type BlockStuffAug = WithArchiveData<BlockStuff>;
15
/// Deserialized block.
///
/// A cheaply cloneable handle: cloning only bumps the refcount of the
/// shared [`Inner`].
#[derive(Clone)]
#[repr(transparent)]
pub struct BlockStuff {
    inner: Arc<Inner>,
}
22
23impl BlockStuff {
    /// Time until the block is considered "trusted". We use it to force
    /// all new nodes to download at least this amount of history.
    ///
    /// Used by [`Self::can_use_for_boot`] and
    /// [`Self::time_until_can_use_for_boot`].
    pub const BOOT_OFFSET: Duration = Duration::from_secs(12 * 3600);
27
28    pub fn compute_is_persistent(block_utime: u32, prev_utime: u32) -> bool {
29        let hack_enabled = cfg!(tycho_unstable)
30            && std::env::var("HACK_EACH_KEY_BLOCK_IS_PERSISTENT").unwrap_or_default() == "1";
31
32        if hack_enabled {
33            true
34        } else {
35            block_utime >> 17 != prev_utime >> 17
36        }
37    }
38
39    pub fn can_use_for_boot(block_utime: u32, now_utime: u32) -> bool {
40        now_utime.saturating_sub(block_utime) as u64 >= Self::BOOT_OFFSET.as_secs()
41    }
42
43    pub fn time_until_can_use_for_boot(block_utime: u32, now_utime: u32) -> Duration {
44        let time_since_collated = Duration::from_secs(now_utime.saturating_sub(block_utime) as _);
45        Self::BOOT_OFFSET.saturating_sub(time_since_collated)
46    }
47
    /// Creates a minimal test-only block for `shard`/`seqno` with no
    /// additional modifications applied.
    #[cfg(any(test, feature = "test"))]
    pub fn new_empty(shard: ShardIdent, seqno: u32) -> Self {
        Self::new_with(shard, seqno, |_| {})
    }
52
    /// Creates a minimal test-only block, letting `modify` tweak it before
    /// the root cell and hashes are computed.
    ///
    /// NOTE(review): the resulting `BlockId` takes `shard`/`seqno` from the
    /// *original* `block_info`, while the hashes are computed from the block
    /// *after* `modify` ran. If `modify` replaces `block.info`, the id and
    /// the block contents may disagree — confirm this is intended for tests.
    #[cfg(any(test, feature = "test"))]
    pub fn new_with<F>(shard: ShardIdent, seqno: u32, modify: F) -> Self
    where
        F: FnOnce(&mut Block),
    {
        use tycho_types::cell::Lazy;
        use tycho_types::merkle::MerkleUpdate;

        // Nominal serialized size reported back via `data_size` for this stub.
        const DATA_SIZE: usize = 1024; // ~1 KB of data for an empty block.

        let block_info = BlockInfo {
            shard,
            seqno,
            ..Default::default()
        };

        let mut block = Block {
            global_id: 0,
            info: Lazy::new(&block_info).unwrap(),
            value_flow: Lazy::new(&ValueFlow::default()).unwrap(),
            state_update: Lazy::new(&MerkleUpdate::default()).unwrap(),
            out_msg_queue_updates: OutMsgQueueUpdates {
                diff_hash: Default::default(),
                tail_len: 0,
            },
            extra: Lazy::new(&BlockExtra::default()).unwrap(),
        };

        // Apply modifications
        modify(&mut block);

        // Hashes are computed from the (possibly modified) block.
        let root = CellBuilder::build_from(&block).unwrap();
        let root_hash = *root.repr_hash();
        let file_hash = Boc::file_hash_blake(Boc::encode(&root));

        let block_id = BlockId {
            shard: block_info.shard,
            seqno: block_info.seqno,
            root_hash,
            file_hash,
        };

        Self::from_block_and_root(&block_id, block, root, DATA_SIZE)
    }
97
98    pub fn from_block_and_root(id: &BlockId, block: Block, root: Cell, data_size: usize) -> Self {
99        debug_assert_eq!(&id.root_hash, root.repr_hash());
100
101        Self {
102            inner: Arc::new(Inner {
103                id: *id,
104                data_size,
105                parts: ManuallyDrop::new(InnerParts {
106                    block,
107                    root,
108                    block_info: Default::default(),
109                    block_extra: Default::default(),
110                    block_mc_extra: Default::default(),
111                }),
112            }),
113        }
114    }
115
116    pub fn deserialize_checked(id: &BlockId, data: &[u8]) -> Result<Self> {
117        let file_hash = Boc::file_hash_blake(data);
118        anyhow::ensure!(
119            id.file_hash.as_slice() == file_hash.as_slice(),
120            "file_hash mismatch for {id}"
121        );
122
123        Self::deserialize(id, data)
124    }
125
126    pub fn deserialize(id: &BlockId, data: &[u8]) -> Result<Self> {
127        let root = Boc::decode(data)?;
128        anyhow::ensure!(
129            &id.root_hash == root.repr_hash(),
130            "root_hash mismatch for {id}"
131        );
132
133        let block = root.parse::<Block>()?;
134        Ok(Self {
135            inner: Arc::new(Inner {
136                id: *id,
137                data_size: data.len(),
138                parts: ManuallyDrop::new(InnerParts {
139                    block,
140                    root,
141                    block_info: Default::default(),
142                    block_extra: Default::default(),
143                    block_mc_extra: Default::default(),
144                }),
145            }),
146        })
147    }
148
    /// Returns the root cell of the block.
    pub fn root_cell(&self) -> &Cell {
        &self.inner.root
    }
152
    /// Returns the serialized block size in bytes, as recorded at
    /// construction time.
    pub fn data_size(&self) -> usize {
        self.inner.data_size
    }
156
    /// Attaches raw archive data to this block, producing a
    /// [`BlockStuffAug`].
    pub fn with_archive_data<A>(self, data: A) -> WithArchiveData<Self>
    where
        Bytes: From<A>,
    {
        WithArchiveData::new(self, data)
    }
163
    /// Returns the full id of this block.
    pub fn id(&self) -> &BlockId {
        &self.inner.id
    }
167
    /// Returns a reference to the deserialized block.
    pub fn block(&self) -> &Block {
        &self.inner.block
    }
171
    /// Returns an owned copy of the block.
    ///
    /// NOTE(review): this always clones, even when this handle is the sole
    /// owner — the block cannot be moved out because `Inner` sits behind a
    /// shared `Arc` and has a custom `Drop`.
    pub fn into_block(self) -> Block {
        self.inner.block.clone()
    }
175
176    pub fn construct_prev_id(&self) -> Result<(BlockId, Option<BlockId>)> {
177        let header = self.load_info()?;
178        match header.load_prev_ref()? {
179            PrevBlockRef::Single(prev) => {
180                let shard = if header.after_split {
181                    let Some(shard) = header.shard.merge() else {
182                        anyhow::bail!("failed to merge shard");
183                    };
184                    shard
185                } else {
186                    header.shard
187                };
188
189                let id = BlockId {
190                    shard,
191                    seqno: prev.seqno,
192                    root_hash: prev.root_hash,
193                    file_hash: prev.file_hash,
194                };
195
196                Ok((id, None))
197            }
198            PrevBlockRef::AfterMerge { left, right } => {
199                let Some((left_shard, right_shard)) = header.shard.split() else {
200                    anyhow::bail!("failed to split shard");
201                };
202
203                let id1 = BlockId {
204                    shard: left_shard,
205                    seqno: left.seqno,
206                    root_hash: left.root_hash,
207                    file_hash: left.file_hash,
208                };
209
210                let id2 = BlockId {
211                    shard: right_shard,
212                    seqno: right.seqno,
213                    root_hash: right.root_hash,
214                    file_hash: right.file_hash,
215                };
216
217                Ok((id1, Some(id2)))
218            }
219        }
220    }
221
    /// Loads the block info, memoizing the result (including errors) in a
    /// [`OnceLock`]; subsequent calls return the cached value, cloning the
    /// error if parsing failed.
    pub fn load_info(&self) -> Result<&BlockInfo, tycho_types::error::Error> {
        #[expect(
            clippy::disallowed_methods,
            reason = "We are implementing that load_info getter here"
        )]
        self.inner
            .block_info
            .get_or_init(|| self.inner.block.load_info())
            .as_ref()
            .map_err(|e| e.clone())
    }
233
    /// Loads the block extra, memoizing the result (including errors) in a
    /// [`OnceLock`]; subsequent calls return the cached value, cloning the
    /// error if parsing failed.
    pub fn load_extra(&self) -> Result<&BlockExtra, tycho_types::error::Error> {
        #[expect(
            clippy::disallowed_methods,
            reason = "We are implementing that load_extra getter here"
        )]
        self.inner
            .block_extra
            .get_or_init(|| self.inner.block.load_extra())
            .as_ref()
            .map_err(|e| e.clone())
    }
245
    /// Loads the masterchain-specific block extra, memoized like
    /// [`Self::load_extra`].
    ///
    /// Fails with [`tycho_types::error::Error::InvalidData`] when the block
    /// extra carries no custom part (i.e. not a masterchain block).
    pub fn load_custom(&self) -> Result<&McBlockExtra, tycho_types::error::Error> {
        // Load (or fetch the cached) extra first; its error propagates as-is.
        let extra = self.load_extra()?;

        #[expect(
            clippy::disallowed_methods,
            reason = "We are implementing that load_custom getter here"
        )]
        self.inner
            .block_mc_extra
            .get_or_init(|| {
                extra
                    .load_custom()
                    .and_then(|c| c.ok_or(tycho_types::error::Error::InvalidData))
            })
            .as_ref()
            .map_err(|e| e.clone())
    }
263
264    pub fn shard_blocks(&self) -> Result<FastHashMap<ShardIdent, BlockId>> {
265        self.load_custom()?
266            .shards
267            .latest_blocks()
268            .map(|id| id.map(|id| (id.shard, id)).map_err(From::from))
269            .collect()
270    }
271
272    pub fn shard_blocks_seqno(&self) -> Result<FastHashMap<ShardIdent, u32>> {
273        self.load_custom()?
274            .shards
275            .latest_blocks()
276            .map(|id| id.map(|id| (id.shard, id.seqno)).map_err(From::from))
277            .collect()
278    }
279}
280
impl AsRef<Block> for BlockStuff {
    /// Borrows the underlying deserialized [`Block`].
    #[inline]
    fn as_ref(&self) -> &Block {
        &self.inner.block
    }
}
287
// SAFETY: `BlockStuff` is `#[repr(transparent)]` over `Arc<Inner>`, so it can
// act as a reference-counted pointer to `Inner` by delegating every operation
// to the `Arc`'s own `RefCnt` implementation.
unsafe impl arc_swap::RefCnt for BlockStuff {
    type Base = Inner;

    fn into_ptr(me: Self) -> *mut Self::Base {
        arc_swap::RefCnt::into_ptr(me.inner)
    }

    fn as_ptr(me: &Self) -> *mut Self::Base {
        arc_swap::RefCnt::as_ptr(&me.inner)
    }

    unsafe fn from_ptr(ptr: *const Self::Base) -> Self {
        Self {
            // SAFETY: per the `RefCnt` contract, `ptr` was produced by
            // `into_ptr`/`as_ptr`, i.e. it originates from an `Arc<Inner>`.
            inner: unsafe { arc_swap::RefCnt::from_ptr(ptr) },
        }
    }
}
305
/// Shared state of a [`BlockStuff`], stored behind an `Arc`.
#[doc(hidden)]
pub struct Inner {
    id: BlockId,
    data_size: usize,
    // Wrapped in `ManuallyDrop` so `Drop for Inner` can move the parts out
    // and hand them to the `Reclaimer` instead of dropping them inline.
    parts: ManuallyDrop<InnerParts>,
}
312
impl std::ops::Deref for Inner {
    type Target = InnerParts;

    /// Transparent read access to the (manually-dropped) parts.
    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.parts
    }
}
321
impl Drop for Inner {
    fn drop(&mut self) {
        // SAFETY: Inner is dropped only once, and `parts` is never touched
        // again after being taken here.
        let parts = unsafe { ManuallyDrop::take(&mut self.parts) };
        // Offload destruction of the parts to the global reclaimer rather
        // than dropping them on the current thread.
        Reclaimer::instance().drop(parts);
    }
}
329
// NOTE: Stored as a separate struct to queue drop all at once.
#[doc(hidden)]
pub struct InnerParts {
    block: Block,
    root: Cell,
    // Lazily initialized caches for the parsed sub-structures; errors are
    // memoized too (see `load_info` / `load_extra` / `load_custom`).
    block_info: OnceLock<Result<BlockInfo, tycho_types::error::Error>>,
    block_extra: OnceLock<Result<BlockExtra, tycho_types::error::Error>>,
    block_mc_extra: OnceLock<Result<McBlockExtra, tycho_types::error::Error>>,
}