tycho_types/cell/
mod.rs

1//! Cell tree implementation.
2
3use std::ops::{BitOr, BitOrAssign};
4use std::str::FromStr;
5
6pub use tycho_types_proc::{Load, Store};
7
8pub use self::builder::{
9    CellBuilder, CellDataBuilder, CellRefsBuilder, DisplayCellBuilderData, Store,
10};
11pub use self::cell_context::{CellContext, CellParts, LoadMode};
12#[cfg(not(feature = "sync"))]
13pub use self::cell_impl::rc::{Cell, CellInner, WeakCell};
14#[cfg(feature = "sync")]
15pub use self::cell_impl::sync::{Cell, CellInner, WeakCell};
16pub use self::cell_impl::{StaticCell, VirtualCellWrapper};
17pub use self::lazy::{Lazy, LazyExotic};
18pub use self::slice::{
19    CellSlice, CellSliceParts, CellSliceRange, DisplayCellSliceData, ExactSize, Load, LoadCell,
20};
21pub use self::usage_tree::{UsageTree, UsageTreeMode, UsageTreeWithSubtrees};
22use crate::error::{Error, ParseHashBytesError};
23use crate::util::Bitstring;
24
25/// Generic cell implementation.
26mod cell_impl;
27
28/// Traits for gas accounting and resolving exotic cells.
29mod cell_context;
30
31/// Cell view utils.
32mod slice;
33
34/// Cell creation utils.
35mod builder;
36
37/// Lazy-loaded cell data.
38mod lazy;
39
40mod usage_tree;
41
#[cfg(feature = "sync")]
#[doc(hidden)]
mod __checks {
    use super::*;

    // Compile-time assertions: with the `sync` feature enabled, the core
    // cell types must be sendable across threads. Fails the build otherwise.
    assert_impl_all!(Cell: Send);
    assert_impl_all!(CellSlice: Send);
    assert_impl_all!(CellBuilder: Send);
}
51
/// Marker trait which allows casting lazy-loaded data.
///
/// Implementing `EquivalentRepr<T>` for `U` asserts that `U` shares the
/// serialized representation of `T` (presumably used by the `lazy` module
/// to reinterpret lazy-loaded values — see that module for the cast sites).
pub trait EquivalentRepr<T> {}
54
/// Every type is trivially equivalent to itself.
impl<T> EquivalentRepr<T> for T {}
57
/// Cell implementation family.
pub trait CellFamily: Sized {
    /// The default cell context type.
    type EmptyCellContext: CellContext;

    /// Creates an empty cell.
    ///
    /// NOTE: in most cases empty cell is ZST.
    fn empty_cell() -> Cell;

    /// Returns a static reference to the empty cell.
    fn empty_cell_ref() -> &'static DynCell;

    /// Creates an empty cell context.
    fn empty_context() -> &'static Self::EmptyCellContext;

    /// Returns a static reference to the cell with all zeros.
    fn all_zeros_ref() -> &'static DynCell;

    /// Returns a static reference to the cell with all ones.
    fn all_ones_ref() -> &'static DynCell;

    /// Creates a virtualized cell from the specified cell.
    fn virtualize(cell: Cell) -> Cell;
}
83
/// Dyn trait type alias.
///
/// Without the `sync` feature the trait object carries no auto-trait bounds.
#[cfg(not(feature = "sync"))]
pub type DynCell = dyn CellImpl;

/// Dyn trait type alias.
///
/// With the `sync` feature the trait object additionally requires
/// `Send + Sync` so cells can be shared across threads.
#[cfg(feature = "sync")]
pub type DynCell = dyn CellImpl + Send + Sync;
91
// Identity conversion; lets generic code accept `impl AsRef<DynCell>`.
impl AsRef<DynCell> for DynCell {
    #[inline(always)]
    fn as_ref(&self) -> &Self {
        self
    }
}

// Identity conversion for mutable references.
impl AsMut<DynCell> for DynCell {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut Self {
        self
    }
}
105
/// Represents the interface of a well-formed cell.
///
/// Since all basic operations are implemented via dynamic dispatch,
/// all high-level helper methods are implemented for `dyn Cell`.
pub trait CellImpl {
    /// Unwraps the root cell from the usage tracking.
    fn untrack(self: CellInner<Self>) -> Cell;

    /// Returns cell descriptor.
    ///
    /// # See also
    ///
    /// Cell descriptor contains some tightly packed info about the cell.
    /// If you want convenient methods to access it use:
    /// [`cell_type`], [`level_mask`], [`reference_count`], [`is_exotic`]
    ///
    /// [`cell_type`]: CellDescriptor::cell_type
    /// [`level_mask`]: CellDescriptor::level_mask
    /// [`reference_count`]: CellDescriptor::reference_count
    /// [`is_exotic`]: CellDescriptor::is_exotic
    fn descriptor(&self) -> CellDescriptor;

    /// Returns the raw data of this cell.
    fn data(&self) -> &[u8];

    /// Returns the data size of this cell in bits.
    fn bit_len(&self) -> u16;

    /// Returns a reference to the Nth child cell.
    fn reference(&self, index: u8) -> Option<&DynCell>;

    /// Returns the Nth child cell.
    fn reference_cloned(&self, index: u8) -> Option<Cell>;

    /// Returns this cell as a virtualized cell, so that all hashes
    /// and depths will have an offset.
    fn virtualize(&self) -> &DynCell;

    /// Returns cell hash for the specified level.
    ///
    /// Cell representation hash is the hash at the maximum level ([`LevelMask::MAX_LEVEL`]).
    /// Use `repr_hash` as a simple alias for this.
    fn hash(&self, level: u8) -> &HashBytes;

    /// Returns cell depth for the specified level.
    fn depth(&self, level: u8) -> u16;

    /// Consumes the first child during the deep drop.
    fn take_first_child(&mut self) -> Option<Cell>;

    /// Replaces the first child with the provided parent during the deep drop.
    ///
    /// Returns `Ok(child)` if child was successfully replaced,
    /// `Err(parent)` otherwise.
    fn replace_first_child(&mut self, parent: Cell) -> Result<Cell, Cell>;

    /// Consumes the next child (except first) during the deep drop.
    fn take_next_child(&mut self) -> Option<Cell>;

    /// Returns the sum of all bits and cells of all elements in the cell tree
    /// (including this cell).
    ///
    /// NOTE: identical cells are counted each time they occur in the tree.
    #[cfg(feature = "stats")]
    fn stats(&self) -> CellTreeStats;
}
172
impl DynCell {
    /// Computes cell type from descriptor bytes.
    #[inline]
    pub fn cell_type(&self) -> CellType {
        self.descriptor().cell_type()
    }

    /// Computes the cell level from the level mask.
    #[inline]
    pub fn level(&self) -> u8 {
        self.descriptor().level_mask().level()
    }

    /// Computes the level mask from the descriptor bytes.
    #[inline]
    pub fn level_mask(&self) -> LevelMask {
        self.descriptor().level_mask()
    }

    /// Computes the number of child cells from descriptor bytes.
    #[inline]
    pub fn reference_count(&self) -> u8 {
        self.descriptor().reference_count()
    }

    /// Tries to load the specified child cell as slice.
    /// Returns an error if the loaded cell is absent or is pruned.
    pub fn get_reference_as_slice(&self, index: u8) -> Result<CellSlice<'_>, Error> {
        match self.reference(index) {
            // `CellSlice::new` performs the exotic-cell check.
            Some(cell) => CellSlice::new(cell),
            None => Err(Error::CellUnderflow),
        }
    }

    /// Returns whether the cell is not [`Ordinary`].
    ///
    /// [`Ordinary`]: CellType::Ordinary
    #[inline]
    pub fn is_exotic(&self) -> bool {
        self.descriptor().is_exotic()
    }

    /// Returns a representation hash of the cell.
    #[inline]
    pub fn repr_hash(&self) -> &HashBytes {
        self.hash(LevelMask::MAX_LEVEL)
    }

    /// Returns a representation depth of the cell.
    #[inline]
    pub fn repr_depth(&self) -> u16 {
        self.depth(LevelMask::MAX_LEVEL)
    }

    /// Returns `true` if any of cell levels has the maximum depth.
    pub fn has_max_depth(&self) -> bool {
        for level in self.descriptor().level_mask() {
            if self.depth(level) == u16::MAX {
                return true;
            }
        }
        false
    }

    /// Returns true if the cell is empty (no bits, no refs).
    pub fn is_empty(&self) -> bool {
        // Compared via the representation hash against the well-known
        // hash of the empty ordinary cell.
        self.hash(LevelMask::MAX_LEVEL) == EMPTY_CELL_HASH
    }

    /// Creates an iterator through child nodes.
    #[inline]
    pub fn references(&self) -> RefsIter<'_> {
        RefsIter {
            cell: self,
            max: self.reference_count(),
            index: 0,
        }
    }

    /// Returns this cell as a cell slice.
    /// Returns an error if the cell is not ordinary.
    #[inline]
    pub fn as_slice(&'_ self) -> Result<CellSlice<'_>, Error> {
        CellSlice::new(self)
    }

    /// Returns this cell as a cell slice.
    ///
    /// Loads cell as is.
    #[inline]
    pub fn as_slice_allow_exotic(&'_ self) -> CellSlice<'_> {
        CellSlice::new_allow_exotic(self)
    }

    /// Recursively computes the count of distinct cells returning
    /// the total storage used by this dag taking into account the
    /// identification of equal cells.
    ///
    /// Returns `None` when the number of visited cells exceeds `limit`.
    pub fn compute_unique_stats(&self, limit: usize) -> Option<CellTreeStats> {
        StorageStat::compute_for_cell(self, limit)
    }

    /// Recursively traverses the cells tree without tracking a uniqueness
    /// of cells. Useful for adding small subtrees to merkle proofs.
    pub fn touch_recursive(&self) {
        self.data(); // Touch data to include it if `OnDataAccess` is used.
        for child in self.references() {
            child.touch_recursive();
        }
    }

    /// Returns an object that implements [`Debug`] for printing only
    /// the root cell of the cell tree.
    ///
    /// [`Debug`]: std::fmt::Debug
    #[inline]
    pub fn debug_root(&'_ self) -> DebugCell<'_> {
        DebugCell(self)
    }

    /// Returns an object that implements [`Display`] for printing only
    /// the root cell of the cell tree.
    ///
    /// [`Display`]: std::fmt::Display
    #[inline]
    pub fn display_root(&'_ self) -> DisplayCellRoot<'_> {
        DisplayCellRoot {
            cell: self,
            level: 0,
        }
    }

    /// Returns an object that implements [`Display`] for printing all
    /// cells in the cell tree.
    ///
    /// [`Display`]: std::fmt::Display
    #[inline]
    pub fn display_tree(&'_ self) -> DisplayCellTree<'_> {
        DisplayCellTree(self)
    }

    /// Returns an object which will display cell data as a bitstring
    /// with a termination bit.
    #[inline]
    pub fn display_data(&self) -> impl std::fmt::Display + std::fmt::Binary + '_ {
        // Local adapter: formats the cell data via `Bitstring` either as
        // hex (`Display`) or as raw bits (`Binary`).
        struct DisplayData<'a>(&'a DynCell);

        impl std::fmt::Display for DisplayData<'_> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                std::fmt::Display::fmt(
                    &Bitstring {
                        bytes: self.0.data(),
                        bit_len: self.0.bit_len(),
                    },
                    f,
                )
            }
        }

        impl std::fmt::Binary for DisplayData<'_> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                std::fmt::Binary::fmt(
                    &Bitstring {
                        bytes: self.0.data(),
                        bit_len: self.0.bit_len(),
                    },
                    f,
                )
            }
        }

        DisplayData(self)
    }

    /// Converts this cell into a slice and tries to load the specified type from it.
    /// Fails if the cell is not ordinary.
    ///
    /// NOTE: parsing `Cell` will load the first reference!
    #[inline]
    pub fn parse<'a, T: Load<'a>>(&'a self) -> Result<T, Error> {
        T::load_from(&mut ok!(self.as_slice()))
    }

    /// Loads an exotic cell.
    #[inline]
    pub fn parse_exotic<'a, T: LoadCell<'a>>(&'a self) -> Result<T, Error> {
        if self.is_exotic() {
            T::load_from_cell(self)
        } else {
            Err(Error::UnexpectedOrdinaryCell)
        }
    }
}
365
impl std::fmt::Debug for DynCell {
    // Prints `Cell { ty: ..., hash: ... }` using only root-cell info.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        crate::util::debug_struct_field2_finish(
            f,
            "Cell",
            "ty",
            &self.cell_type(),
            "hash",
            self.repr_hash(),
        )
    }
}

// Cells are compared by representation hash only.
impl Eq for DynCell {}

impl PartialEq<DynCell> for DynCell {
    #[inline]
    fn eq(&self, other: &DynCell) -> bool {
        // Two cells are equal iff their representation hashes match.
        self.repr_hash() == other.repr_hash()
    }
}
387
#[cfg(feature = "serde")]
impl serde::Serialize for DynCell {
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        // Encode the whole cell tree into a BOC byte blob first.
        let encoded = crate::boc::Boc::encode(self);
        match serializer.is_human_readable() {
            // Human-readable formats get a base64 string.
            true => serializer.serialize_str(&crate::util::encode_base64(encoded)),
            // Binary formats get the raw bytes.
            false => serializer.serialize_bytes(&encoded),
        }
    }
}
399
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for Cell {
    // Builds a cell from an arbitrary builder.
    // NOTE(review): assumes an arbitrary `CellBuilder` always builds
    // successfully — the `unwrap` would panic otherwise; confirm in `builder`.
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        Ok(u.arbitrary::<CellBuilder>()?.build().unwrap())
    }

    #[inline]
    fn size_hint(depth: usize) -> (usize, Option<usize>) {
        Self::try_size_hint(depth).unwrap_or_default()
    }

    // Delegates to the builder's size hint, propagating recursion-depth errors.
    #[inline]
    fn try_size_hint(
        depth: usize,
    ) -> Result<(usize, Option<usize>), arbitrary::MaxRecursionReached> {
        <CellBuilder as arbitrary::Arbitrary>::try_size_hint(depth)
    }
}
419
420impl From<Cell> for CellSliceParts {
421    #[inline]
422    fn from(value: Cell) -> Self {
423        (CellSliceRange::full(&value), value)
424    }
425}
426
427/// An iterator through child nodes.
428#[must_use = "iterators are lazy and do nothing unless consumed"]
429pub struct RefsIter<'a> {
430    cell: &'a DynCell,
431    max: u8,
432    index: u8,
433}
434
435impl<'a> RefsIter<'a> {
436    /// Returns a cell by children of which we are iterating.
437    #[inline]
438    pub fn cell(&self) -> &'a DynCell {
439        self.cell
440    }
441
442    /// Returns a reference to the next() value without advancing the iterator.
443    #[inline]
444    pub fn peek(&self) -> Option<&'a DynCell> {
445        if self.index >= self.max {
446            None
447        } else {
448            self.cell.reference(self.index)
449        }
450    }
451
452    /// Returns a cloned reference to the next() value without advancing the iterator.
453    #[inline]
454    pub fn peek_cloned(&self) -> Option<Cell> {
455        if self.index >= self.max {
456            None
457        } else {
458            self.cell.reference_cloned(self.index)
459        }
460    }
461
462    /// Returns a reference to the next_back() value without advancing the iterator.
463    #[inline]
464    pub fn peek_prev(&self) -> Option<&'a DynCell> {
465        if self.index > 0 {
466            self.cell.reference(self.index - 1)
467        } else {
468            None
469        }
470    }
471
472    /// Returns a cloned reference to the next_back() value without advancing the iterator.
473    #[inline]
474    pub fn peek_prev_cloned(&self) -> Option<Cell> {
475        if self.index > 0 {
476            self.cell.reference_cloned(self.index - 1)
477        } else {
478            None
479        }
480    }
481
482    /// Creates an iterator through child nodes which produces cloned references.
483    #[inline]
484    pub fn cloned(self) -> ClonedRefsIter<'a> {
485        ClonedRefsIter { inner: self }
486    }
487}
488
489impl Clone for RefsIter<'_> {
490    #[inline]
491    fn clone(&self) -> Self {
492        Self {
493            cell: self.cell,
494            max: self.max,
495            index: self.index,
496        }
497    }
498}
499
500impl<'a> Iterator for RefsIter<'a> {
501    type Item = &'a DynCell;
502
503    #[inline]
504    fn next(&mut self) -> Option<Self::Item> {
505        if self.index >= self.max {
506            None
507        } else {
508            let child = self.cell.reference(self.index);
509            self.index += 1;
510            child
511        }
512    }
513
514    #[inline]
515    fn size_hint(&self) -> (usize, Option<usize>) {
516        let remaining = self.max.saturating_sub(self.index) as usize;
517        (remaining, Some(remaining))
518    }
519}
520
521impl DoubleEndedIterator for RefsIter<'_> {
522    #[inline]
523    fn next_back(&mut self) -> Option<Self::Item> {
524        if self.max > self.index {
525            self.max -= 1;
526            self.cell.reference(self.max)
527        } else {
528            None
529        }
530    }
531}
532
533impl ExactSizeIterator for RefsIter<'_> {
534    #[inline]
535    fn len(&self) -> usize {
536        self.size_hint().0
537    }
538}
539
540/// An iterator through child nodes which produces cloned references.
541#[must_use = "iterators are lazy and do nothing unless consumed"]
542pub struct ClonedRefsIter<'a> {
543    inner: RefsIter<'a>,
544}
545
546impl<'a> ClonedRefsIter<'a> {
547    /// Returns a cell by children of which we are iterating.
548    #[inline]
549    pub fn cell(&self) -> &'a DynCell {
550        self.inner.cell
551    }
552
553    /// Returns a reference to the next() value without advancing the iterator.
554    #[inline]
555    pub fn peek(&self) -> Option<Cell> {
556        self.inner.peek_cloned()
557    }
558
559    /// Returns a reference to the next_back() value without advancing the iterator.
560    #[inline]
561    pub fn peek_prev(&self) -> Option<Cell> {
562        self.inner.peek_prev_cloned()
563    }
564}
565
566impl Clone for ClonedRefsIter<'_> {
567    #[inline]
568    fn clone(&self) -> Self {
569        Self {
570            inner: self.inner.clone(),
571        }
572    }
573}
574
575impl Iterator for ClonedRefsIter<'_> {
576    type Item = Cell;
577
578    #[inline]
579    fn next(&mut self) -> Option<Self::Item> {
580        if self.inner.index >= self.inner.max {
581            None
582        } else {
583            let child = self.inner.cell.reference_cloned(self.inner.index);
584            self.inner.index += 1;
585            child
586        }
587    }
588
589    #[inline]
590    fn size_hint(&self) -> (usize, Option<usize>) {
591        self.inner.size_hint()
592    }
593}
594
595impl DoubleEndedIterator for ClonedRefsIter<'_> {
596    #[inline]
597    fn next_back(&mut self) -> Option<Self::Item> {
598        if self.inner.max > self.inner.index {
599            self.inner.max -= 1;
600            self.inner.cell.reference_cloned(self.inner.max)
601        } else {
602            None
603        }
604    }
605}
606
607impl ExactSizeIterator for ClonedRefsIter<'_> {
608    #[inline]
609    fn len(&self) -> usize {
610        self.size_hint().0
611    }
612}
613
/// A 32-byte cell hash (`#[repr(transparent)]` newtype over `[u8; 32]`).
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[repr(transparent)]
pub struct HashBytes(pub [u8; 32]);
618
619impl HashBytes {
620    /// Array of zero bytes.
621    pub const ZERO: Self = Self([0; 32]);
622
623    /// Converts slice to a hash bytes.
624    ///
625    /// # Panics
626    ///
627    /// Panics if the length of the slice is not 32 bytes.
628    #[inline]
629    pub fn from_slice(slice: &[u8]) -> Self {
630        Self(slice.try_into().expect("slice with incorrect length"))
631    }
632
633    /// Wraps a reference to an internal array into a newtype reference.
634    #[inline(always)]
635    pub const fn wrap(value: &[u8; 32]) -> &Self {
636        // SAFETY: HashBytes is #[repr(transparent)]
637        unsafe { &*(value as *const [u8; 32] as *const Self) }
638    }
639
640    /// Returns a slice containing the entire array.
641    pub const fn as_slice(&self) -> &[u8] {
642        self.0.as_slice()
643    }
644
645    /// Returns a mutable slice containing the entire array.
646    pub fn as_mut_slice(&mut self) -> &mut [u8] {
647        self.0.as_mut_slice()
648    }
649
650    /// Returns an internal array.
651    pub const fn as_array(&self) -> &[u8; 32] {
652        &self.0
653    }
654
655    /// Returns a mutable internal array.
656    pub fn as_mut_array(&mut self) -> &mut [u8; 32] {
657        &mut self.0
658    }
659
660    /// Returns a raw pointer to the slice's buffer.
661    #[inline(always)]
662    pub const fn as_ptr(&self) -> *const u8 {
663        &self.0 as *const [u8] as *const u8
664    }
665
666    /// Returns a first chunk of 8 bytes.
667    pub const fn first_chunk(&self) -> &[u8; 8] {
668        match self.0.first_chunk::<8>() {
669            Some(chunk) => chunk,
670            // SAFETY: Const unwrap go brr
671            None => unsafe { std::hint::unreachable_unchecked() },
672        }
673    }
674
675    #[inline(always)]
676    fn fmt_hex(&self, f: &mut std::fmt::Formatter<'_>, table: &[u8; 16]) -> std::fmt::Result {
677        let mut output = [0u8; 64];
678        crate::util::encode_to_hex_slice(&self.0, &mut output, table).ok();
679
680        // SAFETY: output is guaranteed to contain only [0-9a-f]
681        let output = unsafe { std::str::from_utf8_unchecked(&output) };
682        f.write_str(output)
683    }
684}
685
686impl Default for HashBytes {
687    #[inline(always)]
688    fn default() -> Self {
689        Self::ZERO
690    }
691}
692
693impl AsRef<[u8; 32]> for HashBytes {
694    #[inline(always)]
695    fn as_ref(&self) -> &[u8; 32] {
696        &self.0
697    }
698}
699
700impl AsRef<[u8]> for HashBytes {
701    #[inline(always)]
702    fn as_ref(&self) -> &[u8] {
703        self.0.as_ref()
704    }
705}
706
707impl AsMut<[u8; 32]> for HashBytes {
708    #[inline(always)]
709    fn as_mut(&mut self) -> &mut [u8; 32] {
710        &mut self.0
711    }
712}
713
714impl AsMut<[u8]> for HashBytes {
715    #[inline(always)]
716    fn as_mut(&mut self) -> &mut [u8] {
717        self.0.as_mut()
718    }
719}
720
721impl std::borrow::Borrow<[u8]> for HashBytes {
722    #[inline(always)]
723    fn borrow(&self) -> &[u8] {
724        &self.0
725    }
726}
727
728impl std::borrow::BorrowMut<[u8]> for HashBytes {
729    #[inline(always)]
730    fn borrow_mut(&mut self) -> &mut [u8] {
731        &mut self.0
732    }
733}
734
735impl std::borrow::Borrow<HashBytes> for [u8; 32] {
736    #[inline(always)]
737    fn borrow(&self) -> &HashBytes {
738        HashBytes::wrap(self)
739    }
740}
741
742impl PartialEq<[u8; 32]> for HashBytes {
743    #[inline(always)]
744    fn eq(&self, other: &[u8; 32]) -> bool {
745        &self.0 == other
746    }
747}
748
749impl PartialEq<HashBytes> for [u8; 32] {
750    #[inline(always)]
751    fn eq(&self, other: &HashBytes) -> bool {
752        self == &other.0
753    }
754}
755
756impl PartialEq<[u8]> for HashBytes {
757    #[inline(always)]
758    fn eq(&self, other: &[u8]) -> bool {
759        self.0 == other
760    }
761}
762
763impl PartialEq<HashBytes> for [u8] {
764    #[inline(always)]
765    fn eq(&self, other: &HashBytes) -> bool {
766        self == other.0
767    }
768}
769
770impl From<[u8; 32]> for HashBytes {
771    #[inline(always)]
772    fn from(value: [u8; 32]) -> Self {
773        Self(value)
774    }
775}
776
777impl From<sha2::digest::Output<sha2::Sha256>> for HashBytes {
778    #[inline(always)]
779    fn from(value: sha2::digest::Output<sha2::Sha256>) -> Self {
780        Self(value.into())
781    }
782}
783
784#[cfg(feature = "blake3")]
785impl From<blake3::Hash> for HashBytes {
786    #[inline(always)]
787    fn from(value: blake3::Hash) -> Self {
788        Self(value.into())
789    }
790}
791
792impl From<HashBytes> for [u8; 32] {
793    #[inline(always)]
794    fn from(value: HashBytes) -> Self {
795        value.0
796    }
797}
798
799impl FromStr for HashBytes {
800    type Err = ParseHashBytesError;
801
802    fn from_str(s: &str) -> Result<Self, Self::Err> {
803        let mut result = Self::default();
804        match s.len() {
805            64 => {
806                if let Err(e) = hex::decode_to_slice(s, &mut result.0) {
807                    return Err(ParseHashBytesError::InvalidHex(e));
808                }
809            }
810            66 => {
811                if let Err(e) = hex::decode_to_slice(&s[2..], &mut result.0) {
812                    return Err(ParseHashBytesError::InvalidHex(e));
813                }
814            }
815            #[cfg(feature = "base64")]
816            44 => {
817                if let Err(e) = crate::util::decode_base64_slice(s, &mut result.0) {
818                    return Err(ParseHashBytesError::InvalidBase64(e));
819                }
820            }
821            _ => return Err(ParseHashBytesError::UnexpectedStringLength),
822        }
823        Ok(result)
824    }
825}
826
827impl std::fmt::Display for HashBytes {
828    #[inline]
829    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
830        std::fmt::LowerHex::fmt(self, f)
831    }
832}
833
834impl std::fmt::LowerHex for HashBytes {
835    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
836        Self::fmt_hex(self, f, crate::util::HEX_CHARS_LOWER)
837    }
838}
839
840impl std::fmt::UpperHex for HashBytes {
841    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
842        Self::fmt_hex(self, f, crate::util::HEX_CHARS_UPPER)
843    }
844}
845
846impl std::fmt::Debug for HashBytes {
847    #[inline(always)]
848    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
849        std::fmt::Display::fmt(self, f)
850    }
851}
852
853impl<I> std::ops::Index<I> for HashBytes
854where
855    [u8; 32]: std::ops::Index<I>,
856{
857    type Output = <[u8; 32] as std::ops::Index<I>>::Output;
858
859    #[inline]
860    fn index(&self, index: I) -> &Self::Output {
861        std::ops::Index::index(&self.0, index)
862    }
863}
864
865impl<I> std::ops::IndexMut<I> for HashBytes
866where
867    [u8; 32]: std::ops::IndexMut<I>,
868{
869    #[inline]
870    fn index_mut(&mut self, index: I) -> &mut Self::Output {
871        std::ops::IndexMut::index_mut(&mut self.0, index)
872    }
873}
874
#[cfg(feature = "rand8")]
impl rand8::distributions::Distribution<HashBytes> for rand8::distributions::Standard {
    /// Samples 32 uniformly random bytes.
    #[inline]
    fn sample<R: rand8::Rng + ?Sized>(&self, rng: &mut R) -> HashBytes {
        let inner: [u8; 32] = rand8::distributions::Standard.sample(rng);
        HashBytes(inner)
    }
}

#[cfg(feature = "rand9")]
impl rand9::distr::Distribution<HashBytes> for rand9::distr::StandardUniform {
    /// Samples 32 uniformly random bytes.
    #[inline]
    fn sample<R: rand9::Rng + ?Sized>(&self, rng: &mut R) -> HashBytes {
        let inner: [u8; 32] = rand9::distr::StandardUniform.sample(rng);
        HashBytes(inner)
    }
}
890
#[cfg(feature = "serde")]
impl serde::Serialize for HashBytes {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // Binary formats get the raw 32 bytes.
        if !serializer.is_human_readable() {
            return serializer.serialize_bytes(&self.0);
        }

        // Human-readable formats get a lowercase hex string.
        let mut buffer = [0u8; 64];
        crate::util::encode_to_hex_slice(&self.0, &mut buffer, crate::util::HEX_CHARS_LOWER).ok();

        // SAFETY: the buffer holds only ASCII hex digits.
        let hex = unsafe { std::str::from_utf8_unchecked(&buffer) };
        serializer.serialize_str(hex)
    }
}
910
#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for HashBytes {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use ::serde::de::{Error, Visitor};

        // Visitor for human-readable formats: expects a 64-char hex string.
        struct HashBytesHexVisitor;

        impl Visitor<'_> for HashBytesHexVisitor {
            type Value = HashBytes;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("hex-encoded byte array of size 32")
            }

            fn visit_str<E: Error>(self, value: &str) -> Result<Self::Value, E> {
                let mut result = HashBytes([0; 32]);
                match hex::decode_to_slice(value, &mut result.0) {
                    Ok(()) => Ok(result),
                    Err(_) => Err(Error::invalid_value(
                        serde::de::Unexpected::Str(value),
                        &self,
                    )),
                }
            }
        }

        // Visitor for binary formats: expects exactly 32 raw bytes.
        pub struct HashBytesRawVisitor;

        impl Visitor<'_> for HashBytesRawVisitor {
            type Value = HashBytes;

            fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                f.write_fmt(format_args!("a byte array of size 32"))
            }

            fn visit_bytes<E: Error>(self, v: &[u8]) -> Result<Self::Value, E> {
                v.try_into()
                    .map(HashBytes)
                    .map_err(|_e| Error::invalid_length(v.len(), &self))
            }
        }

        // Mirror of `Serialize`: hex string for human-readable formats,
        // raw bytes otherwise.
        if deserializer.is_human_readable() {
            deserializer.deserialize_str(HashBytesHexVisitor)
        } else {
            deserializer.deserialize_bytes(HashBytesRawVisitor)
        }
    }
}
963
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for HashBytes {
    /// Draws exactly 32 raw bytes and wraps them.
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        Ok(Self(u.arbitrary()?))
    }

    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        (32, Some(32))
    }
}
976
/// Hash of an empty (0 bits of data, no refs) ordinary cell.
// NOTE(review): presumably the SHA-256 of the empty cell's two zero
// descriptor bytes — verify against the hasher in `cell_impl`.
pub static EMPTY_CELL_HASH: &HashBytes = HashBytes::wrap(&[
    0x96, 0xa2, 0x96, 0xd2, 0x24, 0xf2, 0x85, 0xc6, 0x7b, 0xee, 0x93, 0xc3, 0x0f, 0x8a, 0x30, 0x91,
    0x57, 0xf0, 0xda, 0xa3, 0x5d, 0xc5, 0xb8, 0x7e, 0x41, 0x0b, 0x78, 0x63, 0x0a, 0x09, 0xcf, 0xc7,
]);
982
/// Well-formed cell type.
///
/// Byte encoding is defined by [`CellType::to_byte`] / [`CellType::from_byte`].
#[derive(Default, Debug, Copy, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum CellType {
    /// Cell of this type just stores data and references.
    #[default]
    Ordinary,
    /// Exotic cell which was pruned from the original tree of cells
    /// when a Merkle proof has been created.
    PrunedBranch,
    /// Exotic cell with a reference to the cell with a library.
    LibraryReference,
    /// Exotic cell with one hash and one reference.
    MerkleProof,
    /// Exotic cell with two hashes and two references.
    MerkleUpdate,
}
1000
1001impl CellType {
1002    /// Returns whether this cell type is Merkle proof or Merkle update.
1003    #[inline]
1004    pub const fn is_merkle(self) -> bool {
1005        matches!(self, Self::MerkleProof | Self::MerkleUpdate)
1006    }
1007
1008    /// Returns whether the cell is not ordinary.
1009    #[inline]
1010    pub const fn is_exotic(self) -> bool {
1011        !matches!(self, Self::Ordinary)
1012    }
1013
1014    /// Returns whether the cell is a pruned branch.
1015    #[inline]
1016    pub const fn is_pruned_branch(self) -> bool {
1017        matches!(self, Self::PrunedBranch)
1018    }
1019
1020    /// Returns whether this cell is a library cell.
1021    #[inline(always)]
1022    pub const fn is_library(self) -> bool {
1023        matches!(self, Self::LibraryReference)
1024    }
1025
1026    /// Encodes cell type as byte.
1027    #[inline]
1028    pub const fn to_byte(self) -> u8 {
1029        match self {
1030            CellType::Ordinary => 0xff,
1031            CellType::PrunedBranch => 1,
1032            CellType::LibraryReference => 2,
1033            CellType::MerkleProof => 3,
1034            CellType::MerkleUpdate => 4,
1035        }
1036    }
1037
1038    /// Decodes any cell type from byte.
1039    #[inline]
1040    pub const fn from_byte(byte: u8) -> Option<Self> {
1041        Some(match byte {
1042            0xff => CellType::Ordinary,
1043            1 => CellType::PrunedBranch,
1044            2 => CellType::LibraryReference,
1045            3 => CellType::MerkleProof,
1046            4 => CellType::MerkleUpdate,
1047            _ => return None,
1048        })
1049    }
1050
1051    /// Decodes exotic cell type from byte.
1052    #[inline]
1053    pub const fn from_byte_exotic(byte: u8) -> Option<Self> {
1054        Some(match byte {
1055            1 => CellType::PrunedBranch,
1056            2 => CellType::LibraryReference,
1057            3 => CellType::MerkleProof,
1058            4 => CellType::MerkleUpdate,
1059            _ => return None,
1060        })
1061    }
1062}
1063
impl From<CellType> for u8 {
    #[inline]
    fn from(cell_type: CellType) -> u8 {
        // Same encoding as `CellType::to_byte` (`Ordinary` maps to `0xff`).
        cell_type.to_byte()
    }
}
1070
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for CellType {
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        // Pick one of the five variants from the unstructured input.
        static ALL_TYPES: [CellType; 5] = [
            CellType::Ordinary,
            CellType::PrunedBranch,
            CellType::LibraryReference,
            CellType::MerkleProof,
            CellType::MerkleUpdate,
        ];

        u.choose(&ALL_TYPES).copied()
    }

    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        // Exactly one byte of input is consumed per generated value.
        (1, Some(1))
    }
}
1091
/// Tightly packed info about a cell.
#[derive(Hash, Debug, Clone, Copy)]
#[repr(C)]
pub struct CellDescriptor {
    /// First descriptor byte with a generic info about cell.
    ///
    /// Layout (LSB to MSB): 3 bits reference count, 1 bit `is_exotic`,
    /// 1 bit `store_hashes`, 3 bits level mask (see the `*_MASK` consts).
    pub d1: u8,
    /// Second descriptor byte with a packed data size.
    ///
    /// Stores `floor(bit_len / 8) + ceil(bit_len / 8)`, so the lowest bit
    /// is set iff the data is not byte-aligned.
    pub d2: u8,
}
1101
impl CellDescriptor {
    /// Bit mask to store the number of references in the descriptor.
    pub const REF_COUNT_MASK: u8 = 0b0000_0111;
    /// Bit mask to store the `is_exotic` flag in the descriptor.
    pub const IS_EXOTIC_MASK: u8 = 0b0000_1000;
    /// Bit mask to store the `store_hashes` flag in the descriptor.
    pub const STORE_HASHES_MASK: u8 = 0b0001_0000;
    /// _de Brujn_ level presence mask in the descriptor.
    pub const LEVEL_MASK: u8 = 0b1110_0000;

    /// Computes d1 descriptor byte from parts
    #[inline(always)]
    pub const fn compute_d1(level_mask: LevelMask, is_exotic: bool, ref_count: u8) -> u8 {
        // bits 5..=7: level mask, bit 3: exotic flag, bits 0..=2: ref count
        // (bit 4, `store_hashes`, is left unset here).
        (level_mask.0 << 5) | ((is_exotic as u8) << 3) | (ref_count & Self::REF_COUNT_MASK)
    }

    /// Computes d2 descriptor byte from cell length in bits
    #[inline(always)]
    pub const fn compute_d2(bit_len: u16) -> u8 {
        // d2 = floor(bit_len / 8) + ceil(bit_len / 8):
        // `(bit_len >> 2) & !1` equals `2 * floor(bit_len / 8)`, and the
        // lowest bit is set iff the data is not byte-aligned.
        (((bit_len >> 2) as u8) & !0b1) | ((bit_len % 8 != 0) as u8)
    }

    /// Constructs cell descriptor from descriptor bytes.
    #[inline(always)]
    pub const fn new(bytes: [u8; 2]) -> Self {
        Self {
            d1: bytes[0],
            d2: bytes[1],
        }
    }

    /// Creates a new descriptor with a new mask, shifted by the offset.
    #[must_use]
    pub const fn virtualize(mut self, offset: u8) -> Self {
        // Shift only the level-mask bits of d1 down by `offset`,
        // keeping the remaining flags and ref count untouched.
        let virtualized_mask = (self.d1 >> offset) & Self::LEVEL_MASK;
        self.d1 = virtualized_mask | (self.d1 & !Self::LEVEL_MASK);
        self
    }

    /// Computes cell type.
    pub fn cell_type(self) -> CellType {
        if self.d1 & Self::IS_EXOTIC_MASK == 0 {
            CellType::Ordinary
        } else {
            // Exotic cells are distinguished by their reference count:
            // 0 refs => library/pruned branch, 1 => proof, 2 => update.
            match self.d1 & Self::REF_COUNT_MASK {
                0 => {
                    // NOTE: zero mask <=> zero level
                    if self.d1 & Self::LEVEL_MASK == 0 {
                        CellType::LibraryReference
                    } else {
                        CellType::PrunedBranch
                    }
                }
                1 => CellType::MerkleProof,
                _ => CellType::MerkleUpdate,
            }
        }
    }

    /// Computes child cell count.
    #[inline(always)]
    pub const fn reference_count(self) -> u8 {
        self.d1 & Self::REF_COUNT_MASK
    }

    /// Computes hash count.
    ///
    /// NOTE: Guaranteed to be in range 1..=4.
    #[inline(always)]
    pub const fn hash_count(self) -> u8 {
        let level = self.level_mask().level();
        // The condition matches `is_pruned_branch` below.
        if self.is_exotic() && self.reference_count() == 0 && level > 0 {
            1 // pruned branch always has 1 hash
        } else {
            level + 1
        }
    }

    /// Returns whether the cell is not [`Ordinary`].
    ///
    /// [`Ordinary`]: CellType::Ordinary
    #[inline(always)]
    pub const fn is_exotic(self) -> bool {
        self.d1 & Self::IS_EXOTIC_MASK != 0
    }

    /// Returns whether this cell is a pruned branch cell.
    #[inline(always)]
    pub const fn is_pruned_branch(self) -> bool {
        // Exotic, no refs, non-zero level (see `cell_type`).
        self.is_exotic() && self.reference_count() == 0 && !self.level_mask().is_empty()
    }

    /// Returns whether this cell is a library cell.
    #[inline(always)]
    pub const fn is_library(self) -> bool {
        // Exotic, no refs, zero level (see `cell_type`).
        self.is_exotic() && self.reference_count() == 0 && self.level_mask().is_empty()
    }

    /// Returns whether this cell type is Merkle proof or Merkle update.
    #[inline(always)]
    pub const fn is_merkle(self) -> bool {
        self.is_exotic() && self.reference_count() != 0
    }

    /// Returns whether this cell refers to some external data.
    #[inline(always)]
    pub const fn is_absent(self) -> bool {
        // 0b0000_1111: exotic flag with all three ref-count bits set.
        self.d1 == (Self::REF_COUNT_MASK | Self::IS_EXOTIC_MASK)
    }

    /// Returns whether this cell should store hashes in data.
    #[inline(always)]
    pub const fn store_hashes(self) -> bool {
        self.d1 & Self::STORE_HASHES_MASK != 0
    }

    /// Computes level mask.
    #[inline(always)]
    pub const fn level_mask(self) -> LevelMask {
        // SAFETY: `u8 >> 5` is always 3 bits long
        unsafe { LevelMask::new_unchecked(self.d1 >> 5) }
    }

    /// Returns whether this cell's data is 8-bit aligned.
    #[inline(always)]
    pub const fn is_aligned(self) -> bool {
        // The lowest d2 bit marks a trailing partial byte (see `compute_d2`).
        self.d2 & 1 == 0
    }

    /// Returns this cell's data length in bytes.
    #[inline(always)]
    pub const fn byte_len(self) -> u8 {
        // ceil(bit_len / 8), recovered from the floor+ceil encoding of d2.
        (self.d2 & 1) + (self.d2 >> 1)
    }
}
1237
/// _de Brujn_ level presence bitset.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct LevelMask(u8);

impl LevelMask {
    /// Empty bitset.
    pub const EMPTY: Self = LevelMask(0);
    /// Max _de Brujn_ level.
    pub const MAX_LEVEL: u8 = 3;

    /// Constructs a new level mask, truncating extra bits.
    #[inline(always)]
    pub const fn new(mask: u8) -> Self {
        // Only the three lowest bits are meaningful.
        Self(mask & 0b111)
    }

    /// Returns true if there are no levels in mask.
    #[inline(always)]
    pub const fn is_empty(self) -> bool {
        self.0 == Self::EMPTY.0
    }

    /// Constructs a new level mask from the provided byte as is.
    ///
    /// # Safety
    ///
    /// The following must be true:
    /// - Mask must be in range `0b000..=0b111`.
    #[inline(always)]
    pub const unsafe fn new_unchecked(mask: u8) -> Self {
        Self(mask)
    }

    /// Creates a sufficient mask for the specified level.
    ///
    /// NOTE: levels > 3 has no effect (mask will always be `0b111`).
    #[inline(always)]
    pub const fn from_level(level: u8) -> Self {
        // A level `n` requires the `n` lowest mask bits to be set.
        Self(match level {
            0 => 0b000,
            1 => 0b001,
            2 => 0b011,
            _ => 0b111,
        })
    }

    /// Counts presented higher hashes.
    pub const fn level(self) -> u8 {
        // The level is the number of set bits in the (3-bit) mask.
        self.0.count_ones() as u8
    }

    /// Computes hash index for the specified level.
    pub const fn hash_index(self, level: u8) -> u8 {
        // Number of mask bits present at or below `level`.
        Self(self.0 & Self::from_level(level).0).level()
    }

    /// Returns whether the specified level is included into the mask.
    pub const fn contains(self, level: u8) -> bool {
        // Level 0 is always present; higher levels check their mask bit.
        level == 0 || (self.0 >> (level - 1)) & 1 != 0
    }

    /// Creates a new mask, shifted by the offset.
    #[inline(always)]
    #[must_use]
    pub const fn virtualize(self, offset: u8) -> Self {
        Self(self.0 >> offset)
    }

    /// Encodes level mask as byte.
    #[inline]
    pub const fn to_byte(self) -> u8 {
        self.0
    }
}
1312
impl IntoIterator for LevelMask {
    type Item = u8;
    type IntoIter = LevelMaskIter;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        // Include zero level
        // Shifting the mask left by one and setting bit 0 makes the
        // iterator yield level 0 first, then each level present in the mask.
        LevelMaskIter(1 | (self.0 << 1))
    }
}
1323
impl PartialEq<u8> for LevelMask {
    #[inline]
    fn eq(&self, other: &u8) -> bool {
        // Compare directly against the raw mask byte.
        self.0 == *other
    }
}

impl BitOr for LevelMask {
    type Output = Self;

    #[inline(always)]
    fn bitor(self, rhs: Self) -> Self::Output {
        // Union of two level bitsets.
        Self(self.0 | rhs.0)
    }
}

impl BitOrAssign for LevelMask {
    #[inline(always)]
    fn bitor_assign(&mut self, rhs: Self) {
        // In-place union of two level bitsets.
        self.0 |= rhs.0;
    }
}

impl From<LevelMask> for u8 {
    #[inline(always)]
    fn from(m: LevelMask) -> u8 {
        m.0
    }
}

impl std::fmt::Debug for LevelMask {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Always print exactly three binary digits, e.g. `010`.
        f.write_fmt(format_args!("{:03b}", self.0))
    }
}
1359
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for LevelMask {
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        // Any 3-bit value is a valid mask; `new` truncates just in case.
        u.int_in_range(0b000..=0b111).map(Self::new)
    }

    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        // Exactly one byte of input is consumed per generated value.
        (1, Some(1))
    }
}
1371
/// _de Brujn_ level presence bitset iterator.
#[derive(Clone)]
pub struct LevelMaskIter(u8);

impl LevelMaskIter {
    /// Returns `true` if all levels were consumed.
    pub fn is_empty(&self) -> bool {
        self.0 == 0
    }
}

impl Iterator for LevelMaskIter {
    type Item = u8;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.0 == 0 {
            return None;
        }

        // The position of the lowest set bit is the next level to yield.
        let level = self.0.trailing_zeros() as u8;

        // Clear the lowest set bit so each level is yielded exactly once.
        self.0 &= self.0 - 1;

        Some(level)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Each remaining set bit corresponds to exactly one item.
        let remaining = self.0.count_ones() as usize;
        (remaining, Some(remaining))
    }
}
1406
/// A size of a cell.
#[derive(Debug, Default, Clone, Copy, Eq, PartialEq)]
pub struct Size {
    /// Total number of bits in cell slice.
    pub bits: u16,
    /// Total number of refs in cell slice.
    pub refs: u8,
}
1415
1416impl Size {
1417    /// The additive identity for this type, i.e. `0`.
1418    pub const ZERO: Self = Self { bits: 0, refs: 0 };
1419
1420    /// The multiplicative bits identity for this type, i.e. `1 bit`.
1421    pub const BIT: Self = Self { bits: 1, refs: 0 };
1422
1423    /// The multiplicative refs identity for this type, i.e. `1 ref`.
1424    pub const REF: Self = Self { bits: 0, refs: 1 };
1425
1426    /// The largest valid value that can be represented by this type.
1427    pub const MAX: Self = Self {
1428        bits: MAX_BIT_LEN,
1429        refs: MAX_REF_COUNT as _,
1430    };
1431
1432    /// Returns true if the number of bits and refs is in the valid range for the cell.
1433    #[inline]
1434    pub const fn fits_into_cell(&self) -> bool {
1435        self.bits <= MAX_BIT_LEN && self.refs <= MAX_REF_COUNT as _
1436    }
1437
1438    /// Saturating size addition. Computes self + rhs for bits and refs,
1439    /// saturating at the numeric bounds instead of overflowing.
1440    #[inline]
1441    pub const fn saturating_add(self, rhs: Self) -> Self {
1442        Self {
1443            bits: self.bits.saturating_add(rhs.bits),
1444            refs: self.refs.saturating_add(rhs.refs),
1445        }
1446    }
1447
1448    /// Saturating size substraction. Computes self - rhs for bits and refs,
1449    /// saturating at the numeric bounds instead of overflowing.
1450    #[inline]
1451    pub const fn saturating_sub(self, rhs: Self) -> Self {
1452        Self {
1453            bits: self.bits.saturating_sub(rhs.bits),
1454            refs: self.refs.saturating_sub(rhs.refs),
1455        }
1456    }
1457
1458    /// Returns true if there are no bits and refs.
1459    pub const fn is_zero(self) -> bool {
1460        self.bits == 0 && self.refs == 0
1461    }
1462}
1463
impl From<Size> for CellTreeStats {
    #[inline]
    fn from(value: Size) -> Self {
        // Each ref counts as one cell when widening a slice size into stats.
        Self {
            bit_count: value.bits as _,
            cell_count: value.refs as _,
        }
    }
}
1473
impl std::ops::Add for Size {
    type Output = Self;

    #[inline]
    fn add(mut self, rhs: Self) -> Self::Output {
        // Delegates to `AddAssign` below.
        self += rhs;
        self
    }
}

impl std::ops::AddAssign for Size {
    #[inline]
    fn add_assign(&mut self, rhs: Self) {
        // NOTE: plain arithmetic; unlike `Size::saturating_add`, an overflow
        // here follows the build profile's overflow behavior.
        self.bits += rhs.bits;
        self.refs += rhs.refs;
    }
}

impl std::ops::Sub for Size {
    type Output = Self;

    #[inline]
    fn sub(mut self, rhs: Self) -> Self::Output {
        // Delegates to `SubAssign` below.
        self -= rhs;
        self
    }
}

impl std::ops::SubAssign for Size {
    #[inline]
    fn sub_assign(&mut self, rhs: Self) {
        // NOTE: plain arithmetic; unlike `Size::saturating_sub`, an underflow
        // here follows the build profile's overflow behavior.
        self.bits -= rhs.bits;
        self.refs -= rhs.refs;
    }
}
1509
/// Cell tree storage stats.
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub struct CellTreeStats {
    /// Total number of bits in tree.
    pub bit_count: u64,
    /// Total number of cells in tree.
    pub cell_count: u64,
}

impl CellTreeStats {
    /// The additive identity for this type, i.e. `0`.
    pub const ZERO: Self = CellTreeStats {
        bit_count: 0,
        cell_count: 0,
    };
}
1526
impl std::ops::Add for CellTreeStats {
    type Output = Self;

    #[inline]
    fn add(self, rhs: Self) -> Self::Output {
        // Saturating addition: totals clamp at `u64::MAX` instead of overflowing.
        Self {
            bit_count: self.bit_count.saturating_add(rhs.bit_count),
            cell_count: self.cell_count.saturating_add(rhs.cell_count),
        }
    }
}

impl std::ops::AddAssign for CellTreeStats {
    #[inline]
    fn add_assign(&mut self, rhs: Self) {
        // Same saturating semantics as `Add`.
        self.bit_count = self.bit_count.saturating_add(rhs.bit_count);
        self.cell_count = self.cell_count.saturating_add(rhs.cell_count);
    }
}

impl std::ops::Add<Size> for CellTreeStats {
    type Output = Self;

    #[inline]
    fn add(self, rhs: Size) -> Self::Output {
        // A `Size` contributes its bits and one cell per ref.
        Self {
            bit_count: self.bit_count.saturating_add(rhs.bits as _),
            cell_count: self.cell_count.saturating_add(rhs.refs as _),
        }
    }
}

impl std::iter::Sum for CellTreeStats {
    #[inline]
    fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
        // Fold with the saturating `AddAssign` starting from zero.
        let mut res = Self::ZERO;
        for item in iter {
            res += item;
        }
        res
    }
}

impl std::ops::AddAssign<Size> for CellTreeStats {
    fn add_assign(&mut self, rhs: Size) {
        // A `Size` contributes its bits and one cell per ref.
        self.bit_count = self.bit_count.saturating_add(rhs.bits as _);
        self.cell_count = self.cell_count.saturating_add(rhs.refs as _);
    }
}

impl std::ops::Sub for CellTreeStats {
    type Output = Self;

    #[inline]
    fn sub(mut self, rhs: Self) -> Self::Output {
        // Delegates to `SubAssign` below.
        self -= rhs;
        self
    }
}

impl std::ops::SubAssign for CellTreeStats {
    #[inline]
    fn sub_assign(&mut self, rhs: Self) {
        // Saturating subtraction: totals clamp at zero instead of underflowing.
        self.bit_count = self.bit_count.saturating_sub(rhs.bit_count);
        self.cell_count = self.cell_count.saturating_sub(rhs.cell_count);
    }
}

impl std::ops::SubAssign<Size> for CellTreeStats {
    #[inline]
    fn sub_assign(&mut self, rhs: Size) {
        // Saturating subtraction of a `Size` (bits, one cell per ref).
        self.bit_count = self.bit_count.saturating_sub(rhs.bits as _);
        self.cell_count = self.cell_count.saturating_sub(rhs.refs as _);
    }
}
1602
/// A helper to track the size of the unique data in multiple cell trees.
///
/// NOTE: It uses hashes for deduplication, so you can only use it for
/// fully computed and valid trees.
pub struct StorageStat<'a> {
    /// Representation hashes of cells that were already counted.
    visited: ahash::HashSet<&'a HashBytes>,
    /// DFS stack of reference iterators for cells being traversed.
    stack: Vec<RefsIter<'a>>,
    /// Running totals over all visited cells.
    stats: CellTreeStats,
    /// Maximum number of cells to count before aborting.
    limit: usize,
}
1613
impl<'a> StorageStat<'a> {
    /// Recursively computes the count of distinct cells returning
    /// the total storage used by this dag taking into account the
    /// identification of equal cells.
    ///
    /// Root slice does not count as cell. A slice subrange of
    /// cells is used during computation.
    pub fn compute_for_slice<'b: 'a>(
        slice: &'a CellSlice<'b>,
        limit: usize,
    ) -> Option<CellTreeStats> {
        let mut this = Self::with_limit(limit);
        // `None` means the cell limit was reached before the traversal finished.
        if this.add_slice(slice) {
            Some(this.stats)
        } else {
            None
        }
    }

    /// Recursively computes the count of distinct cells returning
    /// the total storage used by this dag taking into account the
    /// identification of equal cells.
    pub fn compute_for_cell(cell: &'a DynCell, limit: usize) -> Option<CellTreeStats> {
        let mut this = Self::with_limit(limit);
        // `None` means the cell limit was reached before the traversal finished.
        if this.add_cell(cell) {
            Some(this.stats)
        } else {
            None
        }
    }

    /// Creates a new storage stat state with an explicit limit.
    pub fn with_limit(limit: usize) -> Self {
        Self {
            visited: Default::default(),
            stack: Vec::new(),
            stats: CellTreeStats::ZERO,
            limit,
        }
    }

    /// Creates a new storage stat state without a limit.
    pub fn unlimited() -> Self {
        Self::with_limit(usize::MAX)
    }

    /// Returns the current tree stats.
    pub fn stats(&self) -> CellTreeStats {
        self.stats
    }

    /// Merges current stats with the stats from the provided cell tree.
    ///
    /// Returns `false` if the limit was reached.
    pub fn add_cell(&mut self, cell: &'a DynCell) -> bool {
        // Already counted (deduplicated by representation hash).
        if !self.visited.insert(cell.repr_hash()) {
            return true;
        }

        // Count the root cell itself...
        // NOTE(review): the root is counted before any limit check.
        self.stats.bit_count += cell.bit_len() as u64;
        self.stats.cell_count += 1;

        // ...then walk its subtree iteratively.
        self.stack.clear();
        self.stack.push(cell.references());
        self.reduce_stack()
    }

    /// Merges current stats with the stats from the provided slice.
    ///
    /// Returns `false` if the limit was reached.
    pub fn add_slice(&mut self, slice: &CellSlice<'a>) -> bool {
        // The slice itself is not a cell: only its visible bits are counted.
        self.stats.bit_count += slice.size_bits() as u64;

        self.stack.clear();
        self.stack.push(slice.references());
        self.reduce_stack()
    }

    // Iterative depth-first traversal over the stack of reference iterators.
    // Returns `false` if the cell limit was reached.
    fn reduce_stack(&mut self) -> bool {
        'outer: while let Some(item) = self.stack.last_mut() {
            for cell in item.by_ref() {
                // Each distinct cell is counted exactly once.
                if !self.visited.insert(cell.repr_hash()) {
                    continue;
                }

                if self.stats.cell_count >= self.limit as u64 {
                    return false;
                }

                self.stats.bit_count += cell.bit_len() as u64;
                self.stats.cell_count += 1;

                // Descend into this child's references before continuing
                // with the remaining siblings at the current depth.
                let next = cell.references();
                if next.max > 0 {
                    self.stack.push(next);
                    continue 'outer;
                }
            }

            // All children at this depth were processed.
            self.stack.pop();
        }

        true
    }
}
1719
/// Helper struct to debug print the root cell.
#[derive(Clone, Copy)]
pub struct DebugCell<'a>(&'a DynCell);

impl std::fmt::Debug for DebugCell<'_> {
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Forwards to the cell's own `Debug` implementation.
        self.0.fmt(f)
    }
}
1730
/// Helper struct to print only the root cell in the cell tree.
#[derive(Clone, Copy)]
pub struct DisplayCellRoot<'a> {
    /// Cell to display.
    cell: &'a DynCell,
    /// Indentation depth; each level indents by two spaces.
    level: usize,
}

impl std::fmt::Display for DisplayCellRoot<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // TODO: encode on stack
        let data = hex::encode(self.cell.data());

        let indent = self.level * 2;
        if f.alternate() {
            // Alternate form (`{:#}`): hex-encoded data only.
            f.write_fmt(format_args!("{:indent$}{data}\n", ""))
        } else {
            // Full form: cell type with data, then a summary line with
            // bit length, ref count, level mask, repr depth and repr hash.
            let repr_depth = self.cell.depth(LevelMask::MAX_LEVEL);
            let repr_hash = self.cell.repr_hash();
            let descriptor = self.cell.descriptor();
            f.write_fmt(format_args!(
                "{:indent$}{:?}: {data}\n{:indent$}bits: {:>4}, refs: {}, l: {:?}, depth: {}, hash: {}\n",
                "",
                descriptor.cell_type(),
                "",
                self.cell.bit_len(),
                descriptor.reference_count(),
                descriptor.level_mask(),
                repr_depth,
                repr_hash,
            ))
        }
    }
}
1764
/// Helper struct to print all cells in the cell tree.
#[derive(Clone, Copy)]
pub struct DisplayCellTree<'a>(&'a DynCell);

impl std::fmt::Display for DisplayCellTree<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Iterative pre-order DFS; `level` tracks the indentation depth.
        let mut stack = vec![(0, self.0)];

        while let Some((level, cell)) = stack.pop() {
            ok!(std::fmt::Display::fmt(&DisplayCellRoot { cell, level }, f));

            let reference_count = cell.reference_count();
            // Push children in reverse so they are popped in original order.
            for i in (0..reference_count).rev() {
                if let Some(child) = cell.reference(i) {
                    stack.push((level + 1, child));
                }
            }
        }

        Ok(())
    }
}
1787
/// Max cell data capacity in bits.
pub const MAX_BIT_LEN: u16 = 1023;
/// Maximum number of child cells.
pub const MAX_REF_COUNT: usize = 4;
1792
#[cfg(test)]
mod tests {
    use super::*;

    // `LevelMask::level` must equal the popcount of the 3-bit mask.
    #[test]
    fn correct_level() {
        const LEVEL: [u8; 8] = [0, 1, 1, 2, 1, 2, 2, 3];

        for mask in 0b000..=0b111 {
            assert_eq!(LevelMask(mask).level(), LEVEL[mask as usize]);
        }
    }

    // Virtualization must shift only the level mask bits of d1,
    // leaving every other descriptor property unchanged.
    #[test]
    fn virtualize_descriptor() {
        let level_mask = LevelMask(0b111);
        let desc = CellDescriptor::new([
            CellDescriptor::compute_d1(level_mask, false, 3),
            CellDescriptor::compute_d2(123),
        ]);

        assert_eq!(desc.level_mask(), level_mask);

        for i in 0..3 {
            let v_desc = desc.virtualize(i);

            assert_eq!(v_desc.cell_type(), desc.cell_type());
            assert_eq!(v_desc.reference_count(), desc.reference_count());
            assert_eq!(v_desc.is_exotic(), desc.is_exotic());
            assert_eq!(v_desc.store_hashes(), desc.store_hashes());
            assert_eq!(v_desc.is_aligned(), desc.is_aligned());
            assert_eq!(v_desc.byte_len(), desc.byte_len());

            assert_eq!(v_desc.level_mask(), level_mask.virtualize(i));
        }
    }

    // `hash_index(level)` counts mask bits at or below `level`.
    #[test]
    fn correct_hash_index() {
        const HASH_INDEX_TABLE: [[u8; 4]; 8] = [
            // index      // mask
            [0, 0, 0, 0], // 000
            [0, 1, 1, 1], // 001
            [0, 0, 1, 1], // 010
            [0, 1, 2, 2], // 011
            [0, 0, 0, 1], // 100
            [0, 1, 1, 2], // 101
            [0, 0, 1, 2], // 110
            [0, 1, 2, 3], // 111
        ];

        for mask in 0b000..=0b111 {
            let mask = LevelMask(mask);

            for i in 0..=3 {
                let hash_index = mask.hash_index(i);
                assert_eq!(hash_index, HASH_INDEX_TABLE[mask.0 as usize][i as usize]);
            }
        }
    }

    // Virtualizing a stack of pruned branches must step down one level
    // at a time to the hashes/depths of the cells they were built from,
    // and be a no-op once level 0 is reached.
    #[test]
    fn ultra_virtual_cell_by_ref() {
        let cell = Cell::empty_cell();

        let pruned1 =
            crate::merkle::make_pruned_branch(cell.as_ref(), 0, Cell::empty_context()).unwrap();

        let pruned2 =
            crate::merkle::make_pruned_branch(pruned1.as_ref(), 1, Cell::empty_context()).unwrap();

        let pruned3 =
            crate::merkle::make_pruned_branch(pruned2.as_ref(), 2, Cell::empty_context()).unwrap();

        // Level 3 -> 2
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), pruned2.repr_hash());
        assert_eq!(pruned3.repr_depth(), pruned2.repr_depth());

        // Level 2 -> 1
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), pruned1.repr_hash());
        assert_eq!(pruned3.repr_depth(), pruned1.repr_depth());

        // Level 1 -> 0
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());

        // Level 0 -> 0
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());

        // Level 0 -> 0 (just in case)
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());
    }

    // Each tag (#12/#34/#56) stores a different subset of `since_tag`
    // fields; the stored layout and the roundtrip must both match.
    #[test]
    fn versioned_store_load() {
        #[derive(Debug, Clone, Copy, Eq, PartialEq, Store, Load)]
        #[tlb(tag = ["#12", "#34", "#56"])]
        struct Versioned {
            old_field1: u32,
            #[tlb(since_tag = 1)]
            new_field1: u32,
            old_field2: bool,
            #[tlb(since_tag = 2)]
            new_field2: bool,
        }

        // Oldest layout (tag 0x12): only pre-v1 fields are serialized.
        let old = Versioned {
            old_field1: 123,
            new_field1: 0,
            old_field2: true,
            new_field2: false,
        };
        let cell = CellBuilder::build_from(old).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x12);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert!(slice.load_bit().unwrap());
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            old,
        );

        // Middle layout (tag 0x34): `new_field1` is also serialized.
        let new = Versioned {
            old_field1: 123,
            new_field1: 456,
            old_field2: true,
            new_field2: false,
        };
        let cell = CellBuilder::build_from(new).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 32 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x34);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert_eq!(slice.load_u32().unwrap(), 456);
            assert!(slice.load_bit().unwrap());
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            new
        );

        // Newest layout (tag 0x56): all fields are serialized.
        let too_new = Versioned {
            old_field1: 123,
            new_field1: 0,
            old_field2: true,
            new_field2: true,
        };
        let cell = CellBuilder::build_from(too_new).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 32 + 1 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x56);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert_eq!(slice.load_u32().unwrap(), 0);
            assert!(slice.load_bit().unwrap());
            assert!(slice.load_bit().unwrap());
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            too_new
        );
    }
}
1972}