// tycho_types/cell/mod.rs
1//! Cell tree implementation.
2
3use std::ops::{BitOr, BitOrAssign};
4use std::str::FromStr;
5
6pub use tycho_types_proc::{Load, Store};
7
8pub use self::builder::{
9    CellBuilder, CellDataBuilder, CellRefsBuilder, DisplayCellBuilderData, Store,
10};
11pub use self::cell_context::{CellContext, CellParts, LoadMode};
12#[cfg(not(feature = "sync"))]
13pub use self::cell_impl::rc::{Cell, CellInner, WeakCell};
14#[cfg(feature = "sync")]
15pub use self::cell_impl::sync::{Cell, CellInner, WeakCell};
16pub use self::cell_impl::{SafeDeleter, StaticCell, VirtualCellWrapper};
17pub use self::lazy::{Lazy, LazyExotic};
18pub use self::slice::{
19    CellSlice, CellSliceParts, CellSliceRange, DisplayCellSliceData, ExactSize, Load, LoadCell,
20};
21pub use self::usage_tree::{UsageTree, UsageTreeMode, UsageTreeWithSubtrees};
22use crate::error::{Error, ParseHashBytesError};
23use crate::util::Bitstring;
24
/// Generic cell implementation.
mod cell_impl;

/// Traits for gas accounting and resolving exotic cells.
mod cell_context;

/// Cell view utils.
mod slice;

/// Cell creation utils.
mod builder;

/// Lazy-loaded cell data.
mod lazy;

/// Cell usage tracking.
mod usage_tree;
41
#[cfg(feature = "sync")]
#[doc(hidden)]
mod __checks {
    use super::*;

    // Compile-time guarantees that the `sync` cell family is `Send`.
    assert_impl_all!(Cell: Send);
    assert_impl_all!(CellSlice: Send);
    assert_impl_all!(CellBuilder: Send);
}
51
/// Marker trait which allows casting lazy-loaded data.
pub trait EquivalentRepr<T> {}

/// Every type is equivalent to itself.
impl<T> EquivalentRepr<T> for T {}
57
/// Cell implementation family.
pub trait CellFamily: Sized {
    /// The default cell context type.
    type EmptyCellContext: CellContext;

    /// Creates an empty cell.
    ///
    /// NOTE: in most cases empty cell is ZST.
    fn empty_cell() -> Cell;

    /// Returns a static reference to the empty cell.
    fn empty_cell_ref() -> &'static DynCell;

    /// Creates an empty cell context.
    fn empty_context() -> &'static Self::EmptyCellContext;

    /// Returns a static reference to the cell with all zeros.
    fn all_zeros_ref() -> &'static DynCell;

    /// Returns a static reference to the cell with all ones.
    fn all_ones_ref() -> &'static DynCell;

    /// Creates a virtualized cell from the specified cell.
    fn virtualize(cell: Cell) -> Cell;
}
83
/// Dyn trait type alias.
#[cfg(not(feature = "sync"))]
pub type DynCell = dyn CellImpl;

/// Dyn trait type alias (thread-safe flavor for the `sync` feature).
#[cfg(feature = "sync")]
pub type DynCell = dyn CellImpl + Send + Sync;
91
// Identity `AsRef` impl so `&DynCell` satisfies `impl AsRef<DynCell>` bounds.
impl AsRef<DynCell> for DynCell {
    #[inline(always)]
    fn as_ref(&self) -> &Self {
        self
    }
}

// Identity `AsMut` counterpart of the impl above.
impl AsMut<DynCell> for DynCell {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut Self {
        self
    }
}
105
/// Represents the interface of a well-formed cell.
///
/// Since all basic operations are implemented via dynamic dispatch,
/// all high-level helper methods are implemented for [`DynCell`].
pub trait CellImpl {
    /// Unwraps the root cell from the usage tracking.
    fn untrack(self: CellInner<Self>) -> Cell;

    /// Returns cell descriptor.
    ///
    /// # See also
    ///
    /// Cell descriptor contains some tightly packed info about the cell.
    /// If you want convenient methods to access it use:
    /// [`cell_type`], [`level_mask`], [`reference_count`], [`is_exotic`]
    ///
    /// [`cell_type`]: CellDescriptor::cell_type
    /// [`level_mask`]: CellDescriptor::level_mask
    /// [`reference_count`]: CellDescriptor::reference_count
    /// [`is_exotic`]: CellDescriptor::is_exotic
    fn descriptor(&self) -> CellDescriptor;

    /// Returns the raw data of this cell.
    fn data(&self) -> &[u8];

    /// Returns the data size of this cell in bits.
    fn bit_len(&self) -> u16;

    /// Returns a reference to the Nth child cell.
    fn reference(&self, index: u8) -> Option<&DynCell>;

    /// Returns the Nth child cell.
    fn reference_cloned(&self, index: u8) -> Option<Cell>;

    /// Returns a repr hash of the Nth child cell.
    ///
    /// This method is needed when the underlying implementation
    /// lazily loads child cells. With this method there is
    /// no need to load anything and it just returns the hash.
    ///
    /// NOTE: This hash is not affected by virtualization.
    fn reference_repr_hash(&self, index: u8) -> Option<HashBytes>;

    /// Returns this cell as a virtualized cell, so that all hashes
    /// and depths will have an offset.
    fn virtualize(&self) -> &DynCell;

    /// Returns cell hash for the specified level.
    ///
    /// Cell representation hash is the hash at the maximum level ([`LevelMask::MAX_LEVEL`]).
    /// Use `repr_hash` as a simple alias for this.
    fn hash(&self, level: u8) -> &HashBytes;

    /// Returns cell depth for the specified level.
    fn depth(&self, level: u8) -> u16;
}
162
impl DynCell {
    /// Computes cell type from descriptor bytes.
    #[inline]
    pub fn cell_type(&self) -> CellType {
        self.descriptor().cell_type()
    }

    /// Computes the cell level from the level mask.
    #[inline]
    pub fn level(&self) -> u8 {
        self.descriptor().level_mask().level()
    }

    /// Computes the level mask from the descriptor bytes.
    #[inline]
    pub fn level_mask(&self) -> LevelMask {
        self.descriptor().level_mask()
    }

    /// Computes the number of child cells from descriptor bytes.
    #[inline]
    pub fn reference_count(&self) -> u8 {
        self.descriptor().reference_count()
    }

    /// Tries to load the specified child cell as slice.
    /// Returns an error if the loaded cell is absent or is pruned.
    pub fn get_reference_as_slice(&self, index: u8) -> Result<CellSlice<'_>, Error> {
        match self.reference(index) {
            Some(cell) => CellSlice::new(cell),
            None => Err(Error::CellUnderflow),
        }
    }

    /// Returns whether the cell is not [`Ordinary`].
    ///
    /// [`Ordinary`]: CellType::Ordinary
    #[inline]
    pub fn is_exotic(&self) -> bool {
        self.descriptor().is_exotic()
    }

    /// Returns a representation hash of the cell
    /// (the hash at [`LevelMask::MAX_LEVEL`]).
    #[inline]
    pub fn repr_hash(&self) -> &HashBytes {
        self.hash(LevelMask::MAX_LEVEL)
    }

    /// Returns a representation depth of the cell
    /// (the depth at [`LevelMask::MAX_LEVEL`]).
    #[inline]
    pub fn repr_depth(&self) -> u16 {
        self.depth(LevelMask::MAX_LEVEL)
    }

    /// Returns `true` if any of cell levels has the maximum depth.
    pub fn has_max_depth(&self) -> bool {
        for level in self.descriptor().level_mask() {
            if self.depth(level) == u16::MAX {
                return true;
            }
        }
        false
    }

    /// Returns true if the cell is empty (no bits, no refs).
    // Compares the repr hash with the well-known empty-cell hash instead of
    // reading data/refs directly.
    pub fn is_empty(&self) -> bool {
        self.hash(LevelMask::MAX_LEVEL) == EMPTY_CELL_HASH
    }

    /// Creates an iterator through child nodes.
    #[inline]
    pub fn references(&self) -> RefsIter<'_> {
        RefsIter {
            cell: self,
            max: self.reference_count(),
            index: 0,
        }
    }

    /// Creates an iterator over child hashes.
    #[inline]
    pub fn reference_repr_hashes(&self) -> RefsHashesIter<'_> {
        RefsHashesIter {
            cell: self,
            max: self.reference_count(),
            index: 0,
        }
    }

    /// Returns this cell as a cell slice.
    /// Returns an error if the cell is not ordinary.
    #[inline]
    pub fn as_slice(&'_ self) -> Result<CellSlice<'_>, Error> {
        CellSlice::new(self)
    }

    /// Returns this cell as a cell slice.
    ///
    /// Loads cell as is (no exotic-cell check).
    #[inline]
    pub fn as_slice_allow_exotic(&'_ self) -> CellSlice<'_> {
        CellSlice::new_allow_exotic(self)
    }

    /// Recursively computes the count of distinct cells returning
    /// the total storage used by this dag taking into account the
    /// identification of equal cells.
    pub fn compute_unique_stats(&self, limit: usize) -> Option<CellTreeStats> {
        StorageStat::compute_for_cell(self, limit)
    }

    /// Recursively traverses the cells tree without tracking a uniqueness
    /// of cells. Useful for adding small subtrees to merkle proofs.
    pub fn touch_recursive(&self) {
        self.data(); // Touch data to include it if `OnDataAccess` is used.
        for child in self.references() {
            child.touch_recursive();
        }
    }

    /// Returns an object that implements [`Debug`] for printing only
    /// the root cell of the cell tree.
    ///
    /// [`Debug`]: std::fmt::Debug
    #[inline]
    pub fn debug_root(&'_ self) -> DebugCell<'_> {
        DebugCell(self)
    }

    /// Returns an object that implements [`Display`] for printing only
    /// the root cell of the cell tree.
    ///
    /// [`Display`]: std::fmt::Display
    #[inline]
    pub fn display_root(&'_ self) -> DisplayCellRoot<'_> {
        DisplayCellRoot {
            cell: self,
            level: 0,
        }
    }

    /// Returns an object that implements [`Display`] for printing all
    /// cells in the cell tree.
    ///
    /// [`Display`]: std::fmt::Display
    #[inline]
    pub fn display_tree(&'_ self) -> DisplayCellTree<'_> {
        DisplayCellTree(self)
    }

    /// Returns an object which will display cell data as a bitstring
    /// with a termination bit.
    #[inline]
    pub fn display_data(&self) -> impl std::fmt::Display + std::fmt::Binary + '_ {
        // Local adapter: both `Display` (hex) and `Binary` delegate to
        // `Bitstring` over this cell's raw data.
        struct DisplayData<'a>(&'a DynCell);

        impl std::fmt::Display for DisplayData<'_> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                std::fmt::Display::fmt(
                    &Bitstring {
                        bytes: self.0.data(),
                        bit_len: self.0.bit_len(),
                    },
                    f,
                )
            }
        }

        impl std::fmt::Binary for DisplayData<'_> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                std::fmt::Binary::fmt(
                    &Bitstring {
                        bytes: self.0.data(),
                        bit_len: self.0.bit_len(),
                    },
                    f,
                )
            }
        }

        DisplayData(self)
    }

    /// Converts this cell into a slice and tries to load the specified type from it.
    /// Fails if the cell is not ordinary.
    ///
    /// NOTE: parsing `Cell` will load the first reference!
    #[inline]
    pub fn parse<'a, T: Load<'a>>(&'a self) -> Result<T, Error> {
        T::load_from(&mut ok!(self.as_slice()))
    }

    /// Loads an exotic cell.
    ///
    /// Returns [`Error::UnexpectedOrdinaryCell`] if the cell is ordinary.
    #[inline]
    pub fn parse_exotic<'a, T: LoadCell<'a>>(&'a self) -> Result<T, Error> {
        if self.is_exotic() {
            T::load_from_cell(self)
        } else {
            Err(Error::UnexpectedOrdinaryCell)
        }
    }
}
365
impl std::fmt::Debug for DynCell {
    // Formats as `Cell { ty: ..., hash: ... }` for the root cell only,
    // without traversing children.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        crate::util::debug_struct_field2_finish(
            f,
            "Cell",
            "ty",
            &self.cell_type(),
            "hash",
            self.repr_hash(),
        )
    }
}
378
impl Eq for DynCell {}

// Cells are compared by representation hash, not by structural traversal.
impl PartialEq<DynCell> for DynCell {
    #[inline]
    fn eq(&self, other: &DynCell) -> bool {
        self.repr_hash() == other.repr_hash()
    }
}
387
#[cfg(feature = "serde")]
impl serde::Serialize for DynCell {
    // Serializes the whole cell tree as a BOC: base64 string for
    // human-readable formats, raw bytes otherwise.
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        let boc = crate::boc::Boc::encode(self);
        if serializer.is_human_readable() {
            serializer.serialize_str(&crate::util::encode_base64(boc))
        } else {
            serializer.serialize_bytes(&boc)
        }
    }
}
399
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for Cell {
    /// Builds a random cell from an arbitrary [`CellBuilder`].
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        // NOTE(review): assumes a builder produced by `Arbitrary` always
        // builds successfully — confirm `build()` cannot fail here.
        Ok(u.arbitrary::<CellBuilder>()?.build().unwrap())
    }

    #[inline]
    fn size_hint(depth: usize) -> (usize, Option<usize>) {
        Self::try_size_hint(depth).unwrap_or_default()
    }

    // Delegates to the builder's hint since a cell is generated from one.
    #[inline]
    fn try_size_hint(
        depth: usize,
    ) -> Result<(usize, Option<usize>), arbitrary::MaxRecursionReached> {
        <CellBuilder as arbitrary::Arbitrary>::try_size_hint(depth)
    }
}
419
impl From<Cell> for CellSliceParts {
    // Pairs the cell with a range covering all of its bits and refs.
    #[inline]
    fn from(value: Cell) -> Self {
        (CellSliceRange::full(&value), value)
    }
}
426
/// An iterator through child nodes.
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct RefsIter<'a> {
    cell: &'a DynCell,
    // One past the last index yielded by `next_back`.
    max: u8,
    // Next index yielded by `next`.
    index: u8,
}
434
435impl<'a> RefsIter<'a> {
436    /// Returns a cell by children of which we are iterating.
437    #[inline]
438    pub fn cell(&self) -> &'a DynCell {
439        self.cell
440    }
441
442    /// Returns a reference to the next() value without advancing the iterator.
443    #[inline]
444    pub fn peek(&self) -> Option<&'a DynCell> {
445        if self.index >= self.max {
446            None
447        } else {
448            self.cell.reference(self.index)
449        }
450    }
451
452    /// Returns a cloned reference to the next() value without advancing the iterator.
453    #[inline]
454    pub fn peek_cloned(&self) -> Option<Cell> {
455        if self.index >= self.max {
456            None
457        } else {
458            self.cell.reference_cloned(self.index)
459        }
460    }
461
462    /// Returns a reference to the next_back() value without advancing the iterator.
463    #[inline]
464    pub fn peek_prev(&self) -> Option<&'a DynCell> {
465        if self.index > 0 {
466            self.cell.reference(self.index - 1)
467        } else {
468            None
469        }
470    }
471
472    /// Returns a cloned reference to the next_back() value without advancing the iterator.
473    #[inline]
474    pub fn peek_prev_cloned(&self) -> Option<Cell> {
475        if self.index > 0 {
476            self.cell.reference_cloned(self.index - 1)
477        } else {
478            None
479        }
480    }
481
482    /// Creates an iterator through child nodes which produces cloned references.
483    #[inline]
484    pub fn cloned(self) -> ClonedRefsIter<'a> {
485        ClonedRefsIter { inner: self }
486    }
487}
488
489impl Clone for RefsIter<'_> {
490    #[inline]
491    fn clone(&self) -> Self {
492        Self {
493            cell: self.cell,
494            max: self.max,
495            index: self.index,
496        }
497    }
498}
499
500impl<'a> Iterator for RefsIter<'a> {
501    type Item = &'a DynCell;
502
503    #[inline]
504    fn next(&mut self) -> Option<Self::Item> {
505        if self.index >= self.max {
506            None
507        } else {
508            let child = self.cell.reference(self.index);
509            self.index += 1;
510            child
511        }
512    }
513
514    #[inline]
515    fn size_hint(&self) -> (usize, Option<usize>) {
516        let remaining = self.max.saturating_sub(self.index) as usize;
517        (remaining, Some(remaining))
518    }
519}
520
521impl DoubleEndedIterator for RefsIter<'_> {
522    #[inline]
523    fn next_back(&mut self) -> Option<Self::Item> {
524        if self.max > self.index {
525            self.max -= 1;
526            self.cell.reference(self.max)
527        } else {
528            None
529        }
530    }
531}
532
533impl ExactSizeIterator for RefsIter<'_> {
534    #[inline]
535    fn len(&self) -> usize {
536        self.size_hint().0
537    }
538}
539
/// An iterator through child nodes which produces cloned references.
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct ClonedRefsIter<'a> {
    // Borrowing iterator this adapter delegates to.
    inner: RefsIter<'a>,
}
545
impl<'a> ClonedRefsIter<'a> {
    /// Returns a cell by children of which we are iterating.
    #[inline]
    pub fn cell(&self) -> &'a DynCell {
        self.inner.cell
    }

    /// Returns a reference to the next() value without advancing the iterator.
    #[inline]
    pub fn peek(&self) -> Option<Cell> {
        self.inner.peek_cloned()
    }

    /// Returns a reference to the next_back() value without advancing the iterator.
    #[inline]
    pub fn peek_prev(&self) -> Option<Cell> {
        self.inner.peek_prev_cloned()
    }
}
565
566impl Clone for ClonedRefsIter<'_> {
567    #[inline]
568    fn clone(&self) -> Self {
569        Self {
570            inner: self.inner.clone(),
571        }
572    }
573}
574
575impl Iterator for ClonedRefsIter<'_> {
576    type Item = Cell;
577
578    #[inline]
579    fn next(&mut self) -> Option<Self::Item> {
580        if self.inner.index >= self.inner.max {
581            None
582        } else {
583            let child = self.inner.cell.reference_cloned(self.inner.index);
584            self.inner.index += 1;
585            child
586        }
587    }
588
589    #[inline]
590    fn size_hint(&self) -> (usize, Option<usize>) {
591        self.inner.size_hint()
592    }
593}
594
595impl DoubleEndedIterator for ClonedRefsIter<'_> {
596    #[inline]
597    fn next_back(&mut self) -> Option<Self::Item> {
598        if self.inner.max > self.inner.index {
599            self.inner.max -= 1;
600            self.inner.cell.reference_cloned(self.inner.max)
601        } else {
602            None
603        }
604    }
605}
606
607impl ExactSizeIterator for ClonedRefsIter<'_> {
608    #[inline]
609    fn len(&self) -> usize {
610        self.size_hint().0
611    }
612}
613
/// An iterator over child hashes.
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct RefsHashesIter<'a> {
    cell: &'a DynCell,
    // One past the last index yielded by `next_back`.
    max: u8,
    // Next index yielded by `next`.
    index: u8,
}
621
622impl<'a> RefsHashesIter<'a> {
623    /// Returns a cell by children of which we are iterating.
624    #[inline]
625    pub fn cell(&self) -> &'a DynCell {
626        self.cell
627    }
628
629    /// Returns the `next()` value without advancing the iterator.
630    #[inline]
631    pub fn peek(&self) -> Option<HashBytes> {
632        if self.index >= self.max {
633            None
634        } else {
635            self.cell.reference_repr_hash(self.index)
636        }
637    }
638
639    /// Returns the `next_back()` value without advancing the iterator.
640    #[inline]
641    pub fn peek_prev(&self) -> Option<HashBytes> {
642        if let Some(index) = self.index.checked_sub(1) {
643            self.cell.reference_repr_hash(index)
644        } else {
645            None
646        }
647    }
648}
649
650impl Clone for RefsHashesIter<'_> {
651    #[inline]
652    fn clone(&self) -> Self {
653        Self {
654            cell: self.cell,
655            max: self.max,
656            index: self.index,
657        }
658    }
659}
660
661impl Iterator for RefsHashesIter<'_> {
662    type Item = HashBytes;
663
664    #[inline]
665    fn next(&mut self) -> Option<Self::Item> {
666        if self.index >= self.max {
667            None
668        } else {
669            let hash = self.cell.reference_repr_hash(self.index);
670            self.index += 1;
671            hash
672        }
673    }
674
675    #[inline]
676    fn size_hint(&self) -> (usize, Option<usize>) {
677        let remaining = self.max.saturating_sub(self.index) as usize;
678        (remaining, Some(remaining))
679    }
680}
681
682impl DoubleEndedIterator for RefsHashesIter<'_> {
683    #[inline]
684    fn next_back(&mut self) -> Option<Self::Item> {
685        if self.max > self.index {
686            self.max -= 1;
687            self.cell.reference_repr_hash(self.max)
688        } else {
689            None
690        }
691    }
692}
693
694impl ExactSizeIterator for RefsHashesIter<'_> {
695    #[inline]
696    fn len(&self) -> usize {
697        self.size_hint().0
698    }
699}
700
/// A 256-bit cell hash (newtype wrapper around `[u8; 32]`).
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[repr(transparent)]
pub struct HashBytes(pub [u8; 32]);
705
impl HashBytes {
    /// Array of zero bytes.
    pub const ZERO: Self = Self([0; 32]);

    /// Converts slice to a hash bytes.
    ///
    /// # Panics
    ///
    /// Panics if the length of the slice is not 32 bytes.
    #[inline]
    pub fn from_slice(slice: &[u8]) -> Self {
        Self(slice.try_into().expect("slice with incorrect length"))
    }

    /// Converts integer into zero-padded big-endian bytes.
    ///
    /// Returns `None` on overflow or negative values.
    #[cfg(feature = "bigint")]
    pub fn from_bigint(int: &num_bigint::BigInt) -> Option<Self> {
        if int.sign() == num_bigint::Sign::Minus {
            return None;
        }
        Self::from_biguint(int.magnitude())
    }

    /// Converts integer into zero-padded big-endian bytes.
    ///
    /// Returns `None` on overflow (more than 256 bits).
    #[cfg(feature = "bigint")]
    pub fn from_biguint(uint: &num_bigint::BigUint) -> Option<Self> {
        let mut bytes = uint.to_bytes_le();
        if bytes.len() > 32 {
            return None;
        }

        // Zero-pad the little-endian form to 32 bytes, then reverse to
        // obtain the zero-padded big-endian representation.
        bytes.resize(32, 0);
        bytes.reverse();
        Some(Self::from_slice(&bytes))
    }

    /// Converts integer into zero-padded big-endian bytes.
    ///
    /// Ignores all bits after 256th.
    #[cfg(feature = "bigint")]
    pub fn from_biguint_lossy(uint: &num_bigint::BigUint) -> Self {
        let mut bytes = uint.to_bytes_le();
        // `resize` both pads small values with zeros and truncates the
        // little-endian form, which drops the most significant bytes.
        bytes.resize(32, 0);
        bytes.reverse();
        Self::from_slice(&bytes)
    }

    /// Wraps a reference to an internal array into a newtype reference.
    #[inline(always)]
    pub const fn wrap(value: &[u8; 32]) -> &Self {
        // SAFETY: HashBytes is #[repr(transparent)]
        unsafe { &*(value as *const [u8; 32] as *const Self) }
    }

    /// Returns a slice containing the entire array.
    pub const fn as_slice(&self) -> &[u8] {
        self.0.as_slice()
    }

    /// Returns a mutable slice containing the entire array.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.0.as_mut_slice()
    }

    /// Returns an internal array.
    pub const fn as_array(&self) -> &[u8; 32] {
        &self.0
    }

    /// Returns a mutable internal array.
    pub fn as_mut_array(&mut self) -> &mut [u8; 32] {
        &mut self.0
    }

    /// Returns a raw pointer to the slice's buffer.
    #[inline(always)]
    pub const fn as_ptr(&self) -> *const u8 {
        &self.0 as *const [u8] as *const u8
    }

    /// Returns a first chunk of 8 bytes.
    pub const fn first_chunk(&self) -> &[u8; 8] {
        match self.0.first_chunk::<8>() {
            Some(chunk) => chunk,
            // SAFETY: the array is 32 bytes long, so an 8-byte prefix always
            // exists; this branch only satisfies the const-context match.
            None => unsafe { std::hint::unreachable_unchecked() },
        }
    }

    /// Returns `true` if all bytes are zero.
    pub const fn is_zero(&self) -> bool {
        // Manual index loop because iterators are not usable in `const fn`.
        let mut i = 0;
        while i < 32 {
            if self.0[i] != 0 {
                return false;
            }
            i += 1;
        }
        true
    }

    // Shared backend for the `LowerHex`/`UpperHex` impls below; `table`
    // selects the hex alphabet.
    #[inline(always)]
    fn fmt_hex(&self, f: &mut std::fmt::Formatter<'_>, table: &[u8; 16]) -> std::fmt::Result {
        let mut output = [0u8; 64];
        crate::util::encode_to_hex_slice(&self.0, &mut output, table).ok();

        // SAFETY: output is guaranteed to contain only [0-9a-f]
        let output = unsafe { std::str::from_utf8_unchecked(&output) };
        f.write_str(output)
    }

    /// Creates a bigint from bytes (as big-endian).
    #[cfg(feature = "bigint")]
    pub fn as_biguint(&self) -> num_bigint::BigUint {
        num_bigint::BigUint::from_bytes_be(&self.0)
    }

    /// Creates a bigint from bytes (as big-endian).
    #[cfg(feature = "bigint")]
    pub fn as_bigint(&self) -> num_bigint::BigInt {
        self.as_biguint().into()
    }
}
833
// The default hash is all zeros.
impl Default for HashBytes {
    #[inline(always)]
    fn default() -> Self {
        Self::ZERO
    }
}

impl AsRef<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn as_ref(&self) -> &[u8; 32] {
        &self.0
    }
}

impl AsRef<[u8]> for HashBytes {
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        self.0.as_ref()
    }
}

impl AsMut<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8; 32] {
        &mut self.0
    }
}

impl AsMut<[u8]> for HashBytes {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_mut()
    }
}

// `Borrow<[u8]>` allows using `HashBytes` as a map key looked up by byte slice.
impl std::borrow::Borrow<[u8]> for HashBytes {
    #[inline(always)]
    fn borrow(&self) -> &[u8] {
        &self.0
    }
}

impl std::borrow::BorrowMut<[u8]> for HashBytes {
    #[inline(always)]
    fn borrow_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

// The reverse direction: a raw `[u8; 32]` key can be looked up by `HashBytes`.
impl std::borrow::Borrow<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn borrow(&self) -> &HashBytes {
        HashBytes::wrap(self)
    }
}
889
// Symmetric comparisons with raw arrays and byte slices, so call sites can
// compare without wrapping/unwrapping.
impl PartialEq<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn eq(&self, other: &[u8; 32]) -> bool {
        &self.0 == other
    }
}

impl PartialEq<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn eq(&self, other: &HashBytes) -> bool {
        self == &other.0
    }
}

impl PartialEq<[u8]> for HashBytes {
    #[inline(always)]
    fn eq(&self, other: &[u8]) -> bool {
        self.0 == other
    }
}

impl PartialEq<HashBytes> for [u8] {
    #[inline(always)]
    fn eq(&self, other: &HashBytes) -> bool {
        self == other.0
    }
}
917
impl From<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn from(value: [u8; 32]) -> Self {
        Self(value)
    }
}

// SHA-256 output is exactly 32 bytes, so the conversion is lossless.
impl From<sha2::digest::Output<sha2::Sha256>> for HashBytes {
    #[inline(always)]
    fn from(value: sha2::digest::Output<sha2::Sha256>) -> Self {
        Self(value.into())
    }
}

// BLAKE3 output is also 32 bytes.
#[cfg(feature = "blake3")]
impl From<blake3::Hash> for HashBytes {
    #[inline(always)]
    fn from(value: blake3::Hash) -> Self {
        Self(value.into())
    }
}

impl From<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn from(value: HashBytes) -> Self {
        value.0
    }
}
946
947impl FromStr for HashBytes {
948    type Err = ParseHashBytesError;
949
950    fn from_str(s: &str) -> Result<Self, Self::Err> {
951        let mut result = Self::default();
952        match s.len() {
953            64 => {
954                if let Err(e) = hex::decode_to_slice(s, &mut result.0) {
955                    return Err(ParseHashBytesError::InvalidHex(e));
956                }
957            }
958            66 => {
959                if let Err(e) = hex::decode_to_slice(&s[2..], &mut result.0) {
960                    return Err(ParseHashBytesError::InvalidHex(e));
961                }
962            }
963            #[cfg(feature = "base64")]
964            44 => {
965                if let Err(e) = crate::util::decode_base64_slice(s, &mut result.0) {
966                    return Err(ParseHashBytesError::InvalidBase64(e));
967                }
968            }
969            _ => return Err(ParseHashBytesError::UnexpectedStringLength),
970        }
971        Ok(result)
972    }
973}
974
// `Display` renders lowercase hex (delegates to `LowerHex`).
impl std::fmt::Display for HashBytes {
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::LowerHex::fmt(self, f)
    }
}

impl std::fmt::LowerHex for HashBytes {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        Self::fmt_hex(self, f, crate::util::HEX_CHARS_LOWER)
    }
}

impl std::fmt::UpperHex for HashBytes {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        Self::fmt_hex(self, f, crate::util::HEX_CHARS_UPPER)
    }
}

// `Debug` intentionally matches `Display` (hex), not a byte-array dump.
impl std::fmt::Debug for HashBytes {
    #[inline(always)]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
    }
}
1000
// Forwards every indexing form supported by `[u8; 32]` (usize, ranges, ...).
impl<I> std::ops::Index<I> for HashBytes
where
    [u8; 32]: std::ops::Index<I>,
{
    type Output = <[u8; 32] as std::ops::Index<I>>::Output;

    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        std::ops::Index::index(&self.0, index)
    }
}

impl<I> std::ops::IndexMut<I> for HashBytes
where
    [u8; 32]: std::ops::IndexMut<I>,
{
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        std::ops::IndexMut::index_mut(&mut self.0, index)
    }
}
1022
// Uniformly random 32 bytes via the rand 0.8 API.
#[cfg(feature = "rand8")]
impl rand8::distributions::Distribution<HashBytes> for rand8::distributions::Standard {
    #[inline]
    fn sample<R: rand8::Rng + ?Sized>(&self, rng: &mut R) -> HashBytes {
        HashBytes(rand8::distributions::Standard.sample(rng))
    }
}

// Same, via the renamed rand 0.9 API.
#[cfg(feature = "rand9")]
impl rand9::distr::Distribution<HashBytes> for rand9::distr::StandardUniform {
    #[inline]
    fn sample<R: rand9::Rng + ?Sized>(&self, rng: &mut R) -> HashBytes {
        HashBytes(rand9::distr::StandardUniform.sample(rng))
    }
}
1038
#[cfg(feature = "serde")]
impl serde::Serialize for HashBytes {
    // Human-readable formats get lowercase hex; binary formats get raw bytes.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        if serializer.is_human_readable() {
            let mut output = [0u8; 64];
            crate::util::encode_to_hex_slice(&self.0, &mut output, crate::util::HEX_CHARS_LOWER)
                .ok();

            // SAFETY: output is guaranteed to contain only [0-9a-f]
            let output = unsafe { std::str::from_utf8_unchecked(&output) };
            serializer.serialize_str(output)
        } else {
            serializer.serialize_bytes(&self.0)
        }
    }
}
1058
#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for HashBytes {
    /// Deserializes from a 64-char hex string (human-readable formats,
    /// no `0x` prefix) or from raw 32 bytes (binary formats).
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use ::serde::de::{Error, Visitor};

        // Visitor for hex-encoded strings.
        struct HashBytesHexVisitor;

        impl Visitor<'_> for HashBytesHexVisitor {
            type Value = HashBytes;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("hex-encoded byte array of size 32")
            }

            fn visit_str<E: Error>(self, value: &str) -> Result<Self::Value, E> {
                let mut result = HashBytes([0; 32]);
                match hex::decode_to_slice(value, &mut result.0) {
                    Ok(()) => Ok(result),
                    Err(_) => Err(Error::invalid_value(
                        serde::de::Unexpected::Str(value),
                        &self,
                    )),
                }
            }
        }

        // Visitor for raw byte arrays.
        // NOTE: `pub` removed — visibility is meaningless for a type declared
        // inside a function body (and the sibling visitor above is private).
        struct HashBytesRawVisitor;

        impl Visitor<'_> for HashBytesRawVisitor {
            type Value = HashBytes;

            fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                f.write_fmt(format_args!("a byte array of size 32"))
            }

            fn visit_bytes<E: Error>(self, v: &[u8]) -> Result<Self::Value, E> {
                v.try_into()
                    .map(HashBytes)
                    .map_err(|_e| Error::invalid_length(v.len(), &self))
            }
        }

        if deserializer.is_human_readable() {
            deserializer.deserialize_str(HashBytesHexVisitor)
        } else {
            deserializer.deserialize_bytes(HashBytesRawVisitor)
        }
    }
}
1111
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for HashBytes {
    /// Builds a hash from 32 bytes of unstructured input.
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        let bytes = u.arbitrary()?;
        Ok(Self(bytes))
    }

    /// Consumes exactly 32 bytes of input.
    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        (32, Some(32))
    }
}
1124
/// Hash of an empty (0 bits of data, no refs) ordinary cell.
///
/// NOTE(review): the value is hard-coded; presumably it equals the
/// representation hash produced by this crate for an empty cell.
pub static EMPTY_CELL_HASH: &HashBytes = HashBytes::wrap(&[
    0x96, 0xa2, 0x96, 0xd2, 0x24, 0xf2, 0x85, 0xc6, 0x7b, 0xee, 0x93, 0xc3, 0x0f, 0x8a, 0x30, 0x91,
    0x57, 0xf0, 0xda, 0xa3, 0x5d, 0xc5, 0xb8, 0x7e, 0x41, 0x0b, 0x78, 0x63, 0x0a, 0x09, 0xcf, 0xc7,
]);
1130
/// Well-formed cell type.
#[derive(Default, Debug, Copy, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum CellType {
    /// Cell of this type just stores data and references.
    #[default]
    Ordinary,
    /// Exotic cell which was pruned from the original tree of cells
    /// when a Merkle proof has been created.
    PrunedBranch,
    /// Exotic cell with a reference to the cell with a library.
    LibraryReference,
    /// Exotic cell with one hash and one reference.
    MerkleProof,
    /// Exotic cell with two hashes and two references.
    MerkleUpdate,
}

impl CellType {
    /// Returns whether this cell type is Merkle proof or Merkle update.
    #[inline]
    pub const fn is_merkle(self) -> bool {
        matches!(self, Self::MerkleProof | Self::MerkleUpdate)
    }

    /// Returns whether the cell is not ordinary.
    #[inline]
    pub const fn is_exotic(self) -> bool {
        !matches!(self, Self::Ordinary)
    }

    /// Returns whether the cell is a pruned branch.
    #[inline]
    pub const fn is_pruned_branch(self) -> bool {
        matches!(self, Self::PrunedBranch)
    }

    /// Returns whether this cell is a library cell.
    #[inline(always)]
    pub const fn is_library(self) -> bool {
        matches!(self, Self::LibraryReference)
    }

    /// Encodes cell type as byte.
    ///
    /// Exotic types use their tag bytes `1..=4`; ordinary cells
    /// are encoded as `0xff`.
    #[inline]
    pub const fn to_byte(self) -> u8 {
        match self {
            Self::Ordinary => 0xff,
            Self::PrunedBranch => 1,
            Self::LibraryReference => 2,
            Self::MerkleProof => 3,
            Self::MerkleUpdate => 4,
        }
    }

    /// Decodes any cell type from byte.
    ///
    /// Inverse of [`CellType::to_byte`]; returns `None` for unknown bytes.
    #[inline]
    pub const fn from_byte(byte: u8) -> Option<Self> {
        match byte {
            0xff => Some(Self::Ordinary),
            1 => Some(Self::PrunedBranch),
            2 => Some(Self::LibraryReference),
            3 => Some(Self::MerkleProof),
            4 => Some(Self::MerkleUpdate),
            _ => None,
        }
    }

    /// Decodes exotic cell type from byte.
    ///
    /// Same as [`CellType::from_byte`] but rejects the ordinary marker.
    #[inline]
    pub const fn from_byte_exotic(byte: u8) -> Option<Self> {
        match byte {
            1 => Some(Self::PrunedBranch),
            2 => Some(Self::LibraryReference),
            3 => Some(Self::MerkleProof),
            4 => Some(Self::MerkleUpdate),
            _ => None,
        }
    }
}
1211
1212impl From<CellType> for u8 {
1213    #[inline]
1214    fn from(cell_type: CellType) -> u8 {
1215        cell_type.to_byte()
1216    }
1217}
1218
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for CellType {
    /// Picks one of the five variants uniformly from the input.
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        const VARIANTS: [CellType; 5] = [
            CellType::Ordinary,
            CellType::PrunedBranch,
            CellType::LibraryReference,
            CellType::MerkleProof,
            CellType::MerkleUpdate,
        ];

        u.choose(&VARIANTS).map(|ty| *ty)
    }

    /// Consumes exactly one byte of input.
    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        (1, Some(1))
    }
}
1239
/// Tightly packed info about a cell.
#[derive(Hash, Debug, Clone, Copy)]
#[repr(C)]
pub struct CellDescriptor {
    /// First descriptor byte with a generic info about cell.
    ///
    /// Layout (LSB first): bits 0..=2 - reference count, bit 3 - `is_exotic`
    /// flag, bit 4 - `store_hashes` flag, bits 5..=7 - level mask.
    pub d1: u8,
    /// Second descriptor byte with a packed data size.
    ///
    /// Stores `2 * floor(bit_len / 8)` in the upper 7 bits and a
    /// "not 8-bit aligned" flag in bit 0 (see [`CellDescriptor::compute_d2`]).
    pub d2: u8,
}

impl CellDescriptor {
    /// Bit mask to store the number of references in the descriptor.
    pub const REF_COUNT_MASK: u8 = 0b0000_0111;
    /// Bit mask to store the `is_exotic` flag in the descriptor.
    pub const IS_EXOTIC_MASK: u8 = 0b0000_1000;
    /// Bit mask to store the `store_hashes` flag in the descriptor.
    pub const STORE_HASHES_MASK: u8 = 0b0001_0000;
    /// _de Brujn_ level presence mask in the descriptor.
    pub const LEVEL_MASK: u8 = 0b1110_0000;

    /// Computes d1 descriptor byte from parts
    ///
    /// Level mask goes into bits 5..=7, the exotic flag into bit 3, and the
    /// reference count (truncated to 3 bits) into bits 0..=2.
    #[inline(always)]
    pub const fn compute_d1(level_mask: LevelMask, is_exotic: bool, ref_count: u8) -> u8 {
        (level_mask.0 << 5) | ((is_exotic as u8) << 3) | (ref_count & Self::REF_COUNT_MASK)
    }

    /// Computes d2 descriptor byte from cell length in bits
    ///
    /// `(bit_len >> 2) & !1` equals `2 * floor(bit_len / 8)`; bit 0 is set
    /// when the length is not a whole number of bytes.
    #[inline(always)]
    pub const fn compute_d2(bit_len: u16) -> u8 {
        (((bit_len >> 2) as u8) & !0b1) | (!bit_len.is_multiple_of(8) as u8)
    }

    /// Constructs cell descriptor from descriptor bytes.
    #[inline(always)]
    pub const fn new(bytes: [u8; 2]) -> Self {
        Self {
            d1: bytes[0],
            d2: bytes[1],
        }
    }

    /// Creates a new descriptor with a new mask, shifted by the offset.
    ///
    /// Only the level-mask bits of `d1` change; all other flags are kept.
    #[must_use]
    pub const fn virtualize(mut self, offset: u8) -> Self {
        let virtualized_mask = (self.d1 >> offset) & Self::LEVEL_MASK;
        self.d1 = virtualized_mask | (self.d1 & !Self::LEVEL_MASK);
        self
    }

    /// Computes cell type.
    ///
    /// Exotic cells are distinguished by their reference count:
    /// 0 refs with a non-zero level is a pruned branch, 0 refs with a zero
    /// level is a library reference, 1 ref is a Merkle proof, and anything
    /// else is treated as a Merkle update.
    pub fn cell_type(self) -> CellType {
        if self.d1 & Self::IS_EXOTIC_MASK == 0 {
            CellType::Ordinary
        } else {
            match self.d1 & Self::REF_COUNT_MASK {
                0 => {
                    // NOTE: zero mask <=> zero level
                    if self.d1 & Self::LEVEL_MASK == 0 {
                        CellType::LibraryReference
                    } else {
                        CellType::PrunedBranch
                    }
                }
                1 => CellType::MerkleProof,
                _ => CellType::MerkleUpdate,
            }
        }
    }

    /// Computes child cell count.
    #[inline(always)]
    pub const fn reference_count(self) -> u8 {
        self.d1 & Self::REF_COUNT_MASK
    }

    /// Computes hash count.
    ///
    /// NOTE: Guaranteed to be in range 1..=4.
    #[inline(always)]
    pub const fn hash_count(self) -> u8 {
        let level = self.level_mask().level();
        if self.is_exotic() && self.reference_count() == 0 && level > 0 {
            1 // pruned branch always has 1 hash
        } else {
            // `level()` is at most 3, so the result is at most 4.
            level + 1
        }
    }

    /// Returns whether the cell is not [`Ordinary`].
    ///
    /// [`Ordinary`]: CellType::Ordinary
    #[inline(always)]
    pub const fn is_exotic(self) -> bool {
        self.d1 & Self::IS_EXOTIC_MASK != 0
    }

    /// Returns whether this cell is a pruned branch cell.
    #[inline(always)]
    pub const fn is_pruned_branch(self) -> bool {
        self.is_exotic() && self.reference_count() == 0 && !self.level_mask().is_empty()
    }

    /// Returns whether this cell is a library cell.
    #[inline(always)]
    pub const fn is_library(self) -> bool {
        self.is_exotic() && self.reference_count() == 0 && self.level_mask().is_empty()
    }

    /// Returns whether this cell type is Merkle proof or Merkle update.
    #[inline(always)]
    pub const fn is_merkle(self) -> bool {
        self.is_exotic() && self.reference_count() != 0
    }

    /// Returns whether this cell refers to some external data.
    ///
    /// An "absent" cell has all refcount bits and the exotic flag set.
    #[inline(always)]
    pub const fn is_absent(self) -> bool {
        self.d1 == (Self::REF_COUNT_MASK | Self::IS_EXOTIC_MASK)
    }

    /// Returns whether this cell should store hashes in data.
    #[inline(always)]
    pub const fn store_hashes(self) -> bool {
        self.d1 & Self::STORE_HASHES_MASK != 0
    }

    /// Computes level mask.
    #[inline(always)]
    pub const fn level_mask(self) -> LevelMask {
        // SAFETY: `u8 >> 5` is always 3 bits long
        unsafe { LevelMask::new_unchecked(self.d1 >> 5) }
    }

    /// Returns whether this cell's data is 8-bit aligned.
    #[inline(always)]
    pub const fn is_aligned(self) -> bool {
        self.d2 & 1 == 0
    }

    /// Returns this cell's data length in bytes.
    ///
    /// Computes `ceil(bit_len / 8)`: bit 0 of `d2` adds the trailing
    /// partial byte.
    #[inline(always)]
    pub const fn byte_len(self) -> u8 {
        (self.d2 & 1) + (self.d2 >> 1)
    }
}
1385
/// _de Brujn_ level presence bitset.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct LevelMask(u8);

impl LevelMask {
    /// Empty bitset.
    pub const EMPTY: Self = LevelMask(0);
    /// Max _de Brujn_ level.
    pub const MAX_LEVEL: u8 = 3;

    /// Constructs a new level mask, truncating extra bits.
    #[inline(always)]
    pub const fn new(mask: u8) -> Self {
        Self(mask & 0b111)
    }

    /// Returns true if there are no levels in mask.
    #[inline(always)]
    pub const fn is_empty(self) -> bool {
        self.0 == 0
    }

    /// Constructs a new level mask from the provided byte as is.
    ///
    /// # Safety
    ///
    /// The following must be true:
    /// - Mask must be in range `0b000..=0b111`.
    #[inline(always)]
    pub const unsafe fn new_unchecked(mask: u8) -> Self {
        Self(mask)
    }

    /// Creates a sufficient mask for the specified level.
    ///
    /// NOTE: levels > 3 has no effect (mask will always be `0b111`).
    #[inline(always)]
    pub const fn from_level(level: u8) -> Self {
        Self(match level {
            0 => 0b000,
            1 => 0b001,
            2 => 0b011,
            _ => 0b111,
        })
    }

    /// Counts presented higher hashes.
    ///
    /// Equals the number of set bits; the invariant guarantees only the
    /// low three bits can be set.
    pub const fn level(self) -> u8 {
        self.0.count_ones() as u8
    }

    /// Computes hash index for the specified level.
    pub const fn hash_index(self, level: u8) -> u8 {
        // Count only the mask bits visible at `level`.
        let visible = self.0 & Self::from_level(level).0;
        Self(visible).level()
    }

    /// Returns whether the specified level is included into the mask.
    pub const fn contains(self, level: u8) -> bool {
        // Level 0 (the representation level) is always present.
        if level == 0 {
            true
        } else {
            self.0 & (1 << (level - 1)) != 0
        }
    }

    /// Creates a new mask, shifted by the offset.
    #[inline(always)]
    #[must_use]
    pub const fn virtualize(self, offset: u8) -> Self {
        Self(self.0 >> offset)
    }

    /// Encodes level mask as byte.
    #[inline]
    pub const fn to_byte(self) -> u8 {
        self.0
    }
}
1460
1461impl IntoIterator for LevelMask {
1462    type Item = u8;
1463    type IntoIter = LevelMaskIter;
1464
1465    #[inline]
1466    fn into_iter(self) -> Self::IntoIter {
1467        // Include zero level
1468        LevelMaskIter(1 | (self.0 << 1))
1469    }
1470}
1471
1472impl PartialEq<u8> for LevelMask {
1473    #[inline]
1474    fn eq(&self, other: &u8) -> bool {
1475        self.0 == *other
1476    }
1477}
1478
1479impl BitOr for LevelMask {
1480    type Output = Self;
1481
1482    #[inline(always)]
1483    fn bitor(self, rhs: Self) -> Self::Output {
1484        Self(self.0 | rhs.0)
1485    }
1486}
1487
1488impl BitOrAssign for LevelMask {
1489    #[inline(always)]
1490    fn bitor_assign(&mut self, rhs: Self) {
1491        self.0 |= rhs.0;
1492    }
1493}
1494
1495impl From<LevelMask> for u8 {
1496    #[inline(always)]
1497    fn from(m: LevelMask) -> u8 {
1498        m.0
1499    }
1500}
1501
1502impl std::fmt::Debug for LevelMask {
1503    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1504        f.write_fmt(format_args!("{:03b}", self.0))
1505    }
1506}
1507
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for LevelMask {
    /// Draws a uniformly random 3-bit mask from the input.
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        let mask = u.int_in_range(0b000..=0b111)?;
        Ok(Self::new(mask))
    }

    /// Consumes exactly one byte of input.
    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        (1, Some(1))
    }
}
1519
/// _de Brujn_ level presence bitset iterator.
#[derive(Clone)]
pub struct LevelMaskIter(u8);

impl LevelMaskIter {
    /// Returns `true` if all levels were consumed.
    pub fn is_empty(&self) -> bool {
        self.0 == 0
    }
}

impl Iterator for LevelMaskIter {
    type Item = u8;

    /// Yields the indices of the remaining set bits, lowest first.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.0 == 0 {
            return None;
        }

        // Index of the lowest remaining level bit.
        let level = self.0.trailing_zeros() as u8;

        // Clear the lowest set bit (`x & (x - 1)` trick).
        self.0 &= self.0 - 1;

        Some(level)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exactly one item is produced per remaining set bit.
        let remaining = self.0.count_ones() as usize;
        (remaining, Some(remaining))
    }
}
1554
/// A size of a cell.
///
/// Unlike [`CellTreeStats`] this tracks a single cell/slice, so narrow
/// integer types are sufficient.
#[derive(Debug, Default, Clone, Copy, Eq, PartialEq)]
pub struct Size {
    /// Total number of bits in cell slice.
    pub bits: u16,
    /// Total number refs in cell slice.
    pub refs: u8,
}
1563
1564impl Size {
1565    /// The additive identity for this type, i.e. `0`.
1566    pub const ZERO: Self = Self { bits: 0, refs: 0 };
1567
1568    /// The multiplicative bits identity for this type, i.e. `1 bit`.
1569    pub const BIT: Self = Self { bits: 1, refs: 0 };
1570
1571    /// The multiplicative refs identity for this type, i.e. `1 ref`.
1572    pub const REF: Self = Self { bits: 0, refs: 1 };
1573
1574    /// The largest valid value that can be represented by this type.
1575    pub const MAX: Self = Self {
1576        bits: MAX_BIT_LEN,
1577        refs: MAX_REF_COUNT as _,
1578    };
1579
1580    /// Returns true if the number of bits and refs is in the valid range for the cell.
1581    #[inline]
1582    pub const fn fits_into_cell(&self) -> bool {
1583        self.bits <= MAX_BIT_LEN && self.refs <= MAX_REF_COUNT as _
1584    }
1585
1586    /// Saturating size addition. Computes self + rhs for bits and refs,
1587    /// saturating at the numeric bounds instead of overflowing.
1588    #[inline]
1589    pub const fn saturating_add(self, rhs: Self) -> Self {
1590        Self {
1591            bits: self.bits.saturating_add(rhs.bits),
1592            refs: self.refs.saturating_add(rhs.refs),
1593        }
1594    }
1595
1596    /// Saturating size substraction. Computes self - rhs for bits and refs,
1597    /// saturating at the numeric bounds instead of overflowing.
1598    #[inline]
1599    pub const fn saturating_sub(self, rhs: Self) -> Self {
1600        Self {
1601            bits: self.bits.saturating_sub(rhs.bits),
1602            refs: self.refs.saturating_sub(rhs.refs),
1603        }
1604    }
1605
1606    /// Returns true if there are no bits and refs.
1607    pub const fn is_zero(self) -> bool {
1608        self.bits == 0 && self.refs == 0
1609    }
1610}
1611
1612impl From<Size> for CellTreeStats {
1613    #[inline]
1614    fn from(value: Size) -> Self {
1615        Self {
1616            bit_count: value.bits as _,
1617            cell_count: value.refs as _,
1618        }
1619    }
1620}
1621
1622impl std::ops::Add for Size {
1623    type Output = Self;
1624
1625    #[inline]
1626    fn add(mut self, rhs: Self) -> Self::Output {
1627        self += rhs;
1628        self
1629    }
1630}
1631
1632impl std::ops::AddAssign for Size {
1633    #[inline]
1634    fn add_assign(&mut self, rhs: Self) {
1635        self.bits += rhs.bits;
1636        self.refs += rhs.refs;
1637    }
1638}
1639
1640impl std::ops::Sub for Size {
1641    type Output = Self;
1642
1643    #[inline]
1644    fn sub(mut self, rhs: Self) -> Self::Output {
1645        self -= rhs;
1646        self
1647    }
1648}
1649
1650impl std::ops::SubAssign for Size {
1651    #[inline]
1652    fn sub_assign(&mut self, rhs: Self) {
1653        self.bits -= rhs.bits;
1654        self.refs -= rhs.refs;
1655    }
1656}
1657
/// Cell tree storage stats.
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub struct CellTreeStats {
    /// Total number of bits in tree.
    pub bit_count: u64,
    /// Total number of cells in tree.
    pub cell_count: u64,
}

impl CellTreeStats {
    /// The additive identity for this type, i.e. `0`.
    pub const ZERO: Self = Self {
        bit_count: 0,
        cell_count: 0,
    };
}
1674
1675impl std::ops::Add for CellTreeStats {
1676    type Output = Self;
1677
1678    #[inline]
1679    fn add(self, rhs: Self) -> Self::Output {
1680        Self {
1681            bit_count: self.bit_count.saturating_add(rhs.bit_count),
1682            cell_count: self.cell_count.saturating_add(rhs.cell_count),
1683        }
1684    }
1685}
1686
1687impl std::ops::AddAssign for CellTreeStats {
1688    #[inline]
1689    fn add_assign(&mut self, rhs: Self) {
1690        self.bit_count = self.bit_count.saturating_add(rhs.bit_count);
1691        self.cell_count = self.cell_count.saturating_add(rhs.cell_count);
1692    }
1693}
1694
1695impl std::ops::Add<Size> for CellTreeStats {
1696    type Output = Self;
1697
1698    #[inline]
1699    fn add(self, rhs: Size) -> Self::Output {
1700        Self {
1701            bit_count: self.bit_count.saturating_add(rhs.bits as _),
1702            cell_count: self.cell_count.saturating_add(rhs.refs as _),
1703        }
1704    }
1705}
1706
1707impl std::iter::Sum for CellTreeStats {
1708    #[inline]
1709    fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
1710        let mut res = Self::ZERO;
1711        for item in iter {
1712            res += item;
1713        }
1714        res
1715    }
1716}
1717
1718impl std::ops::AddAssign<Size> for CellTreeStats {
1719    fn add_assign(&mut self, rhs: Size) {
1720        self.bit_count = self.bit_count.saturating_add(rhs.bits as _);
1721        self.cell_count = self.cell_count.saturating_add(rhs.refs as _);
1722    }
1723}
1724
1725impl std::ops::Sub for CellTreeStats {
1726    type Output = Self;
1727
1728    #[inline]
1729    fn sub(mut self, rhs: Self) -> Self::Output {
1730        self -= rhs;
1731        self
1732    }
1733}
1734
1735impl std::ops::SubAssign for CellTreeStats {
1736    #[inline]
1737    fn sub_assign(&mut self, rhs: Self) {
1738        self.bit_count = self.bit_count.saturating_sub(rhs.bit_count);
1739        self.cell_count = self.cell_count.saturating_sub(rhs.cell_count);
1740    }
1741}
1742
1743impl std::ops::SubAssign<Size> for CellTreeStats {
1744    #[inline]
1745    fn sub_assign(&mut self, rhs: Size) {
1746        self.bit_count = self.bit_count.saturating_sub(rhs.bits as _);
1747        self.cell_count = self.cell_count.saturating_sub(rhs.refs as _);
1748    }
1749}
1750
/// A helper to track the size of the unique data in multiple cell trees.
///
/// NOTE: It uses hashes for deduplication, so you can only use it for
/// fully computed and valid trees.
pub struct StorageStat<'a> {
    /// Representation hashes of cells that were already counted.
    visited: ahash::HashSet<&'a HashBytes>,
    /// DFS stack of child-reference iterators for partially processed cells.
    stack: Vec<RefsIter<'a>>,
    /// Accumulated counters over unique cells.
    stats: CellTreeStats,
    /// Max number of counted cells before `add_*` methods bail out.
    limit: usize,
}
1761
impl<'a> StorageStat<'a> {
    /// Recursively computes the count of distinct cells returning
    /// the total storage used by this dag taking into account the
    /// identification of equal cells.
    ///
    /// Root slice does not count as cell. A slice subrange of
    /// cells is used during computation.
    ///
    /// Returns `None` if `limit` cells were counted before the traversal
    /// finished.
    pub fn compute_for_slice<'b: 'a>(
        slice: &'a CellSlice<'b>,
        limit: usize,
    ) -> Option<CellTreeStats> {
        let mut this = Self::with_limit(limit);
        if this.add_slice(slice) {
            Some(this.stats)
        } else {
            None
        }
    }

    /// Recursively computes the count of distinct cells returning
    /// the total storage used by this dag taking into account the
    /// identification of equal cells.
    ///
    /// Returns `None` if `limit` cells were counted before the traversal
    /// finished.
    pub fn compute_for_cell(cell: &'a DynCell, limit: usize) -> Option<CellTreeStats> {
        let mut this = Self::with_limit(limit);
        if this.add_cell(cell) {
            Some(this.stats)
        } else {
            None
        }
    }

    /// Creates a new storage stat state with an explicit limit.
    ///
    /// `limit` caps the number of counted cells; once reached, the
    /// `add_*` methods return `false`.
    pub fn with_limit(limit: usize) -> Self {
        Self {
            visited: Default::default(),
            stack: Vec::new(),
            stats: CellTreeStats::ZERO,
            limit,
        }
    }

    /// Creates a new storage stat state without a limit.
    pub fn unlimited() -> Self {
        Self::with_limit(usize::MAX)
    }

    /// Returns the current tree stats.
    pub fn stats(&self) -> CellTreeStats {
        self.stats
    }

    /// Merges current stats with the stats from the provided cell tree.
    ///
    /// Returns `false` if the limit was reached.
    pub fn add_cell(&mut self, cell: &'a DynCell) -> bool {
        // Skip cells that were already counted (dedup by repr hash).
        if !self.visited.insert(cell.repr_hash()) {
            return true;
        }

        // NOTE(review): the root cell itself is counted without a limit
        // check; only descendants are limit-checked in `reduce_stack`.
        self.stats.bit_count += cell.bit_len() as u64;
        self.stats.cell_count += 1;

        self.stack.clear();
        self.stack.push(cell.references());
        self.reduce_stack()
    }

    /// Merges current stats with the stats from the provided slice.
    ///
    /// Returns `false` if the limit was reached.
    pub fn add_slice(&mut self, slice: &CellSlice<'a>) -> bool {
        // The root slice contributes only its visible bits, not a cell.
        self.stats.bit_count += slice.size_bits() as u64;

        self.stack.clear();
        self.stack.push(slice.references());
        self.reduce_stack()
    }

    /// Iterative DFS over the pending reference iterators on `self.stack`,
    /// accumulating stats for every not-yet-visited cell.
    ///
    /// Returns `false` if the limit was reached.
    fn reduce_stack(&mut self) -> bool {
        'outer: while let Some(item) = self.stack.last_mut() {
            for cell in item.by_ref() {
                // Deduplicate by representation hash.
                if !self.visited.insert(cell.repr_hash()) {
                    continue;
                }

                if self.stats.cell_count >= self.limit as u64 {
                    return false;
                }

                self.stats.bit_count += cell.bit_len() as u64;
                self.stats.cell_count += 1;

                let next = cell.references();
                if next.max > 0 {
                    // Descend into this cell's children first.
                    self.stack.push(next);
                    continue 'outer;
                }
            }

            // All children of the current cell processed - backtrack.
            self.stack.pop();
        }

        true
    }
}
1867
1868/// Helper struct to debug print the root cell.
1869#[derive(Clone, Copy)]
1870pub struct DebugCell<'a>(&'a DynCell);
1871
1872impl std::fmt::Debug for DebugCell<'_> {
1873    #[inline]
1874    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1875        self.0.fmt(f)
1876    }
1877}
1878
1879/// Helper struct to print only the root cell in the cell tree.
1880#[derive(Clone, Copy)]
1881pub struct DisplayCellRoot<'a> {
1882    cell: &'a DynCell,
1883    level: usize,
1884}
1885
1886impl std::fmt::Display for DisplayCellRoot<'_> {
1887    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1888        // TODO: encode on stack
1889        let data = hex::encode(self.cell.data());
1890
1891        let indent = self.level * 2;
1892        if f.alternate() {
1893            f.write_fmt(format_args!("{:indent$}{data}\n", ""))
1894        } else {
1895            let repr_depth = self.cell.depth(LevelMask::MAX_LEVEL);
1896            let repr_hash = self.cell.repr_hash();
1897            let descriptor = self.cell.descriptor();
1898            f.write_fmt(format_args!(
1899                "{:indent$}{:?}: {data}\n{:indent$}bits: {:>4}, refs: {}, l: {:?}, depth: {}, hash: {}\n",
1900                "",
1901                descriptor.cell_type(),
1902                "",
1903                self.cell.bit_len(),
1904                descriptor.reference_count(),
1905                descriptor.level_mask(),
1906                repr_depth,
1907                repr_hash,
1908            ))
1909        }
1910    }
1911}
1912
1913/// Helper struct to print all cells in the cell tree.
1914#[derive(Clone, Copy)]
1915pub struct DisplayCellTree<'a>(&'a DynCell);
1916
1917impl std::fmt::Display for DisplayCellTree<'_> {
1918    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1919        let mut stack = vec![(0, self.0)];
1920
1921        while let Some((level, cell)) = stack.pop() {
1922            ok!(std::fmt::Display::fmt(&DisplayCellRoot { cell, level }, f));
1923
1924            let reference_count = cell.reference_count();
1925            for i in (0..reference_count).rev() {
1926                if let Some(child) = cell.reference(i) {
1927                    stack.push((level + 1, child));
1928                }
1929            }
1930        }
1931
1932        Ok(())
1933    }
1934}
1935
/// Max cell data capacity in bits.
pub const MAX_BIT_LEN: u16 = 1023;
/// Maximum number of child cells.
pub const MAX_REF_COUNT: usize = 4;
1940
1941#[cfg(test)]
1942mod tests {
1943    use super::*;
1944
1945    #[test]
1946    fn correct_level() {
1947        const LEVEL: [u8; 8] = [0, 1, 1, 2, 1, 2, 2, 3];
1948
1949        for mask in 0b000..=0b111 {
1950            assert_eq!(LevelMask(mask).level(), LEVEL[mask as usize]);
1951        }
1952    }
1953
    #[test]
    fn virtualize_descriptor() {
        // Descriptor with all three level bits, 3 refs and 123 data bits.
        let level_mask = LevelMask(0b111);
        let desc = CellDescriptor::new([
            CellDescriptor::compute_d1(level_mask, false, 3),
            CellDescriptor::compute_d2(123),
        ]);

        assert_eq!(desc.level_mask(), level_mask);

        for i in 0..3 {
            let v_desc = desc.virtualize(i);

            // Virtualization must leave everything but the level mask intact.
            assert_eq!(v_desc.cell_type(), desc.cell_type());
            assert_eq!(v_desc.reference_count(), desc.reference_count());
            assert_eq!(v_desc.is_exotic(), desc.is_exotic());
            assert_eq!(v_desc.store_hashes(), desc.store_hashes());
            assert_eq!(v_desc.is_aligned(), desc.is_aligned());
            assert_eq!(v_desc.byte_len(), desc.byte_len());

            // The level mask itself is shifted down by the offset.
            assert_eq!(v_desc.level_mask(), level_mask.virtualize(i));
        }
    }
1977
    #[test]
    fn correct_hash_index() {
        // Expected `hash_index(level)` for levels 0..=3 (columns),
        // for every possible 3-bit mask (rows).
        const HASH_INDEX_TABLE: [[u8; 4]; 8] = [
            // index      // mask
            [0, 0, 0, 0], // 000
            [0, 1, 1, 1], // 001
            [0, 0, 1, 1], // 010
            [0, 1, 2, 2], // 011
            [0, 0, 0, 1], // 100
            [0, 1, 1, 2], // 101
            [0, 0, 1, 2], // 110
            [0, 1, 2, 3], // 111
        ];

        for mask in 0b000..=0b111 {
            let mask = LevelMask(mask);

            for i in 0..=3 {
                let hash_index = mask.hash_index(i);
                assert_eq!(hash_index, HASH_INDEX_TABLE[mask.0 as usize][i as usize]);
            }
        }
    }
2001
    #[test]
    fn ultra_virtual_cell_by_ref() {
        // Build a chain of pruned branches of increasing level over an
        // empty cell, then check that each `virtualize` step makes the
        // cell behave like the pruned branch one level below it.
        let cell = Cell::empty_cell();

        let pruned1 =
            crate::merkle::make_pruned_branch(cell.as_ref(), 0, Cell::empty_context()).unwrap();

        let pruned2 =
            crate::merkle::make_pruned_branch(pruned1.as_ref(), 1, Cell::empty_context()).unwrap();

        let pruned3 =
            crate::merkle::make_pruned_branch(pruned2.as_ref(), 2, Cell::empty_context()).unwrap();

        // Level 3 -> 2
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), pruned2.repr_hash());
        assert_eq!(pruned3.repr_depth(), pruned2.repr_depth());

        // Level 2 -> 1
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), pruned1.repr_hash());
        assert_eq!(pruned3.repr_depth(), pruned1.repr_depth());

        // Level 1 -> 0
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());

        // Level 0 -> 0
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());

        // Level 0 -> 0 (just in case)
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());
    }
2040
2041    #[test]
2042    fn versioned_store_load() {
2043        #[derive(Debug, Clone, Copy, Eq, PartialEq, Store, Load)]
2044        #[tlb(tag = ["#12", "#34", "#56"])]
2045        struct Versioned {
2046            old_field1: u32,
2047            #[tlb(since_tag = 1)]
2048            new_field1: u32,
2049            old_field2: bool,
2050            #[tlb(since_tag = 2)]
2051            new_field2: bool,
2052        }
2053
2054        let old = Versioned {
2055            old_field1: 123,
2056            new_field1: 0,
2057            old_field2: true,
2058            new_field2: false,
2059        };
2060        let cell = CellBuilder::build_from(old).unwrap();
2061
2062        {
2063            let mut slice = cell.as_slice().unwrap();
2064            assert_eq!(slice.size_bits(), 8 + 32 + 1);
2065            assert_eq!(slice.load_u8().unwrap(), 0x12);
2066            assert_eq!(slice.load_u32().unwrap(), 123);
2067            assert!(slice.load_bit().unwrap());
2068            assert!(slice.is_empty());
2069        }
2070        assert_eq!(
2071            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
2072            old,
2073        );
2074
2075        let new = Versioned {
2076            old_field1: 123,
2077            new_field1: 456,
2078            old_field2: true,
2079            new_field2: false,
2080        };
2081        let cell = CellBuilder::build_from(new).unwrap();
2082
2083        {
2084            let mut slice = cell.as_slice().unwrap();
2085            assert_eq!(slice.size_bits(), 8 + 32 + 32 + 1);
2086            assert_eq!(slice.load_u8().unwrap(), 0x34);
2087            assert_eq!(slice.load_u32().unwrap(), 123);
2088            assert_eq!(slice.load_u32().unwrap(), 456);
2089            assert!(slice.load_bit().unwrap());
2090            assert!(slice.is_empty());
2091        }
2092        assert_eq!(
2093            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
2094            new
2095        );
2096
2097        let too_new = Versioned {
2098            old_field1: 123,
2099            new_field1: 0,
2100            old_field2: true,
2101            new_field2: true,
2102        };
2103        let cell = CellBuilder::build_from(too_new).unwrap();
2104
2105        {
2106            let mut slice = cell.as_slice().unwrap();
2107            assert_eq!(slice.size_bits(), 8 + 32 + 32 + 1 + 1);
2108            assert_eq!(slice.load_u8().unwrap(), 0x56);
2109            assert_eq!(slice.load_u32().unwrap(), 123);
2110            assert_eq!(slice.load_u32().unwrap(), 0);
2111            assert!(slice.load_bit().unwrap());
2112            assert!(slice.load_bit().unwrap());
2113            assert!(slice.is_empty());
2114        }
2115        assert_eq!(
2116            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
2117            too_new
2118        );
2119    }
2120}