tycho_types/cell/
mod.rs

1//! Cell tree implementation.
2
3use std::ops::{BitOr, BitOrAssign};
4use std::str::FromStr;
5
6pub use tycho_types_proc::{Load, Store};
7
8pub use self::builder::{
9    CellBuilder, CellDataBuilder, CellRefsBuilder, DisplayCellBuilderData, Store,
10};
11pub use self::cell_context::{CellContext, CellParts, LoadMode};
12#[cfg(not(feature = "sync"))]
13pub use self::cell_impl::rc::{Cell, CellInner, WeakCell};
14#[cfg(feature = "sync")]
15pub use self::cell_impl::sync::{Cell, CellInner, WeakCell};
16pub use self::cell_impl::{SafeDeleter, StaticCell, VirtualCellWrapper};
17pub use self::lazy::{Lazy, LazyExotic};
18pub use self::slice::{
19    CellSlice, CellSliceParts, CellSliceRange, DisplayCellSliceData, ExactSize, Load, LoadCell,
20};
21pub use self::usage_tree::{UsageTree, UsageTreeMode, UsageTreeWithSubtrees};
22use crate::error::{Error, ParseHashBytesError};
23use crate::util::Bitstring;
24
/// Generic cell implementation.
mod cell_impl;

/// Traits for gas accounting and resolving exotic cells.
mod cell_context;

/// Cell view utils.
mod slice;

/// Cell creation utils.
mod builder;

/// Lazy-loaded cell data.
mod lazy;

/// Cell usage tracking utils.
mod usage_tree;
41
#[cfg(feature = "sync")]
#[doc(hidden)]
mod __checks {
    use super::*;

    // Compile-time guarantees that with the `sync` feature enabled the
    // core types can be sent across threads.
    assert_impl_all!(Cell: Send);
    assert_impl_all!(CellSlice: Send);
    assert_impl_all!(CellBuilder: Send);
}
51
/// Marker trait which allows casting lazy-loaded data.
pub trait EquivalentRepr<T> {}

/// All types are equivalent to themselves.
impl<T> EquivalentRepr<T> for T {}
57
/// Cell implementation family.
pub trait CellFamily: Sized {
    /// The default cell context type.
    type EmptyCellContext: CellContext;

    /// Creates an empty cell.
    ///
    /// NOTE: in most cases empty cell is ZST.
    fn empty_cell() -> Cell;

    /// Returns a static reference to the empty cell.
    fn empty_cell_ref() -> &'static DynCell;

    /// Returns a static reference to the empty cell context.
    fn empty_context() -> &'static Self::EmptyCellContext;

    /// Returns a static reference to the cell with all zeros.
    fn all_zeros_ref() -> &'static DynCell;

    /// Returns a static reference to the cell with all ones.
    fn all_ones_ref() -> &'static DynCell;

    /// Creates a virtualized cell from the specified cell.
    fn virtualize(cell: Cell) -> Cell;
}
83
/// Dyn trait type alias for a cell implementation.
#[cfg(not(feature = "sync"))]
pub type DynCell = dyn CellImpl;

/// Dyn trait type alias for a thread-safe cell implementation.
#[cfg(feature = "sync")]
pub type DynCell = dyn CellImpl + Send + Sync;
91
// Identity conversions so that `&DynCell` satisfies `impl AsRef<DynCell>`
// (and `AsMut`) bounds without an extra wrapping step.
impl AsRef<DynCell> for DynCell {
    #[inline(always)]
    fn as_ref(&self) -> &Self {
        self
    }
}

impl AsMut<DynCell> for DynCell {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut Self {
        self
    }
}
105
/// Represents the interface of a well-formed cell.
///
/// Since all basic operations are implemented via dynamic dispatch,
/// all high-level helper methods are implemented for [`DynCell`].
pub trait CellImpl {
    /// Unwraps the root cell from the usage tracking.
    fn untrack(self: CellInner<Self>) -> Cell;

    /// Returns cell descriptor.
    ///
    /// # See also
    ///
    /// Cell descriptor contains some tightly packed info about the cell.
    /// If you want convenient methods to access it use:
    /// [`cell_type`], [`level_mask`], [`reference_count`], [`is_exotic`]
    ///
    /// [`cell_type`]: CellDescriptor::cell_type
    /// [`level_mask`]: CellDescriptor::level_mask
    /// [`reference_count`]: CellDescriptor::reference_count
    /// [`is_exotic`]: CellDescriptor::is_exotic
    fn descriptor(&self) -> CellDescriptor;

    /// Returns the raw data of this cell.
    fn data(&self) -> &[u8];

    /// Returns the data size of this cell in bits.
    fn bit_len(&self) -> u16;

    /// Returns a reference to the Nth child cell.
    fn reference(&self, index: u8) -> Option<&DynCell>;

    /// Returns the Nth child cell.
    fn reference_cloned(&self, index: u8) -> Option<Cell>;

    /// Returns this cell as a virtualized cell, so that all hashes
    /// and depths will have an offset.
    fn virtualize(&self) -> &DynCell;

    /// Returns cell hash for the specified level.
    ///
    /// Cell representation hash is the hash at the maximum level ([`LevelMask::MAX_LEVEL`]).
    /// Use `repr_hash` as a simple alias for this.
    fn hash(&self, level: u8) -> &HashBytes;

    /// Returns cell depth for the specified level.
    fn depth(&self, level: u8) -> u16;
}
153
impl DynCell {
    /// Computes cell type from descriptor bytes.
    #[inline]
    pub fn cell_type(&self) -> CellType {
        self.descriptor().cell_type()
    }

    /// Computes the cell level from the level mask.
    #[inline]
    pub fn level(&self) -> u8 {
        self.descriptor().level_mask().level()
    }

    /// Computes the level mask from the descriptor bytes.
    #[inline]
    pub fn level_mask(&self) -> LevelMask {
        self.descriptor().level_mask()
    }

    /// Computes the number of child cells from descriptor bytes.
    #[inline]
    pub fn reference_count(&self) -> u8 {
        self.descriptor().reference_count()
    }

    /// Tries to load the specified child cell as slice.
    /// Returns an error if the loaded cell is absent or is pruned.
    pub fn get_reference_as_slice(&self, index: u8) -> Result<CellSlice<'_>, Error> {
        match self.reference(index) {
            Some(cell) => CellSlice::new(cell),
            None => Err(Error::CellUnderflow),
        }
    }

    /// Returns whether the cell is not [`Ordinary`].
    ///
    /// [`Ordinary`]: CellType::Ordinary
    #[inline]
    pub fn is_exotic(&self) -> bool {
        self.descriptor().is_exotic()
    }

    /// Returns a representation hash of the cell
    /// (the hash at the maximum level).
    #[inline]
    pub fn repr_hash(&self) -> &HashBytes {
        self.hash(LevelMask::MAX_LEVEL)
    }

    /// Returns a representation depth of the cell
    /// (the depth at the maximum level).
    #[inline]
    pub fn repr_depth(&self) -> u16 {
        self.depth(LevelMask::MAX_LEVEL)
    }

    /// Returns `true` if any of cell levels has the maximum depth.
    pub fn has_max_depth(&self) -> bool {
        for level in self.descriptor().level_mask() {
            if self.depth(level) == u16::MAX {
                return true;
            }
        }
        false
    }

    /// Returns true if the cell is empty (no bits, no refs).
    ///
    /// NOTE: implemented by comparing the representation hash
    /// against [`EMPTY_CELL_HASH`].
    pub fn is_empty(&self) -> bool {
        self.hash(LevelMask::MAX_LEVEL) == EMPTY_CELL_HASH
    }

    /// Creates an iterator through child nodes.
    #[inline]
    pub fn references(&self) -> RefsIter<'_> {
        RefsIter {
            cell: self,
            max: self.reference_count(),
            index: 0,
        }
    }

    /// Returns this cell as a cell slice.
    /// Returns an error if the cell is not ordinary.
    #[inline]
    pub fn as_slice(&'_ self) -> Result<CellSlice<'_>, Error> {
        CellSlice::new(self)
    }

    /// Returns this cell as a cell slice.
    ///
    /// Loads cell as is.
    #[inline]
    pub fn as_slice_allow_exotic(&'_ self) -> CellSlice<'_> {
        CellSlice::new_allow_exotic(self)
    }

    /// Recursively computes the count of distinct cells returning
    /// the total storage used by this dag taking into account the
    /// identification of equal cells.
    pub fn compute_unique_stats(&self, limit: usize) -> Option<CellTreeStats> {
        StorageStat::compute_for_cell(self, limit)
    }

    /// Recursively traverses the cells tree without tracking a uniqueness
    /// of cells. Useful for adding small subtrees to merkle proofs.
    pub fn touch_recursive(&self) {
        self.data(); // Touch data to include it if `OnDataAccess` is used.
        for child in self.references() {
            child.touch_recursive();
        }
    }

    /// Returns an object that implements [`Debug`] for printing only
    /// the root cell of the cell tree.
    ///
    /// [`Debug`]: std::fmt::Debug
    #[inline]
    pub fn debug_root(&'_ self) -> DebugCell<'_> {
        DebugCell(self)
    }

    /// Returns an object that implements [`Display`] for printing only
    /// the root cell of the cell tree.
    ///
    /// [`Display`]: std::fmt::Display
    #[inline]
    pub fn display_root(&'_ self) -> DisplayCellRoot<'_> {
        DisplayCellRoot {
            cell: self,
            level: 0,
        }
    }

    /// Returns an object that implements [`Display`] for printing all
    /// cells in the cell tree.
    ///
    /// [`Display`]: std::fmt::Display
    #[inline]
    pub fn display_tree(&'_ self) -> DisplayCellTree<'_> {
        DisplayCellTree(self)
    }

    /// Returns an object which will display cell data as a bitstring
    /// with a termination bit.
    #[inline]
    pub fn display_data(&self) -> impl std::fmt::Display + std::fmt::Binary + '_ {
        // Local adapter which borrows the cell data and delegates
        // both formatting flavors to `Bitstring`.
        struct DisplayData<'a>(&'a DynCell);

        impl std::fmt::Display for DisplayData<'_> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                std::fmt::Display::fmt(
                    &Bitstring {
                        bytes: self.0.data(),
                        bit_len: self.0.bit_len(),
                    },
                    f,
                )
            }
        }

        impl std::fmt::Binary for DisplayData<'_> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                std::fmt::Binary::fmt(
                    &Bitstring {
                        bytes: self.0.data(),
                        bit_len: self.0.bit_len(),
                    },
                    f,
                )
            }
        }

        DisplayData(self)
    }

    /// Converts this cell into a slice and tries to load the specified type from it.
    /// Fails if the cell is not ordinary.
    ///
    /// NOTE: parsing `Cell` will load the first reference!
    #[inline]
    pub fn parse<'a, T: Load<'a>>(&'a self) -> Result<T, Error> {
        T::load_from(&mut ok!(self.as_slice()))
    }

    /// Loads an exotic cell.
    /// Returns an error if the cell is ordinary.
    #[inline]
    pub fn parse_exotic<'a, T: LoadCell<'a>>(&'a self) -> Result<T, Error> {
        if self.is_exotic() {
            T::load_from_cell(self)
        } else {
            Err(Error::UnexpectedOrdinaryCell)
        }
    }
}
346
impl std::fmt::Debug for DynCell {
    // Prints only the root cell as a two-field struct (`ty` and `hash`)
    // via a compact project helper.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        crate::util::debug_struct_field2_finish(
            f,
            "Cell",
            "ty",
            &self.cell_type(),
            "hash",
            self.repr_hash(),
        )
    }
}
359
impl Eq for DynCell {}

// Cells are compared by representation hash only; structural contents
// are not traversed.
impl PartialEq<DynCell> for DynCell {
    #[inline]
    fn eq(&self, other: &DynCell) -> bool {
        self.repr_hash() == other.repr_hash()
    }
}
368
#[cfg(feature = "serde")]
impl serde::Serialize for DynCell {
    // Serializes the whole cell tree as a BOC: base64 string for
    // human-readable formats, raw bytes otherwise.
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        let boc = crate::boc::Boc::encode(self);
        if serializer.is_human_readable() {
            serializer.serialize_str(&crate::util::encode_base64(boc))
        } else {
            serializer.serialize_bytes(&boc)
        }
    }
}
380
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for Cell {
    /// Builds a cell from an arbitrary [`CellBuilder`].
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        // NOTE(review): assumes building an arbitrary `CellBuilder` cannot
        // fail — confirm against the builder's `Arbitrary` impl.
        Ok(u.arbitrary::<CellBuilder>()?.build().unwrap())
    }

    #[inline]
    fn size_hint(depth: usize) -> (usize, Option<usize>) {
        Self::try_size_hint(depth).unwrap_or_default()
    }

    // Delegates to the builder's size hint since all bytes are consumed there.
    #[inline]
    fn try_size_hint(
        depth: usize,
    ) -> Result<(usize, Option<usize>), arbitrary::MaxRecursionReached> {
        <CellBuilder as arbitrary::Arbitrary>::try_size_hint(depth)
    }
}
400
impl From<Cell> for CellSliceParts {
    // Pairs the cell with a range covering all of its bits and refs.
    #[inline]
    fn from(value: Cell) -> Self {
        (CellSliceRange::full(&value), value)
    }
}
407
/// An iterator through child nodes.
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct RefsIter<'a> {
    // Cell whose children are being iterated.
    cell: &'a DynCell,
    // Exclusive upper bound of the child index (starts at the ref count).
    max: u8,
    // Next child index to yield from the front.
    index: u8,
}
415
416impl<'a> RefsIter<'a> {
417    /// Returns a cell by children of which we are iterating.
418    #[inline]
419    pub fn cell(&self) -> &'a DynCell {
420        self.cell
421    }
422
423    /// Returns a reference to the next() value without advancing the iterator.
424    #[inline]
425    pub fn peek(&self) -> Option<&'a DynCell> {
426        if self.index >= self.max {
427            None
428        } else {
429            self.cell.reference(self.index)
430        }
431    }
432
433    /// Returns a cloned reference to the next() value without advancing the iterator.
434    #[inline]
435    pub fn peek_cloned(&self) -> Option<Cell> {
436        if self.index >= self.max {
437            None
438        } else {
439            self.cell.reference_cloned(self.index)
440        }
441    }
442
443    /// Returns a reference to the next_back() value without advancing the iterator.
444    #[inline]
445    pub fn peek_prev(&self) -> Option<&'a DynCell> {
446        if self.index > 0 {
447            self.cell.reference(self.index - 1)
448        } else {
449            None
450        }
451    }
452
453    /// Returns a cloned reference to the next_back() value without advancing the iterator.
454    #[inline]
455    pub fn peek_prev_cloned(&self) -> Option<Cell> {
456        if self.index > 0 {
457            self.cell.reference_cloned(self.index - 1)
458        } else {
459            None
460        }
461    }
462
463    /// Creates an iterator through child nodes which produces cloned references.
464    #[inline]
465    pub fn cloned(self) -> ClonedRefsIter<'a> {
466        ClonedRefsIter { inner: self }
467    }
468}
469
impl Clone for RefsIter<'_> {
    // Cheap field-wise copy (the cell is only borrowed).
    #[inline]
    fn clone(&self) -> Self {
        Self {
            cell: self.cell,
            max: self.max,
            index: self.index,
        }
    }
}
480
481impl<'a> Iterator for RefsIter<'a> {
482    type Item = &'a DynCell;
483
484    #[inline]
485    fn next(&mut self) -> Option<Self::Item> {
486        if self.index >= self.max {
487            None
488        } else {
489            let child = self.cell.reference(self.index);
490            self.index += 1;
491            child
492        }
493    }
494
495    #[inline]
496    fn size_hint(&self) -> (usize, Option<usize>) {
497        let remaining = self.max.saturating_sub(self.index) as usize;
498        (remaining, Some(remaining))
499    }
500}
501
502impl DoubleEndedIterator for RefsIter<'_> {
503    #[inline]
504    fn next_back(&mut self) -> Option<Self::Item> {
505        if self.max > self.index {
506            self.max -= 1;
507            self.cell.reference(self.max)
508        } else {
509            None
510        }
511    }
512}
513
impl ExactSizeIterator for RefsIter<'_> {
    // `size_hint` is exact for this iterator (see above).
    #[inline]
    fn len(&self) -> usize {
        self.size_hint().0
    }
}
520
/// An iterator through child nodes which produces cloned references.
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct ClonedRefsIter<'a> {
    // Borrowing iterator which this one wraps, cloning each yielded cell.
    inner: RefsIter<'a>,
}
526
impl<'a> ClonedRefsIter<'a> {
    /// Returns a cell by children of which we are iterating.
    #[inline]
    pub fn cell(&self) -> &'a DynCell {
        self.inner.cell
    }

    /// Returns a reference to the next() value without advancing the iterator.
    #[inline]
    pub fn peek(&self) -> Option<Cell> {
        self.inner.peek_cloned()
    }

    /// Returns a reference to the next_back() value without advancing the iterator.
    #[inline]
    pub fn peek_prev(&self) -> Option<Cell> {
        self.inner.peek_prev_cloned()
    }
}
546
impl Clone for ClonedRefsIter<'_> {
    // Delegates to the inner iterator's cheap copy.
    #[inline]
    fn clone(&self) -> Self {
        Self {
            inner: self.inner.clone(),
        }
    }
}
555
556impl Iterator for ClonedRefsIter<'_> {
557    type Item = Cell;
558
559    #[inline]
560    fn next(&mut self) -> Option<Self::Item> {
561        if self.inner.index >= self.inner.max {
562            None
563        } else {
564            let child = self.inner.cell.reference_cloned(self.inner.index);
565            self.inner.index += 1;
566            child
567        }
568    }
569
570    #[inline]
571    fn size_hint(&self) -> (usize, Option<usize>) {
572        self.inner.size_hint()
573    }
574}
575
576impl DoubleEndedIterator for ClonedRefsIter<'_> {
577    #[inline]
578    fn next_back(&mut self) -> Option<Self::Item> {
579        if self.inner.max > self.inner.index {
580            self.inner.max -= 1;
581            self.inner.cell.reference_cloned(self.inner.max)
582        } else {
583            None
584        }
585    }
586}
587
impl ExactSizeIterator for ClonedRefsIter<'_> {
    // `size_hint` is exact for this iterator (delegated to the inner one).
    #[inline]
    fn len(&self) -> usize {
        self.size_hint().0
    }
}
594
/// A 256-bit cell hash (transparent newtype over `[u8; 32]`).
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[repr(transparent)]
pub struct HashBytes(pub [u8; 32]);
599
impl HashBytes {
    /// Array of zero bytes.
    pub const ZERO: Self = Self([0; 32]);

    /// Converts slice to a hash bytes.
    ///
    /// # Panics
    ///
    /// Panics if the length of the slice is not 32 bytes.
    #[inline]
    pub fn from_slice(slice: &[u8]) -> Self {
        Self(slice.try_into().expect("slice with incorrect length"))
    }

    /// Converts integer into zero-padded big-endian bytes.
    ///
    /// Returns `None` on overflow or negative values.
    #[cfg(feature = "bigint")]
    pub fn from_bigint(int: &num_bigint::BigInt) -> Option<Self> {
        if int.sign() == num_bigint::Sign::Minus {
            return None;
        }
        Self::from_biguint(int.magnitude())
    }

    /// Converts integer into zero-padded big-endian bytes.
    ///
    /// Returns `None` on overflow (more than 32 significant bytes).
    #[cfg(feature = "bigint")]
    pub fn from_biguint(uint: &num_bigint::BigUint) -> Option<Self> {
        let mut bytes = uint.to_bytes_le();
        if bytes.len() > 32 {
            return None;
        }

        // Zero-pad to 32 little-endian bytes, then flip to big-endian.
        bytes.resize(32, 0);
        bytes.reverse();
        Some(Self::from_slice(&bytes))
    }

    /// Converts integer into zero-padded big-endian bytes.
    ///
    /// Ignores all bits after 256th.
    #[cfg(feature = "bigint")]
    pub fn from_biguint_lossy(uint: &num_bigint::BigUint) -> Self {
        let mut bytes = uint.to_bytes_le();
        // `resize` truncates longer values, keeping only the low 256 bits.
        bytes.resize(32, 0);
        bytes.reverse();
        Self::from_slice(&bytes)
    }

    /// Wraps a reference to an internal array into a newtype reference.
    #[inline(always)]
    pub const fn wrap(value: &[u8; 32]) -> &Self {
        // SAFETY: `HashBytes` is `#[repr(transparent)]` over `[u8; 32]`,
        // so the layouts are identical.
        unsafe { &*(value as *const [u8; 32] as *const Self) }
    }

    /// Returns a slice containing the entire array.
    pub const fn as_slice(&self) -> &[u8] {
        self.0.as_slice()
    }

    /// Returns a mutable slice containing the entire array.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.0.as_mut_slice()
    }

    /// Returns an internal array.
    pub const fn as_array(&self) -> &[u8; 32] {
        &self.0
    }

    /// Returns a mutable internal array.
    pub fn as_mut_array(&mut self) -> &mut [u8; 32] {
        &mut self.0
    }

    /// Returns a raw pointer to the slice's buffer.
    #[inline(always)]
    pub const fn as_ptr(&self) -> *const u8 {
        &self.0 as *const [u8] as *const u8
    }

    /// Returns a first chunk of 8 bytes.
    pub const fn first_chunk(&self) -> &[u8; 8] {
        match self.0.first_chunk::<8>() {
            Some(chunk) => chunk,
            // SAFETY: the array is always 32 bytes long, so the first
            // 8-byte chunk always exists (const-compatible unwrap).
            None => unsafe { std::hint::unreachable_unchecked() },
        }
    }

    /// Returns `true` if all bytes are zero.
    pub const fn is_zero(&self) -> bool {
        // Manual index loop: iterators are not available in `const fn`.
        let mut i = 0;
        while i < 32 {
            if self.0[i] != 0 {
                return false;
            }
            i += 1;
        }
        true
    }

    // Writes all 32 bytes as 64 hex characters using the given digit table.
    #[inline(always)]
    fn fmt_hex(&self, f: &mut std::fmt::Formatter<'_>, table: &[u8; 16]) -> std::fmt::Result {
        let mut output = [0u8; 64];
        // The output buffer is exactly twice the input length, so encoding
        // is expected to succeed; the result is deliberately discarded.
        crate::util::encode_to_hex_slice(&self.0, &mut output, table).ok();

        // SAFETY: output is guaranteed to contain only ASCII hex digits.
        let output = unsafe { std::str::from_utf8_unchecked(&output) };
        f.write_str(output)
    }

    /// Creates a bigint from bytes (as big-endian).
    #[cfg(feature = "bigint")]
    pub fn as_biguint(&self) -> num_bigint::BigUint {
        num_bigint::BigUint::from_bytes_be(&self.0)
    }

    /// Creates a bigint from bytes (as big-endian).
    #[cfg(feature = "bigint")]
    pub fn as_bigint(&self) -> num_bigint::BigInt {
        self.as_biguint().into()
    }
}
727
impl Default for HashBytes {
    // The default hash is all zeros.
    #[inline(always)]
    fn default() -> Self {
        Self::ZERO
    }
}
734
// Transparent borrow conversions between `HashBytes` and byte arrays/slices.
impl AsRef<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn as_ref(&self) -> &[u8; 32] {
        &self.0
    }
}

impl AsRef<[u8]> for HashBytes {
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        self.0.as_ref()
    }
}

impl AsMut<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8; 32] {
        &mut self.0
    }
}

impl AsMut<[u8]> for HashBytes {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_mut()
    }
}

impl std::borrow::Borrow<[u8]> for HashBytes {
    #[inline(always)]
    fn borrow(&self) -> &[u8] {
        &self.0
    }
}

impl std::borrow::BorrowMut<[u8]> for HashBytes {
    #[inline(always)]
    fn borrow_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

// Allows map lookups keyed by `HashBytes` on collections of `[u8; 32]`.
impl std::borrow::Borrow<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn borrow(&self) -> &HashBytes {
        HashBytes::wrap(self)
    }
}
783
// Symmetric equality with plain byte arrays and slices.
impl PartialEq<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn eq(&self, other: &[u8; 32]) -> bool {
        &self.0 == other
    }
}

impl PartialEq<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn eq(&self, other: &HashBytes) -> bool {
        self == &other.0
    }
}

impl PartialEq<[u8]> for HashBytes {
    #[inline(always)]
    fn eq(&self, other: &[u8]) -> bool {
        self.0 == other
    }
}

impl PartialEq<HashBytes> for [u8] {
    #[inline(always)]
    fn eq(&self, other: &HashBytes) -> bool {
        self == other.0
    }
}
811
// Infallible conversions from raw arrays and common hasher outputs.
impl From<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn from(value: [u8; 32]) -> Self {
        Self(value)
    }
}

impl From<sha2::digest::Output<sha2::Sha256>> for HashBytes {
    #[inline(always)]
    fn from(value: sha2::digest::Output<sha2::Sha256>) -> Self {
        Self(value.into())
    }
}

#[cfg(feature = "blake3")]
impl From<blake3::Hash> for HashBytes {
    #[inline(always)]
    fn from(value: blake3::Hash) -> Self {
        Self(value.into())
    }
}

impl From<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn from(value: HashBytes) -> Self {
        value.0
    }
}
840
841impl FromStr for HashBytes {
842    type Err = ParseHashBytesError;
843
844    fn from_str(s: &str) -> Result<Self, Self::Err> {
845        let mut result = Self::default();
846        match s.len() {
847            64 => {
848                if let Err(e) = hex::decode_to_slice(s, &mut result.0) {
849                    return Err(ParseHashBytesError::InvalidHex(e));
850                }
851            }
852            66 => {
853                if let Err(e) = hex::decode_to_slice(&s[2..], &mut result.0) {
854                    return Err(ParseHashBytesError::InvalidHex(e));
855                }
856            }
857            #[cfg(feature = "base64")]
858            44 => {
859                if let Err(e) = crate::util::decode_base64_slice(s, &mut result.0) {
860                    return Err(ParseHashBytesError::InvalidBase64(e));
861                }
862            }
863            _ => return Err(ParseHashBytesError::UnexpectedStringLength),
864        }
865        Ok(result)
866    }
867}
868
impl std::fmt::Display for HashBytes {
    // Displays as 64 lowercase hex characters (same as `LowerHex`).
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::LowerHex::fmt(self, f)
    }
}
875
// Hex formatting differs only in the digit table passed to `fmt_hex`.
impl std::fmt::LowerHex for HashBytes {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        Self::fmt_hex(self, f, crate::util::HEX_CHARS_LOWER)
    }
}

impl std::fmt::UpperHex for HashBytes {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        Self::fmt_hex(self, f, crate::util::HEX_CHARS_UPPER)
    }
}
887
impl std::fmt::Debug for HashBytes {
    // Debug output intentionally matches `Display` (lowercase hex).
    #[inline(always)]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
    }
}
894
// Forwards indexing (by position or range) to the inner byte array.
impl<I> std::ops::Index<I> for HashBytes
where
    [u8; 32]: std::ops::Index<I>,
{
    type Output = <[u8; 32] as std::ops::Index<I>>::Output;

    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        std::ops::Index::index(&self.0, index)
    }
}

impl<I> std::ops::IndexMut<I> for HashBytes
where
    [u8; 32]: std::ops::IndexMut<I>,
{
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        std::ops::IndexMut::index_mut(&mut self.0, index)
    }
}
916
// Uniformly random 32 bytes for both supported `rand` major versions.
#[cfg(feature = "rand8")]
impl rand8::distributions::Distribution<HashBytes> for rand8::distributions::Standard {
    #[inline]
    fn sample<R: rand8::Rng + ?Sized>(&self, rng: &mut R) -> HashBytes {
        HashBytes(rand8::distributions::Standard.sample(rng))
    }
}

#[cfg(feature = "rand9")]
impl rand9::distr::Distribution<HashBytes> for rand9::distr::StandardUniform {
    #[inline]
    fn sample<R: rand9::Rng + ?Sized>(&self, rng: &mut R) -> HashBytes {
        HashBytes(rand9::distr::StandardUniform.sample(rng))
    }
}
932
#[cfg(feature = "serde")]
impl serde::Serialize for HashBytes {
    // Serializes as a 64-char lowercase hex string for human-readable
    // formats, or as 32 raw bytes otherwise.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        if serializer.is_human_readable() {
            let mut output = [0u8; 64];
            crate::util::encode_to_hex_slice(&self.0, &mut output, crate::util::HEX_CHARS_LOWER)
                .ok();

            // SAFETY: output is guaranteed to contain only [0-9a-f]
            let output = unsafe { std::str::from_utf8_unchecked(&output) };
            serializer.serialize_str(output)
        } else {
            serializer.serialize_bytes(&self.0)
        }
    }
}
952
#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for HashBytes {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use ::serde::de::{Error, Visitor};

        // Expects exactly 64 hex characters.
        //
        // NOTE(review): unlike `FromStr`, this accepts neither a `0x`
        // prefix nor base64 — confirm the asymmetry is intended.
        struct HashBytesHexVisitor;

        impl Visitor<'_> for HashBytesHexVisitor {
            type Value = HashBytes;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("hex-encoded byte array of size 32")
            }

            fn visit_str<E: Error>(self, value: &str) -> Result<Self::Value, E> {
                let mut result = HashBytes([0; 32]);
                match hex::decode_to_slice(value, &mut result.0) {
                    Ok(()) => Ok(result),
                    Err(_) => Err(Error::invalid_value(
                        serde::de::Unexpected::Str(value),
                        &self,
                    )),
                }
            }
        }

        // Expects exactly 32 raw bytes.
        pub struct HashBytesRawVisitor;

        impl Visitor<'_> for HashBytesRawVisitor {
            type Value = HashBytes;

            fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                f.write_fmt(format_args!("a byte array of size 32"))
            }

            fn visit_bytes<E: Error>(self, v: &[u8]) -> Result<Self::Value, E> {
                v.try_into()
                    .map(HashBytes)
                    .map_err(|_e| Error::invalid_length(v.len(), &self))
            }
        }

        if deserializer.is_human_readable() {
            deserializer.deserialize_str(HashBytesHexVisitor)
        } else {
            deserializer.deserialize_bytes(HashBytesRawVisitor)
        }
    }
}
1005
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for HashBytes {
    // Consumes exactly 32 arbitrary bytes.
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        u.arbitrary().map(Self)
    }

    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        (32, Some(32))
    }
}
1018
/// Hash of an empty (0 bits of data, no refs) ordinary cell.
///
/// Used by [`DynCell::is_empty`] to detect empty cells by hash comparison.
pub static EMPTY_CELL_HASH: &HashBytes = HashBytes::wrap(&[
    0x96, 0xa2, 0x96, 0xd2, 0x24, 0xf2, 0x85, 0xc6, 0x7b, 0xee, 0x93, 0xc3, 0x0f, 0x8a, 0x30, 0x91,
    0x57, 0xf0, 0xda, 0xa3, 0x5d, 0xc5, 0xb8, 0x7e, 0x41, 0x0b, 0x78, 0x63, 0x0a, 0x09, 0xcf, 0xc7,
]);
1024
/// Well-formed cell type.
///
/// Use [`CellType::is_exotic`] to distinguish ordinary cells from exotic ones.
#[derive(Default, Debug, Copy, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum CellType {
    /// Cell of this type just stores data and references.
    #[default]
    Ordinary,
    /// Exotic cell which was pruned from the original tree of cells
    /// when a Merkle proof has been created.
    PrunedBranch,
    /// Exotic cell with a reference to the cell with a library.
    LibraryReference,
    /// Exotic cell with one hash and one reference.
    MerkleProof,
    /// Exotic cell with two hashes and two references.
    MerkleUpdate,
}
1042
1043impl CellType {
1044    /// Returns whether this cell type is Merkle proof or Merkle update.
1045    #[inline]
1046    pub const fn is_merkle(self) -> bool {
1047        matches!(self, Self::MerkleProof | Self::MerkleUpdate)
1048    }
1049
1050    /// Returns whether the cell is not ordinary.
1051    #[inline]
1052    pub const fn is_exotic(self) -> bool {
1053        !matches!(self, Self::Ordinary)
1054    }
1055
1056    /// Returns whether the cell is a pruned branch.
1057    #[inline]
1058    pub const fn is_pruned_branch(self) -> bool {
1059        matches!(self, Self::PrunedBranch)
1060    }
1061
1062    /// Returns whether this cell is a library cell.
1063    #[inline(always)]
1064    pub const fn is_library(self) -> bool {
1065        matches!(self, Self::LibraryReference)
1066    }
1067
1068    /// Encodes cell type as byte.
1069    #[inline]
1070    pub const fn to_byte(self) -> u8 {
1071        match self {
1072            CellType::Ordinary => 0xff,
1073            CellType::PrunedBranch => 1,
1074            CellType::LibraryReference => 2,
1075            CellType::MerkleProof => 3,
1076            CellType::MerkleUpdate => 4,
1077        }
1078    }
1079
1080    /// Decodes any cell type from byte.
1081    #[inline]
1082    pub const fn from_byte(byte: u8) -> Option<Self> {
1083        Some(match byte {
1084            0xff => CellType::Ordinary,
1085            1 => CellType::PrunedBranch,
1086            2 => CellType::LibraryReference,
1087            3 => CellType::MerkleProof,
1088            4 => CellType::MerkleUpdate,
1089            _ => return None,
1090        })
1091    }
1092
1093    /// Decodes exotic cell type from byte.
1094    #[inline]
1095    pub const fn from_byte_exotic(byte: u8) -> Option<Self> {
1096        Some(match byte {
1097            1 => CellType::PrunedBranch,
1098            2 => CellType::LibraryReference,
1099            3 => CellType::MerkleProof,
1100            4 => CellType::MerkleUpdate,
1101            _ => return None,
1102        })
1103    }
1104}
1105
1106impl From<CellType> for u8 {
1107    #[inline]
1108    fn from(cell_type: CellType) -> u8 {
1109        cell_type.to_byte()
1110    }
1111}
1112
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for CellType {
    /// Picks one of the five cell types.
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        // All variants listed explicitly; keep in sync with `CellType`.
        static ALL_TYPES: [CellType; 5] = [
            CellType::Ordinary,
            CellType::PrunedBranch,
            CellType::LibraryReference,
            CellType::MerkleProof,
            CellType::MerkleUpdate,
        ];

        u.choose(&ALL_TYPES).copied()
    }

    /// Always consumes exactly one byte of entropy.
    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        (1, Some(1))
    }
}
1133
/// Tightly packed info about a cell.
///
/// `d1` layout (low to high bits): reference count (3 bits), `is_exotic`
/// flag, `store_hashes` flag, level mask (3 bits) — see the bit masks on
/// the `impl` block.
///
/// `d2` packs the data length: the byte length is `(d2 >> 1) + (d2 & 1)`,
/// and an odd `d2` marks a cell whose data is not byte-aligned.
#[derive(Hash, Debug, Clone, Copy)]
#[repr(C)]
pub struct CellDescriptor {
    /// First descriptor byte with a generic info about cell.
    pub d1: u8,
    /// Second descriptor byte with a packed data size.
    pub d2: u8,
}
1143
impl CellDescriptor {
    /// Bit mask to store the number of references in the descriptor.
    pub const REF_COUNT_MASK: u8 = 0b0000_0111;
    /// Bit mask to store the `is_exotic` flag in the descriptor.
    pub const IS_EXOTIC_MASK: u8 = 0b0000_1000;
    /// Bit mask to store the `store_hashes` flag in the descriptor.
    pub const STORE_HASHES_MASK: u8 = 0b0001_0000;
    /// _de Bruijn_ level presence mask in the descriptor.
    pub const LEVEL_MASK: u8 = 0b1110_0000;

    /// Computes d1 descriptor byte from parts.
    ///
    /// NOTE: the `store_hashes` flag is always left unset here.
    #[inline(always)]
    pub const fn compute_d1(level_mask: LevelMask, is_exotic: bool, ref_count: u8) -> u8 {
        (level_mask.0 << 5) | ((is_exotic as u8) << 3) | (ref_count & Self::REF_COUNT_MASK)
    }

    /// Computes d2 descriptor byte from cell length in bits.
    ///
    /// Encodes `floor(bit_len / 8) + ceil(bit_len / 8)`: the even part is
    /// twice the whole-byte count, and the low bit is set iff the data is
    /// not byte-aligned (see [`Self::is_aligned`]).
    #[inline(always)]
    pub const fn compute_d2(bit_len: u16) -> u8 {
        (((bit_len >> 2) as u8) & !0b1) | (!bit_len.is_multiple_of(8) as u8)
    }

    /// Constructs cell descriptor from descriptor bytes (`[d1, d2]`).
    #[inline(always)]
    pub const fn new(bytes: [u8; 2]) -> Self {
        Self {
            d1: bytes[0],
            d2: bytes[1],
        }
    }

    /// Creates a new descriptor with a new mask, shifted by the offset.
    ///
    /// Only the level-mask bits of `d1` change; all flags and the
    /// reference count are preserved.
    #[must_use]
    pub const fn virtualize(mut self, offset: u8) -> Self {
        // Shifting d1 right by `offset` and re-masking keeps exactly the
        // virtualized (mask >> offset) bits in the level-mask position.
        let virtualized_mask = (self.d1 >> offset) & Self::LEVEL_MASK;
        self.d1 = virtualized_mask | (self.d1 & !Self::LEVEL_MASK);
        self
    }

    /// Computes cell type.
    ///
    /// Exotic cells are disambiguated by reference count and level:
    /// 0 refs => library (level 0) or pruned branch (level > 0),
    /// 1 ref => Merkle proof, otherwise Merkle update.
    pub fn cell_type(self) -> CellType {
        if self.d1 & Self::IS_EXOTIC_MASK == 0 {
            CellType::Ordinary
        } else {
            match self.d1 & Self::REF_COUNT_MASK {
                0 => {
                    // NOTE: zero mask <=> zero level
                    if self.d1 & Self::LEVEL_MASK == 0 {
                        CellType::LibraryReference
                    } else {
                        CellType::PrunedBranch
                    }
                }
                1 => CellType::MerkleProof,
                _ => CellType::MerkleUpdate,
            }
        }
    }

    /// Computes child cell count.
    #[inline(always)]
    pub const fn reference_count(self) -> u8 {
        self.d1 & Self::REF_COUNT_MASK
    }

    /// Computes hash count.
    ///
    /// NOTE: Guaranteed to be in range 1..=4.
    #[inline(always)]
    pub const fn hash_count(self) -> u8 {
        let level = self.level_mask().level();
        // `exotic + no refs + level > 0` is exactly a pruned branch.
        if self.is_exotic() && self.reference_count() == 0 && level > 0 {
            1 // pruned branch always has 1 hash
        } else {
            level + 1
        }
    }

    /// Returns whether the cell is not [`Ordinary`].
    ///
    /// [`Ordinary`]: CellType::Ordinary
    #[inline(always)]
    pub const fn is_exotic(self) -> bool {
        self.d1 & Self::IS_EXOTIC_MASK != 0
    }

    /// Returns whether this cell is a pruned branch cell.
    #[inline(always)]
    pub const fn is_pruned_branch(self) -> bool {
        self.is_exotic() && self.reference_count() == 0 && !self.level_mask().is_empty()
    }

    /// Returns whether this cell is a library cell.
    #[inline(always)]
    pub const fn is_library(self) -> bool {
        self.is_exotic() && self.reference_count() == 0 && self.level_mask().is_empty()
    }

    /// Returns whether this cell type is Merkle proof or Merkle update.
    #[inline(always)]
    pub const fn is_merkle(self) -> bool {
        self.is_exotic() && self.reference_count() != 0
    }

    /// Returns whether this cell refers to some external data.
    ///
    /// Absent cells are encoded as exotic with all reference bits set
    /// (`d1 == 0b0000_1111`).
    #[inline(always)]
    pub const fn is_absent(self) -> bool {
        self.d1 == (Self::REF_COUNT_MASK | Self::IS_EXOTIC_MASK)
    }

    /// Returns whether this cell should store hashes in data.
    #[inline(always)]
    pub const fn store_hashes(self) -> bool {
        self.d1 & Self::STORE_HASHES_MASK != 0
    }

    /// Computes level mask.
    #[inline(always)]
    pub const fn level_mask(self) -> LevelMask {
        // SAFETY: `u8 >> 5` is always 3 bits long
        unsafe { LevelMask::new_unchecked(self.d1 >> 5) }
    }

    /// Returns whether this cell's data is 8-bit aligned.
    #[inline(always)]
    pub const fn is_aligned(self) -> bool {
        self.d2 & 1 == 0
    }

    /// Returns this cell's data length in bytes.
    ///
    /// Rounds up: `(d2 & 1)` adds one byte for a trailing partial byte.
    #[inline(always)]
    pub const fn byte_len(self) -> u8 {
        (self.d2 & 1) + (self.d2 >> 1)
    }
}
1279
/// _de Bruijn_ level presence bitset.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct LevelMask(u8);

impl LevelMask {
    /// Empty bitset.
    pub const EMPTY: Self = LevelMask(0);
    /// Max _de Bruijn_ level.
    pub const MAX_LEVEL: u8 = 3;

    /// Constructs a new level mask, truncating extra bits.
    #[inline(always)]
    pub const fn new(mask: u8) -> Self {
        // Only the three lowest bits are meaningful.
        Self(mask & 0b111)
    }

    /// Returns true if there are no levels in mask.
    #[inline(always)]
    pub const fn is_empty(self) -> bool {
        self.0 == 0
    }

    /// Constructs a new level mask from the provided byte as is.
    ///
    /// # Safety
    ///
    /// The following must be true:
    /// - Mask must be in range `0b000..=0b111`.
    #[inline(always)]
    pub const unsafe fn new_unchecked(mask: u8) -> Self {
        Self(mask)
    }

    /// Creates a sufficient mask for the specified level.
    ///
    /// NOTE: levels > 3 has no effect (mask will always be `0b111`).
    #[inline(always)]
    pub const fn from_level(level: u8) -> Self {
        // Level `l` requires the `l` lowest bits to be set.
        Self(match level {
            0 => 0b000,
            1 => 0b001,
            2 => 0b011,
            _ => 0b111,
        })
    }

    /// Counts presented higher hashes.
    pub const fn level(self) -> u8 {
        // One level per set bit.
        self.0.count_ones() as u8
    }

    /// Computes hash index for the specified level.
    pub const fn hash_index(self, level: u8) -> u8 {
        // Number of mask bits present at or below the requested level.
        (self.0 & Self::from_level(level).0).count_ones() as u8
    }

    /// Returns whether the specified level is included into the mask.
    pub const fn contains(self, level: u8) -> bool {
        // Level 0 is always present; level `l > 0` maps to bit `l - 1`.
        match level {
            0 => true,
            l => self.0 & (1 << (l - 1)) != 0,
        }
    }

    /// Creates a new mask, shifted by the offset.
    #[inline(always)]
    #[must_use]
    pub const fn virtualize(self, offset: u8) -> Self {
        Self(self.0 >> offset)
    }

    /// Encodes level mask as byte.
    #[inline]
    pub const fn to_byte(self) -> u8 {
        self.0
    }
}
1354
impl IntoIterator for LevelMask {
    type Item = u8;
    type IntoIter = LevelMaskIter;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        // Include zero level: the iterator state stores one bit per level
        // (bit `i` <=> level `i`), so the mask is shifted up by one and
        // bit 0 (level 0, which is always present) is set unconditionally.
        LevelMaskIter(1 | (self.0 << 1))
    }
}
1365
1366impl PartialEq<u8> for LevelMask {
1367    #[inline]
1368    fn eq(&self, other: &u8) -> bool {
1369        self.0 == *other
1370    }
1371}
1372
1373impl BitOr for LevelMask {
1374    type Output = Self;
1375
1376    #[inline(always)]
1377    fn bitor(self, rhs: Self) -> Self::Output {
1378        Self(self.0 | rhs.0)
1379    }
1380}
1381
1382impl BitOrAssign for LevelMask {
1383    #[inline(always)]
1384    fn bitor_assign(&mut self, rhs: Self) {
1385        self.0 |= rhs.0;
1386    }
1387}
1388
1389impl From<LevelMask> for u8 {
1390    #[inline(always)]
1391    fn from(m: LevelMask) -> u8 {
1392        m.0
1393    }
1394}
1395
1396impl std::fmt::Debug for LevelMask {
1397    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1398        f.write_fmt(format_args!("{:03b}", self.0))
1399    }
1400}
1401
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for LevelMask {
    /// Samples a mask uniformly in `0b000..=0b111`.
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        u.int_in_range(0b000..=0b111).map(Self::new)
    }

    /// Always consumes exactly one byte of entropy.
    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        (1, Some(1))
    }
}
1413
/// _de Bruijn_ level presence bitset iterator.
///
/// Yields the index of every set bit of the inner byte, lowest first.
#[derive(Clone)]
pub struct LevelMaskIter(u8);

impl LevelMaskIter {
    /// Returns `true` if all levels were consumed.
    pub fn is_empty(&self) -> bool {
        self.0 == 0
    }
}

impl Iterator for LevelMaskIter {
    type Item = u8;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.0 == 0 {
            return None;
        }
        // The index of the lowest set bit is the next level to yield.
        let level = self.0.trailing_zeros() as u8;
        // Clear the lowest set bit: `x & (x - 1)`.
        self.0 &= self.0 - 1;
        Some(level)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: one item per remaining set bit.
        let remaining = self.0.count_ones() as usize;
        (remaining, Some(remaining))
    }
}
1448
/// A size of a cell.
#[derive(Debug, Default, Clone, Copy, Eq, PartialEq)]
pub struct Size {
    /// Total number of bits in cell slice.
    pub bits: u16,
    /// Total number of refs in cell slice.
    pub refs: u8,
}

impl Size {
    /// The additive identity for this type, i.e. `0`.
    pub const ZERO: Self = Self { bits: 0, refs: 0 };

    /// The multiplicative bits identity for this type, i.e. `1 bit`.
    pub const BIT: Self = Self { bits: 1, refs: 0 };

    /// The multiplicative refs identity for this type, i.e. `1 ref`.
    pub const REF: Self = Self { bits: 0, refs: 1 };

    /// The largest valid value that can be represented by this type
    /// (bounded by `MAX_BIT_LEN` / `MAX_REF_COUNT`).
    pub const MAX: Self = Self {
        bits: MAX_BIT_LEN,
        refs: MAX_REF_COUNT as _,
    };

    /// Returns true if the number of bits and refs is in the valid range for the cell.
    #[inline]
    pub const fn fits_into_cell(&self) -> bool {
        self.bits <= MAX_BIT_LEN && self.refs <= MAX_REF_COUNT as _
    }

    /// Saturating size addition. Computes self + rhs for bits and refs,
    /// saturating at the numeric bounds instead of overflowing.
    #[inline]
    pub const fn saturating_add(self, rhs: Self) -> Self {
        Self {
            bits: self.bits.saturating_add(rhs.bits),
            refs: self.refs.saturating_add(rhs.refs),
        }
    }

    /// Saturating size subtraction. Computes self - rhs for bits and refs,
    /// saturating at the numeric bounds instead of overflowing.
    #[inline]
    pub const fn saturating_sub(self, rhs: Self) -> Self {
        Self {
            bits: self.bits.saturating_sub(rhs.bits),
            refs: self.refs.saturating_sub(rhs.refs),
        }
    }

    /// Returns true if there are no bits and refs.
    pub const fn is_zero(self) -> bool {
        self.bits == 0 && self.refs == 0
    }
}
1505
impl From<Size> for CellTreeStats {
    /// Widens a per-cell [`Size`] into tree-level stats
    /// (`bits -> bit_count`, `refs -> cell_count`).
    #[inline]
    fn from(value: Size) -> Self {
        Self {
            bit_count: value.bits as _,
            cell_count: value.refs as _,
        }
    }
}

impl std::ops::Add for Size {
    type Output = Self;

    #[inline]
    fn add(mut self, rhs: Self) -> Self::Output {
        self += rhs;
        self
    }
}

impl std::ops::AddAssign for Size {
    #[inline]
    fn add_assign(&mut self, rhs: Self) {
        // NOTE: unchecked `+=` (unlike `Size::saturating_add`): overflow
        // panics in debug builds and wraps in release.
        self.bits += rhs.bits;
        self.refs += rhs.refs;
    }
}

impl std::ops::Sub for Size {
    type Output = Self;

    #[inline]
    fn sub(mut self, rhs: Self) -> Self::Output {
        self -= rhs;
        self
    }
}

impl std::ops::SubAssign for Size {
    #[inline]
    fn sub_assign(&mut self, rhs: Self) {
        // NOTE: unchecked `-=` (unlike `Size::saturating_sub`): underflow
        // panics in debug builds and wraps in release.
        self.bits -= rhs.bits;
        self.refs -= rhs.refs;
    }
}
1551
/// Cell tree storage stats.
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub struct CellTreeStats {
    /// Total number of bits in tree.
    pub bit_count: u64,
    /// Total number of cells in tree.
    pub cell_count: u64,
}

impl CellTreeStats {
    /// The additive identity for this type, i.e. `0`.
    pub const ZERO: Self = Self {
        bit_count: 0,
        cell_count: 0,
    };
}
1568
1569impl std::ops::Add for CellTreeStats {
1570    type Output = Self;
1571
1572    #[inline]
1573    fn add(self, rhs: Self) -> Self::Output {
1574        Self {
1575            bit_count: self.bit_count.saturating_add(rhs.bit_count),
1576            cell_count: self.cell_count.saturating_add(rhs.cell_count),
1577        }
1578    }
1579}
1580
1581impl std::ops::AddAssign for CellTreeStats {
1582    #[inline]
1583    fn add_assign(&mut self, rhs: Self) {
1584        self.bit_count = self.bit_count.saturating_add(rhs.bit_count);
1585        self.cell_count = self.cell_count.saturating_add(rhs.cell_count);
1586    }
1587}
1588
1589impl std::ops::Add<Size> for CellTreeStats {
1590    type Output = Self;
1591
1592    #[inline]
1593    fn add(self, rhs: Size) -> Self::Output {
1594        Self {
1595            bit_count: self.bit_count.saturating_add(rhs.bits as _),
1596            cell_count: self.cell_count.saturating_add(rhs.refs as _),
1597        }
1598    }
1599}
1600
1601impl std::iter::Sum for CellTreeStats {
1602    #[inline]
1603    fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
1604        let mut res = Self::ZERO;
1605        for item in iter {
1606            res += item;
1607        }
1608        res
1609    }
1610}
1611
1612impl std::ops::AddAssign<Size> for CellTreeStats {
1613    fn add_assign(&mut self, rhs: Size) {
1614        self.bit_count = self.bit_count.saturating_add(rhs.bits as _);
1615        self.cell_count = self.cell_count.saturating_add(rhs.refs as _);
1616    }
1617}
1618
1619impl std::ops::Sub for CellTreeStats {
1620    type Output = Self;
1621
1622    #[inline]
1623    fn sub(mut self, rhs: Self) -> Self::Output {
1624        self -= rhs;
1625        self
1626    }
1627}
1628
1629impl std::ops::SubAssign for CellTreeStats {
1630    #[inline]
1631    fn sub_assign(&mut self, rhs: Self) {
1632        self.bit_count = self.bit_count.saturating_sub(rhs.bit_count);
1633        self.cell_count = self.cell_count.saturating_sub(rhs.cell_count);
1634    }
1635}
1636
1637impl std::ops::SubAssign<Size> for CellTreeStats {
1638    #[inline]
1639    fn sub_assign(&mut self, rhs: Size) {
1640        self.bit_count = self.bit_count.saturating_sub(rhs.bits as _);
1641        self.cell_count = self.cell_count.saturating_sub(rhs.refs as _);
1642    }
1643}
1644
/// A helper to track the size of the unique data in multiple cell trees.
///
/// NOTE: It uses hashes for deduplication, so you can only use it for
/// fully computed and valid trees.
pub struct StorageStat<'a> {
    // Repr hashes of cells that were already counted;
    // prevents counting shared subtrees more than once.
    visited: ahash::HashSet<&'a HashBytes>,
    // Explicit DFS stack of pending child iterators (avoids recursion).
    stack: Vec<RefsIter<'a>>,
    // Stats accumulated so far.
    stats: CellTreeStats,
    // Maximum number of distinct cells to count before giving up.
    limit: usize,
}
1655
impl<'a> StorageStat<'a> {
    /// Recursively computes the count of distinct cells returning
    /// the total storage used by this dag taking into account the
    /// identification of equal cells.
    ///
    /// Root slice does not count as cell. A slice subrange of
    /// cells is used during computation.
    ///
    /// Returns `None` if the cell `limit` was reached.
    pub fn compute_for_slice<'b: 'a>(
        slice: &'a CellSlice<'b>,
        limit: usize,
    ) -> Option<CellTreeStats> {
        let mut this = Self::with_limit(limit);
        if this.add_slice(slice) {
            Some(this.stats)
        } else {
            None
        }
    }

    /// Recursively computes the count of distinct cells returning
    /// the total storage used by this dag taking into account the
    /// identification of equal cells.
    ///
    /// Returns `None` if the cell `limit` was reached.
    pub fn compute_for_cell(cell: &'a DynCell, limit: usize) -> Option<CellTreeStats> {
        let mut this = Self::with_limit(limit);
        if this.add_cell(cell) {
            Some(this.stats)
        } else {
            None
        }
    }

    /// Creates a new storage stat state with an explicit limit.
    pub fn with_limit(limit: usize) -> Self {
        Self {
            visited: Default::default(),
            stack: Vec::new(),
            stats: CellTreeStats::ZERO,
            limit,
        }
    }

    /// Creates a new storage stat state without a limit.
    pub fn unlimited() -> Self {
        Self::with_limit(usize::MAX)
    }

    /// Returns the current tree stats.
    pub fn stats(&self) -> CellTreeStats {
        self.stats
    }

    /// Merges current stats with the stats from the provided cell tree.
    ///
    /// Returns `false` if the limit was reached.
    pub fn add_cell(&mut self, cell: &'a DynCell) -> bool {
        // Already counted this cell (by repr hash) — nothing to do.
        if !self.visited.insert(cell.repr_hash()) {
            return true;
        }

        // NOTE(review): the root cell is counted before any limit check,
        // so even `limit == 0` admits one cell — confirm this is intended.
        self.stats.bit_count += cell.bit_len() as u64;
        self.stats.cell_count += 1;

        self.stack.clear();
        self.stack.push(cell.references());
        self.reduce_stack()
    }

    /// Merges current stats with the stats from the provided slice.
    ///
    /// The slice itself contributes only its bits, not a cell.
    ///
    /// Returns `false` if the limit was reached.
    pub fn add_slice(&mut self, slice: &CellSlice<'a>) -> bool {
        self.stats.bit_count += slice.size_bits() as u64;

        self.stack.clear();
        self.stack.push(slice.references());
        self.reduce_stack()
    }

    // Iterative DFS over the pending child iterators on `self.stack`.
    // Returns `false` as soon as the distinct-cell limit is reached.
    fn reduce_stack(&mut self) -> bool {
        'outer: while let Some(item) = self.stack.last_mut() {
            for cell in item.by_ref() {
                // Deduplicate by repr hash.
                if !self.visited.insert(cell.repr_hash()) {
                    continue;
                }

                // Limit is checked before counting each new distinct cell.
                if self.stats.cell_count >= self.limit as u64 {
                    return false;
                }

                self.stats.bit_count += cell.bit_len() as u64;
                self.stats.cell_count += 1;

                let next = cell.references();
                // Descend only when the child has references of its own.
                // NOTE(review): presumably `max` is the child's ref count —
                // verify against `RefsIter`.
                if next.max > 0 {
                    self.stack.push(next);
                    continue 'outer;
                }
            }

            // Current iterator exhausted — backtrack.
            self.stack.pop();
        }

        true
    }
}
1761
/// Helper struct to debug print the root cell.
#[derive(Clone, Copy)]
pub struct DebugCell<'a>(&'a DynCell);

impl std::fmt::Debug for DebugCell<'_> {
    /// Delegates formatting to the wrapped cell.
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.0.fmt(f)
    }
}
1772
/// Helper struct to print only the root cell in the cell tree.
#[derive(Clone, Copy)]
pub struct DisplayCellRoot<'a> {
    // Cell to print.
    cell: &'a DynCell,
    // Nesting depth; output is indented by `2 * level` spaces.
    level: usize,
}

impl std::fmt::Display for DisplayCellRoot<'_> {
    /// Prints the cell data as hex. With the alternate flag (`{:#}`) only
    /// the data line is printed; otherwise a summary line with the cell
    /// type, bit length, ref count, level mask, depth and repr hash follows.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // TODO: encode on stack
        let data = hex::encode(self.cell.data());

        let indent = self.level * 2;
        if f.alternate() {
            f.write_fmt(format_args!("{:indent$}{data}\n", ""))
        } else {
            let repr_depth = self.cell.depth(LevelMask::MAX_LEVEL);
            let repr_hash = self.cell.repr_hash();
            let descriptor = self.cell.descriptor();
            f.write_fmt(format_args!(
                "{:indent$}{:?}: {data}\n{:indent$}bits: {:>4}, refs: {}, l: {:?}, depth: {}, hash: {}\n",
                "",
                descriptor.cell_type(),
                "",
                self.cell.bit_len(),
                descriptor.reference_count(),
                descriptor.level_mask(),
                repr_depth,
                repr_hash,
            ))
        }
    }
}
1806
/// Helper struct to print all cells in the cell tree.
#[derive(Clone, Copy)]
pub struct DisplayCellTree<'a>(&'a DynCell);

impl std::fmt::Display for DisplayCellTree<'_> {
    /// Prints every cell depth-first, indenting children by their depth.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Explicit DFS stack of (indent level, cell).
        let mut stack = vec![(0, self.0)];

        while let Some((level, cell)) = stack.pop() {
            // NOTE(review): `ok!` is a project macro — presumably an
            // early-return on `Err`, like `?`.
            ok!(std::fmt::Display::fmt(&DisplayCellRoot { cell, level }, f));

            let reference_count = cell.reference_count();
            // Push children in reverse so they are popped (printed) in order.
            for i in (0..reference_count).rev() {
                if let Some(child) = cell.reference(i) {
                    stack.push((level + 1, child));
                }
            }
        }

        Ok(())
    }
}
1829
/// Max cell data capacity in bits (see [`Size::fits_into_cell`]).
pub const MAX_BIT_LEN: u16 = 1023;
/// Maximum number of child cells (see [`Size::fits_into_cell`]).
pub const MAX_REF_COUNT: usize = 4;
1834
#[cfg(test)]
mod tests {
    use super::*;

    // `LevelMask::level` must equal the popcount of the 3-bit mask.
    #[test]
    fn correct_level() {
        const LEVEL: [u8; 8] = [0, 1, 1, 2, 1, 2, 2, 3];

        for mask in 0b000..=0b111 {
            assert_eq!(LevelMask(mask).level(), LEVEL[mask as usize]);
        }
    }

    // Virtualizing a descriptor must shift only the level mask;
    // every other d1/d2 field stays intact.
    #[test]
    fn virtualize_descriptor() {
        let level_mask = LevelMask(0b111);
        let desc = CellDescriptor::new([
            CellDescriptor::compute_d1(level_mask, false, 3),
            CellDescriptor::compute_d2(123),
        ]);

        assert_eq!(desc.level_mask(), level_mask);

        for i in 0..3 {
            let v_desc = desc.virtualize(i);

            assert_eq!(v_desc.cell_type(), desc.cell_type());
            assert_eq!(v_desc.reference_count(), desc.reference_count());
            assert_eq!(v_desc.is_exotic(), desc.is_exotic());
            assert_eq!(v_desc.store_hashes(), desc.store_hashes());
            assert_eq!(v_desc.is_aligned(), desc.is_aligned());
            assert_eq!(v_desc.byte_len(), desc.byte_len());

            assert_eq!(v_desc.level_mask(), level_mask.virtualize(i));
        }
    }

    // `hash_index(level)` counts mask bits at or below `level`;
    // expected values enumerated for every (mask, level) pair.
    #[test]
    fn correct_hash_index() {
        const HASH_INDEX_TABLE: [[u8; 4]; 8] = [
            // index      // mask
            [0, 0, 0, 0], // 000
            [0, 1, 1, 1], // 001
            [0, 0, 1, 1], // 010
            [0, 1, 2, 2], // 011
            [0, 0, 0, 1], // 100
            [0, 1, 1, 2], // 101
            [0, 0, 1, 2], // 110
            [0, 1, 2, 3], // 111
        ];

        for mask in 0b000..=0b111 {
            let mask = LevelMask(mask);

            for i in 0..=3 {
                let hash_index = mask.hash_index(i);
                assert_eq!(hash_index, HASH_INDEX_TABLE[mask.0 as usize][i as usize]);
            }
        }
    }

    // Stacks pruned branches three levels deep, then peels one level per
    // `virtualize()` call; each step must match the corresponding original.
    #[test]
    fn ultra_virtual_cell_by_ref() {
        let cell = Cell::empty_cell();

        let pruned1 =
            crate::merkle::make_pruned_branch(cell.as_ref(), 0, Cell::empty_context()).unwrap();

        let pruned2 =
            crate::merkle::make_pruned_branch(pruned1.as_ref(), 1, Cell::empty_context()).unwrap();

        let pruned3 =
            crate::merkle::make_pruned_branch(pruned2.as_ref(), 2, Cell::empty_context()).unwrap();

        // Level 3 -> 2
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), pruned2.repr_hash());
        assert_eq!(pruned3.repr_depth(), pruned2.repr_depth());

        // Level 2 -> 1
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), pruned1.repr_hash());
        assert_eq!(pruned3.repr_depth(), pruned1.repr_depth());

        // Level 1 -> 0
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());

        // Level 0 -> 0
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());

        // Level 0 -> 0 (just in case)
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());
    }

    // Derived Store/Load with versioned tags: each tag enables the fields
    // marked with `since_tag` up to that version; round-trips must hold.
    #[test]
    fn versioned_store_load() {
        #[derive(Debug, Clone, Copy, Eq, PartialEq, Store, Load)]
        #[tlb(tag = ["#12", "#34", "#56"])]
        struct Versioned {
            old_field1: u32,
            #[tlb(since_tag = 1)]
            new_field1: u32,
            old_field2: bool,
            #[tlb(since_tag = 2)]
            new_field2: bool,
        }

        // Only pre-tag-1 fields set => serialized with the first tag (0x12).
        let old = Versioned {
            old_field1: 123,
            new_field1: 0,
            old_field2: true,
            new_field2: false,
        };
        let cell = CellBuilder::build_from(old).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x12);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert!(slice.load_bit().unwrap());
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            old,
        );

        // `new_field1` set => second tag (0x34) and its extra u32.
        let new = Versioned {
            old_field1: 123,
            new_field1: 456,
            old_field2: true,
            new_field2: false,
        };
        let cell = CellBuilder::build_from(new).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 32 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x34);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert_eq!(slice.load_u32().unwrap(), 456);
            assert!(slice.load_bit().unwrap());
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            new
        );

        // `new_field2` set => third tag (0x56) and all fields serialized.
        let too_new = Versioned {
            old_field1: 123,
            new_field1: 0,
            old_field2: true,
            new_field2: true,
        };
        let cell = CellBuilder::build_from(too_new).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 32 + 1 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x56);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert_eq!(slice.load_u32().unwrap(), 0);
            assert!(slice.load_bit().unwrap());
            assert!(slice.load_bit().unwrap());
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            too_new
        );
    }
}