tycho_types/cell/
mod.rs

1//! Cell tree implementation.
2
3use std::ops::{BitOr, BitOrAssign};
4use std::str::FromStr;
5
6pub use tycho_types_proc::{Load, Store};
7
8pub use self::builder::{
9    CellBuilder, CellDataBuilder, CellRefsBuilder, DisplayCellBuilderData, Store,
10};
11pub use self::cell_context::{CellContext, CellParts, LoadMode};
12#[cfg(not(feature = "sync"))]
13pub use self::cell_impl::rc::{Cell, CellInner, WeakCell};
14#[cfg(feature = "sync")]
15pub use self::cell_impl::sync::{Cell, CellInner, WeakCell};
16pub use self::cell_impl::{SafeDeleter, StaticCell, VirtualCellWrapper};
17pub use self::lazy::{Lazy, LazyExotic};
18pub use self::slice::{
19    CellSlice, CellSliceParts, CellSliceRange, DisplayCellSliceData, ExactSize, Load, LoadCell,
20};
21pub use self::usage_tree::{UsageTree, UsageTreeMode, UsageTreeWithSubtrees};
22use crate::error::{Error, ParseHashBytesError};
23use crate::util::Bitstring;
24
/// Generic cell implementation.
mod cell_impl;

/// Traits for gas accounting and resolving exotic cells.
mod cell_context;

/// Cell view utils.
mod slice;

/// Cell creation utils.
mod builder;

/// Lazy-loaded cell data.
mod lazy;

/// Cell usage tracking utils.
mod usage_tree;
41
#[cfg(feature = "sync")]
#[doc(hidden)]
mod __checks {
    use super::*;

    // Compile-time guarantees: with the `sync` cell implementation the
    // primary cell types must be `Send` (they are shared across threads).
    assert_impl_all!(Cell: Send);
    assert_impl_all!(CellSlice: Send);
    assert_impl_all!(CellBuilder: Send);
}
51
/// Marker trait which allows casting lazy-loaded data.
pub trait EquivalentRepr<T> {}

/// Every type is equivalent to itself.
impl<T> EquivalentRepr<T> for T {}
57
/// Cell implementation family.
pub trait CellFamily: Sized {
    /// The default cell context type.
    type EmptyCellContext: CellContext;

    /// Creates an empty cell.
    ///
    /// NOTE: in most cases empty cell is ZST.
    fn empty_cell() -> Cell;

    /// Returns a static reference to the empty cell.
    fn empty_cell_ref() -> &'static DynCell;

    /// Creates an empty cell context.
    fn empty_context() -> &'static Self::EmptyCellContext;

    /// Returns a static reference to the cell with all zeros.
    fn all_zeros_ref() -> &'static DynCell;

    /// Returns a static reference to the cell with all ones.
    fn all_ones_ref() -> &'static DynCell;

    /// Creates a virtualized cell from the specified cell.
    fn virtualize(cell: Cell) -> Cell;
}
83
/// Dyn trait type alias.
#[cfg(not(feature = "sync"))]
pub type DynCell = dyn CellImpl;

/// Dyn trait type alias.
///
/// With the `sync` feature the trait object is additionally required to be
/// `Send + Sync` so cells can be shared across threads.
#[cfg(feature = "sync")]
pub type DynCell = dyn CellImpl + Send + Sync;

// Identity conversions so `&DynCell` can be passed to `impl AsRef<DynCell>` APIs.
impl AsRef<DynCell> for DynCell {
    #[inline(always)]
    fn as_ref(&self) -> &Self {
        self
    }
}

impl AsMut<DynCell> for DynCell {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut Self {
        self
    }
}
105
/// Represents the interface of a well-formed cell.
///
/// Since all basic operations are implemented via dynamic dispatch,
/// all high-level helper methods are implemented for `dyn Cell`.
pub trait CellImpl {
    /// Unwraps the root cell from the usage tracking.
    fn untrack(self: CellInner<Self>) -> Cell;

    /// Returns cell descriptor.
    ///
    /// # See also
    ///
    /// Cell descriptor contains some tightly packed info about the cell.
    /// If you want convenient methods to access it use:
    /// [`cell_type`], [`level_mask`], [`reference_count`], [`is_exotic`]
    ///
    /// [`cell_type`]: CellDescriptor::cell_type
    /// [`level_mask`]: CellDescriptor::level_mask
    /// [`reference_count`]: CellDescriptor::reference_count
    /// [`is_exotic`]: CellDescriptor::is_exotic
    fn descriptor(&self) -> CellDescriptor;

    /// Returns the raw data of this cell.
    fn data(&self) -> &[u8];

    /// Returns the data size of this cell in bits.
    fn bit_len(&self) -> u16;

    /// Returns a reference to the Nth child cell.
    fn reference(&self, index: u8) -> Option<&DynCell>;

    /// Returns the Nth child cell.
    fn reference_cloned(&self, index: u8) -> Option<Cell>;

    /// Returns this cell as a virtualized cell, so that all hashes
    /// and depths will have an offset.
    fn virtualize(&self) -> &DynCell;

    /// Returns cell hash for the specified level.
    ///
    /// Cell representation hash is the hash at the maximum level ([`LevelMask::MAX_LEVEL`]).
    /// Use `repr_hash` as a simple alias for this.
    fn hash(&self, level: u8) -> &HashBytes;

    /// Returns cell depth for the specified level.
    fn depth(&self, level: u8) -> u16;
}
153
impl DynCell {
    /// Computes cell type from descriptor bytes.
    #[inline]
    pub fn cell_type(&self) -> CellType {
        self.descriptor().cell_type()
    }

    /// Computes the cell level from the level mask.
    #[inline]
    pub fn level(&self) -> u8 {
        self.descriptor().level_mask().level()
    }

    /// Computes the level mask from the descriptor bytes.
    #[inline]
    pub fn level_mask(&self) -> LevelMask {
        self.descriptor().level_mask()
    }

    /// Computes the number of child cells from descriptor bytes.
    #[inline]
    pub fn reference_count(&self) -> u8 {
        self.descriptor().reference_count()
    }

    /// Tries to load the specified child cell as slice.
    /// Returns an error if the loaded cell is absent or is pruned.
    pub fn get_reference_as_slice(&self, index: u8) -> Result<CellSlice<'_>, Error> {
        match self.reference(index) {
            Some(cell) => CellSlice::new(cell),
            // Missing child reference is reported as an underflow.
            None => Err(Error::CellUnderflow),
        }
    }

    /// Returns whether the cell is not [`Ordinary`].
    ///
    /// [`Ordinary`]: CellType::Ordinary
    #[inline]
    pub fn is_exotic(&self) -> bool {
        self.descriptor().is_exotic()
    }

    /// Returns a representation hash of the cell.
    #[inline]
    pub fn repr_hash(&self) -> &HashBytes {
        self.hash(LevelMask::MAX_LEVEL)
    }

    /// Returns a representation depth of the cell.
    #[inline]
    pub fn repr_depth(&self) -> u16 {
        self.depth(LevelMask::MAX_LEVEL)
    }

    /// Returns `true` if any of cell levels has the maximum depth.
    pub fn has_max_depth(&self) -> bool {
        for level in self.descriptor().level_mask() {
            if self.depth(level) == u16::MAX {
                return true;
            }
        }
        false
    }

    /// Returns true if the cell is empty (no bits, no refs).
    pub fn is_empty(&self) -> bool {
        // Comparing the representation hash against the known hash of an
        // empty ordinary cell is sufficient: equal hashes imply equal cells.
        self.hash(LevelMask::MAX_LEVEL) == EMPTY_CELL_HASH
    }

    /// Creates an iterator through child nodes.
    #[inline]
    pub fn references(&self) -> RefsIter<'_> {
        RefsIter {
            cell: self,
            max: self.reference_count(),
            index: 0,
        }
    }

    /// Returns this cell as a cell slice.
    /// Returns an error if the cell is not ordinary.
    #[inline]
    pub fn as_slice(&'_ self) -> Result<CellSlice<'_>, Error> {
        CellSlice::new(self)
    }

    /// Returns this cell as a cell slice.
    ///
    /// Loads cell as is.
    #[inline]
    pub fn as_slice_allow_exotic(&'_ self) -> CellSlice<'_> {
        CellSlice::new_allow_exotic(self)
    }

    /// Recursively computes the count of distinct cells returning
    /// the total storage used by this dag taking into account the
    /// identification of equal cells.
    pub fn compute_unique_stats(&self, limit: usize) -> Option<CellTreeStats> {
        StorageStat::compute_for_cell(self, limit)
    }

    /// Recursively traverses the cells tree without tracking a uniqueness
    /// of cells. Useful for adding small subtrees to merkle proofs.
    pub fn touch_recursive(&self) {
        self.data(); // Touch data to include it if `OnDataAccess` is used.
        for child in self.references() {
            child.touch_recursive();
        }
    }

    /// Returns an object that implements [`Debug`] for printing only
    /// the root cell of the cell tree.
    ///
    /// [`Debug`]: std::fmt::Debug
    #[inline]
    pub fn debug_root(&'_ self) -> DebugCell<'_> {
        DebugCell(self)
    }

    /// Returns an object that implements [`Display`] for printing only
    /// the root cell of the cell tree.
    ///
    /// [`Display`]: std::fmt::Display
    #[inline]
    pub fn display_root(&'_ self) -> DisplayCellRoot<'_> {
        DisplayCellRoot {
            cell: self,
            level: 0,
        }
    }

    /// Returns an object that implements [`Display`] for printing all
    /// cells in the cell tree.
    ///
    /// [`Display`]: std::fmt::Display
    #[inline]
    pub fn display_tree(&'_ self) -> DisplayCellTree<'_> {
        DisplayCellTree(self)
    }

    /// Returns an object which will display cell data as a bitstring
    /// with a termination bit.
    #[inline]
    pub fn display_data(&self) -> impl std::fmt::Display + std::fmt::Binary + '_ {
        // Local adapter that borrows the cell and formats its data bits.
        struct DisplayData<'a>(&'a DynCell);

        impl std::fmt::Display for DisplayData<'_> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                std::fmt::Display::fmt(
                    &Bitstring {
                        bytes: self.0.data(),
                        bit_len: self.0.bit_len(),
                    },
                    f,
                )
            }
        }

        impl std::fmt::Binary for DisplayData<'_> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                std::fmt::Binary::fmt(
                    &Bitstring {
                        bytes: self.0.data(),
                        bit_len: self.0.bit_len(),
                    },
                    f,
                )
            }
        }

        DisplayData(self)
    }

    /// Converts this cell into a slice and tries to load the specified type from it.
    /// Fails if the cell is not ordinary.
    ///
    /// NOTE: parsing `Cell` will load the first reference!
    #[inline]
    pub fn parse<'a, T: Load<'a>>(&'a self) -> Result<T, Error> {
        T::load_from(&mut ok!(self.as_slice()))
    }

    /// Loads an exotic cell.
    ///
    /// Returns [`Error::UnexpectedOrdinaryCell`] when called on an ordinary cell.
    #[inline]
    pub fn parse_exotic<'a, T: LoadCell<'a>>(&'a self) -> Result<T, Error> {
        if self.is_exotic() {
            T::load_from_cell(self)
        } else {
            Err(Error::UnexpectedOrdinaryCell)
        }
    }
}
346
impl std::fmt::Debug for DynCell {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Prints only the root cell type and representation hash,
        // not the whole tree (use `display_tree` for that).
        crate::util::debug_struct_field2_finish(
            f,
            "Cell",
            "ty",
            &self.cell_type(),
            "hash",
            self.repr_hash(),
        )
    }
}
359
impl Eq for DynCell {}

impl PartialEq<DynCell> for DynCell {
    #[inline]
    fn eq(&self, other: &DynCell) -> bool {
        // Cells are content-addressed: equal representation hashes
        // are treated as equal cell trees.
        self.repr_hash() == other.repr_hash()
    }
}
368
#[cfg(feature = "serde")]
impl serde::Serialize for DynCell {
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        // Cells are serialized as a BOC: base64 string for human-readable
        // formats (e.g. JSON), raw bytes otherwise.
        let boc = crate::boc::Boc::encode(self);
        if serializer.is_human_readable() {
            serializer.serialize_str(&crate::util::encode_base64(boc))
        } else {
            serializer.serialize_bytes(&boc)
        }
    }
}
380
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for Cell {
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        // NOTE(review): assumes an arbitrary `CellBuilder` always builds
        // successfully — `unwrap` would panic otherwise; TODO confirm.
        Ok(u.arbitrary::<CellBuilder>()?.build().unwrap())
    }

    #[inline]
    fn size_hint(depth: usize) -> (usize, Option<usize>) {
        Self::try_size_hint(depth).unwrap_or_default()
    }

    #[inline]
    fn try_size_hint(
        depth: usize,
    ) -> Result<(usize, Option<usize>), arbitrary::MaxRecursionReached> {
        // Delegates to the builder since a cell is built from one.
        <CellBuilder as arbitrary::Arbitrary>::try_size_hint(depth)
    }
}
400
impl From<Cell> for CellSliceParts {
    #[inline]
    fn from(value: Cell) -> Self {
        // A full-range slice over the whole cell.
        (CellSliceRange::full(&value), value)
    }
}
407
/// An iterator through child nodes.
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct RefsIter<'a> {
    // Parent cell whose children are iterated.
    cell: &'a DynCell,
    // Exclusive upper bound (back cursor for reverse iteration).
    max: u8,
    // Next index to yield from the front.
    index: u8,
}
415
416impl<'a> RefsIter<'a> {
417    /// Returns a cell by children of which we are iterating.
418    #[inline]
419    pub fn cell(&self) -> &'a DynCell {
420        self.cell
421    }
422
423    /// Returns a reference to the next() value without advancing the iterator.
424    #[inline]
425    pub fn peek(&self) -> Option<&'a DynCell> {
426        if self.index >= self.max {
427            None
428        } else {
429            self.cell.reference(self.index)
430        }
431    }
432
433    /// Returns a cloned reference to the next() value without advancing the iterator.
434    #[inline]
435    pub fn peek_cloned(&self) -> Option<Cell> {
436        if self.index >= self.max {
437            None
438        } else {
439            self.cell.reference_cloned(self.index)
440        }
441    }
442
443    /// Returns a reference to the next_back() value without advancing the iterator.
444    #[inline]
445    pub fn peek_prev(&self) -> Option<&'a DynCell> {
446        if self.index > 0 {
447            self.cell.reference(self.index - 1)
448        } else {
449            None
450        }
451    }
452
453    /// Returns a cloned reference to the next_back() value without advancing the iterator.
454    #[inline]
455    pub fn peek_prev_cloned(&self) -> Option<Cell> {
456        if self.index > 0 {
457            self.cell.reference_cloned(self.index - 1)
458        } else {
459            None
460        }
461    }
462
463    /// Creates an iterator through child nodes which produces cloned references.
464    #[inline]
465    pub fn cloned(self) -> ClonedRefsIter<'a> {
466        ClonedRefsIter { inner: self }
467    }
468}
469
470impl Clone for RefsIter<'_> {
471    #[inline]
472    fn clone(&self) -> Self {
473        Self {
474            cell: self.cell,
475            max: self.max,
476            index: self.index,
477        }
478    }
479}
480
481impl<'a> Iterator for RefsIter<'a> {
482    type Item = &'a DynCell;
483
484    #[inline]
485    fn next(&mut self) -> Option<Self::Item> {
486        if self.index >= self.max {
487            None
488        } else {
489            let child = self.cell.reference(self.index);
490            self.index += 1;
491            child
492        }
493    }
494
495    #[inline]
496    fn size_hint(&self) -> (usize, Option<usize>) {
497        let remaining = self.max.saturating_sub(self.index) as usize;
498        (remaining, Some(remaining))
499    }
500}
501
502impl DoubleEndedIterator for RefsIter<'_> {
503    #[inline]
504    fn next_back(&mut self) -> Option<Self::Item> {
505        if self.max > self.index {
506            self.max -= 1;
507            self.cell.reference(self.max)
508        } else {
509            None
510        }
511    }
512}
513
impl ExactSizeIterator for RefsIter<'_> {
    #[inline]
    fn len(&self) -> usize {
        // `size_hint` is exact for this iterator (lower == upper bound).
        self.size_hint().0
    }
}
520
/// An iterator through child nodes which produces cloned references.
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct ClonedRefsIter<'a> {
    // Borrowing iterator this adapter delegates to.
    inner: RefsIter<'a>,
}

impl<'a> ClonedRefsIter<'a> {
    /// Returns a cell by children of which we are iterating.
    #[inline]
    pub fn cell(&self) -> &'a DynCell {
        self.inner.cell
    }

    /// Returns a reference to the next() value without advancing the iterator.
    #[inline]
    pub fn peek(&self) -> Option<Cell> {
        self.inner.peek_cloned()
    }

    /// Returns a reference to the next_back() value without advancing the iterator.
    #[inline]
    pub fn peek_prev(&self) -> Option<Cell> {
        self.inner.peek_prev_cloned()
    }
}
546
impl Clone for ClonedRefsIter<'_> {
    #[inline]
    fn clone(&self) -> Self {
        // Delegates to the inner iterator's (cheap, field-wise) clone.
        Self {
            inner: self.inner.clone(),
        }
    }
}
555
556impl Iterator for ClonedRefsIter<'_> {
557    type Item = Cell;
558
559    #[inline]
560    fn next(&mut self) -> Option<Self::Item> {
561        if self.inner.index >= self.inner.max {
562            None
563        } else {
564            let child = self.inner.cell.reference_cloned(self.inner.index);
565            self.inner.index += 1;
566            child
567        }
568    }
569
570    #[inline]
571    fn size_hint(&self) -> (usize, Option<usize>) {
572        self.inner.size_hint()
573    }
574}
575
576impl DoubleEndedIterator for ClonedRefsIter<'_> {
577    #[inline]
578    fn next_back(&mut self) -> Option<Self::Item> {
579        if self.inner.max > self.inner.index {
580            self.inner.max -= 1;
581            self.inner.cell.reference_cloned(self.inner.max)
582        } else {
583            None
584        }
585    }
586}
587
impl ExactSizeIterator for ClonedRefsIter<'_> {
    #[inline]
    fn len(&self) -> usize {
        // `size_hint` is exact for this iterator (lower == upper bound).
        self.size_hint().0
    }
}
594
/// Type alias for a cell hash.
// `repr(transparent)` guarantees identical layout to `[u8; 32]`,
// which `HashBytes::wrap` relies on for its pointer cast.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[repr(transparent)]
pub struct HashBytes(pub [u8; 32]);
599
impl HashBytes {
    /// Array of zero bytes.
    pub const ZERO: Self = Self([0; 32]);

    /// Converts slice to a hash bytes.
    ///
    /// # Panics
    ///
    /// Panics if the length of the slice is not 32 bytes.
    #[inline]
    pub fn from_slice(slice: &[u8]) -> Self {
        Self(slice.try_into().expect("slice with incorrect length"))
    }

    /// Converts integer into zero-padded big-endian bytes.
    ///
    /// Returns `None` on overflow or negative values.
    #[cfg(feature = "bigint")]
    pub fn from_bigint(int: &num_bigint::BigInt) -> Option<Self> {
        if int.sign() == num_bigint::Sign::Minus {
            return None;
        }
        Self::from_biguint(int.magnitude())
    }

    /// Converts integer into zero-padded big-endian bytes.
    ///
    /// Returns `None` on overflow.
    #[cfg(feature = "bigint")]
    pub fn from_biguint(uint: &num_bigint::BigUint) -> Option<Self> {
        let mut bytes = uint.to_bytes_le();
        // More than 32 little-endian bytes means the value exceeds 256 bits.
        if bytes.len() > 32 {
            return None;
        }

        // Pad to 32 bytes, then reverse LE -> zero-padded BE.
        bytes.resize(32, 0);
        bytes.reverse();
        Some(Self::from_slice(&bytes))
    }

    /// Converts integer into zero-padded big-endian bytes.
    ///
    /// Ignores all bits after 256th.
    #[cfg(feature = "bigint")]
    pub fn from_biguint_lossy(uint: &num_bigint::BigUint) -> Self {
        let mut bytes = uint.to_bytes_le();
        // `resize` truncates longer values, dropping the high-order LE tail.
        bytes.resize(32, 0);
        bytes.reverse();
        Self::from_slice(&bytes)
    }

    /// Wraps a reference to an internal array into a newtype reference.
    #[inline(always)]
    pub const fn wrap(value: &[u8; 32]) -> &Self {
        // SAFETY: HashBytes is #[repr(transparent)]
        unsafe { &*(value as *const [u8; 32] as *const Self) }
    }

    /// Returns a slice containing the entire array.
    pub const fn as_slice(&self) -> &[u8] {
        self.0.as_slice()
    }

    /// Returns a mutable slice containing the entire array.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.0.as_mut_slice()
    }

    /// Returns an internal array.
    pub const fn as_array(&self) -> &[u8; 32] {
        &self.0
    }

    /// Returns a mutable internal array.
    pub fn as_mut_array(&mut self) -> &mut [u8; 32] {
        &mut self.0
    }

    /// Returns a raw pointer to the slice's buffer.
    #[inline(always)]
    pub const fn as_ptr(&self) -> *const u8 {
        &self.0 as *const [u8] as *const u8
    }

    /// Returns a first chunk of 8 bytes.
    pub const fn first_chunk(&self) -> &[u8; 8] {
        match self.0.first_chunk::<8>() {
            Some(chunk) => chunk,
            // SAFETY: Const unwrap go brr
            None => unsafe { std::hint::unreachable_unchecked() },
        }
    }

    /// Formats the hash as 64 hex characters using the provided digit table.
    #[inline(always)]
    fn fmt_hex(&self, f: &mut std::fmt::Formatter<'_>, table: &[u8; 16]) -> std::fmt::Result {
        let mut output = [0u8; 64];
        // Encoding 32 bytes into a 64-byte buffer cannot fail; result ignored.
        crate::util::encode_to_hex_slice(&self.0, &mut output, table).ok();

        // SAFETY: output is guaranteed to contain only [0-9a-f]
        let output = unsafe { std::str::from_utf8_unchecked(&output) };
        f.write_str(output)
    }

    /// Creates a bigint from bytes (as big-endian).
    #[cfg(feature = "bigint")]
    pub fn as_biguint(&self) -> num_bigint::BigUint {
        num_bigint::BigUint::from_bytes_be(&self.0)
    }

    /// Creates a bigint from bytes (as big-endian).
    #[cfg(feature = "bigint")]
    pub fn as_bigint(&self) -> num_bigint::BigInt {
        self.as_biguint().into()
    }
}
715
impl Default for HashBytes {
    #[inline(always)]
    fn default() -> Self {
        Self::ZERO
    }
}

// Cheap reference conversions to/from the underlying byte storage.

impl AsRef<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn as_ref(&self) -> &[u8; 32] {
        &self.0
    }
}

impl AsRef<[u8]> for HashBytes {
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        self.0.as_ref()
    }
}

impl AsMut<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8; 32] {
        &mut self.0
    }
}

impl AsMut<[u8]> for HashBytes {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_mut()
    }
}
750
impl std::borrow::Borrow<[u8]> for HashBytes {
    #[inline(always)]
    fn borrow(&self) -> &[u8] {
        &self.0
    }
}

impl std::borrow::BorrowMut<[u8]> for HashBytes {
    #[inline(always)]
    fn borrow_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

// Allows map lookups keyed by `HashBytes` using a plain `[u8; 32]`.
impl std::borrow::Borrow<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn borrow(&self) -> &HashBytes {
        HashBytes::wrap(self)
    }
}
771
// Symmetric comparisons with raw byte arrays and slices.

impl PartialEq<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn eq(&self, other: &[u8; 32]) -> bool {
        &self.0 == other
    }
}

impl PartialEq<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn eq(&self, other: &HashBytes) -> bool {
        self == &other.0
    }
}

impl PartialEq<[u8]> for HashBytes {
    #[inline(always)]
    fn eq(&self, other: &[u8]) -> bool {
        self.0 == other
    }
}

impl PartialEq<HashBytes> for [u8] {
    #[inline(always)]
    fn eq(&self, other: &HashBytes) -> bool {
        self == other.0
    }
}
799
impl From<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn from(value: [u8; 32]) -> Self {
        Self(value)
    }
}

// Accepts a SHA-256 digest directly.
impl From<sha2::digest::Output<sha2::Sha256>> for HashBytes {
    #[inline(always)]
    fn from(value: sha2::digest::Output<sha2::Sha256>) -> Self {
        Self(value.into())
    }
}

// Accepts a BLAKE3 digest directly.
#[cfg(feature = "blake3")]
impl From<blake3::Hash> for HashBytes {
    #[inline(always)]
    fn from(value: blake3::Hash) -> Self {
        Self(value.into())
    }
}

impl From<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn from(value: HashBytes) -> Self {
        value.0
    }
}
828
829impl FromStr for HashBytes {
830    type Err = ParseHashBytesError;
831
832    fn from_str(s: &str) -> Result<Self, Self::Err> {
833        let mut result = Self::default();
834        match s.len() {
835            64 => {
836                if let Err(e) = hex::decode_to_slice(s, &mut result.0) {
837                    return Err(ParseHashBytesError::InvalidHex(e));
838                }
839            }
840            66 => {
841                if let Err(e) = hex::decode_to_slice(&s[2..], &mut result.0) {
842                    return Err(ParseHashBytesError::InvalidHex(e));
843                }
844            }
845            #[cfg(feature = "base64")]
846            44 => {
847                if let Err(e) = crate::util::decode_base64_slice(s, &mut result.0) {
848                    return Err(ParseHashBytesError::InvalidBase64(e));
849                }
850            }
851            _ => return Err(ParseHashBytesError::UnexpectedStringLength),
852        }
853        Ok(result)
854    }
855}
856
impl std::fmt::Display for HashBytes {
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // `Display` defaults to lowercase hex.
        std::fmt::LowerHex::fmt(self, f)
    }
}

impl std::fmt::LowerHex for HashBytes {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        Self::fmt_hex(self, f, crate::util::HEX_CHARS_LOWER)
    }
}

impl std::fmt::UpperHex for HashBytes {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        Self::fmt_hex(self, f, crate::util::HEX_CHARS_UPPER)
    }
}

impl std::fmt::Debug for HashBytes {
    #[inline(always)]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // `Debug` mirrors `Display` (plain hex, no struct wrapper).
        std::fmt::Display::fmt(self, f)
    }
}
882
// Forwards all indexing (single byte, ranges, ...) to the inner array.
impl<I> std::ops::Index<I> for HashBytes
where
    [u8; 32]: std::ops::Index<I>,
{
    type Output = <[u8; 32] as std::ops::Index<I>>::Output;

    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        std::ops::Index::index(&self.0, index)
    }
}

impl<I> std::ops::IndexMut<I> for HashBytes
where
    [u8; 32]: std::ops::IndexMut<I>,
{
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        std::ops::IndexMut::index_mut(&mut self.0, index)
    }
}
904
// Random hash generation for both supported `rand` major versions.

#[cfg(feature = "rand8")]
impl rand8::distributions::Distribution<HashBytes> for rand8::distributions::Standard {
    #[inline]
    fn sample<R: rand8::Rng + ?Sized>(&self, rng: &mut R) -> HashBytes {
        HashBytes(rand8::distributions::Standard.sample(rng))
    }
}

#[cfg(feature = "rand9")]
impl rand9::distr::Distribution<HashBytes> for rand9::distr::StandardUniform {
    #[inline]
    fn sample<R: rand9::Rng + ?Sized>(&self, rng: &mut R) -> HashBytes {
        HashBytes(rand9::distr::StandardUniform.sample(rng))
    }
}
920
#[cfg(feature = "serde")]
impl serde::Serialize for HashBytes {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // Human-readable formats get lowercase hex; binary formats get raw bytes.
        if serializer.is_human_readable() {
            let mut output = [0u8; 64];
            // Encoding 32 bytes into a 64-byte buffer cannot fail; result ignored.
            crate::util::encode_to_hex_slice(&self.0, &mut output, crate::util::HEX_CHARS_LOWER)
                .ok();

            // SAFETY: output is guaranteed to contain only [0-9a-f]
            let output = unsafe { std::str::from_utf8_unchecked(&output) };
            serializer.serialize_str(output)
        } else {
            serializer.serialize_bytes(&self.0)
        }
    }
}
940
#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for HashBytes {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use ::serde::de::{Error, Visitor};

        // Visitor for human-readable formats.
        // NOTE: accepts exactly 64 hex characters — unlike `FromStr`, no
        // `0x` prefix and no base64 fallback here.
        struct HashBytesHexVisitor;

        impl Visitor<'_> for HashBytesHexVisitor {
            type Value = HashBytes;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("hex-encoded byte array of size 32")
            }

            fn visit_str<E: Error>(self, value: &str) -> Result<Self::Value, E> {
                let mut result = HashBytes([0; 32]);
                match hex::decode_to_slice(value, &mut result.0) {
                    Ok(()) => Ok(result),
                    Err(_) => Err(Error::invalid_value(
                        serde::de::Unexpected::Str(value),
                        &self,
                    )),
                }
            }
        }

        // Visitor for binary formats: expects exactly 32 raw bytes.
        pub struct HashBytesRawVisitor;

        impl Visitor<'_> for HashBytesRawVisitor {
            type Value = HashBytes;

            fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                f.write_fmt(format_args!("a byte array of size 32"))
            }

            fn visit_bytes<E: Error>(self, v: &[u8]) -> Result<Self::Value, E> {
                v.try_into()
                    .map(HashBytes)
                    .map_err(|_e| Error::invalid_length(v.len(), &self))
            }
        }

        if deserializer.is_human_readable() {
            deserializer.deserialize_str(HashBytesHexVisitor)
        } else {
            deserializer.deserialize_bytes(HashBytesRawVisitor)
        }
    }
}
993
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for HashBytes {
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        u.arbitrary().map(Self)
    }

    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        // Always consumes exactly 32 bytes of entropy.
        (32, Some(32))
    }
}
1006
/// Hash of an empty (0 bits of data, no refs) ordinary cell.
// Precomputed constant; presumably the SHA-256 of the empty cell's
// representation — TODO confirm against the cell hashing routine.
pub static EMPTY_CELL_HASH: &HashBytes = HashBytes::wrap(&[
    0x96, 0xa2, 0x96, 0xd2, 0x24, 0xf2, 0x85, 0xc6, 0x7b, 0xee, 0x93, 0xc3, 0x0f, 0x8a, 0x30, 0x91,
    0x57, 0xf0, 0xda, 0xa3, 0x5d, 0xc5, 0xb8, 0x7e, 0x41, 0x0b, 0x78, 0x63, 0x0a, 0x09, 0xcf, 0xc7,
]);
1012
/// Well-formed cell type.
#[derive(Default, Debug, Copy, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum CellType {
    /// Cell of this type just stores data and references.
    #[default]
    Ordinary,
    /// Exotic cell which was pruned from the original tree of cells
    /// when a Merkle proof has been created.
    PrunedBranch,
    /// Exotic cell with a reference to the cell with a library.
    LibraryReference,
    /// Exotic cell with one hash and one reference.
    MerkleProof,
    /// Exotic cell with two hashes and two references.
    MerkleUpdate,
}
1030
1031impl CellType {
1032    /// Returns whether this cell type is Merkle proof or Merkle update.
1033    #[inline]
1034    pub const fn is_merkle(self) -> bool {
1035        matches!(self, Self::MerkleProof | Self::MerkleUpdate)
1036    }
1037
1038    /// Returns whether the cell is not ordinary.
1039    #[inline]
1040    pub const fn is_exotic(self) -> bool {
1041        !matches!(self, Self::Ordinary)
1042    }
1043
1044    /// Returns whether the cell is a pruned branch.
1045    #[inline]
1046    pub const fn is_pruned_branch(self) -> bool {
1047        matches!(self, Self::PrunedBranch)
1048    }
1049
1050    /// Returns whether this cell is a library cell.
1051    #[inline(always)]
1052    pub const fn is_library(self) -> bool {
1053        matches!(self, Self::LibraryReference)
1054    }
1055
1056    /// Encodes cell type as byte.
1057    #[inline]
1058    pub const fn to_byte(self) -> u8 {
1059        match self {
1060            CellType::Ordinary => 0xff,
1061            CellType::PrunedBranch => 1,
1062            CellType::LibraryReference => 2,
1063            CellType::MerkleProof => 3,
1064            CellType::MerkleUpdate => 4,
1065        }
1066    }
1067
1068    /// Decodes any cell type from byte.
1069    #[inline]
1070    pub const fn from_byte(byte: u8) -> Option<Self> {
1071        Some(match byte {
1072            0xff => CellType::Ordinary,
1073            1 => CellType::PrunedBranch,
1074            2 => CellType::LibraryReference,
1075            3 => CellType::MerkleProof,
1076            4 => CellType::MerkleUpdate,
1077            _ => return None,
1078        })
1079    }
1080
1081    /// Decodes exotic cell type from byte.
1082    #[inline]
1083    pub const fn from_byte_exotic(byte: u8) -> Option<Self> {
1084        Some(match byte {
1085            1 => CellType::PrunedBranch,
1086            2 => CellType::LibraryReference,
1087            3 => CellType::MerkleProof,
1088            4 => CellType::MerkleUpdate,
1089            _ => return None,
1090        })
1091    }
1092}
1093
1094impl From<CellType> for u8 {
1095    #[inline]
1096    fn from(cell_type: CellType) -> u8 {
1097        cell_type.to_byte()
1098    }
1099}
1100
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for CellType {
    /// Uniformly picks one of the five variants.
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        static ALL_TYPES: [CellType; 5] = [
            CellType::Ordinary,
            CellType::PrunedBranch,
            CellType::LibraryReference,
            CellType::MerkleProof,
            CellType::MerkleUpdate,
        ];

        u.choose(&ALL_TYPES).copied()
    }

    /// Exactly one byte of entropy is consumed.
    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        (1, Some(1))
    }
}
1121
/// Tightly packed info about a cell.
///
/// Bit layout (see the `*_MASK` constants and `compute_d1`/`compute_d2`):
/// - `d1`: bits 7..=5 — level mask, bit 4 — `store_hashes` flag,
///   bit 3 — `is_exotic` flag, bits 2..=0 — reference count.
/// - `d2`: `2 * floor(bit_len / 8)` with the lowest bit set iff the data
///   is not byte-aligned (see [`CellDescriptor::byte_len`]).
#[derive(Hash, Debug, Clone, Copy)]
#[repr(C)]
pub struct CellDescriptor {
    /// First descriptor byte with a generic info about cell.
    pub d1: u8,
    /// Second descriptor byte with a packed data size.
    pub d2: u8,
}

impl CellDescriptor {
    /// Bit mask to store the number of references in the descriptor.
    pub const REF_COUNT_MASK: u8 = 0b0000_0111;
    /// Bit mask to store the `is_exotic` flag in the descriptor.
    pub const IS_EXOTIC_MASK: u8 = 0b0000_1000;
    /// Bit mask to store the `store_hashes` flag in the descriptor.
    pub const STORE_HASHES_MASK: u8 = 0b0001_0000;
    /// _de Brujn_ level presence mask in the descriptor.
    pub const LEVEL_MASK: u8 = 0b1110_0000;

    /// Computes d1 descriptor byte from parts
    #[inline(always)]
    pub const fn compute_d1(level_mask: LevelMask, is_exotic: bool, ref_count: u8) -> u8 {
        (level_mask.0 << 5) | ((is_exotic as u8) << 3) | (ref_count & Self::REF_COUNT_MASK)
    }

    /// Computes d2 descriptor byte from cell length in bits
    #[inline(always)]
    pub const fn compute_d2(bit_len: u16) -> u8 {
        // `(bit_len >> 2) & !1` equals `2 * floor(bit_len / 8)`; the lowest
        // bit is set iff the length is not a whole number of bytes, which
        // lets `byte_len` round up.
        (((bit_len >> 2) as u8) & !0b1) | (!bit_len.is_multiple_of(8) as u8)
    }

    /// Constructs cell descriptor from descriptor bytes.
    #[inline(always)]
    pub const fn new(bytes: [u8; 2]) -> Self {
        Self {
            d1: bytes[0],
            d2: bytes[1],
        }
    }

    /// Creates a new descriptor with a new mask, shifted by the offset.
    #[must_use]
    pub const fn virtualize(mut self, offset: u8) -> Self {
        // Shift only the level bits (7..=5) down by `offset`, keeping
        // the remaining d1 bits (flags and refcount) untouched.
        let virtualized_mask = (self.d1 >> offset) & Self::LEVEL_MASK;
        self.d1 = virtualized_mask | (self.d1 & !Self::LEVEL_MASK);
        self
    }

    /// Computes cell type.
    pub fn cell_type(self) -> CellType {
        if self.d1 & Self::IS_EXOTIC_MASK == 0 {
            CellType::Ordinary
        } else {
            // Exotic cells are distinguished by their reference count.
            match self.d1 & Self::REF_COUNT_MASK {
                0 => {
                    // NOTE: zero mask <=> zero level
                    if self.d1 & Self::LEVEL_MASK == 0 {
                        CellType::LibraryReference
                    } else {
                        CellType::PrunedBranch
                    }
                }
                1 => CellType::MerkleProof,
                _ => CellType::MerkleUpdate,
            }
        }
    }

    /// Computes child cell count.
    #[inline(always)]
    pub const fn reference_count(self) -> u8 {
        self.d1 & Self::REF_COUNT_MASK
    }

    /// Computes hash count.
    ///
    /// NOTE: Guaranteed to be in range 1..=4.
    #[inline(always)]
    pub const fn hash_count(self) -> u8 {
        let level = self.level_mask().level();
        if self.is_exotic() && self.reference_count() == 0 && level > 0 {
            1 // pruned branch always has 1 hash
        } else {
            level + 1
        }
    }

    /// Returns whether the cell is not [`Ordinary`].
    ///
    /// [`Ordinary`]: CellType::Ordinary
    #[inline(always)]
    pub const fn is_exotic(self) -> bool {
        self.d1 & Self::IS_EXOTIC_MASK != 0
    }

    /// Returns whether this cell is a pruned branch cell.
    #[inline(always)]
    pub const fn is_pruned_branch(self) -> bool {
        self.is_exotic() && self.reference_count() == 0 && !self.level_mask().is_empty()
    }

    /// Returns whether this cell is a library cell.
    #[inline(always)]
    pub const fn is_library(self) -> bool {
        self.is_exotic() && self.reference_count() == 0 && self.level_mask().is_empty()
    }

    /// Returns whether this cell type is Merkle proof or Merkle update.
    #[inline(always)]
    pub const fn is_merkle(self) -> bool {
        self.is_exotic() && self.reference_count() != 0
    }

    /// Returns whether this cell refers to some external data.
    #[inline(always)]
    pub const fn is_absent(self) -> bool {
        // An absent cell has an exact d1 of `0b0000_1111`.
        self.d1 == (Self::REF_COUNT_MASK | Self::IS_EXOTIC_MASK)
    }

    /// Returns whether this cell should store hashes in data.
    #[inline(always)]
    pub const fn store_hashes(self) -> bool {
        self.d1 & Self::STORE_HASHES_MASK != 0
    }

    /// Computes level mask.
    #[inline(always)]
    pub const fn level_mask(self) -> LevelMask {
        // SAFETY: `u8 >> 5` is always 3 bits long
        unsafe { LevelMask::new_unchecked(self.d1 >> 5) }
    }

    /// Returns whether this cell's data is 8-bit aligned.
    #[inline(always)]
    pub const fn is_aligned(self) -> bool {
        self.d2 & 1 == 0
    }

    /// Returns this cell's data length in bytes.
    #[inline(always)]
    pub const fn byte_len(self) -> u8 {
        // `(d2 >> 1)` is `floor(bit_len / 8)`; adding the alignment bit
        // rounds up for a trailing partial byte, i.e. `ceil(bit_len / 8)`.
        (self.d2 & 1) + (self.d2 >> 1)
    }
}
1267
/// _de Brujn_ level presence bitset.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct LevelMask(u8);

impl LevelMask {
    /// Empty bitset.
    pub const EMPTY: Self = LevelMask(0);
    /// Max _de Brujn_ level.
    pub const MAX_LEVEL: u8 = 3;

    /// Constructs a new level mask, truncating extra bits.
    #[inline(always)]
    pub const fn new(mask: u8) -> Self {
        Self(mask & 0b0000_0111)
    }

    /// Returns true if there are no levels in mask.
    #[inline(always)]
    pub const fn is_empty(self) -> bool {
        self.0 == 0
    }

    /// Constructs a new level mask from the provided byte as is.
    ///
    /// # Safety
    ///
    /// The following must be true:
    /// - Mask must be in range `0b000..=0b111`.
    #[inline(always)]
    pub const unsafe fn new_unchecked(mask: u8) -> Self {
        Self(mask)
    }

    /// Creates a sufficient mask for the specified level.
    ///
    /// NOTE: levels > 3 has no effect (mask will always be `0b111`).
    #[inline(always)]
    pub const fn from_level(level: u8) -> Self {
        // Level `n` requires the `n` lowest bits to be set.
        Self(match level {
            0 => 0b000,
            1 => 0b001,
            2 => 0b011,
            _ => 0b111,
        })
    }

    /// Counts presented higher hashes.
    pub const fn level(self) -> u8 {
        // Population count of the three mask bits.
        (self.0 & 0b111).count_ones() as u8
    }

    /// Computes hash index for the specified level.
    pub const fn hash_index(self, level: u8) -> u8 {
        // Number of mask bits present at or below the requested level.
        Self(self.0 & Self::from_level(level).0).level()
    }

    /// Returns whether the specified level is included into the mask.
    pub const fn contains(self, level: u8) -> bool {
        // Level 0 is always present; levels 1..=3 map to bits 0..=2.
        level == 0 || self.0 & (1 << (level - 1)) != 0
    }

    /// Creates a new mask, shifted by the offset.
    #[inline(always)]
    #[must_use]
    pub const fn virtualize(self, offset: u8) -> Self {
        Self(self.0 >> offset)
    }

    /// Encodes level mask as byte.
    #[inline]
    pub const fn to_byte(self) -> u8 {
        self.0
    }
}
1342
1343impl IntoIterator for LevelMask {
1344    type Item = u8;
1345    type IntoIter = LevelMaskIter;
1346
1347    #[inline]
1348    fn into_iter(self) -> Self::IntoIter {
1349        // Include zero level
1350        LevelMaskIter(1 | (self.0 << 1))
1351    }
1352}
1353
1354impl PartialEq<u8> for LevelMask {
1355    #[inline]
1356    fn eq(&self, other: &u8) -> bool {
1357        self.0 == *other
1358    }
1359}
1360
1361impl BitOr for LevelMask {
1362    type Output = Self;
1363
1364    #[inline(always)]
1365    fn bitor(self, rhs: Self) -> Self::Output {
1366        Self(self.0 | rhs.0)
1367    }
1368}
1369
1370impl BitOrAssign for LevelMask {
1371    #[inline(always)]
1372    fn bitor_assign(&mut self, rhs: Self) {
1373        self.0 |= rhs.0;
1374    }
1375}
1376
1377impl From<LevelMask> for u8 {
1378    #[inline(always)]
1379    fn from(m: LevelMask) -> u8 {
1380        m.0
1381    }
1382}
1383
1384impl std::fmt::Debug for LevelMask {
1385    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1386        f.write_fmt(format_args!("{:03b}", self.0))
1387    }
1388}
1389
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for LevelMask {
    /// Picks a uniform mask from the full valid range `0b000..=0b111`.
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        u.int_in_range(0b000..=0b111).map(Self::new)
    }

    /// Exactly one byte of entropy is consumed.
    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        (1, Some(1))
    }
}
1401
/// _de Brujn_ level presence bitset iterator.
///
/// Yields the index of every set bit, lowest first.
#[derive(Clone)]
pub struct LevelMaskIter(u8);

impl LevelMaskIter {
    /// Returns `true` if all levels were consumed.
    pub fn is_empty(&self) -> bool {
        self.0 == 0
    }
}

impl Iterator for LevelMaskIter {
    type Item = u8;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.0 == 0 {
            return None;
        }

        // The position of the lowest set bit is the next present level.
        let level = self.0.trailing_zeros() as u8;

        // `x & (x - 1)` clears exactly the lowest set bit.
        self.0 &= self.0 - 1;

        Some(level)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: one item per remaining set bit.
        let remaining = self.0.count_ones() as usize;
        (remaining, Some(remaining))
    }
}
1436
1437/// A size of a cell.
1438#[derive(Debug, Default, Clone, Copy, Eq, PartialEq)]
1439pub struct Size {
1440    /// Total number of bits in cell slice.
1441    pub bits: u16,
1442    /// Total number refs in cell slice.
1443    pub refs: u8,
1444}
1445
1446impl Size {
1447    /// The additive identity for this type, i.e. `0`.
1448    pub const ZERO: Self = Self { bits: 0, refs: 0 };
1449
1450    /// The multiplicative bits identity for this type, i.e. `1 bit`.
1451    pub const BIT: Self = Self { bits: 1, refs: 0 };
1452
1453    /// The multiplicative refs identity for this type, i.e. `1 ref`.
1454    pub const REF: Self = Self { bits: 0, refs: 1 };
1455
1456    /// The largest valid value that can be represented by this type.
1457    pub const MAX: Self = Self {
1458        bits: MAX_BIT_LEN,
1459        refs: MAX_REF_COUNT as _,
1460    };
1461
1462    /// Returns true if the number of bits and refs is in the valid range for the cell.
1463    #[inline]
1464    pub const fn fits_into_cell(&self) -> bool {
1465        self.bits <= MAX_BIT_LEN && self.refs <= MAX_REF_COUNT as _
1466    }
1467
1468    /// Saturating size addition. Computes self + rhs for bits and refs,
1469    /// saturating at the numeric bounds instead of overflowing.
1470    #[inline]
1471    pub const fn saturating_add(self, rhs: Self) -> Self {
1472        Self {
1473            bits: self.bits.saturating_add(rhs.bits),
1474            refs: self.refs.saturating_add(rhs.refs),
1475        }
1476    }
1477
1478    /// Saturating size substraction. Computes self - rhs for bits and refs,
1479    /// saturating at the numeric bounds instead of overflowing.
1480    #[inline]
1481    pub const fn saturating_sub(self, rhs: Self) -> Self {
1482        Self {
1483            bits: self.bits.saturating_sub(rhs.bits),
1484            refs: self.refs.saturating_sub(rhs.refs),
1485        }
1486    }
1487
1488    /// Returns true if there are no bits and refs.
1489    pub const fn is_zero(self) -> bool {
1490        self.bits == 0 && self.refs == 0
1491    }
1492}
1493
1494impl From<Size> for CellTreeStats {
1495    #[inline]
1496    fn from(value: Size) -> Self {
1497        Self {
1498            bit_count: value.bits as _,
1499            cell_count: value.refs as _,
1500        }
1501    }
1502}
1503
1504impl std::ops::Add for Size {
1505    type Output = Self;
1506
1507    #[inline]
1508    fn add(mut self, rhs: Self) -> Self::Output {
1509        self += rhs;
1510        self
1511    }
1512}
1513
1514impl std::ops::AddAssign for Size {
1515    #[inline]
1516    fn add_assign(&mut self, rhs: Self) {
1517        self.bits += rhs.bits;
1518        self.refs += rhs.refs;
1519    }
1520}
1521
1522impl std::ops::Sub for Size {
1523    type Output = Self;
1524
1525    #[inline]
1526    fn sub(mut self, rhs: Self) -> Self::Output {
1527        self -= rhs;
1528        self
1529    }
1530}
1531
1532impl std::ops::SubAssign for Size {
1533    #[inline]
1534    fn sub_assign(&mut self, rhs: Self) {
1535        self.bits -= rhs.bits;
1536        self.refs -= rhs.refs;
1537    }
1538}
1539
/// Cell tree storage stats.
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub struct CellTreeStats {
    /// Total number of bits in tree.
    pub bit_count: u64,
    /// Total number of cells in tree.
    pub cell_count: u64,
}

impl CellTreeStats {
    /// The additive identity for this type, i.e. `0`.
    pub const ZERO: Self = Self {
        bit_count: 0,
        cell_count: 0,
    };
}
1556
1557impl std::ops::Add for CellTreeStats {
1558    type Output = Self;
1559
1560    #[inline]
1561    fn add(self, rhs: Self) -> Self::Output {
1562        Self {
1563            bit_count: self.bit_count.saturating_add(rhs.bit_count),
1564            cell_count: self.cell_count.saturating_add(rhs.cell_count),
1565        }
1566    }
1567}
1568
1569impl std::ops::AddAssign for CellTreeStats {
1570    #[inline]
1571    fn add_assign(&mut self, rhs: Self) {
1572        self.bit_count = self.bit_count.saturating_add(rhs.bit_count);
1573        self.cell_count = self.cell_count.saturating_add(rhs.cell_count);
1574    }
1575}
1576
1577impl std::ops::Add<Size> for CellTreeStats {
1578    type Output = Self;
1579
1580    #[inline]
1581    fn add(self, rhs: Size) -> Self::Output {
1582        Self {
1583            bit_count: self.bit_count.saturating_add(rhs.bits as _),
1584            cell_count: self.cell_count.saturating_add(rhs.refs as _),
1585        }
1586    }
1587}
1588
1589impl std::iter::Sum for CellTreeStats {
1590    #[inline]
1591    fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
1592        let mut res = Self::ZERO;
1593        for item in iter {
1594            res += item;
1595        }
1596        res
1597    }
1598}
1599
1600impl std::ops::AddAssign<Size> for CellTreeStats {
1601    fn add_assign(&mut self, rhs: Size) {
1602        self.bit_count = self.bit_count.saturating_add(rhs.bits as _);
1603        self.cell_count = self.cell_count.saturating_add(rhs.refs as _);
1604    }
1605}
1606
1607impl std::ops::Sub for CellTreeStats {
1608    type Output = Self;
1609
1610    #[inline]
1611    fn sub(mut self, rhs: Self) -> Self::Output {
1612        self -= rhs;
1613        self
1614    }
1615}
1616
1617impl std::ops::SubAssign for CellTreeStats {
1618    #[inline]
1619    fn sub_assign(&mut self, rhs: Self) {
1620        self.bit_count = self.bit_count.saturating_sub(rhs.bit_count);
1621        self.cell_count = self.cell_count.saturating_sub(rhs.cell_count);
1622    }
1623}
1624
1625impl std::ops::SubAssign<Size> for CellTreeStats {
1626    #[inline]
1627    fn sub_assign(&mut self, rhs: Size) {
1628        self.bit_count = self.bit_count.saturating_sub(rhs.bits as _);
1629        self.cell_count = self.cell_count.saturating_sub(rhs.refs as _);
1630    }
1631}
1632
/// A helper to track the size of the unique data in multiple cell trees.
///
/// NOTE: It uses hashes for deduplication, so you can only use it for
/// fully computed and valid trees.
pub struct StorageStat<'a> {
    // Repr hashes of cells that were already counted (deduplication set).
    visited: ahash::HashSet<&'a HashBytes>,
    // DFS stack of child iterators for cells whose subtrees are
    // still being traversed.
    stack: Vec<RefsIter<'a>>,
    // Stats accumulated over all unique visited cells.
    stats: CellTreeStats,
    // Max number of counted cells; traversal aborts once reached.
    limit: usize,
}
1643
impl<'a> StorageStat<'a> {
    /// Recursively computes the count of distinct cells returning
    /// the total storage used by this dag taking into account the
    /// identification of equal cells.
    ///
    /// Root slice does not count as cell. A slice subrange of
    /// cells is used during computation.
    ///
    /// Returns `None` if the cell limit was reached.
    pub fn compute_for_slice<'b: 'a>(
        slice: &'a CellSlice<'b>,
        limit: usize,
    ) -> Option<CellTreeStats> {
        let mut this = Self::with_limit(limit);
        if this.add_slice(slice) {
            Some(this.stats)
        } else {
            None
        }
    }

    /// Recursively computes the count of distinct cells returning
    /// the total storage used by this dag taking into account the
    /// identification of equal cells.
    ///
    /// Returns `None` if the cell limit was reached.
    pub fn compute_for_cell(cell: &'a DynCell, limit: usize) -> Option<CellTreeStats> {
        let mut this = Self::with_limit(limit);
        if this.add_cell(cell) {
            Some(this.stats)
        } else {
            None
        }
    }

    /// Creates a new storage stat state with an explicit limit.
    pub fn with_limit(limit: usize) -> Self {
        Self {
            visited: Default::default(),
            stack: Vec::new(),
            stats: CellTreeStats::ZERO,
            limit,
        }
    }

    /// Creates a new storage stat state without a limit.
    pub fn unlimited() -> Self {
        Self::with_limit(usize::MAX)
    }

    /// Returns the current tree stats.
    pub fn stats(&self) -> CellTreeStats {
        self.stats
    }

    /// Merges current stats with the stats from the provided cell tree.
    ///
    /// Returns `false` if the limit was reached.
    pub fn add_cell(&mut self, cell: &'a DynCell) -> bool {
        // Already counted this cell (by repr hash) — nothing to add.
        if !self.visited.insert(cell.repr_hash()) {
            return true;
        }

        // NOTE: the root cell itself is counted without checking `limit`.
        self.stats.bit_count += cell.bit_len() as u64;
        self.stats.cell_count += 1;

        self.stack.clear();
        self.stack.push(cell.references());
        self.reduce_stack()
    }

    /// Merges current stats with the stats from the provided slice.
    ///
    /// Returns `false` if the limit was reached.
    pub fn add_slice(&mut self, slice: &CellSlice<'a>) -> bool {
        // The slice itself contributes only bits; it is not counted as a
        // cell and is not deduplicated.
        self.stats.bit_count += slice.size_bits() as u64;

        self.stack.clear();
        self.stack.push(slice.references());
        self.reduce_stack()
    }

    /// Iterative DFS over the stack of child iterators, accumulating
    /// stats for every not-yet-visited cell.
    ///
    /// Returns `false` if the cell limit was reached.
    fn reduce_stack(&mut self) -> bool {
        'outer: while let Some(item) = self.stack.last_mut() {
            for cell in item.by_ref() {
                // Skip cells that were already counted (deduplication).
                if !self.visited.insert(cell.repr_hash()) {
                    continue;
                }

                if self.stats.cell_count >= self.limit as u64 {
                    return false;
                }

                self.stats.bit_count += cell.bit_len() as u64;
                self.stats.cell_count += 1;

                // Descend into the child's subtree before finishing
                // the current iterator.
                let next = cell.references();
                if next.max > 0 {
                    self.stack.push(next);
                    continue 'outer;
                }
            }

            // Current iterator exhausted; resume its parent.
            self.stack.pop();
        }

        true
    }
}
1749
1750/// Helper struct to debug print the root cell.
1751#[derive(Clone, Copy)]
1752pub struct DebugCell<'a>(&'a DynCell);
1753
1754impl std::fmt::Debug for DebugCell<'_> {
1755    #[inline]
1756    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1757        self.0.fmt(f)
1758    }
1759}
1760
1761/// Helper struct to print only the root cell in the cell tree.
1762#[derive(Clone, Copy)]
1763pub struct DisplayCellRoot<'a> {
1764    cell: &'a DynCell,
1765    level: usize,
1766}
1767
1768impl std::fmt::Display for DisplayCellRoot<'_> {
1769    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1770        // TODO: encode on stack
1771        let data = hex::encode(self.cell.data());
1772
1773        let indent = self.level * 2;
1774        if f.alternate() {
1775            f.write_fmt(format_args!("{:indent$}{data}\n", ""))
1776        } else {
1777            let repr_depth = self.cell.depth(LevelMask::MAX_LEVEL);
1778            let repr_hash = self.cell.repr_hash();
1779            let descriptor = self.cell.descriptor();
1780            f.write_fmt(format_args!(
1781                "{:indent$}{:?}: {data}\n{:indent$}bits: {:>4}, refs: {}, l: {:?}, depth: {}, hash: {}\n",
1782                "",
1783                descriptor.cell_type(),
1784                "",
1785                self.cell.bit_len(),
1786                descriptor.reference_count(),
1787                descriptor.level_mask(),
1788                repr_depth,
1789                repr_hash,
1790            ))
1791        }
1792    }
1793}
1794
1795/// Helper struct to print all cells in the cell tree.
1796#[derive(Clone, Copy)]
1797pub struct DisplayCellTree<'a>(&'a DynCell);
1798
1799impl std::fmt::Display for DisplayCellTree<'_> {
1800    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1801        let mut stack = vec![(0, self.0)];
1802
1803        while let Some((level, cell)) = stack.pop() {
1804            ok!(std::fmt::Display::fmt(&DisplayCellRoot { cell, level }, f));
1805
1806            let reference_count = cell.reference_count();
1807            for i in (0..reference_count).rev() {
1808                if let Some(child) = cell.reference(i) {
1809                    stack.push((level + 1, child));
1810                }
1811            }
1812        }
1813
1814        Ok(())
1815    }
1816}
1817
/// Max cell data capacity in bits.
pub const MAX_BIT_LEN: u16 = 1023;
/// Maximum number of child cells.
pub const MAX_REF_COUNT: usize = 4;
1822
#[cfg(test)]
mod tests {
    use super::*;

    // `LevelMask::level` must equal the popcount of the three mask bits;
    // compared against a precomputed table for all 8 masks.
    #[test]
    fn correct_level() {
        const LEVEL: [u8; 8] = [0, 1, 1, 2, 1, 2, 2, 3];

        for mask in 0b000..=0b111 {
            assert_eq!(LevelMask(mask).level(), LEVEL[mask as usize]);
        }
    }

    // Virtualization must shift only the level mask in `d1`, leaving the
    // cell type, flags, and data length untouched.
    #[test]
    fn virtualize_descriptor() {
        let level_mask = LevelMask(0b111);
        let desc = CellDescriptor::new([
            CellDescriptor::compute_d1(level_mask, false, 3),
            CellDescriptor::compute_d2(123),
        ]);

        assert_eq!(desc.level_mask(), level_mask);

        for i in 0..3 {
            let v_desc = desc.virtualize(i);

            assert_eq!(v_desc.cell_type(), desc.cell_type());
            assert_eq!(v_desc.reference_count(), desc.reference_count());
            assert_eq!(v_desc.is_exotic(), desc.is_exotic());
            assert_eq!(v_desc.store_hashes(), desc.store_hashes());
            assert_eq!(v_desc.is_aligned(), desc.is_aligned());
            assert_eq!(v_desc.byte_len(), desc.byte_len());

            assert_eq!(v_desc.level_mask(), level_mask.virtualize(i));
        }
    }

    // Exhaustive check of `LevelMask::hash_index` for every (mask, level)
    // pair against a precomputed table.
    #[test]
    fn correct_hash_index() {
        const HASH_INDEX_TABLE: [[u8; 4]; 8] = [
            // index      // mask
            [0, 0, 0, 0], // 000
            [0, 1, 1, 1], // 001
            [0, 0, 1, 1], // 010
            [0, 1, 2, 2], // 011
            [0, 0, 0, 1], // 100
            [0, 1, 1, 2], // 101
            [0, 0, 1, 2], // 110
            [0, 1, 2, 3], // 111
        ];

        for mask in 0b000..=0b111 {
            let mask = LevelMask(mask);

            for i in 0..=3 {
                let hash_index = mask.hash_index(i);
                assert_eq!(hash_index, HASH_INDEX_TABLE[mask.0 as usize][i as usize]);
            }
        }
    }

    // Repeated virtualization of triple-nested pruned branches must step
    // down one level at a time (matching the inner branches' hash/depth)
    // and then become a no-op at level 0.
    #[test]
    fn ultra_virtual_cell_by_ref() {
        let cell = Cell::empty_cell();

        let pruned1 =
            crate::merkle::make_pruned_branch(cell.as_ref(), 0, Cell::empty_context()).unwrap();

        let pruned2 =
            crate::merkle::make_pruned_branch(pruned1.as_ref(), 1, Cell::empty_context()).unwrap();

        let pruned3 =
            crate::merkle::make_pruned_branch(pruned2.as_ref(), 2, Cell::empty_context()).unwrap();

        // Level 3 -> 2
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), pruned2.repr_hash());
        assert_eq!(pruned3.repr_depth(), pruned2.repr_depth());

        // Level 2 -> 1
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), pruned1.repr_hash());
        assert_eq!(pruned3.repr_depth(), pruned1.repr_depth());

        // Level 1 -> 0
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());

        // Level 0 -> 0
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());

        // Level 0 -> 0 (just in case)
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());
    }

    // Checks the `since_tag` versioning of the `Store`/`Load` derives:
    // each tag (0x12/0x34/0x56) serializes only the fields introduced up
    // to that version, and loading roundtrips each encoding.
    #[test]
    fn versioned_store_load() {
        #[derive(Debug, Clone, Copy, Eq, PartialEq, Store, Load)]
        #[tlb(tag = ["#12", "#34", "#56"])]
        struct Versioned {
            old_field1: u32,
            #[tlb(since_tag = 1)]
            new_field1: u32,
            old_field2: bool,
            #[tlb(since_tag = 2)]
            new_field2: bool,
        }

        // Only pre-versioning fields set — expect the first tag (0x12)
        // and the shortest encoding.
        let old = Versioned {
            old_field1: 123,
            new_field1: 0,
            old_field2: true,
            new_field2: false,
        };
        let cell = CellBuilder::build_from(old).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x12);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert!(slice.load_bit().unwrap());
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            old,
        );

        // `new_field1` set — expect the second tag (0x34) with its field.
        let new = Versioned {
            old_field1: 123,
            new_field1: 456,
            old_field2: true,
            new_field2: false,
        };
        let cell = CellBuilder::build_from(new).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 32 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x34);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert_eq!(slice.load_u32().unwrap(), 456);
            assert!(slice.load_bit().unwrap());
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            new
        );

        // `new_field2` set — expect the third tag (0x56) with all fields,
        // including the default-valued `new_field1`.
        let too_new = Versioned {
            old_field1: 123,
            new_field1: 0,
            old_field2: true,
            new_field2: true,
        };
        let cell = CellBuilder::build_from(too_new).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 32 + 1 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x56);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert_eq!(slice.load_u32().unwrap(), 0);
            assert!(slice.load_bit().unwrap());
            assert!(slice.load_bit().unwrap());
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            too_new
        );
    }
2002}