tycho_types/cell/
slice.rs

1use std::num::{NonZeroU8, NonZeroU16, NonZeroU32};
2use std::rc::Rc;
3use std::sync::Arc;
4
5use super::CellFamily;
6use crate::cell::{
7    Cell, CellTreeStats, CellType, DynCell, HashBytes, LevelMask, RefsIter, Size, StorageStat,
8};
9use crate::error::Error;
10use crate::util::{Bitstring, unlikely};
11
/// A data structure that can be deserialized from the full cell.
///
/// Unlike [`Load`], implementations receive the whole cell rather than a
/// mutable slice cursor, so they can inspect the cell itself (type, level,
/// hashes) and not just its data window.
pub trait LoadCell<'a>: Sized {
    /// Tries to load itself from cell.
    fn load_from_cell(cell: &'a DynCell) -> Result<Self, Error>;
}
17
18impl<'a, T: LoadCell<'a>> LoadCell<'a> for Box<T> {
19    #[inline]
20    fn load_from_cell(cell: &'a DynCell) -> Result<Self, Error> {
21        match <T as LoadCell>::load_from_cell(cell) {
22            Ok(value) => Ok(Box::new(value)),
23            Err(e) => Err(e),
24        }
25    }
26}
27
28impl<'a, T: LoadCell<'a>> LoadCell<'a> for Arc<T> {
29    #[inline]
30    fn load_from_cell(cell: &'a DynCell) -> Result<Self, Error> {
31        match <T as LoadCell>::load_from_cell(cell) {
32            Ok(value) => Ok(Arc::new(value)),
33            Err(e) => Err(e),
34        }
35    }
36}
37
38impl<'a, T: LoadCell<'a>> LoadCell<'a> for Rc<T> {
39    #[inline]
40    fn load_from_cell(cell: &'a DynCell) -> Result<Self, Error> {
41        match <T as LoadCell>::load_from_cell(cell) {
42            Ok(value) => Ok(Rc::new(value)),
43            Err(e) => Err(e),
44        }
45    }
46}
47
/// A data structure that can be deserialized from ordinary cells.
///
/// Implementations consume bits/refs from the slice cursor, advancing it
/// past whatever they read; on error the cursor position is unspecified.
pub trait Load<'a>: Sized {
    /// Tries to load itself from a cell slice.
    fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error>;
}
53
54impl<'a, T: Load<'a>> Load<'a> for Box<T> {
55    #[inline]
56    fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error> {
57        match <T as Load>::load_from(slice) {
58            Ok(value) => Ok(Box::new(value)),
59            Err(e) => Err(e),
60        }
61    }
62}
63
64impl<'a, T: Load<'a>> Load<'a> for Arc<T> {
65    #[inline]
66    fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error> {
67        match <T as Load>::load_from(slice) {
68            Ok(value) => Ok(Arc::new(value)),
69            Err(e) => Err(e),
70        }
71    }
72}
73
74impl<'a, T: Load<'a>> Load<'a> for Rc<T> {
75    #[inline]
76    fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error> {
77        match <T as Load>::load_from(slice) {
78            Ok(value) => Ok(Rc::new(value)),
79            Err(e) => Err(e),
80        }
81    }
82}
83
impl<'a> Load<'a> for () {
    /// The unit type occupies no bits and no refs; loading never fails
    /// and leaves the slice untouched.
    #[inline]
    fn load_from(_: &mut CellSlice<'a>) -> Result<Self, Error> {
        Ok(())
    }
}
90
// Generates `Load` impls for tuples: each field is loaded in declaration
// order from the same slice, short-circuiting on the first error via `ok!`.
macro_rules! impl_load_for_tuples {
    ($( ($($t:ident),+) ),*$(,)?) => {$(
        impl<'a, $($t: Load<'a>),+> Load<'a> for ($($t),*,) {
            fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error> {
                Ok(($(ok!(<$t>::load_from(slice))),+))
            }
        }
    )*};
}
100
// `Load` for tuples of 2..=6 elements, decoded field-by-field from one slice.
impl_load_for_tuples! {
    (T0, T1),
    (T0, T1, T2),
    (T0, T1, T2, T3),
    (T0, T1, T2, T3, T4),
    (T0, T1, T2, T3, T4, T5),
}
108
109impl<'a, T: Load<'a>> Load<'a> for Option<T> {
110    #[inline]
111    fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error> {
112        if ok!(slice.load_bit()) {
113            match T::load_from(slice) {
114                Ok(value) => Ok(Some(value)),
115                Err(e) => Err(e),
116            }
117        } else {
118            Ok(None)
119        }
120    }
121}
122
123impl<T: ExactSize> ExactSize for Option<T> {
124    #[inline]
125    fn exact_size(&self) -> Size {
126        let mut total = Size { bits: 1, refs: 0 };
127        if let Some(this) = self {
128            total += this.exact_size();
129        }
130        total
131    }
132}
133
impl<'a> Load<'a> for CellSlice<'a> {
    /// Takes everything that is left in the source slice, leaving it empty.
    #[inline]
    fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error> {
        Ok(slice.load_remaining())
    }
}
140
// Reinterprets the `Ok` value via an `as` cast (e.g. `u8 -> i8`),
// propagating the `Err` variant unchanged.
macro_rules! ok_map {
    ($expr:expr => $ty:ty) => {
        match $expr {
            core::result::Result::Ok(s) => core::result::Result::Ok(s as $ty),
            core::result::Result::Err(e) => core::result::Result::Err(e),
        }
    };
}
149
// Generates `Load` impls for primitive types from a `Type => |slice| expr`
// table; `$expr` must evaluate to `Result<$type, Error>`.
macro_rules! impl_primitive_loads {
    ($($type:ty => |$s:ident| $expr:expr),*$(,)?) => {
        $(impl Load<'_> for $type {
            #[inline]
            fn load_from($s: &mut CellSlice) -> Result<Self, Error> {
                $expr
            }
        })*
    };
}
160
161impl_primitive_loads! {
162    bool => |s| s.load_bit(),
163    u8 => |s| s.load_u8(),
164    i8 => |s| ok_map!(s.load_u8() => i8),
165    u16 => |s| s.load_u16(),
166    i16 => |s| ok_map!(s.load_u16() => i16),
167    u32 => |s| s.load_u32(),
168    i32 => |s| ok_map!(s.load_u32() => i32),
169    u64 => |s| s.load_u64(),
170    i64 => |s| ok_map!(s.load_u64() => i64),
171    u128 => |s| s.load_u128(),
172    i128 => |s| ok_map!(s.load_u128() => i128),
173    NonZeroU8 => |s| match s.load_u8() {
174        Ok(s) => match NonZeroU8::new(s) {
175            Some(s) => Ok(s),
176            None => Err(Error::InvalidData)
177        }
178        Err(e) => Err(e),
179    },
180    NonZeroU16 => |s| match s.load_u16() {
181        Ok(s) => match NonZeroU16::new(s) {
182            Some(s) => Ok(s),
183            None => Err(Error::InvalidData)
184        }
185        Err(e) => Err(e),
186    },
187    NonZeroU32 => |s| match s.load_u32() {
188        Ok(s) => match NonZeroU32::new(s) {
189            Some(s) => Ok(s),
190            None => Err(Error::InvalidData)
191        }
192        Err(e) => Err(e),
193    },
194    HashBytes => |s| s.load_u256(),
195}
196
impl<'a> Load<'a> for &'a DynCell {
    /// Loads the next child cell reference, borrowed from the underlying cell.
    fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error> {
        slice.load_reference()
    }
}
202
impl ExactSize for DynCell {
    /// A cell stored as a child occupies exactly one reference and no bits.
    #[inline]
    fn exact_size(&self) -> Size {
        Size { bits: 0, refs: 1 }
    }
}
209
impl<'a> Load<'a> for Cell {
    /// Loads the next child cell reference as an owned (cloned) cell.
    fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error> {
        slice.load_reference_cloned()
    }
}
215
impl ExactSize for Cell {
    /// Same as for [`DynCell`]: one reference, zero bits.
    #[inline]
    fn exact_size(&self) -> Size {
        Size { bits: 0, refs: 1 }
    }
}
222
/// Owned cell slice parts alias.
pub type CellSliceParts = (CellSliceRange, Cell);

impl ExactSize for CellSliceParts {
    /// The range alone determines the size; the cell is only backing storage.
    #[inline]
    fn exact_size(&self) -> Size {
        self.0.size()
    }
}
232
/// Indices of the slice data and refs windows.
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, Hash)]
pub struct CellSliceRange {
    // Start of the data window, in bits (inclusive).
    bits_start: u16,
    // End of the data window, in bits (exclusive).
    bits_end: u16,
    // Start of the references window (inclusive).
    refs_start: u8,
    // End of the references window (exclusive).
    refs_end: u8,
}
241
impl CellSliceRange {
    /// Returns an empty slice range.
    pub const fn empty() -> Self {
        CellSliceRange {
            bits_start: 0,
            bits_end: 0,
            refs_start: 0,
            refs_end: 0,
        }
    }

    /// Returns a full range for the specified cell.
    #[inline]
    pub fn full<T>(cell: &T) -> Self
    where
        T: AsRef<DynCell> + ?Sized,
    {
        let cell = cell.as_ref();
        Self {
            bits_start: 0,
            bits_end: cell.bit_len(),
            refs_start: 0,
            refs_end: cell.reference_count(),
        }
    }

    /// Constructs a new cell slice from the specified cell using the current range.
    /// Returns an error if the cell is pruned.
    ///
    /// NOTE: the resulting range will be truncated to cell bounds.
    #[inline]
    pub fn apply<T>(self, cell: &T) -> Result<CellSlice<'_>, Error>
    where
        T: AsRef<DynCell> + ?Sized,
    {
        // Non-generic inner fn: compiled once regardless of how many `T`s
        // instantiate the generic wrapper.
        fn apply_impl(range: CellSliceRange, cell: &DynCell) -> Result<CellSlice<'_>, Error> {
            // Handle pruned branch access
            if unlikely(cell.is_exotic()) {
                Err(Error::UnexpectedExoticCell)
            } else {
                // Clamp the stored range to the cell bounds so the resulting
                // slice can never address data or refs past the cell.
                let bits_end = std::cmp::min(range.bits_end, cell.bit_len());
                let refs_end = std::cmp::min(range.refs_end, cell.reference_count());
                Ok(CellSlice {
                    range: CellSliceRange {
                        bits_start: std::cmp::min(range.bits_start, bits_end),
                        bits_end,
                        refs_start: std::cmp::min(range.refs_start, refs_end),
                        refs_end,
                    },
                    cell,
                })
            }
        }
        apply_impl(self, cell.as_ref())
    }

    /// Constructs a new cell slice from the specified cell using the current range.
    ///
    /// NOTE: unlike [`CellSliceRange::apply`], the range is used as-is and is
    /// NOT truncated to cell bounds — the caller must uphold the contract below.
    ///
    /// # Safety
    ///
    /// The following must be true:
    /// - cell is not pruned
    /// - range is in cell bounds
    pub fn apply_allow_exotic<T>(self, cell: &T) -> CellSlice<'_>
    where
        T: AsRef<DynCell> + ?Sized,
    {
        CellSlice {
            range: self,
            cell: cell.as_ref(),
        }
    }

    /// Returns the number of remaining bits and refs in the slice.
    pub const fn size(&self) -> Size {
        Size {
            bits: self.size_bits(),
            refs: self.size_refs(),
        }
    }

    /// Returns the number of remaining bits of data in the slice.
    pub const fn size_bits(&self) -> u16 {
        // `saturating_sub` keeps this well-defined even for an inverted range.
        self.bits_end.saturating_sub(self.bits_start)
    }

    /// Returns the number of remaining references in the slice.
    pub const fn size_refs(&self) -> u8 {
        self.refs_end.saturating_sub(self.refs_start)
    }

    /// Returns whether there are no data bits and refs left.
    pub const fn is_empty(&self) -> bool {
        self.is_data_empty() && self.is_refs_empty()
    }

    /// Returns whether there are no bits of data left.
    pub const fn is_data_empty(&self) -> bool {
        self.bits_start >= self.bits_end
    }

    /// Returns whether there are no references left.
    pub const fn is_refs_empty(&self) -> bool {
        self.refs_start >= self.refs_end
    }

    /// Returns the start of data and reference windows.
    pub const fn offset(&self) -> Size {
        Size {
            bits: self.bits_start,
            refs: self.refs_start,
        }
    }

    /// Returns the start of the data window.
    pub const fn offset_bits(&self) -> u16 {
        self.bits_start
    }

    /// Returns the start of the references window.
    pub const fn offset_refs(&self) -> u8 {
        self.refs_start
    }

    /// Returns true if the slice contains at least `bits` and `refs`.
    ///
    /// NOTE(review): `bits_start + bits` assumes `bits` stays within cell
    /// limits so the `u16` addition cannot overflow — confirm callers.
    pub const fn has_remaining(&self, bits: u16, refs: u8) -> bool {
        self.bits_start + bits <= self.bits_end && self.refs_start + refs <= self.refs_end
    }

    /// Tries to advance the start of data and refs windows.
    pub fn skip_first(&mut self, bits: u16, refs: u8) -> Result<(), Error> {
        // Both windows are checked before either is mutated, so on error
        // the range is left unchanged.
        if unlikely(
            self.bits_start + bits > self.bits_end || self.refs_start + refs > self.refs_end,
        ) {
            return Err(Error::CellUnderflow);
        }

        self.bits_start += bits;
        self.refs_start += refs;
        Ok(())
    }

    /// Leaves only the first `bits` and `refs` in the slice.
    pub fn only_first(&mut self, bits: u16, refs: u8) -> Result<(), Error> {
        if unlikely(
            self.bits_start + bits > self.bits_end || self.refs_start + refs > self.refs_end,
        ) {
            return Err(Error::CellUnderflow);
        }

        self.bits_end = self.bits_start + bits;
        self.refs_end = self.refs_start + refs;
        Ok(())
    }

    /// Removes the last `bits` and `refs` from the slice.
    pub fn skip_last(&mut self, bits: u16, refs: u8) -> Result<(), Error> {
        if unlikely(
            self.bits_start + bits > self.bits_end || self.refs_start + refs > self.refs_end,
        ) {
            return Err(Error::CellUnderflow);
        }

        self.bits_end -= bits;
        self.refs_end -= refs;
        Ok(())
    }

    /// Leaves only the last `bits` and `refs` in the slice.
    pub fn only_last(&mut self, bits: u16, refs: u8) -> Result<(), Error> {
        if unlikely(
            self.bits_start + bits > self.bits_end || self.refs_start + refs > self.refs_end,
        ) {
            return Err(Error::CellUnderflow);
        }

        self.bits_start = self.bits_end - bits;
        self.refs_start = self.refs_end - refs;
        Ok(())
    }

    /// Returns the first `bits` and `refs` of the slice and advances the start
    /// of data and refs windows.
    #[must_use = "use `skip_first` if you don't need the result"]
    pub fn split_prefix(&mut self, bits: u16, refs: u8) -> Result<Self, Error> {
        if unlikely(
            self.bits_start + bits > self.bits_end || self.refs_start + refs > self.refs_end,
        ) {
            return Err(Error::CellUnderflow);
        }

        // `res` keeps the old start; `self` advances past the prefix and the
        // prefix's end is pinned to the new start of `self`.
        let mut res = *self;
        self.bits_start += bits;
        self.refs_start += refs;
        res.bits_end = self.bits_start;
        res.refs_end = self.refs_start;
        Ok(res)
    }

    /// Returns the last `bits` and `refs` of the slice and shrinks the data and refs windows.
    #[must_use = "use `skip_last` if you don't need the result"]
    pub fn split_suffix(&mut self, bits: u16, refs: u8) -> Result<Self, Error> {
        if unlikely(
            self.bits_start + bits > self.bits_end || self.refs_start + refs > self.refs_end,
        ) {
            return Err(Error::CellUnderflow);
        }

        // `res` keeps the old end; `self` shrinks and the suffix starts where
        // the shortened `self` now ends.
        let mut res = *self;
        self.bits_end -= bits;
        self.refs_end -= refs;
        res.bits_start = self.bits_end;
        res.refs_start = self.refs_end;
        Ok(res)
    }

    /// Returns a slice range starting at the same bits and refs offsets,
    /// and containing no more than `bits` of data and `refs` of children.
    pub fn get_prefix(&self, bits: u16, refs: u8) -> Self {
        Self {
            bits_start: self.bits_start,
            bits_end: std::cmp::min(self.bits_start + bits, self.bits_end),
            refs_start: self.refs_start,
            refs_end: std::cmp::min(self.refs_start + refs, self.refs_end),
        }
    }

    /// Returns whether this range has the same size as the cell.
    #[inline]
    pub fn is_full<T>(&self, cell: &T) -> bool
    where
        T: AsRef<DynCell> + ?Sized,
    {
        let cell = cell.as_ref();
        self.bits_start == 0
            && self.refs_start == 0
            && self.bits_end == cell.bit_len()
            && self.refs_end == cell.reference_count()
    }
}
484
/// A read-only view for a subrange of a cell.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct CellSlice<'a> {
    // Backing cell; never mutated through the slice.
    cell: &'a DynCell,
    // Data/refs windows into `cell`.
    range: CellSliceRange,
}
491
impl Default for CellSlice<'_> {
    /// An empty slice over the canonical empty cell.
    #[inline]
    fn default() -> Self {
        Cell::empty_cell_ref().as_slice_allow_exotic()
    }
}
498
impl<'a> AsRef<CellSlice<'a>> for CellSlice<'a> {
    /// Identity conversion; lets generic code accept `impl AsRef<CellSlice>`.
    #[inline]
    fn as_ref(&self) -> &CellSlice<'a> {
        self
    }
}

impl<'a> AsMut<CellSlice<'a>> for CellSlice<'a> {
    /// Identity conversion; lets generic code accept `impl AsMut<CellSlice>`.
    #[inline]
    fn as_mut(&mut self) -> &mut CellSlice<'a> {
        self
    }
}
512
513impl<'a> CellSlice<'a> {
514    /// Constructs a new cell slice from the specified cell.
515    /// Returns an error if the cell is not ordinary.
516    pub fn new(cell: &'a DynCell) -> Result<Self, Error> {
517        // Handle pruned branch access
518        if unlikely(cell.is_exotic()) {
519            Err(Error::UnexpectedExoticCell)
520        } else {
521            Ok(Self {
522                range: CellSliceRange::full(cell),
523                cell,
524            })
525        }
526    }
527
    /// Constructs a new cell slice from the specified cell.
    ///
    /// Unlike [`CellSlice::new`], this does not reject exotic cells.
    pub fn new_allow_exotic(cell: &'a DynCell) -> Self {
        Self {
            range: CellSliceRange::full(cell),
            cell,
        }
    }

    /// Alias for [`CellSliceRange::apply`].
    ///
    /// NOTE: the resulting range will be truncated to cell bounds.
    #[inline]
    pub fn apply((range, cell): &'a CellSliceParts) -> Result<Self, Error> {
        range.apply(cell)
    }

    /// Alias for [`CellSliceRange::apply_allow_exotic`].
    ///
    /// # Safety
    ///
    /// The following must be true:
    /// - cell is not pruned
    /// - range is in cell bounds
    #[inline]
    pub fn apply_allow_exotic((range, cell): &'a CellSliceParts) -> Self {
        range.apply_allow_exotic(cell)
    }
555
    /// Returns a copy of the underlying range indices.
    #[inline]
    pub const fn range(&self) -> CellSliceRange {
        self.range
    }

    /// Returns a reference to the underlying cell.
    #[inline]
    pub const fn cell(&self) -> &'a DynCell {
        self.cell
    }

    /// Computes cell type from descriptor bytes.
    #[inline]
    pub fn cell_type(&self) -> CellType {
        self.cell.cell_type()
    }

    /// Computes the cell level from the level mask.
    #[inline]
    pub fn level(&self) -> u8 {
        self.cell.level()
    }

    /// Computes the level mask from the descriptor bytes.
    #[inline]
    pub fn level_mask(&self) -> LevelMask {
        self.cell.level_mask()
    }
585
    // NOTE: all emptiness/size queries below delegate to the stored range.

    /// Returns whether there are no data bits and refs left.
    pub const fn is_empty(&self) -> bool {
        self.range.is_empty()
    }

    /// Returns whether there are no bits of data left.
    ///
    /// # Examples
    ///
    /// ```
    /// # use tycho_types::prelude::{Cell, CellFamily, CellBuilder};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// // Cell with empty data
    /// let empty_cell = Cell::empty_cell();
    /// assert!(empty_cell.as_slice()?.is_data_empty());
    ///
    /// // Cell with some bits in data
    /// let not_empty_cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_bit_zero()?;
    ///     builder.build()?
    /// };
    /// assert!(!not_empty_cell.as_slice()?.is_data_empty());
    /// # Ok(()) }
    /// ```
    pub const fn is_data_empty(&self) -> bool {
        self.range.is_data_empty()
    }

    /// Returns whether there are no references left.
    ///
    /// # Examples
    ///
    /// ```
    /// # use tycho_types::prelude::{Cell, CellFamily, CellBuilder};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// // Cell without references
    /// let empty_cell = Cell::empty_cell();
    /// assert!(empty_cell.as_slice()?.is_refs_empty());
    ///
    /// // Cell with some references
    /// let not_empty_cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_reference(empty_cell)?;
    ///     builder.build()?
    /// };
    /// assert!(!not_empty_cell.as_slice()?.is_refs_empty());
    /// # Ok(()) }
    /// ```
    pub const fn is_refs_empty(&self) -> bool {
        self.range.is_refs_empty()
    }

    /// Returns the number of remaining bits and refs in the slice.
    pub const fn size(&self) -> Size {
        self.range.size()
    }

    /// Returns the number of remaining bits of data in the slice.
    pub const fn size_bits(&self) -> u16 {
        self.range.size_bits()
    }

    /// Returns the number of remaining references in the slice.
    pub const fn size_refs(&self) -> u8 {
        self.range.size_refs()
    }
653
    /// Returns the start of data and reference windows.
    pub const fn offset(&self) -> Size {
        self.range.offset()
    }

    /// Returns the start of the data window.
    ///
    /// # Examples
    ///
    /// ```
    /// # use tycho_types::prelude::CellBuilder;
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_zeros(100)?;
    ///     builder.build()?
    /// };
    /// let mut slice = cell.as_slice()?;
    /// slice.load_u8()?;
    /// assert_eq!(slice.offset_bits(), 8);
    /// # Ok(()) }
    /// ```
    #[inline]
    pub const fn offset_bits(&self) -> u16 {
        self.range.offset_bits()
    }

    /// Returns the start of the references window.
    ///
    /// # Examples
    ///
    /// ```
    /// # use tycho_types::prelude::{Cell, CellFamily, CellBuilder};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_reference(Cell::empty_cell())?;
    ///     builder.build()?
    /// };
    /// let mut slice = cell.as_slice()?;
    ///
    /// slice.load_reference()?;
    /// assert_eq!(slice.offset_refs(), 1);
    /// # Ok(()) }
    /// ```
    #[inline]
    pub const fn offset_refs(&self) -> u8 {
        self.range.offset_refs()
    }

    /// Returns true if the slice contains at least `bits` and `refs`.
    ///
    /// # Examples
    ///
    /// ```
    /// # use tycho_types::prelude::{Cell, CellBuilder, CellFamily};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_zeros(100)?;
    ///     builder.store_reference(Cell::empty_cell())?;
    ///     builder.store_reference(Cell::empty_cell())?;
    ///     builder.build()?
    /// };
    /// let mut slice = cell.as_slice()?;
    ///
    /// assert!(slice.has_remaining(10, 2));
    /// assert!(!slice.has_remaining(500, 2)); // too many bits
    /// assert!(!slice.has_remaining(0, 4)); // too many refs
    /// # Ok(()) }
    /// ```
    #[inline]
    pub const fn has_remaining(&self, bits: u16, refs: u8) -> bool {
        self.range.has_remaining(bits, refs)
    }

    /// Returns whether this slice is untouched.
    #[inline]
    pub fn is_full(&self) -> bool {
        self.range.is_full(self.cell)
    }
736
    /// Recursively computes the count of distinct cells returning
    /// the total storage used by this dag taking into account the
    /// identification of equal cells.
    ///
    /// Root slice does not count as cell. A slice subrange of
    /// cells is used during computation.
    ///
    /// Presumably returns `None` when `limit` is exceeded — see
    /// [`StorageStat::compute_for_slice`] for the exact contract.
    pub fn compute_unique_stats(&self, limit: usize) -> Option<CellTreeStats> {
        StorageStat::compute_for_slice(self, limit)
    }

    // The window manipulations below delegate to `CellSliceRange`;
    // on error the slice is left unchanged.

    /// Tries to advance the start of data and refs windows.
    pub fn skip_first(&mut self, bits: u16, refs: u8) -> Result<(), Error> {
        self.range.skip_first(bits, refs)
    }

    /// Leaves only the first `bits` and `refs` in the slice.
    pub fn only_first(&mut self, bits: u16, refs: u8) -> Result<(), Error> {
        self.range.only_first(bits, refs)
    }

    /// Removes the last `bits` and `refs` from the slice.
    pub fn skip_last(&mut self, bits: u16, refs: u8) -> Result<(), Error> {
        self.range.skip_last(bits, refs)
    }

    /// Leaves only the last `bits` and `refs` in the slice.
    pub fn only_last(&mut self, bits: u16, refs: u8) -> Result<(), Error> {
        self.range.only_last(bits, refs)
    }
766
    /// Lexicographically compares slice data.
    ///
    /// NOTE: this method is quite computationally heavy as it compares the content
    /// of two potentially unaligned slices. Use it with caution or check by cell.
    pub fn lex_cmp(&self, rhs: &CellSlice) -> Result<std::cmp::Ordering, Error> {
        use std::cmp::Ordering;

        let lhs = self;
        let lhs_bits = lhs.size_bits();
        let rhs_bits = rhs.size_bits();

        // Fast check: same backing cell and same bit offset means the data
        // windows coincide, so only the lengths need comparing.

        // NOTE: We can ignore pointer metadata since we are comparing only bits,
        // and custom implementations of `CellImpl` do not alter data behavior.
        if std::ptr::addr_eq(lhs.cell, rhs.cell) && lhs.offset_bits() == rhs.offset_bits() {
            return Ok(lhs_bits.cmp(&rhs_bits));
        }

        // Full check: compare the shared prefix in aligned 32-bit chunks,
        // then the remaining (< 32) bits.
        let bits = std::cmp::min(lhs_bits, rhs_bits);
        let rem = bits % 32;
        for offset in (0..bits - rem).step_by(32) {
            match ok!(lhs.get_u32(offset)).cmp(&ok!(rhs.get_u32(offset))) {
                Ordering::Equal => {}
                ord => return Ok(ord),
            }
        }

        if rem > 0 {
            match ok!(lhs.get_uint(bits - rem, rem)).cmp(&ok!(rhs.get_uint(bits - rem, rem))) {
                Ordering::Equal => {}
                ord => return Ok(ord),
            }
        }

        // All shared bits are equal: the shorter slice orders first.
        Ok(lhs_bits.cmp(&rhs_bits))
    }
805
806    /// Returns `true` if two slices have the same data bits and refs.
807    ///
808    /// NOTE: this method is quite computationally heavy as it compares the content
809    /// of two potentially unaligned slices. Use it with caution or check by cell.
810    pub fn contents_eq(&self, rhs: &CellSlice) -> Result<bool, Error> {
811        let lhs = self;
812
813        // Fast check
814        if lhs.size_bits() != rhs.size_bits() || lhs.size_refs() != rhs.size_refs() {
815            return Ok(false);
816        }
817
818        // Full check
819        if ok!(self.lex_cmp(rhs)).is_ne() {
820            return Ok(false);
821        }
822
823        for (lhs, rhs) in self.references().zip(rhs.references()) {
824            if lhs.repr_hash() != rhs.repr_hash() {
825                return Ok(false);
826            }
827        }
828
829        Ok(true)
830    }
831
    /// Returns a slice starting at the same bits and refs offsets,
    /// and containing no more than `bits` of data and `refs` of children.
    pub fn get_prefix(&self, bits: u16, refs: u8) -> Self {
        Self {
            cell: self.cell,
            // The range clamps the requested sizes to what is available.
            range: self.range.get_prefix(bits, refs),
        }
    }
840
841    /// Returns `true` if this slice data is a prefix of the other slice data.
842    pub fn is_data_prefix_of(&self, other: &Self) -> Result<bool, Error> {
843        let bits = self.size_bits();
844        if bits > other.size_bits() {
845            return Ok(false);
846        }
847
848        let mut other = *other;
849        let ok = other.only_first(bits, 0).is_ok();
850        debug_assert!(ok);
851
852        Ok(ok!(self.lex_cmp(&other)).is_eq())
853    }
854
855    /// Returns `true` if this slice data is a suffix of the other slice data.
856    pub fn is_data_suffix_of(&self, other: &Self) -> Result<bool, Error> {
857        let bits = self.size_bits();
858        if bits > other.size_bits() {
859            return Ok(false);
860        }
861
862        let mut other = *other;
863        let ok = other.only_last(bits, 0).is_ok();
864        debug_assert!(ok);
865
866        Ok(ok!(self.lex_cmp(&other)).is_eq())
867    }
868
869    /// Returns a subslice with the data prefix removed.
870    ///
871    /// If the slice starts with `prefix`, returns the subslice after the prefix, wrapped in `Some`.
872    /// If `prefix` is empty, simply returns the original slice.
873    ///
874    /// If the slice does not start with `prefix`, returns `None`.
875    ///
876    /// # Examples
877    ///
878    /// ```
879    /// # use tycho_types::prelude::CellBuilder;
880    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
881    /// let cell = {
882    ///     let mut builder = CellBuilder::new();
883    ///     builder.store_u32(0xdeadbeaf)?;
884    ///     builder.build()?
885    /// };
886    /// let slice = cell.as_slice()?;
887    ///
888    /// let prefix = {
889    ///     let mut builder = CellBuilder::new();
890    ///     builder.store_u16(0xdead)?;
891    ///     builder.build()?
892    /// };
893    ///
894    /// let without_prefix = slice.strip_data_prefix(&prefix.as_slice()?).unwrap();
895    /// assert_eq!(without_prefix.get_u16(0)?, 0xbeaf);
896    /// # Ok(()) }
897    /// ```
898    pub fn strip_data_prefix<'b>(&self, prefix: &CellSlice<'b>) -> Option<CellSlice<'a>> {
899        let prefix_len = prefix.size_bits();
900        if prefix_len == 0 {
901            Some(*self)
902        } else if self.size_bits() < prefix_len {
903            None
904        } else {
905            let mut result = *self;
906            let lcp = self.longest_common_data_prefix_impl(prefix, prefix_len);
907            if prefix_len <= lcp && result.skip_first(prefix_len, 0).is_ok() {
908                Some(result)
909            } else {
910                None
911            }
912        }
913    }
914
    /// Returns the longest common data prefix.
    ///
    /// NOTE: The returned subslice will be a subslice of the current slice.
    ///
    /// # Examples
    ///
    /// ```
    /// # use tycho_types::prelude::CellBuilder;
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_u32(0xdeadbeaf)?;
    ///     builder.build()?
    /// };
    /// let slice = cell.as_slice()?;
    ///
    /// let prefix = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_u16(0xdead)?;
    ///     builder.build()?
    /// };
    ///
    /// let lcp = slice.longest_common_data_prefix(&prefix.as_slice()?);
    /// assert_eq!(lcp.get_u16(0)?, 0xdead);
    /// assert_eq!(lcp.size_bits(), 16);
    /// # Ok(()) }
    /// ```
    pub fn longest_common_data_prefix(&self, other: &Self) -> Self {
        // No length hint: compare up to the shorter of the two slices.
        let prefix_len = self.longest_common_data_prefix_impl(other, u16::MAX);
        self.get_prefix(prefix_len, 0)
    }
946
    /// Computes the length (in bits) of the longest common prefix of the two
    /// data windows, capped at `max_hint` bits.
    ///
    /// Both bit streams are re-aligned to the left edge of a byte so they can
    /// be compared 8 bits at a time.
    fn longest_common_data_prefix_impl(&self, other: &Self, max_hint: u16) -> u16 {
        // An empty window on either side means there is no common prefix.
        if self.range.bits_start >= self.range.bits_end
            || other.range.bits_start >= other.range.bits_end
        {
            return 0;
        }
        let self_remaining_bits = self.range.bits_end - self.range.bits_start;
        let self_data = self.cell.data();
        let other_remaining_bits = other.range.bits_end - other.range.bits_start;
        let other_data = other.cell.data();

        // Compute max prefix length in bits
        let max_bit_len = std::cmp::min(self_remaining_bits, other_remaining_bits).min(max_hint);

        // Compute shifts and data offsets
        // (`*_r` = bit offset within the first byte, `*_q` = index of that byte)
        let self_r = self.range.bits_start % 8;
        let self_q = (self.range.bits_start / 8) as usize;
        let other_r = other.range.bits_start % 8;
        let other_q = (other.range.bits_start / 8) as usize;

        // Compute remaining bytes to check
        let self_bytes = (self_r + max_bit_len).div_ceil(8) as usize;
        debug_assert!((self_q + self_bytes) <= self_data.len());
        let other_bytes = (other_r + max_bit_len).div_ceil(8) as usize;
        debug_assert!((other_q + other_bytes) <= other_data.len());

        let aligned_bytes = std::cmp::min(self_bytes, other_bytes);

        let mut prefix_len: u16 = 0;

        // SAFETY: all pointer reads below stay within `self_bytes`/`other_bytes`
        // of the respective data slices (see the debug assertions above).
        unsafe {
            let self_data_ptr = self_data.as_ptr().add(self_q);
            let other_data_ptr = other_data.as_ptr().add(other_q);

            // Get first bytes aligned to the left
            let mut self_byte = *self_data_ptr << self_r;
            let mut other_byte = *other_data_ptr << other_r;

            // For all aligned bytes except the first
            for i in 1..aligned_bytes {
                // Concat previous bits with current bits
                // NOTE: shift as `u16` to allow overflow (a shift by 8 when `*_r == 0`)
                let next_self_byte = *self_data_ptr.add(i);
                self_byte |= ((next_self_byte as u16) >> (8 - self_r)) as u8;
                let next_other_byte = *other_data_ptr.add(i);
                other_byte |= ((next_other_byte as u16) >> (8 - other_r)) as u8;

                // XOR bytes to check equality
                match self_byte ^ other_byte {
                    // All bits are equal, update current bytes and move forward
                    0 => {
                        prefix_len += 8;
                        self_byte = next_self_byte << self_r;
                        other_byte = next_other_byte << other_r;
                    }
                    // Some bits are not equal
                    x => {
                        // Number of leading zeros is the number of equal bits
                        return std::cmp::min(prefix_len + x.leading_zeros() as u16, max_bit_len);
                    }
                }
            }

            // Concat remaining bits
            if self_r > 0 && aligned_bytes < self_bytes {
                self_byte |= *self_data_ptr.add(aligned_bytes) >> (8 - self_r);
            }
            if other_r > 0 && aligned_bytes < other_bytes {
                other_byte |= *other_data_ptr.add(aligned_bytes) >> (8 - other_r);
            }

            // Apply last byte mask so bits past `max_bit_len` cannot count as equal
            let last_byte_mask = 0xff << ((8 - max_bit_len % 8) % 8);
            self_byte &= last_byte_mask;
            other_byte &= last_byte_mask;

            // Count the number of remaining equal bits
            prefix_len += (self_byte ^ other_byte).leading_zeros() as u16;
        }

        // Return the longest prefix (without equal bits from the last byte mask)
        std::cmp::min(prefix_len, max_bit_len)
    }
1030
1031    /// Returns the number of leading bits of `self`.
1032    pub fn count_leading(&self, bit: bool) -> Result<u16, Error> {
1033        if self.range.bits_start >= self.range.bits_end {
1034            return Ok(0);
1035        }
1036        let data = self.cell.data();
1037
1038        // Check if data is enough
1039        if self.range.bits_end.div_ceil(8) as usize > data.len() {
1040            return Err(Error::CellUnderflow);
1041        }
1042
1043        let bit_count = self.range.bits_end - self.range.bits_start;
1044
1045        // SAFETY: `bits_end` is in data range
1046        Ok(unsafe { bits_memscan(data, self.range.bits_start, bit_count, bit) })
1047    }
1048
1049    /// Returns the number of trailing bits of `self`.
1050    pub fn count_trailing(&self, bit: bool) -> Result<u16, Error> {
1051        if self.range.bits_start >= self.range.bits_end {
1052            return Ok(0);
1053        }
1054        let data = self.cell.data();
1055
1056        // Check if data is enough
1057        if self.range.bits_end.div_ceil(8) as usize > data.len() {
1058            return Err(Error::CellUnderflow);
1059        }
1060
1061        let bit_count = self.range.bits_end - self.range.bits_start;
1062
1063        // SAFETY: `bits_end` is in data range
1064        Ok(unsafe { bits_memscan_rev(data, self.range.bits_start, bit_count, bit) })
1065    }
1066
    /// Checks whether the current slice consists of the same bits,
    /// returns `None` if there are 0s and 1s, returns `Some(bit)` otherwise.
    ///
    /// # Examples
    ///
    /// ```
    /// # use tycho_types::prelude::{Cell, CellFamily, CellBuilder};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// // Uniform cell consisting of only 0s
    /// let uniform_cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_zeros(10)?;
    ///     builder.build()?
    /// };
    /// assert_eq!(uniform_cell.as_slice()?.test_uniform(), Some(false));
    ///
    /// // Non-uniform cell consisting of 0s and 1s
    /// let non_uniform_cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_zeros(9)?;
    ///     builder.store_bit_one()?;
    ///     builder.build()?
    /// };
    /// assert_eq!(non_uniform_cell.as_slice()?.test_uniform(), None);
    ///
    /// // Empty cell is non-uniform
    /// let non_uniform_cell = Cell::empty_cell();
    /// assert_eq!(non_uniform_cell.as_slice()?.test_uniform(), None);
    /// # Ok(()) }
    /// ```
    pub fn test_uniform(&self) -> Option<bool> {
        // An empty window has no bits to be uniform over.
        if self.range.bits_start >= self.range.bits_end {
            return None;
        }
        let data = self.cell.data();

        // Check if data is enough
        if self.range.bits_end.div_ceil(8) as usize > data.len() {
            return None;
        }

        let mut bit_count = self.range.bits_end - self.range.bits_start;

        // Bit offset within the first byte and that byte's index.
        let r = self.range.bits_start & 0b111;
        let q = self.range.bits_start >> 3;

        // SAFETY: q is in data range
        let mut data_ptr = unsafe { data.as_ptr().add(q as usize) };
        let first_byte = unsafe { *data_ptr };
        // The first bit decides which value the whole window must match.
        let bit = (first_byte >> (7 - r)) & 1 != 0;

        if bit_count < 64 {
            // Short windows: compare byte-by-byte against 0x00 or 0xff.
            let target = (bit as u8) * u8::MAX;
            let first_byte_mask: u8 = 0xff >> r;
            let last_byte_mask: u8 = 0xff << ((8 - (bit_count + r) % 8) % 8);

            if r + bit_count <= 8 {
                // Special case if all remaining_bits are in the first byte
                if ((first_byte ^ target) & first_byte_mask & last_byte_mask) != 0 {
                    return None;
                }
            } else {
                // Check the first byte
                if (first_byte ^ target) & first_byte_mask != 0 {
                    return None;
                }

                unsafe {
                    // Check all full bytes
                    bit_count -= 8 - r;
                    for _ in 0..(bit_count / 8) {
                        data_ptr = data_ptr.add(1);
                        if *data_ptr != target {
                            return None;
                        }
                    }

                    // Check the last byte (if not aligned)
                    if !bit_count.is_multiple_of(8) {
                        data_ptr = data_ptr.add(1);
                        if (*data_ptr ^ target) & last_byte_mask != 0 {
                            return None;
                        }
                    }
                }
            }

            Some(bit)
        } else {
            // Long windows: defer to the optimized bit scanner.
            // SAFETY: `bits_end` is in data range
            let same_bits = unsafe { bits_memscan(data, self.range.bits_start, bit_count, bit) };
            (same_bits == bit_count).then_some(bit)
        }
    }
1161
1162    /// Tries to read the bit at the specified offset (relative to the current bits window).
1163    pub fn get_bit(&self, offset: u16) -> Result<bool, Error> {
1164        if self.range.bits_start + offset < self.range.bits_end {
1165            let index = self.range.bits_start + offset;
1166            if let Some(byte) = self.cell.data().get((index / 8) as usize) {
1167                return Ok((byte >> (7 - index % 8)) & 1 != 0);
1168            }
1169        }
1170        Err(Error::CellUnderflow)
1171    }
1172
1173    /// Tries to read the next bit, incrementing the bits window start.
1174    pub fn load_bit(&mut self) -> Result<bool, Error> {
1175        if self.range.bits_start < self.range.bits_end {
1176            let index = self.range.bits_start;
1177            if let Some(byte) = self.cell.data().get((index / 8) as usize) {
1178                self.range.bits_start += 1;
1179                return Ok((byte >> (7 - index % 8)) & 1 != 0);
1180            }
1181        }
1182        Err(Error::CellUnderflow)
1183    }
1184
    /// Reads `u8` starting from the `offset`.
    ///
    /// Thin wrapper around [`CellSlice::get_small_uint`] with `bits = 8`;
    /// the bits window is not advanced.
    #[inline]
    pub fn get_u8(&self, offset: u16) -> Result<u8, Error> {
        self.get_small_uint(offset, 8)
    }
1190
    /// Tries to read the next `u8`, incrementing the bits window start.
    ///
    /// Thin wrapper around [`CellSlice::load_small_uint`] with `bits = 8`.
    #[inline]
    pub fn load_u8(&mut self) -> Result<u8, Error> {
        self.load_small_uint(8)
    }
1196
    /// Reads `u16` starting from the `offset`.
    ///
    /// `offset` is relative to the start of the bits window; the window
    /// itself is not advanced (see [`CellSlice::load_u16`]).
    pub fn get_u16(&self, offset: u16) -> Result<u16, Error> {
        if self.range.bits_start + offset + 16 <= self.range.bits_end {
            let index = self.range.bits_start + offset;
            let data = self.cell.data();
            let data_len = data.len();

            // Bit offset within the first byte, and that byte's index.
            let r = index % 8;
            let q = (index / 8) as usize;

            if r == 0 && q + 2 <= data_len {
                // Byte-aligned fast path:
                // xxxxxxxx|yyyyyyyy -> xxxxxxxx|yyyyyyyy
                //^r

                // SAFETY: `q + 2 <= data_len`
                Ok(u16::from_be_bytes(unsafe {
                    *(data.as_ptr().add(q) as *const [u8; 2])
                }))
            } else if r != 0 && q + 3 <= data_len {
                // Misaligned: the value spans three bytes; read them into the
                // low 24 bits of a `u32` and shift the value into place.
                // ___xxxxx|yyyyyyyy|zzz_____ -> xxxxxyyy|yyyyyzzz
                //  r^

                let mut bytes = [0u8; 4];

                // SAFETY: `q + 3 <= data_len`
                unsafe {
                    std::ptr::copy_nonoverlapping(
                        data.as_ptr().add(q),
                        bytes.as_mut_ptr().add(1),
                        3,
                    );
                };

                let res = u32::from_be_bytes(bytes);
                Ok((res >> (8 - r)) as u16)
            } else {
                // The cell stores fewer bytes than the bits window implies.
                Err(Error::CellUnderflow)
            }
        } else {
            Err(Error::CellUnderflow)
        }
    }
1239
1240    /// Tries to read the next `u16`, incrementing the bits window start.
1241    pub fn load_u16(&mut self) -> Result<u16, Error> {
1242        let res = self.get_u16(0);
1243        self.range.bits_start += 16 * res.is_ok() as u16;
1244        res
1245    }
1246
    /// Reads `u32` starting from the `offset`.
    ///
    /// `offset` is relative to the start of the bits window; the window
    /// itself is not advanced (see [`CellSlice::load_u32`]).
    pub fn get_u32(&self, offset: u16) -> Result<u32, Error> {
        if self.range.bits_start + offset + 32 <= self.range.bits_end {
            let index = self.range.bits_start + offset;
            let data = self.cell.data();
            let data_len = data.len();

            // Bit offset within the first byte, and that byte's index.
            let r = index % 8;
            let q = (index / 8) as usize;

            if r == 0 && q + 4 <= data_len {
                // xxxxxxxx|yyyyyyyy|zzzzzzzz|wwwwwwww -> xxxxxxxx|yyyyyyyy|zzzzzzzz|wwwwwwww
                //^r

                // SAFETY: `q + 4 <= data_len`
                Ok(u32::from_be_bytes(unsafe {
                    *(data.as_ptr().add(q) as *const [u8; 4])
                }))
            } else if r != 0 && q + 5 <= data_len {
                // Misaligned: the value spans five bytes; read them into the
                // low 40 bits of a `u64` and shift the value into place.
                // ___xxxxx|…three full bytes…|zzz_____ -> the 32 bits starting at r
                //  r^

                let mut bytes = [0u8; 8];

                // SAFETY: `q + 5 <= data_len`
                unsafe {
                    std::ptr::copy_nonoverlapping(
                        data.as_ptr().add(q),
                        bytes.as_mut_ptr().add(3),
                        5,
                    );
                };

                let res = u64::from_be_bytes(bytes);
                Ok((res >> (8 - r)) as u32)
            } else {
                // The cell stores fewer bytes than the bits window implies.
                Err(Error::CellUnderflow)
            }
        } else {
            Err(Error::CellUnderflow)
        }
    }
1289
1290    /// Tries to read the next `u32`, incrementing the bits window start.
1291    pub fn load_u32(&mut self) -> Result<u32, Error> {
1292        let res = self.get_u32(0);
1293        self.range.bits_start += 32 * res.is_ok() as u16;
1294        res
1295    }
1296
    /// Reads `u64` starting from the `offset`.
    ///
    /// `offset` is relative to the start of the bits window; the window
    /// itself is not advanced (see [`CellSlice::load_u64`]).
    pub fn get_u64(&self, offset: u16) -> Result<u64, Error> {
        if self.range.bits_start + offset + 64 <= self.range.bits_end {
            let index = self.range.bits_start + offset;
            let data = self.cell.data();
            let data_len = data.len();

            // Bit offset within the first byte, and that byte's index.
            let r = index % 8;
            let q = (index / 8) as usize;

            if r == 0 && q + 8 <= data_len {
                // Byte-aligned fast path: plain big-endian read.
                // SAFETY: `q + 8 <= data_len`
                Ok(u64::from_be_bytes(unsafe {
                    *(data.as_ptr().add(q) as *const [u8; 8])
                }))
            } else if r != 0 && q + 9 <= data_len {
                // Misaligned: the value spans nine bytes; read them into the
                // low 72 bits of a `u128` and shift the value into place.
                // ___xxxxx|...|zzz_____ -> xxxxx...|...zzz
                //  r^

                let mut bytes = [0u8; 16];

                // SAFETY: `q + 9 <= data_len`
                unsafe {
                    std::ptr::copy_nonoverlapping(
                        data.as_ptr().add(q),
                        bytes.as_mut_ptr().add(7),
                        9,
                    );
                };

                let res = u128::from_be_bytes(bytes);
                Ok((res >> (8 - r)) as u64)
            } else {
                // The cell stores fewer bytes than the bits window implies.
                Err(Error::CellUnderflow)
            }
        } else {
            Err(Error::CellUnderflow)
        }
    }
1336
1337    /// Tries to read the next `u64`, incrementing the bits window start.
1338    pub fn load_u64(&mut self) -> Result<u64, Error> {
1339        let res = self.get_u64(0);
1340        self.range.bits_start += 64 * res.is_ok() as u16;
1341        res
1342    }
1343
    /// Reads `u128` starting from the `offset`.
    ///
    /// `offset` is relative to the start of the bits window; the window
    /// itself is not advanced (see [`CellSlice::load_u128`]).
    pub fn get_u128(&self, offset: u16) -> Result<u128, Error> {
        if self.range.bits_start + offset + 128 <= self.range.bits_end {
            let index = self.range.bits_start + offset;
            let data = self.cell.data();
            let data_len = data.len();

            // Bit offset within the first byte, and that byte's index.
            let r = index % 8;
            let q = (index / 8) as usize;

            if r == 0 && q + 16 <= data_len {
                // Byte-aligned fast path: plain big-endian read.
                // SAFETY: `q + 16 <= data_len`
                Ok(u128::from_be_bytes(unsafe {
                    *(data.as_ptr().add(q) as *const [u8; 16])
                }))
            } else if r != 0 && q + 17 <= data_len {
                // Misaligned: the value spans 17 bytes.
                // ___xxxxx|...|zzz_____ -> xxxxx...|...zzz
                //  r^

                let mut bytes = [0u8; 17];

                // SAFETY: `q + 17 <= data_len`
                unsafe {
                    std::ptr::copy_nonoverlapping(data.as_ptr().add(q), bytes.as_mut_ptr(), 17);
                };

                // Shift the 16-byte tail right by `8 - r` and put the first
                // byte's low `8 - r` bits into the topmost positions.
                let res = u128::from_be_bytes(bytes[1..].try_into().unwrap());
                Ok(((bytes[0] as u128) << (120 + r)) | (res >> (8 - r)))
            } else {
                // The cell stores fewer bytes than the bits window implies.
                Err(Error::CellUnderflow)
            }
        } else {
            Err(Error::CellUnderflow)
        }
    }
1379
1380    /// Tries to read the next `u128`, incrementing the bits window start.
1381    pub fn load_u128(&mut self) -> Result<u128, Error> {
1382        let res = self.get_u128(0);
1383        self.range.bits_start += 128 * res.is_ok() as u16;
1384        res
1385    }
1386
    /// Reads 32 bytes starting from the `offset`.
    ///
    /// `offset` is relative to the start of the bits window; the window
    /// itself is not advanced (see [`CellSlice::load_u256`]).
    pub fn get_u256(&self, offset: u16) -> Result<HashBytes, Error> {
        if self.range.bits_start + offset + 256 <= self.range.bits_end {
            let index = self.range.bits_start + offset;
            let data = self.cell.data();
            let data_len = data.len();

            // Bit offset within the first byte, and that byte's index.
            let r = index % 8;
            let q = (index / 8) as usize;

            if r == 0 && q + 32 <= data_len {
                // Byte-aligned fast path: copy the bytes verbatim.
                // SAFETY: `q + 32 <= data_len`
                Ok(HashBytes(unsafe {
                    *(data.as_ptr().add(q) as *const [u8; 32])
                }))
            } else if r != 0 && q + 33 <= data_len {
                // Misaligned: the value spans 33 bytes.
                // ___xxxxx|...|zzz_____ -> xxxxx...|...zzz
                //  r^

                let shift = 8 - r;
                let rev_shift = 120 + r;

                // SAFETY: `q + 33 <= data_len`
                unsafe {
                    let mut bytes = [0u8; 33];
                    std::ptr::copy_nonoverlapping(data.as_ptr().add(q), bytes.as_mut_ptr(), 33);

                    // Interpret last 32 bytes as two u128
                    let [ovf, bytes @ ..] = bytes;
                    let [mut hi, mut lo]: [u128; 2] = std::mem::transmute(bytes);

                    // Numbers are in big endian order, swap bytes on little endian arch
                    #[cfg(target_endian = "little")]
                    {
                        hi = hi.swap_bytes();
                        lo = lo.swap_bytes();
                    }

                    // Shift right, putting `ovf` to the high bits: each half
                    // receives the carried-over bits from its predecessor.
                    Ok(std::mem::transmute::<[[u8; 16]; 2], HashBytes>([
                        ((hi >> shift) | ((ovf as u128) << rev_shift)).to_be_bytes(),
                        ((lo >> shift) | (hi << rev_shift)).to_be_bytes(),
                    ]))
                }
            } else {
                // The cell stores fewer bytes than the bits window implies.
                Err(Error::CellUnderflow)
            }
        } else {
            Err(Error::CellUnderflow)
        }
    }
1438
1439    /// Tries to read the next 32 bytes, incrementing the bits window start.
1440    pub fn load_u256(&mut self) -> Result<HashBytes, Error> {
1441        let res = self.get_u256(0);
1442        self.range.bits_start += 256 * res.is_ok() as u16;
1443        res
1444    }
1445
1446    /// Returns a small subset of `bits` (0..=8) starting from the `offset`.
1447    ///
1448    /// NOTE: Reading zero bits always succeeds,
1449    /// and reading more than 8 bits always fails.
1450    pub fn get_small_uint(&self, offset: u16, bits: u16) -> Result<u8, Error> {
1451        if bits == 0 {
1452            return Ok(0);
1453        }
1454
1455        if bits <= 8 && self.range.bits_start + offset + bits <= self.range.bits_end {
1456            let index = self.range.bits_start + offset;
1457
1458            let r = index % 8;
1459            let q = (index / 8) as usize;
1460            let Some(&byte) = self.cell.data().get(q) else {
1461                return Err(Error::CellUnderflow);
1462            };
1463
1464            if r == 0 {
1465                // xxx_____ -> _____xxx
1466                //^r
1467                Ok(byte >> (8 - bits))
1468            } else if bits <= (8 - r) {
1469                // __xxx___ -> _____xxx
1470                // r^
1471                Ok((byte >> (8 - r - bits)) & ((1 << bits) - 1))
1472            } else {
1473                // ______xx|y_______ -> _____xxy
1474                //     r^
1475
1476                let mut res = (byte as u16) << 8;
1477                let Some(&next_byte) = self.cell.data().get(q + 1) else {
1478                    return Err(Error::CellUnderflow);
1479                };
1480
1481                res |= next_byte as u16;
1482                Ok((res >> (8 - r)) as u8 >> (8 - bits))
1483            }
1484        } else {
1485            Err(Error::CellUnderflow)
1486        }
1487    }
1488
1489    /// Tries to read the next small subset of `bits` (0..=8), incrementing the bits window start.
1490    ///
1491    /// NOTE: Reading zero bits always succeeds,
1492    /// and reading more than 8 bits always fails.
1493    pub fn load_small_uint(&mut self, bits: u16) -> Result<u8, Error> {
1494        let res = self.get_small_uint(0, bits);
1495        self.range.bits_start += bits * res.is_ok() as u16;
1496        res
1497    }
1498
    /// Reads `u64` from the cell (but only the specified number of bits)
    /// starting from the `offset`.
    ///
    /// NOTE: Reading zero bits always succeeds,
    /// and reading more than 64 bits always fails.
    pub fn get_uint(&self, offset: u16, mut bits: u16) -> Result<u64, Error> {
        if bits == 0 {
            return Ok(0);
        }

        if bits <= 64 && self.range.bits_start + offset + bits <= self.range.bits_end {
            let index = self.range.bits_start + offset;
            let data = self.cell.data();
            let data_len = data.len();

            // Check if data is enough
            if self.range.bits_end.div_ceil(8) as usize > data_len {
                return Err(Error::CellUnderflow);
            }

            // Bit offset within the first byte, and that byte's index.
            let r = index % 8;
            let q = (index / 8) as usize;

            // SAFETY: remaining bits are at least enough for `data_len`
            unsafe {
                let data_ptr = data.as_ptr().add(q);
                // Mask off the bits before the window start in the first byte.
                let first_byte = *data_ptr & (0xff >> r);

                // Bits of padding after the value in its last byte.
                let right_shift = (8 - (bits + r) % 8) % 8;

                if r + bits <= 8 {
                    // Special case if all remaining_bits are in the first byte
                    Ok((first_byte >> right_shift) as u64)
                } else {
                    let mut bytes = [0u8; 8];

                    // Copy remaining bytes (`bits` becomes the tail length
                    // after the first byte's contribution)
                    bits -= 8 - r;
                    std::ptr::copy_nonoverlapping(
                        data_ptr.add(1),
                        bytes.as_mut_ptr(),
                        bits.div_ceil(8) as usize,
                    );

                    // Right-align the tail, then put the first byte's bits on top.
                    let mut result = u64::from_be_bytes(bytes) >> (64 - bits);
                    result |= (first_byte as u64) << bits;
                    Ok(result)
                }
            }
        } else {
            Err(Error::CellUnderflow)
        }
    }
1552
1553    /// Tries to read the next `u64` (but only the specified number of bits),
1554    /// incrementing the bits window start.
1555    ///
1556    /// NOTE: Reading zero bits always succeeds,
1557    /// and reading more than 64 bits always fails.
1558    pub fn load_uint(&mut self, bits: u16) -> Result<u64, Error> {
1559        let res = self.get_uint(0, bits);
1560        self.range.bits_start += bits * res.is_ok() as u16;
1561        res
1562    }
1563
    /// Loads `bits`-width int from the slice.
    ///
    /// Widths up to 64 go through [`CellSlice::load_uint`] (with manual sign
    /// extension when `signed`); wider values are read into a byte buffer and
    /// decoded as a big-endian integer.
    #[cfg(feature = "bigint")]
    pub fn load_bigint(&mut self, bits: u16, signed: bool) -> Result<num_bigint::BigInt, Error> {
        use num_bigint::{BigInt, Sign};

        match bits {
            0 => Ok(BigInt::ZERO),
            1..=64 if !signed => self.load_uint(bits).map(BigInt::from),
            1..=64 => self.load_uint(bits).map(|mut int| {
                if bits < 64 {
                    // Clone sign bit into all high bits
                    int |= ((int >> (bits - 1)) * u64::MAX) << (bits - 1);
                }
                BigInt::from(int as i64)
            }),
            65.. => {
                let rem = bits % 8;
                // Fixed scratch buffer; `load_raw` fails if `bits` exceeds it.
                let mut buffer = [0u8; 128];
                self.load_raw(&mut buffer, bits).map(|buffer| {
                    let mut int = if signed {
                        BigInt::from_signed_bytes_be(buffer)
                    } else {
                        BigInt::from_bytes_be(Sign::Plus, buffer)
                    };
                    // `load_raw` left-aligns the bits; drop the padding.
                    if !bits.is_multiple_of(8) {
                        int >>= 8 - rem;
                    }
                    int
                })
            }
        }
    }
1596
    /// Loads `bits`-width unsigned int from the slice.
    ///
    /// Widths up to 64 go through [`CellSlice::load_uint`]; wider values are
    /// read into a byte buffer and decoded as a big-endian integer.
    #[cfg(feature = "bigint")]
    pub fn load_biguint(&mut self, bits: u16) -> Result<num_bigint::BigUint, Error> {
        use num_bigint::BigUint;

        match bits {
            0 => Ok(BigUint::ZERO),
            1..=64 => self.load_uint(bits).map(BigUint::from),
            65.. => {
                let rem = bits % 8;
                // Fixed scratch buffer; `load_raw` fails if `bits` exceeds it.
                let mut buffer = [0u8; 128];
                self.load_raw(&mut buffer, bits).map(|buffer| {
                    let mut int = BigUint::from_bytes_be(buffer);
                    // `load_raw` left-aligns the bits; drop the padding.
                    if !bits.is_multiple_of(8) {
                        int >>= 8 - rem;
                    }
                    int
                })
            }
        }
    }
1618
    /// Loads variable length int from the slice.
    ///
    /// First reads a `len_bits`-wide byte length `len`, then loads `len * 8`
    /// bits as the integer value.
    ///
    /// NOTE(review): `len * 8` can overflow `u16` when `len >= 8192`, which is
    /// reachable for `len_bits > 13` — confirm callers only pass small `len_bits`.
    #[cfg(feature = "bigint")]
    pub fn load_var_bigint(
        &mut self,
        len_bits: u16,
        signed: bool,
    ) -> Result<num_bigint::BigInt, Error> {
        let len = self.load_uint(len_bits)? as u16;
        self.load_bigint(len * 8, signed)
    }
1629
    /// Loads variable length unsigned int from the slice.
    ///
    /// First reads a `len_bits`-wide byte length `len`, then loads `len * 8`
    /// bits as the integer value.
    ///
    /// NOTE(review): `len * 8` can overflow `u16` when `len >= 8192`, which is
    /// reachable for `len_bits > 13` — confirm callers only pass small `len_bits`.
    #[cfg(feature = "bigint")]
    pub fn load_var_biguint(&mut self, len_bits: u16) -> Result<num_bigint::BigUint, Error> {
        let len = self.load_uint(len_bits)? as u16;
        self.load_biguint(len * 8)
    }
1636
1637    /// Reads the specified number of bits to the target starting from the `offset`.
1638    pub fn get_raw<'b>(
1639        &'_ self,
1640        offset: u16,
1641        target: &'b mut [u8],
1642        bits: u16,
1643    ) -> Result<&'b mut [u8], Error> {
1644        if bits == 0 {
1645            return Ok(&mut target[..0]);
1646        }
1647
1648        if self.range.bits_start + bits <= self.range.bits_end {
1649            let index = self.range.bits_start + offset;
1650            let data = self.cell.data();
1651            let data_len = data.len();
1652
1653            let target_len = bits.div_ceil(8) as usize;
1654            let target = if target_len <= target.len() {
1655                &mut target[..target_len]
1656            } else {
1657                return Err(Error::CellUnderflow);
1658            };
1659
1660            let r = index % 8;
1661            let q = (index / 8) as usize;
1662
1663            // SAFETY: q will be checked to be in range 0..data_len,
1664            // r is in range 0..=7, target is guaranteed to be `target_len`
1665            unsafe {
1666                let mut data_ptr = data.as_ptr().add(q);
1667                let target_ptr = target.as_mut_ptr();
1668
1669                if r == 0 && q + target_len <= data_len {
1670                    std::ptr::copy_nonoverlapping(data_ptr, target_ptr, target_len);
1671                } else if r != 0 {
1672                    let byte_len = (bits + r).div_ceil(8) as usize - 1;
1673                    if q + byte_len > data_len {
1674                        return Err(Error::CellUnderflow);
1675                    }
1676
1677                    let shift = 8 - r;
1678                    for i in 0..byte_len {
1679                        let target = target_ptr.add(i);
1680                        *target = *data_ptr << r;
1681                        data_ptr = data_ptr.add(1);
1682                        *target |= *data_ptr >> shift;
1683                    }
1684                    if byte_len < target_len {
1685                        *target_ptr.add(byte_len) = *data_ptr << r;
1686                    }
1687                } else {
1688                    return Err(Error::CellUnderflow);
1689                }
1690
1691                let bits_r = bits % 8;
1692                if bits_r != 0 {
1693                    *target_ptr.add(target_len - 1) &= 0xff << (8 - bits_r);
1694                }
1695                Ok(target)
1696            }
1697        } else {
1698            Err(Error::CellUnderflow)
1699        }
1700    }
1701
1702    /// Tries to read the specified number of bits, incrementing the bits window start.
1703    /// Returns the minimum subslice containing all bits.
1704    pub fn load_raw<'b>(
1705        &'_ mut self,
1706        target: &'b mut [u8],
1707        bits: u16,
1708    ) -> Result<&'b mut [u8], Error> {
1709        let res = self.get_raw(0, target, bits);
1710        self.range.bits_start += bits * res.is_ok() as u16;
1711        res
1712    }
1713
1714    /// Reads all remaining bits and refs into the new slice.
1715    pub fn load_remaining(&mut self) -> CellSlice<'a> {
1716        let result = *self;
1717        self.range.bits_start = self.range.bits_end;
1718        self.range.refs_start = self.range.refs_end;
1719        result
1720    }
1721
1722    /// Returns the first `bits` and `refs` of the slice and advances the start
1723    /// of data and refs windows.
1724    #[must_use = "use `skip_first` if you don't need the result"]
1725    pub fn load_prefix(&mut self, bits: u16, refs: u8) -> Result<Self, Error> {
1726        let prefix_range = ok!(self.range.split_prefix(bits, refs));
1727        Ok(Self {
1728            cell: self.cell,
1729            range: prefix_range,
1730        })
1731    }
1732
1733    /// Returns the last `bits` and `refs` of the slice and shrinks the data and refs windows.
1734    #[must_use = "use `skip_last` if you don't need the result"]
1735    pub fn load_suffix(&mut self, bits: u16, refs: u8) -> Result<Self, Error> {
1736        let suffix_range = ok!(self.range.split_suffix(bits, refs));
1737        Ok(Self {
1738            cell: self.cell,
1739            range: suffix_range,
1740        })
1741    }
1742
1743    /// Returns a reference to the Nth child cell (relative to this slice's refs window).
1744    pub fn get_reference(&self, index: u8) -> Result<&'a DynCell, Error> {
1745        if self.range.refs_start + index < self.range.refs_end
1746            && let Some(cell) = self.cell.reference(self.range.refs_start + index)
1747        {
1748            return Ok(cell);
1749        }
1750        Err(Error::CellUnderflow)
1751    }
1752
1753    /// Returns the Nth child cell (relative to this slice's refs window).
1754    pub fn get_reference_cloned(&self, index: u8) -> Result<Cell, Error> {
1755        if self.range.refs_start + index < self.range.refs_end
1756            && let Some(cell) = self.cell.reference_cloned(self.range.refs_start + index)
1757        {
1758            return Ok(cell);
1759        }
1760        Err(Error::CellUnderflow)
1761    }
1762
1763    /// Tries to load the specified child cell as slice.
1764    /// Returns an error if the loaded cell is absent or is pruned.
1765    pub fn get_reference_as_slice(&self, index: u8) -> Result<CellSlice<'a>, Error> {
1766        if self.range.refs_start + index < self.range.refs_end {
1767            let Some(cell) = self.cell.reference(self.range.refs_start + index) else {
1768                return Err(Error::CellUnderflow);
1769            };
1770
1771            CellSlice::new(cell)
1772        } else {
1773            Err(Error::CellUnderflow)
1774        }
1775    }
1776
1777    /// Creates an iterator through child nodes.
1778    pub fn references(&self) -> RefsIter<'a> {
1779        RefsIter {
1780            cell: self.cell,
1781            max: self.range.refs_end,
1782            index: self.range.refs_start,
1783        }
1784    }
1785
1786    /// Converts this slice into an iterator through child nodes.
1787    #[inline]
1788    pub fn into_references(self) -> RefsIter<'a> {
1789        self.references()
1790    }
1791
1792    /// Returns this slice, but with references skipped.
1793    #[inline]
1794    pub fn without_references(mut self) -> Self {
1795        self.range.refs_start = self.range.refs_end;
1796        self
1797    }
1798
1799    /// Returns a reference to the next child cell (relative to this slice's refs window),
1800    /// incrementing the refs window start.
1801    pub fn load_reference(&mut self) -> Result<&'a DynCell, Error> {
1802        if self.range.refs_start < self.range.refs_end {
1803            let res = match self.cell.reference(self.range.refs_start) {
1804                Some(cell) => Ok(cell),
1805                None => Err(Error::CellUnderflow),
1806            };
1807            self.range.refs_start += res.is_ok() as u8;
1808            res
1809        } else {
1810            Err(Error::CellUnderflow)
1811        }
1812    }
1813
1814    /// Returns the next child cell (relative to this slice's refs window),
1815    /// incrementing the refs window start.
1816    pub fn load_reference_cloned(&mut self) -> Result<Cell, Error> {
1817        if self.range.refs_start < self.range.refs_end {
1818            let res = match self.cell.reference_cloned(self.range.refs_start) {
1819                Some(cell) => Ok(cell),
1820                None => Err(Error::CellUnderflow),
1821            };
1822            self.range.refs_start += res.is_ok() as u8;
1823            res
1824        } else {
1825            Err(Error::CellUnderflow)
1826        }
1827    }
1828
1829    /// Tries to load the next child cell as slice.
1830    /// Returns an error if the loaded cell is absent or is pruned.
1831    ///
1832    /// NOTE: In case of pruned cell access the current slice remains unchanged.
1833    pub fn load_reference_as_slice(&mut self) -> Result<CellSlice<'a>, Error> {
1834        if self.range.refs_start < self.range.refs_end {
1835            let Some(cell) = self.cell.reference(self.range.refs_start) else {
1836                return Err(Error::CellUnderflow);
1837            };
1838
1839            let res = CellSlice::new(cell);
1840            self.range.refs_start += res.is_ok() as u8;
1841            res
1842        } else {
1843            Err(Error::CellUnderflow)
1844        }
1845    }
1846
    /// Returns an object which will display data as a bitstring
    /// with a termination bit.
    #[inline]
    pub fn display_data<'b: 'a>(&'b self) -> DisplayCellSliceData<'b, 'a> {
        // Cheap borrow-only wrapper; all formatting happens lazily in the
        // `Display`/`LowerHex`/`UpperHex`/`Binary` impls below.
        DisplayCellSliceData(self)
    }
1853}
1854
/// Helper struct to print the cell slice data.
///
/// Created by [`CellSlice::display_data`]; holds only a borrow of the slice.
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct DisplayCellSliceData<'b, 'a>(&'b CellSlice<'a>);
1859
1860impl<'b: 'a, 'a> std::fmt::Display for DisplayCellSliceData<'b, 'a> {
1861    #[inline]
1862    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1863        std::fmt::LowerHex::fmt(self, f)
1864    }
1865}
1866
1867impl<'b: 'a, 'a> std::fmt::LowerHex for DisplayCellSliceData<'b, 'a> {
1868    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1869        let mut bytes = [0u8; 128];
1870        std::fmt::LowerHex::fmt(&ok!(make_bitstring(self.0, &mut bytes)), f)
1871    }
1872}
1873
1874impl<'b: 'a, 'a> std::fmt::UpperHex for DisplayCellSliceData<'b, 'a> {
1875    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1876        let mut bytes = [0u8; 128];
1877        std::fmt::UpperHex::fmt(&ok!(make_bitstring(self.0, &mut bytes)), f)
1878    }
1879}
1880
1881impl<'b: 'a, 'a> std::fmt::Binary for DisplayCellSliceData<'b, 'a> {
1882    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1883        let mut bytes = [0u8; 128];
1884        std::fmt::Binary::fmt(&ok!(make_bitstring(self.0, &mut bytes)), f)
1885    }
1886}
1887
1888fn make_bitstring<'b: 'a, 'a>(
1889    s: &'b CellSlice<'a>,
1890    bytes: &'b mut [u8; 128],
1891) -> Result<Bitstring<'b>, std::fmt::Error> {
1892    let bit_len = s.size_bits();
1893    if s.get_raw(0, bytes, bit_len).is_err() {
1894        return Err(std::fmt::Error);
1895    }
1896    Ok(Bitstring { bytes, bit_len })
1897}
1898
1899impl ExactSize for CellSlice<'_> {
1900    #[inline]
1901    fn exact_size(&self) -> Size {
1902        self.size()
1903    }
1904}
1905
/// A type with a known size in bits and refs.
pub trait ExactSize {
    /// Exact size of the value when it is stored in a slice.
    ///
    /// Returns the number of data bits and child references the value occupies.
    fn exact_size(&self) -> Size;
}
1911
1912impl<T: ExactSize> ExactSize for &T {
1913    #[inline]
1914    fn exact_size(&self) -> Size {
1915        T::exact_size(self)
1916    }
1917}
1918
1919impl<T: ExactSize> ExactSize for &mut T {
1920    #[inline]
1921    fn exact_size(&self) -> Size {
1922        T::exact_size(self)
1923    }
1924}
1925
1926impl<T: ExactSize> ExactSize for Box<T> {
1927    #[inline]
1928    fn exact_size(&self) -> Size {
1929        T::exact_size(self)
1930    }
1931}
1932
1933impl<T: ExactSize> ExactSize for Arc<T> {
1934    #[inline]
1935    fn exact_size(&self) -> Size {
1936        T::exact_size(self)
1937    }
1938}
1939
1940impl<T: ExactSize> ExactSize for Rc<T> {
1941    #[inline]
1942    fn exact_size(&self) -> Size {
1943        T::exact_size(self)
1944    }
1945}
1946
impl ExactSize for () {
    #[inline]
    fn exact_size(&self) -> Size {
        // The unit type stores nothing: zero bits, zero refs.
        Size::ZERO
    }
}
1953
// Removes a single leading `+` from a token sequence.
// Used by `impl_exact_size_for_tuples` to turn `+ a + b` into `a + b`.
macro_rules! strip_plus {
    (+ $($rest: tt)*) => {
        $($rest)*
    }
}
1959
// Implements `ExactSize` for tuples by summing the exact sizes of all fields.
// Each `(index: TypeParam)` pair maps a tuple field to its generic parameter.
macro_rules! impl_exact_size_for_tuples {
    ($( ($($tt:tt: $t:ident),+) ),*$(,)?) => {$(
        impl<$($t: ExactSize),+> ExactSize for ($($t),*,) {
            fn exact_size(&self) -> Size {
                // `strip_plus` drops the leading `+` produced by the repetition.
                strip_plus!($(+ ExactSize::exact_size(&self.$tt))+)
            }
        }
    )*};
}
1969
// Tuples of up to 6 elements get an `ExactSize` implementation.
impl_exact_size_for_tuples! {
    (0: T0),
    (0: T0, 1: T1),
    (0: T0, 1: T1, 2: T2),
    (0: T0, 1: T1, 2: T2, 3: T3),
    (0: T0, 1: T1, 2: T2, 3: T3, 4: T4),
    (0: T0, 1: T1, 2: T2, 3: T3, 4: T4, 5: T5),
}
1978
/// Returns the number of consecutive bits equal to `cmp_to`, scanning forward
/// from bit `offset` over at most `bit_count` bits of `data`.
///
/// # Safety
/// The following must be true:
/// - (offset + bit_count + 7) / 8 <= data.len()
unsafe fn bits_memscan(data: &[u8], mut offset: u16, bit_count: u16, cmp_to: bool) -> u16 {
    #[inline]
    fn is_aligned_to_u64(ptr: *const u8) -> bool {
        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
        // provenance).
        let addr = ptr.cast::<()>() as usize;
        addr & (std::mem::align_of::<u64>() - 1) == 0
    }

    unsafe {
        if bit_count == 0 {
            return 0;
        }
        debug_assert!((offset + bit_count + 7) as usize / 8 <= data.len());

        // XOR mask: flips the data when scanning for ones, so every loop below
        // only needs to look for the first non-zero (i.e. non-matching) bit.
        let xor_value = cmp_to as u8 * u8::MAX;

        // Apply offset to byte level
        let mut ptr = data.as_ptr().add(offset as usize >> 3);
        offset &= 0b111;

        let mut rem = bit_count;
        if offset > 0 {
            // Unaligned head: only the low `8 - offset` bits of the first byte
            // belong to the window.
            // NOTE: `offset` is in range 1..=7
            let v = (*ptr ^ xor_value) << offset;
            let c = v.leading_zeros() as u16;
            let l = 8 - offset;
            if c < l || bit_count <= l {
                return c.min(bit_count);
            }

            ptr = ptr.add(1);
            rem -= l;
        }

        // Byte-at-a-time until the pointer is suitable for the aligned u64 loop.
        while rem >= 8 && !is_aligned_to_u64(ptr) {
            let v = *ptr ^ xor_value;
            if v > 0 {
                return bit_count - rem + v.leading_zeros() as u16;
            }

            ptr = ptr.add(1);
            rem -= 8;
        }

        // Aligned fast path: 64 bits at a time. The byte swap on little-endian
        // targets restores the big-endian bit order of the cell data.
        let xor_value_l = cmp_to as u64 * u64::MAX;
        while rem >= 64 {
            #[cfg(target_endian = "little")]
            let z = { (*ptr.cast::<u64>()).swap_bytes() ^ xor_value_l };
            #[cfg(not(target_endian = "little"))]
            let z = { *ptr.cast::<u64>() ^ xor_value_l };

            if z > 0 {
                return bit_count - rem + z.leading_zeros() as u16;
            }

            ptr = ptr.add(8);
            rem -= 64;
        }

        // Remaining whole bytes.
        while rem >= 8 {
            let v = *ptr ^ xor_value;
            if v > 0 {
                return bit_count - rem + v.leading_zeros() as u16;
            }

            ptr = ptr.add(1);
            rem -= 8;
        }

        // Unaligned tail: fewer than 8 bits left in the window.
        if rem > 0 {
            let v = *ptr ^ xor_value;
            let c = v.leading_zeros() as u16;
            if c < rem {
                return bit_count - rem + c;
            }
        }

        // Every bit in the window matched `cmp_to`.
        bit_count
    }
}
2063
/// Returns the number of trailing bits equal to `cmp_to`, scanning backwards
/// from the end of the bit window `[offset, offset + bit_count)` of `data`.
///
/// # Safety
/// The following must be true:
/// - (offset + bit_count + 7) / 8 <= data.len()
unsafe fn bits_memscan_rev(data: &[u8], mut offset: u16, mut bit_count: u16, cmp_to: bool) -> u16 {
    unsafe {
        if bit_count == 0 {
            return 0;
        }
        debug_assert!((offset + bit_count + 7) as usize / 8 <= data.len());

        // XOR mask: flips the data when scanning for ones, so the code below
        // only needs to look for the first non-zero (i.e. non-matching) bit.
        let xor_value = cmp_to as u8 * u8::MAX;
        // Start at the byte containing the bit just past the window end;
        // `offset` is reused as the window end's bit position within that byte.
        let mut ptr = data.as_ptr().add((offset + bit_count) as usize >> 3);
        offset = (offset + bit_count) & 0b111;

        let mut res = offset;
        if offset > 0 {
            // Unaligned tail: only the high `offset` bits of this byte are in
            // the window after the shift.
            let v = (*ptr >> (8 - offset)) ^ xor_value;
            let c = v.trailing_zeros() as u16;
            if c < offset || res >= bit_count {
                return c.min(bit_count);
            }
            bit_count -= res;
        }

        // 32 bits at a time via unaligned reads; the byte swap on little-endian
        // targets restores the big-endian bit order of the cell data.
        let xor_value_l = cmp_to as u32 * u32::MAX;
        while bit_count >= 32 {
            ptr = ptr.sub(4);

            #[cfg(target_endian = "little")]
            let v = { ptr.cast::<u32>().read_unaligned().swap_bytes() ^ xor_value_l };
            #[cfg(not(target_endian = "little"))]
            let v = { ptr.cast::<u32>().read_unaligned() ^ xor_value_l };

            if v > 0 {
                return res + v.trailing_zeros() as u16;
            }

            res += 32;
            bit_count -= 32;
        }

        // Remaining whole bytes.
        while bit_count >= 8 {
            ptr = ptr.sub(1);
            let v = *ptr ^ xor_value;
            if v > 0 {
                return res + v.trailing_zeros() as u16;
            }

            res += 8;
            bit_count -= 8;
        }

        // Unaligned head: fewer than 8 bits left at the start of the window.
        if bit_count > 0 {
            ptr = ptr.sub(1);
            let v = *ptr ^ xor_value;
            res + std::cmp::min(v.trailing_zeros() as u16, bit_count)
        } else {
            res
        }
    }
}
2125
#[cfg(test)]
mod tests {
    use crate::error::Error;
    use crate::prelude::*;

    // Test helper: builds an ordinary cell by applying `f` to a fresh builder.
    // Panics on any storage/build error.
    fn build_cell<F: FnOnce(&mut CellBuilder) -> Result<(), Error>>(f: F) -> Cell {
        let mut builder = CellBuilder::new();
        f(&mut builder).unwrap();
        builder.build().unwrap()
    }

    // Test helper: prints a slice by materializing it back into a cell tree.
    fn print_slice(name: &str, slice: CellSlice) {
        println!(
            "{name}: {}",
            build_cell(|b| b.store_slice(slice)).display_tree()
        );
    }

    #[test]
    #[cfg_attr(miri, ignore)] // takes too long to execute on miri
    fn get_raw() -> anyhow::Result<()> {
        let cell = CellBuilder::from_raw_data(&[0xff; 128], 200).and_then(CellBuilder::build)?;
        let slice = cell.as_slice()?;

        // Output buffer too small for the requested bit count.
        let mut data = [0; 1];
        assert!(slice.get_raw(0, &mut data, 100).is_err());

        // Requested more bits than the slice contains.
        let mut data = [0; 64];
        assert!(slice.get_raw(0, &mut data, 500).is_err());

        let cell = CellBuilder::from_raw_data(&[0xff; 128], 1023).and_then(CellBuilder::build)?;
        let slice = cell.as_slice()?;

        // Exhaustively check all valid (offset, bits) combinations for a full cell.
        let mut data = [0; 128];
        for offset in 0..=8 {
            for bits in 0..=(1023 - offset) {
                slice.get_raw(offset, &mut data, bits)?;
            }
        }

        Ok(())
    }

    #[test]
    fn strip_data_prefix() -> anyhow::Result<()> {
        let cell1 = build_cell(|b| {
            b.store_u16(0xabcd)?;
            b.store_bit_zero()?;
            b.store_u16(0xffff)
        });
        let mut slice1 = cell1.as_slice()?;
        slice1.skip_first(4, 0)?;

        let cell2 = build_cell(|b| {
            b.store_uint(0xbcd, 12)?;
            b.store_bit_zero()
        });

        print_slice("A", slice1);
        print_slice("B", cell2.as_slice()?);
        print_slice("LCP", slice1.longest_common_data_prefix(&cell2.as_slice()?));

        // Stripping the common prefix must leave only the trailing 0xffff.
        let mut without_prefix = slice1.strip_data_prefix(&cell2.as_slice()?).unwrap();
        print_slice("Result", without_prefix);

        assert_eq!(without_prefix.load_u16(), Ok(0xffff));
        assert!(without_prefix.is_data_empty());

        Ok(())
    }

    #[test]
    fn longest_common_data_prefix() -> anyhow::Result<()> {
        let cell1 = build_cell(|b| b.store_u64(0xffffffff00000000));
        let mut slice1 = cell1.as_slice()?;
        slice1.skip_first(1, 0)?;

        let cell2 = build_cell(|b| b.store_u64(0xfffffff000000000));
        let mut slice2 = cell2.as_slice()?;
        slice2.skip_first(6, 0)?;

        let prefix = slice1.longest_common_data_prefix(&slice2);

        let prefix = build_cell(|b| b.store_slice(prefix));
        println!("{}", prefix.display_root());
        assert_eq!(prefix.data(), [0xff, 0xff, 0xfe]);
        assert_eq!(prefix.bit_len(), 22);

        // Values differing only in the last bit.
        let cell1 = build_cell(|b| b.store_u32(0));
        let cell2 = build_cell(|b| b.store_u32(1));
        let prefix = cell1
            .as_slice()?
            .longest_common_data_prefix(&cell2.as_slice()?);
        assert_eq!(prefix.size_bits(), 31);

        // Unaligned windows of different lengths.
        let cell1 = build_cell(|b| b.store_raw(&[0, 0, 2, 2], 32));
        let mut slice1 = cell1.as_slice()?;
        slice1.skip_first(23, 0)?;

        let cell2 = build_cell(|b| b.store_raw(&[0; 128], 1023));
        let slice2 = cell2.as_slice()?.get_prefix(8, 0);

        let prefix = slice1.longest_common_data_prefix(&slice2);
        assert_eq!(prefix.size_bits(), 7);

        // Both windows unaligned, fully matching within the compared prefix.
        let cell1 = build_cell(|b| b.store_u16(0));
        let mut slice1 = cell1.as_slice()?;
        slice1.skip_first(5, 0)?;

        let cell2 = build_cell(|b| b.store_u8(0));
        let mut slice2 = cell2.as_slice()?;
        slice2.skip_first(2, 0)?;

        let prefix = slice1
            .get_prefix(5, 0)
            .longest_common_data_prefix(&slice2.get_prefix(5, 0));
        assert_eq!(prefix.size_bits(), 5);

        Ok(())
    }

    #[test]
    fn unaligned_longest_common_data_prefix() -> anyhow::Result<()> {
        let raw_key =
            Boc::decode_base64("te6ccgEBAQEAJAAAQ0gBfkBkwI7RPE0/VFMj7yvbvFrpvOMEPDQHDlGWFIELM9s=")?;

        let unaligned = {
            let mut raw_key = raw_key.as_slice()?;
            raw_key.skip_first(4, 0)?;
            raw_key.get_prefix(267, 0)
        };
        let aligned = CellBuilder::build_from(unaligned)?;
        let aligned = aligned.as_slice()?;

        // Re-aligning must not change the data content.
        assert_eq!(unaligned.lex_cmp(&aligned)?, std::cmp::Ordering::Equal);

        let prefix = Boc::decode_base64(
            "te6ccgEBAwEAjgACB4HQAsACAQCBvwFNima9rQU2tAaEHK+4fSc/aaYcTkT20uyfZuGbZjVMAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAIAgb8RUsb7kteM/ARjNwkzPPYoytZRb4Ic9epNxxLMl/2h7AAAAAAAAAAAAAAAAAAAAADMzMzMzMzMzMzMzMzMzMzO",
        )?;
        let prefix = {
            let mut prefix = prefix.as_slice()?;
            prefix.skip_first(11, 0)?;
            prefix.get_prefix(14, 0)
        };

        // LCP must be the same regardless of the alignment of the left operand.
        let lcp_unaligned = unaligned.longest_common_data_prefix(&prefix);
        let lcp_aligned = aligned.longest_common_data_prefix(&prefix);

        assert_eq!(
            lcp_unaligned.lex_cmp(&lcp_aligned)?,
            std::cmp::Ordering::Equal
        );

        Ok(())
    }

    #[test]
    fn get_uint() -> anyhow::Result<()> {
        let cell = build_cell(|b| b.store_u64(0xfafafafafafafafa));

        // `get_uint` must not advance the slice, so all reads share one slice.
        let slice = cell.as_slice()?;
        assert_eq!(slice.get_uint(0, 3), Ok(0b111));
        assert_eq!(slice.get_uint(0, 11), Ok(0b11111010111));
        assert_eq!(slice.get_uint(1, 11), Ok(0b11110101111));
        assert_eq!(slice.get_uint(8, 3), Ok(0b111));
        assert_eq!(slice.get_uint(0, 16), Ok(0xfafa));

        Ok(())
    }

    #[test]
    fn test_uniform() -> anyhow::Result<()> {
        // All-zeros data => uniformly `false`.
        let cell = build_cell(|b| b.store_zeros(10));
        assert_eq!(cell.as_slice()?.test_uniform(), Some(false));

        // All-ones data => uniformly `true`.
        let cell = build_cell(|b| b.store_u8(0xff));
        assert_eq!(cell.as_slice()?.test_uniform(), Some(true));

        // Mixed data => not uniform.
        let cell = build_cell(|b| b.store_u8(123));
        assert_eq!(cell.as_slice()?.test_uniform(), None);

        let cell = build_cell(|b| b.store_u16(123));
        assert_eq!(cell.as_slice()?.test_uniform(), None);

        let cell = build_cell(|b| {
            b.store_zeros(9)?;
            b.store_bit_one()
        });
        assert_eq!(cell.as_slice()?.test_uniform(), None);

        let cell = build_cell(|b| {
            b.store_zeros(20)?;
            b.store_bit_one()
        });
        assert_eq!(cell.as_slice()?.test_uniform(), None);

        // Uniformity is computed over the current window, not the whole cell.
        let cell = build_cell(|b| {
            b.store_bit_zero()?;
            b.store_uint(u64::MAX, 29)
        });
        let mut slice = cell.as_slice()?;
        slice.skip_first(1, 0)?;
        assert_eq!(slice.test_uniform(), Some(true));

        Ok(())
    }

    #[test]
    fn compare_by_content() -> anyhow::Result<()> {
        // Builds two cells and lexicographically compares their data.
        fn cmp<L, R>(l: L, r: R) -> Result<std::cmp::Ordering, Error>
        where
            L: FnOnce(&mut CellBuilder) -> Result<(), Error>,
            R: FnOnce(&mut CellBuilder) -> Result<(), Error>,
        {
            let cell1 = build_cell(l);
            let cell2 = build_cell(r);
            cell1.as_slice()?.lex_cmp(&cell2.as_slice()?)
        }

        assert_eq!(
            cmp(
                |b| b.store_u64(0xffffffff0000000f),
                |b| b.store_u64(0xffffffff00000000)
            )?,
            std::cmp::Ordering::Greater
        );

        assert_eq!(
            cmp(
                |b| b.store_u64(0xfffffff00000000),
                |b| b.store_u64(0xffffffff00000000)
            )?,
            std::cmp::Ordering::Less
        );

        assert_eq!(
            cmp(
                |b| b.store_u64(0xffffffff00000000),
                |b| b.store_u64(0xffffffff00000000)
            )?,
            std::cmp::Ordering::Equal
        );

        // A shorter bitstring that is a prefix of a longer one compares less.
        assert_eq!(
            cmp(
                |b| b.store_uint(0xffffffff00000000, 60),
                |b| b.store_u64(0xffffffff00000000)
            )?,
            std::cmp::Ordering::Less
        );

        Ok(())
    }

    #[test]
    fn is_prefix_of() -> anyhow::Result<()> {
        let left = build_cell(|b| b.store_u64(0xabcdef1234567890));
        let right = build_cell(|b| b.store_u64(0xabcdef0000000000));

        // Not a prefix
        {
            let left = left.as_slice()?;
            let right = right.as_slice()?;
            assert!(!right.is_data_prefix_of(&left).unwrap());
        }

        // Aligned prefix
        {
            let left = left.as_slice()?;
            let mut right = right.as_slice()?;
            right.only_first(24, 0)?;
            assert!(right.is_data_prefix_of(&left).unwrap());
        }

        // Shifted prefix
        {
            let mut left = left.as_slice()?;
            left.skip_first(3, 0)?;

            let mut right = right.as_slice()?;
            right.skip_first(3, 0)?;
            right.only_first(21, 0)?;

            assert!(right.is_data_prefix_of(&left).unwrap());
        }

        // Empty prefix
        {
            let left = left.as_slice()?;
            let right = Cell::empty_cell_ref().as_slice()?;
            assert!(right.is_data_prefix_of(&left).unwrap());
        }

        // A non-empty slice is not a prefix of an empty slice
        {
            let left = Cell::empty_cell_ref().as_slice()?;
            let right = right.as_slice()?;
            assert!(!right.is_data_prefix_of(&left).unwrap());
        }

        Ok(())
    }

    #[test]
    fn is_suffix_of() -> anyhow::Result<()> {
        let left = build_cell(|b| b.store_u64(0xabcdef1234567890));
        let right = build_cell(|b| b.store_u64(0x0000001234567890));

        // Not a suffix
        {
            let left = left.as_slice()?;
            let right = right.as_slice()?;
            assert!(!right.is_data_suffix_of(&left).unwrap());
        }

        // Aligned suffix
        {
            let left = left.as_slice()?;
            let mut right = right.as_slice()?;
            right.only_last(40, 0)?;
            assert!(right.is_data_suffix_of(&left).unwrap());
        }

        // Shifted suffix
        {
            let mut left = left.as_slice()?;
            left.skip_last(3, 0)?;

            let mut right = right.as_slice()?;
            right.skip_last(3, 0)?;
            right.only_last(37, 0)?;

            assert!(right.is_data_suffix_of(&left).unwrap());
        }

        // Empty suffix
        {
            let left = left.as_slice()?;
            let right = Cell::empty_cell_ref().as_slice()?;
            assert!(right.is_data_suffix_of(&left).unwrap());
        }

        // A non-empty slice is not a suffix of an empty slice
        {
            let left = Cell::empty_cell_ref().as_slice()?;
            let right = right.as_slice()?;
            assert!(!right.is_data_suffix_of(&left).unwrap());
        }

        Ok(())
    }

    #[test]
    fn leading_bits() -> anyhow::Result<()> {
        // Empty slice has zero leading bits
        assert_eq!(Cell::empty_cell_ref().as_slice()?.count_leading(false)?, 0);
        assert_eq!(Cell::empty_cell_ref().as_slice()?.count_leading(true)?, 0);

        // Full slice has all bits set
        assert_eq!(
            Cell::all_zeros_ref().as_slice()?.count_leading(false)?,
            1023
        );
        assert_eq!(Cell::all_ones_ref().as_slice()?.count_leading(true)?, 1023);

        // Full slice has no leading other bits
        assert_eq!(Cell::all_zeros_ref().as_slice()?.count_leading(true)?, 0);
        assert_eq!(Cell::all_ones_ref().as_slice()?.count_leading(false)?, 0);

        // Test for different alignments
        for shift_before in [false, true] {
            for shift_after in [false, true] {
                for i in 0..128 {
                    let mut builder = CellBuilder::new();

                    if shift_before {
                        builder.store_ones(7)?;
                    };
                    builder.store_u128(1 << i)?;
                    if shift_after {
                        builder.store_ones(14)?;
                    }

                    let mut slice = builder.as_data_slice();

                    if shift_before {
                        slice.skip_first(7, 0)?;
                    }
                    if shift_after {
                        slice.only_first(128, 0)?;
                    }

                    // A lone bit at position `i` gives `127 - i` leading zeros.
                    assert_eq!(slice.count_leading(false)?, 127 - i);
                    assert_eq!(slice.count_leading(true)?, (i == 127) as u16);
                }
            }
        }

        Ok(())
    }

    #[test]
    fn trailing_bits() -> anyhow::Result<()> {
        // Empty slice has zero trailing bits
        assert_eq!(Cell::empty_cell_ref().as_slice()?.count_trailing(false)?, 0);
        assert_eq!(Cell::empty_cell_ref().as_slice()?.count_trailing(true)?, 0);

        // Full slice has all bits set
        assert_eq!(
            Cell::all_zeros_ref().as_slice()?.count_trailing(false)?,
            1023
        );
        assert_eq!(Cell::all_ones_ref().as_slice()?.count_trailing(true)?, 1023);

        // Full slice has no trailing other bits
        assert_eq!(Cell::all_zeros_ref().as_slice()?.count_trailing(true)?, 0);
        assert_eq!(Cell::all_ones_ref().as_slice()?.count_trailing(false)?, 0);

        // Test for different alignments
        for shift_before in [false, true] {
            for shift_after in [false, true] {
                for i in 0..128 {
                    let mut builder = CellBuilder::new();

                    if shift_before {
                        builder.store_ones(7)?;
                    };
                    builder.store_u128(1 << i)?;
                    if shift_after {
                        builder.store_ones(14)?;
                    }

                    let mut slice = builder.as_data_slice();

                    if shift_before {
                        slice.skip_first(7, 0)?;
                    }
                    if shift_after {
                        slice.only_first(128, 0)?;
                    }

                    // A lone bit at position `i` gives `i` trailing zeros.
                    assert_eq!(slice.count_trailing(false)?, i);
                    assert_eq!(slice.count_trailing(true)?, (i == 0) as u16);
                }
            }
        }

        Ok(())
    }

    #[test]
    fn split_slice() -> anyhow::Result<()> {
        let cell = CellBuilder::build_from((0xdeafbeafu32, 0xabbacafeu32))?;

        // Prefix
        {
            let mut cs = cell.as_slice()?;
            // No refs available to split off.
            assert!(cs.load_prefix(0, 1).is_err());

            let mut prefix = cs.load_prefix(16, 0)?;
            assert_eq!(prefix.size_bits(), 16);
            assert_eq!(cs.size_bits(), 64 - 16);
            assert_eq!(prefix.load_u16()?, 0xdeaf);
            assert_eq!(cs.get_u16(0)?, 0xbeaf);

            let mut prefix = cs.load_prefix(32, 0)?;
            assert_eq!(prefix.size_bits(), 32);
            assert_eq!(cs.size_bits(), 64 - 16 - 32);
            assert_eq!(prefix.load_u32()?, 0xbeafabba);
            assert_eq!(cs.get_u16(0)?, 0xcafe);

            let mut prefix = cs.load_prefix(16, 0)?;
            assert_eq!(prefix.size_bits(), 16);
            assert_eq!(cs.size_bits(), 0);
            assert_eq!(prefix.load_u16()?, 0xcafe);

            // Nothing left to split off.
            assert!(cs.load_prefix(10, 0).is_err());
        }

        // Suffix
        {
            let mut cs = cell.as_slice()?;
            // No refs available to split off.
            assert!(cs.load_suffix(0, 1).is_err());

            let mut suffix = cs.load_suffix(16, 0)?;
            assert_eq!(suffix.size_bits(), 16);
            assert_eq!(cs.size_bits(), 64 - 16);
            assert_eq!(suffix.load_u16()?, 0xcafe);
            assert_eq!(cs.get_u16(32)?, 0xabba);

            let mut suffix = cs.load_suffix(32, 0)?;
            assert_eq!(suffix.size_bits(), 32);
            assert_eq!(cs.size_bits(), 64 - 16 - 32);
            assert_eq!(suffix.load_u32()?, 0xbeafabba);
            assert_eq!(cs.get_u16(0)?, 0xdeaf);

            let mut suffix = cs.load_suffix(16, 0)?;
            assert_eq!(suffix.size_bits(), 16);
            assert_eq!(cs.size_bits(), 0);
            assert_eq!(suffix.load_u16()?, 0xdeaf);

            // Nothing left to split off.
            assert!(cs.load_suffix(10, 0).is_err());
        }

        Ok(())
    }
}