everscale_types/cell/slice.rs

use std::num::{NonZeroU16, NonZeroU32, NonZeroU8};
use std::rc::Rc;
use std::sync::Arc;

use crate::cell::{
    Cell, CellTreeStats, CellType, DynCell, HashBytes, LevelMask, RefsIter, Size, StorageStat,
};
use crate::error::Error;
use crate::util::{unlikely, Bitstring};

use super::CellFamily;

/// A data structure that can be deserialized from cells.
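///
/// # Examples
///
/// A minimal sketch of a manual implementation (the `Foo` type is illustrative,
/// and `Load`/`CellSlice` are assumed to be re-exported through the crate prelude):
///
/// ```
/// # use everscale_types::prelude::{CellSlice, Load};
/// # use everscale_types::error::Error;
/// struct Foo {
///     value: u32,
/// }
///
/// impl<'a> Load<'a> for Foo {
///     fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error> {
///         Ok(Self {
///             value: slice.load_u32()?,
///         })
///     }
/// }
/// ```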
pub trait Load<'a>: Sized {
    /// Tries to load itself from a cell slice.
    fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error>;
}

impl<'a, T: Load<'a>> Load<'a> for Box<T> {
    #[inline]
    fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error> {
        match <T as Load>::load_from(slice) {
            Ok(value) => Ok(Box::new(value)),
            Err(e) => Err(e),
        }
    }
}

impl<'a, T: Load<'a>> Load<'a> for Arc<T> {
    #[inline]
    fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error> {
        match <T as Load>::load_from(slice) {
            Ok(value) => Ok(Arc::new(value)),
            Err(e) => Err(e),
        }
    }
}

impl<'a, T: Load<'a>> Load<'a> for Rc<T> {
    #[inline]
    fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error> {
        match <T as Load>::load_from(slice) {
            Ok(value) => Ok(Rc::new(value)),
            Err(e) => Err(e),
        }
    }
}

impl<'a> Load<'a> for () {
    #[inline]
    fn load_from(_: &mut CellSlice<'a>) -> Result<Self, Error> {
        Ok(())
    }
}

macro_rules! impl_load_for_tuples {
    ($( ($($t:ident),+) ),*$(,)?) => {$(
        impl<'a, $($t: Load<'a>),+> Load<'a> for ($($t),*,) {
            fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error> {
                Ok(($(ok!(<$t>::load_from(slice))),+))
            }
        }
    )*};
}

impl_load_for_tuples! {
    (T0, T1),
    (T0, T1, T2),
    (T0, T1, T2, T3),
    (T0, T1, T2, T3, T4),
    (T0, T1, T2, T3, T4, T5),
}

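/// `Option<T>` is loaded as a single presence bit followed by a value of `T`
/// if the bit is set (the TL-B `Maybe`).
///
/// # Examples
///
/// A minimal sketch (`store_u8` and the prelude re-exports are assumptions here):
///
/// ```
/// # use everscale_types::prelude::{CellBuilder, Load};
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let cell = {
///     let mut builder = CellBuilder::new();
///     builder.store_bit_one()?; // the value is present
///     builder.store_u8(123)?;
///     builder.build()?
/// };
/// let mut slice = cell.as_slice()?;
/// assert_eq!(Option::<u8>::load_from(&mut slice)?, Some(123));
/// # Ok(()) }
/// ```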
impl<'a, T: Load<'a>> Load<'a> for Option<T> {
    #[inline]
    fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error> {
        if ok!(slice.load_bit()) {
            match T::load_from(slice) {
                Ok(value) => Ok(Some(value)),
                Err(e) => Err(e),
            }
        } else {
            Ok(None)
        }
    }
}

impl<T: ExactSize> ExactSize for Option<T> {
    #[inline]
    fn exact_size(&self) -> Size {
        let mut total = Size { bits: 1, refs: 0 };
        if let Some(this) = self {
            total += this.exact_size();
        }
        total
    }
}

impl<'a> Load<'a> for CellSlice<'a> {
    #[inline]
    fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error> {
        Ok(slice.load_remaining())
    }
}

macro_rules! ok_map {
    ($expr:expr => $ty:ty) => {
        match $expr {
            core::result::Result::Ok(s) => core::result::Result::Ok(s as $ty),
            core::result::Result::Err(e) => core::result::Result::Err(e),
        }
    };
}

macro_rules! impl_primitive_loads {
    ($($type:ty => |$s:ident| $expr:expr),*$(,)?) => {
        $(impl Load<'_> for $type {
            #[inline]
            fn load_from($s: &mut CellSlice) -> Result<Self, Error> {
                $expr
            }
        })*
    };
}

impl_primitive_loads! {
    bool => |s| s.load_bit(),
    u8 => |s| s.load_u8(),
    i8 => |s| ok_map!(s.load_u8() => i8),
    u16 => |s| s.load_u16(),
    i16 => |s| ok_map!(s.load_u16() => i16),
    u32 => |s| s.load_u32(),
    i32 => |s| ok_map!(s.load_u32() => i32),
    u64 => |s| s.load_u64(),
    i64 => |s| ok_map!(s.load_u64() => i64),
    u128 => |s| s.load_u128(),
    i128 => |s| ok_map!(s.load_u128() => i128),
    NonZeroU8 => |s| match s.load_u8() {
        Ok(s) => match NonZeroU8::new(s) {
            Some(s) => Ok(s),
            None => Err(Error::InvalidData)
        }
        Err(e) => Err(e),
    },
    NonZeroU16 => |s| match s.load_u16() {
        Ok(s) => match NonZeroU16::new(s) {
            Some(s) => Ok(s),
            None => Err(Error::InvalidData)
        }
        Err(e) => Err(e),
    },
    NonZeroU32 => |s| match s.load_u32() {
        Ok(s) => match NonZeroU32::new(s) {
            Some(s) => Ok(s),
            None => Err(Error::InvalidData)
        }
        Err(e) => Err(e),
    },
    HashBytes => |s| s.load_u256(),
}

impl<'a> Load<'a> for &'a DynCell {
    fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error> {
        slice.load_reference()
    }
}

impl ExactSize for DynCell {
    #[inline]
    fn exact_size(&self) -> Size {
        Size { bits: 0, refs: 1 }
    }
}

impl<'a> Load<'a> for Cell {
    fn load_from(slice: &mut CellSlice<'a>) -> Result<Self, Error> {
        slice.load_reference_cloned()
    }
}

impl ExactSize for Cell {
    #[inline]
    fn exact_size(&self) -> Size {
        Size { bits: 0, refs: 1 }
    }
}

/// Owned cell slice parts alias.
pub type CellSliceParts = (Cell, CellSliceRange);

impl ExactSize for CellSliceParts {
    #[inline]
    fn exact_size(&self) -> Size {
        self.1.size()
    }
}

/// Indices of the slice data and refs windows.
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
pub struct CellSliceRange {
    bits_start: u16,
    bits_end: u16,
    refs_start: u8,
    refs_end: u8,
}

impl CellSliceRange {
    /// Returns an empty slice range.
    pub const fn empty() -> Self {
        CellSliceRange {
            bits_start: 0,
            bits_end: 0,
            refs_start: 0,
            refs_end: 0,
        }
    }

    /// Returns a full range for the specified cell.
    pub fn full(cell: &DynCell) -> Self {
        Self {
            bits_start: 0,
            bits_end: cell.bit_len(),
            refs_start: 0,
            refs_end: cell.reference_count(),
        }
    }

    /// Constructs a new cell slice from the specified cell using the current range.
    /// Returns an error if the cell is pruned.
    ///
    /// NOTE: the resulting range will be truncated to cell bounds.
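    ///
    /// # Examples
    ///
    /// A minimal sketch of detaching a range from a slice and re-applying it to
    /// the cell later (this mirrors how [`CellSliceParts`] is used; `Cell` is
    /// assumed to implement `AsRef<DynCell>`):
    ///
    /// ```
    /// # use everscale_types::prelude::CellBuilder;
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_u32(0xdeadbeaf)?;
    ///     builder.build()?
    /// };
    ///
    /// // Remember only the range of a partially read slice
    /// let mut slice = cell.as_slice()?;
    /// slice.load_u16()?;
    /// let range = slice.range();
    ///
    /// // Re-apply the range to the same cell
    /// let mut restored = range.apply(&cell)?;
    /// assert_eq!(restored.load_u16()?, 0xbeaf);
    /// # Ok(()) }
    /// ```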
    #[inline]
    pub fn apply<T>(self, cell: &T) -> Result<CellSlice<'_>, Error>
    where
        T: AsRef<DynCell> + ?Sized,
    {
        fn apply_impl(range: CellSliceRange, cell: &DynCell) -> Result<CellSlice<'_>, Error> {
            // Handle pruned branch access
            if unlikely(cell.descriptor().is_pruned_branch()) {
                Err(Error::PrunedBranchAccess)
            } else {
                let bits_end = std::cmp::min(range.bits_end, cell.bit_len());
                let refs_end = std::cmp::min(range.refs_end, cell.reference_count());
                Ok(CellSlice {
                    range: CellSliceRange {
                        bits_start: std::cmp::min(range.bits_start, bits_end),
                        bits_end,
                        refs_start: std::cmp::min(range.refs_start, refs_end),
                        refs_end,
                    },
                    cell,
                })
            }
        }
        apply_impl(self, cell.as_ref())
    }

    /// Constructs a new cell slice from the specified cell using the current range.
    ///
    /// NOTE: the resulting range will be truncated to cell bounds.
    ///
    /// # Safety
    ///
    /// The following must be true:
    /// - cell is not pruned
    /// - range is in cell bounds
    pub fn apply_allow_special<T>(self, cell: &T) -> CellSlice<'_>
    where
        T: AsRef<DynCell> + ?Sized,
    {
        CellSlice {
            range: self,
            cell: cell.as_ref(),
        }
    }

    /// Returns the number of remaining bits and refs in the slice.
    pub const fn size(&self) -> Size {
        Size {
            bits: self.size_bits(),
            refs: self.size_refs(),
        }
    }

    /// Returns the number of remaining bits of data in the slice.
    pub const fn size_bits(&self) -> u16 {
        if self.bits_start > self.bits_end {
            0
        } else {
            self.bits_end - self.bits_start
        }
    }

    /// Returns the number of remaining references in the slice.
    pub const fn size_refs(&self) -> u8 {
        if self.refs_start > self.refs_end {
            0
        } else {
            self.refs_end - self.refs_start
        }
    }

    /// Returns whether there are no data bits and refs left.
    pub const fn is_empty(&self) -> bool {
        self.is_data_empty() && self.is_refs_empty()
    }

    /// Returns whether there are no bits of data left.
    pub const fn is_data_empty(&self) -> bool {
        self.bits_start >= self.bits_end
    }

    /// Returns whether there are no references left.
    pub const fn is_refs_empty(&self) -> bool {
        self.refs_start >= self.refs_end
    }

    /// Returns the start of data and reference windows.
    pub const fn offset(&self) -> Size {
        Size {
            bits: self.bits_start,
            refs: self.refs_start,
        }
    }

    /// Returns the start of the data window.
    pub const fn offset_bits(&self) -> u16 {
        self.bits_start
    }

    /// Returns the start of the references window.
    pub const fn offset_refs(&self) -> u8 {
        self.refs_start
    }

    /// Returns true if the slice contains at least `bits` and `refs`.
    pub const fn has_remaining(&self, bits: u16, refs: u8) -> bool {
        self.bits_start + bits <= self.bits_end && self.refs_start + refs <= self.refs_end
    }

    /// Tries to advance the start of data and refs windows.
    pub fn skip_first(&mut self, bits: u16, refs: u8) -> Result<(), Error> {
        if unlikely(
            self.bits_start + bits > self.bits_end || self.refs_start + refs > self.refs_end,
        ) {
            return Err(Error::CellUnderflow);
        }

        self.bits_start += bits;
        self.refs_start += refs;
        Ok(())
    }

    /// Leaves only the first `bits` and `refs` in the slice.
    pub fn only_first(&mut self, bits: u16, refs: u8) -> Result<(), Error> {
        if unlikely(
            self.bits_start + bits > self.bits_end || self.refs_start + refs > self.refs_end,
        ) {
            return Err(Error::CellUnderflow);
        }

        self.bits_end = self.bits_start + bits;
        self.refs_end = self.refs_start + refs;
        Ok(())
    }

    /// Removes the last `bits` and `refs` from the slice.
    pub fn skip_last(&mut self, bits: u16, refs: u8) -> Result<(), Error> {
        if unlikely(
            self.bits_start + bits > self.bits_end || self.refs_start + refs > self.refs_end,
        ) {
            return Err(Error::CellUnderflow);
        }

        self.bits_end -= bits;
        self.refs_end -= refs;
        Ok(())
    }

    /// Leaves only the last `bits` and `refs` in the slice.
    pub fn only_last(&mut self, bits: u16, refs: u8) -> Result<(), Error> {
        if unlikely(
            self.bits_start + bits > self.bits_end || self.refs_start + refs > self.refs_end,
        ) {
            return Err(Error::CellUnderflow);
        }

        self.bits_start = self.bits_end - bits;
        self.refs_start = self.refs_end - refs;
        Ok(())
    }

    /// Returns a slice range starting at the same bits and refs offsets,
    /// and containing no more than `bits` of data and `refs` of children.
    pub fn get_prefix(&self, bits: u16, refs: u8) -> Self {
        Self {
            bits_start: self.bits_start,
            bits_end: std::cmp::min(self.bits_start + bits, self.bits_end),
            refs_start: self.refs_start,
            refs_end: std::cmp::min(self.refs_start + refs, self.refs_end),
        }
    }

    /// Returns whether this range has the same size as the cell.
    #[inline]
    pub fn is_full(&self, cell: &DynCell) -> bool {
        self.bits_start == 0
            && self.refs_start == 0
            && self.bits_end == cell.bit_len()
            && self.refs_end == cell.reference_count()
    }
}

/// A read-only view for a subrange of a cell.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub struct CellSlice<'a> {
    cell: &'a DynCell,
    range: CellSliceRange,
}

impl Default for CellSlice<'_> {
    #[inline]
    fn default() -> Self {
        // SAFETY: empty cell is an ordinary cell
        unsafe { Cell::empty_cell_ref().as_slice_unchecked() }
    }
}

impl<'a> AsRef<CellSlice<'a>> for CellSlice<'a> {
    #[inline]
    fn as_ref(&self) -> &CellSlice<'a> {
        self
    }
}

impl<'a> AsMut<CellSlice<'a>> for CellSlice<'a> {
    #[inline]
    fn as_mut(&mut self) -> &mut CellSlice<'a> {
        self
    }
}

impl<'a> CellSlice<'a> {
    /// Constructs a new cell slice from the specified cell.
    /// Returns an error if the cell is pruned.
    pub fn new(cell: &'a DynCell) -> Result<Self, Error> {
        // Handle pruned branch access
        if unlikely(cell.descriptor().is_pruned_branch()) {
            Err(Error::PrunedBranchAccess)
        } else {
            Ok(Self {
                range: CellSliceRange::full(cell),
                cell,
            })
        }
    }

    /// Constructs a new cell slice from the specified cell.
    ///
    /// # Safety
    ///
    /// The following must be true:
    /// - cell is not pruned
    pub unsafe fn new_unchecked(cell: &'a DynCell) -> Self {
        Self {
            range: CellSliceRange::full(cell),
            cell,
        }
    }

    /// Returns the underlying range indices.
    #[inline]
    pub const fn range(&self) -> CellSliceRange {
        self.range
    }

    /// Returns a reference to the underlying cell.
    #[inline]
    pub const fn cell(&self) -> &'a DynCell {
        self.cell
    }

    /// Computes cell type from descriptor bytes.
    #[inline]
    pub fn cell_type(&self) -> CellType {
        self.cell.cell_type()
    }

    /// Computes the cell level from the level mask.
    #[inline]
    pub fn level(&self) -> u8 {
        self.cell.level()
    }

    /// Computes the level mask from the descriptor bytes.
    #[inline]
    pub fn level_mask(&self) -> LevelMask {
        self.cell.level_mask()
    }

    /// Returns whether there are no data bits and refs left.
    pub const fn is_empty(&self) -> bool {
        self.range.is_empty()
    }

    /// Returns whether there are no bits of data left.
    ///
    /// # Examples
    ///
    /// ```
    /// # use everscale_types::prelude::{Cell, CellFamily, CellBuilder};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// // Cell with empty data
    /// let empty_cell = Cell::empty_cell();
    /// assert!(empty_cell.as_slice()?.is_data_empty());
    ///
    /// // Cell with some bits in data
    /// let not_empty_cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_bit_zero()?;
    ///     builder.build()?
    /// };
    /// assert!(!not_empty_cell.as_slice()?.is_data_empty());
    /// # Ok(()) }
    /// ```
    pub const fn is_data_empty(&self) -> bool {
        self.range.is_data_empty()
    }

    /// Returns whether there are no references left.
    ///
    /// # Examples
    ///
    /// ```
    /// # use everscale_types::prelude::{Cell, CellFamily, CellBuilder};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// // Cell without references
    /// let empty_cell = Cell::empty_cell();
    /// assert!(empty_cell.as_slice()?.is_refs_empty());
    ///
    /// // Cell with some references
    /// let not_empty_cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_reference(empty_cell)?;
    ///     builder.build()?
    /// };
    /// assert!(!not_empty_cell.as_slice()?.is_refs_empty());
    /// # Ok(()) }
    /// ```
    pub const fn is_refs_empty(&self) -> bool {
        self.range.is_refs_empty()
    }

    /// Returns the number of remaining bits and refs in the slice.
    pub const fn size(&self) -> Size {
        self.range.size()
    }

    /// Returns the number of remaining bits of data in the slice.
    pub const fn size_bits(&self) -> u16 {
        self.range.size_bits()
    }

    /// Returns the number of remaining references in the slice.
    pub const fn size_refs(&self) -> u8 {
        self.range.size_refs()
    }

    /// Returns the start of data and reference windows.
    pub const fn offset(&self) -> Size {
        self.range.offset()
    }

    /// Returns the start of the data window.
    ///
    /// # Examples
    ///
    /// ```
    /// # use everscale_types::prelude::CellBuilder;
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_zeros(100)?;
    ///     builder.build()?
    /// };
    /// let mut slice = cell.as_slice()?;
    /// slice.load_u8()?;
    /// assert_eq!(slice.offset_bits(), 8);
    /// # Ok(()) }
    /// ```
    #[inline]
    pub const fn offset_bits(&self) -> u16 {
        self.range.offset_bits()
    }

    /// Returns the start of the references window.
    ///
    /// # Examples
    ///
    /// ```
    /// # use everscale_types::prelude::{Cell, CellFamily, CellBuilder};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_reference(Cell::empty_cell())?;
    ///     builder.build()?
    /// };
    /// let mut slice = cell.as_slice()?;
    ///
    /// slice.load_reference()?;
    /// assert_eq!(slice.offset_refs(), 1);
    /// # Ok(()) }
    /// ```
    #[inline]
    pub const fn offset_refs(&self) -> u8 {
        self.range.offset_refs()
    }

    /// Returns true if the slice contains at least `bits` and `refs`.
    ///
    /// # Examples
    ///
    /// ```
    /// # use everscale_types::prelude::{Cell, CellBuilder, CellFamily};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_zeros(100)?;
    ///     builder.store_reference(Cell::empty_cell())?;
    ///     builder.store_reference(Cell::empty_cell())?;
    ///     builder.build()?
    /// };
    /// let mut slice = cell.as_slice()?;
    ///
    /// assert!(slice.has_remaining(10, 2));
    /// assert!(!slice.has_remaining(500, 2)); // too many bits
    /// assert!(!slice.has_remaining(0, 4)); // too many refs
    /// # Ok(()) }
    /// ```
    #[inline]
    pub const fn has_remaining(&self, bits: u16, refs: u8) -> bool {
        self.range.has_remaining(bits, refs)
    }

    /// Returns whether this slice is untouched.
    #[inline]
    pub fn is_full(&self) -> bool {
        self.range.is_full(self.cell)
    }

    /// Recursively counts distinct cells, returning the total storage used
    /// by this DAG and taking the deduplication of equal cells into account.
    ///
    /// The root slice itself does not count as a cell, and only the slice's
    /// subrange of data and refs is used during the computation.
    pub fn compute_unique_stats(&self, limit: usize) -> Option<CellTreeStats> {
        StorageStat::compute_for_slice(self, limit)
    }

    /// Tries to advance the start of data and refs windows.
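    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// # use everscale_types::prelude::CellBuilder;
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_u32(0xdeadbeaf)?;
    ///     builder.build()?
    /// };
    /// let mut slice = cell.as_slice()?;
    ///
    /// // Skip the first 16 bits
    /// slice.skip_first(16, 0)?;
    /// assert_eq!(slice.load_u16()?, 0xbeaf);
    /// # Ok(()) }
    /// ```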
    pub fn skip_first(&mut self, bits: u16, refs: u8) -> Result<(), Error> {
        self.range.skip_first(bits, refs)
    }

    /// Leaves only the first `bits` and `refs` in the slice.
    pub fn only_first(&mut self, bits: u16, refs: u8) -> Result<(), Error> {
        self.range.only_first(bits, refs)
    }

    /// Removes the last `bits` and `refs` from the slice.
    pub fn skip_last(&mut self, bits: u16, refs: u8) -> Result<(), Error> {
        self.range.skip_last(bits, refs)
    }

    /// Leaves only the last `bits` and `refs` in the slice.
    pub fn only_last(&mut self, bits: u16, refs: u8) -> Result<(), Error> {
        self.range.only_last(bits, refs)
    }

    /// Lexicographically compares slice data.
    ///
    /// NOTE: this method is quite computationally heavy as it compares the content
    /// of two potentially unaligned slices. Use it with caution, or compare whole
    /// cells (e.g. by their repr hashes) instead.
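    ///
    /// # Examples
    ///
    /// A minimal sketch comparing two freshly built cells:
    ///
    /// ```
    /// # use std::cmp::Ordering;
    /// # use everscale_types::prelude::CellBuilder;
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let build_u16 = |value: u16| {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_u16(value)?;
    ///     builder.build()
    /// };
    ///
    /// let lesser = build_u16(0x1234)?;
    /// let greater = build_u16(0x5678)?;
    /// assert_eq!(
    ///     lesser.as_slice()?.lex_cmp(&greater.as_slice()?)?,
    ///     Ordering::Less,
    /// );
    /// # Ok(()) }
    /// ```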
    pub fn lex_cmp(&self, rhs: &CellSlice) -> Result<std::cmp::Ordering, Error> {
        use std::cmp::Ordering;

        let lhs = self;
        let lhs_bits = lhs.size_bits();
        let rhs_bits = rhs.size_bits();

        // Fast check

        // NOTE: We can ignore pointer metadata since we are comparing only bits,
        // and custom implementations of `CellImpl` do not alter data behavior.
        if std::ptr::addr_eq(lhs.cell, rhs.cell) && lhs.offset_bits() == rhs.offset_bits() {
            return Ok(lhs_bits.cmp(&rhs_bits));
        }

        // Full check
        let bits = std::cmp::min(lhs_bits, rhs_bits);
        let rem = bits % 32;
        for offset in (0..bits - rem).step_by(32) {
            match ok!(lhs.get_u32(offset)).cmp(&ok!(rhs.get_u32(offset))) {
                Ordering::Equal => {}
                ord => return Ok(ord),
            }
        }

        if rem > 0 {
            match ok!(lhs.get_uint(bits - rem, rem)).cmp(&ok!(rhs.get_uint(bits - rem, rem))) {
                Ordering::Equal => {}
                ord => return Ok(ord),
            }
        }

        Ok(lhs_bits.cmp(&rhs_bits))
    }

    /// Returns `true` if two slices have the same data bits and refs.
    ///
    /// NOTE: this method is quite computationally heavy as it compares the content
    /// of two potentially unaligned slices. Use it with caution, or compare whole
    /// cells (e.g. by their repr hashes) instead.
    pub fn contents_eq(&self, rhs: &CellSlice) -> Result<bool, Error> {
        let lhs = self;

        // Fast check
        if lhs.size_bits() != rhs.size_bits() || lhs.size_refs() != rhs.size_refs() {
            return Ok(false);
        }

        // Full check
        if ok!(self.lex_cmp(rhs)).is_ne() {
            return Ok(false);
        }

        for (lhs, rhs) in self.references().zip(rhs.references()) {
            if lhs.repr_hash() != rhs.repr_hash() {
                return Ok(false);
            }
        }

        Ok(true)
    }

    /// Returns a slice starting at the same bits and refs offsets,
    /// and containing no more than `bits` of data and `refs` of children.
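    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// # use everscale_types::prelude::CellBuilder;
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_u32(0xdeadbeaf)?;
    ///     builder.build()?
    /// };
    /// let slice = cell.as_slice()?;
    ///
    /// let prefix = slice.get_prefix(16, 0);
    /// assert_eq!(prefix.size_bits(), 16);
    /// assert_eq!(prefix.get_u16(0)?, 0xdead);
    /// # Ok(()) }
    /// ```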
    pub fn get_prefix(&self, bits: u16, refs: u8) -> Self {
        Self {
            cell: self.cell,
            range: self.range.get_prefix(bits, refs),
        }
    }

    /// Returns `true` if this slice data is a prefix of the other slice data.
    pub fn is_data_prefix_of(&self, other: &Self) -> Result<bool, Error> {
        let bits = self.size_bits();
        if bits > other.size_bits() {
            return Ok(false);
        }

        let mut other = *other;
        let ok = other.only_first(bits, 0).is_ok();
        debug_assert!(ok);

        Ok(ok!(self.lex_cmp(&other)).is_eq())
    }

    /// Returns `true` if this slice data is a suffix of the other slice data.
    pub fn is_data_suffix_of(&self, other: &Self) -> Result<bool, Error> {
        let bits = self.size_bits();
        if bits > other.size_bits() {
            return Ok(false);
        }

        let mut other = *other;
        let ok = other.only_last(bits, 0).is_ok();
        debug_assert!(ok);

        Ok(ok!(self.lex_cmp(&other)).is_eq())
    }

    /// Returns a subslice with the data prefix removed.
    ///
    /// If the slice starts with `prefix`, returns the subslice after the prefix, wrapped in `Some`.
    /// If `prefix` is empty, simply returns the original slice.
    ///
    /// If the slice does not start with `prefix`, returns `None`.
    ///
    /// # Examples
    ///
    /// ```
    /// # use everscale_types::prelude::CellBuilder;
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_u32(0xdeadbeaf)?;
    ///     builder.build()?
    /// };
    /// let slice = cell.as_slice()?;
    ///
    /// let prefix = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_u16(0xdead)?;
    ///     builder.build()?
    /// };
    ///
    /// let without_prefix = slice.strip_data_prefix(&prefix.as_slice()?).unwrap();
    /// assert_eq!(without_prefix.get_u16(0)?, 0xbeaf);
    /// # Ok(()) }
    /// ```
    pub fn strip_data_prefix<'b>(&self, prefix: &CellSlice<'b>) -> Option<CellSlice<'a>> {
        let prefix_len = prefix.size_bits();
        if prefix_len == 0 {
            Some(*self)
        } else if self.size_bits() < prefix_len {
            None
        } else {
            let mut result = *self;
            let lcp = self.longest_common_data_prefix_impl(prefix, prefix_len);
            if prefix_len <= lcp && result.skip_first(prefix_len, 0).is_ok() {
                Some(result)
            } else {
                None
            }
        }
    }

    /// Returns the longest common data prefix.
    ///
    /// NOTE: The returned subslice will be a subslice of the current slice.
    ///
    /// # Examples
    ///
    /// ```
    /// # use everscale_types::prelude::CellBuilder;
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_u32(0xdeadbeaf)?;
    ///     builder.build()?
    /// };
    /// let slice = cell.as_slice()?;
    ///
    /// let prefix = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_u16(0xdead)?;
    ///     builder.build()?
    /// };
    ///
    /// let lcp = slice.longest_common_data_prefix(&prefix.as_slice()?);
    /// assert_eq!(lcp.get_u16(0)?, 0xdead);
    /// assert_eq!(lcp.size_bits(), 16);
    /// # Ok(()) }
    /// ```
    pub fn longest_common_data_prefix(&self, other: &Self) -> Self {
        let prefix_len = self.longest_common_data_prefix_impl(other, u16::MAX);
        self.get_prefix(prefix_len, 0)
    }

    fn longest_common_data_prefix_impl(&self, other: &Self, max_hint: u16) -> u16 {
        if self.range.bits_start >= self.range.bits_end
            || other.range.bits_start >= other.range.bits_end
        {
            return 0;
        }
        let self_remaining_bits = self.range.bits_end - self.range.bits_start;
        let self_data = self.cell.data();
        let other_remaining_bits = other.range.bits_end - other.range.bits_start;
        let other_data = other.cell.data();

        // Compute max prefix length in bits
        let max_bit_len = std::cmp::min(self_remaining_bits, other_remaining_bits).min(max_hint);

        // Compute shifts and data offsets
        let self_r = self.range.bits_start % 8;
        let self_q = (self.range.bits_start / 8) as usize;
        let other_r = other.range.bits_start % 8;
        let other_q = (other.range.bits_start / 8) as usize;

        // Compute remaining bytes to check
        let self_bytes = (((self_r + max_bit_len) + 7) / 8) as usize;
        debug_assert!((self_q + self_bytes) <= self_data.len());
        let other_bytes = (((other_r + max_bit_len) + 7) / 8) as usize;
        debug_assert!((other_q + other_bytes) <= other_data.len());

        let aligned_bytes = std::cmp::min(self_bytes, other_bytes);

        let mut prefix_len: u16 = 0;

        unsafe {
            let self_data_ptr = self_data.as_ptr().add(self_q);
            let other_data_ptr = other_data.as_ptr().add(other_q);

            // Get first bytes aligned to the left
            let mut self_byte = *self_data_ptr << self_r;
            let mut other_byte = *other_data_ptr << other_r;

            // For all aligned bytes except the first
            for i in 1..aligned_bytes {
                // Concat previous bits with current bits
                // NOTE: shift as `u16` to allow overflow
                let next_self_byte = *self_data_ptr.add(i);
                self_byte |= ((next_self_byte as u16) >> (8 - self_r)) as u8;
                let next_other_byte = *other_data_ptr.add(i);
                other_byte |= ((next_other_byte as u16) >> (8 - other_r)) as u8;

                // XOR bytes to check equality
                match self_byte ^ other_byte {
                    // All bits are equal, update current bytes and move forward
                    0 => {
                        prefix_len += 8;
                        self_byte = next_self_byte << self_r;
                        other_byte = next_other_byte << other_r;
                    }
                    // Some bits are not equal
                    x => {
                        // Number of leading zeros is the number of equal bits
                        return std::cmp::min(prefix_len + x.leading_zeros() as u16, max_bit_len);
                    }
                }
            }

            // Concat remaining bits
            if self_r > 0 && aligned_bytes < self_bytes {
                self_byte |= *self_data_ptr.add(aligned_bytes) >> (8 - self_r);
            }
            if other_r > 0 && aligned_bytes < other_bytes {
                other_byte |= *other_data_ptr.add(aligned_bytes) >> (8 - other_r);
            }

            // Apply last byte mask
            let last_byte_mask = 0xff << ((8 - max_bit_len % 8) % 8);
            self_byte &= last_byte_mask;
            other_byte &= last_byte_mask;

            // Count the number of remaining equal bits
            prefix_len += (self_byte ^ other_byte).leading_zeros() as u16;
        }

        // Return the longest prefix (without equal bits from the last byte mask)
        std::cmp::min(prefix_len, max_bit_len)
    }

    /// Returns the number of leading bits in `self` that are equal to `bit`.
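    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// # use everscale_types::prelude::CellBuilder;
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_zeros(9)?;
    ///     builder.store_bit_one()?;
    ///     builder.build()?
    /// };
    /// let slice = cell.as_slice()?;
    ///
    /// assert_eq!(slice.count_leading(false)?, 9);
    /// assert_eq!(slice.count_leading(true)?, 0);
    /// # Ok(()) }
    /// ```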
    pub fn count_leading(&self, bit: bool) -> Result<u16, Error> {
        if self.range.bits_start >= self.range.bits_end {
            return Ok(0);
        }
        let data = self.cell.data();

        // Check if data is enough
        if (self.range.bits_end + 7) / 8 > data.len() as u16 {
            return Err(Error::CellUnderflow);
        }

        let bit_count = self.range.bits_end - self.range.bits_start;

        // SAFETY: `bits_end` is in data range
        Ok(unsafe { bits_memscan(data, self.range.bits_start, bit_count, bit) })
    }

    /// Returns the number of trailing bits in `self` that are equal to `bit`.
    pub fn count_trailing(&self, bit: bool) -> Result<u16, Error> {
        if self.range.bits_start >= self.range.bits_end {
            return Ok(0);
        }
        let data = self.cell.data();

        // Check if data is enough
        if (self.range.bits_end + 7) / 8 > data.len() as u16 {
            return Err(Error::CellUnderflow);
        }

        let bit_count = self.range.bits_end - self.range.bits_start;

        // SAFETY: `bits_end` is in data range
        Ok(unsafe { bits_memscan_rev(data, self.range.bits_start, bit_count, bit) })
    }

    /// Checks whether the current slice consists entirely of the same bit.
    /// Returns `Some(bit)` if it does, and `None` if the data is empty or
    /// contains both 0s and 1s.
    ///
    /// # Examples
    ///
    /// ```
    /// # use everscale_types::prelude::{Cell, CellFamily, CellBuilder};
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// // Uniform cell consisting of only 0s
    /// let uniform_cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_zeros(10)?;
    ///     builder.build()?
    /// };
    /// assert_eq!(uniform_cell.as_slice()?.test_uniform(), Some(false));
    ///
    /// // Non-uniform cell consisting of 0s and 1s
    /// let non_uniform_cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_zeros(9)?;
    ///     builder.store_bit_one()?;
    ///     builder.build()?
    /// };
    /// assert_eq!(non_uniform_cell.as_slice()?.test_uniform(), None);
    ///
    /// // Empty cell is non-uniform
    /// let non_uniform_cell = Cell::empty_cell();
    /// assert_eq!(non_uniform_cell.as_slice()?.test_uniform(), None);
    /// # Ok(()) }
    /// ```
    pub fn test_uniform(&self) -> Option<bool> {
        if self.range.bits_start >= self.range.bits_end {
            return None;
        }
        let data = self.cell.data();

        // Check if data is enough
        if (self.range.bits_end + 7) / 8 > data.len() as u16 {
            return None;
        }

        let mut bit_count = self.range.bits_end - self.range.bits_start;

        let r = self.range.bits_start & 0b111;
        let q = self.range.bits_start >> 3;

        // SAFETY: q is in data range
        let mut data_ptr = unsafe { data.as_ptr().add(q as usize) };
        let first_byte = unsafe { *data_ptr };
        let bit = (first_byte >> (7 - r)) & 1 != 0;

        if bit_count < 64 {
            let target = (bit as u8) * u8::MAX;
            let first_byte_mask: u8 = 0xff >> r;
            let last_byte_mask: u8 = 0xff << ((8 - (bit_count + r) % 8) % 8);

            if r + bit_count <= 8 {
                // Special case if all remaining_bits are in the first byte
                if ((first_byte ^ target) & first_byte_mask & last_byte_mask) != 0 {
                    return None;
                }
            } else {
                // Check the first byte
                if (first_byte ^ target) & first_byte_mask != 0 {
                    return None;
                }

                unsafe {
                    // Check all full bytes
                    bit_count -= 8 - r;
                    for _ in 0..(bit_count / 8) {
                        data_ptr = data_ptr.add(1);
                        if *data_ptr != target {
                            return None;
                        }
                    }

                    // Check the last byte (if not aligned)
                    if bit_count % 8 != 0 {
                        data_ptr = data_ptr.add(1);
                        if (*data_ptr ^ target) & last_byte_mask != 0 {
                            return None;
                        }
                    }
                }
            }

            Some(bit)
        } else {
            // SAFETY: `bits_end` is in data range
            let same_bits = unsafe { bits_memscan(data, self.range.bits_start, bit_count, bit) };
            (same_bits == bit_count).then_some(bit)
        }
    }

    /// Tries to read the bit at the specified offset (relative to the current bits window).
    pub fn get_bit(&self, offset: u16) -> Result<bool, Error> {
        if self.range.bits_start + offset < self.range.bits_end {
            let index = self.range.bits_start + offset;
            if let Some(byte) = self.cell.data().get((index / 8) as usize) {
                return Ok((byte >> (7 - index % 8)) & 1 != 0);
            }
        }
        Err(Error::CellUnderflow)
    }

    /// Tries to read the next bit, incrementing the bits window start.
    pub fn load_bit(&mut self) -> Result<bool, Error> {
        if self.range.bits_start < self.range.bits_end {
            let index = self.range.bits_start;
            if let Some(byte) = self.cell.data().get((index / 8) as usize) {
                self.range.bits_start += 1;
                return Ok((byte >> (7 - index % 8)) & 1 != 0);
            }
        }
        Err(Error::CellUnderflow)
    }

    /// Reads `u8` starting from the `offset`.
    #[inline]
    pub fn get_u8(&self, offset: u16) -> Result<u8, Error> {
        self.get_small_uint(offset, 8)
    }

    /// Tries to read the next `u8`, incrementing the bits window start.
    #[inline]
    pub fn load_u8(&mut self) -> Result<u8, Error> {
        self.load_small_uint(8)
    }

    /// Reads `u16` starting from the `offset`.
    pub fn get_u16(&self, offset: u16) -> Result<u16, Error> {
        if self.range.bits_start + offset + 16 <= self.range.bits_end {
            let index = self.range.bits_start + offset;
            let data = self.cell.data();
            let data_len = data.len();

            let r = index % 8;
            let q = (index / 8) as usize;

            if r == 0 && q + 2 <= data_len {
                // xxxxxxxx|yyyyyyyy -> xxxxxxxx|yyyyyyyy
                //^r

                // SAFETY: `q + 2 <= data_len`
                Ok(u16::from_be_bytes(unsafe {
                    *(data.as_ptr().add(q) as *const [u8; 2])
                }))
            } else if r != 0 && q + 3 <= data_len {
                // ___xxxxx|yyyyyyyy|zzz_____ -> xxxxxyyy|yyyyyzzz
                //  r^

                let mut bytes = [0u8; 4];

                // SAFETY: `q + 3 <= data_len`
                unsafe {
                    std::ptr::copy_nonoverlapping(
                        data.as_ptr().add(q),
                        bytes.as_mut_ptr().add(1),
                        3,
                    );
                };

                let res = u32::from_be_bytes(bytes);
                Ok((res >> (8 - r)) as u16)
            } else {
                Err(Error::CellUnderflow)
            }
        } else {
            Err(Error::CellUnderflow)
        }
    }

    /// Tries to read the next `u16`, incrementing the bits window start.
    pub fn load_u16(&mut self) -> Result<u16, Error> {
        let res = self.get_u16(0);
        self.range.bits_start += 16 * res.is_ok() as u16;
        res
    }

    /// Reads `u32` starting from the `offset`.
    pub fn get_u32(&self, offset: u16) -> Result<u32, Error> {
        if self.range.bits_start + offset + 32 <= self.range.bits_end {
            let index = self.range.bits_start + offset;
            let data = self.cell.data();
            let data_len = data.len();

            let r = index % 8;
            let q = (index / 8) as usize;

            if r == 0 && q + 4 <= data_len {
                // xxxxxxxx|yyyyyyyy|zzzzzzzz|wwwwwwww -> xxxxxxxx|yyyyyyyy|zzzzzzzz|wwwwwwww
                //^r

                // SAFETY: `q + 4 <= data_len`
                Ok(u32::from_be_bytes(unsafe {
                    *(data.as_ptr().add(q) as *const [u8; 4])
                }))
            } else if r != 0 && q + 5 <= data_len {
                // ___xxxxx|yyyyyyyy|zzz_____ -> xxxxxyyy|yyyyyzzz
                //  r^

                let mut bytes = [0u8; 8];

                // SAFETY: `q + 5 <= data_len`
                unsafe {
                    std::ptr::copy_nonoverlapping(
                        data.as_ptr().add(q),
                        bytes.as_mut_ptr().add(3),
                        5,
                    );
                };

                let res = u64::from_be_bytes(bytes);
                Ok((res >> (8 - r)) as u32)
            } else {
                Err(Error::CellUnderflow)
            }
        } else {
            Err(Error::CellUnderflow)
        }
    }

    /// Tries to read the next `u32`, incrementing the bits window start.
    pub fn load_u32(&mut self) -> Result<u32, Error> {
        let res = self.get_u32(0);
        self.range.bits_start += 32 * res.is_ok() as u16;
        res
    }

    /// Reads `u64` starting from the `offset`.
    pub fn get_u64(&self, offset: u16) -> Result<u64, Error> {
        if self.range.bits_start + offset + 64 <= self.range.bits_end {
            let index = self.range.bits_start + offset;
            let data = self.cell.data();
            let data_len = data.len();

            let r = index % 8;
            let q = (index / 8) as usize;

            if r == 0 && q + 8 <= data_len {
                // SAFETY: `q + 8 <= data_len`
                Ok(u64::from_be_bytes(unsafe {
                    *(data.as_ptr().add(q) as *const [u8; 8])
                }))
            } else if r != 0 && q + 9 <= data_len {
                // ___xxxxx|...|zzz_____ -> xxxxx...|...zzz
                //  r^

                let mut bytes = [0u8; 16];

                // SAFETY: `q + 9 <= data_len`
                unsafe {
                    std::ptr::copy_nonoverlapping(
                        data.as_ptr().add(q),
                        bytes.as_mut_ptr().add(7),
                        9,
                    );
                };

                let res = u128::from_be_bytes(bytes);
                Ok((res >> (8 - r)) as u64)
            } else {
                Err(Error::CellUnderflow)
            }
        } else {
            Err(Error::CellUnderflow)
        }
    }

    /// Tries to read the next `u64`, incrementing the bits window start.
    pub fn load_u64(&mut self) -> Result<u64, Error> {
        let res = self.get_u64(0);
        self.range.bits_start += 64 * res.is_ok() as u16;
        res
    }

    /// Reads `u128` starting from the `offset`.
    pub fn get_u128(&self, offset: u16) -> Result<u128, Error> {
        if self.range.bits_start + offset + 128 <= self.range.bits_end {
            let index = self.range.bits_start + offset;
            let data = self.cell.data();
            let data_len = data.len();

            let r = index % 8;
            let q = (index / 8) as usize;

            if r == 0 && q + 16 <= data_len {
                // SAFETY: `q + 16 <= data_len`
                Ok(u128::from_be_bytes(unsafe {
                    *(data.as_ptr().add(q) as *const [u8; 16])
                }))
            } else if r != 0 && q + 17 <= data_len {
                // ___xxxxx|...|zzz_____ -> xxxxx...|...zzz
                //  r^

                let mut bytes = [0u8; 17];

                // SAFETY: `q + 17 <= data_len`
                unsafe {
                    std::ptr::copy_nonoverlapping(data.as_ptr().add(q), bytes.as_mut_ptr(), 17);
                };

                let res = u128::from_be_bytes(bytes[1..].try_into().unwrap());
                Ok(((bytes[0] as u128) << (120 + r)) | (res >> (8 - r)))
            } else {
                Err(Error::CellUnderflow)
            }
        } else {
            Err(Error::CellUnderflow)
        }
    }

    /// Tries to read the next `u128`, incrementing the bits window start.
    pub fn load_u128(&mut self) -> Result<u128, Error> {
        let res = self.get_u128(0);
        self.range.bits_start += 128 * res.is_ok() as u16;
        res
    }

    /// Reads 32 bytes starting from the `offset`.
    pub fn get_u256(&self, offset: u16) -> Result<HashBytes, Error> {
        if self.range.bits_start + offset + 256 <= self.range.bits_end {
            let index = self.range.bits_start + offset;
            let data = self.cell.data();
            let data_len = data.len();

            let r = index % 8;
            let q = (index / 8) as usize;

            if r == 0 && q + 32 <= data_len {
                // SAFETY: `q + 32 <= data_len`
                Ok(HashBytes(unsafe {
                    *(data.as_ptr().add(q) as *const [u8; 32])
                }))
            } else if r != 0 && q + 33 <= data_len {
                // ___xxxxx|...|zzz_____ -> xxxxx...|...zzz
                //  r^

                let shift = 8 - r;
                let rev_shift = 120 + r;

                // SAFETY: `q + 33 <= data_len`
                unsafe {
                    let mut bytes = [0u8; 33];
                    std::ptr::copy_nonoverlapping(data.as_ptr().add(q), bytes.as_mut_ptr(), 33);

                    // Interpret last 32 bytes as two u128
                    let [ovf, bytes @ ..] = bytes;
                    let [mut hi, mut lo]: [u128; 2] = std::mem::transmute(bytes);

                    // Numbers are in big endian order, swap bytes on little endian arch
                    #[cfg(target_endian = "little")]
                    {
                        hi = hi.swap_bytes();
                        lo = lo.swap_bytes();
                    }

                    // Shift right, putting `ovf` to the high bits
                    Ok(std::mem::transmute::<[[u8; 16]; 2], HashBytes>([
                        (hi >> shift | ((ovf as u128) << rev_shift)).to_be_bytes(),
                        (lo >> shift | (hi << rev_shift)).to_be_bytes(),
                    ]))
                }
            } else {
                Err(Error::CellUnderflow)
            }
        } else {
            Err(Error::CellUnderflow)
        }
    }

    /// Tries to read the next 32 bytes, incrementing the bits window start.
    pub fn load_u256(&mut self) -> Result<HashBytes, Error> {
        let res = self.get_u256(0);
        self.range.bits_start += 256 * res.is_ok() as u16;
        res
    }

    /// Reads a small number of `bits` (0..=8) starting from the `offset`.
    ///
    /// NOTE: Reading zero bits always succeeds,
    /// and reading more than 8 bits always fails.
    pub fn get_small_uint(&self, offset: u16, bits: u16) -> Result<u8, Error> {
        if bits == 0 {
            return Ok(0);
        }

        if bits <= 8 && self.range.bits_start + offset + bits <= self.range.bits_end {
            let index = self.range.bits_start + offset;

            let r = index % 8;
            let q = (index / 8) as usize;
            let Some(&byte) = self.cell.data().get(q) else {
                return Err(Error::CellUnderflow);
            };

            if r == 0 {
                // xxx_____ -> _____xxx
                //^r
                Ok(byte >> (8 - bits))
            } else if bits <= (8 - r) {
                // __xxx___ -> _____xxx
                // r^
                Ok((byte >> (8 - r - bits)) & ((1 << bits) - 1))
            } else {
                // ______xx|y_______ -> _____xxy
                //     r^

                let mut res = (byte as u16) << 8;
                let Some(&next_byte) = self.cell.data().get(q + 1) else {
                    return Err(Error::CellUnderflow);
                };

                res |= next_byte as u16;
                Ok((res >> (8 - r)) as u8 >> (8 - bits))
            }
        } else {
            Err(Error::CellUnderflow)
        }
    }

    /// Tries to read the next small number of `bits` (0..=8), incrementing the bits window start.
    ///
    /// NOTE: Reading zero bits always succeeds,
    /// and reading more than 8 bits always fails.
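    ///
    /// # Examples
    ///
    /// A minimal sketch (using only single-bit builder methods):
    ///
    /// ```
    /// # use everscale_types::prelude::CellBuilder;
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_bit_one()?;
    ///     builder.store_bit_zero()?;
    ///     builder.store_bit_one()?;
    ///     builder.build()?
    /// };
    /// let mut slice = cell.as_slice()?;
    ///
    /// assert_eq!(slice.load_small_uint(3)?, 0b101);
    /// # Ok(()) }
    /// ```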
    pub fn load_small_uint(&mut self, bits: u16) -> Result<u8, Error> {
        let res = self.get_small_uint(0, bits);
        self.range.bits_start += bits * res.is_ok() as u16;
        res
    }

    /// Reads `u64` from the cell (but only the specified number of bits)
    /// starting from the `offset`.
    ///
    /// NOTE: Reading zero bits always succeeds,
    /// and reading more than 64 bits always fails.
    pub fn get_uint(&self, offset: u16, mut bits: u16) -> Result<u64, Error> {
        if bits == 0 {
            return Ok(0);
        }

        if bits <= 64 && self.range.bits_start + offset + bits <= self.range.bits_end {
            let index = self.range.bits_start + offset;
            let data = self.cell.data();
            let data_len = data.len();

            // Check if data is enough
            if (self.range.bits_end + 7) / 8 > data_len as u16 {
                return Err(Error::CellUnderflow);
            }

            let r = index % 8;
            let q = (index / 8) as usize;

            // SAFETY: remaining bits are at least enough for `data_len`
            unsafe {
                let data_ptr = data.as_ptr().add(q);
                let first_byte = *data_ptr & (0xff >> r);

                let right_shift = (8 - (bits + r) % 8) % 8;

                if r + bits <= 8 {
                    // Special case if all remaining_bits are in the first byte
                    Ok((first_byte >> right_shift) as u64)
                } else {
                    let mut bytes = [0u8; 8];

                    // Copy remaining bytes
                    bits -= 8 - r;
                    std::ptr::copy_nonoverlapping(
                        data_ptr.add(1),
                        bytes.as_mut_ptr(),
                        ((bits + 7) / 8) as usize,
                    );

                    let mut result = u64::from_be_bytes(bytes) >> (64 - bits);
                    result |= (first_byte as u64) << bits;
                    Ok(result)
                }
            }
        } else {
            Err(Error::CellUnderflow)
        }
    }

    /// Tries to read the next `u64` (but only the specified number of bits),
    /// incrementing the bits window start.
    ///
    /// NOTE: Reading zero bits always succeeds,
    /// and reading more than 64 bits always fails.
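    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// # use everscale_types::prelude::CellBuilder;
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_u32(0xdeadbeaf)?;
    ///     builder.build()?
    /// };
    /// let mut slice = cell.as_slice()?;
    ///
    /// // Read the first 12 bits
    /// assert_eq!(slice.load_uint(12)?, 0xdea);
    /// # Ok(()) }
    /// ```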
    pub fn load_uint(&mut self, bits: u16) -> Result<u64, Error> {
        let res = self.get_uint(0, bits);
        self.range.bits_start += bits * res.is_ok() as u16;
        res
    }

    /// Reads the specified number of bits into `target`, starting from the `offset`.
    pub fn get_raw<'b>(
        &'_ self,
        offset: u16,
        target: &'b mut [u8],
        bits: u16,
    ) -> Result<&'b mut [u8], Error> {
        if bits == 0 {
            return Ok(&mut target[..0]);
        }

        if self.range.bits_start + offset + bits <= self.range.bits_end {
            let index = self.range.bits_start + offset;
            let data = self.cell.data();
            let data_len = data.len();

            let target_len = ((bits + 7) / 8) as usize;
            let target = if target_len <= target.len() {
                &mut target[..target_len]
            } else {
                return Err(Error::CellUnderflow);
            };

            let r = index % 8;
            let q = (index / 8) as usize;

            // SAFETY: q will be checked to be in range 0..data_len,
            // r is in range 0..=7, target is guaranteed to be `target_len`
            unsafe {
                let mut data_ptr = data.as_ptr().add(q);
                let target_ptr = target.as_mut_ptr();

                if r == 0 && q + target_len <= data_len {
                    std::ptr::copy_nonoverlapping(data_ptr, target_ptr, target_len);
                } else if r != 0 {
                    let byte_len = ((bits + r + 7) / 8) as usize - 1;
                    if q + byte_len > data_len {
                        return Err(Error::CellUnderflow);
                    }

                    let shift = 8 - r;
                    for i in 0..byte_len {
                        let target = target_ptr.add(i);
                        *target = *data_ptr << r;
                        data_ptr = data_ptr.add(1);
                        *target |= *data_ptr >> shift;
                    }
                    if byte_len < target_len {
                        *target_ptr.add(byte_len) = *data_ptr << r;
                    }
                } else {
                    return Err(Error::CellUnderflow);
                }

                let bits_r = bits % 8;
                if bits_r != 0 {
                    *target_ptr.add(target_len - 1) &= 0xff << (8 - bits_r);
                }
                Ok(target)
            }
        } else {
            Err(Error::CellUnderflow)
        }
    }

    /// Tries to read the specified number of bits into `target`, incrementing the
    /// bits window start. Returns the minimal prefix of `target` that contains all
    /// of the bits read.
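    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// # use everscale_types::prelude::CellBuilder;
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_u32(0xdeadbeaf)?;
    ///     builder.build()?
    /// };
    /// let mut slice = cell.as_slice()?;
    ///
    /// let mut buffer = [0u8; 4];
    /// let bytes = slice.load_raw(&mut buffer, 24)?;
    /// assert_eq!(bytes, &[0xde, 0xad, 0xbe][..]);
    /// # Ok(()) }
    /// ```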
    pub fn load_raw<'b>(
        &'_ mut self,
        target: &'b mut [u8],
        bits: u16,
    ) -> Result<&'b mut [u8], Error> {
        let res = self.get_raw(0, target, bits);
        self.range.bits_start += bits * res.is_ok() as u16;
        res
    }

    /// Loads all remaining bits and refs into a new slice, leaving this slice empty.
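    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// # use everscale_types::prelude::CellBuilder;
    /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
    /// let cell = {
    ///     let mut builder = CellBuilder::new();
    ///     builder.store_u32(0xdeadbeaf)?;
    ///     builder.build()?
    /// };
    /// let mut slice = cell.as_slice()?;
    /// slice.load_u8()?;
    ///
    /// let remaining = slice.load_remaining();
    /// assert_eq!(remaining.size_bits(), 24);
    /// assert!(slice.is_empty());
    /// # Ok(()) }
    /// ```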
1556    pub fn load_remaining(&mut self) -> CellSlice<'a> {
1557        let result = *self;
1558        self.range.bits_start = self.range.bits_end;
1559        self.range.refs_start = self.range.refs_end;
1560        result
1561    }
1562
1563    /// Returns a reference to the Nth child cell (relative to this slice's refs window).
    pub fn get_reference(&self, index: u8) -> Result<&'a DynCell, Error> {
        if self.range.refs_start + index < self.range.refs_end {
            if let Some(cell) = self.cell.reference(self.range.refs_start + index) {
                return Ok(cell);
            }
        }
        Err(Error::CellUnderflow)
    }

    /// Returns the Nth child cell (relative to this slice's refs window).
    pub fn get_reference_cloned(&self, index: u8) -> Result<Cell, Error> {
        if self.range.refs_start + index < self.range.refs_end {
            if let Some(cell) = self.cell.reference_cloned(self.range.refs_start + index) {
                return Ok(cell);
            }
        }
        Err(Error::CellUnderflow)
    }

    /// Tries to load the specified child cell as a slice.
    /// Returns an error if the loaded cell is absent or is pruned.
    pub fn get_reference_as_slice(&self, index: u8) -> Result<CellSlice<'a>, Error> {
        if self.range.refs_start + index < self.range.refs_end {
            let Some(cell) = self.cell.reference(self.range.refs_start + index) else {
                return Err(Error::CellUnderflow);
            };

            CellSlice::new(cell)
        } else {
            Err(Error::CellUnderflow)
        }
    }

    /// Creates an iterator through child nodes.
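    ///
    /// A usage sketch (hypothetical cell):
    ///
    /// ```ignore
    /// for child in cell.as_slice()?.references() {
    ///     println!("child: {}", child.repr_hash());
    /// }
    /// ```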
    pub fn references(&self) -> RefsIter<'a> {
        RefsIter {
            cell: self.cell,
            max: self.range.refs_end,
            index: self.range.refs_start,
        }
    }

    /// Converts this slice into an iterator through child nodes.
    #[inline]
    pub fn into_references(self) -> RefsIter<'a> {
        self.references()
    }

    /// Returns this slice, but with references skipped.
    #[inline]
    pub fn without_references(mut self) -> Self {
        self.range.refs_start = self.range.refs_end;
        self
    }

    /// Returns a reference to the next child cell (relative to this slice's refs window),
    /// incrementing the refs window start.
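    ///
    /// A usage sketch (hypothetical cell; drains the refs window one child at
    /// a time, assuming an `is_refs_empty` helper on the slice):
    ///
    /// ```ignore
    /// let mut slice = cell.as_slice()?;
    /// while !slice.is_refs_empty() {
    ///     let child = slice.load_reference()?;
    ///     println!("child: {}", child.repr_hash());
    /// }
    /// ```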
    pub fn load_reference(&mut self) -> Result<&'a DynCell, Error> {
        if self.range.refs_start < self.range.refs_end {
            let res = match self.cell.reference(self.range.refs_start) {
                Some(cell) => Ok(cell),
                None => Err(Error::CellUnderflow),
            };
            self.range.refs_start += res.is_ok() as u8;
            res
        } else {
            Err(Error::CellUnderflow)
        }
    }

    /// Returns the next child cell (relative to this slice's refs window),
    /// incrementing the refs window start.
    pub fn load_reference_cloned(&mut self) -> Result<Cell, Error> {
        if self.range.refs_start < self.range.refs_end {
            let res = match self.cell.reference_cloned(self.range.refs_start) {
                Some(cell) => Ok(cell),
                None => Err(Error::CellUnderflow),
            };
            self.range.refs_start += res.is_ok() as u8;
            res
        } else {
            Err(Error::CellUnderflow)
        }
    }

    /// Tries to load the next child cell as a slice.
    /// Returns an error if the loaded cell is absent or is pruned.
    ///
    /// NOTE: If a pruned cell is accessed, the current slice remains unchanged.
    pub fn load_reference_as_slice(&mut self) -> Result<CellSlice<'a>, Error> {
        if self.range.refs_start < self.range.refs_end {
            let Some(cell) = self.cell.reference(self.range.refs_start) else {
                return Err(Error::CellUnderflow);
            };

            let res = CellSlice::new(cell);
            self.range.refs_start += res.is_ok() as u8;
            res
        } else {
            Err(Error::CellUnderflow)
        }
    }

    /// Returns an object which will display data as a bitstring
    /// with a termination bit.
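    ///
    /// A usage sketch (hypothetical cell):
    ///
    /// ```ignore
    /// let slice = cell.as_slice()?;
    /// // `Display` renders the data as a hex bitstring,
    /// // `Binary` as the raw bits.
    /// println!("{}", slice.display_data());
    /// println!("{:b}", slice.display_data());
    /// ```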
    pub fn display_data<'b: 'a>(&'b self) -> impl std::fmt::Display + std::fmt::Binary + 'b {
        fn make_bitstring<'b: 'a, 'a>(
            s: &'b CellSlice<'a>,
            bytes: &'b mut [u8; 128],
        ) -> Result<Bitstring<'b>, std::fmt::Error> {
            // 128 bytes is enough for the maximum cell data size of 1023 bits.
            let bit_len = s.size_bits();
            if s.get_raw(0, bytes, bit_len).is_err() {
                return Err(std::fmt::Error);
            }
            Ok(Bitstring { bytes, bit_len })
        }

        struct DisplayData<'b, 'a>(&'b CellSlice<'a>);

        impl<'b: 'a, 'a> std::fmt::Display for DisplayData<'b, 'a> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                let mut bytes = [0u8; 128];
                std::fmt::Display::fmt(&ok!(make_bitstring(self.0, &mut bytes)), f)
            }
        }

        impl<'b: 'a, 'a> std::fmt::Binary for DisplayData<'b, 'a> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                let mut bytes = [0u8; 128];
                std::fmt::Binary::fmt(&ok!(make_bitstring(self.0, &mut bytes)), f)
            }
        }

        DisplayData(self)
    }
}

impl ExactSize for CellSlice<'_> {
    #[inline]
    fn exact_size(&self) -> Size {
        self.size()
    }
}

/// A type with a known size in bits and refs.
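///
/// A manual implementation sketch for a hypothetical type that is stored as
/// one presence bit plus an optional reference:
///
/// ```ignore
/// struct MaybeCell(Option<Cell>);
///
/// impl ExactSize for MaybeCell {
///     fn exact_size(&self) -> Size {
///         // One bit for the `Option` tag, one ref when the cell is present.
///         Size { bits: 1, refs: self.0.is_some() as u8 }
///     }
/// }
/// ```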
pub trait ExactSize {
    /// Exact size of the value when it is stored in a slice.
    fn exact_size(&self) -> Size;
}

impl<T: ExactSize> ExactSize for &T {
    #[inline]
    fn exact_size(&self) -> Size {
        T::exact_size(self)
    }
}

impl<T: ExactSize> ExactSize for &mut T {
    #[inline]
    fn exact_size(&self) -> Size {
        T::exact_size(self)
    }
}

impl<T: ExactSize> ExactSize for Box<T> {
    #[inline]
    fn exact_size(&self) -> Size {
        T::exact_size(self)
    }
}

impl<T: ExactSize> ExactSize for Arc<T> {
    #[inline]
    fn exact_size(&self) -> Size {
        T::exact_size(self)
    }
}

impl<T: ExactSize> ExactSize for Rc<T> {
    #[inline]
    fn exact_size(&self) -> Size {
        T::exact_size(self)
    }
}

impl ExactSize for () {
    #[inline]
    fn exact_size(&self) -> Size {
        Size::ZERO
    }
}

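// Removes the leading `+` emitted by the `$(+ ...)+` repetition below,
// turning `+ a + b + c` into `a + b + c`.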
macro_rules! strip_plus {
    (+ $($rest: tt)*) => {
        $($rest)*
    }
}

macro_rules! impl_exact_size_for_tuples {
    ($( ($($tt:tt: $t:ident),+) ),*$(,)?) => {$(
        impl<$($t: ExactSize),+> ExactSize for ($($t),*,) {
            fn exact_size(&self) -> Size {
                strip_plus!($(+ ExactSize::exact_size(&self.$tt))+)
            }
        }
    )*};
}

impl_exact_size_for_tuples! {
    (0: T0),
    (0: T0, 1: T1),
    (0: T0, 1: T1, 2: T2),
    (0: T0, 1: T1, 2: T2, 3: T3),
    (0: T0, 1: T1, 2: T2, 3: T3, 4: T4),
    (0: T0, 1: T1, 2: T2, 3: T3, 4: T4, 5: T5),
}

/// # Safety
/// The following must be true:
/// - (offset + bit_count + 7) / 8 <= data.len()
unsafe fn bits_memscan(data: &[u8], mut offset: u16, bit_count: u16, cmp_to: bool) -> u16 {
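    // Counts the leading bits that are equal to `cmp_to`, starting at `offset`.
    // The data is XORed with a mask so that a matching bit always reads as zero,
    // then scanned: the unaligned head byte first, then single bytes until the
    // pointer is u64-aligned, then whole 64-bit words, then the tail.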
    #[inline]
    const fn is_aligned_to_u64(ptr: *const u8) -> bool {
        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
        // provenance).
        let addr: usize = unsafe { std::mem::transmute(ptr.cast::<()>()) };
        addr & (std::mem::align_of::<u64>() - 1) == 0
    }

    if bit_count == 0 {
        return 0;
    }
    debug_assert!((offset + bit_count + 7) as usize / 8 <= data.len());

    let xor_value = cmp_to as u8 * u8::MAX;

    // Apply offset to byte level
    let mut ptr = data.as_ptr().add(offset as usize >> 3);
    offset &= 0b111;

    let mut rem = bit_count;
    if offset > 0 {
        // NOTE: `offset` is in range 1..=7
        let v = (*ptr ^ xor_value) << offset;
        let c = v.leading_zeros() as u16;
        let l = 8 - offset;
        if c < l || bit_count <= l {
            return c.min(bit_count);
        }

        ptr = ptr.add(1);
        rem -= l;
    }

    while rem >= 8 && !is_aligned_to_u64(ptr) {
        let v = *ptr ^ xor_value;
        if v > 0 {
            return bit_count - rem + v.leading_zeros() as u16;
        }

        ptr = ptr.add(1);
        rem -= 8;
    }

    let xor_value_l = cmp_to as u64 * u64::MAX;
    while rem >= 64 {
        #[cfg(target_endian = "little")]
        let z = { (*ptr.cast::<u64>()).swap_bytes() ^ xor_value_l };
        #[cfg(not(target_endian = "little"))]
        let z = { *ptr.cast::<u64>() ^ xor_value_l };

        if z > 0 {
            return bit_count - rem + z.leading_zeros() as u16;
        }

        ptr = ptr.add(8);
        rem -= 64;
    }

    while rem >= 8 {
        let v = *ptr ^ xor_value;
        if v > 0 {
            return bit_count - rem + v.leading_zeros() as u16;
        }

        ptr = ptr.add(1);
        rem -= 8;
    }

    if rem > 0 {
        let v = *ptr ^ xor_value;
        let c = v.leading_zeros() as u16;
        if c < rem {
            return bit_count - rem + c;
        }
    }

    bit_count
}

/// # Safety
/// The following must be true:
/// - (offset + bit_count + 7) / 8 <= data.len()
unsafe fn bits_memscan_rev(data: &[u8], mut offset: u16, mut bit_count: u16, cmp_to: bool) -> u16 {
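    // Mirror of `bits_memscan`: counts the trailing bits equal to `cmp_to`,
    // scanning backwards from the end of the bits window: the partial byte at
    // the end first, then unaligned 32-bit chunks, then single bytes, then the
    // remaining head bits.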
    if bit_count == 0 {
        return 0;
    }
    debug_assert!((offset + bit_count + 7) as usize / 8 <= data.len());

    let xor_value = cmp_to as u8 * u8::MAX;
    let mut ptr = data.as_ptr().add((offset + bit_count) as usize >> 3);
    offset = (offset + bit_count) & 0b111;

    let mut res = offset;
    if offset > 0 {
        let v = (*ptr >> (8 - offset)) ^ xor_value;
        let c = v.trailing_zeros() as u16;
        if c < offset || res >= bit_count {
            return c.min(bit_count);
        }
        bit_count -= res;
    }

    let xor_value_l = cmp_to as u32 * u32::MAX;
    while bit_count >= 32 {
        ptr = ptr.sub(4);

        #[cfg(target_endian = "little")]
        let v = { ptr.cast::<u32>().read_unaligned().swap_bytes() ^ xor_value_l };
        #[cfg(not(target_endian = "little"))]
        let v = { ptr.cast::<u32>().read_unaligned() ^ xor_value_l };

        if v > 0 {
            return res + v.trailing_zeros() as u16;
        }

        res += 32;
        bit_count -= 32;
    }

    while bit_count >= 8 {
        ptr = ptr.sub(1);
        let v = *ptr ^ xor_value;
        if v > 0 {
            return res + v.trailing_zeros() as u16;
        }

        res += 8;
        bit_count -= 8;
    }

    if bit_count > 0 {
        ptr = ptr.sub(1);
        let v = *ptr ^ xor_value;
        res + std::cmp::min(v.trailing_zeros() as u16, bit_count)
    } else {
        res
    }
}

#[cfg(test)]
mod tests {
    use crate::error::Error;
    use crate::prelude::*;

    fn build_cell<F: FnOnce(&mut CellBuilder) -> Result<(), Error>>(f: F) -> Cell {
        let mut builder = CellBuilder::new();
        f(&mut builder).unwrap();
        builder.build().unwrap()
    }

    fn print_slice(name: &str, slice: CellSlice) {
        println!(
            "{name}: {}",
            build_cell(|b| b.store_slice(slice)).display_tree()
        );
    }

    #[test]
    #[cfg_attr(miri, ignore)] // takes too long to execute on miri
    fn get_raw() -> anyhow::Result<()> {
        let cell = CellBuilder::from_raw_data(&[0xff; 128], 200).and_then(CellBuilder::build)?;
        let slice = cell.as_slice()?;

        let mut data = [0; 1];
        assert!(slice.get_raw(0, &mut data, 100).is_err());

        let mut data = [0; 64];
        assert!(slice.get_raw(0, &mut data, 500).is_err());

        let cell = CellBuilder::from_raw_data(&[0xff; 128], 1023).and_then(CellBuilder::build)?;
        let slice = cell.as_slice()?;

        let mut data = [0; 128];
        for offset in 0..=8 {
            for bits in 0..=(1023 - offset) {
                slice.get_raw(offset, &mut data, bits)?;
            }
        }

        Ok(())
    }

    #[test]
    fn strip_data_prefix() -> anyhow::Result<()> {
        let cell1 = build_cell(|b| {
            b.store_u16(0xabcd)?;
            b.store_bit_zero()?;
            b.store_u16(0xffff)
        });
        let mut slice1 = cell1.as_slice()?;
        slice1.skip_first(4, 0)?;

        let cell2 = build_cell(|b| {
            b.store_uint(0xbcd, 12)?;
            b.store_bit_zero()
        });

        print_slice("A", slice1);
        print_slice("B", cell2.as_slice()?);
        print_slice("LCP", slice1.longest_common_data_prefix(&cell2.as_slice()?));

        let mut without_prefix = slice1.strip_data_prefix(&cell2.as_slice()?).unwrap();
        print_slice("Result", without_prefix);

        assert_eq!(without_prefix.load_u16(), Ok(0xffff));
        assert!(without_prefix.is_data_empty());

        Ok(())
    }

    #[test]
    fn longest_common_data_prefix() -> anyhow::Result<()> {
        let cell1 = build_cell(|b| b.store_u64(0xffffffff00000000));
        let mut slice1 = cell1.as_slice()?;
        slice1.skip_first(1, 0)?;

        let cell2 = build_cell(|b| b.store_u64(0xfffffff000000000));
        let mut slice2 = cell2.as_slice()?;
        slice2.skip_first(6, 0)?;

        let prefix = slice1.longest_common_data_prefix(&slice2);

        let prefix = build_cell(|b| b.store_slice(prefix));
        println!("{}", prefix.display_root());
        assert_eq!(prefix.data(), [0xff, 0xff, 0xfe]);
        assert_eq!(prefix.bit_len(), 22);

        //
        let cell1 = build_cell(|b| b.store_u32(0));
        let cell2 = build_cell(|b| b.store_u32(1));
        let prefix = cell1
            .as_slice()?
            .longest_common_data_prefix(&cell2.as_slice()?);
        assert_eq!(prefix.size_bits(), 31);

        //
        let cell1 = build_cell(|b| b.store_raw(&[0, 0, 2, 2], 32));
        let mut slice1 = cell1.as_slice()?;
        slice1.skip_first(23, 0)?;

        let cell2 = build_cell(|b| b.store_raw(&[0; 128], 1023));
        let slice2 = cell2.as_slice()?.get_prefix(8, 0);

        let prefix = slice1.longest_common_data_prefix(&slice2);
        assert_eq!(prefix.size_bits(), 7);

        //
        let cell1 = build_cell(|b| b.store_u16(0));
        let mut slice1 = cell1.as_slice()?;
        slice1.skip_first(5, 0)?;

        let cell2 = build_cell(|b| b.store_u8(0));
        let mut slice2 = cell2.as_slice()?;
        slice2.skip_first(2, 0)?;

        let prefix = slice1
            .get_prefix(5, 0)
            .longest_common_data_prefix(&slice2.get_prefix(5, 0));
        assert_eq!(prefix.size_bits(), 5);

        Ok(())
    }

    #[test]
    fn unaligned_longest_common_data_prefix() -> anyhow::Result<()> {
        let raw_key =
            Boc::decode_base64("te6ccgEBAQEAJAAAQ0gBfkBkwI7RPE0/VFMj7yvbvFrpvOMEPDQHDlGWFIELM9s=")?;

        let unaligned = {
            let mut raw_key = raw_key.as_slice()?;
            raw_key.skip_first(4, 0)?;
            raw_key.get_prefix(267, 0)
        };
        let aligned = CellBuilder::build_from(unaligned)?;
        let aligned = aligned.as_slice()?;

        assert_eq!(unaligned.lex_cmp(&aligned)?, std::cmp::Ordering::Equal);

        let prefix = Boc::decode_base64("te6ccgEBAwEAjgACB4HQAsACAQCBvwFNima9rQU2tAaEHK+4fSc/aaYcTkT20uyfZuGbZjVMAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAIAgb8RUsb7kteM/ARjNwkzPPYoytZRb4Ic9epNxxLMl/2h7AAAAAAAAAAAAAAAAAAAAADMzMzMzMzMzMzMzMzMzMzO")?;
        let prefix = {
            let mut prefix = prefix.as_slice()?;
            prefix.skip_first(11, 0)?;
            prefix.get_prefix(14, 0)
        };

        let lcp_unaligned = unaligned.longest_common_data_prefix(&prefix);
        let lcp_aligned = aligned.longest_common_data_prefix(&prefix);

        assert_eq!(
            lcp_unaligned.lex_cmp(&lcp_aligned)?,
            std::cmp::Ordering::Equal
        );

        Ok(())
    }

    #[test]
    fn get_uint() -> anyhow::Result<()> {
        let cell = build_cell(|b| b.store_u64(0xfafafafafafafafa));

        let slice = cell.as_slice()?;
        assert_eq!(slice.get_uint(0, 3), Ok(0b111));
        assert_eq!(slice.get_uint(0, 11), Ok(0b11111010111));
        assert_eq!(slice.get_uint(1, 11), Ok(0b11110101111));
        assert_eq!(slice.get_uint(8, 3), Ok(0b111));
        assert_eq!(slice.get_uint(0, 16), Ok(0xfafa));

        Ok(())
    }

    #[test]
    fn test_uniform() -> anyhow::Result<()> {
        let cell = build_cell(|b| b.store_zeros(10));
        assert_eq!(cell.as_slice()?.test_uniform(), Some(false));

        let cell = build_cell(|b| b.store_u8(0xff));
        assert_eq!(cell.as_slice()?.test_uniform(), Some(true));

        let cell = build_cell(|b| b.store_u8(123));
        assert_eq!(cell.as_slice()?.test_uniform(), None);

        let cell = build_cell(|b| b.store_u16(123));
        assert_eq!(cell.as_slice()?.test_uniform(), None);

        let cell = build_cell(|b| {
            b.store_zeros(9)?;
            b.store_bit_one()
        });
        assert_eq!(cell.as_slice()?.test_uniform(), None);

        let cell = build_cell(|b| {
            b.store_zeros(20)?;
            b.store_bit_one()
        });
        assert_eq!(cell.as_slice()?.test_uniform(), None);

        let cell = build_cell(|b| {
            b.store_bit_zero()?;
            b.store_uint(u64::MAX, 29)
        });
        let mut slice = cell.as_slice()?;
        slice.skip_first(1, 0)?;
        assert_eq!(slice.test_uniform(), Some(true));

        Ok(())
    }

    #[test]
    fn compare_by_content() -> anyhow::Result<()> {
        fn cmp<L, R>(l: L, r: R) -> Result<std::cmp::Ordering, Error>
        where
            L: FnOnce(&mut CellBuilder) -> Result<(), Error>,
            R: FnOnce(&mut CellBuilder) -> Result<(), Error>,
        {
            let cell1 = build_cell(l);
            let cell2 = build_cell(r);
            cell1.as_slice()?.lex_cmp(&cell2.as_slice()?)
        }

        assert_eq!(
            cmp(
                |b| b.store_u64(0xffffffff0000000f),
                |b| b.store_u64(0xffffffff00000000)
            )?,
            std::cmp::Ordering::Greater
        );

        assert_eq!(
            cmp(
                |b| b.store_u64(0xfffffff00000000),
                |b| b.store_u64(0xffffffff00000000)
            )?,
            std::cmp::Ordering::Less
        );

        assert_eq!(
            cmp(
                |b| b.store_u64(0xffffffff00000000),
                |b| b.store_u64(0xffffffff00000000)
            )?,
            std::cmp::Ordering::Equal
        );

        assert_eq!(
            cmp(
                |b| b.store_uint(0xffffffff00000000, 60),
                |b| b.store_u64(0xffffffff00000000)
            )?,
            std::cmp::Ordering::Less
        );

        Ok(())
    }

    #[test]
    fn is_prefix_of() -> anyhow::Result<()> {
        let left = build_cell(|b| b.store_u64(0xabcdef1234567890));
        let right = build_cell(|b| b.store_u64(0xabcdef0000000000));

        // Not a prefix
        {
            let left = left.as_slice()?;
            let right = right.as_slice()?;
            assert!(!right.is_data_prefix_of(&left).unwrap());
        }

        // Aligned prefix
        {
            let left = left.as_slice()?;
            let mut right = right.as_slice()?;
            right.only_first(24, 0)?;
            assert!(right.is_data_prefix_of(&left).unwrap());
        }

        // Shifted prefix
        {
            let mut left = left.as_slice()?;
            left.skip_first(3, 0)?;

            let mut right = right.as_slice()?;
            right.skip_first(3, 0)?;
            right.only_first(21, 0)?;

            assert!(right.is_data_prefix_of(&left).unwrap());
        }

        // Empty prefix
        {
            let left = left.as_slice()?;
            let right = Cell::empty_cell_ref().as_slice()?;
            assert!(right.is_data_prefix_of(&left).unwrap());
        }

        // Not a prefix of an empty slice
        {
            let left = Cell::empty_cell_ref().as_slice()?;
            let right = right.as_slice()?;
            assert!(!right.is_data_prefix_of(&left).unwrap());
        }

        Ok(())
    }

    #[test]
    fn is_suffix_of() -> anyhow::Result<()> {
        let left = build_cell(|b| b.store_u64(0xabcdef1234567890));
        let right = build_cell(|b| b.store_u64(0x0000001234567890));

        // Not a suffix
        {
            let left = left.as_slice()?;
            let right = right.as_slice()?;
            assert!(!right.is_data_suffix_of(&left).unwrap());
        }

        // Aligned suffix
        {
            let left = left.as_slice()?;
            let mut right = right.as_slice()?;
            right.only_last(40, 0)?;
            assert!(right.is_data_suffix_of(&left).unwrap());
        }

        // Shifted suffix
        {
            let mut left = left.as_slice()?;
            left.skip_last(3, 0)?;

            let mut right = right.as_slice()?;
            right.skip_last(3, 0)?;
            right.only_last(37, 0)?;

            assert!(right.is_data_suffix_of(&left).unwrap());
        }

        // Empty suffix
        {
            let left = left.as_slice()?;
            let right = Cell::empty_cell_ref().as_slice()?;
            assert!(right.is_data_suffix_of(&left).unwrap());
        }

        // Not a suffix of an empty slice
        {
            let left = Cell::empty_cell_ref().as_slice()?;
            let right = right.as_slice()?;
            assert!(!right.is_data_suffix_of(&left).unwrap());
        }

        Ok(())
    }

    #[test]
    fn leading_bits() -> anyhow::Result<()> {
        // Empty slice has zero leading bits
        assert_eq!(Cell::empty_cell_ref().as_slice()?.count_leading(false)?, 0);
        assert_eq!(Cell::empty_cell_ref().as_slice()?.count_leading(true)?, 0);

        // A full uniform slice counts all 1023 bits
        assert_eq!(
            Cell::all_zeros_ref().as_slice()?.count_leading(false)?,
            1023
        );
        assert_eq!(Cell::all_ones_ref().as_slice()?.count_leading(true)?, 1023);

        // A full uniform slice has no leading bits of the opposite value
        assert_eq!(Cell::all_zeros_ref().as_slice()?.count_leading(true)?, 0);
        assert_eq!(Cell::all_ones_ref().as_slice()?.count_leading(false)?, 0);

        // Test for different alignments
        for shift_before in [false, true] {
            for shift_after in [false, true] {
                for i in 0..128 {
                    let mut builder = CellBuilder::new();

                    if shift_before {
                        builder.store_ones(7)?;
                    }
                    builder.store_u128(1 << i)?;
                    if shift_after {
                        builder.store_ones(14)?;
                    }

                    let mut slice = builder.as_data_slice();

                    if shift_before {
                        slice.skip_first(7, 0)?;
                    }
                    if shift_after {
                        slice.only_first(128, 0)?;
                    }

                    assert_eq!(slice.count_leading(false)?, 127 - i);
                    assert_eq!(slice.count_leading(true)?, (i == 127) as u16);
                }
            }
        }

        Ok(())
    }

    #[test]
    fn trailing_bits() -> anyhow::Result<()> {
        // Empty slice has zero trailing bits
        assert_eq!(Cell::empty_cell_ref().as_slice()?.count_trailing(false)?, 0);
        assert_eq!(Cell::empty_cell_ref().as_slice()?.count_trailing(true)?, 0);

        // A full uniform slice counts all 1023 bits
        assert_eq!(
            Cell::all_zeros_ref().as_slice()?.count_trailing(false)?,
            1023
        );
        assert_eq!(Cell::all_ones_ref().as_slice()?.count_trailing(true)?, 1023);

        // A full uniform slice has no trailing bits of the opposite value
        assert_eq!(Cell::all_zeros_ref().as_slice()?.count_trailing(true)?, 0);
        assert_eq!(Cell::all_ones_ref().as_slice()?.count_trailing(false)?, 0);

        // Test for different alignments
        for shift_before in [false, true] {
            for shift_after in [false, true] {
                for i in 0..128 {
                    let mut builder = CellBuilder::new();

                    if shift_before {
                        builder.store_ones(7)?;
                    }
                    builder.store_u128(1 << i)?;
                    if shift_after {
                        builder.store_ones(14)?;
                    }

                    let mut slice = builder.as_data_slice();

                    if shift_before {
                        slice.skip_first(7, 0)?;
                    }
                    if shift_after {
                        slice.only_first(128, 0)?;
                    }

                    assert_eq!(slice.count_trailing(false)?, i);
                    assert_eq!(slice.count_trailing(true)?, (i == 0) as u16);
                }
            }
        }

        Ok(())
    }
}