everscale_types/cell/
builder.rs

1use std::cmp::Ordering;
2use std::mem::MaybeUninit;
3use std::rc::Rc;
4use std::sync::Arc;
5
6use crate::cell::cell_context::{CellContext, CellParts};
7use crate::cell::{
8    Cell, CellDescriptor, CellImpl, CellInner, CellSlice, CellType, DynCell, HashBytes, LevelMask,
9    Size, MAX_BIT_LEN, MAX_REF_COUNT,
10};
11use crate::error::Error;
12use crate::util::{ArrayVec, Bitstring};
13
14use super::CellFamily;
15#[cfg(feature = "stats")]
16use super::CellTreeStats;
17
/// A data structure that can be serialized into cells.
pub trait Store {
    /// Tries to store itself into the cell builder.
    ///
    /// Returns an error (e.g. [`Error::CellOverflow`]) when the value does
    /// not fit into the builder's remaining bit/reference capacity.
    fn store_into(
        &self,
        builder: &mut CellBuilder,
        context: &mut dyn CellContext,
    ) -> Result<(), Error>;
}
27
// Forwarding impls: `Store` passes through common reference and smart
// pointer wrappers, delegating to the wrapped value's implementation.

impl<T: Store + ?Sized> Store for &T {
    #[inline]
    fn store_into(
        &self,
        builder: &mut CellBuilder,
        context: &mut dyn CellContext,
    ) -> Result<(), Error> {
        <T as Store>::store_into(self, builder, context)
    }
}

impl<T: Store + ?Sized> Store for &mut T {
    #[inline]
    fn store_into(
        &self,
        builder: &mut CellBuilder,
        context: &mut dyn CellContext,
    ) -> Result<(), Error> {
        <T as Store>::store_into(self, builder, context)
    }
}

impl<T: Store + ?Sized> Store for Box<T> {
    #[inline]
    fn store_into(
        &self,
        builder: &mut CellBuilder,
        context: &mut dyn CellContext,
    ) -> Result<(), Error> {
        <T as Store>::store_into(self.as_ref(), builder, context)
    }
}

impl<T: Store + ?Sized> Store for Arc<T> {
    #[inline]
    fn store_into(
        &self,
        builder: &mut CellBuilder,
        context: &mut dyn CellContext,
    ) -> Result<(), Error> {
        <T as Store>::store_into(self.as_ref(), builder, context)
    }
}

impl<T: Store + ?Sized> Store for Rc<T> {
    #[inline]
    fn store_into(
        &self,
        builder: &mut CellBuilder,
        context: &mut dyn CellContext,
    ) -> Result<(), Error> {
        <T as Store>::store_into(self.as_ref(), builder, context)
    }
}
82
// The unit type stores nothing: no bits, no references, always succeeds.
impl Store for () {
    #[inline]
    fn store_into(&self, _: &mut CellBuilder, _: &mut dyn CellContext) -> Result<(), Error> {
        Ok(())
    }
}
89
// Implements `Store` for tuples (2..=6 elements) by storing each field
// left-to-right, short-circuiting on the first error.
macro_rules! impl_store_for_tuples {
    ($( ($($field:ident: $t:ident),+) ),*$(,)?) => {$(
        impl<$($t: Store),+> Store for ($($t),*) {
            fn store_into(
                &self,
                builder: &mut CellBuilder,
                context: &mut dyn CellContext
            ) -> Result<(), Error> {
                let ($($field),+) = self;
                $(ok!($field.store_into(builder, context)));*;
                Ok(())
            }
        }
    )*};
}

impl_store_for_tuples! {
    (t1: T1, t2: T2),
    (t1: T1, t2: T2, t3: T3),
    (t1: T1, t2: T2, t3: T3, t4: T4),
    (t1: T1, t2: T2, t3: T3, t4: T4, t5: T5),
    (t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6),
}
113
// `Option<T>` is stored with a leading presence bit:
// a `1` bit followed by the value for `Some`, a single `0` bit for `None`.
impl<T: Store> Store for Option<T> {
    #[inline]
    fn store_into(
        &self,
        builder: &mut CellBuilder,
        context: &mut dyn CellContext,
    ) -> Result<(), Error> {
        match self {
            Some(data) => {
                ok!(builder.store_bit_one());
                data.store_into(builder, context)
            }
            None => builder.store_bit_zero(),
        }
    }
}
130
// A slice is stored inline: its remaining data bits and references are
// appended to the builder.
impl<'a> Store for CellSlice<'a> {
    #[inline]
    fn store_into(&self, builder: &mut CellBuilder, _: &mut dyn CellContext) -> Result<(), Error> {
        builder.store_slice(self)
    }
}

// A cell is stored as a child reference, not inlined into the data.
impl Store for Cell {
    #[inline]
    fn store_into(&self, builder: &mut CellBuilder, _: &mut dyn CellContext) -> Result<(), Error> {
        builder.store_reference(self.clone())
    }
}
144
// Implements `Store` for primitive types by delegating to the matching
// fixed-width builder method.
macro_rules! impl_primitive_store {
    ($($type:ty => |$b:ident, $v:ident| $expr:expr),*$(,)?) => {
        $(impl Store for $type {
            #[inline]
            fn store_into(&self,
                $b: &mut CellBuilder,
                _: &mut dyn CellContext
            ) -> Result<(), Error> {
                let $v = self;
                $expr
            }
        })*
    };
}

impl_primitive_store! {
    bool => |b, v| b.store_bit(*v),
    u8 => |b, v| b.store_u8(*v),
    // Signed integers are stored as their two's-complement bit pattern.
    i8 => |b, v| b.store_u8(*v as u8),
    u16 => |b, v| b.store_u16(*v),
    i16 => |b, v| b.store_u16(*v as u16),
    u32 => |b, v| b.store_u32(*v),
    i32 => |b, v| b.store_u32(*v as u32),
    u64 => |b, v| b.store_u64(*v),
    i64 => |b, v| b.store_u64(*v as u64),
    u128 => |b, v| b.store_u128(*v),
    i128 => |b, v| b.store_u128(*v as u128),
    HashBytes => |b, v| b.store_u256(v),
}
174
/// Builder for constructing cells with densely packed data.
pub struct CellBuilder {
    // Cell data buffer (128 bytes = 1024 bits). Invariant: all bits past
    // `bit_len` are zero; store/rewind operations maintain this.
    data: [u8; 128],
    // Number of data bits currently stored. Invariant: `bit_len <= MAX_BIT_LEN`.
    bit_len: u16,
    // Whether the resulting cell will be built as an exotic cell.
    is_exotic: bool,
    // Child cell references, at most `MAX_REF_COUNT`.
    references: ArrayVec<Cell, MAX_REF_COUNT>,
}
182
impl Default for CellBuilder {
    /// Equivalent to [`CellBuilder::new`].
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl Clone for CellBuilder {
    fn clone(&self) -> Self {
        // Copies the fixed-size data array bitwise and clones the child refs.
        Self {
            data: self.data,
            bit_len: self.bit_len,
            is_exotic: self.is_exotic,
            references: self.references.clone(),
        }
    }
}
200
impl Eq for CellBuilder {}

impl PartialEq for CellBuilder {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        // Compares the whole 128-byte buffer, which is correct only because
        // bits past `bit_len` are kept zeroed by all store/rewind operations.
        // NOTE: `is_exotic` does not participate in equality.
        self.bit_len == other.bit_len
            && self.data == other.data
            && self.references.as_ref() == other.references.as_ref()
    }
}
211
impl Ord for CellBuilder {
    fn cmp(&self, other: &Self) -> Ordering {
        // Order first by (bit length, reference count)...
        match (self.bit_len, self.references.len()).cmp(&(other.bit_len, other.references.len())) {
            Ordering::Equal => {}
            ord => return ord,
        }

        // ...then by the raw data buffers (safe to compare all 128 bytes
        // since unused bits are zeroed and bit lengths are equal here).
        // TODO: compare subslices of len {(bits + 7) / 8} ?
        match self.data.cmp(&other.data) {
            Ordering::Equal => {}
            ord => return ord,
        }

        // ...then by children representation hashes. `zip` is lossless here:
        // both sides have the same reference count at this point.
        for (a, b) in self.references().iter().zip(other.references().iter()) {
            match a.repr_hash().cmp(b.repr_hash()) {
                Ordering::Equal => {}
                ord => return ord,
            }
        }

        // NOTE: `is_exotic` is ignored, consistent with `PartialEq`.
        Ordering::Equal
    }
}
235
impl PartialOrd for CellBuilder {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // Delegates to the total order defined by `Ord`.
        Some(self.cmp(other))
    }
}
242
impl std::fmt::Debug for CellBuilder {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Adapter that renders a `Display` value inside `debug_struct`,
        // so the data is shown via `display_data()`'s `Display` output
        // rather than a `Debug`-escaped representation.
        #[repr(transparent)]
        struct Data<'a, T>(&'a T);

        impl<T: std::fmt::Display> std::fmt::Debug for Data<'_, T> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                std::fmt::Display::fmt(self.0, f)
            }
        }

        f.debug_struct("CellBuilder")
            .field("data", &Data(&self.display_data()))
            .field("bit_len", &self.bit_len)
            .field("is_exotic", &self.is_exotic)
            .field("references", &self.references.as_ref())
            .finish()
    }
}
262
// Shared implementation for storing a fixed-width big-endian integer
// ($bytes bytes / $bits bits), handling the case where the current bit
// length is not byte-aligned. `q` is the number of complete bytes already
// written, `r` the number of bits used in the next (partial) byte.
macro_rules! impl_store_uint {
    ($self:ident, $value:ident, bytes: $bytes:literal, bits: $bits:literal) => {
        if $self.bit_len + $bits <= MAX_BIT_LEN {
            let q = ($self.bit_len / 8) as usize;
            let r = $self.bit_len % 8;
            // SAFETY: q is in range 0..=127, r is in range 0..=7
            unsafe {
                let data_ptr = $self.data.as_mut_ptr().add(q);
                debug_assert!(q + $bytes + usize::from(r > 0) <= 128);
                if r == 0 {
                    // Just append data
                    let value = $value.to_be_bytes();
                    std::ptr::copy_nonoverlapping(value.as_ptr(), data_ptr, $bytes);
                } else {
                    // Append high bits to the last byte
                    // (the top `8 - r` bits of the value fill the partial byte)
                    *data_ptr |= ($value >> ($bits - 8 + r)) as u8;
                    // Make shifted bytes
                    let value: [u8; $bytes] = ($value << (8 - r)).to_be_bytes();
                    // Write shifted bytes
                    std::ptr::copy_nonoverlapping(value.as_ptr(), data_ptr.add(1), $bytes);
                }
            };
            $self.bit_len += $bits;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    };
}
292
293impl CellBuilder {
    /// Builds a new cell from the specified data using the default cell context.
    #[inline]
    pub fn build_from<T>(data: T) -> Result<Cell, Error>
    where
        T: Store,
    {
        Self::build_from_ext(data, &mut Cell::empty_context())
    }

    /// Builds a new cell from the specified data using the provided cell context.
    #[inline]
    pub fn build_from_ext<T>(data: T, context: &mut dyn CellContext) -> Result<Cell, Error>
    where
        T: Store,
    {
        // Monomorphization guard: the generic outer fn immediately delegates
        // through `&dyn Store` to this non-generic inner fn, so the body is
        // compiled once regardless of how many `T`s call it.
        fn build_from_ext_impl(
            data: &dyn Store,
            context: &mut dyn CellContext,
        ) -> Result<Cell, Error> {
            let mut builder = CellBuilder::new();
            ok!(data.store_into(&mut builder, context));
            builder.build_ext(context)
        }
        build_from_ext_impl(&data, context)
    }
319
    /// Creates an empty cell builder.
    pub const fn new() -> Self {
        Self {
            // Zero-initialized: store operations rely on unused bits being zero.
            data: [0; 128],
            bit_len: 0,
            is_exotic: false,
            references: ArrayVec::new(),
        }
    }

    /// Tries to create a cell builder with the specified data.
    ///
    /// NOTE: if `bits` is greater than `bytes * 8`, pads the value with zeros (as high bits).
    pub fn from_raw_data(value: &[u8], bits: u16) -> Result<Self, Error> {
        let mut res = Self::new();
        ok!(res.store_raw(value, bits));
        Ok(res)
    }
338
    /// Returns a slice which contains only builder data bits and no references.
    ///
    /// NOTE: intermediate cell hash is undefined.
    pub fn as_data_slice(&self) -> CellSlice<'_> {
        // SAFETY: we interpret cell builder data as ordinary cell
        unsafe { CellSlice::new_unchecked(IntermediateDataCell::wrap(self)) }
    }

    /// Returns a slice which contains builder data and references.
    ///
    /// NOTE: intermediate cell hash is undefined.
    pub fn as_full_slice(&self) -> CellSlice<'_> {
        // SAFETY: we interpret cell builder data as ordinary cell
        unsafe { CellSlice::new_unchecked(IntermediateFullCell::wrap(self)) }
    }
354
    /// Returns an underlying cell data.
    #[inline]
    pub const fn raw_data(&self) -> &[u8; 128] {
        &self.data
    }

    /// Returns the stored cell size (data bits and reference count).
    pub const fn size(&self) -> Size {
        Size {
            bits: self.bit_len,
            refs: self.references.len() as u8,
        }
    }

    /// Returns the data size of this cell in bits.
    #[inline]
    pub const fn size_bits(&self) -> u16 {
        self.bit_len
    }

    /// Returns child cell count.
    #[inline(always)]
    pub const fn size_refs(&self) -> u8 {
        self.references.len() as u8
    }

    /// Returns the remaining capacity in bits and references.
    pub const fn spare_capacity(&self) -> Size {
        Size {
            bits: self.spare_capacity_bits(),
            refs: self.spare_capacity_refs(),
        }
    }

    /// Returns remaining data capacity in bits.
    ///
    /// Cannot underflow thanks to the `bit_len <= MAX_BIT_LEN` invariant.
    #[inline]
    pub const fn spare_capacity_bits(&self) -> u16 {
        MAX_BIT_LEN - self.bit_len
    }

    /// Returns remaining references capacity.
    #[inline]
    pub const fn spare_capacity_refs(&self) -> u8 {
        (MAX_REF_COUNT - self.references.len()) as u8
    }
400
401    /// Returns true if there is enough remaining capacity to fit `bits` and `refs`.
402    #[inline]
403    pub const fn has_capacity(&self, bits: u16, refs: u8) -> bool {
404        self.bit_len + bits <= MAX_BIT_LEN && self.references.len() + refs as usize <= MAX_REF_COUNT
405    }
406
    /// Returns whether this cell will be built as an exotic.
    #[inline]
    pub const fn is_exotic(&self) -> bool {
        self.is_exotic
    }

    /// Marks this cell as exotic.
    ///
    /// The flag only affects how the cell is interpreted when built;
    /// it does not change the stored data.
    #[inline]
    pub fn set_exotic(&mut self, is_exotic: bool) {
        self.is_exotic = is_exotic;
    }
418
    /// Removes the specified amount of bits from the end of the data.
    ///
    /// Freed bits are zeroed out to preserve the invariant that all bits
    /// past `bit_len` are zero. Returns `Error::CellUnderflow` if `bits`
    /// exceeds the stored bit length.
    pub fn rewind(&mut self, mut bits: u16) -> Result<(), Error> {
        if bits == 0 {
            return Ok(());
        }
        let Some(new_bit_len) = self.bit_len.checked_sub(bits) else {
            return Err(Error::CellUnderflow);
        };

        // q: complete bytes kept, r: bits kept in the byte at index q.
        let q = (new_bit_len / 8) as usize;
        let r = new_bit_len % 8;

        // SAFETY: q is in range 0..=127, r is in range 0..=7
        unsafe {
            let mut data_ptr = self.data.as_mut_ptr().add(q);

            if r != 0 {
                // Clear only the freed low bits of the partially-kept byte.
                let shift = 8 - r;
                *data_ptr &= 0xff << shift;
                // The remaining freed bits start at the next byte.
                bits = bits.saturating_sub(shift);
                data_ptr = data_ptr.add(1);
            }

            // Zero the rest of the freed region, rounded up to whole bytes.
            std::ptr::write_bytes(data_ptr, 0, ((bits + 7) / 8) as usize);
        }

        self.bit_len = new_bit_len;
        Ok(())
    }
448
449    /// Tries to store the specified number of zero bits in the cell,
450    /// returning `false` if there is not enough remaining capacity.
451    pub fn store_zeros(&mut self, bits: u16) -> Result<(), Error> {
452        if self.bit_len + bits <= MAX_BIT_LEN {
453            self.bit_len += bits;
454            Ok(())
455        } else {
456            Err(Error::CellOverflow)
457        }
458    }
459
    /// Tries to store the specified number of set bits in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_ones(&mut self, bits: u16) -> Result<(), Error> {
        // Copies the requested bit count from a static all-ones data buffer.
        self.store_raw(crate::cell::cell_impl::ALL_ONES_CELL.data(), bits)
    }

    /// Tries to store one zero bit in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_bit_zero(&mut self) -> Result<(), Error> {
        // No data write is needed: unused bits are already zero, so only
        // the bit length advances (and only when the bit actually fits).
        let fits = self.bit_len < MAX_BIT_LEN;
        self.bit_len += fits as u16;
        if fits {
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }
477
    /// Tries to store one non-zero bit in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_bit_one(&mut self) -> Result<(), Error> {
        if self.bit_len < MAX_BIT_LEN {
            let q = (self.bit_len / 8) as usize;
            let r = self.bit_len % 8;
            // SAFETY: `bit_len < MAX_BIT_LEN` so `q` is within the data array;
            // `r < 8` so the shift is in range. Bits are filled MSB-first.
            unsafe { *self.data.get_unchecked_mut(q) |= 1 << (7 - r) };
            self.bit_len += 1;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }
491
492    /// Tries to store one bit in the cell,
493    /// returning `false` if there is not enough remaining capacity.
494    pub fn store_bit(&mut self, value: bool) -> Result<(), Error> {
495        if value {
496            self.store_bit_one()
497        } else {
498            self.store_bit_zero()
499        }
500    }
501
    /// Tries to store `u8` in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_u8(&mut self, value: u8) -> Result<(), Error> {
        if self.bit_len + 8 <= MAX_BIT_LEN {
            // q: complete bytes written, r: bits used in the next byte.
            let q = (self.bit_len / 8) as usize;
            let r = self.bit_len % 8;
            // SAFETY: indices are checked by the debug asserts below;
            // `q <= 126` when the byte straddles two data bytes.
            unsafe {
                if r == 0 {
                    debug_assert!(q < 128);
                    // xxxxxxxx
                    *self.data.get_unchecked_mut(q) = value;
                } else {
                    debug_assert!(q + 1 < 128);
                    // yyyxxxxx|xxx00000
                    *self.data.get_unchecked_mut(q) |= value >> r;
                    *self.data.get_unchecked_mut(q + 1) = value << (8 - r);
                }
            };
            self.bit_len += 8;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }
526
    /// Tries to store `u16` in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_u16(&mut self, value: u16) -> Result<(), Error> {
        impl_store_uint!(self, value, bytes: 2, bits: 16)
    }

    /// Tries to store `u32` in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_u32(&mut self, value: u32) -> Result<(), Error> {
        impl_store_uint!(self, value, bytes: 4, bits: 32)
    }

    /// Tries to store `u64` in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_u64(&mut self, value: u64) -> Result<(), Error> {
        impl_store_uint!(self, value, bytes: 8, bits: 64)
    }

    /// Tries to store `u128` in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_u128(&mut self, value: u128) -> Result<(), Error> {
        impl_store_uint!(self, value, bytes: 16, bits: 128)
    }
550
    /// Tries to store 32 bytes in the cell,
    /// returning an error if there is not enough remaining capacity.
    #[inline]
    pub fn store_u256<T>(&mut self, value: &T) -> Result<(), Error>
    where
        T: AsRef<[u8; 32]>,
    {
        // Non-generic inner fn keeps the body monomorphized only once.
        fn store_u256_impl(builder: &mut CellBuilder, value: &[u8; 32]) -> Result<(), Error> {
            if builder.bit_len + 256 <= MAX_BIT_LEN {
                let q = (builder.bit_len / 8) as usize;
                let r = builder.bit_len % 8;
                unsafe {
                    let data_ptr = builder.data.as_mut_ptr().add(q);
                    debug_assert!(q + 32 + usize::from(r > 0) <= 128);
                    if r == 0 {
                        // Just append data
                        std::ptr::copy_nonoverlapping(value.as_ptr(), data_ptr, 32);
                    } else {
                        // Interpret 32 bytes as two u128
                        // (sizes match, so `transmute_copy` reads exactly 32 bytes)
                        let [mut hi, mut lo]: [u128; 2] = std::mem::transmute_copy(value);

                        // Numbers are in big endian order, swap bytes on little endian arch
                        #[cfg(target_endian = "little")]
                        {
                            hi = hi.swap_bytes();
                            lo = lo.swap_bytes();
                        }

                        let shift = 8 - r;

                        // Append high bits to the last byte
                        *data_ptr |= (hi >> (128 - shift)) as u8;
                        // Make shifted bytes
                        // (bits carried across the limb boundary from `lo` into `hi`)
                        let hi: [u8; 16] = ((hi << shift) | (lo >> (128 - shift))).to_be_bytes();
                        let lo: [u8; 16] = (lo << shift).to_be_bytes();
                        // Write shifted bytes
                        std::ptr::copy_nonoverlapping(hi.as_ptr(), data_ptr.add(1), 16);
                        std::ptr::copy_nonoverlapping(lo.as_ptr(), data_ptr.add(17), 16);
                    }
                };
                builder.bit_len += 256;
                Ok(())
            } else {
                Err(Error::CellOverflow)
            }
        }

        store_u256_impl(self, value.as_ref())
    }
600
    /// Tries to store `u8` in the cell (but only the specified number of bits),
    /// returning an error if there is not enough remaining capacity.
    ///
    /// NOTE: if `bits` is greater than **8**, pads the value with zeros (as high bits).
    // NOTE(review): `self.bit_len + bits` can overflow `u16` for very large
    // `bits` (panics in debug builds) — confirm callers bound `bits`.
    pub fn store_small_uint(&mut self, mut value: u8, mut bits: u16) -> Result<(), Error> {
        if bits == 0 {
            return Ok(());
        }

        if self.bit_len + bits <= MAX_BIT_LEN {
            // If bits > 8, emit the excess as leading zero padding by
            // advancing `bit_len` (unused bits are already zero).
            bits = if let Some(offset) = bits.checked_sub(8) {
                self.bit_len += offset;
                8
            } else {
                bits
            };

            // Ensure that value starts with significant bits
            value <<= 8 - bits;

            let q = (self.bit_len / 8) as usize;
            let r = self.bit_len % 8;
            unsafe {
                debug_assert!(q < 128);
                if r == 0 {
                    // xxxxxxxx
                    *self.data.get_unchecked_mut(q) = value;
                } else {
                    // yyyxxxxx|xxx00000
                    *self.data.get_unchecked_mut(q) |= value >> r;
                    // Only spill into the next byte when the value crosses it.
                    if bits + r > 8 {
                        debug_assert!(q + 1 < 128);
                        *self.data.get_unchecked_mut(q + 1) = value << (8 - r);
                    }
                }
            };
            self.bit_len += bits;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }
643
    /// Tries to store `u64` in the cell (but only the specified number of bits),
    /// returning an error if there is not enough remaining capacity.
    ///
    /// NOTE: if `bits` is greater than **64**, pads the value with zeros (as high bits).
    // NOTE(review): `self.bit_len + bits` can overflow `u16` for very large
    // `bits` (panics in debug builds) — confirm callers bound `bits`.
    pub fn store_uint(&mut self, mut value: u64, mut bits: u16) -> Result<(), Error> {
        if bits == 0 {
            return Ok(());
        }

        if self.bit_len + bits <= MAX_BIT_LEN {
            // Store zeros if bits is greater than 64
            // (padding is emitted by advancing `bit_len`; unused bits are zero).
            bits = if let Some(offset) = bits.checked_sub(64) {
                self.bit_len += offset;
                64
            } else {
                bits
            };

            // Ensure that value starts with significant bits
            // (low bits become zero, so any tail written below is zero too).
            value <<= 64 - bits;

            let q = (self.bit_len / 8) as usize;
            let r = self.bit_len % 8;
            // SAFETY: q is in range 0..=127, r is in range 0..=7
            unsafe {
                let data_ptr = self.data.as_mut_ptr().add(q);
                if r == 0 {
                    let byte_len = ((bits + 7) / 8) as usize;
                    debug_assert!(q + byte_len <= 128);

                    // Just append data
                    let value = value.to_be_bytes();
                    std::ptr::copy_nonoverlapping(value.as_ptr(), data_ptr, byte_len);
                } else {
                    debug_assert!(q < 128);

                    // Append high bits to the last byte
                    let shift = 8 - r;
                    *data_ptr |= (value >> (64 - shift)) as u8;

                    // If there are some bits left
                    if let Some(bits) = bits.checked_sub(shift) {
                        if bits > 0 {
                            let byte_len = ((bits + 7) / 8) as usize;
                            debug_assert!(q + 1 + byte_len <= 128);

                            // Make shifted bytes
                            let value: [u8; 8] = (value << shift).to_be_bytes();
                            // Write shifted bytes
                            std::ptr::copy_nonoverlapping(
                                value.as_ptr(),
                                data_ptr.add(1),
                                byte_len,
                            );
                        }
                    }
                }
            }
            self.bit_len += bits;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }
708
    /// Tries to store bytes in the cell (but only the specified number of bits),
    /// returning an error if there is not enough remaining capacity.
    ///
    /// NOTE: if `bits` is greater than `bytes * 8`, pads the value with zeros (as high bits).
    pub fn store_raw(&mut self, value: &[u8], bits: u16) -> Result<(), Error> {
        // Delegates to the free `store_raw` helper below.
        store_raw(&mut self.data, &mut self.bit_len, value, bits)
    }

    /// Tries to store all data bits of the specified cell in the current cell,
    /// returning an error if there is not enough remaining capacity.
    #[inline]
    pub fn store_cell_data<T>(&mut self, value: T) -> Result<(), Error>
    where
        T: AsRef<DynCell>,
    {
        // Non-generic inner fn keeps the body monomorphized only once.
        fn store_cell_data_impl(builder: &mut CellBuilder, value: &DynCell) -> Result<(), Error> {
            store_raw(
                &mut builder.data,
                &mut builder.bit_len,
                value.data(),
                value.bit_len(),
            )
        }
        store_cell_data_impl(self, value.as_ref())
    }
734
    /// Tries to store the remaining slice data in the cell,
    /// returning an error if there is not enough remaining capacity.
    #[inline]
    pub fn store_slice_data<'a, T>(&mut self, value: T) -> Result<(), Error>
    where
        T: AsRef<CellSlice<'a>>,
    {
        // Non-generic inner fn keeps the body monomorphized only once.
        fn store_slice_data_impl(
            builder: &mut CellBuilder,
            value: &CellSlice<'_>,
        ) -> Result<(), Error> {
            let bits = value.size_bits();
            if builder.bit_len + bits <= MAX_BIT_LEN {
                // Stack-allocated scratch buffer: the slice bits are first
                // copied out (possibly realigned) and then appended.
                // SAFETY: An uninitialized `[MaybeUninit<_>; LEN]` is valid.
                let mut slice_data =
                    unsafe { MaybeUninit::<[MaybeUninit<u8>; 128]>::uninit().assume_init() };

                // SAFETY: casting `slice_data` to a `*const [u8]` is safe since `MaybeUninit`
                // is guaranteed to have the same layout as `u8`.
                // The pointer obtained is valid since it refers to memory owned by `slice_data`
                // which is a reference and thus guaranteed to be valid for reads.
                let slice_data = unsafe {
                    &mut *(&mut slice_data as *mut [MaybeUninit<u8>; 128] as *mut [u8; 128])
                };

                // `get_raw` fills the buffer and returns the initialized prefix.
                let slice_data = ok!(value.get_raw(0, slice_data, bits));
                builder.store_raw(slice_data, bits)
            } else {
                Err(Error::CellOverflow)
            }
        }
        store_slice_data_impl(self, value.as_ref())
    }
768
    /// Tries to prepend bytes to the cell data (but only the specified number of bits),
    /// returning an error if there is not enough capacity.
    ///
    /// Rebuilds the whole data buffer: the new value is stored first,
    /// followed by the existing contents (an O(data) copy).
    ///
    /// NOTE: if `bits` is greater than `bytes * 8`, pads the value with zeros (as high bits).
    pub fn prepend_raw(&mut self, value: &[u8], bits: u16) -> Result<(), Error> {
        if bits == 0 {
            return Ok(());
        }

        // Prevent asm code bloat
        fn store_raw_impl(
            data: &mut [u8; 128],
            bit_len: &mut u16,
            value: &[u8],
            bits: u16,
        ) -> Result<(), Error> {
            store_raw(data, bit_len, value, bits)
        }

        if self.bit_len + bits <= MAX_BIT_LEN {
            // Build into a fresh zeroed buffer: new bits first, old bits after.
            let mut data = [0; 128];
            let mut bit_len = 0;
            ok!(store_raw_impl(&mut data, &mut bit_len, value, bits));
            ok!(store_raw_impl(
                &mut data,
                &mut bit_len,
                &self.data,
                self.bit_len
            ));
            self.data = data;
            self.bit_len = bit_len;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }
805}
806
/// Appends `bits` bits taken from `value` to `data` at bit offset `*bit_len`,
/// advancing `*bit_len` on success.
///
/// If `bits` exceeds `value.len() * 8`, the excess is emitted as leading zero
/// padding (by advancing `bit_len`; unused bits are already zero). Trailing
/// bits of the last written byte are masked off to preserve the invariant
/// that all bits past `bit_len` are zero.
// NOTE(review): `*bit_len + bits` can overflow `u16` for very large `bits`
// (panics in debug builds) — confirm callers bound `bits`.
#[inline]
fn store_raw(
    data: &mut [u8; 128],
    bit_len: &mut u16,
    value: &[u8],
    mut bits: u16,
) -> Result<(), Error> {
    if *bit_len + bits <= MAX_BIT_LEN {
        let max_bit_len = value.len().saturating_mul(8) as u16;
        // Clamp `bits` to the available value bits; the difference becomes
        // high-order zero padding.
        bits = if let Some(offset) = bits.checked_sub(max_bit_len) {
            *bit_len += offset;
            max_bit_len
        } else {
            bits
        };

        // Do nothing for empty slices or noop store
        if bits == 0 {
            return Ok(());
        }

        // q: complete bytes written, r: bits used in the next byte.
        let q = (*bit_len / 8) as usize;
        let r = *bit_len % 8;
        // SAFETY: q is in range 0..=127, r is in range 0..=7
        unsafe {
            let mut data_ptr = data.as_mut_ptr().add(q);
            let mut value_ptr = value.as_ptr();

            if r == 0 {
                // Byte-aligned: bulk copy, then mask the tail of the last byte.
                let byte_len = ((bits + 7) / 8) as usize;
                debug_assert!(q + byte_len <= 128);
                debug_assert!(byte_len <= value.len());

                std::ptr::copy_nonoverlapping(value_ptr, data_ptr, byte_len);

                let bits_r = bits % 8;
                if bits_r != 0 {
                    *data_ptr.add(byte_len - 1) &= 0xff << (8 - bits_r);
                }
            } else {
                // Unaligned: each source byte is split across two target bytes.
                let byte_len = ((bits + r + 7) / 8) as usize - 1;
                let value_len = ((bits + 7) / 8) as usize;
                debug_assert!(q + byte_len <= 128);
                debug_assert!(byte_len <= value_len && value_len <= value.len());

                let shift = 8 - r;
                for _ in 0..byte_len {
                    // High bits complete the current byte, low bits start the next.
                    *data_ptr |= *value_ptr >> r;
                    data_ptr = data_ptr.add(1);
                    *data_ptr = *value_ptr << shift;
                    value_ptr = value_ptr.add(1);
                }
                // One leftover source byte whose high bits still fit.
                if byte_len < value_len {
                    *data_ptr |= *value_ptr >> r;
                }

                // Mask off the tail bits of the final byte.
                let bits_r = (r + bits) % 8;
                if bits_r != 0 {
                    *data_ptr &= 0xff << (8 - bits_r);
                }
            }
        }
        *bit_len += bits;
        Ok(())
    } else {
        Err(Error::CellOverflow)
    }
}
875
876impl CellBuilder {
    /// Returns a slice of the child cells stored in the builder.
    #[inline]
    pub fn references(&self) -> &[Cell] {
        self.references.as_ref()
    }

    /// Tries to store a child in the cell,
    /// returning an error if `MAX_REF_COUNT` children are already stored.
    pub fn store_reference(&mut self, cell: Cell) -> Result<(), Error> {
        if self.references.len() < MAX_REF_COUNT {
            // SAFETY: reference count is in the valid range
            unsafe { self.references.push(cell) }
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Sets children of the cell.
    ///
    /// Replaces any previously stored references wholesale.
    pub fn set_references(&mut self, refs: CellRefsBuilder) {
        self.references = refs.0;
    }
899
    /// Tries to append a builder (its data and references),
    /// returning an error if there is not enough remaining capacity.
    ///
    /// Checks the combined capacity up-front so a failed call leaves `self`
    /// unmodified. The sums cannot overflow: both operands are bounded by
    /// `MAX_BIT_LEN` / `MAX_REF_COUNT` respectively.
    pub fn store_builder(&mut self, builder: &Self) -> Result<(), Error> {
        if self.bit_len + builder.bit_len <= MAX_BIT_LEN
            && self.references.len() + builder.references.len() <= MAX_REF_COUNT
        {
            ok!(self.store_raw(&builder.data, builder.bit_len));
            for cell in builder.references.as_ref() {
                ok!(self.store_reference(cell.clone()));
            }
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }
915
916    /// Tries to append a cell slice (its data and references),
917    /// returning `false` if there is not enough remaining capacity.
918    #[inline]
919    pub fn store_slice<'a, T>(&mut self, value: T) -> Result<(), Error>
920    where
921        T: AsRef<CellSlice<'a>>,
922    {
923        fn store_slice_impl(builder: &mut CellBuilder, value: &CellSlice<'_>) -> Result<(), Error> {
924            if builder.bit_len + value.size_bits() <= MAX_BIT_LEN
925                && builder.references.len() + value.size_refs() as usize <= MAX_REF_COUNT
926            {
927                ok!(builder.store_slice_data(value));
928                for cell in value.references().cloned() {
929                    ok!(builder.store_reference(cell));
930                }
931                Ok(())
932            } else {
933                Err(Error::CellOverflow)
934            }
935        }
936        store_slice_impl(self, value.as_ref())
937    }
938
939    /// Tries to build a new cell using the specified cell context.
940    pub fn build_ext(mut self, context: &mut dyn CellContext) -> Result<Cell, Error> {
941        debug_assert!(self.bit_len <= MAX_BIT_LEN);
942        debug_assert!(self.references.len() <= MAX_REF_COUNT);
943
944        #[cfg(feature = "stats")]
945        let mut stats = CellTreeStats {
946            bit_count: self.bit_len as u64,
947            cell_count: 1,
948        };
949
950        let mut children_mask = LevelMask::EMPTY;
951        for child in self.references.as_ref() {
952            let child = child.as_ref();
953            children_mask |= child.descriptor().level_mask();
954
955            #[cfg(feature = "stats")]
956            {
957                stats += child.stats();
958            }
959        }
960
961        let is_exotic = self.is_exotic;
962
963        let level_mask = 'mask: {
964            // NOTE: make only a brief check here, as it will raise a proper error in finalier
965            if is_exotic && self.bit_len >= 8 {
966                if let Some(ty) = CellType::from_byte_exotic(self.data[0]) {
967                    match ty {
968                        CellType::PrunedBranch => break 'mask LevelMask::new(self.data[1]),
969                        CellType::MerkleProof | CellType::MerkleUpdate => {
970                            break 'mask children_mask.virtualize(1)
971                        }
972                        CellType::LibraryReference => break 'mask LevelMask::EMPTY,
973                        _ => {}
974                    };
975                }
976            }
977
978            children_mask
979        };
980
981        let d1 = CellDescriptor::compute_d1(level_mask, is_exotic, self.references.len() as u8);
982        let d2 = CellDescriptor::compute_d2(self.bit_len);
983
984        let rem = self.bit_len % 8;
985        let last_byte = (self.bit_len / 8) as usize;
986        if rem > 0 {
987            // SAFETY: `last_byte` is in the valid range
988            let last_byte = unsafe { self.data.get_unchecked_mut(last_byte) };
989
990            // x0000000 - rem=1, tag_mask=01000000, data_mask=11000000
991            // xx000000 - rem=2, tag_mask=00100000, data_mask=11100000
992            // xxx00000 - rem=3, tag_mask=00010000, data_mask=11110000
993            // xxxx0000 - rem=4, tag_mask=00001000, data_mask=11111000
994            // xxxxx000 - rem=5, tag_mask=00000100, data_mask=11111100
995            // xxxxxx00 - rem=6, tag_mask=00000010, data_mask=11111110
996            // xxxxxxx0 - rem=7, tag_mask=00000001, data_mask=11111111
997            let tag_mask: u8 = 1 << (7 - rem);
998            let data_mask = !(tag_mask - 1);
999
1000            // xxxxyyyy & data_mask -> xxxxy000 | tag_mask -> xxxx1000
1001            *last_byte = (*last_byte & data_mask) | tag_mask;
1002        }
1003
1004        let byte_len = (self.bit_len + 7) / 8;
1005        let data = &self.data[..std::cmp::min(byte_len as usize, 128)];
1006
1007        let cell_parts = CellParts {
1008            #[cfg(feature = "stats")]
1009            stats,
1010            bit_len: self.bit_len,
1011            descriptor: CellDescriptor { d1, d2 },
1012            children_mask,
1013            references: self.references,
1014            data,
1015        };
1016        context.finalize_cell(cell_parts)
1017    }
1018
1019    /// Tries to build a new cell using the default cell context.
1020    ///
1021    /// See [`empty_context`]
1022    ///
1023    /// [`empty_context`]: fn@CellFamily::empty_context
1024    pub fn build(self) -> Result<Cell, Error> {
1025        self.build_ext(&mut Cell::empty_context())
1026    }
1027
1028    /// Returns an object which will display data as a bitstring
1029    /// with a termination bit.
1030    pub fn display_data(&self) -> impl std::fmt::Display + std::fmt::Binary + '_ {
1031        struct DisplayData<'a>(&'a CellBuilder);
1032
1033        impl std::fmt::Display for DisplayData<'_> {
1034            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1035                std::fmt::Display::fmt(
1036                    &Bitstring {
1037                        bytes: &self.0.data,
1038                        bit_len: self.0.bit_len,
1039                    },
1040                    f,
1041                )
1042            }
1043        }
1044
1045        impl std::fmt::Binary for DisplayData<'_> {
1046            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1047                std::fmt::Binary::fmt(
1048                    &Bitstring {
1049                        bytes: &self.0.data,
1050                        bit_len: self.0.bit_len,
1051                    },
1052                    f,
1053                )
1054            }
1055        }
1056
1057        DisplayData(self)
1058    }
1059}
1060
/// Builder for constructing cell references array.
///
/// Holds at most [`MAX_REF_COUNT`] child cells.
///
/// Can be used later for [`CellBuilder::set_references`].
#[derive(Default)]
#[repr(transparent)]
pub struct CellRefsBuilder(ArrayVec<Cell, MAX_REF_COUNT>);
1067
1068impl CellRefsBuilder {
1069    /// Tries to store a child in the cell,
1070    /// returning `false` if there is not enough remaining capacity.
1071    pub fn store_reference(&mut self, cell: Cell) -> Result<(), Error> {
1072        if self.0.len() < MAX_REF_COUNT {
1073            // SAFETY: reference count is in the valid range
1074            unsafe { self.0.push(cell) }
1075            Ok(())
1076        } else {
1077            Err(Error::CellOverflow)
1078        }
1079    }
1080
1081    /// Computes children level mask as a combination of all level masks.
1082    pub fn compute_level_mask(&self) -> LevelMask {
1083        let mut result = LevelMask::EMPTY;
1084        for child in self.0.as_ref() {
1085            result |= child.as_ref().level_mask();
1086        }
1087        result
1088    }
1089}
1090
/// Cell view over a [`CellBuilder`] that exposes only its data bits:
/// the [`CellImpl`] implementation below reports zero references.
#[repr(transparent)]
struct IntermediateDataCell(CellBuilder);
1093
impl IntermediateDataCell {
    /// Reinterprets a [`CellBuilder`] reference as an
    /// `IntermediateDataCell` reference without copying.
    #[inline(always)]
    const fn wrap(value: &CellBuilder) -> &Self {
        // SAFETY: IntermediateDataCell is #[repr(transparent)]
        unsafe { &*(value as *const CellBuilder as *const Self) }
    }
}
1101
// Minimal `CellImpl` over the builder's raw data: no references, no
// hashes (hash access panics), zero depth.
impl CellImpl for IntermediateDataCell {
    fn untrack(self: CellInner<Self>) -> Cell {
        Cell(self)
    }

    fn descriptor(&self) -> CellDescriptor {
        // d1 = 0: no exposed references and an empty level mask.
        CellDescriptor {
            d1: 0,
            d2: CellDescriptor::compute_d2(self.0.bit_len),
        }
    }

    fn data(&self) -> &[u8] {
        self.0.raw_data()
    }

    fn bit_len(&self) -> u16 {
        self.0.bit_len
    }

    // References are intentionally hidden: only data bits are exposed.
    fn reference(&self, _: u8) -> Option<&DynCell> {
        None
    }

    fn reference_cloned(&self, _: u8) -> Option<Cell> {
        None
    }

    fn virtualize(&self) -> &DynCell {
        self
    }

    // Intermediate cells are never finalized, so no hashes exist.
    fn hash(&self, _: u8) -> &HashBytes {
        panic!("Hash for an intermediate data cell is not defined");
    }

    fn depth(&self, _: u8) -> u16 {
        0
    }

    fn take_first_child(&mut self) -> Option<Cell> {
        None
    }

    fn replace_first_child(&mut self, parent: Cell) -> Result<Cell, Cell> {
        Err(parent)
    }

    fn take_next_child(&mut self) -> Option<Cell> {
        None
    }

    #[cfg(feature = "stats")]
    fn stats(&self) -> CellTreeStats {
        // Only this cell's own bits are counted (no children exposed).
        CellTreeStats {
            bit_count: self.0.bit_len as u64,
            cell_count: 1,
        }
    }
}
1162
/// Cell view over a [`CellBuilder`] that exposes both its data bits and
/// its child references (see the [`CellImpl`] implementation below).
#[repr(transparent)]
struct IntermediateFullCell(CellBuilder);
1165
impl IntermediateFullCell {
    /// Reinterprets a [`CellBuilder`] reference as an
    /// `IntermediateFullCell` reference without copying.
    #[inline(always)]
    const fn wrap(value: &CellBuilder) -> &Self {
        // SAFETY: IntermediateFullCell is #[repr(transparent)]
        unsafe { &*(value as *const CellBuilder as *const Self) }
    }
}
1173
1174impl CellImpl for IntermediateFullCell {
1175    fn untrack(self: CellInner<Self>) -> Cell {
1176        Cell(self)
1177    }
1178
1179    fn descriptor(&self) -> CellDescriptor {
1180        CellDescriptor {
1181            d1: CellDescriptor::compute_d1(LevelMask::EMPTY, false, self.0.references.len() as u8),
1182            d2: CellDescriptor::compute_d2(self.0.bit_len),
1183        }
1184    }
1185
1186    fn data(&self) -> &[u8] {
1187        self.0.raw_data()
1188    }
1189
1190    fn bit_len(&self) -> u16 {
1191        self.0.bit_len
1192    }
1193
1194    fn reference(&self, index: u8) -> Option<&DynCell> {
1195        match self.0.references.get(index) {
1196            Some(cell) => Some(cell.as_ref()),
1197            None => None,
1198        }
1199    }
1200
1201    fn reference_cloned(&self, index: u8) -> Option<Cell> {
1202        self.0.references.get(index).cloned()
1203    }
1204
1205    fn virtualize(&self) -> &DynCell {
1206        self
1207    }
1208
1209    fn hash(&self, _: u8) -> &HashBytes {
1210        panic!("Hash for an intermediate data cell is not defined");
1211    }
1212
1213    fn depth(&self, _: u8) -> u16 {
1214        0
1215    }
1216
1217    fn take_first_child(&mut self) -> Option<Cell> {
1218        None
1219    }
1220
1221    fn replace_first_child(&mut self, parent: Cell) -> Result<Cell, Cell> {
1222        Err(parent)
1223    }
1224
1225    fn take_next_child(&mut self) -> Option<Cell> {
1226        None
1227    }
1228
1229    #[cfg(feature = "stats")]
1230    fn stats(&self) -> CellTreeStats {
1231        CellTreeStats {
1232            bit_count: self.0.bit_len as u64,
1233            cell_count: 1 + self.0.references.len() as u64,
1234        }
1235    }
1236}
1237
#[cfg(test)]
mod tests {
    use super::*;

    // Cloning a builder must yield independent, equal snapshots;
    // mutating the original afterwards must not affect earlier clones.
    #[test]
    fn clone_builder() {
        let mut builder = CellBuilder::new();
        builder.store_u32(0xdeafbeaf).unwrap();
        let cell1 = builder.clone().build().unwrap();
        let cell2 = builder.clone().build().unwrap();
        assert_eq!(cell1.as_ref(), cell2.as_ref());

        builder.store_u32(0xb00b5).unwrap();
        let cell3 = builder.build().unwrap();
        assert_ne!(cell1.as_ref(), cell3.as_ref());
    }

    // Builder ordering: first by data bits, then by references.
    #[test]
    fn compare_builders() {
        let mut a = CellBuilder::new();
        a.store_u32(0xdeafbeaf).unwrap();

        let mut b = CellBuilder::new();
        b.store_u32(0xdeafbeaf).unwrap();

        assert_eq!(a, b);

        // Longer bitstring compares greater than its prefix; equal-length
        // bitstrings compare by content.
        b.store_u8(1).unwrap();
        assert!(a < b);
        a.store_u8(2).unwrap();
        assert!(a > b);

        a.rewind(8).unwrap();
        a.store_u8(1).unwrap();
        assert_eq!(a, b);

        // A builder with a reference compares greater than one without.
        let child_a = a.clone().build().unwrap();
        a.store_reference(child_a.clone()).unwrap();
        assert!(a > b);

        let child_b = b.clone().build().unwrap();
        b.store_reference(child_b).unwrap();
        assert_eq!(a, b);

        // Same ref count but different children -> not equal.
        let child_b2 = b.clone().build().unwrap();
        a.store_reference(child_a).unwrap();
        b.store_reference(child_b2).unwrap();
        assert_ne!(a, b);
    }

    // `rewind` must shorten the bitstring AND zero the removed bits so
    // that later stores don't pick up stale data.
    #[test]
    fn rewind_builder() {
        let mut builder = CellBuilder::new();
        builder.store_u32(0xdeafbeaf).unwrap();
        assert_eq!(builder.size_bits(), 32);
        assert_eq!(builder.data[..4], 0xdeafbeaf_u32.to_be_bytes());

        // Rewinding 5 bits clears the low 5 bits of the last byte.
        builder.rewind(5).unwrap();
        assert_eq!(builder.size_bits(), 27);
        assert_eq!(builder.data[..4], 0xdeafbea0_u32.to_be_bytes());

        // Unaligned store after rewind: new bits are shifted in after
        // the 27 remaining bits.
        builder.store_u32(0xdeafbeaf).unwrap();
        assert_eq!(builder.size_bits(), 32 + 27);
        assert_eq!(
            builder.data[..8],
            [0xde, 0xaf, 0xbe, 0xbb, 0xd5, 0xf7, 0xd5, 0xe0]
        );
        builder.rewind(32).unwrap();
        assert_eq!(
            builder.data[..8],
            [0xde, 0xaf, 0xbe, 0xa0, 0x00, 0x00, 0x00, 0x00]
        );

        // Rewinding past the stored length must fail.
        assert_eq!(builder.rewind(32), Err(Error::CellUnderflow));

        builder.rewind(27).unwrap();
        assert_eq!(builder.size_bits(), 0);
        assert_eq!(builder.data, [0u8; 128]);

        // Fill to the maximum capacity and rewind across a byte boundary.
        builder.store_raw(&[0xff; 128], MAX_BIT_LEN).unwrap();
        assert_eq!(builder.size_bits(), MAX_BIT_LEN);

        let mut target = [0xff; 128];
        target[127] = 0xfe;
        assert_eq!(builder.data, target);

        builder.rewind(3).unwrap();
        assert_eq!(builder.size_bits(), MAX_BIT_LEN - 3);
        target[127] = 0xf0;
        assert_eq!(builder.data, target);

        builder.rewind(8).unwrap();
        assert_eq!(builder.size_bits(), MAX_BIT_LEN - 3 - 8);
        target[126] = 0xf0;
        target[127] = 0x00;
        assert_eq!(builder.data, target);
    }

    // Exhaustive smoke test: every bit offset x every bit length must
    // store and finalize without error.
    #[test]
    #[cfg_attr(miri, ignore)] // takes too long to execute on miri
    fn store_raw() {
        const ONES: &[u8] = &[0xff; 128];
        for offset in 0..8 {
            for bits in 0..=1016 {
                let mut builder = CellBuilder::new();
                builder.store_zeros(offset).unwrap();
                builder.store_raw(ONES, bits).unwrap();
                builder.build().unwrap();
            }
        }
    }

    // Smoke test for prepending bits before already-stored data.
    #[test]
    fn prepend_raw() {
        let mut builder = CellBuilder::new();
        builder.store_raw(&[0xde, 0xaf, 0xbe, 0xaf], 20).unwrap();
        builder.prepend_raw(&[0xaa, 0x55], 5).unwrap();
        let cell = builder.build().unwrap();
        println!("{}", cell.display_tree());
    }

    // Storing a bit-shifted slice must realign the data: skipping the
    // 3 leading zero bits recovers the original 256-bit payload.
    #[test]
    fn store_slice() -> anyhow::Result<()> {
        const SOME_HASH: &HashBytes = HashBytes::wrap(&[
            0xdf, 0x86, 0xce, 0xbc, 0xe8, 0xd5, 0xab, 0x0c, 0x69, 0xb4, 0xce, 0x33, 0xfe, 0x9b,
            0x0e, 0x2c, 0xdf, 0x69, 0xa3, 0xe1, 0x13, 0x7e, 0x64, 0x85, 0x6b, 0xbc, 0xfd, 0x39,
            0xe7, 0x9b, 0xc1, 0x6f,
        ]);

        let mut builder = CellBuilder::new();
        builder.store_zeros(3)?;
        builder.store_u256(SOME_HASH)?;
        let cell = builder.build()?;
        println!("{}", cell.display_tree());

        let mut builder = CellBuilder::new();
        let mut slice = cell.as_slice()?;
        // skip_first(3, 0): drop 3 data bits and 0 refs — TODO confirm
        // argument meaning against `CellSlice::skip_first`.
        slice.skip_first(3, 0)?;
        builder.store_slice(slice)?;
        let cell = builder.build()?;
        println!("{}", cell.display_tree());
        assert_eq!(cell.data(), SOME_HASH);

        Ok(())
    }
}