use std::cmp::Ordering;
use std::mem::MaybeUninit;
use std::rc::Rc;
use std::sync::Arc;

use super::CellFamily;
#[cfg(feature = "stats")]
use super::CellTreeStats;
use crate::cell::cell_context::{CellContext, CellParts};
use crate::cell::{
    Cell, CellDescriptor, CellImpl, CellInner, CellSlice, CellType, DynCell, HashBytes, LevelMask,
    Size, MAX_BIT_LEN, MAX_REF_COUNT,
};
use crate::error::Error;
use crate::util::{ArrayVec, ArrayVecIntoIter, Bitstring};

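/// A type that can be stored into a [`CellBuilder`].
///
/// A minimal sketch of a manual implementation for a hypothetical two-field
/// struct (the struct and its field names are illustrative only; in practice
/// such impls are usually generated by a derive macro):
///
/// ```ignore
/// struct Pair {
///     value: u32,
///     flag: bool,
/// }
///
/// impl Store for Pair {
///     fn store_into(
///         &self,
///         builder: &mut CellBuilder,
///         context: &dyn CellContext,
///     ) -> Result<(), Error> {
///         ok!(self.value.store_into(builder, context));
///         self.flag.store_into(builder, context)
///     }
/// }
/// ```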
pub trait Store {
    /// Tries to store itself into the cell builder.
    fn store_into(&self, builder: &mut CellBuilder, context: &dyn CellContext)
        -> Result<(), Error>;
}

impl<T: Store + ?Sized> Store for &T {
    #[inline]
    fn store_into(
        &self,
        builder: &mut CellBuilder,
        context: &dyn CellContext,
    ) -> Result<(), Error> {
        <T as Store>::store_into(self, builder, context)
    }
}

impl<T: Store + ?Sized> Store for &mut T {
    #[inline]
    fn store_into(
        &self,
        builder: &mut CellBuilder,
        context: &dyn CellContext,
    ) -> Result<(), Error> {
        <T as Store>::store_into(self, builder, context)
    }
}

impl<T: Store + ?Sized> Store for Box<T> {
    #[inline]
    fn store_into(
        &self,
        builder: &mut CellBuilder,
        context: &dyn CellContext,
    ) -> Result<(), Error> {
        <T as Store>::store_into(self.as_ref(), builder, context)
    }
}

impl<T: Store + ?Sized> Store for Arc<T> {
    #[inline]
    fn store_into(
        &self,
        builder: &mut CellBuilder,
        context: &dyn CellContext,
    ) -> Result<(), Error> {
        <T as Store>::store_into(self.as_ref(), builder, context)
    }
}

impl<T: Store + ?Sized> Store for Rc<T> {
    #[inline]
    fn store_into(
        &self,
        builder: &mut CellBuilder,
        context: &dyn CellContext,
    ) -> Result<(), Error> {
        <T as Store>::store_into(self.as_ref(), builder, context)
    }
}

impl Store for () {
    #[inline]
    fn store_into(&self, _: &mut CellBuilder, _: &dyn CellContext) -> Result<(), Error> {
        Ok(())
    }
}

// Tuples are stored field by field, in declaration order.
macro_rules! impl_store_for_tuples {
    ($( ($($field:ident: $t:ident),+) ),*$(,)?) => {$(
        impl<$($t: Store),+> Store for ($($t),*) {
            fn store_into(
                &self,
                builder: &mut CellBuilder,
                context: &dyn CellContext
            ) -> Result<(), Error> {
                let ($($field),+) = self;
                $(ok!($field.store_into(builder, context)));*;
                Ok(())
            }
        }
    )*};
}

impl_store_for_tuples! {
    (t1: T1, t2: T2),
    (t1: T1, t2: T2, t3: T3),
    (t1: T1, t2: T2, t3: T3, t4: T4),
    (t1: T1, t2: T2, t3: T3, t4: T4, t5: T5),
    (t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6),
}

// `Option<T>` is serialized as a single presence bit,
// followed by the value itself if it is `Some`.
impl<T: Store> Store for Option<T> {
    #[inline]
    fn store_into(
        &self,
        builder: &mut CellBuilder,
        context: &dyn CellContext,
    ) -> Result<(), Error> {
        match self {
            Some(data) => {
                ok!(builder.store_bit_one());
                data.store_into(builder, context)
            }
            None => builder.store_bit_zero(),
        }
    }
}

impl Store for CellBuilder {
    #[inline]
    fn store_into(&self, builder: &mut CellBuilder, _: &dyn CellContext) -> Result<(), Error> {
        builder.store_builder(self)
    }
}

impl Store for CellSlice<'_> {
    #[inline]
    fn store_into(&self, builder: &mut CellBuilder, _: &dyn CellContext) -> Result<(), Error> {
        builder.store_slice(self)
    }
}

impl Store for Cell {
    #[inline]
    fn store_into(&self, builder: &mut CellBuilder, _: &dyn CellContext) -> Result<(), Error> {
        builder.store_reference(self.clone())
    }
}

macro_rules! impl_primitive_store {
    ($($type:ty => |$b:ident, $v:ident| $expr:expr),*$(,)?) => {
        $(impl Store for $type {
            #[inline]
            fn store_into(&self,
                $b: &mut CellBuilder,
                _: &dyn CellContext
            ) -> Result<(), Error> {
                let $v = self;
                $expr
            }
        })*
    };
}

impl_primitive_store! {
    bool => |b, v| b.store_bit(*v),
    u8 => |b, v| b.store_u8(*v),
    i8 => |b, v| b.store_u8(*v as u8),
    u16 => |b, v| b.store_u16(*v),
    i16 => |b, v| b.store_u16(*v as u16),
    u32 => |b, v| b.store_u32(*v),
    i32 => |b, v| b.store_u32(*v as u32),
    u64 => |b, v| b.store_u64(*v),
    i64 => |b, v| b.store_u64(*v as u64),
    u128 => |b, v| b.store_u128(*v),
    i128 => |b, v| b.store_u128(*v as u128),
    HashBytes => |b, v| b.store_u256(v),
}

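/// A typed builder for constructing a [`Cell`]: up to [`MAX_BIT_LEN`] bits of
/// data plus up to [`MAX_REF_COUNT`] child references.
///
/// A minimal usage sketch (`child_cell` is a placeholder for any
/// previously built cell):
///
/// ```ignore
/// let mut builder = CellBuilder::new();
/// builder.store_u32(0xdeadbeef)?;
/// builder.store_bit_one()?;
/// builder.store_reference(child_cell)?;
/// let cell = builder.build()?;
/// ```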
#[derive(Default, Clone)]
pub struct CellBuilder {
    inner: CellDataBuilder,
    is_exotic: bool,
    references: ArrayVec<Cell, MAX_REF_COUNT>,
}

impl Eq for CellBuilder {}

impl PartialEq for CellBuilder {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.is_exotic == other.is_exotic
            && self.inner == other.inner
            && self.references.as_ref() == other.references.as_ref()
    }
}

impl Ord for CellBuilder {
    fn cmp(&self, other: &Self) -> Ordering {
        match (self.inner.bit_len, self.references.len())
            .cmp(&(other.inner.bit_len, other.references.len()))
        {
            Ordering::Equal => {}
            ord => return ord,
        }

        match self.inner.data.cmp(&other.inner.data) {
            Ordering::Equal => {}
            ord => return ord,
        }

        for (a, b) in self.references().iter().zip(other.references().iter()) {
            match a.repr_hash().cmp(b.repr_hash()) {
                Ordering::Equal => {}
                ord => return ord,
            }
        }

        Ordering::Equal
    }
}

impl PartialOrd for CellBuilder {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl std::fmt::Debug for CellBuilder {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        #[repr(transparent)]
        struct Data<'a, T>(&'a T);

        impl<T: std::fmt::Display> std::fmt::Debug for Data<'_, T> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                std::fmt::Display::fmt(self.0, f)
            }
        }

        f.debug_struct("CellBuilder")
            .field("data", &Data(&self.inner.display_data()))
            .field("bit_len", &self.inner.bit_len)
            .field("is_exotic", &self.is_exotic)
            .field("references", &self.references.as_ref())
            .finish()
    }
}

impl std::ops::Deref for CellBuilder {
    type Target = CellDataBuilder;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

impl std::ops::DerefMut for CellBuilder {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}

impl AsRef<CellDataBuilder> for CellBuilder {
    #[inline]
    fn as_ref(&self) -> &CellDataBuilder {
        &self.inner
    }
}

impl AsMut<CellDataBuilder> for CellBuilder {
    #[inline]
    fn as_mut(&mut self) -> &mut CellDataBuilder {
        &mut self.inner
    }
}

impl From<CellDataBuilder> for CellBuilder {
    #[inline]
    fn from(inner: CellDataBuilder) -> Self {
        Self {
            inner,
            is_exotic: false,
            references: ArrayVec::new(),
        }
    }
}

impl CellBuilder {
    /// Builds a new cell from the value using an empty cell context.
    #[inline]
    pub fn build_from<T>(data: T) -> Result<Cell, Error>
    where
        T: Store,
    {
        Self::build_from_ext(data, Cell::empty_context())
    }

    /// Builds a new cell from the value using the specified cell context.
    #[inline]
    pub fn build_from_ext<T>(data: T, context: &dyn CellContext) -> Result<Cell, Error>
    where
        T: Store,
    {
        fn build_from_ext_impl(data: &dyn Store, context: &dyn CellContext) -> Result<Cell, Error> {
            let mut builder = CellBuilder::new();
            ok!(data.store_into(&mut builder, context));
            builder.build_ext(context)
        }
        build_from_ext_impl(&data, context)
    }

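    /// Builds a new library cell referencing code with the given hash,
    /// using an empty cell context.
    ///
    /// A sketch of the resulting layout: an exotic cell whose data is one
    /// cell type byte followed by the 256-bit library hash, with no references.
    ///
    /// ```ignore
    /// let lib = CellBuilder::build_library(HashBytes::wrap(&[0x55; 32]));
    /// assert!(lib.descriptor().is_exotic());
    /// ```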
    #[inline]
    pub fn build_library<T: AsRef<[u8; 32]>>(hash: &T) -> Cell {
        Self::build_library_ext(hash, Cell::empty_context())
    }

    /// Builds a new library cell referencing code with the given hash,
    /// using the specified cell context.
    #[inline]
    pub fn build_library_ext<T: AsRef<[u8; 32]>>(hash: &T, context: &dyn CellContext) -> Cell {
        fn build_library_ext_impl(
            hash: &[u8; 32],
            context: &dyn CellContext,
        ) -> Result<Cell, Error> {
            let mut b = CellBuilder::new();
            b.set_exotic(true);
            ok!(b.store_u8(CellType::LibraryReference.to_byte()));
            ok!(b.store_u256(HashBytes::wrap(hash)));
            b.build_ext(context)
        }
        // A fixed-size hash always fits and forms a valid library cell.
        build_library_ext_impl(hash.as_ref(), context).unwrap()
    }

    /// Creates an empty cell builder.
    pub const fn new() -> Self {
        Self {
            inner: CellDataBuilder::new(),
            is_exotic: false,
            references: ArrayVec::new(),
        }
    }

    /// Creates a cell builder from the specified data, starting from the first bit.
    pub fn from_raw_data(value: &[u8], bits: u16) -> Result<Self, Error> {
        Ok(Self {
            inner: ok!(CellDataBuilder::from_raw_data(value, bits)),
            is_exotic: false,
            references: ArrayVec::new(),
        })
    }

    /// Creates a cell builder from its parts.
    #[inline]
    pub fn from_parts(is_exotic: bool, data: CellDataBuilder, refs: CellRefsBuilder) -> Self {
        Self {
            inner: data,
            is_exotic,
            references: refs.0,
        }
    }

    /// Splits the cell builder into its data and references parts.
    #[inline]
    pub fn into_parts(self) -> (bool, CellDataBuilder, CellRefsBuilder) {
        (self.is_exotic, self.inner, CellRefsBuilder(self.references))
    }

    /// Returns a slice over the stored data and references.
    pub fn as_full_slice(&self) -> CellSlice<'_> {
        CellSlice::new_allow_exotic(IntermediateFullCell::wrap(self))
    }

    /// Returns the stored data size in bits and the number of child references.
    pub const fn size(&self) -> Size {
        Size {
            bits: self.inner.bit_len,
            refs: self.references.len() as u8,
        }
    }

    /// Returns the number of child references.
    #[inline(always)]
    pub const fn size_refs(&self) -> u8 {
        self.references.len() as u8
    }

    /// Returns the remaining data and reference capacity.
    pub const fn spare_capacity(&self) -> Size {
        Size {
            bits: self.inner.spare_capacity_bits(),
            refs: self.spare_capacity_refs(),
        }
    }

    /// Returns the number of child references that can still be stored.
    #[inline]
    pub const fn spare_capacity_refs(&self) -> u8 {
        (MAX_REF_COUNT - self.references.len()) as u8
    }

    /// Returns true if the builder can fit `bits` more bits and `refs` more references.
    #[inline]
    pub const fn has_capacity(&self, bits: u16, refs: u8) -> bool {
        self.inner.bit_len + bits <= MAX_BIT_LEN
            && self.references.len() + refs as usize <= MAX_REF_COUNT
    }

    /// Returns whether the cell will be built as an exotic cell.
    #[inline]
    pub const fn is_exotic(&self) -> bool {
        self.is_exotic
    }

    /// Marks the cell as exotic (or ordinary).
    #[inline]
    pub fn set_exotic(&mut self, is_exotic: bool) {
        self.is_exotic = is_exotic;
    }

    /// Returns the child references stored so far.
    #[inline]
    pub fn references(&self) -> &[Cell] {
        self.references.as_ref()
    }

    /// Tries to store a child reference in the cell,
    /// returning an error if the reference count limit is reached.
    pub fn store_reference(&mut self, cell: Cell) -> Result<(), Error> {
        if self.references.len() < MAX_REF_COUNT {
            // SAFETY: we have just checked that there is enough capacity.
            unsafe { self.references.push(cell) }
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Replaces the child references with the given ones.
    pub fn set_references(&mut self, refs: CellRefsBuilder) {
        self.references = refs.0;
    }

    /// Tries to append the data and references of the other builder,
    /// returning an error if the result does not fit into a single cell.
    pub fn store_builder(&mut self, builder: &Self) -> Result<(), Error> {
        if self.inner.bit_len + builder.inner.bit_len <= MAX_BIT_LEN
            && self.references.len() + builder.references.len() <= MAX_REF_COUNT
        {
            ok!(self.store_raw(&builder.inner.data, builder.inner.bit_len));
            for cell in builder.references.as_ref() {
                ok!(self.store_reference(cell.clone()));
            }
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Tries to append the remaining data and references of the slice,
    /// returning an error if the result does not fit into a single cell.
    #[inline]
    pub fn store_slice<'a, T>(&mut self, value: T) -> Result<(), Error>
    where
        T: AsRef<CellSlice<'a>>,
    {
        fn store_slice_impl(builder: &mut CellBuilder, value: &CellSlice<'_>) -> Result<(), Error> {
            if builder.inner.bit_len + value.size_bits() <= MAX_BIT_LEN
                && builder.references.len() + value.size_refs() as usize <= MAX_REF_COUNT
            {
                ok!(builder.store_slice_data(value));
                for cell in value.references().cloned() {
                    ok!(builder.store_reference(cell));
                }
                Ok(())
            } else {
                Err(Error::CellOverflow)
            }
        }
        store_slice_impl(self, value.as_ref())
    }

    /// Tries to build a new cell using the specified cell context.
    pub fn build_ext(mut self, context: &dyn CellContext) -> Result<Cell, Error> {
        debug_assert!(self.inner.bit_len <= MAX_BIT_LEN);
        debug_assert!(self.references.len() <= MAX_REF_COUNT);

        #[cfg(feature = "stats")]
        let mut stats = CellTreeStats {
            bit_count: self.bit_len as u64,
            cell_count: 1,
        };

        let mut children_mask = LevelMask::EMPTY;
        for child in self.references.as_ref() {
            let child = child.as_ref();
            children_mask |= child.descriptor().level_mask();

            #[cfg(feature = "stats")]
            {
                stats += child.stats();
            }
        }

        let is_exotic = self.is_exotic;

        let level_mask = 'mask: {
            // Some exotic cells use a non-standard level mask.
            if is_exotic && self.inner.bit_len >= 8 {
                if let Some(ty) = CellType::from_byte_exotic(self.data[0]) {
                    match ty {
                        CellType::PrunedBranch => break 'mask LevelMask::new(self.data[1]),
                        CellType::MerkleProof | CellType::MerkleUpdate => {
                            break 'mask children_mask.virtualize(1)
                        }
                        CellType::LibraryReference => break 'mask LevelMask::EMPTY,
                        _ => {}
                    };
                }
            }

            children_mask
        };

        let d1 = CellDescriptor::compute_d1(level_mask, is_exotic, self.references.len() as u8);
        let d2 = CellDescriptor::compute_d2(self.inner.bit_len);

        // Append the completion tag if the data is not byte-aligned.
        let rem = self.inner.bit_len % 8;
        let last_byte = (self.inner.bit_len / 8) as usize;
        if rem > 0 {
            // SAFETY: `last_byte` is in bounds since `bit_len <= MAX_BIT_LEN`
            // and `rem > 0`.
            let last_byte = unsafe { self.inner.data.get_unchecked_mut(last_byte) };

            // Set the tag bit right after the last data bit
            // and clear all bits after the tag.
            let tag_mask: u8 = 1 << (7 - rem);
            let data_mask = !(tag_mask - 1);

            *last_byte = (*last_byte & data_mask) | tag_mask;
        }

        let byte_len = self.inner.bit_len.div_ceil(8);
        let data = &self.inner.data[..std::cmp::min(byte_len as usize, 128)];

        let cell_parts = CellParts {
            #[cfg(feature = "stats")]
            stats,
            bit_len: self.inner.bit_len,
            descriptor: CellDescriptor { d1, d2 },
            children_mask,
            references: self.references,
            data,
        };
        context.finalize_cell(cell_parts)
    }

    /// Tries to build a new cell using an empty cell context.
    pub fn build(self) -> Result<Cell, Error> {
        self.build_ext(Cell::empty_context())
    }
}

/// Helper struct to print the data of a cell builder as a bitstring.
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct DisplayCellBuilderData<'a>(&'a CellDataBuilder);

impl std::fmt::Display for DisplayCellBuilderData<'_> {
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::LowerHex::fmt(self, f)
    }
}

impl std::fmt::LowerHex for DisplayCellBuilderData<'_> {
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::LowerHex::fmt(&self.0.as_bitstring(), f)
    }
}

impl std::fmt::UpperHex for DisplayCellBuilderData<'_> {
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::UpperHex::fmt(&self.0.as_bitstring(), f)
    }
}

impl std::fmt::Binary for DisplayCellBuilderData<'_> {
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Binary::fmt(&self.0.as_bitstring(), f)
    }
}

#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for CellBuilder {
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        match u.arbitrary::<CellType>()? {
            CellType::Ordinary => {
                let bit_len = u.int_in_range(0..=MAX_BIT_LEN)?;
                let refs = u.int_in_range(0..=4)?;

                let mut b = CellBuilder::new();
                b.store_raw(u.bytes(bit_len.div_ceil(8) as _)?, bit_len)
                    .unwrap();

                let mut children = ArrayVec::<Cell, 4>::new();
                for i in 0..refs as u8 {
                    let cell = 'cell: {
                        // Occasionally reuse a previously generated child.
                        if i > 0 {
                            if let Some(i) = u.int_in_range(0..=i)?.checked_sub(1) {
                                break 'cell children.get(i).cloned().unwrap();
                            }
                        }

                        u.arbitrary::<Cell>()
                            .and_then(crate::arbitrary::check_max_depth)?
                    };

                    b.store_reference(cell.clone()).unwrap();

                    // SAFETY: `children` can hold all four possible references.
                    unsafe { children.push(cell) };
                }

                Ok(b)
            }
            CellType::PrunedBranch => {
                let level_mask = LevelMask::new(u.int_in_range(0b001..=0b111)?);

                let mut b = CellBuilder::new();
                b.set_exotic(true);
                b.store_u16(u16::from_be_bytes([
                    CellType::PrunedBranch.to_byte(),
                    level_mask.to_byte(),
                ]))
                .unwrap();

                let level_count = level_mask.level() as usize;

                let hashes = 32 * level_count;
                b.store_raw(u.bytes(hashes)?, hashes as u16 * 8).unwrap();

                for _ in 0..level_count {
                    b.store_u16(u.int_in_range(0..=(u16::MAX - 1))?).unwrap();
                }

                Ok(b)
            }
            CellType::LibraryReference => {
                let hash = u.bytes(32)?;

                let mut b = CellBuilder::new();
                b.set_exotic(true);
                b.store_u8(CellType::LibraryReference.to_byte()).unwrap();
                b.store_raw(hash, 256).unwrap();
                Ok(b)
            }
            CellType::MerkleProof => {
                let mut b = CellBuilder::new();
                u.arbitrary::<crate::merkle::MerkleProof>()?
                    .store_into(&mut b, Cell::empty_context())
                    .unwrap();
                Ok(b)
            }
            CellType::MerkleUpdate => {
                let mut b = CellBuilder::new();
                u.arbitrary::<crate::merkle::MerkleUpdate>()?
                    .store_into(&mut b, Cell::empty_context())
                    .unwrap();
                Ok(b)
            }
        }
    }

    fn size_hint(_: usize) -> (usize, Option<usize>) {
        (3, None)
    }
}

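/// Builder for the data part of a cell (no references).
///
/// [`CellBuilder`] dereferences to this type, so every `store_*` method
/// below is also available on a full builder. A standalone sketch:
///
/// ```ignore
/// let mut data = CellDataBuilder::new();
/// data.store_u16(0x1234)?;
/// let builder = CellBuilder::from(data);
/// ```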
#[derive(Clone)]
pub struct CellDataBuilder {
    data: [u8; 128],
    bit_len: u16,
}

impl Default for CellDataBuilder {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl Eq for CellDataBuilder {}
impl PartialEq for CellDataBuilder {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.bit_len == other.bit_len && self.data == other.data
    }
}

impl Ord for CellDataBuilder {
    fn cmp(&self, other: &Self) -> Ordering {
        match self.bit_len.cmp(&other.bit_len) {
            Ordering::Equal => {}
            ord => return ord,
        }

        self.data.cmp(&other.data)
    }
}

impl PartialOrd for CellDataBuilder {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl std::fmt::Debug for CellDataBuilder {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        #[repr(transparent)]
        struct Data<'a, T>(&'a T);

        impl<T: std::fmt::Display> std::fmt::Debug for Data<'_, T> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                std::fmt::Display::fmt(self.0, f)
            }
        }

        f.debug_struct("CellDataBuilder")
            .field("data", &Data(&self.display_data()))
            .field("bit_len", &self.bit_len)
            .finish()
    }
}

macro_rules! impl_store_uint {
    ($self:ident, $value:ident, bytes: $bytes:literal, bits: $bits:literal) => {
        if $self.bit_len + $bits <= MAX_BIT_LEN {
            let q = ($self.bit_len / 8) as usize;
            let r = $self.bit_len % 8;
            // SAFETY: `q + $bytes + usize::from(r > 0) <= 128`.
            unsafe {
                let data_ptr = $self.data.as_mut_ptr().add(q);
                debug_assert!(q + $bytes + usize::from(r > 0) <= 128);
                if r == 0 {
                    // Aligned store: copy the big-endian bytes as-is.
                    let value = $value.to_be_bytes();
                    std::ptr::copy_nonoverlapping(value.as_ptr(), data_ptr, $bytes);
                } else {
                    // Unaligned store: fill the remainder of the current byte,
                    // then copy the shifted value.
                    *data_ptr |= ($value >> ($bits - 8 + r)) as u8;
                    let value: [u8; $bytes] = ($value << (8 - r)).to_be_bytes();
                    std::ptr::copy_nonoverlapping(value.as_ptr(), data_ptr.add(1), $bytes);
                }
            };
            $self.bit_len += $bits;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    };
}

impl CellDataBuilder {
    /// Creates an empty cell data builder.
    pub const fn new() -> Self {
        Self {
            data: [0; 128],
            bit_len: 0,
        }
    }

    /// Creates a cell data builder from the specified data, starting from the first bit.
    pub fn from_raw_data(value: &[u8], bits: u16) -> Result<Self, Error> {
        let mut res = Self::new();
        ok!(res.store_raw(value, bits));
        Ok(res)
    }

    /// Returns a slice over the stored data (no references).
    pub fn as_data_slice(&self) -> CellSlice<'_> {
        CellSlice::new_allow_exotic(IntermediateDataCell::wrap(self))
    }

    /// Returns an object which will display the stored data as a bitstring.
    #[inline]
    pub fn display_data(&self) -> DisplayCellBuilderData<'_> {
        DisplayCellBuilderData(self)
    }

    /// Returns the stored data as a bitstring.
    #[inline]
    pub fn as_bitstring(&self) -> Bitstring<'_> {
        Bitstring {
            bytes: &self.data,
            bit_len: self.bit_len,
        }
    }

    /// Returns the underlying data storage.
    #[inline]
    pub const fn raw_data(&self) -> &[u8; 128] {
        &self.data
    }

    /// Returns the stored data size in bits.
    #[inline]
    pub const fn size_bits(&self) -> u16 {
        self.bit_len
    }

    /// Returns the number of bits that can still be stored.
    #[inline]
    pub const fn spare_capacity_bits(&self) -> u16 {
        MAX_BIT_LEN - self.bit_len
    }

    /// Returns true if `bits` more bits fit into the cell data.
    #[inline]
    pub const fn has_capacity_bits(&self, bits: u16) -> bool {
        self.bit_len + bits <= MAX_BIT_LEN
    }

    /// Removes all stored bits.
    pub fn clear_bits(&mut self) {
        self.data = [0; 128];
        self.bit_len = 0;
    }

    /// Removes the specified number of bits from the end of the stored data,
    /// zeroing the removed region.
    pub fn rewind_bits(&mut self, mut bits: u16) -> Result<(), Error> {
        if bits == 0 {
            return Ok(());
        }
        let Some(new_bit_len) = self.bit_len.checked_sub(bits) else {
            return Err(Error::CellUnderflow);
        };

        let q = (new_bit_len / 8) as usize;
        let r = new_bit_len % 8;

        // SAFETY: `q` is always less than 128 and the zeroed region
        // never extends past the previous data length.
        unsafe {
            let mut data_ptr = self.data.as_mut_ptr().add(q);

            if r != 0 {
                let shift = 8 - r;
                *data_ptr &= 0xff << shift;
                bits = bits.saturating_sub(shift);
                data_ptr = data_ptr.add(1);
            }

            std::ptr::write_bytes(data_ptr, 0, bits.div_ceil(8) as usize);
        }

        self.bit_len = new_bit_len;
        Ok(())
    }

    /// Tries to store the specified number of zero bits in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_zeros(&mut self, bits: u16) -> Result<(), Error> {
        if self.bit_len + bits <= MAX_BIT_LEN {
            // All data past `bit_len` is guaranteed to be zero,
            // so it is enough to advance the length.
            self.bit_len += bits;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Tries to store the specified number of set bits in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_ones(&mut self, bits: u16) -> Result<(), Error> {
        self.store_raw(crate::cell::cell_impl::ALL_ONES_CELL.data(), bits)
    }

    /// Tries to store one zero bit in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_bit_zero(&mut self) -> Result<(), Error> {
        let fits = self.bit_len < MAX_BIT_LEN;
        self.bit_len += fits as u16;
        if fits {
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Tries to store one set bit in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_bit_one(&mut self) -> Result<(), Error> {
        if self.bit_len < MAX_BIT_LEN {
            let q = (self.bit_len / 8) as usize;
            let r = self.bit_len % 8;
            // SAFETY: `q` is always less than 128.
            unsafe { *self.data.get_unchecked_mut(q) |= 1 << (7 - r) };
            self.bit_len += 1;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Tries to store one bit in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_bit(&mut self, value: bool) -> Result<(), Error> {
        if value {
            self.store_bit_one()
        } else {
            self.store_bit_zero()
        }
    }

    /// Tries to store a `u8` in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_u8(&mut self, value: u8) -> Result<(), Error> {
        if self.bit_len + 8 <= MAX_BIT_LEN {
            let q = (self.bit_len / 8) as usize;
            let r = self.bit_len % 8;
            unsafe {
                if r == 0 {
                    // Aligned store.
                    debug_assert!(q < 128);
                    *self.data.get_unchecked_mut(q) = value;
                } else {
                    // Unaligned store: split the value across two bytes.
                    debug_assert!(q + 1 < 128);
                    *self.data.get_unchecked_mut(q) |= value >> r;
                    *self.data.get_unchecked_mut(q + 1) = value << (8 - r);
                }
            };
            self.bit_len += 8;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Tries to store a `u16` in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_u16(&mut self, value: u16) -> Result<(), Error> {
        impl_store_uint!(self, value, bytes: 2, bits: 16)
    }

    /// Tries to store a `u32` in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_u32(&mut self, value: u32) -> Result<(), Error> {
        impl_store_uint!(self, value, bytes: 4, bits: 32)
    }

    /// Tries to store a `u64` in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_u64(&mut self, value: u64) -> Result<(), Error> {
        impl_store_uint!(self, value, bytes: 8, bits: 64)
    }

    /// Tries to store a `u128` in the cell,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_u128(&mut self, value: u128) -> Result<(), Error> {
        impl_store_uint!(self, value, bytes: 16, bits: 128)
    }

    /// Tries to store 32 bytes in the cell,
    /// returning an error if there is not enough remaining capacity.
    #[inline]
    pub fn store_u256<T>(&mut self, value: &T) -> Result<(), Error>
    where
        T: AsRef<[u8; 32]>,
    {
        fn store_u256_impl(builder: &mut CellDataBuilder, value: &[u8; 32]) -> Result<(), Error> {
            if builder.bit_len + 256 <= MAX_BIT_LEN {
                let q = (builder.bit_len / 8) as usize;
                let r = builder.bit_len % 8;
                unsafe {
                    let data_ptr = builder.data.as_mut_ptr().add(q);
                    debug_assert!(q + 32 + usize::from(r > 0) <= 128);
                    if r == 0 {
                        // Aligned store.
                        std::ptr::copy_nonoverlapping(value.as_ptr(), data_ptr, 32);
                    } else {
                        // Unaligned store: reinterpret the hash as two
                        // big-endian `u128` halves and shift them into place.
                        let [mut hi, mut lo]: [u128; 2] = std::mem::transmute_copy(value);

                        #[cfg(target_endian = "little")]
                        {
                            hi = hi.swap_bytes();
                            lo = lo.swap_bytes();
                        }

                        let shift = 8 - r;

                        *data_ptr |= (hi >> (128 - shift)) as u8;
                        let hi: [u8; 16] = ((hi << shift) | (lo >> (128 - shift))).to_be_bytes();
                        let lo: [u8; 16] = (lo << shift).to_be_bytes();
                        std::ptr::copy_nonoverlapping(hi.as_ptr(), data_ptr.add(1), 16);
                        std::ptr::copy_nonoverlapping(lo.as_ptr(), data_ptr.add(17), 16);
                    }
                };
                builder.bit_len += 256;
                Ok(())
            } else {
                Err(Error::CellOverflow)
            }
        }

        store_u256_impl(self, value.as_ref())
    }

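    /// Tries to store a small unsigned integer in the cell as `bits` bits,
    /// most-significant bit first. Only the lowest `min(bits, 8)` bits of
    /// `value` are used; if `bits` is greater than 8, the value is
    /// left-padded with zero bits.
    ///
    /// A minimal sketch of the resulting bit layout:
    ///
    /// ```ignore
    /// let mut b = CellDataBuilder::new();
    /// b.store_small_uint(0b101, 3)?;
    /// assert_eq!(b.raw_data()[0], 0b1010_0000);
    /// ```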
    pub fn store_small_uint(&mut self, mut value: u8, mut bits: u16) -> Result<(), Error> {
        if bits == 0 {
            return Ok(());
        }

        if self.bit_len + bits <= MAX_BIT_LEN {
            // If more than 8 bits are requested, pad with zero bits on the left.
            bits = if let Some(offset) = bits.checked_sub(8) {
                self.bit_len += offset;
                8
            } else {
                bits
            };

            // Make the used bits the most significant ones.
            value <<= 8 - bits;

            let q = (self.bit_len / 8) as usize;
            let r = self.bit_len % 8;
            unsafe {
                debug_assert!(q < 128);
                if r == 0 {
                    // Aligned store.
                    *self.data.get_unchecked_mut(q) = value;
                } else {
                    // Unaligned store: split the value across two bytes.
                    *self.data.get_unchecked_mut(q) |= value >> r;
                    if bits + r > 8 {
                        debug_assert!(q + 1 < 128);
                        *self.data.get_unchecked_mut(q + 1) = value << (8 - r);
                    }
                }
            };
            self.bit_len += bits;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Tries to store an unsigned integer in the cell as `bits` bits,
    /// most-significant bit first. Only the lowest `min(bits, 64)` bits of
    /// `value` are used; if `bits` is greater than 64, the value is
    /// left-padded with zero bits.
    pub fn store_uint(&mut self, mut value: u64, mut bits: u16) -> Result<(), Error> {
        if bits == 0 {
            return Ok(());
        }

        if self.bit_len + bits <= MAX_BIT_LEN {
            // If more than 64 bits are requested, pad with zero bits on the left.
            bits = if let Some(offset) = bits.checked_sub(64) {
                self.bit_len += offset;
                64
            } else {
                bits
            };

            // Make the used bits the most significant ones.
            value <<= 64 - bits;

            let q = (self.bit_len / 8) as usize;
            let r = self.bit_len % 8;
            unsafe {
                let data_ptr = self.data.as_mut_ptr().add(q);
                if r == 0 {
                    // Aligned store.
                    let byte_len = bits.div_ceil(8) as usize;
                    debug_assert!(q + byte_len <= 128);

                    let value = value.to_be_bytes();
                    std::ptr::copy_nonoverlapping(value.as_ptr(), data_ptr, byte_len);
                } else {
                    debug_assert!(q < 128);

                    // Unaligned store: fill the remainder of the current byte,
                    // then copy the rest of the shifted value.
                    let shift = 8 - r;
                    *data_ptr |= (value >> (64 - shift)) as u8;

                    if let Some(bits) = bits.checked_sub(shift) {
                        if bits > 0 {
                            let byte_len = bits.div_ceil(8) as usize;
                            debug_assert!(q + 1 + byte_len <= 128);

                            let value: [u8; 8] = (value << shift).to_be_bytes();
                            std::ptr::copy_nonoverlapping(
                                value.as_ptr(),
                                data_ptr.add(1),
                                byte_len,
                            );
                        }
                    }
                }
            }
            self.bit_len += bits;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Tries to store a `BigInt` in the cell as exactly `bits` bits,
    /// returning an error if the integer does not fit.
    #[cfg(feature = "bigint")]
    pub fn store_bigint(
        &mut self,
        x: &num_bigint::BigInt,
        bits: u16,
        signed: bool,
    ) -> Result<(), Error> {
        use crate::util::BigIntExt;

        let int_bits = x.bitsize(signed);
        if int_bits > bits {
            return Err(Error::IntOverflow);
        }
        self.store_bigint_truncate(x, bits, signed)
    }

    /// Tries to store a `BigInt` in the cell as `bits` bits,
    /// truncating the integer if it does not fit.
    #[cfg(feature = "bigint")]
    pub fn store_bigint_truncate(
        &mut self,
        x: &num_bigint::BigInt,
        bits: u16,
        signed: bool,
    ) -> Result<(), Error> {
        use std::borrow::Cow;

        use num_traits::ToPrimitive;

        match x.to_u64() {
            Some(value) => self.store_uint(value, bits),
            None => {
                // Align the big-endian bit string to a byte boundary.
                let int = if bits % 8 != 0 {
                    let align = 8 - bits % 8;
                    Cow::Owned(x.clone() << align)
                } else {
                    Cow::Borrowed(x)
                };

                let minimal_bytes = bits.div_ceil(8) as usize;

                let (prefix, mut bytes) = if signed {
                    let bytes = int.to_signed_bytes_le();
                    (
                        // Sign-extend with `0xff` for negative values.
                        bytes
                            .last()
                            .map(|last| (last >> 7) * 255)
                            .unwrap_or_default(),
                        bytes,
                    )
                } else {
                    (0, int.to_bytes_le().1)
                };
                bytes.resize(minimal_bytes, prefix);
                bytes.reverse();

                self.store_raw(&bytes, bits)
            }
        }
    }

    /// Tries to store a `BigUint` in the cell as exactly `bits` bits,
    /// returning an error if the integer does not fit.
    #[cfg(feature = "bigint")]
    pub fn store_biguint(
        &mut self,
        x: &num_bigint::BigUint,
        bits: u16,
        signed: bool,
    ) -> Result<(), Error> {
        use crate::util::BigIntExt;

        let int_bits = x.bitsize(signed);
        if int_bits > bits {
            return Err(Error::IntOverflow);
        }
        self.store_biguint_truncate(x, bits)
    }

    /// Tries to store a `BigUint` in the cell as `bits` bits,
    /// truncating the integer if it does not fit.
    #[cfg(feature = "bigint")]
    pub fn store_biguint_truncate(
        &mut self,
        x: &num_bigint::BigUint,
        bits: u16,
    ) -> Result<(), Error> {
        use std::borrow::Cow;

        use num_traits::ToPrimitive;

        match x.to_u64() {
            Some(value) => self.store_uint(value, bits),
            None => {
                // Align the big-endian bit string to a byte boundary.
                let int = if bits % 8 != 0 {
                    let align = 8 - bits % 8;
                    Cow::Owned(x.clone() << align)
                } else {
                    Cow::Borrowed(x)
                };

                let minimal_bytes = bits.div_ceil(8) as usize;

                let mut bytes = int.to_bytes_le();
                bytes.resize(minimal_bytes, 0);
                bytes.reverse();

                self.store_raw(&bytes, bits)
            }
        }
    }

    /// Tries to store a variable-length `BigInt` in the cell:
    /// a `len_bits`-bit byte length followed by the integer itself.
    #[cfg(feature = "bigint")]
    pub fn store_var_bigint(
        &mut self,
        x: &num_bigint::BigInt,
        len_bits: u16,
        signed: bool,
    ) -> Result<(), Error> {
        use crate::util::BigIntExt;

        let bits = x.bitsize(signed);
        let bytes = bits.div_ceil(8);
        if bytes >= (1 << len_bits) {
            return Err(Error::IntOverflow);
        }

        ok!(self.store_small_uint(bytes as u8, len_bits));
        self.store_bigint_truncate(x, bytes * 8, signed)
    }

    /// Tries to store a variable-length `BigUint` in the cell:
    /// a `len_bits`-bit byte length followed by the integer itself.
    #[cfg(feature = "bigint")]
    pub fn store_var_biguint(
        &mut self,
        x: &num_bigint::BigUint,
        len_bits: u16,
    ) -> Result<(), Error> {
        let bits = x.bits() as u16;
        let bytes = bits.div_ceil(8);
        if bytes >= (1 << len_bits) {
            return Err(Error::IntOverflow);
        }

        ok!(self.store_small_uint(bytes as u8, len_bits));
        self.store_biguint_truncate(x, bytes * 8)
    }

    /// Tries to store the specified number of bits from the given bytes,
    /// returning an error if there is not enough remaining capacity.
    pub fn store_raw(&mut self, value: &[u8], bits: u16) -> Result<(), Error> {
        store_raw(&mut self.data, &mut self.bit_len, value, bits)
    }

    /// Tries to append the data of the given cell,
    /// returning an error if there is not enough remaining capacity.
    #[inline]
    pub fn store_cell_data<T>(&mut self, value: T) -> Result<(), Error>
    where
        T: AsRef<DynCell>,
    {
        fn store_cell_data_impl(
            builder: &mut CellDataBuilder,
            value: &DynCell,
        ) -> Result<(), Error> {
            store_raw(
                &mut builder.data,
                &mut builder.bit_len,
                value.data(),
                value.bit_len(),
            )
        }
        store_cell_data_impl(self, value.as_ref())
    }

    /// Tries to append the remaining data of the given slice,
    /// returning an error if there is not enough remaining capacity.
    #[inline]
    pub fn store_slice_data<'a, T>(&mut self, value: T) -> Result<(), Error>
    where
        T: AsRef<CellSlice<'a>>,
    {
        fn store_slice_data_impl(
            builder: &mut CellDataBuilder,
            value: &CellSlice<'_>,
        ) -> Result<(), Error> {
            let bits = value.size_bits();
            if builder.bit_len + bits <= MAX_BIT_LEN {
                // SAFETY: an array of `MaybeUninit<u8>` does not require
                // initialization.
                let mut slice_data =
                    unsafe { MaybeUninit::<[MaybeUninit<u8>; 128]>::uninit().assume_init() };

                // SAFETY: `[MaybeUninit<u8>; 128]` has the same layout as
                // `[u8; 128]`, and `get_raw` overwrites the bytes it returns.
                let slice_data = unsafe {
                    &mut *(&mut slice_data as *mut [MaybeUninit<u8>; 128] as *mut [u8; 128])
                };

                let slice_data = ok!(value.get_raw(0, slice_data, bits));
                builder.store_raw(slice_data, bits)
            } else {
                Err(Error::CellOverflow)
            }
        }
        store_slice_data_impl(self, value.as_ref())
    }

    /// Tries to prepend the specified number of bits to the beginning of the
    /// stored data, returning an error if there is not enough remaining capacity.
    pub fn prepend_raw(&mut self, value: &[u8], bits: u16) -> Result<(), Error> {
        if bits == 0 {
            return Ok(());
        }

        fn store_raw_impl(
            data: &mut [u8; 128],
            bit_len: &mut u16,
            value: &[u8],
            bits: u16,
        ) -> Result<(), Error> {
            store_raw(data, bit_len, value, bits)
        }

        if self.bit_len + bits <= MAX_BIT_LEN {
            // Rebuild the data into a scratch buffer: first the new prefix,
            // then the previously stored bits.
            let mut data = [0; 128];
            let mut bit_len = 0;
            ok!(store_raw_impl(&mut data, &mut bit_len, value, bits));
            ok!(store_raw_impl(
                &mut data,
                &mut bit_len,
                &self.data,
                self.bit_len
            ));
            self.data = data;
            self.bit_len = bit_len;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }
}

#[inline]
fn store_raw(
    data: &mut [u8; 128],
    bit_len: &mut u16,
    value: &[u8],
    mut bits: u16,
) -> Result<(), Error> {
    if *bit_len + bits <= MAX_BIT_LEN {
        // If more bits are requested than `value` provides,
        // the missing bits are stored as zeros first.
        let max_bit_len = value.len().saturating_mul(8) as u16;
        bits = if let Some(offset) = bits.checked_sub(max_bit_len) {
            *bit_len += offset;
            max_bit_len
        } else {
            bits
        };

        if bits == 0 {
            return Ok(());
        }

        let q = (*bit_len / 8) as usize;
        let r = *bit_len % 8;
        // SAFETY: all accesses are bounded by the `debug_assert!`s below.
        unsafe {
            let mut data_ptr = data.as_mut_ptr().add(q);
            let mut value_ptr = value.as_ptr();

            if r == 0 {
                // Aligned store: copy the bytes and clear the unused tail
                // of the last byte.
                let byte_len = bits.div_ceil(8) as usize;
                debug_assert!(q + byte_len <= 128);
                debug_assert!(byte_len <= value.len());

                std::ptr::copy_nonoverlapping(value_ptr, data_ptr, byte_len);

                let bits_r = bits % 8;
                if bits_r != 0 {
                    *data_ptr.add(byte_len - 1) &= 0xff << (8 - bits_r);
                }
            } else {
                // Unaligned store: shift every byte across the boundary.
                let byte_len = (bits + r).div_ceil(8) as usize - 1;
                let value_len = bits.div_ceil(8) as usize;
                debug_assert!(q + byte_len <= 128);
                debug_assert!(byte_len <= value_len && value_len <= value.len());

                let shift = 8 - r;
                for _ in 0..byte_len {
                    *data_ptr |= *value_ptr >> r;
                    data_ptr = data_ptr.add(1);
                    *data_ptr = *value_ptr << shift;
                    value_ptr = value_ptr.add(1);
                }
                if byte_len < value_len {
                    *data_ptr |= *value_ptr >> r;
                }

                // Clear the bits after the stored range.
                let bits_r = (r + bits) % 8;
                if bits_r != 0 {
                    *data_ptr &= 0xff << (8 - bits_r);
                }
            }
        }
        *bit_len += bits;
        Ok(())
    } else {
        Err(Error::CellOverflow)
    }
}

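/// Builder for the references part of a cell (no data).
///
/// A minimal sketch of assembling a cell from separately built parts
/// (`data` and `child` are placeholders):
///
/// ```ignore
/// let mut refs = CellRefsBuilder::default();
/// refs.store_reference(child)?;
/// let builder = CellBuilder::from_parts(false, data, refs);
/// ```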
#[derive(Default)]
#[repr(transparent)]
pub struct CellRefsBuilder(ArrayVec<Cell, MAX_REF_COUNT>);

impl CellRefsBuilder {
    /// Tries to store a child reference,
    /// returning an error if the reference count limit is reached.
    pub fn store_reference(&mut self, cell: Cell) -> Result<(), Error> {
        if self.0.len() < MAX_REF_COUNT {
            // SAFETY: we have just checked that there is enough capacity.
            unsafe { self.0.push(cell) }
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Computes the combined level mask of all stored references.
    pub fn compute_level_mask(&self) -> LevelMask {
        let mut result = LevelMask::EMPTY;
        for child in self.0.as_ref() {
            result |= child.as_ref().level_mask();
        }
        result
    }

    /// Returns the number of stored references.
    #[inline]
    pub fn len(&self) -> usize {
        self.0.len()
    }

    /// Returns true if no references have been stored.
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.0.is_empty()
    }

    /// Returns an iterator over the stored references.
    pub fn iter(&self) -> std::slice::Iter<'_, Cell> {
        self.0.as_ref().iter()
    }
}

impl IntoIterator for CellRefsBuilder {
    type Item = Cell;
    type IntoIter = ArrayVecIntoIter<Cell, MAX_REF_COUNT>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}

impl<'a> IntoIterator for &'a CellRefsBuilder {
    type Item = &'a Cell;
    type IntoIter = std::slice::Iter<'a, Cell>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.0.as_ref().iter()
    }
}

#[repr(transparent)]
struct IntermediateDataCell(CellDataBuilder);

impl IntermediateDataCell {
    #[inline(always)]
    const fn wrap(value: &CellDataBuilder) -> &Self {
        // SAFETY: `IntermediateDataCell` is `#[repr(transparent)]`.
        unsafe { &*(value as *const CellDataBuilder as *const Self) }
    }
}

impl CellImpl for IntermediateDataCell {
    fn untrack(self: CellInner<Self>) -> Cell {
        Cell(self)
    }

    fn descriptor(&self) -> CellDescriptor {
        CellDescriptor {
            d1: 0,
            d2: CellDescriptor::compute_d2(self.0.bit_len),
        }
    }

    fn data(&self) -> &[u8] {
        self.0.raw_data()
    }

    fn bit_len(&self) -> u16 {
        self.0.bit_len
    }

    fn reference(&self, _: u8) -> Option<&DynCell> {
        None
    }

    fn reference_cloned(&self, _: u8) -> Option<Cell> {
        None
    }

    fn virtualize(&self) -> &DynCell {
        self
    }

    fn hash(&self, _: u8) -> &HashBytes {
        panic!("Hash for an intermediate data cell is not defined");
    }

    fn depth(&self, _: u8) -> u16 {
        0
    }

    fn take_first_child(&mut self) -> Option<Cell> {
        None
    }

    fn replace_first_child(&mut self, parent: Cell) -> Result<Cell, Cell> {
        Err(parent)
    }

    fn take_next_child(&mut self) -> Option<Cell> {
        None
    }

    #[cfg(feature = "stats")]
    fn stats(&self) -> CellTreeStats {
        CellTreeStats {
            bit_count: self.0.bit_len as u64,
            cell_count: 1,
        }
    }
}

#[repr(transparent)]
struct IntermediateFullCell(CellBuilder);

impl IntermediateFullCell {
    #[inline(always)]
    const fn wrap(value: &CellBuilder) -> &Self {
        // SAFETY: `IntermediateFullCell` is `#[repr(transparent)]`.
        unsafe { &*(value as *const CellBuilder as *const Self) }
    }
}

impl CellImpl for IntermediateFullCell {
    fn untrack(self: CellInner<Self>) -> Cell {
        Cell(self)
    }

    fn descriptor(&self) -> CellDescriptor {
        CellDescriptor {
            d1: CellDescriptor::compute_d1(LevelMask::EMPTY, false, self.0.references.len() as u8),
            d2: CellDescriptor::compute_d2(self.0.bit_len),
        }
    }

    fn data(&self) -> &[u8] {
        self.0.raw_data()
    }

    fn bit_len(&self) -> u16 {
        self.0.bit_len
    }

    fn reference(&self, index: u8) -> Option<&DynCell> {
        match self.0.references.get(index) {
            Some(cell) => Some(cell.as_ref()),
            None => None,
        }
    }

    fn reference_cloned(&self, index: u8) -> Option<Cell> {
        self.0.references.get(index).cloned()
    }

    fn virtualize(&self) -> &DynCell {
        self
    }

    fn hash(&self, _: u8) -> &HashBytes {
        panic!("Hash for an intermediate full cell is not defined");
    }

    fn depth(&self, _: u8) -> u16 {
        0
    }

    fn take_first_child(&mut self) -> Option<Cell> {
        None
    }

    fn replace_first_child(&mut self, parent: Cell) -> Result<Cell, Cell> {
        Err(parent)
    }

    fn take_next_child(&mut self) -> Option<Cell> {
        None
    }

    #[cfg(feature = "stats")]
    fn stats(&self) -> CellTreeStats {
        CellTreeStats {
            bit_count: self.0.bit_len as u64,
            cell_count: 1 + self.0.references.len() as u64,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn clone_builder() {
        let mut builder = CellBuilder::new();
        builder.store_u32(0xdeafbeaf).unwrap();
        let cell1 = builder.clone().build().unwrap();
        let cell2 = builder.clone().build().unwrap();
        assert_eq!(cell1.as_ref(), cell2.as_ref());

        builder.store_u32(0xb00b5).unwrap();
        let cell3 = builder.build().unwrap();
        assert_ne!(cell1.as_ref(), cell3.as_ref());
    }

    #[test]
    fn compare_builders() {
        let mut a = CellBuilder::new();
        a.store_u32(0xdeafbeaf).unwrap();

        let mut b = CellBuilder::new();
        b.store_u32(0xdeafbeaf).unwrap();

        assert_eq!(a, b);

        b.store_u8(1).unwrap();
        assert!(a < b);
        a.store_u8(2).unwrap();
        assert!(a > b);

        a.rewind_bits(8).unwrap();
        a.store_u8(1).unwrap();
        assert_eq!(a, b);

        let child_a = a.clone().build().unwrap();
        a.store_reference(child_a.clone()).unwrap();
        assert!(a > b);

        let child_b = b.clone().build().unwrap();
        b.store_reference(child_b).unwrap();
        assert_eq!(a, b);

        let child_b2 = b.clone().build().unwrap();
        a.store_reference(child_a).unwrap();
        b.store_reference(child_b2).unwrap();
        assert_ne!(a, b);
    }

    #[test]
    fn rewind_builder() {
        let mut builder = CellBuilder::new();
        builder.store_u32(0xdeafbeaf).unwrap();
        assert_eq!(builder.size_bits(), 32);
        assert_eq!(builder.data[..4], 0xdeafbeaf_u32.to_be_bytes());

        builder.rewind_bits(5).unwrap();
        assert_eq!(builder.size_bits(), 27);
        assert_eq!(builder.data[..4], 0xdeafbea0_u32.to_be_bytes());

        builder.store_u32(0xdeafbeaf).unwrap();
        assert_eq!(builder.size_bits(), 32 + 27);
        assert_eq!(builder.data[..8], [
            0xde, 0xaf, 0xbe, 0xbb, 0xd5, 0xf7, 0xd5, 0xe0
        ]);
        builder.rewind_bits(32).unwrap();
        assert_eq!(builder.data[..8], [
            0xde, 0xaf, 0xbe, 0xa0, 0x00, 0x00, 0x00, 0x00
        ]);

        assert_eq!(builder.rewind_bits(32), Err(Error::CellUnderflow));

        builder.rewind_bits(27).unwrap();
        assert_eq!(builder.size_bits(), 0);
        assert_eq!(builder.data, [0u8; 128]);

        builder.store_raw(&[0xff; 128], MAX_BIT_LEN).unwrap();
        assert_eq!(builder.size_bits(), MAX_BIT_LEN);

        let mut target = [0xff; 128];
        target[127] = 0xfe;
        assert_eq!(builder.data, target);

        builder.rewind_bits(3).unwrap();
        assert_eq!(builder.size_bits(), MAX_BIT_LEN - 3);
        target[127] = 0xf0;
        assert_eq!(builder.data, target);

        builder.rewind_bits(8).unwrap();
        assert_eq!(builder.size_bits(), MAX_BIT_LEN - 3 - 8);
        target[126] = 0xf0;
        target[127] = 0x00;
        assert_eq!(builder.data, target);
    }

    #[test]
    #[cfg_attr(miri, ignore)] // takes too long to run under Miri
    fn store_raw() {
        const ONES: &[u8] = &[0xff; 128];
        for offset in 0..8 {
            for bits in 0..=1016 {
                let mut builder = CellBuilder::new();
                builder.store_zeros(offset).unwrap();
                builder.store_raw(ONES, bits).unwrap();
                builder.build().unwrap();
            }
        }
    }

    #[test]
    fn prepend_raw() {
        let mut builder = CellBuilder::new();
        builder.store_raw(&[0xde, 0xaf, 0xbe, 0xaf], 20).unwrap();
        builder.prepend_raw(&[0xaa, 0x55], 5).unwrap();
        let cell = builder.build().unwrap();
        // 5 prefix bits (0b10101) followed by the original 20 bits,
        // with the completion tag in the last byte.
        assert_eq!(cell.bit_len(), 25);
        assert_eq!(cell.data(), [0xae, 0xf5, 0x7d, 0xc0]);
        println!("{}", cell.display_tree());
    }
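
    // The tests below are small behavioral sketches added for illustration;
    // they rely only on the APIs defined in this file.

    #[test]
    fn store_uint_keeps_low_bits() {
        // `store_uint` stores only the lowest `bits` bits of the value,
        // most-significant bit first.
        let mut builder = CellBuilder::new();
        builder.store_uint(0b101, 3).unwrap();
        assert_eq!(builder.size_bits(), 3);
        assert_eq!(builder.raw_data()[0], 0b1010_0000);

        // An unaligned store crosses the byte boundary.
        builder.store_uint(0xff, 8).unwrap();
        assert_eq!(builder.size_bits(), 11);
        assert_eq!(builder.raw_data()[..2], [0b1011_1111, 0b1110_0000]);
    }

    #[test]
    fn store_option_as_presence_bit() {
        // `Option<T>` is stored as a presence bit followed by the value.
        let some = CellBuilder::build_from(Some(0xffu8)).unwrap();
        assert_eq!(some.bit_len(), 1 + 8);

        let none = CellBuilder::build_from(None::<u8>).unwrap();
        assert_eq!(none.bit_len(), 1);
    }

    #[test]
    fn build_library_cell() {
        // A library cell is exotic: one type byte plus a 256-bit hash,
        // with no child references.
        let cell = CellBuilder::build_library(HashBytes::wrap(&[0x11; 32]));
        assert!(cell.descriptor().is_exotic());
        assert_eq!(cell.bit_len(), 8 + 256);
        assert!(cell.reference(0).is_none());
    }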

    #[test]
    fn store_slice() -> anyhow::Result<()> {
        const SOME_HASH: &HashBytes = HashBytes::wrap(&[
            0xdf, 0x86, 0xce, 0xbc, 0xe8, 0xd5, 0xab, 0x0c, 0x69, 0xb4, 0xce, 0x33, 0xfe, 0x9b,
            0x0e, 0x2c, 0xdf, 0x69, 0xa3, 0xe1, 0x13, 0x7e, 0x64, 0x85, 0x6b, 0xbc, 0xfd, 0x39,
            0xe7, 0x9b, 0xc1, 0x6f,
        ]);

        let mut builder = CellBuilder::new();
        builder.store_zeros(3)?;
        builder.store_u256(SOME_HASH)?;
        let cell = builder.build()?;
        println!("{}", cell.display_tree());

        let mut builder = CellBuilder::new();
        let mut slice = cell.as_slice()?;
        slice.skip_first(3, 0)?;
        builder.store_slice(slice)?;
        let cell = builder.build()?;
        println!("{}", cell.display_tree());
        assert_eq!(cell.data(), SOME_HASH);

        Ok(())
    }
}