1use std::cmp::Ordering;
2use std::mem::MaybeUninit;
3use std::rc::Rc;
4use std::sync::Arc;
5
6use super::CellFamily;
7use crate::cell::cell_context::{CellContext, CellParts};
8use crate::cell::{
9 Cell, CellDescriptor, CellImpl, CellInner, CellSlice, CellType, DynCell, HashBytes, LevelMask,
10 MAX_BIT_LEN, MAX_REF_COUNT, Size,
11};
12use crate::error::Error;
13use crate::util::{ArrayVec, ArrayVecIntoIter, Bitstring};
14
/// A type that can be serialized into cell data via a [`CellBuilder`].
pub trait Store {
    /// Stores this value into the provided builder, using `context`
    /// for any intermediate cell finalization.
    fn store_into(&self, builder: &mut CellBuilder, context: &dyn CellContext)
    -> Result<(), Error>;
}
21
22impl<T: Store + ?Sized> Store for &T {
23 #[inline]
24 fn store_into(
25 &self,
26 builder: &mut CellBuilder,
27 context: &dyn CellContext,
28 ) -> Result<(), Error> {
29 <T as Store>::store_into(self, builder, context)
30 }
31}
32
33impl<T: Store + ?Sized> Store for &mut T {
34 #[inline]
35 fn store_into(
36 &self,
37 builder: &mut CellBuilder,
38 context: &dyn CellContext,
39 ) -> Result<(), Error> {
40 <T as Store>::store_into(self, builder, context)
41 }
42}
43
44impl<T: Store + ?Sized> Store for Box<T> {
45 #[inline]
46 fn store_into(
47 &self,
48 builder: &mut CellBuilder,
49 context: &dyn CellContext,
50 ) -> Result<(), Error> {
51 <T as Store>::store_into(self.as_ref(), builder, context)
52 }
53}
54
55impl<T: Store + ?Sized> Store for Arc<T> {
56 #[inline]
57 fn store_into(
58 &self,
59 builder: &mut CellBuilder,
60 context: &dyn CellContext,
61 ) -> Result<(), Error> {
62 <T as Store>::store_into(self.as_ref(), builder, context)
63 }
64}
65
66impl<T: Store + ?Sized> Store for Rc<T> {
67 #[inline]
68 fn store_into(
69 &self,
70 builder: &mut CellBuilder,
71 context: &dyn CellContext,
72 ) -> Result<(), Error> {
73 <T as Store>::store_into(self.as_ref(), builder, context)
74 }
75}
76
impl Store for () {
    /// The unit type occupies no bits and no references.
    #[inline]
    fn store_into(&self, _: &mut CellBuilder, _: &dyn CellContext) -> Result<(), Error> {
        Ok(())
    }
}
83
/// Implements [`Store`] for tuples by storing each field left to right.
macro_rules! impl_store_for_tuples {
    ($( ($($field:ident: $t:ident),+) ),*$(,)?) => {$(
        impl<$($t: Store),+> Store for ($($t),*) {
            fn store_into(
                &self,
                builder: &mut CellBuilder,
                context: &dyn CellContext
            ) -> Result<(), Error> {
                let ($($field),+) = self;
                // The first failing field aborts the whole store.
                $(ok!($field.store_into(builder, context)));*;
                Ok(())
            }
        }
    )*};
}
99
// Tuples of 2..=6 `Store` values serialize as the concatenation of their fields.
impl_store_for_tuples! {
    (t1: T1, t2: T2),
    (t1: T1, t2: T2, t3: T3),
    (t1: T1, t2: T2, t3: T3, t4: T4),
    (t1: T1, t2: T2, t3: T3, t4: T4, t5: T5),
    (t1: T1, t2: T2, t3: T3, t4: T4, t5: T5, t6: T6),
}
107
108impl<T: Store> Store for Option<T> {
109 #[inline]
110 fn store_into(
111 &self,
112 builder: &mut CellBuilder,
113 context: &dyn CellContext,
114 ) -> Result<(), Error> {
115 match self {
116 Some(data) => {
117 ok!(builder.store_bit_one());
118 data.store_into(builder, context)
119 }
120 None => builder.store_bit_zero(),
121 }
122 }
123}
124
impl Store for CellBuilder {
    /// Appends both the data bits and the references of this builder.
    #[inline]
    fn store_into(&self, builder: &mut CellBuilder, _: &dyn CellContext) -> Result<(), Error> {
        builder.store_builder(self)
    }
}

impl Store for CellSlice<'_> {
    /// Appends the remaining data bits and references of this slice.
    #[inline]
    fn store_into(&self, builder: &mut CellBuilder, _: &dyn CellContext) -> Result<(), Error> {
        builder.store_slice(self)
    }
}

impl Store for Cell {
    /// Stores this cell as a child reference (it is not inlined).
    #[inline]
    fn store_into(&self, builder: &mut CellBuilder, _: &dyn CellContext) -> Result<(), Error> {
        builder.store_reference(self.clone())
    }
}
145
/// Implements [`Store`] for primitives by delegating to the matching
/// fixed-width `store_*` method of the builder.
macro_rules! impl_primitive_store {
    ($($type:ty => |$b:ident, $v:ident| $expr:expr),*$(,)?) => {
        $(impl Store for $type {
            #[inline]
            fn store_into(&self,
                $b: &mut CellBuilder,
                _: &dyn CellContext
            ) -> Result<(), Error> {
                let $v = self;
                $expr
            }
        })*
    };
}
160
// Signed integers reuse the unsigned stores: the two's complement bit
// pattern is stored as-is (big-endian).
impl_primitive_store! {
    bool => |b, v| b.store_bit(*v),
    u8 => |b, v| b.store_u8(*v),
    i8 => |b, v| b.store_u8(*v as u8),
    u16 => |b, v| b.store_u16(*v),
    i16 => |b, v| b.store_u16(*v as u16),
    u32 => |b, v| b.store_u32(*v),
    i32 => |b, v| b.store_u32(*v as u32),
    u64 => |b, v| b.store_u64(*v),
    i64 => |b, v| b.store_u64(*v as u64),
    u128 => |b, v| b.store_u128(*v),
    i128 => |b, v| b.store_u128(*v as u128),
    HashBytes => |b, v| b.store_u256(v),
}
175
/// Builder for constructing a cell: packed data bits plus child references.
#[derive(Default, Clone)]
pub struct CellBuilder {
    // Accumulated data bits.
    inner: CellDataBuilder,
    // Whether the finalized cell will be exotic.
    is_exotic: bool,
    // Child cells (at most `MAX_REF_COUNT`).
    references: ArrayVec<Cell, MAX_REF_COUNT>,
}
183
184impl Eq for CellBuilder {}
185
186impl PartialEq for CellBuilder {
187 #[inline]
188 fn eq(&self, other: &Self) -> bool {
189 self.is_exotic == other.is_exotic
190 && self.inner == other.inner
191 && self.references.as_ref() == other.references.as_ref()
192 }
193}
194
195impl Ord for CellBuilder {
196 fn cmp(&self, other: &Self) -> Ordering {
197 match (self.inner.bit_len, self.references.len())
198 .cmp(&(other.inner.bit_len, other.references.len()))
199 {
200 Ordering::Equal => {}
201 ord => return ord,
202 }
203
204 match self.inner.data.cmp(&other.inner.data) {
206 Ordering::Equal => {}
207 ord => return ord,
208 }
209
210 for (a, b) in self.references().iter().zip(other.references().iter()) {
211 match a.repr_hash().cmp(b.repr_hash()) {
212 Ordering::Equal => {}
213 ord => return ord,
214 }
215 }
216
217 Ordering::Equal
218 }
219}
220
221impl PartialOrd for CellBuilder {
222 #[inline]
223 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
224 Some(self.cmp(other))
225 }
226}
227
228impl std::fmt::Debug for CellBuilder {
229 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
230 #[repr(transparent)]
231 struct Data<'a, T>(&'a T);
232
233 impl<T: std::fmt::Display> std::fmt::Debug for Data<'_, T> {
234 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
235 std::fmt::Display::fmt(self.0, f)
236 }
237 }
238
239 f.debug_struct("CellBuilder")
240 .field("data", &Data(&self.inner.display_data()))
241 .field("bit_len", &self.inner.bit_len)
242 .field("is_exotic", &self.is_exotic)
243 .field("references", &self.references.as_ref())
244 .finish()
245 }
246}
247
// A `CellBuilder` dereferences to its data part, so all
// `CellDataBuilder` methods are directly available on it.
impl std::ops::Deref for CellBuilder {
    type Target = CellDataBuilder;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

impl std::ops::DerefMut for CellBuilder {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}

impl AsRef<CellDataBuilder> for CellBuilder {
    #[inline]
    fn as_ref(&self) -> &CellDataBuilder {
        &self.inner
    }
}

impl AsMut<CellDataBuilder> for CellBuilder {
    #[inline]
    fn as_mut(&mut self) -> &mut CellDataBuilder {
        &mut self.inner
    }
}

// A data builder converts into an ordinary cell builder with no references.
impl From<CellDataBuilder> for CellBuilder {
    #[inline]
    fn from(inner: CellDataBuilder) -> Self {
        Self {
            inner,
            is_exotic: false,
            references: ArrayVec::new(),
        }
    }
}
288
impl CellBuilder {
    /// Builds a new cell from the provided data using an empty cell context.
    #[inline]
    pub fn build_from<T>(data: T) -> Result<Cell, Error>
    where
        T: Store,
    {
        Self::build_from_ext(data, Cell::empty_context())
    }

    /// Builds a new cell from the provided data using the specified cell context.
    #[inline]
    pub fn build_from_ext<T>(data: T, context: &dyn CellContext) -> Result<Cell, Error>
    where
        T: Store,
    {
        // Non-generic inner fn keeps the monomorphized outer shim tiny.
        fn build_from_ext_impl(data: &dyn Store, context: &dyn CellContext) -> Result<Cell, Error> {
            let mut builder = CellBuilder::new();
            ok!(data.store_into(&mut builder, context));
            builder.build_ext(context)
        }
        build_from_ext_impl(&data, context)
    }

    /// Builds a library reference cell for the given library hash,
    /// using an empty cell context.
    #[inline]
    pub fn build_library<T: AsRef<[u8; 32]>>(hash: &T) -> Cell {
        Self::build_library_ext(hash, Cell::empty_context())
    }

    /// Builds a library reference cell for the given library hash,
    /// using the specified cell context.
    #[inline]
    pub fn build_library_ext<T: AsRef<[u8; 32]>>(hash: &T, context: &dyn CellContext) -> Cell {
        fn build_library_ext_impl(
            hash: &[u8; 32],
            context: &dyn CellContext,
        ) -> Result<Cell, Error> {
            let mut b = CellBuilder::new();
            b.set_exotic(true);
            ok!(b.store_u8(CellType::LibraryReference.to_byte()));
            ok!(b.store_u256(HashBytes::wrap(hash)));
            b.build_ext(context)
        }
        // 8 tag bits + 256 hash bits always fit into a fresh builder;
        // NOTE(review): assumes the context finalizer accepts library cells,
        // otherwise this unwrap panics — confirm.
        build_library_ext_impl(hash.as_ref(), context).unwrap()
    }

    /// Creates an empty cell builder.
    pub const fn new() -> Self {
        Self {
            inner: CellDataBuilder::new(),
            is_exotic: false,
            references: ArrayVec::new(),
        }
    }

    /// Creates an ordinary cell builder prefilled with the first `bits` of `value`.
    pub fn from_raw_data(value: &[u8], bits: u16) -> Result<Self, Error> {
        Ok(Self {
            inner: ok!(CellDataBuilder::from_raw_data(value, bits)),
            is_exotic: false,
            references: ArrayVec::new(),
        })
    }

    /// Assembles a builder from its parts: exotic flag, data and references.
    #[inline]
    pub fn from_parts(is_exotic: bool, data: CellDataBuilder, refs: CellRefsBuilder) -> Self {
        Self {
            inner: data,
            is_exotic,
            references: refs.0,
        }
    }

    /// Splits the builder into its parts: exotic flag, data and references.
    #[inline]
    pub fn into_parts(self) -> (bool, CellDataBuilder, CellRefsBuilder) {
        (self.is_exotic, self.inner, CellRefsBuilder(self.references))
    }

    /// Returns a slice over the stored data and references.
    pub fn as_full_slice(&self) -> CellSlice<'_> {
        CellSlice::new_allow_exotic(IntermediateFullCell::wrap(self))
    }

    /// Returns the stored size: data bits and reference count.
    pub const fn size(&self) -> Size {
        Size {
            bits: self.inner.bit_len,
            refs: self.references.len() as u8,
        }
    }

    /// Returns the number of stored references.
    #[inline(always)]
    pub const fn size_refs(&self) -> u8 {
        self.references.len() as u8
    }

    /// Returns the remaining capacity: data bits and reference slots.
    pub const fn spare_capacity(&self) -> Size {
        Size {
            bits: self.inner.spare_capacity_bits(),
            refs: self.spare_capacity_refs(),
        }
    }

    /// Returns the number of references that can still be stored.
    #[inline]
    pub const fn spare_capacity_refs(&self) -> u8 {
        (MAX_REF_COUNT - self.references.len()) as u8
    }

    /// Returns `true` if `bits` more data bits and `refs` more references fit.
    #[inline]
    pub const fn has_capacity(&self, bits: u16, refs: u8) -> bool {
        self.inner.bit_len + bits <= MAX_BIT_LEN
            && self.references.len() + refs as usize <= MAX_REF_COUNT
    }

    /// Returns whether the cell will be built as exotic.
    #[inline]
    pub const fn is_exotic(&self) -> bool {
        self.is_exotic
    }

    /// Marks the cell as exotic (or ordinary).
    #[inline]
    pub fn set_exotic(&mut self, is_exotic: bool) {
        self.is_exotic = is_exotic;
    }

    /// Returns the stored child cells.
    #[inline]
    pub fn references(&self) -> &[Cell] {
        self.references.as_ref()
    }

    /// Appends a child cell, failing with [`Error::CellOverflow`]
    /// when all reference slots are occupied.
    pub fn store_reference(&mut self, cell: Cell) -> Result<(), Error> {
        if self.references.len() < MAX_REF_COUNT {
            // SAFETY: the length is checked right above.
            unsafe { self.references.push(cell) }
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Replaces all stored references with the given set.
    pub fn set_references(&mut self, refs: CellRefsBuilder) {
        self.references = refs.0;
    }

    /// Appends the data bits and references of another builder.
    pub fn store_builder(&mut self, builder: &Self) -> Result<(), Error> {
        if self.inner.bit_len + builder.inner.bit_len <= MAX_BIT_LEN
            && self.references.len() + builder.references.len() <= MAX_REF_COUNT
        {
            ok!(self.store_raw(&builder.inner.data, builder.inner.bit_len));
            for cell in builder.references.as_ref() {
                ok!(self.store_reference(cell.clone()));
            }
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Appends the remaining data bits and references of the given slice.
    #[inline]
    pub fn store_slice<'a, T>(&mut self, value: T) -> Result<(), Error>
    where
        T: AsRef<CellSlice<'a>>,
    {
        // Non-generic inner fn keeps the monomorphized outer shim tiny.
        fn store_slice_impl(builder: &mut CellBuilder, value: &CellSlice<'_>) -> Result<(), Error> {
            if builder.inner.bit_len + value.size_bits() <= MAX_BIT_LEN
                && builder.references.len() + value.size_refs() as usize <= MAX_REF_COUNT
            {
                ok!(builder.store_slice_data(value));
                for cell in value.references().cloned() {
                    ok!(builder.store_reference(cell));
                }
                Ok(())
            } else {
                Err(Error::CellOverflow)
            }
        }
        store_slice_impl(self, value.as_ref())
    }

    /// Finalizes the builder into a cell using the specified cell context.
    pub fn build_ext(mut self, context: &dyn CellContext) -> Result<Cell, Error> {
        debug_assert!(self.inner.bit_len <= MAX_BIT_LEN);
        debug_assert!(self.references.len() <= MAX_REF_COUNT);

        // Merge the level masks of all children.
        let mut children_mask = LevelMask::EMPTY;
        for child in self.references.as_ref() {
            let child = child.as_ref();
            children_mask |= child.descriptor().level_mask();
        }

        let is_exotic = self.is_exotic;

        // Exotic cells derive their level mask from the cell type tag
        // stored in the first data byte; ordinary cells inherit the
        // merged children mask.
        let level_mask = 'mask: {
            if is_exotic
                && self.inner.bit_len >= 8
                && let Some(ty) = CellType::from_byte_exotic(self.data[0])
            {
                match ty {
                    CellType::PrunedBranch => break 'mask LevelMask::new(self.data[1]),
                    CellType::MerkleProof | CellType::MerkleUpdate => {
                        break 'mask children_mask.virtualize(1);
                    }
                    CellType::LibraryReference => break 'mask LevelMask::EMPTY,
                    _ => {}
                };
            }

            children_mask
        };

        let d1 = CellDescriptor::compute_d1(level_mask, is_exotic, self.references.len() as u8);
        let d2 = CellDescriptor::compute_d2(self.inner.bit_len);

        // Non-byte-aligned data gets a completion tag: a single `1` bit
        // right after the payload, with the remaining low bits cleared.
        let rem = self.inner.bit_len % 8;
        let last_byte = (self.inner.bit_len / 8) as usize;
        if rem > 0 {
            // SAFETY: `bit_len <= MAX_BIT_LEN`, so the index is in bounds.
            let last_byte = unsafe { self.inner.data.get_unchecked_mut(last_byte) };

            let tag_mask: u8 = 1 << (7 - rem);
            let data_mask = !(tag_mask - 1);

            *last_byte = (*last_byte & data_mask) | tag_mask;
        }

        let byte_len = self.inner.bit_len.div_ceil(8);
        let data = &self.inner.data[..std::cmp::min(byte_len as usize, 128)];

        let cell_parts = CellParts {
            bit_len: self.inner.bit_len,
            descriptor: CellDescriptor { d1, d2 },
            children_mask,
            references: self.references,
            data,
        };
        context.finalize_cell(cell_parts)
    }

    /// Finalizes the builder into a cell using an empty cell context.
    pub fn build(self) -> Result<Cell, Error> {
        self.build_ext(Cell::empty_context())
    }
}
568
/// Helper that formats builder data as a bitstring
/// (see [`CellDataBuilder::display_data`]).
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct DisplayCellBuilderData<'a>(&'a CellDataBuilder);
573
impl std::fmt::Display for DisplayCellBuilderData<'_> {
    /// `Display` defaults to the lower-hex bitstring form.
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::LowerHex::fmt(self, f)
    }
}

impl std::fmt::LowerHex for DisplayCellBuilderData<'_> {
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::LowerHex::fmt(&self.0.as_bitstring(), f)
    }
}

impl std::fmt::UpperHex for DisplayCellBuilderData<'_> {
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::UpperHex::fmt(&self.0.as_bitstring(), f)
    }
}

impl std::fmt::Binary for DisplayCellBuilderData<'_> {
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Binary::fmt(&self.0.as_bitstring(), f)
    }
}
601
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for CellBuilder {
    /// Generates a random (possibly exotic) cell builder for fuzzing.
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        match u.arbitrary::<CellType>()? {
            CellType::Ordinary => {
                let bit_len = u.int_in_range(0..=MAX_BIT_LEN)?;
                let refs = u.int_in_range(0..=4)?;

                let mut b = CellBuilder::new();
                b.store_raw(u.bytes(bit_len.div_ceil(8) as _)?, bit_len)
                    .unwrap();

                let mut children = ArrayVec::<Cell, 4>::new();
                for i in 0..refs as u8 {
                    let cell = 'cell: {
                        // Occasionally reuse an already generated child
                        // so that DAG-shaped cell trees are covered too.
                        if i > 0 {
                            if let Some(i) = u.int_in_range(0..=i)?.checked_sub(1) {
                                break 'cell children.get(i).cloned().unwrap();
                            }
                        }

                        u.arbitrary::<Cell>()
                            .and_then(crate::arbitrary::check_max_depth)?
                    };

                    b.store_reference(cell.clone()).unwrap();

                    // SAFETY: at most 4 children are ever pushed.
                    unsafe { children.push(cell) };
                }

                Ok(b)
            }
            CellType::PrunedBranch => {
                // Pruned branch layout: type byte, level mask byte,
                // then one 256-bit hash and one 16-bit depth per level.
                let level_mask = LevelMask::new(u.int_in_range(0b001..=0b111)?);

                let mut b = CellBuilder::new();
                b.set_exotic(true);
                b.store_u16(u16::from_be_bytes([
                    CellType::PrunedBranch.to_byte(),
                    level_mask.to_byte(),
                ]))
                .unwrap();

                let level_count = level_mask.level() as usize;

                let hashes = 32 * level_count;
                b.store_raw(u.bytes(hashes)?, hashes as u16 * 8).unwrap();

                for _ in 0..level_count {
                    b.store_u16(u.int_in_range(0..=(u16::MAX - 1))?).unwrap();
                }

                Ok(b)
            }
            CellType::LibraryReference => {
                // Library reference layout: type byte + 256-bit library hash.
                let hash = u.bytes(32)?;

                let mut b = CellBuilder::new();
                b.set_exotic(true);
                b.store_u8(CellType::LibraryReference.to_byte()).unwrap();
                b.store_raw(hash, 256).unwrap();
                Ok(b)
            }
            CellType::MerkleProof => {
                let mut b = CellBuilder::new();
                u.arbitrary::<crate::merkle::MerkleProof>()?
                    .store_into(&mut b, Cell::empty_context())
                    .unwrap();
                Ok(b)
            }
            CellType::MerkleUpdate => {
                let mut b = CellBuilder::new();
                u.arbitrary::<crate::merkle::MerkleUpdate>()?
                    .store_into(&mut b, Cell::empty_context())
                    .unwrap();
                Ok(b)
            }
        }
    }

    fn size_hint(_: usize) -> (usize, Option<usize>) {
        (3, None)
    }
}
689
/// Builder for the data part of a cell.
#[derive(Clone)]
pub struct CellDataBuilder {
    // Data buffer; bits past `bit_len` are kept zero as an invariant.
    data: [u8; 128],
    // Number of bits written so far (at most `MAX_BIT_LEN`).
    bit_len: u16,
}
696
impl Default for CellDataBuilder {
    /// An empty data builder.
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}
703
704impl Eq for CellDataBuilder {}
705impl PartialEq for CellDataBuilder {
706 #[inline]
707 fn eq(&self, other: &Self) -> bool {
708 self.bit_len == other.bit_len && self.data == other.data
709 }
710}
711
712impl Ord for CellDataBuilder {
713 fn cmp(&self, other: &Self) -> Ordering {
714 match self.bit_len.cmp(&other.bit_len) {
715 Ordering::Equal => {}
716 ord => return ord,
717 }
718
719 self.data.cmp(&other.data)
721 }
722}
723
724impl PartialOrd for CellDataBuilder {
725 #[inline]
726 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
727 Some(self.cmp(other))
728 }
729}
730
731impl std::fmt::Debug for CellDataBuilder {
732 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
733 #[repr(transparent)]
734 struct Data<'a, T>(&'a T);
735
736 impl<T: std::fmt::Display> std::fmt::Debug for Data<'_, T> {
737 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
738 std::fmt::Display::fmt(self.0, f)
739 }
740 }
741
742 f.debug_struct("CellBuilder")
743 .field("data", &Data(&self.display_data()))
744 .field("bit_len", &self.bit_len)
745 .finish()
746 }
747}
748
/// Shared body of `store_u16`/`store_u32`/`store_u64`/`store_u128`:
/// appends `$bits` big-endian bits of `$value` at the current offset.
macro_rules! impl_store_uint {
    ($self:ident, $value:ident, bytes: $bytes:literal, bits: $bits:literal) => {
        if $self.bit_len + $bits <= MAX_BIT_LEN {
            let q = ($self.bit_len / 8) as usize;
            let r = $self.bit_len % 8;
            // SAFETY: the capacity check above keeps all writes within
            // the 128-byte buffer (see the debug_assert below).
            unsafe {
                let data_ptr = $self.data.as_mut_ptr().add(q);
                debug_assert!(q + $bytes + usize::from(r > 0) <= 128);
                if r == 0 {
                    // Byte-aligned: copy the big-endian bytes directly.
                    let value = $value.to_be_bytes();
                    std::ptr::copy_nonoverlapping(value.as_ptr(), data_ptr, $bytes);
                } else {
                    // Fill the rest of the current byte, then copy the
                    // remaining value shifted into the following bytes.
                    *data_ptr |= ($value >> ($bits - 8 + r)) as u8;
                    let value: [u8; $bytes] = ($value << (8 - r)).to_be_bytes();
                    std::ptr::copy_nonoverlapping(value.as_ptr(), data_ptr.add(1), $bytes);
                }
            };
            $self.bit_len += $bits;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    };
}
778
impl CellDataBuilder {
    /// Creates an empty cell data builder.
    pub const fn new() -> Self {
        Self {
            data: [0; 128],
            bit_len: 0,
        }
    }

    /// Creates a builder prefilled with the first `bits` of `value`.
    pub fn from_raw_data(value: &[u8], bits: u16) -> Result<Self, Error> {
        let mut res = Self::new();
        ok!(res.store_raw(value, bits));
        Ok(res)
    }

    /// Returns a slice over the stored data (no references).
    pub fn as_data_slice(&self) -> CellSlice<'_> {
        CellSlice::new_allow_exotic(IntermediateDataCell::wrap(self))
    }

    /// Returns an object which displays the stored data as a hex bitstring.
    #[inline]
    pub fn display_data(&self) -> DisplayCellBuilderData<'_> {
        DisplayCellBuilderData(self)
    }

    /// Returns the stored data as a bitstring.
    #[inline]
    pub fn as_bitstring(&self) -> Bitstring<'_> {
        Bitstring {
            bytes: &self.data,
            bit_len: self.bit_len,
        }
    }

    /// Returns the whole underlying buffer (bits past `bit_len` are zero).
    #[inline]
    pub const fn raw_data(&self) -> &[u8; 128] {
        &self.data
    }

    /// Returns the number of stored data bits.
    #[inline]
    pub const fn size_bits(&self) -> u16 {
        self.bit_len
    }

    /// Returns the number of data bits that can still be stored.
    #[inline]
    pub const fn spare_capacity_bits(&self) -> u16 {
        MAX_BIT_LEN - self.bit_len
    }

    /// Returns `true` if `bits` more data bits fit.
    #[inline]
    pub const fn has_capacity_bits(&self, bits: u16) -> bool {
        self.bit_len + bits <= MAX_BIT_LEN
    }

    /// Removes all stored bits and zeroes the buffer.
    pub fn clear_bits(&mut self) {
        self.data = [0; 128];
        self.bit_len = 0;
    }

    /// Removes the last `bits` stored bits, zeroing the freed bits to
    /// preserve the "bits past `bit_len` are zero" invariant.
    pub fn rewind_bits(&mut self, mut bits: u16) -> Result<(), Error> {
        if bits == 0 {
            return Ok(());
        }
        let Some(new_bit_len) = self.bit_len.checked_sub(bits) else {
            return Err(Error::CellUnderflow);
        };

        let q = (new_bit_len / 8) as usize;
        let r = new_bit_len % 8;

        // SAFETY: `new_bit_len <= bit_len <= MAX_BIT_LEN`, so every
        // touched byte lies within the 128-byte buffer.
        unsafe {
            let mut data_ptr = self.data.as_mut_ptr().add(q);

            if r != 0 {
                // Clear only the tail bits of the partially used byte.
                let shift = 8 - r;
                *data_ptr &= 0xff << shift;
                bits = bits.saturating_sub(shift);
                data_ptr = data_ptr.add(1);
            }

            // Zero the remaining freed bytes in bulk.
            std::ptr::write_bytes(data_ptr, 0, bits.div_ceil(8) as usize);
        }

        self.bit_len = new_bit_len;
        Ok(())
    }

    /// Appends `bits` zero bits.
    pub fn store_zeros(&mut self, bits: u16) -> Result<(), Error> {
        if self.bit_len + bits <= MAX_BIT_LEN {
            // Unused bits are already zero, so only the length changes.
            self.bit_len += bits;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Appends `bits` one bits.
    pub fn store_ones(&mut self, bits: u16) -> Result<(), Error> {
        self.store_raw(crate::cell::cell_impl::ALL_ONES_CELL.data(), bits)
    }

    /// Appends a single zero bit.
    pub fn store_bit_zero(&mut self) -> Result<(), Error> {
        // The target bit is already zero, so only the length changes.
        let fits = self.bit_len < MAX_BIT_LEN;
        self.bit_len += fits as u16;
        if fits {
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Appends a single one bit.
    pub fn store_bit_one(&mut self) -> Result<(), Error> {
        if self.bit_len < MAX_BIT_LEN {
            let q = (self.bit_len / 8) as usize;
            let r = self.bit_len % 8;
            // SAFETY: `bit_len < MAX_BIT_LEN`, so `q` is within the buffer.
            unsafe { *self.data.get_unchecked_mut(q) |= 1 << (7 - r) };
            self.bit_len += 1;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Appends a single bit.
    pub fn store_bit(&mut self, value: bool) -> Result<(), Error> {
        if value {
            self.store_bit_one()
        } else {
            self.store_bit_zero()
        }
    }

    /// Appends 8 bits (one byte).
    pub fn store_u8(&mut self, value: u8) -> Result<(), Error> {
        if self.bit_len + 8 <= MAX_BIT_LEN {
            let q = (self.bit_len / 8) as usize;
            let r = self.bit_len % 8;
            // SAFETY: the capacity check above keeps all writes within
            // the 128-byte buffer (see the debug_asserts below).
            unsafe {
                if r == 0 {
                    // Byte-aligned: write the value directly.
                    debug_assert!(q < 128);
                    *self.data.get_unchecked_mut(q) = value;
                } else {
                    // Split the value across two adjacent bytes.
                    debug_assert!(q + 1 < 128);
                    *self.data.get_unchecked_mut(q) |= value >> r;
                    *self.data.get_unchecked_mut(q + 1) = value << (8 - r);
                }
            };
            self.bit_len += 8;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Appends 16 bits (big-endian).
    pub fn store_u16(&mut self, value: u16) -> Result<(), Error> {
        impl_store_uint!(self, value, bytes: 2, bits: 16)
    }

    /// Appends 32 bits (big-endian).
    pub fn store_u32(&mut self, value: u32) -> Result<(), Error> {
        impl_store_uint!(self, value, bytes: 4, bits: 32)
    }

    /// Appends 64 bits (big-endian).
    pub fn store_u64(&mut self, value: u64) -> Result<(), Error> {
        impl_store_uint!(self, value, bytes: 8, bits: 64)
    }

    /// Appends 128 bits (big-endian).
    pub fn store_u128(&mut self, value: u128) -> Result<(), Error> {
        impl_store_uint!(self, value, bytes: 16, bits: 128)
    }

    /// Appends 256 bits (big-endian).
    #[inline]
    pub fn store_u256<T>(&mut self, value: &T) -> Result<(), Error>
    where
        T: AsRef<[u8; 32]>,
    {
        // Non-generic inner fn keeps the monomorphized outer shim tiny.
        fn store_u256_impl(builder: &mut CellDataBuilder, value: &[u8; 32]) -> Result<(), Error> {
            if builder.bit_len + 256 <= MAX_BIT_LEN {
                let q = (builder.bit_len / 8) as usize;
                let r = builder.bit_len % 8;
                // SAFETY: the capacity check above keeps all writes within
                // the 128-byte buffer (see the debug_assert below).
                unsafe {
                    let data_ptr = builder.data.as_mut_ptr().add(q);
                    debug_assert!(q + 32 + usize::from(r > 0) <= 128);
                    if r == 0 {
                        // Byte-aligned: plain copy.
                        std::ptr::copy_nonoverlapping(value.as_ptr(), data_ptr, 32);
                    } else {
                        // View the 32 bytes as two big-endian u128 words
                        // and shift them by the bit offset.
                        let [mut hi, mut lo]: [u128; 2] = std::mem::transmute_copy(value);

                        #[cfg(target_endian = "little")]
                        {
                            hi = hi.swap_bytes();
                            lo = lo.swap_bytes();
                        }

                        let shift = 8 - r;

                        *data_ptr |= (hi >> (128 - shift)) as u8;
                        let hi: [u8; 16] = ((hi << shift) | (lo >> (128 - shift))).to_be_bytes();
                        let lo: [u8; 16] = (lo << shift).to_be_bytes();
                        std::ptr::copy_nonoverlapping(hi.as_ptr(), data_ptr.add(1), 16);
                        std::ptr::copy_nonoverlapping(lo.as_ptr(), data_ptr.add(17), 16);
                    }
                };
                builder.bit_len += 256;
                Ok(())
            } else {
                Err(Error::CellOverflow)
            }
        }

        store_u256_impl(self, value.as_ref())
    }

    /// Appends the low `bits` bits of `value` (at most 8 payload bits;
    /// for larger `bits`, leading zero bits are stored first).
    pub fn store_small_uint(&mut self, mut value: u8, mut bits: u16) -> Result<(), Error> {
        if bits == 0 {
            return Ok(());
        }

        if self.bit_len + bits <= MAX_BIT_LEN {
            // Requests wider than 8 bits store leading zeros first.
            bits = if let Some(offset) = bits.checked_sub(8) {
                self.bit_len += offset;
                8
            } else {
                bits
            };

            // Move the payload into the most significant bits.
            value <<= 8 - bits;

            let q = (self.bit_len / 8) as usize;
            let r = self.bit_len % 8;
            // SAFETY: the capacity check above keeps all writes within
            // the 128-byte buffer (see the debug_asserts below).
            unsafe {
                debug_assert!(q < 128);
                if r == 0 {
                    *self.data.get_unchecked_mut(q) = value;
                } else {
                    *self.data.get_unchecked_mut(q) |= value >> r;
                    // Spill into the next byte only if the value crosses it.
                    if bits + r > 8 {
                        debug_assert!(q + 1 < 128);
                        *self.data.get_unchecked_mut(q + 1) = value << (8 - r);
                    }
                }
            };
            self.bit_len += bits;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Appends the low `bits` bits of `value` (at most 64 payload bits;
    /// for larger `bits`, leading zero bits are stored first).
    pub fn store_uint(&mut self, mut value: u64, mut bits: u16) -> Result<(), Error> {
        if bits == 0 {
            return Ok(());
        }

        if self.bit_len + bits <= MAX_BIT_LEN {
            // Requests wider than 64 bits store leading zeros first.
            bits = if let Some(offset) = bits.checked_sub(64) {
                self.bit_len += offset;
                64
            } else {
                bits
            };

            // Move the payload into the most significant bits.
            value <<= 64 - bits;

            let q = (self.bit_len / 8) as usize;
            let r = self.bit_len % 8;
            // SAFETY: the capacity check above keeps all writes within
            // the 128-byte buffer (see the debug_asserts below).
            unsafe {
                let data_ptr = self.data.as_mut_ptr().add(q);
                if r == 0 {
                    // Byte-aligned: copy whole big-endian bytes.
                    let byte_len = bits.div_ceil(8) as usize;
                    debug_assert!(q + byte_len <= 128);

                    let value = value.to_be_bytes();
                    std::ptr::copy_nonoverlapping(value.as_ptr(), data_ptr, byte_len);
                } else {
                    debug_assert!(q < 128);

                    // Fill the remainder of the current byte first.
                    let shift = 8 - r;
                    *data_ptr |= (value >> (64 - shift)) as u8;

                    if let Some(bits) = bits.checked_sub(shift)
                        && bits > 0
                    {
                        let byte_len = bits.div_ceil(8) as usize;
                        debug_assert!(q + 1 + byte_len <= 128);

                        let value: [u8; 8] = (value << shift).to_be_bytes();
                        std::ptr::copy_nonoverlapping(value.as_ptr(), data_ptr.add(1), byte_len);
                    }
                }
            }
            self.bit_len += bits;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }

    /// Appends `bits` of the given big integer, failing with
    /// [`Error::IntOverflow`] if the value does not fit.
    #[cfg(feature = "bigint")]
    pub fn store_bigint(
        &mut self,
        x: &num_bigint::BigInt,
        bits: u16,
        signed: bool,
    ) -> Result<(), Error> {
        use crate::util::BigIntExt;

        let int_bits = x.bitsize(signed);
        if int_bits > bits {
            return Err(Error::IntOverflow);
        }
        self.store_bigint_truncate(x, bits, signed)
    }

    /// Appends the low `bits` of the given big integer,
    /// truncating the value if it is too large.
    #[cfg(feature = "bigint")]
    pub fn store_bigint_truncate(
        &mut self,
        x: &num_bigint::BigInt,
        bits: u16,
        signed: bool,
    ) -> Result<(), Error> {
        use std::borrow::Cow;

        use num_traits::ToPrimitive;

        match x.to_u64() {
            Some(value) => self.store_uint(value, bits),
            None => {
                // Left-align the value on a byte boundary so that exactly
                // `bits` leading bits of the byte string are stored.
                let int = if !bits.is_multiple_of(8) {
                    let align = 8 - bits % 8;
                    Cow::Owned(x.clone() << align)
                } else {
                    Cow::Borrowed(x)
                };

                let minimal_bytes = bits.div_ceil(8) as usize;

                // Pad with 0x00 or 0xff depending on the sign bit.
                let (prefix, mut bytes) = if signed {
                    let bytes = int.to_signed_bytes_le();
                    (
                        bytes
                            .last()
                            .map(|first| (first >> 7) * 255)
                            .unwrap_or_default(),
                        bytes,
                    )
                } else {
                    (0, int.to_bytes_le().1)
                };
                bytes.resize(minimal_bytes, prefix);
                // Convert little-endian digits to the big-endian wire order.
                bytes.reverse();

                self.store_raw(&bytes, bits)
            }
        }
    }

    /// Appends `bits` of the given big unsigned integer, failing with
    /// [`Error::IntOverflow`] if the value does not fit.
    #[cfg(feature = "bigint")]
    pub fn store_biguint(
        &mut self,
        x: &num_bigint::BigUint,
        bits: u16,
        signed: bool,
    ) -> Result<(), Error> {
        use crate::util::BigIntExt;

        let int_bits = x.bitsize(signed);
        if int_bits > bits {
            return Err(Error::IntOverflow);
        }
        self.store_biguint_truncate(x, bits)
    }

    /// Appends the low `bits` of the given big unsigned integer,
    /// truncating the value if it is too large.
    #[cfg(feature = "bigint")]
    pub fn store_biguint_truncate(
        &mut self,
        x: &num_bigint::BigUint,
        bits: u16,
    ) -> Result<(), Error> {
        use std::borrow::Cow;

        use num_traits::ToPrimitive;

        match x.to_u64() {
            Some(value) => self.store_uint(value, bits),
            None => {
                // Left-align the value on a byte boundary so that exactly
                // `bits` leading bits of the byte string are stored.
                let int = if !bits.is_multiple_of(8) {
                    let align = 8 - bits % 8;
                    Cow::Owned(x.clone() << align)
                } else {
                    Cow::Borrowed(x)
                };

                let minimal_bytes = bits.div_ceil(8) as usize;

                let mut bytes = int.to_bytes_le();
                bytes.resize(minimal_bytes, 0);
                // Convert little-endian digits to the big-endian wire order.
                bytes.reverse();

                self.store_raw(&bytes, bits)
            }
        }
    }

    /// Appends a variable-length big integer: a `len_bits`-wide byte
    /// count followed by that many bytes of the value.
    #[cfg(feature = "bigint")]
    pub fn store_var_bigint(
        &mut self,
        x: &num_bigint::BigInt,
        len_bits: u16,
        signed: bool,
    ) -> Result<(), Error> {
        use crate::util::BigIntExt;

        let bits = x.bitsize(signed);
        let bytes = bits.div_ceil(8);
        if bytes >= (1 << len_bits) {
            return Err(Error::IntOverflow);
        }

        ok!(self.store_small_uint(bytes as u8, len_bits));
        self.store_bigint_truncate(x, bytes * 8, signed)
    }

    /// Appends a variable-length big unsigned integer: a `len_bits`-wide
    /// byte count followed by that many bytes of the value.
    #[cfg(feature = "bigint")]
    pub fn store_var_biguint(
        &mut self,
        x: &num_bigint::BigUint,
        len_bits: u16,
    ) -> Result<(), Error> {
        let bits = x.bits() as u16;
        let bytes = bits.div_ceil(8);
        if bytes >= (1 << len_bits) {
            return Err(Error::IntOverflow);
        }

        ok!(self.store_small_uint(bytes as u8, len_bits));
        self.store_biguint_truncate(x, bytes * 8)
    }

    /// Appends the first `bits` of `value`; if `bits` exceeds the bit
    /// length of `value`, the excess is stored as leading zero bits.
    pub fn store_raw(&mut self, value: &[u8], bits: u16) -> Result<(), Error> {
        store_raw(&mut self.data, &mut self.bit_len, value, bits)
    }

    /// Appends the data bits of the given cell (references are ignored).
    #[inline]
    pub fn store_cell_data<T>(&mut self, value: T) -> Result<(), Error>
    where
        T: AsRef<DynCell>,
    {
        // Non-generic inner fn keeps the monomorphized outer shim tiny.
        fn store_cell_data_impl(
            builder: &mut CellDataBuilder,
            value: &DynCell,
        ) -> Result<(), Error> {
            store_raw(
                &mut builder.data,
                &mut builder.bit_len,
                value.data(),
                value.bit_len(),
            )
        }
        store_cell_data_impl(self, value.as_ref())
    }

    /// Appends the remaining data bits of the given slice (references are ignored).
    #[inline]
    pub fn store_slice_data<'a, T>(&mut self, value: T) -> Result<(), Error>
    where
        T: AsRef<CellSlice<'a>>,
    {
        fn store_slice_data_impl(
            builder: &mut CellDataBuilder,
            value: &CellSlice<'_>,
        ) -> Result<(), Error> {
            let bits = value.size_bits();
            if builder.bit_len + bits <= MAX_BIT_LEN {
                // Scratch buffer for the (possibly unaligned) slice bits.
                // SAFETY: an array of `MaybeUninit<u8>` is valid uninitialized.
                let mut slice_data =
                    unsafe { MaybeUninit::<[MaybeUninit<u8>; 128]>::uninit().assume_init() };

                // SAFETY: `MaybeUninit<u8>` has the same layout as `u8`.
                // NOTE(review): this exposes uninitialized bytes behind a
                // `&mut [u8; 128]`; relies on `get_raw` writing before reading.
                let slice_data = unsafe {
                    &mut *(&mut slice_data as *mut [MaybeUninit<u8>; 128] as *mut [u8; 128])
                };

                let slice_data = ok!(value.get_raw(0, slice_data, bits));
                builder.store_raw(slice_data, bits)
            } else {
                Err(Error::CellOverflow)
            }
        }
        store_slice_data_impl(self, value.as_ref())
    }

    /// Prepends the first `bits` of `value` in front of the stored bits
    /// by rebuilding the buffer (value first, then the existing data).
    pub fn prepend_raw(&mut self, value: &[u8], bits: u16) -> Result<(), Error> {
        if bits == 0 {
            return Ok(());
        }

        fn store_raw_impl(
            data: &mut [u8; 128],
            bit_len: &mut u16,
            value: &[u8],
            bits: u16,
        ) -> Result<(), Error> {
            store_raw(data, bit_len, value, bits)
        }

        if self.bit_len + bits <= MAX_BIT_LEN {
            let mut data = [0; 128];
            let mut bit_len = 0;
            ok!(store_raw_impl(&mut data, &mut bit_len, value, bits));
            ok!(store_raw_impl(
                &mut data,
                &mut bit_len,
                &self.data,
                self.bit_len
            ));
            self.data = data;
            self.bit_len = bit_len;
            Ok(())
        } else {
            Err(Error::CellOverflow)
        }
    }
}
1385
/// Appends the first `bits` of `value` into `data` at offset `*bit_len`,
/// advancing `*bit_len`. When `bits` exceeds the bit length of `value`,
/// the excess is stored as leading zero bits (they are already zero in
/// the buffer, so only the offset is advanced).
#[inline]
fn store_raw(
    data: &mut [u8; 128],
    bit_len: &mut u16,
    value: &[u8],
    mut bits: u16,
) -> Result<(), Error> {
    if *bit_len + bits <= MAX_BIT_LEN {
        // Clamp `bits` to what `value` actually provides;
        // the skipped prefix stays zero.
        let max_bit_len = value.len().saturating_mul(8) as u16;
        bits = if let Some(offset) = bits.checked_sub(max_bit_len) {
            *bit_len += offset;
            max_bit_len
        } else {
            bits
        };

        if bits == 0 {
            return Ok(());
        }

        let q = (*bit_len / 8) as usize;
        let r = *bit_len % 8;
        // SAFETY: the capacity check above guarantees all writes stay
        // within the 128-byte buffer and all reads stay within `value`
        // (see the debug_asserts below).
        unsafe {
            let mut data_ptr = data.as_mut_ptr().add(q);
            let mut value_ptr = value.as_ptr();

            if r == 0 {
                // Byte-aligned destination: bulk copy, then clear the
                // unused low bits of the final byte.
                let byte_len = bits.div_ceil(8) as usize;
                debug_assert!(q + byte_len <= 128);
                debug_assert!(byte_len <= value.len());

                std::ptr::copy_nonoverlapping(value_ptr, data_ptr, byte_len);

                let bits_r = bits % 8;
                if bits_r != 0 {
                    *data_ptr.add(byte_len - 1) &= 0xff << (8 - bits_r);
                }
            } else {
                // Unaligned destination: each source byte is split across
                // two adjacent destination bytes.
                let byte_len = (bits + r).div_ceil(8) as usize - 1;
                let value_len = bits.div_ceil(8) as usize;
                debug_assert!(q + byte_len <= 128);
                debug_assert!(byte_len <= value_len && value_len <= value.len());

                let shift = 8 - r;
                for _ in 0..byte_len {
                    *data_ptr |= *value_ptr >> r;
                    data_ptr = data_ptr.add(1);
                    *data_ptr = *value_ptr << shift;
                    value_ptr = value_ptr.add(1);
                }
                // The last source byte may only contribute its high bits.
                if byte_len < value_len {
                    *data_ptr |= *value_ptr >> r;
                }

                // Clear the unused low bits of the final byte.
                let bits_r = (r + bits) % 8;
                if bits_r != 0 {
                    *data_ptr &= 0xff << (8 - bits_r);
                }
            }
        }
        *bit_len += bits;
        Ok(())
    } else {
        Err(Error::CellOverflow)
    }
}
1454
/// Builder for a cell's reference list, holding up to [`MAX_REF_COUNT`]
/// child cells in a fixed-capacity vector.
#[derive(Default)]
#[repr(transparent)]
pub struct CellRefsBuilder(ArrayVec<Cell, MAX_REF_COUNT>);
1461
1462impl CellRefsBuilder {
1463 pub fn store_reference(&mut self, cell: Cell) -> Result<(), Error> {
1466 if self.0.len() < MAX_REF_COUNT {
1467 unsafe { self.0.push(cell) }
1469 Ok(())
1470 } else {
1471 Err(Error::CellOverflow)
1472 }
1473 }
1474
1475 pub fn compute_level_mask(&self) -> LevelMask {
1477 let mut result = LevelMask::EMPTY;
1478 for child in self.0.as_ref() {
1479 result |= child.as_ref().level_mask();
1480 }
1481 result
1482 }
1483
1484 #[inline]
1486 pub fn len(&self) -> usize {
1487 self.0.len()
1488 }
1489
1490 #[inline]
1492 pub const fn is_empty(&self) -> bool {
1493 self.0.is_empty()
1494 }
1495
1496 pub fn iter(&self) -> std::slice::Iter<'_, Cell> {
1498 self.0.as_ref().iter()
1499 }
1500}
1501
impl IntoIterator for CellRefsBuilder {
    type Item = Cell;
    type IntoIter = ArrayVecIntoIter<Cell, MAX_REF_COUNT>;

    /// Consumes the builder, yielding the stored references by value.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.0.into_iter()
    }
}
1511
impl<'a> IntoIterator for &'a CellRefsBuilder {
    type Item = &'a Cell;
    type IntoIter = std::slice::Iter<'a, Cell>;

    /// Iterates over the stored references by shared reference.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.0.as_ref().iter()
    }
}
1521
/// A temporary cell view over a bare [`CellDataBuilder`]: data bits only,
/// no references and no computed hashes (calling `hash` panics).
/// Presumably used to read builder contents through cell-based APIs before
/// finalization — the wrapping call sites are outside this chunk; confirm.
#[repr(transparent)]
struct IntermediateDataCell(CellDataBuilder);
1524
impl IntermediateDataCell {
    /// Reinterprets a builder reference as an intermediate cell view.
    #[inline(always)]
    const fn wrap(value: &CellDataBuilder) -> &Self {
        // SAFETY: `Self` is `#[repr(transparent)]` over `CellDataBuilder`,
        // so both references have identical layout and validity invariants.
        unsafe { &*(value as *const CellDataBuilder as *const Self) }
    }
}
1532
impl CellImpl for IntermediateDataCell {
    fn untrack(self: CellInner<Self>) -> Cell {
        Cell(self)
    }

    /// Descriptor of a reference-less cell: `d1 = 0` (no refs, empty level
    /// mask), `d2` derived from the stored bit length.
    fn descriptor(&self) -> CellDescriptor {
        CellDescriptor {
            d1: 0,
            d2: CellDescriptor::compute_d2(self.0.bit_len),
        }
    }

    fn data(&self) -> &[u8] {
        self.0.raw_data()
    }

    fn bit_len(&self) -> u16 {
        self.0.bit_len
    }

    /// Intermediate data cells never have children.
    fn reference(&self, _: u8) -> Option<&DynCell> {
        None
    }

    fn reference_cloned(&self, _: u8) -> Option<Cell> {
        None
    }

    fn virtualize(&self) -> &DynCell {
        self
    }

    /// Hashes are not computed for intermediate cells by construction.
    fn hash(&self, _: u8) -> &HashBytes {
        panic!("Hash for an intermediate data cell is not defined");
    }

    /// No children, so the depth is always zero.
    fn depth(&self, _: u8) -> u16 {
        0
    }
}
1573
/// A temporary cell view over a whole [`CellBuilder`] (data bits and
/// references) with no computed hashes (calling `hash` panics).
/// Presumably used to read builder contents through cell-based APIs before
/// finalization — the wrapping call sites are outside this chunk; confirm.
#[repr(transparent)]
struct IntermediateFullCell(CellBuilder);
1576
impl IntermediateFullCell {
    /// Reinterprets a builder reference as an intermediate cell view.
    #[inline(always)]
    const fn wrap(value: &CellBuilder) -> &Self {
        // SAFETY: `Self` is `#[repr(transparent)]` over `CellBuilder`,
        // so both references have identical layout and validity invariants.
        unsafe { &*(value as *const CellBuilder as *const Self) }
    }
}
1584
1585impl CellImpl for IntermediateFullCell {
1586 fn untrack(self: CellInner<Self>) -> Cell {
1587 Cell(self)
1588 }
1589
1590 fn descriptor(&self) -> CellDescriptor {
1591 CellDescriptor {
1592 d1: CellDescriptor::compute_d1(LevelMask::EMPTY, false, self.0.references.len() as u8),
1593 d2: CellDescriptor::compute_d2(self.0.bit_len),
1594 }
1595 }
1596
1597 fn data(&self) -> &[u8] {
1598 self.0.raw_data()
1599 }
1600
1601 fn bit_len(&self) -> u16 {
1602 self.0.bit_len
1603 }
1604
1605 fn reference(&self, index: u8) -> Option<&DynCell> {
1606 match self.0.references.get(index) {
1607 Some(cell) => Some(cell.as_ref()),
1608 None => None,
1609 }
1610 }
1611
1612 fn reference_cloned(&self, index: u8) -> Option<Cell> {
1613 self.0.references.get(index).cloned()
1614 }
1615
1616 fn virtualize(&self) -> &DynCell {
1617 self
1618 }
1619
1620 fn hash(&self, _: u8) -> &HashBytes {
1621 panic!("Hash for an intermediate data cell is not defined");
1622 }
1623
1624 fn depth(&self, _: u8) -> u16 {
1625 0
1626 }
1627}
1628
#[cfg(test)]
mod tests {
    use super::*;

    // Cloned builders build equal cells; mutating the original afterwards
    // must not affect cells built from earlier clones.
    #[test]
    fn clone_builder() {
        let mut builder = CellBuilder::new();
        builder.store_u32(0xdeafbeaf).unwrap();
        let cell1 = builder.clone().build().unwrap();
        let cell2 = builder.clone().build().unwrap();
        assert_eq!(cell1.as_ref(), cell2.as_ref());

        builder.store_u32(0xb00b5).unwrap();
        let cell3 = builder.build().unwrap();
        assert_ne!(cell1.as_ref(), cell3.as_ref());
    }

    // Builder ordering/equality: data bits are compared first, then the
    // stored references (more refs compares greater).
    #[test]
    fn compare_builders() {
        let mut a = CellBuilder::new();
        a.store_u32(0xdeafbeaf).unwrap();

        let mut b = CellBuilder::new();
        b.store_u32(0xdeafbeaf).unwrap();

        assert_eq!(a, b);

        b.store_u8(1).unwrap();
        assert!(a < b);
        a.store_u8(2).unwrap();
        assert!(a > b);

        a.rewind_bits(8).unwrap();
        a.store_u8(1).unwrap();
        assert_eq!(a, b);

        let child_a = a.clone().build().unwrap();
        a.store_reference(child_a.clone()).unwrap();
        assert!(a > b);

        let child_b = b.clone().build().unwrap();
        b.store_reference(child_b).unwrap();
        assert_eq!(a, b);

        let child_b2 = b.clone().build().unwrap();
        a.store_reference(child_a).unwrap();
        b.store_reference(child_b2).unwrap();
        assert_ne!(a, b);
    }

    // `rewind_bits` must shrink the bit length, zero out the rewound bits in
    // the backing buffer, and underflow when rewinding past the start.
    #[test]
    fn rewind_builder() {
        let mut builder = CellBuilder::new();
        builder.store_u32(0xdeafbeaf).unwrap();
        assert_eq!(builder.size_bits(), 32);
        assert_eq!(builder.data[..4], 0xdeafbeaf_u32.to_be_bytes());

        builder.rewind_bits(5).unwrap();
        assert_eq!(builder.size_bits(), 27);
        assert_eq!(builder.data[..4], 0xdeafbea0_u32.to_be_bytes());

        builder.store_u32(0xdeafbeaf).unwrap();
        assert_eq!(builder.size_bits(), 32 + 27);
        assert_eq!(builder.data[..8], [
            0xde, 0xaf, 0xbe, 0xbb, 0xd5, 0xf7, 0xd5, 0xe0
        ]);
        builder.rewind_bits(32).unwrap();
        assert_eq!(builder.data[..8], [
            0xde, 0xaf, 0xbe, 0xa0, 0x00, 0x00, 0x00, 0x00
        ]);

        assert_eq!(builder.rewind_bits(32), Err(Error::CellUnderflow));

        builder.rewind_bits(27).unwrap();
        assert_eq!(builder.size_bits(), 0);
        assert_eq!(builder.data, [0u8; 128]);

        // Fill to the maximum: 1023 bits of ones leaves the last byte 0xfe.
        builder.store_raw(&[0xff; 128], MAX_BIT_LEN).unwrap();
        assert_eq!(builder.size_bits(), MAX_BIT_LEN);

        let mut target = [0xff; 128];
        target[127] = 0xfe;
        assert_eq!(builder.data, target);

        builder.rewind_bits(3).unwrap();
        assert_eq!(builder.size_bits(), MAX_BIT_LEN - 3);
        target[127] = 0xf0;
        assert_eq!(builder.data, target);

        builder.rewind_bits(8).unwrap();
        assert_eq!(builder.size_bits(), MAX_BIT_LEN - 3 - 8);
        target[126] = 0xf0;
        target[127] = 0x00;
        assert_eq!(builder.data, target);
    }

    // Exercises `store_raw` at every bit offset (0..8) and every length up
    // to a full cell, covering both the aligned and unaligned unsafe paths.
    // NOTE(review): ignored under miri, presumably because the exhaustive
    // loop is too slow there — confirm.
    #[test]
    #[cfg_attr(miri, ignore)] fn store_raw() {
        const ONES: &[u8] = &[0xff; 128];
        for offset in 0..8 {
            for bits in 0..=1016 {
                let mut builder = CellBuilder::new();
                builder.store_zeros(offset).unwrap();
                builder.store_raw(ONES, bits).unwrap();
                builder.build().unwrap();
            }
        }
    }

    // Smoke test: prepending a short bitstring before existing data builds
    // a valid cell.
    #[test]
    fn prepend_raw() {
        let mut builder = CellBuilder::new();
        builder.store_raw(&[0xde, 0xaf, 0xbe, 0xaf], 20).unwrap();
        builder.prepend_raw(&[0xaa, 0x55], 5).unwrap();
        let cell = builder.build().unwrap();
        println!("{}", cell.display_tree());
    }

    // Storing a slice that starts at an unaligned offset must re-align the
    // bits: after skipping the 3 leading zeros, the rebuilt cell's data is
    // exactly the original 256-bit hash.
    #[test]
    fn store_slice() -> anyhow::Result<()> {
        const SOME_HASH: &HashBytes = HashBytes::wrap(&[
            0xdf, 0x86, 0xce, 0xbc, 0xe8, 0xd5, 0xab, 0x0c, 0x69, 0xb4, 0xce, 0x33, 0xfe, 0x9b,
            0x0e, 0x2c, 0xdf, 0x69, 0xa3, 0xe1, 0x13, 0x7e, 0x64, 0x85, 0x6b, 0xbc, 0xfd, 0x39,
            0xe7, 0x9b, 0xc1, 0x6f,
        ]);

        let mut builder = CellBuilder::new();
        builder.store_zeros(3)?;
        builder.store_u256(SOME_HASH)?;
        let cell = builder.build()?;
        println!("{}", cell.display_tree());

        let mut builder = CellBuilder::new();
        let mut slice = cell.as_slice()?;
        slice.skip_first(3, 0)?;
        builder.store_slice(slice)?;
        let cell = builder.build()?;
        println!("{}", cell.display_tree());
        assert_eq!(cell.data(), SOME_HASH);

        Ok(())
    }
}