1use std::ops::{BitOr, BitOrAssign};
4use std::str::FromStr;
5
6pub use tycho_types_proc::{Load, Store};
7
8pub use self::builder::{
9 CellBuilder, CellDataBuilder, CellRefsBuilder, DisplayCellBuilderData, Store,
10};
11pub use self::cell_context::{CellContext, CellParts, LoadMode};
12#[cfg(not(feature = "sync"))]
13pub use self::cell_impl::rc::{Cell, CellInner, WeakCell};
14#[cfg(feature = "sync")]
15pub use self::cell_impl::sync::{Cell, CellInner, WeakCell};
16pub use self::cell_impl::{SafeDeleter, StaticCell, VirtualCellWrapper};
17pub use self::lazy::{Lazy, LazyExotic};
18pub use self::slice::{
19 CellSlice, CellSliceParts, CellSliceRange, DisplayCellSliceData, ExactSize, Load, LoadCell,
20};
21pub use self::usage_tree::{UsageTree, UsageTreeMode, UsageTreeWithSubtrees};
22use crate::error::{Error, ParseHashBytesError};
23use crate::util::Bitstring;
24
25mod cell_impl;
27
28mod cell_context;
30
31mod slice;
33
34mod builder;
36
37mod lazy;
39
40mod usage_tree;
41
#[cfg(feature = "sync")]
#[doc(hidden)]
mod __checks {
    use super::*;

    // Compile-time guards: with the `sync` feature the core cell types must be
    // sendable across threads. A failure here is a build error, not a test.
    assert_impl_all!(Cell: Send);
    assert_impl_all!(CellSlice: Send);
    assert_impl_all!(CellBuilder: Send);
}

/// Marker trait declaring that `Self` has the same in-memory representation as `T`.
///
/// Reflexive by the blanket impl below; other impls are expected to be provided
/// only where the layouts really match.
pub trait EquivalentRepr<T> {}

/// Every type trivially has the same representation as itself.
impl<T> EquivalentRepr<T> for T {}
57
/// Factory-style trait providing well-known cells and the cell context
/// used when no explicit context is supplied.
pub trait CellFamily: Sized {
    /// Context type returned by [`CellFamily::empty_context`].
    type EmptyCellContext: CellContext;

    /// Creates an owned empty ordinary cell (no data bits, no references).
    fn empty_cell() -> Cell;

    /// Returns a static reference to the empty ordinary cell.
    fn empty_cell_ref() -> &'static DynCell;

    /// Returns a static reference to the default (empty) cell context.
    fn empty_context() -> &'static Self::EmptyCellContext;

    /// Returns a static cell whose data is all zero bits.
    fn all_zeros_ref() -> &'static DynCell;

    /// Returns a static cell whose data is all one bits.
    fn all_ones_ref() -> &'static DynCell;

    /// Wraps the cell so that its level is decreased (virtualized) on access.
    fn virtualize(cell: Cell) -> Cell;
}
83
/// Type-erased cell implementation (single-threaded flavor).
#[cfg(not(feature = "sync"))]
pub type DynCell = dyn CellImpl;

/// Type-erased cell implementation; with the `sync` feature it must also be
/// `Send + Sync` so cells can be shared across threads.
#[cfg(feature = "sync")]
pub type DynCell = dyn CellImpl + Send + Sync;

impl AsRef<DynCell> for DynCell {
    #[inline(always)]
    fn as_ref(&self) -> &Self {
        // Identity: lets generic code take `impl AsRef<DynCell>` uniformly.
        self
    }
}

impl AsMut<DynCell> for DynCell {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut Self {
        // Identity, mutable counterpart of the impl above.
        self
    }
}
105
/// Object-safe interface that every concrete cell representation implements.
///
/// `DynCell` is an alias for `dyn CellImpl` (plus `Send + Sync` with the
/// `sync` feature); most user-facing helpers live on `impl DynCell` below.
pub trait CellImpl {
    /// Converts a (possibly usage-tracked) inner cell back into a plain [`Cell`].
    fn untrack(self: CellInner<Self>) -> Cell;

    /// Returns the two descriptor bytes (`d1`, `d2`) of this cell.
    fn descriptor(&self) -> CellDescriptor;

    /// Returns the raw data bytes; the final byte may be partially used
    /// (see [`CellImpl::bit_len`]).
    fn data(&self) -> &[u8];

    /// Returns the exact number of data bits stored in this cell.
    fn bit_len(&self) -> u16;

    /// Returns a borrowed child cell by index, or `None` if out of range.
    fn reference(&self, index: u8) -> Option<&DynCell>;

    /// Returns an owned child cell by index, or `None` if out of range.
    fn reference_cloned(&self, index: u8) -> Option<Cell>;

    /// Returns the representation hash of the child at `index`, if any.
    fn reference_repr_hash(&self, index: u8) -> Option<HashBytes>;

    /// Returns a view of this cell with its level virtualized.
    fn virtualize(&self) -> &DynCell;

    /// Returns the hash for the given level.
    fn hash(&self, level: u8) -> &HashBytes;

    /// Returns the depth for the given level.
    fn depth(&self, level: u8) -> u16;
}
162
impl DynCell {
    /// Computes the cell type from the descriptor bytes.
    #[inline]
    pub fn cell_type(&self) -> CellType {
        self.descriptor().cell_type()
    }

    /// Computes the level (number of set bits in the level mask).
    #[inline]
    pub fn level(&self) -> u8 {
        self.descriptor().level_mask().level()
    }

    /// Computes the level mask from the descriptor.
    #[inline]
    pub fn level_mask(&self) -> LevelMask {
        self.descriptor().level_mask()
    }

    /// Returns the number of child references (0..=4 per descriptor bits).
    #[inline]
    pub fn reference_count(&self) -> u8 {
        self.descriptor().reference_count()
    }

    /// Returns the child at `index` as a slice, or `Error::CellUnderflow`
    /// if the index is out of range (or slice creation itself fails).
    pub fn get_reference_as_slice(&self, index: u8) -> Result<CellSlice<'_>, Error> {
        match self.reference(index) {
            Some(cell) => CellSlice::new(cell),
            None => Err(Error::CellUnderflow),
        }
    }

    /// Returns whether this cell is not an ordinary cell.
    #[inline]
    pub fn is_exotic(&self) -> bool {
        self.descriptor().is_exotic()
    }

    /// Returns the representation hash (hash at the maximum level).
    #[inline]
    pub fn repr_hash(&self) -> &HashBytes {
        self.hash(LevelMask::MAX_LEVEL)
    }

    /// Returns the representation depth (depth at the maximum level).
    #[inline]
    pub fn repr_depth(&self) -> u16 {
        self.depth(LevelMask::MAX_LEVEL)
    }

    /// Returns `true` if the depth counter is saturated (`u16::MAX`)
    /// at any level present in the level mask.
    pub fn has_max_depth(&self) -> bool {
        for level in self.descriptor().level_mask() {
            if self.depth(level) == u16::MAX {
                return true;
            }
        }
        false
    }

    /// Returns whether this cell's representation hash equals the
    /// well-known hash of the empty ordinary cell.
    pub fn is_empty(&self) -> bool {
        self.hash(LevelMask::MAX_LEVEL) == EMPTY_CELL_HASH
    }

    /// Iterator over borrowed child cells.
    #[inline]
    pub fn references(&self) -> RefsIter<'_> {
        RefsIter {
            cell: self,
            max: self.reference_count(),
            index: 0,
        }
    }

    /// Iterator over the representation hashes of child cells.
    #[inline]
    pub fn reference_repr_hashes(&self) -> RefsHashesIter<'_> {
        RefsHashesIter {
            cell: self,
            max: self.reference_count(),
            index: 0,
        }
    }

    /// Creates a slice over this cell; fails for exotic cells
    /// (see `CellSlice::new` for the exact rules).
    #[inline]
    pub fn as_slice(&'_ self) -> Result<CellSlice<'_>, Error> {
        CellSlice::new(self)
    }

    /// Creates a slice over this cell without the exotic-cell check.
    #[inline]
    pub fn as_slice_allow_exotic(&'_ self) -> CellSlice<'_> {
        CellSlice::new_allow_exotic(self)
    }

    /// Computes storage stats over the unique cells of this subtree,
    /// visiting at most `limit` cells; `None` when the limit is exceeded
    /// (see `StorageStat::compute_for_cell`).
    pub fn compute_unique_stats(&self, limit: usize) -> Option<CellTreeStats> {
        StorageStat::compute_for_cell(self, limit)
    }

    /// Recursively touches the data of every cell in the subtree.
    /// The `data()` call has a side effect for usage-tracked cells
    /// (records the access); its return value is intentionally ignored.
    pub fn touch_recursive(&self) {
        self.data();
        for child in self.references() {
            child.touch_recursive();
        }
    }

    /// Wrapper that `Debug`-formats this cell as a tree root.
    #[inline]
    pub fn debug_root(&'_ self) -> DebugCell<'_> {
        DebugCell(self)
    }

    /// Wrapper that `Display`-formats this cell (root only, level 0).
    #[inline]
    pub fn display_root(&'_ self) -> DisplayCellRoot<'_> {
        DisplayCellRoot {
            cell: self,
            level: 0,
        }
    }

    /// Wrapper that `Display`-formats this cell together with its subtree.
    #[inline]
    pub fn display_tree(&'_ self) -> DisplayCellTree<'_> {
        DisplayCellTree(self)
    }

    /// Wrapper that formats only the cell data as a bitstring:
    /// `Display` renders it via `Bitstring`'s `Display` (hex-like form),
    /// `Binary` via its `Binary` form.
    #[inline]
    pub fn display_data(&self) -> impl std::fmt::Display + std::fmt::Binary + '_ {
        struct DisplayData<'a>(&'a DynCell);

        impl std::fmt::Display for DisplayData<'_> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                std::fmt::Display::fmt(
                    &Bitstring {
                        bytes: self.0.data(),
                        bit_len: self.0.bit_len(),
                    },
                    f,
                )
            }
        }

        impl std::fmt::Binary for DisplayData<'_> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                std::fmt::Binary::fmt(
                    &Bitstring {
                        bytes: self.0.data(),
                        bit_len: self.0.bit_len(),
                    },
                    f,
                )
            }
        }

        DisplayData(self)
    }

    /// Parses a `T` from the full slice of this (ordinary) cell.
    #[inline]
    pub fn parse<'a, T: Load<'a>>(&'a self) -> Result<T, Error> {
        T::load_from(&mut ok!(self.as_slice()))
    }

    /// Parses a `T` directly from this exotic cell; ordinary cells are
    /// rejected with `Error::UnexpectedOrdinaryCell`.
    #[inline]
    pub fn parse_exotic<'a, T: LoadCell<'a>>(&'a self) -> Result<T, Error> {
        if self.is_exotic() {
            T::load_from_cell(self)
        } else {
            Err(Error::UnexpectedOrdinaryCell)
        }
    }
}
365
impl std::fmt::Debug for DynCell {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Compact two-field debug output: `Cell { ty: ..., hash: ... }`.
        crate::util::debug_struct_field2_finish(
            f,
            "Cell",
            "ty",
            &self.cell_type(),
            "hash",
            self.repr_hash(),
        )
    }
}

impl Eq for DynCell {}

impl PartialEq<DynCell> for DynCell {
    #[inline]
    fn eq(&self, other: &DynCell) -> bool {
        // Cells are content-addressed: equal representation hash means
        // equal cell, regardless of the concrete implementation type.
        self.repr_hash() == other.repr_hash()
    }
}

#[cfg(feature = "serde")]
impl serde::Serialize for DynCell {
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        // Serialize as a BOC: base64 string for human-readable formats
        // (JSON, ...), raw bytes otherwise.
        let boc = crate::boc::Boc::encode(self);
        if serializer.is_human_readable() {
            serializer.serialize_str(&crate::util::encode_base64(boc))
        } else {
            serializer.serialize_bytes(&boc)
        }
    }
}
399
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for Cell {
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        // An arbitrary `CellBuilder` is expected to always hold a buildable
        // state, hence the `unwrap` on `build()`.
        Ok(u.arbitrary::<CellBuilder>()?.build().unwrap())
    }

    #[inline]
    fn size_hint(depth: usize) -> (usize, Option<usize>) {
        Self::try_size_hint(depth).unwrap_or_default()
    }

    #[inline]
    fn try_size_hint(
        depth: usize,
    ) -> Result<(usize, Option<usize>), arbitrary::MaxRecursionReached> {
        // Delegates to the builder since a cell is generated through one.
        <CellBuilder as arbitrary::Arbitrary>::try_size_hint(depth)
    }
}

impl From<Cell> for CellSliceParts {
    #[inline]
    fn from(value: Cell) -> Self {
        // Pair the owned cell with a range covering all of its bits and refs.
        (CellSliceRange::full(&value), value)
    }
}
426
/// Iterator over the borrowed child cells of a single cell.
///
/// Created by [`DynCell::references`].
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct RefsIter<'a> {
    /// Cell whose references are iterated.
    cell: &'a DynCell,
    /// Exclusive upper bound of the reference index (shrunk by `next_back`).
    max: u8,
    /// Next reference index to yield (advanced by `next`).
    index: u8,
}
434
435impl<'a> RefsIter<'a> {
436 #[inline]
438 pub fn cell(&self) -> &'a DynCell {
439 self.cell
440 }
441
442 #[inline]
444 pub fn peek(&self) -> Option<&'a DynCell> {
445 if self.index >= self.max {
446 None
447 } else {
448 self.cell.reference(self.index)
449 }
450 }
451
452 #[inline]
454 pub fn peek_cloned(&self) -> Option<Cell> {
455 if self.index >= self.max {
456 None
457 } else {
458 self.cell.reference_cloned(self.index)
459 }
460 }
461
462 #[inline]
464 pub fn peek_prev(&self) -> Option<&'a DynCell> {
465 if self.index > 0 {
466 self.cell.reference(self.index - 1)
467 } else {
468 None
469 }
470 }
471
472 #[inline]
474 pub fn peek_prev_cloned(&self) -> Option<Cell> {
475 if self.index > 0 {
476 self.cell.reference_cloned(self.index - 1)
477 } else {
478 None
479 }
480 }
481
482 #[inline]
484 pub fn cloned(self) -> ClonedRefsIter<'a> {
485 ClonedRefsIter { inner: self }
486 }
487}
488
489impl Clone for RefsIter<'_> {
490 #[inline]
491 fn clone(&self) -> Self {
492 Self {
493 cell: self.cell,
494 max: self.max,
495 index: self.index,
496 }
497 }
498}
499
500impl<'a> Iterator for RefsIter<'a> {
501 type Item = &'a DynCell;
502
503 #[inline]
504 fn next(&mut self) -> Option<Self::Item> {
505 if self.index >= self.max {
506 None
507 } else {
508 let child = self.cell.reference(self.index);
509 self.index += 1;
510 child
511 }
512 }
513
514 #[inline]
515 fn size_hint(&self) -> (usize, Option<usize>) {
516 let remaining = self.max.saturating_sub(self.index) as usize;
517 (remaining, Some(remaining))
518 }
519}
520
521impl DoubleEndedIterator for RefsIter<'_> {
522 #[inline]
523 fn next_back(&mut self) -> Option<Self::Item> {
524 if self.max > self.index {
525 self.max -= 1;
526 self.cell.reference(self.max)
527 } else {
528 None
529 }
530 }
531}
532
533impl ExactSizeIterator for RefsIter<'_> {
534 #[inline]
535 fn len(&self) -> usize {
536 self.size_hint().0
537 }
538}
539
/// Iterator over owned child cells of a single cell.
///
/// Created by [`RefsIter::cloned`].
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct ClonedRefsIter<'a> {
    /// Underlying borrowing iterator; cloning happens on each yield.
    inner: RefsIter<'a>,
}
545
546impl<'a> ClonedRefsIter<'a> {
547 #[inline]
549 pub fn cell(&self) -> &'a DynCell {
550 self.inner.cell
551 }
552
553 #[inline]
555 pub fn peek(&self) -> Option<Cell> {
556 self.inner.peek_cloned()
557 }
558
559 #[inline]
561 pub fn peek_prev(&self) -> Option<Cell> {
562 self.inner.peek_prev_cloned()
563 }
564}
565
566impl Clone for ClonedRefsIter<'_> {
567 #[inline]
568 fn clone(&self) -> Self {
569 Self {
570 inner: self.inner.clone(),
571 }
572 }
573}
574
575impl Iterator for ClonedRefsIter<'_> {
576 type Item = Cell;
577
578 #[inline]
579 fn next(&mut self) -> Option<Self::Item> {
580 if self.inner.index >= self.inner.max {
581 None
582 } else {
583 let child = self.inner.cell.reference_cloned(self.inner.index);
584 self.inner.index += 1;
585 child
586 }
587 }
588
589 #[inline]
590 fn size_hint(&self) -> (usize, Option<usize>) {
591 self.inner.size_hint()
592 }
593}
594
595impl DoubleEndedIterator for ClonedRefsIter<'_> {
596 #[inline]
597 fn next_back(&mut self) -> Option<Self::Item> {
598 if self.inner.max > self.inner.index {
599 self.inner.max -= 1;
600 self.inner.cell.reference_cloned(self.inner.max)
601 } else {
602 None
603 }
604 }
605}
606
607impl ExactSizeIterator for ClonedRefsIter<'_> {
608 #[inline]
609 fn len(&self) -> usize {
610 self.size_hint().0
611 }
612}
613
/// Iterator over the representation hashes of a cell's children.
///
/// Created by [`DynCell::reference_repr_hashes`].
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct RefsHashesIter<'a> {
    /// Cell whose child hashes are iterated.
    cell: &'a DynCell,
    /// Exclusive upper bound of the reference index (shrunk by `next_back`).
    max: u8,
    /// Next reference index to yield (advanced by `next`).
    index: u8,
}
621
622impl<'a> RefsHashesIter<'a> {
623 #[inline]
625 pub fn cell(&self) -> &'a DynCell {
626 self.cell
627 }
628
629 #[inline]
631 pub fn peek(&self) -> Option<HashBytes> {
632 if self.index >= self.max {
633 None
634 } else {
635 self.cell.reference_repr_hash(self.index)
636 }
637 }
638
639 #[inline]
641 pub fn peek_prev(&self) -> Option<HashBytes> {
642 if let Some(index) = self.index.checked_sub(1) {
643 self.cell.reference_repr_hash(index)
644 } else {
645 None
646 }
647 }
648}
649
650impl Clone for RefsHashesIter<'_> {
651 #[inline]
652 fn clone(&self) -> Self {
653 Self {
654 cell: self.cell,
655 max: self.max,
656 index: self.index,
657 }
658 }
659}
660
661impl Iterator for RefsHashesIter<'_> {
662 type Item = HashBytes;
663
664 #[inline]
665 fn next(&mut self) -> Option<Self::Item> {
666 if self.index >= self.max {
667 None
668 } else {
669 let hash = self.cell.reference_repr_hash(self.index);
670 self.index += 1;
671 hash
672 }
673 }
674
675 #[inline]
676 fn size_hint(&self) -> (usize, Option<usize>) {
677 let remaining = self.max.saturating_sub(self.index) as usize;
678 (remaining, Some(remaining))
679 }
680}
681
682impl DoubleEndedIterator for RefsHashesIter<'_> {
683 #[inline]
684 fn next_back(&mut self) -> Option<Self::Item> {
685 if self.max > self.index {
686 self.max -= 1;
687 self.cell.reference_repr_hash(self.max)
688 } else {
689 None
690 }
691 }
692}
693
694impl ExactSizeIterator for RefsHashesIter<'_> {
695 #[inline]
696 fn len(&self) -> usize {
697 self.size_hint().0
698 }
699}
700
/// A 256-bit hash value (32 bytes).
///
/// `repr(transparent)` guarantees the same layout as `[u8; 32]`,
/// which [`HashBytes::wrap`] relies on.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[repr(transparent)]
pub struct HashBytes(pub [u8; 32]);
705
impl HashBytes {
    /// The all-zero hash.
    pub const ZERO: Self = Self([0; 32]);

    /// Creates a hash from a byte slice.
    ///
    /// # Panics
    /// Panics if `slice` is not exactly 32 bytes long.
    #[inline]
    pub fn from_slice(slice: &[u8]) -> Self {
        Self(slice.try_into().expect("slice with incorrect length"))
    }

    /// Converts a non-negative big integer into a hash (big-endian,
    /// zero-padded). Returns `None` for negative values or values wider
    /// than 256 bits.
    #[cfg(feature = "bigint")]
    pub fn from_bigint(int: &num_bigint::BigInt) -> Option<Self> {
        if int.sign() == num_bigint::Sign::Minus {
            return None;
        }
        Self::from_biguint(int.magnitude())
    }

    /// Converts an unsigned big integer into a hash (big-endian,
    /// zero-padded). Returns `None` for values wider than 256 bits.
    #[cfg(feature = "bigint")]
    pub fn from_biguint(uint: &num_bigint::BigUint) -> Option<Self> {
        let mut bytes = uint.to_bytes_le();
        if bytes.len() > 32 {
            return None;
        }

        // Pad little-endian bytes to 32, then reverse into big-endian order.
        bytes.resize(32, 0);
        bytes.reverse();
        Some(Self::from_slice(&bytes))
    }

    /// Converts an unsigned big integer into a hash, keeping only the low
    /// 256 bits when the value is wider (resize then reverse drops the
    /// high-order little-endian bytes).
    #[cfg(feature = "bigint")]
    pub fn from_biguint_lossy(uint: &num_bigint::BigUint) -> Self {
        let mut bytes = uint.to_bytes_le();
        bytes.resize(32, 0);
        bytes.reverse();
        Self::from_slice(&bytes)
    }

    /// Reinterprets a borrowed byte array as a borrowed hash, at zero cost.
    #[inline(always)]
    pub const fn wrap(value: &[u8; 32]) -> &Self {
        // SAFETY: `HashBytes` is `repr(transparent)` over `[u8; 32]`,
        // so the pointer cast preserves layout, size and alignment.
        unsafe { &*(value as *const [u8; 32] as *const Self) }
    }

    /// Returns the hash as a byte slice.
    pub const fn as_slice(&self) -> &[u8] {
        self.0.as_slice()
    }

    /// Returns the hash as a mutable byte slice.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.0.as_mut_slice()
    }

    /// Returns the underlying byte array.
    pub const fn as_array(&self) -> &[u8; 32] {
        &self.0
    }

    /// Returns the underlying byte array mutably.
    pub fn as_mut_array(&mut self) -> &mut [u8; 32] {
        &mut self.0
    }

    /// Returns a raw pointer to the first byte.
    #[inline(always)]
    pub const fn as_ptr(&self) -> *const u8 {
        &self.0 as *const [u8] as *const u8
    }

    /// Returns the first 8 bytes of the hash.
    pub const fn first_chunk(&self) -> &[u8; 8] {
        match self.0.first_chunk::<8>() {
            Some(chunk) => chunk,
            // SAFETY: the array is 32 bytes long, so an 8-byte prefix
            // always exists and `first_chunk::<8>()` is always `Some`.
            None => unsafe { std::hint::unreachable_unchecked() },
        }
    }

    /// Returns `true` if all 32 bytes are zero.
    /// Written as a manual loop so it can stay a `const fn`.
    pub const fn is_zero(&self) -> bool {
        let mut i = 0;
        while i < 32 {
            if self.0[i] != 0 {
                return false;
            }
            i += 1;
        }
        true
    }

    /// Writes the hash as 64 hex characters using the given nibble table.
    #[inline(always)]
    fn fmt_hex(&self, f: &mut std::fmt::Formatter<'_>, table: &[u8; 16]) -> std::fmt::Result {
        let mut output = [0u8; 64];
        // Output buffer is exactly 2x input, so encoding cannot fail; the
        // result is ignored deliberately.
        crate::util::encode_to_hex_slice(&self.0, &mut output, table).ok();

        // SAFETY: hex encoding produces only ASCII characters,
        // which are always valid UTF-8.
        let output = unsafe { std::str::from_utf8_unchecked(&output) };
        f.write_str(output)
    }

    /// Interprets the hash as a big-endian unsigned big integer.
    #[cfg(feature = "bigint")]
    pub fn as_biguint(&self) -> num_bigint::BigUint {
        num_bigint::BigUint::from_bytes_be(&self.0)
    }

    /// Interprets the hash as a (non-negative) big-endian big integer.
    #[cfg(feature = "bigint")]
    pub fn as_bigint(&self) -> num_bigint::BigInt {
        self.as_biguint().into()
    }
}
833
impl Default for HashBytes {
    #[inline(always)]
    fn default() -> Self {
        // Default hash is all zeros.
        Self::ZERO
    }
}

impl AsRef<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn as_ref(&self) -> &[u8; 32] {
        &self.0
    }
}

impl AsRef<[u8]> for HashBytes {
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        self.0.as_ref()
    }
}

impl AsMut<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8; 32] {
        &mut self.0
    }
}

impl AsMut<[u8]> for HashBytes {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_mut()
    }
}

impl std::borrow::Borrow<[u8]> for HashBytes {
    #[inline(always)]
    fn borrow(&self) -> &[u8] {
        &self.0
    }
}

impl std::borrow::BorrowMut<[u8]> for HashBytes {
    #[inline(always)]
    fn borrow_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

// Allows `HashMap<[u8; 32], _>`-style maps to be queried with a `&HashBytes`
// (and vice versa), since the two types hash and compare identically.
impl std::borrow::Borrow<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn borrow(&self) -> &HashBytes {
        HashBytes::wrap(self)
    }
}
889
// Symmetric comparisons between `HashBytes` and raw byte arrays/slices,
// so either side of `==` may be the wrapper or the plain bytes.

impl PartialEq<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn eq(&self, other: &[u8; 32]) -> bool {
        &self.0 == other
    }
}

impl PartialEq<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn eq(&self, other: &HashBytes) -> bool {
        self == &other.0
    }
}

impl PartialEq<[u8]> for HashBytes {
    #[inline(always)]
    fn eq(&self, other: &[u8]) -> bool {
        self.0 == other
    }
}

impl PartialEq<HashBytes> for [u8] {
    #[inline(always)]
    fn eq(&self, other: &HashBytes) -> bool {
        self == other.0
    }
}
917
impl From<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn from(value: [u8; 32]) -> Self {
        Self(value)
    }
}

// Allows a finished SHA-256 digest to be used directly as a hash.
impl From<sha2::digest::Output<sha2::Sha256>> for HashBytes {
    #[inline(always)]
    fn from(value: sha2::digest::Output<sha2::Sha256>) -> Self {
        Self(value.into())
    }
}

// Allows a BLAKE3 digest to be used directly as a hash.
#[cfg(feature = "blake3")]
impl From<blake3::Hash> for HashBytes {
    #[inline(always)]
    fn from(value: blake3::Hash) -> Self {
        Self(value.into())
    }
}

impl From<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn from(value: HashBytes) -> Self {
        value.0
    }
}
946
947impl FromStr for HashBytes {
948 type Err = ParseHashBytesError;
949
950 fn from_str(s: &str) -> Result<Self, Self::Err> {
951 let mut result = Self::default();
952 match s.len() {
953 64 => {
954 if let Err(e) = hex::decode_to_slice(s, &mut result.0) {
955 return Err(ParseHashBytesError::InvalidHex(e));
956 }
957 }
958 66 => {
959 if let Err(e) = hex::decode_to_slice(&s[2..], &mut result.0) {
960 return Err(ParseHashBytesError::InvalidHex(e));
961 }
962 }
963 #[cfg(feature = "base64")]
964 44 => {
965 if let Err(e) = crate::util::decode_base64_slice(s, &mut result.0) {
966 return Err(ParseHashBytesError::InvalidBase64(e));
967 }
968 }
969 _ => return Err(ParseHashBytesError::UnexpectedStringLength),
970 }
971 Ok(result)
972 }
973}
974
impl std::fmt::Display for HashBytes {
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Canonical textual form is lowercase hex.
        std::fmt::LowerHex::fmt(self, f)
    }
}

impl std::fmt::LowerHex for HashBytes {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        Self::fmt_hex(self, f, crate::util::HEX_CHARS_LOWER)
    }
}

impl std::fmt::UpperHex for HashBytes {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        Self::fmt_hex(self, f, crate::util::HEX_CHARS_UPPER)
    }
}

impl std::fmt::Debug for HashBytes {
    #[inline(always)]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Debug output is identical to Display (lowercase hex).
        std::fmt::Display::fmt(self, f)
    }
}
1000
// Forwards indexing (`hash[i]`, `hash[a..b]`, ...) to the inner byte array
// for every index type the array itself supports.
impl<I> std::ops::Index<I> for HashBytes
where
    [u8; 32]: std::ops::Index<I>,
{
    type Output = <[u8; 32] as std::ops::Index<I>>::Output;

    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        std::ops::Index::index(&self.0, index)
    }
}

impl<I> std::ops::IndexMut<I> for HashBytes
where
    [u8; 32]: std::ops::IndexMut<I>,
{
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        std::ops::IndexMut::index_mut(&mut self.0, index)
    }
}
1022
// Random hash generation for both supported `rand` major versions;
// delegates to the standard distribution over `[u8; 32]`.

#[cfg(feature = "rand8")]
impl rand8::distributions::Distribution<HashBytes> for rand8::distributions::Standard {
    #[inline]
    fn sample<R: rand8::Rng + ?Sized>(&self, rng: &mut R) -> HashBytes {
        HashBytes(rand8::distributions::Standard.sample(rng))
    }
}

#[cfg(feature = "rand9")]
impl rand9::distr::Distribution<HashBytes> for rand9::distr::StandardUniform {
    #[inline]
    fn sample<R: rand9::Rng + ?Sized>(&self, rng: &mut R) -> HashBytes {
        HashBytes(rand9::distr::StandardUniform.sample(rng))
    }
}
1038
#[cfg(feature = "serde")]
impl serde::Serialize for HashBytes {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // Human-readable formats get 64 lowercase hex chars; binary formats
        // get the raw 32 bytes.
        if serializer.is_human_readable() {
            let mut output = [0u8; 64];
            // Buffer is exactly 2x input, so encoding cannot fail.
            crate::util::encode_to_hex_slice(&self.0, &mut output, crate::util::HEX_CHARS_LOWER)
                .ok();

            // SAFETY: hex encoding produces only ASCII, which is valid UTF-8.
            let output = unsafe { std::str::from_utf8_unchecked(&output) };
            serializer.serialize_str(output)
        } else {
            serializer.serialize_bytes(&self.0)
        }
    }
}
1058
#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for HashBytes {
    /// Deserializes either a 64-char hex string (human-readable formats)
    /// or a raw 32-byte array (binary formats).
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use ::serde::de::{Error, Visitor};

        /// Visitor for the hex-string representation.
        struct HashBytesHexVisitor;

        impl Visitor<'_> for HashBytesHexVisitor {
            type Value = HashBytes;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("hex-encoded byte array of size 32")
            }

            fn visit_str<E: Error>(self, value: &str) -> Result<Self::Value, E> {
                let mut result = HashBytes([0; 32]);
                match hex::decode_to_slice(value, &mut result.0) {
                    Ok(()) => Ok(result),
                    Err(_) => Err(Error::invalid_value(
                        serde::de::Unexpected::Str(value),
                        &self,
                    )),
                }
            }
        }

        /// Visitor for the raw-bytes representation.
        // NOTE: was `pub struct`, but a visibility qualifier on a
        // function-local item is meaningless and inconsistent with the
        // hex visitor above.
        struct HashBytesRawVisitor;

        impl Visitor<'_> for HashBytesRawVisitor {
            type Value = HashBytes;

            fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                // No interpolation needed, so plain `write_str` suffices.
                f.write_str("a byte array of size 32")
            }

            fn visit_bytes<E: Error>(self, v: &[u8]) -> Result<Self::Value, E> {
                v.try_into()
                    .map(HashBytes)
                    .map_err(|_e| Error::invalid_length(v.len(), &self))
            }
        }

        if deserializer.is_human_readable() {
            deserializer.deserialize_str(HashBytesHexVisitor)
        } else {
            deserializer.deserialize_bytes(HashBytesRawVisitor)
        }
    }
}
1111
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for HashBytes {
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        u.arbitrary().map(Self)
    }

    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        // Always consumes exactly 32 bytes of entropy.
        (32, Some(32))
    }
}

/// Representation hash of the empty ordinary cell
/// (compared against by [`DynCell::is_empty`]).
pub static EMPTY_CELL_HASH: &HashBytes = HashBytes::wrap(&[
    0x96, 0xa2, 0x96, 0xd2, 0x24, 0xf2, 0x85, 0xc6, 0x7b, 0xee, 0x93, 0xc3, 0x0f, 0x8a, 0x30, 0x91,
    0x57, 0xf0, 0xda, 0xa3, 0x5d, 0xc5, 0xb8, 0x7e, 0x41, 0x0b, 0x78, 0x63, 0x0a, 0x09, 0xcf, 0xc7,
]);
1130
/// Type of a cell, encoded in its descriptor bytes.
#[derive(Default, Debug, Copy, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum CellType {
    /// Plain data cell (the default).
    #[default]
    Ordinary,
    /// Exotic cell replacing a pruned subtree, keeping only hashes/depths.
    PrunedBranch,
    /// Exotic cell referencing a library cell by hash.
    LibraryReference,
    /// Exotic Merkle proof cell.
    MerkleProof,
    /// Exotic Merkle update cell.
    MerkleUpdate,
}
1148
impl CellType {
    /// Returns whether this is a Merkle proof or Merkle update cell.
    #[inline]
    pub const fn is_merkle(self) -> bool {
        matches!(self, Self::MerkleProof | Self::MerkleUpdate)
    }

    /// Returns whether this is any non-ordinary cell.
    #[inline]
    pub const fn is_exotic(self) -> bool {
        !matches!(self, Self::Ordinary)
    }

    /// Returns whether this is a pruned branch cell.
    #[inline]
    pub const fn is_pruned_branch(self) -> bool {
        matches!(self, Self::PrunedBranch)
    }

    /// Returns whether this is a library reference cell.
    #[inline(always)]
    pub const fn is_library(self) -> bool {
        matches!(self, Self::LibraryReference)
    }

    /// Encodes the cell type as its tag byte.
    /// `Ordinary` maps to `0xff` (no exotic tag); exotic types use 1..=4.
    #[inline]
    pub const fn to_byte(self) -> u8 {
        match self {
            CellType::Ordinary => 0xff,
            CellType::PrunedBranch => 1,
            CellType::LibraryReference => 2,
            CellType::MerkleProof => 3,
            CellType::MerkleUpdate => 4,
        }
    }

    /// Decodes a tag byte produced by [`CellType::to_byte`];
    /// `None` for unknown values.
    #[inline]
    pub const fn from_byte(byte: u8) -> Option<Self> {
        Some(match byte {
            0xff => CellType::Ordinary,
            1 => CellType::PrunedBranch,
            2 => CellType::LibraryReference,
            3 => CellType::MerkleProof,
            4 => CellType::MerkleUpdate,
            _ => return None,
        })
    }

    /// Decodes a tag byte that must denote an exotic type
    /// (ordinary `0xff` is rejected along with unknown values).
    #[inline]
    pub const fn from_byte_exotic(byte: u8) -> Option<Self> {
        Some(match byte {
            1 => CellType::PrunedBranch,
            2 => CellType::LibraryReference,
            3 => CellType::MerkleProof,
            4 => CellType::MerkleUpdate,
            _ => return None,
        })
    }
}
1211
impl From<CellType> for u8 {
    #[inline]
    fn from(cell_type: CellType) -> u8 {
        // Same encoding as `CellType::to_byte`.
        cell_type.to_byte()
    }
}

#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for CellType {
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        // Uniformly pick one of the five variants.
        static ALL_TYPES: [CellType; 5] = [
            CellType::Ordinary,
            CellType::PrunedBranch,
            CellType::LibraryReference,
            CellType::MerkleProof,
            CellType::MerkleUpdate,
        ];

        u.choose(&ALL_TYPES).copied()
    }

    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        // `choose` over a fixed array consumes one byte.
        (1, Some(1))
    }
}
1239
/// The two descriptor bytes of a cell, packing its level mask, exotic flag,
/// stored-hashes flag, reference count (`d1`) and data bit length (`d2`).
#[derive(Hash, Debug, Clone, Copy)]
#[repr(C)]
pub struct CellDescriptor {
    /// First descriptor byte: level mask (bits 5..=7), store-hashes flag
    /// (bit 4), exotic flag (bit 3), reference count (bits 0..=2).
    pub d1: u8,
    /// Second descriptor byte: encodes the data length in bits.
    pub d2: u8,
}
1249
impl CellDescriptor {
    /// Bits of `d1` holding the reference count.
    pub const REF_COUNT_MASK: u8 = 0b0000_0111;
    /// Bit of `d1` marking an exotic cell.
    pub const IS_EXOTIC_MASK: u8 = 0b0000_1000;
    /// Bit of `d1` marking that hashes are stored explicitly.
    pub const STORE_HASHES_MASK: u8 = 0b0001_0000;
    /// Bits of `d1` holding the level mask.
    pub const LEVEL_MASK: u8 = 0b1110_0000;

    /// Packs level mask, exotic flag and reference count into `d1`.
    #[inline(always)]
    pub const fn compute_d1(level_mask: LevelMask, is_exotic: bool, ref_count: u8) -> u8 {
        (level_mask.0 << 5) | ((is_exotic as u8) << 3) | (ref_count & Self::REF_COUNT_MASK)
    }

    /// Packs a bit length into `d2`: twice the whole-byte count, with the
    /// low bit set when the last byte is only partially used.
    #[inline(always)]
    pub const fn compute_d2(bit_len: u16) -> u8 {
        (((bit_len >> 2) as u8) & !0b1) | (!bit_len.is_multiple_of(8) as u8)
    }

    /// Builds a descriptor from raw `[d1, d2]` bytes.
    #[inline(always)]
    pub const fn new(bytes: [u8; 2]) -> Self {
        Self {
            d1: bytes[0],
            d2: bytes[1],
        }
    }

    /// Returns a descriptor with the level mask shifted down by `offset`
    /// (all other `d1` bits preserved).
    #[must_use]
    pub const fn virtualize(mut self, offset: u8) -> Self {
        let virtualized_mask = (self.d1 >> offset) & Self::LEVEL_MASK;
        self.d1 = virtualized_mask | (self.d1 & !Self::LEVEL_MASK);
        self
    }

    /// Derives the cell type from `d1`: non-exotic cells are ordinary;
    /// exotic cells are distinguished by reference count (0 refs: library
    /// vs pruned branch depending on the level mask; 1 ref: Merkle proof;
    /// otherwise: Merkle update).
    pub fn cell_type(self) -> CellType {
        if self.d1 & Self::IS_EXOTIC_MASK == 0 {
            CellType::Ordinary
        } else {
            match self.d1 & Self::REF_COUNT_MASK {
                0 => {
                    if self.d1 & Self::LEVEL_MASK == 0 {
                        CellType::LibraryReference
                    } else {
                        CellType::PrunedBranch
                    }
                }
                1 => CellType::MerkleProof,
                _ => CellType::MerkleUpdate,
            }
        }
    }

    /// Returns the number of child references.
    #[inline(always)]
    pub const fn reference_count(self) -> u8 {
        self.d1 & Self::REF_COUNT_MASK
    }

    /// Returns how many hashes this cell stores: pruned-branch-like cells
    /// (exotic, no refs, level > 0) store a single hash, everything else
    /// stores `level + 1`.
    #[inline(always)]
    pub const fn hash_count(self) -> u8 {
        let level = self.level_mask().level();
        if self.is_exotic() && self.reference_count() == 0 && level > 0 {
            1
        } else {
            level + 1
        }
    }

    /// Returns whether the exotic flag is set.
    #[inline(always)]
    pub const fn is_exotic(self) -> bool {
        self.d1 & Self::IS_EXOTIC_MASK != 0
    }

    /// Returns whether this descriptor denotes a pruned branch
    /// (exotic, no refs, non-empty level mask).
    #[inline(always)]
    pub const fn is_pruned_branch(self) -> bool {
        self.is_exotic() && self.reference_count() == 0 && !self.level_mask().is_empty()
    }

    /// Returns whether this descriptor denotes a library reference
    /// (exotic, no refs, empty level mask).
    #[inline(always)]
    pub const fn is_library(self) -> bool {
        self.is_exotic() && self.reference_count() == 0 && self.level_mask().is_empty()
    }

    /// Returns whether this descriptor denotes a Merkle cell
    /// (exotic with at least one reference).
    #[inline(always)]
    pub const fn is_merkle(self) -> bool {
        self.is_exotic() && self.reference_count() != 0
    }

    /// Returns whether `d1` matches the special "absent cell" pattern
    /// (exotic flag plus a full reference-count field).
    #[inline(always)]
    pub const fn is_absent(self) -> bool {
        self.d1 == (Self::REF_COUNT_MASK | Self::IS_EXOTIC_MASK)
    }

    /// Returns whether hashes are stored explicitly with the cell.
    #[inline(always)]
    pub const fn store_hashes(self) -> bool {
        self.d1 & Self::STORE_HASHES_MASK != 0
    }

    /// Extracts the level mask from the top three bits of `d1`.
    #[inline(always)]
    pub const fn level_mask(self) -> LevelMask {
        // SAFETY: `d1 >> 5` always fits in 3 bits, which is the invariant
        // `LevelMask::new_unchecked` requires.
        unsafe { LevelMask::new_unchecked(self.d1 >> 5) }
    }

    /// Returns whether the data ends on a byte boundary
    /// (low bit of `d2` clear).
    #[inline(always)]
    pub const fn is_aligned(self) -> bool {
        self.d2 & 1 == 0
    }

    /// Returns the number of data bytes, rounding a partial final byte up.
    #[inline(always)]
    pub const fn byte_len(self) -> u8 {
        (self.d2 & 1) + (self.d2 >> 1)
    }
}
1385
/// A 3-bit mask of the hash levels present in a cell.
///
/// Invariant: only the low three bits may be set.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct LevelMask(u8);

impl LevelMask {
    /// Mask with no levels set (level 0 only).
    pub const EMPTY: Self = LevelMask(0);
    /// Highest representable level.
    pub const MAX_LEVEL: u8 = 3;

    /// Creates a mask, truncating to the low three bits.
    #[inline(always)]
    pub const fn new(mask: u8) -> Self {
        Self(mask & 0b111)
    }

    /// Returns whether no level bits are set.
    #[inline(always)]
    pub const fn is_empty(self) -> bool {
        self.0 == 0
    }

    /// Creates a mask without truncation.
    ///
    /// # Safety
    /// The caller must guarantee `mask <= 0b111`; other methods
    /// (e.g. `level`) assume bits above the third are clear.
    #[inline(always)]
    pub const unsafe fn new_unchecked(mask: u8) -> Self {
        Self(mask)
    }

    /// Builds the canonical mask for a level: all bits up to and
    /// including `level` set (level 3+ saturates to `0b111`).
    #[inline(always)]
    pub const fn from_level(level: u8) -> Self {
        Self(match level {
            0 => 0,
            1 => 1,
            2 => 3,
            _ => 7,
        })
    }

    /// Returns the level, i.e. the number of set bits in the mask.
    pub const fn level(self) -> u8 {
        (self.0 & 1) + ((self.0 >> 1) & 1) + ((self.0 >> 2) & 1)
    }

    /// Returns the index of the hash to use for `level`: the number of
    /// mask bits at or below that level.
    pub const fn hash_index(self, level: u8) -> u8 {
        Self(self.0 & Self::from_level(level).0).level()
    }

    /// Returns whether `level` participates in this mask
    /// (level 0 is always present).
    pub const fn contains(self, level: u8) -> bool {
        level == 0 || self.0 & (1 << (level - 1)) != 0
    }

    /// Shifts the mask down by `offset` levels.
    #[inline(always)]
    #[must_use]
    pub const fn virtualize(self, offset: u8) -> Self {
        Self(self.0 >> offset)
    }

    /// Returns the raw mask byte.
    #[inline]
    pub const fn to_byte(self) -> u8 {
        self.0
    }
}
1460
impl IntoIterator for LevelMask {
    type Item = u8;
    type IntoIter = LevelMaskIter;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        // Shift the mask up one bit and set bit 0: the iterator yields the
        // position of each set bit, so level 0 is always produced first,
        // followed by each level present in the mask.
        LevelMaskIter(1 | (self.0 << 1))
    }
}

impl PartialEq<u8> for LevelMask {
    #[inline]
    fn eq(&self, other: &u8) -> bool {
        self.0 == *other
    }
}

impl BitOr for LevelMask {
    type Output = Self;

    #[inline(always)]
    fn bitor(self, rhs: Self) -> Self::Output {
        // Union of the two level sets.
        Self(self.0 | rhs.0)
    }
}

impl BitOrAssign for LevelMask {
    #[inline(always)]
    fn bitor_assign(&mut self, rhs: Self) {
        self.0 |= rhs.0;
    }
}

impl From<LevelMask> for u8 {
    #[inline(always)]
    fn from(m: LevelMask) -> u8 {
        m.0
    }
}

impl std::fmt::Debug for LevelMask {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Render as three binary digits, e.g. `011`.
        f.write_fmt(format_args!("{:03b}", self.0))
    }
}
1507
#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for LevelMask {
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        // Any 3-bit value is a valid mask.
        u.int_in_range(0b000..=0b111).map(Self::new)
    }

    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        (1, Some(1))
    }
}

/// Iterator over the level indices encoded in a [`LevelMask`]
/// (created by its `IntoIterator` impl; yields bit positions of set bits).
#[derive(Clone)]
pub struct LevelMaskIter(u8);

impl LevelMaskIter {
    /// Returns whether there are no more levels to yield.
    pub fn is_empty(&self) -> bool {
        self.0 == 0
    }
}
1530
1531impl Iterator for LevelMaskIter {
1532 type Item = u8;
1533
1534 #[inline]
1535 fn next(&mut self) -> Option<Self::Item> {
1536 (self.0 != 0).then(|| {
1537 let mask = self.0 & !(self.0 - 1);
1541
1542 self.0 &= !mask;
1544
1545 mask.trailing_zeros() as u8
1546 })
1547 }
1548
1549 fn size_hint(&self) -> (usize, Option<usize>) {
1550 let len = self.0.count_ones() as usize;
1551 (len, Some(len))
1552 }
1553}
1554
/// Size of a single cell's content: data bits plus references.
#[derive(Debug, Default, Clone, Copy, Eq, PartialEq)]
pub struct Size {
    /// Number of data bits.
    pub bits: u16,
    /// Number of child references.
    pub refs: u8,
}

impl Size {
    /// Zero bits, zero refs.
    pub const ZERO: Self = Self { bits: 0, refs: 0 };

    /// Exactly one data bit.
    pub const BIT: Self = Self { bits: 1, refs: 0 };

    /// Exactly one reference.
    pub const REF: Self = Self { bits: 0, refs: 1 };

    /// Maximum size a single cell may hold.
    pub const MAX: Self = Self {
        bits: MAX_BIT_LEN,
        refs: MAX_REF_COUNT as _,
    };

    /// Returns whether this size does not exceed the per-cell limits.
    #[inline]
    pub const fn fits_into_cell(&self) -> bool {
        self.bits <= MAX_BIT_LEN && self.refs <= MAX_REF_COUNT as _
    }

    /// Component-wise saturating addition.
    #[inline]
    pub const fn saturating_add(self, rhs: Self) -> Self {
        Self {
            bits: self.bits.saturating_add(rhs.bits),
            refs: self.refs.saturating_add(rhs.refs),
        }
    }

    /// Component-wise saturating subtraction.
    #[inline]
    pub const fn saturating_sub(self, rhs: Self) -> Self {
        Self {
            bits: self.bits.saturating_sub(rhs.bits),
            refs: self.refs.saturating_sub(rhs.refs),
        }
    }

    /// Returns whether both components are zero.
    pub const fn is_zero(self) -> bool {
        self.bits == 0 && self.refs == 0
    }
}
1611
1612impl From<Size> for CellTreeStats {
1613 #[inline]
1614 fn from(value: Size) -> Self {
1615 Self {
1616 bit_count: value.bits as _,
1617 cell_count: value.refs as _,
1618 }
1619 }
1620}
1621
1622impl std::ops::Add for Size {
1623 type Output = Self;
1624
1625 #[inline]
1626 fn add(mut self, rhs: Self) -> Self::Output {
1627 self += rhs;
1628 self
1629 }
1630}
1631
1632impl std::ops::AddAssign for Size {
1633 #[inline]
1634 fn add_assign(&mut self, rhs: Self) {
1635 self.bits += rhs.bits;
1636 self.refs += rhs.refs;
1637 }
1638}
1639
1640impl std::ops::Sub for Size {
1641 type Output = Self;
1642
1643 #[inline]
1644 fn sub(mut self, rhs: Self) -> Self::Output {
1645 self -= rhs;
1646 self
1647 }
1648}
1649
1650impl std::ops::SubAssign for Size {
1651 #[inline]
1652 fn sub_assign(&mut self, rhs: Self) {
1653 self.bits -= rhs.bits;
1654 self.refs -= rhs.refs;
1655 }
1656}
1657
/// Aggregated statistics for a whole cell tree.
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub struct CellTreeStats {
    /// Total number of data bits.
    pub bit_count: u64,
    /// Total number of cells.
    pub cell_count: u64,
}
1666
1667impl CellTreeStats {
1668 pub const ZERO: Self = CellTreeStats {
1670 bit_count: 0,
1671 cell_count: 0,
1672 };
1673}
1674
1675impl std::ops::Add for CellTreeStats {
1676 type Output = Self;
1677
1678 #[inline]
1679 fn add(self, rhs: Self) -> Self::Output {
1680 Self {
1681 bit_count: self.bit_count.saturating_add(rhs.bit_count),
1682 cell_count: self.cell_count.saturating_add(rhs.cell_count),
1683 }
1684 }
1685}
1686
1687impl std::ops::AddAssign for CellTreeStats {
1688 #[inline]
1689 fn add_assign(&mut self, rhs: Self) {
1690 self.bit_count = self.bit_count.saturating_add(rhs.bit_count);
1691 self.cell_count = self.cell_count.saturating_add(rhs.cell_count);
1692 }
1693}
1694
1695impl std::ops::Add<Size> for CellTreeStats {
1696 type Output = Self;
1697
1698 #[inline]
1699 fn add(self, rhs: Size) -> Self::Output {
1700 Self {
1701 bit_count: self.bit_count.saturating_add(rhs.bits as _),
1702 cell_count: self.cell_count.saturating_add(rhs.refs as _),
1703 }
1704 }
1705}
1706
1707impl std::iter::Sum for CellTreeStats {
1708 #[inline]
1709 fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
1710 let mut res = Self::ZERO;
1711 for item in iter {
1712 res += item;
1713 }
1714 res
1715 }
1716}
1717
1718impl std::ops::AddAssign<Size> for CellTreeStats {
1719 fn add_assign(&mut self, rhs: Size) {
1720 self.bit_count = self.bit_count.saturating_add(rhs.bits as _);
1721 self.cell_count = self.cell_count.saturating_add(rhs.refs as _);
1722 }
1723}
1724
1725impl std::ops::Sub for CellTreeStats {
1726 type Output = Self;
1727
1728 #[inline]
1729 fn sub(mut self, rhs: Self) -> Self::Output {
1730 self -= rhs;
1731 self
1732 }
1733}
1734
1735impl std::ops::SubAssign for CellTreeStats {
1736 #[inline]
1737 fn sub_assign(&mut self, rhs: Self) {
1738 self.bit_count = self.bit_count.saturating_sub(rhs.bit_count);
1739 self.cell_count = self.cell_count.saturating_sub(rhs.cell_count);
1740 }
1741}
1742
1743impl std::ops::SubAssign<Size> for CellTreeStats {
1744 #[inline]
1745 fn sub_assign(&mut self, rhs: Size) {
1746 self.bit_count = self.bit_count.saturating_sub(rhs.bits as _);
1747 self.cell_count = self.cell_count.saturating_sub(rhs.refs as _);
1748 }
1749}
1750
/// Incremental cell-tree statistics collector that deduplicates cells by
/// representation hash and aborts once a unique-cell limit is reached.
pub struct StorageStat<'a> {
    // Repr hashes of cells already counted; each unique cell is counted once.
    visited: ahash::HashSet<&'a HashBytes>,
    // DFS stack of pending child-reference iterators.
    stack: Vec<RefsIter<'a>>,
    // Running totals accumulated across `add_cell`/`add_slice` calls.
    stats: CellTreeStats,
    // Maximum number of unique cells to count before traversal gives up.
    limit: usize,
}
1761
impl<'a> StorageStat<'a> {
    /// Computes stats for the slice's own data bits plus every unique cell
    /// reachable through its references.
    ///
    /// Returns `None` if the traversal hit the `limit` of unique cells.
    pub fn compute_for_slice<'b: 'a>(
        slice: &'a CellSlice<'b>,
        limit: usize,
    ) -> Option<CellTreeStats> {
        let mut this = Self::with_limit(limit);
        if this.add_slice(slice) {
            Some(this.stats)
        } else {
            None
        }
    }

    /// Computes stats for `cell` and every unique cell reachable from it.
    ///
    /// Returns `None` if the traversal hit the `limit` of unique cells.
    pub fn compute_for_cell(cell: &'a DynCell, limit: usize) -> Option<CellTreeStats> {
        let mut this = Self::with_limit(limit);
        if this.add_cell(cell) {
            Some(this.stats)
        } else {
            None
        }
    }

    /// Creates an empty collector that stops after `limit` unique cells.
    pub fn with_limit(limit: usize) -> Self {
        Self {
            visited: Default::default(),
            stack: Vec::new(),
            stats: CellTreeStats::ZERO,
            limit,
        }
    }

    /// Creates an empty collector with no practical cell limit.
    pub fn unlimited() -> Self {
        Self::with_limit(usize::MAX)
    }

    /// Returns the totals accumulated so far.
    pub fn stats(&self) -> CellTreeStats {
        self.stats
    }

    /// Adds `cell` and its reachable subtree to the totals.
    ///
    /// Returns `false` if the cell limit was hit; an already-visited root is
    /// treated as success without recounting.
    pub fn add_cell(&mut self, cell: &'a DynCell) -> bool {
        if !self.visited.insert(cell.repr_hash()) {
            return true;
        }

        // NOTE(review): the root itself is counted without a limit check —
        // only descendants are limit-checked in `reduce_stack`; confirm this
        // asymmetry is intended.
        self.stats.bit_count += cell.bit_len() as u64;
        self.stats.cell_count += 1;

        // A fresh traversal per call; `visited`/`stats` still accumulate
        // across calls, so shared subtrees are never double-counted.
        self.stack.clear();
        self.stack.push(cell.references());
        self.reduce_stack()
    }

    /// Adds the slice's data bits and its reachable subtree to the totals.
    ///
    /// The slice itself is not a cell: it contributes bits but no cell count
    /// and is not recorded in `visited`. Returns `false` on hitting the limit.
    pub fn add_slice(&mut self, slice: &CellSlice<'a>) -> bool {
        self.stats.bit_count += slice.size_bits() as u64;

        self.stack.clear();
        self.stack.push(slice.references());
        self.reduce_stack()
    }

    /// Drains the DFS stack, counting each newly seen cell.
    ///
    /// Returns `false` as soon as `cell_count` would reach `limit`.
    fn reduce_stack(&mut self) -> bool {
        'outer: while let Some(item) = self.stack.last_mut() {
            for cell in item.by_ref() {
                // Skip cells already counted in this or a previous call.
                if !self.visited.insert(cell.repr_hash()) {
                    continue;
                }

                if self.stats.cell_count >= self.limit as u64 {
                    return false;
                }

                self.stats.bit_count += cell.bit_len() as u64;
                self.stats.cell_count += 1;

                // Descend first (depth-first); resume this iterator later.
                let next = cell.references();
                if next.max > 0 {
                    self.stack.push(next);
                    continue 'outer;
                }
            }

            // Current level exhausted; pop back to the parent iterator.
            self.stack.pop();
        }

        true
    }
}
1867
/// Wrapper that forwards a cell's `Debug` representation.
#[derive(Clone, Copy)]
pub struct DebugCell<'a>(&'a DynCell);
1871
1872impl std::fmt::Debug for DebugCell<'_> {
1873 #[inline]
1874 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1875 self.0.fmt(f)
1876 }
1877}
1878
/// Display helper that prints a single cell, indented by its tree level.
#[derive(Clone, Copy)]
pub struct DisplayCellRoot<'a> {
    cell: &'a DynCell,
    // Nesting depth; each level adds two spaces of indentation.
    level: usize,
}
1885
1886impl std::fmt::Display for DisplayCellRoot<'_> {
1887 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1888 let data = hex::encode(self.cell.data());
1890
1891 let indent = self.level * 2;
1892 if f.alternate() {
1893 f.write_fmt(format_args!("{:indent$}{data}\n", ""))
1894 } else {
1895 let repr_depth = self.cell.depth(LevelMask::MAX_LEVEL);
1896 let repr_hash = self.cell.repr_hash();
1897 let descriptor = self.cell.descriptor();
1898 f.write_fmt(format_args!(
1899 "{:indent$}{:?}: {data}\n{:indent$}bits: {:>4}, refs: {}, l: {:?}, depth: {}, hash: {}\n",
1900 "",
1901 descriptor.cell_type(),
1902 "",
1903 self.cell.bit_len(),
1904 descriptor.reference_count(),
1905 descriptor.level_mask(),
1906 repr_depth,
1907 repr_hash,
1908 ))
1909 }
1910 }
1911}
1912
/// Display helper that prints a cell and all of its descendants.
#[derive(Clone, Copy)]
pub struct DisplayCellTree<'a>(&'a DynCell);
1916
1917impl std::fmt::Display for DisplayCellTree<'_> {
1918 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1919 let mut stack = vec![(0, self.0)];
1920
1921 while let Some((level, cell)) = stack.pop() {
1922 ok!(std::fmt::Display::fmt(&DisplayCellRoot { cell, level }, f));
1923
1924 let reference_count = cell.reference_count();
1925 for i in (0..reference_count).rev() {
1926 if let Some(child) = cell.reference(i) {
1927 stack.push((level + 1, child));
1928 }
1929 }
1930 }
1931
1932 Ok(())
1933 }
1934}
1935
/// Maximum number of data bits a single cell can store.
pub const MAX_BIT_LEN: u16 = 1023;
/// Maximum number of child references a single cell can store.
pub const MAX_REF_COUNT: usize = 4;
1940
#[cfg(test)]
mod tests {
    use super::*;

    // `LevelMask::level` must equal the popcount of the 3-bit mask.
    #[test]
    fn correct_level() {
        const LEVEL: [u8; 8] = [0, 1, 1, 2, 1, 2, 2, 3];

        for mask in 0b000..=0b111 {
            assert_eq!(LevelMask(mask).level(), LEVEL[mask as usize]);
        }
    }

    // Virtualizing a descriptor must only change its level mask; every other
    // derived property stays intact.
    #[test]
    fn virtualize_descriptor() {
        let level_mask = LevelMask(0b111);
        let desc = CellDescriptor::new([
            CellDescriptor::compute_d1(level_mask, false, 3),
            CellDescriptor::compute_d2(123),
        ]);

        assert_eq!(desc.level_mask(), level_mask);

        for i in 0..3 {
            let v_desc = desc.virtualize(i);

            assert_eq!(v_desc.cell_type(), desc.cell_type());
            assert_eq!(v_desc.reference_count(), desc.reference_count());
            assert_eq!(v_desc.is_exotic(), desc.is_exotic());
            assert_eq!(v_desc.store_hashes(), desc.store_hashes());
            assert_eq!(v_desc.is_aligned(), desc.is_aligned());
            assert_eq!(v_desc.byte_len(), desc.byte_len());

            assert_eq!(v_desc.level_mask(), level_mask.virtualize(i));
        }
    }

    // `hash_index` for every (mask, level) pair against a precomputed table.
    #[test]
    fn correct_hash_index() {
        // Rows indexed by mask (0b000..=0b111), columns by level (0..=3).
        const HASH_INDEX_TABLE: [[u8; 4]; 8] = [
            [0, 0, 0, 0],
            [0, 1, 1, 1],
            [0, 0, 1, 1],
            [0, 1, 2, 2],
            [0, 0, 0, 1],
            [0, 1, 1, 2],
            [0, 0, 1, 2],
            [0, 1, 2, 3],
        ];

        for mask in 0b000..=0b111 {
            let mask = LevelMask(mask);

            for i in 0..=3 {
                let hash_index = mask.hash_index(i);
                assert_eq!(hash_index, HASH_INDEX_TABLE[mask.0 as usize][i as usize]);
            }
        }
    }

    // Stacked pruned branches: each `virtualize()` call peels one level, and
    // once the original cell is reached further calls are no-ops.
    #[test]
    fn ultra_virtual_cell_by_ref() {
        let cell = Cell::empty_cell();

        let pruned1 =
            crate::merkle::make_pruned_branch(cell.as_ref(), 0, Cell::empty_context()).unwrap();

        let pruned2 =
            crate::merkle::make_pruned_branch(pruned1.as_ref(), 1, Cell::empty_context()).unwrap();

        let pruned3 =
            crate::merkle::make_pruned_branch(pruned2.as_ref(), 2, Cell::empty_context()).unwrap();

        // Level 3 -> looks like pruned2.
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), pruned2.repr_hash());
        assert_eq!(pruned3.repr_depth(), pruned2.repr_depth());

        // Level 2 -> looks like pruned1.
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), pruned1.repr_hash());
        assert_eq!(pruned3.repr_depth(), pruned1.repr_depth());

        // Level 1 -> looks like the original cell.
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());

        // Further virtualization is idempotent.
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());

        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());
    }

    // Versioned TLB structs: the tag selects which `since_tag` fields are
    // (de)serialized, and round-trips must be lossless for every version.
    #[test]
    fn versioned_store_load() {
        #[derive(Debug, Clone, Copy, Eq, PartialEq, Store, Load)]
        #[tlb(tag = ["#12", "#34", "#56"])]
        struct Versioned {
            old_field1: u32,
            #[tlb(since_tag = 1)]
            new_field1: u32,
            old_field2: bool,
            #[tlb(since_tag = 2)]
            new_field2: bool,
        }

        // Oldest layout: tag 0x12, `since_tag` fields omitted.
        let old = Versioned {
            old_field1: 123,
            new_field1: 0,
            old_field2: true,
            new_field2: false,
        };
        let cell = CellBuilder::build_from(old).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x12);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert!(slice.load_bit().unwrap());
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            old,
        );

        // Middle layout: tag 0x34 adds `new_field1`.
        let new = Versioned {
            old_field1: 123,
            new_field1: 456,
            old_field2: true,
            new_field2: false,
        };
        let cell = CellBuilder::build_from(new).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 32 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x34);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert_eq!(slice.load_u32().unwrap(), 456);
            assert!(slice.load_bit().unwrap());
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            new
        );

        // Newest layout: tag 0x56 adds `new_field2` as well.
        let too_new = Versioned {
            old_field1: 123,
            new_field1: 0,
            old_field2: true,
            new_field2: true,
        };
        let cell = CellBuilder::build_from(too_new).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 32 + 1 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x56);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert_eq!(slice.load_u32().unwrap(), 0);
            assert!(slice.load_bit().unwrap());
            assert!(slice.load_bit().unwrap());
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            too_new
        );
    }
}