use std::ops::{BitOr, BitOrAssign};
use std::str::FromStr;

pub use tycho_types_proc::{Load, Store};

pub use self::builder::{
    CellBuilder, CellDataBuilder, CellRefsBuilder, DisplayCellBuilderData, Store,
};
pub use self::cell_context::{CellContext, CellParts, LoadMode};
#[cfg(not(feature = "sync"))]
pub use self::cell_impl::rc::{Cell, CellInner, WeakCell};
#[cfg(feature = "sync")]
pub use self::cell_impl::sync::{Cell, CellInner, WeakCell};
pub use self::cell_impl::{StaticCell, VirtualCellWrapper};
pub use self::lazy::{Lazy, LazyExotic};
pub use self::slice::{
    CellSlice, CellSliceParts, CellSliceRange, DisplayCellSliceData, ExactSize, Load, LoadCell,
};
pub use self::usage_tree::{UsageTree, UsageTreeMode, UsageTreeWithSubtrees};
use crate::error::{Error, ParseHashBytesError};
use crate::util::Bitstring;

mod cell_impl;
mod cell_context;
mod slice;
mod builder;
mod lazy;
mod usage_tree;

#[cfg(feature = "sync")]
#[doc(hidden)]
mod __checks {
    use super::*;

    assert_impl_all!(Cell: Send);
    assert_impl_all!(CellSlice: Send);
    assert_impl_all!(CellBuilder: Send);
}

/// Marker trait for types that have the same representation as `T`.
pub trait EquivalentRepr<T> {}

/// Every type trivially has the same representation as itself.
impl<T> EquivalentRepr<T> for T {}

/// A family of cells and their constructors.
pub trait CellFamily: Sized {
    /// The default cell context type.
    type EmptyCellContext: CellContext;

    /// Creates an empty cell.
    fn empty_cell() -> Cell;

    /// Returns a static reference to the empty cell.
    fn empty_cell_ref() -> &'static DynCell;

    /// Returns a static reference to the default cell context.
    fn empty_context() -> &'static Self::EmptyCellContext;

    /// Returns a static reference to a cell with data filled with zeros.
    fn all_zeros_ref() -> &'static DynCell;

    /// Returns a static reference to a cell with data filled with ones.
    fn all_ones_ref() -> &'static DynCell;

    /// Creates a virtualized cell from the specified cell.
    fn virtualize(cell: Cell) -> Cell;
}

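// A minimal usage sketch for the `CellFamily` constructors. Assumption: the
// `Cell` handle implements `CellFamily` (the tests at the bottom of this file
// already rely on `Cell::empty_cell` and `Cell::empty_context`).
#[cfg(test)]
mod cell_family_examples {
    use super::*;

    #[test]
    fn empty_cell_constructors_agree() {
        let cell = Cell::empty_cell();
        assert_eq!(cell.repr_hash(), Cell::empty_cell_ref().repr_hash());
        assert_eq!(cell.repr_hash(), EMPTY_CELL_HASH);
        assert!(cell.is_empty());
        assert_eq!(cell.bit_len(), 0);
    }
}
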
/// Type alias for a dynamically typed cell.
#[cfg(not(feature = "sync"))]
pub type DynCell = dyn CellImpl;

/// Type alias for a dynamically typed cell (thread-safe).
#[cfg(feature = "sync")]
pub type DynCell = dyn CellImpl + Send + Sync;

impl AsRef<DynCell> for DynCell {
    #[inline(always)]
    fn as_ref(&self) -> &Self {
        self
    }
}

impl AsMut<DynCell> for DynCell {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut Self {
        self
    }
}

/// Internal cell representation.
pub trait CellImpl {
    /// Unwraps the root cell from usage tracking.
    fn untrack(self: CellInner<Self>) -> Cell;

    /// Returns the cell descriptor.
    fn descriptor(&self) -> CellDescriptor;

    /// Returns the raw cell data.
    fn data(&self) -> &[u8];

    /// Returns the data size of this cell in bits.
    fn bit_len(&self) -> u16;

    /// Returns a reference to the child cell with the specified index.
    fn reference(&self, index: u8) -> Option<&DynCell>;

    /// Returns the cloned child cell with the specified index.
    fn reference_cloned(&self, index: u8) -> Option<Cell>;

    /// Returns this cell with the virtualization level increased by one.
    fn virtualize(&self) -> &DynCell;

    /// Returns the cell hash at the specified level.
    fn hash(&self, level: u8) -> &HashBytes;

    /// Returns the cell depth at the specified level.
    fn depth(&self, level: u8) -> u16;

    /// Consumes the first child cell (used by the deep-drop machinery).
    fn take_first_child(&mut self) -> Option<Cell>;

    /// Replaces the first child cell with the provided parent
    /// (used by the deep-drop machinery).
    fn replace_first_child(&mut self, parent: Cell) -> Result<Cell, Cell>;

    /// Consumes the next child cell, skipping the first one
    /// (used by the deep-drop machinery).
    fn take_next_child(&mut self) -> Option<Cell>;

    /// Returns the sum of all bits and cells of all elements
    /// in the cell tree (including this cell).
    #[cfg(feature = "stats")]
    fn stats(&self) -> CellTreeStats;
}

impl DynCell {
    /// Computes the cell type from the descriptor bytes.
    #[inline]
    pub fn cell_type(&self) -> CellType {
        self.descriptor().cell_type()
    }

    /// Computes the cell level from the level mask.
    #[inline]
    pub fn level(&self) -> u8 {
        self.descriptor().level_mask().level()
    }

    /// Computes the level mask from the descriptor bytes.
    #[inline]
    pub fn level_mask(&self) -> LevelMask {
        self.descriptor().level_mask()
    }

    /// Computes the number of child cells from the descriptor bytes.
    #[inline]
    pub fn reference_count(&self) -> u8 {
        self.descriptor().reference_count()
    }

    /// Tries to load the child cell with the specified index as a slice.
    /// Returns an error if the child is absent.
    pub fn get_reference_as_slice(&self, index: u8) -> Result<CellSlice<'_>, Error> {
        match self.reference(index) {
            Some(cell) => CellSlice::new(cell),
            None => Err(Error::CellUnderflow),
        }
    }

    /// Returns whether the cell is not ordinary.
    #[inline]
    pub fn is_exotic(&self) -> bool {
        self.descriptor().is_exotic()
    }

    /// Returns the representation hash of the cell.
    #[inline]
    pub fn repr_hash(&self) -> &HashBytes {
        self.hash(LevelMask::MAX_LEVEL)
    }

    /// Returns the representation depth of the cell.
    #[inline]
    pub fn repr_depth(&self) -> u16 {
        self.depth(LevelMask::MAX_LEVEL)
    }

    /// Returns true if any level of the cell has the maximum depth.
    pub fn has_max_depth(&self) -> bool {
        for level in self.descriptor().level_mask() {
            if self.depth(level) == u16::MAX {
                return true;
            }
        }
        false
    }

    /// Returns true if this cell is the empty cell.
    pub fn is_empty(&self) -> bool {
        self.hash(LevelMask::MAX_LEVEL) == EMPTY_CELL_HASH
    }

    /// Creates an iterator over the child cells.
    #[inline]
    pub fn references(&self) -> RefsIter<'_> {
        RefsIter {
            cell: self,
            max: self.reference_count(),
            index: 0,
        }
    }

    /// Returns this cell as a cell slice.
    /// Returns an error if the cell is exotic.
    #[inline]
    pub fn as_slice(&'_ self) -> Result<CellSlice<'_>, Error> {
        CellSlice::new(self)
    }

    /// Returns this cell as a cell slice without checking the cell type.
    #[inline]
    pub fn as_slice_allow_exotic(&'_ self) -> CellSlice<'_> {
        CellSlice::new_allow_exotic(self)
    }

    /// Recursively computes the number of unique cells and bits in the tree,
    /// returning `None` if the limit was reached.
    pub fn compute_unique_stats(&self, limit: usize) -> Option<CellTreeStats> {
        StorageStat::compute_for_cell(self, limit)
    }

    /// Recursively touches the data of this cell and all of its children.
    pub fn touch_recursive(&self) {
        self.data();
        for child in self.references() {
            child.touch_recursive();
        }
    }

    /// Returns an object that implements [`Debug`] for printing only
    /// the root cell of the cell tree.
    ///
    /// [`Debug`]: std::fmt::Debug
    #[inline]
    pub fn debug_root(&'_ self) -> DebugCell<'_> {
        DebugCell(self)
    }

    /// Returns an object that implements [`Display`] for printing only
    /// the root cell of the cell tree.
    ///
    /// [`Display`]: std::fmt::Display
    #[inline]
    pub fn display_root(&'_ self) -> DisplayCellRoot<'_> {
        DisplayCellRoot {
            cell: self,
            level: 0,
        }
    }

    /// Returns an object that implements [`Display`] for printing
    /// the whole cell tree.
    ///
    /// [`Display`]: std::fmt::Display
    #[inline]
    pub fn display_tree(&'_ self) -> DisplayCellTree<'_> {
        DisplayCellTree(self)
    }

    /// Returns an object that displays the cell data as a bitstring.
    #[inline]
    pub fn display_data(&self) -> impl std::fmt::Display + std::fmt::Binary + '_ {
        struct DisplayData<'a>(&'a DynCell);

        impl std::fmt::Display for DisplayData<'_> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                std::fmt::Display::fmt(
                    &Bitstring {
                        bytes: self.0.data(),
                        bit_len: self.0.bit_len(),
                    },
                    f,
                )
            }
        }

        impl std::fmt::Binary for DisplayData<'_> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                std::fmt::Binary::fmt(
                    &Bitstring {
                        bytes: self.0.data(),
                        bit_len: self.0.bit_len(),
                    },
                    f,
                )
            }
        }

        DisplayData(self)
    }

    /// Parses this cell as a slice using the `Load` implementation of `T`.
    #[inline]
    pub fn parse<'a, T: Load<'a>>(&'a self) -> Result<T, Error> {
        T::load_from(&mut ok!(self.as_slice()))
    }

    /// Parses this exotic cell using the `LoadCell` implementation of `T`.
    /// Returns an error if the cell is ordinary.
    #[inline]
    pub fn parse_exotic<'a, T: LoadCell<'a>>(&'a self) -> Result<T, Error> {
        if self.is_exotic() {
            T::load_from_cell(self)
        } else {
            Err(Error::UnexpectedOrdinaryCell)
        }
    }
}

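// A small round-trip sketch for `DynCell::parse`: store a `u32` via
// `CellBuilder::build_from` (used the same way in the tests at the bottom of
// this file) and read it back through the `Load` impl for `u32`, which this
// crate is assumed to provide for primitive integers.
#[cfg(test)]
mod parse_examples {
    use super::*;

    #[test]
    fn parse_roundtrip() {
        let cell = CellBuilder::build_from(0xdead_beef_u32).unwrap();
        assert_eq!(cell.bit_len(), 32);
        assert_eq!(cell.parse::<u32>().unwrap(), 0xdead_beef_u32);
    }
}
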
impl std::fmt::Debug for DynCell {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        crate::util::debug_struct_field2_finish(
            f,
            "Cell",
            "ty",
            &self.cell_type(),
            "hash",
            self.repr_hash(),
        )
    }
}

impl Eq for DynCell {}

impl PartialEq<DynCell> for DynCell {
    #[inline]
    fn eq(&self, other: &DynCell) -> bool {
        self.repr_hash() == other.repr_hash()
    }
}

#[cfg(feature = "serde")]
impl serde::Serialize for DynCell {
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        let boc = crate::boc::Boc::encode(self);
        if serializer.is_human_readable() {
            serializer.serialize_str(&crate::util::encode_base64(boc))
        } else {
            serializer.serialize_bytes(&boc)
        }
    }
}

#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for Cell {
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        Ok(u.arbitrary::<CellBuilder>()?.build().unwrap())
    }

    #[inline]
    fn size_hint(depth: usize) -> (usize, Option<usize>) {
        Self::try_size_hint(depth).unwrap_or_default()
    }

    #[inline]
    fn try_size_hint(
        depth: usize,
    ) -> Result<(usize, Option<usize>), arbitrary::MaxRecursionReached> {
        <CellBuilder as arbitrary::Arbitrary>::try_size_hint(depth)
    }
}

impl From<Cell> for CellSliceParts {
    #[inline]
    fn from(value: Cell) -> Self {
        (CellSliceRange::full(&value), value)
    }
}

/// An iterator through child cells.
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct RefsIter<'a> {
    cell: &'a DynCell,
    max: u8,
    index: u8,
}

impl<'a> RefsIter<'a> {
    /// Returns a reference to the parent cell.
    #[inline]
    pub fn cell(&self) -> &'a DynCell {
        self.cell
    }

    /// Returns a reference to the next child cell without advancing the iterator.
    #[inline]
    pub fn peek(&self) -> Option<&'a DynCell> {
        if self.index >= self.max {
            None
        } else {
            self.cell.reference(self.index)
        }
    }

    /// Returns the cloned next child cell without advancing the iterator.
    #[inline]
    pub fn peek_cloned(&self) -> Option<Cell> {
        if self.index >= self.max {
            None
        } else {
            self.cell.reference_cloned(self.index)
        }
    }

    /// Returns a reference to the previous child cell without moving the iterator back.
    #[inline]
    pub fn peek_prev(&self) -> Option<&'a DynCell> {
        if self.index > 0 {
            self.cell.reference(self.index - 1)
        } else {
            None
        }
    }

    /// Returns the cloned previous child cell without moving the iterator back.
    #[inline]
    pub fn peek_prev_cloned(&self) -> Option<Cell> {
        if self.index > 0 {
            self.cell.reference_cloned(self.index - 1)
        } else {
            None
        }
    }

    /// Converts this iterator into an iterator over cloned cells.
    #[inline]
    pub fn cloned(self) -> ClonedRefsIter<'a> {
        ClonedRefsIter { inner: self }
    }
}

impl Clone for RefsIter<'_> {
    #[inline]
    fn clone(&self) -> Self {
        Self {
            cell: self.cell,
            max: self.max,
            index: self.index,
        }
    }
}

impl<'a> Iterator for RefsIter<'a> {
    type Item = &'a DynCell;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.index >= self.max {
            None
        } else {
            let child = self.cell.reference(self.index);
            self.index += 1;
            child
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.max.saturating_sub(self.index) as usize;
        (remaining, Some(remaining))
    }
}

impl DoubleEndedIterator for RefsIter<'_> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.max > self.index {
            self.max -= 1;
            self.cell.reference(self.max)
        } else {
            None
        }
    }
}

impl ExactSizeIterator for RefsIter<'_> {
    #[inline]
    fn len(&self) -> usize {
        self.size_hint().0
    }
}

/// An iterator through cloned child cells.
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct ClonedRefsIter<'a> {
    inner: RefsIter<'a>,
}

impl<'a> ClonedRefsIter<'a> {
    /// Returns a reference to the parent cell.
    #[inline]
    pub fn cell(&self) -> &'a DynCell {
        self.inner.cell
    }

    /// Returns the next child cell without advancing the iterator.
    #[inline]
    pub fn peek(&self) -> Option<Cell> {
        self.inner.peek_cloned()
    }

    /// Returns the previous child cell without moving the iterator back.
    #[inline]
    pub fn peek_prev(&self) -> Option<Cell> {
        self.inner.peek_prev_cloned()
    }
}

impl Clone for ClonedRefsIter<'_> {
    #[inline]
    fn clone(&self) -> Self {
        Self {
            inner: self.inner.clone(),
        }
    }
}

impl Iterator for ClonedRefsIter<'_> {
    type Item = Cell;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.inner.index >= self.inner.max {
            None
        } else {
            let child = self.inner.cell.reference_cloned(self.inner.index);
            self.inner.index += 1;
            child
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl DoubleEndedIterator for ClonedRefsIter<'_> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.inner.max > self.inner.index {
            self.inner.max -= 1;
            self.inner.cell.reference_cloned(self.inner.max)
        } else {
            None
        }
    }
}

impl ExactSizeIterator for ClonedRefsIter<'_> {
    #[inline]
    fn len(&self) -> usize {
        self.size_hint().0
    }
}

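// A sketch of iterating child cells with `RefsIter`. Assumptions:
// `CellBuilder::new` and `CellBuilder::store_reference` exist with these
// signatures (neither is used elsewhere in this module).
#[cfg(test)]
mod refs_iter_examples {
    use super::*;

    #[test]
    fn iterate_references() {
        let child = Cell::empty_cell();
        let mut builder = CellBuilder::new();
        builder.store_reference(child.clone()).unwrap();
        builder.store_reference(child).unwrap();
        let cell = builder.build().unwrap();

        let mut refs = cell.references();
        assert_eq!(refs.len(), 2);
        assert!(refs.peek().is_some());
        assert!(refs.next().is_some());
        // The iterator is double-ended, so children can be taken from the back.
        assert!(refs.next_back().is_some());
        assert_eq!(refs.len(), 0);
    }
}
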
/// Representation hash of a cell.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[repr(transparent)]
pub struct HashBytes(pub [u8; 32]);

impl HashBytes {
    /// Array of zero bytes.
    pub const ZERO: Self = Self([0; 32]);

    /// Converts a byte slice to a hash.
    ///
    /// # Panics
    ///
    /// Panics if the slice length is not 32 bytes.
    #[inline]
    pub fn from_slice(slice: &[u8]) -> Self {
        Self(slice.try_into().expect("slice with incorrect length"))
    }

    /// Wraps a reference to an internal array into a newtype reference.
    #[inline(always)]
    pub const fn wrap(value: &[u8; 32]) -> &Self {
        // SAFETY: `HashBytes` is `#[repr(transparent)]` over `[u8; 32]`.
        unsafe { &*(value as *const [u8; 32] as *const Self) }
    }

    /// Returns the underlying data as a slice of bytes.
    pub const fn as_slice(&self) -> &[u8] {
        self.0.as_slice()
    }

    /// Returns the underlying data as a mutable slice of bytes.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.0.as_mut_slice()
    }

    /// Returns a reference to the underlying array.
    pub const fn as_array(&self) -> &[u8; 32] {
        &self.0
    }

    /// Returns a mutable reference to the underlying array.
    pub fn as_mut_array(&mut self) -> &mut [u8; 32] {
        &mut self.0
    }

    /// Returns a raw pointer to the underlying data.
    #[inline(always)]
    pub const fn as_ptr(&self) -> *const u8 {
        &self.0 as *const [u8] as *const u8
    }

    /// Returns the first 8 bytes of the hash.
    pub const fn first_chunk(&self) -> &[u8; 8] {
        match self.0.first_chunk::<8>() {
            Some(chunk) => chunk,
            // SAFETY: the underlying array is always 32 bytes long.
            None => unsafe { std::hint::unreachable_unchecked() },
        }
    }

    #[inline(always)]
    fn fmt_hex(&self, f: &mut std::fmt::Formatter<'_>, table: &[u8; 16]) -> std::fmt::Result {
        let mut output = [0u8; 64];
        crate::util::encode_to_hex_slice(&self.0, &mut output, table).ok();

        // SAFETY: `output` contains only ASCII hex characters.
        let output = unsafe { std::str::from_utf8_unchecked(&output) };
        f.write_str(output)
    }
}

685
686impl Default for HashBytes {
687 #[inline(always)]
688 fn default() -> Self {
689 Self::ZERO
690 }
691}
692
693impl AsRef<[u8; 32]> for HashBytes {
694 #[inline(always)]
695 fn as_ref(&self) -> &[u8; 32] {
696 &self.0
697 }
698}
699
700impl AsRef<[u8]> for HashBytes {
701 #[inline(always)]
702 fn as_ref(&self) -> &[u8] {
703 self.0.as_ref()
704 }
705}
706
707impl AsMut<[u8; 32]> for HashBytes {
708 #[inline(always)]
709 fn as_mut(&mut self) -> &mut [u8; 32] {
710 &mut self.0
711 }
712}
713
714impl AsMut<[u8]> for HashBytes {
715 #[inline(always)]
716 fn as_mut(&mut self) -> &mut [u8] {
717 self.0.as_mut()
718 }
719}
720
721impl std::borrow::Borrow<[u8]> for HashBytes {
722 #[inline(always)]
723 fn borrow(&self) -> &[u8] {
724 &self.0
725 }
726}
727
728impl std::borrow::BorrowMut<[u8]> for HashBytes {
729 #[inline(always)]
730 fn borrow_mut(&mut self) -> &mut [u8] {
731 &mut self.0
732 }
733}
734
735impl std::borrow::Borrow<HashBytes> for [u8; 32] {
736 #[inline(always)]
737 fn borrow(&self) -> &HashBytes {
738 HashBytes::wrap(self)
739 }
740}
741
742impl PartialEq<[u8; 32]> for HashBytes {
743 #[inline(always)]
744 fn eq(&self, other: &[u8; 32]) -> bool {
745 &self.0 == other
746 }
747}
748
749impl PartialEq<HashBytes> for [u8; 32] {
750 #[inline(always)]
751 fn eq(&self, other: &HashBytes) -> bool {
752 self == &other.0
753 }
754}
755
756impl PartialEq<[u8]> for HashBytes {
757 #[inline(always)]
758 fn eq(&self, other: &[u8]) -> bool {
759 self.0 == other
760 }
761}
762
763impl PartialEq<HashBytes> for [u8] {
764 #[inline(always)]
765 fn eq(&self, other: &HashBytes) -> bool {
766 self == other.0
767 }
768}
769
770impl From<[u8; 32]> for HashBytes {
771 #[inline(always)]
772 fn from(value: [u8; 32]) -> Self {
773 Self(value)
774 }
775}
776
777impl From<sha2::digest::Output<sha2::Sha256>> for HashBytes {
778 #[inline(always)]
779 fn from(value: sha2::digest::Output<sha2::Sha256>) -> Self {
780 Self(value.into())
781 }
782}
783
784#[cfg(feature = "blake3")]
785impl From<blake3::Hash> for HashBytes {
786 #[inline(always)]
787 fn from(value: blake3::Hash) -> Self {
788 Self(value.into())
789 }
790}
791
792impl From<HashBytes> for [u8; 32] {
793 #[inline(always)]
794 fn from(value: HashBytes) -> Self {
795 value.0
796 }
797}
798
impl FromStr for HashBytes {
    type Err = ParseHashBytesError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut result = Self::default();
        match s.len() {
            64 => {
                if let Err(e) = hex::decode_to_slice(s, &mut result.0) {
                    return Err(ParseHashBytesError::InvalidHex(e));
                }
            }
            66 => {
                // Skip the two-character prefix (e.g. `0x`) and decode the rest.
                if let Err(e) = hex::decode_to_slice(&s[2..], &mut result.0) {
                    return Err(ParseHashBytesError::InvalidHex(e));
                }
            }
            #[cfg(feature = "base64")]
            44 => {
                if let Err(e) = crate::util::decode_base64_slice(s, &mut result.0) {
                    return Err(ParseHashBytesError::InvalidBase64(e));
                }
            }
            _ => return Err(ParseHashBytesError::UnexpectedStringLength),
        }
        Ok(result)
    }
}

impl std::fmt::Display for HashBytes {
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::LowerHex::fmt(self, f)
    }
}

impl std::fmt::LowerHex for HashBytes {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        Self::fmt_hex(self, f, crate::util::HEX_CHARS_LOWER)
    }
}

impl std::fmt::UpperHex for HashBytes {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        Self::fmt_hex(self, f, crate::util::HEX_CHARS_UPPER)
    }
}

impl std::fmt::Debug for HashBytes {
    #[inline(always)]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
    }
}

impl<I> std::ops::Index<I> for HashBytes
where
    [u8; 32]: std::ops::Index<I>,
{
    type Output = <[u8; 32] as std::ops::Index<I>>::Output;

    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        std::ops::Index::index(&self.0, index)
    }
}

impl<I> std::ops::IndexMut<I> for HashBytes
where
    [u8; 32]: std::ops::IndexMut<I>,
{
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        std::ops::IndexMut::index_mut(&mut self.0, index)
    }
}

#[cfg(feature = "rand8")]
impl rand8::distributions::Distribution<HashBytes> for rand8::distributions::Standard {
    #[inline]
    fn sample<R: rand8::Rng + ?Sized>(&self, rng: &mut R) -> HashBytes {
        HashBytes(rand8::distributions::Standard.sample(rng))
    }
}

#[cfg(feature = "rand9")]
impl rand9::distr::Distribution<HashBytes> for rand9::distr::StandardUniform {
    #[inline]
    fn sample<R: rand9::Rng + ?Sized>(&self, rng: &mut R) -> HashBytes {
        HashBytes(rand9::distr::StandardUniform.sample(rng))
    }
}

#[cfg(feature = "serde")]
impl serde::Serialize for HashBytes {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        if serializer.is_human_readable() {
            let mut output = [0u8; 64];
            crate::util::encode_to_hex_slice(&self.0, &mut output, crate::util::HEX_CHARS_LOWER)
                .ok();

            // SAFETY: `output` contains only ASCII hex characters.
            let output = unsafe { std::str::from_utf8_unchecked(&output) };
            serializer.serialize_str(output)
        } else {
            serializer.serialize_bytes(&self.0)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for HashBytes {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use ::serde::de::{Error, Visitor};

        struct HashBytesHexVisitor;

        impl Visitor<'_> for HashBytesHexVisitor {
            type Value = HashBytes;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("hex-encoded byte array of size 32")
            }

            fn visit_str<E: Error>(self, value: &str) -> Result<Self::Value, E> {
                let mut result = HashBytes([0; 32]);
                match hex::decode_to_slice(value, &mut result.0) {
                    Ok(()) => Ok(result),
                    Err(_) => Err(Error::invalid_value(
                        serde::de::Unexpected::Str(value),
                        &self,
                    )),
                }
            }
        }

        struct HashBytesRawVisitor;

        impl Visitor<'_> for HashBytesRawVisitor {
            type Value = HashBytes;

            fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                f.write_fmt(format_args!("a byte array of size 32"))
            }

            fn visit_bytes<E: Error>(self, v: &[u8]) -> Result<Self::Value, E> {
                v.try_into()
                    .map(HashBytes)
                    .map_err(|_e| Error::invalid_length(v.len(), &self))
            }
        }

        if deserializer.is_human_readable() {
            deserializer.deserialize_str(HashBytesHexVisitor)
        } else {
            deserializer.deserialize_bytes(HashBytesRawVisitor)
        }
    }
}

#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for HashBytes {
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        u.arbitrary().map(Self)
    }

    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        (32, Some(32))
    }
}

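// A minimal sketch of the `FromStr`/`Display` pair defined above: a 64-char
// hex string parses into `HashBytes`, and formatting produces lowercase hex.
#[cfg(test)]
mod hash_bytes_examples {
    use super::*;

    #[test]
    fn hex_roundtrip() {
        let s = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef";
        let hash: HashBytes = s.parse().unwrap();
        assert_eq!(hash.to_string(), s);
        // A 66-char string is treated as `0x`-prefixed.
        assert_eq!(format!("0x{s}").parse::<HashBytes>().unwrap(), hash);
        assert_eq!(HashBytes::ZERO.as_slice(), &[0u8; 32]);
    }
}
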
/// The representation hash of an empty (`bits: 0`, `refs: 0`) ordinary cell.
pub static EMPTY_CELL_HASH: &HashBytes = HashBytes::wrap(&[
    0x96, 0xa2, 0x96, 0xd2, 0x24, 0xf2, 0x85, 0xc6, 0x7b, 0xee, 0x93, 0xc3, 0x0f, 0x8a, 0x30, 0x91,
    0x57, 0xf0, 0xda, 0xa3, 0x5d, 0xc5, 0xb8, 0x7e, 0x41, 0x0b, 0x78, 0x63, 0x0a, 0x09, 0xcf, 0xc7,
]);

/// Well-formed cell type.
#[derive(Default, Debug, Copy, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum CellType {
    /// An ordinary cell that just stores data and references.
    #[default]
    Ordinary,
    /// An exotic cell that was pruned from the original tree of cells.
    PrunedBranch,
    /// An exotic cell with a reference to a library cell.
    LibraryReference,
    /// An exotic cell with one reference and a required relation between hashes.
    MerkleProof,
    /// An exotic cell with two references and a required relation between hashes.
    MerkleUpdate,
}

impl CellType {
    /// Returns whether this cell type is a Merkle proof or a Merkle update.
    #[inline]
    pub const fn is_merkle(self) -> bool {
        matches!(self, Self::MerkleProof | Self::MerkleUpdate)
    }

    /// Returns whether the cell is not ordinary.
    #[inline]
    pub const fn is_exotic(self) -> bool {
        !matches!(self, Self::Ordinary)
    }

    /// Returns whether this cell type is a pruned branch.
    #[inline]
    pub const fn is_pruned_branch(self) -> bool {
        matches!(self, Self::PrunedBranch)
    }

    /// Returns whether this cell type is a library reference.
    #[inline(always)]
    pub const fn is_library(self) -> bool {
        matches!(self, Self::LibraryReference)
    }

    /// Encodes the cell type as a byte (`0xff` is used for the ordinary cell).
    #[inline]
    pub const fn to_byte(self) -> u8 {
        match self {
            CellType::Ordinary => 0xff,
            CellType::PrunedBranch => 1,
            CellType::LibraryReference => 2,
            CellType::MerkleProof => 3,
            CellType::MerkleUpdate => 4,
        }
    }

    /// Decodes the cell type from a byte.
    #[inline]
    pub const fn from_byte(byte: u8) -> Option<Self> {
        Some(match byte {
            0xff => CellType::Ordinary,
            1 => CellType::PrunedBranch,
            2 => CellType::LibraryReference,
            3 => CellType::MerkleProof,
            4 => CellType::MerkleUpdate,
            _ => return None,
        })
    }

    /// Decodes an exotic cell type from a byte.
    #[inline]
    pub const fn from_byte_exotic(byte: u8) -> Option<Self> {
        Some(match byte {
            1 => CellType::PrunedBranch,
            2 => CellType::LibraryReference,
            3 => CellType::MerkleProof,
            4 => CellType::MerkleUpdate,
            _ => return None,
        })
    }
}

impl From<CellType> for u8 {
    #[inline]
    fn from(cell_type: CellType) -> u8 {
        cell_type.to_byte()
    }
}

#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for CellType {
    #[inline]
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        static ALL_TYPES: [CellType; 5] = [
            CellType::Ordinary,
            CellType::PrunedBranch,
            CellType::LibraryReference,
            CellType::MerkleProof,
            CellType::MerkleUpdate,
        ];

        u.choose(&ALL_TYPES).copied()
    }

    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        (1, Some(1))
    }
}

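// A quick sketch of the byte encoding defined above: `to_byte`/`from_byte`
// round-trip for every cell type, while `from_byte_exotic` rejects `0xff`.
#[cfg(test)]
mod cell_type_examples {
    use super::*;

    #[test]
    fn byte_roundtrip() {
        for ty in [
            CellType::Ordinary,
            CellType::PrunedBranch,
            CellType::LibraryReference,
            CellType::MerkleProof,
            CellType::MerkleUpdate,
        ] {
            assert_eq!(CellType::from_byte(ty.to_byte()), Some(ty));
        }
        assert_eq!(CellType::from_byte_exotic(0xff), None);
    }
}
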
/// Tightly packed info about a cell.
#[derive(Hash, Debug, Clone, Copy)]
#[repr(C)]
pub struct CellDescriptor {
    /// First descriptor byte with generic info about the cell.
    pub d1: u8,
    /// Second descriptor byte with the packed data size.
    pub d2: u8,
}

impl CellDescriptor {
    /// Bit mask of the reference count in the first descriptor byte.
    pub const REF_COUNT_MASK: u8 = 0b0000_0111;
    /// Bit mask of the exotic flag in the first descriptor byte.
    pub const IS_EXOTIC_MASK: u8 = 0b0000_1000;
    /// Bit mask of the `store hashes` flag in the first descriptor byte.
    pub const STORE_HASHES_MASK: u8 = 0b0001_0000;
    /// Bit mask of the level mask in the first descriptor byte.
    pub const LEVEL_MASK: u8 = 0b1110_0000;

    /// Computes the first descriptor byte from its parts.
    #[inline(always)]
    pub const fn compute_d1(level_mask: LevelMask, is_exotic: bool, ref_count: u8) -> u8 {
        (level_mask.0 << 5) | ((is_exotic as u8) << 3) | (ref_count & Self::REF_COUNT_MASK)
    }

    /// Computes the second descriptor byte from the data length in bits.
    #[inline(always)]
    pub const fn compute_d2(bit_len: u16) -> u8 {
        (((bit_len >> 2) as u8) & !0b1) | ((bit_len % 8 != 0) as u8)
    }

    /// Constructs a cell descriptor from the two descriptor bytes.
    #[inline(always)]
    pub const fn new(bytes: [u8; 2]) -> Self {
        Self {
            d1: bytes[0],
            d2: bytes[1],
        }
    }

    /// Computes a cell descriptor with the level mask shifted by the offset.
    #[must_use]
    pub const fn virtualize(mut self, offset: u8) -> Self {
        let virtualized_mask = (self.d1 >> offset) & Self::LEVEL_MASK;
        self.d1 = virtualized_mask | (self.d1 & !Self::LEVEL_MASK);
        self
    }

    /// Computes the cell type.
    pub fn cell_type(self) -> CellType {
        if self.d1 & Self::IS_EXOTIC_MASK == 0 {
            CellType::Ordinary
        } else {
            match self.d1 & Self::REF_COUNT_MASK {
                0 => {
                    // An exotic cell without references is either
                    // a library reference or a pruned branch.
                    if self.d1 & Self::LEVEL_MASK == 0 {
                        CellType::LibraryReference
                    } else {
                        CellType::PrunedBranch
                    }
                }
                1 => CellType::MerkleProof,
                _ => CellType::MerkleUpdate,
            }
        }
    }

    /// Computes the number of child cells.
    #[inline(always)]
    pub const fn reference_count(self) -> u8 {
        self.d1 & Self::REF_COUNT_MASK
    }

    /// Computes the number of hashes stored in the cell: a pruned branch
    /// stores only one hash, every other cell stores `level + 1` hashes.
    #[inline(always)]
    pub const fn hash_count(self) -> u8 {
        let level = self.level_mask().level();
        if self.is_exotic() && self.reference_count() == 0 && level > 0 {
            // Pruned branch.
            1
        } else {
            level + 1
        }
    }

    /// Returns whether the cell is not ordinary.
    #[inline(always)]
    pub const fn is_exotic(self) -> bool {
        self.d1 & Self::IS_EXOTIC_MASK != 0
    }

    /// Returns whether this cell is a pruned branch cell.
    #[inline(always)]
    pub const fn is_pruned_branch(self) -> bool {
        self.is_exotic() && self.reference_count() == 0 && !self.level_mask().is_empty()
    }

    /// Returns whether this cell is a library cell.
    #[inline(always)]
    pub const fn is_library(self) -> bool {
        self.is_exotic() && self.reference_count() == 0 && self.level_mask().is_empty()
    }

    /// Returns whether this cell is a Merkle proof or a Merkle update.
    #[inline(always)]
    pub const fn is_merkle(self) -> bool {
        self.is_exotic() && self.reference_count() != 0
    }

    /// Returns whether this cell is absent.
    #[inline(always)]
    pub const fn is_absent(self) -> bool {
        self.d1 == (Self::REF_COUNT_MASK | Self::IS_EXOTIC_MASK)
    }

    /// Returns whether this cell stores hashes in its data.
    #[inline(always)]
    pub const fn store_hashes(self) -> bool {
        self.d1 & Self::STORE_HASHES_MASK != 0
    }

    /// Computes the level mask.
    #[inline(always)]
    pub const fn level_mask(self) -> LevelMask {
        // SAFETY: the mask is within 0b000..=0b111 after the shift.
        unsafe { LevelMask::new_unchecked(self.d1 >> 5) }
    }

    /// Returns whether the cell data is aligned to bytes.
    #[inline(always)]
    pub const fn is_aligned(self) -> bool {
        self.d2 & 1 == 0
    }

    /// Computes the data length in bytes.
    #[inline(always)]
    pub const fn byte_len(self) -> u8 {
        (self.d2 & 1) + (self.d2 >> 1)
    }
}

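// A worked example of the descriptor byte layout. For 123 data bits,
// `compute_d2` gives `((123 >> 2) & !1) | 1 = 31`, so `byte_len` recovers
// `ceil(123 / 8) = 16` bytes and the cell is reported as unaligned.
#[cfg(test)]
mod cell_descriptor_examples {
    use super::*;

    #[test]
    fn descriptor_layout() {
        let desc = CellDescriptor::new([
            CellDescriptor::compute_d1(LevelMask::new(0b001), false, 2),
            CellDescriptor::compute_d2(123),
        ]);
        assert_eq!(desc.reference_count(), 2);
        assert!(!desc.is_exotic());
        assert_eq!(desc.level_mask(), LevelMask::new(0b001));
        assert_eq!(desc.byte_len(), 16);
        assert!(!desc.is_aligned());

        // A byte-aligned cell: 32 bits occupy exactly 4 bytes.
        let desc = CellDescriptor::new([0, CellDescriptor::compute_d2(32)]);
        assert!(desc.is_aligned());
        assert_eq!(desc.byte_len(), 4);
    }
}
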
/// A mask that determines which hashes are present in a cell.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct LevelMask(u8);

impl LevelMask {
    /// An empty level mask.
    pub const EMPTY: Self = LevelMask(0);
    /// The maximum cell level.
    pub const MAX_LEVEL: u8 = 3;

    /// Constructs a new level mask, truncating extra bits.
    #[inline(always)]
    pub const fn new(mask: u8) -> Self {
        Self(mask & 0b111)
    }

    /// Returns true if the mask is empty.
    #[inline(always)]
    pub const fn is_empty(self) -> bool {
        self.0 == 0
    }

    /// Constructs a new level mask from a byte without checking it.
    ///
    /// # Safety
    ///
    /// The mask must be in range `0b000..=0b111`.
    #[inline(always)]
    pub const unsafe fn new_unchecked(mask: u8) -> Self {
        Self(mask)
    }

    /// Creates a sufficient mask for the specified level.
    #[inline(always)]
    pub const fn from_level(level: u8) -> Self {
        Self(match level {
            0 => 0,
            1 => 1,
            2 => 3,
            _ => 7,
        })
    }

    /// Computes the cell level: the number of set bits in the mask.
    pub const fn level(self) -> u8 {
        (self.0 & 1) + ((self.0 >> 1) & 1) + ((self.0 >> 2) & 1)
    }

    /// Computes the hash index for the specified level.
    pub const fn hash_index(self, level: u8) -> u8 {
        Self(self.0 & Self::from_level(level).0).level()
    }

    /// Returns whether the specified level is included in the mask.
    pub const fn contains(self, level: u8) -> bool {
        level == 0 || self.0 & (1 << (level - 1)) != 0
    }

    /// Creates a new mask, shifted by the offset.
    #[inline(always)]
    #[must_use]
    pub const fn virtualize(self, offset: u8) -> Self {
        Self(self.0 >> offset)
    }

    /// Encodes the mask as a byte.
    #[inline]
    pub const fn to_byte(self) -> u8 {
        self.0
    }
}

impl IntoIterator for LevelMask {
    type Item = u8;
    type IntoIter = LevelMaskIter;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        // Level 0 is always present, so the mask is shifted by one
        // and the lowest bit is set.
        LevelMaskIter(1 | (self.0 << 1))
    }
}

impl PartialEq<u8> for LevelMask {
    #[inline]
    fn eq(&self, other: &u8) -> bool {
        self.0 == *other
    }
}

impl BitOr for LevelMask {
    type Output = Self;

    #[inline(always)]
    fn bitor(self, rhs: Self) -> Self::Output {
        Self(self.0 | rhs.0)
    }
}

impl BitOrAssign for LevelMask {
    #[inline(always)]
    fn bitor_assign(&mut self, rhs: Self) {
        self.0 |= rhs.0;
    }
}

impl From<LevelMask> for u8 {
    #[inline(always)]
    fn from(m: LevelMask) -> u8 {
        m.0
    }
}

impl std::fmt::Debug for LevelMask {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_fmt(format_args!("{:03b}", self.0))
    }
}

#[cfg(feature = "arbitrary")]
impl<'a> arbitrary::Arbitrary<'a> for LevelMask {
    fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
        u.int_in_range(0b000..=0b111).map(Self::new)
    }

    #[inline]
    fn size_hint(_: usize) -> (usize, Option<usize>) {
        (1, Some(1))
    }
}

/// An iterator over the levels whose hashes are present in a cell.
#[derive(Clone)]
pub struct LevelMaskIter(u8);

impl LevelMaskIter {
    /// Returns true if there are no levels left.
    pub fn is_empty(&self) -> bool {
        self.0 == 0
    }
}

impl Iterator for LevelMaskIter {
    type Item = u8;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        (self.0 != 0).then(|| {
            // Isolate the lowest set bit.
            let mask = self.0 & !(self.0 - 1);

            // Clear it for the next iteration.
            self.0 &= !mask;

            mask.trailing_zeros() as u8
        })
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.0.count_ones() as usize;
        (len, Some(len))
    }
}

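// A small sketch of the level mask semantics defined above: `level()` counts
// set bits, `hash_index` counts only the bits visible at a given level, and
// iteration always yields level 0 plus every level whose bit is set.
#[cfg(test)]
mod level_mask_examples {
    use super::*;

    #[test]
    fn mask_levels() {
        let mask = LevelMask::new(0b101);
        assert_eq!(mask.level(), 2);
        assert!(mask.contains(0) && mask.contains(1) && !mask.contains(2) && mask.contains(3));
        assert_eq!(mask.hash_index(1), 1);
        assert_eq!(mask.hash_index(3), 2);

        let levels: Vec<u8> = mask.into_iter().collect();
        assert_eq!(levels, [0, 1, 3]);
    }
}
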
/// The size of a cell or a slice: data bits and child references.
#[derive(Debug, Default, Clone, Copy, Eq, PartialEq)]
pub struct Size {
    /// The number of data bits.
    pub bits: u16,
    /// The number of child references.
    pub refs: u8,
}

impl Size {
    /// The additive identity: `bits: 0`, `refs: 0`.
    pub const ZERO: Self = Self { bits: 0, refs: 0 };

    /// The size of a single bit.
    pub const BIT: Self = Self { bits: 1, refs: 0 };

    /// The size of a single reference.
    pub const REF: Self = Self { bits: 0, refs: 1 };

    /// The maximum size that still fits into a single cell.
    pub const MAX: Self = Self {
        bits: MAX_BIT_LEN,
        refs: MAX_REF_COUNT as _,
    };

    /// Returns whether the number of bits and refs fits into cell limits.
    #[inline]
    pub const fn fits_into_cell(&self) -> bool {
        self.bits <= MAX_BIT_LEN && self.refs <= MAX_REF_COUNT as _
    }

    /// Saturating size addition. Computes `self + rhs` for bits and refs,
    /// saturating at the numeric bounds instead of overflowing.
    #[inline]
    pub const fn saturating_add(self, rhs: Self) -> Self {
        Self {
            bits: self.bits.saturating_add(rhs.bits),
            refs: self.refs.saturating_add(rhs.refs),
        }
    }

    /// Saturating size subtraction. Computes `self - rhs` for bits and refs,
    /// saturating at the numeric bounds instead of overflowing.
    #[inline]
    pub const fn saturating_sub(self, rhs: Self) -> Self {
        Self {
            bits: self.bits.saturating_sub(rhs.bits),
            refs: self.refs.saturating_sub(rhs.refs),
        }
    }

    /// Returns true if the size is zero.
    pub const fn is_zero(self) -> bool {
        self.bits == 0 && self.refs == 0
    }
}

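// A minimal sketch of `Size` arithmetic: the saturating helpers never
// underflow, and `fits_into_cell` checks against `MAX_BIT_LEN`/`MAX_REF_COUNT`.
#[cfg(test)]
mod size_examples {
    use super::*;

    #[test]
    fn size_arithmetic() {
        let size = Size { bits: 1000, refs: 3 };
        assert!(size.fits_into_cell());
        assert!(!(size + Size { bits: 100, refs: 0 }).fits_into_cell());
        assert!((size + Size::REF).fits_into_cell());
        assert_eq!(Size::ZERO.saturating_sub(Size::REF), Size::ZERO);
    }
}
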
impl From<Size> for CellTreeStats {
    #[inline]
    fn from(value: Size) -> Self {
        Self {
            bit_count: value.bits as _,
            cell_count: value.refs as _,
        }
    }
}

impl std::ops::Add for Size {
    type Output = Self;

    #[inline]
    fn add(mut self, rhs: Self) -> Self::Output {
        self += rhs;
        self
    }
}

impl std::ops::AddAssign for Size {
    #[inline]
    fn add_assign(&mut self, rhs: Self) {
        self.bits += rhs.bits;
        self.refs += rhs.refs;
    }
}

impl std::ops::Sub for Size {
    type Output = Self;

    #[inline]
    fn sub(mut self, rhs: Self) -> Self::Output {
        self -= rhs;
        self
    }
}

impl std::ops::SubAssign for Size {
    #[inline]
    fn sub_assign(&mut self, rhs: Self) {
        self.bits -= rhs.bits;
        self.refs -= rhs.refs;
    }
}

/// Aggregated stats of a cell tree.
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub struct CellTreeStats {
    /// Total number of bits in the cell tree.
    pub bit_count: u64,
    /// Total number of cells in the cell tree.
    pub cell_count: u64,
}

impl CellTreeStats {
    /// Empty cell tree stats.
    pub const ZERO: Self = CellTreeStats {
        bit_count: 0,
        cell_count: 0,
    };
}

impl std::ops::Add for CellTreeStats {
    type Output = Self;

    #[inline]
    fn add(self, rhs: Self) -> Self::Output {
        Self {
            bit_count: self.bit_count.saturating_add(rhs.bit_count),
            cell_count: self.cell_count.saturating_add(rhs.cell_count),
        }
    }
}

impl std::ops::AddAssign for CellTreeStats {
    #[inline]
    fn add_assign(&mut self, rhs: Self) {
        self.bit_count = self.bit_count.saturating_add(rhs.bit_count);
        self.cell_count = self.cell_count.saturating_add(rhs.cell_count);
    }
}

impl std::ops::Add<Size> for CellTreeStats {
    type Output = Self;

    #[inline]
    fn add(self, rhs: Size) -> Self::Output {
        Self {
            bit_count: self.bit_count.saturating_add(rhs.bits as _),
            cell_count: self.cell_count.saturating_add(rhs.refs as _),
        }
    }
}

impl std::iter::Sum for CellTreeStats {
    #[inline]
    fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
        let mut res = Self::ZERO;
        for item in iter {
            res += item;
        }
        res
    }
}

impl std::ops::AddAssign<Size> for CellTreeStats {
    fn add_assign(&mut self, rhs: Size) {
        self.bit_count = self.bit_count.saturating_add(rhs.bits as _);
        self.cell_count = self.cell_count.saturating_add(rhs.refs as _);
    }
}

impl std::ops::Sub for CellTreeStats {
    type Output = Self;

    #[inline]
    fn sub(mut self, rhs: Self) -> Self::Output {
        self -= rhs;
        self
    }
}

impl std::ops::SubAssign for CellTreeStats {
    #[inline]
    fn sub_assign(&mut self, rhs: Self) {
        self.bit_count = self.bit_count.saturating_sub(rhs.bit_count);
        self.cell_count = self.cell_count.saturating_sub(rhs.cell_count);
    }
}

impl std::ops::SubAssign<Size> for CellTreeStats {
    #[inline]
    fn sub_assign(&mut self, rhs: Size) {
        self.bit_count = self.bit_count.saturating_sub(rhs.bits as _);
        self.cell_count = self.cell_count.saturating_sub(rhs.refs as _);
    }
}

/// A helper to track the size of the unique cells in multiple cell trees.
pub struct StorageStat<'a> {
    visited: ahash::HashSet<&'a HashBytes>,
    stack: Vec<RefsIter<'a>>,
    stats: CellTreeStats,
    limit: usize,
}

impl<'a> StorageStat<'a> {
    /// Recursively computes the number of unique cells and bits in the slice,
    /// returning `None` if the limit was reached.
    pub fn compute_for_slice<'b: 'a>(
        slice: &'a CellSlice<'b>,
        limit: usize,
    ) -> Option<CellTreeStats> {
        let mut this = Self::with_limit(limit);
        if this.add_slice(slice) {
            Some(this.stats)
        } else {
            None
        }
    }

    /// Recursively computes the number of unique cells and bits in the cell,
    /// returning `None` if the limit was reached.
    pub fn compute_for_cell(cell: &'a DynCell, limit: usize) -> Option<CellTreeStats> {
        let mut this = Self::with_limit(limit);
        if this.add_cell(cell) {
            Some(this.stats)
        } else {
            None
        }
    }

    /// Creates a new tracker with the specified cell count limit.
    pub fn with_limit(limit: usize) -> Self {
        Self {
            visited: Default::default(),
            stack: Vec::new(),
            stats: CellTreeStats::ZERO,
            limit,
        }
    }

    /// Creates a new tracker without a cell count limit.
    pub fn unlimited() -> Self {
        Self::with_limit(usize::MAX)
    }

    /// Returns the aggregated stats.
    pub fn stats(&self) -> CellTreeStats {
        self.stats
    }

    /// Merges the current stats with the stats of the specified cell tree.
    /// Returns `false` if the limit was reached.
    pub fn add_cell(&mut self, cell: &'a DynCell) -> bool {
        if !self.visited.insert(cell.repr_hash()) {
            return true;
        }

        self.stats.bit_count += cell.bit_len() as u64;
        self.stats.cell_count += 1;

        self.stack.clear();
        self.stack.push(cell.references());
        self.reduce_stack()
    }

    /// Merges the current stats with the stats of the specified slice.
    /// Returns `false` if the limit was reached.
    pub fn add_slice(&mut self, slice: &CellSlice<'a>) -> bool {
        self.stats.bit_count += slice.size_bits() as u64;

        self.stack.clear();
        self.stack.push(slice.references());
        self.reduce_stack()
    }

    fn reduce_stack(&mut self) -> bool {
        'outer: while let Some(item) = self.stack.last_mut() {
            for cell in item.by_ref() {
                if !self.visited.insert(cell.repr_hash()) {
                    continue;
                }

                if self.stats.cell_count >= self.limit as u64 {
                    return false;
                }

                self.stats.bit_count += cell.bit_len() as u64;
                self.stats.cell_count += 1;

                let next = cell.references();
                if next.max > 0 {
                    self.stack.push(next);
                    continue 'outer;
                }
            }

            self.stack.pop();
        }

        true
    }
}

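// A sketch of unique-cell counting with `StorageStat`: a duplicated child is
// visited once, so the tree below counts two unique cells. Assumptions:
// `CellBuilder::new` and `CellBuilder::store_reference`, as in the iterator
// example earlier in this file.
#[cfg(test)]
mod storage_stat_examples {
    use super::*;

    #[test]
    fn unique_stats() {
        let child = Cell::empty_cell();
        let mut builder = CellBuilder::new();
        builder.store_reference(child.clone()).unwrap();
        builder.store_reference(child).unwrap();
        let cell = builder.build().unwrap();

        let stats = cell.compute_unique_stats(usize::MAX).unwrap();
        assert_eq!(stats, CellTreeStats { bit_count: 0, cell_count: 2 });

        // Reaching the limit aborts the traversal and yields `None`.
        assert_eq!(cell.compute_unique_stats(1), None);
    }
}
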
/// A wrapper that implements `Debug` for the root cell only.
#[derive(Clone, Copy)]
pub struct DebugCell<'a>(&'a DynCell);

impl std::fmt::Debug for DebugCell<'_> {
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.0.fmt(f)
    }
}

/// A wrapper that displays the root cell only.
#[derive(Clone, Copy)]
pub struct DisplayCellRoot<'a> {
    cell: &'a DynCell,
    level: usize,
}

impl std::fmt::Display for DisplayCellRoot<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let data = hex::encode(self.cell.data());

        let indent = self.level * 2;
        if f.alternate() {
            f.write_fmt(format_args!("{:indent$}{data}\n", ""))
        } else {
            let repr_depth = self.cell.depth(LevelMask::MAX_LEVEL);
            let repr_hash = self.cell.repr_hash();
            let descriptor = self.cell.descriptor();
            f.write_fmt(format_args!(
                "{:indent$}{:?}: {data}\n{:indent$}bits: {:>4}, refs: {}, l: {:?}, depth: {}, hash: {}\n",
                "",
                descriptor.cell_type(),
                "",
                self.cell.bit_len(),
                descriptor.reference_count(),
                descriptor.level_mask(),
                repr_depth,
                repr_hash,
            ))
        }
    }
}

/// A wrapper that displays the whole cell tree.
#[derive(Clone, Copy)]
pub struct DisplayCellTree<'a>(&'a DynCell);

impl std::fmt::Display for DisplayCellTree<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut stack = vec![(0, self.0)];

        while let Some((level, cell)) = stack.pop() {
            ok!(std::fmt::Display::fmt(&DisplayCellRoot { cell, level }, f));

            let reference_count = cell.reference_count();
            for i in (0..reference_count).rev() {
                if let Some(child) = cell.reference(i) {
                    stack.push((level + 1, child));
                }
            }
        }

        Ok(())
    }
}

/// The maximum number of data bits that a cell can store.
pub const MAX_BIT_LEN: u16 = 1023;
/// The maximum number of child references that a cell can store.
pub const MAX_REF_COUNT: usize = 4;

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn correct_level() {
        const LEVEL: [u8; 8] = [0, 1, 1, 2, 1, 2, 2, 3];

        for mask in 0b000..=0b111 {
            assert_eq!(LevelMask(mask).level(), LEVEL[mask as usize]);
        }
    }

    #[test]
    fn virtualize_descriptor() {
        let level_mask = LevelMask(0b111);
        let desc = CellDescriptor::new([
            CellDescriptor::compute_d1(level_mask, false, 3),
            CellDescriptor::compute_d2(123),
        ]);

        assert_eq!(desc.level_mask(), level_mask);

        for i in 0..3 {
            let v_desc = desc.virtualize(i);

            assert_eq!(v_desc.cell_type(), desc.cell_type());
            assert_eq!(v_desc.reference_count(), desc.reference_count());
            assert_eq!(v_desc.is_exotic(), desc.is_exotic());
            assert_eq!(v_desc.store_hashes(), desc.store_hashes());
            assert_eq!(v_desc.is_aligned(), desc.is_aligned());
            assert_eq!(v_desc.byte_len(), desc.byte_len());

            assert_eq!(v_desc.level_mask(), level_mask.virtualize(i));
        }
    }

    #[test]
    fn correct_hash_index() {
        const HASH_INDEX_TABLE: [[u8; 4]; 8] = [
            // Level:   0  1  2  3
            /* 0b000 */ [0, 0, 0, 0],
            /* 0b001 */ [0, 1, 1, 1],
            /* 0b010 */ [0, 0, 1, 1],
            /* 0b011 */ [0, 1, 2, 2],
            /* 0b100 */ [0, 0, 0, 1],
            /* 0b101 */ [0, 1, 1, 2],
            /* 0b110 */ [0, 0, 1, 2],
            /* 0b111 */ [0, 1, 2, 3],
        ];

        for mask in 0b000..=0b111 {
            let mask = LevelMask(mask);

            for i in 0..=3 {
                let hash_index = mask.hash_index(i);
                assert_eq!(hash_index, HASH_INDEX_TABLE[mask.0 as usize][i as usize]);
            }
        }
    }

    #[test]
    fn ultra_virtual_cell_by_ref() {
        let cell = Cell::empty_cell();

        let pruned1 =
            crate::merkle::make_pruned_branch(cell.as_ref(), 0, Cell::empty_context()).unwrap();

        let pruned2 =
            crate::merkle::make_pruned_branch(pruned1.as_ref(), 1, Cell::empty_context()).unwrap();

        let pruned3 =
            crate::merkle::make_pruned_branch(pruned2.as_ref(), 2, Cell::empty_context()).unwrap();

        // Level 3 -> 2
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), pruned2.repr_hash());
        assert_eq!(pruned3.repr_depth(), pruned2.repr_depth());

        // Level 2 -> 1
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), pruned1.repr_hash());
        assert_eq!(pruned3.repr_depth(), pruned1.repr_depth());

        // Level 1 -> 0
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());

        // Virtualization saturates at level 0.
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());

        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());
    }

    #[test]
    fn versioned_store_load() {
        #[derive(Debug, Clone, Copy, Eq, PartialEq, Store, Load)]
        #[tlb(tag = ["#12", "#34", "#56"])]
        struct Versioned {
            old_field1: u32,
            #[tlb(since_tag = 1)]
            new_field1: u32,
            old_field2: bool,
            #[tlb(since_tag = 2)]
            new_field2: bool,
        }

        let old = Versioned {
            old_field1: 123,
            new_field1: 0,
            old_field2: true,
            new_field2: false,
        };
        let cell = CellBuilder::build_from(old).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x12);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert!(slice.load_bit().unwrap());
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            old,
        );

        let new = Versioned {
            old_field1: 123,
            new_field1: 456,
            old_field2: true,
            new_field2: false,
        };
        let cell = CellBuilder::build_from(new).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 32 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x34);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert_eq!(slice.load_u32().unwrap(), 456);
            assert!(slice.load_bit().unwrap());
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            new
        );

        let too_new = Versioned {
            old_field1: 123,
            new_field1: 0,
            old_field2: true,
            new_field2: true,
        };
        let cell = CellBuilder::build_from(too_new).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 32 + 1 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x56);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert_eq!(slice.load_u32().unwrap(), 0);
            assert!(slice.load_bit().unwrap());
            assert!(slice.load_bit().unwrap());
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            too_new
        );
    }
}