use std::ops::{BitOr, BitOrAssign};
use std::str::FromStr;

use crate::error::{Error, ParseHashBytesError};
use crate::util::Bitstring;

pub use self::builder::{CellBuilder, CellRefsBuilder, Store};
pub use self::cell_context::{CellContext, CellParts, LoadMode};
pub use self::cell_impl::{StaticCell, VirtualCellWrapper};
pub use self::slice::{CellSlice, CellSliceParts, CellSliceRange, ExactSize, Load};
pub use self::usage_tree::{UsageTree, UsageTreeMode, UsageTreeWithSubtrees};

#[cfg(not(feature = "sync"))]
pub use self::cell_impl::rc::{Cell, CellInner, WeakCell};

#[cfg(feature = "sync")]
pub use self::cell_impl::sync::{Cell, CellInner, WeakCell};

pub use everscale_types_proc::{Load, Store};

mod cell_impl;
mod cell_context;
mod slice;
mod builder;
mod usage_tree;

#[cfg(feature = "sync")]
#[doc(hidden)]
mod __checks {
    use super::*;

    assert_impl_all!(Cell: Send);
    assert_impl_all!(CellSlice: Send);
    assert_impl_all!(CellBuilder: Send);
}

/// Marker trait indicating that a type has an equivalent representation to `T`.
pub trait EquivalentRepr<T> {}

/// All types are equivalent to themselves.
impl<T> EquivalentRepr<T> for T {}

/// A family of cell types and helpers for working with them.
pub trait CellFamily: Sized {
    /// The default cell context type.
    type EmptyCellContext: CellContext;

    /// Creates an empty cell.
    fn empty_cell() -> Cell;

    /// Returns a static reference to the empty cell.
    fn empty_cell_ref() -> &'static DynCell;

    /// Creates an empty cell context.
    fn empty_context() -> Self::EmptyCellContext;

    /// Returns a static reference to a cell whose data is all zeros.
    fn all_zeros_ref() -> &'static DynCell;

    /// Returns a static reference to a cell whose data is all ones.
    fn all_ones_ref() -> &'static DynCell;

    /// Creates a virtualized version of the specified cell.
    fn virtualize(cell: Cell) -> Cell;
}

/// A type alias for a dynamically-dispatched cell.
#[cfg(not(feature = "sync"))]
pub type DynCell = dyn CellImpl;

/// A type alias for a dynamically-dispatched cell (thread-safe).
#[cfg(feature = "sync")]
pub type DynCell = dyn CellImpl + Send + Sync;

impl AsRef<DynCell> for DynCell {
    #[inline(always)]
    fn as_ref(&self) -> &Self {
        self
    }
}

impl AsMut<DynCell> for DynCell {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut Self {
        self
    }
}

/// An internal cell implementation.
pub trait CellImpl {
    /// Unwraps the root cell, removing any usage tracking.
    fn untrack(self: CellInner<Self>) -> Cell;

    /// Returns the cell descriptor (the `d1` and `d2` bytes).
    fn descriptor(&self) -> CellDescriptor;

    /// Returns the raw cell data.
    fn data(&self) -> &[u8];

    /// Returns the data length in bits.
    fn bit_len(&self) -> u16;

    /// Returns a reference to the child cell at the specified index, if any.
    fn reference(&self, index: u8) -> Option<&DynCell>;

    /// Returns the child cell at the specified index, if any.
    fn reference_cloned(&self, index: u8) -> Option<Cell>;

    /// Returns a virtualized view of this cell.
    fn virtualize(&self) -> &DynCell;

    /// Returns the cell hash at the specified level.
    fn hash(&self, level: u8) -> &HashBytes;

    /// Returns the cell depth at the specified level.
    fn depth(&self, level: u8) -> u16;

    /// Takes the first child cell out of this cell, if possible
    /// (used internally for iterative cell tree manipulation).
    fn take_first_child(&mut self) -> Option<Cell>;

    /// Replaces the first child cell with the provided parent, returning the
    /// previous child on success (used internally for iterative manipulation).
    fn replace_first_child(&mut self, parent: Cell) -> Result<Cell, Cell>;

    /// Takes the next child cell out of this cell, if possible
    /// (used internally for iterative cell tree manipulation).
    fn take_next_child(&mut self) -> Option<Cell>;

    /// Returns the aggregated stats of the cell tree rooted at this cell.
    #[cfg(feature = "stats")]
    fn stats(&self) -> CellTreeStats;
}

impl DynCell {
    /// Returns the cell type computed from the descriptor.
    #[inline]
    pub fn cell_type(&self) -> CellType {
        self.descriptor().cell_type()
    }

    /// Returns the cell level.
    #[inline]
    pub fn level(&self) -> u8 {
        self.descriptor().level_mask().level()
    }

    /// Returns the cell level mask.
    #[inline]
    pub fn level_mask(&self) -> LevelMask {
        self.descriptor().level_mask()
    }

    /// Returns the number of child cells.
    #[inline]
    pub fn reference_count(&self) -> u8 {
        self.descriptor().reference_count()
    }

    /// Returns the child cell at the specified index as a slice,
    /// or `Error::CellUnderflow` if there is no such child.
    pub fn get_reference_as_slice(&self, index: u8) -> Result<CellSlice<'_>, Error> {
        match self.reference(index) {
            Some(cell) => CellSlice::new(cell),
            None => Err(Error::CellUnderflow),
        }
    }

    /// Returns whether the cell is not ordinary.
    #[inline]
    pub fn is_exotic(&self) -> bool {
        self.descriptor().is_exotic()
    }

    /// Returns the representation hash of the cell.
    #[inline]
    pub fn repr_hash(&self) -> &HashBytes {
        self.hash(LevelMask::MAX_LEVEL)
    }

    /// Returns the representation depth of the cell.
    #[inline]
    pub fn repr_depth(&self) -> u16 {
        self.depth(LevelMask::MAX_LEVEL)
    }

    /// Returns whether this cell is the empty ordinary cell.
    pub fn is_empty(&self) -> bool {
        self.hash(LevelMask::MAX_LEVEL) == EMPTY_CELL_HASH
    }

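    /// Returns an iterator over the child cells.
    ///
    /// A quick doc-test sketch (paths assume this crate's public `cell` module):
    ///
    /// ```
    /// use everscale_types::cell::{Cell, CellFamily};
    ///
    /// let cell = Cell::empty_cell();
    /// assert_eq!(cell.references().len(), 0);
    /// ```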
    #[inline]
    pub fn references(&self) -> RefsIter<'_> {
        RefsIter {
            cell: self,
            max: self.reference_count(),
            index: 0,
        }
    }

    /// Returns this cell as a slice, or an error if the cell data
    /// cannot be read directly (e.g. for exotic cells).
    #[inline]
    pub fn as_slice(&'_ self) -> Result<CellSlice<'_>, Error> {
        CellSlice::new(self)
    }

    /// Returns this cell as a slice without any checks.
    ///
    /// # Safety
    ///
    /// The caller must ensure that the cell data can be read directly,
    /// i.e. the cell is not an exotic cell such as a pruned branch.
    #[inline]
    pub unsafe fn as_slice_unchecked(&'_ self) -> CellSlice<'_> {
        CellSlice::new_unchecked(self)
    }

    /// Recursively computes the number of distinct cells and the total
    /// number of data bits in the tree, visiting at most `limit` cells.
    /// Returns `None` if the limit was reached.
    pub fn compute_unique_stats(&self, limit: usize) -> Option<CellTreeStats> {
        StorageStat::compute_for_cell(self, limit)
    }

    /// Returns an object which implements [`std::fmt::Debug`] for this cell.
    #[inline]
    pub fn debug_root(&'_ self) -> DebugCell<'_> {
        DebugCell(self)
    }

    /// Returns an object which implements [`std::fmt::Display`] for the root cell only.
    #[inline]
    pub fn display_root(&'_ self) -> DisplayCellRoot<'_> {
        DisplayCellRoot {
            cell: self,
            level: 0,
        }
    }

    /// Returns an object which implements [`std::fmt::Display`] for the whole cell tree.
    #[inline]
    pub fn display_tree(&'_ self) -> DisplayCellTree<'_> {
        DisplayCellTree(self)
    }

    /// Returns an object which formats the cell data as a bitstring.
    #[inline]
    pub fn display_data(&self) -> impl std::fmt::Display + std::fmt::Binary + '_ {
        struct DisplayData<'a>(&'a DynCell);

        impl std::fmt::Display for DisplayData<'_> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                std::fmt::Display::fmt(
                    &Bitstring {
                        bytes: self.0.data(),
                        bit_len: self.0.bit_len(),
                    },
                    f,
                )
            }
        }

        impl std::fmt::Binary for DisplayData<'_> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                std::fmt::Binary::fmt(
                    &Bitstring {
                        bytes: self.0.data(),
                        bit_len: self.0.bit_len(),
                    },
                    f,
                )
            }
        }

        DisplayData(self)
    }

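    /// Parses this cell's content as `T`, reading from the very beginning.
    ///
    /// A quick doc-test sketch (paths assume this crate's public `cell` module):
    ///
    /// ```
    /// use everscale_types::cell::CellBuilder;
    ///
    /// let cell = CellBuilder::build_from(123u32).unwrap();
    /// let value: u32 = cell.parse().unwrap();
    /// assert_eq!(value, 123);
    /// ```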
    #[inline]
    pub fn parse<'a, T: Load<'a>>(&'a self) -> Result<T, Error> {
        T::load_from(&mut ok!(self.as_slice()))
    }
}

impl std::fmt::Debug for DynCell {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        crate::util::debug_struct_field2_finish(
            f,
            "Cell",
            "ty",
            &self.cell_type(),
            "hash",
            self.repr_hash(),
        )
    }
}

impl Eq for DynCell {}

impl PartialEq<DynCell> for DynCell {
    #[inline]
    fn eq(&self, other: &DynCell) -> bool {
        self.repr_hash() == other.repr_hash()
    }
}

#[cfg(feature = "serde")]
impl serde::Serialize for DynCell {
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        let boc = crate::boc::Boc::encode(self);
        if serializer.is_human_readable() {
            serializer.serialize_str(&crate::util::encode_base64(boc))
        } else {
            serializer.serialize_bytes(&boc)
        }
    }
}

/// An iterator over the references of a cell.
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct RefsIter<'a> {
    cell: &'a DynCell,
    max: u8,
    index: u8,
}

impl<'a> RefsIter<'a> {
    /// Returns a reference to the cell being iterated.
    #[inline]
    pub fn cell(&self) -> &'a DynCell {
        self.cell
    }

    /// Returns a reference to the next child cell without advancing the iterator.
    #[inline]
    pub fn peek(&self) -> Option<&'a DynCell> {
        if self.index >= self.max {
            None
        } else {
            self.cell.reference(self.index)
        }
    }

    /// Returns the next child cell without advancing the iterator.
    #[inline]
    pub fn peek_cloned(&self) -> Option<Cell> {
        if self.index >= self.max {
            None
        } else {
            self.cell.reference_cloned(self.index)
        }
    }

    /// Returns a reference to the previously returned child cell.
    #[inline]
    pub fn peek_prev(&self) -> Option<&'a DynCell> {
        if self.index > 0 {
            self.cell.reference(self.index - 1)
        } else {
            None
        }
    }

    /// Returns the previously returned child cell.
    #[inline]
    pub fn peek_prev_cloned(&self) -> Option<Cell> {
        if self.index > 0 {
            self.cell.reference_cloned(self.index - 1)
        } else {
            None
        }
    }

    /// Converts this iterator into one that yields owned cells.
    #[inline]
    pub fn cloned(self) -> ClonedRefsIter<'a> {
        ClonedRefsIter { inner: self }
    }
}

impl Clone for RefsIter<'_> {
    #[inline]
    fn clone(&self) -> Self {
        Self {
            cell: self.cell,
            max: self.max,
            index: self.index,
        }
    }
}

impl<'a> Iterator for RefsIter<'a> {
    type Item = &'a DynCell;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.index >= self.max {
            None
        } else {
            let child = self.cell.reference(self.index);
            self.index += 1;
            child
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.max.saturating_sub(self.index) as usize;
        (remaining, Some(remaining))
    }
}

impl<'a> DoubleEndedIterator for RefsIter<'a> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.max > self.index {
            self.max -= 1;
            self.cell.reference(self.max)
        } else {
            None
        }
    }
}

impl ExactSizeIterator for RefsIter<'_> {
    #[inline]
    fn len(&self) -> usize {
        self.size_hint().0
    }
}

/// An iterator over the references of a cell that yields owned cells.
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct ClonedRefsIter<'a> {
    inner: RefsIter<'a>,
}

impl<'a> ClonedRefsIter<'a> {
    /// Returns a reference to the cell being iterated.
    #[inline]
    pub fn cell(&self) -> &'a DynCell {
        self.inner.cell
    }

    /// Returns the next child cell without advancing the iterator.
    #[inline]
    pub fn peek(&self) -> Option<Cell> {
        self.inner.peek_cloned()
    }

    /// Returns the previously returned child cell.
    #[inline]
    pub fn peek_prev(&self) -> Option<Cell> {
        self.inner.peek_prev_cloned()
    }
}

impl Clone for ClonedRefsIter<'_> {
    #[inline]
    fn clone(&self) -> Self {
        Self {
            inner: self.inner.clone(),
        }
    }
}

impl<'a> Iterator for ClonedRefsIter<'a> {
    type Item = Cell;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.inner.index >= self.inner.max {
            None
        } else {
            let child = self.inner.cell.reference_cloned(self.inner.index);
            self.inner.index += 1;
            child
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<'a> DoubleEndedIterator for ClonedRefsIter<'a> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.inner.max > self.inner.index {
            self.inner.max -= 1;
            self.inner.cell.reference_cloned(self.inner.max)
        } else {
            None
        }
    }
}

impl ExactSizeIterator for ClonedRefsIter<'_> {
    #[inline]
    fn len(&self) -> usize {
        self.size_hint().0
    }
}

/// A 256-bit hash value, stored as a 32-byte array.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[repr(transparent)]
pub struct HashBytes(pub [u8; 32]);

impl HashBytes {
    /// An array of zero bytes.
    pub const ZERO: Self = Self([0; 32]);

    /// Converts a slice to a hash.
    ///
    /// # Panics
    ///
    /// Panics if the slice length is not exactly 32 bytes.
    #[inline]
    pub fn from_slice(slice: &[u8]) -> Self {
        Self(slice.try_into().expect("slice with incorrect length"))
    }

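    /// Wraps a reference to an array into a reference to `HashBytes` at no cost
    /// (the layout is identical thanks to `#[repr(transparent)]`).
    ///
    /// A quick doc-test sketch (paths assume this crate's public `cell` module):
    ///
    /// ```
    /// use everscale_types::cell::HashBytes;
    ///
    /// let array = [0u8; 32];
    /// let hash: &HashBytes = HashBytes::wrap(&array);
    /// assert_eq!(hash.as_array(), &array);
    /// ```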
    #[inline(always)]
    pub const fn wrap(value: &[u8; 32]) -> &Self {
        // SAFETY: `HashBytes` is `#[repr(transparent)]` over `[u8; 32]`.
        unsafe { &*(value as *const [u8; 32] as *const Self) }
    }

    /// Returns the hash as a byte slice.
    pub const fn as_slice(&self) -> &[u8] {
        self.0.as_slice()
    }

    /// Returns the hash as a mutable byte slice.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.0.as_mut_slice()
    }

    /// Returns the hash as a reference to the underlying array.
    pub const fn as_array(&self) -> &[u8; 32] {
        &self.0
    }

    /// Returns the hash as a mutable reference to the underlying array.
    pub fn as_mut_array(&mut self) -> &mut [u8; 32] {
        &mut self.0
    }

    /// Returns a raw pointer to the first byte.
    #[inline(always)]
    pub const fn as_ptr(&self) -> *const u8 {
        &self.0 as *const [u8] as *const u8
    }

    /// Returns the first 8 bytes of the hash.
    pub const fn first_chunk(&self) -> &[u8; 8] {
        match self.0.first_chunk::<8>() {
            Some(chunk) => chunk,
            // SAFETY: a 32-byte array always has a first 8-byte chunk.
            None => unsafe { std::hint::unreachable_unchecked() },
        }
    }
}

impl Default for HashBytes {
    #[inline(always)]
    fn default() -> Self {
        Self::ZERO
    }
}

impl AsRef<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn as_ref(&self) -> &[u8; 32] {
        &self.0
    }
}

impl AsRef<[u8]> for HashBytes {
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        self.0.as_ref()
    }
}

impl AsMut<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8; 32] {
        &mut self.0
    }
}

impl AsMut<[u8]> for HashBytes {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_mut()
    }
}

impl std::borrow::Borrow<[u8]> for HashBytes {
    #[inline(always)]
    fn borrow(&self) -> &[u8] {
        &self.0
    }
}

impl std::borrow::BorrowMut<[u8]> for HashBytes {
    #[inline(always)]
    fn borrow_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl std::borrow::Borrow<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn borrow(&self) -> &HashBytes {
        HashBytes::wrap(self)
    }
}

impl PartialEq<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn eq(&self, other: &[u8; 32]) -> bool {
        &self.0 == other
    }
}

impl PartialEq<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn eq(&self, other: &HashBytes) -> bool {
        self == &other.0
    }
}

impl PartialEq<[u8]> for HashBytes {
    #[inline(always)]
    fn eq(&self, other: &[u8]) -> bool {
        self.0 == other
    }
}

impl PartialEq<HashBytes> for [u8] {
    #[inline(always)]
    fn eq(&self, other: &HashBytes) -> bool {
        self == other.0
    }
}

impl From<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn from(value: [u8; 32]) -> Self {
        Self(value)
    }
}

impl From<sha2::digest::Output<sha2::Sha256>> for HashBytes {
    #[inline(always)]
    fn from(value: sha2::digest::Output<sha2::Sha256>) -> Self {
        Self(value.into())
    }
}

#[cfg(feature = "blake3")]
impl From<blake3::Hash> for HashBytes {
    #[inline(always)]
    fn from(value: blake3::Hash) -> Self {
        Self(value.into())
    }
}

impl From<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn from(value: HashBytes) -> Self {
        value.0
    }
}

impl FromStr for HashBytes {
    type Err = ParseHashBytesError;

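    /// Parses a hash from a 64-character hex string, a 66-character
    /// `0x`-prefixed hex string, or (with the `base64` feature) a
    /// 44-character base64 string.
    ///
    /// A quick doc-test sketch (paths assume this crate's public `cell` module):
    ///
    /// ```
    /// use everscale_types::cell::{HashBytes, EMPTY_CELL_HASH};
    ///
    /// let hash: HashBytes = "96a296d224f285c67bee93c30f8a309157f0daa35dc5b87e410b78630a09cfc7"
    ///     .parse()
    ///     .unwrap();
    /// assert_eq!(hash, *EMPTY_CELL_HASH);
    /// ```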
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut result = Self::default();
        match s.len() {
            64 => {
                if let Err(e) = hex::decode_to_slice(s, &mut result.0) {
                    return Err(ParseHashBytesError::InvalidHex(e));
                }
            }
            66 => {
                // A 66-character string is expected to carry a `0x` prefix.
                let s = match s.strip_prefix("0x") {
                    Some(s) => s,
                    None => return Err(ParseHashBytesError::UnexpectedStringLength),
                };
                if let Err(e) = hex::decode_to_slice(s, &mut result.0) {
                    return Err(ParseHashBytesError::InvalidHex(e));
                }
            }
            #[cfg(feature = "base64")]
            44 => {
                if let Err(e) = crate::util::decode_base64_slice(s, &mut result.0) {
                    return Err(ParseHashBytesError::InvalidBase64(e));
                }
            }
            _ => return Err(ParseHashBytesError::UnexpectedStringLength),
        }
        Ok(result)
    }
}

impl std::fmt::Display for HashBytes {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut output = [0u8; 64];
        hex::encode_to_slice(self, &mut output).ok();

        // SAFETY: `output` is filled with valid ASCII hex digits.
        let output = unsafe { std::str::from_utf8_unchecked(&output) };
        f.write_str(output)
    }
}

impl std::fmt::Debug for HashBytes {
    #[inline(always)]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
    }
}

impl<I> std::ops::Index<I> for HashBytes
where
    [u8; 32]: std::ops::Index<I>,
{
    type Output = <[u8; 32] as std::ops::Index<I>>::Output;

    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        std::ops::Index::index(&self.0, index)
    }
}

impl<I> std::ops::IndexMut<I> for HashBytes
where
    [u8; 32]: std::ops::IndexMut<I>,
{
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        std::ops::IndexMut::index_mut(&mut self.0, index)
    }
}

#[cfg(any(feature = "rand", test))]
impl rand::distributions::Distribution<HashBytes> for rand::distributions::Standard {
    #[inline]
    fn sample<R: rand::Rng + ?Sized>(&self, rng: &mut R) -> HashBytes {
        HashBytes(rand::distributions::Standard.sample(rng))
    }
}

#[cfg(feature = "serde")]
impl serde::Serialize for HashBytes {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        if serializer.is_human_readable() {
            let mut output = [0u8; 64];
            hex::encode_to_slice(self.0.as_slice(), &mut output).ok();

            // SAFETY: `output` is filled with valid ASCII hex digits.
            let output = unsafe { std::str::from_utf8_unchecked(&output) };
            serializer.serialize_str(output)
        } else {
            serializer.serialize_bytes(&self.0)
        }
    }
}

#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for HashBytes {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use ::serde::de::{Error, Visitor};

        struct HashBytesHexVisitor;

        impl<'de> Visitor<'de> for HashBytesHexVisitor {
            type Value = HashBytes;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("hex-encoded byte array of size 32")
            }

            fn visit_str<E: Error>(self, value: &str) -> Result<Self::Value, E> {
                let mut result = HashBytes([0; 32]);
                match hex::decode_to_slice(value, &mut result.0) {
                    Ok(()) => Ok(result),
                    Err(_) => Err(Error::invalid_value(
                        serde::de::Unexpected::Str(value),
                        &self,
                    )),
                }
            }
        }

        struct HashBytesRawVisitor;

        impl<'de> Visitor<'de> for HashBytesRawVisitor {
            type Value = HashBytes;

            fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                f.write_fmt(format_args!("a byte array of size 32"))
            }

            fn visit_bytes<E: Error>(self, v: &[u8]) -> Result<Self::Value, E> {
                v.try_into()
                    .map(HashBytes)
                    .map_err(|_e| Error::invalid_length(v.len(), &self))
            }
        }

        if deserializer.is_human_readable() {
            deserializer.deserialize_str(HashBytesHexVisitor)
        } else {
            deserializer.deserialize_bytes(HashBytesRawVisitor)
        }
    }
}

/// The representation hash of an empty ordinary cell.
pub static EMPTY_CELL_HASH: &HashBytes = HashBytes::wrap(&[
    0x96, 0xa2, 0x96, 0xd2, 0x24, 0xf2, 0x85, 0xc6, 0x7b, 0xee, 0x93, 0xc3, 0x0f, 0x8a, 0x30, 0x91,
    0x57, 0xf0, 0xda, 0xa3, 0x5d, 0xc5, 0xb8, 0x7e, 0x41, 0x0b, 0x78, 0x63, 0x0a, 0x09, 0xcf, 0xc7,
]);

/// Well-known cell types.
#[derive(Default, Debug, Copy, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum CellType {
    /// An ordinary cell.
    #[default]
    Ordinary,
    /// A pruned branch cell (an exotic stub for a removed subtree).
    PrunedBranch,
    /// A library reference cell.
    LibraryReference,
    /// A Merkle proof cell.
    MerkleProof,
    /// A Merkle update cell.
    MerkleUpdate,
}

impl CellType {
    /// Returns whether this is a Merkle proof or Merkle update cell.
    #[inline]
    pub const fn is_merkle(self) -> bool {
        matches!(self, Self::MerkleProof | Self::MerkleUpdate)
    }

    /// Returns whether the cell is not ordinary.
    #[inline]
    pub const fn is_exotic(self) -> bool {
        !matches!(self, Self::Ordinary)
    }

    /// Returns whether this is a pruned branch cell.
    #[inline]
    pub const fn is_pruned_branch(self) -> bool {
        matches!(self, Self::PrunedBranch)
    }

    /// Encodes the cell type as a byte (`0xff` marks an ordinary cell).
    #[inline]
    pub const fn to_byte(self) -> u8 {
        match self {
            CellType::Ordinary => 0xff,
            CellType::PrunedBranch => 1,
            CellType::LibraryReference => 2,
            CellType::MerkleProof => 3,
            CellType::MerkleUpdate => 4,
        }
    }

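    /// Decodes a cell type from a byte, inverting [`CellType::to_byte`].
    ///
    /// A quick doc-test sketch (paths assume this crate's public `cell` module):
    ///
    /// ```
    /// use everscale_types::cell::CellType;
    ///
    /// assert_eq!(CellType::from_byte(0xff), Some(CellType::Ordinary));
    /// assert_eq!(CellType::from_byte(3), Some(CellType::MerkleProof));
    /// assert_eq!(CellType::from_byte(0), None);
    /// ```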
    #[inline]
    pub const fn from_byte(byte: u8) -> Option<Self> {
        Some(match byte {
            0xff => CellType::Ordinary,
            1 => CellType::PrunedBranch,
            2 => CellType::LibraryReference,
            3 => CellType::MerkleProof,
            4 => CellType::MerkleUpdate,
            _ => return None,
        })
    }

    /// Decodes an exotic cell type from a byte (ordinary cells are rejected).
    #[inline]
    pub const fn from_byte_exotic(byte: u8) -> Option<Self> {
        Some(match byte {
            1 => CellType::PrunedBranch,
            2 => CellType::LibraryReference,
            3 => CellType::MerkleProof,
            4 => CellType::MerkleUpdate,
            _ => return None,
        })
    }
}

impl From<CellType> for u8 {
    #[inline]
    fn from(cell_type: CellType) -> u8 {
        cell_type.to_byte()
    }
}

/// The two descriptor bytes of a cell.
#[derive(Hash, Debug, Clone, Copy)]
#[repr(C)]
pub struct CellDescriptor {
    /// First descriptor byte: level mask, exotic and store-hashes flags, and reference count.
    pub d1: u8,
    /// Second descriptor byte: encoded data length in bits.
    pub d2: u8,
}

impl CellDescriptor {
    /// Bits of `d1` that encode the reference count.
    pub const REF_COUNT_MASK: u8 = 0b0000_0111;
    /// Bit of `d1` that is set for exotic cells.
    pub const IS_EXOTIC_MASK: u8 = 0b0000_1000;
    /// Bit of `d1` that is set when hashes are stored inline.
    pub const STORE_HASHES_MASK: u8 = 0b0001_0000;
    /// Bits of `d1` that encode the level mask.
    pub const LEVEL_MASK: u8 = 0b1110_0000;

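    /// Assembles the first descriptor byte from a level mask, the exotic flag
    /// and a reference count.
    ///
    /// A quick doc-test sketch (paths assume this crate's public `cell` module):
    ///
    /// ```
    /// use everscale_types::cell::{CellDescriptor, LevelMask};
    ///
    /// let d1 = CellDescriptor::compute_d1(LevelMask::EMPTY, false, 2);
    /// let descriptor = CellDescriptor::new([d1, 0]);
    /// assert_eq!(descriptor.reference_count(), 2);
    /// assert!(!descriptor.is_exotic());
    /// ```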
    #[inline(always)]
    pub const fn compute_d1(level_mask: LevelMask, is_exotic: bool, ref_count: u8) -> u8 {
        (level_mask.0 << 5) | ((is_exotic as u8) << 3) | (ref_count & Self::REF_COUNT_MASK)
    }

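    /// Assembles the second descriptor byte from a data length in bits:
    /// `d2 = floor(bit_len / 8) + ceil(bit_len / 8)`, so the lowest bit is
    /// set iff the last byte is incomplete.
    ///
    /// A quick doc-test sketch (paths assume this crate's public `cell` module):
    ///
    /// ```
    /// use everscale_types::cell::CellDescriptor;
    ///
    /// // 10 data bits occupy two bytes, with an incomplete last byte.
    /// let descriptor = CellDescriptor::new([0, CellDescriptor::compute_d2(10)]);
    /// assert_eq!(descriptor.byte_len(), 2);
    /// assert!(!descriptor.is_aligned());
    /// ```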
    #[inline(always)]
    pub const fn compute_d2(bit_len: u16) -> u8 {
        (((bit_len >> 2) as u8) & !0b1) | ((bit_len % 8 != 0) as u8)
    }

    /// Creates a descriptor from the two raw bytes.
    #[inline(always)]
    pub const fn new(bytes: [u8; 2]) -> Self {
        Self {
            d1: bytes[0],
            d2: bytes[1],
        }
    }

    /// Computes the cell type from the descriptor bytes.
    pub fn cell_type(self) -> CellType {
        if self.d1 & Self::IS_EXOTIC_MASK == 0 {
            CellType::Ordinary
        } else {
            match self.d1 & Self::REF_COUNT_MASK {
                0 => {
                    // Zero references for an exotic cell means either a library
                    // reference (empty level mask) or a pruned branch.
                    if self.d1 & Self::LEVEL_MASK == 0 {
                        CellType::LibraryReference
                    } else {
                        CellType::PrunedBranch
                    }
                }
                1 => CellType::MerkleProof,
                _ => CellType::MerkleUpdate,
            }
        }
    }

    /// Returns the number of child cells.
    #[inline(always)]
    pub const fn reference_count(self) -> u8 {
        self.d1 & Self::REF_COUNT_MASK
    }

    /// Computes the number of hashes stored for this cell.
    #[inline(always)]
    pub const fn hash_count(self) -> u8 {
        let level = self.level_mask().level();
        if self.is_exotic() && self.reference_count() == 0 && level > 0 {
            // A pruned branch stores only a single hash.
            1
        } else {
            level + 1
        }
    }

    /// Returns whether the cell is not ordinary.
    #[inline(always)]
    pub const fn is_exotic(self) -> bool {
        self.d1 & Self::IS_EXOTIC_MASK != 0
    }

    /// Returns whether this is a pruned branch cell.
    #[inline(always)]
    pub const fn is_pruned_branch(self) -> bool {
        self.is_exotic() && self.reference_count() == 0 && !self.level_mask().is_empty()
    }

    /// Returns whether this is a Merkle proof or Merkle update cell.
    #[inline(always)]
    pub const fn is_merkle(self) -> bool {
        self.is_exotic() && self.reference_count() != 0
    }

    /// Returns whether this descriptor marks an absent cell.
    #[inline(always)]
    pub const fn is_absent(self) -> bool {
        self.d1 == (Self::REF_COUNT_MASK | Self::IS_EXOTIC_MASK)
    }

    /// Returns whether hashes are stored inline in the cell.
    #[inline(always)]
    pub const fn store_hashes(self) -> bool {
        self.d1 & Self::STORE_HASHES_MASK != 0
    }

    /// Returns the level mask encoded in `d1`.
    #[inline(always)]
    pub const fn level_mask(self) -> LevelMask {
        // SAFETY: the shifted value always fits into three bits.
        unsafe { LevelMask::new_unchecked(self.d1 >> 5) }
    }

    /// Returns whether the cell data is byte-aligned (no incomplete last byte).
    #[inline(always)]
    pub const fn is_aligned(self) -> bool {
        self.d2 & 1 == 0
    }

    /// Returns the data length in bytes, rounded up.
    #[inline(always)]
    pub const fn byte_len(self) -> u8 {
        (self.d2 & 1) + (self.d2 >> 1)
    }
}

/// A mask of the levels at which a cell has hashes.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct LevelMask(u8);

impl LevelMask {
    /// An empty level mask (an ordinary level-0 cell).
    pub const EMPTY: Self = LevelMask(0);
    /// The maximum level a cell can have.
    pub const MAX_LEVEL: u8 = 3;

    /// Creates a new level mask, truncating the value to its three lowest bits.
    #[inline(always)]
    pub const fn new(mask: u8) -> Self {
        Self(mask & 0b111)
    }

    /// Returns whether the mask is empty.
    #[inline(always)]
    pub const fn is_empty(self) -> bool {
        self.0 == 0
    }

    /// Creates a new level mask from the raw value.
    ///
    /// # Safety
    ///
    /// Only the three lowest bits of `mask` may be set.
    #[inline(always)]
    pub const unsafe fn new_unchecked(mask: u8) -> Self {
        Self(mask)
    }

    /// Creates a level mask covering all levels up to `level` (clamped to 3).
    #[inline(always)]
    pub const fn from_level(level: u8) -> Self {
        Self(match level {
            0 => 0,
            1 => 1,
            2 => 3,
            _ => 7,
        })
    }

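    /// Counts the set bits in the mask, which is the cell level.
    ///
    /// A quick doc-test sketch (paths assume this crate's public `cell` module):
    ///
    /// ```
    /// use everscale_types::cell::LevelMask;
    ///
    /// assert_eq!(LevelMask::new(0b101).level(), 2);
    /// assert_eq!(LevelMask::EMPTY.level(), 0);
    /// ```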
    pub const fn level(self) -> u8 {
        (self.0 & 1) + ((self.0 >> 1) & 1) + ((self.0 >> 2) & 1)
    }

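    /// Computes the index of the hash to use for the specified level:
    /// the number of mask bits at or below that level.
    ///
    /// A quick doc-test sketch (paths assume this crate's public `cell` module):
    ///
    /// ```
    /// use everscale_types::cell::LevelMask;
    ///
    /// let mask = LevelMask::new(0b100);
    /// assert_eq!(mask.hash_index(0), 0);
    /// assert_eq!(mask.hash_index(3), 1);
    /// ```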
    pub const fn hash_index(self, level: u8) -> u8 {
        Self(self.0 & Self::from_level(level).0).level()
    }

    /// Returns whether the mask contains the specified level
    /// (level 0 is always present).
    pub const fn contains(self, level: u8) -> bool {
        level == 0 || self.0 & (1 << (level - 1)) != 0
    }

    /// Shifts the mask down by `offset` levels.
    #[inline(always)]
    pub const fn virtualize(self, offset: u8) -> Self {
        Self(self.0 >> offset)
    }

    /// Returns the raw mask value.
    #[inline]
    pub const fn to_byte(self) -> u8 {
        self.0
    }
}

impl IntoIterator for LevelMask {
    type Item = u8;
    type IntoIter = LevelMaskIter;

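    /// Returns an iterator over the levels at which hashes are stored.
    /// Level 0 is always yielded first.
    ///
    /// A quick doc-test sketch (paths assume this crate's public `cell` module):
    ///
    /// ```
    /// use everscale_types::cell::LevelMask;
    ///
    /// let levels = LevelMask::new(0b101).into_iter().collect::<Vec<_>>();
    /// assert_eq!(levels, [0, 1, 3]);
    /// ```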
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        // Always include level 0, then the mask levels shifted up by one.
        LevelMaskIter(1 | (self.0 << 1))
    }
}

impl PartialEq<u8> for LevelMask {
    #[inline]
    fn eq(&self, other: &u8) -> bool {
        self.0 == *other
    }
}

impl BitOr for LevelMask {
    type Output = Self;

    #[inline(always)]
    fn bitor(self, rhs: Self) -> Self::Output {
        Self(self.0 | rhs.0)
    }
}

impl BitOrAssign for LevelMask {
    #[inline(always)]
    fn bitor_assign(&mut self, rhs: Self) {
        self.0 |= rhs.0;
    }
}

impl From<LevelMask> for u8 {
    #[inline(always)]
    fn from(m: LevelMask) -> u8 {
        m.0
    }
}

impl std::fmt::Debug for LevelMask {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_fmt(format_args!("{:03b}", self.0))
    }
}

/// An iterator over the levels of a level mask, lowest first.
#[derive(Clone)]
pub struct LevelMaskIter(u8);

impl Iterator for LevelMaskIter {
    type Item = u8;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        (self.0 != 0).then(|| {
            // Isolate the lowest set bit and clear it from the mask.
            let mask = self.0 & !(self.0 - 1);
            self.0 &= !mask;

            mask.trailing_zeros() as u8
        })
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.0.count_ones() as usize;
        (len, Some(len))
    }
}

/// The size of a cell or a slice, in bits and references.
#[derive(Debug, Default, Clone, Copy, Eq, PartialEq)]
pub struct Size {
    /// The number of data bits.
    pub bits: u16,
    /// The number of references.
    pub refs: u8,
}

impl Size {
    /// A size of zero bits and zero references.
    pub const ZERO: Self = Self { bits: 0, refs: 0 };

    /// A size of one bit and zero references.
    pub const BIT: Self = Self { bits: 1, refs: 0 };

    /// A size of zero bits and one reference.
    pub const REF: Self = Self { bits: 0, refs: 1 };

    /// The maximum size that fits into a single cell.
    pub const MAX: Self = Self {
        bits: MAX_BIT_LEN,
        refs: MAX_REF_COUNT as _,
    };

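    /// Returns whether the size is within the single-cell limits
    /// ([`MAX_BIT_LEN`] bits and [`MAX_REF_COUNT`] references).
    ///
    /// A quick doc-test sketch (paths assume this crate's public `cell` module):
    ///
    /// ```
    /// use everscale_types::cell::Size;
    ///
    /// assert!(Size { bits: 1023, refs: 4 }.fits_into_cell());
    /// assert!(!Size { bits: 1024, refs: 0 }.fits_into_cell());
    /// ```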
    #[inline]
    pub const fn fits_into_cell(&self) -> bool {
        self.bits <= MAX_BIT_LEN && self.refs <= MAX_REF_COUNT as _
    }

    /// Adds the sizes together, saturating at the numeric bounds.
    #[inline]
    pub const fn saturating_add(self, rhs: Self) -> Self {
        Self {
            bits: self.bits.saturating_add(rhs.bits),
            refs: self.refs.saturating_add(rhs.refs),
        }
    }

    /// Subtracts the sizes, saturating at zero.
    #[inline]
    pub const fn saturating_sub(self, rhs: Self) -> Self {
        Self {
            bits: self.bits.saturating_sub(rhs.bits),
            refs: self.refs.saturating_sub(rhs.refs),
        }
    }

    /// Returns whether the size is zero bits and zero references.
    pub const fn is_zero(self) -> bool {
        self.bits == 0 && self.refs == 0
    }
}

impl From<Size> for CellTreeStats {
    #[inline]
    fn from(value: Size) -> Self {
        Self {
            bit_count: value.bits as _,
            cell_count: value.refs as _,
        }
    }
}

impl std::ops::Add for Size {
    type Output = Self;

    #[inline]
    fn add(mut self, rhs: Self) -> Self::Output {
        self += rhs;
        self
    }
}

impl std::ops::AddAssign for Size {
    #[inline]
    fn add_assign(&mut self, rhs: Self) {
        self.bits += rhs.bits;
        self.refs += rhs.refs;
    }
}

impl std::ops::Sub for Size {
    type Output = Self;

    #[inline]
    fn sub(mut self, rhs: Self) -> Self::Output {
        self -= rhs;
        self
    }
}

impl std::ops::SubAssign for Size {
    #[inline]
    fn sub_assign(&mut self, rhs: Self) {
        self.bits -= rhs.bits;
        self.refs -= rhs.refs;
    }
}

/// Aggregated statistics of a cell tree.
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub struct CellTreeStats {
    /// The total number of data bits.
    pub bit_count: u64,
    /// The total number of cells.
    pub cell_count: u64,
}

impl CellTreeStats {
    /// Empty stats.
    pub const ZERO: Self = CellTreeStats {
        bit_count: 0,
        cell_count: 0,
    };
}

impl std::ops::Add for CellTreeStats {
    type Output = Self;

    #[inline]
    fn add(self, rhs: Self) -> Self::Output {
        Self {
            bit_count: self.bit_count.saturating_add(rhs.bit_count),
            cell_count: self.cell_count.saturating_add(rhs.cell_count),
        }
    }
}

impl std::ops::AddAssign for CellTreeStats {
    #[inline]
    fn add_assign(&mut self, rhs: Self) {
        self.bit_count = self.bit_count.saturating_add(rhs.bit_count);
        self.cell_count = self.cell_count.saturating_add(rhs.cell_count);
    }
}

impl std::ops::Add<Size> for CellTreeStats {
    type Output = Self;

    #[inline]
    fn add(self, rhs: Size) -> Self::Output {
        Self {
            bit_count: self.bit_count.saturating_add(rhs.bits as _),
            cell_count: self.cell_count.saturating_add(rhs.refs as _),
        }
    }
}

impl std::iter::Sum for CellTreeStats {
    #[inline]
    fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
        let mut res = Self::ZERO;
        for item in iter {
            res += item;
        }
        res
    }
}

impl std::ops::AddAssign<Size> for CellTreeStats {
    fn add_assign(&mut self, rhs: Size) {
        self.bit_count = self.bit_count.saturating_add(rhs.bits as _);
        self.cell_count = self.cell_count.saturating_add(rhs.refs as _);
    }
}

impl std::ops::Sub for CellTreeStats {
    type Output = Self;

    #[inline]
    fn sub(mut self, rhs: Self) -> Self::Output {
        self -= rhs;
        self
    }
}

impl std::ops::SubAssign for CellTreeStats {
    #[inline]
    fn sub_assign(&mut self, rhs: Self) {
        self.bit_count = self.bit_count.saturating_sub(rhs.bit_count);
        self.cell_count = self.cell_count.saturating_sub(rhs.cell_count);
    }
}

impl std::ops::SubAssign<Size> for CellTreeStats {
    #[inline]
    fn sub_assign(&mut self, rhs: Size) {
        self.bit_count = self.bit_count.saturating_sub(rhs.bits as _);
        self.cell_count = self.cell_count.saturating_sub(rhs.refs as _);
    }
}

/// A helper to compute cell tree storage stats, deduplicating cells
/// by their representation hash.
pub struct StorageStat<'a> {
    visited: ahash::HashSet<&'a HashBytes>,
    stack: Vec<RefsIter<'a>>,
    stats: CellTreeStats,
    limit: usize,
}

impl<'a> StorageStat<'a> {
    /// Computes the stats for a slice, visiting at most `limit` unique cells.
    /// Returns `None` if the limit was reached.
    pub fn compute_for_slice<'b: 'a>(
        slice: &'a CellSlice<'b>,
        limit: usize,
    ) -> Option<CellTreeStats> {
        let mut this = Self::with_limit(limit);
        if this.add_slice(slice) {
            Some(this.stats)
        } else {
            None
        }
    }

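    /// Computes the stats for a cell tree, visiting at most `limit` unique cells.
    /// Returns `None` if the limit was reached.
    ///
    /// A quick doc-test sketch (paths assume this crate's public `cell` module):
    ///
    /// ```
    /// use everscale_types::cell::{Cell, CellFamily, StorageStat};
    ///
    /// let cell = Cell::empty_cell();
    /// let stats = StorageStat::compute_for_cell(cell.as_ref(), usize::MAX).unwrap();
    /// assert_eq!(stats.cell_count, 1);
    /// assert_eq!(stats.bit_count, 0);
    /// ```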
    pub fn compute_for_cell(cell: &'a DynCell, limit: usize) -> Option<CellTreeStats> {
        let mut this = Self::with_limit(limit);
        if this.add_cell(cell) {
            Some(this.stats)
        } else {
            None
        }
    }

    /// Creates a new helper with the specified unique-cell limit.
    pub fn with_limit(limit: usize) -> Self {
        Self {
            visited: Default::default(),
            stack: Vec::new(),
            stats: CellTreeStats::ZERO,
            limit,
        }
    }

    /// Creates a new helper without a cell limit.
    pub fn unlimited() -> Self {
        Self::with_limit(usize::MAX)
    }

    /// Returns the stats accumulated so far.
    pub fn stats(&self) -> CellTreeStats {
        self.stats
    }

    /// Adds a cell tree to the stats. Returns `false` if the limit was reached.
    pub fn add_cell(&mut self, cell: &'a DynCell) -> bool {
        if !self.visited.insert(cell.repr_hash()) {
            return true;
        }

        self.stats.bit_count += cell.bit_len() as u64;
        self.stats.cell_count += 1;

        self.stack.clear();
        self.stack.push(cell.references());
        self.reduce_stack()
    }

    /// Adds a slice and its subtrees to the stats.
    /// Returns `false` if the limit was reached.
    pub fn add_slice(&mut self, slice: &CellSlice<'a>) -> bool {
        self.stats.bit_count += slice.size_bits() as u64;

        self.stack.clear();
        self.stack.push(slice.references());
        self.reduce_stack()
    }

    fn reduce_stack(&mut self) -> bool {
        'outer: while let Some(item) = self.stack.last_mut() {
            for cell in item.by_ref() {
                // Skip cells that were already visited.
                if !self.visited.insert(cell.repr_hash()) {
                    continue;
                }

                if self.stats.cell_count >= self.limit as u64 {
                    return false;
                }

                self.stats.bit_count += cell.bit_len() as u64;
                self.stats.cell_count += 1;

                // Descend into the children of this cell first.
                let next = cell.references();
                if next.max > 0 {
                    self.stack.push(next);
                    continue 'outer;
                }
            }

            self.stack.pop();
        }

        true
    }
}

/// A wrapper around a cell that implements [`std::fmt::Debug`].
#[derive(Clone, Copy)]
pub struct DebugCell<'a>(&'a DynCell);

impl std::fmt::Debug for DebugCell<'_> {
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.0.fmt(f)
    }
}

/// A wrapper around a cell that displays the root cell only.
#[derive(Clone, Copy)]
pub struct DisplayCellRoot<'a> {
    cell: &'a DynCell,
    level: usize,
}

impl std::fmt::Display for DisplayCellRoot<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let data = hex::encode(self.cell.data());

        let indent = self.level * 2;
        if f.alternate() {
            f.write_fmt(format_args!("{:indent$}{data}\n", ""))
        } else {
            let repr_depth = self.cell.depth(LevelMask::MAX_LEVEL);
            let repr_hash = self.cell.repr_hash();
            let descriptor = self.cell.descriptor();
            f.write_fmt(format_args!(
                "{:indent$}{:?}: {data}\n{:indent$}bits: {:>4}, refs: {}, l: {:?}, depth: {}, hash: {}\n",
                "",
                descriptor.cell_type(),
                "",
                self.cell.bit_len(),
                descriptor.reference_count(),
                descriptor.level_mask(),
                repr_depth,
                repr_hash,
            ))
        }
    }
}

/// A wrapper around a cell that displays the whole cell tree.
#[derive(Clone, Copy)]
pub struct DisplayCellTree<'a>(&'a DynCell);

impl std::fmt::Display for DisplayCellTree<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut stack = vec![(0, self.0)];

        while let Some((level, cell)) = stack.pop() {
            ok!(std::fmt::Display::fmt(&DisplayCellRoot { cell, level }, f));

            let reference_count = cell.reference_count();
            // Push children in reverse so they are printed in order.
            for i in (0..reference_count).rev() {
                if let Some(child) = cell.reference(i) {
                    stack.push((level + 1, child));
                }
            }
        }

        Ok(())
    }
}

/// The maximum number of data bits that a cell can store.
pub const MAX_BIT_LEN: u16 = 1023;
/// The maximum number of child cells that a cell can have.
pub const MAX_REF_COUNT: usize = 4;

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn correct_level() {
        const LEVEL: [u8; 8] = [0, 1, 1, 2, 1, 2, 2, 3];

        for mask in 0b000..=0b111 {
            assert_eq!(LevelMask(mask).level(), LEVEL[mask as usize]);
        }
    }

    #[test]
    fn correct_hash_index() {
        const HASH_INDEX_TABLE: [[u8; 4]; 8] = [
            [0, 0, 0, 0], // 000
            [0, 1, 1, 1], // 001
            [0, 0, 1, 1], // 010
            [0, 1, 2, 2], // 011
            [0, 0, 0, 1], // 100
            [0, 1, 1, 2], // 101
            [0, 0, 1, 2], // 110
            [0, 1, 2, 3], // 111
        ];

        for mask in 0b000..=0b111 {
            let mask = LevelMask(mask);

            for i in 0..=3 {
                let hash_index = mask.hash_index(i);
                assert_eq!(hash_index, HASH_INDEX_TABLE[mask.0 as usize][i as usize]);
            }
        }
    }

    #[test]
    fn ultra_virtual_cell_by_ref() {
        let cell = Cell::empty_cell();

        let pruned1 =
            crate::merkle::make_pruned_branch(cell.as_ref(), 0, &mut Cell::empty_context())
                .unwrap();

        let pruned2 =
            crate::merkle::make_pruned_branch(pruned1.as_ref(), 1, &mut Cell::empty_context())
                .unwrap();

        let pruned3 =
            crate::merkle::make_pruned_branch(pruned2.as_ref(), 2, &mut Cell::empty_context())
                .unwrap();

        // Level 3 -> 2
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), pruned2.repr_hash());
        assert_eq!(pruned3.repr_depth(), pruned2.repr_depth());

        // Level 2 -> 1
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), pruned1.repr_hash());
        assert_eq!(pruned3.repr_depth(), pruned1.repr_depth());

        // Level 1 -> 0
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());

        // Virtualizing a level-0 cell has no further effect.
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());

        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());
    }

    #[test]
    fn versioned_store_load() {
        #[derive(Debug, Clone, Copy, Eq, PartialEq, Store, Load)]
        #[tlb(tag = ["#12", "#34", "#56"])]
        struct Versioned {
            old_field1: u32,
            #[tlb(since_tag = 1)]
            new_field1: u32,
            old_field2: bool,
            #[tlb(since_tag = 2)]
            new_field2: bool,
        }

        let old = Versioned {
            old_field1: 123,
            new_field1: 0,
            old_field2: true,
            new_field2: false,
        };
        let cell = CellBuilder::build_from(old).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x12);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert_eq!(slice.load_bit().unwrap(), true);
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            old,
        );

        let new = Versioned {
            old_field1: 123,
            new_field1: 456,
            old_field2: true,
            new_field2: false,
        };
        let cell = CellBuilder::build_from(new).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 32 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x34);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert_eq!(slice.load_u32().unwrap(), 456);
            assert_eq!(slice.load_bit().unwrap(), true);
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            new
        );

        let too_new = Versioned {
            old_field1: 123,
            new_field1: 0,
            old_field2: true,
            new_field2: true,
        };
        let cell = CellBuilder::build_from(too_new).unwrap();

        {
            let mut slice = cell.as_slice().unwrap();
            assert_eq!(slice.size_bits(), 8 + 32 + 32 + 1 + 1);
            assert_eq!(slice.load_u8().unwrap(), 0x56);
            assert_eq!(slice.load_u32().unwrap(), 123);
            assert_eq!(slice.load_u32().unwrap(), 0);
            assert_eq!(slice.load_bit().unwrap(), true);
            assert_eq!(slice.load_bit().unwrap(), true);
            assert!(slice.is_empty());
        }
        assert_eq!(
            Versioned::load_from(&mut cell.as_slice().unwrap()).unwrap(),
            too_new
        );
    }
}