use std::ops::{BitOr, BitOrAssign};
use std::str::FromStr;
use crate::error::{Error, ParseHashBytesError};
use crate::util::Bitstring;
pub use self::builder::{CellBuilder, CellRefsBuilder, Store};
pub use self::cell_context::{CellContext, CellParts, LoadMode};
pub use self::cell_impl::{StaticCell, VirtualCellWrapper};
pub use self::slice::{CellSlice, CellSliceParts, CellSliceRange, CellSliceSize, ExactSize, Load};
pub use self::usage_tree::{UsageTree, UsageTreeMode, UsageTreeWithSubtrees};
#[cfg(not(feature = "sync"))]
pub use self::cell_impl::rc::Cell;
#[cfg(feature = "sync")]
pub use self::cell_impl::sync::Cell;
pub use everscale_types_proc::{Load, Store};
mod cell_impl;
mod cell_context;
mod slice;
mod builder;
mod usage_tree;
#[cfg(feature = "sync")]
#[doc(hidden)]
mod __checks {
    use super::*;

    // Compile-time checks that the `sync` flavor of these types really is
    // sendable across threads (`assert_impl_all!` is a static assertion).
    assert_impl_all!(Cell: Send);
    assert_impl_all!(CellSlice: Send);
    assert_impl_all!(CellBuilder: Send);
}
/// Marker trait declaring that `Self` has a representation equivalent to `T`,
/// so values may be treated interchangeably by code elsewhere in the crate.
/// NOTE(review): the exact reinterpretation contract is not visible here —
/// confirm against the call sites before implementing for new type pairs.
pub trait EquivalentRepr<T> {}

/// Every type is trivially equivalent to itself.
impl<T> EquivalentRepr<T> for T {}
/// Family-wide helpers for constructing well-known cells.
pub trait CellFamily: Sized {
    /// Context type used when no special cell context is required.
    type EmptyCellContext: CellContext;

    /// Creates an owned handle to the empty cell.
    fn empty_cell() -> Cell;

    /// Returns a static reference to the empty cell.
    fn empty_cell_ref() -> &'static DynCell;

    /// Returns the default (empty) cell context.
    fn empty_context() -> Self::EmptyCellContext;

    /// Returns a static cell whose data bits are all zeros.
    fn all_zeros_ref() -> &'static DynCell;

    /// Returns a static cell whose data bits are all ones.
    fn all_ones_ref() -> &'static DynCell;

    /// Returns a virtualized version of the given cell
    /// (see [`CellImpl::virtualize`]).
    fn virtualize(cell: Cell) -> Cell;
}
// `DynCell` is the trait-object form in which cells are accessed throughout
// the crate. With the `sync` feature the object is additionally required to
// be `Send + Sync` so cells can be shared between threads (see `__checks`).
#[cfg(not(feature = "sync"))]
pub type DynCell = dyn CellImpl;
#[cfg(feature = "sync")]
pub type DynCell = dyn CellImpl + Send + Sync;

impl AsRef<DynCell> for DynCell {
    /// Identity conversion; lets `&DynCell` satisfy `AsRef<DynCell>` bounds.
    #[inline(always)]
    fn as_ref(&self) -> &Self {
        self
    }
}

impl AsMut<DynCell> for DynCell {
    /// Identity conversion; lets `&mut DynCell` satisfy `AsMut<DynCell>` bounds.
    #[inline(always)]
    fn as_mut(&mut self) -> &mut Self {
        self
    }
}
/// Internal interface implemented by every concrete cell representation.
///
/// Prefer interacting with cells through the inherent methods on [`DynCell`].
pub trait CellImpl {
    /// Returns the two descriptor bytes (`d1`, `d2`) of this cell.
    fn descriptor(&self) -> CellDescriptor;

    /// Returns the raw cell data; only the first [`Self::bit_len`] bits
    /// are meaningful.
    fn data(&self) -> &[u8];

    /// Returns the number of data bits this cell stores.
    fn bit_len(&self) -> u16;

    /// Returns a borrowed child cell by index, or `None` if out of range.
    fn reference(&self, index: u8) -> Option<&DynCell>;

    /// Returns an owned child cell by index, or `None` if out of range.
    fn reference_cloned(&self, index: u8) -> Option<Cell>;

    /// Returns a virtualized view of this cell.
    /// NOTE(review): presumably lowers the effective level mask (see
    /// [`LevelMask::virtualize`]) — confirm against the implementations.
    fn virtualize(&self) -> &DynCell;

    /// Returns the cell hash for the given level.
    fn hash(&self, level: u8) -> &HashBytes;

    /// Returns the cell depth for the given level.
    fn depth(&self, level: u8) -> u16;

    /// Takes out the first child, if any.
    /// NOTE(review): exact semantics are implementation-defined; used by
    /// tree-rewriting code elsewhere in the crate — confirm before relying on it.
    fn take_first_child(&mut self) -> Option<Cell>;

    /// Replaces the first child with `parent`.
    /// NOTE(review): the meaning of the `Ok`/`Err` payloads is not visible
    /// here — confirm against the implementations.
    fn replace_first_child(&mut self, parent: Cell) -> Result<Cell, Cell>;

    /// Takes out the next child, if any (see `take_first_child`).
    fn take_next_child(&mut self) -> Option<Cell>;

    /// Returns aggregated stats for the subtree rooted at this cell.
    #[cfg(feature = "stats")]
    fn stats(&self) -> CellTreeStats;
}
impl DynCell {
#[inline]
pub fn cell_type(&self) -> CellType {
self.descriptor().cell_type()
}
#[inline]
pub fn level(&self) -> u8 {
self.descriptor().level_mask().level()
}
#[inline]
pub fn level_mask(&self) -> LevelMask {
self.descriptor().level_mask()
}
#[inline]
pub fn reference_count(&self) -> u8 {
self.descriptor().reference_count()
}
pub fn get_reference_as_slice(&self, index: u8) -> Result<CellSlice<'_>, Error> {
match self.reference(index) {
Some(cell) => CellSlice::new(cell),
None => Err(Error::CellUnderflow),
}
}
#[inline]
pub fn is_exotic(&self) -> bool {
self.descriptor().is_exotic()
}
#[inline]
pub fn repr_hash(&self) -> &HashBytes {
self.hash(LevelMask::MAX_LEVEL)
}
#[inline]
pub fn repr_depth(&self) -> u16 {
self.depth(LevelMask::MAX_LEVEL)
}
pub fn is_empty(&self) -> bool {
self.hash(LevelMask::MAX_LEVEL) == EMPTY_CELL_HASH
}
#[inline]
pub fn references(&self) -> RefsIter<'_> {
RefsIter {
cell: self,
max: self.reference_count(),
index: 0,
}
}
#[inline]
pub fn as_slice(&'_ self) -> Result<CellSlice<'_>, Error> {
CellSlice::new(self)
}
#[inline]
pub unsafe fn as_slice_unchecked(&'_ self) -> CellSlice<'_> {
CellSlice::new_unchecked(self)
}
pub fn compute_unique_stats(&self, limit: usize) -> Option<CellTreeStats> {
StorageStat::compute_for_cell(self, limit)
}
#[inline]
pub fn debug_root(&'_ self) -> DebugCell<'_> {
DebugCell(self)
}
#[inline]
pub fn display_root(&'_ self) -> DisplayCellRoot<'_> {
DisplayCellRoot {
cell: self,
level: 0,
}
}
#[inline]
pub fn display_tree(&'_ self) -> DisplayCellTree<'_> {
DisplayCellTree(self)
}
#[inline]
pub fn display_data(&self) -> impl std::fmt::Display + std::fmt::Binary + '_ {
struct DisplayData<'a>(&'a DynCell);
impl std::fmt::Display for DisplayData<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(
&Bitstring {
bytes: self.0.data(),
bit_len: self.0.bit_len(),
},
f,
)
}
}
impl std::fmt::Binary for DisplayData<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Binary::fmt(
&Bitstring {
bytes: self.0.data(),
bit_len: self.0.bit_len(),
},
f,
)
}
}
DisplayData(self)
}
#[inline]
pub fn parse<'a, T: Load<'a>>(&'a self) -> Result<T, Error> {
T::load_from(&mut ok!(self.as_slice()))
}
}
impl std::fmt::Debug for DynCell {
    // Debug output shows only the cell type and representation hash,
    // not the whole tree (use `display_tree` for that).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        crate::util::debug_struct_field2_finish(
            f,
            "Cell",
            "ty",
            &self.cell_type(),
            "hash",
            self.repr_hash(),
        )
    }
}

impl Eq for DynCell {}

impl PartialEq<DynCell> for DynCell {
    // Cells compare equal iff their representation hashes are equal.
    #[inline]
    fn eq(&self, other: &DynCell) -> bool {
        self.repr_hash() == other.repr_hash()
    }
}

#[cfg(feature = "serde")]
impl serde::Serialize for DynCell {
    // Serializes as base64-encoded BOC for human-readable formats,
    // or as raw BOC bytes otherwise.
    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        let boc = crate::boc::Boc::encode(self);
        if serializer.is_human_readable() {
            serializer.serialize_str(&crate::util::encode_base64(boc))
        } else {
            serializer.serialize_bytes(&boc)
        }
    }
}
/// An iterator over the references (child cells) of a cell.
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct RefsIter<'a> {
    // The cell whose children are being iterated.
    cell: &'a DynCell,
    // Exclusive upper bound for `index` (lowered by `next_back`).
    max: u8,
    // Index of the next child to yield from the front.
    index: u8,
}

impl<'a> RefsIter<'a> {
    /// Returns a reference to the parent cell.
    #[inline]
    pub fn cell(&self) -> &'a DynCell {
        self.cell
    }

    /// Returns the child that `next` would yield, without advancing.
    #[inline]
    pub fn peek(&self) -> Option<&'a DynCell> {
        if self.index >= self.max {
            None
        } else {
            self.cell.reference(self.index)
        }
    }

    /// Same as [`Self::peek`], but returns an owned cell handle.
    #[inline]
    pub fn peek_cloned(&self) -> Option<Cell> {
        if self.index >= self.max {
            None
        } else {
            self.cell.reference_cloned(self.index)
        }
    }

    /// Returns the child yielded by the previous call to `next`, if any.
    #[inline]
    pub fn peek_prev(&self) -> Option<&'a DynCell> {
        if self.index > 0 {
            self.cell.reference(self.index - 1)
        } else {
            None
        }
    }

    /// Same as [`Self::peek_prev`], but returns an owned cell handle.
    #[inline]
    pub fn peek_prev_cloned(&self) -> Option<Cell> {
        if self.index > 0 {
            self.cell.reference_cloned(self.index - 1)
        } else {
            None
        }
    }

    /// Converts this iterator into one that yields owned cell handles.
    #[inline]
    pub fn cloned(self) -> ClonedRefsIter<'a> {
        ClonedRefsIter { inner: self }
    }
}

impl Clone for RefsIter<'_> {
    // Fieldwise copy; the cell reference and both counters are `Copy`.
    #[inline]
    fn clone(&self) -> Self {
        Self {
            cell: self.cell,
            max: self.max,
            index: self.index,
        }
    }
}
impl<'a> Iterator for RefsIter<'a> {
    type Item = &'a DynCell;

    /// Yields children front-to-back until the two cursors meet.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.index < self.max {
            let i = self.index;
            self.index += 1;
            self.cell.reference(i)
        } else {
            None
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: the number of children left between the two cursors.
        let left = self.max.saturating_sub(self.index) as usize;
        (left, Some(left))
    }
}

impl<'a> DoubleEndedIterator for RefsIter<'a> {
    /// Yields children back-to-front by lowering the upper cursor.
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.index < self.max {
            self.max -= 1;
            self.cell.reference(self.max)
        } else {
            None
        }
    }
}

impl ExactSizeIterator for RefsIter<'_> {
    #[inline]
    fn len(&self) -> usize {
        self.max.saturating_sub(self.index) as usize
    }
}
/// An iterator over the references (child cells) of a cell that yields
/// owned cell handles.
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct ClonedRefsIter<'a> {
    // Borrowing iterator that tracks the traversal state.
    inner: RefsIter<'a>,
}

impl<'a> ClonedRefsIter<'a> {
    /// Returns a reference to the parent cell.
    #[inline]
    pub fn cell(&self) -> &'a DynCell {
        self.inner.cell
    }

    /// Returns the child that `next` would yield, without advancing.
    #[inline]
    pub fn peek(&self) -> Option<Cell> {
        self.inner.peek_cloned()
    }

    /// Returns the child yielded by the previous call to `next`, if any.
    #[inline]
    pub fn peek_prev(&self) -> Option<Cell> {
        self.inner.peek_prev_cloned()
    }
}

impl Clone for ClonedRefsIter<'_> {
    #[inline]
    fn clone(&self) -> Self {
        Self {
            inner: self.inner.clone(),
        }
    }
}
impl<'a> Iterator for ClonedRefsIter<'a> {
    type Item = Cell;

    /// Yields owned child handles front-to-back.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.inner.index < self.inner.max {
            let i = self.inner.index;
            self.inner.index += 1;
            self.inner.cell.reference_cloned(i)
        } else {
            None
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<'a> DoubleEndedIterator for ClonedRefsIter<'a> {
    /// Yields owned child handles back-to-front.
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.inner.index < self.inner.max {
            self.inner.max -= 1;
            self.inner.cell.reference_cloned(self.inner.max)
        } else {
            None
        }
    }
}

impl ExactSizeIterator for ClonedRefsIter<'_> {
    #[inline]
    fn len(&self) -> usize {
        self.inner.len()
    }
}
/// A 256-bit hash value, stored as a plain 32-byte array.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[repr(transparent)]
pub struct HashBytes(pub [u8; 32]);

impl HashBytes {
    /// A hash consisting solely of zero bytes.
    pub const ZERO: Self = Self([0; 32]);

    /// Copies the bytes of `slice` into a new hash.
    ///
    /// # Panics
    ///
    /// Panics if `slice` is not exactly 32 bytes long.
    #[inline]
    pub fn from_slice(slice: &[u8]) -> Self {
        let bytes: [u8; 32] = slice.try_into().expect("slice with incorrect length");
        Self(bytes)
    }

    /// Reinterprets a borrowed byte array as a borrowed hash.
    #[inline(always)]
    pub const fn wrap(value: &[u8; 32]) -> &Self {
        // SAFETY: `HashBytes` is `#[repr(transparent)]` over `[u8; 32]`,
        // so the two reference types have identical layout.
        unsafe { &*(value as *const [u8; 32] as *const Self) }
    }

    /// Returns the hash contents as a byte slice.
    pub const fn as_slice(&self) -> &[u8] {
        self.0.as_slice()
    }

    /// Returns the hash contents as a mutable byte slice.
    pub fn as_mut_slice(&mut self) -> &mut [u8] {
        self.0.as_mut_slice()
    }

    /// Borrows the underlying byte array.
    pub const fn as_array(&self) -> &[u8; 32] {
        &self.0
    }

    /// Mutably borrows the underlying byte array.
    pub fn as_mut_array(&mut self) -> &mut [u8; 32] {
        &mut self.0
    }

    /// Returns a raw pointer to the first byte of the hash.
    // The cast chain (instead of `<[u8]>::as_ptr`) keeps this usable in
    // `const` contexts.
    #[inline(always)]
    pub const fn as_ptr(&self) -> *const u8 {
        &self.0 as *const [u8] as *const u8
    }
}
impl Default for HashBytes {
    /// Returns [`HashBytes::ZERO`].
    #[inline(always)]
    fn default() -> Self {
        Self::ZERO
    }
}

// Cheap reference conversions to the underlying array / byte slice.
impl AsRef<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn as_ref(&self) -> &[u8; 32] {
        &self.0
    }
}

impl AsRef<[u8]> for HashBytes {
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        self.0.as_ref()
    }
}

impl AsMut<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8; 32] {
        &mut self.0
    }
}

impl AsMut<[u8]> for HashBytes {
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8] {
        self.0.as_mut()
    }
}

// `Borrow` impls let hashes be looked up by raw bytes (and vice versa)
// in keyed collections.
impl std::borrow::Borrow<[u8]> for HashBytes {
    #[inline(always)]
    fn borrow(&self) -> &[u8] {
        &self.0
    }
}

impl std::borrow::BorrowMut<[u8]> for HashBytes {
    #[inline(always)]
    fn borrow_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}

impl std::borrow::Borrow<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn borrow(&self) -> &HashBytes {
        HashBytes::wrap(self)
    }
}
// Symmetric equality between `HashBytes` and plain byte arrays/slices.
impl PartialEq<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn eq(&self, other: &[u8; 32]) -> bool {
        &self.0 == other
    }
}

impl PartialEq<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn eq(&self, other: &HashBytes) -> bool {
        self == &other.0
    }
}

impl PartialEq<[u8]> for HashBytes {
    #[inline(always)]
    fn eq(&self, other: &[u8]) -> bool {
        self.0 == other
    }
}

impl PartialEq<HashBytes> for [u8] {
    #[inline(always)]
    fn eq(&self, other: &HashBytes) -> bool {
        self == other.0
    }
}

impl From<[u8; 32]> for HashBytes {
    #[inline(always)]
    fn from(value: [u8; 32]) -> Self {
        Self(value)
    }
}

// Allows using a SHA-256 digest directly as a hash value.
impl From<sha2::digest::Output<sha2::Sha256>> for HashBytes {
    #[inline(always)]
    fn from(value: sha2::digest::Output<sha2::Sha256>) -> Self {
        Self(value.into())
    }
}

impl From<HashBytes> for [u8; 32] {
    #[inline(always)]
    fn from(value: HashBytes) -> Self {
        value.0
    }
}
impl FromStr for HashBytes {
    type Err = ParseHashBytesError;

    /// Parses a hash from one of the supported encodings, selected by length:
    ///
    /// - 64 characters: plain hex;
    /// - 66 characters: hex with a `0x` prefix;
    /// - 44 characters (with the `base64` feature): base64.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut result = Self::default();
        match s.len() {
            64 => {
                if let Err(e) = hex::decode_to_slice(s, &mut result.0) {
                    return Err(ParseHashBytesError::InvalidHex(e));
                }
            }
            66 => {
                // Require an explicit `0x` prefix. The previous code sliced
                // `&s[2..]` unconditionally, which both accepted arbitrary
                // two-character prefixes and could panic on a multi-byte
                // (non-ASCII) leading character due to byte-index slicing.
                match s.strip_prefix("0x") {
                    Some(rest) => {
                        if let Err(e) = hex::decode_to_slice(rest, &mut result.0) {
                            return Err(ParseHashBytesError::InvalidHex(e));
                        }
                    }
                    None => return Err(ParseHashBytesError::UnexpectedStringLength),
                }
            }
            #[cfg(feature = "base64")]
            44 => {
                if let Err(e) = crate::util::decode_base64_slice(s, &mut result.0) {
                    return Err(ParseHashBytesError::InvalidBase64(e));
                }
            }
            _ => return Err(ParseHashBytesError::UnexpectedStringLength),
        }
        Ok(result)
    }
}
impl std::fmt::Display for HashBytes {
    // Formats the hash as 64 lowercase hex characters.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut output = [0u8; 64];
        // Cannot fail: the output buffer is exactly twice the input length.
        hex::encode_to_slice(self, &mut output).ok();
        // SAFETY: hex encoding produces ASCII characters only.
        let output = unsafe { std::str::from_utf8_unchecked(&output) };
        f.write_str(output)
    }
}

impl std::fmt::Debug for HashBytes {
    // Debug output is identical to `Display` (hex string).
    #[inline(always)]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
    }
}
// Indexing delegates to the inner byte array, so both `usize` and all the
// range types work: `hash[0]`, `hash[4..8]`, etc.
impl<I> std::ops::Index<I> for HashBytes
where
    [u8; 32]: std::ops::Index<I>,
{
    type Output = <[u8; 32] as std::ops::Index<I>>::Output;

    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        std::ops::Index::index(&self.0, index)
    }
}

impl<I> std::ops::IndexMut<I> for HashBytes
where
    [u8; 32]: std::ops::IndexMut<I>,
{
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        std::ops::IndexMut::index_mut(&mut self.0, index)
    }
}

// Allows sampling a uniformly random hash, e.g. `rng.gen::<HashBytes>()`.
#[cfg(any(feature = "rand", test))]
impl rand::distributions::Distribution<HashBytes> for rand::distributions::Standard {
    #[inline]
    fn sample<R: rand::Rng + ?Sized>(&self, rng: &mut R) -> HashBytes {
        HashBytes(rand::distributions::Standard.sample(rng))
    }
}
#[cfg(feature = "serde")]
impl serde::Serialize for HashBytes {
    // Serializes as a 64-char hex string for human-readable formats,
    // or as the raw 32 bytes otherwise.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        if serializer.is_human_readable() {
            let mut output = [0u8; 64];
            // Cannot fail: the output buffer is exactly twice the input length.
            hex::encode_to_slice(self.0.as_slice(), &mut output).ok();
            // SAFETY: hex encoding produces ASCII characters only.
            let output = unsafe { std::str::from_utf8_unchecked(&output) };
            serializer.serialize_str(output)
        } else {
            serializer.serialize_bytes(&self.0)
        }
    }
}
#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for HashBytes {
    /// Deserializes either a 64-char hex string (human-readable formats)
    /// or a raw 32-byte array (binary formats).
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use ::serde::de::{Error, Visitor};

        // Visitor for hex-encoded strings.
        struct HashBytesHexVisitor;

        impl<'de> Visitor<'de> for HashBytesHexVisitor {
            type Value = HashBytes;

            fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                formatter.write_str("hex-encoded byte array of size 32")
            }

            fn visit_str<E: Error>(self, value: &str) -> Result<Self::Value, E> {
                let mut result = HashBytes([0; 32]);
                match hex::decode_to_slice(value, &mut result.0) {
                    Ok(()) => Ok(result),
                    Err(_) => Err(Error::invalid_value(
                        serde::de::Unexpected::Str(value),
                        &self,
                    )),
                }
            }
        }

        // Visitor for raw byte arrays. (This was previously declared `pub`,
        // which has no effect on a function-local item.)
        struct HashBytesRawVisitor;

        impl<'de> Visitor<'de> for HashBytesRawVisitor {
            type Value = HashBytes;

            fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                f.write_fmt(format_args!("a byte array of size 32"))
            }

            fn visit_bytes<E: Error>(self, v: &[u8]) -> Result<Self::Value, E> {
                v.try_into()
                    .map(HashBytes)
                    .map_err(|_e| Error::invalid_length(v.len(), &self))
            }
        }

        if deserializer.is_human_readable() {
            deserializer.deserialize_str(HashBytesHexVisitor)
        } else {
            deserializer.deserialize_bytes(HashBytesRawVisitor)
        }
    }
}
/// The representation hash of an empty ordinary cell (see [`DynCell::is_empty`]).
///
/// NOTE(review): assumed to be the SHA-256 of the empty cell's standard
/// representation — confirm against the cell hashing implementation.
pub static EMPTY_CELL_HASH: &HashBytes = HashBytes::wrap(&[
    0x96, 0xa2, 0x96, 0xd2, 0x24, 0xf2, 0x85, 0xc6, 0x7b, 0xee, 0x93, 0xc3, 0x0f, 0x8a, 0x30, 0x91,
    0x57, 0xf0, 0xda, 0xa3, 0x5d, 0xc5, 0xb8, 0x7e, 0x41, 0x0b, 0x78, 0x63, 0x0a, 0x09, 0xcf, 0xc7,
]);
/// Well-known cell types.
#[derive(Default, Debug, Copy, Clone, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum CellType {
    /// An ordinary (data) cell.
    #[default]
    Ordinary,
    /// A pruned branch cell.
    PrunedBranch,
    /// A library reference cell.
    LibraryReference,
    /// A merkle proof cell.
    MerkleProof,
    /// A merkle update cell.
    MerkleUpdate,
}

impl CellType {
    /// Returns whether this type is one of the merkle cell kinds.
    #[inline]
    pub const fn is_merkle(self) -> bool {
        matches!(self, Self::MerkleProof | Self::MerkleUpdate)
    }

    /// Returns whether this is any non-ordinary (exotic) cell type.
    #[inline]
    pub const fn is_exotic(self) -> bool {
        !matches!(self, Self::Ordinary)
    }

    /// Returns whether this is the pruned branch cell type.
    #[inline]
    pub const fn is_pruned_branch(self) -> bool {
        matches!(self, Self::PrunedBranch)
    }

    /// Encodes the cell type as its byte tag (`0xff` marks an ordinary cell).
    #[inline]
    pub const fn to_byte(self) -> u8 {
        match self {
            Self::Ordinary => 0xff,
            Self::PrunedBranch => 1,
            Self::LibraryReference => 2,
            Self::MerkleProof => 3,
            Self::MerkleUpdate => 4,
        }
    }

    /// Decodes a cell type from its byte tag, accepting the ordinary marker.
    #[inline]
    pub const fn from_byte(byte: u8) -> Option<Self> {
        if byte == 0xff {
            Some(Self::Ordinary)
        } else {
            Self::from_byte_exotic(byte)
        }
    }

    /// Decodes a cell type from its byte tag, rejecting the ordinary marker.
    #[inline]
    pub const fn from_byte_exotic(byte: u8) -> Option<Self> {
        match byte {
            1 => Some(Self::PrunedBranch),
            2 => Some(Self::LibraryReference),
            3 => Some(Self::MerkleProof),
            4 => Some(Self::MerkleUpdate),
            _ => None,
        }
    }
}
impl From<CellType> for u8 {
    /// Converts the cell type into its byte tag (see [`CellType::to_byte`]).
    #[inline]
    fn from(cell_type: CellType) -> u8 {
        cell_type.to_byte()
    }
}
/// The two descriptor bytes that prefix every serialized cell.
#[derive(Hash, Debug, Clone, Copy)]
#[repr(C)]
pub struct CellDescriptor {
    /// First descriptor byte: level mask (bits 5-7), store-hashes flag
    /// (bit 4), exotic flag (bit 3) and reference count (bits 0-2).
    pub d1: u8,
    /// Second descriptor byte: encoded data length,
    /// `2 * floor(bits / 8) + (1 if there is a partial trailing byte)`.
    pub d2: u8,
}

impl CellDescriptor {
    /// Bits of `d1` holding the reference count.
    pub const REF_COUNT_MASK: u8 = 0b0000_0111;
    /// Bit of `d1` set for exotic (non-ordinary) cells.
    pub const IS_EXOTIC_MASK: u8 = 0b0000_1000;
    /// Bit of `d1` set when hashes are stored alongside the cell.
    pub const STORE_HASHES_MASK: u8 = 0b0001_0000;
    /// Bits of `d1` holding the level mask.
    pub const LEVEL_MASK: u8 = 0b1110_0000;

    /// Packs a level mask, exotic flag and reference count into `d1`.
    #[inline(always)]
    pub const fn compute_d1(level_mask: LevelMask, is_exotic: bool, ref_count: u8) -> u8 {
        (level_mask.0 << 5) | ((is_exotic as u8) << 3) | (ref_count & Self::REF_COUNT_MASK)
    }

    /// Encodes a bit length into `d2`:
    /// `bit_len >> 2` with the low bit cleared is `2 * floor(bit_len / 8)`,
    /// and the low bit marks a partial trailing byte.
    #[inline(always)]
    pub const fn compute_d2(bit_len: u16) -> u8 {
        (((bit_len >> 2) as u8) & !0b1) | ((bit_len % 8 != 0) as u8)
    }

    /// Builds a descriptor from the raw `[d1, d2]` pair.
    #[inline(always)]
    pub const fn new(bytes: [u8; 2]) -> Self {
        Self {
            d1: bytes[0],
            d2: bytes[1],
        }
    }

    /// Decodes the cell type from `d1`.
    pub fn cell_type(self) -> CellType {
        if self.d1 & Self::IS_EXOTIC_MASK == 0 {
            CellType::Ordinary
        } else {
            match self.d1 & Self::REF_COUNT_MASK {
                0 => {
                    // An exotic cell without references is either a library
                    // reference (level 0) or a pruned branch (level > 0).
                    if self.d1 & Self::LEVEL_MASK == 0 {
                        CellType::LibraryReference
                    } else {
                        CellType::PrunedBranch
                    }
                }
                1 => CellType::MerkleProof,
                _ => CellType::MerkleUpdate,
            }
        }
    }

    /// Returns the number of child cells (0..=3 bits of `d1`).
    #[inline(always)]
    pub const fn reference_count(self) -> u8 {
        self.d1 & Self::REF_COUNT_MASK
    }

    /// Computes the number of hashes stored for this cell.
    ///
    /// A pruned branch (exotic, no refs, non-zero level) stores a single
    /// hash; every other cell stores `level + 1` hashes.
    #[inline(always)]
    pub const fn hash_count(self) -> u8 {
        let level = self.level_mask().level();
        if self.is_exotic() && self.reference_count() == 0 && level > 0 {
            1
        } else {
            level + 1
        }
    }

    /// Returns whether the exotic flag is set.
    #[inline(always)]
    pub const fn is_exotic(self) -> bool {
        self.d1 & Self::IS_EXOTIC_MASK != 0
    }

    /// Returns whether this descriptor belongs to a pruned branch cell.
    #[inline(always)]
    pub const fn is_pruned_branch(self) -> bool {
        self.is_exotic() && self.reference_count() == 0 && !self.level_mask().is_empty()
    }

    /// Returns whether this descriptor belongs to a merkle proof/update cell
    /// (the only exotic cells that carry references).
    #[inline(always)]
    pub const fn is_merkle(self) -> bool {
        self.is_exotic() && self.reference_count() != 0
    }

    /// Returns whether this is the special "absent" cell marker
    /// (`d1 == 0b0000_1111`).
    #[inline(always)]
    pub const fn is_absent(self) -> bool {
        self.d1 == (Self::REF_COUNT_MASK | Self::IS_EXOTIC_MASK)
    }

    /// Returns whether hashes are stored alongside the cell data.
    #[inline(always)]
    pub const fn store_hashes(self) -> bool {
        self.d1 & Self::STORE_HASHES_MASK != 0
    }

    /// Extracts the level mask from the top three bits of `d1`.
    #[inline(always)]
    pub const fn level_mask(self) -> LevelMask {
        // SAFETY: `d1 >> 5` always fits into three bits.
        unsafe { LevelMask::new_unchecked(self.d1 >> 5) }
    }

    /// Returns whether the data ends exactly on a byte boundary
    /// (the low bit of `d2` marks a partial trailing byte).
    #[inline(always)]
    pub const fn is_aligned(self) -> bool {
        self.d2 & 1 == 0
    }

    /// Recovers `ceil(bit_len / 8)` from `d2`.
    #[inline(always)]
    pub const fn byte_len(self) -> u8 {
        (self.d2 & 1) + (self.d2 >> 1)
    }
}
/// A three-bit mask of the levels (1..=3) at which a cell has distinct hashes.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct LevelMask(u8);

impl LevelMask {
    /// A mask with no bits set.
    pub const EMPTY: Self = LevelMask(0);
    /// The maximum level a cell can have.
    pub const MAX_LEVEL: u8 = 3;

    /// Builds a mask from the low three bits of `mask`.
    #[inline(always)]
    pub const fn new(mask: u8) -> Self {
        Self(mask & 0b111)
    }

    /// Returns `true` if no bits are set.
    #[inline(always)]
    pub const fn is_empty(self) -> bool {
        self.0 == 0
    }

    /// Builds a mask from a raw byte without truncating it.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `mask` fits into three bits.
    #[inline(always)]
    pub const unsafe fn new_unchecked(mask: u8) -> Self {
        Self(mask)
    }

    /// Returns the canonical mask for a cell of the given level:
    /// every level up to and including `level` is set.
    #[inline(always)]
    pub const fn from_level(level: u8) -> Self {
        Self(match level {
            0 => 0,
            1 => 0b001,
            2 => 0b011,
            _ => 0b111,
        })
    }

    /// Computes the level: the number of set bits in the mask.
    pub const fn level(self) -> u8 {
        // Only the low three bits participate, matching `new`'s truncation.
        (self.0 & 0b111).count_ones() as u8
    }

    /// Computes the hash index for the given level: the number of mask
    /// bits at or below `level`.
    pub const fn hash_index(self, level: u8) -> u8 {
        Self(self.0 & Self::from_level(level).0).level()
    }

    /// Returns whether the mask contains the given level.
    /// Level 0 is always contained.
    pub const fn contains(self, level: u8) -> bool {
        level == 0 || self.0 & LevelMask::from_level(level).0 != 0
    }

    /// Shifts the mask down by `offset` levels.
    #[inline(always)]
    pub const fn virtualize(self, offset: u8) -> Self {
        Self(self.0 >> offset)
    }

    /// Returns the raw mask byte.
    #[inline]
    pub const fn to_byte(self) -> u8 {
        self.0
    }
}
impl IntoIterator for LevelMask {
    type Item = u8;
    type IntoIter = LevelMaskIter;

    /// Iterates over hash indices for this mask: bit 0 is forced, so index 0
    /// is always yielded, followed by `i + 1` for every set mask bit `i`.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        LevelMaskIter(1 | self.0 << 1)
    }
}

impl PartialEq<u8> for LevelMask {
    // Compares the raw mask byte.
    #[inline]
    fn eq(&self, other: &u8) -> bool {
        self.0 == *other
    }
}

impl BitOr for LevelMask {
    type Output = Self;

    /// Computes the union of two masks.
    #[inline(always)]
    fn bitor(self, rhs: Self) -> Self::Output {
        Self(self.0 | rhs.0)
    }
}

impl BitOrAssign for LevelMask {
    /// Unions `rhs` into this mask in place.
    #[inline(always)]
    fn bitor_assign(&mut self, rhs: Self) {
        self.0 |= rhs.0;
    }
}

impl From<LevelMask> for u8 {
    #[inline(always)]
    fn from(m: LevelMask) -> u8 {
        m.0
    }
}

impl std::fmt::Debug for LevelMask {
    // Formats the mask as three binary digits, e.g. `011`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_fmt(format_args!("{:03b}", self.0))
    }
}
/// An iterator over the positions of the set bits of a mask byte
/// (produced by [`LevelMask::into_iter`]).
#[derive(Clone)]
pub struct LevelMaskIter(u8);

impl Iterator for LevelMaskIter {
    type Item = u8;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.0 == 0 {
            return None;
        }
        // The position of the lowest set bit is the next item.
        let index = self.0.trailing_zeros() as u8;
        // Clear the lowest set bit.
        self.0 &= self.0 - 1;
        Some(index)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: each remaining set bit yields exactly one item.
        let len = self.0.count_ones() as usize;
        (len, Some(len))
    }
}
/// Aggregated statistics about a cell tree.
#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]
pub struct CellTreeStats {
    /// Total number of data bits.
    pub bit_count: u64,
    /// Total number of cells.
    pub cell_count: u64,
}

impl CellTreeStats {
    /// Stats with all counters set to zero.
    pub const ZERO: Self = CellTreeStats {
        bit_count: 0,
        cell_count: 0,
    };
}
// All counter arithmetic below saturates instead of wrapping so that
// combining large stats can never overflow.
impl std::ops::Add for CellTreeStats {
    type Output = Self;

    #[inline]
    fn add(self, rhs: Self) -> Self::Output {
        Self {
            bit_count: self.bit_count.saturating_add(rhs.bit_count),
            cell_count: self.cell_count.saturating_add(rhs.cell_count),
        }
    }
}

impl std::ops::AddAssign for CellTreeStats {
    #[inline]
    fn add_assign(&mut self, rhs: Self) {
        self.bit_count = self.bit_count.saturating_add(rhs.bit_count);
        self.cell_count = self.cell_count.saturating_add(rhs.cell_count);
    }
}

// A slice size contributes its bits to `bit_count` and its refs to `cell_count`.
impl std::ops::Add<CellSliceSize> for CellTreeStats {
    type Output = Self;

    #[inline]
    fn add(self, rhs: CellSliceSize) -> Self::Output {
        Self {
            bit_count: self.bit_count.saturating_add(rhs.bits as _),
            cell_count: self.cell_count.saturating_add(rhs.refs as _),
        }
    }
}

impl std::iter::Sum for CellTreeStats {
    // Folds stats together starting from zero.
    #[inline]
    fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
        let mut res = Self::ZERO;
        for item in iter {
            res += item;
        }
        res
    }
}

impl std::ops::AddAssign<CellSliceSize> for CellTreeStats {
    fn add_assign(&mut self, rhs: CellSliceSize) {
        self.bit_count = self.bit_count.saturating_add(rhs.bits as _);
        self.cell_count = self.cell_count.saturating_add(rhs.refs as _);
    }
}

impl std::ops::Sub for CellTreeStats {
    type Output = Self;

    // Delegates to `SubAssign` below.
    #[inline]
    fn sub(mut self, rhs: Self) -> Self::Output {
        self -= rhs;
        self
    }
}

impl std::ops::SubAssign for CellTreeStats {
    #[inline]
    fn sub_assign(&mut self, rhs: Self) {
        self.bit_count = self.bit_count.saturating_sub(rhs.bit_count);
        self.cell_count = self.cell_count.saturating_sub(rhs.cell_count);
    }
}

impl std::ops::SubAssign<CellSliceSize> for CellTreeStats {
    #[inline]
    fn sub_assign(&mut self, rhs: CellSliceSize) {
        self.bit_count = self.bit_count.saturating_sub(rhs.bits as _);
        self.cell_count = self.cell_count.saturating_sub(rhs.refs as _);
    }
}
/// Computes the number of unique cells and total data bits in a cell tree,
/// deduplicating shared subtrees by representation hash.
pub struct StorageStat<'a> {
    // Representation hashes of cells that were already counted.
    visited: ahash::HashSet<&'a HashBytes>,
    // DFS stack of pending child iterators.
    stack: Vec<RefsIter<'a>>,
    // Running totals.
    stats: CellTreeStats,
    // Maximum number of unique cells to count before aborting.
    limit: usize,
}

impl<'a> StorageStat<'a> {
    /// Computes stats for the slice's remaining bits plus everything
    /// reachable through its references.
    ///
    /// Returns `None` if the cell `limit` was reached.
    pub fn compute_for_slice<'b: 'a>(
        slice: &'a CellSlice<'b>,
        limit: usize,
    ) -> Option<CellTreeStats> {
        let mut this = Self::with_limit(limit);
        if this.add_slice(slice) {
            Some(this.stats)
        } else {
            None
        }
    }

    /// Computes stats for `cell` and everything reachable from it.
    ///
    /// Returns `None` if the cell `limit` was reached.
    pub fn compute_for_cell(cell: &'a DynCell, limit: usize) -> Option<CellTreeStats> {
        let mut this = Self::with_limit(limit);
        if this.add_cell(cell) {
            Some(this.stats)
        } else {
            None
        }
    }

    /// Creates a counter that aborts after counting `limit` cells.
    pub fn with_limit(limit: usize) -> Self {
        Self {
            visited: Default::default(),
            stack: Vec::new(),
            stats: CellTreeStats::ZERO,
            limit,
        }
    }

    /// Creates a counter without a cell limit.
    pub fn unlimited() -> Self {
        Self::with_limit(usize::MAX)
    }

    /// Returns the stats accumulated so far.
    pub fn stats(&self) -> CellTreeStats {
        self.stats
    }

    /// Counts `cell` and its subtree.
    /// Returns `false` if the limit was reached.
    pub fn add_cell(&mut self, cell: &'a DynCell) -> bool {
        if !self.visited.insert(cell.repr_hash()) {
            // This subtree was already counted.
            return true;
        }
        // NOTE(review): the root is counted without checking `limit`, so the
        // total may exceed `limit` by one — confirm this is intended.
        self.stats.bit_count += cell.bit_len() as u64;
        self.stats.cell_count += 1;
        self.stack.clear();
        self.stack.push(cell.references());
        self.reduce_stack()
    }

    /// Counts the slice's remaining bits and its referenced subtrees.
    /// The slice itself is not counted as a cell.
    /// Returns `false` if the limit was reached.
    pub fn add_slice(&mut self, slice: &CellSlice<'a>) -> bool {
        self.stats.bit_count += slice.remaining_bits() as u64;
        self.stack.clear();
        self.stack.push(slice.references());
        self.reduce_stack()
    }

    // Iterative depth-first traversal over the pending child iterators.
    fn reduce_stack(&mut self) -> bool {
        'outer: while let Some(item) = self.stack.last_mut() {
            for cell in item.by_ref() {
                if !self.visited.insert(cell.repr_hash()) {
                    // Shared subtree already counted; skip it.
                    continue;
                }
                if self.stats.cell_count >= self.limit as u64 {
                    return false;
                }
                self.stats.bit_count += cell.bit_len() as u64;
                self.stats.cell_count += 1;
                let next = cell.references();
                if next.max > 0 {
                    // Descend into this child's children first.
                    self.stack.push(next);
                    continue 'outer;
                }
            }
            // This level is exhausted; pop back up.
            self.stack.pop();
        }
        true
    }
}
/// An adapter that formats a cell through its `Debug` impl
/// (see [`DynCell::debug_root`]).
#[derive(Clone, Copy)]
pub struct DebugCell<'a>(&'a DynCell);

impl std::fmt::Debug for DebugCell<'_> {
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.0.fmt(f)
    }
}
/// An adapter that displays a single cell, without its children
/// (see [`DynCell::display_root`]).
#[derive(Clone, Copy)]
pub struct DisplayCellRoot<'a> {
    // The cell to display.
    cell: &'a DynCell,
    // Nesting depth; each level indents the output by two spaces.
    level: usize,
}

impl std::fmt::Display for DisplayCellRoot<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let data = hex::encode(self.cell.data());

        let indent = self.level * 2;
        if f.alternate() {
            // Alternate form (`{:#}`): hex data only.
            f.write_fmt(format_args!("{:indent$}{data}\n", ""))
        } else {
            // Full form: type, data, and a summary line with bit length,
            // ref count, level mask, depth and representation hash.
            let repr_depth = self.cell.depth(LevelMask::MAX_LEVEL);
            let repr_hash = self.cell.repr_hash();
            let descriptor = self.cell.descriptor();
            f.write_fmt(format_args!(
                "{:indent$}{:?}: {data}\n{:indent$}bits: {:>4}, refs: {}, l: {:?}, depth: {}, hash: {}\n",
                "",
                descriptor.cell_type(),
                "",
                self.cell.bit_len(),
                descriptor.reference_count(),
                descriptor.level_mask(),
                repr_depth,
                repr_hash,
            ))
        }
    }
}
/// An adapter that displays a whole cell tree, one cell per entry
/// (see [`DynCell::display_tree`]).
#[derive(Clone, Copy)]
pub struct DisplayCellTree<'a>(&'a DynCell);

impl std::fmt::Display for DisplayCellTree<'_> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Iterative depth-first traversal; children are pushed in reverse
        // order so the first child is printed first.
        let mut stack = vec![(0, self.0)];
        while let Some((level, cell)) = stack.pop() {
            ok!(std::fmt::Display::fmt(&DisplayCellRoot { cell, level }, f));
            let reference_count = cell.reference_count();
            for i in (0..reference_count).rev() {
                if let Some(child) = cell.reference(i) {
                    stack.push((level + 1, child));
                }
            }
        }
        Ok(())
    }
}
/// The maximum number of data bits a cell can store.
pub const MAX_BIT_LEN: u16 = 1023;
/// The maximum number of child references a cell can have.
pub const MAX_REF_COUNT: usize = 4;
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn correct_level() {
        // Expected level (popcount) for every 3-bit mask value.
        const LEVEL: [u8; 8] = [0, 1, 1, 2, 1, 2, 2, 3];

        for mask in 0b000..=0b111 {
            assert_eq!(LevelMask(mask).level(), LEVEL[mask as usize]);
        }
    }

    #[test]
    fn correct_hash_index() {
        // Expected `hash_index(level)` for every mask (rows) and
        // level 0..=3 (columns).
        const HASH_INDEX_TABLE: [[u8; 4]; 8] = [
            [0, 0, 0, 0],
            [0, 1, 1, 1],
            [0, 0, 1, 1],
            [0, 1, 2, 2],
            [0, 0, 0, 1],
            [0, 1, 1, 2],
            [0, 0, 1, 2],
            [0, 1, 2, 3],
        ];

        for mask in 0b000..=0b111 {
            let mask = LevelMask(mask);
            for i in 0..=3 {
                let hash_index = mask.hash_index(i);
                assert_eq!(hash_index, HASH_INDEX_TABLE[mask.0 as usize][i as usize]);
            }
        }
    }

    #[test]
    fn ultra_virtual_cell_by_ref() {
        // Stack three pruned branches over the empty cell, then check that
        // each `virtualize()` call peels one level off (exposing the hash and
        // depth of the next inner pruned branch) and that further calls on a
        // fully virtualized view are no-ops.
        let cell = Cell::empty_cell();

        let pruned1 =
            crate::merkle::make_pruned_branch(cell.as_ref(), 0, &mut Cell::empty_context())
                .unwrap();
        let pruned2 =
            crate::merkle::make_pruned_branch(pruned1.as_ref(), 1, &mut Cell::empty_context())
                .unwrap();
        let pruned3 =
            crate::merkle::make_pruned_branch(pruned2.as_ref(), 2, &mut Cell::empty_context())
                .unwrap();

        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), pruned2.repr_hash());
        assert_eq!(pruned3.repr_depth(), pruned2.repr_depth());

        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), pruned1.repr_hash());
        assert_eq!(pruned3.repr_depth(), pruned1.repr_depth());

        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());

        // Virtualizing below level 0 changes nothing.
        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());

        let pruned3 = pruned3.virtualize();
        assert_eq!(pruned3.repr_hash(), cell.repr_hash());
        assert_eq!(pruned3.repr_depth(), cell.repr_depth());
    }
}