// everscale_types/cell/cell_context.rs

use sha2::digest::Digest;
use crate::cell::{Cell, CellDescriptor, CellType, DynCell, HashBytes, LevelMask, MAX_REF_COUNT};
use crate::error::Error;
use crate::util::{unlikely, ArrayVec};
#[cfg(feature = "stats")]
use crate::cell::CellTreeStats;

/// A context for cell creation and loading.
pub trait CellContext {
    /// Builds a new cell from the given parts.
    fn finalize_cell(&mut self, cell: CellParts<'_>) -> Result<Cell, Error>;

    /// Loads an owned cell, accounting gas and/or resolving it according to `mode`.
    fn load_cell(&mut self, cell: Cell, mode: LoadMode) -> Result<Cell, Error>;

    /// Loads a borrowed cell, accounting gas and/or resolving it according to `mode`.
    fn load_dyn_cell<'a>(
        &mut self,
        cell: &'a DynCell,
        mode: LoadMode,
    ) -> Result<&'a DynCell, Error>;
}

/// Cell load mode, encoded as a pair of bit flags.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
#[repr(u8)]
pub enum LoadMode {
    /// Do not account gas and do not resolve the cell.
    Noop = 0b00,
    /// Only account gas for the load.
    UseGas = 0b01,
    /// Only resolve the cell without accounting gas.
    Resolve = 0b10,
    /// Account gas and resolve the cell.
    Full = 0b11,
}

impl LoadMode {
    /// Returns `true` if gas must be accounted for during the load.
    #[inline]
    pub const fn use_gas(self) -> bool {
        self as u8 & 0b01 != 0
    }

    /// Returns `true` if the cell must be resolved during the load.
    #[inline]
    pub const fn resolve(self) -> bool {
        self as u8 & 0b10 != 0
    }
}
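
// Illustrative compile-time check (not part of the original file) of how the two
// `LoadMode` bits compose: `Full` behaves as `UseGas` and `Resolve` combined.
const _: () = {
    assert!(!LoadMode::Noop.use_gas() && !LoadMode::Noop.resolve());
    assert!(LoadMode::UseGas.use_gas() && !LoadMode::UseGas.resolve());
    assert!(!LoadMode::Resolve.use_gas() && LoadMode::Resolve.resolve());
    assert!(LoadMode::Full.use_gas() && LoadMode::Full.resolve());
};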

/// Parts of a cell to be finalized into a [`Cell`].
pub struct CellParts<'a> {
    /// Aggregated stats of the cell tree.
    #[cfg(feature = "stats")]
    pub stats: CellTreeStats,
    /// Cell data length in bits.
    pub bit_len: u16,
    /// Cell descriptor bytes.
    pub descriptor: CellDescriptor,
    /// Combined level mask of the child cells.
    pub children_mask: LevelMask,
    /// Cell references.
    pub references: ArrayVec<Cell, MAX_REF_COUNT>,
    /// Cell data slice.
    pub data: &'a [u8],
}

impl<'a> CellParts<'a> {
    /// Validates the cell parts and computes the hash and depth for each level.
    pub fn compute_hashes(&self) -> Result<Vec<(HashBytes, u16)>, Error> {
        const HASH_BITS: usize = 256;
        const DEPTH_BITS: usize = 16;

        let mut descriptor = self.descriptor;
        let bit_len = self.bit_len as usize;
        let level_mask = descriptor.level_mask();
        let level = level_mask.level() as usize;
        let references = self.references.as_ref();

        let mut hashes_len = level + 1;

        // Validate the layout of exotic cells and determine the cell type
        // together with the level mask it implies.
        let (cell_type, computed_level_mask) = if unlikely(descriptor.is_exotic()) {
            let Some(&first_byte) = self.data.first() else {
                return Err(Error::InvalidCell);
            };

            match CellType::from_byte_exotic(first_byte) {
                // 8 bits type, 8 bits level mask, level x (hash, depth)
                Some(CellType::PrunedBranch) => {
                    if unlikely(level == 0) {
                        return Err(Error::InvalidCell);
                    }

                    let expected_bit_len = 8 + 8 + level * (HASH_BITS + DEPTH_BITS);
                    if unlikely(bit_len != expected_bit_len || !references.is_empty()) {
                        return Err(Error::InvalidCell);
                    }

                    let stored_mask = self.data.get(1).copied().unwrap_or_default();
                    if unlikely(level_mask != stored_mask) {
                        return Err(Error::InvalidCell);
                    }

                    // Only the representation hash (level 0) is computed for pruned branches.
                    hashes_len = 1;
                    (CellType::PrunedBranch, level_mask)
                }
                // 8 bits type, hash, depth
                Some(CellType::MerkleProof) => {
                    const EXPECTED_BIT_LEN: usize = 8 + HASH_BITS + DEPTH_BITS;
                    if unlikely(bit_len != EXPECTED_BIT_LEN || references.len() != 1) {
                        return Err(Error::InvalidCell);
                    }

                    (CellType::MerkleProof, self.children_mask.virtualize(1))
                }
                // 8 bits type, 2 x (hash, depth)
                Some(CellType::MerkleUpdate) => {
                    const EXPECTED_BIT_LEN: usize = 8 + 2 * (HASH_BITS + DEPTH_BITS);
                    if unlikely(bit_len != EXPECTED_BIT_LEN || references.len() != 2) {
                        return Err(Error::InvalidCell);
                    }

                    (CellType::MerkleUpdate, self.children_mask.virtualize(1))
                }
                // 8 bits type, hash
                Some(CellType::LibraryReference) => {
                    const EXPECTED_BIT_LEN: usize = 8 + HASH_BITS;
                    if unlikely(bit_len != EXPECTED_BIT_LEN || !references.is_empty()) {
                        return Err(Error::InvalidCell);
                    }

                    (CellType::LibraryReference, LevelMask::EMPTY)
                }
                _ => return Err(Error::InvalidCell),
            }
        } else {
            (CellType::Ordinary, self.children_mask)
        };

        // The descriptor level mask must match the one implied by the cell type.
        if unlikely(computed_level_mask != level_mask) {
            return Err(Error::InvalidCell);
        }

        // Merkle cells use child hashes and depths from one level higher.
        let level_offset = cell_type.is_merkle() as u8;
        let is_pruned = cell_type.is_pruned_branch();

        let mut hashes = Vec::<(HashBytes, u16)>::with_capacity(hashes_len);

        // Compute the hash and depth for each level.
        for level in 0..4 {
            // Skip levels that are not present in the level mask.
            // Pruned branches compute only the representation hash (level 0).
            if level != 0 && (is_pruned || !level_mask.contains(level)) {
                continue;
            }

            let mut hasher = sha2::Sha256::new();

            let level_mask = if is_pruned {
                level_mask
            } else {
                LevelMask::from_level(level)
            };

            // Rebuild the first descriptor byte with the level mask of the current
            // level and with the `store hashes` flag cleared.
            descriptor.d1 &= !(CellDescriptor::LEVEL_MASK | CellDescriptor::STORE_HASHES_MASK);
            descriptor.d1 |= u8::from(level_mask) << 5;
            hasher.update([descriptor.d1, descriptor.d2]);

            // Level 0 hashes the cell data; higher levels hash the previous level hash.
            if level == 0 {
                hasher.update(self.data);
            } else {
                // SAFETY: the level 0 hash is always computed first, so `hashes` is not empty.
                let prev_hash = unsafe { hashes.last().unwrap_unchecked() };
                hasher.update(prev_hash.0.as_slice());
            }

            // Hash the depths of all children and compute this cell's depth
            // as the maximum child depth plus one.
            let mut depth = 0;
            for child in references {
                let child_depth = child.as_ref().depth(level + level_offset);
                let next_depth = match child_depth.checked_add(1) {
                    Some(next_depth) => next_depth,
                    None => return Err(Error::DepthOverflow),
                };
                depth = std::cmp::max(depth, next_depth);

                hasher.update(child_depth.to_be_bytes());
            }

            // Hash the hashes of all children.
            for child in references {
                let child_hash = child.as_ref().hash(level + level_offset);
                hasher.update(child_hash.as_slice());
            }

            let hash = hasher.finalize().into();
            hashes.push((hash, depth));
        }

        Ok(hashes)
    }
}
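
// Illustrative sketch (not part of the original file): a context that performs no
// gas accounting and no cell resolution. `finalize_cell` is left unimplemented here
// because constructing a `Cell` from `CellParts` depends on cell storage internals
// outside this module.
pub struct NoopCellContext;

impl CellContext for NoopCellContext {
    fn finalize_cell(&mut self, _cell: CellParts<'_>) -> Result<Cell, Error> {
        // A full implementation would allocate a cell, store the results of
        // `CellParts::compute_hashes`, and return it.
        unimplemented!("cell allocation is outside the scope of this sketch")
    }

    fn load_cell(&mut self, cell: Cell, _mode: LoadMode) -> Result<Cell, Error> {
        // No gas accounting, no resolution: return the cell unchanged.
        Ok(cell)
    }

    fn load_dyn_cell<'a>(
        &mut self,
        cell: &'a DynCell,
        _mode: LoadMode,
    ) -> Result<&'a DynCell, Error> {
        Ok(cell)
    }
}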