// everscale_types/cell/cell_context.rs

use sha2::digest::Digest;

use crate::cell::{Cell, CellDescriptor, CellType, DynCell, HashBytes, LevelMask, MAX_REF_COUNT};
use crate::error::Error;
use crate::util::{unlikely, ArrayVec};

#[cfg(feature = "stats")]
use crate::cell::CellTreeStats;

/// Gas accounting and resolution of exotic cells during cell creation
/// and loading.
pub trait CellContext {
    /// Builds a new cell from the given parts.
    fn finalize_cell(&mut self, cell: CellParts<'_>) -> Result<Cell, Error>;

    /// Loads an owned cell, charging gas and resolving exotic cells
    /// according to `mode`.
    fn load_cell(&mut self, cell: Cell, mode: LoadMode) -> Result<Cell, Error>;

    /// Loads a cell reference, charging gas and resolving exotic cells
    /// according to `mode`.
    fn load_dyn_cell<'a>(
        &mut self,
        cell: &'a DynCell,
        mode: LoadMode,
    ) -> Result<&'a DynCell, Error>;
}
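
// A minimal sketch of an implementor (not part of this file): the
// `GasedContext` name and its flat one-unit cost per loaded cell are
// illustrative assumptions. It shows how `LoadMode` gates gas accounting
// while the actual work is delegated to an inner context.
pub struct GasedContext<T> {
    inner: T,
    gas_used: u64,
}

impl<T: CellContext> CellContext for GasedContext<T> {
    fn finalize_cell(&mut self, cell: CellParts<'_>) -> Result<Cell, Error> {
        // Cell creation is delegated unchanged.
        self.inner.finalize_cell(cell)
    }

    fn load_cell(&mut self, cell: Cell, mode: LoadMode) -> Result<Cell, Error> {
        if mode.use_gas() {
            // Flat per-cell cost; a real context would apply proper gas prices.
            self.gas_used += 1;
        }
        self.inner.load_cell(cell, mode)
    }

    fn load_dyn_cell<'a>(
        &mut self,
        cell: &'a DynCell,
        mode: LoadMode,
    ) -> Result<&'a DynCell, Error> {
        if mode.use_gas() {
            self.gas_used += 1;
        }
        self.inner.load_dyn_cell(cell, mode)
    }
}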

/// Cell load mode.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
#[repr(u8)]
pub enum LoadMode {
    /// Do not track gas usage and do not resolve exotic cells.
    Noop = 0b00,
    /// Only track gas usage.
    UseGas = 0b01,
    /// Only resolve exotic cells.
    Resolve = 0b10,
    /// Track gas usage and resolve exotic cells.
    Full = 0b11,
}

impl LoadMode {
    /// Returns `true` if gas should be consumed when loading.
    #[inline]
    pub const fn use_gas(self) -> bool {
        self as u8 & 0b01 != 0
    }

    /// Returns `true` if exotic cells should be resolved when loading.
    #[inline]
    pub const fn resolve(self) -> bool {
        self as u8 & 0b10 != 0
    }
}
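
// Compile-time sanity checks (an added sketch, not from the original file):
// the two low bits of the discriminant act as independent flags, so `Full`
// implies both behaviors and `Noop` implies neither.
const _: () = {
    assert!(LoadMode::Full.use_gas() && LoadMode::Full.resolve());
    assert!(LoadMode::UseGas.use_gas() && !LoadMode::UseGas.resolve());
    assert!(!LoadMode::Resolve.use_gas() && LoadMode::Resolve.resolve());
    assert!(!LoadMode::Noop.use_gas() && !LoadMode::Noop.resolve());
};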

/// Parts of a new cell that have been parsed but not yet validated
/// and finalized.
pub struct CellParts<'a> {
    /// Aggregated stats of the whole cell subtree.
    #[cfg(feature = "stats")]
    pub stats: CellTreeStats,

    /// Length of the cell data in bits.
    pub bit_len: u16,

    /// Cell descriptor bytes.
    pub descriptor: CellDescriptor,

    /// Combined level mask of all child cells.
    pub children_mask: LevelMask,

    /// Child cell references.
    pub references: ArrayVec<Cell, MAX_REF_COUNT>,

    /// Cell data.
    pub data: &'a [u8],
}
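
// For orientation (standard TON cell layout, summarized here as a sketch):
// `d1` packs the reference count, the exotic flag, the "store hashes" flag
// and the level mask, while `d2` encodes the data length in half-bytes:
//
//   d1 = refs_count + 8 * is_exotic + 16 * store_hashes + 32 * level_mask
//   d2 = floor(bit_len / 8) + ceil(bit_len / 8)
//
// e.g. an ordinary level-0 cell with 2 refs and 16 data bits has d1 = 2, d2 = 4.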

impl<'a> CellParts<'a> {
    /// Validates the cell parts and computes the representation hash and
    /// depth for every level of the cell.
    pub fn compute_hashes(&self) -> Result<Vec<(HashBytes, u16)>, Error> {
        const HASH_BITS: usize = 256;
        const DEPTH_BITS: usize = 16;

        let mut descriptor = self.descriptor;
        let bit_len = self.bit_len as usize;
        let level_mask = descriptor.level_mask();
        let level = level_mask.level() as usize;

        let references = self.references.as_ref();

        // Upper bound on the number of hashes: the base hash plus one per level.
        let mut hashes_len = level + 1;

        let (cell_type, computed_level_mask) = if unlikely(descriptor.is_exotic()) {
            // The first data byte of an exotic cell encodes its type.
            let Some(&first_byte) = self.data.first() else {
                return Err(Error::InvalidCell);
            };

            match CellType::from_byte_exotic(first_byte) {
                Some(CellType::PrunedBranch) => {
                    // Pruned branches must have a non-zero level.
                    if unlikely(level == 0) {
                        return Err(Error::InvalidCell);
                    }

                    // Data layout: 8-bit type, 8-bit level mask, then one
                    // (hash, depth) pair per level.
                    let expected_bit_len = 8 + 8 + level * (HASH_BITS + DEPTH_BITS);
                    if unlikely(bit_len != expected_bit_len || !references.is_empty()) {
                        return Err(Error::InvalidCell);
                    }

                    // The mask stored in the data must match the descriptor.
                    let stored_mask = self.data.get(1).copied().unwrap_or_default();
                    if unlikely(level_mask != stored_mask) {
                        return Err(Error::InvalidCell);
                    }

                    // A pruned branch stores only its representation hash.
                    hashes_len = 1;
                    (CellType::PrunedBranch, level_mask)
                }
                Some(CellType::MerkleProof) => {
                    // Data layout: 8-bit type, then the child's hash and depth.
                    const EXPECTED_BIT_LEN: usize = 8 + HASH_BITS + DEPTH_BITS;
                    if unlikely(bit_len != EXPECTED_BIT_LEN || references.len() != 1) {
                        return Err(Error::InvalidCell);
                    }

                    // Merkle cells see their children with the level mask
                    // shifted down by one.
                    (CellType::MerkleProof, self.children_mask.virtualize(1))
                }
                Some(CellType::MerkleUpdate) => {
                    // Data layout: 8-bit type, then two (hash, depth) pairs.
                    const EXPECTED_BIT_LEN: usize = 8 + 2 * (HASH_BITS + DEPTH_BITS);
                    if unlikely(bit_len != EXPECTED_BIT_LEN || references.len() != 2) {
                        return Err(Error::InvalidCell);
                    }

                    (CellType::MerkleUpdate, self.children_mask.virtualize(1))
                }
                Some(CellType::LibraryReference) => {
                    // Data layout: 8-bit type, then the library cell hash.
                    const EXPECTED_BIT_LEN: usize = 8 + HASH_BITS;
                    if unlikely(bit_len != EXPECTED_BIT_LEN || !references.is_empty()) {
                        return Err(Error::InvalidCell);
                    }

                    (CellType::LibraryReference, LevelMask::EMPTY)
                }
                _ => return Err(Error::InvalidCell),
            }
        } else {
            // Ordinary cells inherit the combined level mask of their children.
            (CellType::Ordinary, self.children_mask)
        };

        if unlikely(computed_level_mask != level_mask) {
            return Err(Error::InvalidCell);
        }

        // Merkle cells hash their children one level higher than their own.
        let level_offset = cell_type.is_merkle() as u8;
        let is_pruned = cell_type.is_pruned_branch();

        let mut hashes = Vec::<(HashBytes, u16)>::with_capacity(hashes_len);
        for level in 0..4 {
            // Skip levels that are absent from the mask; pruned branches
            // compute only the representation (level 0) hash.
            if level != 0 && (is_pruned || !level_mask.contains(level)) {
                continue;
            }

            let mut hasher = sha2::Sha256::new();

            let level_mask = if is_pruned {
                level_mask
            } else {
                LevelMask::from_level(level)
            };

            // Rewrite `d1` with the mask for this level and with the
            // "store hashes" flag cleared, then hash both descriptor bytes.
            descriptor.d1 &= !(CellDescriptor::LEVEL_MASK | CellDescriptor::STORE_HASHES_MASK);
            descriptor.d1 |= u8::from(level_mask) << 5;
            hasher.update([descriptor.d1, descriptor.d2]);

            // Levels above zero hash the previous level's hash instead of the data.
            if level == 0 {
                hasher.update(self.data);
            } else {
                // SAFETY: the level 0 hash is always computed first,
                // so `hashes` is non-empty here.
                let prev_hash = unsafe { hashes.last().unwrap_unchecked() };
                hasher.update(prev_hash.0.as_slice());
            }

            // The cell depth is one more than the maximum child depth.
            let mut depth = 0;
            for child in references {
                let child_depth = child.as_ref().depth(level + level_offset);
                let next_depth = match child_depth.checked_add(1) {
                    Some(next_depth) => next_depth,
                    None => return Err(Error::DepthOverflow),
                };
                depth = std::cmp::max(depth, next_depth);

                hasher.update(child_depth.to_be_bytes());
            }

            for child in references {
                let child_hash = child.as_ref().hash(level + level_offset);
                hasher.update(child_hash.as_slice());
            }

            let hash = hasher.finalize().into();
            hashes.push((hash, depth));
        }

        Ok(hashes)
    }
}
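
// A small sanity check of the scheme above, using only `sha2` (an added
// sketch, not from the original file): the representation hash of an empty
// ordinary cell is sha256 over its two zero descriptor bytes, with no data
// and no child depths or hashes. The expected value is the well-known
// empty-cell hash.
#[cfg(test)]
mod tests {
    use sha2::digest::Digest;

    #[test]
    fn empty_ordinary_cell_repr_hash() {
        let mut hasher = sha2::Sha256::new();
        hasher.update([0u8, 0u8]); // d1 = 0 (no refs), d2 = 0 (no data)
        let hash: [u8; 32] = hasher.finalize().into();
        assert_eq!(hash, [
            0x96, 0xa2, 0x96, 0xd2, 0x24, 0xf2, 0x85, 0xc6, 0x7b, 0xee, 0x93, 0xc3,
            0x0f, 0x8a, 0x30, 0x91, 0x57, 0xf0, 0xda, 0xa3, 0x5d, 0xc5, 0xb8, 0x7e,
            0x41, 0x0b, 0x78, 0x63, 0x0a, 0x09, 0xcf, 0xc7,
        ]);
    }
}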