tycho_types/cell/cell_context.rs

use sha2::digest::Digest;

#[cfg(feature = "stats")]
use crate::cell::CellTreeStats;
use crate::cell::{Cell, CellDescriptor, CellType, DynCell, HashBytes, LevelMask, MAX_REF_COUNT};
use crate::error::Error;
use crate::util::{unlikely, ArrayVec};
/// Gas accounting and resolving exotic cells.
pub trait CellContext {
    /// Creates a new cell from parts.
    fn finalize_cell(&self, cell: CellParts<'_>) -> Result<Cell, Error>;

    /// Resolves an owned cell, applying the given load mode.
    fn load_cell(&self, cell: Cell, mode: LoadMode) -> Result<Cell, Error>;

    /// Resolves a cell reference, applying the given load mode.
    fn load_dyn_cell<'s: 'a, 'a>(
        &'s self,
        cell: &'a DynCell,
        mode: LoadMode,
    ) -> Result<&'a DynCell, Error>;
}
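
// Illustrative sketch (not part of the original file): callers typically
// thread every cell access through a `CellContext` so gas can be charged
// and exotic cells resolved. Assuming the usual `DynCell::references`
// iterator, a tree walk that charges gas without resolving exotic cells
// could look roughly like this:
//
// fn walk(ctx: &dyn CellContext, root: &DynCell) -> Result<(), Error> {
//     let cell = ctx.load_dyn_cell(root, LoadMode::UseGas)?;
//     for child in cell.references() {
//         walk(ctx, child)?;
//     }
//     Ok(())
// }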

/// Cell load mode.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
#[repr(u8)]
pub enum LoadMode {
    /// Do not modify the default behavior.
    Noop = 0b00,
    /// Count the cost of loading the cell.
    UseGas = 0b01,
    /// Resolve exotic cells such as libraries or merkle proofs.
    Resolve = 0b10,
    /// Both `UseGas` and `Resolve`.
    Full = 0b11,
}

impl LoadMode {
    /// Returns `true` if this mode requires gas accounting.
    #[inline]
    pub const fn use_gas(self) -> bool {
        self as u8 & 0b01 != 0
    }

    /// Returns `true` if exotic cells are resolved in this mode.
    #[inline]
    pub const fn resolve(self) -> bool {
        self as u8 & 0b10 != 0
    }
}
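
// Sanity check of the flag layout above (illustrative test, not part of the
// original file): `Full` sets both bits, `Noop` sets neither.
#[cfg(test)]
mod load_mode_flags {
    use super::*;

    #[test]
    fn flags_match_discriminants() {
        assert!(!LoadMode::Noop.use_gas() && !LoadMode::Noop.resolve());
        assert!(LoadMode::UseGas.use_gas() && !LoadMode::UseGas.resolve());
        assert!(!LoadMode::Resolve.use_gas() && LoadMode::Resolve.resolve());
        assert!(LoadMode::Full.use_gas() && LoadMode::Full.resolve());
    }
}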

/// Parts for building a cell.
pub struct CellParts<'a> {
    /// Precomputed cell tree storage stats.
    #[cfg(feature = "stats")]
    pub stats: CellTreeStats,

    /// Cell data length in bits.
    pub bit_len: u16,

    /// Cell descriptor bytes.
    pub descriptor: CellDescriptor,

    /// Combined level mask of all children.
    pub children_mask: LevelMask,

    /// Array of child cells.
    ///
    /// NOTE: The number of references must be consistent
    /// with the reference count in the descriptor.
    pub references: ArrayVec<Cell, MAX_REF_COUNT>,

    /// Cell data slice.
    pub data: &'a [u8],
}

impl CellParts<'_> {
    /// Validates the cell parts and computes all hashes and depths.
    pub fn compute_hashes(&self) -> Result<Vec<(HashBytes, u16)>, Error> {
        const HASH_BITS: usize = 256;
        const DEPTH_BITS: usize = 16;

        let mut descriptor = self.descriptor;
        let bit_len = self.bit_len as usize;
        let level_mask = descriptor.level_mask();
        let level = level_mask.level() as usize;

        let references = self.references.as_ref();

        // `level` is at most 3, so `hashes_len` is in `1..=4`.
        let mut hashes_len = level + 1;

        let (cell_type, computed_level_mask) = if unlikely(descriptor.is_exotic()) {
            let Some(&first_byte) = self.data.first() else {
                return Err(Error::InvalidCell);
            };

            match CellType::from_byte_exotic(first_byte) {
                // 8 bits type, 8 bits level mask, level x (hash, depth)
                Some(CellType::PrunedBranch) => {
                    if unlikely(level == 0) {
                        return Err(Error::InvalidCell);
                    }

                    let expected_bit_len = 8 + 8 + level * (HASH_BITS + DEPTH_BITS);
                    if unlikely(bit_len != expected_bit_len || !references.is_empty()) {
                        return Err(Error::InvalidCell);
                    }

                    let stored_mask = self.data.get(1).copied().unwrap_or_default();
                    if unlikely(level_mask != stored_mask) {
                        return Err(Error::InvalidCell);
                    }

                    // Only the representation hash is computed for a pruned branch.
                    hashes_len = 1;
                    (CellType::PrunedBranch, level_mask)
                }
                // 8 bits type, hash, depth
                Some(CellType::MerkleProof) => {
                    const EXPECTED_BIT_LEN: usize = 8 + HASH_BITS + DEPTH_BITS;
                    if unlikely(bit_len != EXPECTED_BIT_LEN || references.len() != 1) {
                        return Err(Error::InvalidCell);
                    }

                    (CellType::MerkleProof, self.children_mask.virtualize(1))
                }
                // 8 bits type, 2 x (hash, depth)
                Some(CellType::MerkleUpdate) => {
                    const EXPECTED_BIT_LEN: usize = 8 + 2 * (HASH_BITS + DEPTH_BITS);
                    if unlikely(bit_len != EXPECTED_BIT_LEN || references.len() != 2) {
                        return Err(Error::InvalidCell);
                    }

                    (CellType::MerkleUpdate, self.children_mask.virtualize(1))
                }
                // 8 bits type, hash
                Some(CellType::LibraryReference) => {
                    const EXPECTED_BIT_LEN: usize = 8 + HASH_BITS;
                    if unlikely(bit_len != EXPECTED_BIT_LEN || !references.is_empty()) {
                        return Err(Error::InvalidCell);
                    }

                    (CellType::LibraryReference, LevelMask::EMPTY)
                }
                _ => return Err(Error::InvalidCell),
            }
        } else {
            (CellType::Ordinary, self.children_mask)
        };

        if unlikely(computed_level_mask != level_mask) {
            return Err(Error::InvalidCell);
        }

        let level_offset = cell_type.is_merkle() as u8;
        let is_pruned = cell_type.is_pruned_branch();

        let mut hashes = Vec::<(HashBytes, u16)>::with_capacity(hashes_len);
        // For each significant level: hash the adjusted descriptor, then the data
        // (or the previous level hash), then child depths, then child hashes.
        for level in 0..4 {
            // Skip insignificant levels; pruned branches keep only the level 0 hash.
            if level != 0 && (is_pruned || !level_mask.contains(level)) {
                continue;
            }

            let mut hasher = sha2::Sha256::new();

            let level_mask = if is_pruned {
                level_mask
            } else {
                LevelMask::from_level(level)
            };

            descriptor.d1 &= !(CellDescriptor::LEVEL_MASK | CellDescriptor::STORE_HASHES_MASK);
            descriptor.d1 |= u8::from(level_mask) << 5;
            hasher.update([descriptor.d1, descriptor.d2]);

            if level == 0 {
                hasher.update(self.data);
            } else {
                // SAFETY: `hashes` is not empty here because the hash
                // for level 0 is always computed first.
                let prev_hash = unsafe { hashes.last().unwrap_unchecked() };
                hasher.update(prev_hash.0.as_slice());
            }

            // Absorb child depths (big-endian) while tracking the maximum;
            // the depth of this cell is one more than the deepest child.
            let mut depth = 0;
            for child in references {
                let child_depth = child.as_ref().depth(level + level_offset);
                let next_depth = match child_depth.checked_add(1) {
                    Some(next_depth) => next_depth,
                    None => return Err(Error::DepthOverflow),
                };
                depth = std::cmp::max(depth, next_depth);

                hasher.update(child_depth.to_be_bytes());
            }

            // Absorb child hashes at the (possibly shifted) level.
            for child in references {
                let child_hash = child.as_ref().hash(level + level_offset);
                hasher.update(child_hash.as_slice());
            }

            let hash = hasher.finalize().into();
            hashes.push((hash, depth));
        }

        Ok(hashes)
    }
}
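
// Worked example of the representation-hash scheme above (illustrative test,
// not part of the original file): an ordinary cell with no data and no
// references absorbs only its two descriptor bytes, so its hash is
// sha256([0x00, 0x00]) -- the well-known empty-cell hash.
#[cfg(test)]
mod representation_hash_example {
    use sha2::digest::Digest;

    #[test]
    fn empty_ordinary_cell_hash() {
        let hash: [u8; 32] = sha2::Sha256::digest([0u8, 0u8]).into();
        assert_eq!(hash, [
            0x96, 0xa2, 0x96, 0xd2, 0x24, 0xf2, 0x85, 0xc6, 0x7b, 0xee, 0x93, 0xc3, 0x0f, 0x8a,
            0x30, 0x91, 0x57, 0xf0, 0xda, 0xa3, 0x5d, 0xc5, 0xb8, 0x7e, 0x41, 0x0b, 0x78, 0x63,
            0x0a, 0x09, 0xcf, 0xc7,
        ]);
    }
}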