// tycho_types/cell/cell_context.rs
use sha2::digest::Digest;
2
3use crate::cell::{Cell, CellDescriptor, CellType, DynCell, HashBytes, LevelMask, MAX_REF_COUNT};
4use crate::error::Error;
5use crate::util::{ArrayVec, unlikely};
6
/// A context used while finalizing (building) and loading cells.
///
/// Implementations can add bookkeeping on top of the plain operations,
/// e.g. gas accounting or cell resolution, driven by [`LoadMode`].
pub trait CellContext {
    /// Builds a complete [`Cell`] from the given parts.
    ///
    /// Returns an [`Error`] if the parts do not describe a valid cell.
    fn finalize_cell(&self, cell: CellParts<'_>) -> Result<Cell, Error>;

    /// Loads an owned cell, applying the given [`LoadMode`]
    /// (gas accounting and/or resolution — see [`LoadMode::use_gas`]
    /// and [`LoadMode::resolve`]).
    fn load_cell(&self, cell: Cell, mode: LoadMode) -> Result<Cell, Error>;

    /// Borrowed counterpart of [`Self::load_cell`]: loads a dynamic
    /// cell reference, applying the given [`LoadMode`].
    fn load_dyn_cell<'s: 'a, 'a>(
        &'s self,
        cell: &'a DynCell,
        mode: LoadMode,
    ) -> Result<&'a DynCell, Error>;
}
22
/// Behavior flags for [`CellContext::load_cell`] and [`CellContext::load_dyn_cell`].
///
/// The discriminant is a two-bit flag set: bit 0 enables gas accounting,
/// bit 1 enables cell resolution.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
#[repr(u8)]
pub enum LoadMode {
    /// Plain load: no gas accounting, no resolution.
    Noop = 0b00,
    /// Account for gas usage only.
    UseGas = 0b01,
    /// Resolve the cell only.
    Resolve = 0b10,
    /// Account for gas usage and resolve the cell.
    Full = 0b11,
}

impl LoadMode {
    /// Returns `true` if gas must be accounted for (bit 0 of the discriminant).
    #[inline]
    pub const fn use_gas(self) -> bool {
        matches!(self, Self::UseGas | Self::Full)
    }

    /// Returns `true` if the cell must be resolved (bit 1 of the discriminant).
    #[inline]
    pub const fn resolve(self) -> bool {
        matches!(self, Self::Resolve | Self::Full)
    }
}
50
/// Partially assembled cell, the input of [`CellContext::finalize_cell`].
pub struct CellParts<'a> {
    /// Length of the cell data in bits
    /// (`data` must hold at least `bit_len.div_ceil(8)` bytes).
    pub bit_len: u16,

    /// Cell descriptor bytes `d1`/`d2` (carry the level mask,
    /// the exotic flag and the store-hashes flag).
    pub descriptor: CellDescriptor,

    /// Level mask combined from all child cells; for ordinary cells it
    /// must match the mask stored in `descriptor`.
    pub children_mask: LevelMask,

    /// Child cell references (at most [`MAX_REF_COUNT`]).
    pub references: ArrayVec<Cell, MAX_REF_COUNT>,

    /// Raw cell data. For exotic cells the first byte is the cell type tag.
    pub data: &'a [u8],
}
71
impl CellParts<'_> {
    /// Validates the cell layout and computes its representation hashes
    /// and depths.
    ///
    /// Returns one `(hash, depth)` pair per stored hash: exactly one for
    /// pruned branches, otherwise one for level 0 plus one for each higher
    /// level contained in the level mask.
    ///
    /// # Errors
    ///
    /// * [`Error::InvalidCell`] — data shorter than `bit_len`, malformed
    ///   exotic-cell layout, or a level-mask mismatch.
    /// * [`Error::DepthOverflow`] — a child already has depth `u16::MAX`.
    pub fn compute_hashes(&self) -> Result<Box<[(HashBytes, u16)]>, Error> {
        const HASH_BITS: usize = 256;
        const DEPTH_BITS: usize = 16;

        let mut descriptor = self.descriptor;
        let bit_len = self.bit_len as usize;
        let level_mask = descriptor.level_mask();
        let level = level_mask.level() as usize;

        let references = self.references.as_ref();

        // The data slice must cover all `bit_len` bits.
        if unlikely(self.data.len() < bit_len.div_ceil(8)) {
            return Err(Error::InvalidCell);
        }

        // One hash per level up to the cell's own level; pruned branches
        // override this below (they store only one computed hash).
        let mut hashes_len = level + 1;

        // Exotic cells encode their type in the first data byte and have a
        // fixed layout which is validated here; ordinary cells inherit the
        // combined children mask directly.
        let (cell_type, computed_level_mask) = if unlikely(descriptor.is_exotic()) {
            let Some(&first_byte) = self.data.first() else {
                return Err(Error::InvalidCell);
            };

            match CellType::from_byte_exotic(first_byte) {
                Some(CellType::PrunedBranch) => {
                    // A pruned branch must prune at least one level.
                    if unlikely(level == 0) {
                        return Err(Error::InvalidCell);
                    }

                    // Layout: 8-bit type tag, 8-bit level mask, then one
                    // (hash, depth) pair per pruned level; no references.
                    let expected_bit_len = 8 + 8 + level * (HASH_BITS + DEPTH_BITS);
                    if unlikely(bit_len != expected_bit_len || !references.is_empty()) {
                        return Err(Error::InvalidCell);
                    }

                    // The mask stored in the data must agree with the descriptor.
                    let stored_mask = self.data.get(1).copied().unwrap_or_default();
                    if unlikely(level_mask != stored_mask) {
                        return Err(Error::InvalidCell);
                    }

                    // Only the representation hash is computed; higher-level
                    // hashes are already stored in the data.
                    hashes_len = 1;
                    (CellType::PrunedBranch, level_mask)
                }
                Some(CellType::MerkleProof) => {
                    // Layout: 8-bit tag, child hash, child depth; exactly one reference.
                    const EXPECTED_BIT_LEN: usize = 8 + HASH_BITS + DEPTH_BITS;
                    if unlikely(
                        bit_len != EXPECTED_BIT_LEN
                            || references.len() != 1
                            || self.data.len() < 35,
                    ) {
                        return Err(Error::InvalidCell);
                    }

                    let stored_hash = &self.data[1..33];
                    let stored_depth = u16::from_be_bytes([self.data[33], self.data[34]]);

                    // The stored (hash, depth) must match the child's level-0 values.
                    let child = &references[0];
                    if unlikely(child.hash(0) != stored_hash || child.depth(0) != stored_depth) {
                        return Err(Error::InvalidCell);
                    }

                    // Merkle cells shift the child mask down by one level.
                    (CellType::MerkleProof, self.children_mask.virtualize(1))
                }
                Some(CellType::MerkleUpdate) => {
                    // Layout: 8-bit tag, old/new hashes, old/new depths;
                    // exactly two references (old and new subtree).
                    const EXPECTED_BIT_LEN: usize = 8 + 2 * (HASH_BITS + DEPTH_BITS);
                    if unlikely(
                        bit_len != EXPECTED_BIT_LEN
                            || references.len() != 2
                            || self.data.len() < 69,
                    ) {
                        return Err(Error::InvalidCell);
                    }

                    let stored_old_hash = &self.data[1..33];
                    let stored_new_hash = &self.data[33..65];

                    let stored_old_depth = u16::from_be_bytes([self.data[65], self.data[66]]);
                    let stored_new_depth = u16::from_be_bytes([self.data[67], self.data[68]]);

                    // Both stored (hash, depth) pairs must match the children.
                    let old = &references[0];
                    let new = &references[1];
                    if unlikely(
                        old.hash(0) != stored_old_hash
                            || old.depth(0) != stored_old_depth
                            || new.hash(0) != stored_new_hash
                            || new.depth(0) != stored_new_depth,
                    ) {
                        return Err(Error::InvalidCell);
                    }

                    (CellType::MerkleUpdate, self.children_mask.virtualize(1))
                }
                Some(CellType::LibraryReference) => {
                    // Layout: 8-bit tag plus the library hash; no references.
                    const EXPECTED_BIT_LEN: usize = 8 + HASH_BITS;
                    if unlikely(bit_len != EXPECTED_BIT_LEN || !references.is_empty()) {
                        return Err(Error::InvalidCell);
                    }

                    (CellType::LibraryReference, LevelMask::EMPTY)
                }
                _ => return Err(Error::InvalidCell),
            }
        } else {
            (CellType::Ordinary, self.children_mask)
        };

        // The mask implied by type/children must equal the descriptor's mask.
        if unlikely(computed_level_mask != level_mask) {
            return Err(Error::InvalidCell);
        }

        // Merkle cells access their children one level higher than their own.
        let level_offset = cell_type.is_merkle() as u8;
        let is_pruned = cell_type.is_pruned_branch();

        let mut hashes = Vec::<(HashBytes, u16)>::with_capacity(hashes_len);
        for level in 0..4 {
            // Level 0 is always hashed; higher levels only when present in the
            // mask, and never for pruned branches (their higher hashes are stored).
            if level != 0 && (is_pruned || !level_mask.contains(level)) {
                continue;
            }

            let mut hasher = sha2::Sha256::new();

            // Effective mask for this repetition: pruned branches keep the
            // full mask, others use the mask of the current level.
            let level_mask = if is_pruned {
                level_mask
            } else {
                LevelMask::from_level(level)
            };

            // Rewrite d1 with the effective mask and a cleared
            // store-hashes flag before hashing the descriptor bytes.
            descriptor.d1 &= !(CellDescriptor::LEVEL_MASK | CellDescriptor::STORE_HASHES_MASK);
            descriptor.d1 |= u8::from(level_mask) << 5;
            hasher.update([descriptor.d1, descriptor.d2]);

            if level == 0 {
                hasher.update(self.data);
            } else {
                // Higher-level hashes chain over the previously computed hash
                // instead of the raw data.
                // SAFETY: level 0 is never skipped, so `hashes` already
                // contains at least one entry when `level != 0`.
                let prev_hash = unsafe { hashes.last().unwrap_unchecked() };
                hasher.update(prev_hash.0.as_slice());
            }

            // Depth is 1 + max child depth; child depths are hashed in
            // big-endian before the child hashes.
            let mut depth = 0;
            for child in references {
                let child_depth = child.as_ref().depth(level + level_offset);
                let next_depth = match child_depth.checked_add(1) {
                    Some(next_depth) => next_depth,
                    None => return Err(Error::DepthOverflow),
                };
                depth = std::cmp::max(depth, next_depth);

                hasher.update(child_depth.to_be_bytes());
            }

            for child in references {
                let child_hash = child.as_ref().hash(level + level_offset);
                hasher.update(child_hash.as_slice());
            }

            let hash = hasher.finalize().into();
            hashes.push((hash, depth));
        }

        Ok(hashes.into_boxed_slice())
    }
}