1use std::mem::ManuallyDrop;
2
3use ahash::HashMap;
4use tycho_types::cell::CellTreeStats;
5use tycho_types::models::{
6 IntAddr, ShardIdent, SimpleLib, SizeLimitsConfig, StateInit, StdAddr, WorkchainDescription,
7 WorkchainFormat,
8};
9use tycho_types::num::{VarUint24, VarUint56};
10use tycho_types::prelude::*;
11
/// Branch-prediction hint: tells the optimizer that `b` is unlikely to be `true`.
///
/// Stable Rust exposes no `core::intrinsics::unlikely`, so this uses a
/// well-known workaround: `checked_div` returns `None` only on division by
/// zero, a path LLVM already treats as cold, which biases code layout so the
/// `b == true` branch is placed on the cold path.
/// NOTE(review): this is a codegen heuristic, not a guarantee — do not
/// "simplify" this body; its exact shape is the point.
#[inline(always)]
pub(crate) const fn unlikely(b: bool) -> bool {
    // `1 / 1 == Some(1)` when `b` is false; `1 / 0 == None` when `b` is true,
    // so the function still returns exactly `b`.
    #[allow(clippy::needless_bool, clippy::bool_to_int_with_if)]
    if (1i32).checked_div(if b { 0 } else { 1 }).is_none() {
        true
    } else {
        false
    }
}
22
/// Upper bounds for a cell-tree storage statistics traversal.
#[derive(Default)]
pub struct StorageStatLimits {
    /// Maximum total number of data bits across all unique visited cells.
    pub bit_count: u32,
    /// Maximum number of unique cells that may be visited.
    pub cell_count: u32,
}
28
impl StorageStatLimits {
    /// Limits that can never be hit in practice: both counters capped at `u32::MAX`.
    pub const UNLIMITED: Self = Self {
        bit_count: u32::MAX,
        cell_count: u32::MAX,
    };
}
35
/// Self-referential wrapper around [`ExtStorageStat`] that owns the cells it
/// has visited.
///
/// `inner` stores `&'static HashBytes` keys that actually point into the
/// `Cell`s held by `cells` (the `'static` lifetime is forged in `add_cell`).
/// Invariant: `inner` (and its `visited` map) must be emptied/dropped *before*
/// the corresponding entries of `cells` go away — see `clear` and the manual
/// `Drop` impl below.
pub struct OwnedExtStorageStat {
    // Keeps every added cell alive for as long as `inner` may reference it.
    cells: Vec<Cell>,
    // `ManuallyDrop` so `Drop` can force this to be destroyed before `cells`.
    inner: ManuallyDrop<ExtStorageStat<'static>>,
}
40
impl OwnedExtStorageStat {
    /// Creates a collector with no effective limits.
    pub fn unlimited() -> Self {
        Self::with_limits(StorageStatLimits::UNLIMITED)
    }

    /// Creates an empty collector with the given limits.
    pub fn with_limits(limits: StorageStatLimits) -> Self {
        Self {
            cells: Vec::new(),
            inner: ManuallyDrop::new(ExtStorageStat::with_limits(limits)),
        }
    }

    /// Lifts the limits of an existing collector; already-gathered stats are kept.
    pub fn set_unlimited(&mut self) {
        self.inner.limits = StorageStatLimits::UNLIMITED;
    }

    /// Returns the accumulated bit/cell counters.
    pub fn stats(&self) -> CellTreeStats {
        self.inner.stats()
    }

    /// Adds a cell (and, transitively, its whole subtree) to the statistics.
    ///
    /// Returns `false` when the configured limits or the maximum allowed
    /// merkle depth are exceeded (mirrors [`ExtStorageStat::add_cell`]).
    pub fn add_cell(&mut self, cell: Cell) -> bool {
        // Already accounted for — no need to store another owning copy.
        if self.inner.visited.contains_key(cell.repr_hash()) {
            return true;
        }

        // SAFETY: the forged `'static` reference (and the child references the
        // traversal derives from it) only ever point into the tree owned by
        // `cell`, which is pushed into `self.cells` right below and kept alive
        // until `clear`/`Drop` has emptied `inner` first.
        let cell_ref = unsafe { std::mem::transmute::<&DynCell, &'static DynCell>(cell.as_ref()) };
        let res = self.inner.add_cell(cell_ref);
        self.cells.push(cell);
        res
    }

    /// Resets the collector while keeping its limits and allocations.
    pub fn clear(&mut self) {
        // Order matters: drop the borrowed hash keys *before* releasing the
        // cells they point into.
        self.inner.visited.clear();
        self.inner.cells = 0;
        self.inner.bits = 0;

        self.cells.clear();
    }
}
83
impl Drop for OwnedExtStorageStat {
    fn drop(&mut self) {
        // SAFETY: `inner` is dropped exactly once, here, and never touched
        // afterwards. Destroying it first guarantees its forged `'static`
        // references die before the `cells` field they point into is dropped.
        unsafe { ManuallyDrop::drop(&mut self.inner) };
    }
}
91
/// Storage statistics collector that deduplicates cells by representation
/// hash and enforces bit/cell limits plus a maximum merkle nesting depth.
#[derive(Default)]
pub struct ExtStorageStat<'a> {
    /// Visited cells keyed by repr hash, mapped to their max merkle depth.
    visited: ahash::HashMap<&'a HashBytes, u8>,
    /// Traversal aborts once these bounds are exceeded.
    limits: StorageStatLimits,
    /// Number of unique cells visited so far.
    pub cells: u32,
    /// Total data bits across unique visited cells.
    pub bits: u32,
}
99
impl<'a> ExtStorageStat<'a> {
    /// Maximum merkle-proof nesting depth a traversed tree may contain.
    pub const MAX_ALLOWED_MERKLE_DEPTH: u8 = 2;

    /// Creates an empty collector with the given limits.
    pub fn with_limits(limits: StorageStatLimits) -> Self {
        Self {
            visited: ahash::HashMap::default(),
            limits,
            cells: 0,
            bits: 0,
        }
    }

    /// Computes stats for a slice: the slice itself counts as one (root) cell
    /// contributing `cs.size_bits()` bits, plus every referenced subtree.
    ///
    /// Returns `None` when the limits or the merkle-depth cap are exceeded.
    pub fn compute_for_slice(
        cs: &CellSlice<'a>,
        limits: StorageStatLimits,
    ) -> Option<CellTreeStats> {
        let mut state = Self {
            visited: ahash::HashMap::default(),
            limits,
            cells: 1,
            bits: cs.size_bits() as u32,
        };

        for cell in cs.references() {
            state.add_cell_impl(cell)?;
        }

        Some(CellTreeStats {
            bit_count: state.bits as u64,
            cell_count: state.cells as u64,
        })
    }

    /// Returns the accumulated counters as [`CellTreeStats`].
    pub fn stats(&self) -> CellTreeStats {
        CellTreeStats {
            bit_count: self.bits as u64,
            cell_count: self.cells as u64,
        }
    }

    /// Adds a cell subtree; returns `false` if a limit or the merkle-depth
    /// cap was exceeded.
    ///
    /// NOTE(review): on failure the counters and the `visited` map keep the
    /// partially accumulated state — callers appear to treat a `false` result
    /// as terminal; confirm before reusing the collector after a failure.
    pub fn add_cell(&mut self, cell: &'a DynCell) -> bool {
        self.add_cell_impl(cell).is_some()
    }

    /// Recursive DFS. Returns the subtree's max merkle depth, or `None` when
    /// a counter limit or the merkle-depth cap is exceeded.
    fn add_cell_impl(&mut self, cell: &'a DynCell) -> Option<u8> {
        // Deduplicate by representation hash: each unique cell counts once.
        if let Some(merkle_depth) = self.visited.get(cell.repr_hash()).copied() {
            return Some(merkle_depth);
        }

        // `checked_add` guards against counter overflow (treated as "exceeded").
        self.cells = self.cells.checked_add(1)?;
        self.bits = self.bits.checked_add(cell.bit_len() as u32)?;

        if self.cells > self.limits.cell_count || self.bits > self.limits.bit_count {
            return None;
        }

        // NOTE(review): recursion depth equals tree depth — presumably cell
        // trees are depth-bounded elsewhere; confirm for untrusted input.
        let mut max_merkle_depth = 0u8;
        for cell in cell.references() {
            max_merkle_depth = std::cmp::max(self.add_cell_impl(cell)?, max_merkle_depth);
        }
        // Crossing a merkle cell increases the nesting depth by one.
        max_merkle_depth = max_merkle_depth.saturating_add(cell.cell_type().is_merkle() as u8);

        self.visited.insert(cell.repr_hash(), max_merkle_depth);
        (max_merkle_depth <= Self::MAX_ALLOWED_MERKLE_DEPTH).then_some(max_merkle_depth)
    }
}
166
167pub fn new_varuint24_truncate(value: u64) -> VarUint24 {
168 VarUint24::new(std::cmp::min(value, VarUint24::MAX.into_inner() as u64) as _)
169}
170
171pub fn new_varuint56_truncate(value: u64) -> VarUint56 {
172 VarUint56::new(std::cmp::min(value, VarUint56::MAX.into_inner()))
173}
174
175pub fn check_rewrite_src_addr(my_addr: &StdAddr, addr: &mut Option<IntAddr>) -> bool {
177 match addr {
178 None => {
180 *addr = Some(my_addr.clone().into());
181 true
182 }
183 Some(IntAddr::Std(addr)) if addr == my_addr => true,
185 Some(_) => false,
187 }
188}
189
/// Validates a message destination address against the workchain config and,
/// when possible, rewrites a 256-bit `Var` address into the `Std` form.
///
/// Returns `false` when the address must be rejected: anycast addresses,
/// unknown or message-rejecting workchains, or address lengths not allowed by
/// the workchain format.
pub fn check_rewrite_dst_addr(
    workchains: &HashMap<i32, WorkchainDescription>,
    addr: &mut IntAddr,
) -> bool {
    // Workchain ids representable by a `StdAddr` (`workchain: i8`).
    const STD_WORKCHAINS: std::ops::Range<i32> = -128..128;
    const STD_ADDR_LEN: u16 = 256;

    // Whether a `Var` address can be losslessly converted to `Std` below.
    let mut can_rewrite = false;
    let workchain = match addr {
        IntAddr::Std(addr) => {
            // Anycast addresses are not accepted.
            if addr.anycast.is_some() {
                return false;
            }

            addr.workchain as i32
        }
        IntAddr::Var(addr) => {
            if addr.anycast.is_some() {
                return false;
            }

            // Rewritable only if it is a 256-bit address in a std-range workchain.
            can_rewrite = addr.address_len.into_inner() == STD_ADDR_LEN
                && STD_WORKCHAINS.contains(&addr.workchain);

            addr.workchain
        }
    };

    // The masterchain is implicitly allowed; any other workchain must be
    // present in the config and accepting messages.
    if workchain != ShardIdent::MASTERCHAIN.workchain() {
        let Some(workchain) = workchains.get(&workchain) else {
            return false;
        };

        if !workchain.accept_msgs {
            return false;
        }

        // Check the address shape against the configured workchain format.
        match (&workchain.format, &*addr) {
            (WorkchainFormat::Basic(_), IntAddr::Std(_)) => {}
            (WorkchainFormat::Basic(_), IntAddr::Var(_)) if can_rewrite => {}
            (WorkchainFormat::Extended(f), IntAddr::Std(_)) if f.check_addr_len(STD_ADDR_LEN) => {}
            (WorkchainFormat::Extended(f), IntAddr::Var(a))
                if f.check_addr_len(a.address_len.into_inner()) => {}
            _ => return false,
        }
    }

    // Normalize a rewritable `Var` address into the `Std` representation.
    if can_rewrite && let IntAddr::Var(var) = addr {
        debug_assert!(STD_WORKCHAINS.contains(&var.workchain));
        debug_assert_eq!(var.address_len.into_inner(), STD_ADDR_LEN);

        // `address_len == 256` implies a 32-byte buffer; `min` guards against
        // a shorter backing buffer — TODO confirm this can actually happen.
        let len = std::cmp::min(var.address.len(), 32);
        let mut address = [0; 32];
        address[..len].copy_from_slice(&var.address[..len]);

        *addr = IntAddr::Std(StdAddr::new(var.workchain as i8, HashBytes(address)));
    }

    true
}
267
/// Outcome of an account-state size-limits check.
pub enum StateLimitsResult {
    /// Code, data and libraries are all unchanged; no check was performed.
    Unchanged,
    /// The state exceeds the configured size limits.
    Exceeds,
    /// The state fits within the configured size limits.
    Fits,
}
273
274pub fn check_state_limits_diff(
276 old_state: &StateInit,
277 new_state: &StateInit,
278 limits: &SizeLimitsConfig,
279 is_masterchain: bool,
280 stats_cache: &mut Option<OwnedExtStorageStat>,
281) -> StateLimitsResult {
282 fn unpack_state(state: &StateInit) -> (Option<&'_ Cell>, Option<&'_ Cell>, &'_ StateLibs) {
284 (state.code.as_ref(), state.data.as_ref(), &state.libraries)
285 }
286
287 let (old_code, old_data, old_libs) = unpack_state(old_state);
288 let (new_code, new_data, new_libs) = unpack_state(new_state);
289
290 let libs_changed = old_libs != new_libs;
292 if old_code == new_code && old_data == new_data && !libs_changed {
293 return StateLimitsResult::Unchanged;
294 }
295
296 let check_public_libs = is_masterchain && libs_changed;
299
300 check_state_limits(
301 new_code,
302 new_data,
303 new_libs,
304 limits,
305 check_public_libs,
306 stats_cache,
307 )
308}
309
310pub fn check_state_limits(
311 code: Option<&Cell>,
312 data: Option<&Cell>,
313 libs: &StateLibs,
314 limits: &SizeLimitsConfig,
315 check_public_libs: bool,
316 stats_cache: &mut Option<OwnedExtStorageStat>,
317) -> StateLimitsResult {
318 let mut stats = OwnedExtStorageStat::with_limits(StorageStatLimits {
320 bit_count: limits.max_acc_state_bits,
321 cell_count: limits.max_acc_state_cells,
322 });
323
324 if let Some(code) = code
325 && !stats.add_cell(code.clone())
326 {
327 return StateLimitsResult::Exceeds;
328 }
329
330 if let Some(data) = data
331 && !stats.add_cell(data.clone())
332 {
333 return StateLimitsResult::Exceeds;
334 }
335
336 if let Some(libs) = libs.root()
337 && !stats.add_cell(libs.clone())
338 {
339 return StateLimitsResult::Exceeds;
340 }
341
342 if check_public_libs {
345 let mut public_libs_count = 0;
346 for lib in libs.values() {
347 let Ok(lib) = lib else {
348 return StateLimitsResult::Exceeds;
349 };
350
351 public_libs_count += lib.public as usize;
352 if public_libs_count > limits.max_acc_public_libraries as usize {
353 return StateLimitsResult::Exceeds;
354 }
355 }
356 }
357
358 *stats_cache = Some(stats);
360 StateLimitsResult::Fits
361}
362
/// Account libraries dictionary, keyed by library hash (presumably the code
/// cell's repr hash — confirm against the callers that populate it).
type StateLibs = Dict<HashBytes, SimpleLib>;
364
/// Returns `ceil(value / 2^16)`.
///
/// Presumably `value` is a fixed-point price with 16 fractional bits; this
/// drops the fractional part, rounding toward positive infinity.
pub const fn shift_ceil_price(value: u128) -> u128 {
    // Divide first, then bump by one if any low bits were set — this cannot
    // overflow even for `u128::MAX`.
    if value & 0xffff == 0 {
        value >> 16
    } else {
        (value >> 16) + 1
    }
}
369
#[cfg(test)]
mod tests {
    use super::*;

    /// Soundness check for the self-referential `OwnedExtStorageStat`
    /// (intended to run under `cargo miri test`): plain drop, and
    /// clear-then-reuse must not touch freed cell memory, and duplicate
    /// cells must be deduplicated.
    #[test]
    fn miri_check() {
        let mut owned = OwnedExtStorageStat::with_limits(StorageStatLimits {
            bit_count: 1000,
            cell_count: 1000,
        });

        // Adds three cells (the third a duplicate) and checks the counters.
        fn fill(owned: &mut OwnedExtStorageStat) {
            // The empty cell contributes one cell and zero bits.
            let res = owned.add_cell(Cell::empty_cell());
            assert!(res);
            assert_eq!(owned.inner.bits, 0);
            assert_eq!(owned.inner.cells, 1);

            // 32 data bits plus a reference to the already-visited empty cell.
            let res = owned.add_cell({
                let mut b = CellBuilder::new();
                b.store_u32(123).unwrap();
                b.store_reference(Cell::empty_cell()).unwrap();
                b.build().unwrap()
            });
            assert!(res);
            assert_eq!(owned.inner.bits, 32);
            assert_eq!(owned.inner.cells, 2);

            // Identical tree: deduplicated by repr hash, counters unchanged.
            let res = owned.add_cell({
                let mut b = CellBuilder::new();
                b.store_u32(123).unwrap();
                b.store_reference(Cell::empty_cell()).unwrap();
                b.build().unwrap()
            });
            assert!(res);
            assert_eq!(owned.inner.bits, 32);
            assert_eq!(owned.inner.cells, 2);
        }

        fill(&mut owned);
        drop(owned);

        let mut owned = OwnedExtStorageStat::with_limits(StorageStatLimits {
            bit_count: 1000,
            cell_count: 1000,
        });

        fill(&mut owned);
        owned.clear();
        fill(&mut owned);
    }
}
423}