1use super::cell_impl::VirtualCellWrapper;
2use super::{Cell, CellDescriptor, CellImpl, CellInner, DynCell, HashBytes};
3use crate::util::TryAsMut;
4
5#[cfg(feature = "stats")]
6use super::CellTreeStats;
7
/// Determines at which point a cell is recorded into a usage tree.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum UsageTreeMode {
    /// The cell is recorded as soon as it is loaded (wrapped by the
    /// tracker or resolved as a child reference).
    OnLoad,
    /// The cell is recorded only when its data is first accessed.
    OnDataAccess,
}
16
/// A set of visited cell hashes, used to track which cells of a larger
/// cell tree were actually accessed.
pub struct UsageTree {
    // Shared tracker state (`Rc` or `Arc` depending on the `sync` feature);
    // wrapped cells hold a weak reference back to it.
    state: SharedState,
}
21
22impl UsageTree {
23 pub fn new(mode: UsageTreeMode) -> Self {
25 Self {
26 state: UsageTreeState::new(mode),
27 }
28 }
29
30 pub fn with_mode_and_capacity(mode: UsageTreeMode, capacity: usize) -> Self {
33 Self {
34 state: UsageTreeState::with_mode_and_capacity(mode, capacity),
35 }
36 }
37
38 pub fn track(&self, cell: &Cell) -> Cell {
41 if self.state.mode == UsageTreeMode::OnLoad {
42 self.state.insert(cell);
43 }
44 self.state.wrap(cell.clone())
45 }
46
47 pub fn contains(&self, repr_hash: &HashBytes) -> bool {
50 self.state.contains(repr_hash)
51 }
52
53 pub fn with_subtrees(self) -> UsageTreeWithSubtrees {
55 UsageTreeWithSubtrees {
56 state: self.state,
57 subtrees: Default::default(),
58 }
59 }
60
61 pub fn is_empty(&self) -> bool {
63 self.state.is_empty()
64 }
65
66 pub fn len(&self) -> usize {
68 self.state.len()
69 }
70}
71
/// A usage tree extended with a separate set of tracked subtree roots.
pub struct UsageTreeWithSubtrees {
    // Shared tracker state, same as in `UsageTree`.
    state: SharedState,
    // Representation hashes of the roots of separately added subtrees.
    subtrees: ahash::HashSet<HashBytes>,
}
77
78impl UsageTreeWithSubtrees {
79 pub fn track(&self, cell: &Cell) -> Cell {
82 if self.state.mode == UsageTreeMode::OnLoad {
83 self.state.insert(cell);
84 }
85 self.state.wrap(cell.clone())
86 }
87
88 pub fn contains_direct(&self, repr_hash: &HashBytes) -> bool {
91 self.state.as_ref().contains(repr_hash)
92 }
93
94 pub fn contains_subtree(&self, repr_hash: &HashBytes) -> bool {
97 self.subtrees.contains(repr_hash)
98 }
99
100 pub fn add_subtree(&mut self, root: &DynCell) -> bool {
103 self.subtrees.insert(*root.repr_hash())
104 }
105}
106
107#[cfg(not(feature = "sync"))]
108use self::rc::{SharedState, UsageCell, UsageTreeState};
109
110#[cfg(feature = "sync")]
111use self::sync::{SharedState, UsageCell, UsageTreeState};
112
// Tracking cell wrapper: delegates every query to the wrapped cell while
// recording accesses into the owning usage tree. `UsageCell` itself is
// feature-specific (`rc` or `sync` module below).
impl CellImpl for UsageCell {
    #[inline]
    fn untrack(self: CellInner<Self>) -> Cell {
        // Strip the tracking layer, returning the underlying cell.
        UsageCell::untrack_impl(self)
    }

    fn descriptor(&self) -> CellDescriptor {
        self.cell.descriptor()
    }

    fn data(&self) -> &[u8] {
        // In `OnDataAccess` mode this is where the cell is first recorded
        // (`inserted` starts as `false` only in that mode). If the usage
        // tree was already dropped, the weak upgrade fails and tracking
        // silently becomes a no-op.
        if self.should_insert() {
            if let Some(usage_tree) = self.usage_tree.upgrade() {
                usage_tree.insert(&self.cell);
            }
            self.set_inserted();
        }
        self.cell.data()
    }

    fn bit_len(&self) -> u16 {
        self.cell.bit_len()
    }

    fn reference(&self, index: u8) -> Option<&DynCell> {
        // Children are wrapped lazily so that the whole subtree stays tracked.
        Some(self.load_reference(index)?.as_ref())
    }

    fn reference_cloned(&self, index: u8) -> Option<Cell> {
        let cell = self.load_reference(index)?.clone();

        // Unsized coercion to `DynCell` behind the feature-specific pointer.
        #[cfg(not(feature = "sync"))]
        {
            Some(Cell::from(cell as std::rc::Rc<DynCell>))
        }

        #[cfg(feature = "sync")]
        {
            Some(Cell::from(cell as std::sync::Arc<DynCell>))
        }
    }

    fn virtualize(&self) -> &DynCell {
        VirtualCellWrapper::wrap(self)
    }

    fn hash(&self, level: u8) -> &HashBytes {
        self.cell.hash(level)
    }

    fn depth(&self, level: u8) -> u16 {
        self.cell.depth(level)
    }

    fn take_first_child(&mut self) -> Option<Cell> {
        // Fails (returns `None`) when the inner cell is shared and cannot
        // be borrowed mutably.
        self.cell.try_as_mut()?.take_first_child()
    }

    fn replace_first_child(&mut self, parent: Cell) -> Result<Cell, Cell> {
        // Hand `parent` back unchanged if the inner cell is not uniquely owned.
        match self.cell.try_as_mut() {
            Some(cell) => cell.replace_first_child(parent),
            None => Err(parent),
        }
    }

    fn take_next_child(&mut self) -> Option<Cell> {
        self.cell.try_as_mut()?.take_next_child()
    }

    #[cfg(feature = "stats")]
    fn stats(&self) -> CellTreeStats {
        self.cell.stats()
    }
}
187
188#[cfg(not(feature = "sync"))]
189mod rc {
190 use std::rc::{Rc, Weak};
191
192 use super::UsageTreeMode;
193 use crate::cell::{Cell, DynCell, HashBytes};
194
195 pub type SharedState = Rc<UsageTreeState>;
196
197 type VisitedCells = std::cell::RefCell<ahash::HashSet<HashBytes>>;
198
199 pub struct UsageTreeState {
200 pub mode: UsageTreeMode,
201 pub visited: VisitedCells,
202 }
203
204 impl UsageTreeState {
205 pub fn new(mode: UsageTreeMode) -> SharedState {
206 Rc::new(Self {
207 mode,
208 visited: Default::default(),
209 })
210 }
211
212 pub fn with_mode_and_capacity(mode: UsageTreeMode, capacity: usize) -> SharedState {
213 Rc::new(Self {
214 mode,
215 visited: std::cell::RefCell::new(ahash::HashSet::with_capacity_and_hasher(
216 capacity,
217 Default::default(),
218 )),
219 })
220 }
221
222 pub fn wrap(self: &SharedState, cell: Cell) -> Cell {
223 Cell::from(Rc::new(UsageCell::new(cell, Rc::downgrade(self), self.mode)) as Rc<DynCell>)
224 }
225
226 #[inline]
227 pub fn insert(&self, cell: &Cell) {
228 self.visited.borrow_mut().insert(*cell.repr_hash());
229 }
230
231 #[inline]
232 pub fn contains(&self, repr_hash: &HashBytes) -> bool {
233 self.visited.borrow().contains(repr_hash)
234 }
235
236 #[inline]
237 pub fn is_empty(&self) -> bool {
238 self.visited.borrow().is_empty()
239 }
240
241 #[inline]
242 pub fn len(&self) -> usize {
243 self.visited.borrow().len()
244 }
245 }
246
247 pub struct UsageCell {
248 pub cell: Cell,
249 pub usage_tree: Weak<UsageTreeState>,
250 pub children: std::cell::UnsafeCell<[Option<Rc<Self>>; 4]>,
251 pub inserted: std::cell::Cell<bool>,
252 pub mode: UsageTreeMode,
253 }
254
255 impl UsageCell {
256 pub fn new(cell: Cell, usage_tree: Weak<UsageTreeState>, mode: UsageTreeMode) -> Self {
257 Self {
258 cell,
259 usage_tree,
260 children: Default::default(),
261 inserted: std::cell::Cell::new(mode == UsageTreeMode::OnLoad),
262 mode,
263 }
264 }
265
266 pub fn untrack_impl(self: Rc<Self>) -> Cell {
267 self.cell.clone()
268 }
269
270 pub fn should_insert(&self) -> bool {
271 !self.inserted.get()
272 }
273
274 pub fn set_inserted(&self) {
275 self.inserted.set(true);
276 }
277
278 pub fn load_reference(&self, index: u8) -> Option<&Rc<Self>> {
279 if index < 4 {
280 let children = unsafe { &mut *self.children.get() };
281 Some(match &mut children[index as usize] {
282 Some(value) => value,
283 slot @ None => {
284 let child = self.cell.as_ref().reference_cloned(index)?;
285 if self.mode == UsageTreeMode::OnLoad {
286 if let Some(usage_tree) = self.usage_tree.upgrade() {
287 usage_tree.insert(&child);
288 }
289 }
290
291 slot.insert(Rc::new(UsageCell::new(
292 child,
293 self.usage_tree.clone(),
294 self.mode,
295 )))
296 }
297 })
298 } else {
299 None
300 }
301 }
302 }
303}
304
#[cfg(feature = "sync")]
mod sync {
    //! Thread-safe (`Arc`-based) usage-tree backend.

    use std::cell::UnsafeCell;
    use std::sync::atomic::{AtomicBool, Ordering};
    use std::sync::{Arc, Once, Weak};

    use super::UsageTreeMode;
    use crate::cell::{Cell, DynCell, HashBytes};

    /// Thread-safe shared tracker state.
    pub type SharedState = Arc<UsageTreeState>;

    // Concurrent set of visited representation hashes.
    type VisitedCells = scc::HashSet<HashBytes, ahash::RandomState>;

    /// Tracker state shared between a usage tree and all cells it wraps.
    pub struct UsageTreeState {
        /// Determines when cells are recorded as visited.
        pub mode: UsageTreeMode,
        /// Representation hashes of all visited cells.
        pub visited: VisitedCells,
    }

    impl UsageTreeState {
        /// Creates an empty state with the given tracking mode.
        pub fn new(mode: UsageTreeMode) -> SharedState {
            Arc::new(Self {
                mode,
                visited: Default::default(),
            })
        }

        /// Creates an empty state, preallocating space for `capacity` visited cells.
        pub fn with_mode_and_capacity(mode: UsageTreeMode, capacity: usize) -> SharedState {
            Arc::new(Self {
                mode,
                visited: VisitedCells::with_capacity_and_hasher(capacity, Default::default()),
            })
        }

        /// Wraps a cell into a tracking [`UsageCell`] that points back at this state.
        pub fn wrap(self: &SharedState, cell: Cell) -> Cell {
            Cell::from(
                Arc::new(UsageCell::new(cell, Arc::downgrade(self), self.mode)) as Arc<DynCell>,
            )
        }

        /// Records the cell's representation hash as visited.
        #[inline]
        pub fn insert(&self, cell: &Cell) {
            // The result is ignored: `Err` just means the hash was already present.
            _ = self.visited.insert(*cell.repr_hash());
        }

        /// Returns whether a cell with the given representation hash was visited.
        #[inline]
        pub fn contains(&self, repr_hash: &HashBytes) -> bool {
            self.visited.contains(repr_hash)
        }

        /// Returns `true` if no cells were visited yet.
        #[inline]
        pub fn is_empty(&self) -> bool {
            self.visited.is_empty()
        }

        /// Returns the number of visited cells.
        #[inline]
        pub fn len(&self) -> usize {
            self.visited.len()
        }
    }

    /// A cell wrapper that records accesses into the shared tracker state.
    pub struct UsageCell {
        /// The wrapped cell.
        pub cell: Cell,
        /// Weak link back to the tracker; tracking becomes a no-op once
        /// the owning `UsageTree` is dropped.
        pub usage_tree: Weak<UsageTreeState>,
        /// One-shot initialization guards, one per possible child reference.
        /// Each slot of `reference_data` is written at most once, inside the
        /// matching `Once`.
        pub reference_states: [Once; 4],
        /// Lazily created wrapped children, guarded by `reference_states`.
        pub reference_data: [UnsafeCell<Option<Arc<Self>>>; 4],
        /// Whether this cell was already recorded as visited
        /// (starts as `true` in `OnLoad` mode, where insertion happens eagerly).
        pub inserted: AtomicBool,
        /// Tracking mode inherited from the usage tree.
        pub mode: UsageTreeMode,
    }

    impl UsageCell {
        /// Creates a new tracking wrapper around `cell`.
        pub fn new(cell: Cell, usage_tree: Weak<UsageTreeState>, mode: UsageTreeMode) -> Self {
            Self {
                cell,
                usage_tree,
                reference_states: [(); 4].map(|_| Once::new()),
                reference_data: [(); 4].map(|_| UnsafeCell::new(None)),
                inserted: AtomicBool::new(mode == UsageTreeMode::OnLoad),
                mode,
            }
        }

        /// Strips the tracking layer, returning the underlying cell.
        /// Avoids cloning when this wrapper is uniquely owned.
        pub fn untrack_impl(self: Arc<Self>) -> Cell {
            match Arc::try_unwrap(self) {
                Ok(inner) => inner.cell,
                Err(this) => this.cell.clone(),
            }
        }

        /// Returns `true` if this cell still needs to be recorded on data access.
        pub fn should_insert(&self) -> bool {
            !self.inserted.load(Ordering::Acquire)
        }

        /// Marks this cell as recorded.
        pub fn set_inserted(&self) {
            self.inserted.store(true, Ordering::Release);
        }

        /// Lazily resolves and wraps the child reference at `index`.
        ///
        /// Returns `None` if `index` is out of range or the wrapped cell
        /// has no such reference.
        pub fn load_reference(&self, index: u8) -> Option<&Arc<Self>> {
            if index < 4 {
                // `should_insert` ends up `true` only in the single thread
                // that actually performed the initialization, so the usage
                // tree insert below happens at most once per slot.
                let mut should_insert = false;
                // `call_once_force` also runs after a previous poisoned
                // (panicked) attempt, unlike plain `call_once`.
                self.reference_states[index as usize].call_once_force(|_| {
                    let Some(child) = self.cell.as_ref().reference_cloned(index) else {
                        return;
                    };

                    should_insert = self.mode == UsageTreeMode::OnLoad;

                    // SAFETY(review): the `Once` guard ensures exclusive
                    // access during this write; completion of the `Once`
                    // publishes the value to later readers.
                    unsafe {
                        *self.reference_data[index as usize].get() = Some(Arc::new(Self::new(
                            child,
                            self.usage_tree.clone(),
                            self.mode,
                        )));
                    };
                });

                // SAFETY(review): after `call_once_force` returns, the slot
                // is initialized (possibly to `None`) and never written again,
                // so a shared borrow for the `&self` lifetime is sound.
                let child = unsafe { &*self.reference_data[index as usize].get().cast_const() };
                if crate::util::unlikely(should_insert) {
                    if let Some(child) = child {
                        if let Some(usage_tree) = self.usage_tree.upgrade() {
                            usage_tree.insert(&child.cell);
                        }
                    }
                }

                child.as_ref()
            } else {
                None
            }
        }
    }

    // SAFETY(review): interior mutability is confined to `reference_data`
    // (guarded by the per-slot `Once`) and the atomic `inserted` flag;
    // `Cell` here is the crate's `Arc`-based cell type — confirm it is
    // `Send + Sync` under the `sync` feature.
    unsafe impl Send for UsageCell {}
    unsafe impl Sync for UsageCell {}
}