1use super::cell_impl::VirtualCellWrapper;
2#[cfg(feature = "stats")]
3use super::CellTreeStats;
4use super::{Cell, CellDescriptor, CellImpl, CellInner, DynCell, HashBytes};
5use crate::util::TryAsMut;
6
/// Determines at which point a tracked cell is recorded as visited
/// in a [`UsageTree`].
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum UsageTreeMode {
    /// Record a cell as soon as it is loaded (wrapped by the tree or
    /// resolved as a reference of a tracked parent).
    OnLoad,
    /// Record a cell only when its data is actually read
    /// (see `CellImpl::data` for `UsageCell`).
    OnDataAccess,
}
15
/// Records which cells of a tree were accessed through tracked wrappers.
pub struct UsageTree {
    // Shared tracking state (`Rc` or `Arc` depending on the `sync` feature);
    // every `UsageCell` wrapper holds a weak reference back to it.
    state: SharedState,
}
20
21impl UsageTree {
22 pub fn new(mode: UsageTreeMode) -> Self {
24 Self {
25 state: UsageTreeState::new(mode),
26 }
27 }
28
29 pub fn with_mode_and_capacity(mode: UsageTreeMode, capacity: usize) -> Self {
32 Self {
33 state: UsageTreeState::with_mode_and_capacity(mode, capacity),
34 }
35 }
36
37 pub fn track(&self, cell: &Cell) -> Cell {
40 if self.state.mode == UsageTreeMode::OnLoad {
41 self.state.insert(cell);
42 }
43 self.state.wrap(cell.clone())
44 }
45
46 pub fn contains(&self, repr_hash: &HashBytes) -> bool {
49 self.state.contains(repr_hash)
50 }
51
52 pub fn with_subtrees(self) -> UsageTreeWithSubtrees {
54 UsageTreeWithSubtrees {
55 state: self.state,
56 subtrees: Default::default(),
57 }
58 }
59
60 pub fn is_empty(&self) -> bool {
62 self.state.is_empty()
63 }
64
65 pub fn len(&self) -> usize {
67 self.state.len()
68 }
69}
70
/// A [`UsageTree`] extended with a set of separately registered subtree roots.
pub struct UsageTreeWithSubtrees {
    // Shared tracking state inherited from the originating `UsageTree`.
    state: SharedState,
    // Representation hashes of roots registered via `add_subtree`.
    subtrees: ahash::HashSet<HashBytes>,
}
76
77impl UsageTreeWithSubtrees {
78 pub fn track(&self, cell: &Cell) -> Cell {
81 if self.state.mode == UsageTreeMode::OnLoad {
82 self.state.insert(cell);
83 }
84 self.state.wrap(cell.clone())
85 }
86
87 pub fn contains_direct(&self, repr_hash: &HashBytes) -> bool {
90 self.state.as_ref().contains(repr_hash)
91 }
92
93 pub fn contains_subtree(&self, repr_hash: &HashBytes) -> bool {
96 self.subtrees.contains(repr_hash)
97 }
98
99 pub fn add_subtree(&mut self, root: &DynCell) -> bool {
102 self.subtrees.insert(*root.repr_hash())
103 }
104}
105
106#[cfg(not(feature = "sync"))]
107use self::rc::{SharedState, UsageCell, UsageTreeState};
108#[cfg(feature = "sync")]
109use self::sync::{SharedState, UsageCell, UsageTreeState};
110
// Cell implementation that transparently delegates to the wrapped cell while
// recording accesses in the owning usage tree.
impl CellImpl for UsageCell {
    #[inline]
    fn untrack(self: CellInner<Self>) -> Cell {
        // Unwraps back into the underlying cell, dropping usage tracking.
        UsageCell::untrack_impl(self)
    }

    fn descriptor(&self) -> CellDescriptor {
        self.cell.descriptor()
    }

    fn data(&self) -> &[u8] {
        // Lazy recording path: `inserted` starts as `false` only in
        // `OnDataAccess` mode, so this runs at most once per wrapper and
        // only in that mode.
        if self.should_insert() {
            if let Some(usage_tree) = self.usage_tree.upgrade() {
                usage_tree.insert(&self.cell);
            }
            self.set_inserted();
        }
        self.cell.data()
    }

    fn bit_len(&self) -> u16 {
        self.cell.bit_len()
    }

    fn reference(&self, index: u8) -> Option<&DynCell> {
        // Children are wrapped lazily by `load_reference`, so accesses to
        // them are tracked as well.
        Some(self.load_reference(index)?.as_ref())
    }

    fn reference_cloned(&self, index: u8) -> Option<Cell> {
        let cell = self.load_reference(index)?.clone();

        // The concrete smart pointer behind `Cell` depends on the `sync`
        // feature; both branches perform the same unsizing coercion.
        #[cfg(not(feature = "sync"))]
        {
            Some(Cell::from(cell as std::rc::Rc<DynCell>))
        }

        #[cfg(feature = "sync")]
        {
            Some(Cell::from(cell as std::sync::Arc<DynCell>))
        }
    }

    fn virtualize(&self) -> &DynCell {
        VirtualCellWrapper::wrap(self)
    }

    fn hash(&self, level: u8) -> &HashBytes {
        self.cell.hash(level)
    }

    fn depth(&self, level: u8) -> u16 {
        self.cell.depth(level)
    }

    fn take_first_child(&mut self) -> Option<Cell> {
        // `try_as_mut` returns `None` when the inner cell is shared; in that
        // case there is no child to take.
        self.cell.try_as_mut()?.take_first_child()
    }

    fn replace_first_child(&mut self, parent: Cell) -> Result<Cell, Cell> {
        // Hands `parent` back unchanged when the inner cell cannot be
        // mutated in place.
        match self.cell.try_as_mut() {
            Some(cell) => cell.replace_first_child(parent),
            None => Err(parent),
        }
    }

    fn take_next_child(&mut self) -> Option<Cell> {
        self.cell.try_as_mut()?.take_next_child()
    }

    #[cfg(feature = "stats")]
    fn stats(&self) -> CellTreeStats {
        self.cell.stats()
    }
}
185
#[cfg(not(feature = "sync"))]
mod rc {
    //! Single-threaded (`Rc`-based) usage tree backend.

    use std::rc::{Rc, Weak};

    use super::UsageTreeMode;
    use crate::cell::{Cell, DynCell, HashBytes};

    pub type SharedState = Rc<UsageTreeState>;

    // Set of visited representation hashes; `RefCell` provides
    // single-threaded interior mutability behind `&self` methods.
    type VisitedCells = std::cell::RefCell<ahash::HashSet<HashBytes>>;

    /// Shared tracking state referenced (weakly) by every wrapped cell.
    pub struct UsageTreeState {
        pub mode: UsageTreeMode,
        pub visited: VisitedCells,
    }

    impl UsageTreeState {
        /// Creates a shared state with an empty visited set.
        pub fn new(mode: UsageTreeMode) -> SharedState {
            Rc::new(Self {
                mode,
                visited: Default::default(),
            })
        }

        /// Creates a shared state with the visited set preallocated for
        /// `capacity` entries.
        pub fn with_mode_and_capacity(mode: UsageTreeMode, capacity: usize) -> SharedState {
            Rc::new(Self {
                mode,
                visited: std::cell::RefCell::new(ahash::HashSet::with_capacity_and_hasher(
                    capacity,
                    Default::default(),
                )),
            })
        }

        /// Wraps `cell` into a `UsageCell` that holds a weak reference back
        /// to this state (weak, to avoid a reference cycle).
        pub fn wrap(self: &SharedState, cell: Cell) -> Cell {
            Cell::from(Rc::new(UsageCell::new(cell, Rc::downgrade(self), self.mode)) as Rc<DynCell>)
        }

        /// Marks the cell (by representation hash) as visited.
        #[inline]
        pub fn insert(&self, cell: &Cell) {
            self.visited.borrow_mut().insert(*cell.repr_hash());
        }

        /// Returns whether the given representation hash was visited.
        #[inline]
        pub fn contains(&self, repr_hash: &HashBytes) -> bool {
            self.visited.borrow().contains(repr_hash)
        }

        /// Returns `true` if no cells have been visited yet.
        #[inline]
        pub fn is_empty(&self) -> bool {
            self.visited.borrow().is_empty()
        }

        /// Returns the number of visited cells.
        #[inline]
        pub fn len(&self) -> usize {
            self.visited.borrow().len()
        }
    }

    /// Tracked wrapper around a [`Cell`] that lazily wraps its children.
    pub struct UsageCell {
        pub cell: Cell,
        // Weak link back to the owning tree; upgrades fail (and tracking is
        // silently skipped) once the tree is dropped.
        pub usage_tree: Weak<UsageTreeState>,
        // Lazily initialized wrapped children (a cell has at most 4 refs).
        // NOTE(review): interior mutability via raw `UnsafeCell` rather than
        // `RefCell`; see the safety note in `load_reference`.
        pub children: std::cell::UnsafeCell<[Option<Rc<Self>>; 4]>,
        // Whether this cell was already recorded in the tree.
        pub inserted: std::cell::Cell<bool>,
        pub mode: UsageTreeMode,
    }

    impl UsageCell {
        pub fn new(cell: Cell, usage_tree: Weak<UsageTreeState>, mode: UsageTreeMode) -> Self {
            Self {
                cell,
                usage_tree,
                children: Default::default(),
                // In `OnLoad` mode the cell is recorded at wrap time, so the
                // lazy data-access insertion is disabled up front.
                inserted: std::cell::Cell::new(mode == UsageTreeMode::OnLoad),
                mode,
            }
        }

        /// Returns the underlying cell, discarding the tracking wrapper.
        pub fn untrack_impl(self: Rc<Self>) -> Cell {
            self.cell.clone()
        }

        /// Returns `true` if this cell still needs to be recorded on data access.
        pub fn should_insert(&self) -> bool {
            !self.inserted.get()
        }

        pub fn set_inserted(&self) {
            self.inserted.set(true);
        }

        /// Returns the wrapped child at `index`, creating (and, in `OnLoad`
        /// mode, recording) it on first access.
        pub fn load_reference(&self, index: u8) -> Option<&Rc<Self>> {
            if index < 4 {
                // NOTE(review): this takes a `&mut` into the `UnsafeCell`
                // while handing out shared `&Rc<Self>` references derived
                // from it; soundness relies on single-threaded,
                // non-reentrant access to this slot — confirm callers cannot
                // re-enter `load_reference` for the same cell while holding
                // a returned reference.
                let children = unsafe { &mut *self.children.get() };
                Some(match &mut children[index as usize] {
                    Some(value) => value,
                    slot @ None => {
                        let child = self.cell.as_ref().reference_cloned(index)?;
                        if self.mode == UsageTreeMode::OnLoad {
                            if let Some(usage_tree) = self.usage_tree.upgrade() {
                                usage_tree.insert(&child);
                            }
                        }

                        slot.insert(Rc::new(UsageCell::new(
                            child,
                            self.usage_tree.clone(),
                            self.mode,
                        )))
                    }
                })
            } else {
                // Cells never have more than 4 references.
                None
            }
        }
    }
}
302
#[cfg(feature = "sync")]
mod sync {
    //! Thread-safe (`Arc`-based) usage tree backend.

    use std::cell::UnsafeCell;
    use std::sync::atomic::{AtomicBool, Ordering};
    use std::sync::{Arc, Once, Weak};

    use super::UsageTreeMode;
    use crate::cell::{Cell, DynCell, HashBytes};

    pub type SharedState = Arc<UsageTreeState>;

    // Concurrent set of visited representation hashes.
    type VisitedCells = scc::HashSet<HashBytes, ahash::RandomState>;

    /// Shared tracking state referenced (weakly) by every wrapped cell.
    pub struct UsageTreeState {
        pub mode: UsageTreeMode,
        pub visited: VisitedCells,
    }

    impl UsageTreeState {
        /// Creates a shared state with an empty visited set.
        pub fn new(mode: UsageTreeMode) -> SharedState {
            Arc::new(Self {
                mode,
                visited: Default::default(),
            })
        }

        /// Creates a shared state with the visited set preallocated for
        /// `capacity` entries.
        pub fn with_mode_and_capacity(mode: UsageTreeMode, capacity: usize) -> SharedState {
            Arc::new(Self {
                mode,
                visited: VisitedCells::with_capacity_and_hasher(capacity, Default::default()),
            })
        }

        /// Wraps `cell` into a `UsageCell` that holds a weak reference back
        /// to this state (weak, to avoid a reference cycle).
        pub fn wrap(self: &SharedState, cell: Cell) -> Cell {
            Cell::from(
                Arc::new(UsageCell::new(cell, Arc::downgrade(self), self.mode)) as Arc<DynCell>,
            )
        }

        /// Marks the cell (by representation hash) as visited.
        #[inline]
        pub fn insert(&self, cell: &Cell) {
            // Insertion result is irrelevant: an `Err` just means the hash
            // was already recorded.
            _ = self.visited.insert(*cell.repr_hash());
        }

        /// Returns whether the given representation hash was visited.
        #[inline]
        pub fn contains(&self, repr_hash: &HashBytes) -> bool {
            self.visited.contains(repr_hash)
        }

        /// Returns `true` if no cells have been visited yet.
        #[inline]
        pub fn is_empty(&self) -> bool {
            self.visited.is_empty()
        }

        /// Returns the number of visited cells.
        #[inline]
        pub fn len(&self) -> usize {
            self.visited.len()
        }
    }

    /// Tracked wrapper around a [`Cell`] that lazily wraps its children.
    pub struct UsageCell {
        pub cell: Cell,
        // Weak link back to the owning tree; upgrades fail (and tracking is
        // silently skipped) once the tree is dropped.
        pub usage_tree: Weak<UsageTreeState>,
        // One-shot initialization guard per child slot; pairs with
        // `reference_data` below.
        pub reference_states: [Once; 4],
        // Lazily initialized wrapped children, published under the
        // corresponding `Once` in `reference_states`.
        pub reference_data: [UnsafeCell<Option<Arc<Self>>>; 4],
        // Whether this cell was already recorded in the tree.
        pub inserted: AtomicBool,
        pub mode: UsageTreeMode,
    }

    impl UsageCell {
        pub fn new(cell: Cell, usage_tree: Weak<UsageTreeState>, mode: UsageTreeMode) -> Self {
            Self {
                cell,
                usage_tree,
                // `Once`/`UnsafeCell` are not `Copy`, hence the `map` dance
                // instead of `[Once::new(); 4]`.
                reference_states: [(); 4].map(|_| Once::new()),
                reference_data: [(); 4].map(|_| UnsafeCell::new(None)),
                // In `OnLoad` mode the cell is recorded at wrap time, so the
                // lazy data-access insertion is disabled up front.
                inserted: AtomicBool::new(mode == UsageTreeMode::OnLoad),
                mode,
            }
        }

        /// Returns the underlying cell, discarding the tracking wrapper.
        /// Avoids a clone when this is the last reference to the wrapper.
        pub fn untrack_impl(self: Arc<Self>) -> Cell {
            match Arc::try_unwrap(self) {
                Ok(inner) => inner.cell,
                Err(this) => this.cell.clone(),
            }
        }

        /// Returns `true` if this cell still needs to be recorded on data access.
        pub fn should_insert(&self) -> bool {
            !self.inserted.load(Ordering::Acquire)
        }

        pub fn set_inserted(&self) {
            self.inserted.store(true, Ordering::Release);
        }

        /// Returns the wrapped child at `index`, creating (and, in `OnLoad`
        /// mode, recording) it on first access.
        pub fn load_reference(&self, index: u8) -> Option<&Arc<Self>> {
            if index < 4 {
                let mut should_insert = false;
                // `call_once_force` still runs the closure if a previous
                // attempt panicked (poisoned `Once`).
                self.reference_states[index as usize].call_once_force(|_| {
                    // No child at this index: leave the slot as `None`.
                    let Some(child) = self.cell.as_ref().reference_cloned(index) else {
                        return;
                    };

                    // Only the thread that actually initializes the slot
                    // records the visit (checked below, outside the closure).
                    should_insert = self.mode == UsageTreeMode::OnLoad;

                    // NOTE(review): write is published by the `Once`; all
                    // later reads go through the same `Once`, so no data
                    // race — confirm no other path writes this slot.
                    unsafe {
                        *self.reference_data[index as usize].get() = Some(Arc::new(Self::new(
                            child,
                            self.usage_tree.clone(),
                            self.mode,
                        )));
                    };
                });

                // Safe to read: the `Once` above has completed, so the slot
                // is fully initialized (or still `None`) and never mutated again.
                let child = unsafe { &*self.reference_data[index as usize].get().cast_const() };
                if crate::util::unlikely(should_insert) {
                    if let Some(child) = child {
                        if let Some(usage_tree) = self.usage_tree.upgrade() {
                            usage_tree.insert(&child.cell);
                        }
                    }
                }

                child.as_ref()
            } else {
                // Cells never have more than 4 references.
                None
            }
        }
    }

    // NOTE(review): required because `UnsafeCell` removes auto-`Send`/`Sync`;
    // soundness relies on the `Once`-guarded access pattern above — confirm
    // all fields (`Cell`, `Weak<UsageTreeState>`) are themselves `Send + Sync`
    // under the `sync` feature.
    unsafe impl Send for UsageCell {}
    unsafe impl Sync for UsageCell {}
}