1use super::cell_impl::VirtualCellWrapper;
2use super::{Cell, CellDescriptor, CellImpl, CellInner, DynCell, HashBytes};
3
/// Determines when a cell is recorded as visited by a usage tree.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum UsageTreeMode {
    /// Record a cell as soon as it is wrapped by the tree (see `track`).
    OnLoad,
    /// Record a cell only when its data is actually accessed.
    OnDataAccess,
}
12
/// Tracks which cells of a tree were accessed.
pub struct UsageTree {
    // Shared tracking state (`Rc` or `Arc` depending on the `sync` feature).
    state: SharedState,
}
17
18impl UsageTree {
19 pub fn new(mode: UsageTreeMode) -> Self {
21 Self {
22 state: UsageTreeState::new(mode),
23 }
24 }
25
26 pub fn with_mode_and_capacity(mode: UsageTreeMode, capacity: usize) -> Self {
29 Self {
30 state: UsageTreeState::with_mode_and_capacity(mode, capacity),
31 }
32 }
33
34 pub fn track(&self, cell: &Cell) -> Cell {
37 if self.state.mode == UsageTreeMode::OnLoad {
38 self.state.insert(cell);
39 }
40 self.state.wrap(cell.clone())
41 }
42
43 pub fn contains(&self, repr_hash: &HashBytes) -> bool {
46 self.state.contains(repr_hash)
47 }
48
49 pub fn with_subtrees(self) -> UsageTreeWithSubtrees {
51 UsageTreeWithSubtrees {
52 state: self.state,
53 subtrees: Default::default(),
54 }
55 }
56
57 pub fn is_empty(&self) -> bool {
59 self.state.is_empty()
60 }
61
62 pub fn len(&self) -> usize {
64 self.state.len()
65 }
66}
67
/// A usage tree that can additionally mark whole subtrees (by their root
/// hash) as used, alongside per-cell tracking.
pub struct UsageTreeWithSubtrees {
    // Shared per-cell tracking state.
    state: SharedState,
    // Representation hashes of subtree roots added via `add_subtree`.
    subtrees: ahash::HashSet<HashBytes>,
}
73
74impl UsageTreeWithSubtrees {
75 pub fn track(&self, cell: &Cell) -> Cell {
78 if self.state.mode == UsageTreeMode::OnLoad {
79 self.state.insert(cell);
80 }
81 self.state.wrap(cell.clone())
82 }
83
84 pub fn contains_direct(&self, repr_hash: &HashBytes) -> bool {
87 self.state.as_ref().contains(repr_hash)
88 }
89
90 pub fn contains_subtree(&self, repr_hash: &HashBytes) -> bool {
93 self.subtrees.contains(repr_hash)
94 }
95
96 pub fn add_subtree(&mut self, root: &DynCell) -> bool {
99 self.subtrees.insert(*root.repr_hash())
100 }
101}
102
103#[cfg(not(feature = "sync"))]
104use self::rc::{SharedState, UsageCell, UsageTreeState};
105#[cfg(feature = "sync")]
106use self::sync::{SharedState, UsageCell, UsageTreeState};
107
impl CellImpl for UsageCell {
    #[inline]
    fn untrack(self: CellInner<Self>) -> Cell {
        // Delegates to the backend-specific (`rc`/`sync`) implementation.
        UsageCell::untrack_impl(self)
    }

    fn descriptor(&self) -> CellDescriptor {
        self.cell.descriptor()
    }

    fn data(&self) -> &[u8] {
        // Lazily record this cell on first data access. `should_insert`
        // starts out `false` in `OnLoad` mode (insertion already happened
        // when the cell was wrapped) and `true` in `OnDataAccess` mode.
        if self.should_insert() {
            if let Some(usage_tree) = self.usage_tree.upgrade() {
                usage_tree.insert(&self.cell);
            } else {
                // The owning tree was dropped; disable tracking entirely.
                self.set_orphaned();
            }
            self.set_inserted();
        }
        self.cell.data()
    }

    fn bit_len(&self) -> u16 {
        self.cell.bit_len()
    }

    fn reference(&self, index: u8) -> Option<&DynCell> {
        if self.is_orphaned() {
            // The tree is gone: return the untracked child directly.
            self.cell.reference(index)
        } else {
            // Children are wrapped lazily so deeper accesses stay tracked.
            Some(self.load_reference(index)?.as_ref())
        }
    }

    fn reference_cloned(&self, index: u8) -> Option<Cell> {
        if self.is_orphaned() {
            self.cell.reference_cloned(index)
        } else {
            let cell = self.load_reference(index)?.clone();

            // Unsize the tracked wrapper to the backend's pointer type.
            #[cfg(not(feature = "sync"))]
            {
                Some(Cell::from(cell as std::rc::Rc<DynCell>))
            }

            #[cfg(feature = "sync")]
            {
                Some(Cell::from(cell as std::sync::Arc<DynCell>))
            }
        }
    }

    fn virtualize(&self) -> &DynCell {
        VirtualCellWrapper::wrap(self)
    }

    fn hash(&self, level: u8) -> &HashBytes {
        // Hashes and depths are read from the wrapped cell without going
        // through the insert-on-access path above.
        self.cell.hash(level)
    }

    fn depth(&self, level: u8) -> u16 {
        self.cell.depth(level)
    }
}
172
#[cfg(not(feature = "sync"))]
mod rc {
    //! Single-threaded (`Rc`-based) usage-tree backend, used when the
    //! `sync` feature is disabled.

    use std::rc::{Rc, Weak};

    use super::UsageTreeMode;
    use crate::cell::{Cell, DynCell, HashBytes};

    pub type SharedState = Rc<UsageTreeState>;

    // Set of visited cell representation hashes. `RefCell` provides
    // interior mutability since the state is shared behind an `Rc`.
    type VisitedCells = std::cell::RefCell<ahash::HashSet<HashBytes>>;

    pub struct UsageTreeState {
        // When cells are recorded: at wrap time (`OnLoad`) or on data access.
        pub mode: UsageTreeMode,
        // Representation hashes of all cells visited so far.
        pub visited: VisitedCells,
    }

    impl UsageTreeState {
        /// Creates an empty shared state with the given tracking mode.
        pub fn new(mode: UsageTreeMode) -> SharedState {
            Rc::new(Self {
                mode,
                visited: Default::default(),
            })
        }

        /// Creates an empty shared state with a pre-allocated capacity
        /// for the visited-cells set.
        pub fn with_mode_and_capacity(mode: UsageTreeMode, capacity: usize) -> SharedState {
            Rc::new(Self {
                mode,
                visited: std::cell::RefCell::new(ahash::HashSet::with_capacity_and_hasher(
                    capacity,
                    Default::default(),
                )),
            })
        }

        /// Wraps `cell` into a `UsageCell` holding a weak reference back to
        /// this state, so tracking stops once the tree is dropped.
        pub fn wrap(self: &SharedState, cell: Cell) -> Cell {
            Cell::from(Rc::new(UsageCell::new(cell, Rc::downgrade(self), self.mode)) as Rc<DynCell>)
        }

        /// Records the cell's representation hash as visited.
        #[inline]
        pub fn insert(&self, cell: &Cell) {
            self.visited.borrow_mut().insert(*cell.repr_hash());
        }

        /// Returns whether the hash was recorded as visited.
        #[inline]
        pub fn contains(&self, repr_hash: &HashBytes) -> bool {
            self.visited.borrow().contains(repr_hash)
        }

        /// Returns `true` if no cells were visited yet.
        #[inline]
        pub fn is_empty(&self) -> bool {
            self.visited.borrow().is_empty()
        }

        /// Returns the number of visited cells.
        #[inline]
        pub fn len(&self) -> usize {
            self.visited.borrow().len()
        }
    }

    /// A cell wrapper that records accesses into the owning usage tree and
    /// lazily wraps child references on first access.
    pub struct UsageCell {
        // The wrapped cell.
        pub cell: Cell,
        // Weak link to the owning tree; fails to upgrade once it is dropped.
        pub usage_tree: Weak<UsageTreeState>,
        // Lazily created tracked wrappers for up to 4 child references.
        pub children: std::cell::UnsafeCell<[Option<Rc<Self>>; 4]>,
        // Whether this cell was already recorded as visited.
        pub inserted: std::cell::Cell<bool>,
        // Set once the owning tree is found dropped; tracking is then skipped.
        pub orphaned: std::cell::Cell<bool>,
        pub mode: UsageTreeMode,
    }

    impl UsageCell {
        pub fn new(cell: Cell, usage_tree: Weak<UsageTreeState>, mode: UsageTreeMode) -> Self {
            Self {
                cell,
                usage_tree,
                children: Default::default(),
                // In `OnLoad` mode the hash is inserted at wrap time, so no
                // further insertion on data access is needed.
                inserted: std::cell::Cell::new(mode == UsageTreeMode::OnLoad),
                orphaned: std::cell::Cell::new(false),
                mode,
            }
        }

        /// Returns the inner cell, dropping the tracking wrapper.
        pub fn untrack_impl(self: Rc<Self>) -> Cell {
            self.cell.clone()
        }

        #[inline]
        pub fn is_orphaned(&self) -> bool {
            self.orphaned.get()
        }

        #[inline]
        pub fn set_orphaned(&self) {
            self.orphaned.set(true);
        }

        #[inline]
        pub fn should_insert(&self) -> bool {
            !self.inserted.get()
        }

        #[inline]
        pub fn set_inserted(&self) {
            self.inserted.set(true);
        }

        /// Returns a tracked wrapper for child reference `index`, creating
        /// it (and, in `OnLoad` mode, recording it) on first access.
        /// Returns `None` if the index is out of range or has no child.
        pub fn load_reference(&self, index: u8) -> Option<&Rc<Self>> {
            if index < 4 {
                // SAFETY: single-threaded backend (`Rc`-based), and each slot
                // is written at most once below. NOTE(review): this takes
                // `&mut` over the whole array while a `&Rc<Self>` returned by
                // an earlier call may still be alive — confirm that no such
                // aliasing borrow can be held across calls.
                let children = unsafe { &mut *self.children.get() };
                Some(match &mut children[index as usize] {
                    Some(value) => value,
                    slot @ None => {
                        let child = self.cell.as_ref().reference_cloned(index)?;
                        if self.mode == UsageTreeMode::OnLoad {
                            if let Some(usage_tree) = self.usage_tree.upgrade() {
                                usage_tree.insert(&child);
                            } else {
                                // The owning tree was dropped mid-traversal.
                                self.set_orphaned();
                            }
                        }

                        slot.insert(Rc::new(UsageCell::new(
                            child,
                            self.usage_tree.clone(),
                            self.mode,
                        )))
                    }
                })
            } else {
                None
            }
        }
    }
}
305
#[cfg(feature = "sync")]
mod sync {
    //! Thread-safe (`Arc`-based) usage-tree backend, used when the
    //! `sync` feature is enabled.

    use std::cell::UnsafeCell;
    use std::sync::atomic::{AtomicBool, Ordering};
    use std::sync::{Arc, Once, Weak};

    use super::UsageTreeMode;
    use crate::cell::{Cell, DynCell, HashBytes};

    pub type SharedState = Arc<UsageTreeState>;

    // Concurrent set of visited cell representation hashes.
    type VisitedCells = scc::HashSet<HashBytes, ahash::RandomState>;

    pub struct UsageTreeState {
        // When cells are recorded: at wrap time (`OnLoad`) or on data access.
        pub mode: UsageTreeMode,
        // Representation hashes of all cells visited so far.
        pub visited: VisitedCells,
    }

    impl UsageTreeState {
        /// Creates an empty shared state with the given tracking mode.
        pub fn new(mode: UsageTreeMode) -> SharedState {
            Arc::new(Self {
                mode,
                visited: Default::default(),
            })
        }

        /// Creates an empty shared state with a pre-allocated capacity
        /// for the visited-cells set.
        pub fn with_mode_and_capacity(mode: UsageTreeMode, capacity: usize) -> SharedState {
            Arc::new(Self {
                mode,
                visited: VisitedCells::with_capacity_and_hasher(capacity, Default::default()),
            })
        }

        /// Wraps `cell` into a `UsageCell` holding a weak reference back to
        /// this state, so tracking stops once the tree is dropped.
        pub fn wrap(self: &SharedState, cell: Cell) -> Cell {
            Cell::from(
                Arc::new(UsageCell::new(cell, Arc::downgrade(self), self.mode)) as Arc<DynCell>,
            )
        }

        /// Records the cell's representation hash as visited.
        #[inline]
        pub fn insert(&self, cell: &Cell) {
            // Insertion may fail if the hash is already present; ignored.
            _ = self.visited.insert(*cell.repr_hash());
        }

        /// Returns whether the hash was recorded as visited.
        #[inline]
        pub fn contains(&self, repr_hash: &HashBytes) -> bool {
            self.visited.contains(repr_hash)
        }

        /// Returns `true` if no cells were visited yet.
        #[inline]
        pub fn is_empty(&self) -> bool {
            self.visited.is_empty()
        }

        /// Returns the number of visited cells.
        #[inline]
        pub fn len(&self) -> usize {
            self.visited.len()
        }
    }

    /// A cell wrapper that records accesses into the owning usage tree and
    /// lazily wraps child references on first access (once per slot).
    pub struct UsageCell {
        // The wrapped cell (any previous tracking wrapper is removed in `new`).
        pub cell: Cell,
        // Weak link to the owning tree; fails to upgrade once it is dropped.
        pub usage_tree: Weak<UsageTreeState>,
        // One-shot initialization guard per child slot.
        pub reference_states: [Once; 4],
        // Lazily created tracked wrappers, written under `reference_states`.
        pub reference_data: [UnsafeCell<Option<Arc<Self>>>; 4],
        // Whether this cell was already recorded as visited.
        pub inserted: AtomicBool,
        // Set once the owning tree is found dropped; tracking is then skipped.
        pub orphaned: AtomicBool,
        pub mode: UsageTreeMode,
    }

    impl UsageCell {
        pub fn new(cell: Cell, usage_tree: Weak<UsageTreeState>, mode: UsageTreeMode) -> Self {
            Self {
                // Strip any existing tracking wrapper so wrappers don't nest.
                cell: Cell::untrack(cell),
                usage_tree,
                reference_states: [(); 4].map(|_| Once::new()),
                reference_data: [(); 4].map(|_| UnsafeCell::new(None)),
                // In `OnLoad` mode the hash is inserted at wrap time, so no
                // further insertion on data access is needed.
                inserted: AtomicBool::new(mode == UsageTreeMode::OnLoad),
                orphaned: AtomicBool::new(false),
                mode,
            }
        }

        /// Returns the inner cell, dropping the tracking wrapper.
        /// Avoids a clone when this is the last strong reference.
        pub fn untrack_impl(self: Arc<Self>) -> Cell {
            match Arc::try_unwrap(self) {
                Ok(inner) => inner.cell,
                Err(this) => this.cell.clone(),
            }
        }

        #[inline]
        pub fn is_orphaned(&self) -> bool {
            self.orphaned.load(Ordering::Acquire)
        }

        #[inline]
        pub fn set_orphaned(&self) {
            self.orphaned.store(true, Ordering::Release);
        }

        #[inline]
        pub fn should_insert(&self) -> bool {
            !self.inserted.load(Ordering::Acquire)
        }

        #[inline]
        pub fn set_inserted(&self) {
            self.inserted.store(true, Ordering::Release);
        }

        /// Returns a tracked wrapper for child reference `index`, creating
        /// it (and, in `OnLoad` mode, recording it) exactly once.
        /// Returns `None` if the index is out of range or has no child.
        pub fn load_reference(&self, index: u8) -> Option<&Arc<Self>> {
            if index < 4 {
                let mut should_insert = false;
                // `call_once_force` still runs the closure if a previous
                // initialization attempt panicked (poisoned `Once`).
                self.reference_states[index as usize].call_once_force(|_| {
                    let Some(child) = self.cell.as_ref().reference_cloned(index) else {
                        // No child at this index: leave the slot as `None`.
                        return;
                    };

                    // The insertion itself happens after `call_once_force`
                    // returns; only the initializing thread sets this flag,
                    // so each child is recorded at most once.
                    should_insert = self.mode == UsageTreeMode::OnLoad;

                    // SAFETY: `Once` guarantees this write happens at most
                    // once and is synchronized-before all reads below.
                    unsafe {
                        *self.reference_data[index as usize].get() = Some(Arc::new(Self::new(
                            child,
                            self.usage_tree.clone(),
                            self.mode,
                        )));
                    };
                });

                // SAFETY: the slot is only mutated inside the completed
                // `call_once_force` above, so a shared read is now safe.
                let child = unsafe { &*self.reference_data[index as usize].get().cast_const() };
                if crate::util::unlikely(should_insert)
                    && let Some(child) = child
                {
                    if let Some(usage_tree) = self.usage_tree.upgrade() {
                        usage_tree.insert(&child.cell);
                    } else {
                        // The owning tree was dropped mid-traversal.
                        self.set_orphaned();
                    }
                }

                child.as_ref()
            } else {
                None
            }
        }
    }

    // SAFETY: interior mutability is confined to `Once`-synchronized writes
    // (`reference_data`) and atomics (`inserted`, `orphaned`).
    // NOTE(review): soundness also assumes `Cell` itself is Send + Sync
    // under the `sync` feature — confirm.
    unsafe impl Send for UsageCell {}
    unsafe impl Sync for UsageCell {}
}