// tracing_rc/rc/mod.rs

use std::{
    cell::{
        Cell,
        RefCell,
    },
    mem::ManuallyDrop,
    ops::{
        Deref,
        DerefMut,
    },
    rc::Rc,
};

/// Collection algorithms for cleaning up dead [`Gc`]s.
mod collector;
/// Contains the [`Trace`] trait which must be implemented for items stored in a [`Gc`].
pub mod trace;

pub use collector::{
    collect,
    collect_full,
    collect_with_options,
    count_roots,
    GcVisitor,
};
use collector::{
    WeakNode,
    YOUNG_GEN,
};
#[doc(inline)]
pub use trace::Trace;
32
/// Wraps an immutable borrowed reference to a value in a [`Gc`].
pub struct Ref<'a, T: ?Sized> {
    // The underlying `RefCell` read guard. The `ManuallyDrop` layer is hidden by
    // this type's `Deref` impl, so callers only ever see a plain `&T`.
    cell_ref: std::cell::Ref<'a, ManuallyDrop<T>>,
}
37
38impl<T: ?Sized> Deref for Ref<'_, T> {
39    type Target = T;
40
41    #[inline]
42    fn deref(&self) -> &T {
43        self.cell_ref.deref().deref()
44    }
45}
46
/// Wraps a mutable borrowed reference to a value in a [`Gc`].
pub struct RefMut<'a, T: ?Sized> {
    // The underlying `RefCell` write guard. The `ManuallyDrop` layer is hidden
    // by this type's `Deref`/`DerefMut` impls, so callers see `&T`/`&mut T`.
    cell_ref: std::cell::RefMut<'a, ManuallyDrop<T>>,
}
51
52impl<T: ?Sized> Deref for RefMut<'_, T> {
53    type Target = T;
54
55    #[inline]
56    fn deref(&self) -> &T {
57        self.cell_ref.deref().deref()
58    }
59}
60
61impl<T: ?Sized> DerefMut for RefMut<'_, T> {
62    #[inline]
63    fn deref_mut(&mut self) -> &mut Self::Target {
64        self.cell_ref.deref_mut().deref_mut()
65    }
66}
67
/// A cycle-collected reference-counted smart pointer.
///
/// `Gc<T>` provides shared ownership of a value of type `T`, allocated on the heap. Cloning it
/// will produce a new `Gc` instance which points to the same allocation as the original `Gc`.
///
/// `Gc` provides `RefCell` like behavior for its data so there is no need to wrap inner data in a
/// `Cell` or `RefCell` type in order to achieve interior mutability. You may use [`Gc::borrow`]
/// and [`Gc::borrow_mut`] to access the data.
///
/// Unlike [`std::rc::Rc`], `Gc` pointers may refer to each other in a way that creates cycles
/// arbitrarily without causing leaks, provided that the program calls [`collect`] to collect those
/// cycles.
///
/// It's important to note a few things about collection:
/// - The default [`collect`] implementation only performs cycle-tracing collection if a node has
///   been waiting for collection for a while. This is so that we don't pay the cost of tracing
///   for acyclic nodes which may be marked for collection due to the number of outstanding
///   references, but don't actually participate in a cycle. You may use [`collect_full`] to force
///   tracing collection of all pending nodes if you prefer.
/// - Dropping of data is deferred until a call to [`collect`] or [`collect_full`] is made, and the
///   collector is able to determine that the `Gc` is actually dead. The collector guarantees that
///   (in the absence of bugs) the [`Drop`] implementation for your type will be executed if it is
///   determined to be dead, but cannot provide any guarantees on **when** it will be executed.
///     - [`collect`] has weak guarantees even in the presence of acyclic pointers - if a node
///       containing your type is acyclic, but has N strong references, it may take up to `min(N,
///       `[`CollectOptions::old_gen_threshold`](crate::CollectOptions::old_gen_threshold)` + 1)`
///       calls to `collect` for the value to be cleaned up even if all parents are dropped.
///     - The collector does guarantee that if [`collect_full`] is used, the `Drop` implementation
///       for your data will have been executed by the time `collect_full` completes if the value
///       is dead.
/// - The collector does not make any guarantees about the relative order of drops for dead nodes.
///   Even if the order appears stable, you **may not** rely on it for program correctness.
pub struct Gc<T: Trace + 'static> {
    // The shared node: collector-visible status plus the `ManuallyDrop`-wrapped payload.
    ptr: Rc<Inner<T>>,
}
103
impl<T> Trace for Gc<T>
where
    T: Trace + 'static,
{
    // Report this Gc handle itself to the visitor; the visitor decides whether
    // to descend further into the pointed-to graph.
    fn visit_children(&self, visitor: &mut GcVisitor) {
        visitor.visit_node(self);
    }
}
112
113impl<T> Gc<T>
114where
115    T: Trace + 'static,
116{
117    /// Construct a new `Gc` containing `data` which will be automatically cleaned up with it is no
118    /// longer reachable, even in the presence of cyclical references.
119    pub fn new(data: T) -> Self {
120        Self {
121            ptr: Rc::new(Inner {
122                status: Cell::new(Status::Live),
123                data: RefCell::new(ManuallyDrop::new(data)),
124            }),
125        }
126    }
127}
128
129impl<T> Gc<T>
130where
131    T: Trace + 'static,
132{
133    /// Retrieve the current number of strong references outstanding.
134    pub fn strong_count(this: &Self) -> usize {
135        Rc::strong_count(&this.ptr)
136    }
137
138    /// Returns true if the data in Self hasn't been dropped yet. This will almost always be the
139    /// case unless it is called inside of a Drop implementation or if there is a bug present in a
140    /// `Trace` impl.
141    pub fn is_live(this: &Self) -> bool {
142        this.ptr.is_live()
143    }
144
145    /// Get a reference into this `Gc`.
146    ///
147    /// The borrow of the data ends until the returned `Ref` exists the scope. Multiple immutable
148    /// borrows may be taken out at the same time.
149    ///
150    /// # Panics
151    /// Panics if the value is currently mutably borrowed.
152    pub fn borrow(&self) -> Ref<'_, T> {
153        self.try_borrow().unwrap()
154    }
155
156    /// Try to get a reference into this `Gc`.
157    ///
158    /// Returns None if the pointer is dead or immutably borrowed, as the data contained in this
159    /// pointer is possibly cleaned up or cannot be aliased.
160    pub fn try_borrow(&self) -> Option<Ref<'_, T>> {
161        if let Ok(cell_ref) = self.ptr.data.try_borrow() {
162            self.ptr.mark_live();
163            Some(Ref { cell_ref })
164        } else {
165            None
166        }
167    }
168
169    /// Get a mutable reference into this `Gc`.
170    ///
171    /// Similar to `RefCell`, the mutable borrow of the data lasts until the returned RefMut or all
172    /// RefMuts derived from it exit scope.
173    ///
174    /// # Panics
175    /// Panics if the value is currently borrowed.
176    pub fn borrow_mut(&self) -> RefMut<'_, T> {
177        self.try_borrow_mut().unwrap()
178    }
179
180    /// Try to get a mutable referenced into this `Gc`.
181    ///
182    /// Will return None if there are oustanding borrows, or if the pointer is dead.
183    pub fn try_borrow_mut(&self) -> Option<RefMut<'_, T>> {
184        if let Ok(cell_ref) = self.ptr.data.try_borrow_mut() {
185            self.ptr.mark_live();
186            Some(RefMut { cell_ref })
187        } else {
188            None
189        }
190    }
191
192    /// Returns true if both this & other point to the same allocation.
193    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
194        Rc::ptr_eq(&this.ptr, &other.ptr)
195    }
196
197    fn node(&self) -> WeakNode {
198        let inner_ptr = Rc::downgrade(&self.ptr);
199        WeakNode { inner_ptr }
200    }
201}
202
203impl<T> std::fmt::Debug for Gc<T>
204where
205    T: Trace + 'static,
206{
207    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
208        f.debug_struct("Gc")
209            .field("strong", &format_args!("{}", Self::strong_count(self)))
210            .field("status", &format_args!("{:?}", self.ptr.status.get()))
211            .field(
212                "data",
213                &format_args!(
214                    "{:?} @ {:?}",
215                    self.try_borrow().map(|_| std::any::type_name::<T>()),
216                    self.ptr.data.as_ptr()
217                ),
218            )
219            .finish()
220    }
221}
222
223impl<T> Clone for Gc<T>
224where
225    T: Trace + 'static,
226{
227    fn clone(&self) -> Self {
228        self.ptr.mark_live();
229
230        Self {
231            ptr: self.ptr.clone(),
232        }
233    }
234}
235
impl<T: Trace + 'static> Drop for Gc<T> {
    // When a handle is dropped, hand the node to the collector's young generation
    // if it could still be part of a live (possibly cyclic) graph. The node is
    // only tracked when:
    //  - other strong references remain (count > 1); otherwise the `Rc` itself is
    //    about to free the allocation and there is nothing left to trace;
    //  - the collector hasn't already declared it Dead; and
    //  - `try_mark_ref_dropped` succeeds, i.e. no borrow of the data is open (a
    //    borrowed node cannot be dead).
    fn drop(&mut self) {
        if Rc::strong_count(&self.ptr) > 1
            && self.ptr.status.get() != Status::Dead
            && self.ptr.try_mark_ref_dropped()
        {
            let node = self.node();

            // It's possible that this will turn out to be a member of a cycle, so we need
            // to add it to the list of items the collector tracks.
            YOUNG_GEN.with(|gen| {
                let mut gen = gen.borrow_mut();
                // Because we haven't decremented the strong count yet, we can safely just
                // add this to the young gen without fear of early
                // deletion. It might already be present there, but that's fine.
                gen.insert(node, 0);
            });
        }
    }
}
256
257impl<T: Trace + 'static> PartialEq for Gc<T>
258where
259    T: PartialEq,
260{
261    fn eq(&self, other: &Self) -> bool {
262        *self.borrow() == *other.borrow()
263    }
264}
265
266impl<T: Trace + 'static> Eq for Gc<T> where T: Eq {}
267
268impl<T: Trace + 'static> PartialOrd for Gc<T>
269where
270    T: PartialOrd,
271{
272    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
273        self.borrow().partial_cmp(&other.borrow())
274    }
275}
276
277impl<T: Trace + 'static> Ord for Gc<T>
278where
279    T: Ord,
280{
281    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
282        self.borrow().cmp(&other.borrow())
283    }
284}
285
/// Per-node liveness state; drives the collector's decisions and makes the
/// final drop of the payload idempotent.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum Status {
    /// The node has had its refcount incremented recently or a mutable/immutable borrow checked
    /// out and is definitely alive.
    Live,
    /// The node had its reference count decremented recently and may be dead.
    RecentlyDecremented,
    /// The Collector completed collection and believes the node is dead. This status is
    /// unrecoverable.
    Dead,
}
297
/// The heap node shared by every `Gc` handle pointing at the same value.
pub(crate) struct Inner<T: Trace + ?Sized> {
    // Collector-visible liveness state for this node.
    status: Cell<Status>,
    // The user's value. `ManuallyDrop` allows `drop_data` to run `T`'s destructor
    // eagerly without a double drop when the backing `Rc` allocation is freed.
    data: RefCell<ManuallyDrop<T>>,
}
302
303impl<T> std::fmt::Debug for Inner<T>
304where
305    T: Trace + ?Sized,
306{
307    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
308        f.debug_struct("Inner")
309            .field("status", &self.status)
310            .field(
311                "data",
312                &format_args!("{} @ {:?}", std::any::type_name::<T>(), self.data.as_ptr()),
313            )
314            .finish()
315    }
316}
317
impl<T> Drop for Inner<T>
where
    T: Trace + ?Sized,
{
    // Runs when the last strong `Rc` goes away: ensure the payload's destructor
    // executes if the collector hasn't already run it. Safe to call even after a
    // prior `drop_data`: that call leaves the cell permanently mutably borrowed,
    // so the second invocation is a no-op.
    fn drop(&mut self) {
        self.drop_data();
    }
}
326
impl<T> Inner<T>
where
    T: Trace + ?Sized,
{
    /// Drops the contained value (if it is not currently borrowed) and marks the
    /// node Dead.
    ///
    /// After a successful run the inner `RefCell` is left permanently mutably
    /// borrowed, so all later `try_borrow`/`try_borrow_mut` calls fail and
    /// repeated calls to `drop_data` are no-ops.
    fn drop_data(&self) {
        if let Ok(mut mut_borrow) = self.data.try_borrow_mut() {
            self.status.set(Status::Dead);

            // SAFETY: During drop of the data, we forget a mut borrow of it, which bans all access
            // to the data afterwards. If we're able to reach here, we're the last
            // mutable borrower.
            unsafe { ManuallyDrop::drop(&mut mut_borrow) };

            // TODO: Once RefMut::leak is stable, use that here.
            // Leaking the guard keeps the cell borrowed forever, which is what
            // blocks any further access to the now-dropped data.
            std::mem::forget(mut_borrow);
        }
    }

    /// True unless the collector has already destroyed the data (`Status::Dead`).
    fn is_live(&self) -> bool {
        matches!(
            self.status.get(),
            Status::Live | Status::RecentlyDecremented
        )
    }

    /// Promote a possibly-dying node back to Live (on clone or successful
    /// borrow). A Dead node is unrecoverable and stays Dead.
    fn mark_live(&self) {
        if self.status.get() == Status::RecentlyDecremented {
            self.status.set(Status::Live);
        }
    }

    /// Record that a strong reference was dropped. Returns false (leaving the
    /// status untouched) if the data is currently borrowed.
    #[must_use]
    fn try_mark_ref_dropped(&self) -> bool {
        if self.data.try_borrow_mut().is_ok() {
            self.status.set(Status::RecentlyDecremented);
            true
        } else {
            // If someone has an open borrow to data, it cannot be dead.
            false
        }
    }
}
369
#[cfg(test)]
mod tests {
    use std::{
        cell::{
            Cell,
            RefCell,
        },
        mem::ManuallyDrop,
        rc::Rc,
    };

    use pretty_assertions::assert_eq;

    use crate::rc::{
        Inner,
        Status,
    };

    /// After `drop_data`, the payload must be unreachable (the cell stays
    /// permanently borrowed) and the node must report itself as Dead.
    #[test]
    fn data_inaccessible_post_drop() {
        let node = Rc::new(Inner {
            status: Cell::new(Status::Live),
            data: RefCell::new(ManuallyDrop::new(())),
        });

        Inner::drop_data(&node);

        assert!(node.data.try_borrow().is_err());
        assert_eq!(node.status.get(), Status::Dead);
    }
}
401}