tracing_rc/
lib.rs

1#![deny(missing_docs)]
2
3//! Cycle collecting reference counted pointers for Rust with a safe api.
4//!
5//! This crate is most useful when you have a data structure with inter-dependent nodes of
6//! arbitrary lifetimes and no clear parent-children relationships.
7//!
8//! If you can be certain that the layout of your data will be acyclic, or [`std::rc::Weak`]
9//! would be sufficient to prevent leaks, this type is probably not a good fit. If you know all
10//! of your data will be dropped after a certain phase of your program completes, you will
11//! probably prefer arena allocation through a crate like `typed-arena`, `generational-arena`,
12//! etc.
13//!
14//! # Concurrent Collection
15//! Experimental support for concurrent collection may be enabled with the `sync` feature flag. When
16//! enabled, a `sync::Agc` type will be provided, paralleling `std::sync::Arc`. Note that you will
17//! need to call `sync::collect` instead of `rc::collect` to collect garbage generated by `Agc`
18//! values.
19//!
20//!
21//! # Basic Example
22//! ```
23//! # use tracing_rc::rc::{
24//! #     collect_full,
25//! #     Gc,
26//! #     GcVisitor,
27//! #     Trace,
28//! # };
29//! #
30//! struct GraphNode<T: 'static> {
31//!     data: T,
32//!     edge: Option<Gc<GraphNode<T>>>,
33//! }
34//!
35//! impl<T> Trace for GraphNode<T> {
36//!     fn visit_children(&self, visitor: &mut GcVisitor) {
37//!         self.edge.visit_children(visitor);
38//!     }
39//! }
40//!
41//! # fn main() {
42//! {
43//!     let node_a = Gc::new(GraphNode {
44//!         data: 10,
45//!         edge: None,
46//!     });
47//!     let node_b = Gc::new(GraphNode {
48//!         data: 11,
49//!         edge: None,
50//!     });
51//!     let node_c = Gc::new(GraphNode {
52//!         data: 12,
53//!         edge: Some(node_a.clone()),
54//!     });
55//!
56//!     node_a.borrow_mut().edge = Some(node_b.clone());
57//!     node_b.borrow_mut().edge = Some(node_c);
58//!
59//!     let a = node_a.borrow();
60//!     let b = a.edge.as_ref().unwrap().borrow();
61//!     let c = b.edge.as_ref().unwrap().borrow();
62//!
63//!     assert_eq!(a.data, c.edge.as_ref().unwrap().borrow().data);
64//!     // all of the nodes go out of scope at this point and would normally be leaked.
65//! }
66//!
67//! // In this simple example, we always have cycles and our program is complete after this,
68//! // so we can't take advantage of the young generation picking up acyclic pointers without
69//! // tracing.
70//! collect_full();
71//!
72//! // All leaked nodes have been cleaned up!
73//! # }
74//! ```
75
76#![cfg_attr(all(doc, ENABLE_DOC_AUTO_CFG), feature(doc_auto_cfg))]
77
78/// A non-sync cycle-collecting reference-counted smart pointer.
79pub mod rc;
80
81#[cfg(feature = "sync")]
82/// An atomic cycle-collecting reference-counted smart pointer.
83pub mod sync;
84
85#[cfg(all(feature = "sync", feature = "proc_macro"))]
86/// Proc macro for deriving [`sync::Trace`].
87pub use tracing_rc_derive::SyncTrace;
88#[cfg(feature = "proc_macro")]
89/// Proc macro for deriving [`Trace`].
90pub use tracing_rc_derive::Trace;
91
/// Controls the style of collection carried out.
///
/// Collection is generational: pointers start in the young gen and graduate to the old
/// gen for tracing collection based on [`CollectOptions::old_gen_threshold`].
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[non_exhaustive]
pub enum CollectionType {
    /// Do a simple pass over the young gen, collecting non-cyclical pointers
    /// and moving old pointers to the old gen. Then perform a cycle-tracing
    /// collection over the old gen.
    Default,
    /// Only run collection for the young gen. This may still move pointers to the old gen if they
    /// qualify based on [`CollectOptions::old_gen_threshold`].
    YoungOnly,
}
104
105impl Default for CollectionType {
106    fn default() -> Self {
107        Self::Default
108    }
109}
110
/// Provides settings which control how cycle-collection is performed.
///
/// Pre-built configurations are available as associated constants (e.g.
/// [`CollectOptions::DEFAULT`]), and can be adjusted with the const builder methods.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[non_exhaustive]
pub struct CollectOptions {
    /// The number of times a pointer may be seen in the young gen before moving it to the old
    /// gen for a full tracing collection. Setting this to zero will cause all pointers to move to
    /// the old gen if they cannot be immediately cleaned up.
    pub old_gen_threshold: usize,
    /// The kind of collection to perform, e.g. just the young gen, or full tracing of both old &
    /// young gen.
    pub kind: CollectionType,
}
123
impl CollectOptions {
    /// The default options for cycle collection. Items remain in the young gen for 5 cycles, and
    /// both old and young gen will be processed for each collection. These options will be used
    /// when calling `collect`.
    pub const DEFAULT: Self = Self {
        old_gen_threshold: 5,
        kind: CollectionType::Default,
    };
    /// Forces tracing collection for all items currently awaiting cleanup.
    pub const TRACE_AND_COLLECT_ALL: Self = Self::DEFAULT.set_old_gen_threshold(0);
    /// Only runs collection for the young generation. This will still move old items to the old
    /// gen.
    pub const YOUNG_ONLY: Self = Self::DEFAULT.set_kind(CollectionType::YoungOnly);

    /// Alter the [`CollectionType`] performed when calling `collect_with_options`.
    ///
    /// Usable in const contexts; returns a new value rather than mutating in place.
    #[must_use]
    pub const fn set_kind(self, kind: CollectionType) -> Self {
        // Destructure so any newly-added field produces a compile error here,
        // forcing this builder to be kept up to date.
        let Self {
            old_gen_threshold,
            kind: _,
        } = self;

        Self {
            old_gen_threshold,
            kind,
        }
    }

    /// Alter the number of times an item may be seen in the young generation before being moved to
    /// the old generation and traced.
    ///
    /// Usable in const contexts; returns a new value rather than mutating in place.
    #[must_use]
    pub const fn set_old_gen_threshold(self, threshold: usize) -> Self {
        // Destructure so any newly-added field produces a compile error here,
        // forcing this builder to be kept up to date.
        let Self {
            old_gen_threshold: _,
            kind,
        } = self;

        Self {
            old_gen_threshold: threshold,
            kind,
        }
    }
}
167
168impl Default for CollectOptions {
169    fn default() -> Self {
170        Self::DEFAULT
171    }
172}
173
/// Generates a node wrapper struct around a reference-counted pointer to a traceable
/// value, along with `From`, `Debug`, `Deref`, `Hash`, `PartialEq`, and `Eq` impls.
///
/// `Hash`/`Eq` identity is based on the address of the pointee, not its contents: the
/// fat pointer is cast to a thin `*const $inner_ty<()>` so only the data address (and
/// not the vtable) participates in the comparison/hash.
///
/// The `upgrade($varname) => $upg` arm supplies the expression used to render the inner
/// pointer for `Debug` output.
macro_rules! impl_node {
    ($name:ident { $field:ident: $ptr_ty:ident<$inner_ty:ident<dyn Trace>>$(,)? }, upgrade($varname:ident) => $upg:expr) => {
        #[derive(Clone)]
        pub(crate) struct $name {
            pub(super) $field: $ptr_ty<$inner_ty<dyn Trace>>,
        }

        impl From<$ptr_ty<$inner_ty<dyn Trace>>> for $name {
            fn from(ptr: $ptr_ty<$inner_ty<dyn Trace>>) -> Self {
                // Use the caller-supplied field name rather than a hard-coded
                // `inner_ptr`; otherwise any invocation whose field is named
                // differently fails to compile.
                Self { $field: ptr }
            }
        }

        impl ::std::fmt::Debug for $name {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                f.debug_struct("Node")
                    .field("strong", &$ptr_ty::strong_count(&self.$field))
                    .field("inner_ptr", &{
                        let $varname = &self.$field;
                        $upg
                    })
                    .finish()
            }
        }

        impl ::std::ops::Deref for $name {
            type Target = $ptr_ty<$inner_ty<dyn Trace>>;

            fn deref(&self) -> &Self::Target {
                &self.$field
            }
        }

        impl ::std::hash::Hash for $name {
            fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
                // Fat -> thin pointer cast: hash only the data address.
                state.write_usize($ptr_ty::as_ptr(&self.$field) as *const $inner_ty<()> as usize)
            }
        }

        impl PartialEq for $name {
            fn eq(&self, other: &Self) -> bool {
                // Compare by data address (thin pointer), consistent with `Hash`.
                std::ptr::eq(
                    $ptr_ty::as_ptr(&self.$field) as *const $inner_ty<()>,
                    $ptr_ty::as_ptr(&other.$field) as *const $inner_ty<()>,
                )
            }
        }

        impl Eq for $name {}
    };
}
225
226pub(crate) use impl_node;