Skip to main content

sandpit/
gc.rs

//! Module containing the GC pointer types.
2//!
3//! ## Overview
4//! * An object may only be referenced by a GC pointer if it implements [`crate::Trace`].
5//! * Any object capable of containing a GC pointer may not impl [`crate::TraceLeaf`].
6//!
7//! [`Gc`] and [`GcOpt`] may be updated via a [`crate::WriteBarrier`] to point
8//! at different values.
9use super::trace::Trace;
10use crate::barrier::WriteBarrier;
11use crate::header::GcHeader;
12use crate::mutator::Mutator;
13use crate::pointee::{GcPointee, Thin};
14
15use alloc::alloc::Layout;
16use core::marker::PhantomData;
17use core::ops::Deref;
18use core::ptr::{null_mut, NonNull};
19use core::sync::atomic::{AtomicPtr, Ordering};
20
21// A Gc points to a valid T within a GC Arena which is also succeeded by its
22// GC header which may or may not be padded.
23// This holds true for Gc as well as GcOpt if it is not null.
24//
25//                                         Gc<T>
26//                                          |
27//                                          V
28// [ <T as GcPointee>::GcHeader ][ padding ][ T value ]
29//
// Gc may be updated through a write barrier, so its pointer is atomic — and
// therefore thin, since atomics cannot hold wide (fat) pointers.
32
/// A garbage collected pointer to a value of type `T` inside the GC arena.
///
/// A [`Gc`] can safely dereference into a `&'gc T`, but provides no option to
/// obtain mutable references to its inner value. Due to all GC values sharing
/// the same `'gc` lifetime, any number of GC values are allowed to reference
/// each other at any time. This is beneficial in easing the creation of graphs
/// and cyclical data structures, but means any mutation of a GC value requires
/// some form of interior mutability.
///
/// Which value a [`Gc`] (or [`GcOpt`]) references may be updated through a
/// write barrier; see [`crate::barrier::WriteBarrier`].
///
/// A [`Gc`] may also point at a garbage collected array like `Gc<'gc, [T]>`.
/// A Gc referencing an array can be obtained via the mutator by using one of
/// several array allocation methods including [`Mutator::alloc_array`].
// Gc may be updated to point somewhere else which requires it to be atomic
// in order to sync with the tracing threads.
pub struct Gc<'gc, T: Trace + ?Sized> {
    // Thin (single-word) pointer to the value; the value's GC header sits
    // before it in the same allocation (see the layout diagram above).
    ptr: AtomicPtr<Thin<T>>,
    // Ties the pointer to the `'gc` mutation lifetime without storing a T.
    scope: PhantomData<&'gc *mut T>,
}
61
impl<'gc, T: Trace> Gc<'gc, T> {
    /// Provides a way to allocate a value into the GC arena, returning a `Gc<T>`.
    ///
    /// This method is equivalent to calling [`crate::mutator::Mutator::alloc`].
    ///
    /// # Example
    /// ```rust
    /// # use sandpit::{Arena, Gc, Root};
    /// # let arena: Arena<Root![()]> = Arena::new(|mu| ());
    /// arena.mutate(|mu, root| {
    ///    let new = Gc::new(mu, 69);
    /// });
    /// ```
    pub fn new(m: &'gc Mutator<'gc>, obj: T) -> Self {
        m.alloc(obj)
    }
}
78
79impl<'gc, T: Trace + ?Sized> Gc<'gc, T> {
80    pub(crate) fn get_layout(&self) -> Layout {
81        <T as GcPointee>::get_header(self.as_thin()).get_alloc_layout()
82    }
83}
84
85impl<'gc, T: Trace + ?Sized + 'gc> Deref for Gc<'gc, T> {
86    type Target = T;
87
88    fn deref(&self) -> &Self::Target {
89        let thin_ptr = self.ptr.load(Ordering::Relaxed);
90
91        <T as GcPointee>::deref(NonNull::new(thin_ptr).unwrap())
92    }
93}
94
95impl<'gc, T: Trace + ?Sized> Clone for Gc<'gc, T> {
96    fn clone(&self) -> Self {
97        let ptr = self.ptr.load(Ordering::Relaxed);
98
99        Self {
100            ptr: AtomicPtr::new(ptr),
101            scope: PhantomData::<&'gc *mut T>,
102        }
103    }
104}
105
106impl<'gc, T: Trace + ?Sized> Gc<'gc, T> {
107    pub(crate) unsafe fn set(&self, value: Gc<'gc, T>) {
108        let thin_ptr = value.ptr.load(Ordering::Relaxed);
109
110        self.ptr.store(thin_ptr, Ordering::Relaxed);
111    }
112
113    // SAFETY: the pointer must have a valid GcHeader for T, and be allocated
114    // within a GC Arena
115    pub(crate) unsafe fn from_ptr(ptr: *const T) -> Self {
116        Self {
117            ptr: AtomicPtr::new(ptr as *mut T as *mut Thin<T>),
118            scope: PhantomData::<&'gc *mut T>,
119        }
120    }
121
122    pub(crate) fn get_header(&self) -> &<T as GcPointee>::GcHeader {
123        <T as GcPointee>::get_header(self.as_thin())
124    }
125
126    // HACK: THIS EXIST FOR PROVENANCE
127    pub(crate) fn get_header_ptr(&self) -> *const <T as GcPointee>::GcHeader {
128        <T as GcPointee>::get_header_ptr(self.as_thin())
129    }
130
131    pub(crate) fn as_thin(&self) -> NonNull<Thin<T>> {
132        unsafe { NonNull::new_unchecked(self.ptr.load(Ordering::Relaxed)) }
133    }
134
135    /// Get a reference to a garabage collected value with the lifetime of the mutation.
136    ///
137    /// Becuase all Gc pointers point to values valid for the entire mutation
138    /// lifetime, it is fine to dereference them with that lifetime.
139    ///
140    /// A regular deref of a `Gc<'gc, T>` gives `&'a T` where `'a` is the lifetime
141    /// of the pointer.
142    ///
143    /// # Example
144    ///
145    /// In this example scoded deref is needed to implement Foo's set_inner method.
146    ///
147    ///```rust
148    /// # use sandpit::{Arena, Root, Gc};
149    /// let arena: Arena<Root![Gc<'_, usize>]> = Arena::new(|mu| Gc::new(mu, 69));
150    ///
151    /// arena.mutate(|mu, root| {
152    ///     struct Foo<'gc> {
153    ///         inner: &'gc usize
154    ///     }
155    ///
156    ///     impl<'gc> Foo<'gc> {
157    ///         fn set_inner(&mut self, gc: Gc<'gc, usize>) {
158    ///             // DOES NOT COMPILE
159    ///             // self.inner = &gc;
160    ///             self.inner = &gc.scoped_deref();
161    ///         }
162    ///     }
163    ///
164    ///     let mut foo = Foo {
165    ///         inner: root.scoped_deref()
166    ///     };
167    ///
168    ///     let gc = Gc::new(mu, 2);
169    ///
170    ///     foo.set_inner(gc);
171    /// });
172    ///```
173    pub fn scoped_deref(&self) -> &'gc T {
174        let thin_ptr = self.ptr.load(Ordering::Relaxed);
175
176        <T as GcPointee>::deref(NonNull::new(thin_ptr).unwrap())
177    }
178
179    /// Allows for updating internal `Gc`'s and `GcOpt`'s.
180    ///
181    /// Returns a reference to the pointed at value that is wrapped in a
182    /// [`crate::barrier::WriteBarrier`] which allows for mutating `Gc` and
183    /// `GcOpt`'s.
184    ///
185    /// # Example
186    ///
187    /// Get a writer barrier to a index of a slice behind write barrier.
188    ///
189    /// ## Example
190    ///
191    /// See [`crate::barrier::WriteBarrier`] for more examples.
192    ///
193    /// ```rust
194    /// use sandpit::{Arena, Gc, Root};
195    ///
196    /// let arena: Arena<Root![Gc<'_, Gc<'_, usize>>]> = Arena::new(|mu| {
197    ///    Gc::new(mu, Gc::new(mu, 69))
198    /// });
199    ///
200    /// arena.mutate(|mu, root| {
201    ///     root.write_barrier(mu, |barrier| {
202    ///         let new_value = Gc::new(mu, 420);
203    ///
204    ///         barrier.set(new_value);
205    ///
206    ///         assert!(**barrier.inner() == 420);
207    ///     });
208    /// });
209    ///```
210    pub fn write_barrier<F>(&self, mu: &'gc Mutator, f: F)
211    where
212        F: FnOnce(&WriteBarrier<T>),
213    {
214        // SAFETY: Its safe to create a writebarrier over this pointer b/c it is guaranteed
215        // to be retraced after the closure ends.
216        let barrier = unsafe { WriteBarrier::new(self.scoped_deref()) };
217
218        f(&barrier);
219
220        if mu.has_marked(&self) {
221            mu.retrace(self.scoped_deref());
222        }
223    }
224}
225
/// A GC pointer which is capable of pointing to null.
///
/// [`GcOpt`] can be unwrapped into a [`Gc`], and can also be updated via
/// a [`crate::barrier::WriteBarrier`].
pub struct GcOpt<'gc, T: Trace + ?Sized> {
    // Null when in the "none" state; otherwise points at a valid T preceded
    // by its GC header.
    ptr: AtomicPtr<Thin<T>>,
    // Ties the pointer to the `'gc` mutation lifetime without storing a T.
    scope: PhantomData<&'gc *mut T>,
}
234
235impl<'gc, T: Trace + ?Sized> From<Gc<'gc, T>> for GcOpt<'gc, T> {
236    fn from(gc: Gc<'gc, T>) -> Self {
237        Self {
238            ptr: AtomicPtr::new(gc.ptr.load(Ordering::Relaxed)),
239            scope: PhantomData::<&'gc *mut T>,
240        }
241    }
242}
243
244impl<'gc, T: Trace + ?Sized> Clone for GcOpt<'gc, T> {
245    fn clone(&self) -> Self {
246        Self {
247            ptr: AtomicPtr::new(self.ptr.load(Ordering::Relaxed)),
248            scope: PhantomData::<&'gc *mut T>,
249        }
250    }
251}
252
253impl<'gc, T: Trace + ?Sized> GcOpt<'gc, T> {
254    /// Creates a new GcOpt which points to null.
255    ///
256    /// A GcOpt can also be created from a [`Gc`] or [`Gc`].
257    ///
258    /// # Example
259    /// ```rust
260    /// use sandpit::{Arena, GcOpt, Root};
261    ///
262    /// let arena: Arena<Root![GcOpt<'_, usize>]> = Arena::new(|mu| {
263    ///    GcOpt::new_none()
264    /// });
265    ///```
266    pub fn new_none() -> Self {
267        Self {
268            ptr: AtomicPtr::new(null_mut()),
269            scope: PhantomData::<&'gc *mut T>,
270        }
271    }
272
273    /// Check whether this [`GcOpt`] is null.
274    ///
275    /// # Example
276    /// ```rust
277    /// # use sandpit::{Arena, GcOpt, Root};
278    /// # let arena: Arena<Root![()]> = Arena::new(|mu| {
279    ///    let gc_opt: GcOpt<()> = GcOpt::new_none();
280    ///
281    ///    assert!(gc_opt.is_none());
282    /// # });
283    ///```
284    pub fn is_none(&self) -> bool {
285        self.ptr.load(Ordering::Relaxed).is_null()
286    }
287
288    /// Check whether this [`GcOpt`] contains a valid [`Gc`].
289    ///
290    /// # Example
291    /// ```rust
292    /// # use sandpit::{Arena, Gc, GcOpt, Root};
293    /// # let arena: Arena<Root![()]> = Arena::new(|mu| {
294    ///    let gc_opt = GcOpt::from(Gc::new(mu, 123));
295    ///
296    ///    assert!(gc_opt.is_some());
297    /// # });
298    ///```
299    pub fn is_some(&self) -> bool {
300        !self.is_none()
301    }
302
303    /// Mutate this [`GcOpt`] so that it is null.
304    ///
305    /// Normally updating a Gc pointer requires a write barrier, however,
306    /// this method is an exception as the null pointer requires no tracing.
307    ///
308    /// # Example
309    /// ```rust
310    /// # use sandpit::{Arena, Gc, GcOpt, Root};
311    /// # let arena: Arena<Root![()]> = Arena::new(|mu| {
312    ///    let gc_opt = GcOpt::from(Gc::new(mu, 123));
313    ///
314    ///    assert!(gc_opt.is_some());
315    ///
316    ///    gc_opt.set_none();
317    ///
318    ///    assert!(gc_opt.is_none());
319    /// # });
320    ///```
321    pub fn set_none(&self) {
322        self.ptr.store(null_mut(), Ordering::Relaxed)
323    }
324
325    /// Convert into a Option of [`Gc`].
326    ///
327    /// # Example
328    /// ```rust
329    /// # use sandpit::{Arena, Gc, GcOpt, Root};
330    /// # let arena: Arena<Root![()]> = Arena::new(|mu| {
331    ///    let gc_opt = GcOpt::from(Gc::new(mu, 123));
332    ///
333    ///    let gc_mut = gc_opt.as_option().unwrap();
334    ///
335    ///    assert!(*gc_mut == 123);
336    /// # });
337    ///```
338    pub fn as_option(&self) -> Option<Gc<'gc, T>> {
339        if self.is_some() {
340            Some(Gc {
341                ptr: AtomicPtr::new(self.ptr.load(Ordering::Relaxed)),
342                scope: PhantomData::<&'gc *mut T>,
343            })
344        } else {
345            None
346        }
347    }
348
349    /// ## Panics
350    ///
351    /// Panics if it is in the null state
352    pub fn unwrap(&self) -> Gc<'gc, T> {
353        self.as_option().unwrap()
354    }
355
356    // If the tracers have already traced this pointer, than the new pointer
357    // must be retraced before the end of the mutation context.
358    //
359    // Use a write barrier to call this method safely.
360    pub(crate) unsafe fn set(&self, new: GcOpt<'gc, T>) {
361        let thin_ptr = new.ptr.load(Ordering::Relaxed);
362
363        self.ptr.store(thin_ptr, Ordering::SeqCst);
364    }
365}
366
impl<'gc, T: Trace> GcOpt<'gc, T> {
    /// Allocates `obj` into the GC arena, returning a non-null [`GcOpt`].
    ///
    /// Equivalent to `GcOpt::from(Gc::new(m, obj))`.
    pub fn new(m: &'gc Mutator<'gc>, obj: T) -> Self {
        m.alloc(obj).into()
    }
}
372
impl<'gc, T: Trace + ?Sized> GcOpt<'gc, T> {
    /// Creates a [`GcOpt`] from a raw pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must either be null, or have a valid GcHeader for `T` and be
    /// allocated within a GC Arena (same contract as `Gc::from_ptr`).
    pub unsafe fn from_ptr(ptr: *mut T) -> Self {
        Self {
            ptr: AtomicPtr::new(ptr as *mut Thin<T>),
            scope: PhantomData::<&'gc *mut T>,
        }
    }
}
381
382/*
383pub trait GcPointer: Trace + Clone {
384    const POINTEE_ALIGNMENT: usize;
385
386    unsafe fn set(&self, value: Self);
387}
388
389impl<'gc, T: Trace> GcPointer for Gc<'gc, T> {
390    const POINTEE_ALIGNMENT: usize = core::mem::align_of::<T>();
391
392    unsafe fn set(&self, value: Self) {
393        self.set(value)
394    }
395}
396
397impl<'gc, T: Trace> GcPointer for GcOpt<'gc, T> {
398    const POINTEE_ALIGNMENT: usize = core::mem::align_of::<T>();
399
400    unsafe fn set(&self, value: Self) {
401        self.set(value)
402    }
403}
404*/
405
#[cfg(test)]
mod tests {
    use super::*;
    use crate::header::GcMark;
    use crate::{Arena, Root};

    // Marks written through a header are visible on re-read and leave the
    // value itself untouched.
    #[test]
    fn set_header_marker() {
        let _: Arena<Root![_]> = Arena::new(|mu| {
            let gc = Gc::new(mu, 69);
            let header = gc.get_header();

            assert_eq!(*gc, 69);
            assert_eq!(header.get_mark(), GcMark::Blue);
            header.set_mark(GcMark::Green);
            assert_eq!(header.get_mark(), GcMark::Green);
            assert_eq!(*gc, 69);
        });
    }

    // Round-tripping Gc -> GcOpt -> Gc preserves the value and header.
    #[test]
    fn gc_from_gcopt() {
        let _: Arena<Root![_]> = Arena::new(|mu| {
            let original = Gc::new(mu, 69);
            let opt = GcOpt::from(original);
            let round_tripped = Gc::from(opt.unwrap());
            let header = round_tripped.get_header();

            assert_eq!(*round_tripped, 69);
            assert_eq!(header.get_mark(), GcMark::Blue);
        });
    }

    // Both pointer types stay one machine word wide.
    #[test]
    fn test_gc_sizes() {
        let _: Arena<Root![_]> = Arena::new(|_| {
            assert_eq!(size_of::<Gc<()>>(), size_of::<*const ()>());
            assert_eq!(size_of::<GcOpt<()>>(), size_of::<*const ()>());
        });
    }
}