gc_arena/gc.rs

use core::{
    fmt::{self, Debug, Display, Pointer},
    hash::{Hash, Hasher},
    marker::PhantomData,
    mem,
    ops::Deref,
    ptr::{self, NonNull},
};

use crate::{
    barrier::{Unlock, Write},
    collect::Collect,
    context::{Collection, Mutation},
    gc_weak::GcWeak,
    types::{GcBox, GcBoxInner, GcColor, Invariant},
    Finalization,
};

/// A garbage collected pointer to a type `T`. It implements `Copy` and is represented as a plain
/// machine pointer. `Gc` pointers can only be allocated through a `&Mutation<'gc>` inside an
/// arena type, and through "generativity" they can neither escape the arena they were born in
/// nor be stored inside TLS. This, combined with correct `Collect` implementations, means that
/// `Gc` pointers will never be dangling and are always safe to access.
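///
/// A minimal usage sketch (illustrative only; it assumes the crate's `Arena` and `Rootable!`
/// API):
///
/// ```ignore
/// use gc_arena::{Arena, Gc, Rootable};
///
/// // An arena whose root is a single `Gc<'gc, i32>`.
/// let arena = Arena::<Rootable![Gc<'_, i32>]>::new(|mc| Gc::new(mc, 42));
///
/// arena.mutate(|_mc, root| {
///     // `Gc` is `Copy` and dereferences to its contents.
///     assert_eq!(**root, 42);
/// });
/// ```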
pub struct Gc<'gc, T: ?Sized + 'gc> {
    pub(crate) ptr: NonNull<GcBoxInner<T>>,
    pub(crate) _invariant: Invariant<'gc>,
}

impl<'gc, T: Debug + ?Sized + 'gc> Debug for Gc<'gc, T> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&**self, fmt)
    }
}

impl<'gc, T: ?Sized + 'gc> Pointer for Gc<'gc, T> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt::Pointer::fmt(&Gc::as_ptr(*self), fmt)
    }
}

impl<'gc, T: Display + ?Sized + 'gc> Display for Gc<'gc, T> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&**self, fmt)
    }
}

impl<'gc, T: ?Sized + 'gc> Copy for Gc<'gc, T> {}

impl<'gc, T: ?Sized + 'gc> Clone for Gc<'gc, T> {
    #[inline]
    fn clone(&self) -> Gc<'gc, T> {
        *self
    }
}

unsafe impl<'gc, T: ?Sized + 'gc> Collect for Gc<'gc, T> {
    #[inline]
    fn trace(&self, cc: &Collection) {
        unsafe {
            cc.trace(GcBox::erase(self.ptr));
        }
    }
}

impl<'gc, T: ?Sized + 'gc> Deref for Gc<'gc, T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        unsafe { &self.ptr.as_ref().value }
    }
}

impl<'gc, T: ?Sized + 'gc> AsRef<T> for Gc<'gc, T> {
    #[inline]
    fn as_ref(&self) -> &T {
        unsafe { &self.ptr.as_ref().value }
    }
}

impl<'gc, T: Collect + 'gc> Gc<'gc, T> {
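    /// Allocates `t` in the arena controlled by `mc` and returns a `Gc` pointer to it.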
    #[inline]
    pub fn new(mc: &Mutation<'gc>, t: T) -> Gc<'gc, T> {
        Gc {
            ptr: mc.allocate(t),
            _invariant: PhantomData,
        }
    }
}

impl<'gc, T: 'gc> Gc<'gc, T> {
    /// Cast the internal pointer to a different type.
    ///
    /// SAFETY:
    /// It must be valid to dereference a `*mut U` that has come from casting a `*mut T`.
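    ///
    /// A sketch of a sound use (illustrative only; `Wrapper` is a hypothetical
    /// `#[repr(transparent)]` newtype, which is what makes the cast layout-safe, and `mc` is a
    /// `&Mutation<'gc>` from an arena callback):
    ///
    /// ```ignore
    /// #[repr(transparent)]
    /// struct Wrapper(u32);
    ///
    /// let gc: Gc<'_, u32> = Gc::new(mc, 17);
    /// // SAFETY: `Wrapper` is `#[repr(transparent)]` over `u32`, so a `*mut Wrapper` cast
    /// // from a `*mut u32` is valid to dereference.
    /// let wrapped: Gc<'_, Wrapper> = unsafe { Gc::cast(gc) };
    /// assert_eq!(wrapped.0, 17);
    /// ```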
    #[inline]
    pub unsafe fn cast<U: 'gc>(this: Gc<'gc, T>) -> Gc<'gc, U> {
        Gc {
            ptr: NonNull::cast(this.ptr),
            _invariant: PhantomData,
        }
    }

    /// Retrieves a `Gc` from a raw pointer obtained from `Gc::as_ptr`.
    ///
    /// SAFETY:
    /// The provided pointer must have been obtained from `Gc::as_ptr`, and the allocation it
    /// points to must not have been collected yet.
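    ///
    /// A round-trip sketch (illustrative only; assumes `mc: &Mutation<'gc>` from an arena
    /// callback):
    ///
    /// ```ignore
    /// let gc: Gc<'_, i32> = Gc::new(mc, 7);
    /// let raw: *const i32 = Gc::as_ptr(gc);
    /// // SAFETY: `raw` came from `Gc::as_ptr` and the allocation is still alive.
    /// let back: Gc<'_, i32> = unsafe { Gc::from_ptr(raw) };
    /// assert!(Gc::ptr_eq(gc, back));
    /// ```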
    #[inline]
    pub unsafe fn from_ptr(ptr: *const T) -> Gc<'gc, T> {
        // Compute the (negative) byte offset from the `value` field back to the start of the
        // containing `GcBoxInner<T>`.
        let header_offset = {
            let base = mem::MaybeUninit::<GcBoxInner<T>>::uninit();
            let base_ptr = base.as_ptr();
            let val_ptr = ptr::addr_of!((*base_ptr).value);
            (base_ptr as isize) - (val_ptr as isize)
        };
        // Walk back from the value pointer to the containing box.
        let ptr = (ptr as *mut T)
            .cast::<u8>()
            .offset(header_offset)
            .cast::<GcBoxInner<T>>();
        Gc {
            ptr: NonNull::new_unchecked(ptr),
            _invariant: PhantomData,
        }
    }
}

impl<'gc, T: Unlock + ?Sized + 'gc> Gc<'gc, T> {
    /// Shorthand for [`Gc::write`]`(mc, self).`[`unlock()`](Write::unlock).
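    ///
    /// A usage sketch (illustrative only; assumes the crate's `lock::Lock` cell type and an
    /// `mc: &Mutation<'gc>` from an arena callback):
    ///
    /// ```ignore
    /// use gc_arena::lock::Lock;
    ///
    /// let gc = Gc::new(mc, Lock::new(1));
    /// // `unlock` triggers the write barrier and exposes the `Cell`-like API of `Lock`.
    /// gc.unlock(mc).set(2);
    /// assert_eq!(gc.get(), 2);
    /// ```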
    #[inline]
    pub fn unlock(self, mc: &Mutation<'gc>) -> &'gc T::Unlocked {
        Gc::write(mc, self);
        // SAFETY: see doc-comment.
        unsafe { self.as_ref().unlock_unchecked() }
    }
}

impl<'gc, T: ?Sized + 'gc> Gc<'gc, T> {
    /// Obtains a long-lived reference to the contents of this `Gc`.
    ///
    /// Unlike `AsRef` or `Deref`, the returned reference isn't bound to the `Gc` itself, and
    /// will stay valid for the entirety of the current arena callback.
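    ///
    /// A sketch of the difference (illustrative only; assumes `mc: &Mutation<'gc>` from an
    /// arena callback):
    ///
    /// ```ignore
    /// let s = Gc::as_ref(Gc::new(mc, "hello".to_owned()));
    /// // `s` is a `&'gc String` that does not borrow from any local binding, so it remains
    /// // usable for the rest of the arena callback.
    /// assert_eq!(s, "hello");
    /// ```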
    #[inline]
    pub fn as_ref(self: Gc<'gc, T>) -> &'gc T {
        // SAFETY: The returned reference cannot escape the current arena callback, as `&'gc T`
        // never implements `Collect` (unless `'gc` is `'static`, which is impossible here), and
        // so cannot be stored inside the GC root.
        unsafe { &self.ptr.as_ref().value }
    }

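    /// Creates a [`GcWeak`] pointer to this allocation.
    ///
    /// The weak pointer does not keep the allocation alive, and must be upgraded back into a
    /// `Gc` before the value can be accessed again.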
    #[inline]
    pub fn downgrade(this: Gc<'gc, T>) -> GcWeak<'gc, T> {
        GcWeak { inner: this }
    }

    /// Triggers a write barrier on this `Gc`, allowing for further safe mutation.
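    ///
    /// A usage sketch (illustrative only; assumes the crate's `lock::RefLock` cell type and an
    /// `mc: &Mutation<'gc>` from an arena callback):
    ///
    /// ```ignore
    /// use gc_arena::lock::RefLock;
    ///
    /// let gc = Gc::new(mc, RefLock::new(Vec::new()));
    /// // The returned `&Write<RefLock<_>>` proves the barrier was triggered, so the cell
    /// // can be safely unlocked and mutated.
    /// Gc::write(mc, gc).unlock().borrow_mut().push(1);
    /// ```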
    #[inline]
    pub fn write(mc: &Mutation<'gc>, gc: Self) -> &'gc Write<T> {
        unsafe {
            mc.write_barrier(GcBox::erase(gc.ptr));
            // SAFETY: the write barrier stays valid until the end of the current callback.
            Write::assume(gc.as_ref())
        }
    }

    /// Returns true if two `Gc`s point to the same allocation.
    ///
    /// Similarly to `Rc::ptr_eq` and `Arc::ptr_eq`, this function ignores the metadata of `dyn`
    /// pointers.
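    ///
    /// A quick sketch (illustrative only; assumes `mc: &Mutation<'gc>` from an arena callback):
    ///
    /// ```ignore
    /// let a = Gc::new(mc, 5);
    /// let b = a;
    /// let c = Gc::new(mc, 5);
    /// assert!(Gc::ptr_eq(a, b)); // same allocation
    /// assert!(!Gc::ptr_eq(a, c)); // equal values, but different allocations
    /// ```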
    #[inline]
    pub fn ptr_eq(this: Gc<'gc, T>, other: Gc<'gc, T>) -> bool {
        // TODO: Equivalent to `core::ptr::addr_eq`:
        // https://github.com/rust-lang/rust/issues/116324
        Gc::as_ptr(this) as *const () == Gc::as_ptr(other) as *const ()
    }

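    /// Returns a raw pointer to the value held by this `Gc`.
    ///
    /// The pointer is only valid for as long as the allocation stays alive, and can be turned
    /// back into a `Gc` with [`Gc::from_ptr`].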
    #[inline]
    pub fn as_ptr(gc: Gc<'gc, T>) -> *const T {
        unsafe {
            let inner = gc.ptr.as_ptr();
            core::ptr::addr_of!((*inner).value) as *const T
        }
    }

    /// Returns true when a pointer is *dead* during finalization. This is equivalent to
    /// `GcWeak::is_dead` for strong pointers.
    ///
    /// Any strong pointer reachable from the root will never be dead, but a strong pointer
    /// that is reachable only through weak pointers can be.
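    ///
    /// A finalization sketch (illustrative only; assumes `fc: &Finalization<'gc>` from the
    /// arena's finalization phase and a previously stashed `gc: Gc<'gc, T>`):
    ///
    /// ```ignore
    /// if Gc::is_dead(fc, gc) {
    ///     // `gc` is reachable only through weak pointers and will be freed at the end of
    ///     // this collection cycle unless it is resurrected.
    /// }
    /// ```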
    #[inline]
    pub fn is_dead(_: &Finalization<'gc>, gc: Gc<'gc, T>) -> bool {
        let inner = unsafe { gc.ptr.as_ref() };
        matches!(inner.header.color(), GcColor::White | GcColor::WhiteWeak)
    }

    /// Manually marks a dead `Gc` pointer as reachable and keeps it alive.
    ///
    /// Equivalent to `GcWeak::resurrect` for strong pointers. Manually marks this pointer and
    /// all transitively held pointers as reachable, thus keeping them from being dropped this
    /// collection cycle.
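    ///
    /// A sketch pairing this with [`Gc::is_dead`] (illustrative only; same assumptions as
    /// there):
    ///
    /// ```ignore
    /// if Gc::is_dead(fc, gc) {
    ///     // Keep the allocation, and everything it transitively holds, alive through this
    ///     // collection cycle.
    ///     Gc::resurrect(fc, gc);
    /// }
    /// ```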
    #[inline]
    pub fn resurrect(fc: &Finalization<'gc>, gc: Gc<'gc, T>) {
        unsafe {
            fc.resurrect(GcBox::erase(gc.ptr));
        }
    }
}

impl<'gc, T: PartialEq + ?Sized + 'gc> PartialEq for Gc<'gc, T> {
    fn eq(&self, other: &Self) -> bool {
        (**self).eq(other)
    }

    fn ne(&self, other: &Self) -> bool {
        (**self).ne(other)
    }
}

impl<'gc, T: Eq + ?Sized + 'gc> Eq for Gc<'gc, T> {}

impl<'gc, T: PartialOrd + ?Sized + 'gc> PartialOrd for Gc<'gc, T> {
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        (**self).partial_cmp(other)
    }

    fn le(&self, other: &Self) -> bool {
        (**self).le(other)
    }

    fn lt(&self, other: &Self) -> bool {
        (**self).lt(other)
    }

    fn ge(&self, other: &Self) -> bool {
        (**self).ge(other)
    }

    fn gt(&self, other: &Self) -> bool {
        (**self).gt(other)
    }
}

impl<'gc, T: Ord + ?Sized + 'gc> Ord for Gc<'gc, T> {
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        (**self).cmp(other)
    }
}

impl<'gc, T: Hash + ?Sized + 'gc> Hash for Gc<'gc, T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        (**self).hash(state)
    }
}