// aiscript_arena/gc.rs

1use core::{
2    alloc::Layout,
3    borrow::Borrow,
4    fmt::{self, Debug, Display, Pointer},
5    hash::{Hash, Hasher},
6    marker::PhantomData,
7    ops::Deref,
8    ptr::NonNull,
9};
10
11use crate::{
12    Finalization,
13    barrier::{Unlock, Write},
14    collect::Collect,
15    context::{Collection, Mutation},
16    gc_weak::GcWeak,
17    static_collect::Static,
18    types::{GcBox, GcBoxHeader, GcBoxInner, GcColor, Invariant},
19};
20
21/// A garbage collected pointer to a type T. Implements Copy, and is implemented as a plain machine
22/// pointer. You can only allocate `Gc` pointers through a `&Mutation<'gc>` inside an arena type,
23/// and through "generativity" such `Gc` pointers may not escape the arena they were born in or
24/// be stored inside TLS. This, combined with correct `Collect` implementations, means that `Gc`
25/// pointers will never be dangling and are always safe to access.
pub struct Gc<'gc, T: ?Sized + 'gc> {
    // Pointer to the combined header + value allocation; never null while this
    // handle is live.
    pub(crate) ptr: NonNull<GcBoxInner<T>>,
    // Invariant lifetime marker ("generativity") that pins this pointer to the
    // arena it was allocated in, preventing it from escaping.
    pub(crate) _invariant: Invariant<'gc>,
}
30
31impl<'gc, T: Debug + ?Sized + 'gc> Debug for Gc<'gc, T> {
32    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
33        fmt::Debug::fmt(&**self, fmt)
34    }
35}
36
37impl<'gc, T: ?Sized + 'gc> Pointer for Gc<'gc, T> {
38    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
39        fmt::Pointer::fmt(&Gc::as_ptr(*self), fmt)
40    }
41}
42
43impl<'gc, T: Display + ?Sized + 'gc> Display for Gc<'gc, T> {
44    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
45        fmt::Display::fmt(&**self, fmt)
46    }
47}
48
49impl<'gc, T: ?Sized + 'gc> Copy for Gc<'gc, T> {}
50
51impl<'gc, T: ?Sized + 'gc> Clone for Gc<'gc, T> {
52    #[inline]
53    fn clone(&self) -> Gc<'gc, T> {
54        *self
55    }
56}
57
unsafe impl<'gc, T: ?Sized + 'gc> Collect for Gc<'gc, T> {
    #[inline]
    fn trace(&self, cc: &Collection) {
        // Report the pointed-to allocation as reachable by handing the
        // type-erased box to the collector.
        unsafe {
            cc.trace(GcBox::erase(self.ptr));
        }
    }
}
66
impl<'gc, T: ?Sized + 'gc> Deref for Gc<'gc, T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        // SAFETY: a live `Gc` always points at a valid `GcBoxInner<T>` (see the
        // type-level docs); the returned reference is bound to `&self`, so it
        // cannot outlive this handle. For a `'gc`-long borrow, use `Gc::as_ref`.
        unsafe { &self.ptr.as_ref().value }
    }
}
75
impl<'gc, T: ?Sized + 'gc> AsRef<T> for Gc<'gc, T> {
    #[inline]
    fn as_ref(&self) -> &T {
        // SAFETY: same invariant as `Deref`: the pointer is valid while the
        // handle is live, and the borrow is tied to `&self`.
        unsafe { &self.ptr.as_ref().value }
    }
}
82
impl<'gc, T: ?Sized + 'gc> Borrow<T> for Gc<'gc, T> {
    #[inline]
    fn borrow(&self) -> &T {
        // SAFETY: same invariant as `Deref`: the pointer is valid while the
        // handle is live, and the borrow is tied to `&self`.
        unsafe { &self.ptr.as_ref().value }
    }
}
89
impl<'gc, T: Collect + 'gc> Gc<'gc, T> {
    /// Allocates `t` inside the arena served by `mc` and returns a `Gc` pointer
    /// to the new allocation.
    #[inline]
    pub fn new(mc: &Mutation<'gc>, t: T) -> Gc<'gc, T> {
        Gc {
            ptr: mc.allocate(t),
            _invariant: PhantomData,
        }
    }
}
99
impl<'gc, T: 'static> Gc<'gc, T> {
    /// Create a new `Gc` pointer from a static value.
    ///
    /// This method does not require that the type `T` implement `Collect`. This uses [`Static`]
    /// internally to automatically provide a trivial `Collect` impl and is equivalent to the
    /// following code:
    ///
    /// ```rust
    /// # use aiscript_arena::{Gc, Static};
    /// # fn main() {
    /// # aiscript_arena::arena::rootless_mutate(|mc| {
    /// struct MyStaticStruct;
    /// let p = Gc::new(mc, Static(MyStaticStruct));
    /// // This is allowed because `Static` is `#[repr(transparent)]`
    /// let p: Gc<MyStaticStruct> = unsafe { Gc::cast(p) };
    /// # });
    /// # }
    /// ```
    #[inline]
    pub fn new_static(mc: &Mutation<'gc>, t: T) -> Gc<'gc, T> {
        let p = Gc::new(mc, Static(t));
        // SAFETY: `Static` is `#[repr(transparent)]`, so `Gc<Static<T>>` and
        // `Gc<T>` point at identically-laid-out values.
        unsafe { Gc::cast::<T>(p) }
    }
}
125
impl<'gc, T: ?Sized + 'gc> Gc<'gc, T> {
    /// Cast a `Gc` pointer to a different type.
    ///
    /// SAFETY:
    /// It must be valid to dereference a `*mut U` that has come from casting a `*mut T`.
    #[inline]
    pub unsafe fn cast<U: 'gc>(this: Gc<'gc, T>) -> Gc<'gc, U> {
        Gc {
            ptr: NonNull::cast(this.ptr),
            _invariant: PhantomData,
        }
    }

    /// Cast a `Gc` to the unit type.
    ///
    /// This is exactly the same as `unsafe { Gc::cast::<()>(this) }`, but we can provide this
    /// method safely because it is always safe to dereference a `*mut ()` that has come from
    /// casting a `*mut T`.
    #[inline]
    pub fn erase(this: Gc<'gc, T>) -> Gc<'gc, ()> {
        unsafe { Gc::cast(this) }
    }

    /// Retrieve a `Gc` from a raw pointer obtained from `Gc::as_ptr`
    ///
    /// SAFETY:
    /// The provided pointer must have been obtained from `Gc::as_ptr`, and the pointer must not
    /// have been collected yet.
    #[inline]
    pub unsafe fn from_ptr(ptr: *const T) -> Gc<'gc, T> {
        unsafe {
            // Recompute the value's byte offset within its box: extending the
            // header's layout by the value's layout yields the padded offset at
            // which the value is placed. NOTE(review): this assumes
            // `GcBoxInner<T>` is laid out as a `GcBoxHeader` followed by the
            // value — confirm against the definition in `types.rs`.
            let layout = Layout::new::<GcBoxHeader>();
            let (_, header_offset) = layout.extend(Layout::for_value(&*ptr)).unwrap();
            // Negate the offset to step *backwards* from the value pointer to
            // the start of the box.
            let header_offset = -(header_offset as isize);
            // The cast through `*mut T` -> `*mut GcBoxInner<T>` preserves any
            // unsized metadata (e.g. slice length / vtable) carried by `ptr`.
            let ptr = (ptr as *mut T).byte_offset(header_offset) as *mut GcBoxInner<T>;
            Gc {
                ptr: NonNull::new_unchecked(ptr),
                _invariant: PhantomData,
            }
        }
    }
}
168
impl<'gc, T: Unlock + ?Sized + 'gc> Gc<'gc, T> {
    /// Shorthand for [`Gc::write`]`(mc, self).`[`unlock()`](Write::unlock).
    #[inline]
    pub fn unlock(self, mc: &Mutation<'gc>) -> &'gc T::Unlocked {
        // Trigger the write barrier; the `Write` wrapper it returns is discarded
        // because we immediately take unlocked access below.
        Gc::write(mc, self);
        // SAFETY: see doc-comment.
        unsafe { self.as_ref().unlock_unchecked() }
    }
}
178
impl<'gc, T: ?Sized + 'gc> Gc<'gc, T> {
    /// Obtains a long-lived reference to the contents of this `Gc`.
    ///
    /// Unlike `AsRef` or `Deref`, the returned reference isn't bound to the `Gc` itself, and
    /// will stay valid for the entirety of the current arena callback.
    #[inline]
    pub fn as_ref(self: Gc<'gc, T>) -> &'gc T {
        // SAFETY: The returned reference cannot escape the current arena callback, as `&'gc T`
        // never implements `Collect` (unless `'gc` is `'static`, which is impossible here), and
        // so cannot be stored inside the GC root.
        unsafe { &self.ptr.as_ref().value }
    }

    /// Creates a [`GcWeak`] pointer referring to the same allocation as `this`.
    #[inline]
    pub fn downgrade(this: Gc<'gc, T>) -> GcWeak<'gc, T> {
        GcWeak { inner: this }
    }

    /// Triggers a write barrier on this `Gc`, allowing for safe mutation.
    ///
    /// This triggers an unrestricted *backwards* write barrier on this pointer, meaning that it is
    /// guaranteed that this pointer can safely adopt *any* arbitrary child pointers (until the next
    /// time that collection is triggered).
    ///
    /// It returns a reference to the inner `T` wrapped in a `Write` marker to allow for
    /// unrestricted mutation on the held type or any of its directly held fields.
    #[inline]
    pub fn write(mc: &Mutation<'gc>, gc: Self) -> &'gc Write<T> {
        unsafe {
            mc.backward_barrier(Gc::erase(gc), None);
            // SAFETY: the write barrier stays valid until the end of the current callback.
            Write::assume(gc.as_ref())
        }
    }

    /// Returns true if two `Gc`s point to the same allocation.
    ///
    /// Similarly to `Rc::ptr_eq` and `Arc::ptr_eq`, this function ignores the metadata of `dyn`
    /// pointers.
    #[inline]
    pub fn ptr_eq(this: Gc<'gc, T>, other: Gc<'gc, T>) -> bool {
        // TODO: Equivalent to `core::ptr::addr_eq`:
        // https://github.com/rust-lang/rust/issues/116324
        core::ptr::eq(Gc::as_ptr(this), Gc::as_ptr(other))
    }

    /// Returns a raw pointer to the held value.
    ///
    /// The pointer is only valid for as long as the allocation has not been
    /// collected; it can be turned back into a `Gc` with [`Gc::from_ptr`].
    #[inline]
    pub fn as_ptr(gc: Gc<'gc, T>) -> *const T {
        unsafe {
            // Project from the box pointer to its `value` field without creating
            // an intermediate reference.
            let inner = gc.ptr.as_ptr();
            core::ptr::addr_of!((*inner).value) as *const T
        }
    }

    /// Returns true when a pointer is *dead* during finalization. This is equivalent to
    /// `GcWeak::is_dead` for strong pointers.
    ///
    /// Any strong pointer reachable from the root will never be dead, BUT there can be strong
    /// pointers reachable only through other weak pointers that can be dead.
    #[inline]
    pub fn is_dead(_: &Finalization<'gc>, gc: Gc<'gc, T>) -> bool {
        let inner = unsafe { gc.ptr.as_ref() };
        // "Dead" means the allocation is still colored white at finalization
        // time, i.e. it was not marked reachable this cycle.
        matches!(inner.header.color(), GcColor::White | GcColor::WhiteWeak)
    }

    /// Manually marks a dead `Gc` pointer as reachable and keeps it alive.
    ///
    /// Equivalent to `GcWeak::resurrect` for strong pointers. Manually marks this pointer and
    /// all transitively held pointers as reachable, thus keeping them from being dropped this
    /// collection cycle.
    #[inline]
    pub fn resurrect(fc: &Finalization<'gc>, gc: Gc<'gc, T>) {
        unsafe {
            fc.resurrect(GcBox::erase(gc.ptr));
        }
    }
}
256
257impl<'gc, T: PartialEq + ?Sized + 'gc> PartialEq for Gc<'gc, T> {
258    fn eq(&self, other: &Self) -> bool {
259        (**self).eq(other)
260    }
261
262    fn ne(&self, other: &Self) -> bool {
263        (**self).ne(other)
264    }
265}
266
267impl<'gc, T: Eq + ?Sized + 'gc> Eq for Gc<'gc, T> {}
268
269impl<'gc, T: PartialOrd + ?Sized + 'gc> PartialOrd for Gc<'gc, T> {
270    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
271        (**self).partial_cmp(other)
272    }
273
274    fn le(&self, other: &Self) -> bool {
275        (**self).le(other)
276    }
277
278    fn lt(&self, other: &Self) -> bool {
279        (**self).lt(other)
280    }
281
282    fn ge(&self, other: &Self) -> bool {
283        (**self).ge(other)
284    }
285
286    fn gt(&self, other: &Self) -> bool {
287        (**self).gt(other)
288    }
289}
290
291impl<'gc, T: Ord + ?Sized + 'gc> Ord for Gc<'gc, T> {
292    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
293        (**self).cmp(other)
294    }
295}
296
297impl<'gc, T: Hash + ?Sized + 'gc> Hash for Gc<'gc, T> {
298    fn hash<H: Hasher>(&self, state: &mut H) {
299        (**self).hash(state)
300    }
301}