aiscript_arena/
dynamic_roots.rs

1use core::{cell::RefCell, fmt, mem};
2
3use alloc::{
4    rc::{Rc, Weak},
5    vec::Vec,
6};
7
8use crate::{Collect, Gc, Mutation, Rootable, arena::Root, metrics::Metrics};
9
/// A way of registering GC roots dynamically.
///
/// Use this type as (a part of) an [`Arena`](crate::Arena) root to enable dynamic rooting of
/// GC'd objects through [`DynamicRoot`] handles.
//
// SAFETY: This allows us to convert `Gc<'gc>` pointers to `Gc<'static>` and back, and this is VERY
// sketchy. We know it is safe because:
//   1) The `DynamicRootSet` must be created inside an arena and is branded with an invariant `'gc`
//      lifetime.
//   2) When stashing a `Gc<'gc, R>` pointer, the invariant `'gc` lifetimes must match.
//   3) When fetching, we make sure that the `DynamicRoot` `slots` field points to the same object
//      as the `slots` field in the `DynamicRootSet`. We never drop this `Rc` or change the `Weak`
//      held in any `DynamicRoot`, so if they both point to the same object, the original `Gc`
//      pointer *must* have originally been stashed using *this* set. Therefore, it is safe to cast
//      it back to whatever our current `'gc` lifetime is.
//
// The set itself is just a `Gc` to the shared interior, so it is a cheap, `Copy` handle.
#[derive(Copy, Clone)]
pub struct DynamicRootSet<'gc>(Gc<'gc, Inner<'gc>>);
27
28unsafe impl Collect for DynamicRootSet<'_> {
29    fn trace(&self, cc: &crate::Collection) {
30        self.0.trace(cc);
31    }
32}
33
impl<'gc> DynamicRootSet<'gc> {
    /// Creates a new, empty root set.
    pub fn new(mc: &Mutation<'gc>) -> Self {
        DynamicRootSet(Gc::new(
            mc,
            Inner {
                slots: Rc::new(RefCell::new(Slots::new(mc.metrics().clone()))),
            },
        ))
    }

    /// Puts a root inside this root set.
    ///
    /// The returned handle can be freely stored outside the current arena, and will keep the root
    /// alive across garbage collections.
    pub fn stash<R: for<'a> Rootable<'a>>(
        &self,
        mc: &Mutation<'gc>,
        root: Gc<'gc, Root<'gc, R>>,
    ) -> DynamicRoot<R> {
        // SAFETY: We are adopting a new `Gc` pointer, so we must invoke a write barrier.
        mc.backward_barrier(Gc::erase(self.0), Some(Gc::erase(root)));

        // The slot table stores the pointer type-erased to `Gc<'gc, ()>`; the typed pointer is
        // carried separately inside the returned handle.
        let mut slots = self.0.slots.borrow_mut();
        let index = slots.add(unsafe { Gc::cast(root) });

        // SAFETY: erasing `'gc` to `'static` here is sound because `fetch` / `try_fetch` only
        // re-brand the pointer after `contains` has proven the handle belongs to this set (see
        // the safety discussion on `DynamicRootSet`). The `Weak` is transmuted the same way so the
        // handle can be a `'static` type.
        let ptr =
            unsafe { mem::transmute::<Gc<'gc, Root<'gc, R>>, Gc<'static, Root<'static, R>>>(root) };
        let slots = unsafe {
            mem::transmute::<Weak<RefCell<Slots<'gc>>>, Weak<RefCell<Slots<'static>>>>(
                Rc::downgrade(&self.0.slots),
            )
        };

        DynamicRoot { ptr, slots, index }
    }

    /// Gets immutable access to the given root.
    ///
    /// # Panics
    ///
    /// Panics if the handle doesn't belong to this root set. For the non-panicking variant, use
    /// [`try_fetch`](Self::try_fetch).
    #[inline]
    pub fn fetch<R: for<'r> Rootable<'r>>(&self, root: &DynamicRoot<R>) -> Gc<'gc, Root<'gc, R>> {
        if self.contains(root) {
            // SAFETY: `contains` proved the pointer was stashed through this very set, so
            // re-branding it with our `'gc` lifetime is sound.
            unsafe {
                mem::transmute::<Gc<'static, Root<'static, R>>, Gc<'gc, Root<'gc, R>>>(root.ptr)
            }
        } else {
            panic!("mismatched root set")
        }
    }

    /// Gets immutable access to the given root, or returns an error if the handle doesn't belong
    /// to this root set.
    #[inline]
    pub fn try_fetch<R: for<'r> Rootable<'r>>(
        &self,
        root: &DynamicRoot<R>,
    ) -> Result<Gc<'gc, Root<'gc, R>>, MismatchedRootSet> {
        if self.contains(root) {
            // SAFETY: same argument as in `fetch` — identity with this set justifies the
            // lifetime re-branding.
            Ok(unsafe {
                mem::transmute::<Gc<'static, Root<'static, R>>, Gc<'gc, Root<'gc, R>>>(root.ptr)
            })
        } else {
            Err(MismatchedRootSet(()))
        }
    }

    /// Tests if the given handle belongs to this root set.
    #[inline]
    pub fn contains<R: for<'r> Rootable<'r>>(&self, root: &DynamicRoot<R>) -> bool {
        // NOTE: We are making an assumption about how `Weak` works that is currently true and
        // surely MUST continue to be true, but is possibly under-specified in the stdlib. We are
        // assuming that if the `Weak` pointer held in the given `DynamicRoot` points to a *dropped*
        // root set, that `Weak::as_ptr` will return a pointer that cannot possibly belong to a
        // live `Rc`.
        //
        // The transmute only normalizes the (erased) lifetime parameter so the two raw pointers
        // have the same type and can be compared.
        let ours = unsafe {
            mem::transmute::<*const RefCell<Slots<'gc>>, *const RefCell<Slots<'static>>>(
                Rc::as_ptr(&self.0.slots),
            )
        };
        let theirs = Weak::as_ptr(&root.slots);
        ours == theirs
    }
}
121
/// Handle to a `Gc` pointer held inside a [`DynamicRootSet`] which is `'static` and can be held
/// outside of the arena.
pub struct DynamicRoot<R: for<'gc> Rootable<'gc>> {
    // The stashed pointer with its `'gc` brand erased to `'static`; it is only ever re-branded
    // after an identity check against the owning set (see `DynamicRootSet::contains`).
    ptr: Gc<'static, Root<'static, R>>,
    // Weak back-reference to the owning set's slot table, used both for the identity check and to
    // maintain the slot refcount on clone/drop without keeping the set alive.
    slots: Weak<RefCell<Slots<'static>>>,
    // Index of this root's entry in the slot table.
    index: Index,
}
129
130impl<R: for<'gc> Rootable<'gc>> Drop for DynamicRoot<R> {
131    fn drop(&mut self) {
132        if let Some(slots) = self.slots.upgrade() {
133            slots.borrow_mut().dec(self.index);
134        }
135    }
136}
137
138impl<R: for<'gc> Rootable<'gc>> Clone for DynamicRoot<R> {
139    fn clone(&self) -> Self {
140        if let Some(slots) = self.slots.upgrade() {
141            slots.borrow_mut().inc(self.index);
142        }
143
144        Self {
145            ptr: self.ptr,
146            slots: self.slots.clone(),
147            index: self.index,
148        }
149    }
150}
151
impl<R: for<'gc> Rootable<'gc>> DynamicRoot<R> {
    /// Get a pointer to the held object.
    ///
    /// This returns [`Gc::as_ptr`] for the [`Gc`] provided when the `DynamicRoot` is stashed.
    ///
    /// # Safety
    ///
    /// It is possible to use this to reconstruct the original `Gc` pointer by calling the unsafe
    /// [`Gc::from_ptr`], but this is incredibly dangerous!
    ///
    /// First, if the [`DynamicRootSet`] in which the `DynamicRoot` was stashed has been collected,
    /// then either the returned pointer or other transitive `Gc` pointers objects may be dangling.
    /// The parent `DynamicRootSet` *must* still be uncollected in order to do this soundly.
    ///
    /// Second, the `'gc` lifetime returned here is unbound, so it is meaningless and can allow
    /// improper mixing of objects across arenas. The returned `'gc` lifetime must be bound to only
    /// the arena that holds the parent `DynamicRootSet`.
    #[inline]
    pub fn as_ptr<'gc>(&self) -> *const Root<'gc, R> {
        // SAFETY: only the lifetime brand changes here (`&self.ptr` deref-coerces to
        // `&Root<'static, R>`); the caller must uphold the contract documented above before
        // dereferencing the result.
        unsafe { mem::transmute::<&Root<'static, R>, &Root<'gc, R>>(&self.ptr) as *const _ }
    }
}
174
/// Error returned when trying to fetch a [`DynamicRoot`] from the wrong [`DynamicRootSet`].
#[derive(Debug)]
pub struct MismatchedRootSet(());

impl fmt::Display for MismatchedRootSet {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "mismatched root set")
    }
}

#[cfg(feature = "std")]
impl std::error::Error for MismatchedRootSet {}
187
// Shared interior of a `DynamicRootSet`: the slot table lives behind an `Rc` so that
// `DynamicRoot` handles can hold a `Weak` back-reference for identity checks and refcount
// maintenance (see the safety discussion on `DynamicRootSet`).
struct Inner<'gc> {
    slots: Rc<RefCell<Slots<'gc>>>,
}
191
192unsafe impl Collect for Inner<'_> {
193    fn trace(&self, cc: &crate::Collection) {
194        let slots = self.slots.borrow();
195        slots.trace(cc);
196    }
197}
198
// Index of an entry in the `Slots` table.
type Index = usize;

// By avoiding Option<usize>, `Slot` can go from 24 bytes to 16.
//
// usize::MAX can never be a valid index without using more than `usize::MAX` memory in the slots
// vec, which is impossible.
const NULL_INDEX: Index = usize::MAX;
206
// A single entry in the slot table: either a link in the intrusive vacant-slot free list, or a
// live type-erased root together with its handle refcount.
enum Slot<'gc> {
    // Unoccupied; `next_free` is the index of the next vacant slot, or `NULL_INDEX` at the end of
    // the free list.
    Vacant { next_free: Index },
    // Occupied; a `ref_count` of N means N + 1 live `DynamicRoot` handles (see `Slots::add`).
    Occupied { root: Gc<'gc, ()>, ref_count: usize },
}
211
212unsafe impl Collect for Slot<'_> {
213    fn trace(&self, cc: &crate::Collection) {
214        match self {
215            Slot::Vacant { .. } => {}
216            Slot::Occupied { root, .. } => root.trace(cc),
217        }
218    }
219}
220
// Slab-style storage for dynamically stashed roots, threaded with an intrusive free list of
// vacant entries.
struct Slots<'gc> {
    // Used to report the vector's external (non-GC-heap) memory usage to the collector.
    metrics: Metrics,
    slots: Vec<Slot<'gc>>,
    // Head of the vacant-slot free list, or `NULL_INDEX` when no slot is vacant.
    next_free: Index,
}
226
227impl Drop for Slots<'_> {
228    fn drop(&mut self) {
229        self.metrics
230            .mark_external_deallocation(self.slots.capacity() * mem::size_of::<Slot>());
231    }
232}
233
// SAFETY: Delegates to the `Collect` impl for the slot vector, which traces every slot and thus
// every occupied root.
unsafe impl Collect for Slots<'_> {
    fn trace(&self, cc: &crate::Collection) {
        self.slots.trace(cc);
    }
}
239
240impl<'gc> Slots<'gc> {
241    fn new(metrics: Metrics) -> Self {
242        Self {
243            metrics,
244            slots: Vec::new(),
245            next_free: NULL_INDEX,
246        }
247    }
248
249    fn add(&mut self, p: Gc<'gc, ()>) -> Index {
250        // Occupied slot refcount starts at 0. A refcount of 0 and a set ptr implies that there is
251        // *one* live reference.
252
253        if self.next_free != NULL_INDEX {
254            let idx = self.next_free;
255            let slot = &mut self.slots[idx];
256            match *slot {
257                Slot::Vacant { next_free } => {
258                    self.next_free = next_free;
259                }
260                Slot::Occupied { .. } => panic!("free slot linked list corrupted"),
261            }
262            *slot = Slot::Occupied {
263                root: p,
264                ref_count: 0,
265            };
266            idx
267        } else {
268            let idx = self.slots.len();
269
270            let old_capacity = self.slots.capacity();
271            self.slots.push(Slot::Occupied {
272                root: p,
273                ref_count: 0,
274            });
275            let new_capacity = self.slots.capacity();
276
277            debug_assert!(new_capacity >= old_capacity);
278            if new_capacity > old_capacity {
279                self.metrics.mark_external_allocation(
280                    (new_capacity - old_capacity) * mem::size_of::<Slot>(),
281                );
282            }
283
284            idx
285        }
286    }
287
288    fn inc(&mut self, idx: Index) {
289        match &mut self.slots[idx] {
290            Slot::Occupied { ref_count, .. } => {
291                *ref_count = ref_count
292                    .checked_add(1)
293                    .expect("DynamicRoot refcount overflow!");
294            }
295            Slot::Vacant { .. } => panic!("taken slot has been improperly freed"),
296        }
297    }
298
299    fn dec(&mut self, idx: Index) {
300        let slot = &mut self.slots[idx];
301        match slot {
302            Slot::Occupied { ref_count, .. } => {
303                if *ref_count == 0 {
304                    *slot = Slot::Vacant {
305                        next_free: self.next_free,
306                    };
307                    self.next_free = idx;
308                } else {
309                    *ref_count -= 1;
310                }
311            }
312            Slot::Vacant { .. } => panic!("taken slot has been improperly freed"),
313        }
314    }
315}