// ghost_gc/context.rs

1use core::{alloc::Layout, ptr::Pointee};
2use std::{
3    alloc::{Allocator, Global},
4    cell::{Cell, RefCell},
5    ptr::NonNull,
6};
7
8use crate::{
9    gc_box::{Colour, Erased, GcBox, GcInner},
10    Collect, Invariant,
11};
12
/// Handle given to code that mutates the GC heap (e.g. allocating new objects).
///
/// `#[repr(transparent)]` guarantees `Mutation<'b>` has the same layout as its
/// `Context<dyn Allocator>` field (`repr(transparent)` requires `Invariant<'b>` to be
/// zero-sized); `Mutation::new` relies on this to transmute `&Context` into `&Mutation`.
/// The `Invariant<'b>` brand pins the `'b` lifetime invariantly.
#[repr(transparent)]
pub struct Mutation<'b>(Invariant<'b>, Context<dyn Allocator>);
15
impl<'b> Mutation<'b> {
    /// Reinterprets a context reference as a `&Mutation`, erasing the allocator type.
    pub(crate) fn new<A>(ctx: &Context<A>) -> &Mutation<'b>
    where
        A: Allocator,
    {
        // Unsizing coercion: `&Context<A>` -> `&Context<dyn Allocator>` (wide pointer).
        let ctx: &Context<dyn Allocator> = ctx;

        // Safety: `Mutation` is a transparent wrapper around `Context<dyn Allocator>`.
        unsafe { core::mem::transmute::<&Context<dyn Allocator>, &Mutation<'b>>(ctx) }
    }

    /// The underlying collection context this mutation handle wraps.
    pub(crate) fn context(&self) -> &Context<dyn Allocator> {
        &self.1
    }

    /// Allocates a GC box for a `T` described by `meta` and `layout`,
    /// delegating to [`Context::allocate`].
    pub(crate) fn allocate<T>(&self, meta: <T as Pointee>::Metadata, layout: Layout) -> GcBox<T>
    where
        T: Collect,
    {
        self.context().allocate(meta, layout)
    }
}
38
39impl core::fmt::Debug for Mutation<'_> {
40    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
41        write!(f, "Mutation")
42    }
43}
44
/// Handle given to tracing code during the mark phase.
///
/// `#[repr(transparent)]` guarantees `Collector` has the same layout as its
/// `Context<dyn Allocator>` field; `Collector::new` relies on this for its transmute.
#[repr(transparent)]
pub struct Collector(Context<dyn Allocator>);
47
impl Collector {
    /// Reinterprets a context reference as a `&Collector`, erasing the allocator type.
    pub(crate) fn new<A>(ctx: &Context<A>) -> &Collector
    where
        A: Allocator,
    {
        // Unsizing coercion: `&Context<A>` -> `&Context<dyn Allocator>` (wide pointer).
        let ctx: &Context<dyn Allocator> = ctx;

        // Safety: `Collector` is a transparent wrapper around a `Context<dyn Allocator>`.
        unsafe { core::mem::transmute::<&Context<dyn Allocator>, &Collector>(ctx) }
    }

    /// The underlying collection context this collector handle wraps.
    pub(crate) fn context(&self) -> &Context<dyn Allocator> {
        &self.0
    }
}
63
64impl core::fmt::Debug for Collector {
65    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
66        write!(f, "Collector")
67    }
68}
69
/// Shared state for one garbage-collected heap, parameterised by its allocator.
pub(crate) struct Context<A = Global>
where
    A: Allocator + ?Sized,
{
    /// Objects allocated since the current cycle began; drained into `objects`
    /// when a new cycle starts (see `advance_phase`).
    newly_allocated: RefCell<Vec<GcBox<Erased>>>,
    /// All objects participating in the current cycle; swept in place.
    objects: RefCell<Vec<GcBox<Erased>>>,
    /// Whether the root still needs tracing in the current mark phase.
    trace_root: Cell<bool>,
    /// Head of the intrusive singly linked list of gray boxes awaiting tracing.
    first_gray: Cell<Option<GcBox<Erased>>>,
    /// Current position in the sleep → mark → sweep cycle.
    phase: Cell<CollectionPhase>,
    /// Allocation counter consulted by `Pacing::should_wake`; reset at cycle start.
    cycle_allocations: Cell<usize>,
    /// Byte counter consulted by `Pacing::should_wake`; reset at cycle start.
    cycle_bytes: Cell<usize>,
    /// Default pacing used by `advance_collection`.
    pacing: Pacing,
    /// Backing allocator; last field so `Context<dyn Allocator>` can be unsized.
    alloc: A,
}
84
85impl<A: Allocator> Context<A> {
86    pub(crate) fn new_in(pacing: Pacing, alloc: A) -> Context<A>
87    where
88        A: Allocator + 'static,
89    {
90        Context {
91            newly_allocated: Default::default(),
92            objects: Default::default(),
93            trace_root: Default::default(),
94            first_gray: Default::default(),
95            phase: Default::default(),
96            cycle_allocations: Cell::new(0),
97            cycle_bytes: Cell::new(0),
98            pacing,
99            alloc,
100        }
101    }
102
103    fn trace_next(&self, root: &impl Collect) -> bool {
104        if self.trace_root.get() {
105            root.trace(Collector::new(self));
106            self.set_root_traced();
107
108            true
109        } else if let Some(val) = self.take_next_box() {
110            match val.colour() {
111                Colour::White | Colour::Weak => unreachable!(),
112                Colour::Gray => {
113                    unsafe { val.trace_value(Collector::new(self)) };
114                    unsafe { val.set_colour(Colour::Black) };
115                }
116                Colour::Black => {}
117            }
118
119            true
120        } else {
121            false
122        }
123    }
124
125    fn take_next_box(&self) -> Option<GcBox<Erased>> {
126        let ptr = self.first_gray.get()?;
127        let next = ptr.next_gc();
128        self.first_gray.set(next);
129        Some(ptr)
130    }
131
132    pub fn set_root_untraced(&self) {
133        self.trace_root.set(true);
134    }
135
136    pub fn set_root_traced(&self) {
137        self.trace_root.set(false);
138    }
139
140    pub fn allocations(&self) -> usize {
141        self.objects.borrow().len() + self.newly_allocated.borrow().len()
142    }
143
144    pub fn advance_phase(&self) -> bool {
145        match self.phase.get() {
146            CollectionPhase::Sleep => {
147                self.objects
148                    .borrow_mut()
149                    .append(&mut *self.newly_allocated.borrow_mut());
150
151                self.cycle_allocations.set(0);
152                self.cycle_bytes.set(0);
153
154                self.set_root_untraced();
155
156                for obj in self.objects.borrow().iter() {
157                    unsafe { obj.set_colour(Colour::White) };
158                    obj.set_next(None);
159                }
160
161                self.phase.set(CollectionPhase::Mark);
162
163                false
164            }
165            CollectionPhase::Mark => {
166                self.phase.set(CollectionPhase::Sweep { index: 0 });
167
168                false
169            }
170            CollectionPhase::Sweep { .. } => {
171                self.phase.set(CollectionPhase::Sleep);
172
173                true
174            }
175        }
176    }
177
178    pub fn advance_collection(&self, root: &impl Collect) {
179        self.advance_cycle_by(root, self.pacing);
180    }
181
182    /// Advances the cycle by the given pacing. If the current phase ends, then this function will
183    /// return without making any progress on the next one, regardless of the pacing value.
184    pub fn advance_cycle_by(&self, root: &impl Collect, pacing: Pacing) {
185        match self.phase.get() {
186            CollectionPhase::Sleep => {
187                let allocations = self.cycle_allocations.get();
188                let bytes = self.cycle_bytes.get();
189
190                if pacing.should_wake(allocations, bytes) {
191                    self.advance_phase();
192                }
193            }
194            CollectionPhase::Mark => {
195                let mut marked = 0;
196
197                dbg!(&self.first_gray);
198                while self.trace_next(root) {
199                    marked += 1;
200
201                    dbg!(&self.first_gray);
202
203                    if marked >= self.pacing.mark_stride {
204                        return;
205                    }
206                }
207
208                self.advance_phase();
209            }
210            CollectionPhase::Sweep { index } => {
211                let objects = &mut *self.objects.borrow_mut();
212
213                let mut current = index;
214                let mut end =
215                    std::cmp::min(index.saturating_add(pacing.sweep_stride), objects.len());
216
217                while current < end {
218                    dbg!(&objects, current, end);
219                    let obj = objects[current];
220
221                    match obj.colour() {
222                        Colour::White => {
223                            unsafe { obj.drop_in_place() };
224                            objects.swap_remove(current);
225                            unsafe { self.deallocate(obj) };
226                            end -= 1;
227                            continue;
228                        }
229                        Colour::Gray => unreachable!(),
230                        Colour::Weak => {
231                            unsafe { obj.drop_in_place() };
232                            obj.set_uninit();
233                            current += 1;
234                            continue;
235                        }
236                        Colour::Black => {
237                            current += 1;
238                            continue;
239                        }
240                    }
241                }
242
243                if end == objects.len() {
244                    self.advance_phase();
245                }
246            }
247        }
248    }
249
250    /// Runs the collection cycle until all allocated objects have been marked and swept.
251    pub fn run_full_cycle(&self, root: &impl Collect) {
252        // Only need to reset phase if there are objects to be swept.
253        // Resetting to sleep phase is always safe.
254        if !self.newly_allocated.borrow().is_empty() {
255            self.phase.set(CollectionPhase::Sleep);
256        }
257
258        if self.phase.get() == CollectionPhase::Sleep {
259            self.advance_phase();
260        }
261
262        while self.phase.get() != CollectionPhase::Sleep {
263            dbg!(self.phase.get());
264            self.advance_cycle_by(root, Pacing::MAX_PACE);
265        }
266
267        debug_assert!(matches!(self.phase.get(), CollectionPhase::Sleep { .. }));
268    }
269}
270
271impl<A: Allocator + ?Sized> Context<A> {
272    pub fn push_box(&self, ptr: GcBox<Erased>) {
273        ptr.set_next(self.first_gray.get());
274        self.first_gray.set(Some(ptr));
275    }
276
277    pub fn allocate<T: ?Sized + Collect + Pointee>(
278        &self,
279        meta: T::Metadata,
280        layout: Layout,
281    ) -> GcBox<T> {
282        let Ok(layout) = GcInner::<T>::layout(layout) else {
283            todo!()
284        };
285
286        let ptr = self.alloc.allocate(layout).unwrap();
287
288        let gc = unsafe { GcBox::new(ptr.as_ptr().cast(), meta, layout) };
289
290        self.objects.borrow_mut().push(gc.erase());
291
292        gc
293    }
294
295    pub unsafe fn deallocate(&self, gc: GcBox<Erased>) {
296        let layout = gc.layout();
297
298        let ptr = gc.inner_ptr().cast::<u8>();
299
300        unsafe { self.alloc.deallocate(NonNull::new_unchecked(ptr), layout) };
301    }
302}
303
304impl<A> Drop for Context<A>
305where
306    A: Allocator + ?Sized,
307{
308    fn drop(&mut self) {
309        let newly_allocated: &[GcBox<Erased>] = &self.newly_allocated.borrow();
310        let objects: &[GcBox<Erased>] = &self.objects.borrow();
311
312        for obj in objects.iter().chain(newly_allocated) {
313            unsafe { obj.vtable().drop_in_place(*obj) };
314
315            unsafe { alloc::alloc::dealloc(obj.inner_ptr().cast::<u8>(), obj.layout()) };
316        }
317    }
318}
319
/// Tuning knobs controlling how eagerly the incremental collector runs.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Pacing {
    /// Wake from sleep once at least this many bytes were allocated this cycle
    /// (`None` disables the byte trigger).
    pub trigger_bytes: Option<usize>,
    /// Wake from sleep once at least this many allocations were made this cycle
    /// (`None` disables the count trigger).
    pub trigger_allocations: Option<usize>,
    /// Maximum number of boxes traced per increment during the mark phase.
    pub mark_stride: usize,
    /// Maximum number of objects examined per increment during the sweep phase.
    pub sweep_stride: usize,
}
327
328impl Pacing {
329    /// The maximum possible pace for the garbage collector to run. It will always trigger, and
330    /// never stop tracing.
331    const MAX_PACE: Pacing = Pacing {
332        trigger_bytes: Some(0),
333        trigger_allocations: Some(0),
334        mark_stride: usize::MAX,
335        sweep_stride: usize::MAX,
336    };
337
338    fn should_wake(&self, allocations: usize, bytes: usize) -> bool {
339        self.trigger_allocations.is_some_and(|n| allocations >= n)
340            || self.trigger_bytes.is_some_and(|n| bytes >= n)
341    }
342}
343
impl Default for Pacing {
    /// Moderate default pace: wake after 64 allocations or ~4 KiB, then mark 16 boxes
    /// / sweep 8 objects per increment.
    fn default() -> Self {
        Self {
            // NOTE(review): 4192 looks like a typo for 4096 (one page) — confirm intent.
            trigger_bytes: Some(4192),
            trigger_allocations: Some(64),
            mark_stride: 16,
            sweep_stride: 8,
        }
    }
}
354
/// The phases of one incremental mark-and-sweep cycle.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
enum CollectionPhase {
    /// Idle between cycles; allocation pressure may wake the collector.
    #[default]
    Sleep,
    /// Tracing reachable objects from the root, blackening gray boxes.
    Mark,
    /// Reclaiming unmarked objects; `index` is the position reached in `objects`.
    Sweep {
        index: usize,
    },
}