sphinx/runtime/gc.rs

//! The Sphinx language garbage collector.
//! Most of the "public" API is centered around the `Gc<T>` smart pointer.
//! See the documentation for the [`handle`] module.
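//!
//! A minimal usage sketch (hedged: `Gc::new` and the `roots` value below are
//! hypothetical stand-ins; the actual `Gc` API is defined in [`handle`]):
//!
//! ```ignore
//! let data = Gc::new(my_value); // allocate a traced value on the GC heap
//! gc_collect(&roots);           // collect only if past the allocation threshold
//! gc_force(&roots);             // collect unconditionally
//! ```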

use core::fmt;
use core::ptr::NonNull;
use core::cell::{Cell, RefCell};
use log;

mod trace;
mod ptrmeta;
mod gcbox;
mod handle;
mod weak;

pub use trace::GcTrace;
pub use handle::{Gc, GcWeak};

use gcbox::{GcBox, GcBoxPtr};

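
// All collector state lives in a thread-local: each thread owns its own heap
// of boxes and collects independently, so no cross-thread synchronization is
// needed (and a `Gc` handle is only meaningful on the thread that allocated it).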
thread_local! {
    static GC_STATE: RefCell<GcState> = RefCell::new(GcState::default());
}

/// Traces from `root` and runs a collection cycle, but only if the
/// allocation threshold has been exceeded.
pub fn gc_collect(root: &impl GcTrace) {
    GC_STATE.with(|gc| {
        let mut gc = gc.borrow_mut();
        if gc.should_collect() {
            gc.collect_garbage(root)
        }
    })
}

/// Unconditionally traces from `root` and runs a collection cycle.
pub fn gc_force(root: &impl GcTrace) {
    GC_STATE.with(|gc| {
        let mut gc = gc.borrow_mut();
        gc.collect_garbage(root)
    })
}

struct GcState {
    stats: GcStats,
    config: GcConfig,
    threshold: usize,  // byte count that triggers the next collection
    boxes_start: Option<GcBoxPtr>,  // head of the intrusive allocation list
}

#[derive(Debug)]
struct GcStats {
    allocated: usize,    // estimated bytes currently allocated
    box_count: usize,    // live GcBox count
    cycle_count: usize,  // completed collection cycles
}

struct GcConfig {
    threshold: u16,     // initial collection threshold, in bytes
    pause_factor: u16,  // percent of memory surviving a cycle at which the next cycle starts
}

impl Default for GcConfig {
    fn default() -> Self {
        Self {
            threshold: 512, // small because collection is stop-the-world; raise to ~8 KiB if the collector goes incremental
            pause_factor: 160,
        }
    }
}

impl Default for GcState {
    fn default() -> Self {
        GcState::new(GcConfig::default())
    }
}

impl GcState {
    fn new(config: GcConfig) -> Self {
        let threshold = config.threshold as usize;
        
        Self {
            config,
            threshold,
            
            stats: GcStats {
                allocated: 0,
                box_count: 0,
                cycle_count: 0,
            },
            
            boxes_start: None,
        }
    }
    
    /// True once estimated usage has passed the current threshold.
    #[inline]
    fn should_collect(&self) -> bool {
        self.stats.allocated > self.threshold
    }
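    
    // The allocation list is an intrusive singly linked list threaded through
    // each box's header: `insert` pushes at the head, so the list runs from
    // the newest allocation to the oldest.
    //
    //     boxes_start -> [newest GcBox] -> ... -> [oldest GcBox] -> None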
    
    /// Links a freshly allocated box into the head of the allocation list
    /// and updates the usage statistics.
    fn insert<T>(&mut self, mut gcbox: NonNull<GcBox<T>>) where T: GcTrace + ?Sized {
        unsafe {
            let size = gcbox.as_ref().header().size();
            log::debug!("{:#X} allocate {} bytes", gcbox.as_ptr() as *const () as usize, size);
            
            gcbox.as_mut().header_mut().set_next(self.boxes_start.take());
            self.boxes_start = Some(gcbox.into());
            self.stats.allocated += size;
            self.stats.box_count += 1;
        }
    }
    
    /// Frees the GcBox, yielding its next pointer so the caller can
    /// relink the allocation list around it.
    fn free(&mut self, gcbox: GcBoxPtr) -> Option<GcBoxPtr> {
        let size = unsafe { gcbox.header().size() };
        self.stats.allocated -= size;
        self.stats.box_count -= 1;
        log::debug!("{:#X} free {} bytes", gcbox.as_ptr() as *const () as usize, size);
        
        unsafe { gcbox.free() }
    }
    
    fn collect_garbage(&mut self, root: &impl GcTrace) {
        log::debug!("GC cycle begin ---");
        
        let allocated = self.stats.allocated;
        let box_count = self.stats.box_count;
        log::debug!("{}", self.stats);
        
        // mark phase: trace everything reachable from the root
        root.trace();
        
        // sweep phase: free every box left unmarked
        unsafe { self.sweep(); }
        self.stats.cycle_count = self.stats.cycle_count.wrapping_add(1);
        
        let freed = allocated - self.stats.allocated;
        let dropped = box_count - self.stats.box_count;
        log::debug!("Freed {} bytes ({} allocations)", freed, dropped);
        log::debug!("{}", self.stats);
        
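        // The pause factor scales the next threshold off what survived this
        // cycle: e.g. with the default pause_factor of 160, if 1000 bytes
        // remain live, the next collection triggers once estimated usage
        // passes 1000 * 160 / 100 = 1600 bytes.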
        self.threshold = (self.stats.allocated * self.config.pause_factor as usize) / 100;
        log::debug!("Next collection at {} bytes", self.threshold);
        
        log::debug!("GC cycle end ---");
    }
    
    /// Walks the allocation list, unmarking every surviving box and freeing
    /// every box left unmarked, splicing it out of the list as it goes.
    unsafe fn sweep(&mut self) {
        let _guard = DropGuard::new();
        
        let mut prev_box = None;
        let mut next_box = self.boxes_start;
        while let Some(mut gcbox) = next_box {
            if gcbox.header().is_marked() {
                // survivor: clear the mark for the next cycle and advance
                gcbox.header_mut().set_marked(false);
                
                next_box = gcbox.header().next();
                prev_box.replace(gcbox);
                
            } else {
                // garbage: free the box, then relink its predecessor
                // (or the list head) around it
                next_box = self.free(gcbox);
                if let Some(mut prev_box) = prev_box {
                    prev_box.header_mut().set_next(next_box);
                } else {
                    self.boxes_start = next_box;
                }
                
            }
        }
    }
}

impl Drop for GcState {
    fn drop(&mut self) {
        // nothing is marked at this point, so a sweep frees every remaining box
        unsafe { self.sweep() }
    }
}


// Whether the thread is currently in the sweep phase of garbage collection.
thread_local!(pub static GC_SWEEP: Cell<bool> = Cell::new(false));

// RAII guard that flags the sweep phase: sets GC_SWEEP on creation and
// clears it again on drop.
struct DropGuard;

impl DropGuard {
    fn new() -> DropGuard {
        GC_SWEEP.with(|flag| flag.set(true));
        DropGuard
    }
}

impl Drop for DropGuard {
    fn drop(&mut self) {
        GC_SWEEP.with(|flag| flag.set(false));
    }
}

/// True when it is safe to dereference a Gc pointer, i.e. the collector is
/// not mid-sweep (during a sweep, destructors may run against boxes that
/// have already been freed).
fn deref_safe() -> bool {
    GC_SWEEP.with(|flag| !flag.get())
}


impl fmt::Display for GcStats {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            fmt, "Cycle {}: estimated usage {} bytes ({} allocations)",
            self.cycle_count, self.allocated, self.box_count
        )
    }
}