1use std::cell::RefCell;
7use std::time::{Duration, Instant};
8
/// Tuning parameters for the garbage collector.
#[derive(Debug, Clone)]
pub struct GCConfig {
    /// Heap size, in bytes, allocated at startup.
    pub initial_heap_size: usize,
    /// Hard upper bound, in bytes, that the heap may grow to.
    pub max_heap_size: usize,
    /// Heap-occupancy fraction that triggers a collection
    /// (e.g. 0.75 = collect when the heap is 75% full).
    /// NOTE(review): interpretation inferred from the name and default —
    /// confirm against the collector that consumes it.
    pub collection_threshold: f64,
    /// Enable generational collection.
    pub generational: bool,
    /// Enable incremental collection.
    pub incremental: bool,
    /// Time budget for a single incremental step.
    /// NOTE(review): the unit is not visible here (micro- vs milliseconds);
    /// confirm against `collect_incremental`'s implementation.
    pub max_increment_time: u64,
    /// Record per-collection statistics when true.
    pub enable_stats: bool,
}
20
21impl Default for GCConfig {
22 fn default() -> Self {
23 Self {
24 initial_heap_size: 1024 * 1024,
25 max_heap_size: 64 * 1024 * 1024,
26 collection_threshold: 0.75,
27 generational: true,
28 incremental: true,
29 max_increment_time: 1000,
30 enable_stats: false,
31 }
32 }
33}
34
/// Opaque identifier for a heap-managed object.
pub type ObjectId = u64;
37
/// Running totals accumulated across collection cycles
/// (updated by `GarbageCollector::record_collection`).
#[derive(Debug, Default, Clone)]
pub struct GCStats {
    /// Number of collection cycles recorded.
    pub collections: u64,
    /// Total objects reclaimed across all cycles.
    pub objects_collected: u64,
    /// Total bytes reclaimed across all cycles.
    pub bytes_collected: u64,
    /// Sum of all collection durations.
    pub total_collection_time: Duration,
    /// `total_collection_time / collections`; zero until the first cycle.
    pub avg_collection_time: Duration,
    /// Largest heap size observed, in bytes.
    /// NOTE(review): nothing in this file updates it yet — presumably the
    /// allocator will; verify once allocation is wired up.
    pub peak_heap_size: usize,
    /// Heap size at the last stats update, in bytes.
    pub current_heap_size: usize,
    /// Timestamp of the most recent recorded collection, if any.
    pub last_collection: Option<Instant>,
}
50
/// Garbage-collector facade. Most operations are currently no-op
/// placeholders; only statistics recording carries real state.
pub struct GarbageCollector {
    // Immutable configuration supplied at construction.
    config: GCConfig,
    // Interior mutability so stats can be updated through `&self`;
    // single-threaded only (RefCell is not Sync).
    stats: RefCell<GCStats>,
}
59
60impl GarbageCollector {
61 pub fn new(config: GCConfig) -> Self {
62 Self {
63 config,
64 stats: RefCell::new(GCStats::default()),
65 }
66 }
67
68 pub fn config(&self) -> &GCConfig {
69 &self.config
70 }
71
72 pub fn add_root(&self, _obj_id: ObjectId) {}
73 pub fn remove_root(&self, _obj_id: ObjectId) {}
74 pub fn collect(&self) -> GCResult {
75 GCResult::empty()
76 }
77 pub fn collect_incremental(&self) {}
84 pub fn force_collect(&self) -> GCResult {
85 GCResult::empty()
86 }
87 pub fn heap_size(&self) -> usize {
88 0
89 }
90 pub fn object_count(&self) -> usize {
91 0
92 }
93 pub fn stats(&self) -> GCStats {
94 self.stats.borrow().clone()
95 }
96 pub fn contains_object(&self, _obj_id: ObjectId) -> bool {
97 false
98 }
99
100 pub fn record_collection(&self, result: &GCResult) {
102 let mut stats = self.stats.borrow_mut();
103 stats.collections += 1;
104 stats.objects_collected += result.objects_collected;
105 stats.bytes_collected += result.bytes_collected;
106 stats.total_collection_time += result.duration;
107 if stats.collections > 0 {
108 stats.avg_collection_time = stats.total_collection_time / stats.collections as u32;
109 }
110 stats.last_collection = Some(Instant::now());
111 }
112}
113
/// Write barrier for overwriting a raw 64-bit heap slot.
///
/// In the default build this compiles to nothing. With the
/// `gc_barrier_debug` feature it counts each invocation in
/// `BARRIER_COUNT` so coverage can be checked against recorded
/// heap writes (see `assert_barrier_coverage`).
#[inline(always)]
pub fn write_barrier_slot(_old: u64, _new: u64) {
    #[cfg(feature = "gc_barrier_debug")]
    {
        BARRIER_COUNT.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
    }
    // TODO(gc): real barrier logic for the `gc` feature goes here; the
    // previous empty `#[cfg(feature = "gc")] {}` block was dead code and
    // has been removed.
}
135
136#[inline(always)]
142pub fn write_barrier_vw(old: &shape_value::ValueWord, new: &shape_value::ValueWord) {
143 write_barrier_slot(old.raw_bits(), new.raw_bits());
144}
145
/// Number of write-barrier invocations (incremented by
/// `write_barrier_slot`); debug instrumentation only.
#[cfg(feature = "gc_barrier_debug")]
pub static BARRIER_COUNT: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);
153
/// Number of recorded raw heap writes (incremented by
/// `record_heap_write`); debug instrumentation only.
#[cfg(feature = "gc_barrier_debug")]
pub static HEAP_WRITE_COUNT: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);
161
/// Records that one raw heap write occurred. Compiles to nothing unless
/// the `gc_barrier_debug` feature is enabled, in which case it bumps
/// `HEAP_WRITE_COUNT`.
#[inline(always)]
pub fn record_heap_write() {
    #[cfg(feature = "gc_barrier_debug")]
    HEAP_WRITE_COUNT.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
}
174
/// Asserts that write-barrier coverage is complete: at least as many
/// barriers as raw heap writes have been recorded since process start.
/// Only available with the `gc_barrier_debug` feature.
///
/// # Panics
/// Panics when fewer barriers than heap writes have been recorded.
#[cfg(feature = "gc_barrier_debug")]
pub fn assert_barrier_coverage() {
    let barriers = BARRIER_COUNT.load(std::sync::atomic::Ordering::Relaxed);
    let writes = HEAP_WRITE_COUNT.load(std::sync::atomic::Ordering::Relaxed);
    assert!(
        barriers >= writes,
        "Write barrier coverage gap: {} heap writes but only {} barriers",
        writes,
        barriers
    );
}
190
#[cfg(test)]
mod tests {
    use super::*;

    /// The raw-slot barrier must accept arbitrary bit patterns.
    #[test]
    fn write_barrier_slot_does_not_panic() {
        let patterns: [(u64, u64); 4] = [
            (0, 0),
            (u64::MAX, 0),
            (0, u64::MAX),
            (0xFFF8_0000_0000_0000, 0xFFF8_0000_0000_0001),
        ];
        for (old, new) in patterns {
            write_barrier_slot(old, new);
        }
    }

    /// The typed barrier must accept value words in either order.
    #[test]
    fn write_barrier_vw_does_not_panic() {
        let none_word = shape_value::ValueWord::none();
        let int_word = shape_value::ValueWord::from_i64(42);
        write_barrier_vw(&none_word, &int_word);
        write_barrier_vw(&int_word, &none_word);
    }

    /// Heap-write recording is safe in any build configuration.
    #[test]
    fn record_heap_write_does_not_panic() {
        record_heap_write();
    }
}
218
/// Outcome of a single collection cycle, consumed by
/// `GarbageCollector::record_collection`.
#[derive(Debug, Clone)]
pub struct GCResult {
    /// Objects reclaimed in this cycle.
    pub objects_collected: u64,
    /// Bytes reclaimed in this cycle.
    pub bytes_collected: u64,
    /// Wall-clock time the cycle took.
    pub duration: Duration,
}
226
227impl GCResult {
228 pub fn new(objects_collected: u64, bytes_collected: u64, duration: Duration) -> Self {
229 Self {
230 objects_collected,
231 bytes_collected,
232 duration,
233 }
234 }
235
236 pub fn empty() -> Self {
237 Self {
238 objects_collected: 0,
239 bytes_collected: 0,
240 duration: Duration::ZERO,
241 }
242 }
243}