runmat_gc/barriers.rs

//! Write barriers for generational garbage collection
//!
//! Write barriers track cross-generational references to ensure that
//! objects in older generations that reference younger objects are
//! included in minor collection roots.
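//!
//! A minimal usage sketch (assuming this file is the `barriers` module of the
//! `runmat_gc` crate; pointer values are illustrative):
//!
//! ```ignore
//! let barrier = WriteBarrier::new(true);
//! let old = 0x1000 as *const u8;   // object in the old generation
//! let young = 0x2000 as *const u8; // object it now references in the young generation
//! barrier.record_reference(old, young);
//! // A minor GC treats every remembered old object as an extra root.
//! assert_eq!(barrier.get_remembered_roots(), vec![old]);
//! ```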

use crate::Value;
use std::collections::HashSet;
use std::sync::atomic::{AtomicUsize, Ordering};

/// Write barrier implementation for generational GC
pub struct WriteBarrier {
    /// Old-generation objects that may reference young objects (remembered set)
    remembered_set: parking_lot::RwLock<HashSet<*const u8>>,

    /// Statistics
    barrier_hits: AtomicUsize,
    remembered_set_size: AtomicUsize,

    /// Configuration
    enable_barriers: bool,
}

impl WriteBarrier {
    pub fn new(enable_barriers: bool) -> Self {
        Self {
            remembered_set: parking_lot::RwLock::new(HashSet::new()),
            barrier_hits: AtomicUsize::new(0),
            remembered_set_size: AtomicUsize::new(0),
            enable_barriers,
        }
    }
    /// Record a potential old-to-young reference.
    ///
    /// Should be called whenever a reference from an old-generation object
    /// to a young-generation object is created. The young pointer is accepted
    /// for API symmetry but not stored: the remembered set is keyed by the
    /// old (source) object only.
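    ///
    /// A short sketch of the dedup behavior (pointer values are illustrative):
    ///
    /// ```ignore
    /// let barrier = WriteBarrier::new(true);
    /// let old = 0x1000 as *const u8;
    /// barrier.record_reference(old, 0x2000 as *const u8);
    /// barrier.record_reference(old, 0x3000 as *const u8); // same source object
    /// let stats = barrier.stats();
    /// assert_eq!(stats.barrier_hits, 2);        // every call counts as a hit
    /// assert_eq!(stats.remembered_set_size, 1); // but the object is stored once
    /// ```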
    pub fn record_reference(&self, old_object: *const u8, _young_object: *const u8) {
        if !self.enable_barriers {
            return;
        }

        self.barrier_hits.fetch_add(1, Ordering::Relaxed);

        let mut remembered_set = self.remembered_set.write();
        if remembered_set.insert(old_object) {
            self.remembered_set_size.fetch_add(1, Ordering::Relaxed);
        }
    }

    /// Remove an object from the remembered set
    ///
    /// Should be called when an old-generation object is collected
    /// or when its references are updated
    pub fn remove_object(&self, object: *const u8) {
        if !self.enable_barriers {
            return;
        }

        let mut remembered_set = self.remembered_set.write();
        if remembered_set.remove(&object) {
            self.remembered_set_size.fetch_sub(1, Ordering::Relaxed);
        }
    }

    /// Get all objects in the remembered set as additional roots for minor GC
    pub fn get_remembered_roots(&self) -> Vec<*const u8> {
        let remembered_set = self.remembered_set.read();
        remembered_set.iter().copied().collect()
    }

    /// Clear the remembered set (typically after major GC)
    pub fn clear_remembered_set(&self) {
        let mut remembered_set = self.remembered_set.write();
        remembered_set.clear();
        self.remembered_set_size.store(0, Ordering::Relaxed);
    }

    /// Get write barrier statistics
    pub fn stats(&self) -> WriteBarrierStats {
        WriteBarrierStats {
            barrier_hits: self.barrier_hits.load(Ordering::Relaxed),
            remembered_set_size: self.remembered_set_size.load(Ordering::Relaxed),
            enabled: self.enable_barriers,
        }
    }

    /// Reset statistics
    pub fn reset_stats(&self) {
        self.barrier_hits.store(0, Ordering::Relaxed);
        // Don't reset remembered_set_size as it reflects current state
    }
}

/// Statistics for write barriers
#[derive(Debug, Clone)]
pub struct WriteBarrierStats {
    pub barrier_hits: usize,
    pub remembered_set_size: usize,
    pub enabled: bool,
}

/// Write barrier macro for convenient insertion into code
///
/// Usage: write_barrier!(old_ptr, young_ptr);
#[macro_export]
macro_rules! write_barrier {
    ($old_ptr:expr, $young_ptr:expr) => {
        // Write barrier hook for generational GC. This currently only traces
        // the old-to-young store when the `debug-gc` feature is enabled; the
        // remembered-set update itself is done via WriteBarrier/WriteBarrierManager.
        #[cfg(feature = "debug-gc")]
        {
            log::trace!("Write barrier: {:p} -> {:p}", $old_ptr, $young_ptr);
        }
    };
}
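
// Hypothetical call-site sketch (names below are illustrative, not part of
// this crate's API): after storing a young pointer into a field of an old
// object, emit the barrier and record the edge for the next minor GC:
//
//     let old = old_object_ptr();    // *const u8 into the old generation
//     let young = young_value_ptr(); // *const u8 into the young generation
//     write_barrier!(old, young);    // trace-only under `debug-gc`
//     barrier.record_reference(old, young);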

/// Trait for objects that can participate in write barriers
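///
/// A minimal sketch of an implementor (the `Node` type is hypothetical,
/// shown only to illustrate the trait contract):
///
/// ```ignore
/// struct Node { child_is_young: bool }
///
/// impl WriteBarrierAware for Node {
///     fn has_young_references(&self) -> bool {
///         self.child_is_young
///     }
/// }
/// ```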
pub trait WriteBarrierAware {
    /// Called when this object is about to be modified
    fn pre_write_barrier(&self) {}

    /// Called after this object has been modified
    fn post_write_barrier(&self) {}

    /// Check if this object contains references to younger generations
    fn has_young_references(&self) -> bool {
        false
    }
}

/// Implementation of write barrier for Value types
impl WriteBarrierAware for Value {
    fn has_young_references(&self) -> bool {
        match self {
            Value::Cell(cells) => {
                // Placeholder: conservatively treat any non-empty cell array as
                // potentially holding young references. A real implementation
                // would consult per-element generation metadata.
                !cells.data.is_empty()
            }
            _ => false,
        }
    }
}

/// Card-based write barrier for large heaps
///
/// Divides the heap into cards and tracks which cards contain
/// cross-generational references.
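///
/// Card math sketch: an address at heap offset `off` lands in card
/// `off / card_size`; with 512-byte cards, offset 600 falls in card 1
/// (values below are illustrative):
///
/// ```ignore
/// let heap_start = 0x10000 as *const u8;
/// let table = CardTable::new(heap_start, 4096, 512); // 8 cards total
/// table.mark_card_dirty((heap_start as usize + 600) as *const u8);
/// assert_eq!(table.get_dirty_cards(), vec![1]);
/// ```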
pub struct CardTable {
    /// Card size in bytes (typically 512 bytes)
    card_size: usize,

    /// Dirty bits for each card
    cards: parking_lot::RwLock<Vec<bool>>,

    /// Heap start address for card calculations
    heap_start: *const u8,

    /// Total heap size
    heap_size: usize,
}

impl CardTable {
    pub fn new(heap_start: *const u8, heap_size: usize, card_size: usize) -> Self {
        let num_cards = heap_size.div_ceil(card_size);

        Self {
            card_size,
            cards: parking_lot::RwLock::new(vec![false; num_cards]),
            heap_start,
            heap_size,
        }
    }

    /// Mark a card as dirty due to a cross-generational reference
    pub fn mark_card_dirty(&self, address: *const u8) {
        if let Some(card_index) = self.address_to_card(address) {
            let mut cards = self.cards.write();
            if card_index < cards.len() {
                cards[card_index] = true;
            }
        }
    }

    /// Get all dirty cards for scanning
    pub fn get_dirty_cards(&self) -> Vec<usize> {
        let cards = self.cards.read();
        cards
            .iter()
            .enumerate()
            .filter(|(_, &dirty)| dirty)
            .map(|(index, _)| index)
            .collect()
    }

    /// Clear all dirty cards (typically after scanning)
    pub fn clear_dirty_cards(&self) {
        let mut cards = self.cards.write();
        cards.fill(false);
    }

    /// Convert an address to a card index
    fn address_to_card(&self, address: *const u8) -> Option<usize> {
        let heap_start = self.heap_start as usize;
        let addr = address as usize;

        if addr >= heap_start && addr < heap_start + self.heap_size {
            Some((addr - heap_start) / self.card_size)
        } else {
            None
        }
    }

    /// Convert a card index to an address range
    pub fn card_to_address_range(&self, card_index: usize) -> Option<(*const u8, *const u8)> {
        if card_index >= self.heap_size.div_ceil(self.card_size) {
            return None;
        }

        let start = self.heap_start as usize + card_index * self.card_size;
        let end = (start + self.card_size).min(self.heap_start as usize + self.heap_size);

        Some((start as *const u8, end as *const u8))
    }

    /// Get card table statistics
    pub fn stats(&self) -> CardTableStats {
        let cards = self.cards.read();
        let dirty_count = cards.iter().filter(|&&dirty| dirty).count();

        CardTableStats {
            total_cards: cards.len(),
            dirty_cards: dirty_count,
            card_size: self.card_size,
            heap_size: self.heap_size,
        }
    }
}

/// Statistics for card table
#[derive(Debug, Clone)]
pub struct CardTableStats {
    pub total_cards: usize,
    pub dirty_cards: usize,
    pub card_size: usize,
    pub heap_size: usize,
}

/// Global write barrier manager
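///
/// Lifecycle sketch (pointer values are illustrative; the card table is only
/// consulted when enabled via `use_card_table`):
///
/// ```ignore
/// let mut manager = WriteBarrierManager::new(true, true);
/// manager.initialize_card_table(0x10000 as *const u8, 4096);
/// manager.record_reference(0x10064 as *const u8, 0x2000 as *const u8);
/// let roots = manager.get_minor_gc_roots();     // remembered-set roots
/// let ranges = manager.get_dirty_card_ranges(); // dirty-card scan ranges
/// manager.clear_after_minor_gc();               // keeps the remembered set
/// manager.clear_after_major_gc();               // clears everything
/// ```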
pub struct WriteBarrierManager {
    /// Simple remembered set barrier
    remembered_set_barrier: WriteBarrier,

    /// Card table barrier for large heaps
    card_table: Option<CardTable>,

    /// Configuration
    use_card_table: bool,
}

impl WriteBarrierManager {
    pub fn new(enable_barriers: bool, use_card_table: bool) -> Self {
        Self {
            remembered_set_barrier: WriteBarrier::new(enable_barriers),
            card_table: None,
            use_card_table,
        }
    }

    /// Initialize card table for a heap
    pub fn initialize_card_table(&mut self, heap_start: *const u8, heap_size: usize) {
        if self.use_card_table {
            self.card_table = Some(CardTable::new(heap_start, heap_size, 512));
        }
    }

    /// Record a cross-generational reference
    pub fn record_reference(&self, old_object: *const u8, young_object: *const u8) {
        // Always use the remembered set
        self.remembered_set_barrier
            .record_reference(old_object, young_object);

        // Also mark the card if using a card table
        if let Some(ref card_table) = self.card_table {
            card_table.mark_card_dirty(old_object);
        }
    }

    /// Get additional roots for minor GC
    pub fn get_minor_gc_roots(&self) -> Vec<*const u8> {
        self.remembered_set_barrier.get_remembered_roots()
    }

    /// Get dirty card ranges for scanning
    pub fn get_dirty_card_ranges(&self) -> Vec<(*const u8, *const u8)> {
        if let Some(ref card_table) = self.card_table {
            card_table
                .get_dirty_cards()
                .into_iter()
                .filter_map(|card| card_table.card_to_address_range(card))
                .collect()
        } else {
            Vec::new()
        }
    }

    /// Clear barriers after minor GC
    pub fn clear_after_minor_gc(&self) {
        // Keep the remembered set for the next minor GC
        if let Some(ref card_table) = self.card_table {
            card_table.clear_dirty_cards();
        }
    }

    /// Clear barriers after major GC
    pub fn clear_after_major_gc(&self) {
        self.remembered_set_barrier.clear_remembered_set();
        if let Some(ref card_table) = self.card_table {
            card_table.clear_dirty_cards();
        }
    }

    /// Get combined statistics
    pub fn stats(&self) -> WriteBarrierManagerStats {
        WriteBarrierManagerStats {
            remembered_set: self.remembered_set_barrier.stats(),
            card_table: self.card_table.as_ref().map(|ct| ct.stats()),
        }
    }
}

// SAFETY: the raw pointers stored here are used only as opaque keys and are
// never dereferenced; all shared mutable state is guarded by RwLocks and atomics.
unsafe impl Send for WriteBarrierManager {}
unsafe impl Sync for WriteBarrierManager {}

/// Combined statistics for write barrier manager
#[derive(Debug, Clone)]
pub struct WriteBarrierManagerStats {
    pub remembered_set: WriteBarrierStats,
    pub card_table: Option<CardTableStats>,
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_write_barrier() {
        let barrier = WriteBarrier::new(true);

        let old_ptr = 0x1000 as *const u8;
        let young_ptr = 0x2000 as *const u8;

        // Record a reference
        barrier.record_reference(old_ptr, young_ptr);

        let stats = barrier.stats();
        assert_eq!(stats.barrier_hits, 1);
        assert_eq!(stats.remembered_set_size, 1);

        // Get remembered roots
        let roots = barrier.get_remembered_roots();
        assert_eq!(roots.len(), 1);
        assert_eq!(roots[0], old_ptr);

        // Remove object
        barrier.remove_object(old_ptr);
        let stats = barrier.stats();
        assert_eq!(stats.remembered_set_size, 0);
    }

    #[test]
    fn test_write_barrier_disabled() {
        let barrier = WriteBarrier::new(false);

        let old_ptr = 0x1000 as *const u8;
        let young_ptr = 0x2000 as *const u8;

        barrier.record_reference(old_ptr, young_ptr);

        let stats = barrier.stats();
        assert!(!stats.enabled);
        assert_eq!(stats.barrier_hits, 0);
        assert_eq!(stats.remembered_set_size, 0);
    }

    #[test]
    fn test_card_table() {
        let heap_start = 0x10000 as *const u8;
        let heap_size = 4096;
        let card_size = 512;

        let card_table = CardTable::new(heap_start, heap_size, card_size);

        // Mark some cards dirty
        let addr1 = (heap_start as usize + 100) as *const u8;
        let addr2 = (heap_start as usize + 600) as *const u8;

        card_table.mark_card_dirty(addr1);
        card_table.mark_card_dirty(addr2);

        let dirty_cards = card_table.get_dirty_cards();
        assert_eq!(dirty_cards.len(), 2);
        assert!(dirty_cards.contains(&0)); // First card
        assert!(dirty_cards.contains(&1)); // Second card

        let stats = card_table.stats();
        assert_eq!(stats.total_cards, 8); // 4096 / 512
        assert_eq!(stats.dirty_cards, 2);

        // Clear dirty cards
        card_table.clear_dirty_cards();
        let dirty_cards = card_table.get_dirty_cards();
        assert_eq!(dirty_cards.len(), 0);
    }

    #[test]
    fn test_write_barrier_manager() {
        let mut manager = WriteBarrierManager::new(true, true);

        let heap_start = 0x10000 as *const u8;
        let heap_size = 4096;
        manager.initialize_card_table(heap_start, heap_size);

        let old_ptr = (heap_start as usize + 100) as *const u8;
        let young_ptr = 0x2000 as *const u8;

        manager.record_reference(old_ptr, young_ptr);

        let roots = manager.get_minor_gc_roots();
        assert_eq!(roots.len(), 1);

        let dirty_ranges = manager.get_dirty_card_ranges();
        assert_eq!(dirty_ranges.len(), 1);

        let stats = manager.stats();
        assert_eq!(stats.remembered_set.barrier_hits, 1);
        assert!(stats.card_table.is_some());
    }

    #[test]
    fn test_value_write_barrier_aware() {
        let value =
            Value::Cell(runmat_builtins::CellArray::new(vec![Value::Num(42.0)], 1, 1).unwrap());
        assert!(value.has_young_references());

        let value2 = Value::Num(42.0);
        assert!(!value2.has_young_references());
    }
}
451}