// shape_gc/generations.rs
//! Generational collection with card table write barriers.
//!
//! - **Young generation**: Small region set (~4-8MB), bump-allocated, collected frequently.
//! - **Old generation**: Larger region set, collected less frequently.
//! - **Promotion**: Objects surviving N young collections are copied to old gen.
//! - **Card table**: 512-byte cards tracking old→young pointers (write barrier at store sites).
use crate::SweepStats;
use crate::header::{GcColor, GcHeader, Generation};
use crate::marker::Marker;
use crate::region::Region;
use std::collections::HashMap;

/// Card table entry size: 512 bytes per card.
const CARD_SIZE: usize = 512;

/// Number of young GC cycles an object must survive before promotion.
const PROMOTION_THRESHOLD: u8 = 2;

/// Card table for tracking old→young pointers.
///
/// Each byte covers a 512-byte region of memory. When a store into an old-gen
/// object writes a pointer to a young-gen object, the corresponding card byte
/// is set to 1 (dirty).
pub struct CardTable {
    /// One byte per 512-byte card. 0 = clean, 1 = dirty.
    cards: Vec<u8>,
    /// Base address of the covered memory range.
    base: usize,
    /// Total size of the covered memory range (may not be card-aligned).
    size: usize,
}

impl CardTable {
    /// Create a card table covering `size` bytes starting at `base`.
    pub fn new(base: usize, size: usize) -> Self {
        // Round up so a trailing partial card still gets an entry.
        let num_cards = (size + CARD_SIZE - 1) / CARD_SIZE;
        Self {
            cards: vec![0; num_cards],
            base,
            size,
        }
    }

    /// Map `addr` to its card index, or `None` when `addr` is outside the
    /// covered range. In-range addresses always map to a valid index because
    /// `cards.len()` is the rounded-up card count.
    #[inline(always)]
    fn card_index(&self, addr: usize) -> Option<usize> {
        if addr >= self.base && addr < self.base + self.size {
            Some((addr - self.base) / CARD_SIZE)
        } else {
            None
        }
    }

    /// Mark the card containing `addr` as dirty.
    /// Out-of-range addresses are ignored (write barriers may see any pointer).
    #[inline(always)]
    pub fn mark_dirty(&mut self, addr: usize) {
        if let Some(index) = self.card_index(addr) {
            self.cards[index] = 1;
        }
    }

    /// Check if the card containing `addr` is dirty.
    /// Out-of-range addresses are never dirty.
    #[inline(always)]
    pub fn is_dirty(&self, addr: usize) -> bool {
        self.card_index(addr).map_or(false, |index| self.cards[index] != 0)
    }

    /// Clear all dirty cards.
    pub fn clear(&mut self) {
        self.cards.fill(0);
    }

    /// Iterate over dirty card ranges. Calls `f(start_addr, end_addr)` for each
    /// dirty card. The final card's end is clamped to `base + size`, so ranges
    /// never extend past the covered memory (the old code reported a full
    /// CARD_SIZE span for a trailing partial card).
    pub fn for_each_dirty(&self, mut f: impl FnMut(usize, usize)) {
        let limit = self.base + self.size;
        for (i, &card) in self.cards.iter().enumerate() {
            if card != 0 {
                let start = self.base + i * CARD_SIZE;
                let end = (start + CARD_SIZE).min(limit);
                f(start, end);
            }
        }
    }

    /// Number of dirty cards.
    pub fn dirty_count(&self) -> usize {
        self.cards.iter().filter(|&&c| c != 0).count()
    }

    /// Get the base address covered by this card table.
    pub fn base(&self) -> usize {
        self.base
    }

    /// Get the total size covered by this card table.
    pub fn covered_size(&self) -> usize {
        self.size
    }
}

/// Generational collector state.
///
/// Manages young and old generation regions, promotion tracking, and card table
/// write barriers. Young-gen collection scans only young regions + dirty cards,
/// while old-gen collection does a full mark-sweep.
pub struct GenerationalCollector {
    /// Number of young collections performed (bumped by `collect_young` and
    /// `record_young_gc`).
    young_gc_count: u64,
    /// Number of old collections performed (bumped by `collect_old` and
    /// `record_old_gc`).
    old_gc_count: u64,
    /// Promotion threshold (number of young GCs survived).
    promotion_threshold: u8,
    /// Card table (lazily initialized when old gen exists).
    /// NOTE(review): lazy init in `alloc_in_old_gen` covers only the first
    /// old-gen region — confirm later regions are intended to be uncovered.
    card_table: Option<CardTable>,

    // ── Generation regions ──────────────────────────────────────────
    /// Regions belonging to the young generation (bump-allocated, frequent collection).
    young_regions: Vec<Region>,
    /// Regions belonging to the old generation (promoted objects, infrequent collection).
    old_regions: Vec<Region>,

    // ── Survival tracking ───────────────────────────────────────────
    /// Per-object survival count, keyed by object pointer address.
    /// Incremented each time a young-gen object survives a young collection;
    /// entries are removed on death or promotion.
    survival_counts: HashMap<usize, u8>,

    // ── Statistics ──────────────────────────────────────────────────
    /// Total objects promoted across all young GCs.
    total_promoted: u64,
}
130impl GenerationalCollector {
131    pub fn new() -> Self {
132        Self {
133            young_gc_count: 0,
134            old_gc_count: 0,
135            promotion_threshold: PROMOTION_THRESHOLD,
136            card_table: None,
137            young_regions: Vec::new(),
138            old_regions: Vec::new(),
139            survival_counts: HashMap::new(),
140            total_promoted: 0,
141        }
142    }
143
144    /// Create a generational collector with a custom promotion threshold.
145    pub fn with_promotion_threshold(threshold: u8) -> Self {
146        Self {
147            promotion_threshold: threshold,
148            ..Self::new()
149        }
150    }
151
152    // ── Region management ───────────────────────────────────────────
153
154    /// Add a region to the young generation.
155    pub fn add_young_region(&mut self, region: Region) {
156        self.young_regions.push(region);
157    }
158
159    /// Add a region to the old generation.
160    pub fn add_old_region(&mut self, region: Region) {
161        self.old_regions.push(region);
162    }
163
164    /// Get a reference to young regions.
165    pub fn young_regions(&self) -> &[Region] {
166        &self.young_regions
167    }
168
169    /// Get a mutable reference to young regions.
170    pub fn young_regions_mut(&mut self) -> &mut Vec<Region> {
171        &mut self.young_regions
172    }
173
174    /// Get a reference to old regions.
175    pub fn old_regions(&self) -> &[Region] {
176        &self.old_regions
177    }
178
179    /// Get a mutable reference to old regions.
180    pub fn old_regions_mut(&mut self) -> &mut Vec<Region> {
181        &mut self.old_regions
182    }
183
184    /// Total bytes used in young gen.
185    pub fn young_used_bytes(&self) -> usize {
186        self.young_regions.iter().map(|r| r.used_bytes()).sum()
187    }
188
189    /// Total capacity in young gen (all young regions).
190    pub fn young_capacity_bytes(&self) -> usize {
191        self.young_regions.len() * crate::region::REGION_SIZE
192    }
193
194    /// Total bytes used in old gen.
195    pub fn old_used_bytes(&self) -> usize {
196        self.old_regions.iter().map(|r| r.used_bytes()).sum()
197    }
198
199    /// Total capacity in old gen (all old regions).
200    pub fn old_capacity_bytes(&self) -> usize {
201        self.old_regions.len() * crate::region::REGION_SIZE
202    }
203
204    /// Young gen utilization (0.0 to 1.0). Returns 0.0 if no young regions exist.
205    pub fn young_utilization(&self) -> f64 {
206        let cap = self.young_capacity_bytes();
207        if cap == 0 {
208            return 0.0;
209        }
210        self.young_used_bytes() as f64 / cap as f64
211    }
212
213    /// Old gen free bytes.
214    pub fn old_free_bytes(&self) -> usize {
215        let cap = self.old_capacity_bytes();
216        let used = self.old_used_bytes();
217        cap.saturating_sub(used)
218    }
219
220    /// Check if a pointer falls within any young-gen region.
221    pub fn is_young_ptr(&self, ptr: *const u8) -> bool {
222        self.young_regions.iter().any(|r| r.contains(ptr))
223    }
224
225    /// Check if a pointer falls within any old-gen region.
226    pub fn is_old_ptr(&self, ptr: *const u8) -> bool {
227        self.old_regions.iter().any(|r| r.contains(ptr))
228    }
229
230    // ── Young-gen collection ────────────────────────────────────────
231
232    /// Collect the young generation.
233    ///
234    /// 1. Mark roots that point into young gen
235    /// 2. Scan dirty cards for old-to-young references
236    /// 3. Complete marking (only young-gen objects)
237    /// 4. Promote survivors that have survived enough cycles
238    /// 5. Sweep dead young-gen objects
239    /// 6. Clear dirty cards
240    pub fn collect_young(&mut self, marker: &mut Marker, roots: &[*mut u8]) -> SweepStats {
241        // 1. Mark roots that point into young gen
242        marker.reset();
243        marker.start_marking();
244        for &root in roots {
245            if !root.is_null() && self.is_young_ptr(root) {
246                marker.mark_root(root);
247            }
248        }
249
250        // 2. Scan dirty cards for old→young references.
251        // Walk objects in old-gen regions that fall within dirty card ranges.
252        if let Some(ref card_table) = self.card_table {
253            let mut old_to_young_refs: Vec<*mut u8> = Vec::new();
254
255            card_table.for_each_dirty(|card_start, card_end| {
256                // Scan each old-gen region for objects overlapping this card range
257                for region in &self.old_regions {
258                    let region_base = region.base() as usize;
259                    let region_end = region_base + crate::region::REGION_SIZE;
260
261                    // Skip regions that don't overlap this card range
262                    if region_end <= card_start || region_base >= card_end {
263                        continue;
264                    }
265
266                    // Walk objects in this region and check if they overlap the dirty card
267                    region.for_each_object(|_header, obj_ptr| {
268                        let obj_addr = obj_ptr as usize;
269                        if obj_addr >= card_start && obj_addr < card_end {
270                            // This object is in a dirty card region. In a full implementation
271                            // we would trace its reference fields. For now, we treat the
272                            // object pointer itself as a potential young-gen reference
273                            // and mark it as a remembered-set entry.
274                            old_to_young_refs.push(obj_ptr);
275                        }
276                    });
277                }
278            });
279
280            // Mark any old→young references as roots for the young GC
281            for ptr in old_to_young_refs {
282                // The old object itself isn't young, but its reference fields may
283                // point to young objects. Since we can't trace individual fields
284                // without object layout knowledge, we add the pointer to the
285                // marker's live set to ensure referenced young objects are discovered
286                // during tracing.
287                marker.mark_gray(ptr);
288            }
289        }
290
291        // 3. Complete marking phase (process all gray objects)
292        marker.mark_all();
293
294        // 4. Update survival counts for live objects (increment before promotion check)
295        self.update_survival_counts(marker);
296
297        // 5. Promote survivors that have exceeded the threshold
298        let promoted = self.promote_survivors(marker);
299        self.total_promoted += promoted as u64;
300
301        // 6. Sweep young gen — collect dead objects
302        let stats = self.sweep_young(marker);
303
304        // 7. Clear dirty cards after young GC
305        if let Some(ref mut card_table) = self.card_table {
306            card_table.clear();
307        }
308
309        // 8. Bookkeeping
310        marker.finish_marking();
311        self.young_gc_count += 1;
312
313        stats
314    }
315
316    /// Promote surviving young-gen objects that have survived enough collections.
317    ///
318    /// Objects are copied to old-gen regions and their headers updated.
319    /// Returns the number of promoted objects.
320    fn promote_survivors(&mut self, marker: &Marker) -> usize {
321        let header_size = std::mem::size_of::<GcHeader>();
322        let mut promoted_count = 0;
323        let mut objects_to_promote: Vec<(usize, u32)> = Vec::new(); // (obj_addr, size)
324
325        // Find objects eligible for promotion
326        for region in &self.young_regions {
327            region.for_each_object(|header, obj_ptr| {
328                if marker.is_marked(obj_ptr) {
329                    let obj_addr = obj_ptr as usize;
330                    let survival_count = self.survival_counts.get(&obj_addr).copied().unwrap_or(0);
331                    if survival_count >= self.promotion_threshold {
332                        objects_to_promote.push((obj_addr, header.size));
333                    }
334                }
335            });
336        }
337
338        // Copy each promoted object to old gen
339        for (obj_addr, obj_size) in objects_to_promote {
340            let total = (header_size + obj_size as usize + 7) & !7;
341
342            // Get header pointer (header precedes object data)
343            let header_ptr = unsafe { (obj_addr as *mut u8).sub(header_size) };
344
345            // Ensure we have an old-gen region with space
346            let dest = self.alloc_in_old_gen(total);
347            if let Some(dest_ptr) = dest {
348                // Copy header + object data
349                unsafe {
350                    std::ptr::copy_nonoverlapping(header_ptr, dest_ptr, total);
351                }
352
353                // Update the header at the destination to mark as old gen
354                let dest_header = unsafe { &mut *(dest_ptr as *mut GcHeader) };
355                dest_header.set_generation(Generation::Old);
356                dest_header.set_color(GcColor::White); // Reset for next cycle
357
358                // Remove from survival tracking (now in old gen)
359                self.survival_counts.remove(&obj_addr);
360
361                // Mark the original as forwarded so sweep can skip it
362                let orig_header = unsafe { &mut *(header_ptr as *mut GcHeader) };
363                orig_header.set_forwarded(true);
364
365                promoted_count += 1;
366            }
367        }
368
369        promoted_count
370    }
371
372    /// Allocate space in old-gen regions. Creates a new region if needed.
373    fn alloc_in_old_gen(&mut self, total_bytes: usize) -> Option<*mut u8> {
374        // Try to find an old-gen region with enough space
375        for region in &mut self.old_regions {
376            if region.remaining() >= total_bytes {
377                let base = region.base();
378                let cursor = region.used_bytes();
379                let ptr = unsafe { base.add(cursor) };
380                region.set_cursor(cursor + total_bytes);
381                return Some(ptr);
382            }
383        }
384
385        // Allocate a new old-gen region
386        let mut new_region = Region::new();
387        let ptr = new_region.base();
388        new_region.set_cursor(total_bytes);
389
390        // Initialize card table for the new old-gen region
391        let base_addr = ptr as usize;
392        if self.card_table.is_none() {
393            self.card_table = Some(CardTable::new(base_addr, crate::region::REGION_SIZE));
394        }
395
396        self.old_regions.push(new_region);
397        Some(ptr)
398    }
399
400    /// Sweep the young generation: reclaim unmarked (white) objects.
401    fn sweep_young(&mut self, marker: &Marker) -> SweepStats {
402        let mut stats = SweepStats::default();
403
404        for region in &mut self.young_regions {
405            let mut live_bytes = 0;
406            region.for_each_object_mut(|header, obj_ptr| {
407                if header.is_forwarded() {
408                    // Promoted — count as collected from young gen
409                    stats.bytes_collected += header.size as usize;
410                    stats.objects_collected += 1;
411                    header.set_forwarded(false); // Reset flag
412                } else if marker.is_marked(obj_ptr) {
413                    // Live — reset to white for next cycle
414                    let size = header.size as usize;
415                    live_bytes += size;
416                    stats.bytes_retained += size;
417                    header.set_color(GcColor::White);
418                } else {
419                    // Dead object
420                    stats.bytes_collected += header.size as usize;
421                    stats.objects_collected += 1;
422                }
423            });
424            region.set_live_bytes(live_bytes);
425        }
426
427        stats
428    }
429
430    /// Update survival counts after a young GC.
431    /// Increment count for objects that survived, remove entries for dead objects.
432    fn update_survival_counts(&mut self, marker: &Marker) {
433        let mut live_addrs: Vec<usize> = Vec::new();
434
435        for region in &self.young_regions {
436            region.for_each_object(|_header, obj_ptr| {
437                if marker.is_marked(obj_ptr) {
438                    live_addrs.push(obj_ptr as usize);
439                }
440            });
441        }
442
443        // Remove dead entries
444        self.survival_counts
445            .retain(|addr, _| live_addrs.contains(addr));
446
447        // Increment survival count for live objects
448        for addr in live_addrs {
449            let count = self.survival_counts.entry(addr).or_insert(0);
450            *count = count.saturating_add(1);
451        }
452    }
453
454    // ── Old-gen collection ──────────────────────────────────────────
455
456    /// Collect the old generation (full mark-sweep).
457    ///
458    /// This marks from roots across ALL regions (young + old) and sweeps
459    /// only old-gen regions. Called less frequently than young GC.
460    pub fn collect_old(&mut self, marker: &mut Marker, roots: &[*mut u8]) -> SweepStats {
461        // Full mark from all roots (across both generations)
462        marker.reset();
463        marker.start_marking();
464        for &root in roots {
465            if !root.is_null() {
466                marker.mark_root(root);
467            }
468        }
469        marker.mark_all();
470
471        // Sweep old-gen regions
472        let mut stats = SweepStats::default();
473        for region in &mut self.old_regions {
474            let mut live_bytes = 0;
475            region.for_each_object_mut(|header, obj_ptr| {
476                if marker.is_marked(obj_ptr) {
477                    let size = header.size as usize;
478                    live_bytes += size;
479                    stats.bytes_retained += size;
480                    header.set_color(GcColor::White);
481                } else {
482                    stats.bytes_collected += header.size as usize;
483                    stats.objects_collected += 1;
484                }
485            });
486            region.set_live_bytes(live_bytes);
487        }
488
489        // Also sweep young-gen regions during a full collection
490        for region in &mut self.young_regions {
491            let mut live_bytes = 0;
492            region.for_each_object_mut(|header, obj_ptr| {
493                if marker.is_marked(obj_ptr) {
494                    let size = header.size as usize;
495                    live_bytes += size;
496                    stats.bytes_retained += size;
497                    header.set_color(GcColor::White);
498                } else {
499                    stats.bytes_collected += header.size as usize;
500                    stats.objects_collected += 1;
501                }
502            });
503            region.set_live_bytes(live_bytes);
504        }
505
506        marker.finish_marking();
507        self.old_gc_count += 1;
508
509        stats
510    }
511
512    // ── Survival tracking queries ───────────────────────────────────
513
514    /// Get the survival count for a specific object.
515    pub fn survival_count(&self, obj_ptr: *const u8) -> u8 {
516        self.survival_counts
517            .get(&(obj_ptr as usize))
518            .copied()
519            .unwrap_or(0)
520    }
521
522    /// Get the total number of objects promoted over all collections.
523    pub fn total_promoted(&self) -> u64 {
524        self.total_promoted
525    }
526
527    // ── Record + stat accessors (backward-compatible) ───────────────
528
529    /// Record a young collection.
530    pub fn record_young_gc(&mut self) {
531        self.young_gc_count += 1;
532    }
533
534    /// Record an old (full) collection.
535    pub fn record_old_gc(&mut self) {
536        self.old_gc_count += 1;
537    }
538
539    /// Check if an object should be promoted based on survival count.
540    pub fn should_promote(&self, survival_count: u8) -> bool {
541        survival_count >= self.promotion_threshold
542    }
543
544    /// Get the card table, if initialized.
545    pub fn card_table(&self) -> Option<&CardTable> {
546        self.card_table.as_ref()
547    }
548
549    /// Get a mutable reference to the card table.
550    pub fn card_table_mut(&mut self) -> Option<&mut CardTable> {
551        self.card_table.as_mut()
552    }
553
554    /// Initialize the card table for a given memory range.
555    pub fn init_card_table(&mut self, base: usize, size: usize) {
556        self.card_table = Some(CardTable::new(base, size));
557    }
558
559    /// Statistics.
560    pub fn young_gc_count(&self) -> u64 {
561        self.young_gc_count
562    }
563
564    pub fn old_gc_count(&self) -> u64 {
565        self.old_gc_count
566    }
567
568    /// Promotion threshold value.
569    pub fn promotion_threshold(&self) -> u8 {
570        self.promotion_threshold
571    }
572}
573
impl Default for GenerationalCollector {
    /// Equivalent to [`GenerationalCollector::new`].
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::marker::Marker;
    use std::alloc::Layout;

    // ── CardTable tests ─────────────────────────────────────────────

    #[test]
    fn test_card_table_mark_and_check() {
        let mut ct = CardTable::new(0x1000, 4096);

        assert!(!ct.is_dirty(0x1000));
        ct.mark_dirty(0x1000);
        assert!(ct.is_dirty(0x1000));
        assert!(ct.is_dirty(0x1100)); // Same card (within 512 bytes)
        assert!(!ct.is_dirty(0x1200)); // Next card
    }

    #[test]
    fn test_card_table_clear() {
        let mut ct = CardTable::new(0x1000, 4096);
        ct.mark_dirty(0x1000);
        ct.mark_dirty(0x1800);
        assert_eq!(ct.dirty_count(), 2);

        ct.clear();
        assert_eq!(ct.dirty_count(), 0);
    }

    #[test]
    fn test_promotion_threshold() {
        let gc = GenerationalCollector::new();
        assert!(!gc.should_promote(0));
        assert!(!gc.should_promote(1));
        assert!(gc.should_promote(2));
        assert!(gc.should_promote(3));
    }

    // ── Young-gen collection tests ──────────────────────────────────

    /// Helper: bump-allocate a `u64` in `region`, write `value` into it, and
    /// return the object pointer.
    fn alloc_in_region(region: &mut Region, value: u64) -> *mut u8 {
        let layout = Layout::new::<u64>();
        let ptr = region.try_alloc(layout).expect("alloc failed");
        unsafe {
            (ptr as *mut u64).write(value);
        }
        ptr
    }

    #[test]
    fn test_young_gc_preserves_live_objects() {
        let mut gc = GenerationalCollector::new();
        let mut marker = Marker::new();

        // Create a young-gen region and allocate objects
        let mut region = Region::new();
        let live_ptr = alloc_in_region(&mut region, 42);
        let _dead_ptr = alloc_in_region(&mut region, 99);
        gc.add_young_region(region);

        // Collect with only live_ptr as root
        let stats = gc.collect_young(&mut marker, &[live_ptr]);

        // Live object survives
        assert_eq!(unsafe { *(live_ptr as *const u64) }, 42);
        // One dead object collected
        assert_eq!(stats.objects_collected, 1);
        assert!(stats.bytes_collected > 0);
        assert!(stats.bytes_retained > 0);
    }

    #[test]
    fn test_young_gc_collects_dead_objects() {
        let mut gc = GenerationalCollector::new();
        let mut marker = Marker::new();

        let mut region = Region::new();
        let _dead1 = alloc_in_region(&mut region, 1);
        let _dead2 = alloc_in_region(&mut region, 2);
        let _dead3 = alloc_in_region(&mut region, 3);
        gc.add_young_region(region);

        // No roots — all objects should be collected
        let stats = gc.collect_young(&mut marker, &[]);

        assert_eq!(stats.objects_collected, 3);
        assert_eq!(stats.bytes_retained, 0);
    }

    #[test]
    fn test_promotion_after_n_survivals() {
        let mut gc = GenerationalCollector::with_promotion_threshold(2);
        let mut marker = Marker::new();

        let mut region = Region::new();
        let ptr = alloc_in_region(&mut region, 100);
        gc.add_young_region(region);

        // First young GC — object survives (survival_count = 1)
        let stats1 = gc.collect_young(&mut marker, &[ptr]);
        assert!(stats1.bytes_retained > 0);
        assert_eq!(gc.old_regions().len(), 0); // Not yet promoted
        assert_eq!(gc.survival_count(ptr), 1);

        // Second young GC — object survives again (survival_count = 2 >= threshold)
        // The object should be promoted to old gen
        let _stats2 = gc.collect_young(&mut marker, &[ptr]);
        // After promotion, the object was copied to old gen and the young copy
        // was marked as "collected" (forwarded). The stats reflect the young sweep.
        assert_eq!(gc.old_regions().len(), 1); // Promoted
        assert_eq!(gc.total_promoted(), 1);
    }

    #[test]
    fn test_old_gen_full_collection() {
        let mut gc = GenerationalCollector::new();
        let mut marker = Marker::new();

        // Directly populate old gen
        let mut old_region = Region::new();
        let live_ptr = alloc_in_region(&mut old_region, 200);
        let _dead_ptr = alloc_in_region(&mut old_region, 300);
        gc.add_old_region(old_region);

        // Full collection of old gen
        let stats = gc.collect_old(&mut marker, &[live_ptr]);

        assert_eq!(stats.objects_collected, 1); // One dead
        assert!(stats.bytes_retained > 0); // One live
        assert_eq!(unsafe { *(live_ptr as *const u64) }, 200);
    }

    #[test]
    fn test_dirty_card_scanning_finds_old_to_young_refs() {
        let mut gc = GenerationalCollector::new();

        // Set up an old-gen region
        let mut old_region = Region::new();
        let old_ptr = alloc_in_region(&mut old_region, 500);
        let old_base = old_region.base() as usize;
        gc.add_old_region(old_region);

        // Set up a young-gen region
        let mut young_region = Region::new();
        let young_ptr = alloc_in_region(&mut young_region, 600);
        gc.add_young_region(young_region);

        // Initialize card table covering the old-gen region
        gc.init_card_table(old_base, crate::region::REGION_SIZE);

        // Simulate a write barrier: old object stores a reference to young object
        gc.card_table_mut().unwrap().mark_dirty(old_ptr as usize);

        // Now collect young gen — dirty card should cause old_ptr to be scanned
        let mut marker = Marker::new();
        let stats = gc.collect_young(&mut marker, &[young_ptr]);

        // Young object should survive (it was a root)
        assert!(stats.bytes_retained > 0);
        // Dirty cards should be cleared after collection
        assert_eq!(gc.card_table().unwrap().dirty_count(), 0);
    }

    #[test]
    fn test_young_gc_increments_count() {
        let mut gc = GenerationalCollector::new();
        let mut marker = Marker::new();

        // Add an empty young region so collect_young has something to scan
        gc.add_young_region(Region::new());

        assert_eq!(gc.young_gc_count(), 0);
        gc.collect_young(&mut marker, &[]);
        assert_eq!(gc.young_gc_count(), 1);
        gc.collect_young(&mut marker, &[]);
        assert_eq!(gc.young_gc_count(), 2);
    }

    #[test]
    fn test_old_gc_increments_count() {
        let mut gc = GenerationalCollector::new();
        let mut marker = Marker::new();

        gc.add_old_region(Region::new());

        assert_eq!(gc.old_gc_count(), 0);
        gc.collect_old(&mut marker, &[]);
        assert_eq!(gc.old_gc_count(), 1);
    }

    #[test]
    fn test_young_utilization() {
        let gc = GenerationalCollector::new();
        // No regions → 0.0
        assert_eq!(gc.young_utilization(), 0.0);
    }

    #[test]
    fn test_custom_promotion_threshold() {
        let gc = GenerationalCollector::with_promotion_threshold(5);
        assert_eq!(gc.promotion_threshold(), 5);
        assert!(!gc.should_promote(4));
        assert!(gc.should_promote(5));
    }

    #[test]
    fn test_is_young_old_ptr() {
        let mut gc = GenerationalCollector::new();

        let mut young_region = Region::new();
        let young_ptr = alloc_in_region(&mut young_region, 1);
        gc.add_young_region(young_region);

        let mut old_region = Region::new();
        let old_ptr = alloc_in_region(&mut old_region, 2);
        gc.add_old_region(old_region);

        assert!(gc.is_young_ptr(young_ptr));
        assert!(!gc.is_old_ptr(young_ptr));
        assert!(gc.is_old_ptr(old_ptr));
        assert!(!gc.is_young_ptr(old_ptr));
    }

    #[test]
    fn test_old_gen_free_bytes() {
        let mut gc = GenerationalCollector::new();
        assert_eq!(gc.old_free_bytes(), 0);

        let region = Region::new();
        gc.add_old_region(region);
        // Full region capacity minus used (0 used for new region)
        assert_eq!(gc.old_free_bytes(), crate::region::REGION_SIZE);
    }

    #[test]
    fn test_multiple_young_gcs_before_promotion() {
        // Verify that objects are NOT promoted before reaching the threshold
        let mut gc = GenerationalCollector::with_promotion_threshold(3);
        let mut marker = Marker::new();

        let mut region = Region::new();
        let ptr = alloc_in_region(&mut region, 77);
        gc.add_young_region(region);

        // GC #1 — survival_count becomes 1
        gc.collect_young(&mut marker, &[ptr]);
        assert_eq!(gc.old_regions().len(), 0);
        assert_eq!(gc.survival_count(ptr), 1);

        // GC #2 — survival_count becomes 2
        gc.collect_young(&mut marker, &[ptr]);
        assert_eq!(gc.old_regions().len(), 0);
        assert_eq!(gc.survival_count(ptr), 2);

        // GC #3 — survival_count becomes 3 >= threshold, promoted
        gc.collect_young(&mut marker, &[ptr]);
        assert_eq!(gc.old_regions().len(), 1);
        assert_eq!(gc.total_promoted(), 1);
    }
}