1use crate::SweepStats;
9use crate::header::{GcColor, GcHeader, Generation};
10use crate::marker::Marker;
11use crate::region::Region;
12use std::collections::HashMap;
13
/// Size in bytes of a single card tracked by the card table.
const CARD_SIZE: usize = 512;

/// Number of young collections an object must survive before promotion.
const PROMOTION_THRESHOLD: u8 = 2;

/// Dirty-card tracking over a contiguous address range.
///
/// Each 512-byte "card" gets one byte of state; a non-zero byte means the
/// card has been written to since the last clear, so a young collection
/// only needs to scan dirty cards for old-to-young references.
pub struct CardTable {
    /// One state byte per card; non-zero = dirty.
    cards: Vec<u8>,
    /// Lowest address covered by this table.
    base: usize,
    /// Number of bytes covered, starting at `base`.
    size: usize,
}

impl CardTable {
    /// Creates a table covering `size` bytes starting at `base`.
    /// The card count is `size` divided by `CARD_SIZE`, rounded up.
    pub fn new(base: usize, size: usize) -> Self {
        let num_cards = (size + CARD_SIZE - 1) / CARD_SIZE;
        Self {
            cards: vec![0; num_cards],
            base,
            size,
        }
    }

    /// Maps `addr` to its card index, or `None` when `addr` falls outside
    /// the covered range (or past the end of the card vector).
    #[inline(always)]
    fn card_index(&self, addr: usize) -> Option<usize> {
        if addr < self.base || addr >= self.base + self.size {
            return None;
        }
        let index = (addr - self.base) / CARD_SIZE;
        (index < self.cards.len()).then_some(index)
    }

    /// Marks the card containing `addr` as dirty. Out-of-range addresses
    /// are ignored.
    #[inline(always)]
    pub fn mark_dirty(&mut self, addr: usize) {
        if let Some(index) = self.card_index(addr) {
            self.cards[index] = 1;
        }
    }

    /// Returns whether the card containing `addr` is dirty.
    /// Out-of-range addresses are reported as clean.
    #[inline(always)]
    pub fn is_dirty(&self, addr: usize) -> bool {
        match self.card_index(addr) {
            Some(index) => self.cards[index] != 0,
            None => false,
        }
    }

    /// Resets every card to clean.
    pub fn clear(&mut self) {
        for card in self.cards.iter_mut() {
            *card = 0;
        }
    }

    /// Invokes `f(start, end)` with the half-open address range of every
    /// dirty card, in ascending address order.
    pub fn for_each_dirty(&self, mut f: impl FnMut(usize, usize)) {
        self.cards
            .iter()
            .enumerate()
            .filter(|&(_, &card)| card != 0)
            .for_each(|(i, _)| {
                let start = self.base + i * CARD_SIZE;
                f(start, start + CARD_SIZE);
            });
    }

    /// Number of cards currently marked dirty.
    pub fn dirty_count(&self) -> usize {
        self.cards.iter().map(|&c| usize::from(c != 0)).sum()
    }

    /// Lowest address covered by this table.
    pub fn base(&self) -> usize {
        self.base
    }

    /// Number of bytes covered by this table.
    pub fn covered_size(&self) -> usize {
        self.size
    }
}
98
/// A two-generation (young/old) garbage collector built on regions, with
/// survival-count-based promotion and an optional card table for finding
/// old-to-young references during minor collections.
pub struct GenerationalCollector {
    /// Number of young (minor) collections performed.
    young_gc_count: u64,
    /// Number of old (full) collections performed.
    old_gc_count: u64,
    /// Young collections an object must survive before being promoted.
    promotion_threshold: u8,
    /// Card table over (part of) the old generation. Created lazily by
    /// `alloc_in_old_gen`, or explicitly via `init_card_table`.
    card_table: Option<CardTable>,

    /// Regions making up the young generation.
    young_regions: Vec<Region>,
    /// Regions making up the old generation.
    old_regions: Vec<Region>,

    /// Per-object survival counts, keyed by object address. Entries for
    /// addresses that are no longer live are pruned each young GC so that
    /// reused addresses do not inherit stale counts.
    survival_counts: HashMap<usize, u8>,

    /// Total number of objects promoted to the old generation.
    total_promoted: u64,
}
129
impl GenerationalCollector {
    /// Creates a collector with the default promotion threshold.
    pub fn new() -> Self {
        Self {
            young_gc_count: 0,
            old_gc_count: 0,
            promotion_threshold: PROMOTION_THRESHOLD,
            card_table: None,
            young_regions: Vec::new(),
            old_regions: Vec::new(),
            survival_counts: HashMap::new(),
            total_promoted: 0,
        }
    }

    /// Creates a collector that promotes objects after `threshold`
    /// survived young collections.
    pub fn with_promotion_threshold(threshold: u8) -> Self {
        Self {
            promotion_threshold: threshold,
            ..Self::new()
        }
    }

    /// Adds a region to the young generation.
    pub fn add_young_region(&mut self, region: Region) {
        self.young_regions.push(region);
    }

    /// Adds a region to the old generation.
    pub fn add_old_region(&mut self, region: Region) {
        self.old_regions.push(region);
    }

    /// Young-generation regions, read-only.
    pub fn young_regions(&self) -> &[Region] {
        &self.young_regions
    }

    /// Young-generation regions, mutable.
    pub fn young_regions_mut(&mut self) -> &mut Vec<Region> {
        &mut self.young_regions
    }

    /// Old-generation regions, read-only.
    pub fn old_regions(&self) -> &[Region] {
        &self.old_regions
    }

    /// Old-generation regions, mutable.
    pub fn old_regions_mut(&mut self) -> &mut Vec<Region> {
        &mut self.old_regions
    }

    /// Total bytes allocated across all young regions.
    pub fn young_used_bytes(&self) -> usize {
        self.young_regions.iter().map(|r| r.used_bytes()).sum()
    }

    /// Total capacity of the young generation in bytes.
    pub fn young_capacity_bytes(&self) -> usize {
        self.young_regions.len() * crate::region::REGION_SIZE
    }

    /// Total bytes allocated across all old regions.
    pub fn old_used_bytes(&self) -> usize {
        self.old_regions.iter().map(|r| r.used_bytes()).sum()
    }

    /// Total capacity of the old generation in bytes.
    pub fn old_capacity_bytes(&self) -> usize {
        self.old_regions.len() * crate::region::REGION_SIZE
    }

    /// Fraction of young-generation capacity in use, in [0.0, 1.0].
    /// Returns 0.0 when there are no young regions.
    pub fn young_utilization(&self) -> f64 {
        let cap = self.young_capacity_bytes();
        if cap == 0 {
            return 0.0;
        }
        self.young_used_bytes() as f64 / cap as f64
    }

    /// Unused bytes remaining in the old generation (saturating at 0).
    pub fn old_free_bytes(&self) -> usize {
        let cap = self.old_capacity_bytes();
        let used = self.old_used_bytes();
        cap.saturating_sub(used)
    }

    /// Whether `ptr` lies inside any young region.
    pub fn is_young_ptr(&self, ptr: *const u8) -> bool {
        self.young_regions.iter().any(|r| r.contains(ptr))
    }

    /// Whether `ptr` lies inside any old region.
    pub fn is_old_ptr(&self, ptr: *const u8) -> bool {
        self.old_regions.iter().any(|r| r.contains(ptr))
    }

    /// Performs a young-generation (minor) collection.
    ///
    /// Phases, in order: mark from young-pointing roots, conservatively
    /// re-mark old objects that sit on dirty cards (the remembered set),
    /// finish marking, bump survival counts, promote eligible survivors,
    /// sweep the young regions, and finally clear the card table.
    pub fn collect_young(&mut self, marker: &mut Marker, roots: &[*mut u8]) -> SweepStats {
        marker.reset();
        marker.start_marking();
        // Only roots that point into the young generation seed the mark;
        // old-gen roots are irrelevant for a minor collection.
        for &root in roots {
            if !root.is_null() && self.is_young_ptr(root) {
                marker.mark_root(root);
            }
        }

        if let Some(ref card_table) = self.card_table {
            // Collect every old object lying on a dirty card; each is a
            // potential holder of an old-to-young reference.
            let mut old_to_young_refs: Vec<*mut u8> = Vec::new();

            card_table.for_each_dirty(|card_start, card_end| {
                for region in &self.old_regions {
                    let region_base = region.base() as usize;
                    let region_end = region_base + crate::region::REGION_SIZE;

                    // Skip regions that do not overlap this card at all.
                    if region_end <= card_start || region_base >= card_end {
                        continue;
                    }

                    region.for_each_object(|_header, obj_ptr| {
                        let obj_addr = obj_ptr as usize;
                        if obj_addr >= card_start && obj_addr < card_end {
                            old_to_young_refs.push(obj_ptr);
                        }
                    });
                }
            });

            // Gray the collected old objects so the marker traces through
            // them into the young generation.
            for ptr in old_to_young_refs {
                marker.mark_gray(ptr);
            }
        }

        marker.mark_all();

        // Counts are bumped before promotion, so an object reaching the
        // threshold is promoted in this same cycle.
        self.update_survival_counts(marker);

        let promoted = self.promote_survivors(marker);
        self.total_promoted += promoted as u64;

        let stats = self.sweep_young(marker);

        // All dirty cards were just processed; reset the table.
        if let Some(ref mut card_table) = self.card_table {
            card_table.clear();
        }

        marker.finish_marking();
        self.young_gc_count += 1;

        stats
    }

    /// Copies marked young objects whose survival count has reached the
    /// promotion threshold into the old generation, flagging the stale
    /// young copy as forwarded so `sweep_young` reclaims it.
    ///
    /// Returns the number of objects promoted.
    fn promote_survivors(&mut self, marker: &Marker) -> usize {
        let header_size = std::mem::size_of::<GcHeader>();
        let mut promoted_count = 0;
        // Candidates are gathered first because old-gen allocation cannot
        // happen while the young regions are being iterated.
        let mut objects_to_promote: Vec<(usize, u32)> = Vec::new();
        for region in &self.young_regions {
            region.for_each_object(|header, obj_ptr| {
                if marker.is_marked(obj_ptr) {
                    let obj_addr = obj_ptr as usize;
                    let survival_count = self.survival_counts.get(&obj_addr).copied().unwrap_or(0);
                    if survival_count >= self.promotion_threshold {
                        objects_to_promote.push((obj_addr, header.size));
                    }
                }
            });
        }

        for (obj_addr, obj_size) in objects_to_promote {
            // Header plus payload, rounded up to 8-byte alignment.
            // NOTE(review): assumes the young allocator used the same
            // rounding — confirm against the allocation path.
            let total = (header_size + obj_size as usize + 7) & !7;

            // The GcHeader sits immediately before the object payload.
            let header_ptr = unsafe { (obj_addr as *mut u8).sub(header_size) };

            let dest = self.alloc_in_old_gen(total);
            if let Some(dest_ptr) = dest {
                // SAFETY: dest was just bump-allocated with room for
                // `total` bytes, and source/destination live in different
                // regions, so the ranges cannot overlap.
                unsafe {
                    std::ptr::copy_nonoverlapping(header_ptr, dest_ptr, total);
                }

                let dest_header = unsafe { &mut *(dest_ptr as *mut GcHeader) };
                dest_header.set_generation(Generation::Old);
                dest_header.set_color(GcColor::White);
                // The object no longer lives in the young gen; drop its count.
                self.survival_counts.remove(&obj_addr);

                // Flag the stale young copy so the sweep reclaims it.
                let orig_header = unsafe { &mut *(header_ptr as *mut GcHeader) };
                orig_header.set_forwarded(true);

                promoted_count += 1;
            }
        }

        promoted_count
    }

    /// Bump-allocates `total_bytes` in the old generation, appending a
    /// fresh region when no existing region has room.
    fn alloc_in_old_gen(&mut self, total_bytes: usize) -> Option<*mut u8> {
        for region in &mut self.old_regions {
            if region.remaining() >= total_bytes {
                let base = region.base();
                let cursor = region.used_bytes();
                // SAFETY: the `remaining()` check guarantees that
                // cursor + total_bytes stays within the region.
                let ptr = unsafe { base.add(cursor) };
                region.set_cursor(cursor + total_bytes);
                return Some(ptr);
            }
        }

        let mut new_region = Region::new();
        let ptr = new_region.base();
        new_region.set_cursor(total_bytes);

        let base_addr = ptr as usize;
        // NOTE(review): the lazily created card table covers only this one
        // region; writes into other old regions go untracked. Verify this
        // limitation is acceptable to callers of `collect_young`.
        if self.card_table.is_none() {
            self.card_table = Some(CardTable::new(base_addr, crate::region::REGION_SIZE));
        }

        self.old_regions.push(new_region);
        Some(ptr)
    }

    /// Sweeps all young regions: counts forwarded (promoted) and unmarked
    /// objects as collected, whitens survivors for the next cycle, and
    /// records each region's live byte total.
    fn sweep_young(&mut self, marker: &Marker) -> SweepStats {
        let mut stats = SweepStats::default();

        for region in &mut self.young_regions {
            let mut live_bytes = 0;
            region.for_each_object_mut(|header, obj_ptr| {
                if header.is_forwarded() {
                    // Promoted: the young copy is garbage now.
                    stats.bytes_collected += header.size as usize;
                    stats.objects_collected += 1;
                    header.set_forwarded(false);
                } else if marker.is_marked(obj_ptr) {
                    let size = header.size as usize;
                    live_bytes += size;
                    stats.bytes_retained += size;
                    // Reset color for the next marking cycle.
                    header.set_color(GcColor::White);
                } else {
                    stats.bytes_collected += header.size as usize;
                    stats.objects_collected += 1;
                }
            });
            region.set_live_bytes(live_bytes);
        }

        stats
    }

    /// Records one more survival for every marked young object and prunes
    /// counts for addresses that are no longer live, so reused addresses
    /// start from zero.
    fn update_survival_counts(&mut self, marker: &Marker) {
        let mut live_addrs: Vec<usize> = Vec::new();

        for region in &self.young_regions {
            region.for_each_object(|_header, obj_ptr| {
                if marker.is_marked(obj_ptr) {
                    live_addrs.push(obj_ptr as usize);
                }
            });
        }

        // Drop entries for dead objects before incrementing survivors.
        self.survival_counts
            .retain(|addr, _| live_addrs.contains(addr));

        for addr in live_addrs {
            let count = self.survival_counts.entry(addr).or_insert(0);
            *count = count.saturating_add(1);
        }
    }

    /// Performs a full (major) collection: marks from all non-null roots
    /// (no generation filter) and sweeps both generations.
    pub fn collect_old(&mut self, marker: &mut Marker, roots: &[*mut u8]) -> SweepStats {
        marker.reset();
        marker.start_marking();
        for &root in roots {
            if !root.is_null() {
                marker.mark_root(root);
            }
        }
        marker.mark_all();

        let mut stats = SweepStats::default();
        // Sweep the old generation.
        for region in &mut self.old_regions {
            let mut live_bytes = 0;
            region.for_each_object_mut(|header, obj_ptr| {
                if marker.is_marked(obj_ptr) {
                    let size = header.size as usize;
                    live_bytes += size;
                    stats.bytes_retained += size;
                    header.set_color(GcColor::White);
                } else {
                    stats.bytes_collected += header.size as usize;
                    stats.objects_collected += 1;
                }
            });
            region.set_live_bytes(live_bytes);
        }

        // A full collection sweeps the young generation as well.
        for region in &mut self.young_regions {
            let mut live_bytes = 0;
            region.for_each_object_mut(|header, obj_ptr| {
                if marker.is_marked(obj_ptr) {
                    let size = header.size as usize;
                    live_bytes += size;
                    stats.bytes_retained += size;
                    header.set_color(GcColor::White);
                } else {
                    stats.bytes_collected += header.size as usize;
                    stats.objects_collected += 1;
                }
            });
            region.set_live_bytes(live_bytes);
        }

        marker.finish_marking();
        self.old_gc_count += 1;

        stats
    }

    /// Survival count recorded for the object at `obj_ptr` (0 if unknown).
    pub fn survival_count(&self, obj_ptr: *const u8) -> u8 {
        self.survival_counts
            .get(&(obj_ptr as usize))
            .copied()
            .unwrap_or(0)
    }

    /// Total number of objects promoted to the old generation.
    pub fn total_promoted(&self) -> u64 {
        self.total_promoted
    }

    /// Manually bumps the young GC counter (for collections driven
    /// externally rather than via `collect_young`).
    pub fn record_young_gc(&mut self) {
        self.young_gc_count += 1;
    }

    /// Manually bumps the old GC counter (see `record_young_gc`).
    pub fn record_old_gc(&mut self) {
        self.old_gc_count += 1;
    }

    /// Whether an object with the given survival count is promotion-eligible.
    pub fn should_promote(&self, survival_count: u8) -> bool {
        survival_count >= self.promotion_threshold
    }

    /// The card table, if one has been created.
    pub fn card_table(&self) -> Option<&CardTable> {
        self.card_table.as_ref()
    }

    /// Mutable access to the card table, if one has been created.
    pub fn card_table_mut(&mut self) -> Option<&mut CardTable> {
        self.card_table.as_mut()
    }

    /// Installs a card table covering `size` bytes at `base`, replacing
    /// any existing table.
    pub fn init_card_table(&mut self, base: usize, size: usize) {
        self.card_table = Some(CardTable::new(base, size));
    }

    /// Number of young collections performed so far.
    pub fn young_gc_count(&self) -> u64 {
        self.young_gc_count
    }

    /// Number of old collections performed so far.
    pub fn old_gc_count(&self) -> u64 {
        self.old_gc_count
    }

    /// The configured promotion threshold.
    pub fn promotion_threshold(&self) -> u8 {
        self.promotion_threshold
    }
}
573
574impl Default for GenerationalCollector {
575 fn default() -> Self {
576 Self::new()
577 }
578}
579
#[cfg(test)]
mod tests {
    use super::*;
    use crate::marker::Marker;
    use std::alloc::Layout;

    /// Marking an address dirties exactly its own 512-byte card.
    #[test]
    fn test_card_table_mark_and_check() {
        let mut ct = CardTable::new(0x1000, 4096);

        assert!(!ct.is_dirty(0x1000));
        ct.mark_dirty(0x1000);
        assert!(ct.is_dirty(0x1000));
        // 0x1100 falls on the same card as 0x1000.
        assert!(ct.is_dirty(0x1100));
        // 0x1200 is the start of the next card, which is still clean.
        assert!(!ct.is_dirty(0x1200));
    }

    /// `clear` resets every dirty card.
    #[test]
    fn test_card_table_clear() {
        let mut ct = CardTable::new(0x1000, 4096);
        ct.mark_dirty(0x1000);
        ct.mark_dirty(0x1800);
        assert_eq!(ct.dirty_count(), 2);

        ct.clear();
        assert_eq!(ct.dirty_count(), 0);
    }

    /// Default threshold promotes at 2 survivals and above.
    #[test]
    fn test_promotion_threshold() {
        let gc = GenerationalCollector::new();
        assert!(!gc.should_promote(0));
        assert!(!gc.should_promote(1));
        assert!(gc.should_promote(2));
        assert!(gc.should_promote(3));
    }

    /// Allocates a `u64` in `region` and writes `value` into it,
    /// returning the object pointer.
    fn alloc_in_region(region: &mut Region, value: u64) -> *mut u8 {
        let layout = Layout::new::<u64>();
        let ptr = region.try_alloc(layout).expect("alloc failed");
        unsafe {
            (ptr as *mut u64).write(value);
        }
        ptr
    }

    /// A rooted object survives a young GC intact; the unrooted one dies.
    #[test]
    fn test_young_gc_preserves_live_objects() {
        let mut gc = GenerationalCollector::new();
        let mut marker = Marker::new();

        let mut region = Region::new();
        let live_ptr = alloc_in_region(&mut region, 42);
        let _dead_ptr = alloc_in_region(&mut region, 99);
        gc.add_young_region(region);

        let stats = gc.collect_young(&mut marker, &[live_ptr]);

        // The live object's payload is untouched.
        assert_eq!(unsafe { *(live_ptr as *const u64) }, 42);
        assert_eq!(stats.objects_collected, 1);
        assert!(stats.bytes_collected > 0);
        assert!(stats.bytes_retained > 0);
    }

    /// With no roots, every young object is reclaimed.
    #[test]
    fn test_young_gc_collects_dead_objects() {
        let mut gc = GenerationalCollector::new();
        let mut marker = Marker::new();

        let mut region = Region::new();
        let _dead1 = alloc_in_region(&mut region, 1);
        let _dead2 = alloc_in_region(&mut region, 2);
        let _dead3 = alloc_in_region(&mut region, 3);
        gc.add_young_region(region);

        let stats = gc.collect_young(&mut marker, &[]);

        assert_eq!(stats.objects_collected, 3);
        assert_eq!(stats.bytes_retained, 0);
    }

    /// An object is promoted on the GC where its survival count reaches
    /// the threshold (counts are bumped before promotion).
    #[test]
    fn test_promotion_after_n_survivals() {
        let mut gc = GenerationalCollector::with_promotion_threshold(2);
        let mut marker = Marker::new();

        let mut region = Region::new();
        let ptr = alloc_in_region(&mut region, 100);
        gc.add_young_region(region);

        let stats1 = gc.collect_young(&mut marker, &[ptr]);
        assert!(stats1.bytes_retained > 0);
        // First survival: count 1 < threshold 2, so no promotion yet.
        assert_eq!(gc.old_regions().len(), 0);
        assert_eq!(gc.survival_count(ptr), 1);

        let _stats2 = gc.collect_young(&mut marker, &[ptr]);
        // Second survival reaches the threshold and triggers promotion,
        // lazily creating the first old region.
        assert_eq!(gc.old_regions().len(), 1);
        assert_eq!(gc.total_promoted(), 1);
    }

    /// A full collection sweeps dead old objects and keeps rooted ones.
    #[test]
    fn test_old_gen_full_collection() {
        let mut gc = GenerationalCollector::new();
        let mut marker = Marker::new();

        let mut old_region = Region::new();
        let live_ptr = alloc_in_region(&mut old_region, 200);
        let _dead_ptr = alloc_in_region(&mut old_region, 300);
        gc.add_old_region(old_region);

        let stats = gc.collect_old(&mut marker, &[live_ptr]);

        assert_eq!(stats.objects_collected, 1);
        assert!(stats.bytes_retained > 0);
        assert_eq!(unsafe { *(live_ptr as *const u64) }, 200);
    }

    /// Dirty cards feed old objects into the young mark, and the table is
    /// cleared after the collection.
    #[test]
    fn test_dirty_card_scanning_finds_old_to_young_refs() {
        let mut gc = GenerationalCollector::new();

        let mut old_region = Region::new();
        let old_ptr = alloc_in_region(&mut old_region, 500);
        let old_base = old_region.base() as usize;
        gc.add_old_region(old_region);

        let mut young_region = Region::new();
        let young_ptr = alloc_in_region(&mut young_region, 600);
        gc.add_young_region(young_region);

        gc.init_card_table(old_base, crate::region::REGION_SIZE);

        // Simulate a write barrier hit on the old object.
        gc.card_table_mut().unwrap().mark_dirty(old_ptr as usize);

        let mut marker = Marker::new();
        let stats = gc.collect_young(&mut marker, &[young_ptr]);

        assert!(stats.bytes_retained > 0);
        // collect_young clears the card table when it finishes.
        assert_eq!(gc.card_table().unwrap().dirty_count(), 0);
    }

    /// Each `collect_young` call bumps the young GC counter.
    #[test]
    fn test_young_gc_increments_count() {
        let mut gc = GenerationalCollector::new();
        let mut marker = Marker::new();

        gc.add_young_region(Region::new());

        assert_eq!(gc.young_gc_count(), 0);
        gc.collect_young(&mut marker, &[]);
        assert_eq!(gc.young_gc_count(), 1);
        gc.collect_young(&mut marker, &[]);
        assert_eq!(gc.young_gc_count(), 2);
    }

    /// Each `collect_old` call bumps the old GC counter.
    #[test]
    fn test_old_gc_increments_count() {
        let mut gc = GenerationalCollector::new();
        let mut marker = Marker::new();

        gc.add_old_region(Region::new());

        assert_eq!(gc.old_gc_count(), 0);
        gc.collect_old(&mut marker, &[]);
        assert_eq!(gc.old_gc_count(), 1);
    }

    /// Utilization is defined as 0.0 when there is no young capacity.
    #[test]
    fn test_young_utilization() {
        let gc = GenerationalCollector::new();
        assert_eq!(gc.young_utilization(), 0.0);
    }

    /// A custom threshold is honored by `should_promote`.
    #[test]
    fn test_custom_promotion_threshold() {
        let gc = GenerationalCollector::with_promotion_threshold(5);
        assert_eq!(gc.promotion_threshold(), 5);
        assert!(!gc.should_promote(4));
        assert!(gc.should_promote(5));
    }

    /// Generation membership checks are mutually exclusive.
    #[test]
    fn test_is_young_old_ptr() {
        let mut gc = GenerationalCollector::new();

        let mut young_region = Region::new();
        let young_ptr = alloc_in_region(&mut young_region, 1);
        gc.add_young_region(young_region);

        let mut old_region = Region::new();
        let old_ptr = alloc_in_region(&mut old_region, 2);
        gc.add_old_region(old_region);

        assert!(gc.is_young_ptr(young_ptr));
        assert!(!gc.is_old_ptr(young_ptr));
        assert!(gc.is_old_ptr(old_ptr));
        assert!(!gc.is_young_ptr(old_ptr));
    }

    /// Free bytes are 0 with no regions, and a full region's worth after
    /// adding one empty old region.
    #[test]
    fn test_old_gen_free_bytes() {
        let mut gc = GenerationalCollector::new();
        assert_eq!(gc.old_free_bytes(), 0);

        let region = Region::new();
        gc.add_old_region(region);
        assert_eq!(gc.old_free_bytes(), crate::region::REGION_SIZE);
    }

    /// With threshold 3, promotion happens on the third survival only.
    #[test]
    fn test_multiple_young_gcs_before_promotion() {
        let mut gc = GenerationalCollector::with_promotion_threshold(3);
        let mut marker = Marker::new();

        let mut region = Region::new();
        let ptr = alloc_in_region(&mut region, 77);
        gc.add_young_region(region);

        gc.collect_young(&mut marker, &[ptr]);
        assert_eq!(gc.old_regions().len(), 0);
        assert_eq!(gc.survival_count(ptr), 1);

        gc.collect_young(&mut marker, &[ptr]);
        assert_eq!(gc.old_regions().len(), 0);
        assert_eq!(gc.survival_count(ptr), 2);

        gc.collect_young(&mut marker, &[ptr]);
        assert_eq!(gc.old_regions().len(), 1);
        assert_eq!(gc.total_promoted(), 1);
    }
}