1use crate::Value;
8use std::collections::HashSet;
9use std::sync::atomic::{AtomicUsize, Ordering};
10
/// Write barrier for a generational GC: tracks old-generation objects that
/// hold references into the young generation so minor collections can treat
/// them as roots without scanning the whole old generation.
pub struct WriteBarrier {
    /// Old-generation objects known to reference young objects, keyed by raw
    /// object address. Guarded by an RwLock for concurrent mutator access.
    remembered_set: parking_lot::RwLock<HashSet<*const u8>>,

    /// Total number of times `record_reference` fired (including duplicates).
    barrier_hits: AtomicUsize,
    /// Cached `remembered_set.len()`, maintained alongside every insert/remove
    /// so `stats()` can report the size without taking the lock.
    remembered_set_size: AtomicUsize,

    /// When false, every barrier operation is a no-op.
    enable_barriers: bool,
}
23
24impl WriteBarrier {
25 pub fn new(enable_barriers: bool) -> Self {
26 Self {
27 remembered_set: parking_lot::RwLock::new(HashSet::new()),
28 barrier_hits: AtomicUsize::new(0),
29 remembered_set_size: AtomicUsize::new(0),
30 enable_barriers,
31 }
32 }
33
34 pub fn record_reference(&self, old_object: *const u8, _young_object: *const u8) {
39 if !self.enable_barriers {
40 return;
41 }
42
43 self.barrier_hits.fetch_add(1, Ordering::Relaxed);
44
45 let mut remembered_set = self.remembered_set.write();
46 if remembered_set.insert(old_object) {
47 self.remembered_set_size.fetch_add(1, Ordering::Relaxed);
48 }
49 }
50
51 pub fn remove_object(&self, object: *const u8) {
56 if !self.enable_barriers {
57 return;
58 }
59
60 let mut remembered_set = self.remembered_set.write();
61 if remembered_set.remove(&object) {
62 self.remembered_set_size.fetch_sub(1, Ordering::Relaxed);
63 }
64 }
65
66 pub fn get_remembered_roots(&self) -> Vec<*const u8> {
68 let remembered_set = self.remembered_set.read();
69 remembered_set.iter().copied().collect()
70 }
71
72 pub fn clear_remembered_set(&self) {
74 let mut remembered_set = self.remembered_set.write();
75 remembered_set.clear();
76 self.remembered_set_size.store(0, Ordering::Relaxed);
77 }
78
79 pub fn stats(&self) -> WriteBarrierStats {
81 WriteBarrierStats {
82 barrier_hits: self.barrier_hits.load(Ordering::Relaxed),
83 remembered_set_size: self.remembered_set_size.load(Ordering::Relaxed),
84 enabled: self.enable_barriers,
85 }
86 }
87
88 pub fn reset_stats(&self) {
90 self.barrier_hits.store(0, Ordering::Relaxed);
91 }
93}
94
/// Point-in-time snapshot of a `WriteBarrier`'s counters.
#[derive(Debug, Clone)]
pub struct WriteBarrierStats {
    /// Number of times the barrier has fired since creation or `reset_stats`.
    pub barrier_hits: usize,
    /// Cached remembered-set size at the moment of the snapshot.
    pub remembered_set_size: usize,
    /// Whether barrier operations were enabled for this barrier.
    pub enabled: bool,
}
102
/// Debug-only write-barrier tracing hook.
///
/// With the `debug-gc` feature enabled this logs the old -> young pointer
/// pair at trace level; otherwise the cfg attribute removes the block and
/// the macro expands to nothing.
///
/// NOTE(review): when `debug-gc` is disabled the argument expressions are
/// never evaluated, so callers must not rely on side effects inside them —
/// confirm this is intended at the call sites.
#[macro_export]
macro_rules! write_barrier {
    ($old_ptr:expr, $young_ptr:expr) => {
        #[cfg(feature = "debug-gc")]
        {
            log::trace!("Write barrier: {:p} -> {:p}", $old_ptr, $young_ptr);
        }
    };
}
117
/// Hooks a heap value can implement to cooperate with the write barrier.
/// All methods have no-op defaults so implementing the trait is opt-in
/// per capability.
pub trait WriteBarrierAware {
    /// Called before a write that may create an old -> young reference.
    fn pre_write_barrier(&self) {}

    /// Called after such a write has completed.
    fn post_write_barrier(&self) {}

    /// Whether this value may hold references into the young generation.
    /// Conservative default: report none.
    fn has_young_references(&self) -> bool {
        false
    }
}
131
132impl WriteBarrierAware for Value {
134 fn has_young_references(&self) -> bool {
135 match self {
136 Value::Cell(cells) => {
137 !cells.data.is_empty()
140 }
141 _ => false,
142 }
143 }
144}
145
/// Card-marking table: divides the heap into fixed-size "cards" and tracks
/// which cards contain a modified (dirty) old -> young reference, so minor
/// GC only scans dirty regions.
pub struct CardTable {
    /// Size of each card in bytes.
    card_size: usize,

    /// One dirty flag per card, indexed by card number.
    cards: parking_lot::RwLock<Vec<bool>>,

    /// Base address of the covered heap region (used only for address
    /// arithmetic; never dereferenced here).
    heap_start: *const u8,

    /// Total size in bytes of the covered heap region.
    heap_size: usize,
}
163
164impl CardTable {
165 pub fn new(heap_start: *const u8, heap_size: usize, card_size: usize) -> Self {
166 let num_cards = heap_size.div_ceil(card_size);
167
168 Self {
169 card_size,
170 cards: parking_lot::RwLock::new(vec![false; num_cards]),
171 heap_start,
172 heap_size,
173 }
174 }
175
176 pub fn mark_card_dirty(&self, address: *const u8) {
178 if let Some(card_index) = self.address_to_card(address) {
179 let mut cards = self.cards.write();
180 if card_index < cards.len() {
181 cards[card_index] = true;
182 }
183 }
184 }
185
186 pub fn get_dirty_cards(&self) -> Vec<usize> {
188 let cards = self.cards.read();
189 cards
190 .iter()
191 .enumerate()
192 .filter(|(_, &dirty)| dirty)
193 .map(|(index, _)| index)
194 .collect()
195 }
196
197 pub fn clear_dirty_cards(&self) {
199 let mut cards = self.cards.write();
200 cards.fill(false);
201 }
202
203 fn address_to_card(&self, address: *const u8) -> Option<usize> {
205 let heap_start = self.heap_start as usize;
206 let addr = address as usize;
207
208 if addr >= heap_start && addr < heap_start + self.heap_size {
209 Some((addr - heap_start) / self.card_size)
210 } else {
211 None
212 }
213 }
214
215 pub fn card_to_address_range(&self, card_index: usize) -> Option<(*const u8, *const u8)> {
217 if card_index >= self.heap_size.div_ceil(self.card_size) {
218 return None;
219 }
220
221 let start = self.heap_start as usize + card_index * self.card_size;
222 let end = (start + self.card_size).min(self.heap_start as usize + self.heap_size);
223
224 Some((start as *const u8, end as *const u8))
225 }
226
227 pub fn stats(&self) -> CardTableStats {
229 let cards = self.cards.read();
230 let dirty_count = cards.iter().filter(|&&dirty| dirty).count();
231
232 CardTableStats {
233 total_cards: cards.len(),
234 dirty_cards: dirty_count,
235 card_size: self.card_size,
236 heap_size: self.heap_size,
237 }
238 }
239}
240
/// Point-in-time snapshot of a `CardTable`'s configuration and dirty count.
#[derive(Debug, Clone)]
pub struct CardTableStats {
    /// Total number of cards covering the heap region.
    pub total_cards: usize,
    /// Number of cards currently marked dirty.
    pub dirty_cards: usize,
    /// Size of each card in bytes.
    pub card_size: usize,
    /// Total covered heap size in bytes.
    pub heap_size: usize,
}
249
/// Combines the remembered-set write barrier with an optional card table,
/// presenting a single facade to the GC.
pub struct WriteBarrierManager {
    /// Exact per-object remembered set of old -> young references.
    remembered_set_barrier: WriteBarrier,

    /// Coarse-grained card table; `None` until `initialize_card_table` runs
    /// (and only created when `use_card_table` is set).
    card_table: Option<CardTable>,

    /// Whether `initialize_card_table` should actually build a card table.
    use_card_table: bool,
}
261
262impl WriteBarrierManager {
263 pub fn new(enable_barriers: bool, use_card_table: bool) -> Self {
264 Self {
265 remembered_set_barrier: WriteBarrier::new(enable_barriers),
266 card_table: None,
267 use_card_table,
268 }
269 }
270
271 pub fn initialize_card_table(&mut self, heap_start: *const u8, heap_size: usize) {
273 if self.use_card_table {
274 self.card_table = Some(CardTable::new(heap_start, heap_size, 512));
275 }
276 }
277
278 pub fn record_reference(&self, old_object: *const u8, young_object: *const u8) {
280 self.remembered_set_barrier
282 .record_reference(old_object, young_object);
283
284 if let Some(ref card_table) = self.card_table {
286 card_table.mark_card_dirty(old_object);
287 }
288 }
289
290 pub fn get_minor_gc_roots(&self) -> Vec<*const u8> {
292 self.remembered_set_barrier.get_remembered_roots()
293 }
294
295 pub fn get_dirty_card_ranges(&self) -> Vec<(*const u8, *const u8)> {
297 if let Some(ref card_table) = self.card_table {
298 card_table
299 .get_dirty_cards()
300 .into_iter()
301 .filter_map(|card| card_table.card_to_address_range(card))
302 .collect()
303 } else {
304 Vec::new()
305 }
306 }
307
308 pub fn clear_after_minor_gc(&self) {
310 if let Some(ref card_table) = self.card_table {
312 card_table.clear_dirty_cards();
313 }
314 }
315
316 pub fn clear_after_major_gc(&self) {
318 self.remembered_set_barrier.clear_remembered_set();
319 if let Some(ref card_table) = self.card_table {
320 card_table.clear_dirty_cards();
321 }
322 }
323
324 pub fn stats(&self) -> WriteBarrierManagerStats {
326 WriteBarrierManagerStats {
327 remembered_set: self.remembered_set_barrier.stats(),
328 card_table: self.card_table.as_ref().map(|ct| ct.stats()),
329 }
330 }
331}
332
// SAFETY: WriteBarrierManager is only !Send/!Sync because it stores raw
// `*const u8` addresses (in the remembered set and card table). Within this
// module those pointers are used purely as opaque keys for address
// arithmetic and set membership — they are never dereferenced — and all
// shared mutable state is behind RwLocks/atomics.
// NOTE(review): soundness still depends on GC call sites not dereferencing
// the returned pointers from the wrong thread — confirm at the callers.
unsafe impl Send for WriteBarrierManager {}
unsafe impl Sync for WriteBarrierManager {}
336
/// Combined statistics snapshot from a `WriteBarrierManager`.
#[derive(Debug, Clone)]
pub struct WriteBarrierManagerStats {
    /// Remembered-set barrier counters.
    pub remembered_set: WriteBarrierStats,
    /// Card-table counters; `None` when no card table was initialized.
    pub card_table: Option<CardTableStats>,
}
343
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_write_barrier() {
        let barrier = WriteBarrier::new(true);
        let old = 0x1000 as *const u8;
        let young = 0x2000 as *const u8;

        barrier.record_reference(old, young);

        let stats = barrier.stats();
        assert_eq!(stats.barrier_hits, 1);
        assert_eq!(stats.remembered_set_size, 1);

        let roots = barrier.get_remembered_roots();
        assert_eq!(roots.len(), 1);
        assert_eq!(roots[0], old);

        barrier.remove_object(old);
        assert_eq!(barrier.stats().remembered_set_size, 0);
    }

    #[test]
    fn test_write_barrier_disabled() {
        let barrier = WriteBarrier::new(false);

        // With barriers off, recording must be a complete no-op.
        barrier.record_reference(0x1000 as *const u8, 0x2000 as *const u8);

        let stats = barrier.stats();
        assert!(!stats.enabled);
        assert_eq!(stats.barrier_hits, 0);
        assert_eq!(stats.remembered_set_size, 0);
    }

    #[test]
    fn test_card_table() {
        let heap_start = 0x10000 as *const u8;
        let heap_size = 4096;
        let card_size = 512;
        let table = CardTable::new(heap_start, heap_size, card_size);

        // Two addresses in distinct cards (offsets 100 and 600, card size 512).
        let in_card_0 = (heap_start as usize + 100) as *const u8;
        let in_card_1 = (heap_start as usize + 600) as *const u8;
        table.mark_card_dirty(in_card_0);
        table.mark_card_dirty(in_card_1);

        let dirty = table.get_dirty_cards();
        assert_eq!(dirty.len(), 2);
        assert!(dirty.contains(&0));
        assert!(dirty.contains(&1));

        let stats = table.stats();
        assert_eq!(stats.total_cards, 8);
        assert_eq!(stats.dirty_cards, 2);

        table.clear_dirty_cards();
        assert_eq!(table.get_dirty_cards().len(), 0);
    }

    #[test]
    fn test_write_barrier_manager() {
        let mut manager = WriteBarrierManager::new(true, true);

        let heap_start = 0x10000 as *const u8;
        manager.initialize_card_table(heap_start, 4096);

        let old = (heap_start as usize + 100) as *const u8;
        let young = 0x2000 as *const u8;
        manager.record_reference(old, young);

        assert_eq!(manager.get_minor_gc_roots().len(), 1);
        assert_eq!(manager.get_dirty_card_ranges().len(), 1);

        let stats = manager.stats();
        assert_eq!(stats.remembered_set.barrier_hits, 1);
        assert!(stats.card_table.is_some());
    }

    #[test]
    fn test_value_write_barrier_aware() {
        let cell_value =
            Value::Cell(runmat_builtins::CellArray::new(vec![Value::Num(42.0)], 1, 1).unwrap());
        assert!(cell_value.has_young_references());

        let num_value = Value::Num(42.0);
        assert!(!num_value.has_young_references());
    }
}