
aver_memory/memory.rs

use super::*;

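// Compaction, evacuation, and promotion passes for the arena's four heap
// spaces (young, yard, handoff, stable). Each pass walks the supplied roots,
// moves reachable entries, and rewrites heap indices through a `relocated`
// scratch table so shared values are moved only once.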
impl<T: ArenaTypes> Arena<T> {
    pub fn truncate_to(&mut self, mark: u32) {
        self.young_entries.truncate(mark as usize);
    }

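    /// Compact the young space above `mark`: entries reachable from `roots`
    /// are copied into a compacted block appended at `mark`, roots are
    /// rewritten to the new indices, and everything else above `mark` is
    /// dropped.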
    pub fn collect_young_from_roots(&mut self, mark: u32, roots: &mut [NanValue]) {
        if self.young_entries.len() <= mark as usize {
            return;
        }

        let mut relocated =
            Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
        let mut compacted = Vec::with_capacity(self.young_entries.len() - mark as usize);

        for root in roots {
            *root = self.relocate_young_root(*root, mark, &mut relocated, &mut compacted);
        }

        self.young_entries.truncate(mark as usize);
        self.young_entries.extend(compacted);
        Self::recycle_u32_scratch(&mut self.scratch_young, relocated);
    }

    pub fn truncate_yard_to(&mut self, mark: u32) {
        self.yard_entries.truncate(mark as usize);
    }

    pub fn truncate_handoff_to(&mut self, mark: u32) {
        self.handoff_entries.truncate(mark as usize);
    }

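    /// Evacuate a frame's live locals out of the young region above
    /// `young_mark`, copying young survivors into the yard. See
    /// `evacuate_frame_locals` for the shared implementation and the meaning
    /// of the returned flags.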
    pub fn evacuate_frame_to_yard(
        &mut self,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        roots: &mut [NanValue],
    ) -> (bool, bool) {
        self.evacuate_frame_locals(young_mark, yard_mark, handoff_mark, roots, AllocSpace::Yard)
    }

    pub fn evacuate_frame_to_handoff(
        &mut self,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        roots: &mut [NanValue],
    ) -> (bool, bool) {
        self.evacuate_frame_locals(
            young_mark,
            yard_mark,
            handoff_mark,
            roots,
            AllocSpace::Handoff,
        )
    }

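    /// Shared body of the frame-evacuation entry points: survivors above the
    /// three marks are copied into compacted yard/handoff blocks (young
    /// survivors go to `young_target`), the young region above `young_mark`
    /// is discarded, and the yard/handoff regions are rebuilt from the
    /// compacted blocks. Returns whether the yard and handoff regions still
    /// hold entries above their marks afterwards.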
    fn evacuate_frame_locals(
        &mut self,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        roots: &mut [NanValue],
        young_target: AllocSpace,
    ) -> (bool, bool) {
        let mut relocated_young = Self::take_u32_scratch(
            &mut self.scratch_young,
            self.young_entries.len().saturating_sub(young_mark as usize),
        );
        let mut relocated_yard = Self::take_u32_scratch(
            &mut self.scratch_yard,
            self.yard_entries.len().saturating_sub(yard_mark as usize),
        );
        let mut relocated_handoff = Self::take_u32_scratch(
            &mut self.scratch_handoff,
            self.handoff_entries
                .len()
                .saturating_sub(handoff_mark as usize),
        );
        let mut compacted_yard =
            Vec::with_capacity(self.yard_entries.len().saturating_sub(yard_mark as usize));
        let mut compacted_handoff = Vec::with_capacity(
            self.handoff_entries
                .len()
                .saturating_sub(handoff_mark as usize),
        );

        for root in roots {
            *root = self.evacuate_local_root(
                *root,
                young_mark,
                yard_mark,
                handoff_mark,
                young_target,
                &mut relocated_young,
                &mut relocated_yard,
                &mut relocated_handoff,
                &mut compacted_yard,
                &mut compacted_handoff,
            );
        }

        self.young_entries.truncate(young_mark as usize);
        self.yard_entries.truncate(yard_mark as usize);
        self.yard_entries.extend(compacted_yard);
        self.handoff_entries.truncate(handoff_mark as usize);
        self.handoff_entries.extend(compacted_handoff);
        self.note_peak_usage();
        Self::recycle_u32_scratch(&mut self.scratch_young, relocated_young);
        Self::recycle_u32_scratch(&mut self.scratch_yard, relocated_yard);
        Self::recycle_u32_scratch(&mut self.scratch_handoff, relocated_handoff);

        (
            self.yard_entries.len() > yard_mark as usize,
            self.handoff_entries.len() > handoff_mark as usize,
        )
    }

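    /// Reserve a slot in the compacted yard or handoff block and return the
    /// encoded heap index it will occupy together with its position in the
    /// compacted vector. The slot is filled with a placeholder so recursive
    /// evacuation can allocate before the final entry is ready.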
    fn allocate_local_target_slot(
        target: AllocSpace,
        yard_mark: u32,
        handoff_mark: u32,
        compacted_yard: &mut Vec<ArenaEntry<T>>,
        compacted_handoff: &mut Vec<ArenaEntry<T>>,
    ) -> (u32, u32) {
        match target {
            AllocSpace::Yard => {
                let pos = compacted_yard.len() as u32;
                let idx = Self::encode_yard_index(yard_mark + pos);
                compacted_yard.push(ArenaEntry::Int(0));
                (idx, pos)
            }
            AllocSpace::Handoff => {
                let pos = compacted_handoff.len() as u32;
                let idx = Self::encode_handoff_index(handoff_mark + pos);
                compacted_handoff.push(ArenaEntry::Int(0));
                (idx, pos)
            }
            AllocSpace::Young => unreachable!("local evacuation target must be yard or handoff"),
        }
    }

    fn store_local_target_entry(
        target: AllocSpace,
        compacted_pos: u32,
        entry: ArenaEntry<T>,
        compacted_yard: &mut [ArenaEntry<T>],
        compacted_handoff: &mut [ArenaEntry<T>],
    ) {
        match target {
            AllocSpace::Yard => compacted_yard[compacted_pos as usize] = entry,
            AllocSpace::Handoff => compacted_handoff[compacted_pos as usize] = entry,
            AllocSpace::Young => unreachable!(),
        }
    }

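    /// Apply `rewrite` to every child NanValue of an entry, rebuilding the
    /// entry with the rewritten values. Leaf entries (ints, strings, builtins,
    /// functions) pass through untouched; this is the single traversal shared
    /// by the relocation, evacuation, and promotion passes below.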
    #[inline(always)]
    fn rewrite_entry_with<F>(&mut self, entry: ArenaEntry<T>, rewrite: &mut F) -> ArenaEntry<T>
    where
        F: FnMut(&mut Arena<T>, NanValue) -> NanValue,
    {
        match entry {
            ArenaEntry::Int(i) => ArenaEntry::Int(i),
            ArenaEntry::String(s) => ArenaEntry::String(s),
            ArenaEntry::Builtin(name) => ArenaEntry::Builtin(name),
            ArenaEntry::Fn(f) => ArenaEntry::Fn(f),
            ArenaEntry::Boxed(inner) => ArenaEntry::Boxed(rewrite(self, inner)),
            ArenaEntry::List(list) => ArenaEntry::List(self.rewrite_list_with(list, rewrite)),
            ArenaEntry::Tuple(mut items) => {
                for value in &mut items {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Tuple(items)
            }
            ArenaEntry::Vector(mut items) => {
                for value in &mut items {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Vector(items)
            }
            ArenaEntry::Map(mut map) => {
                map.rewrite_values_mut(|pair| {
                    pair.0 = rewrite(self, pair.0);
                    pair.1 = rewrite(self, pair.1);
                });
                ArenaEntry::Map(map)
            }
            ArenaEntry::Record {
                type_id,
                mut fields,
            } => {
                for value in &mut fields {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Record { type_id, fields }
            }
            ArenaEntry::Variant {
                type_id,
                variant_id,
                mut fields,
            } => {
                for value in &mut fields {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Variant {
                    type_id,
                    variant_id,
                    fields,
                }
            }
            ArenaEntry::Namespace { name, mut members } => {
                for (_, value) in &mut members {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Namespace { name, members }
            }
        }
    }

    #[inline(always)]
    fn rewrite_list_with<F>(&mut self, list: ArenaList, rewrite: &mut F) -> ArenaList
    where
        F: FnMut(&mut Arena<T>, NanValue) -> NanValue,
    {
        match list {
            ArenaList::Flat { items, start } => ArenaList::Flat {
                items: Rc::new(
                    items[start..]
                        .iter()
                        .map(|value| rewrite(self, *value))
                        .collect(),
                ),
                start: 0,
            },
            ArenaList::Prepend { head, tail, len } => ArenaList::Prepend {
                head: rewrite(self, head),
                tail: rewrite(self, tail),
                len,
            },
            ArenaList::Concat { left, right, len } => ArenaList::Concat {
                left: rewrite(self, left),
                right: rewrite(self, right),
                len,
            },
            ArenaList::Segments {
                current,
                rest,
                start,
                len,
            } => ArenaList::Segments {
                current: rewrite(self, current),
                rest: Rc::new(
                    rest[start..]
                        .iter()
                        .map(|value| rewrite(self, *value))
                        .collect(),
                ),
                start: 0,
                len,
            },
        }
    }

    /// Flatten a deep list (Prepend/Concat chain) into a single Flat entry.
    pub fn flatten_deep_list(&mut self, value: NanValue) -> NanValue {
        const FLATTEN_THRESHOLD: usize = 64;

        if !value.is_list() || value.is_empty_list_immediate() {
            return value;
        }
        let len = self.list_len_value(value);
        if len <= FLATTEN_THRESHOLD {
            return value;
        }
        let elements = self.list_to_vec_value(value);
        let flat = ArenaList::Flat {
            items: Rc::new(elements),
            start: 0,
        };
        let index = self.push(ArenaEntry::List(flat));
        NanValue::new_list(index)
    }

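    /// Rewrite one root for a frame evacuation: if the value lives above the
    /// relevant mark in the young, yard, or handoff space it is evacuated
    /// (young survivors go to `young_target`); anything below the marks, in
    /// the stable space, or not heap-backed is returned unchanged.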
    #[allow(clippy::too_many_arguments)]
    fn evacuate_local_root(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry<T>>,
        compacted_handoff: &mut Vec<ArenaEntry<T>>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, _) = Self::decode_index(index);
        match space {
            HeapSpace::Young if self.is_young_index_in_region(index, young_mark) => self
                .evacuate_young_value(
                    value,
                    young_mark,
                    yard_mark,
                    handoff_mark,
                    young_target,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                    compacted_yard,
                    compacted_handoff,
                ),
            HeapSpace::Yard if self.is_yard_index_in_region(index, yard_mark) => self
                .evacuate_yard_value(
                    value,
                    young_mark,
                    yard_mark,
                    handoff_mark,
                    young_target,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                    compacted_yard,
                    compacted_handoff,
                ),
            HeapSpace::Handoff if self.is_handoff_index_in_region(index, handoff_mark) => self
                .evacuate_handoff_value(
                    value,
                    young_mark,
                    yard_mark,
                    handoff_mark,
                    young_target,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                    compacted_yard,
                    compacted_handoff,
                ),
            _ => value,
        }
    }

    #[allow(clippy::too_many_arguments)]
    fn evacuate_young_value(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry<T>>,
        compacted_handoff: &mut Vec<ArenaEntry<T>>,
    ) -> NanValue {
        let index = value.heap_index().expect("young value must be heap-backed");
        let (_, raw_index) = Self::decode_index(index);
        let relocation_slot = (raw_index - young_mark) as usize;
        let relocated_index = relocated_young[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        let (new_index, compacted_pos) = Self::allocate_local_target_slot(
            young_target,
            yard_mark,
            handoff_mark,
            compacted_yard,
            compacted_handoff,
        );
        relocated_young[relocation_slot] = new_index;

        let entry = core::mem::replace(
            &mut self.young_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.evacuate_local_entry(
            entry,
            young_mark,
            yard_mark,
            handoff_mark,
            young_target,
            relocated_young,
            relocated_yard,
            relocated_handoff,
            compacted_yard,
            compacted_handoff,
        );
        Self::store_local_target_entry(
            young_target,
            compacted_pos,
            new_entry,
            compacted_yard,
            compacted_handoff,
        );
        value.with_heap_index(new_index)
    }

    #[allow(clippy::too_many_arguments)]
    fn evacuate_yard_value(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry<T>>,
        compacted_handoff: &mut Vec<ArenaEntry<T>>,
    ) -> NanValue {
        let index = value.heap_index().expect("yard value must be heap-backed");
        let (_, raw_index) = Self::decode_index(index);
        let relocation_slot = (raw_index - yard_mark) as usize;
        let relocated_index = relocated_yard[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        let target = match young_target {
            AllocSpace::Yard => AllocSpace::Yard,
            AllocSpace::Handoff => AllocSpace::Handoff,
            AllocSpace::Young => unreachable!("local evacuation must target yard or handoff"),
        };
        let (new_index, compacted_pos) = Self::allocate_local_target_slot(
            target,
            yard_mark,
            handoff_mark,
            compacted_yard,
            compacted_handoff,
        );
        relocated_yard[relocation_slot] = new_index;

        let entry = core::mem::replace(
            &mut self.yard_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.evacuate_local_entry(
            entry,
            young_mark,
            yard_mark,
            handoff_mark,
            young_target,
            relocated_young,
            relocated_yard,
            relocated_handoff,
            compacted_yard,
            compacted_handoff,
        );
        Self::store_local_target_entry(
            target,
            compacted_pos,
            new_entry,
            compacted_yard,
            compacted_handoff,
        );
        value.with_heap_index(new_index)
    }

    #[allow(clippy::too_many_arguments)]
    fn evacuate_handoff_value(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry<T>>,
        compacted_handoff: &mut Vec<ArenaEntry<T>>,
    ) -> NanValue {
        let index = value
            .heap_index()
            .expect("handoff value must be heap-backed");
        let (_, raw_index) = Self::decode_index(index);
        let relocation_slot = (raw_index - handoff_mark) as usize;
        let relocated_index = relocated_handoff[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        let target = match young_target {
            AllocSpace::Yard => AllocSpace::Yard,
            AllocSpace::Handoff => AllocSpace::Handoff,
            AllocSpace::Young => unreachable!("local evacuation must target yard or handoff"),
        };
        let (new_index, compacted_pos) = Self::allocate_local_target_slot(
            target,
            yard_mark,
            handoff_mark,
            compacted_yard,
            compacted_handoff,
        );
        relocated_handoff[relocation_slot] = new_index;

        let entry = core::mem::replace(
            &mut self.handoff_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.evacuate_local_entry(
            entry,
            young_mark,
            yard_mark,
            handoff_mark,
            young_target,
            relocated_young,
            relocated_yard,
            relocated_handoff,
            compacted_yard,
            compacted_handoff,
        );
        Self::store_local_target_entry(
            target,
            compacted_pos,
            new_entry,
            compacted_yard,
            compacted_handoff,
        );
        value.with_heap_index(new_index)
    }

    #[allow(clippy::too_many_arguments)]
    fn evacuate_local_entry(
        &mut self,
        entry: ArenaEntry<T>,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry<T>>,
        compacted_handoff: &mut Vec<ArenaEntry<T>>,
    ) -> ArenaEntry<T> {
        let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
            arena.evacuate_local_root(
                value,
                young_mark,
                yard_mark,
                handoff_mark,
                young_target,
                relocated_young,
                relocated_yard,
                relocated_handoff,
                compacted_yard,
                compacted_handoff,
            )
        };
        self.rewrite_entry_with(entry, &mut rewrite)
    }

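    /// Rewrite one root for a young-space collection: values above `mark` in
    /// the young space are relocated into `compacted`, while entries below
    /// the mark (or in other spaces) are rewritten in place so their child
    /// references pick up the new indices.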
    fn relocate_young_root(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry<T>>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if matches!(space, HeapSpace::Young)
            && raw_index >= mark
            && raw_index < self.young_entries.len() as u32
        {
            return self.relocate_young_value(value, mark, relocated, compacted);
        }
        self.rewrite_young_refs_in_place(space, raw_index, mark, relocated, compacted);
        value
    }

    fn relocate_young_value(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry<T>>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if !matches!(space, HeapSpace::Young) || raw_index < mark {
            return value;
        }

        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        let compacted_pos = compacted.len() as u32;
        let new_index = Self::encode_index(HeapSpace::Young, mark + compacted_pos);
        relocated[relocation_slot] = new_index;
        compacted.push(ArenaEntry::Int(0));

        let entry = core::mem::replace(
            &mut self.young_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.relocate_young_entry(entry, mark, relocated, compacted);
        compacted[compacted_pos as usize] = new_entry;
        value.with_heap_index(new_index)
    }

    fn relocate_young_entry(
        &mut self,
        entry: ArenaEntry<T>,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry<T>>,
    ) -> ArenaEntry<T> {
        let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
            arena.relocate_young_value(value, mark, relocated, compacted)
        };
        self.rewrite_entry_with(entry, &mut rewrite)
    }

    fn rewrite_young_refs_in_place(
        &mut self,
        space: HeapSpace,
        raw_index: u32,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry<T>>,
    ) {
        let raw_index = raw_index as usize;
        match space {
            HeapSpace::Young => {
                if raw_index >= self.young_entries.len() || raw_index >= mark as usize {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.young_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.young_entries[raw_index] = new_entry;
            }
            HeapSpace::Yard => {
                if raw_index >= self.yard_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.yard_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.yard_entries[raw_index] = new_entry;
            }
            HeapSpace::Handoff => {
                if raw_index >= self.handoff_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.handoff_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.handoff_entries[raw_index] = new_entry;
            }
            HeapSpace::Stable => {
                if raw_index >= self.stable_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.stable_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.stable_entries[raw_index] = new_entry;
            }
        }
    }

    fn rewrite_young_entry(
        &mut self,
        entry: ArenaEntry<T>,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry<T>>,
    ) -> ArenaEntry<T> {
        let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
            arena.relocate_young_root(value, mark, relocated, compacted)
        };
        self.rewrite_entry_with(entry, &mut rewrite)
    }

    fn promote_region_root_to_target(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        target: AllocSpace,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if matches!(space, HeapSpace::Young)
            && raw_index >= mark
            && raw_index < self.young_entries.len() as u32
        {
            return self.promote_value_to_target(value, mark, relocated, target);
        }
        self.rewrite_promoted_young_refs_in_place(space, raw_index, mark, relocated, target);
        value
    }

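    /// Promote everything reachable from `roots` that lives in the young
    /// space at or above `mark` into the yard, then drop the young region
    /// above the mark. Roots are rewritten to the promoted indices.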
    pub fn promote_young_roots_to_yard(&mut self, mark: u32, roots: &mut [NanValue]) {
        if self.young_entries.len() <= mark as usize {
            return;
        }

        let mut relocated =
            Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());

        for root in roots {
            *root = self.promote_region_root_to_yard(*root, mark, &mut relocated);
        }

        self.young_entries.truncate(mark as usize);
        Self::recycle_u32_scratch(&mut self.scratch_young, relocated);
    }

    pub fn promote_young_roots_to_handoff(&mut self, mark: u32, roots: &mut [NanValue]) {
        if self.young_entries.len() <= mark as usize {
            return;
        }

        let mut relocated =
            Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());

        for root in roots {
            *root = self.promote_region_root_to_handoff(*root, mark, &mut relocated);
        }

        self.young_entries.truncate(mark as usize);
        Self::recycle_u32_scratch(&mut self.scratch_young, relocated);
    }

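    /// Copy everything reachable from `roots` in the young, yard, or handoff
    /// spaces into the stable space and rewrite the roots accordingly. Source
    /// entries are cloned rather than moved, so the source regions are left
    /// untouched.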
    pub fn promote_roots_to_stable(&mut self, roots: &mut [NanValue]) {
        let mut relocated_young =
            Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
        let mut relocated_yard =
            Self::take_u32_scratch(&mut self.scratch_yard, self.yard_entries.len());
        let mut relocated_handoff =
            Self::take_u32_scratch(&mut self.scratch_handoff, self.handoff_entries.len());

        for root in roots {
            *root = self.promote_value_to_stable(
                *root,
                &mut relocated_young,
                &mut relocated_yard,
                &mut relocated_handoff,
            );
        }
        Self::recycle_u32_scratch(&mut self.scratch_young, relocated_young);
        Self::recycle_u32_scratch(&mut self.scratch_yard, relocated_yard);
        Self::recycle_u32_scratch(&mut self.scratch_handoff, relocated_handoff);
    }

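    /// Compact the yard space above `mark`, mirroring `collect_young_from_roots`:
    /// reachable yard entries are rebuilt into a compacted block at `mark` and
    /// references in the other spaces are rewritten in place.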
    pub fn collect_yard_from_roots(&mut self, mark: u32, roots: &mut [NanValue]) {
        if self.yard_entries.len() <= mark as usize {
            return;
        }

        let mut relocated = Self::take_u32_scratch(&mut self.scratch_yard, self.yard_entries.len());
        let mut compacted = Vec::with_capacity(self.yard_entries.len() - mark as usize);

        for root in roots {
            *root = self.relocate_yard_root(*root, mark, &mut relocated, &mut compacted);
        }

        self.yard_entries.truncate(mark as usize);
        self.yard_entries.extend(compacted);
        Self::recycle_u32_scratch(&mut self.scratch_yard, relocated);
    }

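    /// Rebuild the entire stable space from `roots`: reachable stable entries
    /// are copied into a fresh vector in visit order and unreachable ones are
    /// dropped when the old vector is replaced.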
    pub fn collect_stable_from_roots(&mut self, roots: &mut [NanValue]) {
        if self.stable_entries.is_empty() {
            return;
        }

        let mut relocated =
            Self::take_u32_scratch(&mut self.scratch_stable, self.stable_entries.len());
        let mut compacted = Vec::with_capacity(self.stable_entries.len());

        for root in roots {
            *root = self.relocate_stable_root(*root, &mut relocated, &mut compacted);
        }

        self.stable_entries = compacted;
        Self::recycle_u32_scratch(&mut self.scratch_stable, relocated);
    }

    fn promote_region_root_to_yard(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
    ) -> NanValue {
        self.promote_region_root_to_target(value, mark, relocated, AllocSpace::Yard)
    }

    fn promote_region_root_to_handoff(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
    ) -> NanValue {
        self.promote_region_root_to_target(value, mark, relocated, AllocSpace::Handoff)
    }

    fn rewrite_promoted_young_refs_in_place(
        &mut self,
        space: HeapSpace,
        raw_index: u32,
        mark: u32,
        relocated: &mut [u32],
        target: AllocSpace,
    ) {
        let raw_index = raw_index as usize;
        match space {
            HeapSpace::Young => {
                if raw_index >= self.young_entries.len() || raw_index >= mark as usize {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.young_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.young_entries[raw_index] = new_entry;
            }
            HeapSpace::Yard => {
                if raw_index >= self.yard_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.yard_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.yard_entries[raw_index] = new_entry;
            }
            HeapSpace::Handoff => {
                if raw_index >= self.handoff_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.handoff_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.handoff_entries[raw_index] = new_entry;
            }
            HeapSpace::Stable => {
                if raw_index >= self.stable_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.stable_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.stable_entries[raw_index] = new_entry;
            }
        }
    }

    fn rewrite_promoted_young_entry(
        &mut self,
        entry: ArenaEntry<T>,
        mark: u32,
        relocated: &mut [u32],
        target: AllocSpace,
    ) -> ArenaEntry<T> {
        let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
            arena.promote_region_root_to_target(value, mark, relocated, target)
        };
        self.rewrite_entry_with(entry, &mut rewrite)
    }

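    /// Move a single young entry at or above `mark` into the yard or handoff
    /// space, recording the new index in `relocated` so later visits reuse it,
    /// and recursively promoting the entry's children.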
    fn promote_value_to_target(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        target: AllocSpace,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if !matches!(space, HeapSpace::Young) || raw_index < mark {
            return value;
        }

        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        let new_index = match target {
            AllocSpace::Yard => Self::encode_yard_index(self.yard_entries.len() as u32),
            AllocSpace::Handoff => Self::encode_handoff_index(self.handoff_entries.len() as u32),
            AllocSpace::Young => unreachable!("promotion target must be yard or handoff"),
        };
        relocated[relocation_slot] = new_index;
        match target {
            AllocSpace::Yard => self.yard_entries.push(ArenaEntry::Int(0)),
            AllocSpace::Handoff => self.handoff_entries.push(ArenaEntry::Int(0)),
            AllocSpace::Young => unreachable!(),
        }
        self.note_peak_usage();

        let entry = core::mem::replace(
            &mut self.young_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.promote_entry_to_target(entry, mark, relocated, target);
        match target {
            AllocSpace::Yard => {
                self.yard_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
            }
            AllocSpace::Handoff => {
                self.handoff_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
            }
            AllocSpace::Young => unreachable!(),
        }
        value.with_heap_index(new_index)
    }

    /// Check if a NanValue references young heap at or after `mark`.
    #[inline(always)]
    fn value_needs_young_promotion(value: NanValue, mark: u32) -> bool {
        if let Some(index) = value.heap_index() {
            let (space, raw_index) = Self::decode_index(index);
            matches!(space, HeapSpace::Young) && raw_index >= mark
        } else {
            false
        }
    }

    fn promote_entry_to_target(
        &mut self,
        entry: ArenaEntry<T>,
        mark: u32,
        relocated: &mut [u32],
        target: AllocSpace,
    ) -> ArenaEntry<T> {
        // Fast path for bulk-data types: if no NanValue in this entry points
        // to young >= mark, skip the rewrite — move the entry as-is.
        // Only check types where the scan is cheap relative to the rewrite cost.
        match &entry {
            ArenaEntry::Vector(items) | ArenaEntry::Tuple(items)
                if !items.is_empty()
                    && !items
                        .iter()
                        .any(|v| Self::value_needs_young_promotion(*v, mark)) =>
            {
                return entry;
            }
            ArenaEntry::Map(map)
                if !map.is_empty()
                    && !map.values().any(|(k, v)| {
                        Self::value_needs_young_promotion(*k, mark)
                            || Self::value_needs_young_promotion(*v, mark)
                    }) =>
            {
                return entry;
            }
            _ => {}
        }
        let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
            arena.promote_region_root_to_target(value, mark, relocated, target)
        };
        self.rewrite_entry_with(entry, &mut rewrite)
    }

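    /// Copy a value and its children into the stable space, deduplicating
    /// through the per-space relocation tables. Young, yard, and handoff
    /// sources are handled symmetrically; values already in the stable space
    /// are returned unchanged.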
    fn promote_value_to_stable(
        &mut self,
        value: NanValue,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        match space {
            HeapSpace::Young => {
                let relocation_slot = raw_index as usize;
                let relocated_index = relocated_young[relocation_slot];
                if relocated_index != u32::MAX {
                    return value.with_heap_index(relocated_index);
                }

                let new_index = Self::encode_stable_index(self.stable_entries.len() as u32);
                relocated_young[relocation_slot] = new_index;
                self.stable_entries.push(ArenaEntry::Int(0));
                self.note_peak_usage();

                let entry = self.young_entries[raw_index as usize].clone();
                let new_entry = self.promote_entry_to_stable(
                    entry,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                );
                self.stable_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
                value.with_heap_index(new_index)
            }
            HeapSpace::Yard => {
                let relocation_slot = raw_index as usize;
                let relocated_index = relocated_yard[relocation_slot];
                if relocated_index != u32::MAX {
                    return value.with_heap_index(relocated_index);
                }

                let new_index = Self::encode_stable_index(self.stable_entries.len() as u32);
                relocated_yard[relocation_slot] = new_index;
                self.stable_entries.push(ArenaEntry::Int(0));
                self.note_peak_usage();

                let entry = self.yard_entries[raw_index as usize].clone();
                let new_entry = self.promote_entry_to_stable(
                    entry,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                );
                self.stable_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
                value.with_heap_index(new_index)
            }
            HeapSpace::Handoff => {
                let relocation_slot = raw_index as usize;
                let relocated_index = relocated_handoff[relocation_slot];
                if relocated_index != u32::MAX {
                    return value.with_heap_index(relocated_index);
                }

                let new_index = Self::encode_stable_index(self.stable_entries.len() as u32);
                relocated_handoff[relocation_slot] = new_index;
                self.stable_entries.push(ArenaEntry::Int(0));
                self.note_peak_usage();

                let entry = self.handoff_entries[raw_index as usize].clone();
                let new_entry = self.promote_entry_to_stable(
                    entry,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                );
                self.stable_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
                value.with_heap_index(new_index)
            }
            HeapSpace::Stable => value,
        }
    }

    fn promote_entry_to_stable(
        &mut self,
        entry: ArenaEntry<T>,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
    ) -> ArenaEntry<T> {
        let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
            arena.promote_value_to_stable(value, relocated_young, relocated_yard, relocated_handoff)
        };
        self.rewrite_entry_with(entry, &mut rewrite)
    }

    fn relocate_yard_root(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry<T>>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if matches!(space, HeapSpace::Yard)
            && raw_index >= mark
            && raw_index < self.yard_entries.len() as u32
        {
            return self.relocate_yard_value(value, mark, relocated, compacted);
        }
        self.rewrite_yard_refs_in_place(space, raw_index, mark, relocated, compacted);
        value
    }

    fn relocate_yard_value(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry<T>>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if !matches!(space, HeapSpace::Yard) || raw_index < mark {
            return value;
        }

        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        let compacted_pos = compacted.len() as u32;
        let new_index = Self::encode_yard_index(mark + compacted_pos);
        relocated[relocation_slot] = new_index;
        compacted.push(ArenaEntry::Int(0));

        let entry = core::mem::replace(
            &mut self.yard_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.relocate_yard_entry(entry, mark, relocated, compacted);
        compacted[compacted_pos as usize] = new_entry;
        value.with_heap_index(new_index)
    }

    fn relocate_yard_entry(
        &mut self,
        entry: ArenaEntry<T>,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry<T>>,
    ) -> ArenaEntry<T> {
        let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
            arena.relocate_yard_value(value, mark, relocated, compacted)
        };
        self.rewrite_entry_with(entry, &mut rewrite)
    }

    fn rewrite_yard_refs_in_place(
        &mut self,
        space: HeapSpace,
        raw_index: u32,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry<T>>,
    ) {
        let raw_index = raw_index as usize;
        match space {
            HeapSpace::Young => {
                if raw_index >= self.young_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.young_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.young_entries[raw_index] = new_entry;
            }
            HeapSpace::Yard => {
                if raw_index >= self.yard_entries.len() || raw_index >= mark as usize {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.yard_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.yard_entries[raw_index] = new_entry;
            }
            HeapSpace::Handoff => {
                if raw_index >= self.handoff_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.handoff_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.handoff_entries[raw_index] = new_entry;
            }
            HeapSpace::Stable => {
                if raw_index >= self.stable_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.stable_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.stable_entries[raw_index] = new_entry;
            }
        }
    }

    fn rewrite_yard_entry(
        &mut self,
        entry: ArenaEntry<T>,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry<T>>,
    ) -> ArenaEntry<T> {
        let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
            arena.relocate_yard_root(value, mark, relocated, compacted)
        };
        self.rewrite_entry_with(entry, &mut rewrite)
    }

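    /// Rewrite one root for a stable-space collection: only stable-backed
    /// values are relocated into the compacted vector; values in the other
    /// spaces are returned unchanged.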
    fn relocate_stable_root(
        &mut self,
        value: NanValue,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry<T>>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        if !matches!(Self::decode_index(index).0, HeapSpace::Stable) {
            return value;
        }
        self.relocate_stable_value(value, relocated, compacted)
    }

    fn relocate_stable_value(
        &mut self,
        value: NanValue,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry<T>>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if !matches!(space, HeapSpace::Stable) {
            return value;
        }

        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        let new_index = Self::encode_stable_index(compacted.len() as u32);
        relocated[relocation_slot] = new_index;
        compacted.push(ArenaEntry::Int(0));

        let entry = core::mem::replace(
            &mut self.stable_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.relocate_stable_entry(entry, relocated, compacted);
        compacted[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
        value.with_heap_index(new_index)
    }

    fn relocate_stable_entry(
        &mut self,
        entry: ArenaEntry<T>,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry<T>>,
    ) -> ArenaEntry<T> {
        let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
            arena.relocate_stable_value(value, relocated, compacted)
        };
        self.rewrite_entry_with(entry, &mut rewrite)
    }
}