1use super::*;
2
3impl Arena {
4 pub fn truncate_to(&mut self, mark: u32) {
5 self.young_entries.truncate(mark as usize);
6 }
7
8 pub fn collect_young_from_roots(&mut self, mark: u32, roots: &mut [NanValue]) {
9 if self.young_entries.len() <= mark as usize {
10 return;
11 }
12
13 let mut relocated =
14 Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
15 let mut compacted = Vec::with_capacity(self.young_entries.len() - mark as usize);
16
17 for root in roots {
18 *root = self.relocate_young_root(*root, mark, &mut relocated, &mut compacted);
19 }
20
21 self.young_entries.truncate(mark as usize);
22 self.young_entries.extend(compacted);
23 Self::recycle_u32_scratch(&mut self.scratch_young, relocated);
24 }
25
26 pub fn truncate_yard_to(&mut self, mark: u32) {
27 self.yard_entries.truncate(mark as usize);
28 }
29
30 pub fn truncate_handoff_to(&mut self, mark: u32) {
31 self.handoff_entries.truncate(mark as usize);
32 }
33
34 pub fn evacuate_frame_to_yard(
35 &mut self,
36 young_mark: u32,
37 yard_mark: u32,
38 handoff_mark: u32,
39 roots: &mut [NanValue],
40 ) -> (bool, bool) {
41 self.evacuate_frame_locals(young_mark, yard_mark, handoff_mark, roots, AllocSpace::Yard)
42 }
43
44 pub fn evacuate_frame_to_handoff(
45 &mut self,
46 young_mark: u32,
47 yard_mark: u32,
48 handoff_mark: u32,
49 roots: &mut [NanValue],
50 ) -> (bool, bool) {
51 self.evacuate_frame_locals(
52 young_mark,
53 yard_mark,
54 handoff_mark,
55 roots,
56 AllocSpace::Handoff,
57 )
58 }
59
    /// Shared frame-evacuation routine: every value reachable from `roots`
    /// that lives above its space's mark is moved out of the collected
    /// regions, with young survivors landing in `young_target` (yard or
    /// handoff). Returns `(yard_grew, handoff_grew)` — whether the yard and
    /// handoff spaces still hold entries above their marks afterwards.
    fn evacuate_frame_locals(
        &mut self,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        roots: &mut [NanValue],
        young_target: AllocSpace,
    ) -> (bool, bool) {
        // Forwarding tables, one slot per entry above each space's mark.
        // Downstream code treats u32::MAX as "not yet moved"; presumably
        // take_u32_scratch fills with that sentinel — TODO confirm.
        let mut relocated_young = Self::take_u32_scratch(
            &mut self.scratch_young,
            self.young_entries.len().saturating_sub(young_mark as usize),
        );
        let mut relocated_yard = Self::take_u32_scratch(
            &mut self.scratch_yard,
            self.yard_entries.len().saturating_sub(yard_mark as usize),
        );
        let mut relocated_handoff = Self::take_u32_scratch(
            &mut self.scratch_handoff,
            self.handoff_entries
                .len()
                .saturating_sub(handoff_mark as usize),
        );
        // Survivors are staged here and appended after the truncation below.
        let mut compacted_yard =
            Vec::with_capacity(self.yard_entries.len().saturating_sub(yard_mark as usize));
        let mut compacted_handoff = Vec::with_capacity(
            self.handoff_entries
                .len()
                .saturating_sub(handoff_mark as usize),
        );

        for root in roots {
            *root = self.evacuate_local_root(
                *root,
                young_mark,
                yard_mark,
                handoff_mark,
                young_target,
                &mut relocated_young,
                &mut relocated_yard,
                &mut relocated_handoff,
                &mut compacted_yard,
                &mut compacted_handoff,
            );
        }

        // Rewind each space to its mark; yard/handoff then regain their
        // survivors (young survivors were staged into one of those buffers,
        // so the young space is simply truncated).
        self.young_entries.truncate(young_mark as usize);
        self.yard_entries.truncate(yard_mark as usize);
        self.yard_entries.extend(compacted_yard);
        self.handoff_entries.truncate(handoff_mark as usize);
        self.handoff_entries.extend(compacted_handoff);
        self.note_peak_usage();
        Self::recycle_u32_scratch(&mut self.scratch_young, relocated_young);
        Self::recycle_u32_scratch(&mut self.scratch_yard, relocated_yard);
        Self::recycle_u32_scratch(&mut self.scratch_handoff, relocated_handoff);

        (
            self.yard_entries.len() > yard_mark as usize,
            self.handoff_entries.len() > handoff_mark as usize,
        )
    }
120
121 fn allocate_local_target_slot(
122 target: AllocSpace,
123 yard_mark: u32,
124 handoff_mark: u32,
125 compacted_yard: &mut Vec<ArenaEntry>,
126 compacted_handoff: &mut Vec<ArenaEntry>,
127 ) -> (u32, u32) {
128 match target {
129 AllocSpace::Yard => {
130 let pos = compacted_yard.len() as u32;
131 let idx = Self::encode_yard_index(yard_mark + pos);
132 compacted_yard.push(ArenaEntry::Int(0));
133 (idx, pos)
134 }
135 AllocSpace::Handoff => {
136 let pos = compacted_handoff.len() as u32;
137 let idx = Self::encode_handoff_index(handoff_mark + pos);
138 compacted_handoff.push(ArenaEntry::Int(0));
139 (idx, pos)
140 }
141 AllocSpace::Young => unreachable!("local evacuation target must be yard or handoff"),
142 }
143 }
144
145 fn store_local_target_entry(
146 target: AllocSpace,
147 compacted_pos: u32,
148 entry: ArenaEntry,
149 compacted_yard: &mut [ArenaEntry],
150 compacted_handoff: &mut [ArenaEntry],
151 ) {
152 match target {
153 AllocSpace::Yard => compacted_yard[compacted_pos as usize] = entry,
154 AllocSpace::Handoff => compacted_handoff[compacted_pos as usize] = entry,
155 AllocSpace::Young => unreachable!(),
156 }
157 }
158
    /// Structural map: rebuild `entry` with `rewrite` applied to every
    /// `NanValue` it directly contains. Leaf entries (ints, strings,
    /// builtins, fns) pass through unchanged. `rewrite` receives
    /// `&mut Arena` so it can recurse into the heap while this entry is
    /// held outside it.
    #[inline(always)]
    fn rewrite_entry_with<F>(&mut self, entry: ArenaEntry, rewrite: &mut F) -> ArenaEntry
    where
        F: FnMut(&mut Arena, NanValue) -> NanValue,
    {
        match entry {
            ArenaEntry::Int(i) => ArenaEntry::Int(i),
            ArenaEntry::String(s) => ArenaEntry::String(s),
            ArenaEntry::Builtin(name) => ArenaEntry::Builtin(name),
            ArenaEntry::Fn(f) => ArenaEntry::Fn(f),
            ArenaEntry::Boxed(inner) => ArenaEntry::Boxed(rewrite(self, inner)),
            ArenaEntry::List(list) => ArenaEntry::List(self.rewrite_list_with(list, rewrite)),
            ArenaEntry::Tuple(mut items) => {
                for value in &mut items {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Tuple(items)
            }
            ArenaEntry::Map(map) => {
                // Entries keep their precomputed hash; only the key/value
                // payloads are rewritten.
                let mut out = PersistentMap::new();
                for (hash, (key, value)) in map {
                    out.insert(hash, (rewrite(self, key), rewrite(self, value)));
                }
                ArenaEntry::Map(out)
            }
            ArenaEntry::Record {
                type_id,
                mut fields,
            } => {
                for value in &mut fields {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Record { type_id, fields }
            }
            ArenaEntry::Variant {
                type_id,
                variant_id,
                mut fields,
            } => {
                for value in &mut fields {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Variant {
                    type_id,
                    variant_id,
                    fields,
                }
            }
            ArenaEntry::Namespace { name, mut members } => {
                for (_, value) in &mut members {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Namespace { name, members }
            }
        }
    }
215
    /// List counterpart of `rewrite_entry_with`. `Flat` and `Segments`
    /// materialize only the live `[start..]` suffix into a fresh backing
    /// `Rc` and reset `start` to 0, so dead prefix slots are dropped as a
    /// side effect of the rewrite.
    #[inline(always)]
    fn rewrite_list_with<F>(&mut self, list: ArenaList, rewrite: &mut F) -> ArenaList
    where
        F: FnMut(&mut Arena, NanValue) -> NanValue,
    {
        match list {
            ArenaList::Flat { items, start } => ArenaList::Flat {
                items: Rc::new(
                    items[start..]
                        .iter()
                        .map(|value| rewrite(self, *value))
                        .collect(),
                ),
                start: 0,
            },
            ArenaList::Prepend { head, tail, len } => ArenaList::Prepend {
                head: rewrite(self, head),
                tail: rewrite(self, tail),
                len,
            },
            ArenaList::Concat { left, right, len } => ArenaList::Concat {
                left: rewrite(self, left),
                right: rewrite(self, right),
                len,
            },
            ArenaList::Segments {
                current,
                rest,
                start,
                len,
            } => ArenaList::Segments {
                current: rewrite(self, current),
                rest: Rc::new(
                    rest[start..]
                        .iter()
                        .map(|value| rewrite(self, *value))
                        .collect(),
                ),
                start: 0,
                len,
            },
        }
    }
259
    /// Dispatch one root value during frame evacuation. Only values whose
    /// index lies inside its space's collected region (as decided by the
    /// `is_*_index_in_region` predicates) are moved; everything else —
    /// non-heap values, stable values, and entries below their marks —
    /// passes through untouched.
    #[allow(clippy::too_many_arguments)]
    fn evacuate_local_root(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry>,
        compacted_handoff: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, _) = Self::decode_index(index);
        match space {
            HeapSpace::Young if self.is_young_index_in_region(index, young_mark) => self
                .evacuate_young_value(
                    value,
                    young_mark,
                    yard_mark,
                    handoff_mark,
                    young_target,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                    compacted_yard,
                    compacted_handoff,
                ),
            HeapSpace::Yard if self.is_yard_index_in_region(index, yard_mark) => self
                .evacuate_yard_value(
                    value,
                    young_mark,
                    yard_mark,
                    handoff_mark,
                    young_target,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                    compacted_yard,
                    compacted_handoff,
                ),
            HeapSpace::Handoff if self.is_handoff_index_in_region(index, handoff_mark) => self
                .evacuate_handoff_value(
                    value,
                    young_mark,
                    yard_mark,
                    handoff_mark,
                    young_target,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                    compacted_yard,
                    compacted_handoff,
                ),
            // Stable values and anything outside the collected regions stay
            // where they are.
            _ => value,
        }
    }
324
    /// Move one young entry (above `young_mark`) into the frame's target
    /// space, memoizing the move in `relocated_young` so shared and cyclic
    /// structures are copied exactly once.
    #[allow(clippy::too_many_arguments)]
    fn evacuate_young_value(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry>,
        compacted_handoff: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let index = value.heap_index().expect("young value must be heap-backed");
        let (_, raw_index) = Self::decode_index(index);
        // The forwarding table is indexed relative to the mark; u32::MAX
        // means "not moved yet".
        let relocation_slot = (raw_index - young_mark) as usize;
        let relocated_index = relocated_young[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // Reserve the destination slot and record the forwarding index
        // *before* recursing, so cycles resolve to this entry's new home
        // instead of looping.
        let (new_index, compacted_pos) = Self::allocate_local_target_slot(
            young_target,
            yard_mark,
            handoff_mark,
            compacted_yard,
            compacted_handoff,
        );
        relocated_young[relocation_slot] = new_index;

        // Swap the entry out (placeholder left behind) so the recursive
        // rewrite below can borrow the arena mutably.
        let entry = std::mem::replace(
            &mut self.young_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.evacuate_local_entry(
            entry,
            young_mark,
            yard_mark,
            handoff_mark,
            young_target,
            relocated_young,
            relocated_yard,
            relocated_handoff,
            compacted_yard,
            compacted_handoff,
        );
        Self::store_local_target_entry(
            young_target,
            compacted_pos,
            new_entry,
            compacted_yard,
            compacted_handoff,
        );
        value.with_heap_index(new_index)
    }
381
    /// Move one yard entry (above `yard_mark`) into the frame's target
    /// space, memoizing the move in `relocated_yard`. Note the destination
    /// follows `young_target`, so live yard entries can migrate into the
    /// handoff when the frame targets it.
    #[allow(clippy::too_many_arguments)]
    fn evacuate_yard_value(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry>,
        compacted_handoff: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let index = value.heap_index().expect("yard value must be heap-backed");
        let (_, raw_index) = Self::decode_index(index);
        // Mark-relative forwarding slot; u32::MAX means "not moved yet".
        let relocation_slot = (raw_index - yard_mark) as usize;
        let relocated_index = relocated_yard[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // Identity mapping that doubles as an assertion that the frame
        // target is never the young space.
        let target = match young_target {
            AllocSpace::Yard => AllocSpace::Yard,
            AllocSpace::Handoff => AllocSpace::Handoff,
            AllocSpace::Young => unreachable!("local evacuation must target yard or handoff"),
        };
        // Reserve the slot and publish the forwarding index before recursing
        // so cycles terminate.
        let (new_index, compacted_pos) = Self::allocate_local_target_slot(
            target,
            yard_mark,
            handoff_mark,
            compacted_yard,
            compacted_handoff,
        );
        relocated_yard[relocation_slot] = new_index;

        // Placeholder swap lets the recursive rewrite borrow the arena.
        let entry = std::mem::replace(
            &mut self.yard_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.evacuate_local_entry(
            entry,
            young_mark,
            yard_mark,
            handoff_mark,
            young_target,
            relocated_young,
            relocated_yard,
            relocated_handoff,
            compacted_yard,
            compacted_handoff,
        );
        Self::store_local_target_entry(
            target,
            compacted_pos,
            new_entry,
            compacted_yard,
            compacted_handoff,
        );
        value.with_heap_index(new_index)
    }
443
    /// Move one handoff entry (above `handoff_mark`) into the frame's target
    /// space, memoizing the move in `relocated_handoff`. As with the yard
    /// path, the destination follows `young_target`, so a handoff entry can
    /// migrate into the yard when the frame targets it.
    #[allow(clippy::too_many_arguments)]
    fn evacuate_handoff_value(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry>,
        compacted_handoff: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let index = value
            .heap_index()
            .expect("handoff value must be heap-backed");
        let (_, raw_index) = Self::decode_index(index);
        // Mark-relative forwarding slot; u32::MAX means "not moved yet".
        let relocation_slot = (raw_index - handoff_mark) as usize;
        let relocated_index = relocated_handoff[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // Identity mapping that doubles as an assertion that the frame
        // target is never the young space.
        let target = match young_target {
            AllocSpace::Yard => AllocSpace::Yard,
            AllocSpace::Handoff => AllocSpace::Handoff,
            AllocSpace::Young => unreachable!("local evacuation must target yard or handoff"),
        };
        // Reserve the slot and publish the forwarding index before recursing
        // so cycles terminate.
        let (new_index, compacted_pos) = Self::allocate_local_target_slot(
            target,
            yard_mark,
            handoff_mark,
            compacted_yard,
            compacted_handoff,
        );
        relocated_handoff[relocation_slot] = new_index;

        // Placeholder swap lets the recursive rewrite borrow the arena.
        let entry = std::mem::replace(
            &mut self.handoff_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.evacuate_local_entry(
            entry,
            young_mark,
            yard_mark,
            handoff_mark,
            young_target,
            relocated_young,
            relocated_yard,
            relocated_handoff,
            compacted_yard,
            compacted_handoff,
        );
        Self::store_local_target_entry(
            target,
            compacted_pos,
            new_entry,
            compacted_yard,
            compacted_handoff,
        );
        value.with_heap_index(new_index)
    }
507
    /// Rewrite every reference inside `entry` by recursively evacuating each
    /// child value through `evacuate_local_root`.
    #[allow(clippy::too_many_arguments)]
    fn evacuate_local_entry(
        &mut self,
        entry: ArenaEntry,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry>,
        compacted_handoff: &mut Vec<ArenaEntry>,
    ) -> ArenaEntry {
        let mut rewrite = |arena: &mut Arena, value: NanValue| {
            arena.evacuate_local_root(
                value,
                young_mark,
                yard_mark,
                handoff_mark,
                young_target,
                relocated_young,
                relocated_yard,
                relocated_handoff,
                compacted_yard,
                compacted_handoff,
            )
        };
        self.rewrite_entry_with(entry, &mut rewrite)
    }
538
    /// Young-GC root handler: values inside the collected region
    /// (`mark..young_len`) are relocated into `compacted`; any other heap
    /// value stays put but has its *interior* young references rewritten in
    /// place.
    fn relocate_young_root(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if matches!(space, HeapSpace::Young)
            && raw_index >= mark
            && raw_index < self.young_entries.len() as u32
        {
            return self.relocate_young_value(value, mark, relocated, compacted);
        }
        // Not in the collected region: the value itself is stationary, but
        // it may still point into the region, so chase its children.
        self.rewrite_young_refs_in_place(space, raw_index, mark, relocated, compacted);
        value
    }
559
    /// Relocate one young entry (above `mark`) into `compacted`, memoizing
    /// the move in `relocated`. Unlike the frame-evacuation path, the
    /// forwarding table here is indexed by the *absolute* young index
    /// (scratch is sized to the full young length); u32::MAX = not yet
    /// moved. Non-young or below-mark values pass through unchanged.
    fn relocate_young_value(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if !matches!(space, HeapSpace::Young) || raw_index < mark {
            return value;
        }

        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // Reserve the slot and publish the forwarding index before
        // recursing, so cycles resolve to this entry's new position.
        let compacted_pos = compacted.len() as u32;
        let new_index = Self::encode_index(HeapSpace::Young, mark + compacted_pos);
        relocated[relocation_slot] = new_index;
        compacted.push(ArenaEntry::Int(0));

        // Placeholder swap lets the recursive rewrite borrow the arena.
        let entry = std::mem::replace(
            &mut self.young_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.relocate_young_entry(entry, mark, relocated, compacted);
        compacted[compacted_pos as usize] = new_entry;
        value.with_heap_index(new_index)
    }
594
595 fn relocate_young_entry(
596 &mut self,
597 entry: ArenaEntry,
598 mark: u32,
599 relocated: &mut [u32],
600 compacted: &mut Vec<ArenaEntry>,
601 ) -> ArenaEntry {
602 let mut rewrite = |arena: &mut Arena, value: NanValue| {
603 arena.relocate_young_value(value, mark, relocated, compacted)
604 };
605 self.rewrite_entry_with(entry, &mut rewrite)
606 }
607
    /// Rewrite, in place, any young-region references held by the entry at
    /// (`space`, `raw_index`) without moving the entry itself. Out-of-range
    /// indices are ignored; young entries at or above `mark` are skipped
    /// because those are handled by relocation, not in-place rewriting.
    ///
    /// NOTE(review): there is no visited set for stationary entries, so an
    /// entry shared by several roots may be re-walked once per incoming
    /// reference (idempotent via the forwarding table, but potentially
    /// repeated work) — confirm this is acceptable. Cycles still terminate
    /// because the placeholder swap below empties the slot during recursion.
    fn rewrite_young_refs_in_place(
        &mut self,
        space: HeapSpace,
        raw_index: u32,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) {
        let raw_index = raw_index as usize;
        match space {
            HeapSpace::Young => {
                if raw_index >= self.young_entries.len() || raw_index >= mark as usize {
                    return;
                }
                // Swap a placeholder in so the recursive rewrite can borrow
                // the arena mutably, then put the rewritten entry back.
                let entry =
                    std::mem::replace(&mut self.young_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.young_entries[raw_index] = new_entry;
            }
            HeapSpace::Yard => {
                if raw_index >= self.yard_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.yard_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.yard_entries[raw_index] = new_entry;
            }
            HeapSpace::Handoff => {
                if raw_index >= self.handoff_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.handoff_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.handoff_entries[raw_index] = new_entry;
            }
            HeapSpace::Stable => {
                if raw_index >= self.stable_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.stable_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.stable_entries[raw_index] = new_entry;
            }
        }
    }
656
657 fn rewrite_young_entry(
658 &mut self,
659 entry: ArenaEntry,
660 mark: u32,
661 relocated: &mut [u32],
662 compacted: &mut Vec<ArenaEntry>,
663 ) -> ArenaEntry {
664 let mut rewrite = |arena: &mut Arena, value: NanValue| {
665 arena.relocate_young_root(value, mark, relocated, compacted)
666 };
667 self.rewrite_entry_with(entry, &mut rewrite)
668 }
669
    /// Promotion counterpart of `relocate_young_root`: young values above
    /// `mark` are promoted into `target` (yard or handoff); everything else
    /// stays put but has its interior young references rewritten in place.
    fn promote_region_root_to_target(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        target: AllocSpace,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if matches!(space, HeapSpace::Young)
            && raw_index >= mark
            && raw_index < self.young_entries.len() as u32
        {
            return self.promote_value_to_target(value, mark, relocated, target);
        }
        // Stationary value: chase its children for in-region references.
        self.rewrite_promoted_young_refs_in_place(space, raw_index, mark, relocated, target);
        value
    }
690
691 pub fn promote_young_roots_to_yard(&mut self, mark: u32, roots: &mut [NanValue]) {
692 if self.young_entries.len() <= mark as usize {
693 return;
694 }
695
696 let mut relocated =
697 Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
698
699 for root in roots {
700 *root = self.promote_region_root_to_yard(*root, mark, &mut relocated);
701 }
702
703 self.young_entries.truncate(mark as usize);
704 Self::recycle_u32_scratch(&mut self.scratch_young, relocated);
705 }
706
707 pub fn promote_young_roots_to_handoff(&mut self, mark: u32, roots: &mut [NanValue]) {
708 if self.young_entries.len() <= mark as usize {
709 return;
710 }
711
712 let mut relocated =
713 Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
714
715 for root in roots {
716 *root = self.promote_region_root_to_handoff(*root, mark, &mut relocated);
717 }
718
719 self.young_entries.truncate(mark as usize);
720 Self::recycle_u32_scratch(&mut self.scratch_young, relocated);
721 }
722
723 pub fn promote_roots_to_stable(&mut self, roots: &mut [NanValue]) {
724 let mut relocated_young =
725 Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
726 let mut relocated_yard =
727 Self::take_u32_scratch(&mut self.scratch_yard, self.yard_entries.len());
728 let mut relocated_handoff =
729 Self::take_u32_scratch(&mut self.scratch_handoff, self.handoff_entries.len());
730
731 for root in roots {
732 *root = self.promote_value_to_stable(
733 *root,
734 &mut relocated_young,
735 &mut relocated_yard,
736 &mut relocated_handoff,
737 );
738 }
739 Self::recycle_u32_scratch(&mut self.scratch_young, relocated_young);
740 Self::recycle_u32_scratch(&mut self.scratch_yard, relocated_yard);
741 Self::recycle_u32_scratch(&mut self.scratch_handoff, relocated_handoff);
742 }
743
744 pub fn collect_yard_from_roots(&mut self, mark: u32, roots: &mut [NanValue]) {
745 if self.yard_entries.len() <= mark as usize {
746 return;
747 }
748
749 let mut relocated = Self::take_u32_scratch(&mut self.scratch_yard, self.yard_entries.len());
750 let mut compacted = Vec::with_capacity(self.yard_entries.len() - mark as usize);
751
752 for root in roots {
753 *root = self.relocate_yard_root(*root, mark, &mut relocated, &mut compacted);
754 }
755
756 self.yard_entries.truncate(mark as usize);
757 self.yard_entries.extend(compacted);
758 Self::recycle_u32_scratch(&mut self.scratch_yard, relocated);
759 }
760
761 pub fn collect_stable_from_roots(&mut self, roots: &mut [NanValue]) {
762 if self.stable_entries.is_empty() {
763 return;
764 }
765
766 let mut relocated =
767 Self::take_u32_scratch(&mut self.scratch_stable, self.stable_entries.len());
768 let mut compacted = Vec::with_capacity(self.stable_entries.len());
769
770 for root in roots {
771 *root = self.relocate_stable_root(*root, &mut relocated, &mut compacted);
772 }
773
774 self.stable_entries = compacted;
775 Self::recycle_u32_scratch(&mut self.scratch_stable, relocated);
776 }
777
778 fn promote_region_root_to_yard(
779 &mut self,
780 value: NanValue,
781 mark: u32,
782 relocated: &mut [u32],
783 ) -> NanValue {
784 self.promote_region_root_to_target(value, mark, relocated, AllocSpace::Yard)
785 }
786
787 fn promote_region_root_to_handoff(
788 &mut self,
789 value: NanValue,
790 mark: u32,
791 relocated: &mut [u32],
792 ) -> NanValue {
793 self.promote_region_root_to_target(value, mark, relocated, AllocSpace::Handoff)
794 }
795
    /// Promotion counterpart of `rewrite_young_refs_in_place`: rewrite, in
    /// place, any promotable young references held by the entry at
    /// (`space`, `raw_index`). Out-of-range indices are ignored; young
    /// entries at or above `mark` are skipped because those are promoted,
    /// not rewritten in place.
    fn rewrite_promoted_young_refs_in_place(
        &mut self,
        space: HeapSpace,
        raw_index: u32,
        mark: u32,
        relocated: &mut [u32],
        target: AllocSpace,
    ) {
        let raw_index = raw_index as usize;
        match space {
            HeapSpace::Young => {
                if raw_index >= self.young_entries.len() || raw_index >= mark as usize {
                    return;
                }
                // Swap a placeholder in so the recursive rewrite can borrow
                // the arena mutably, then put the rewritten entry back.
                let entry =
                    std::mem::replace(&mut self.young_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.young_entries[raw_index] = new_entry;
            }
            HeapSpace::Yard => {
                if raw_index >= self.yard_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.yard_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.yard_entries[raw_index] = new_entry;
            }
            HeapSpace::Handoff => {
                if raw_index >= self.handoff_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.handoff_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.handoff_entries[raw_index] = new_entry;
            }
            HeapSpace::Stable => {
                if raw_index >= self.stable_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.stable_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.stable_entries[raw_index] = new_entry;
            }
        }
    }
844
845 fn rewrite_promoted_young_entry(
846 &mut self,
847 entry: ArenaEntry,
848 mark: u32,
849 relocated: &mut [u32],
850 target: AllocSpace,
851 ) -> ArenaEntry {
852 let mut rewrite = |arena: &mut Arena, value: NanValue| {
853 arena.promote_region_root_to_target(value, mark, relocated, target)
854 };
855 self.rewrite_entry_with(entry, &mut rewrite)
856 }
857
    /// Promote one young entry (above `mark`) to the end of `target`'s
    /// backing vector, memoizing the move in `relocated` (indexed by
    /// absolute young index; u32::MAX = not yet moved). Non-young or
    /// below-mark values pass through unchanged.
    fn promote_value_to_target(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        target: AllocSpace,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if !matches!(space, HeapSpace::Young) || raw_index < mark {
            return value;
        }

        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // Reserve the destination slot and publish the forwarding index
        // before recursing, so cycles resolve to this entry's new home.
        let new_index = match target {
            AllocSpace::Yard => Self::encode_yard_index(self.yard_entries.len() as u32),
            AllocSpace::Handoff => Self::encode_handoff_index(self.handoff_entries.len() as u32),
            AllocSpace::Young => unreachable!("promotion target must be yard or handoff"),
        };
        relocated[relocation_slot] = new_index;
        match target {
            AllocSpace::Yard => self.yard_entries.push(ArenaEntry::Int(0)),
            AllocSpace::Handoff => self.handoff_entries.push(ArenaEntry::Int(0)),
            AllocSpace::Young => unreachable!(),
        }
        self.note_peak_usage();

        // Placeholder swap lets the recursive rewrite borrow the arena.
        let entry = std::mem::replace(
            &mut self.young_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.promote_entry_to_target(entry, mark, relocated, target);
        // Masking off the space-tag bits recovers the raw position within
        // the destination vector (mirrors `decode_index` — TODO confirm).
        match target {
            AllocSpace::Yard => {
                self.yard_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
            }
            AllocSpace::Handoff => {
                self.handoff_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
            }
            AllocSpace::Young => unreachable!(),
        }
        value.with_heap_index(new_index)
    }
908
909 fn promote_entry_to_target(
910 &mut self,
911 entry: ArenaEntry,
912 mark: u32,
913 relocated: &mut [u32],
914 target: AllocSpace,
915 ) -> ArenaEntry {
916 let mut rewrite = |arena: &mut Arena, value: NanValue| {
917 arena.promote_region_root_to_target(value, mark, relocated, target)
918 };
919 self.rewrite_entry_with(entry, &mut rewrite)
920 }
921
    /// Copy one reachable entry (young, yard, or handoff) into the stable
    /// space, memoizing moves per source space so shared structures are
    /// copied exactly once. Already-stable values pass through unchanged.
    ///
    /// NOTE(review): unlike the relocation/evacuation paths, the source
    /// entry is `clone()`d rather than swapped out, leaving the original in
    /// place — presumably so the source spaces stay readable until the
    /// caller reclaims them; confirm. Cycles still terminate because the
    /// forwarding index is published before the recursion.
    fn promote_value_to_stable(
        &mut self,
        value: NanValue,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        match space {
            HeapSpace::Young => {
                let relocation_slot = raw_index as usize;
                let relocated_index = relocated_young[relocation_slot];
                if relocated_index != u32::MAX {
                    return value.with_heap_index(relocated_index);
                }

                // Reserve a stable slot and publish the forwarding index
                // before recursing into the entry's children.
                let new_index = Self::encode_stable_index(self.stable_entries.len() as u32);
                relocated_young[relocation_slot] = new_index;
                self.stable_entries.push(ArenaEntry::Int(0));
                self.note_peak_usage();

                let entry = self.young_entries[raw_index as usize].clone();
                let new_entry = self.promote_entry_to_stable(
                    entry,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                );
                self.stable_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
                value.with_heap_index(new_index)
            }
            HeapSpace::Yard => {
                let relocation_slot = raw_index as usize;
                let relocated_index = relocated_yard[relocation_slot];
                if relocated_index != u32::MAX {
                    return value.with_heap_index(relocated_index);
                }

                let new_index = Self::encode_stable_index(self.stable_entries.len() as u32);
                relocated_yard[relocation_slot] = new_index;
                self.stable_entries.push(ArenaEntry::Int(0));
                self.note_peak_usage();

                let entry = self.yard_entries[raw_index as usize].clone();
                let new_entry = self.promote_entry_to_stable(
                    entry,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                );
                self.stable_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
                value.with_heap_index(new_index)
            }
            HeapSpace::Handoff => {
                let relocation_slot = raw_index as usize;
                let relocated_index = relocated_handoff[relocation_slot];
                if relocated_index != u32::MAX {
                    return value.with_heap_index(relocated_index);
                }

                let new_index = Self::encode_stable_index(self.stable_entries.len() as u32);
                relocated_handoff[relocation_slot] = new_index;
                self.stable_entries.push(ArenaEntry::Int(0));
                self.note_peak_usage();

                let entry = self.handoff_entries[raw_index as usize].clone();
                let new_entry = self.promote_entry_to_stable(
                    entry,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                );
                self.stable_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
                value.with_heap_index(new_index)
            }
            HeapSpace::Stable => value,
        }
    }
1003
1004 fn promote_entry_to_stable(
1005 &mut self,
1006 entry: ArenaEntry,
1007 relocated_young: &mut [u32],
1008 relocated_yard: &mut [u32],
1009 relocated_handoff: &mut [u32],
1010 ) -> ArenaEntry {
1011 let mut rewrite = |arena: &mut Arena, value: NanValue| {
1012 arena.promote_value_to_stable(value, relocated_young, relocated_yard, relocated_handoff)
1013 };
1014 self.rewrite_entry_with(entry, &mut rewrite)
1015 }
1016
    /// Yard-GC root handler: values inside the collected region
    /// (`mark..yard_len`) are relocated into `compacted`; any other heap
    /// value stays put but has its interior yard references rewritten in
    /// place.
    fn relocate_yard_root(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if matches!(space, HeapSpace::Yard)
            && raw_index >= mark
            && raw_index < self.yard_entries.len() as u32
        {
            return self.relocate_yard_value(value, mark, relocated, compacted);
        }
        // Stationary value: chase its children for in-region references.
        self.rewrite_yard_refs_in_place(space, raw_index, mark, relocated, compacted);
        value
    }
1037
    /// Relocate one yard entry (above `mark`) into `compacted`, memoizing
    /// the move in `relocated` (indexed by absolute yard index; u32::MAX =
    /// not yet moved). Non-yard or below-mark values pass through unchanged.
    fn relocate_yard_value(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if !matches!(space, HeapSpace::Yard) || raw_index < mark {
            return value;
        }

        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // Reserve the slot and publish the forwarding index before
        // recursing, so cycles resolve to this entry's new position.
        let compacted_pos = compacted.len() as u32;
        let new_index = Self::encode_yard_index(mark + compacted_pos);
        relocated[relocation_slot] = new_index;
        compacted.push(ArenaEntry::Int(0));

        // Placeholder swap lets the recursive rewrite borrow the arena.
        let entry = std::mem::replace(
            &mut self.yard_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.relocate_yard_entry(entry, mark, relocated, compacted);
        compacted[compacted_pos as usize] = new_entry;
        value.with_heap_index(new_index)
    }
1072
1073 fn relocate_yard_entry(
1074 &mut self,
1075 entry: ArenaEntry,
1076 mark: u32,
1077 relocated: &mut [u32],
1078 compacted: &mut Vec<ArenaEntry>,
1079 ) -> ArenaEntry {
1080 let mut rewrite = |arena: &mut Arena, value: NanValue| {
1081 arena.relocate_yard_value(value, mark, relocated, compacted)
1082 };
1083 self.rewrite_entry_with(entry, &mut rewrite)
1084 }
1085
    /// Rewrite, in place, any yard-region references held by the entry at
    /// (`space`, `raw_index`) without moving the entry itself. Out-of-range
    /// indices are ignored; yard entries at or above `mark` are skipped
    /// because those are handled by relocation. Unlike the young variant,
    /// *all* in-bounds young entries are rewritten here (no mark guard on
    /// the young arm — the mark applies to the yard).
    fn rewrite_yard_refs_in_place(
        &mut self,
        space: HeapSpace,
        raw_index: u32,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) {
        let raw_index = raw_index as usize;
        match space {
            HeapSpace::Young => {
                if raw_index >= self.young_entries.len() {
                    return;
                }
                // Swap a placeholder in so the recursive rewrite can borrow
                // the arena mutably, then put the rewritten entry back.
                let entry =
                    std::mem::replace(&mut self.young_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.young_entries[raw_index] = new_entry;
            }
            HeapSpace::Yard => {
                if raw_index >= self.yard_entries.len() || raw_index >= mark as usize {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.yard_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.yard_entries[raw_index] = new_entry;
            }
            HeapSpace::Handoff => {
                if raw_index >= self.handoff_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.handoff_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.handoff_entries[raw_index] = new_entry;
            }
            HeapSpace::Stable => {
                if raw_index >= self.stable_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.stable_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.stable_entries[raw_index] = new_entry;
            }
        }
    }
1134
1135 fn rewrite_yard_entry(
1136 &mut self,
1137 entry: ArenaEntry,
1138 mark: u32,
1139 relocated: &mut [u32],
1140 compacted: &mut Vec<ArenaEntry>,
1141 ) -> ArenaEntry {
1142 let mut rewrite = |arena: &mut Arena, value: NanValue| {
1143 arena.relocate_yard_root(value, mark, relocated, compacted)
1144 };
1145 self.rewrite_entry_with(entry, &mut rewrite)
1146 }
1147
1148 fn relocate_stable_root(
1149 &mut self,
1150 value: NanValue,
1151 relocated: &mut [u32],
1152 compacted: &mut Vec<ArenaEntry>,
1153 ) -> NanValue {
1154 let Some(index) = value.heap_index() else {
1155 return value;
1156 };
1157 if !matches!(Self::decode_index(index).0, HeapSpace::Stable) {
1158 return value;
1159 }
1160 self.relocate_stable_value(value, relocated, compacted)
1161 }
1162
    /// Relocate one stable entry into `compacted`, memoizing the move in
    /// `relocated` (u32::MAX = not yet moved). There is no mark here: the
    /// whole stable space is compacted. Non-stable values pass through.
    fn relocate_stable_value(
        &mut self,
        value: NanValue,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if !matches!(space, HeapSpace::Stable) {
            return value;
        }

        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // Reserve the slot and publish the forwarding index before
        // recursing, so cycles resolve to this entry's new position.
        let new_index = Self::encode_stable_index(compacted.len() as u32);
        relocated[relocation_slot] = new_index;
        compacted.push(ArenaEntry::Int(0));

        // Placeholder swap lets the recursive rewrite borrow the arena.
        let entry = std::mem::replace(
            &mut self.stable_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.relocate_stable_entry(entry, relocated, compacted);
        // Masking the space tag recovers the position reserved above
        // (mirrors `decode_index` — TODO confirm).
        compacted[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
        value.with_heap_index(new_index)
    }
1195
1196 fn relocate_stable_entry(
1197 &mut self,
1198 entry: ArenaEntry,
1199 relocated: &mut [u32],
1200 compacted: &mut Vec<ArenaEntry>,
1201 ) -> ArenaEntry {
1202 let mut rewrite = |arena: &mut Arena, value: NanValue| {
1203 arena.relocate_stable_value(value, relocated, compacted)
1204 };
1205 self.rewrite_entry_with(entry, &mut rewrite)
1206 }
1207}