1use super::*;
2
3impl Arena {
4 pub fn truncate_to(&mut self, mark: u32) {
5 self.young_entries.truncate(mark as usize);
6 }
7
    /// Compacts the young region above `mark`, keeping only entries reachable
    /// from `roots`.
    ///
    /// Survivors are copied into a compaction buffer, the region above `mark`
    /// is truncated, and the buffer is appended back, so surviving young
    /// entries end up densely packed starting at `mark`. Each root is
    /// rewritten in place to its relocated index.
    pub fn collect_young_from_roots(&mut self, mark: u32, roots: &mut [NanValue]) {
        // Nothing allocated above the mark: nothing to collect.
        if self.young_entries.len() <= mark as usize {
            return;
        }

        // Forwarding table, old raw index -> new encoded index; a slot still
        // holding u32::MAX means "not relocated yet".
        let mut relocated =
            Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
        let mut compacted = Vec::with_capacity(self.young_entries.len() - mark as usize);

        for root in roots {
            *root = self.relocate_young_root(*root, mark, &mut relocated, &mut compacted);
        }

        self.young_entries.truncate(mark as usize);
        self.young_entries.extend(compacted);
        Self::recycle_u32_scratch(&mut self.scratch_young, relocated);
    }
25
26 pub fn truncate_yard_to(&mut self, mark: u32) {
27 self.yard_entries.truncate(mark as usize);
28 }
29
30 pub fn truncate_handoff_to(&mut self, mark: u32) {
31 self.handoff_entries.truncate(mark as usize);
32 }
33
34 pub fn evacuate_frame_to_yard(
35 &mut self,
36 young_mark: u32,
37 yard_mark: u32,
38 handoff_mark: u32,
39 roots: &mut [NanValue],
40 ) -> (bool, bool) {
41 self.evacuate_frame_locals(young_mark, yard_mark, handoff_mark, roots, AllocSpace::Yard)
42 }
43
44 pub fn evacuate_frame_to_handoff(
45 &mut self,
46 young_mark: u32,
47 yard_mark: u32,
48 handoff_mark: u32,
49 roots: &mut [NanValue],
50 ) -> (bool, bool) {
51 self.evacuate_frame_locals(
52 young_mark,
53 yard_mark,
54 handoff_mark,
55 roots,
56 AllocSpace::Handoff,
57 )
58 }
59
    /// Shared implementation behind `evacuate_frame_to_yard` /
    /// `evacuate_frame_to_handoff`.
    ///
    /// Moves every value reachable from `roots` that lives above
    /// `young_mark` / `yard_mark` / `handoff_mark` out of those frame-local
    /// regions and into `young_target` (yard or handoff — the yard/handoff
    /// evacuation helpers mirror the same target). The young region is then
    /// dropped entirely, while the yard and handoff regions are truncated to
    /// their marks and the survivors appended back.
    ///
    /// Returns whether the yard and handoff regions, respectively, still hold
    /// entries above their marks after evacuation.
    fn evacuate_frame_locals(
        &mut self,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        roots: &mut [NanValue],
        young_target: AllocSpace,
    ) -> (bool, bool) {
        // Forwarding tables, one per source region, indexed by
        // (raw index - mark); u32::MAX marks entries not yet moved.
        let mut relocated_young = Self::take_u32_scratch(
            &mut self.scratch_young,
            self.young_entries.len().saturating_sub(young_mark as usize),
        );
        let mut relocated_yard = Self::take_u32_scratch(
            &mut self.scratch_yard,
            self.yard_entries.len().saturating_sub(yard_mark as usize),
        );
        let mut relocated_handoff = Self::take_u32_scratch(
            &mut self.scratch_handoff,
            self.handoff_entries
                .len()
                .saturating_sub(handoff_mark as usize),
        );
        // Survivors are copied into these buffers, then appended after the
        // marks below.
        let mut compacted_yard =
            Vec::with_capacity(self.yard_entries.len().saturating_sub(yard_mark as usize));
        let mut compacted_handoff = Vec::with_capacity(
            self.handoff_entries
                .len()
                .saturating_sub(handoff_mark as usize),
        );

        for root in roots {
            *root = self.evacuate_local_root(
                *root,
                young_mark,
                yard_mark,
                handoff_mark,
                young_target,
                &mut relocated_young,
                &mut relocated_yard,
                &mut relocated_handoff,
                &mut compacted_yard,
                &mut compacted_handoff,
            );
        }

        // Young survivors were moved into yard/handoff, so the young region
        // above the mark is simply discarded.
        self.young_entries.truncate(young_mark as usize);
        self.yard_entries.truncate(yard_mark as usize);
        self.yard_entries.extend(compacted_yard);
        self.handoff_entries.truncate(handoff_mark as usize);
        self.handoff_entries.extend(compacted_handoff);
        self.note_peak_usage();
        Self::recycle_u32_scratch(&mut self.scratch_young, relocated_young);
        Self::recycle_u32_scratch(&mut self.scratch_yard, relocated_yard);
        Self::recycle_u32_scratch(&mut self.scratch_handoff, relocated_handoff);

        (
            self.yard_entries.len() > yard_mark as usize,
            self.handoff_entries.len() > handoff_mark as usize,
        )
    }
120
121 fn allocate_local_target_slot(
122 target: AllocSpace,
123 yard_mark: u32,
124 handoff_mark: u32,
125 compacted_yard: &mut Vec<ArenaEntry>,
126 compacted_handoff: &mut Vec<ArenaEntry>,
127 ) -> (u32, u32) {
128 match target {
129 AllocSpace::Yard => {
130 let pos = compacted_yard.len() as u32;
131 let idx = Self::encode_yard_index(yard_mark + pos);
132 compacted_yard.push(ArenaEntry::Int(0));
133 (idx, pos)
134 }
135 AllocSpace::Handoff => {
136 let pos = compacted_handoff.len() as u32;
137 let idx = Self::encode_handoff_index(handoff_mark + pos);
138 compacted_handoff.push(ArenaEntry::Int(0));
139 (idx, pos)
140 }
141 AllocSpace::Young => unreachable!("local evacuation target must be yard or handoff"),
142 }
143 }
144
145 fn store_local_target_entry(
146 target: AllocSpace,
147 compacted_pos: u32,
148 entry: ArenaEntry,
149 compacted_yard: &mut [ArenaEntry],
150 compacted_handoff: &mut [ArenaEntry],
151 ) {
152 match target {
153 AllocSpace::Yard => compacted_yard[compacted_pos as usize] = entry,
154 AllocSpace::Handoff => compacted_handoff[compacted_pos as usize] = entry,
155 AllocSpace::Young => unreachable!(),
156 }
157 }
158
    /// Rebuilds `entry`, passing every `NanValue` it contains through
    /// `rewrite`.
    ///
    /// Leaf variants (`Int`, `String`, `Builtin`, `Fn`) are returned
    /// unchanged; container variants have each child value rewritten. Every
    /// relocation/promotion pass in this module is built on this single
    /// traversal primitive.
    #[inline(always)]
    fn rewrite_entry_with<F>(&mut self, entry: ArenaEntry, rewrite: &mut F) -> ArenaEntry
    where
        F: FnMut(&mut Arena, NanValue) -> NanValue,
    {
        match entry {
            ArenaEntry::Int(i) => ArenaEntry::Int(i),
            ArenaEntry::String(s) => ArenaEntry::String(s),
            ArenaEntry::Builtin(name) => ArenaEntry::Builtin(name),
            ArenaEntry::Fn(f) => ArenaEntry::Fn(f),
            ArenaEntry::Boxed(inner) => ArenaEntry::Boxed(rewrite(self, inner)),
            ArenaEntry::List(list) => ArenaEntry::List(self.rewrite_list_with(list, rewrite)),
            ArenaEntry::Tuple(mut items) => {
                for value in &mut items {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Tuple(items)
            }
            ArenaEntry::Map(map) => {
                // Rebuild the map so both keys and values pass through
                // `rewrite`; the hash is preserved as-is.
                let mut out = PersistentMap::new();
                for (hash, (key, value)) in map {
                    out.insert(hash, (rewrite(self, key), rewrite(self, value)));
                }
                ArenaEntry::Map(out)
            }
            ArenaEntry::Record {
                type_id,
                mut fields,
            } => {
                for value in &mut fields {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Record { type_id, fields }
            }
            ArenaEntry::Variant {
                type_id,
                variant_id,
                mut fields,
            } => {
                for value in &mut fields {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Variant {
                    type_id,
                    variant_id,
                    fields,
                }
            }
            ArenaEntry::Namespace { name, mut members } => {
                for (_, value) in &mut members {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Namespace { name, members }
            }
        }
    }
215
    /// Rebuilds a list node, passing every contained `NanValue` through
    /// `rewrite`.
    ///
    /// `Flat` and `Segments` nodes are re-rooted at `start: 0`: only the live
    /// suffix (`[start..]`) is copied into a fresh backing `Rc`, so any
    /// already-skipped prefix is dropped during the rewrite.
    #[inline(always)]
    fn rewrite_list_with<F>(&mut self, list: ArenaList, rewrite: &mut F) -> ArenaList
    where
        F: FnMut(&mut Arena, NanValue) -> NanValue,
    {
        match list {
            ArenaList::Flat { items, start } => ArenaList::Flat {
                items: Rc::new(
                    items[start..]
                        .iter()
                        .map(|value| rewrite(self, *value))
                        .collect(),
                ),
                start: 0,
            },
            ArenaList::Prepend { head, tail, len } => ArenaList::Prepend {
                head: rewrite(self, head),
                tail: rewrite(self, tail),
                len,
            },
            ArenaList::Concat { left, right, len } => ArenaList::Concat {
                left: rewrite(self, left),
                right: rewrite(self, right),
                len,
            },
            ArenaList::Segments {
                current,
                rest,
                start,
                len,
            } => ArenaList::Segments {
                current: rewrite(self, current),
                rest: Rc::new(
                    rest[start..]
                        .iter()
                        .map(|value| rewrite(self, *value))
                        .collect(),
                ),
                start: 0,
                len,
            },
        }
    }
259
    /// Collapses a long list value into a single flat arena list node,
    /// returning a value pointing at the new node.
    ///
    /// Non-lists, empty-list immediates, and lists at or below the threshold
    /// are returned unchanged.
    pub fn flatten_deep_list(&mut self, value: NanValue) -> NanValue {
        // Short lists are cheap to traverse as-is; only flatten beyond this.
        const FLATTEN_THRESHOLD: usize = 64;

        if !value.is_list() || value.is_empty_list_immediate() {
            return value;
        }
        let len = self.list_len_value(value);
        if len <= FLATTEN_THRESHOLD {
            return value;
        }
        // Materialize all elements and allocate one Flat node over them.
        let elements = self.list_to_vec_value(value);
        let flat = ArenaList::Flat {
            items: Rc::new(elements),
            start: 0,
        };
        let index = self.push(ArenaEntry::List(flat));
        NanValue::new_list(index)
    }
281
    /// Evacuates one root value out of the frame-local regions.
    ///
    /// Dispatches on the heap space encoded in the value's index: values whose
    /// raw index lies inside the collected region (at or above the matching
    /// mark) are moved by the space-specific `evacuate_*_value` helper;
    /// immediates, stable values, and anything below the marks are returned
    /// unchanged.
    #[allow(clippy::too_many_arguments)]
    fn evacuate_local_root(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry>,
        compacted_handoff: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        // Immediates carry no heap index and pass through untouched.
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, _) = Self::decode_index(index);
        match space {
            HeapSpace::Young if self.is_young_index_in_region(index, young_mark) => self
                .evacuate_young_value(
                    value,
                    young_mark,
                    yard_mark,
                    handoff_mark,
                    young_target,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                    compacted_yard,
                    compacted_handoff,
                ),
            HeapSpace::Yard if self.is_yard_index_in_region(index, yard_mark) => self
                .evacuate_yard_value(
                    value,
                    young_mark,
                    yard_mark,
                    handoff_mark,
                    young_target,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                    compacted_yard,
                    compacted_handoff,
                ),
            HeapSpace::Handoff if self.is_handoff_index_in_region(index, handoff_mark) => self
                .evacuate_handoff_value(
                    value,
                    young_mark,
                    yard_mark,
                    handoff_mark,
                    young_target,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                    compacted_yard,
                    compacted_handoff,
                ),
            // Stable values and anything below the marks stay where they are.
            _ => value,
        }
    }
346
    /// Moves one young entry (above `young_mark`) into the `young_target`
    /// compaction buffer, recursively evacuating everything it references.
    ///
    /// `relocated_young` is a forwarding table indexed by
    /// `raw_index - young_mark`; a slot still holding `u32::MAX` means the
    /// entry has not been moved yet, so shared structure is moved exactly
    /// once and all references converge on the same new index.
    #[allow(clippy::too_many_arguments)]
    fn evacuate_young_value(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry>,
        compacted_handoff: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let index = value.heap_index().expect("young value must be heap-backed");
        let (_, raw_index) = Self::decode_index(index);
        let relocation_slot = (raw_index - young_mark) as usize;
        let relocated_index = relocated_young[relocation_slot];
        // Already moved: just retarget the value at its forwarded index.
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // Reserve the destination slot *before* recursing so shared/cyclic
        // references resolve to this entry's final index.
        let (new_index, compacted_pos) = Self::allocate_local_target_slot(
            young_target,
            yard_mark,
            handoff_mark,
            compacted_yard,
            compacted_handoff,
        );
        relocated_young[relocation_slot] = new_index;

        // Swap the entry out (leaving a cheap placeholder), rewrite its
        // children, then store the result in the reserved slot.
        let entry = std::mem::replace(
            &mut self.young_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.evacuate_local_entry(
            entry,
            young_mark,
            yard_mark,
            handoff_mark,
            young_target,
            relocated_young,
            relocated_yard,
            relocated_handoff,
            compacted_yard,
            compacted_handoff,
        );
        Self::store_local_target_entry(
            young_target,
            compacted_pos,
            new_entry,
            compacted_yard,
            compacted_handoff,
        );
        value.with_heap_index(new_index)
    }
403
    /// Moves one yard entry (above `yard_mark`) into the evacuation target,
    /// recursively evacuating everything it references.
    ///
    /// The destination mirrors `young_target` (yard -> `compacted_yard`,
    /// handoff -> `compacted_handoff`; `Young` is never valid), so yard
    /// survivors land in the same space as evacuated young values.
    /// `relocated_yard` forwards `raw_index - yard_mark` to the new encoded
    /// index; `u32::MAX` means not yet moved.
    #[allow(clippy::too_many_arguments)]
    fn evacuate_yard_value(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry>,
        compacted_handoff: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let index = value.heap_index().expect("yard value must be heap-backed");
        let (_, raw_index) = Self::decode_index(index);
        let relocation_slot = (raw_index - yard_mark) as usize;
        let relocated_index = relocated_yard[relocation_slot];
        // Already moved: just retarget the value at its forwarded index.
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // The target is the same space `young_target` names; the match only
        // rules out `Young`.
        let target = match young_target {
            AllocSpace::Yard => AllocSpace::Yard,
            AllocSpace::Handoff => AllocSpace::Handoff,
            AllocSpace::Young => unreachable!("local evacuation must target yard or handoff"),
        };
        // Reserve the destination slot before recursing so shared references
        // forward to this entry's final index.
        let (new_index, compacted_pos) = Self::allocate_local_target_slot(
            target,
            yard_mark,
            handoff_mark,
            compacted_yard,
            compacted_handoff,
        );
        relocated_yard[relocation_slot] = new_index;

        // Swap the entry out (placeholder left behind), rewrite its children,
        // then store the result in the reserved slot.
        let entry = std::mem::replace(
            &mut self.yard_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.evacuate_local_entry(
            entry,
            young_mark,
            yard_mark,
            handoff_mark,
            young_target,
            relocated_young,
            relocated_yard,
            relocated_handoff,
            compacted_yard,
            compacted_handoff,
        );
        Self::store_local_target_entry(
            target,
            compacted_pos,
            new_entry,
            compacted_yard,
            compacted_handoff,
        );
        value.with_heap_index(new_index)
    }
465
    /// Moves one handoff entry (above `handoff_mark`) into the evacuation
    /// target, recursively evacuating everything it references.
    ///
    /// The destination mirrors `young_target` (yard -> `compacted_yard`,
    /// handoff -> `compacted_handoff`; `Young` is never valid).
    /// `relocated_handoff` forwards `raw_index - handoff_mark` to the new
    /// encoded index; `u32::MAX` means not yet moved.
    #[allow(clippy::too_many_arguments)]
    fn evacuate_handoff_value(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry>,
        compacted_handoff: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let index = value
            .heap_index()
            .expect("handoff value must be heap-backed");
        let (_, raw_index) = Self::decode_index(index);
        let relocation_slot = (raw_index - handoff_mark) as usize;
        let relocated_index = relocated_handoff[relocation_slot];
        // Already moved: just retarget the value at its forwarded index.
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // The target is the same space `young_target` names; the match only
        // rules out `Young`.
        let target = match young_target {
            AllocSpace::Yard => AllocSpace::Yard,
            AllocSpace::Handoff => AllocSpace::Handoff,
            AllocSpace::Young => unreachable!("local evacuation must target yard or handoff"),
        };
        // Reserve the destination slot before recursing so shared references
        // forward to this entry's final index.
        let (new_index, compacted_pos) = Self::allocate_local_target_slot(
            target,
            yard_mark,
            handoff_mark,
            compacted_yard,
            compacted_handoff,
        );
        relocated_handoff[relocation_slot] = new_index;

        // Swap the entry out (placeholder left behind), rewrite its children,
        // then store the result in the reserved slot.
        let entry = std::mem::replace(
            &mut self.handoff_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.evacuate_local_entry(
            entry,
            young_mark,
            yard_mark,
            handoff_mark,
            young_target,
            relocated_young,
            relocated_yard,
            relocated_handoff,
            compacted_yard,
            compacted_handoff,
        );
        Self::store_local_target_entry(
            target,
            compacted_pos,
            new_entry,
            compacted_yard,
            compacted_handoff,
        );
        value.with_heap_index(new_index)
    }
529
    /// Rewrites every value contained in `entry` by evacuating it as a local
    /// root, threading all relocation state through the shared traversal.
    #[allow(clippy::too_many_arguments)]
    fn evacuate_local_entry(
        &mut self,
        entry: ArenaEntry,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry>,
        compacted_handoff: &mut Vec<ArenaEntry>,
    ) -> ArenaEntry {
        // Each child value goes through the same dispatch as a root, so
        // nested references are evacuated/forwarded recursively.
        let mut rewrite = |arena: &mut Arena, value: NanValue| {
            arena.evacuate_local_root(
                value,
                young_mark,
                yard_mark,
                handoff_mark,
                young_target,
                relocated_young,
                relocated_yard,
                relocated_handoff,
                compacted_yard,
                compacted_handoff,
            )
        };
        self.rewrite_entry_with(entry, &mut rewrite)
    }
560
    /// Relocation entry point for one young-collection root.
    ///
    /// Young values above `mark` are moved into the compaction buffer; any
    /// other heap value is left where it is, but the young references it
    /// holds are rewritten in place so they point at relocated entries.
    fn relocate_young_root(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        // Immediates carry no heap index and pass through untouched.
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if matches!(space, HeapSpace::Young)
            && raw_index >= mark
            && raw_index < self.young_entries.len() as u32
        {
            return self.relocate_young_value(value, mark, relocated, compacted);
        }
        // Not in the collected region: fix up its young references in place.
        self.rewrite_young_refs_in_place(space, raw_index, mark, relocated, compacted);
        value
    }
581
    /// Moves one young entry above `mark` into `compacted`, recursively
    /// relocating the young values it references.
    ///
    /// `relocated` is indexed by the old raw index (`u32::MAX` = not yet
    /// moved). The new index is `mark + position_in_buffer`, matching where
    /// the buffer is appended after `collect_young_from_roots` truncates.
    fn relocate_young_value(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        // Only young entries inside the collected region are moved.
        if !matches!(space, HeapSpace::Young) || raw_index < mark {
            return value;
        }

        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        // Already moved: just retarget the value at its forwarded index.
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // Reserve the destination slot before recursing so shared references
        // forward to this entry's final index.
        let compacted_pos = compacted.len() as u32;
        let new_index = Self::encode_index(HeapSpace::Young, mark + compacted_pos);
        relocated[relocation_slot] = new_index;
        compacted.push(ArenaEntry::Int(0));

        // Swap the entry out (placeholder left behind), rewrite its children,
        // then fill the reserved slot.
        let entry = std::mem::replace(
            &mut self.young_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.relocate_young_entry(entry, mark, relocated, compacted);
        compacted[compacted_pos as usize] = new_entry;
        value.with_heap_index(new_index)
    }
616
617 fn relocate_young_entry(
618 &mut self,
619 entry: ArenaEntry,
620 mark: u32,
621 relocated: &mut [u32],
622 compacted: &mut Vec<ArenaEntry>,
623 ) -> ArenaEntry {
624 let mut rewrite = |arena: &mut Arena, value: NanValue| {
625 arena.relocate_young_value(value, mark, relocated, compacted)
626 };
627 self.rewrite_entry_with(entry, &mut rewrite)
628 }
629
    /// Rewrites, in place, the young references held by the entry at
    /// `raw_index` in `space`, without moving the entry itself.
    ///
    /// Out-of-range indices are ignored; a young entry at or above `mark` is
    /// also skipped, since those are moved by `relocate_young_value` instead.
    /// Each entry is temporarily swapped out for a placeholder so it can be
    /// rewritten while `&mut self` is held.
    fn rewrite_young_refs_in_place(
        &mut self,
        space: HeapSpace,
        raw_index: u32,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) {
        let raw_index = raw_index as usize;
        match space {
            HeapSpace::Young => {
                // Entries above the mark belong to the collected region and
                // are handled by relocation, not in-place rewriting.
                if raw_index >= self.young_entries.len() || raw_index >= mark as usize {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.young_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.young_entries[raw_index] = new_entry;
            }
            HeapSpace::Yard => {
                if raw_index >= self.yard_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.yard_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.yard_entries[raw_index] = new_entry;
            }
            HeapSpace::Handoff => {
                if raw_index >= self.handoff_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.handoff_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.handoff_entries[raw_index] = new_entry;
            }
            HeapSpace::Stable => {
                if raw_index >= self.stable_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.stable_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.stable_entries[raw_index] = new_entry;
            }
        }
    }
678
679 fn rewrite_young_entry(
680 &mut self,
681 entry: ArenaEntry,
682 mark: u32,
683 relocated: &mut [u32],
684 compacted: &mut Vec<ArenaEntry>,
685 ) -> ArenaEntry {
686 let mut rewrite = |arena: &mut Arena, value: NanValue| {
687 arena.relocate_young_root(value, mark, relocated, compacted)
688 };
689 self.rewrite_entry_with(entry, &mut rewrite)
690 }
691
    /// Promotion entry point for one young root.
    ///
    /// Young values above `mark` are moved into `target` (yard or handoff);
    /// any other heap value stays put but has its young references rewritten
    /// in place to the promoted locations.
    fn promote_region_root_to_target(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        target: AllocSpace,
    ) -> NanValue {
        // Immediates carry no heap index and pass through untouched.
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if matches!(space, HeapSpace::Young)
            && raw_index >= mark
            && raw_index < self.young_entries.len() as u32
        {
            return self.promote_value_to_target(value, mark, relocated, target);
        }
        // Not in the promoted region: fix up its young references in place.
        self.rewrite_promoted_young_refs_in_place(space, raw_index, mark, relocated, target);
        value
    }
712
713 pub fn promote_young_roots_to_yard(&mut self, mark: u32, roots: &mut [NanValue]) {
714 if self.young_entries.len() <= mark as usize {
715 return;
716 }
717
718 let mut relocated =
719 Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
720
721 for root in roots {
722 *root = self.promote_region_root_to_yard(*root, mark, &mut relocated);
723 }
724
725 self.young_entries.truncate(mark as usize);
726 Self::recycle_u32_scratch(&mut self.scratch_young, relocated);
727 }
728
729 pub fn promote_young_roots_to_handoff(&mut self, mark: u32, roots: &mut [NanValue]) {
730 if self.young_entries.len() <= mark as usize {
731 return;
732 }
733
734 let mut relocated =
735 Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
736
737 for root in roots {
738 *root = self.promote_region_root_to_handoff(*root, mark, &mut relocated);
739 }
740
741 self.young_entries.truncate(mark as usize);
742 Self::recycle_u32_scratch(&mut self.scratch_young, relocated);
743 }
744
    /// Copies everything reachable from `roots` into the stable space.
    ///
    /// Unlike the region collectors this takes no mark: young, yard, and
    /// handoff values are all copied into `stable_entries`, and the source
    /// spaces are *not* truncated here. Roots are rewritten in place to their
    /// new stable indices.
    pub fn promote_roots_to_stable(&mut self, roots: &mut [NanValue]) {
        // One forwarding table per source space (u32::MAX = not yet copied).
        let mut relocated_young =
            Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
        let mut relocated_yard =
            Self::take_u32_scratch(&mut self.scratch_yard, self.yard_entries.len());
        let mut relocated_handoff =
            Self::take_u32_scratch(&mut self.scratch_handoff, self.handoff_entries.len());

        for root in roots {
            *root = self.promote_value_to_stable(
                *root,
                &mut relocated_young,
                &mut relocated_yard,
                &mut relocated_handoff,
            );
        }
        Self::recycle_u32_scratch(&mut self.scratch_young, relocated_young);
        Self::recycle_u32_scratch(&mut self.scratch_yard, relocated_yard);
        Self::recycle_u32_scratch(&mut self.scratch_handoff, relocated_handoff);
    }
765
    /// Compacts the yard region above `mark`, keeping only entries reachable
    /// from `roots`.
    ///
    /// Mirrors `collect_young_from_roots`: survivors are copied into a
    /// buffer, the region above `mark` is truncated, and the buffer is
    /// appended back so survivors are densely packed starting at `mark`.
    pub fn collect_yard_from_roots(&mut self, mark: u32, roots: &mut [NanValue]) {
        // Nothing allocated above the mark: nothing to collect.
        if self.yard_entries.len() <= mark as usize {
            return;
        }

        // Forwarding table (u32::MAX = not yet relocated).
        let mut relocated = Self::take_u32_scratch(&mut self.scratch_yard, self.yard_entries.len());
        let mut compacted = Vec::with_capacity(self.yard_entries.len() - mark as usize);

        for root in roots {
            *root = self.relocate_yard_root(*root, mark, &mut relocated, &mut compacted);
        }

        self.yard_entries.truncate(mark as usize);
        self.yard_entries.extend(compacted);
        Self::recycle_u32_scratch(&mut self.scratch_yard, relocated);
    }
782
    /// Compacts the entire stable space, keeping only entries reachable from
    /// `roots`; survivors are renumbered from zero and roots updated in place.
    pub fn collect_stable_from_roots(&mut self, roots: &mut [NanValue]) {
        if self.stable_entries.is_empty() {
            return;
        }

        // Forwarding table (u32::MAX = not yet relocated).
        let mut relocated =
            Self::take_u32_scratch(&mut self.scratch_stable, self.stable_entries.len());
        let mut compacted = Vec::with_capacity(self.stable_entries.len());

        for root in roots {
            *root = self.relocate_stable_root(*root, &mut relocated, &mut compacted);
        }

        // The whole space is rebuilt, so replace rather than truncate+extend.
        self.stable_entries = compacted;
        Self::recycle_u32_scratch(&mut self.scratch_stable, relocated);
    }
799
800 fn promote_region_root_to_yard(
801 &mut self,
802 value: NanValue,
803 mark: u32,
804 relocated: &mut [u32],
805 ) -> NanValue {
806 self.promote_region_root_to_target(value, mark, relocated, AllocSpace::Yard)
807 }
808
809 fn promote_region_root_to_handoff(
810 &mut self,
811 value: NanValue,
812 mark: u32,
813 relocated: &mut [u32],
814 ) -> NanValue {
815 self.promote_region_root_to_target(value, mark, relocated, AllocSpace::Handoff)
816 }
817
    /// Rewrites, in place, the young references held by the entry at
    /// `raw_index` in `space` so they point at promoted locations, without
    /// moving the entry itself.
    ///
    /// Out-of-range indices are ignored; a young entry at or above `mark` is
    /// also skipped, since those are moved by `promote_value_to_target`.
    fn rewrite_promoted_young_refs_in_place(
        &mut self,
        space: HeapSpace,
        raw_index: u32,
        mark: u32,
        relocated: &mut [u32],
        target: AllocSpace,
    ) {
        let raw_index = raw_index as usize;
        match space {
            HeapSpace::Young => {
                // Entries above the mark belong to the promoted region and
                // are handled by promotion, not in-place rewriting.
                if raw_index >= self.young_entries.len() || raw_index >= mark as usize {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.young_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.young_entries[raw_index] = new_entry;
            }
            HeapSpace::Yard => {
                if raw_index >= self.yard_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.yard_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.yard_entries[raw_index] = new_entry;
            }
            HeapSpace::Handoff => {
                if raw_index >= self.handoff_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.handoff_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.handoff_entries[raw_index] = new_entry;
            }
            HeapSpace::Stable => {
                if raw_index >= self.stable_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.stable_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.stable_entries[raw_index] = new_entry;
            }
        }
    }
866
867 fn rewrite_promoted_young_entry(
868 &mut self,
869 entry: ArenaEntry,
870 mark: u32,
871 relocated: &mut [u32],
872 target: AllocSpace,
873 ) -> ArenaEntry {
874 let mut rewrite = |arena: &mut Arena, value: NanValue| {
875 arena.promote_region_root_to_target(value, mark, relocated, target)
876 };
877 self.rewrite_entry_with(entry, &mut rewrite)
878 }
879
    /// Moves one young entry above `mark` to the end of `target`'s entry
    /// vector, recursively promoting the young values it references.
    ///
    /// `relocated` is indexed by the old raw young index (`u32::MAX` = not
    /// yet promoted), so shared structure is moved exactly once.
    fn promote_value_to_target(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        target: AllocSpace,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        // Only young entries inside the promoted region are moved.
        if !matches!(space, HeapSpace::Young) || raw_index < mark {
            return value;
        }

        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        // Already promoted: just retarget the value at its forwarded index.
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // Reserve the destination slot (placeholder push) before recursing so
        // shared references forward to this entry's final index.
        let new_index = match target {
            AllocSpace::Yard => Self::encode_yard_index(self.yard_entries.len() as u32),
            AllocSpace::Handoff => Self::encode_handoff_index(self.handoff_entries.len() as u32),
            AllocSpace::Young => unreachable!("promotion target must be yard or handoff"),
        };
        relocated[relocation_slot] = new_index;
        match target {
            AllocSpace::Yard => self.yard_entries.push(ArenaEntry::Int(0)),
            AllocSpace::Handoff => self.handoff_entries.push(ArenaEntry::Int(0)),
            AllocSpace::Young => unreachable!(),
        }
        self.note_peak_usage();

        // Swap the entry out of young space, rewrite its children, then store
        // it at the reserved slot (mask strips the space tag from the index).
        let entry = std::mem::replace(
            &mut self.young_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.promote_entry_to_target(entry, mark, relocated, target);
        match target {
            AllocSpace::Yard => {
                self.yard_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
            }
            AllocSpace::Handoff => {
                self.handoff_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
            }
            AllocSpace::Young => unreachable!(),
        }
        value.with_heap_index(new_index)
    }
930
931 fn promote_entry_to_target(
932 &mut self,
933 entry: ArenaEntry,
934 mark: u32,
935 relocated: &mut [u32],
936 target: AllocSpace,
937 ) -> ArenaEntry {
938 let mut rewrite = |arena: &mut Arena, value: NanValue| {
939 arena.promote_region_root_to_target(value, mark, relocated, target)
940 };
941 self.rewrite_entry_with(entry, &mut rewrite)
942 }
943
    /// Copies one heap value (and, recursively, everything it references)
    /// into the stable space.
    ///
    /// Each source space has its own forwarding table indexed by raw index
    /// (`u32::MAX` = not yet copied), so shared structure is copied once.
    /// Values already in stable space are returned unchanged.
    ///
    /// NOTE(review): unlike the evacuation paths, the source entry is
    /// `clone()`d and left in place rather than swapped for a placeholder —
    /// presumably so non-root references into young/yard/handoff remain
    /// valid, since `promote_roots_to_stable` does not truncate those spaces;
    /// confirm before changing to `std::mem::replace`.
    fn promote_value_to_stable(
        &mut self,
        value: NanValue,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        match space {
            HeapSpace::Young => {
                let relocation_slot = raw_index as usize;
                let relocated_index = relocated_young[relocation_slot];
                if relocated_index != u32::MAX {
                    return value.with_heap_index(relocated_index);
                }

                // Reserve the stable slot before recursing so shared
                // references forward to this entry's final index.
                let new_index = Self::encode_stable_index(self.stable_entries.len() as u32);
                relocated_young[relocation_slot] = new_index;
                self.stable_entries.push(ArenaEntry::Int(0));
                self.note_peak_usage();

                let entry = self.young_entries[raw_index as usize].clone();
                let new_entry = self.promote_entry_to_stable(
                    entry,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                );
                self.stable_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
                value.with_heap_index(new_index)
            }
            HeapSpace::Yard => {
                let relocation_slot = raw_index as usize;
                let relocated_index = relocated_yard[relocation_slot];
                if relocated_index != u32::MAX {
                    return value.with_heap_index(relocated_index);
                }

                let new_index = Self::encode_stable_index(self.stable_entries.len() as u32);
                relocated_yard[relocation_slot] = new_index;
                self.stable_entries.push(ArenaEntry::Int(0));
                self.note_peak_usage();

                let entry = self.yard_entries[raw_index as usize].clone();
                let new_entry = self.promote_entry_to_stable(
                    entry,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                );
                self.stable_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
                value.with_heap_index(new_index)
            }
            HeapSpace::Handoff => {
                let relocation_slot = raw_index as usize;
                let relocated_index = relocated_handoff[relocation_slot];
                if relocated_index != u32::MAX {
                    return value.with_heap_index(relocated_index);
                }

                let new_index = Self::encode_stable_index(self.stable_entries.len() as u32);
                relocated_handoff[relocation_slot] = new_index;
                self.stable_entries.push(ArenaEntry::Int(0));
                self.note_peak_usage();

                let entry = self.handoff_entries[raw_index as usize].clone();
                let new_entry = self.promote_entry_to_stable(
                    entry,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                );
                self.stable_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
                value.with_heap_index(new_index)
            }
            // Already stable: nothing to do.
            HeapSpace::Stable => value,
        }
    }
1025
1026 fn promote_entry_to_stable(
1027 &mut self,
1028 entry: ArenaEntry,
1029 relocated_young: &mut [u32],
1030 relocated_yard: &mut [u32],
1031 relocated_handoff: &mut [u32],
1032 ) -> ArenaEntry {
1033 let mut rewrite = |arena: &mut Arena, value: NanValue| {
1034 arena.promote_value_to_stable(value, relocated_young, relocated_yard, relocated_handoff)
1035 };
1036 self.rewrite_entry_with(entry, &mut rewrite)
1037 }
1038
    /// Relocation entry point for one yard-collection root.
    ///
    /// Yard values above `mark` are moved into the compaction buffer; any
    /// other heap value is left where it is, but the yard references it holds
    /// are rewritten in place so they point at relocated entries.
    fn relocate_yard_root(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        // Immediates carry no heap index and pass through untouched.
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if matches!(space, HeapSpace::Yard)
            && raw_index >= mark
            && raw_index < self.yard_entries.len() as u32
        {
            return self.relocate_yard_value(value, mark, relocated, compacted);
        }
        // Not in the collected region: fix up its yard references in place.
        self.rewrite_yard_refs_in_place(space, raw_index, mark, relocated, compacted);
        value
    }
1059
    /// Moves one yard entry above `mark` into `compacted`, recursively
    /// relocating the yard values it references.
    ///
    /// `relocated` is indexed by the old raw index (`u32::MAX` = not yet
    /// moved). The new index encodes `mark + position_in_buffer`, matching
    /// where the buffer is appended after `collect_yard_from_roots` truncates.
    fn relocate_yard_value(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        // Only yard entries inside the collected region are moved.
        if !matches!(space, HeapSpace::Yard) || raw_index < mark {
            return value;
        }

        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        // Already moved: just retarget the value at its forwarded index.
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // Reserve the destination slot before recursing so shared references
        // forward to this entry's final index.
        let compacted_pos = compacted.len() as u32;
        let new_index = Self::encode_yard_index(mark + compacted_pos);
        relocated[relocation_slot] = new_index;
        compacted.push(ArenaEntry::Int(0));

        // Swap the entry out (placeholder left behind), rewrite its children,
        // then fill the reserved slot.
        let entry = std::mem::replace(
            &mut self.yard_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.relocate_yard_entry(entry, mark, relocated, compacted);
        compacted[compacted_pos as usize] = new_entry;
        value.with_heap_index(new_index)
    }
1094
1095 fn relocate_yard_entry(
1096 &mut self,
1097 entry: ArenaEntry,
1098 mark: u32,
1099 relocated: &mut [u32],
1100 compacted: &mut Vec<ArenaEntry>,
1101 ) -> ArenaEntry {
1102 let mut rewrite = |arena: &mut Arena, value: NanValue| {
1103 arena.relocate_yard_value(value, mark, relocated, compacted)
1104 };
1105 self.rewrite_entry_with(entry, &mut rewrite)
1106 }
1107
    /// Rewrites, in place, the yard references held by the entry at
    /// `raw_index` in `space`, without moving the entry itself.
    ///
    /// Out-of-range indices are ignored; a yard entry at or above `mark` is
    /// also skipped, since those are moved by `relocate_yard_value` instead.
    fn rewrite_yard_refs_in_place(
        &mut self,
        space: HeapSpace,
        raw_index: u32,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) {
        let raw_index = raw_index as usize;
        match space {
            HeapSpace::Young => {
                if raw_index >= self.young_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.young_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.young_entries[raw_index] = new_entry;
            }
            HeapSpace::Yard => {
                // Entries above the mark belong to the collected region and
                // are handled by relocation, not in-place rewriting.
                if raw_index >= self.yard_entries.len() || raw_index >= mark as usize {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.yard_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.yard_entries[raw_index] = new_entry;
            }
            HeapSpace::Handoff => {
                if raw_index >= self.handoff_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.handoff_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.handoff_entries[raw_index] = new_entry;
            }
            HeapSpace::Stable => {
                if raw_index >= self.stable_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.stable_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.stable_entries[raw_index] = new_entry;
            }
        }
    }
1156
1157 fn rewrite_yard_entry(
1158 &mut self,
1159 entry: ArenaEntry,
1160 mark: u32,
1161 relocated: &mut [u32],
1162 compacted: &mut Vec<ArenaEntry>,
1163 ) -> ArenaEntry {
1164 let mut rewrite = |arena: &mut Arena, value: NanValue| {
1165 arena.relocate_yard_root(value, mark, relocated, compacted)
1166 };
1167 self.rewrite_entry_with(entry, &mut rewrite)
1168 }
1169
1170 fn relocate_stable_root(
1171 &mut self,
1172 value: NanValue,
1173 relocated: &mut [u32],
1174 compacted: &mut Vec<ArenaEntry>,
1175 ) -> NanValue {
1176 let Some(index) = value.heap_index() else {
1177 return value;
1178 };
1179 if !matches!(Self::decode_index(index).0, HeapSpace::Stable) {
1180 return value;
1181 }
1182 self.relocate_stable_value(value, relocated, compacted)
1183 }
1184
    /// Moves one stable entry into `compacted`, recursively relocating the
    /// stable values it references.
    ///
    /// `relocated` is indexed by the old raw index (`u32::MAX` = not yet
    /// moved); the new index is the entry's position in `compacted`, since
    /// the whole stable space is rebuilt from scratch.
    fn relocate_stable_value(
        &mut self,
        value: NanValue,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if !matches!(space, HeapSpace::Stable) {
            return value;
        }

        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        // Already moved: just retarget the value at its forwarded index.
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // Reserve the destination slot before recursing so shared references
        // forward to this entry's final index.
        let new_index = Self::encode_stable_index(compacted.len() as u32);
        relocated[relocation_slot] = new_index;
        compacted.push(ArenaEntry::Int(0));

        // Swap the entry out (placeholder left behind), rewrite its children,
        // then fill the reserved slot (mask strips the space tag).
        let entry = std::mem::replace(
            &mut self.stable_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.relocate_stable_entry(entry, relocated, compacted);
        compacted[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
        value.with_heap_index(new_index)
    }
1217
1218 fn relocate_stable_entry(
1219 &mut self,
1220 entry: ArenaEntry,
1221 relocated: &mut [u32],
1222 compacted: &mut Vec<ArenaEntry>,
1223 ) -> ArenaEntry {
1224 let mut rewrite = |arena: &mut Arena, value: NanValue| {
1225 arena.relocate_stable_value(value, relocated, compacted)
1226 };
1227 self.rewrite_entry_with(entry, &mut rewrite)
1228 }
1229}