1use super::*;
2
3impl Arena {
4 pub fn truncate_to(&mut self, mark: u32) {
5 self.young_entries.truncate(mark as usize);
6 }
7
8 pub fn collect_young_from_roots(&mut self, mark: u32, roots: &mut [NanValue]) {
9 if self.young_entries.len() <= mark as usize {
10 return;
11 }
12
13 let mut relocated =
14 Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
15 let mut compacted = Vec::with_capacity(self.young_entries.len() - mark as usize);
16
17 for root in roots {
18 *root = self.relocate_young_root(*root, mark, &mut relocated, &mut compacted);
19 }
20
21 self.young_entries.truncate(mark as usize);
22 self.young_entries.extend(compacted);
23 Self::recycle_u32_scratch(&mut self.scratch_young, relocated);
24 }
25
26 pub fn truncate_yard_to(&mut self, mark: u32) {
27 self.yard_entries.truncate(mark as usize);
28 }
29
30 pub fn truncate_handoff_to(&mut self, mark: u32) {
31 self.handoff_entries.truncate(mark as usize);
32 }
33
34 pub fn evacuate_frame_to_yard(
35 &mut self,
36 young_mark: u32,
37 yard_mark: u32,
38 handoff_mark: u32,
39 roots: &mut [NanValue],
40 ) -> (bool, bool) {
41 self.evacuate_frame_locals(young_mark, yard_mark, handoff_mark, roots, AllocSpace::Yard)
42 }
43
44 pub fn evacuate_frame_to_handoff(
45 &mut self,
46 young_mark: u32,
47 yard_mark: u32,
48 handoff_mark: u32,
49 roots: &mut [NanValue],
50 ) -> (bool, bool) {
51 self.evacuate_frame_locals(
52 young_mark,
53 yard_mark,
54 handoff_mark,
55 roots,
56 AllocSpace::Handoff,
57 )
58 }
59
    /// Core of frame-local evacuation: every entry allocated at or above one
    /// of the three region marks that is still reachable from `roots` is
    /// moved out of the doomed region tails. Young survivors land in
    /// `young_target` (yard or handoff); yard/handoff survivors are compacted
    /// back down onto their own marks.
    ///
    /// Returns `(yard_grew, handoff_grew)`: whether each space holds entries
    /// above its mark once evacuation is done.
    fn evacuate_frame_locals(
        &mut self,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        roots: &mut [NanValue],
        young_target: AllocSpace,
    ) -> (bool, bool) {
        // One forwarding slot per entry above each mark.
        // NOTE(review): assumes take_u32_scratch yields a u32::MAX-filled
        // table — that sentinel is what the evacuate_* helpers test for;
        // confirm against its definition elsewhere in the file.
        let mut relocated_young = Self::take_u32_scratch(
            &mut self.scratch_young,
            self.young_entries.len().saturating_sub(young_mark as usize),
        );
        let mut relocated_yard = Self::take_u32_scratch(
            &mut self.scratch_yard,
            self.yard_entries.len().saturating_sub(yard_mark as usize),
        );
        let mut relocated_handoff = Self::take_u32_scratch(
            &mut self.scratch_handoff,
            self.handoff_entries
                .len()
                .saturating_sub(handoff_mark as usize),
        );
        // Survivors are staged here and spliced in after truncation.
        let mut compacted_yard =
            Vec::with_capacity(self.yard_entries.len().saturating_sub(yard_mark as usize));
        let mut compacted_handoff = Vec::with_capacity(
            self.handoff_entries
                .len()
                .saturating_sub(handoff_mark as usize),
        );

        for root in roots {
            *root = self.evacuate_local_root(
                *root,
                young_mark,
                yard_mark,
                handoff_mark,
                young_target,
                &mut relocated_young,
                &mut relocated_yard,
                &mut relocated_handoff,
                &mut compacted_yard,
                &mut compacted_handoff,
            );
        }

        // Drop the doomed tails, then append the staged survivors; the
        // truncate-before-extend order is what makes the encoded indices
        // (mark + position) line up with the final slots.
        self.young_entries.truncate(young_mark as usize);
        self.yard_entries.truncate(yard_mark as usize);
        self.yard_entries.extend(compacted_yard);
        self.handoff_entries.truncate(handoff_mark as usize);
        self.handoff_entries.extend(compacted_handoff);
        self.note_peak_usage();
        Self::recycle_u32_scratch(&mut self.scratch_young, relocated_young);
        Self::recycle_u32_scratch(&mut self.scratch_yard, relocated_yard);
        Self::recycle_u32_scratch(&mut self.scratch_handoff, relocated_handoff);

        (
            self.yard_entries.len() > yard_mark as usize,
            self.handoff_entries.len() > handoff_mark as usize,
        )
    }
120
121 fn allocate_local_target_slot(
122 target: AllocSpace,
123 yard_mark: u32,
124 handoff_mark: u32,
125 compacted_yard: &mut Vec<ArenaEntry>,
126 compacted_handoff: &mut Vec<ArenaEntry>,
127 ) -> (u32, u32) {
128 match target {
129 AllocSpace::Yard => {
130 let pos = compacted_yard.len() as u32;
131 let idx = Self::encode_yard_index(yard_mark + pos);
132 compacted_yard.push(ArenaEntry::Int(0));
133 (idx, pos)
134 }
135 AllocSpace::Handoff => {
136 let pos = compacted_handoff.len() as u32;
137 let idx = Self::encode_handoff_index(handoff_mark + pos);
138 compacted_handoff.push(ArenaEntry::Int(0));
139 (idx, pos)
140 }
141 AllocSpace::Young => unreachable!("local evacuation target must be yard or handoff"),
142 }
143 }
144
145 fn store_local_target_entry(
146 target: AllocSpace,
147 compacted_pos: u32,
148 entry: ArenaEntry,
149 compacted_yard: &mut [ArenaEntry],
150 compacted_handoff: &mut [ArenaEntry],
151 ) {
152 match target {
153 AllocSpace::Yard => compacted_yard[compacted_pos as usize] = entry,
154 AllocSpace::Handoff => compacted_handoff[compacted_pos as usize] = entry,
155 AllocSpace::Young => unreachable!(),
156 }
157 }
158
    /// Structurally maps `rewrite` over every `NanValue` held inside `entry`,
    /// rebuilding the entry around the rewritten children. Leaf variants
    /// (Int/String/Builtin/Fn) carry no value references and pass through.
    /// This is the single traversal primitive shared by all GC passes.
    #[inline(always)]
    fn rewrite_entry_with<F>(&mut self, entry: ArenaEntry, rewrite: &mut F) -> ArenaEntry
    where
        F: FnMut(&mut Arena, NanValue) -> NanValue,
    {
        match entry {
            ArenaEntry::Int(i) => ArenaEntry::Int(i),
            ArenaEntry::String(s) => ArenaEntry::String(s),
            ArenaEntry::Builtin(name) => ArenaEntry::Builtin(name),
            ArenaEntry::Fn(f) => ArenaEntry::Fn(f),
            ArenaEntry::Boxed(inner) => ArenaEntry::Boxed(rewrite(self, inner)),
            ArenaEntry::List(list) => ArenaEntry::List(self.rewrite_list_with(list, rewrite)),
            ArenaEntry::Tuple(mut items) => {
                for value in &mut items {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Tuple(items)
            }
            ArenaEntry::Vector(mut items) => {
                for value in &mut items {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Vector(items)
            }
            ArenaEntry::Map(map) => {
                // Rebuild the map with rewritten key/value handles; the stored
                // hash is reused unchanged.
                // NOTE(review): assumes the hash does not depend on the heap
                // index being rewritten — confirm against PersistentMap usage.
                let mut out = PersistentMap::new();
                for (&hash, &(key, value)) in map.iter() {
                    out = out.insert(hash, (rewrite(self, key), rewrite(self, value)));
                }
                ArenaEntry::Map(out)
            }
            ArenaEntry::Record {
                type_id,
                mut fields,
            } => {
                for value in &mut fields {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Record { type_id, fields }
            }
            ArenaEntry::Variant {
                type_id,
                variant_id,
                mut fields,
            } => {
                for value in &mut fields {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Variant {
                    type_id,
                    variant_id,
                    fields,
                }
            }
            ArenaEntry::Namespace { name, mut members } => {
                for (_, value) in &mut members {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Namespace { name, members }
            }
        }
    }
221
    /// List counterpart of `rewrite_entry_with`: maps `rewrite` over every
    /// value referenced by a list node. For the `Flat` and `Segments` shapes
    /// only the live suffix (`start..`) is copied, so the rebuilt node is
    /// normalized to `start: 0`.
    #[inline(always)]
    fn rewrite_list_with<F>(&mut self, list: ArenaList, rewrite: &mut F) -> ArenaList
    where
        F: FnMut(&mut Arena, NanValue) -> NanValue,
    {
        match list {
            ArenaList::Flat { items, start } => ArenaList::Flat {
                items: Rc::new(
                    items[start..]
                        .iter()
                        .map(|value| rewrite(self, *value))
                        .collect(),
                ),
                start: 0,
            },
            ArenaList::Prepend { head, tail, len } => ArenaList::Prepend {
                head: rewrite(self, head),
                tail: rewrite(self, tail),
                len,
            },
            ArenaList::Concat { left, right, len } => ArenaList::Concat {
                left: rewrite(self, left),
                right: rewrite(self, right),
                len,
            },
            ArenaList::Segments {
                current,
                rest,
                start,
                len,
            } => ArenaList::Segments {
                current: rewrite(self, current),
                rest: Rc::new(
                    rest[start..]
                        .iter()
                        .map(|value| rewrite(self, *value))
                        .collect(),
                ),
                start: 0,
                len,
            },
        }
    }
265
266 pub fn flatten_deep_list(&mut self, value: NanValue) -> NanValue {
270 const FLATTEN_THRESHOLD: usize = 64;
271
272 if !value.is_list() || value.is_empty_list_immediate() {
273 return value;
274 }
275 let len = self.list_len_value(value);
276 if len <= FLATTEN_THRESHOLD {
277 return value;
278 }
279 let elements = self.list_to_vec_value(value);
280 let flat = ArenaList::Flat {
281 items: Rc::new(elements),
282 start: 0,
283 };
284 let index = self.push(ArenaEntry::List(flat));
285 NanValue::new_list(index)
286 }
287
    /// Rewrites one root (or child value) during frame-local evacuation.
    /// Values whose heap index lies inside a doomed region (at or above the
    /// matching mark) are moved via the per-space helper; immediates and
    /// values below the marks are returned unchanged.
    ///
    /// NOTE(review): unlike `relocate_young_root`, entries below the marks are
    /// not rewritten in place here — this relies on older entries never
    /// pointing at frame-local ones. Confirm against the allocator's write
    /// discipline.
    #[allow(clippy::too_many_arguments)]
    fn evacuate_local_root(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry>,
        compacted_handoff: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            // Immediate value — nothing heap-backed to move.
            return value;
        };
        let (space, _) = Self::decode_index(index);
        match space {
            HeapSpace::Young if self.is_young_index_in_region(index, young_mark) => self
                .evacuate_young_value(
                    value,
                    young_mark,
                    yard_mark,
                    handoff_mark,
                    young_target,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                    compacted_yard,
                    compacted_handoff,
                ),
            HeapSpace::Yard if self.is_yard_index_in_region(index, yard_mark) => self
                .evacuate_yard_value(
                    value,
                    young_mark,
                    yard_mark,
                    handoff_mark,
                    young_target,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                    compacted_yard,
                    compacted_handoff,
                ),
            HeapSpace::Handoff if self.is_handoff_index_in_region(index, handoff_mark) => self
                .evacuate_handoff_value(
                    value,
                    young_mark,
                    yard_mark,
                    handoff_mark,
                    young_target,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                    compacted_yard,
                    compacted_handoff,
                ),
            _ => value,
        }
    }
352
    /// Moves one young entry (at or above `young_mark`) into the compaction
    /// buffer for `young_target`, memoizing the move in `relocated_young`
    /// (slot = raw_index - young_mark, u32::MAX = not yet moved) and returning
    /// `value` rebased onto the new encoded index.
    #[allow(clippy::too_many_arguments)]
    fn evacuate_young_value(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry>,
        compacted_handoff: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let index = value.heap_index().expect("young value must be heap-backed");
        let (_, raw_index) = Self::decode_index(index);
        let relocation_slot = (raw_index - young_mark) as usize;
        let relocated_index = relocated_young[relocation_slot];
        if relocated_index != u32::MAX {
            // Already moved — reuse the recorded forwarding index.
            return value.with_heap_index(relocated_index);
        }

        // Reserve the destination slot and record the forwarding index
        // *before* recursing into children, so cyclic structures terminate.
        let (new_index, compacted_pos) = Self::allocate_local_target_slot(
            young_target,
            yard_mark,
            handoff_mark,
            compacted_yard,
            compacted_handoff,
        );
        relocated_young[relocation_slot] = new_index;

        // The young slot is about to be truncated; leave a placeholder there.
        let entry = std::mem::replace(
            &mut self.young_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.evacuate_local_entry(
            entry,
            young_mark,
            yard_mark,
            handoff_mark,
            young_target,
            relocated_young,
            relocated_yard,
            relocated_handoff,
            compacted_yard,
            compacted_handoff,
        );
        Self::store_local_target_entry(
            young_target,
            compacted_pos,
            new_entry,
            compacted_yard,
            compacted_handoff,
        );
        value.with_heap_index(new_index)
    }
409
    /// Moves one yard entry (at or above `yard_mark`) into a compaction
    /// buffer, memoizing the move in `relocated_yard` (slot = raw_index -
    /// yard_mark, u32::MAX = not yet moved).
    ///
    /// NOTE(review): the `target` match below maps `young_target` to itself
    /// for Yard/Handoff — yard survivors follow the same destination as young
    /// ones. The match exists only to rule out `Young`; verify that is the
    /// intended policy (vs. always compacting yard back into the yard).
    #[allow(clippy::too_many_arguments)]
    fn evacuate_yard_value(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry>,
        compacted_handoff: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let index = value.heap_index().expect("yard value must be heap-backed");
        let (_, raw_index) = Self::decode_index(index);
        let relocation_slot = (raw_index - yard_mark) as usize;
        let relocated_index = relocated_yard[relocation_slot];
        if relocated_index != u32::MAX {
            // Already moved — reuse the recorded forwarding index.
            return value.with_heap_index(relocated_index);
        }

        let target = match young_target {
            AllocSpace::Yard => AllocSpace::Yard,
            AllocSpace::Handoff => AllocSpace::Handoff,
            AllocSpace::Young => unreachable!("local evacuation must target yard or handoff"),
        };
        // Record the forwarding index before recursing so cycles terminate.
        let (new_index, compacted_pos) = Self::allocate_local_target_slot(
            target,
            yard_mark,
            handoff_mark,
            compacted_yard,
            compacted_handoff,
        );
        relocated_yard[relocation_slot] = new_index;

        // Slot is about to be truncated; leave a placeholder behind.
        let entry = std::mem::replace(
            &mut self.yard_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.evacuate_local_entry(
            entry,
            young_mark,
            yard_mark,
            handoff_mark,
            young_target,
            relocated_young,
            relocated_yard,
            relocated_handoff,
            compacted_yard,
            compacted_handoff,
        );
        Self::store_local_target_entry(
            target,
            compacted_pos,
            new_entry,
            compacted_yard,
            compacted_handoff,
        );
        value.with_heap_index(new_index)
    }
471
    /// Moves one handoff entry (at or above `handoff_mark`) into a compaction
    /// buffer, memoizing the move in `relocated_handoff` (slot = raw_index -
    /// handoff_mark, u32::MAX = not yet moved). Mirrors `evacuate_yard_value`;
    /// handoff survivors follow `young_target` as their destination.
    #[allow(clippy::too_many_arguments)]
    fn evacuate_handoff_value(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry>,
        compacted_handoff: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let index = value
            .heap_index()
            .expect("handoff value must be heap-backed");
        let (_, raw_index) = Self::decode_index(index);
        let relocation_slot = (raw_index - handoff_mark) as usize;
        let relocated_index = relocated_handoff[relocation_slot];
        if relocated_index != u32::MAX {
            // Already moved — reuse the recorded forwarding index.
            return value.with_heap_index(relocated_index);
        }

        let target = match young_target {
            AllocSpace::Yard => AllocSpace::Yard,
            AllocSpace::Handoff => AllocSpace::Handoff,
            AllocSpace::Young => unreachable!("local evacuation must target yard or handoff"),
        };
        // Record the forwarding index before recursing so cycles terminate.
        let (new_index, compacted_pos) = Self::allocate_local_target_slot(
            target,
            yard_mark,
            handoff_mark,
            compacted_yard,
            compacted_handoff,
        );
        relocated_handoff[relocation_slot] = new_index;

        // Slot is about to be truncated; leave a placeholder behind.
        let entry = std::mem::replace(
            &mut self.handoff_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.evacuate_local_entry(
            entry,
            young_mark,
            yard_mark,
            handoff_mark,
            young_target,
            relocated_young,
            relocated_yard,
            relocated_handoff,
            compacted_yard,
            compacted_handoff,
        );
        Self::store_local_target_entry(
            target,
            compacted_pos,
            new_entry,
            compacted_yard,
            compacted_handoff,
        );
        value.with_heap_index(new_index)
    }
535
536 #[allow(clippy::too_many_arguments)]
537 fn evacuate_local_entry(
538 &mut self,
539 entry: ArenaEntry,
540 young_mark: u32,
541 yard_mark: u32,
542 handoff_mark: u32,
543 young_target: AllocSpace,
544 relocated_young: &mut [u32],
545 relocated_yard: &mut [u32],
546 relocated_handoff: &mut [u32],
547 compacted_yard: &mut Vec<ArenaEntry>,
548 compacted_handoff: &mut Vec<ArenaEntry>,
549 ) -> ArenaEntry {
550 let mut rewrite = |arena: &mut Arena, value: NanValue| {
551 arena.evacuate_local_root(
552 value,
553 young_mark,
554 yard_mark,
555 handoff_mark,
556 young_target,
557 relocated_young,
558 relocated_yard,
559 relocated_handoff,
560 compacted_yard,
561 compacted_handoff,
562 )
563 };
564 self.rewrite_entry_with(entry, &mut rewrite)
565 }
566
    /// Rewrites one root during young collection. A root pointing into the
    /// collected region (young, at or above `mark`, in bounds) is relocated;
    /// any other heap-backed root keeps its index but has its referenced
    /// entry rewritten *in place*, so interior references into the collected
    /// region are still forwarded.
    fn relocate_young_root(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            // Immediate value — nothing to relocate.
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if matches!(space, HeapSpace::Young)
            && raw_index >= mark
            && raw_index < self.young_entries.len() as u32
        {
            return self.relocate_young_value(value, mark, relocated, compacted);
        }
        self.rewrite_young_refs_in_place(space, raw_index, mark, relocated, compacted);
        value
    }
587
    /// Copies one collected young entry into `compacted`, memoizing the move
    /// in `relocated` (indexed by raw young index over the whole young vector;
    /// u32::MAX = not yet moved) and returning `value` rebased onto its
    /// post-compaction index (`mark` + position in `compacted`).
    fn relocate_young_value(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if !matches!(space, HeapSpace::Young) || raw_index < mark {
            // Outside the collected region — leave untouched.
            return value;
        }

        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        if relocated_index != u32::MAX {
            // Already moved — reuse the recorded forwarding index.
            return value.with_heap_index(relocated_index);
        }

        // Record the forwarding index and reserve a placeholder slot *before*
        // recursing into children, so cyclic structures terminate.
        let compacted_pos = compacted.len() as u32;
        let new_index = Self::encode_index(HeapSpace::Young, mark + compacted_pos);
        relocated[relocation_slot] = new_index;
        compacted.push(ArenaEntry::Int(0));

        // Old slot is about to be truncated; leave a placeholder behind.
        let entry = std::mem::replace(
            &mut self.young_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.relocate_young_entry(entry, mark, relocated, compacted);
        compacted[compacted_pos as usize] = new_entry;
        value.with_heap_index(new_index)
    }
622
623 fn relocate_young_entry(
624 &mut self,
625 entry: ArenaEntry,
626 mark: u32,
627 relocated: &mut [u32],
628 compacted: &mut Vec<ArenaEntry>,
629 ) -> ArenaEntry {
630 let mut rewrite = |arena: &mut Arena, value: NanValue| {
631 arena.relocate_young_value(value, mark, relocated, compacted)
632 };
633 self.rewrite_entry_with(entry, &mut rewrite)
634 }
635
    /// Rewrites, in place, every reference held by the entry at
    /// (`space`, `raw_index`) so its pointers into the collected young region
    /// get forwarded. Out-of-bounds indices are ignored; young entries at or
    /// above `mark` are skipped here because relocation handles them. The
    /// replace/put-back dance releases the borrow on the entry vector while
    /// the recursive rewrite runs.
    fn rewrite_young_refs_in_place(
        &mut self,
        space: HeapSpace,
        raw_index: u32,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) {
        let raw_index = raw_index as usize;
        match space {
            HeapSpace::Young => {
                if raw_index >= self.young_entries.len() || raw_index >= mark as usize {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.young_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.young_entries[raw_index] = new_entry;
            }
            HeapSpace::Yard => {
                if raw_index >= self.yard_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.yard_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.yard_entries[raw_index] = new_entry;
            }
            HeapSpace::Handoff => {
                if raw_index >= self.handoff_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.handoff_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.handoff_entries[raw_index] = new_entry;
            }
            HeapSpace::Stable => {
                if raw_index >= self.stable_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.stable_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.stable_entries[raw_index] = new_entry;
            }
        }
    }
684
685 fn rewrite_young_entry(
686 &mut self,
687 entry: ArenaEntry,
688 mark: u32,
689 relocated: &mut [u32],
690 compacted: &mut Vec<ArenaEntry>,
691 ) -> ArenaEntry {
692 let mut rewrite = |arena: &mut Arena, value: NanValue| {
693 arena.relocate_young_root(value, mark, relocated, compacted)
694 };
695 self.rewrite_entry_with(entry, &mut rewrite)
696 }
697
    /// Promotion counterpart of `relocate_young_root`: a root pointing into
    /// the young region at or above `mark` is promoted into `target`
    /// (yard or handoff); any other heap-backed root keeps its index but has
    /// its referenced entry rewritten in place so interior young references
    /// get forwarded too.
    fn promote_region_root_to_target(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        target: AllocSpace,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            // Immediate value — nothing to promote.
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if matches!(space, HeapSpace::Young)
            && raw_index >= mark
            && raw_index < self.young_entries.len() as u32
        {
            return self.promote_value_to_target(value, mark, relocated, target);
        }
        self.rewrite_promoted_young_refs_in_place(space, raw_index, mark, relocated, target);
        value
    }
718
719 pub fn promote_young_roots_to_yard(&mut self, mark: u32, roots: &mut [NanValue]) {
720 if self.young_entries.len() <= mark as usize {
721 return;
722 }
723
724 let mut relocated =
725 Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
726
727 for root in roots {
728 *root = self.promote_region_root_to_yard(*root, mark, &mut relocated);
729 }
730
731 self.young_entries.truncate(mark as usize);
732 Self::recycle_u32_scratch(&mut self.scratch_young, relocated);
733 }
734
735 pub fn promote_young_roots_to_handoff(&mut self, mark: u32, roots: &mut [NanValue]) {
736 if self.young_entries.len() <= mark as usize {
737 return;
738 }
739
740 let mut relocated =
741 Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
742
743 for root in roots {
744 *root = self.promote_region_root_to_handoff(*root, mark, &mut relocated);
745 }
746
747 self.young_entries.truncate(mark as usize);
748 Self::recycle_u32_scratch(&mut self.scratch_young, relocated);
749 }
750
751 pub fn promote_roots_to_stable(&mut self, roots: &mut [NanValue]) {
752 let mut relocated_young =
753 Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
754 let mut relocated_yard =
755 Self::take_u32_scratch(&mut self.scratch_yard, self.yard_entries.len());
756 let mut relocated_handoff =
757 Self::take_u32_scratch(&mut self.scratch_handoff, self.handoff_entries.len());
758
759 for root in roots {
760 *root = self.promote_value_to_stable(
761 *root,
762 &mut relocated_young,
763 &mut relocated_yard,
764 &mut relocated_handoff,
765 );
766 }
767 Self::recycle_u32_scratch(&mut self.scratch_young, relocated_young);
768 Self::recycle_u32_scratch(&mut self.scratch_yard, relocated_yard);
769 Self::recycle_u32_scratch(&mut self.scratch_handoff, relocated_handoff);
770 }
771
772 pub fn collect_yard_from_roots(&mut self, mark: u32, roots: &mut [NanValue]) {
773 if self.yard_entries.len() <= mark as usize {
774 return;
775 }
776
777 let mut relocated = Self::take_u32_scratch(&mut self.scratch_yard, self.yard_entries.len());
778 let mut compacted = Vec::with_capacity(self.yard_entries.len() - mark as usize);
779
780 for root in roots {
781 *root = self.relocate_yard_root(*root, mark, &mut relocated, &mut compacted);
782 }
783
784 self.yard_entries.truncate(mark as usize);
785 self.yard_entries.extend(compacted);
786 Self::recycle_u32_scratch(&mut self.scratch_yard, relocated);
787 }
788
789 pub fn collect_stable_from_roots(&mut self, roots: &mut [NanValue]) {
790 if self.stable_entries.is_empty() {
791 return;
792 }
793
794 let mut relocated =
795 Self::take_u32_scratch(&mut self.scratch_stable, self.stable_entries.len());
796 let mut compacted = Vec::with_capacity(self.stable_entries.len());
797
798 for root in roots {
799 *root = self.relocate_stable_root(*root, &mut relocated, &mut compacted);
800 }
801
802 self.stable_entries = compacted;
803 Self::recycle_u32_scratch(&mut self.scratch_stable, relocated);
804 }
805
806 fn promote_region_root_to_yard(
807 &mut self,
808 value: NanValue,
809 mark: u32,
810 relocated: &mut [u32],
811 ) -> NanValue {
812 self.promote_region_root_to_target(value, mark, relocated, AllocSpace::Yard)
813 }
814
815 fn promote_region_root_to_handoff(
816 &mut self,
817 value: NanValue,
818 mark: u32,
819 relocated: &mut [u32],
820 ) -> NanValue {
821 self.promote_region_root_to_target(value, mark, relocated, AllocSpace::Handoff)
822 }
823
    /// Promotion counterpart of `rewrite_young_refs_in_place`: rewrites, in
    /// place, every reference held by the entry at (`space`, `raw_index`) so
    /// pointers into the promoted young region get forwarded. Out-of-bounds
    /// indices are ignored; young entries at or above `mark` are skipped
    /// here because promotion moves them instead.
    fn rewrite_promoted_young_refs_in_place(
        &mut self,
        space: HeapSpace,
        raw_index: u32,
        mark: u32,
        relocated: &mut [u32],
        target: AllocSpace,
    ) {
        let raw_index = raw_index as usize;
        match space {
            HeapSpace::Young => {
                if raw_index >= self.young_entries.len() || raw_index >= mark as usize {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.young_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.young_entries[raw_index] = new_entry;
            }
            HeapSpace::Yard => {
                if raw_index >= self.yard_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.yard_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.yard_entries[raw_index] = new_entry;
            }
            HeapSpace::Handoff => {
                if raw_index >= self.handoff_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.handoff_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.handoff_entries[raw_index] = new_entry;
            }
            HeapSpace::Stable => {
                if raw_index >= self.stable_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.stable_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.stable_entries[raw_index] = new_entry;
            }
        }
    }
872
873 fn rewrite_promoted_young_entry(
874 &mut self,
875 entry: ArenaEntry,
876 mark: u32,
877 relocated: &mut [u32],
878 target: AllocSpace,
879 ) -> ArenaEntry {
880 let mut rewrite = |arena: &mut Arena, value: NanValue| {
881 arena.promote_region_root_to_target(value, mark, relocated, target)
882 };
883 self.rewrite_entry_with(entry, &mut rewrite)
884 }
885
    /// Moves one young entry (at or above `mark`) to the end of the `target`
    /// space, memoizing the move in `relocated` (indexed by raw young index;
    /// u32::MAX = not yet moved) and returning `value` rebased onto the new
    /// encoded index. The vacated young slot is left as an Int(0) placeholder
    /// pending the caller's truncate.
    fn promote_value_to_target(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        target: AllocSpace,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if !matches!(space, HeapSpace::Young) || raw_index < mark {
            // Outside the promoted region — leave untouched.
            return value;
        }

        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        if relocated_index != u32::MAX {
            // Already moved — reuse the recorded forwarding index.
            return value.with_heap_index(relocated_index);
        }

        // Record the forwarding index and reserve a placeholder slot in the
        // target *before* recursing into children, so cycles terminate.
        let new_index = match target {
            AllocSpace::Yard => Self::encode_yard_index(self.yard_entries.len() as u32),
            AllocSpace::Handoff => Self::encode_handoff_index(self.handoff_entries.len() as u32),
            AllocSpace::Young => unreachable!("promotion target must be yard or handoff"),
        };
        relocated[relocation_slot] = new_index;
        match target {
            AllocSpace::Yard => self.yard_entries.push(ArenaEntry::Int(0)),
            AllocSpace::Handoff => self.handoff_entries.push(ArenaEntry::Int(0)),
            AllocSpace::Young => unreachable!(),
        }
        self.note_peak_usage();

        let entry = std::mem::replace(
            &mut self.young_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.promote_entry_to_target(entry, mark, relocated, target);
        // Masking the encoded index recovers the raw slot in the target vec.
        match target {
            AllocSpace::Yard => {
                self.yard_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
            }
            AllocSpace::Handoff => {
                self.handoff_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
            }
            AllocSpace::Young => unreachable!(),
        }
        value.with_heap_index(new_index)
    }
936
937 fn promote_entry_to_target(
938 &mut self,
939 entry: ArenaEntry,
940 mark: u32,
941 relocated: &mut [u32],
942 target: AllocSpace,
943 ) -> ArenaEntry {
944 let mut rewrite = |arena: &mut Arena, value: NanValue| {
945 arena.promote_region_root_to_target(value, mark, relocated, target)
946 };
947 self.rewrite_entry_with(entry, &mut rewrite)
948 }
949
    /// Copies one young/yard/handoff entry into stable space, memoizing the
    /// move in the matching per-space forwarding table (u32::MAX = not yet
    /// moved). Values already in stable space pass through unchanged.
    ///
    /// Unlike the relocation paths, the source entry is *cloned* rather than
    /// swapped out — `promote_roots_to_stable` does not truncate the source
    /// regions, so the originals must stay valid.
    fn promote_value_to_stable(
        &mut self,
        value: NanValue,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            // Immediate value — nothing to promote.
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        match space {
            HeapSpace::Young => {
                let relocation_slot = raw_index as usize;
                let relocated_index = relocated_young[relocation_slot];
                if relocated_index != u32::MAX {
                    return value.with_heap_index(relocated_index);
                }

                // Record the forwarding index and reserve a placeholder slot
                // *before* recursing, so cyclic structures terminate.
                let new_index = Self::encode_stable_index(self.stable_entries.len() as u32);
                relocated_young[relocation_slot] = new_index;
                self.stable_entries.push(ArenaEntry::Int(0));
                self.note_peak_usage();

                let entry = self.young_entries[raw_index as usize].clone();
                let new_entry = self.promote_entry_to_stable(
                    entry,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                );
                self.stable_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
                value.with_heap_index(new_index)
            }
            HeapSpace::Yard => {
                let relocation_slot = raw_index as usize;
                let relocated_index = relocated_yard[relocation_slot];
                if relocated_index != u32::MAX {
                    return value.with_heap_index(relocated_index);
                }

                let new_index = Self::encode_stable_index(self.stable_entries.len() as u32);
                relocated_yard[relocation_slot] = new_index;
                self.stable_entries.push(ArenaEntry::Int(0));
                self.note_peak_usage();

                let entry = self.yard_entries[raw_index as usize].clone();
                let new_entry = self.promote_entry_to_stable(
                    entry,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                );
                self.stable_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
                value.with_heap_index(new_index)
            }
            HeapSpace::Handoff => {
                let relocation_slot = raw_index as usize;
                let relocated_index = relocated_handoff[relocation_slot];
                if relocated_index != u32::MAX {
                    return value.with_heap_index(relocated_index);
                }

                let new_index = Self::encode_stable_index(self.stable_entries.len() as u32);
                relocated_handoff[relocation_slot] = new_index;
                self.stable_entries.push(ArenaEntry::Int(0));
                self.note_peak_usage();

                let entry = self.handoff_entries[raw_index as usize].clone();
                let new_entry = self.promote_entry_to_stable(
                    entry,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                );
                self.stable_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
                value.with_heap_index(new_index)
            }
            HeapSpace::Stable => value,
        }
    }
1031
1032 fn promote_entry_to_stable(
1033 &mut self,
1034 entry: ArenaEntry,
1035 relocated_young: &mut [u32],
1036 relocated_yard: &mut [u32],
1037 relocated_handoff: &mut [u32],
1038 ) -> ArenaEntry {
1039 let mut rewrite = |arena: &mut Arena, value: NanValue| {
1040 arena.promote_value_to_stable(value, relocated_young, relocated_yard, relocated_handoff)
1041 };
1042 self.rewrite_entry_with(entry, &mut rewrite)
1043 }
1044
    /// Rewrites one root during yard collection. A root pointing into the
    /// collected region (yard, at or above `mark`, in bounds) is relocated;
    /// any other heap-backed root keeps its index but has its referenced
    /// entry rewritten in place so interior yard references are forwarded.
    fn relocate_yard_root(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            // Immediate value — nothing to relocate.
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if matches!(space, HeapSpace::Yard)
            && raw_index >= mark
            && raw_index < self.yard_entries.len() as u32
        {
            return self.relocate_yard_value(value, mark, relocated, compacted);
        }
        self.rewrite_yard_refs_in_place(space, raw_index, mark, relocated, compacted);
        value
    }
1065
    /// Copies one collected yard entry into `compacted`, memoizing the move
    /// in `relocated` (indexed by raw yard index; u32::MAX = not yet moved)
    /// and returning `value` rebased onto its post-compaction index
    /// (`mark` + position in `compacted`).
    fn relocate_yard_value(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if !matches!(space, HeapSpace::Yard) || raw_index < mark {
            // Outside the collected region — leave untouched.
            return value;
        }

        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        if relocated_index != u32::MAX {
            // Already moved — reuse the recorded forwarding index.
            return value.with_heap_index(relocated_index);
        }

        // Record the forwarding index and reserve a placeholder slot *before*
        // recursing into children, so cyclic structures terminate.
        let compacted_pos = compacted.len() as u32;
        let new_index = Self::encode_yard_index(mark + compacted_pos);
        relocated[relocation_slot] = new_index;
        compacted.push(ArenaEntry::Int(0));

        // Old slot is about to be truncated; leave a placeholder behind.
        let entry = std::mem::replace(
            &mut self.yard_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.relocate_yard_entry(entry, mark, relocated, compacted);
        compacted[compacted_pos as usize] = new_entry;
        value.with_heap_index(new_index)
    }
1100
1101 fn relocate_yard_entry(
1102 &mut self,
1103 entry: ArenaEntry,
1104 mark: u32,
1105 relocated: &mut [u32],
1106 compacted: &mut Vec<ArenaEntry>,
1107 ) -> ArenaEntry {
1108 let mut rewrite = |arena: &mut Arena, value: NanValue| {
1109 arena.relocate_yard_value(value, mark, relocated, compacted)
1110 };
1111 self.rewrite_entry_with(entry, &mut rewrite)
1112 }
1113
    /// Rewrites, in place, every reference held by the entry at
    /// (`space`, `raw_index`) so its pointers into the collected yard region
    /// get forwarded. Out-of-bounds indices are ignored; yard entries at or
    /// above `mark` are skipped here because relocation handles them.
    fn rewrite_yard_refs_in_place(
        &mut self,
        space: HeapSpace,
        raw_index: u32,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry>,
    ) {
        let raw_index = raw_index as usize;
        match space {
            HeapSpace::Young => {
                if raw_index >= self.young_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.young_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.young_entries[raw_index] = new_entry;
            }
            HeapSpace::Yard => {
                if raw_index >= self.yard_entries.len() || raw_index >= mark as usize {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.yard_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.yard_entries[raw_index] = new_entry;
            }
            HeapSpace::Handoff => {
                if raw_index >= self.handoff_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.handoff_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.handoff_entries[raw_index] = new_entry;
            }
            HeapSpace::Stable => {
                if raw_index >= self.stable_entries.len() {
                    return;
                }
                let entry =
                    std::mem::replace(&mut self.stable_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.stable_entries[raw_index] = new_entry;
            }
        }
    }
1162
1163 fn rewrite_yard_entry(
1164 &mut self,
1165 entry: ArenaEntry,
1166 mark: u32,
1167 relocated: &mut [u32],
1168 compacted: &mut Vec<ArenaEntry>,
1169 ) -> ArenaEntry {
1170 let mut rewrite = |arena: &mut Arena, value: NanValue| {
1171 arena.relocate_yard_root(value, mark, relocated, compacted)
1172 };
1173 self.rewrite_entry_with(entry, &mut rewrite)
1174 }
1175
1176 fn relocate_stable_root(
1177 &mut self,
1178 value: NanValue,
1179 relocated: &mut [u32],
1180 compacted: &mut Vec<ArenaEntry>,
1181 ) -> NanValue {
1182 let Some(index) = value.heap_index() else {
1183 return value;
1184 };
1185 if !matches!(Self::decode_index(index).0, HeapSpace::Stable) {
1186 return value;
1187 }
1188 self.relocate_stable_value(value, relocated, compacted)
1189 }
1190
1191 fn relocate_stable_value(
1192 &mut self,
1193 value: NanValue,
1194 relocated: &mut [u32],
1195 compacted: &mut Vec<ArenaEntry>,
1196 ) -> NanValue {
1197 let Some(index) = value.heap_index() else {
1198 return value;
1199 };
1200 let (space, raw_index) = Self::decode_index(index);
1201 if !matches!(space, HeapSpace::Stable) {
1202 return value;
1203 }
1204
1205 let relocation_slot = raw_index as usize;
1206 let relocated_index = relocated[relocation_slot];
1207 if relocated_index != u32::MAX {
1208 return value.with_heap_index(relocated_index);
1209 }
1210
1211 let new_index = Self::encode_stable_index(compacted.len() as u32);
1212 relocated[relocation_slot] = new_index;
1213 compacted.push(ArenaEntry::Int(0));
1214
1215 let entry = std::mem::replace(
1216 &mut self.stable_entries[raw_index as usize],
1217 ArenaEntry::Int(0),
1218 );
1219 let new_entry = self.relocate_stable_entry(entry, relocated, compacted);
1220 compacted[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
1221 value.with_heap_index(new_index)
1222 }
1223
1224 fn relocate_stable_entry(
1225 &mut self,
1226 entry: ArenaEntry,
1227 relocated: &mut [u32],
1228 compacted: &mut Vec<ArenaEntry>,
1229 ) -> ArenaEntry {
1230 let mut rewrite = |arena: &mut Arena, value: NanValue| {
1231 arena.relocate_stable_value(value, relocated, compacted)
1232 };
1233 self.rewrite_entry_with(entry, &mut rewrite)
1234 }
1235}