1use super::*;
2
3impl<T: ArenaTypes> Arena<T> {
4 pub fn truncate_to(&mut self, mark: u32) {
5 self.young_entries.truncate(mark as usize);
6 }
7
8 pub fn collect_young_from_roots(&mut self, mark: u32, roots: &mut [NanValue]) {
9 if self.young_entries.len() <= mark as usize {
10 return;
11 }
12
13 let mut relocated =
14 Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
15 let mut compacted = Vec::with_capacity(self.young_entries.len() - mark as usize);
16
17 for root in roots {
18 *root = self.relocate_young_root(*root, mark, &mut relocated, &mut compacted);
19 }
20
21 self.young_entries.truncate(mark as usize);
22 self.young_entries.extend(compacted);
23 Self::recycle_u32_scratch(&mut self.scratch_young, relocated);
24 }
25
26 pub fn truncate_yard_to(&mut self, mark: u32) {
27 self.yard_entries.truncate(mark as usize);
28 }
29
30 pub fn truncate_handoff_to(&mut self, mark: u32) {
31 self.handoff_entries.truncate(mark as usize);
32 }
33
    /// Evacuates a frame's live locals with the yard as the destination
    /// for surviving young values.
    ///
    /// Thin wrapper over `evacuate_frame_locals`; see that method for the
    /// meaning of the marks and of the returned pair of survivor flags.
    pub fn evacuate_frame_to_yard(
        &mut self,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        roots: &mut [NanValue],
    ) -> (bool, bool) {
        self.evacuate_frame_locals(young_mark, yard_mark, handoff_mark, roots, AllocSpace::Yard)
    }
43
    /// Evacuates a frame's live locals with the handoff region as the
    /// destination for surviving young values.
    ///
    /// Thin wrapper over `evacuate_frame_locals`; see that method for the
    /// meaning of the marks and of the returned pair of survivor flags.
    pub fn evacuate_frame_to_handoff(
        &mut self,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        roots: &mut [NanValue],
    ) -> (bool, bool) {
        self.evacuate_frame_locals(
            young_mark,
            yard_mark,
            handoff_mark,
            roots,
            AllocSpace::Handoff,
        )
    }
59
60 fn evacuate_frame_locals(
61 &mut self,
62 young_mark: u32,
63 yard_mark: u32,
64 handoff_mark: u32,
65 roots: &mut [NanValue],
66 young_target: AllocSpace,
67 ) -> (bool, bool) {
68 let mut relocated_young = Self::take_u32_scratch(
69 &mut self.scratch_young,
70 self.young_entries.len().saturating_sub(young_mark as usize),
71 );
72 let mut relocated_yard = Self::take_u32_scratch(
73 &mut self.scratch_yard,
74 self.yard_entries.len().saturating_sub(yard_mark as usize),
75 );
76 let mut relocated_handoff = Self::take_u32_scratch(
77 &mut self.scratch_handoff,
78 self.handoff_entries
79 .len()
80 .saturating_sub(handoff_mark as usize),
81 );
82 let mut compacted_yard =
83 Vec::with_capacity(self.yard_entries.len().saturating_sub(yard_mark as usize));
84 let mut compacted_handoff = Vec::with_capacity(
85 self.handoff_entries
86 .len()
87 .saturating_sub(handoff_mark as usize),
88 );
89
90 for root in roots {
91 *root = self.evacuate_local_root(
92 *root,
93 young_mark,
94 yard_mark,
95 handoff_mark,
96 young_target,
97 &mut relocated_young,
98 &mut relocated_yard,
99 &mut relocated_handoff,
100 &mut compacted_yard,
101 &mut compacted_handoff,
102 );
103 }
104
105 self.young_entries.truncate(young_mark as usize);
106 self.yard_entries.truncate(yard_mark as usize);
107 self.yard_entries.extend(compacted_yard);
108 self.handoff_entries.truncate(handoff_mark as usize);
109 self.handoff_entries.extend(compacted_handoff);
110 self.note_peak_usage();
111 Self::recycle_u32_scratch(&mut self.scratch_young, relocated_young);
112 Self::recycle_u32_scratch(&mut self.scratch_yard, relocated_yard);
113 Self::recycle_u32_scratch(&mut self.scratch_handoff, relocated_handoff);
114
115 (
116 self.yard_entries.len() > yard_mark as usize,
117 self.handoff_entries.len() > handoff_mark as usize,
118 )
119 }
120
121 fn allocate_local_target_slot(
122 target: AllocSpace,
123 yard_mark: u32,
124 handoff_mark: u32,
125 compacted_yard: &mut Vec<ArenaEntry<T>>,
126 compacted_handoff: &mut Vec<ArenaEntry<T>>,
127 ) -> (u32, u32) {
128 match target {
129 AllocSpace::Yard => {
130 let pos = compacted_yard.len() as u32;
131 let idx = Self::encode_yard_index(yard_mark + pos);
132 compacted_yard.push(ArenaEntry::Int(0));
133 (idx, pos)
134 }
135 AllocSpace::Handoff => {
136 let pos = compacted_handoff.len() as u32;
137 let idx = Self::encode_handoff_index(handoff_mark + pos);
138 compacted_handoff.push(ArenaEntry::Int(0));
139 (idx, pos)
140 }
141 AllocSpace::Young => unreachable!("local evacuation target must be yard or handoff"),
142 }
143 }
144
145 fn store_local_target_entry(
146 target: AllocSpace,
147 compacted_pos: u32,
148 entry: ArenaEntry<T>,
149 compacted_yard: &mut [ArenaEntry<T>],
150 compacted_handoff: &mut [ArenaEntry<T>],
151 ) {
152 match target {
153 AllocSpace::Yard => compacted_yard[compacted_pos as usize] = entry,
154 AllocSpace::Handoff => compacted_handoff[compacted_pos as usize] = entry,
155 AllocSpace::Young => unreachable!(),
156 }
157 }
158
    /// Rebuilds `entry` with every child `NanValue` passed through
    /// `rewrite`. Leaf variants (ints, strings, builtins, fns) carry no
    /// heap references and pass through unchanged.
    ///
    /// This is the single traversal primitive shared by all of the
    /// relocation/promotion passes; each pass supplies its own `rewrite`
    /// closure.
    #[inline(always)]
    fn rewrite_entry_with<F>(&mut self, entry: ArenaEntry<T>, rewrite: &mut F) -> ArenaEntry<T>
    where
        F: FnMut(&mut Arena<T>, NanValue) -> NanValue,
    {
        match entry {
            // Leaves: no embedded NanValues to rewrite.
            ArenaEntry::Int(i) => ArenaEntry::Int(i),
            ArenaEntry::String(s) => ArenaEntry::String(s),
            ArenaEntry::Builtin(name) => ArenaEntry::Builtin(name),
            ArenaEntry::Fn(f) => ArenaEntry::Fn(f),
            ArenaEntry::Boxed(inner) => ArenaEntry::Boxed(rewrite(self, inner)),
            // Lists have their own structural traversal.
            ArenaEntry::List(list) => ArenaEntry::List(self.rewrite_list_with(list, rewrite)),
            ArenaEntry::Tuple(mut items) => {
                for value in &mut items {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Tuple(items)
            }
            ArenaEntry::Vector(mut items) => {
                for value in &mut items {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Vector(items)
            }
            ArenaEntry::Map(map) => {
                // Maps are rebuilt from scratch so that the rewritten
                // key/value handles are the ones stored.
                let mut out = T::Map::new();
                for (&hash, &(key, value)) in map.iter() {
                    out = out.insert(hash, (rewrite(self, key), rewrite(self, value)));
                }
                ArenaEntry::Map(out)
            }
            ArenaEntry::Record {
                type_id,
                mut fields,
            } => {
                for value in &mut fields {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Record { type_id, fields }
            }
            ArenaEntry::Variant {
                type_id,
                variant_id,
                mut fields,
            } => {
                for value in &mut fields {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Variant {
                    type_id,
                    variant_id,
                    fields,
                }
            }
            ArenaEntry::Namespace { name, mut members } => {
                for (_, value) in &mut members {
                    *value = rewrite(self, *value);
                }
                ArenaEntry::Namespace { name, members }
            }
        }
    }
221
    /// List counterpart of `rewrite_entry_with`: each list representation
    /// keeps its shape, but every stored `NanValue` is passed through
    /// `rewrite`.
    ///
    /// Flat and segment storage is re-sliced from `start`, so the rebuilt
    /// copy always begins at offset 0 (the old prefix is dropped).
    #[inline(always)]
    fn rewrite_list_with<F>(&mut self, list: ArenaList, rewrite: &mut F) -> ArenaList
    where
        F: FnMut(&mut Arena<T>, NanValue) -> NanValue,
    {
        match list {
            ArenaList::Flat { items, start } => ArenaList::Flat {
                items: Rc::new(
                    items[start..]
                        .iter()
                        .map(|value| rewrite(self, *value))
                        .collect(),
                ),
                // The copy starts fresh: the skipped prefix is gone.
                start: 0,
            },
            ArenaList::Prepend { head, tail, len } => ArenaList::Prepend {
                head: rewrite(self, head),
                tail: rewrite(self, tail),
                len,
            },
            ArenaList::Concat { left, right, len } => ArenaList::Concat {
                left: rewrite(self, left),
                right: rewrite(self, right),
                len,
            },
            ArenaList::Segments {
                current,
                rest,
                start,
                len,
            } => ArenaList::Segments {
                current: rewrite(self, current),
                rest: Rc::new(
                    rest[start..]
                        .iter()
                        .map(|value| rewrite(self, *value))
                        .collect(),
                ),
                start: 0,
                len,
            },
        }
    }
265
266 pub fn flatten_deep_list(&mut self, value: NanValue) -> NanValue {
268 const FLATTEN_THRESHOLD: usize = 64;
269
270 if !value.is_list() || value.is_empty_list_immediate() {
271 return value;
272 }
273 let len = self.list_len_value(value);
274 if len <= FLATTEN_THRESHOLD {
275 return value;
276 }
277 let elements = self.list_to_vec_value(value);
278 let flat = ArenaList::Flat {
279 items: Rc::new(elements),
280 start: 0,
281 };
282 let index = self.push(ArenaEntry::List(flat));
283 NanValue::new_list(index)
284 }
285
    /// Evacuation dispatch for a single frame-local root.
    ///
    /// Non-heap (immediate) values pass through untouched. Heap values
    /// are evacuated only when they live in the collectable tail of their
    /// region (at or above the matching mark); stable values and entries
    /// below the marks are returned unchanged.
    #[allow(clippy::too_many_arguments)]
    fn evacuate_local_root(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry<T>>,
        compacted_handoff: &mut Vec<ArenaEntry<T>>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            // Immediate value: no heap reference to move.
            return value;
        };
        let (space, _) = Self::decode_index(index);
        match space {
            HeapSpace::Young if self.is_young_index_in_region(index, young_mark) => self
                .evacuate_young_value(
                    value,
                    young_mark,
                    yard_mark,
                    handoff_mark,
                    young_target,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                    compacted_yard,
                    compacted_handoff,
                ),
            HeapSpace::Yard if self.is_yard_index_in_region(index, yard_mark) => self
                .evacuate_yard_value(
                    value,
                    young_mark,
                    yard_mark,
                    handoff_mark,
                    young_target,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                    compacted_yard,
                    compacted_handoff,
                ),
            HeapSpace::Handoff if self.is_handoff_index_in_region(index, handoff_mark) => self
                .evacuate_handoff_value(
                    value,
                    young_mark,
                    yard_mark,
                    handoff_mark,
                    young_target,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                    compacted_yard,
                    compacted_handoff,
                ),
            // Stable values and below-mark entries stay where they are.
            _ => value,
        }
    }
347
    /// Moves one young value (at or above `young_mark`) into the
    /// compaction buffer chosen by `young_target`, returning the value
    /// rewritten to its new heap index.
    ///
    /// A forwarding table is consulted first so each entry moves at most
    /// once; the forwarding entry is published *before* the children are
    /// evacuated, so back-references into this entry resolve through the
    /// table instead of recursing again.
    #[allow(clippy::too_many_arguments)]
    fn evacuate_young_value(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry<T>>,
        compacted_handoff: &mut Vec<ArenaEntry<T>>,
    ) -> NanValue {
        let index = value.heap_index().expect("young value must be heap-backed");
        let (_, raw_index) = Self::decode_index(index);
        // Forwarding slots are mark-relative: slot 0 is the entry at the mark.
        let relocation_slot = (raw_index - young_mark) as usize;
        let relocated_index = relocated_young[relocation_slot];
        if relocated_index != u32::MAX {
            // Already moved (u32::MAX is the "not yet relocated" sentinel).
            return value.with_heap_index(relocated_index);
        }

        // Reserve the destination slot and publish the forwarding entry
        // before touching the children.
        let (new_index, compacted_pos) = Self::allocate_local_target_slot(
            young_target,
            yard_mark,
            handoff_mark,
            compacted_yard,
            compacted_handoff,
        );
        relocated_young[relocation_slot] = new_index;

        // Take the entry out of young space, leaving a cheap placeholder.
        let entry = core::mem::replace(
            &mut self.young_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.evacuate_local_entry(
            entry,
            young_mark,
            yard_mark,
            handoff_mark,
            young_target,
            relocated_young,
            relocated_yard,
            relocated_handoff,
            compacted_yard,
            compacted_handoff,
        );
        Self::store_local_target_entry(
            young_target,
            compacted_pos,
            new_entry,
            compacted_yard,
            compacted_handoff,
        );
        value.with_heap_index(new_index)
    }
404
    /// Moves one yard value (at or above `yard_mark`) into a compaction
    /// buffer, returning the value rewritten to its new heap index.
    ///
    /// Same forwarding-table discipline as `evacuate_young_value`: the
    /// forwarding entry is published before children are evacuated.
    ///
    /// NOTE(review): the destination follows `young_target`, so with a
    /// handoff target, yard survivors migrate into handoff rather than
    /// compacting back into the yard — looks intentional for whole-frame
    /// handoff; confirm.
    #[allow(clippy::too_many_arguments)]
    fn evacuate_yard_value(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry<T>>,
        compacted_handoff: &mut Vec<ArenaEntry<T>>,
    ) -> NanValue {
        let index = value.heap_index().expect("yard value must be heap-backed");
        let (_, raw_index) = Self::decode_index(index);
        // Mark-relative forwarding slot; u32::MAX means "not yet moved".
        let relocation_slot = (raw_index - yard_mark) as usize;
        let relocated_index = relocated_yard[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // Re-assert the invariant that the target is yard or handoff.
        let target = match young_target {
            AllocSpace::Yard => AllocSpace::Yard,
            AllocSpace::Handoff => AllocSpace::Handoff,
            AllocSpace::Young => unreachable!("local evacuation must target yard or handoff"),
        };
        let (new_index, compacted_pos) = Self::allocate_local_target_slot(
            target,
            yard_mark,
            handoff_mark,
            compacted_yard,
            compacted_handoff,
        );
        relocated_yard[relocation_slot] = new_index;

        // Take the entry out, leaving a cheap placeholder behind.
        let entry = core::mem::replace(
            &mut self.yard_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.evacuate_local_entry(
            entry,
            young_mark,
            yard_mark,
            handoff_mark,
            young_target,
            relocated_young,
            relocated_yard,
            relocated_handoff,
            compacted_yard,
            compacted_handoff,
        );
        Self::store_local_target_entry(
            target,
            compacted_pos,
            new_entry,
            compacted_yard,
            compacted_handoff,
        );
        value.with_heap_index(new_index)
    }
466
    /// Moves one handoff value (at or above `handoff_mark`) into a
    /// compaction buffer, returning the value rewritten to its new index.
    ///
    /// Same forwarding-table discipline as `evacuate_young_value`; like
    /// `evacuate_yard_value`, the destination follows `young_target`
    /// (asserted to be yard or handoff).
    #[allow(clippy::too_many_arguments)]
    fn evacuate_handoff_value(
        &mut self,
        value: NanValue,
        young_mark: u32,
        yard_mark: u32,
        handoff_mark: u32,
        young_target: AllocSpace,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
        compacted_yard: &mut Vec<ArenaEntry<T>>,
        compacted_handoff: &mut Vec<ArenaEntry<T>>,
    ) -> NanValue {
        let index = value
            .heap_index()
            .expect("handoff value must be heap-backed");
        let (_, raw_index) = Self::decode_index(index);
        // Mark-relative forwarding slot; u32::MAX means "not yet moved".
        let relocation_slot = (raw_index - handoff_mark) as usize;
        let relocated_index = relocated_handoff[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // Re-assert the invariant that the target is yard or handoff.
        let target = match young_target {
            AllocSpace::Yard => AllocSpace::Yard,
            AllocSpace::Handoff => AllocSpace::Handoff,
            AllocSpace::Young => unreachable!("local evacuation must target yard or handoff"),
        };
        let (new_index, compacted_pos) = Self::allocate_local_target_slot(
            target,
            yard_mark,
            handoff_mark,
            compacted_yard,
            compacted_handoff,
        );
        relocated_handoff[relocation_slot] = new_index;

        // Take the entry out, leaving a cheap placeholder behind.
        let entry = core::mem::replace(
            &mut self.handoff_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.evacuate_local_entry(
            entry,
            young_mark,
            yard_mark,
            handoff_mark,
            young_target,
            relocated_young,
            relocated_yard,
            relocated_handoff,
            compacted_yard,
            compacted_handoff,
        );
        Self::store_local_target_entry(
            target,
            compacted_pos,
            new_entry,
            compacted_yard,
            compacted_handoff,
        );
        value.with_heap_index(new_index)
    }
530
531 #[allow(clippy::too_many_arguments)]
532 fn evacuate_local_entry(
533 &mut self,
534 entry: ArenaEntry<T>,
535 young_mark: u32,
536 yard_mark: u32,
537 handoff_mark: u32,
538 young_target: AllocSpace,
539 relocated_young: &mut [u32],
540 relocated_yard: &mut [u32],
541 relocated_handoff: &mut [u32],
542 compacted_yard: &mut Vec<ArenaEntry<T>>,
543 compacted_handoff: &mut Vec<ArenaEntry<T>>,
544 ) -> ArenaEntry<T> {
545 let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
546 arena.evacuate_local_root(
547 value,
548 young_mark,
549 yard_mark,
550 handoff_mark,
551 young_target,
552 relocated_young,
553 relocated_yard,
554 relocated_handoff,
555 compacted_yard,
556 compacted_handoff,
557 )
558 };
559 self.rewrite_entry_with(entry, &mut rewrite)
560 }
561
562 fn relocate_young_root(
563 &mut self,
564 value: NanValue,
565 mark: u32,
566 relocated: &mut [u32],
567 compacted: &mut Vec<ArenaEntry<T>>,
568 ) -> NanValue {
569 let Some(index) = value.heap_index() else {
570 return value;
571 };
572 let (space, raw_index) = Self::decode_index(index);
573 if matches!(space, HeapSpace::Young)
574 && raw_index >= mark
575 && raw_index < self.young_entries.len() as u32
576 {
577 return self.relocate_young_value(value, mark, relocated, compacted);
578 }
579 self.rewrite_young_refs_in_place(space, raw_index, mark, relocated, compacted);
580 value
581 }
582
    /// Moves one movable young entry into `compacted`, returning the value
    /// rewritten to its post-compaction index (`mark` + position).
    ///
    /// The forwarding entry is published before children are relocated, so
    /// revisits of this entry (including cycles) resolve through the table.
    fn relocate_young_value(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry<T>>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if !matches!(space, HeapSpace::Young) || raw_index < mark {
            // Only young entries above the mark move.
            return value;
        }

        // Forwarding table is indexed by absolute young index;
        // u32::MAX marks "not yet relocated".
        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // Reserve the destination slot and publish the forwarding entry
        // before recursing.
        let compacted_pos = compacted.len() as u32;
        let new_index = Self::encode_index(HeapSpace::Young, mark + compacted_pos);
        relocated[relocation_slot] = new_index;
        compacted.push(ArenaEntry::Int(0));

        // Take the entry out, leaving a cheap placeholder behind.
        let entry = core::mem::replace(
            &mut self.young_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.relocate_young_entry(entry, mark, relocated, compacted);
        compacted[compacted_pos as usize] = new_entry;
        value.with_heap_index(new_index)
    }
617
618 fn relocate_young_entry(
619 &mut self,
620 entry: ArenaEntry<T>,
621 mark: u32,
622 relocated: &mut [u32],
623 compacted: &mut Vec<ArenaEntry<T>>,
624 ) -> ArenaEntry<T> {
625 let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
626 arena.relocate_young_value(value, mark, relocated, compacted)
627 };
628 self.rewrite_entry_with(entry, &mut rewrite)
629 }
630
    /// Rewrites, in place, the young references held by an entry that is
    /// itself staying put (below-mark young, or any yard/handoff/stable
    /// entry).
    ///
    /// Young entries at or above `mark` are skipped here — those move via
    /// `relocate_young_value` — and out-of-bounds indices are ignored
    /// defensively. Each arm swaps the entry out for an `Int(0)`
    /// placeholder before recursing; the placeholder also breaks cycles,
    /// since re-entering the same index mid-rewrite finds the placeholder.
    fn rewrite_young_refs_in_place(
        &mut self,
        space: HeapSpace,
        raw_index: u32,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry<T>>,
    ) {
        let raw_index = raw_index as usize;
        match space {
            HeapSpace::Young => {
                if raw_index >= self.young_entries.len() || raw_index >= mark as usize {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.young_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.young_entries[raw_index] = new_entry;
            }
            HeapSpace::Yard => {
                if raw_index >= self.yard_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.yard_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.yard_entries[raw_index] = new_entry;
            }
            HeapSpace::Handoff => {
                if raw_index >= self.handoff_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.handoff_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.handoff_entries[raw_index] = new_entry;
            }
            HeapSpace::Stable => {
                if raw_index >= self.stable_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.stable_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_young_entry(entry, mark, relocated, compacted);
                self.stable_entries[raw_index] = new_entry;
            }
        }
    }
679
680 fn rewrite_young_entry(
681 &mut self,
682 entry: ArenaEntry<T>,
683 mark: u32,
684 relocated: &mut [u32],
685 compacted: &mut Vec<ArenaEntry<T>>,
686 ) -> ArenaEntry<T> {
687 let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
688 arena.relocate_young_root(value, mark, relocated, compacted)
689 };
690 self.rewrite_entry_with(entry, &mut rewrite)
691 }
692
693 fn promote_region_root_to_target(
694 &mut self,
695 value: NanValue,
696 mark: u32,
697 relocated: &mut [u32],
698 target: AllocSpace,
699 ) -> NanValue {
700 let Some(index) = value.heap_index() else {
701 return value;
702 };
703 let (space, raw_index) = Self::decode_index(index);
704 if matches!(space, HeapSpace::Young)
705 && raw_index >= mark
706 && raw_index < self.young_entries.len() as u32
707 {
708 return self.promote_value_to_target(value, mark, relocated, target);
709 }
710 self.rewrite_promoted_young_refs_in_place(space, raw_index, mark, relocated, target);
711 value
712 }
713
714 pub fn promote_young_roots_to_yard(&mut self, mark: u32, roots: &mut [NanValue]) {
715 if self.young_entries.len() <= mark as usize {
716 return;
717 }
718
719 let mut relocated =
720 Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
721
722 for root in roots {
723 *root = self.promote_region_root_to_yard(*root, mark, &mut relocated);
724 }
725
726 self.young_entries.truncate(mark as usize);
727 Self::recycle_u32_scratch(&mut self.scratch_young, relocated);
728 }
729
730 pub fn promote_young_roots_to_handoff(&mut self, mark: u32, roots: &mut [NanValue]) {
731 if self.young_entries.len() <= mark as usize {
732 return;
733 }
734
735 let mut relocated =
736 Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
737
738 for root in roots {
739 *root = self.promote_region_root_to_handoff(*root, mark, &mut relocated);
740 }
741
742 self.young_entries.truncate(mark as usize);
743 Self::recycle_u32_scratch(&mut self.scratch_young, relocated);
744 }
745
746 pub fn promote_roots_to_stable(&mut self, roots: &mut [NanValue]) {
747 let mut relocated_young =
748 Self::take_u32_scratch(&mut self.scratch_young, self.young_entries.len());
749 let mut relocated_yard =
750 Self::take_u32_scratch(&mut self.scratch_yard, self.yard_entries.len());
751 let mut relocated_handoff =
752 Self::take_u32_scratch(&mut self.scratch_handoff, self.handoff_entries.len());
753
754 for root in roots {
755 *root = self.promote_value_to_stable(
756 *root,
757 &mut relocated_young,
758 &mut relocated_yard,
759 &mut relocated_handoff,
760 );
761 }
762 Self::recycle_u32_scratch(&mut self.scratch_young, relocated_young);
763 Self::recycle_u32_scratch(&mut self.scratch_yard, relocated_yard);
764 Self::recycle_u32_scratch(&mut self.scratch_handoff, relocated_handoff);
765 }
766
767 pub fn collect_yard_from_roots(&mut self, mark: u32, roots: &mut [NanValue]) {
768 if self.yard_entries.len() <= mark as usize {
769 return;
770 }
771
772 let mut relocated = Self::take_u32_scratch(&mut self.scratch_yard, self.yard_entries.len());
773 let mut compacted = Vec::with_capacity(self.yard_entries.len() - mark as usize);
774
775 for root in roots {
776 *root = self.relocate_yard_root(*root, mark, &mut relocated, &mut compacted);
777 }
778
779 self.yard_entries.truncate(mark as usize);
780 self.yard_entries.extend(compacted);
781 Self::recycle_u32_scratch(&mut self.scratch_yard, relocated);
782 }
783
784 pub fn collect_stable_from_roots(&mut self, roots: &mut [NanValue]) {
785 if self.stable_entries.is_empty() {
786 return;
787 }
788
789 let mut relocated =
790 Self::take_u32_scratch(&mut self.scratch_stable, self.stable_entries.len());
791 let mut compacted = Vec::with_capacity(self.stable_entries.len());
792
793 for root in roots {
794 *root = self.relocate_stable_root(*root, &mut relocated, &mut compacted);
795 }
796
797 self.stable_entries = compacted;
798 Self::recycle_u32_scratch(&mut self.scratch_stable, relocated);
799 }
800
    /// Promotion of one root with the yard as the destination; thin
    /// wrapper over `promote_region_root_to_target`.
    fn promote_region_root_to_yard(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
    ) -> NanValue {
        self.promote_region_root_to_target(value, mark, relocated, AllocSpace::Yard)
    }
809
    /// Promotion of one root with the handoff region as the destination;
    /// thin wrapper over `promote_region_root_to_target`.
    fn promote_region_root_to_handoff(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
    ) -> NanValue {
        self.promote_region_root_to_target(value, mark, relocated, AllocSpace::Handoff)
    }
818
    /// Promotion counterpart of `rewrite_young_refs_in_place`: rewrites,
    /// in place, the young references held by an entry that is itself not
    /// moving.
    ///
    /// Young entries at or above `mark` are skipped (those are promoted,
    /// not rewritten in place) and out-of-bounds indices are ignored. The
    /// `Int(0)` placeholder swapped in before recursion also breaks
    /// reference cycles through the same index.
    fn rewrite_promoted_young_refs_in_place(
        &mut self,
        space: HeapSpace,
        raw_index: u32,
        mark: u32,
        relocated: &mut [u32],
        target: AllocSpace,
    ) {
        let raw_index = raw_index as usize;
        match space {
            HeapSpace::Young => {
                if raw_index >= self.young_entries.len() || raw_index >= mark as usize {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.young_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.young_entries[raw_index] = new_entry;
            }
            HeapSpace::Yard => {
                if raw_index >= self.yard_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.yard_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.yard_entries[raw_index] = new_entry;
            }
            HeapSpace::Handoff => {
                if raw_index >= self.handoff_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.handoff_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.handoff_entries[raw_index] = new_entry;
            }
            HeapSpace::Stable => {
                if raw_index >= self.stable_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.stable_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_promoted_young_entry(entry, mark, relocated, target);
                self.stable_entries[raw_index] = new_entry;
            }
        }
    }
867
868 fn rewrite_promoted_young_entry(
869 &mut self,
870 entry: ArenaEntry<T>,
871 mark: u32,
872 relocated: &mut [u32],
873 target: AllocSpace,
874 ) -> ArenaEntry<T> {
875 let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
876 arena.promote_region_root_to_target(value, mark, relocated, target)
877 };
878 self.rewrite_entry_with(entry, &mut rewrite)
879 }
880
    /// Promotes one movable young value (at or above `mark`) to the end
    /// of the target region (yard or handoff), returning the value with
    /// its heap index rewritten.
    ///
    /// The forwarding entry is published before children are promoted, so
    /// revisits (including cycles) resolve through the table.
    fn promote_value_to_target(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        target: AllocSpace,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if !matches!(space, HeapSpace::Young) || raw_index < mark {
            // Only young entries above the mark are promoted.
            return value;
        }

        // Forwarding table is indexed by absolute young index;
        // u32::MAX marks "not promoted yet".
        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // Reserve a slot at the end of the target region and publish the
        // forwarding entry before recursing into children.
        let new_index = match target {
            AllocSpace::Yard => Self::encode_yard_index(self.yard_entries.len() as u32),
            AllocSpace::Handoff => Self::encode_handoff_index(self.handoff_entries.len() as u32),
            AllocSpace::Young => unreachable!("promotion target must be yard or handoff"),
        };
        relocated[relocation_slot] = new_index;
        match target {
            AllocSpace::Yard => self.yard_entries.push(ArenaEntry::Int(0)),
            AllocSpace::Handoff => self.handoff_entries.push(ArenaEntry::Int(0)),
            AllocSpace::Young => unreachable!(),
        }
        self.note_peak_usage();

        // Take the entry out of young space, leaving a cheap placeholder.
        let entry = core::mem::replace(
            &mut self.young_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.promote_entry_to_target(entry, mark, relocated, target);
        // Mask off the space tag to recover the raw slot position.
        match target {
            AllocSpace::Yard => {
                self.yard_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
            }
            AllocSpace::Handoff => {
                self.handoff_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
            }
            AllocSpace::Young => unreachable!(),
        }
        value.with_heap_index(new_index)
    }
931
932 fn promote_entry_to_target(
933 &mut self,
934 entry: ArenaEntry<T>,
935 mark: u32,
936 relocated: &mut [u32],
937 target: AllocSpace,
938 ) -> ArenaEntry<T> {
939 let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
940 arena.promote_region_root_to_target(value, mark, relocated, target)
941 };
942 self.rewrite_entry_with(entry, &mut rewrite)
943 }
944
    /// Copies one value (and, recursively, everything it references) from
    /// young/yard/handoff space into stable space, returning the value
    /// with its heap index rewritten. Stable values pass through.
    ///
    /// Unlike the relocation passes, the source entry is *cloned* rather
    /// than swapped out, so the source regions remain intact; the
    /// forwarding tables (one per region, u32::MAX = not yet copied) are
    /// published before recursion, which also terminates cycles.
    fn promote_value_to_stable(
        &mut self,
        value: NanValue,
        relocated_young: &mut [u32],
        relocated_yard: &mut [u32],
        relocated_handoff: &mut [u32],
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        match space {
            HeapSpace::Young => {
                let relocation_slot = raw_index as usize;
                let relocated_index = relocated_young[relocation_slot];
                if relocated_index != u32::MAX {
                    // Already copied to stable.
                    return value.with_heap_index(relocated_index);
                }

                // Reserve the stable slot and publish the forwarding entry
                // before recursing into children.
                let new_index = Self::encode_stable_index(self.stable_entries.len() as u32);
                relocated_young[relocation_slot] = new_index;
                self.stable_entries.push(ArenaEntry::Int(0));
                self.note_peak_usage();

                let entry = self.young_entries[raw_index as usize].clone();
                let new_entry = self.promote_entry_to_stable(
                    entry,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                );
                self.stable_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
                value.with_heap_index(new_index)
            }
            HeapSpace::Yard => {
                let relocation_slot = raw_index as usize;
                let relocated_index = relocated_yard[relocation_slot];
                if relocated_index != u32::MAX {
                    return value.with_heap_index(relocated_index);
                }

                let new_index = Self::encode_stable_index(self.stable_entries.len() as u32);
                relocated_yard[relocation_slot] = new_index;
                self.stable_entries.push(ArenaEntry::Int(0));
                self.note_peak_usage();

                let entry = self.yard_entries[raw_index as usize].clone();
                let new_entry = self.promote_entry_to_stable(
                    entry,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                );
                self.stable_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
                value.with_heap_index(new_index)
            }
            HeapSpace::Handoff => {
                let relocation_slot = raw_index as usize;
                let relocated_index = relocated_handoff[relocation_slot];
                if relocated_index != u32::MAX {
                    return value.with_heap_index(relocated_index);
                }

                let new_index = Self::encode_stable_index(self.stable_entries.len() as u32);
                relocated_handoff[relocation_slot] = new_index;
                self.stable_entries.push(ArenaEntry::Int(0));
                self.note_peak_usage();

                let entry = self.handoff_entries[raw_index as usize].clone();
                let new_entry = self.promote_entry_to_stable(
                    entry,
                    relocated_young,
                    relocated_yard,
                    relocated_handoff,
                );
                self.stable_entries[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
                value.with_heap_index(new_index)
            }
            // Already stable: nothing to do.
            HeapSpace::Stable => value,
        }
    }
1026
1027 fn promote_entry_to_stable(
1028 &mut self,
1029 entry: ArenaEntry<T>,
1030 relocated_young: &mut [u32],
1031 relocated_yard: &mut [u32],
1032 relocated_handoff: &mut [u32],
1033 ) -> ArenaEntry<T> {
1034 let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
1035 arena.promote_value_to_stable(value, relocated_young, relocated_yard, relocated_handoff)
1036 };
1037 self.rewrite_entry_with(entry, &mut rewrite)
1038 }
1039
1040 fn relocate_yard_root(
1041 &mut self,
1042 value: NanValue,
1043 mark: u32,
1044 relocated: &mut [u32],
1045 compacted: &mut Vec<ArenaEntry<T>>,
1046 ) -> NanValue {
1047 let Some(index) = value.heap_index() else {
1048 return value;
1049 };
1050 let (space, raw_index) = Self::decode_index(index);
1051 if matches!(space, HeapSpace::Yard)
1052 && raw_index >= mark
1053 && raw_index < self.yard_entries.len() as u32
1054 {
1055 return self.relocate_yard_value(value, mark, relocated, compacted);
1056 }
1057 self.rewrite_yard_refs_in_place(space, raw_index, mark, relocated, compacted);
1058 value
1059 }
1060
    /// Moves one movable yard entry into `compacted`, returning the value
    /// rewritten to its post-compaction index (`mark` + position).
    ///
    /// Mirrors `relocate_young_value`: the forwarding entry is published
    /// before children are relocated so revisits (including cycles)
    /// resolve through the table.
    fn relocate_yard_value(
        &mut self,
        value: NanValue,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry<T>>,
    ) -> NanValue {
        let Some(index) = value.heap_index() else {
            return value;
        };
        let (space, raw_index) = Self::decode_index(index);
        if !matches!(space, HeapSpace::Yard) || raw_index < mark {
            // Only yard entries above the mark move.
            return value;
        }

        // Forwarding table is indexed by absolute yard index;
        // u32::MAX marks "not yet relocated".
        let relocation_slot = raw_index as usize;
        let relocated_index = relocated[relocation_slot];
        if relocated_index != u32::MAX {
            return value.with_heap_index(relocated_index);
        }

        // Reserve the destination slot and publish the forwarding entry
        // before recursing.
        let compacted_pos = compacted.len() as u32;
        let new_index = Self::encode_yard_index(mark + compacted_pos);
        relocated[relocation_slot] = new_index;
        compacted.push(ArenaEntry::Int(0));

        // Take the entry out, leaving a cheap placeholder behind.
        let entry = core::mem::replace(
            &mut self.yard_entries[raw_index as usize],
            ArenaEntry::Int(0),
        );
        let new_entry = self.relocate_yard_entry(entry, mark, relocated, compacted);
        compacted[compacted_pos as usize] = new_entry;
        value.with_heap_index(new_index)
    }
1095
1096 fn relocate_yard_entry(
1097 &mut self,
1098 entry: ArenaEntry<T>,
1099 mark: u32,
1100 relocated: &mut [u32],
1101 compacted: &mut Vec<ArenaEntry<T>>,
1102 ) -> ArenaEntry<T> {
1103 let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
1104 arena.relocate_yard_value(value, mark, relocated, compacted)
1105 };
1106 self.rewrite_entry_with(entry, &mut rewrite)
1107 }
1108
    /// Rewrites, in place, the yard references held by the entry at
    /// (`space`, `raw_index`). The entry itself does not move; only the
    /// values stored inside it are forwarded via `rewrite_yard_entry`.
    ///
    /// Each branch uses the same take/rewrite/restore dance: the entry is
    /// swapped out for a dummy `ArenaEntry::Int(0)` so the recursive rewrite
    /// can take `&mut self` without aliasing the entry vector, then the
    /// rewritten entry is stored back into the same slot.
    fn rewrite_yard_refs_in_place(
        &mut self,
        space: HeapSpace,
        raw_index: u32,
        mark: u32,
        relocated: &mut [u32],
        compacted: &mut Vec<ArenaEntry<T>>,
    ) {
        let raw_index = raw_index as usize;
        match space {
            HeapSpace::Young => {
                // Out-of-range indices are ignored rather than panicking.
                if raw_index >= self.young_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.young_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.young_entries[raw_index] = new_entry;
            }
            HeapSpace::Yard => {
                // Only yard entries *below* the mark are rewritten in place;
                // entries at or above the mark are moved into `compacted`
                // by `relocate_yard_value` instead.
                if raw_index >= self.yard_entries.len() || raw_index >= mark as usize {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.yard_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.yard_entries[raw_index] = new_entry;
            }
            HeapSpace::Handoff => {
                if raw_index >= self.handoff_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.handoff_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.handoff_entries[raw_index] = new_entry;
            }
            HeapSpace::Stable => {
                if raw_index >= self.stable_entries.len() {
                    return;
                }
                let entry =
                    core::mem::replace(&mut self.stable_entries[raw_index], ArenaEntry::Int(0));
                let new_entry = self.rewrite_yard_entry(entry, mark, relocated, compacted);
                self.stable_entries[raw_index] = new_entry;
            }
        }
    }
1157
1158 fn rewrite_yard_entry(
1159 &mut self,
1160 entry: ArenaEntry<T>,
1161 mark: u32,
1162 relocated: &mut [u32],
1163 compacted: &mut Vec<ArenaEntry<T>>,
1164 ) -> ArenaEntry<T> {
1165 let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
1166 arena.relocate_yard_root(value, mark, relocated, compacted)
1167 };
1168 self.rewrite_entry_with(entry, &mut rewrite)
1169 }
1170
1171 fn relocate_stable_root(
1172 &mut self,
1173 value: NanValue,
1174 relocated: &mut [u32],
1175 compacted: &mut Vec<ArenaEntry<T>>,
1176 ) -> NanValue {
1177 let Some(index) = value.heap_index() else {
1178 return value;
1179 };
1180 if !matches!(Self::decode_index(index).0, HeapSpace::Stable) {
1181 return value;
1182 }
1183 self.relocate_stable_value(value, relocated, compacted)
1184 }
1185
1186 fn relocate_stable_value(
1187 &mut self,
1188 value: NanValue,
1189 relocated: &mut [u32],
1190 compacted: &mut Vec<ArenaEntry<T>>,
1191 ) -> NanValue {
1192 let Some(index) = value.heap_index() else {
1193 return value;
1194 };
1195 let (space, raw_index) = Self::decode_index(index);
1196 if !matches!(space, HeapSpace::Stable) {
1197 return value;
1198 }
1199
1200 let relocation_slot = raw_index as usize;
1201 let relocated_index = relocated[relocation_slot];
1202 if relocated_index != u32::MAX {
1203 return value.with_heap_index(relocated_index);
1204 }
1205
1206 let new_index = Self::encode_stable_index(compacted.len() as u32);
1207 relocated[relocation_slot] = new_index;
1208 compacted.push(ArenaEntry::Int(0));
1209
1210 let entry = core::mem::replace(
1211 &mut self.stable_entries[raw_index as usize],
1212 ArenaEntry::Int(0),
1213 );
1214 let new_entry = self.relocate_stable_entry(entry, relocated, compacted);
1215 compacted[(new_index & HEAP_INDEX_MASK_U32) as usize] = new_entry;
1216 value.with_heap_index(new_index)
1217 }
1218
1219 fn relocate_stable_entry(
1220 &mut self,
1221 entry: ArenaEntry<T>,
1222 relocated: &mut [u32],
1223 compacted: &mut Vec<ArenaEntry<T>>,
1224 ) -> ArenaEntry<T> {
1225 let mut rewrite = |arena: &mut Arena<T>, value: NanValue| {
1226 arena.relocate_stable_value(value, relocated, compacted)
1227 };
1228 self.rewrite_entry_with(entry, &mut rewrite)
1229 }
1230}