use crate::{
    cast,
    gc::{
        array::ArrayHeader, Array as GcArray, Event, GcPtr, GcRuntime, Observer, RawGcPtr, Stats,
        TypeTrace,
    },
    mapping::{self, resolve_struct_to_struct_edit, Action, FieldMapping, MemoryMapper},
    r#type::Type,
    TypeKind,
};
use mapping::{Mapping, StructMapping};
use parking_lot::RwLock;
use std::{
    alloc::{Layout, LayoutError},
    borrow::Cow,
    collections::{HashMap, VecDeque},
    ops::{Deref, DerefMut},
    pin::Pin,
    ptr::NonNull,
};

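/// A trace through a single GC object, yielding the `GcPtr` of every object it references.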
pub struct Trace {
    stack: VecDeque<CompositeTrace>,
}

impl Trace {
    fn new(obj: NonNull<ObjectInfo>) -> Trace {
        let mut trace = Trace {
            stack: Default::default(),
        };
        let obj_ref = unsafe { obj.as_ref() };
        match obj_ref.ty.kind() {
            TypeKind::Primitive(_) | TypeKind::Pointer(_) => {}
            TypeKind::Struct(_) => {
                trace.stack.push_back(CompositeTrace::Struct(StructTrace {
                    struct_ptr: unsafe { obj_ref.data.ptr },
                    struct_type: obj_ref.ty.clone(),
                    field_index: 0,
                }));
            }
            TypeKind::Array(arr) => {
                let array_handle = ArrayHandle { obj };
                trace.stack.push_back(CompositeTrace::Array(ArrayTrace {
                    iter: array_handle.elements(),
                    element_ty: arr.element_type(),
                }));
            }
        }
        trace
    }
}

impl Iterator for Trace {
    type Item = GcPtr;

    fn next(&mut self) -> Option<Self::Item> {
        loop {
            let top_stack = self.stack.back_mut()?;
            let event = match top_stack {
                CompositeTrace::Struct(s) => s.next(),
                CompositeTrace::Array(a) => a.next(),
            };

            match event {
                None => {
                    self.stack.pop_back();
                }
                Some(TraceEvent::Reference(r)) => return Some(r.into()),
                Some(TraceEvent::InlineStruct(s)) => {
                    self.stack.push_back(CompositeTrace::Struct(s))
                }
            }
        }
    }
}

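/// A composite value (struct or array) that is currently being traced.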
enum CompositeTrace {
    Struct(StructTrace),

    Array(ArrayTrace),
}

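/// A single event yielded while tracing the fields or elements of a composite value.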
enum TraceEvent {
    Reference(NonNull<ObjectInfo>),
    InlineStruct(StructTrace),
}

impl TraceEvent {
    /// Constructs a `TraceEvent` for the value of type `ty` stored at `ptr`, or `None` if the
    /// value cannot contain any GC references.
    pub fn new(ptr: NonNull<u8>, ty: Cow<'_, Type>) -> Option<TraceEvent> {
        match ty.kind() {
            TypeKind::Primitive(_) | TypeKind::Pointer(_) => None,
            TypeKind::Struct(s) => {
                if s.is_gc_struct() {
                    // A GC struct field stores a pointer to the heap-allocated object.
                    let deref_ptr = unsafe { ptr.cast::<NonNull<ObjectInfo>>().as_ref() };
                    Some(TraceEvent::Reference(*deref_ptr))
                } else {
                    // A value struct is stored inline and must be traced field by field.
                    Some(TraceEvent::InlineStruct(StructTrace {
                        struct_ptr: ptr.cast(),
                        struct_type: ty.into_owned(),
                        field_index: 0,
                    }))
                }
            }
            TypeKind::Array(_) => {
                // Arrays are always GC allocated, so the field stores a pointer to the object.
                let deref_ptr = unsafe { ptr.cast::<NonNull<ObjectInfo>>().as_ref() };
                Some(TraceEvent::Reference(*deref_ptr))
            }
        }
    }
}

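/// Iterates over the fields of a struct value, yielding a `TraceEvent` for every field that can
/// hold GC references.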
struct StructTrace {
    /// Pointer to the start of the struct value.
    struct_ptr: NonNull<u8>,
    /// The type of the struct.
    struct_type: Type,
    /// The index of the next field to visit.
    field_index: usize,
}

impl Iterator for StructTrace {
    type Item = TraceEvent;

    fn next(&mut self) -> Option<Self::Item> {
        let struct_ty = self.struct_type.as_struct()?;
        let fields = struct_ty.fields();
        let field_count = fields.len();
        while self.field_index < field_count {
            let index = self.field_index;
            self.field_index += 1;

            let field = fields.get(index).unwrap();
            let field_ty = field.ty();
            let field_ptr =
                unsafe { NonNull::new_unchecked(self.struct_ptr.as_ptr().add(field.offset())) };

            if let Some(event) = TraceEvent::new(field_ptr, Cow::Owned(field_ty)) {
                return Some(event);
            }
        }
        None
    }
}

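/// Iterates over the elements of an array, yielding a `TraceEvent` for every element that can
/// hold GC references.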
struct ArrayTrace {
    iter: ArrayHandleIter,
    element_ty: Type,
}

impl Iterator for ArrayTrace {
    type Item = TraceEvent;

    fn next(&mut self) -> Option<Self::Item> {
        loop {
            if let Some(event) = TraceEvent::new(self.iter.next()?, Cow::Borrowed(&self.element_ty))
            {
                break Some(event);
            }
        }
    }
}

impl TypeTrace for Type {
    type Trace = Trace;

    fn trace(&self, obj: GcPtr) -> Self::Trace {
        let obj = NonNull::new(obj.as_ptr() as *mut ObjectInfo).expect("invalid gc ptr");
        Trace::new(obj)
    }
}

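/// A simple mark-sweep garbage collector. Every allocation is tracked in a table of
/// `ObjectInfo` records; `collect` marks everything reachable from rooted objects and frees the
/// rest.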
pub struct MarkSweep<O>
where
    O: Observer<Event = Event>,
{
    objects: RwLock<HashMap<GcPtr, Pin<Box<ObjectInfo>>>>,
    observer: O,
    stats: RwLock<Stats>,
}

impl<O> Default for MarkSweep<O>
where
    O: Observer<Event = Event> + Default,
{
    fn default() -> Self {
        MarkSweep {
            objects: RwLock::new(HashMap::new()),
            observer: O::default(),
            stats: RwLock::new(Stats::default()),
        }
    }
}

impl<O> MarkSweep<O>
where
    O: Observer<Event = Event>,
{
    /// Creates a `MarkSweep` collector with the specified `Observer`, which receives all
    /// memory events.
    pub fn with_observer(observer: O) -> Self {
        Self {
            objects: RwLock::new(HashMap::new()),
            observer,
            stats: RwLock::new(Stats::default()),
        }
    }

    /// Records an allocation of `size` bytes in the statistics and notifies the observer.
    fn log_alloc(&self, handle: GcPtr, size: usize) {
        {
            let mut stats = self.stats.write();
            stats.allocated_memory += size;
        }

        self.observer.event(Event::Allocation(handle));
    }

    /// Returns the observer of this collector.
    pub fn observer(&self) -> &O {
        &self.observer
    }
}

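/// Allocates zero-initialized memory for a value of type `ty` and returns the `ObjectInfo`
/// describing the allocation.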
fn alloc_obj(ty: Type) -> Pin<Box<ObjectInfo>> {
    let ptr = NonNull::new(unsafe { std::alloc::alloc_zeroed(ty.value_layout()) })
        .expect("failed to allocate memory for new object");
    Box::pin(ObjectInfo {
        data: ObjectInfoData { ptr },
        ty,
        roots: 0,
        color: Color::White,
    })
}

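/// An error that can occur when computing the memory layout of an object or array.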
#[derive(Debug)]
pub enum MemoryLayoutError {
    /// The resulting size would exceed the maximum size a `Layout` can describe.
    OutOfBounds,

    /// The size and alignment do not form a valid `Layout`.
    LayoutError(LayoutError),
}

impl From<LayoutError> for MemoryLayoutError {
    fn from(err: LayoutError) -> Self {
        MemoryLayoutError::LayoutError(err)
    }
}

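/// A handle to a garbage-collected array, providing access to its header and element storage.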
pub struct ArrayHandle {
    obj: NonNull<ObjectInfo>,
}

impl ArrayHandle {
    /// Returns a reference to the header of the array.
    pub fn header(&self) -> &ArrayHeader {
        unsafe { self.obj.as_ref().data.array.as_ref() }
    }

    /// Sets the length of the array.
    ///
    /// # Safety
    ///
    /// The caller must ensure that `length` does not exceed the capacity of the array and that
    /// the first `length` elements have been initialized.
    pub unsafe fn set_length(&mut self, length: usize) {
        let header = self.obj.as_mut().data.array.as_mut();
        debug_assert!(header.capacity >= length);
        header.length = length;
    }

    /// Returns the layout of a single element as it is stored in the array.
    pub fn element_layout(&self) -> Layout {
        self.element_type().reference_layout()
    }

    /// Returns the stride in bytes between consecutive elements.
    pub fn element_stride(&self) -> usize {
        self.element_layout().pad_to_align().size()
    }

    /// Returns a pointer to the first element, which is stored directly after the padded
    /// array header.
    pub fn data(&self) -> NonNull<u8> {
        let element_layout = self.element_layout();
        let header_layout = Layout::new::<ArrayHeader>();
        let (_, padded_header_size) = header_layout
            .extend(element_layout)
            .expect("error creating combined layout of header and element");

        unsafe {
            NonNull::new_unchecked(
                self.obj
                    .as_ref()
                    .data
                    .array
                    .as_ptr()
                    .cast::<u8>()
                    .add(padded_header_size),
            )
        }
    }
}

impl GcArray for ArrayHandle {
    type Iterator = ArrayHandleIter;

    fn as_raw(&self) -> GcPtr {
        self.obj.into()
    }

    fn element_type(&self) -> Type {
        let array_ty = &unsafe { self.obj.as_ref() }.ty;
        array_ty
            .as_array()
            .expect("unable to determine element_type, type is not an array")
            .element_type()
    }

    fn length(&self) -> usize {
        self.header().length
    }

    fn capacity(&self) -> usize {
        self.header().capacity
    }

    fn elements(&self) -> Self::Iterator {
        let length = self.length();
        let next = self.data();
        let element_ty = self.element_type();
        let element_layout = element_ty.reference_layout();
        ArrayHandleIter {
            remaining: length,
            next,
            stride: element_layout.pad_to_align().size(),
        }
    }
}

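/// An iterator that yields a pointer to every element of an array.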
pub struct ArrayHandleIter {
    /// Pointer to the next element.
    next: NonNull<u8>,

    /// The number of elements that remain to be yielded.
    remaining: usize,

    /// The number of bytes between consecutive elements.
    stride: usize,
}

impl Iterator for ArrayHandleIter {
    type Item = NonNull<u8>;

    fn next(&mut self) -> Option<Self::Item> {
        if self.remaining > 0 {
            let element = self.next;
            self.remaining -= 1;
            self.next = unsafe { NonNull::new_unchecked(self.next.as_ptr().add(self.stride)) };
            Some(element)
        } else {
            None
        }
    }
}

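/// Creates a layout describing the record for `n` instances of `layout`, with padding between
/// each instance so that every element is correctly aligned (similar in spirit to the unstable
/// `Layout::repeat`).
///
/// For example, repeating a layout with size 4 and alignment 8 three times yields a layout of
/// size 24, because every element is padded to 8 bytes.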
fn repeat_layout(layout: Layout, n: usize) -> Result<Layout, MemoryLayoutError> {
    // Round the size up to the nearest multiple of the alignment; the difference between the
    // rounded-up size and the original size is the padding required after each element.
    let len_rounded_up = layout.size().wrapping_add(layout.align()).wrapping_sub(1)
        & !layout.align().wrapping_sub(1);
    let padded_size = layout.size() + len_rounded_up.wrapping_sub(layout.size());
    let alloc_size = padded_size
        .checked_mul(n)
        .ok_or(MemoryLayoutError::OutOfBounds)?;
    Layout::from_size_align(alloc_size, layout.align()).map_err(Into::into)
}

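/// Allocates memory for an array of type `ty` with storage for `length` elements and returns
/// the `ObjectInfo` describing the allocation.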
fn alloc_array(ty: Type, length: usize) -> Pin<Box<ObjectInfo>> {
    Box::pin(ObjectInfo {
        data: ObjectInfoData {
            array: array_header(&ty, length),
        },
        ty,
        roots: 0,
        color: Color::White,
    })
}

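/// Allocates zeroed memory for an array of the given array type with room for `length` elements
/// and initializes its header. The element storage directly follows the (padded) header.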
fn array_header(ty: &Type, length: usize) -> NonNull<ArrayHeader> {
    let array_ty = ty
        .as_array()
        .expect("array type doesn't have an element type");

    // Compute the combined layout of the header followed by the padded element storage.
    let header_layout = Layout::new::<ArrayHeader>();
    let element_ty_layout = array_ty.element_type().reference_layout();
    let elements_layout = repeat_layout(element_ty_layout, length)
        .expect("unable to create a memory layout for array elements");
    let (layout, _) = header_layout
        .extend(elements_layout)
        .expect("unable to create memory layout for array");

    let mut array_header: NonNull<ArrayHeader> =
        NonNull::new(unsafe { std::alloc::alloc_zeroed(layout).cast() })
            .expect("error allocating memory for array");
    let array = unsafe { array_header.as_mut() };
    array.length = length;
    array.capacity = length;

    array_header
}

impl<O> GcRuntime for MarkSweep<O>
where
    O: Observer<Event = Event>,
{
    type Array = ArrayHandle;

    fn alloc(&self, ty: &Type) -> GcPtr {
        assert!(ty.is_concrete());

        let object = alloc_obj(ty.clone());
        let size = object.layout().size();

        // The returned handle is a pointer to the `ObjectInfo` record.
        let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into();

        {
            let mut objects = self.objects.write();
            objects.insert(handle, object);
        }

        self.log_alloc(handle, size);
        handle
    }

    fn alloc_array(&self, ty: &Type, n: usize) -> Self::Array {
        let object = alloc_array(ty.clone(), n);
        let size = object.layout().size();

        // The returned handle is a pointer to the `ObjectInfo` record.
        let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into();

        {
            let mut objects = self.objects.write();
            objects.insert(handle, object);
        }

        self.log_alloc(handle, size);
        ArrayHandle {
            obj: unsafe { NonNull::new_unchecked(handle.into()) },
        }
    }

    fn ptr_type(&self, handle: GcPtr) -> Type {
        let _lock = self.objects.read();

        // Convert the handle to our internal representation.
        let object_info: *const ObjectInfo = handle.into();

        // Return a clone of the type of the object.
        unsafe { (*object_info).ty.clone() }
    }

    fn array(&self, handle: GcPtr) -> Option<Self::Array> {
        let _lock = self.objects.read();
        let obj: NonNull<ObjectInfo> =
            NonNull::new(handle.into()).expect("cannot have a null handle here");
        unsafe {
            if !obj.as_ref().ty.is_array() {
                return None;
            }
        }

        Some(ArrayHandle { obj })
    }

    fn root(&self, handle: GcPtr) {
        let _lock = self.objects.write();

        // Convert the handle to our internal representation.
        let object_info: *mut ObjectInfo = handle.into();

        unsafe { (*object_info).roots += 1 };
    }

    fn unroot(&self, handle: GcPtr) {
        let _lock = self.objects.write();

        // Convert the handle to our internal representation.
        let object_info: *mut ObjectInfo = handle.into();

        unsafe { (*object_info).roots -= 1 };
    }

    fn stats(&self) -> Stats {
        self.stats.read().clone()
    }
}

impl<O> MarkSweep<O>
where
    O: Observer<Event = Event>,
{
    /// Collects all memory that is no longer referenced by rooted objects. Returns `true` if
    /// memory was reclaimed, `false` otherwise.
    pub fn collect(&self) -> bool {
        self.observer.event(Event::Start);

        let mut objects = self.objects.write();

        // Collect all rooted objects as the starting point of the mark phase.
        let mut roots = objects
            .iter()
            .filter_map(|(_, obj)| {
                if obj.roots > 0 {
                    Some(obj.as_ref().get_ref() as *const _ as *mut ObjectInfo)
                } else {
                    None
                }
            })
            .collect::<VecDeque<_>>();

        // Mark phase: visit every object reachable from the roots.
        while let Some(next) = roots.pop_front() {
            let handle = (next as *const _ as RawGcPtr).into();

            // Trace all objects this object references and queue the ones not yet visited.
            for reference in unsafe { (*next).ty.trace(handle) } {
                let ref_ptr = objects
                    .get_mut(&reference)
                    .expect("found invalid reference");
                if ref_ptr.color == Color::White {
                    let ptr = ref_ptr.as_ref().get_ref() as *const _ as *mut ObjectInfo;
                    unsafe { (*ptr).color = Color::Gray };
                    roots.push_back(ptr);
                }
            }

            // This object and its direct references have been visited; mark it black.
            unsafe {
                (*next).color = Color::Black;
            }
        }

        // Sweep phase: deallocate every object that was not marked and reset the marks of the
        // survivors.
        let size_before = objects.len();
        objects.retain(|h, obj| {
            if obj.color == Color::Black {
                unsafe {
                    obj.as_mut().get_unchecked_mut().color = Color::White;
                }
                true
            } else {
                let value_memory_layout = obj.layout();
                unsafe { std::alloc::dealloc(obj.data.ptr.as_mut(), value_memory_layout) };
                self.observer.event(Event::Deallocation(*h));
                {
                    let mut stats = self.stats.write();
                    stats.allocated_memory -= value_memory_layout.size();
                }
                false
            }
        });
        let size_after = objects.len();

        self.observer.event(Event::End);

        size_before != size_after
    }
}

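// The `MemoryMapper` implementation migrates every live allocation from its old type to the
// corresponding new type as described by `mapping`, returning the handles of objects whose type
// was deleted.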
impl<O> MemoryMapper for MarkSweep<O>
where
    O: Observer<Event = Event>,
{
    fn map_memory(&self, mapping: Mapping) -> Vec<GcPtr> {
        let mut objects = self.objects.write();

        // Collect the handles of all objects whose type was deleted.
        let deleted = objects
            .iter()
            .filter_map(|(ptr, object_info)| {
                if mapping.deletions.contains(&object_info.ty) {
                    Some(*ptr)
                } else {
                    None
                }
            })
            .collect();

        // For types that did not change, only the `Type` handle of the object is updated.
        for (old_ty, new_ty) in mapping.identical {
            for object_info in objects.values_mut() {
                if object_info.ty == old_ty {
                    object_info.set(ObjectInfo {
                        data: ObjectInfoData {
                            ptr: unsafe { object_info.data.ptr },
                        },
                        roots: object_info.roots,
                        color: object_info.color,
                        ty: new_ty.clone(),
                    });
                }
            }
        }

        let mut new_allocations = Vec::new();

        // Map all struct objects whose type changed to their new layout.
        objects
            .values_mut()
            .filter(|object_info| object_info.ty.is_struct())
            .for_each(|object_info| {
                if let Some(conversion) = mapping.struct_mappings.get(&object_info.ty) {
                    let old_layout = object_info.ty.value_layout();
                    let src = unsafe { object_info.data.ptr };
                    let dest = unsafe {
                        NonNull::new_unchecked(std::alloc::alloc_zeroed(
                            conversion.new_ty.value_layout(),
                        ))
                    };

                    map_struct(
                        &mut new_allocations,
                        &mapping.struct_mappings,
                        &conversion.field_mapping,
                        src,
                        dest,
                    );

                    unsafe { std::alloc::dealloc(src.as_ptr(), old_layout) };

                    object_info.set(ObjectInfo {
                        data: ObjectInfoData { ptr: dest },
                        roots: object_info.roots,
                        color: object_info.color,
                        ty: conversion.new_ty.clone(),
                    });
                }
            });

        // Map all arrays whose (possibly nested) element type changed.
        objects
            .values_mut()
            .filter(|object_info| object_info.ty.is_array())
            .for_each(|object_info| {
                // Determine the innermost element type of the (possibly nested) array.
                let mut ty = object_info.ty.clone();
                let mut stack = Vec::new();

                while let Some(array) = ty.as_array() {
                    stack.push(ty.clone());
                    ty = array.element_type();
                }

                let old_element_ty = ty;
                if let Some(conversion) = mapping.struct_mappings.get(&old_element_ty) {
                    // Reconstruct the array type with the same nesting depth but the new
                    // element type.
                    let mut new_ty = conversion.new_ty.clone();
                    while stack.pop().is_some() {
                        new_ty = new_ty.array_type();
                    }

                    let new_element_ty = new_ty.as_array().unwrap().element_type();
                    if new_element_ty.is_struct() {
                        assert!(old_element_ty.is_struct());

                        let element_action =
                            resolve_struct_to_struct_edit(&old_element_ty, &new_element_ty, 0);

                        map_array(
                            &mut new_allocations,
                            &mapping.struct_mappings,
                            unsafe {
                                NonNull::new_unchecked(
                                    object_info.as_mut().deref_mut() as *mut ObjectInfo
                                )
                            },
                            &element_action,
                            &new_ty,
                        );
                    } else {
                        object_info.as_mut().ty = conversion.new_ty.clone();
                    }
                }
            });

        // Register all objects that were allocated while mapping.
        for object in new_allocations {
            let size = object.layout().size();
            let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into();
            objects.insert(handle, object);

            self.log_alloc(handle, size);
        }

        return deleted;

        /// Returns a pointer to the field at `offset` bytes from the start of the struct at
        /// `struct_ptr`.
        unsafe fn get_field_ptr(struct_ptr: NonNull<u8>, offset: usize) -> NonNull<u8> {
            let mut ptr = struct_ptr.as_ptr() as usize;
            ptr += offset;
            NonNull::new_unchecked(ptr as *mut u8)
        }

        /// Maps the elements of the array object at `src_object` to a newly allocated array of
        /// type `new_ty`, applying `element_action` to every element, and replaces the source
        /// object with the new array in place.
        fn map_array(
            new_allocations: &mut Vec<Pin<Box<ObjectInfo>>>,
            conversions: &HashMap<Type, StructMapping>,
            mut src_object: NonNull<ObjectInfo>,
            element_action: &Action,
            new_ty: &Type,
        ) {
            let src_array = ArrayHandle { obj: src_object };

            // Allocate a new array with the same length as the source array.
            let new_header = array_header(new_ty, src_array.length());

            let mut dest_obj = ObjectInfo {
                data: ObjectInfoData { array: new_header },
                roots: unsafe { src_object.as_ref().roots },
                color: unsafe { src_object.as_ref().color },
                ty: new_ty.clone(),
            };

            let dest_array = ArrayHandle {
                obj: unsafe { NonNull::new_unchecked(&mut dest_obj as *mut ObjectInfo) },
            };

            // Map every element of the source array into the destination array.
            src_array
                .elements()
                .zip(dest_array.elements())
                .for_each(|(src, dest)| {
                    map_type(
                        new_allocations,
                        conversions,
                        src,
                        dest,
                        element_action,
                        &new_ty.as_array().expect("Must be an array.").element_type(),
                    )
                });

            // Free the old array storage and replace the object info in place.
            unsafe {
                let src_obj = src_object.as_mut();
                std::alloc::dealloc(src_obj.data.ptr.as_mut(), src_obj.layout());
                *src_obj = dest_obj;
            };
        }

        /// Maps a single value from `src` to `dest` according to `action`, allocating new
        /// objects where required.
        fn map_type(
            new_allocations: &mut Vec<Pin<Box<ObjectInfo>>>,
            conversions: &HashMap<Type, StructMapping>,
            src: NonNull<u8>,
            dest: NonNull<u8>,
            action: &mapping::Action,
            new_ty: &Type,
        ) {
            match action {
                mapping::Action::ArrayAlloc => {
                    // Allocate an empty array and store a handle to it in the destination.
                    let object = alloc_array(new_ty.clone(), 0);

                    let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into();

                    let mut dest_handle = dest.cast::<GcPtr>();
                    unsafe { *dest_handle.as_mut() = handle };

                    new_allocations.push(object);
                }
                mapping::Action::ArrayFromValue {
                    element_action,
                    old_offset,
                } => {
                    // Allocate an array with a single element and map the old value into it.
                    let mut object = alloc_array(new_ty.clone(), 1);

                    let array_handle = ArrayHandle {
                        obj: unsafe {
                            NonNull::new_unchecked(object.as_mut().deref_mut() as *mut ObjectInfo)
                        },
                    };

                    map_type(
                        new_allocations,
                        conversions,
                        unsafe { get_field_ptr(src, *old_offset) },
                        array_handle.data(),
                        element_action,
                        &new_ty.as_array().expect("Must be an array.").element_type(),
                    );

                    let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into();

                    let mut dest_handle = dest.cast::<GcPtr>();
                    unsafe { *dest_handle.as_mut() = handle };

                    new_allocations.push(object);
                }
                mapping::Action::ArrayMap {
                    element_action,
                    old_offset,
                } => {
                    let src_ptr = unsafe { get_field_ptr(src, *old_offset) };

                    // The field stores a handle to the source array object.
                    let src_obj = unsafe { *src_ptr.cast::<NonNull<ObjectInfo>>().as_ref() };

                    map_array(
                        new_allocations,
                        conversions,
                        src_obj,
                        element_action,
                        new_ty,
                    );

                    // The array was mapped in place, so the handle itself can simply be copied.
                    unsafe {
                        std::ptr::copy_nonoverlapping(
                            src_ptr.as_ptr(),
                            dest.as_ptr(),
                            std::mem::size_of::<GcPtr>(),
                        )
                    };
                }
                mapping::Action::Cast { old_offset, old_ty } => {
                    if !cast::try_cast_from_to(
                        old_ty.clone(),
                        new_ty.clone(),
                        unsafe { get_field_ptr(src, *old_offset) },
                        dest,
                    ) {
                        // Casting failed; the destination keeps its zero-initialized value.
                    }
                }
                mapping::Action::Copy {
                    old_offset,
                    size: size_in_bytes,
                } => {
                    unsafe {
                        std::ptr::copy_nonoverlapping(
                            get_field_ptr(src, *old_offset).as_ptr(),
                            dest.as_ptr(),
                            *size_in_bytes,
                        )
                    };
                }
                mapping::Action::ElementFromArray {
                    element_action,
                    old_offset,
                } => {
                    // The field stores a handle to the source array object.
                    let obj = unsafe {
                        *get_field_ptr(src, *old_offset)
                            .cast::<NonNull<ObjectInfo>>()
                            .as_ref()
                    };

                    let array_handle = ArrayHandle { obj };

                    if array_handle.header().length > 0 {
                        // Map the first element of the array into the destination.
                        map_type(
                            new_allocations,
                            conversions,
                            array_handle.data(),
                            dest,
                            element_action,
                            new_ty,
                        )
                    } else {
                        // The array is empty; the destination keeps its zero-initialized value.
                    }
                }
                mapping::Action::StructAlloc => {
                    // Allocate a new zero-initialized struct and store a handle to it.
                    let object = alloc_obj(new_ty.clone());

                    let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into();

                    let mut dest_handle = dest.cast::<GcPtr>();
                    unsafe { *dest_handle.as_mut() = handle };

                    new_allocations.push(object);
                }
                mapping::Action::StructMapFromGc { old_ty, old_offset } => {
                    let conversion = conversions.get(old_ty).unwrap_or_else(|| {
                        panic!(
                            "If the struct changed, there must also be a conversion for type: {:#?}.",
                            old_ty,
                        )
                    });

                    // The old field holds a handle to a heap-allocated struct; map its value
                    // directly into the destination.
                    let object = unsafe {
                        *get_field_ptr(src, *old_offset)
                            .cast::<NonNull<ObjectInfo>>()
                            .as_ref()
                    };

                    map_struct(
                        new_allocations,
                        conversions,
                        &conversion.field_mapping,
                        unsafe { object.as_ref().data.ptr },
                        dest,
                    );
                }
                mapping::Action::StructMapFromValue { old_ty, old_offset } => {
                    // The new field holds a handle to a heap-allocated struct; allocate it and
                    // map the old inline value into it.
                    let object = alloc_obj(new_ty.clone());

                    let conversion = conversions.get(old_ty).unwrap_or_else(|| {
                        panic!(
                            "If the struct changed, there must also be a conversion for type: {:#?}.",
                            old_ty,
                        )
                    });

                    map_struct(
                        new_allocations,
                        conversions,
                        &conversion.field_mapping,
                        unsafe { get_field_ptr(src, *old_offset) },
                        unsafe { object.as_ref().data.ptr },
                    );

                    let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into();

                    let mut dest_handle = dest.cast::<GcPtr>();
                    unsafe { *dest_handle.as_mut() = handle };

                    new_allocations.push(object);
                }
                mapping::Action::StructMapInPlace { old_ty, old_offset } => {
                    let conversion = conversions.get(old_ty).unwrap_or_else(|| {
                        panic!(
                            "If the struct changed, there must also be a conversion for type: {:#?}.",
                            old_ty,
                        )
                    });

                    map_struct(
                        new_allocations,
                        conversions,
                        &conversion.field_mapping,
                        unsafe { get_field_ptr(src, *old_offset) },
                        dest,
                    );
                }
                mapping::Action::ZeroInitialize => {
                    // Nothing to do; the destination memory was already zero-initialized.
                }
            }
        }

        /// Maps the fields of a struct from `src` to `dest` according to the given field
        /// mapping.
        #[allow(clippy::mutable_key_type)]
        fn map_struct(
            new_allocations: &mut Vec<Pin<Box<ObjectInfo>>>,
            conversions: &HashMap<Type, StructMapping>,
            mapping: &[FieldMapping],
            src: NonNull<u8>,
            dest: NonNull<u8>,
        ) {
            for FieldMapping {
                new_ty,
                new_offset,
                action,
            } in mapping.iter()
            {
                let field_dest = unsafe { get_field_ptr(dest, *new_offset) };
                map_type(
                    new_allocations,
                    conversions,
                    src,
                    field_dest,
                    action,
                    new_ty,
                );
            }
        }
    }
}

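/// The mark colors used by the tri-color mark-sweep algorithm.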
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Color {
    /// The object has not been visited yet and is presumed unreachable.
    White,

    /// The object has been queued for tracing but its references have not been visited yet.
    Gray,

    /// The object and its direct references have been visited; it is reachable.
    Black,
}

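/// Bookkeeping data for a single GC allocation: a pointer to the value (or array header), the
/// number of roots, the mark color, and the type of the value.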
#[repr(C)]
struct ObjectInfo {
    pub data: ObjectInfoData,
    pub roots: u32,
    pub color: Color,
    pub ty: Type,
}

#[repr(C)]
union ObjectInfoData {
    pub ptr: NonNull<u8>,
    pub array: NonNull<ArrayHeader>,
}

unsafe impl Send for ObjectInfo {}
unsafe impl Sync for ObjectInfo {}

impl From<GcPtr> for *const ObjectInfo {
    fn from(ptr: GcPtr) -> Self {
        ptr.as_ptr() as Self
    }
}

impl From<GcPtr> for *mut ObjectInfo {
    fn from(ptr: GcPtr) -> Self {
        ptr.as_ptr() as Self
    }
}

impl From<*const ObjectInfo> for GcPtr {
    fn from(info: *const ObjectInfo) -> Self {
        (info as RawGcPtr).into()
    }
}

impl From<*mut ObjectInfo> for GcPtr {
    fn from(info: *mut ObjectInfo) -> Self {
        (info as RawGcPtr).into()
    }
}

impl From<NonNull<ObjectInfo>> for GcPtr {
    fn from(info: NonNull<ObjectInfo>) -> Self {
        (info.as_ptr() as RawGcPtr).into()
    }
}

impl ObjectInfo {
    /// Returns the layout of the memory pointed to by `data`. For arrays this must match the
    /// layout that was allocated in `array_header`.
    pub fn layout(&self) -> Layout {
        match self.ty.kind() {
            TypeKind::Struct(_) | TypeKind::Primitive(_) | TypeKind::Pointer(_) => {
                self.ty.value_layout()
            }
            TypeKind::Array(array) => {
                let elem_count = unsafe { self.data.array.as_ref().capacity };
                let elem_layout =
                    repeat_layout(array.element_type().reference_layout(), elem_count)
                        .expect("unable to determine layout of array elements");
                let (layout, _) = Layout::new::<ArrayHeader>()
                    .extend(elem_layout)
                    .expect("unable to determine layout of array");
                layout
            }
        }
    }
}