// mun_memory/gc/mark_sweep.rs

1use crate::{
2    cast,
3    gc::{
4        array::ArrayHeader, Array as GcArray, Event, GcPtr, GcRuntime, Observer, RawGcPtr, Stats,
5        TypeTrace,
6    },
7    mapping::{self, resolve_struct_to_struct_edit, Action, FieldMapping, MemoryMapper},
8    r#type::Type,
9    TypeKind,
10};
11use mapping::{Mapping, StructMapping};
12use parking_lot::RwLock;
13use std::{
14    alloc::{Layout, LayoutError},
15    borrow::Cow,
16    collections::{HashMap, VecDeque},
17    ops::{Deref, DerefMut},
18    pin::Pin,
19    ptr::NonNull,
20};
21
/// An object that enables tracing all reference types from another object.
///
/// Implements `Iterator`, yielding the `GcPtr` of every object directly or transitively
/// reachable from the root object the trace was constructed from.
pub struct Trace {
    // Work list of composite values (structs/arrays) still to be scanned for references.
    // Nested inline structs are pushed on the back and popped from the back, giving a
    // depth-first traversal.
    stack: VecDeque<CompositeTrace>,
}
26
impl Trace {
    /// Creates a `Trace` that yields all GC references reachable from `obj`.
    ///
    /// Primitive and pointer values cannot contain GC references, so they produce an empty
    /// trace.
    fn new(obj: NonNull<ObjectInfo>) -> Trace {
        let mut trace = Trace {
            stack: Default::default(),
        };
        let obj_ref = unsafe { obj.as_ref() };
        match obj_ref.ty.kind() {
            // Nothing to trace; leave the work list empty.
            TypeKind::Primitive(_) | TypeKind::Pointer(_) => {}
            TypeKind::Struct(_) => {
                trace.stack.push_back(CompositeTrace::Struct(StructTrace {
                    // Safety: for non-array objects the `ptr` variant of the data union is
                    // the one that was initialized (see `alloc_obj`).
                    struct_ptr: unsafe { obj_ref.data.ptr },
                    struct_type: obj_ref.ty.clone(),
                    field_index: 0,
                }));
            }
            TypeKind::Array(arr) => {
                // Every element of the array may itself contain references.
                let array_handle = ArrayHandle { obj };
                trace.stack.push_back(CompositeTrace::Array(ArrayTrace {
                    iter: array_handle.elements(),
                    element_ty: arr.element_type(),
                }));
            }
        }
        trace
    }
}
53
54impl Iterator for Trace {
55    type Item = GcPtr;
56
57    fn next(&mut self) -> Option<Self::Item> {
58        loop {
59            let top_stack = self.stack.back_mut()?;
60            let event = match top_stack {
61                CompositeTrace::Struct(s) => s.next(),
62                CompositeTrace::Array(a) => a.next(),
63            };
64
65            match event {
66                None => {
67                    self.stack.pop_back();
68                }
69                Some(TraceEvent::Reference(r)) => return Some(r.into()),
70                Some(TraceEvent::InlineStruct(s)) => {
71                    self.stack.push_back(CompositeTrace::Struct(s))
72                }
73            }
74        }
75    }
76}
77
/// An object that enables iterating over a composite value stored somewhere in memory.
///
/// Composite values (structs and arrays) are the only values that can contain GC references.
enum CompositeTrace {
    /// A struct
    Struct(StructTrace),

    /// An array
    Array(ArrayTrace),
}
86
/// A single step produced while tracing a composite value.
enum TraceEvent {
    /// A reference to another garbage-collected heap object was found.
    Reference(NonNull<ObjectInfo>),
    /// A struct stored inline (by value) was found; it must be traced recursively.
    InlineStruct(StructTrace),
}
91
92impl TraceEvent {
93    /// Construct a new `TraceEvent` based on the type of data stored at the specified location.
94    pub fn new(ptr: NonNull<u8>, ty: Cow<'_, Type>) -> Option<TraceEvent> {
95        match ty.kind() {
96            TypeKind::Primitive(_) | TypeKind::Pointer(_) => None,
97            TypeKind::Struct(s) => {
98                return if s.is_gc_struct() {
99                    let deref_ptr = unsafe { ptr.cast::<NonNull<ObjectInfo>>().as_ref() };
100                    Some(TraceEvent::Reference(*deref_ptr))
101                } else {
102                    Some(TraceEvent::InlineStruct(StructTrace {
103                        struct_ptr: ptr.cast(),
104                        struct_type: ty.into_owned(),
105                        field_index: 0,
106                    }))
107                }
108            }
109            TypeKind::Array(_) => Some(TraceEvent::Reference(ptr.cast())),
110        }
111    }
112}
113
/// A struct that enables iterating over all GC references in a struct. Structs can be stored inline
/// or on the heap. This struct supports both.
struct StructTrace {
    /// Pointer to the first byte of the struct value.
    struct_ptr: NonNull<u8>,
    /// The type of the struct; used to look up field types and offsets.
    struct_type: Type,
    /// Index of the next field to visit; persists across calls to `next`.
    field_index: usize,
}
121
122impl Iterator for StructTrace {
123    type Item = TraceEvent;
124
125    fn next(&mut self) -> Option<Self::Item> {
126        let struct_ty = self.struct_type.as_struct()?;
127        let fields = struct_ty.fields();
128        let field_count = fields.len();
129        while self.field_index < field_count {
130            let index = self.field_index;
131            self.field_index += 1;
132
133            let field = fields.get(index).unwrap();
134            let field_ty = field.ty();
135            let field_ptr =
136                unsafe { NonNull::new_unchecked(self.struct_ptr.as_ptr().add(field.offset())) };
137
138            if let Some(event) = TraceEvent::new(field_ptr, Cow::Owned(field_ty)) {
139                return Some(event);
140            }
141        }
142        None
143    }
144}
145
/// A struct that enables iterating over all GC references in an array.
///
/// TODO: if the element type doesn't contain any references it's a bit of a waste to iterate over
/// all elements.
struct ArrayTrace {
    /// Iterator over the memory locations of the array's elements.
    iter: ArrayHandleIter,
    /// The type of each element in the array.
    element_ty: Type,
}
154
155impl Iterator for ArrayTrace {
156    type Item = TraceEvent;
157
158    fn next(&mut self) -> Option<Self::Item> {
159        loop {
160            if let Some(event) = TraceEvent::new(self.iter.next()?, Cow::Borrowed(&self.element_ty))
161            {
162                break Some(event);
163            }
164        }
165    }
166}
167
168impl TypeTrace for Type {
169    type Trace = Trace;
170
171    fn trace(&self, obj: GcPtr) -> Self::Trace {
172        let obj = NonNull::new(obj.as_ptr() as *mut ObjectInfo).expect("invalid gc ptr");
173        Trace::new(obj)
174    }
175}
176
/// Implements a simple mark-sweep type garbage collector.
pub struct MarkSweep<O>
where
    O: Observer<Event = Event>,
{
    /// All objects managed by this collector, keyed by their handle. The boxes are pinned
    /// because `GcPtr` handles are raw pointers into these allocations.
    objects: RwLock<HashMap<GcPtr, Pin<Box<ObjectInfo>>>>,
    /// Observer notified of allocation, deallocation, and collection events.
    observer: O,
    /// Statistics about the memory managed by this collector.
    stats: RwLock<Stats>,
}
186
187impl<O> Default for MarkSweep<O>
188where
189    O: Observer<Event = Event> + Default,
190{
191    fn default() -> Self {
192        MarkSweep {
193            objects: RwLock::new(HashMap::new()),
194            observer: O::default(),
195            stats: RwLock::new(Stats::default()),
196        }
197    }
198}
199
200impl<O> MarkSweep<O>
201where
202    O: Observer<Event = Event>,
203{
204    /// Creates a `MarkSweep` memory collector with the specified `Observer`.
205    pub fn with_observer(observer: O) -> Self {
206        Self {
207            objects: RwLock::new(HashMap::new()),
208            observer,
209            stats: RwLock::new(Stats::default()),
210        }
211    }
212
213    /// Logs an allocation
214    fn log_alloc(&self, handle: GcPtr, size: usize) {
215        {
216            let mut stats = self.stats.write();
217            stats.allocated_memory += size;
218        }
219
220        self.observer.event(Event::Allocation(handle));
221    }
222
223    /// Returns the observer
224    pub fn observer(&self) -> &O {
225        &self.observer
226    }
227}
228
/// Allocates a new, zero-initialized object of type `ty` and returns its bookkeeping record.
///
/// The returned box is pinned because raw `GcPtr` handles point directly at the `ObjectInfo`.
///
/// # Panics
///
/// Panics if the allocator fails to provide memory for the value.
fn alloc_obj(ty: Type) -> Pin<Box<ObjectInfo>> {
    let ptr = NonNull::new(unsafe { std::alloc::alloc_zeroed(ty.value_layout()) })
        .expect("failed to allocate memory for new object");
    Box::pin(ObjectInfo {
        data: ObjectInfoData { ptr },
        ty,
        roots: 0,
        // New objects start out white (unmarked); `collect` turns reachable objects black.
        color: Color::White,
    })
}
239
/// An error that might occur when requesting memory layout of a type
#[derive(Debug)]
pub enum MemoryLayoutError {
    /// An error that is returned when the memory requested is too large to deal with.
    OutOfBounds,

    /// An error that is returned by constructing a Layout
    LayoutError(LayoutError),
}
249
250impl From<LayoutError> for MemoryLayoutError {
251    fn from(err: LayoutError) -> Self {
252        MemoryLayoutError::LayoutError(err)
253    }
254}
255
/// Helper object to work with GcPtr that represents an array.
///
/// Arrays are stored in memory with a header which holds the length and capacity. The memory layout
/// of an array looks like this in memory:
///
/// ```text
/// object.data.array ───►┌──────────────┐
///                       │ ArrayHeader  │
///                       └─┬────────────┘
///                         │ padding to align elements
///                       ┌─┴────────────┐
///                       │ element #1   │
///                       └──────────────┘
///                        :
///                       ┌──────────────┐
///                       │ element #n   │
///                       └──────────────┘
/// ```
pub struct ArrayHandle {
    /// Pointer to the object handle. The object's type must be an array type, so its `data`
    /// union holds the `array` variant.
    obj: NonNull<ObjectInfo>,
}
278
impl ArrayHandle {
    /// Returns a reference to the header
    pub fn header(&self) -> &ArrayHeader {
        // Safety: Safe because at the moment we have a reference to the object which cannot be
        // modified. Also we can be sure this is an array at this point.
        unsafe { self.obj.as_ref().data.array.as_ref() }
    }

    /// Sets the length of the array.
    ///
    /// # Safety
    ///
    /// This function is unsafe because the array elements might be left uninitialized.
    pub unsafe fn set_length(&mut self, length: usize) {
        let header = self.obj.as_mut().data.array.as_mut();
        // The capacity is fixed at allocation time; a length beyond it would touch memory
        // outside the allocation.
        debug_assert!(header.capacity >= length);
        header.length = length;
    }

    /// Returns the layout of an element stored in the array.
    ///
    /// Note that this may be different from the layout of the [`Self::element_type`]. If the
    /// element type is a garbage collected type, the array stores references instead of raw
    /// elements.
    pub fn element_layout(&self) -> Layout {
        self.element_type().reference_layout()
    }

    /// Returns the stride in bytes between elements.
    ///
    /// The stride is determined by the size of [`Self::element_layout`] padded to alignment of
    /// layout.
    pub fn element_stride(&self) -> usize {
        self.element_layout().pad_to_align().size()
    }

    /// Returns a pointer to the data.
    pub fn data(&self) -> NonNull<u8> {
        // Determine the offset of the data relative from the start of the array pointer. This
        // skips the header and the extra alignment padding between the header and the data.
        let element_layout = self.element_layout();
        let header_layout = Layout::new::<ArrayHeader>();
        // `Layout::extend` returns the combined layout and the offset at which the extended
        // part (the first element) starts; only the offset is needed here.
        let (_, padded_header_size) = header_layout
            .extend(element_layout)
            .expect("error creating combined layout of header and element");

        unsafe {
            NonNull::new_unchecked(
                (self.obj.as_ref().data.array.as_ptr().cast::<u8>() as *mut u8)
                    .add(padded_header_size),
            )
        }
    }
}
333
334impl GcArray for ArrayHandle {
335    type Iterator = ArrayHandleIter;
336
337    fn as_raw(&self) -> GcPtr {
338        self.obj.into()
339    }
340
341    fn element_type(&self) -> Type {
342        let array_ty = &unsafe { self.obj.as_ref() }.ty;
343        array_ty
344            .as_array()
345            .expect("unable to determine element_type, type is not an array")
346            .element_type()
347    }
348
349    fn length(&self) -> usize {
350        self.header().length
351    }
352
353    fn capacity(&self) -> usize {
354        self.header().capacity
355    }
356
357    fn elements(&self) -> Self::Iterator {
358        let length = self.length();
359        let next = self.data();
360        let element_ty = self.element_type();
361        let element_layout = element_ty.reference_layout();
362        ArrayHandleIter {
363            remaining: length,
364            next,
365            stride: element_layout.pad_to_align().size(),
366        }
367    }
368}
369
/// An iterator implementation.
///
/// TODO: Note that this iterator is highly non-thread safe. Any operation that modifies the
/// original array could cause undefined behavior.
pub struct ArrayHandleIter {
    /// Pointer to the next element
    next: NonNull<u8>,

    /// The number of remaining elements in the iterator.
    remaining: usize,

    /// The number of bytes to skip to get to the next element
    stride: usize,
}
384
385impl Iterator for ArrayHandleIter {
386    type Item = NonNull<u8>;
387
388    fn next(&mut self) -> Option<Self::Item> {
389        if self.remaining > 0 {
390            let element = self.next;
391            self.remaining -= 1;
392            self.next = unsafe { NonNull::new_unchecked(self.next.as_ptr().add(self.stride)) };
393            Some(element)
394        } else {
395            None
396        }
397    }
398}
399
400/// Creates a layout describing the record for `n` instances of `layout`, with a suitable amount of
401/// padding between each to ensure that each instance is given its requested size and alignment.
402///
403/// Implementation taken from `Layout::repeat` (which is currently unstable)
404fn repeat_layout(layout: Layout, n: usize) -> Result<Layout, MemoryLayoutError> {
405    let len_rounded_up = layout.size().wrapping_add(layout.align()).wrapping_sub(1)
406        & !layout.align().wrapping_sub(1);
407    let padded_size = layout.size() + len_rounded_up.wrapping_sub(layout.align());
408    let alloc_size = padded_size
409        .checked_mul(n)
410        .ok_or(MemoryLayoutError::OutOfBounds)?;
411    Layout::from_size_align(alloc_size, layout.align()).map_err(Into::into)
412}
413
/// Allocates memory for an array type with `length` elements. `ty` must be an array type.
///
/// The element storage (header plus elements) is allocated separately by [`array_header`];
/// the returned pinned box only holds the bookkeeping record.
fn alloc_array(ty: Type, length: usize) -> Pin<Box<ObjectInfo>> {
    Box::pin(ObjectInfo {
        // Array objects use the `array` variant of the data union.
        data: ObjectInfoData {
            array: array_header(&ty, length),
        },
        ty,
        roots: 0,
        // New objects start out white (unmarked).
        color: Color::White,
    })
}
425
/// Constructs an array header for an array type with `length` elements.
///
/// Returns a pointer into a single zeroed allocation that holds the `ArrayHeader` followed by
/// (suitably padded) storage for `length` elements.
///
/// # Panics
///
/// Panics if `ty` is not an array type, if the combined layout cannot be computed, or if the
/// allocation fails.
fn array_header(ty: &Type, length: usize) -> NonNull<ArrayHeader> {
    let array_ty = ty
        .as_array()
        .expect("array type doesnt have an element type");

    // Allocate memory for the array data. `reference_layout` is used because GC element types
    // are stored as references rather than inline values (see `ArrayHandle::element_layout`).
    let header_layout = Layout::new::<ArrayHeader>();
    let element_ty_layout = array_ty.element_type().reference_layout();
    let elements_layout = repeat_layout(element_ty_layout, length)
        .expect("unable to create a memory layout for array elemets");
    let (layout, _) = header_layout
        .extend(elements_layout)
        .expect("unable to create memory layout for array");

    // Zero-initialize so both the header fields and the elements start in a known state.
    let mut array_header: NonNull<ArrayHeader> =
        NonNull::new(unsafe { std::alloc::alloc_zeroed(layout).cast() })
            .expect("error allocating memory for array");
    // Safety: the allocation above starts with a properly aligned `ArrayHeader`.
    let array = unsafe { array_header.as_mut() };
    array.length = length;
    array.capacity = length;

    array_header
}
450
impl<O> GcRuntime for MarkSweep<O>
where
    O: Observer<Event = Event>,
{
    type Array = ArrayHandle;

    /// Allocates a new zero-initialized object of type `ty` and returns its handle.
    fn alloc(&self, ty: &Type) -> GcPtr {
        assert!(ty.is_concrete());

        let object = alloc_obj(ty.clone());
        let size = object.layout().size();

        // We want to return a pointer to the `ObjectInfo`, to be used as handle.
        let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into();

        {
            let mut objects = self.objects.write();
            objects.insert(handle, object);
        }

        self.log_alloc(handle, size);
        handle
    }

    /// Allocates a new array object of type `ty` with `n` zero-initialized elements.
    fn alloc_array(&self, ty: &Type, n: usize) -> Self::Array {
        let object = alloc_array(ty.clone(), n);
        let size = object.layout().size();

        // We want to return a pointer to the `ObjectInfo`, to be used as handle.
        let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into();

        {
            let mut objects = self.objects.write();
            objects.insert(handle, object);
        }

        self.log_alloc(handle, size);
        ArrayHandle {
            // Safety: `handle` was just created from a valid `ObjectInfo` pointer above, so
            // it cannot be null.
            obj: unsafe { NonNull::new_unchecked(handle.into()) },
        }
    }

    /// Returns the type of the object referenced by `handle`.
    fn ptr_type(&self, handle: GcPtr) -> Type {
        // Hold the read lock so the object cannot be removed while we access it.
        let _lock = self.objects.read();

        // Convert the handle to our internal representation
        let object_info: *const ObjectInfo = handle.into();

        // Return the type of the object
        unsafe { (*object_info).ty.clone() }
    }

    /// Returns an [`ArrayHandle`] for `handle`, or `None` if the object is not an array.
    fn array(&self, handle: GcPtr) -> Option<Self::Array> {
        // Hold the read lock so the object cannot be removed while we access it.
        let _lock = self.objects.read();
        let obj: NonNull<ObjectInfo> =
            NonNull::new(handle.into()).expect("cannot have a null handle here");
        unsafe {
            if !obj.as_ref().ty.is_array() {
                return None;
            }
        }

        Some(ArrayHandle { obj })
    }

    /// Roots the object behind `handle`, keeping it (and everything reachable from it) alive
    /// across collections.
    fn root(&self, handle: GcPtr) {
        // The write lock guarantees exclusive access to the object's root count.
        let _lock = self.objects.write();

        // Convert the handle to our internal representation
        let object_info: *mut ObjectInfo = handle.into();

        unsafe { (*object_info).roots += 1 };
    }

    /// Removes one root from the object; once the count reaches zero the object is again
    /// eligible for collection.
    fn unroot(&self, handle: GcPtr) {
        // The write lock guarantees exclusive access to the object's root count.
        let _lock = self.objects.write();

        // Convert the handle to our internal representation
        let object_info: *mut ObjectInfo = handle.into();

        unsafe { (*object_info).roots -= 1 };
    }

    /// Returns a snapshot of the collector's memory statistics.
    fn stats(&self) -> Stats {
        self.stats.read().clone()
    }
}
538
impl<O> MarkSweep<O>
where
    O: Observer<Event = Event>,
{
    /// Collects all memory that is no longer referenced by rooted objects. Returns `true` if memory
    /// was reclaimed, `false` otherwise.
    ///
    /// Uses tri-color marking: objects start white, are marked gray when queued for tracing,
    /// black once traced, and everything still white afterwards is swept.
    pub fn collect(&self) -> bool {
        self.observer.event(Event::Start);

        // Hold the write lock for the entire collection so no concurrent allocation or
        // access can observe a half-collected heap.
        let mut objects = self.objects.write();

        // Get all roots
        let mut roots = objects
            .iter()
            .filter_map(|(_, obj)| {
                if obj.roots > 0 {
                    Some(obj.as_ref().get_ref() as *const _ as *mut ObjectInfo)
                } else {
                    None
                }
            })
            .collect::<VecDeque<_>>();

        // Iterate over all roots
        while let Some(next) = roots.pop_front() {
            let handle = (next as *const _ as RawGcPtr).into();

            // Trace all other objects
            for reference in unsafe { (*next).ty.trace(handle) } {
                let ref_ptr = objects
                    .get_mut(&reference)
                    .expect("found invalid reference");
                if ref_ptr.color == Color::White {
                    let ptr = ref_ptr.as_ref().get_ref() as *const _ as *mut ObjectInfo;
                    // Mark gray and queue it so its own references get traced too.
                    unsafe { (*ptr).color = Color::Gray };
                    roots.push_back(ptr);
                }
            }

            // This object has been traced
            unsafe {
                (*next).color = Color::Black;
            }
        }

        // Sweep all non-reachable objects
        let size_before = objects.len();
        objects.retain(|h, obj| {
            if obj.color == Color::Black {
                // Reachable: reset to white for the next collection cycle.
                unsafe {
                    obj.as_mut().get_unchecked_mut().color = Color::White;
                }
                true
            } else {
                // Unreachable: free the value memory, notify the observer, update the stats.
                let value_memory_layout = obj.layout();
                unsafe { std::alloc::dealloc(obj.data.ptr.as_mut(), value_memory_layout) };
                self.observer.event(Event::Deallocation(*h));
                {
                    let mut stats = self.stats.write();
                    stats.allocated_memory -= value_memory_layout.size();
                }
                false
            }
        });
        let size_after = objects.len();

        self.observer.event(Event::End);

        size_before != size_after
    }
}
610
611impl<O> MemoryMapper for MarkSweep<O>
612where
613    O: Observer<Event = Event>,
614{
615    fn map_memory(&self, mapping: Mapping) -> Vec<GcPtr> {
616        let mut objects = self.objects.write();
617
618        // Determine which types are still allocated with deleted types
619        let deleted = objects
620            .iter()
621            .filter_map(|(ptr, object_info)| {
622                if mapping.deletions.contains(&object_info.ty) {
623                    Some(*ptr)
624                } else {
625                    None
626                }
627            })
628            .collect();
629
630        // Update type pointers of types that didn't change
631        for (old_ty, new_ty) in mapping.identical {
632            for object_info in objects.values_mut() {
633                if object_info.ty == old_ty {
634                    object_info.set(ObjectInfo {
635                        data: ObjectInfoData {
636                            ptr: unsafe { object_info.data.ptr },
637                        },
638                        roots: object_info.roots,
639                        color: object_info.color,
640                        ty: new_ty.clone(),
641                    });
642                }
643            }
644        }
645
646        let mut new_allocations = Vec::new();
647
648        // Map struct types
649        objects
650            .values_mut()
651            .filter(|object_info| object_info.ty.is_struct())
652            .for_each(|object_info| {
653                if let Some(conversion) = mapping.struct_mappings.get(&object_info.ty) {
654                    let old_layout = object_info.ty.value_layout();
655                    let src = unsafe { object_info.data.ptr };
656                    let dest = unsafe {
657                        NonNull::new_unchecked(std::alloc::alloc_zeroed(
658                            conversion.new_ty.value_layout(),
659                        ))
660                    };
661
662                    map_struct(
663                        &mut new_allocations,
664                        &mapping.struct_mappings,
665                        &conversion.field_mapping,
666                        src,
667                        dest,
668                    );
669
670                    unsafe { std::alloc::dealloc(src.as_ptr(), old_layout) };
671
672                    object_info.set(ObjectInfo {
673                        data: ObjectInfoData { ptr: dest },
674                        roots: object_info.roots,
675                        color: object_info.color,
676                        ty: conversion.new_ty.clone(),
677                    });
678                }
679            });
680
681        // Map rooted array types
682        objects
683            .values_mut()
684            .filter(|object_info| object_info.ty.is_array())
685            .for_each(|object_info| {
686                let mut ty = object_info.ty.clone();
687                let mut stack = Vec::new();
688
689                while let Some(array) = ty.as_array() {
690                    stack.push(ty.clone());
691                    ty = array.element_type();
692                }
693
694                let old_element_ty = ty;
695                if let Some(conversion) = mapping.struct_mappings.get(&old_element_ty) {
696                    let mut new_ty = conversion.new_ty.clone();
697                    while stack.pop().is_some() {
698                        new_ty = new_ty.array_type();
699                    }
700
701                    // Only arrays containing structs need to be mapped, as an array of arrays merely
702                    // contains `GcPtr`s.
703                    let new_element_ty = new_ty.as_array().unwrap().element_type();
704                    if new_element_ty.is_struct() {
705                        // Conversion between ADTs are already handled in struct mappings
706                        assert!(old_element_ty.is_struct());
707
708                        let element_action =
709                            resolve_struct_to_struct_edit(&old_element_ty, &new_element_ty, 0);
710
711                        map_array(
712                            &mut new_allocations,
713                            &mapping.struct_mappings,
714                            unsafe {
715                                NonNull::new_unchecked(
716                                    object_info.as_mut().deref_mut() as *mut ObjectInfo
717                                )
718                            },
719                            &element_action,
720                            &new_ty,
721                        );
722                    } else {
723                        // Update the type of arrays of arrays
724                        object_info.as_mut().ty = conversion.new_ty.clone();
725                    }
726                }
727            });
728
729        // Retroactively store newly allocated objects
730        // This cannot be done while mapping because we hold a mutable reference to objects
731        for object in new_allocations {
732            let size = object.layout().size();
733            // We want to return a pointer to the `ObjectInfo`, to
734            // be used as handle.
735            let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into();
736            objects.insert(handle, object);
737
738            self.log_alloc(handle, size);
739        }
740
741        return deleted;
742
743        unsafe fn get_field_ptr(struct_ptr: NonNull<u8>, offset: usize) -> NonNull<u8> {
744            let mut ptr = struct_ptr.as_ptr() as usize;
745            ptr += offset;
746            NonNull::new_unchecked(ptr as *mut u8)
747        }
748
749        fn map_array(
750            new_allocations: &mut Vec<Pin<Box<ObjectInfo>>>,
751            conversions: &HashMap<Type, StructMapping>,
752            mut src_object: NonNull<ObjectInfo>,
753            element_action: &Action,
754            new_ty: &Type,
755        ) {
756            let src_array = ArrayHandle { obj: src_object };
757
758            // Initialize the array
759            let new_header = array_header(new_ty, src_array.length());
760
761            let mut dest_obj = ObjectInfo {
762                data: ObjectInfoData { array: new_header },
763                roots: unsafe { src_object.as_ref().roots },
764                color: unsafe { src_object.as_ref().color },
765                ty: new_ty.clone(),
766            };
767
768            let dest_array = ArrayHandle {
769                obj: unsafe { NonNull::new_unchecked(&mut dest_obj as *mut ObjectInfo) },
770            };
771
772            // Map array elements
773            src_array
774                .elements()
775                .zip(dest_array.elements())
776                .for_each(|(src, dest)| {
777                    map_type(
778                        new_allocations,
779                        conversions,
780                        src,
781                        dest,
782                        element_action,
783                        &new_ty.as_array().expect("Must be an array.").element_type(),
784                    )
785                });
786
787            unsafe {
788                let src_obj = src_object.as_mut();
789                std::alloc::dealloc(src_obj.data.ptr.as_mut(), src_obj.layout());
790                *src_obj = dest_obj;
791            };
792        }
793
794        fn map_type(
795            new_allocations: &mut Vec<Pin<Box<ObjectInfo>>>,
796            conversions: &HashMap<Type, StructMapping>,
797            src: NonNull<u8>,
798            dest: NonNull<u8>,
799            action: &mapping::Action,
800            new_ty: &Type,
801        ) {
802            match action {
803                mapping::Action::ArrayAlloc => {
804                    // Initialize the array with no values
805                    let object = alloc_array(new_ty.clone(), 0);
806
807                    // We want to return a pointer to the `ObjectInfo`, to be used as handle.
808                    let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into();
809
810                    // Write handle to field
811                    let mut dest_handle = dest.cast::<GcPtr>();
812                    unsafe { *dest_handle.as_mut() = handle };
813
814                    new_allocations.push(object);
815                }
816                mapping::Action::ArrayFromValue {
817                    element_action,
818                    old_offset,
819                } => {
820                    // Initialize the array with a single value
821                    let mut object = alloc_array(new_ty.clone(), 1);
822
823                    let array_handle = ArrayHandle {
824                        obj: unsafe {
825                            NonNull::new_unchecked(object.as_mut().deref_mut() as *mut ObjectInfo)
826                        },
827                    };
828
829                    // Map single element to array
830                    map_type(
831                        new_allocations,
832                        conversions,
833                        unsafe { get_field_ptr(src, *old_offset) },
834                        array_handle.data(),
835                        element_action,
836                        &new_ty.as_array().expect("Must be an array.").element_type(),
837                    );
838
839                    // We want to return a pointer to the `ObjectInfo`, to be used as handle.
840                    let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into();
841
842                    // Write handle to field
843                    let mut dest_handle = dest.cast::<GcPtr>();
844                    unsafe { *dest_handle.as_mut() = handle };
845
846                    new_allocations.push(object);
847                }
848                mapping::Action::ArrayMap {
849                    element_action,
850                    old_offset,
851                } => {
852                    let src_ptr = unsafe { get_field_ptr(src, *old_offset) };
853
854                    // Safety: we already hold a write lock on `objects`, so this is legal.
855                    let src_obj = unsafe { *src_ptr.cast::<NonNull<ObjectInfo>>().as_ref() };
856
857                    map_array(
858                        new_allocations,
859                        conversions,
860                        src_obj,
861                        element_action,
862                        new_ty,
863                    );
864
865                    unsafe {
866                        std::ptr::copy_nonoverlapping(
867                            src_ptr.as_ptr(),
868                            dest.as_ptr(),
869                            std::mem::size_of::<GcPtr>(),
870                        )
871                    };
872                }
873                mapping::Action::Cast { old_offset, old_ty } => {
874                    if !cast::try_cast_from_to(
875                        old_ty.clone(),
876                        new_ty.clone(),
877                        unsafe { get_field_ptr(src, *old_offset) },
878                        dest,
879                    ) {
880                        // Failed to cast. Use the previously zero-initialized value instead
881                    }
882                }
883                mapping::Action::Copy {
884                    old_offset,
885                    size: size_in_bytes,
886                } => {
887                    unsafe {
888                        std::ptr::copy_nonoverlapping(
889                            get_field_ptr(src, *old_offset).as_ptr(),
890                            dest.as_ptr(),
891                            *size_in_bytes,
892                        )
893                    };
894                }
895                mapping::Action::ElementFromArray {
896                    element_action,
897                    old_offset,
898                } => {
899                    // Safety: we already hold a write lock on `objects`, so this is legal.
900                    let obj = unsafe {
901                        *get_field_ptr(src, *old_offset)
902                            .cast::<NonNull<ObjectInfo>>()
903                            .as_ref()
904                    };
905
906                    let array_handle = ArrayHandle { obj };
907
908                    if array_handle.header().length > 0 {
909                        // Map single element from array
910                        map_type(
911                            new_allocations,
912                            conversions,
913                            array_handle.data(),
914                            dest,
915                            element_action,
916                            new_ty,
917                        )
918                    } else {
919                        // zero initialize
920                    }
921                }
922                mapping::Action::StructAlloc => {
923                    let object = alloc_obj(new_ty.clone());
924
925                    // We want to return a pointer to the `ObjectInfo`, to be used as handle.
926                    let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into();
927
928                    // Write handle to field
929                    let mut dest_handle = dest.cast::<GcPtr>();
930                    unsafe { *dest_handle.as_mut() = handle };
931
932                    new_allocations.push(object);
933                }
934                mapping::Action::StructMapFromGc { old_ty, old_offset } => {
935                    let conversion = conversions.get(old_ty).unwrap_or_else(|| {
936                        panic!(
937                        "If the struct changed, there must also be a conversion for type: {:#?}.",
938                        old_ty,
939                    )
940                    });
941
942                    // Safety: we already hold a write lock on `objects`, so this is legal.
943                    let object = unsafe {
944                        *get_field_ptr(src, *old_offset)
945                            .cast::<NonNull<ObjectInfo>>()
946                            .as_ref()
947                    };
948
949                    // Map heap-allocated struct to in-memory struct
950                    map_struct(
951                        new_allocations,
952                        conversions,
953                        &conversion.field_mapping,
954                        // SAFETY: pointer is guaranteed to be valid
955                        unsafe { object.as_ref().data.ptr },
956                        dest,
957                    );
958                }
959                mapping::Action::StructMapFromValue { old_ty, old_offset } => {
960                    let object = alloc_obj(new_ty.clone());
961
962                    let conversion = conversions.get(old_ty).unwrap_or_else(|| {
963                        panic!(
964                        "If the struct changed, there must also be a conversion for type: {:#?}.",
965                        old_ty,
966                    )
967                    });
968
969                    // Map in-memory struct to heap-allocated struct
970                    map_struct(
971                        new_allocations,
972                        conversions,
973                        &conversion.field_mapping,
974                        unsafe { get_field_ptr(src, *old_offset) },
975                        // SAFETY: pointer is guaranteed to be valid
976                        unsafe { object.as_ref().data.ptr },
977                    );
978
979                    // We want to return a pointer to the `ObjectInfo`, to be used as handle.
980                    let handle = (object.as_ref().deref() as *const _ as RawGcPtr).into();
981
982                    // Write handle to field
983                    let mut dest_handle = dest.cast::<GcPtr>();
984                    unsafe { *dest_handle.as_mut() = handle };
985
986                    new_allocations.push(object);
987                }
988                mapping::Action::StructMapInPlace { old_ty, old_offset } => {
989                    let conversion = conversions.get(old_ty).unwrap_or_else(|| {
990                        panic!(
991                        "If the struct changed, there must also be a conversion for type: {:#?}.",
992                        old_ty,
993                    )
994                    });
995
996                    map_struct(
997                        new_allocations,
998                        conversions,
999                        &conversion.field_mapping,
1000                        unsafe { get_field_ptr(src, *old_offset) },
1001                        dest,
1002                    );
1003                }
1004                mapping::Action::ZeroInitialize => {
1005                    // Use previously zero-initialized memory
1006                }
1007            }
1008        }
1009
1010        #[allow(clippy::mutable_key_type)]
1011        fn map_struct(
1012            new_allocations: &mut Vec<Pin<Box<ObjectInfo>>>,
1013            conversions: &HashMap<Type, StructMapping>,
1014            mapping: &[FieldMapping],
1015            src: NonNull<u8>,
1016            dest: NonNull<u8>,
1017        ) {
1018            for FieldMapping {
1019                new_ty,
1020                new_offset,
1021                action,
1022            } in mapping.iter()
1023            {
1024                let field_dest = unsafe { get_field_ptr(dest, *new_offset) };
1025                map_type(
1026                    new_allocations,
1027                    conversions,
1028                    src,
1029                    field_dest,
1030                    action,
1031                    new_ty,
1032                );
1033            }
1034        }
1035    }
1036}
1037
/// Coloring used in the Mark Sweep phase.
///
/// Tri-color marking scheme: white = not yet seen, gray = discovered but not
/// yet visited, black = fully visited.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Color {
    /// A white object has not been seen yet by the mark phase
    White,

    /// A gray object has been seen by the mark phase but has not yet been visited
    Gray,

    /// A black object has been visited by the mark phase
    Black,
}
1050
/// An indirection table that stores the address to the actual memory, the type of the object and
/// meta information.
///
/// NOTE(review): `#[repr(C)]` with `data` as the first field appears to be
/// relied upon so a pointer to the `ObjectInfo` can double as a pointer to the
/// data pointer (handles are created as `object.as_ref().deref() as *const _`)
/// — confirm against the `Deref` impl before changing field order.
#[repr(C)]
struct ObjectInfo {
    /// Pointer to the object's backing memory (plain value or array header).
    pub data: ObjectInfoData,
    /// Root count. NOTE(review): presumably the number of times this object is
    /// rooted, keeping it alive while non-zero — confirm with the root/unroot code.
    pub roots: u32,
    /// Current tri-color mark state (see [`Color`]).
    pub color: Color,
    /// The runtime type of the stored object.
    pub ty: Type,
}
1060
/// Pointer to an object's backing memory.
///
/// Which member is active depends on the object's type: `ObjectInfo::layout`
/// reads `array` for `TypeKind::Array` objects, while struct tracing reads
/// `ptr` for the remaining kinds.
#[repr(C)]
union ObjectInfoData {
    /// Raw pointer to the value's memory (non-array objects).
    pub ptr: NonNull<u8>,
    /// Pointer to the array header (array objects).
    pub array: NonNull<ArrayHeader>,
}
1066
/// An `ObjectInfo` is thread-safe.
// SAFETY(review): `ObjectInfo` contains raw `NonNull` pointers, which are not
// `Send`/`Sync` by default. These impls assert that access to the pointed-to
// memory is externally synchronized (the collector guards its object table
// with a lock) — confirm before introducing new concurrent access paths.
unsafe impl Send for ObjectInfo {}
unsafe impl Sync for ObjectInfo {}
1070
1071impl From<GcPtr> for *const ObjectInfo {
1072    fn from(ptr: GcPtr) -> Self {
1073        ptr.as_ptr() as Self
1074    }
1075}
1076
1077impl From<GcPtr> for *mut ObjectInfo {
1078    fn from(ptr: GcPtr) -> Self {
1079        ptr.as_ptr() as Self
1080    }
1081}
1082
1083impl From<*const ObjectInfo> for GcPtr {
1084    fn from(info: *const ObjectInfo) -> Self {
1085        (info as RawGcPtr).into()
1086    }
1087}
1088
1089impl From<*mut ObjectInfo> for GcPtr {
1090    fn from(info: *mut ObjectInfo) -> Self {
1091        (info as RawGcPtr).into()
1092    }
1093}
1094
1095impl From<NonNull<ObjectInfo>> for GcPtr {
1096    fn from(info: NonNull<ObjectInfo>) -> Self {
1097        (info.as_ptr() as RawGcPtr).into()
1098    }
1099}
1100
1101impl ObjectInfo {
1102    /// Returns the layout of the data pointed to by data
1103    pub fn layout(&self) -> Layout {
1104        match self.ty.kind() {
1105            TypeKind::Struct(_) | TypeKind::Primitive(_) | TypeKind::Pointer(_) => {
1106                self.ty.value_layout()
1107            }
1108            TypeKind::Array(array) => {
1109                let elem_count = unsafe { self.data.array.as_ref().capacity };
1110                let elem_layout = repeat_layout(array.element_type().value_layout(), elem_count)
1111                    .expect("unable to determine layout of array elements");
1112                let (layout, _) = Layout::new::<ArrayHeader>()
1113                    .extend(elem_layout)
1114                    .expect("unable to determine layout of array");
1115                layout
1116            }
1117        }
1118    }
1119}