shapely_core/partial.rs

use crate::{FieldError, Innards, ListVTable, ShapeDesc, Shapely, Slot, trace};
use std::{alloc, ptr::NonNull};

/// Origin of the partial — did we allocate it? Or is it borrowed?
pub enum Origin<'s> {
    /// It was allocated via `alloc::alloc` and needs to be deallocated on drop,
    /// moving out, etc.
    HeapAllocated,

    /// It was generously lent to us by some outside code, and we are NOT
    /// to free it (although we should still uninitialize any fields that we initialized).
    Borrowed {
        /// The parent `Partial` that we borrowed from.
        parent: Option<&'s Partial<'s>>,

        /// Some mark that indicates whether this field is initialized or not — we should
        /// set it after initializing the memory we got.
        init_mark: InitMark<'s>,
    },
}

/// A partially-initialized shape.
///
/// This type keeps track of the initialized state of every field, and only allows getting
/// out the concrete type (by value or boxed), or moving the value into a pointer, once all
/// the fields have been initialized.
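///
/// # Example
///
/// A minimal usage sketch. The `Point` type is a hypothetical struct implementing [`Shapely`],
/// and the way values are written through the returned [`Slot`]s is elided (see `slot.rs`):
///
/// ```ignore
/// let mut partial = Partial::alloc(Point::shape_desc());
/// {
///     let slot = partial.slot_by_name("x")?;
///     // ... write an f64 into `slot` through the `Slot` API ...
/// }
/// {
///     let slot = partial.slot_by_name("y")?;
///     // ... write an f64 into `slot` through the `Slot` API ...
/// }
/// // Panics if any field was left uninitialized, or if the shapes don't match.
/// let point: Point = partial.build();
/// ```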
pub struct Partial<'s> {
    /// Address of the value we're building in memory.
    /// If the type is a ZST, then the addr will be dangling.
    pub(crate) addr: NonNull<u8>,

    /// Where `addr` came from (i.e. are we responsible for freeing it?)
    pub(crate) origin: Origin<'s>,

    /// Keeps track of which fields are initialized
    pub(crate) init_set: InitSet64,

    /// The shape we're building, asserted when building, but
    /// also when getting field slots, etc.
    pub(crate) shape: ShapeDesc,
}

/// We can build a tree of partials when deserializing, so `Partial<'s>` has to be covariant over `'s`.
fn _assert_partial_covariant<'long: 'short, 'short>(partial: Partial<'long>) -> Partial<'short> {
    partial
}

impl Drop for Partial<'_> {
    // This drop function is only really called when a partial is dropped without being fully
    // built out. Otherwise, it's forgotten because the value has been moved elsewhere.
    //
    // As a result, its only job is to drop any fields that may have been initialized. And finally
    // to free the memory for the partial itself if we own it.
    fn drop(&mut self) {
        match self.shape.get().innards {
            crate::Innards::Struct { fields } => {
                fields
                    .iter()
                    .enumerate()
                    .filter_map(|(i, field)| {
                        if self.init_set.is_set(i) {
                            Some((field, field.shape.get().drop_in_place?))
                        } else {
                            None
                        }
                    })
                    .for_each(|(field, drop_fn)| {
                        unsafe {
                            // SAFETY: field_addr is valid, aligned, and initialized.
                            //
                            // If the struct is a ZST, then `self.addr` is dangling.
                            // That also means that all the fields are ZSTs, which means
                            // the actual address we pass to the drop fn does not matter,
                            // but we do want the side effects.
                            //
                            // If the struct is not a ZST, then `self.addr` is a valid address.
                            // The fields can still be ZST and that's not a special case, really.
                            drop_fn(self.addr.byte_add(field.offset).as_ptr());
                        }
                    })
            }
            crate::Innards::Scalar(_) => {
                if self.init_set.is_set(0) {
                    // Drop the scalar value if it has a drop function
                    if let Some(drop_fn) = self.shape.get().drop_in_place {
                        // SAFETY: self.addr is always valid for Scalar types,
                        // even for ZSTs where it might be dangling.
                        unsafe {
                            drop_fn(self.addr.as_ptr());
                        }
                    }
                }
            }
            _ => {}
        }

        self.deallocate()
    }
}

impl Partial<'_> {
    /// Allocates a partial on the heap for the given shape descriptor.
    pub fn alloc(shape: ShapeDesc) -> Self {
        let sh = shape.get();
        let layout = sh.layout;
        let addr = if layout.size() == 0 {
            // ZSTs need a well-aligned address
            sh.dangling()
        } else {
            let addr = unsafe { alloc::alloc(layout) };
            if addr.is_null() {
                alloc::handle_alloc_error(layout);
            }
            // SAFETY: We just allocated this memory and checked that it's not null,
            // so it's safe to create a NonNull from it.
            unsafe { NonNull::new_unchecked(addr) }
        };

        Self {
            origin: Origin::HeapAllocated,
            addr,
            init_set: Default::default(),
            shape,
        }
    }

    /// Borrows a `MaybeUninit<T>` and returns a `Partial` building into it.
    ///
    /// Before calling `assume_init`, make sure to call `Partial::build_in_place()`.
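    ///
    /// # Example
    ///
    /// A minimal sketch, assuming a hypothetical `Point` struct that implements [`Shapely`]:
    ///
    /// ```ignore
    /// let mut uninit = std::mem::MaybeUninit::<Point>::uninit();
    /// let mut partial = Partial::borrow(&mut uninit);
    /// // ... initialize every field through `partial.slot_by_name(...)` ...
    /// partial.build_in_place();
    /// // SAFETY: `build_in_place()` panics unless every field was initialized.
    /// let point = unsafe { uninit.assume_init() };
    /// ```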
    pub fn borrow<T: Shapely>(uninit: &mut std::mem::MaybeUninit<T>) -> Self {
        Self {
            origin: Origin::Borrowed {
                parent: None,
                init_mark: InitMark::Ignored,
            },
            addr: NonNull::new(uninit.as_mut_ptr() as _).unwrap(),
            init_set: Default::default(),
            shape: T::shape_desc(),
        }
    }

    /// Checks if all fields in the struct or scalar value have been initialized.
    /// Panics if any field is not initialized, providing details about the uninitialized field.
    pub(crate) fn assert_all_fields_initialized(&self) {
        match self.shape.get().innards {
            crate::Innards::Struct { fields } => {
                for (i, field) in fields.iter().enumerate() {
                    if !self.init_set.is_set(i) {
                        panic!(
                            "Field '{}' was not initialized. Complete schema:\n{:?}",
                            field.name,
                            self.shape.get()
                        );
                    }
                }
            }
            crate::Innards::Scalar(_) => {
                if !self.init_set.is_set(0) {
                    panic!(
                        "Scalar value was not initialized. Complete schema:\n{:?}",
                        self.shape.get()
                    );
                }
            }
            crate::Innards::Enum {
                variants: _,
                repr: _,
            } => {
                // Check if a variant has been selected (bit 0)
                if !self.init_set.is_set(0) {
                    panic!(
                        "No enum variant was selected. Complete schema:\n{:?}",
                        self.shape.get()
                    );
                }

                // Get the selected variant
                if let Some(variant_index) = self.selected_variant_index() {
                    let shape = self.shape.get();
                    if let crate::Innards::Enum { variants, repr: _ } = &shape.innards {
                        let variant = &variants[variant_index];

                        // Check if all fields of the selected variant are initialized
                        match &variant.kind {
                            crate::VariantKind::Unit => {
                                // Unit variants don't have fields, so they're initialized if the variant is selected
                            }
                            crate::VariantKind::Tuple { fields }
                            | crate::VariantKind::Struct { fields } => {
                                // Check each field
                                for (field_index, field) in fields.iter().enumerate() {
                                    // Field init bits start at index 1 (index 0 is for variant selection)
                                    let init_bit = field_index + 1;
                                    if !self.init_set.is_set(init_bit) {
                                        panic!(
                                            "Field '{}' of variant '{}' was not initialized. Complete schema:\n{:?}",
                                            field.name,
                                            variant.name,
                                            self.shape.get()
                                        );
                                    }
                                }
                            }
                        }
                    }
                }
            }
            _ => {}
        }
    }

    /// Returns a slot for treating this partial as an array (onto which you can push new items)
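    ///
    /// # Example
    ///
    /// A minimal sketch, assuming `Vec<u32>` (and `u32`) implement [`Shapely`] with
    /// `Innards::List`; how the item values are written is elided (see the `Slot` API):
    ///
    /// ```ignore
    /// let mut partial = Partial::alloc(<Vec<u32>>::shape_desc());
    /// let mut array = partial.array_slot(Some(3)).expect("shape is not a list");
    /// for _ in 0..3 {
    ///     let mut item = Partial::alloc(u32::shape_desc());
    ///     // ... write a u32 through `item.scalar_slot()` ...
    ///     array.push(item);
    /// }
    /// let v: Vec<u32> = partial.build();
    /// ```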
    pub fn array_slot(&mut self, size_hint: Option<usize>) -> Option<ArraySlot> {
        match self.shape.get().innards {
            crate::Innards::List {
                vtable,
                item_shape: _,
            } => {
                if self.init_set.is_set(0) {
                    panic!("Array is already initialized");
                }

                // Initialize the array using the vtable's init function
                unsafe {
                    (vtable.init)(self.addr.as_ptr(), size_hint);
                }

                // Mark the array as initialized in our init_set
                self.init_set.set(0);

                Some(unsafe { ArraySlot::new(self.addr, vtable) })
            }
            _ => None,
        }
    }

    /// Returns a slot for treating this partial as a HashMap (into which you can insert key-value pairs).
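    ///
    /// # Example
    ///
    /// A minimal sketch, assuming `HashMap<String, u32>` (and its key/value types) implement
    /// [`Shapely`] with `Innards::Map`; writing the key and value through their slots is elided:
    ///
    /// ```ignore
    /// use std::collections::HashMap;
    ///
    /// let mut partial = Partial::alloc(<HashMap<String, u32>>::shape_desc());
    /// let mut map = partial.hashmap_slot(Some(1)).expect("shape is not a map");
    /// let mut key = Partial::alloc(String::shape_desc());
    /// let mut value = Partial::alloc(u32::shape_desc());
    /// // ... write "answer" into `key` and 42u32 into `value` through their scalar slots ...
    /// map.insert(key, value);
    /// let m: HashMap<String, u32> = partial.build();
    /// ```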
    pub fn hashmap_slot(&mut self, size_hint: Option<usize>) -> Option<HashMapSlot> {
        match self.shape.get().innards {
            crate::Innards::Map {
                vtable,
                value_shape: _,
            } => {
                if self.init_set.is_set(0) {
                    panic!("HashMap is already initialized");
                }

                // Initialize the HashMap using the vtable's init function
                unsafe {
                    (vtable.init)(self.addr.as_ptr(), size_hint);
                }

                // Mark the HashMap as initialized in our init_set
                self.init_set.set(0);

                Some(unsafe { HashMapSlot::new(self.addr, vtable) })
            }
            _ => None,
        }
    }

    /// Returns an iterator over the key-value pairs in a HashMap
    pub fn hashmap_iter(&self) -> Option<HashMapIter> {
        match self.shape.get().innards {
            crate::Innards::Map {
                vtable,
                value_shape: _,
            } => {
                // Get the iterator from the vtable
                let iter_raw = unsafe { (vtable.iter)(self.addr.as_ptr()) };
                if iter_raw.is_null() {
                    return None;
                }

                Some(HashMapIter {
                    iter_ptr: iter_raw,
                    vtable: vtable.iter_vtable,
                })
            }
            _ => None,
        }
    }

    /// Returns a slot for assigning this whole shape as a scalar
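    ///
    /// # Example
    ///
    /// A minimal sketch, assuming `u64` implements [`Shapely`]; how the value is written
    /// through the returned [`Slot`] is elided:
    ///
    /// ```ignore
    /// let mut partial = Partial::alloc(u64::shape_desc());
    /// {
    ///     let slot = partial.scalar_slot().expect("shape is not a scalar");
    ///     // ... write 42u64 through `slot` ...
    /// }
    /// let n: u64 = partial.build();
    /// ```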
    pub fn scalar_slot(&mut self) -> Option<Slot<'_>> {
        match self.shape.get().innards {
            crate::Innards::Scalar(_) => {
                let slot = Slot::for_ptr(
                    self.addr,
                    self.shape,
                    InitMark::Struct {
                        index: 0,
                        set: &mut self.init_set,
                    },
                );
                Some(slot)
            }
            crate::Innards::Transparent(inner_shape) => {
                let slot = Slot::for_ptr(
                    self.addr,
                    inner_shape,
                    InitMark::Struct {
                        index: 0,
                        set: &mut self.init_set,
                    },
                );
                Some(slot)
            }
            _ => panic!(
                "Expected scalar innards, found {:?}",
                self.shape.get().innards
            ),
        }
    }

    /// Returns a slot for initializing a field in the shape.
    pub fn slot_by_name<'s>(&'s mut self, name: &str) -> Result<Slot<'s>, FieldError> {
        let shape = self.shape.get();
        match shape.innards {
            Innards::Struct { fields }
            | Innards::TupleStruct { fields }
            | Innards::Tuple { fields } => {
                let (index, field) = fields
                    .iter()
                    .enumerate()
                    .find(|(_, f)| f.name == name)
                    .ok_or(FieldError::NoSuchStaticField)?;
                let field_addr = unsafe {
                    // SAFETY: self.addr is a valid pointer to the start of the struct,
                    // and field.offset is the correct offset for this field within the struct.
                    // The resulting pointer is properly aligned and within the bounds of the allocated memory.
                    self.addr.byte_add(field.offset)
                };
                Ok(Slot::for_ptr(
                    field_addr,
                    field.shape,
                    self.init_set.field(index),
                ))
            }
            Innards::Map { .. } => Err(FieldError::NoStaticFields),
            Innards::Transparent(_) => Err(FieldError::NoStaticFields),
            Innards::Scalar(_) => Err(FieldError::NoStaticFields),
            Innards::List { .. } => Err(FieldError::NoStaticFields),
            Innards::Enum {
                variants: _,
                repr: _,
            } => {
                // Enum variants aren't supported yet for slot_by_name
                Err(FieldError::NotAStruct)
            }
        }
    }

    /// Returns a slot for initializing a field in the shape by index.
    pub fn slot_by_index(&mut self, index: usize) -> Result<Slot<'_>, FieldError> {
        let sh = self.shape.get();
        let field = sh.field_by_index(index)?;
        let field_addr = unsafe {
            // SAFETY: self.addr is a valid pointer to the start of the struct,
            // and field.offset is the correct offset for this field within the struct.
            // The resulting pointer is properly aligned and within the bounds of the allocated memory.
            self.addr.byte_add(field.offset)
        };
        let slot = Slot::for_ptr(field_addr, field.shape, self.init_set.field(index));
        Ok(slot)
    }

    fn assert_matching_shape<T: Shapely>(&self) {
        if self.shape != T::shape_desc() {
            let partial_shape = self.shape.get();
            let target_shape = T::shape();

            panic!(
                "This is a partial \x1b[1;34m{}\x1b[0m, you can't build a \x1b[1;32m{}\x1b[0m out of it",
                partial_shape, target_shape,
            );
        }
    }

    fn deallocate(&mut self) {
        // Borrowed memory isn't ours to free; only deallocate what we heap-allocated
        if !matches!(self.origin, Origin::HeapAllocated) {
            return;
        }
        // ZSTs don't need to be deallocated
        if self.shape.get().layout.size() != 0 {
            unsafe { alloc::dealloc(self.addr.as_ptr(), self.shape.get().layout) }
        }
    }

    /// Asserts that every field has been initialized and forgets the Partial.
    ///
    /// This method is only used when the origin is borrowed.
    /// If this method is not called, any initialized fields will be dropped when the Partial is dropped.
    ///
    /// # Panics
    ///
    /// This function will panic if:
    /// - The origin is not borrowed (i.e., it's heap allocated).
    /// - Any field is not initialized.
    pub fn build_in_place(mut self) {
        // ensure all fields are initialized
        self.assert_all_fields_initialized();

        match &mut self.origin {
            Origin::Borrowed { init_mark, .. } => {
                // Mark the borrowed field as initialized
                init_mark.set();
            }
            Origin::HeapAllocated => {
                panic!("Cannot build in place for heap allocated Partial");
            }
        }

        // prevent field drops when the Partial is dropped
        std::mem::forget(self);
    }

    /// Builds a value of type `T` from the partial representation.
    ///
    /// # Panics
    ///
    /// This function will panic if:
    /// - Not all the fields have been initialized.
    /// - The generic type parameter T does not match the shape that this partial is building.
    pub fn build<T: Shapely>(mut self) -> T {
        self.assert_all_fields_initialized();
        self.assert_matching_shape::<T>();

        let shape = self.shape.get();

        // Special handling for enums to ensure the correct variant is built
        if let crate::Innards::Enum { variants, repr } = &shape.innards {
            if !self.init_set.is_set(0) {
                panic!("Enum variant not selected");
            }

            // Check if explicit enum representation is used
            if let crate::EnumRepr::Default = repr {
                panic!(
                    "Enum must have an explicit representation (e.g. #[repr(u8)]). Default representation is not supported."
                );
            }

            if let Some(variant_idx) = self.selected_variant_index() {
                // Create a properly initialized result with the correct variant
                let mut result_mem = std::mem::MaybeUninit::<T>::uninit();

                unsafe {
                    // Zero out memory first for safety
                    std::ptr::write_bytes(
                        result_mem.as_mut_ptr() as *mut u8,
                        0,
                        std::mem::size_of::<T>(),
                    );

                    // Get the variant info
                    let variant = &variants[variant_idx];

                    // Set discriminant value - this is the key part for fixing the enum issue
                    let discriminant_value = match &variant.discriminant {
                        Some(disc) => *disc,
                        None => variant_idx as i64,
                    };

                    // Write the discriminant value based on the representation
                    match repr {
                        crate::EnumRepr::U8 => {
                            let tag_ptr = result_mem.as_mut_ptr() as *mut u8;
                            *tag_ptr = discriminant_value as u8;
                        }
                        crate::EnumRepr::U16 => {
                            let tag_ptr = result_mem.as_mut_ptr() as *mut u16;
                            *tag_ptr = discriminant_value as u16;
                        }
                        crate::EnumRepr::U32 => {
                            let tag_ptr = result_mem.as_mut_ptr() as *mut u32;
                            *tag_ptr = discriminant_value as u32;
                        }
                        crate::EnumRepr::U64 => {
                            let tag_ptr = result_mem.as_mut_ptr() as *mut u64;
                            *tag_ptr = discriminant_value as u64;
                        }
                        crate::EnumRepr::USize => {
                            let tag_ptr = result_mem.as_mut_ptr() as *mut usize;
                            *tag_ptr = discriminant_value as usize;
                        }
                        crate::EnumRepr::I8 => {
                            let tag_ptr = result_mem.as_mut_ptr() as *mut i8;
                            *tag_ptr = discriminant_value as i8;
                        }
                        crate::EnumRepr::I16 => {
                            let tag_ptr = result_mem.as_mut_ptr() as *mut i16;
                            *tag_ptr = discriminant_value as i16;
                        }
                        crate::EnumRepr::I32 => {
                            let tag_ptr = result_mem.as_mut_ptr() as *mut i32;
                            *tag_ptr = discriminant_value as i32;
                        }
                        crate::EnumRepr::I64 => {
                            let tag_ptr = result_mem.as_mut_ptr() as *mut i64;
                            *tag_ptr = discriminant_value;
                        }
                        crate::EnumRepr::ISize => {
                            let tag_ptr = result_mem.as_mut_ptr() as *mut isize;
                            *tag_ptr = discriminant_value as isize;
                        }
                        crate::EnumRepr::Default => {
                            // Use a heuristic based on the number of variants
                            if variants.len() <= 256 {
                                // Can fit in a u8
                                let tag_ptr = result_mem.as_mut_ptr() as *mut u8;
                                *tag_ptr = discriminant_value as u8;
                            } else if variants.len() <= 65536 {
                                // Can fit in a u16
                                let tag_ptr = result_mem.as_mut_ptr() as *mut u16;
                                *tag_ptr = discriminant_value as u16;
                            } else {
                                // Default to u32
                                let tag_ptr = result_mem.as_mut_ptr() as *mut u32;
                                *tag_ptr = discriminant_value as u32;
                            }
                        }
                    }

                    // For non-unit variants, copy the initialized fields
                    match &variant.kind {
                        crate::VariantKind::Tuple { fields } => {
                            // Copy the fields from our partial to the result
                            for field in fields.iter() {
                                let src_ptr = (self.addr.as_ptr() as *const u8).add(field.offset);
                                let dst_ptr =
                                    (result_mem.as_mut_ptr() as *mut u8).add(field.offset);
                                // Access the layout from the shape field
                                let size = field.shape.get().layout.size();
                                std::ptr::copy_nonoverlapping(src_ptr, dst_ptr, size);
                            }
                        }
                        crate::VariantKind::Struct { fields } => {
                            // Copy the fields from our partial to the result
                            for field in fields.iter() {
                                let src_ptr = (self.addr.as_ptr() as *const u8).add(field.offset);
                                let dst_ptr =
                                    (result_mem.as_mut_ptr() as *mut u8).add(field.offset);
                                // Access the layout from the shape field
                                let size = field.shape.get().layout.size();
                                std::ptr::copy_nonoverlapping(src_ptr, dst_ptr, size);
                            }
                        }
                        crate::VariantKind::Unit => {
                            // Nothing to copy for unit variants, just the discriminant is enough
                        }
                    }

                    // Return the completed enum
                    let result = result_mem.assume_init();
                    trace!("Built \x1b[1;33m{}\x1b[0m successfully", T::shape());
                    self.deallocate();
                    std::mem::forget(self);
                    return result;
                }
            }
        }

        // For non-enum types, use the original implementation
        let result = unsafe {
            let ptr = self.addr.as_ptr() as *const T;
            std::ptr::read(ptr)
        };
        trace!("Built \x1b[1;33m{}\x1b[0m successfully", T::shape());
        self.deallocate();
        std::mem::forget(self);
        result
    }

    /// Builds this partial into a boxed, completed shape.
    ///
    /// # Panics
    ///
    /// This function will panic if:
    /// - Not all the fields have been initialized.
    /// - The generic type parameter T does not match the shape that this partial is building.
    ///
    /// # Safety
    ///
    /// This function uses unsafe code to create a Box from a raw pointer.
    /// It's safe because we've verified the initialization and shape matching,
    /// and we forget `self` to prevent double-freeing.
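    ///
    /// # Example
    ///
    /// A minimal sketch, assuming a hypothetical `Point` struct that implements [`Shapely`]:
    ///
    /// ```ignore
    /// let mut partial = Partial::alloc(Point::shape_desc());
    /// // ... initialize every field through slots ...
    /// let boxed: Box<Point> = partial.build_boxed();
    /// ```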
    pub fn build_boxed<T: Shapely>(self) -> Box<T> {
        self.assert_all_fields_initialized();
        self.assert_matching_shape::<T>();

        let boxed = unsafe { Box::from_raw(self.addr.as_ptr() as *mut T) };
        std::mem::forget(self);
        boxed
    }

    /// Moves the contents of this `Partial` into a target memory location.
    ///
    /// This function is useful when you need to place the fully initialized value
    /// into a specific memory address, such as when working with FFI or custom allocators.
    ///
    /// # Safety
    ///
    /// The target pointer must be valid and properly aligned,
    /// and must be large enough to hold the value.
    /// The caller is responsible for ensuring that the target memory is properly deallocated
    /// when it's no longer needed.
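    ///
    /// # Example
    ///
    /// A minimal sketch, assuming a hypothetical `Point` struct that implements [`Shapely`]:
    ///
    /// ```ignore
    /// let mut partial = Partial::alloc(Point::shape_desc());
    /// // ... initialize every field through slots ...
    /// let mut target = std::mem::MaybeUninit::<Point>::uninit();
    /// let point = unsafe {
    ///     let target_ptr = std::ptr::NonNull::new(target.as_mut_ptr() as *mut u8).unwrap();
    ///     // SAFETY: `target` is valid, aligned, and large enough for a `Point`.
    ///     partial.move_into(target_ptr);
    ///     target.assume_init()
    /// };
    /// ```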
    pub unsafe fn move_into(mut self, target: NonNull<u8>) {
        self.assert_all_fields_initialized();
        unsafe {
            std::ptr::copy_nonoverlapping(
                self.addr.as_ptr(),
                target.as_ptr(),
                // note: copy_nonoverlapping takes a count,
                // since we're dealing with `*mut u8`, it's a byte count.
                // if we were dealing with `*mut ()`, we'd have a nasty surprise.
                self.shape.get().layout.size(),
            );
        }
        self.deallocate();
        std::mem::forget(self);
    }

    /// Returns the shape we're currently building.
    pub fn shape(&self) -> ShapeDesc {
        self.shape
    }

    /// Returns the address of the value we're building in memory.
    pub fn addr(&self) -> NonNull<u8> {
        self.addr
    }

    /// Sets the variant of an enum by name.
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The shape doesn't represent an enum.
    /// - No variant with the given name exists.
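    ///
    /// # Example
    ///
    /// A minimal sketch, assuming a hypothetical `#[repr(u8)] enum Status { Active, Inactive }`
    /// that implements [`Shapely`]:
    ///
    /// ```ignore
    /// let mut partial = Partial::alloc(Status::shape_desc());
    /// partial.set_variant_by_name("Active")?;
    /// // For variants with fields, fill them via `variant_field_by_name` before building.
    /// let status: Status = partial.build();
    /// ```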
    pub fn set_variant_by_name(&mut self, variant_name: &str) -> Result<(), crate::FieldError> {
        let shape = self.shape.get();

        if let crate::Innards::Enum { variants, repr: _ } = &shape.innards {
            let variant_index = variants
                .iter()
                .enumerate()
                .find(|(_, v)| v.name == variant_name)
                .map(|(i, _)| i)
                .ok_or(crate::FieldError::NoSuchStaticField)?;

            self.set_variant_by_index(variant_index)
        } else {
            Err(crate::FieldError::NotAStruct) // Using NotAStruct as a stand-in for "not an enum"
        }
    }

    /// Sets the variant of an enum by index.
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The shape doesn't represent an enum.
    /// - The index is out of bounds.
    pub fn set_variant_by_index(&mut self, variant_index: usize) -> Result<(), crate::FieldError> {
        let shape = self.shape.get();

        if let crate::Innards::Enum { variants, repr } = &shape.innards {
            if variant_index >= variants.len() {
                return Err(crate::FieldError::IndexOutOfBounds);
            }

            // Get the current variant info
            let variant = &variants[variant_index];

            // Prepare memory for the enum
            unsafe {
                // Zero out the memory first to ensure clean state
                std::ptr::write_bytes(self.addr.as_ptr(), 0, shape.layout.size());

                // Set up the discriminant (tag)
                // For enums in Rust, the first bytes contain the discriminant
                // By default, we should use the smallest type that can represent all variants
                let discriminant_value = match &variant.discriminant {
                    // If we have an explicit discriminant, use it
                    Some(discriminant) => *discriminant,
                    // Otherwise, use the variant index directly
                    None => variant_index as i64,
                };

                // Write the discriminant value based on the representation
                match repr {
                    crate::EnumRepr::U8 => {
                        let tag_ptr = self.addr.as_ptr();
                        *tag_ptr = discriminant_value as u8;
                    }
                    crate::EnumRepr::U16 => {
                        let tag_ptr = self.addr.as_ptr() as *mut u16;
                        *tag_ptr = discriminant_value as u16;
                    }
                    crate::EnumRepr::U32 => {
                        let tag_ptr = self.addr.as_ptr() as *mut u32;
                        *tag_ptr = discriminant_value as u32;
                    }
                    crate::EnumRepr::U64 => {
                        let tag_ptr = self.addr.as_ptr() as *mut u64;
                        *tag_ptr = discriminant_value as u64;
                    }
                    crate::EnumRepr::USize => {
                        let tag_ptr = self.addr.as_ptr() as *mut usize;
                        *tag_ptr = discriminant_value as usize;
                    }
                    crate::EnumRepr::I8 => {
                        let tag_ptr = self.addr.as_ptr() as *mut i8;
                        *tag_ptr = discriminant_value as i8;
                    }
                    crate::EnumRepr::I16 => {
                        let tag_ptr = self.addr.as_ptr() as *mut i16;
                        *tag_ptr = discriminant_value as i16;
                    }
                    crate::EnumRepr::I32 => {
                        let tag_ptr = self.addr.as_ptr() as *mut i32;
                        *tag_ptr = discriminant_value as i32;
                    }
                    crate::EnumRepr::I64 => {
                        let tag_ptr = self.addr.as_ptr() as *mut i64;
                        *tag_ptr = discriminant_value;
                    }
                    crate::EnumRepr::ISize => {
                        let tag_ptr = self.addr.as_ptr() as *mut isize;
                        *tag_ptr = discriminant_value as isize;
                    }
                    crate::EnumRepr::Default => {
                        // Use a heuristic based on the number of variants
                        if variants.len() <= 256 {
                            // Can fit in a u8
                            let tag_ptr = self.addr.as_ptr();
                            *tag_ptr = discriminant_value as u8;
                        } else if variants.len() <= 65536 {
                            // Can fit in a u16
                            let tag_ptr = self.addr.as_ptr() as *mut u16;
                            *tag_ptr = discriminant_value as u16;
                        } else {
                            // Default to u32
                            let tag_ptr = self.addr.as_ptr() as *mut u32;
                            *tag_ptr = discriminant_value as u32;
                        }
                    }
                }
            }

            // Mark the variant as selected (bit 0)
            self.init_set.set(0);

            // Reset all field initialization bits (starting from bit 1)
            // InitSet64 can hold 64 bits, so we'll clear bits 1-63
            for i in 1..64 {
                self.init_set.unset(i);
            }

            Ok(())
        } else {
            Err(crate::FieldError::NotAStruct) // Using NotAStruct as a stand-in for "not an enum"
        }
    }

    /// Returns the currently selected variant index, if any.
    pub fn selected_variant_index(&self) -> Option<usize> {
        if !self.init_set.is_set(0) {
            return None;
        }

        let shape = self.shape.get();

        // We need to read the discriminant and map it back to the variant index
        if let crate::Innards::Enum { variants, repr } = &shape.innards {
            unsafe {
                // Attempt to read the tag based on the representation
                let discriminant_value = match repr {
                    crate::EnumRepr::U8 => {
                        let tag_ptr = self.addr.as_ptr() as *const u8;
                        *tag_ptr as i64
                    }
                    crate::EnumRepr::U16 => {
                        let tag_ptr = self.addr.as_ptr() as *const u16;
                        *tag_ptr as i64
                    }
                    crate::EnumRepr::U32 => {
                        let tag_ptr = self.addr.as_ptr() as *const u32;
                        *tag_ptr as i64
                    }
                    crate::EnumRepr::U64 => {
                        let tag_ptr = self.addr.as_ptr() as *const u64;
                        *tag_ptr as i64
                    }
                    crate::EnumRepr::USize => {
                        let tag_ptr = self.addr.as_ptr() as *const usize;
                        *tag_ptr as i64
                    }
                    crate::EnumRepr::I8 => {
                        let tag_ptr = self.addr.as_ptr() as *const i8;
                        *tag_ptr as i64
                    }
                    crate::EnumRepr::I16 => {
                        let tag_ptr = self.addr.as_ptr() as *const i16;
                        *tag_ptr as i64
                    }
                    crate::EnumRepr::I32 => {
                        let tag_ptr = self.addr.as_ptr() as *const i32;
                        *tag_ptr as i64
                    }
                    crate::EnumRepr::I64 => {
                        let tag_ptr = self.addr.as_ptr() as *const i64;
                        *tag_ptr
                    }
                    crate::EnumRepr::ISize => {
                        let tag_ptr = self.addr.as_ptr() as *const isize;
                        *tag_ptr as i64
                    }
                    crate::EnumRepr::Default => {
                        // Use a heuristic based on the number of variants
                        if variants.len() <= 256 {
                            // Likely a u8 discriminant
                            let tag_ptr = self.addr.as_ptr() as *const u8;
                            *tag_ptr as i64
                        } else if variants.len() <= 65536 {
                            // Likely a u16 discriminant
                            let tag_ptr = self.addr.as_ptr() as *const u16;
                            *tag_ptr as i64
                        } else {
                            // Default to u32
                            let tag_ptr = self.addr.as_ptr() as *const u32;
                            *tag_ptr as i64
                        }
                    }
                };

                // Find the variant with this discriminant or index
                // Try matching by discriminant first
                for (idx, variant) in variants.iter().enumerate() {
                    if let Some(disc) = variant.discriminant {
                        if disc == discriminant_value {
                            return Some(idx);
                        }
                    } else if idx as i64 == discriminant_value {
                        // Fallback to index-based match
                        return Some(idx);
                    }
                }

                // If we couldn't find a match, but we know a variant is selected,
                // assume it's the variant at the discriminant index if in bounds
                if (discriminant_value as usize) < variants.len() {
                    return Some(discriminant_value as usize);
                }
            }
        }

        None
    }

    /// Get a slot for a field in the currently selected variant.
    ///
    /// # Errors
    ///
    /// Returns an error if:
    /// - The shape doesn't represent an enum.
    /// - No variant has been selected yet.
    /// - The field name doesn't exist in the selected variant.
    /// - The selected variant is a unit variant (which has no fields).
    pub fn variant_field_by_name<'s>(
        &'s mut self,
        name: &str,
    ) -> Result<Slot<'s>, crate::FieldError> {
        let variant_index = self
            .selected_variant_index()
            .ok_or(crate::FieldError::NotAStruct)?; // Using NotAStruct as a stand-in for "no variant selected"

        let shape = self.shape.get();
        if let crate::Innards::Enum { variants, repr: _ } = &shape.innards {
            let variant = &variants[variant_index];

            // Find the field in the variant
            match &variant.kind {
                crate::VariantKind::Unit => {
                    // Unit variants have no fields
                    Err(crate::FieldError::NoSuchStaticField)
                }
                crate::VariantKind::Tuple { fields } => {
                    // For tuple variants, find the field by name
                    let (field_index, field) = fields
                        .iter()
                        .enumerate()
                        .find(|(_, f)| f.name == name)
                        .ok_or(crate::FieldError::NoSuchStaticField)?;

                    // The field's initialization bit is offset by 1 (since bit 0 is used for variant selection)
                    let init_bit = field_index + 1;

                    // Get the field's address
                    let field_addr = unsafe {
                        // NOTE: this trusts `field.offset` to be valid relative to the enum's base
                        // address, which assumes a simple layout; it may not hold for every variant layout.
                        self.addr.byte_add(field.offset)
                    };

                    Ok(Slot::for_ptr(
                        field_addr,
                        field.shape,
                        self.init_set.field(init_bit),
                    ))
                }
                crate::VariantKind::Struct { fields } => {
                    // For struct variants, find the field by name
                    let (field_index, field) = fields
                        .iter()
                        .enumerate()
                        .find(|(_, f)| f.name == name)
                        .ok_or(crate::FieldError::NoSuchStaticField)?;

                    // The field's initialization bit is offset by 1 (since bit 0 is used for variant selection)
                    let init_bit = field_index + 1;

                    // Get the field's address
                    let field_addr = unsafe {
                        // NOTE: this trusts `field.offset` to be valid relative to the enum's base
                        // address, which assumes a simple layout; it may not hold for every variant layout.
                        self.addr.byte_add(field.offset)
                    };

                    Ok(Slot::for_ptr(
                        field_addr,
                        field.shape,
                        self.init_set.field(init_bit),
                    ))
                }
            }
        } else {
            Err(crate::FieldError::NotAStruct)
        }
    }
}

/// A bit array to keep track of which fields were initialized, up to 64 fields
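///
/// # Example
///
/// A self-contained sketch of the bookkeeping (indices past 63 panic):
///
/// ```ignore
/// let mut set = InitSet64::default();
/// set.set(0);
/// set.set(1);
/// assert!(set.is_set(0));
/// assert!(!set.is_set(2));
/// assert!(set.all_set(2));
/// set.unset(1);
/// assert!(!set.all_set(2));
/// ```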
#[derive(Clone, Copy, Default)]
pub struct InitSet64(u64);

impl InitSet64 {
    /// Sets the bit at the given index.
    pub fn set(&mut self, index: usize) {
        if index >= 64 {
            panic!("InitSet64 can only track up to 64 fields. Index {index} is out of bounds.");
        }
        self.0 |= 1 << index;
    }

    /// Unsets the bit at the given index.
    pub fn unset(&mut self, index: usize) {
        if index >= 64 {
            panic!("InitSet64 can only track up to 64 fields. Index {index} is out of bounds.");
        }
        self.0 &= !(1 << index);
    }

    /// Checks if the bit at the given index is set.
    pub fn is_set(&self, index: usize) -> bool {
        if index >= 64 {
            panic!("InitSet64 can only track up to 64 fields. Index {index} is out of bounds.");
        }
        (self.0 & (1 << index)) != 0
    }

    /// Checks if all bits up to the given count are set.
    pub fn all_set(&self, count: usize) -> bool {
        if count > 64 {
            panic!("InitSet64 can only track up to 64 fields. Count {count} is out of bounds.");
        }
        // `1u64 << 64` would overflow, so handle the full-width case separately.
        let mask = if count == 64 {
            u64::MAX
        } else {
            (1u64 << count) - 1
        };
        self.0 & mask == mask
    }

    /// Gets an [InitMark] to track the initialization state of a single field
    pub fn field(&mut self, index: usize) -> InitMark {
        InitMark::Struct { index, set: self }
    }
}

/// `InitMark` is used to track the initialization state of a single field within an `InitSet64`.
/// It is part of a system used to progressively initialize structs, where each field's
/// initialization status is represented by a bit in a 64-bit set.
pub enum InitMark<'s> {
    /// Represents a field in a struct that needs to be tracked for initialization.
    Struct {
        /// The index of the field in the struct (0-63).
        index: usize,
        /// A reference to the `InitSet64` that tracks all fields' initialization states.
        set: &'s mut InitSet64,
    },
    /// Represents a field or value that doesn't need initialization tracking.
    Ignored,
}

impl InitMark<'_> {
    /// Marks the field as initialized by setting its corresponding bit in the `InitSet64`.
    pub fn set(&mut self) {
        if let Self::Struct { index, set } = self {
            set.set(*index);
        }
    }

    /// Marks the field as uninitialized by clearing its corresponding bit in the `InitSet64`.
    pub fn unset(&mut self) {
        if let Self::Struct { index, set } = self {
            set.0 &= !(1 << *index);
        }
    }

    /// Checks if the field is marked as initialized.
    ///
    /// Returns `true` if the field is initialized, `false` otherwise.
    /// Always returns `true` for `Ignored` fields.
    pub fn get(&self) -> bool {
        match self {
            Self::Struct { index, set } => set.is_set(*index),
            Self::Ignored => true,
        }
    }
}

/// A helper struct to fill up arrays — note that it is designed for `Vec<T>`
/// rather than fixed-size arrays or slices, so it's a bit of a misnomer at the moment.
pub struct ArraySlot {
    pub(crate) addr: NonNull<u8>,
    pub(crate) vtable: ListVTable,
}

impl ArraySlot {
    /// Create a new ArraySlot with the given address and vtable
    pub(crate) unsafe fn new(addr: NonNull<u8>, vtable: ListVTable) -> Self {
        Self { addr, vtable }
    }

    /// Push a partial value onto the array
    ///
    /// # Safety
    ///
    /// This function uses unsafe code to push a value into the array.
    /// It's safe to use because the vtable's push function handles
    /// proper memory management and initialization.
    pub fn push(&mut self, partial: crate::Partial) {
        // Call the vtable's push function to add the item to the array
        unsafe {
            (self.vtable.push)(self.addr.as_ptr(), partial);
        }
    }
}

/// Provides insert, length check, and iteration over a type-erased hashmap
pub struct HashMapSlot {
    pub(crate) addr: NonNull<u8>,
    pub(crate) vtable: crate::MapVTable,
}

impl HashMapSlot {
    /// Create a new HashMapSlot with the given address and vtable
    pub(crate) unsafe fn new(addr: NonNull<u8>, vtable: crate::MapVTable) -> Self {
        Self { addr, vtable }
    }

    /// Insert a key-value pair into the HashMap
    ///
    /// # Safety
    ///
    /// This function uses unsafe code to insert a key-value pair into the HashMap.
    /// It's safe to use because the vtable's insert function handles
    /// proper memory management and initialization.
    pub fn insert(&mut self, key: crate::Partial, value: crate::Partial) {
        // Call the vtable's insert function to add the key-value pair to the HashMap
        unsafe {
            (self.vtable.insert)(self.addr.as_ptr(), key, value);
        }
    }

    /// Get the number of entries in the HashMap
    pub fn len(&self) -> usize {
        unsafe { (self.vtable.len)(self.addr.as_ptr()) }
    }

    /// Check if the HashMap is empty
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Check if the HashMap contains a key
    pub fn contains_key(&self, key: &str) -> bool {
        unsafe { (self.vtable.contains_key)(self.addr.as_ptr(), key) }
    }
}

/// An iterator over key-value pairs in a HashMap
pub struct HashMapIter {
    iter_ptr: *const u8,
    vtable: crate::MapIterVTable,
}

impl HashMapIter {
    /// Get the next key-value pair from the iterator
    pub fn next(&self) -> Option<(&str, *const u8)> {
        let (k, v) = unsafe { (self.vtable.next)(self.iter_ptr)? };
        let k = unsafe { (*k).as_str() };
        Some((k, v))
    }
}

impl Drop for HashMapIter {
    fn drop(&mut self) {
        unsafe {
            (self.vtable.dealloc)(self.iter_ptr);
        }
    }
}