// facet_reflect/partial/mod.rs
//! Partial value construction for dynamic reflection
//!
//! This module provides APIs for incrementally building values through reflection,
//! particularly useful when deserializing data from external formats like JSON or YAML.
//!
//! # Overview
//!
//! The `Partial` type (formerly known as `Wip` - Work In Progress) allows you to:
//! - Allocate memory for a value based on its `Shape`
//! - Initialize fields incrementally in a type-safe manner
//! - Handle complex nested structures including structs, enums, collections, and smart pointers
//! - Build the final value once all required fields are initialized
//!
//! **Note**: This is the only API for partial value construction. The previous `TypedPartial`
//! wrapper has been removed in favor of using `Partial` directly.
//!
//! # Basic Usage
//!
//! ```no_run
//! # use facet_reflect::Partial;
//! # use facet_core::{Shape, Facet};
//! # fn example<T: Facet<'static>>() -> Result<(), Box<dyn std::error::Error>> {
//! // Allocate memory for a struct
//! let mut partial = Partial::alloc::<T>()?;
//!
//! // Set simple fields
//! partial = partial.set_field("name", "Alice")?;
//! partial = partial.set_field("age", 30u32)?;
//!
//! // Work with nested structures
//! partial = partial.begin_field("address")?;
//! partial = partial.set_field("street", "123 Main St")?;
//! partial = partial.set_field("city", "Springfield")?;
//! partial = partial.end()?;
//!
//! // Build the final value
//! let value = partial.build()?;
//! # Ok(())
//! # }
//! ```
//!
//! # Chaining Style
//!
//! The API supports method chaining for cleaner code:
//!
//! ```no_run
//! # use facet_reflect::Partial;
//! # use facet_core::{Shape, Facet};
//! # fn example<T: Facet<'static>>() -> Result<(), Box<dyn std::error::Error>> {
//! let value = Partial::alloc::<T>()?
//!     .set_field("name", "Bob")?
//!     .begin_field("scores")?
//!         .set(vec![95, 87, 92])?
//!     .end()?
//!     .build()?;
//! # Ok(())
//! # }
//! ```
//!
//! # Working with Collections
//!
//! ```no_run
//! # use facet_reflect::Partial;
//! # use facet_core::{Shape, Facet};
//! # fn example() -> Result<(), Box<dyn std::error::Error>> {
//! let mut partial = Partial::alloc::<Vec<String>>()?;
//!
//! // Add items to a list
//! partial = partial.begin_list_item()?;
//! partial = partial.set("first")?;
//! partial = partial.end()?;
//!
//! partial = partial.begin_list_item()?;
//! partial = partial.set("second")?;
//! partial = partial.end()?;
//!
//! let vec = partial.build()?;
//! # Ok(())
//! # }
//! ```
//!
//! # Working with Maps
//!
//! ```no_run
//! # use facet_reflect::Partial;
//! # use facet_core::{Shape, Facet};
//! # use std::collections::HashMap;
//! # fn example() -> Result<(), Box<dyn std::error::Error>> {
//! let mut partial = Partial::alloc::<HashMap<String, i32>>()?;
//!
//! // Insert key-value pairs
//! partial = partial.begin_key()?;
//! partial = partial.set("score")?;
//! partial = partial.end()?;
//! partial = partial.begin_value()?;
//! partial = partial.set(100i32)?;
//! partial = partial.end()?;
//!
//! let map = partial.build()?;
//! # Ok(())
//! # }
//! ```
//!
//! # Safety and Memory Management
//!
//! The `Partial` type ensures memory safety by:
//! - Tracking initialization state of all fields
//! - Preventing use-after-build through state tracking
//! - Properly handling drop semantics for partially initialized values
//! - Supporting both owned and borrowed values through lifetime parameters
111
112use alloc::{collections::BTreeMap, sync::Arc, vec::Vec};
113
114mod arena;
115mod iset;
116mod rope;
117pub(crate) mod typeplan;
118pub use typeplan::{DeserStrategy, NodeId, TypePlan, TypePlanCore};
119
120mod partial_api;
121
122use crate::{ReflectErrorKind, TrackerKind, trace};
123use facet_core::Facet;
124use facet_path::{Path, PathStep};
125
126use core::marker::PhantomData;
127
128mod heap_value;
129pub use heap_value::*;
130
131use facet_core::{
132    Def, EnumType, Field, PtrMut, PtrUninit, Shape, SliceBuilderVTable, Type, UserType, Variant,
133};
134use iset::ISet;
135use rope::ListRope;
136use typeplan::{FieldDefault, FieldInitPlan, FillRule};
137
/// Lifecycle state of a partial value.
///
/// Used to prevent use-after-build: once a value has been built, the
/// `Partial` cannot be modified or built again.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum PartialState {
    /// Partial is active and can be modified.
    Active,

    /// Partial has been successfully built and cannot be reused.
    Built,
}
147
/// Mode of operation for frame management.
///
/// In `Strict` mode, frames must be fully initialized before being popped.
/// In `Deferred` mode, frames can be stored when popped and restored on re-entry,
/// with final validation happening in `finish_deferred()`.
enum FrameMode {
    /// Strict mode: frames must be fully initialized before popping.
    Strict {
        /// Stack of frames for nested initialization.
        stack: Vec<Frame>,
    },

    /// Deferred mode: frames are stored when popped, and can be re-entered later.
    Deferred {
        /// Stack of frames for nested initialization.
        stack: Vec<Frame>,

        /// The frame depth when deferred mode was started.
        /// Path calculations are relative to this depth.
        start_depth: usize,

        /// Frames saved when popped, keyed by their path (derived from the frame stack).
        /// When we re-enter a path, we restore the stored frame.
        /// Uses the full `Path` type which includes the root shape for proper type anchoring.
        stored_frames: BTreeMap<Path, Frame>,
    },
}
175
176impl FrameMode {
177    /// Get a reference to the frame stack.
178    const fn stack(&self) -> &Vec<Frame> {
179        match self {
180            FrameMode::Strict { stack } | FrameMode::Deferred { stack, .. } => stack,
181        }
182    }
183
184    /// Get a mutable reference to the frame stack.
185    const fn stack_mut(&mut self) -> &mut Vec<Frame> {
186        match self {
187            FrameMode::Strict { stack } | FrameMode::Deferred { stack, .. } => stack,
188        }
189    }
190
191    /// Check if we're in deferred mode.
192    const fn is_deferred(&self) -> bool {
193        matches!(self, FrameMode::Deferred { .. })
194    }
195
196    /// Get the start depth if in deferred mode.
197    const fn start_depth(&self) -> Option<usize> {
198        match self {
199            FrameMode::Deferred { start_depth, .. } => Some(*start_depth),
200            FrameMode::Strict { .. } => None,
201        }
202    }
203}
204
/// A type-erased, heap-allocated, partially-initialized value.
///
/// [Partial] keeps track of the state of initialization of the underlying
/// value: if we're building `struct S { a: u32, b: String }`, we may
/// have initialized `a`, or `b`, or both, or neither.
///
/// [Partial] allows navigating down nested structs and initializing them
/// progressively: [Partial::begin_field] pushes a frame onto the stack,
/// which then has to be initialized, and popped off with [Partial::end].
///
/// If [Partial::end] is called but the current frame isn't fully initialized,
/// an error is returned: in other words, if you navigate down to a field,
/// you have to fully initialize it in one go. You can't go back up and back down
/// to it again.
pub struct Partial<'facet, const BORROW: bool = true> {
    /// Frame management mode (strict or deferred) and associated state.
    mode: FrameMode,

    /// Current lifecycle state of the Partial (active or already built).
    state: PartialState,

    /// Precomputed deserialization plan for the root type.
    /// Built once at allocation time, navigated in parallel with value construction.
    /// Each Frame holds a NodeId (index) into this plan's arenas.
    root_plan: Arc<TypePlanCore>,

    /// PhantomData marker for the 'facet lifetime.
    /// This is covariant in 'facet, which is safe because 'facet represents
    /// the lifetime of borrowed data FROM the input (deserialization source).
    /// A Partial<'long, ...> can be safely treated as Partial<'short, ...>
    /// because it only needs borrowed data to live at least as long as 'short.
    _marker: PhantomData<&'facet ()>,
}
238
/// State machine for a single key-value insertion into a map frame.
///
/// Transitions: `Idle` -> `PushingKey` -> `PushingValue` -> back to `Idle`.
#[derive(Clone, Copy, Debug)]
pub(crate) enum MapInsertState {
    /// Not currently inserting.
    Idle,

    /// Pushing a key - memory allocated, waiting for initialization.
    PushingKey {
        /// Temporary storage for the key being built
        key_ptr: PtrUninit,
        /// Whether the key has been fully initialized
        key_initialized: bool,
        /// Whether the key's TrackedBuffer frame is still on the stack.
        /// When true, the frame handles cleanup. When false (after end()),
        /// the Map tracker owns the buffer and must clean it up.
        key_frame_on_stack: bool,
    },

    /// Pushing a value after the key is done.
    PushingValue {
        /// Temporary storage for the key that was built (always initialized)
        key_ptr: PtrUninit,
        /// Temporary storage for the value being built
        value_ptr: Option<PtrUninit>,
        /// Whether the value has been fully initialized
        value_initialized: bool,
        /// Whether the value's TrackedBuffer frame is still on the stack.
        /// When true, the frame handles cleanup. When false (after end()),
        /// the Map tracker owns the buffer and must clean it up.
        value_frame_on_stack: bool,
        /// Whether the key's frame was stored in deferred mode.
        /// When true, the stored frame handles cleanup. When false,
        /// the Map tracker owns the key buffer and must clean it up.
        key_frame_stored: bool,
    },
}
274
/// Describes who owns the memory a [`Frame`] points at, which determines
/// what cleanup (drop and/or dealloc) the frame performs.
#[derive(Debug, Clone, Copy)]
pub(crate) enum FrameOwnership {
    /// This frame owns the allocation and should deallocate it on drop.
    Owned,

    /// This frame points to a field/element within a parent's allocation.
    /// The parent's `iset[field_idx]` was CLEARED when this frame was created.
    /// On drop: deinit if initialized, but do NOT deallocate.
    /// On successful end(): parent's `iset[field_idx]` will be SET.
    Field { field_idx: usize },

    /// Temporary buffer tracked by parent's MapInsertState.
    /// Used by begin_key(), begin_value() for map insertions.
    /// Safe to drop on deinit - parent's cleanup respects is_init propagation.
    TrackedBuffer,

    /// Pointer into existing collection entry (Value object, Option inner, etc.)
    /// Used by begin_object_entry() on existing key, begin_some() re-entry.
    /// NOT safe to drop on deinit - parent collection has no per-entry tracking
    /// and would try to drop the freed value again (double-free).
    BorrowedInPlace,

    /// Pointer to externally-owned memory (e.g., caller's stack via MaybeUninit).
    /// Used by `from_raw()` for stack-friendly deserialization.
    /// On drop: deinit if initialized (drop partially constructed values), but do NOT deallocate.
    /// The caller owns the memory and is responsible for its lifetime.
    External,

    /// Points into a stable rope chunk for list element building.
    /// Used by `begin_list_item()` for building Vec elements.
    /// The memory is stable (won't move during Vec growth),
    /// so frames inside can be stored for deferred processing.
    /// On successful end(): element is tracked for later finalization.
    /// On list frame end(): all elements are moved into the real Vec.
    /// On drop/failure: the rope chunk handles cleanup.
    RopeSlot,
}
312
313impl FrameOwnership {
314    /// Returns true if this frame is responsible for deallocating its memory.
315    ///
316    /// Both `Owned` and `TrackedBuffer` frames allocated their memory and need
317    /// to deallocate it. `Field`, `BorrowedInPlace`, and `External` frames borrow from
318    /// parent, existing structures, or caller-provided memory.
319    const fn needs_dealloc(&self) -> bool {
320        matches!(self, FrameOwnership::Owned | FrameOwnership::TrackedBuffer)
321    }
322}
323
/// Immutable pairing of a shape with its actual allocation size.
///
/// This ensures that the shape and allocated size are always in sync and cannot
/// drift apart, preventing the class of bugs where a frame's shape doesn't match
/// what was actually allocated (see issue #1568).
pub(crate) struct AllocatedShape {
    // Shape describing the type stored at the allocation.
    shape: &'static Shape,
    // Size, in bytes, that was actually allocated for this shape.
    allocated_size: usize,
}

impl AllocatedShape {
    /// Pair `shape` with the size that was actually allocated for it.
    pub(crate) const fn new(shape: &'static Shape, allocated_size: usize) -> Self {
        Self {
            shape,
            allocated_size,
        }
    }

    /// The shape of the value stored in the allocation.
    pub(crate) const fn shape(&self) -> &'static Shape {
        self.shape
    }

    /// The number of bytes that were allocated for this value.
    pub(crate) const fn allocated_size(&self) -> usize {
        self.allocated_size
    }
}
350
/// Points somewhere in a partially-initialized value. If we're initializing
/// `a.b.c`, then the first frame would point to the beginning of `a`, the
/// second to the beginning of the `b` field of `a`, etc.
///
/// A frame can point to a complex data structure, like a struct or an enum:
/// it keeps track of whether a variant was selected, which fields are initialized,
/// etc., and is able to drop & deinitialize whatever was initialized so far.
#[must_use]
pub(crate) struct Frame {
    /// Address of the value being initialized
    pub(crate) data: PtrUninit,

    /// Shape of the value being initialized, paired with the actual allocation size
    pub(crate) allocated: AllocatedShape,

    /// Whether this frame's data is fully initialized
    pub(crate) is_init: bool,

    /// Tracks building mode and partial initialization state
    pub(crate) tracker: Tracker,

    /// Whether this frame owns the allocation or is just a field pointer
    pub(crate) ownership: FrameOwnership,

    /// Whether this frame is for a custom deserialization pipeline
    pub(crate) using_custom_deserialization: bool,

    /// Container-level proxy definition (from `#[facet(proxy = ...)]` on the shape).
    /// Used during custom deserialization to convert from proxy type to target type.
    pub(crate) shape_level_proxy: Option<&'static facet_core::ProxyDef>,

    /// Index of the precomputed TypePlan node for this frame's type.
    /// This is navigated in parallel with the value - when we begin_nth_field,
    /// the new frame gets the index for that field's child plan node.
    /// Use `plan.node(type_plan)` to get the actual `&TypePlanNode`.
    /// Always present - TypePlan is built for what we actually deserialize into
    /// (including proxies).
    pub(crate) type_plan: typeplan::NodeId,
}
390
/// Per-frame tracking of how a value is being built and which parts of it
/// are initialized so far. Each variant corresponds to a category of type
/// (scalar, struct, enum, list, map, ...).
#[derive(Debug)]
pub(crate) enum Tracker {
    /// Simple scalar value - no partial initialization tracking needed.
    /// Whether it's initialized is tracked by `Frame::is_init`.
    Scalar,

    /// Partially initialized array
    Array {
        /// Track which array elements are initialized (up to 63 elements)
        iset: ISet,
        /// If we're pushing another frame, this is set to the array index
        current_child: Option<usize>,
    },

    /// Partially initialized struct/tuple-struct etc.
    Struct {
        /// fields need to be individually tracked — we only
        /// support up to 63 fields.
        iset: ISet,
        /// if we're pushing another frame, this is set to the index of the struct field
        current_child: Option<usize>,
    },

    /// Smart pointer being initialized.
    /// Whether it's initialized is tracked by `Frame::is_init`.
    SmartPointer {
        /// Whether we're currently building the inner value
        building_inner: bool,
        /// Pending inner value pointer to be moved with new_into_fn on finalization.
        /// Deferred processing requires keeping the inner value's memory stable,
        /// so we delay the new_into_fn() call until the SmartPointer frame is finalized.
        /// None = no pending inner, Some = inner value ready to be moved into SmartPointer.
        pending_inner: Option<PtrUninit>,
    },

    /// We're initializing an `Arc<[T]>`, `Box<[T]>`, `Rc<[T]>`, etc.
    ///
    /// We're using the slice builder API to construct the slice
    SmartPointerSlice {
        /// The slice builder vtable
        vtable: &'static SliceBuilderVTable,

        /// Whether we're currently building an item to push
        building_item: bool,

        /// Current element index being built (for path derivation in deferred mode)
        current_child: Option<usize>,
    },

    /// Transparent inner type wrapper (`NonZero<T>`, ByteString, etc.)
    /// Used to distinguish inner frames from their parent for deferred path tracking.
    Inner {
        /// Whether we're currently building the inner value
        building_inner: bool,
    },

    /// Partially initialized enum (but we picked a variant,
    /// so it's not Uninit)
    Enum {
        /// Variant chosen for the enum
        variant: &'static Variant,
        /// Index of the variant in the enum's variants array
        variant_idx: usize,
        /// tracks enum fields (for the given variant)
        data: ISet,
        /// If we're pushing another frame, this is set to the field index
        current_child: Option<usize>,
    },

    /// Partially initialized list (Vec, etc.)
    /// Whether it's initialized is tracked by `Frame::is_init`.
    List {
        /// If we're pushing another frame for an element, this is the element index
        current_child: Option<usize>,
        /// Stable rope storage for elements during list building.
        /// A rope is a list of fixed-size chunks - chunks never reallocate, only new
        /// chunks are added. This keeps element pointers stable, enabling deferred
        /// frame processing for nested structs inside Vec elements.
        /// On finalization, elements are moved into the real Vec.
        rope: Option<ListRope>,
    },

    /// Partially initialized map (HashMap, BTreeMap, etc.)
    /// Whether it's initialized is tracked by `Frame::is_init`.
    Map {
        /// State of the current insertion operation
        insert_state: MapInsertState,
        /// Pending key-value entries to be inserted on map finalization.
        /// Deferred processing requires keeping buffers alive until finish_deferred(),
        /// so we delay actual insertion until the map frame is finalized.
        /// Each entry is (key_ptr, value_ptr) - both are initialized and owned by this tracker.
        pending_entries: Vec<(PtrUninit, PtrUninit)>,
        /// The current entry index, used for building unique paths for deferred frame storage.
        /// Incremented each time we start a new key (in begin_key).
        /// This allows inner frames of different map entries to have distinct paths.
        current_entry_index: Option<usize>,
        /// Whether we're currently building a key (true) or value (false).
        /// Used to determine whether to push MapKey or MapValue to the path.
        building_key: bool,
    },

    /// Partially initialized set (HashSet, BTreeSet, etc.)
    /// Whether it's initialized is tracked by `Frame::is_init`.
    Set {
        /// If we're pushing another frame for an element
        current_child: bool,
    },

    /// Option being initialized with Some(inner_value)
    Option {
        /// Whether we're currently building the inner value
        building_inner: bool,
        /// Pending inner value pointer to be moved with init_some on finalization.
        /// Deferred processing requires keeping the inner value's memory stable,
        /// so we delay the init_some() call until the Option frame is finalized.
        /// None = no pending inner, Some = inner value ready to be moved into Option.
        pending_inner: Option<PtrUninit>,
    },

    /// Result being initialized with Ok or Err
    Result {
        /// Whether we're building Ok (true) or Err (false)
        is_ok: bool,
        /// Whether we're currently building the inner value
        building_inner: bool,
    },

    /// Dynamic value (e.g., facet_value::Value) being initialized
    DynamicValue {
        /// What kind of dynamic value we're building
        state: DynamicValueState,
    },
}
524
/// State for building a dynamic value
#[derive(Debug)]
#[allow(dead_code)] // Some variants are for future use (object support)
pub(crate) enum DynamicValueState {
    /// Not yet initialized - will be set to scalar, array, or object
    Uninit,
    /// Initialized as a scalar (null, bool, number, string, bytes)
    Scalar,
    /// Initialized as an array, currently building an element
    Array {
        /// Whether an element frame is currently being built
        building_element: bool,
        /// Pending elements to be inserted during finalization (deferred mode)
        pending_elements: alloc::vec::Vec<PtrUninit>,
    },
    /// Initialized as an object
    Object {
        /// State of the current key-value insertion
        insert_state: DynamicObjectInsertState,
        /// Pending entries to be inserted during finalization (deferred mode)
        pending_entries: alloc::vec::Vec<(alloc::string::String, PtrUninit)>,
    },
}
546
/// State for inserting into a dynamic object
#[derive(Debug)]
#[allow(dead_code)] // For future use (object support)
pub(crate) enum DynamicObjectInsertState {
    /// Idle - ready for a new key-value pair
    Idle,
    /// Currently building the value for a key
    BuildingValue {
        /// The key for the current entry
        key: alloc::string::String,
    },
}
559
560impl Tracker {
561    const fn kind(&self) -> TrackerKind {
562        match self {
563            Tracker::Scalar => TrackerKind::Scalar,
564            Tracker::Array { .. } => TrackerKind::Array,
565            Tracker::Struct { .. } => TrackerKind::Struct,
566            Tracker::SmartPointer { .. } => TrackerKind::SmartPointer,
567            Tracker::SmartPointerSlice { .. } => TrackerKind::SmartPointerSlice,
568            Tracker::Enum { .. } => TrackerKind::Enum,
569            Tracker::List { .. } => TrackerKind::List,
570            Tracker::Map { .. } => TrackerKind::Map,
571            Tracker::Set { .. } => TrackerKind::Set,
572            Tracker::Option { .. } => TrackerKind::Option,
573            Tracker::Result { .. } => TrackerKind::Result,
574            Tracker::DynamicValue { .. } => TrackerKind::DynamicValue,
575            Tracker::Inner { .. } => TrackerKind::Inner,
576        }
577    }
578
579    /// Set the current_child index for trackers that support it
580    const fn set_current_child(&mut self, idx: usize) {
581        match self {
582            Tracker::Struct { current_child, .. }
583            | Tracker::Enum { current_child, .. }
584            | Tracker::Array { current_child, .. } => {
585                *current_child = Some(idx);
586            }
587            _ => {}
588        }
589    }
590
591    /// Clear the current_child index for trackers that support it
592    fn clear_current_child(&mut self) {
593        match self {
594            Tracker::Struct { current_child, .. }
595            | Tracker::Enum { current_child, .. }
596            | Tracker::Array { current_child, .. }
597            | Tracker::List { current_child, .. } => {
598                *current_child = None;
599            }
600            Tracker::Set { current_child } => {
601                *current_child = false;
602            }
603            _ => {}
604        }
605    }
606}
607
608impl Frame {
609    fn new(
610        data: PtrUninit,
611        allocated: AllocatedShape,
612        ownership: FrameOwnership,
613        type_plan: typeplan::NodeId,
614    ) -> Self {
615        // For empty structs (structs with 0 fields), start as initialized since there's nothing to initialize
616        // This includes empty tuples () which are zero-sized types with no fields to initialize
617        let is_init = matches!(
618            allocated.shape().ty,
619            Type::User(UserType::Struct(struct_type)) if struct_type.fields.is_empty()
620        );
621
622        Self {
623            data,
624            allocated,
625            is_init,
626            tracker: Tracker::Scalar,
627            ownership,
628            using_custom_deserialization: false,
629            shape_level_proxy: None,
630            type_plan,
631        }
632    }
633
634    /// Deinitialize any initialized field: calls `drop_in_place` but does not free any
635    /// memory even if the frame owns that memory.
636    ///
637    /// After this call, `is_init` will be false and `tracker` will be [Tracker::Scalar].
638    fn deinit(&mut self) {
639        // For BorrowedInPlace frames, we must NOT drop. These point into existing
640        // collection entries (Value objects, Option inners) where the parent has no
641        // per-entry tracking. Dropping here would cause double-free when parent drops.
642        //
643        // For RopeSlot frames, we must NOT drop. These point into a ListRope chunk
644        // owned by the parent List's tracker. The rope handles cleanup of all elements.
645        //
646        // For TrackedBuffer frames, we CAN drop. These are temporary buffers where
647        // the parent's MapInsertState tracks initialization via is_init propagation.
648        if matches!(
649            self.ownership,
650            FrameOwnership::BorrowedInPlace | FrameOwnership::RopeSlot
651        ) {
652            self.is_init = false;
653            self.tracker = Tracker::Scalar;
654            return;
655        }
656
657        // Field frames are responsible for their value during cleanup.
658        // The ownership model ensures no double-free:
659        // - begin_field: parent's iset[idx] is cleared (parent relinquishes responsibility)
660        // - end: parent's iset[idx] is set (parent reclaims responsibility), frame is popped
661        // So if Field frame is still on stack during cleanup, parent's iset[idx] is false,
662        // meaning the parent won't drop this field - the Field frame must do it.
663
664        match &mut self.tracker {
665            Tracker::Scalar => {
666                // Simple scalar - drop if initialized
667                if self.is_init {
668                    unsafe {
669                        self.allocated
670                            .shape()
671                            .call_drop_in_place(self.data.assume_init())
672                    };
673                }
674            }
675            Tracker::Array { iset, .. } => {
676                // Drop initialized array elements
677                if let Type::Sequence(facet_core::SequenceType::Array(array_def)) =
678                    self.allocated.shape().ty
679                {
680                    let element_layout = array_def.t.layout.sized_layout().ok();
681                    if let Some(layout) = element_layout {
682                        for idx in 0..array_def.n {
683                            if iset.get(idx) {
684                                let offset = layout.size() * idx;
685                                let element_ptr = unsafe { self.data.field_init(offset) };
686                                unsafe { array_def.t.call_drop_in_place(element_ptr) };
687                            }
688                        }
689                    }
690                }
691            }
692            Tracker::Struct { iset, .. } => {
693                // Drop initialized struct fields
694                if let Type::User(UserType::Struct(struct_type)) = self.allocated.shape().ty {
695                    if iset.all_set(struct_type.fields.len()) {
696                        unsafe {
697                            self.allocated
698                                .shape()
699                                .call_drop_in_place(self.data.assume_init())
700                        };
701                    } else {
702                        for (idx, field) in struct_type.fields.iter().enumerate() {
703                            if iset.get(idx) {
704                                // This field was initialized, drop it
705                                let field_ptr = unsafe { self.data.field_init(field.offset) };
706                                unsafe { field.shape().call_drop_in_place(field_ptr) };
707                            }
708                        }
709                    }
710                }
711            }
712            Tracker::Enum { variant, data, .. } => {
713                // Drop initialized enum variant fields
714                for (idx, field) in variant.data.fields.iter().enumerate() {
715                    if data.get(idx) {
716                        // This field was initialized, drop it
717                        let field_ptr = unsafe { self.data.field_init(field.offset) };
718                        unsafe { field.shape().call_drop_in_place(field_ptr) };
719                    }
720                }
721            }
722            Tracker::SmartPointer { pending_inner, .. } => {
723                // If there's a pending inner value, drop it
724                if let Some(inner_ptr) = pending_inner
725                    && let Def::Pointer(ptr_def) = self.allocated.shape().def
726                    && let Some(inner_shape) = ptr_def.pointee
727                {
728                    unsafe {
729                        inner_shape.call_drop_in_place(PtrMut::new(inner_ptr.as_mut_byte_ptr()))
730                    };
731                }
732                // Drop the initialized SmartPointer
733                if self.is_init {
734                    unsafe {
735                        self.allocated
736                            .shape()
737                            .call_drop_in_place(self.data.assume_init())
738                    };
739                }
740            }
741            Tracker::SmartPointerSlice { vtable, .. } => {
742                // Free the slice builder
743                let builder_ptr = unsafe { self.data.assume_init() };
744                unsafe {
745                    (vtable.free_fn)(builder_ptr);
746                }
747            }
748            Tracker::List { rope, .. } => {
749                // Drop the initialized List
750                if self.is_init {
751                    unsafe {
752                        self.allocated
753                            .shape()
754                            .call_drop_in_place(self.data.assume_init())
755                    };
756                }
757
758                // Drop any elements still in the rope (not yet drained into Vec)
759                if let Some(mut rope) = rope.take()
760                    && let Def::List(list_def) = self.allocated.shape().def
761                {
762                    let element_shape = list_def.t;
763                    unsafe {
764                        rope.drain_into(|ptr| {
765                            element_shape.call_drop_in_place(PtrMut::new(ptr.as_ptr()));
766                        });
767                    }
768                }
769            }
770            Tracker::Map {
771                insert_state,
772                pending_entries,
773                ..
774            } => {
775                // Drop the initialized Map
776                if self.is_init {
777                    unsafe {
778                        self.allocated
779                            .shape()
780                            .call_drop_in_place(self.data.assume_init())
781                    };
782                }
783
784                // Clean up pending entries (key-value pairs that haven't been inserted yet)
785                if let Def::Map(map_def) = self.allocated.shape().def {
786                    for (key_ptr, value_ptr) in pending_entries.drain(..) {
787                        // Drop and deallocate key
788                        unsafe { map_def.k().call_drop_in_place(key_ptr.assume_init()) };
789                        if let Ok(key_layout) = map_def.k().layout.sized_layout()
790                            && key_layout.size() > 0
791                        {
792                            unsafe { alloc::alloc::dealloc(key_ptr.as_mut_byte_ptr(), key_layout) };
793                        }
794                        // Drop and deallocate value
795                        unsafe { map_def.v().call_drop_in_place(value_ptr.assume_init()) };
796                        if let Ok(value_layout) = map_def.v().layout.sized_layout()
797                            && value_layout.size() > 0
798                        {
799                            unsafe {
800                                alloc::alloc::dealloc(value_ptr.as_mut_byte_ptr(), value_layout)
801                            };
802                        }
803                    }
804                }
805
806                // Clean up key/value buffers based on whether their TrackedBuffer frames
807                // are still on the stack. If a frame is on the stack, it handles cleanup.
808                // If a frame was already popped (via end()), we own the buffer and must clean it.
809                match insert_state {
810                    MapInsertState::PushingKey {
811                        key_ptr,
812                        key_initialized,
813                        key_frame_on_stack,
814                    } => {
815                        // Only clean up if the frame was already popped.
816                        // If key_frame_on_stack is true, the TrackedBuffer frame above us
817                        // will handle dropping and deallocating the key buffer.
818                        if !*key_frame_on_stack
819                            && let Def::Map(map_def) = self.allocated.shape().def
820                        {
821                            // Drop the key if it was initialized
822                            if *key_initialized {
823                                unsafe { map_def.k().call_drop_in_place(key_ptr.assume_init()) };
824                            }
825                            // Deallocate the key buffer
826                            if let Ok(key_layout) = map_def.k().layout.sized_layout()
827                                && key_layout.size() > 0
828                            {
829                                unsafe {
830                                    alloc::alloc::dealloc(key_ptr.as_mut_byte_ptr(), key_layout)
831                                };
832                            }
833                        }
834                    }
835                    MapInsertState::PushingValue {
836                        key_ptr,
837                        value_ptr,
838                        value_initialized,
839                        value_frame_on_stack,
840                        key_frame_stored,
841                    } => {
842                        if let Def::Map(map_def) = self.allocated.shape().def {
843                            // Only clean up key if the key frame was NOT stored.
844                            // If key_frame_stored is true, the stored frame handles cleanup.
845                            if !*key_frame_stored {
846                                unsafe { map_def.k().call_drop_in_place(key_ptr.assume_init()) };
847                                if let Ok(key_layout) = map_def.k().layout.sized_layout()
848                                    && key_layout.size() > 0
849                                {
850                                    unsafe {
851                                        alloc::alloc::dealloc(key_ptr.as_mut_byte_ptr(), key_layout)
852                                    };
853                                }
854                            }
855
856                            // Only clean up value if the frame was already popped.
857                            // If value_frame_on_stack is true, the TrackedBuffer frame above us
858                            // will handle dropping and deallocating the value buffer.
859                            if !*value_frame_on_stack && let Some(value_ptr) = value_ptr {
860                                // Drop the value if it was initialized
861                                if *value_initialized {
862                                    unsafe {
863                                        map_def.v().call_drop_in_place(value_ptr.assume_init())
864                                    };
865                                }
866                                // Deallocate the value buffer
867                                if let Ok(value_layout) = map_def.v().layout.sized_layout()
868                                    && value_layout.size() > 0
869                                {
870                                    unsafe {
871                                        alloc::alloc::dealloc(
872                                            value_ptr.as_mut_byte_ptr(),
873                                            value_layout,
874                                        )
875                                    };
876                                }
877                            }
878                        }
879                    }
880                    MapInsertState::Idle => {}
881                }
882            }
883            Tracker::Set { .. } => {
884                // Drop the initialized Set
885                if self.is_init {
886                    unsafe {
887                        self.allocated
888                            .shape()
889                            .call_drop_in_place(self.data.assume_init())
890                    };
891                }
892            }
893            Tracker::Option {
894                building_inner,
895                pending_inner,
896            } => {
897                // Clean up pending inner value if it was never finalized
898                let had_pending = pending_inner.is_some();
899                if let Some(inner_ptr) = pending_inner.take()
900                    && let Def::Option(option_def) = self.allocated.shape().def
901                {
902                    // Drop the inner value
903                    unsafe { option_def.t.call_drop_in_place(inner_ptr.assume_init()) };
904                    // Deallocate the inner buffer
905                    if let Ok(layout) = option_def.t.layout.sized_layout()
906                        && layout.size() > 0
907                    {
908                        unsafe { alloc::alloc::dealloc(inner_ptr.as_mut_byte_ptr(), layout) };
909                    }
910                }
911                // If we're building the inner value, it will be handled by the Option vtable
912                // No special cleanup needed here as the Option will either be properly
913                // initialized or remain uninitialized
914                if !*building_inner && !had_pending {
915                    // Option is fully initialized (no pending), drop it normally
916                    unsafe {
917                        self.allocated
918                            .shape()
919                            .call_drop_in_place(self.data.assume_init())
920                    };
921                }
922            }
923            Tracker::Result { building_inner, .. } => {
924                // If we're building the inner value, it will be handled by the Result vtable
925                // No special cleanup needed here as the Result will either be properly
926                // initialized or remain uninitialized
927                if !*building_inner {
928                    // Result is fully initialized, drop it normally
929                    unsafe {
930                        self.allocated
931                            .shape()
932                            .call_drop_in_place(self.data.assume_init())
933                    };
934                }
935            }
936            Tracker::DynamicValue { state } => {
937                // Clean up pending_entries if this is an Object
938                if let DynamicValueState::Object {
939                    pending_entries, ..
940                } = state
941                {
942                    // Drop and deallocate any pending values that weren't inserted
943                    if let Def::DynamicValue(dyn_def) = self.allocated.shape().def {
944                        let value_shape = self.allocated.shape(); // Value entries are same shape
945                        for (_key, value_ptr) in pending_entries.drain(..) {
946                            // Drop the value
947                            unsafe {
948                                value_shape.call_drop_in_place(value_ptr.assume_init());
949                            }
950                            // Deallocate the value buffer
951                            if let Ok(layout) = value_shape.layout.sized_layout()
952                                && layout.size() > 0
953                            {
954                                unsafe {
955                                    alloc::alloc::dealloc(value_ptr.as_mut_byte_ptr(), layout);
956                                }
957                            }
958                        }
959                        // Note: keys are Strings and will be dropped when pending_entries is dropped
960                        let _ = dyn_def; // silence unused warning
961                    }
962                }
963
964                // Clean up pending_elements if this is an Array
965                if let DynamicValueState::Array {
966                    pending_elements, ..
967                } = state
968                {
969                    // Drop and deallocate any pending elements that weren't inserted
970                    let element_shape = self.allocated.shape(); // Array elements are same shape
971                    for element_ptr in pending_elements.drain(..) {
972                        // Drop the element
973                        unsafe {
974                            element_shape.call_drop_in_place(element_ptr.assume_init());
975                        }
976                        // Deallocate the element buffer
977                        if let Ok(layout) = element_shape.layout.sized_layout()
978                            && layout.size() > 0
979                        {
980                            unsafe {
981                                alloc::alloc::dealloc(element_ptr.as_mut_byte_ptr(), layout);
982                            }
983                        }
984                    }
985                }
986
987                // Drop if initialized
988                if self.is_init {
989                    let result = unsafe {
990                        self.allocated
991                            .shape()
992                            .call_drop_in_place(self.data.assume_init())
993                    };
994                    if result.is_none() {
995                        // This would be a bug - DynamicValue should always have drop_in_place
996                        panic!(
997                            "DynamicValue type {} has no drop_in_place implementation",
998                            self.allocated.shape()
999                        );
1000                    }
1001                }
1002            }
1003            Tracker::Inner { .. } => {
1004                // Inner wrapper - drop if initialized
1005                if self.is_init {
1006                    unsafe {
1007                        self.allocated
1008                            .shape()
1009                            .call_drop_in_place(self.data.assume_init())
1010                    };
1011                }
1012            }
1013        }
1014
1015        self.is_init = false;
1016        self.tracker = Tracker::Scalar;
1017    }
1018
    /// Deinitialize any initialized value for REPLACEMENT purposes.
    ///
    /// Unlike `deinit()` which is used during error cleanup, this method is used when
    /// we're about to overwrite a value with a new one (e.g., in `set_shape`).
    ///
    /// The difference is important for Field frames with simple trackers:
    /// - During cleanup: parent struct will drop all initialized fields, so Field frames skip dropping
    /// - During replacement: we're about to overwrite, so we MUST drop the old value
    ///
    /// For BorrowedInPlace frames: same logic applies - we must drop when replacing.
    fn deinit_for_replace(&mut self) {
        // For BorrowedInPlace frames, deinit() skips dropping (parent owns on cleanup).
        // But when REPLACING a value, we must drop the old value first.
        if matches!(self.ownership, FrameOwnership::BorrowedInPlace) && self.is_init {
            // SAFETY: is_init is true, so self.data holds a fully initialized value
            // of this frame's shape that we are allowed to drop in place.
            unsafe {
                self.allocated
                    .shape()
                    .call_drop_in_place(self.data.assume_init());
            }

            // CRITICAL: For DynamicValue (e.g., facet_value::Value), the parent Object's
            // HashMap entry still points to this location. If we just drop and leave garbage,
            // the parent will try to drop that garbage when it's cleaned up, causing
            // use-after-free. We must reinitialize to a safe default (Null) so the parent
            // can safely drop it later.
            if let Def::DynamicValue(dyn_def) = &self.allocated.shape().def {
                // SAFETY: the old value was dropped just above, so the slot is
                // logically uninitialized; set_null writes a valid Null value into it.
                unsafe {
                    (dyn_def.vtable.set_null)(self.data);
                }
                // Keep is_init = true since we just initialized it to Null
                self.tracker = Tracker::DynamicValue {
                    state: DynamicValueState::Scalar,
                };
                return;
            }

            // Non-DynamicValue borrowed slot: mark uninitialized so the caller can
            // write a fresh value into the same location.
            self.is_init = false;
            self.tracker = Tracker::Scalar;
            return;
        }

        // Field frames handle their own cleanup in deinit() - no special handling needed here.

        // All other cases: use normal deinit
        self.deinit();
    }
1065
1066    /// This must be called after (fully) initializing a value.
1067    ///
1068    /// This sets `is_init` to `true` to indicate the value is initialized.
1069    /// Composite types (structs, enums, etc.) might be handled differently.
1070    ///
1071    /// # Safety
1072    ///
1073    /// This should only be called when `self.data` has been actually initialized.
1074    const unsafe fn mark_as_init(&mut self) {
1075        self.is_init = true;
1076    }
1077
1078    /// Deallocate the memory associated with this frame, if it owns it.
1079    ///
1080    /// The memory has to be deinitialized first, see [Frame::deinit]
1081    fn dealloc(self) {
1082        // Only deallocate if this frame owns its memory
1083        if !self.ownership.needs_dealloc() {
1084            return;
1085        }
1086
1087        // If we need to deallocate, the frame must be deinitialized first
1088        if self.is_init {
1089            unreachable!("a frame has to be deinitialized before being deallocated")
1090        }
1091
1092        // Deallocate using the actual allocated size (not derived from shape)
1093        if self.allocated.allocated_size() > 0 {
1094            // Use the shape for alignment, but the stored size for the actual allocation
1095            if let Ok(layout) = self.allocated.shape().layout.sized_layout() {
1096                let actual_layout = core::alloc::Layout::from_size_align(
1097                    self.allocated.allocated_size(),
1098                    layout.align(),
1099                )
1100                .expect("allocated_size must be valid");
1101                unsafe { alloc::alloc::dealloc(self.data.as_mut_byte_ptr(), actual_layout) };
1102            }
1103        }
1104    }
1105
1106    /// Fill in defaults for any unset fields that have default values.
1107    ///
1108    /// This handles:
1109    /// - Container-level defaults (when no fields set and struct has Default impl)
1110    /// - Fields with `#[facet(default = ...)]` - uses the explicit default function
1111    /// - Fields with `#[facet(default)]` - uses the type's Default impl
1112    /// - `Option<T>` fields - default to None
1113    ///
1114    /// Returns Ok(()) if successful, or an error if a field has `#[facet(default)]`
1115    /// but no default implementation is available.
1116    fn fill_defaults(&mut self) -> Result<(), ReflectErrorKind> {
1117        // First, check if we need to upgrade from Scalar to Struct tracker
1118        // This happens when no fields were visited at all in deferred mode
1119        if !self.is_init
1120            && matches!(self.tracker, Tracker::Scalar)
1121            && let Type::User(UserType::Struct(struct_type)) = self.allocated.shape().ty
1122        {
1123            // If no fields were visited and the container has a default, use it
1124            // SAFETY: We're about to initialize the entire struct with its default value
1125            if unsafe { self.allocated.shape().call_default_in_place(self.data) }.is_some() {
1126                self.is_init = true;
1127                return Ok(());
1128            }
1129            // Otherwise initialize the struct tracker with empty iset
1130            self.tracker = Tracker::Struct {
1131                iset: ISet::new(struct_type.fields.len()),
1132                current_child: None,
1133            };
1134        }
1135
1136        // Handle Option types with Scalar tracker - default to None
1137        // This happens in deferred mode when an Option field was never touched
1138        if !self.is_init
1139            && matches!(self.tracker, Tracker::Scalar)
1140            && matches!(self.allocated.shape().def, Def::Option(_))
1141        {
1142            // SAFETY: Option<T> always implements Default (as None)
1143            if unsafe { self.allocated.shape().call_default_in_place(self.data) }.is_some() {
1144                self.is_init = true;
1145                return Ok(());
1146            }
1147        }
1148
1149        match &mut self.tracker {
1150            Tracker::Struct { iset, .. } => {
1151                if let Type::User(UserType::Struct(struct_type)) = self.allocated.shape().ty {
1152                    // Fast path: if ALL fields are set, nothing to do
1153                    if iset.all_set(struct_type.fields.len()) {
1154                        return Ok(());
1155                    }
1156
1157                    // Check if NO fields have been set and the container has a default
1158                    let no_fields_set = (0..struct_type.fields.len()).all(|i| !iset.get(i));
1159                    if no_fields_set {
1160                        // SAFETY: We're about to initialize the entire struct with its default value
1161                        if unsafe { self.allocated.shape().call_default_in_place(self.data) }
1162                            .is_some()
1163                        {
1164                            self.tracker = Tracker::Scalar;
1165                            self.is_init = true;
1166                            return Ok(());
1167                        }
1168                    }
1169
1170                    // Check if the container has #[facet(default)] attribute
1171                    let container_has_default = self.allocated.shape().has_default_attr();
1172
1173                    // Fill defaults for individual fields
1174                    for (idx, field) in struct_type.fields.iter().enumerate() {
1175                        // Skip already-initialized fields
1176                        if iset.get(idx) {
1177                            continue;
1178                        }
1179
1180                        // Calculate field pointer
1181                        let field_ptr = unsafe { self.data.field_uninit(field.offset) };
1182
1183                        // Try to initialize with default
1184                        if unsafe {
1185                            Self::try_init_field_default(field, field_ptr, container_has_default)
1186                        } {
1187                            // Mark field as initialized
1188                            iset.set(idx);
1189                        } else if field.has_default() {
1190                            // Field has #[facet(default)] but we couldn't find a default function.
1191                            // This happens with opaque types that don't have default_in_place.
1192                            return Err(ReflectErrorKind::DefaultAttrButNoDefaultImpl {
1193                                shape: field.shape(),
1194                            });
1195                        }
1196                    }
1197                }
1198            }
1199            Tracker::Enum { variant, data, .. } => {
1200                // Fast path: if ALL fields are set, nothing to do
1201                let num_fields = variant.data.fields.len();
1202                if num_fields == 0 || data.all_set(num_fields) {
1203                    return Ok(());
1204                }
1205
1206                // Check if the container has #[facet(default)] attribute
1207                let container_has_default = self.allocated.shape().has_default_attr();
1208
1209                // Handle enum variant fields
1210                for (idx, field) in variant.data.fields.iter().enumerate() {
1211                    // Skip already-initialized fields
1212                    if data.get(idx) {
1213                        continue;
1214                    }
1215
1216                    // Calculate field pointer within the variant data
1217                    let field_ptr = unsafe { self.data.field_uninit(field.offset) };
1218
1219                    // Try to initialize with default
1220                    if unsafe {
1221                        Self::try_init_field_default(field, field_ptr, container_has_default)
1222                    } {
1223                        // Mark field as initialized
1224                        data.set(idx);
1225                    } else if field.has_default() {
1226                        // Field has #[facet(default)] but we couldn't find a default function.
1227                        return Err(ReflectErrorKind::DefaultAttrButNoDefaultImpl {
1228                            shape: field.shape(),
1229                        });
1230                    }
1231                }
1232            }
1233            // Other tracker types don't have fields with defaults
1234            _ => {}
1235        }
1236        Ok(())
1237    }
1238
1239    /// Initialize a field with its default value if one is available.
1240    ///
1241    /// Priority:
1242    /// 1. Explicit field-level default_fn (from `#[facet(default = ...)]`)
1243    /// 2. Type-level default_in_place (from Default impl, including `Option<T>`)
1244    ///    but only if the field has the DEFAULT flag
1245    /// 3. Container-level default: if the container has `#[facet(default)]` and
1246    ///    the field's type implements Default, use that
1247    /// 4. Special cases: `Option<T>` (defaults to None), () (unit type)
1248    ///
1249    /// Returns true if a default was applied, false otherwise.
1250    ///
1251    /// # Safety
1252    ///
1253    /// `field_ptr` must point to uninitialized memory of the appropriate type.
1254    unsafe fn try_init_field_default(
1255        field: &Field,
1256        field_ptr: PtrUninit,
1257        container_has_default: bool,
1258    ) -> bool {
1259        use facet_core::DefaultSource;
1260
1261        // First check for explicit field-level default
1262        if let Some(default_source) = field.default {
1263            match default_source {
1264                DefaultSource::Custom(default_fn) => {
1265                    // Custom default function - it expects PtrUninit
1266                    unsafe { default_fn(field_ptr) };
1267                    return true;
1268                }
1269                DefaultSource::FromTrait => {
1270                    // Use the type's Default trait
1271                    if unsafe { field.shape().call_default_in_place(field_ptr) }.is_some() {
1272                        return true;
1273                    }
1274                }
1275            }
1276        }
1277
1278        // If container has #[facet(default)] and the field's type implements Default,
1279        // use the type's Default impl. This allows `#[facet(default)]` on a struct to
1280        // mean "use Default for any missing fields whose types implement Default".
1281        if container_has_default
1282            && unsafe { field.shape().call_default_in_place(field_ptr) }.is_some()
1283        {
1284            return true;
1285        }
1286
1287        // Special case: Option<T> always defaults to None, even without explicit #[facet(default)]
1288        // This is because Option is fundamentally "optional" - if not set, it should be None
1289        if matches!(field.shape().def, Def::Option(_))
1290            && unsafe { field.shape().call_default_in_place(field_ptr) }.is_some()
1291        {
1292            return true;
1293        }
1294
1295        // Special case: () unit type always defaults to ()
1296        if field.shape().is_type::<()>()
1297            && unsafe { field.shape().call_default_in_place(field_ptr) }.is_some()
1298        {
1299            return true;
1300        }
1301
1302        // Special case: Collection types (Vec, HashMap, HashSet, etc.) default to empty
1303        // These types have obvious "zero values" and it's almost always what you want
1304        // when deserializing data where the collection is simply absent.
1305        if matches!(field.shape().def, Def::List(_) | Def::Map(_) | Def::Set(_))
1306            && unsafe { field.shape().call_default_in_place(field_ptr) }.is_some()
1307        {
1308            return true;
1309        }
1310
1311        false
1312    }
1313
1314    /// Drain all initialized elements from the rope into the Vec.
1315    ///
1316    /// This is called when finalizing a list that used rope storage. Elements were
1317    /// built in stable rope chunks to allow deferred processing; now we move them
1318    /// into the actual Vec.
1319    ///
1320    /// # Safety
1321    ///
1322    /// The rope must contain only initialized elements (via `mark_last_initialized`).
1323    /// The list_data must point to an initialized Vec with capacity for the elements.
1324    fn drain_rope_into_vec(
1325        mut rope: ListRope,
1326        list_def: &facet_core::ListDef,
1327        list_data: PtrUninit,
1328    ) -> Result<(), ReflectErrorKind> {
1329        let count = rope.initialized_count();
1330        if count == 0 {
1331            return Ok(());
1332        }
1333
1334        let push_fn = list_def
1335            .push()
1336            .ok_or_else(|| ReflectErrorKind::OperationFailed {
1337                shape: list_def.t(),
1338                operation: "List missing push function for rope drain",
1339            })?;
1340
1341        // SAFETY: list_data points to initialized Vec (is_init was true)
1342        let list_ptr = unsafe { list_data.assume_init() };
1343
1344        // Reserve space if available (optimization, not required)
1345        if let Some(reserve_fn) = list_def.reserve() {
1346            unsafe {
1347                reserve_fn(list_ptr, count);
1348            }
1349        }
1350
1351        // Move each element from rope to Vec
1352        // SAFETY: rope contains `count` initialized elements
1353        unsafe {
1354            rope.drain_into(|element_ptr| {
1355                push_fn(
1356                    facet_core::PtrMut::new(list_ptr.as_mut_byte_ptr()),
1357                    facet_core::PtrMut::new(element_ptr.as_ptr()),
1358                );
1359            });
1360        }
1361
1362        Ok(())
1363    }
1364
1365    /// Insert all pending key-value entries into the map.
1366    ///
1367    /// This is called when finalizing a map that used delayed insertion. Entries were
1368    /// kept in pending_entries to allow deferred processing; now we insert them into
1369    /// the actual map and deallocate the temporary buffers.
1370    fn drain_pending_into_map(
1371        pending_entries: &mut Vec<(PtrUninit, PtrUninit)>,
1372        map_def: &facet_core::MapDef,
1373        map_data: PtrUninit,
1374    ) -> Result<(), ReflectErrorKind> {
1375        let insert_fn = map_def.vtable.insert;
1376
1377        // SAFETY: map_data points to initialized map (is_init was true)
1378        let map_ptr = unsafe { map_data.assume_init() };
1379
1380        for (key_ptr, value_ptr) in pending_entries.drain(..) {
1381            // Insert the key-value pair
1382            unsafe {
1383                insert_fn(
1384                    facet_core::PtrMut::new(map_ptr.as_mut_byte_ptr()),
1385                    facet_core::PtrMut::new(key_ptr.as_mut_byte_ptr()),
1386                    facet_core::PtrMut::new(value_ptr.as_mut_byte_ptr()),
1387                );
1388            }
1389
1390            // Deallocate the temporary buffers (insert moved the data)
1391            if let Ok(key_layout) = map_def.k().layout.sized_layout()
1392                && key_layout.size() > 0
1393            {
1394                unsafe { alloc::alloc::dealloc(key_ptr.as_mut_byte_ptr(), key_layout) };
1395            }
1396            if let Ok(value_layout) = map_def.v().layout.sized_layout()
1397                && value_layout.size() > 0
1398            {
1399                unsafe { alloc::alloc::dealloc(value_ptr.as_mut_byte_ptr(), value_layout) };
1400            }
1401        }
1402
1403        Ok(())
1404    }
1405
1406    /// Complete an Option by moving the pending inner value into it.
1407    ///
1408    /// This is called when finalizing an Option that used deferred init_some.
1409    /// The inner value was kept in stable memory for deferred processing;
1410    /// now we move it into the Option and deallocate the temporary buffer.
1411    fn complete_pending_option(
1412        option_def: facet_core::OptionDef,
1413        option_data: PtrUninit,
1414        inner_ptr: PtrUninit,
1415    ) -> Result<(), ReflectErrorKind> {
1416        let init_some_fn = option_def.vtable.init_some;
1417        let inner_shape = option_def.t;
1418
1419        // The inner_ptr contains the initialized inner value
1420        let inner_value_ptr = unsafe { inner_ptr.assume_init() };
1421
1422        // Initialize the Option as Some(inner_value)
1423        unsafe {
1424            init_some_fn(option_data, inner_value_ptr);
1425        }
1426
1427        // Deallocate the inner value's memory since init_some_fn moved it
1428        if let Ok(layout) = inner_shape.layout.sized_layout()
1429            && layout.size() > 0
1430        {
1431            unsafe { alloc::alloc::dealloc(inner_ptr.as_mut_byte_ptr(), layout) };
1432        }
1433
1434        Ok(())
1435    }
1436
    /// Finish a smart pointer (Box/Arc/Rc/...) whose pointee was built in a
    /// temporary buffer, moving the pointee into the smart pointer.
    ///
    /// Two paths are supported:
    /// - sized pointee: delegate to the vtable's `new_into_fn`, which moves the
    ///   value out of the buffer;
    /// - unsized `str` pointee: the buffer holds a `String`, which is converted
    ///   into `Box<str>`/`Arc<str>`/`Rc<str>` based on the known pointer kind.
    ///
    /// In both paths the temporary buffer is deallocated after its contents
    /// have been moved out. Any other configuration is an error.
    fn complete_pending_smart_pointer(
        smart_ptr_shape: &'static Shape,
        smart_ptr_def: facet_core::PointerDef,
        smart_ptr_data: PtrUninit,
        inner_ptr: PtrUninit,
    ) -> Result<(), ReflectErrorKind> {
        // Check for sized pointee case first (uses new_into_fn)
        if let Some(new_into_fn) = smart_ptr_def.vtable.new_into_fn {
            let Some(inner_shape) = smart_ptr_def.pointee else {
                return Err(ReflectErrorKind::OperationFailed {
                    shape: smart_ptr_shape,
                    operation: "SmartPointer missing pointee shape",
                });
            };

            // The inner_ptr contains the initialized inner value; we only
            // assert initialization here, the pointer itself is reused below.
            let _ = unsafe { inner_ptr.assume_init() };

            // Initialize the SmartPointer with the inner value.
            // SAFETY: inner_ptr holds an initialized value of the pointee
            // shape; new_into_fn moves it into smart_ptr_data.
            unsafe {
                new_into_fn(smart_ptr_data, PtrMut::new(inner_ptr.as_mut_byte_ptr()));
            }

            // Deallocate the inner value's memory since new_into_fn moved it
            // (zero-sized layouts have no allocation to free)
            if let Ok(layout) = inner_shape.layout.sized_layout()
                && layout.size() > 0
            {
                unsafe { alloc::alloc::dealloc(inner_ptr.as_mut_byte_ptr(), layout) };
            }

            return Ok(());
        }

        // Check for unsized pointee case: String -> Arc<str>/Box<str>/Rc<str>
        if let Some(pointee) = smart_ptr_def.pointee()
            && pointee.is_shape(str::SHAPE)
        {
            use alloc::{rc::Rc, string::String, sync::Arc};
            use facet_core::KnownPointer;

            let Some(known) = smart_ptr_def.known else {
                return Err(ReflectErrorKind::OperationFailed {
                    shape: smart_ptr_shape,
                    operation: "SmartPointer<str> missing known pointer type",
                });
            };

            // Read the String value from inner_ptr.
            // SAFETY: this path assumes the buffer holds an initialized
            // String (the pointee shape is `str`, so the staging value is a
            // String); ptr::read moves it out, leaving the buffer logically
            // uninitialized.
            let string_ptr = inner_ptr.as_mut_byte_ptr() as *mut String;
            let string_value = unsafe { core::ptr::read(string_ptr) };

            // Convert to the appropriate smart pointer type and write it
            // directly into the destination slot.
            match known {
                KnownPointer::Box => {
                    let boxed: alloc::boxed::Box<str> = string_value.into_boxed_str();
                    unsafe {
                        core::ptr::write(
                            smart_ptr_data.as_mut_byte_ptr() as *mut alloc::boxed::Box<str>,
                            boxed,
                        );
                    }
                }
                KnownPointer::Arc => {
                    let arc: Arc<str> = Arc::from(string_value.into_boxed_str());
                    unsafe {
                        core::ptr::write(smart_ptr_data.as_mut_byte_ptr() as *mut Arc<str>, arc);
                    }
                }
                KnownPointer::Rc => {
                    let rc: Rc<str> = Rc::from(string_value.into_boxed_str());
                    unsafe {
                        core::ptr::write(smart_ptr_data.as_mut_byte_ptr() as *mut Rc<str>, rc);
                    }
                }
                _ => {
                    return Err(ReflectErrorKind::OperationFailed {
                        shape: smart_ptr_shape,
                        operation: "Unsupported SmartPointer<str> type",
                    });
                }
            }

            // Deallocate the String's memory (we moved the data out via ptr::read)
            let string_layout = alloc::string::String::SHAPE.layout.sized_layout().unwrap();
            if string_layout.size() > 0 {
                unsafe { alloc::alloc::dealloc(inner_ptr.as_mut_byte_ptr(), string_layout) };
            }

            return Ok(());
        }

        // Neither a sized pointee with new_into_fn nor a known str pointer.
        Err(ReflectErrorKind::OperationFailed {
            shape: smart_ptr_shape,
            operation: "SmartPointer missing new_into_fn and not a supported unsized type",
        })
    }
1533
    /// Returns an error if the value is not fully initialized.
    /// For lists with rope storage, drains the rope into the Vec.
    /// For maps with pending entries, drains the entries into the map.
    /// For options with pending inner values, calls init_some.
    /// For smart pointers with a pending inner value, completes the pointer.
    /// For dynamic values, flushes pending object entries / array elements.
    fn require_full_initialization(&mut self) -> Result<(), ReflectErrorKind> {
        match &mut self.tracker {
            // Plain value: a single is_init flag tells the whole story.
            Tracker::Scalar => {
                if self.is_init {
                    Ok(())
                } else {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::Array { iset, .. } => {
                match self.allocated.shape().ty {
                    Type::Sequence(facet_core::SequenceType::Array(array_def)) => {
                        // Check if all array elements are initialized
                        if (0..array_def.n).all(|idx| iset.get(idx)) {
                            Ok(())
                        } else {
                            Err(ReflectErrorKind::UninitializedValue {
                                shape: self.allocated.shape(),
                            })
                        }
                    }
                    // Array tracker on a non-array shape: inconsistent state,
                    // report as uninitialized.
                    _ => Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    }),
                }
            }
            Tracker::Struct { iset, .. } => {
                match self.allocated.shape().ty {
                    Type::User(UserType::Struct(struct_type)) => {
                        if iset.all_set(struct_type.fields.len()) {
                            Ok(())
                        } else {
                            // Find index of the first bit not set, so we can
                            // name the missing field in the error.
                            let first_missing_idx =
                                (0..struct_type.fields.len()).find(|&idx| !iset.get(idx));
                            if let Some(missing_idx) = first_missing_idx {
                                let field_name = struct_type.fields[missing_idx].name;
                                Err(ReflectErrorKind::UninitializedField {
                                    shape: self.allocated.shape(),
                                    field_name,
                                })
                            } else {
                                // fallback, something went wrong (all_set said
                                // no, but no unset bit was found)
                                Err(ReflectErrorKind::UninitializedValue {
                                    shape: self.allocated.shape(),
                                })
                            }
                        }
                    }
                    // Struct tracker on a non-struct shape: inconsistent state.
                    _ => Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    }),
                }
            }
            Tracker::Enum { variant, data, .. } => {
                // Check if all fields of the variant are initialized
                let num_fields = variant.data.fields.len();
                if num_fields == 0 {
                    // Unit variant, always initialized
                    Ok(())
                } else if (0..num_fields).all(|idx| data.get(idx)) {
                    Ok(())
                } else {
                    // Find the first uninitialized field so the error names it
                    let first_missing_idx = (0..num_fields).find(|&idx| !data.get(idx));
                    if let Some(missing_idx) = first_missing_idx {
                        let field_name = variant.data.fields[missing_idx].name;
                        Err(ReflectErrorKind::UninitializedField {
                            shape: self.allocated.shape(),
                            field_name,
                        })
                    } else {
                        // fallback: should be unreachable given the check above
                        Err(ReflectErrorKind::UninitializedValue {
                            shape: self.allocated.shape(),
                        })
                    }
                }
            }
            Tracker::SmartPointer {
                building_inner,
                pending_inner,
            } => {
                if *building_inner {
                    // Inner value is still being built
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else if let Some(inner_ptr) = pending_inner.take() {
                    // Finalize the pending inner value: move it into the smart
                    // pointer and free the staging buffer.
                    let smart_ptr_shape = self.allocated.shape();
                    if let Def::Pointer(smart_ptr_def) = smart_ptr_shape.def {
                        Self::complete_pending_smart_pointer(
                            smart_ptr_shape,
                            smart_ptr_def,
                            self.data,
                            inner_ptr,
                        )?;
                        self.is_init = true;
                        Ok(())
                    } else {
                        Err(ReflectErrorKind::OperationFailed {
                            shape: smart_ptr_shape,
                            operation: "SmartPointer frame without SmartPointer definition",
                        })
                    }
                } else if self.is_init {
                    Ok(())
                } else {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::SmartPointerSlice { building_item, .. } => {
                // Only incomplete while an item is actively being built.
                if *building_item {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else {
                    Ok(())
                }
            }
            Tracker::List {
                current_child,
                rope,
            } => {
                if self.is_init && current_child.is_none() {
                    // Drain rope into Vec if we have elements stored there
                    if let Some(rope) = rope.take()
                        && let Def::List(list_def) = self.allocated.shape().def
                    {
                        Self::drain_rope_into_vec(rope, &list_def, self.data)?;
                    }
                    Ok(())
                } else {
                    // Either the list itself or a child element is unfinished.
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::Map {
                insert_state,
                pending_entries,
                ..
            } => {
                if self.is_init && matches!(insert_state, MapInsertState::Idle) {
                    // Insert all pending entries into the map
                    if !pending_entries.is_empty()
                        && let Def::Map(map_def) = self.allocated.shape().def
                    {
                        Self::drain_pending_into_map(pending_entries, &map_def, self.data)?;
                    }
                    Ok(())
                } else {
                    // Map uninitialized, or a key/value insert is mid-flight.
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::Set { current_child } => {
                // Complete only if the set exists and no element is in flight.
                if self.is_init && !*current_child {
                    Ok(())
                } else {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::Option {
                building_inner,
                pending_inner,
            } => {
                if *building_inner {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else {
                    // Finalize pending init_some if we have a pending inner value
                    if let Some(inner_ptr) = pending_inner.take()
                        && let Def::Option(option_def) = self.allocated.shape().def
                    {
                        Self::complete_pending_option(option_def, self.data, inner_ptr)?;
                    }
                    Ok(())
                }
            }
            Tracker::Result { building_inner, .. } => {
                // Complete unless the Ok/Err payload is still being built.
                if *building_inner {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else {
                    Ok(())
                }
            }
            Tracker::Inner { building_inner } => {
                if *building_inner {
                    // Inner value is still being built
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else if self.is_init {
                    Ok(())
                } else {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::DynamicValue { state } => {
                if matches!(state, DynamicValueState::Uninit) {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else {
                    // Insert pending entries for Object state
                    if let DynamicValueState::Object {
                        pending_entries,
                        insert_state,
                    } = state
                    {
                        if !matches!(insert_state, DynamicObjectInsertState::Idle) {
                            // An entry is still mid-insert; not complete yet.
                            return Err(ReflectErrorKind::UninitializedValue {
                                shape: self.allocated.shape(),
                            });
                        }

                        if !pending_entries.is_empty()
                            && let Def::DynamicValue(dyn_def) = self.allocated.shape().def
                        {
                            let object_ptr = unsafe { self.data.assume_init() };
                            // NOTE(review): assumes pending value buffers were
                            // allocated with this same dynamic-value shape's
                            // layout (nested dynamic values) — confirm against
                            // the allocation site.
                            let value_shape = self.allocated.shape();

                            for (key, value_ptr) in pending_entries.drain(..) {
                                // Insert the entry
                                unsafe {
                                    (dyn_def.vtable.insert_object_entry)(
                                        object_ptr,
                                        &key,
                                        value_ptr.assume_init(),
                                    );
                                }
                                // Deallocate the value buffer (insert_object_entry moved the value)
                                if let Ok(layout) = value_shape.layout.sized_layout()
                                    && layout.size() > 0
                                {
                                    unsafe {
                                        alloc::alloc::dealloc(value_ptr.as_mut_byte_ptr(), layout);
                                    }
                                }
                            }
                        }
                    }

                    // Insert pending elements for Array state
                    if let DynamicValueState::Array {
                        pending_elements,
                        building_element,
                    } = state
                    {
                        if *building_element {
                            // An element is still being built; not complete yet.
                            return Err(ReflectErrorKind::UninitializedValue {
                                shape: self.allocated.shape(),
                            });
                        }

                        if !pending_elements.is_empty()
                            && let Def::DynamicValue(dyn_def) = self.allocated.shape().def
                        {
                            let array_ptr = unsafe { self.data.assume_init() };
                            // NOTE(review): same layout assumption as the
                            // Object branch above — element buffers share this
                            // shape's layout.
                            let element_shape = self.allocated.shape();

                            for element_ptr in pending_elements.drain(..) {
                                // Push the element into the array
                                unsafe {
                                    (dyn_def.vtable.push_array_element)(
                                        array_ptr,
                                        element_ptr.assume_init(),
                                    );
                                }
                                // Deallocate the element buffer (push_array_element moved the value)
                                if let Ok(layout) = element_shape.layout.sized_layout()
                                    && layout.size() > 0
                                {
                                    unsafe {
                                        alloc::alloc::dealloc(
                                            element_ptr.as_mut_byte_ptr(),
                                            layout,
                                        );
                                    }
                                }
                            }
                        }
                    }

                    Ok(())
                }
            }
        }
    }
1840
    /// Fill defaults and check required fields in a single pass using precomputed plans.
    ///
    /// This replaces the separate `fill_defaults` + `require_full_initialization` calls
    /// with a single iteration over the precomputed `FieldInitPlan` list.
    ///
    /// # Arguments
    /// * `plans` - Precomputed field initialization plans from TypePlan
    /// * `num_fields` - Total number of fields (from StructPlan/VariantPlanMeta)
    /// * `type_plan_core` - Reference to the TypePlanCore for resolving validators
    ///
    /// # Returns
    /// `Ok(())` if all required fields are set (or filled with defaults), or an error
    /// describing the first missing required field.
    #[allow(unsafe_code)]
    fn fill_and_require_fields(
        &mut self,
        plans: &[FieldInitPlan],
        num_fields: usize,
        type_plan_core: &TypePlanCore,
    ) -> Result<(), ReflectErrorKind> {
        // With lazy tracker initialization, structs start with Tracker::Scalar.
        // If is_init is true with Scalar, the struct was set wholesale - nothing to do.
        // If is_init is false, we need to upgrade to Tracker::Struct to track fields.
        if !self.is_init
            && matches!(self.tracker, Tracker::Scalar)
            && matches!(self.allocated.shape().ty, Type::User(UserType::Struct(_)))
        {
            // Try container-level default first (cheaper than per-field fill,
            // and sidesteps the field-tracking path entirely)
            if unsafe { self.allocated.shape().call_default_in_place(self.data) }.is_some() {
                self.is_init = true;
                return Ok(());
            }
            // Upgrade to Tracker::Struct for field-by-field tracking
            self.tracker = Tracker::Struct {
                iset: ISet::new(num_fields),
                current_child: None,
            };
        }

        // Get the iset based on tracker type
        // (mutable borrow of self.tracker only; self.data and self.allocated
        // remain usable below because they are disjoint fields)
        let iset = match &mut self.tracker {
            Tracker::Struct { iset, .. } => iset,
            Tracker::Enum { data, .. } => data,
            // Scalar with is_init=true means struct was set wholesale - all fields initialized
            Tracker::Scalar if self.is_init => return Ok(()),
            // Other tracker types don't use field_init_plans
            _ => return Ok(()),
        };

        // Fast path: if all fields are already set, no defaults needed.
        // But validators still need to run.
        let all_fields_set = iset.all_set(num_fields);

        for plan in plans {
            if !all_fields_set && !iset.get(plan.index) {
                // Field not set - handle according to fill rule
                match &plan.fill_rule {
                    FillRule::Defaultable(default) => {
                        // Calculate field pointer
                        let field_ptr = unsafe { self.data.field_uninit(plan.offset) };

                        // Call the appropriate default function
                        let success = match default {
                            FieldDefault::Custom(default_fn) => {
                                // SAFETY: default_fn writes to uninitialized memory
                                unsafe { default_fn(field_ptr) };
                                true
                            }
                            FieldDefault::FromTrait(shape) => {
                                // SAFETY: call_default_in_place writes to uninitialized memory
                                unsafe { shape.call_default_in_place(field_ptr) }.is_some()
                            }
                        };

                        if success {
                            iset.set(plan.index);
                        } else {
                            // Default was declared but could not be produced;
                            // report the field as uninitialized.
                            return Err(ReflectErrorKind::UninitializedField {
                                shape: self.allocated.shape(),
                                field_name: plan.name,
                            });
                        }
                    }
                    FillRule::Required => {
                        return Err(ReflectErrorKind::UninitializedField {
                            shape: self.allocated.shape(),
                            field_name: plan.name,
                        });
                    }
                }
            }

            // Run validators on the (now initialized) field
            if !plan.validators.is_empty() {
                let field_ptr = unsafe { self.data.field_init(plan.offset) };
                for validator in type_plan_core.validators(plan.validators) {
                    validator.run(field_ptr.into(), plan.name, self.allocated.shape())?;
                }
            }
        }

        Ok(())
    }
1944
1945    /// Get the [EnumType] of the frame's shape, if it is an enum type
1946    pub(crate) const fn get_enum_type(&self) -> Result<EnumType, ReflectErrorKind> {
1947        match self.allocated.shape().ty {
1948            Type::User(UserType::Enum(e)) => Ok(e),
1949            _ => Err(ReflectErrorKind::WasNotA {
1950                expected: "enum",
1951                actual: self.allocated.shape(),
1952            }),
1953        }
1954    }
1955
1956    pub(crate) fn get_field(&self) -> Option<&Field> {
1957        match self.allocated.shape().ty {
1958            Type::User(user_type) => match user_type {
1959                UserType::Struct(struct_type) => {
1960                    // Try to get currently active field index
1961                    if let Tracker::Struct {
1962                        current_child: Some(idx),
1963                        ..
1964                    } = &self.tracker
1965                    {
1966                        struct_type.fields.get(*idx)
1967                    } else {
1968                        None
1969                    }
1970                }
1971                UserType::Enum(_enum_type) => {
1972                    if let Tracker::Enum {
1973                        variant,
1974                        current_child: Some(idx),
1975                        ..
1976                    } = &self.tracker
1977                    {
1978                        variant.data.fields.get(*idx)
1979                    } else {
1980                        None
1981                    }
1982                }
1983                _ => None,
1984            },
1985            _ => None,
1986        }
1987    }
1988}
1989
1990// Convenience methods on Partial for accessing FrameMode internals.
1991// These help minimize changes to the rest of the codebase during the refactor.
impl<'facet, const BORROW: bool> Partial<'facet, BORROW> {
    /// Get a reference to the frame stack.
    #[inline]
    pub(crate) const fn frames(&self) -> &Vec<Frame> {
        self.mode.stack()
    }

    /// Get a mutable reference to the frame stack.
    #[inline]
    pub(crate) fn frames_mut(&mut self) -> &mut Vec<Frame> {
        self.mode.stack_mut()
    }

    /// Check if we're in deferred mode.
    #[inline]
    pub const fn is_deferred(&self) -> bool {
        self.mode.is_deferred()
    }

    /// Get the start depth if in deferred mode.
    /// Returns `None` when not in deferred mode.
    #[inline]
    pub(crate) const fn start_depth(&self) -> Option<usize> {
        self.mode.start_depth()
    }

    /// Derive the path from the current frame stack.
    ///
    /// Compute the navigation path for deferred mode storage and lookup.
    /// The returned `Path` is anchored to the root shape for proper type context.
    ///
    /// This extracts Field steps from struct/enum frames and Index steps from
    /// array/list frames. Option wrappers, smart pointers (Box, Rc, etc.), and
    /// other transparent types don't add path steps.
    ///
    /// This MUST match the storage path computation in end() for consistency.
    pub(crate) fn derive_path(&self) -> Path {
        // Get the root shape from the first frame
        let root_shape = self
            .frames()
            .first()
            .map(|f| f.allocated.shape())
            .unwrap_or_else(|| {
                // Fallback to unit type shape if no frames (shouldn't happen in practice)
                <() as facet_core::Facet>::SHAPE
            });

        let mut path = Path::new(root_shape);

        // Walk ALL frames, extracting navigation steps
        // This matches the storage path computation in end()
        let frames = self.frames();
        for (frame_idx, frame) in frames.iter().enumerate() {
            match &frame.tracker {
                // Struct frame with an active field contributes a Field step
                Tracker::Struct {
                    current_child: Some(idx),
                    ..
                } => {
                    path.push(PathStep::Field(*idx as u32));
                }
                // Enum frame with an active variant field also maps to Field
                Tracker::Enum {
                    current_child: Some(idx),
                    ..
                } => {
                    path.push(PathStep::Field(*idx as u32));
                }
                // List frame with an active element contributes an Index step
                Tracker::List {
                    current_child: Some(idx),
                    ..
                } => {
                    path.push(PathStep::Index(*idx as u32));
                }
                // Fixed-size array frames are indexed the same way as lists
                Tracker::Array {
                    current_child: Some(idx),
                    ..
                } => {
                    path.push(PathStep::Index(*idx as u32));
                }
                Tracker::Option {
                    building_inner: true,
                    ..
                } => {
                    // Option with building_inner contributes OptionSome to path
                    path.push(PathStep::OptionSome);
                }
                Tracker::SmartPointer {
                    building_inner: true,
                    ..
                } => {
                    // SmartPointer with building_inner contributes Deref to path
                    path.push(PathStep::Deref);
                }
                Tracker::SmartPointerSlice {
                    current_child: Some(idx),
                    ..
                } => {
                    // SmartPointerSlice with current_child contributes Index to path
                    path.push(PathStep::Index(*idx as u32));
                }
                Tracker::Inner {
                    building_inner: true,
                } => {
                    // Inner with building_inner contributes Inner to path
                    path.push(PathStep::Inner);
                }
                Tracker::Map {
                    current_entry_index: Some(idx),
                    building_key,
                    ..
                } => {
                    // Map with active entry contributes MapKey or MapValue with entry index
                    if *building_key {
                        path.push(PathStep::MapKey(*idx as u32));
                    } else {
                        path.push(PathStep::MapValue(*idx as u32));
                    }
                }
                // Other tracker types (Set, Result, etc.)
                // don't contribute to the storage path - they're transparent wrappers
                _ => {}
            }

            // If the next frame is a proxy frame, add a Proxy step (matches end())
            if frame_idx + 1 < frames.len() && frames[frame_idx + 1].using_custom_deserialization {
                path.push(PathStep::Proxy);
            }
        }

        path
    }
}
2122
impl<'facet, const BORROW: bool> Drop for Partial<'facet, BORROW> {
    // Releases all partially-built state in two phases:
    //   1. Deferred-mode stored frames, processed deepest-first so children
    //      (whose data pointers point into parent memory) are cleaned up while
    //      their parents are still valid.
    //   2. The remaining frame stack, popped top-down.
    // In both phases the parent's initialization bookkeeping is cleared before
    // a child frame is deinited, so the parent never drops the same data twice.
    fn drop(&mut self) {
        trace!("🧹 Partial is being dropped");

        // With the ownership transfer model:
        // - When we enter a field, parent's iset[idx] is cleared
        // - Parent won't try to drop fields with iset[idx] = false
        // - No double-free possible by construction

        // 1. Clean up stored frames from deferred state
        if let FrameMode::Deferred {
            stored_frames,
            stack,
            ..
        } = &mut self.mode
        {
            // Stored frames have ownership of their data (parent's iset was cleared).
            // IMPORTANT: Process in deepest-first order so children are dropped before parents.
            // Child frames have data pointers into parent memory, so parents must stay valid
            // until all their children are cleaned up.
            //
            // CRITICAL: Before dropping a child frame, we must mark the parent's field as
            // uninitialized. Otherwise, when we later drop the parent, it will try to drop
            // that field again, causing a double-free.
            //
            // Take the whole map out of `self` so we can iterate the sorted
            // paths while also removing/mutating individual frames.
            let mut stored_frames = core::mem::take(stored_frames);
            let mut paths: Vec<_> = stored_frames.keys().cloned().collect();
            // Sort by path depth (number of steps), deepest first
            paths.sort_by_key(|p| core::cmp::Reverse(p.steps.len()));
            for path in paths {
                if let Some(mut frame) = stored_frames.remove(&path) {
                    // Before dropping this frame, update the parent to prevent double-free.
                    // The parent path is everything except the last step.
                    // (`saturating_sub` makes this safe for the root path,
                    // whose `steps` is empty; that path has no last step, so
                    // no parent update happens below either.)
                    let parent_path = Path {
                        shape: path.shape,
                        steps: path.steps[..path.steps.len().saturating_sub(1)].to_vec(),
                    };

                    // Helper to find parent frame in stored_frames or stack.
                    // Returns a raw pointer because the caller needs mutable
                    // access to the parent while `stored_frames` stays borrowed.
                    // NOTE(review): the stack fallback assumes the frame at
                    // stack index `d` corresponds to a path of depth `d`
                    // (root frame at index 0) — confirm against how the
                    // deferred stack is laid out.
                    let find_parent_frame =
                        |stored: &mut alloc::collections::BTreeMap<Path, Frame>,
                         stk: &mut [Frame],
                         pp: &Path|
                         -> Option<*mut Frame> {
                            if let Some(pf) = stored.get_mut(pp) {
                                Some(pf as *mut Frame)
                            } else {
                                let idx = pp.steps.len();
                                stk.get_mut(idx).map(|f| f as *mut Frame)
                            }
                        };

                    match path.steps.last() {
                        Some(PathStep::Field(field_idx)) => {
                            // Struct/enum field frame: clear the parent's init
                            // bit so the parent's own deinit skips this field.
                            let field_idx = *field_idx as usize;
                            if let Some(parent_ptr) =
                                find_parent_frame(&mut stored_frames, stack, &parent_path)
                            {
                                // SAFETY: parent_ptr is valid for the duration of this block
                                let parent_frame = unsafe { &mut *parent_ptr };
                                match &mut parent_frame.tracker {
                                    Tracker::Struct { iset, .. } => {
                                        iset.unset(field_idx);
                                    }
                                    Tracker::Enum { data, .. } => {
                                        data.unset(field_idx);
                                    }
                                    _ => {}
                                }
                            }
                        }
                        Some(PathStep::MapKey(entry_idx)) => {
                            // Map key frame - clear from parent's insert_state to prevent
                            // double-free. The key will be dropped by this frame's deinit.
                            let entry_idx = *entry_idx as usize;
                            if let Some(parent_ptr) =
                                find_parent_frame(&mut stored_frames, stack, &parent_path)
                            {
                                // SAFETY: parent_ptr was just produced by
                                // find_parent_frame and nothing invalidates it
                                // before this use.
                                let parent_frame = unsafe { &mut *parent_ptr };
                                if let Tracker::Map {
                                    insert_state,
                                    pending_entries,
                                    ..
                                } = &mut parent_frame.tracker
                                {
                                    // If key is in insert_state, clear it
                                    if let MapInsertState::PushingKey {
                                        key_frame_on_stack, ..
                                    } = insert_state
                                    {
                                        *key_frame_on_stack = false;
                                    }
                                    // Also check if there's a pending entry with this key
                                    // that needs to have the key nullified
                                    if entry_idx < pending_entries.len() {
                                        // Remove this entry since we're handling cleanup here
                                        // The key will be dropped by this frame's deinit
                                        // The value frame will be handled separately
                                        // Mark the key as already-handled by setting to dangling
                                        // Actually, we'll clear the entire entry - the value
                                        // frame will be processed separately anyway
                                        // NOTE(review): this branch is currently a no-op —
                                        // the comments above describe intent, but no code
                                        // runs here; the MapValue arm is what removes the
                                        // pending entry. Confirm this is deliberate.
                                    }
                                }
                            }
                        }
                        Some(PathStep::MapValue(entry_idx)) => {
                            // Map value frame - remove the entry from pending_entries.
                            // The value is dropped by this frame's deinit.
                            // The key is dropped by the MapKey frame's deinit (processed separately).
                            let entry_idx = *entry_idx as usize;
                            if let Some(parent_ptr) =
                                find_parent_frame(&mut stored_frames, stack, &parent_path)
                            {
                                // SAFETY: parent_ptr was just produced by
                                // find_parent_frame and nothing invalidates it
                                // before this use.
                                let parent_frame = unsafe { &mut *parent_ptr };
                                if let Tracker::Map {
                                    pending_entries, ..
                                } = &mut parent_frame.tracker
                                {
                                    // Remove the entry at this index if it exists.
                                    // Don't drop key/value here - they're handled by their
                                    // respective stored frames (MapKey and MapValue).
                                    if entry_idx < pending_entries.len() {
                                        pending_entries.remove(entry_idx);
                                    }
                                }
                            }
                        }
                        Some(PathStep::Index(_)) => {
                            // List element frames with RopeSlot ownership are handled by
                            // the deinit check for RopeSlot - they skip dropping since the
                            // rope owns the data. No parent update needed.
                        }
                        _ => {}
                    }
                    // Parent bookkeeping is updated; now drop this frame's data
                    // and free its allocation.
                    frame.deinit();
                    frame.dealloc();
                }
            }
        }

        // 2. Pop and deinit stack frames
        // CRITICAL: Before deiniting a child frame, we must mark the parent's field as
        // uninitialized. Otherwise, the parent will try to drop the field again.
        loop {
            // Re-fetch the stack each iteration; `pop` below shortens it.
            let stack = self.mode.stack_mut();
            if stack.is_empty() {
                break;
            }

            let mut frame = stack.pop().unwrap();

            // If this frame has Field ownership, mark the parent's bit as unset
            // so the parent won't try to drop it again.
            if let FrameOwnership::Field { field_idx } = frame.ownership
                && let Some(parent_frame) = stack.last_mut()
            {
                match &mut parent_frame.tracker {
                    Tracker::Struct { iset, .. } => {
                        iset.unset(field_idx);
                    }
                    Tracker::Enum { data, .. } => {
                        data.unset(field_idx);
                    }
                    Tracker::Array { iset, .. } => {
                        iset.unset(field_idx);
                    }
                    _ => {}
                }
            }

            frame.deinit();
            frame.dealloc();
        }
    }
}
2297
#[cfg(test)]
mod size_tests {
    use super::*;
    use core::mem::size_of;

    /// Dumps the memory footprint of the core reflection types to stderr.
    ///
    /// Run with `cargo test print_type_sizes -- --nocapture` to inspect the
    /// sizes and spot accidental size regressions in `Frame` and friends.
    #[test]
    fn print_type_sizes() {
        // One (label, size) row per type keeps the report in a single table;
        // the printed output is identical to emitting each line by hand.
        let rows: [(&str, usize); 12] = [
            ("Frame", size_of::<Frame>()),
            ("Tracker", size_of::<Tracker>()),
            ("ISet", size_of::<ISet>()),
            ("AllocatedShape", size_of::<AllocatedShape>()),
            ("FrameOwnership", size_of::<FrameOwnership>()),
            ("PtrUninit", size_of::<facet_core::PtrUninit>()),
            ("Option<usize>", size_of::<Option<usize>>()),
            (
                "Option<&'static facet_core::ProxyDef>",
                size_of::<Option<&'static facet_core::ProxyDef>>(),
            ),
            ("TypePlanNode", size_of::<typeplan::TypePlanNode>()),
            ("Vec<Frame>", size_of::<Vec<Frame>>()),
            ("MapInsertState", size_of::<MapInsertState>()),
            ("DynamicValueState", size_of::<DynamicValueState>()),
        ];

        eprintln!("\n=== Type Sizes ===");
        for (label, bytes) in rows {
            eprintln!("{label}: {bytes} bytes");
        }
        eprintln!("===================\n");
    }
}