// facet_reflect/partial/mod.rs
1//! Partial value construction for dynamic reflection
2//!
3//! This module provides APIs for incrementally building values through reflection,
4//! particularly useful when deserializing data from external formats like JSON or YAML.
5//!
6//! # Overview
7//!
8//! The `Partial` type (formerly known as `Wip` - Work In Progress) allows you to:
9//! - Allocate memory for a value based on its `Shape`
10//! - Initialize fields incrementally in a type-safe manner
11//! - Handle complex nested structures including structs, enums, collections, and smart pointers
12//! - Build the final value once all required fields are initialized
13//!
14//! **Note**: This is the only API for partial value construction. The previous `TypedPartial`
15//! wrapper has been removed in favor of using `Partial` directly.
16//!
17//! # Basic Usage
18//!
19//! ```no_run
20//! # use facet_reflect::Partial;
21//! # use facet_core::{Shape, Facet};
22//! # fn example<T: Facet<'static>>() -> Result<(), Box<dyn std::error::Error>> {
23//! // Allocate memory for a struct
24//! let mut partial = Partial::alloc::<T>()?;
25//!
26//! // Set simple fields
27//! partial = partial.set_field("name", "Alice")?;
28//! partial = partial.set_field("age", 30u32)?;
29//!
30//! // Work with nested structures
31//! partial = partial.begin_field("address")?;
32//! partial = partial.set_field("street", "123 Main St")?;
33//! partial = partial.set_field("city", "Springfield")?;
34//! partial = partial.end()?;
35//!
36//! // Build the final value
37//! let value = partial.build()?;
38//! # Ok(())
39//! # }
40//! ```
41//!
42//! # Chaining Style
43//!
44//! The API supports method chaining for cleaner code:
45//!
46//! ```no_run
47//! # use facet_reflect::Partial;
48//! # use facet_core::{Shape, Facet};
49//! # fn example<T: Facet<'static>>() -> Result<(), Box<dyn std::error::Error>> {
50//! let value = Partial::alloc::<T>()?
51//!     .set_field("name", "Bob")?
52//!     .begin_field("scores")?
53//!         .set(vec![95, 87, 92])?
54//!     .end()?
55//!     .build()?;
56//! # Ok(())
57//! # }
58//! ```
59//!
60//! # Working with Collections
61//!
62//! ```no_run
63//! # use facet_reflect::Partial;
64//! # use facet_core::{Shape, Facet};
65//! # fn example() -> Result<(), Box<dyn std::error::Error>> {
66//! let mut partial = Partial::alloc::<Vec<String>>()?;
67//!
68//! // Add items to a list
69//! partial = partial.begin_list_item()?;
70//! partial = partial.set("first")?;
71//! partial = partial.end()?;
72//!
73//! partial = partial.begin_list_item()?;
74//! partial = partial.set("second")?;
75//! partial = partial.end()?;
76//!
77//! let vec = partial.build()?;
78//! # Ok(())
79//! # }
80//! ```
81//!
82//! # Working with Maps
83//!
84//! ```no_run
85//! # use facet_reflect::Partial;
86//! # use facet_core::{Shape, Facet};
87//! # use std::collections::HashMap;
88//! # fn example() -> Result<(), Box<dyn std::error::Error>> {
89//! let mut partial = Partial::alloc::<HashMap<String, i32>>()?;
90//!
91//! // Insert key-value pairs
92//! partial = partial.begin_key()?;
93//! partial = partial.set("score")?;
94//! partial = partial.end()?;
95//! partial = partial.begin_value()?;
96//! partial = partial.set(100i32)?;
97//! partial = partial.end()?;
98//!
99//! let map = partial.build()?;
100//! # Ok(())
101//! # }
102//! ```
103//!
104//! # Safety and Memory Management
105//!
106//! The `Partial` type ensures memory safety by:
107//! - Tracking initialization state of all fields
108//! - Preventing use-after-build through state tracking
109//! - Properly handling drop semantics for partially initialized values
110//! - Supporting both owned and borrowed values through lifetime parameters
111
112use alloc::{collections::BTreeMap, sync::Arc, vec::Vec};
113
114mod arena;
115mod iset;
116mod rope;
117pub(crate) mod typeplan;
118pub use typeplan::{DeserStrategy, NodeId, TypePlan, TypePlanCore};
119
120mod partial_api;
121
122use crate::{ReflectErrorKind, TrackerKind, trace};
123use facet_core::Facet;
124use facet_path::{Path, PathStep};
125
126use core::marker::PhantomData;
127
128mod heap_value;
129pub use heap_value::*;
130
131use facet_core::{
132    Def, EnumType, Field, PtrMut, PtrUninit, Shape, SliceBuilderVTable, Type, UserType, Variant,
133};
134use iset::ISet;
135use rope::ListRope;
136use typeplan::{FieldDefault, FieldInitPlan, FillRule};
137
/// Lifecycle state of a [`Partial`] value.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum PartialState {
    /// Partial is active and can be modified (fields set, frames pushed/popped)
    Active,

    /// Partial has been successfully built and cannot be reused
    Built,
}
147
/// Mode of operation for frame management.
///
/// In `Strict` mode, frames must be fully initialized before being popped.
/// In `Deferred` mode, frames can be stored when popped and restored on re-entry,
/// with final validation happening in `finish_deferred()`.
///
/// Accessors like `FrameMode::stack` work uniformly across both modes.
enum FrameMode {
    /// Strict mode: frames must be fully initialized before popping.
    Strict {
        /// Stack of frames for nested initialization.
        stack: Vec<Frame>,
    },

    /// Deferred mode: frames are stored when popped, can be re-entered.
    Deferred {
        /// Stack of frames for nested initialization.
        stack: Vec<Frame>,

        /// The frame depth when deferred mode was started.
        /// Path calculations are relative to this depth.
        start_depth: usize,

        /// Frames saved when popped, keyed by their path (derived from frame stack).
        /// When we re-enter a path, we restore the stored frame.
        /// Uses the full `Path` type which includes the root shape for proper type anchoring.
        stored_frames: BTreeMap<Path, Frame>,
    },
}
175
176impl FrameMode {
177    /// Get a reference to the frame stack.
178    const fn stack(&self) -> &Vec<Frame> {
179        match self {
180            FrameMode::Strict { stack } | FrameMode::Deferred { stack, .. } => stack,
181        }
182    }
183
184    /// Get a mutable reference to the frame stack.
185    const fn stack_mut(&mut self) -> &mut Vec<Frame> {
186        match self {
187            FrameMode::Strict { stack } | FrameMode::Deferred { stack, .. } => stack,
188        }
189    }
190
191    /// Check if we're in deferred mode.
192    const fn is_deferred(&self) -> bool {
193        matches!(self, FrameMode::Deferred { .. })
194    }
195
196    /// Get the start depth if in deferred mode.
197    const fn start_depth(&self) -> Option<usize> {
198        match self {
199            FrameMode::Deferred { start_depth, .. } => Some(*start_depth),
200            FrameMode::Strict { .. } => None,
201        }
202    }
203}
204
/// A type-erased, heap-allocated, partially-initialized value.
///
/// [Partial] keeps track of the state of initialization of the underlying
/// value: if we're building `struct S { a: u32, b: String }`, we may
/// have initialized `a`, or `b`, or both, or neither.
///
/// [Partial] allows navigating down nested structs and initializing them
/// progressively: [Partial::begin_field] pushes a frame onto the stack,
/// which then has to be initialized, and popped off with [Partial::end].
///
/// If [Partial::end] is called but the current frame isn't fully initialized,
/// an error is returned: in other words, if you navigate down to a field,
/// you have to fully initialize it in one go. You can't go back up and back
/// down to it again. (Deferred mode relaxes this: frames popped while
/// deferred are stored and can be re-entered — see `FrameMode::Deferred`.)
pub struct Partial<'facet, const BORROW: bool = true> {
    /// Frame management mode (strict or deferred) and associated state.
    mode: FrameMode,

    /// current state of the Partial
    state: PartialState,

    /// Precomputed deserialization plan for the root type.
    /// Built once at allocation time, navigated in parallel with value construction.
    /// Each Frame holds a NodeId (index) into this plan's arenas.
    root_plan: Arc<TypePlanCore>,

    /// PhantomData marker for the 'facet lifetime.
    /// This is covariant in 'facet, which is safe because 'facet represents
    /// the lifetime of borrowed data FROM the input (deserialization source).
    /// A Partial<'long, ...> can be safely treated as Partial<'short, ...>
    /// because it only needs borrowed data to live at least as long as 'short.
    _marker: PhantomData<&'facet ()>,
}
238
/// State machine for inserting a single key/value pair into a map being built.
///
/// Progresses Idle → PushingKey → PushingValue → (back to Idle on insert).
#[derive(Clone, Copy, Debug)]
pub(crate) enum MapInsertState {
    /// Not currently inserting
    Idle,

    /// Pushing key - memory allocated, waiting for initialization
    PushingKey {
        /// Temporary storage for the key being built
        key_ptr: PtrUninit,
        /// Whether the key has been fully initialized
        key_initialized: bool,
        /// Whether the key's TrackedBuffer frame is still on the stack.
        /// When true, the frame handles cleanup. When false (after end()),
        /// the Map tracker owns the buffer and must clean it up.
        key_frame_on_stack: bool,
    },

    /// Pushing value after key is done
    PushingValue {
        /// Temporary storage for the key that was built (always initialized)
        key_ptr: PtrUninit,
        /// Temporary storage for the value being built
        value_ptr: Option<PtrUninit>,
        /// Whether the value has been fully initialized
        value_initialized: bool,
        /// Whether the value's TrackedBuffer frame is still on the stack.
        /// When true, the frame handles cleanup. When false (after end()),
        /// the Map tracker owns the buffer and must clean it up.
        value_frame_on_stack: bool,
        /// Whether the key's frame was stored in deferred mode.
        /// When true, the stored frame handles cleanup. When false,
        /// the Map tracker owns the key buffer and must clean it up.
        key_frame_stored: bool,
    },
}
274
/// Describes who owns the memory a `Frame` points at — and therefore which
/// cleanup actions (deinit and/or dealloc) the frame may perform on drop.
#[derive(Debug, Clone, Copy)]
pub(crate) enum FrameOwnership {
    /// This frame owns the allocation and should deallocate it on drop
    Owned,

    /// This frame points to a field/element within a parent's allocation.
    /// The parent's `iset[field_idx]` was CLEARED when this frame was created.
    /// On drop: deinit if initialized, but do NOT deallocate.
    /// On successful end(): parent's `iset[field_idx]` will be SET.
    Field { field_idx: usize },

    /// Temporary buffer tracked by parent's MapInsertState.
    /// Used by begin_key(), begin_value() for map insertions.
    /// Safe to drop on deinit - parent's cleanup respects is_init propagation.
    TrackedBuffer,

    /// Pointer into existing collection entry (Value object, Option inner, etc.)
    /// Used by begin_object_entry() on existing key, begin_some() re-entry.
    /// NOT safe to drop on deinit - parent collection has no per-entry tracking
    /// and would try to drop the freed value again (double-free).
    BorrowedInPlace,

    /// Pointer to externally-owned memory (e.g., caller's stack via MaybeUninit).
    /// Used by `from_raw()` for stack-friendly deserialization.
    /// On drop: deinit if initialized (drop partially constructed values), but do NOT deallocate.
    /// The caller owns the memory and is responsible for its lifetime.
    External,

    /// Points into a stable rope chunk for list element building.
    /// Used by `begin_list_item()` for building Vec elements.
    /// The memory is stable (won't move during Vec growth),
    /// so frames inside can be stored for deferred processing.
    /// On successful end(): element is tracked for later finalization.
    /// On list frame end(): all elements are moved into the real Vec.
    /// On drop/failure: the rope chunk handles cleanup.
    RopeSlot,
}
312
313impl FrameOwnership {
314    /// Returns true if this frame is responsible for deallocating its memory.
315    ///
316    /// Both `Owned` and `TrackedBuffer` frames allocated their memory and need
317    /// to deallocate it. `Field`, `BorrowedInPlace`, and `External` frames borrow from
318    /// parent, existing structures, or caller-provided memory.
319    const fn needs_dealloc(&self) -> bool {
320        matches!(self, FrameOwnership::Owned | FrameOwnership::TrackedBuffer)
321    }
322}
323
/// Immutable pairing of a shape with its actual allocation size.
///
/// This ensures that the shape and allocated size are always in sync and cannot
/// drift apart, preventing the class of bugs where a frame's shape doesn't match
/// what was actually allocated (see issue #1568).
pub(crate) struct AllocatedShape {
    /// Shape describing the type stored in the allocation.
    shape: &'static Shape,
    /// Number of bytes that were actually allocated for the value.
    allocated_size: usize,
}
333
334impl AllocatedShape {
335    pub(crate) const fn new(shape: &'static Shape, allocated_size: usize) -> Self {
336        Self {
337            shape,
338            allocated_size,
339        }
340    }
341
342    pub(crate) const fn shape(&self) -> &'static Shape {
343        self.shape
344    }
345
346    pub(crate) const fn allocated_size(&self) -> usize {
347        self.allocated_size
348    }
349}
350
/// Points somewhere in a partially-initialized value. If we're initializing
/// `a.b.c`, then the first frame would point to the beginning of `a`, the
/// second to the beginning of the `b` field of `a`, etc.
///
/// A frame can point to a complex data structure, like a struct or an enum:
/// it keeps track of whether a variant was selected, which fields are
/// initialized, etc., and is able to drop & deinitialize whatever it has
/// initialized so far.
#[must_use]
pub(crate) struct Frame {
    /// Address of the value being initialized
    pub(crate) data: PtrUninit,

    /// Shape of the value being initialized, paired with the actual allocation size
    pub(crate) allocated: AllocatedShape,

    /// Whether this frame's data is fully initialized
    pub(crate) is_init: bool,

    /// Tracks building mode and partial initialization state
    pub(crate) tracker: Tracker,

    /// Whether this frame owns the allocation or is just a field pointer
    pub(crate) ownership: FrameOwnership,

    /// Whether this frame is for a custom deserialization pipeline
    pub(crate) using_custom_deserialization: bool,

    /// Container-level proxy definition (from `#[facet(proxy = ...)]` on the shape).
    /// Used during custom deserialization to convert from proxy type to target type.
    pub(crate) shape_level_proxy: Option<&'static facet_core::ProxyDef>,

    /// Index of the precomputed TypePlan node for this frame's type.
    /// This is navigated in parallel with the value - when we begin_nth_field,
    /// the new frame gets the index for that field's child plan node.
    /// Use `plan.node(type_plan)` to get the actual `&TypePlanNode`.
    /// Always present - TypePlan is built for what we actually deserialize into
    /// (including proxies).
    pub(crate) type_plan: typeplan::NodeId,
}
390
/// Per-kind partial-initialization state for the value a `Frame` is building.
///
/// The variant mirrors the kind of type being constructed (scalar, struct,
/// enum, list, map, …) and stores whatever bookkeeping that kind needs.
#[derive(Debug)]
pub(crate) enum Tracker {
    /// Simple scalar value - no partial initialization tracking needed.
    /// Whether it's initialized is tracked by `Frame::is_init`.
    Scalar,

    /// Partially initialized array
    Array {
        /// Track which array elements are initialized (up to 63 elements)
        iset: ISet,
        /// If we're pushing another frame, this is set to the array index
        current_child: Option<usize>,
    },

    /// Partially initialized struct/tuple-struct etc.
    Struct {
        /// fields need to be individually tracked — we only
        /// support up to 63 fields.
        iset: ISet,
        /// if we're pushing another frame, this is set to the index of the struct field
        current_child: Option<usize>,
    },

    /// Smart pointer being initialized.
    /// Whether it's initialized is tracked by `Frame::is_init`.
    SmartPointer {
        /// Whether we're currently building the inner value
        building_inner: bool,
        /// Pending inner value pointer to be moved with new_into_fn on finalization.
        /// Deferred processing requires keeping the inner value's memory stable,
        /// so we delay the new_into_fn() call until the SmartPointer frame is finalized.
        /// None = no pending inner, Some = inner value ready to be moved into SmartPointer.
        pending_inner: Option<PtrUninit>,
    },

    /// We're initializing an `Arc<[T]>`, `Box<[T]>`, `Rc<[T]>`, etc.
    ///
    /// We're using the slice builder API to construct the slice
    SmartPointerSlice {
        /// The slice builder vtable
        vtable: &'static SliceBuilderVTable,

        /// Whether we're currently building an item to push
        building_item: bool,

        /// Current element index being built (for path derivation in deferred mode)
        current_child: Option<usize>,
    },

    /// Transparent inner type wrapper (`NonZero<T>`, ByteString, etc.)
    /// Used to distinguish inner frames from their parent for deferred path tracking.
    Inner {
        /// Whether we're currently building the inner value
        building_inner: bool,
    },

    /// Partially initialized enum (but we picked a variant,
    /// so it's not Uninit)
    Enum {
        /// Variant chosen for the enum
        variant: &'static Variant,
        /// Index of the variant in the enum's variants array
        variant_idx: usize,
        /// tracks enum fields (for the given variant)
        data: ISet,
        /// If we're pushing another frame, this is set to the field index
        current_child: Option<usize>,
    },

    /// Partially initialized list (Vec, etc.)
    /// Whether it's initialized is tracked by `Frame::is_init`.
    List {
        /// If we're pushing another frame for an element, this is the element index
        current_child: Option<usize>,
        /// Stable rope storage for elements during list building.
        /// A rope is a list of fixed-size chunks - chunks never reallocate, only new
        /// chunks are added. This keeps element pointers stable, enabling deferred
        /// frame processing for nested structs inside Vec elements.
        /// On finalization, elements are moved into the real Vec.
        rope: Option<ListRope>,
    },

    /// Partially initialized map (HashMap, BTreeMap, etc.)
    /// Whether it's initialized is tracked by `Frame::is_init`.
    Map {
        /// State of the current insertion operation
        insert_state: MapInsertState,
        /// Pending key-value entries to be inserted on map finalization.
        /// Deferred processing requires keeping buffers alive until finish_deferred(),
        /// so we delay actual insertion until the map frame is finalized.
        /// Each entry is (key_ptr, value_ptr) - both are initialized and owned by this tracker.
        pending_entries: Vec<(PtrUninit, PtrUninit)>,
        /// The current entry index, used for building unique paths for deferred frame storage.
        /// Incremented each time we start a new key (in begin_key).
        /// This allows inner frames of different map entries to have distinct paths.
        current_entry_index: Option<usize>,
        /// Whether we're currently building a key (true) or value (false).
        /// Used to determine whether to push MapKey or MapValue to the path.
        building_key: bool,
    },

    /// Partially initialized set (HashSet, BTreeSet, etc.)
    /// Whether it's initialized is tracked by `Frame::is_init`.
    Set {
        /// If we're pushing another frame for an element
        current_child: bool,
    },

    /// Option being initialized with Some(inner_value)
    Option {
        /// Whether we're currently building the inner value
        building_inner: bool,
        /// Pending inner value pointer to be moved with init_some on finalization.
        /// Deferred processing requires keeping the inner value's memory stable,
        /// so we delay the init_some() call until the Option frame is finalized.
        /// None = no pending inner, Some = inner value ready to be moved into Option.
        pending_inner: Option<PtrUninit>,
    },

    /// Result being initialized with Ok or Err
    Result {
        /// Whether we're building Ok (true) or Err (false)
        is_ok: bool,
        /// Whether we're currently building the inner value
        building_inner: bool,
    },

    /// Dynamic value (e.g., facet_value::Value) being initialized
    DynamicValue {
        /// What kind of dynamic value we're building
        state: DynamicValueState,
    },
}
524
/// State for building a dynamic value
#[derive(Debug)]
#[allow(dead_code)] // Some variants are for future use (object support)
pub(crate) enum DynamicValueState {
    /// Not yet initialized - will be set to scalar, array, or object
    Uninit,
    /// Initialized as a scalar (null, bool, number, string, bytes)
    Scalar,
    /// Initialized as an array, currently building an element
    Array {
        /// Whether an element frame is currently being built
        building_element: bool,
        /// Pending elements to be inserted during finalization (deferred mode)
        pending_elements: alloc::vec::Vec<PtrUninit>,
    },
    /// Initialized as an object
    Object {
        /// State of the current key/value insertion
        insert_state: DynamicObjectInsertState,
        /// Pending entries to be inserted during finalization (deferred mode)
        pending_entries: alloc::vec::Vec<(alloc::string::String, PtrUninit)>,
    },
}
546
/// State for inserting a key-value pair into a dynamic object
#[derive(Debug)]
#[allow(dead_code)] // For future use (object support)
pub(crate) enum DynamicObjectInsertState {
    /// Idle - ready for a new key-value pair
    Idle,
    /// Currently building the value for a key
    BuildingValue {
        /// The key for the current entry
        key: alloc::string::String,
    },
}
559
560impl Tracker {
561    const fn kind(&self) -> TrackerKind {
562        match self {
563            Tracker::Scalar => TrackerKind::Scalar,
564            Tracker::Array { .. } => TrackerKind::Array,
565            Tracker::Struct { .. } => TrackerKind::Struct,
566            Tracker::SmartPointer { .. } => TrackerKind::SmartPointer,
567            Tracker::SmartPointerSlice { .. } => TrackerKind::SmartPointerSlice,
568            Tracker::Enum { .. } => TrackerKind::Enum,
569            Tracker::List { .. } => TrackerKind::List,
570            Tracker::Map { .. } => TrackerKind::Map,
571            Tracker::Set { .. } => TrackerKind::Set,
572            Tracker::Option { .. } => TrackerKind::Option,
573            Tracker::Result { .. } => TrackerKind::Result,
574            Tracker::DynamicValue { .. } => TrackerKind::DynamicValue,
575            Tracker::Inner { .. } => TrackerKind::Inner,
576        }
577    }
578
579    /// Set the current_child index for trackers that support it
580    const fn set_current_child(&mut self, idx: usize) {
581        match self {
582            Tracker::Struct { current_child, .. }
583            | Tracker::Enum { current_child, .. }
584            | Tracker::Array { current_child, .. } => {
585                *current_child = Some(idx);
586            }
587            _ => {}
588        }
589    }
590
591    /// Clear the current_child index for trackers that support it
592    fn clear_current_child(&mut self) {
593        match self {
594            Tracker::Struct { current_child, .. }
595            | Tracker::Enum { current_child, .. }
596            | Tracker::Array { current_child, .. }
597            | Tracker::List { current_child, .. } => {
598                *current_child = None;
599            }
600            Tracker::Set { current_child } => {
601                *current_child = false;
602            }
603            _ => {}
604        }
605    }
606}
607
608impl Frame {
609    fn new(
610        data: PtrUninit,
611        allocated: AllocatedShape,
612        ownership: FrameOwnership,
613        type_plan: typeplan::NodeId,
614    ) -> Self {
615        // For empty structs (structs with 0 fields), start as initialized since there's nothing to initialize
616        // This includes empty tuples () which are zero-sized types with no fields to initialize
617        let is_init = matches!(
618            allocated.shape().ty,
619            Type::User(UserType::Struct(struct_type)) if struct_type.fields.is_empty()
620        );
621
622        Self {
623            data,
624            allocated,
625            is_init,
626            tracker: Tracker::Scalar,
627            ownership,
628            using_custom_deserialization: false,
629            shape_level_proxy: None,
630            type_plan,
631        }
632    }
633
634    /// Deinitialize any initialized field: calls `drop_in_place` but does not free any
635    /// memory even if the frame owns that memory.
636    ///
637    /// After this call, `is_init` will be false and `tracker` will be [Tracker::Scalar].
638    fn deinit(&mut self) {
639        // For BorrowedInPlace frames, we must NOT drop. These point into existing
640        // collection entries (Value objects, Option inners) where the parent has no
641        // per-entry tracking. Dropping here would cause double-free when parent drops.
642        //
643        // For RopeSlot frames, we must NOT drop. These point into a ListRope chunk
644        // owned by the parent List's tracker. The rope handles cleanup of all elements.
645        //
646        // For TrackedBuffer frames, we CAN drop. These are temporary buffers where
647        // the parent's MapInsertState tracks initialization via is_init propagation.
648        if matches!(
649            self.ownership,
650            FrameOwnership::BorrowedInPlace | FrameOwnership::RopeSlot
651        ) {
652            self.is_init = false;
653            self.tracker = Tracker::Scalar;
654            return;
655        }
656
657        // Field frames are responsible for their value during cleanup.
658        // The ownership model ensures no double-free:
659        // - begin_field: parent's iset[idx] is cleared (parent relinquishes responsibility)
660        // - end: parent's iset[idx] is set (parent reclaims responsibility), frame is popped
661        // So if Field frame is still on stack during cleanup, parent's iset[idx] is false,
662        // meaning the parent won't drop this field - the Field frame must do it.
663
664        match &mut self.tracker {
665            Tracker::Scalar => {
666                // Simple scalar - drop if initialized
667                if self.is_init {
668                    unsafe {
669                        self.allocated
670                            .shape()
671                            .call_drop_in_place(self.data.assume_init())
672                    };
673                }
674            }
675            Tracker::Array { iset, .. } => {
676                // Drop initialized array elements
677                if let Type::Sequence(facet_core::SequenceType::Array(array_def)) =
678                    self.allocated.shape().ty
679                {
680                    let element_layout = array_def.t.layout.sized_layout().ok();
681                    if let Some(layout) = element_layout {
682                        for idx in 0..array_def.n {
683                            if iset.get(idx) {
684                                let offset = layout.size() * idx;
685                                let element_ptr = unsafe { self.data.field_init(offset) };
686                                unsafe { array_def.t.call_drop_in_place(element_ptr) };
687                            }
688                        }
689                    }
690                }
691            }
692            Tracker::Struct { iset, .. } => {
693                // Drop initialized struct fields
694                if let Type::User(UserType::Struct(struct_type)) = self.allocated.shape().ty {
695                    if iset.all_set(struct_type.fields.len()) {
696                        unsafe {
697                            self.allocated
698                                .shape()
699                                .call_drop_in_place(self.data.assume_init())
700                        };
701                    } else {
702                        for (idx, field) in struct_type.fields.iter().enumerate() {
703                            if iset.get(idx) {
704                                // This field was initialized, drop it
705                                let field_ptr = unsafe { self.data.field_init(field.offset) };
706                                unsafe { field.shape().call_drop_in_place(field_ptr) };
707                            }
708                        }
709                    }
710                }
711            }
712            Tracker::Enum { variant, data, .. } => {
713                // Drop initialized enum variant fields
714                for (idx, field) in variant.data.fields.iter().enumerate() {
715                    if data.get(idx) {
716                        // This field was initialized, drop it
717                        let field_ptr = unsafe { self.data.field_init(field.offset) };
718                        unsafe { field.shape().call_drop_in_place(field_ptr) };
719                    }
720                }
721            }
722            Tracker::SmartPointer { pending_inner, .. } => {
723                // If there's a pending inner value, drop it
724                if let Some(inner_ptr) = pending_inner
725                    && let Def::Pointer(ptr_def) = self.allocated.shape().def
726                    && let Some(inner_shape) = ptr_def.pointee
727                {
728                    unsafe {
729                        inner_shape.call_drop_in_place(PtrMut::new(inner_ptr.as_mut_byte_ptr()))
730                    };
731                }
732                // Drop the initialized SmartPointer
733                if self.is_init {
734                    unsafe {
735                        self.allocated
736                            .shape()
737                            .call_drop_in_place(self.data.assume_init())
738                    };
739                }
740            }
741            Tracker::SmartPointerSlice { vtable, .. } => {
742                // Free the slice builder
743                let builder_ptr = unsafe { self.data.assume_init() };
744                unsafe {
745                    (vtable.free_fn)(builder_ptr);
746                }
747            }
748            Tracker::List { rope, .. } => {
749                // Drain any rope elements first. `is_init` only indicates that the Vec
750                // has been allocated (via `init_in_place_with_capacity`); elements pushed
751                // via `begin_list_item` live in the rope until `drain_rope_into_vec` moves
752                // them into the Vec. A successful drain leaves `rope = None` (via `.take()`),
753                // so if we see `rope = Some(..)` here the elements inside were never moved
754                // into the Vec and they're still owned by the rope. Drop them now.
755                if let Some(mut rope) = rope.take()
756                    && let Def::List(list_def) = self.allocated.shape().def
757                {
758                    let element_shape = list_def.t;
759                    unsafe {
760                        rope.drain_into(|ptr| {
761                            element_shape.call_drop_in_place(PtrMut::new(ptr.as_ptr()));
762                        });
763                    }
764                }
765
766                // Now drop the Vec (and whatever elements it already owns).
767                if self.is_init {
768                    unsafe {
769                        self.allocated
770                            .shape()
771                            .call_drop_in_place(self.data.assume_init())
772                    };
773                }
774            }
775            Tracker::Map {
776                insert_state,
777                pending_entries,
778                ..
779            } => {
780                // Drop the initialized Map
781                if self.is_init {
782                    unsafe {
783                        self.allocated
784                            .shape()
785                            .call_drop_in_place(self.data.assume_init())
786                    };
787                }
788
789                // Clean up pending entries (key-value pairs that haven't been inserted yet)
790                if let Def::Map(map_def) = self.allocated.shape().def {
791                    for (key_ptr, value_ptr) in pending_entries.drain(..) {
792                        // Drop and deallocate key
793                        unsafe { map_def.k().call_drop_in_place(key_ptr.assume_init()) };
794                        if let Ok(key_layout) = map_def.k().layout.sized_layout()
795                            && key_layout.size() > 0
796                        {
797                            unsafe { alloc::alloc::dealloc(key_ptr.as_mut_byte_ptr(), key_layout) };
798                        }
799                        // Drop and deallocate value
800                        unsafe { map_def.v().call_drop_in_place(value_ptr.assume_init()) };
801                        if let Ok(value_layout) = map_def.v().layout.sized_layout()
802                            && value_layout.size() > 0
803                        {
804                            unsafe {
805                                alloc::alloc::dealloc(value_ptr.as_mut_byte_ptr(), value_layout)
806                            };
807                        }
808                    }
809                }
810
811                // Clean up key/value buffers based on whether their TrackedBuffer frames
812                // are still on the stack. If a frame is on the stack, it handles cleanup.
813                // If a frame was already popped (via end()), we own the buffer and must clean it.
814                match insert_state {
815                    MapInsertState::PushingKey {
816                        key_ptr,
817                        key_initialized,
818                        key_frame_on_stack,
819                    } => {
820                        // Only clean up if the frame was already popped.
821                        // If key_frame_on_stack is true, the TrackedBuffer frame above us
822                        // will handle dropping and deallocating the key buffer.
823                        if !*key_frame_on_stack
824                            && let Def::Map(map_def) = self.allocated.shape().def
825                        {
826                            // Drop the key if it was initialized
827                            if *key_initialized {
828                                unsafe { map_def.k().call_drop_in_place(key_ptr.assume_init()) };
829                            }
830                            // Deallocate the key buffer
831                            if let Ok(key_layout) = map_def.k().layout.sized_layout()
832                                && key_layout.size() > 0
833                            {
834                                unsafe {
835                                    alloc::alloc::dealloc(key_ptr.as_mut_byte_ptr(), key_layout)
836                                };
837                            }
838                        }
839                    }
840                    MapInsertState::PushingValue {
841                        key_ptr,
842                        value_ptr,
843                        value_initialized,
844                        value_frame_on_stack,
845                        key_frame_stored,
846                    } => {
847                        if let Def::Map(map_def) = self.allocated.shape().def {
848                            // Only clean up key if the key frame was NOT stored.
849                            // If key_frame_stored is true, the stored frame handles cleanup.
850                            if !*key_frame_stored {
851                                unsafe { map_def.k().call_drop_in_place(key_ptr.assume_init()) };
852                                if let Ok(key_layout) = map_def.k().layout.sized_layout()
853                                    && key_layout.size() > 0
854                                {
855                                    unsafe {
856                                        alloc::alloc::dealloc(key_ptr.as_mut_byte_ptr(), key_layout)
857                                    };
858                                }
859                            }
860
861                            // Only clean up value if the frame was already popped.
862                            // If value_frame_on_stack is true, the TrackedBuffer frame above us
863                            // will handle dropping and deallocating the value buffer.
864                            if !*value_frame_on_stack && let Some(value_ptr) = value_ptr {
865                                // Drop the value if it was initialized
866                                if *value_initialized {
867                                    unsafe {
868                                        map_def.v().call_drop_in_place(value_ptr.assume_init())
869                                    };
870                                }
871                                // Deallocate the value buffer
872                                if let Ok(value_layout) = map_def.v().layout.sized_layout()
873                                    && value_layout.size() > 0
874                                {
875                                    unsafe {
876                                        alloc::alloc::dealloc(
877                                            value_ptr.as_mut_byte_ptr(),
878                                            value_layout,
879                                        )
880                                    };
881                                }
882                            }
883                        }
884                    }
885                    MapInsertState::Idle => {}
886                }
887            }
888            Tracker::Set { .. } => {
889                // Drop the initialized Set
890                if self.is_init {
891                    unsafe {
892                        self.allocated
893                            .shape()
894                            .call_drop_in_place(self.data.assume_init())
895                    };
896                }
897            }
898            Tracker::Option {
899                building_inner,
900                pending_inner,
901            } => {
902                // Clean up pending inner value if it was never finalized
903                let had_pending = pending_inner.is_some();
904                if let Some(inner_ptr) = pending_inner.take()
905                    && let Def::Option(option_def) = self.allocated.shape().def
906                {
907                    // Drop the inner value
908                    unsafe { option_def.t.call_drop_in_place(inner_ptr.assume_init()) };
909                    // Deallocate the inner buffer
910                    if let Ok(layout) = option_def.t.layout.sized_layout()
911                        && layout.size() > 0
912                    {
913                        unsafe { alloc::alloc::dealloc(inner_ptr.as_mut_byte_ptr(), layout) };
914                    }
915                }
916                // If we're building the inner value, it will be handled by the Option vtable
917                // No special cleanup needed here as the Option will either be properly
918                // initialized or remain uninitialized
919                if !*building_inner && !had_pending {
920                    // Option is fully initialized (no pending), drop it normally
921                    unsafe {
922                        self.allocated
923                            .shape()
924                            .call_drop_in_place(self.data.assume_init())
925                    };
926                }
927            }
928            Tracker::Result { building_inner, .. } => {
929                // If we're building the inner value, it will be handled by the Result vtable
930                // No special cleanup needed here as the Result will either be properly
931                // initialized or remain uninitialized
932                if !*building_inner {
933                    // Result is fully initialized, drop it normally
934                    unsafe {
935                        self.allocated
936                            .shape()
937                            .call_drop_in_place(self.data.assume_init())
938                    };
939                }
940            }
941            Tracker::DynamicValue { state } => {
942                // Clean up pending_entries if this is an Object
943                if let DynamicValueState::Object {
944                    pending_entries, ..
945                } = state
946                {
947                    // Drop and deallocate any pending values that weren't inserted
948                    if let Def::DynamicValue(dyn_def) = self.allocated.shape().def {
949                        let value_shape = self.allocated.shape(); // Value entries are same shape
950                        for (_key, value_ptr) in pending_entries.drain(..) {
951                            // Drop the value
952                            unsafe {
953                                value_shape.call_drop_in_place(value_ptr.assume_init());
954                            }
955                            // Deallocate the value buffer
956                            if let Ok(layout) = value_shape.layout.sized_layout()
957                                && layout.size() > 0
958                            {
959                                unsafe {
960                                    alloc::alloc::dealloc(value_ptr.as_mut_byte_ptr(), layout);
961                                }
962                            }
963                        }
964                        // Note: keys are Strings and will be dropped when pending_entries is dropped
965                        let _ = dyn_def; // silence unused warning
966                    }
967                }
968
969                // Clean up pending_elements if this is an Array
970                if let DynamicValueState::Array {
971                    pending_elements, ..
972                } = state
973                {
974                    // Drop and deallocate any pending elements that weren't inserted
975                    let element_shape = self.allocated.shape(); // Array elements are same shape
976                    for element_ptr in pending_elements.drain(..) {
977                        // Drop the element
978                        unsafe {
979                            element_shape.call_drop_in_place(element_ptr.assume_init());
980                        }
981                        // Deallocate the element buffer
982                        if let Ok(layout) = element_shape.layout.sized_layout()
983                            && layout.size() > 0
984                        {
985                            unsafe {
986                                alloc::alloc::dealloc(element_ptr.as_mut_byte_ptr(), layout);
987                            }
988                        }
989                    }
990                }
991
992                // Drop if initialized
993                if self.is_init {
994                    let result = unsafe {
995                        self.allocated
996                            .shape()
997                            .call_drop_in_place(self.data.assume_init())
998                    };
999                    if result.is_none() {
1000                        // This would be a bug - DynamicValue should always have drop_in_place
1001                        panic!(
1002                            "DynamicValue type {} has no drop_in_place implementation",
1003                            self.allocated.shape()
1004                        );
1005                    }
1006                }
1007            }
1008            Tracker::Inner { .. } => {
1009                // Inner wrapper - drop if initialized
1010                if self.is_init {
1011                    unsafe {
1012                        self.allocated
1013                            .shape()
1014                            .call_drop_in_place(self.data.assume_init())
1015                    };
1016                }
1017            }
1018        }
1019
1020        self.is_init = false;
1021        self.tracker = Tracker::Scalar;
1022    }
1023
    /// Deinitialize any initialized value for REPLACEMENT purposes.
    ///
    /// Unlike `deinit()` which is used during error cleanup, this method is used when
    /// we're about to overwrite a value with a new one (e.g., in `set_shape`).
    ///
    /// The difference is important for Field frames with simple trackers:
    /// - During cleanup: parent struct will drop all initialized fields, so Field frames skip dropping
    /// - During replacement: we're about to overwrite, so we MUST drop the old value
    ///
    /// For BorrowedInPlace frames: same logic applies - we must drop when replacing.
    fn deinit_for_replace(&mut self) {
        // For BorrowedInPlace frames, deinit() skips dropping (parent owns on cleanup).
        // But when REPLACING a value, we must drop the old value first.
        if matches!(self.ownership, FrameOwnership::BorrowedInPlace) && self.is_init {
            // SAFETY: is_init is true, so self.data holds a live value of self's shape.
            unsafe {
                self.allocated
                    .shape()
                    .call_drop_in_place(self.data.assume_init());
            }

            // CRITICAL: For DynamicValue (e.g., facet_value::Value), the parent Object's
            // HashMap entry still points to this location. If we just drop and leave garbage,
            // the parent will try to drop that garbage when it's cleaned up, causing
            // use-after-free. We must reinitialize to a safe default (Null) so the parent
            // can safely drop it later.
            if let Def::DynamicValue(dyn_def) = &self.allocated.shape().def {
                // SAFETY: set_null writes a fully-initialized Null value into self.data,
                // which we just dropped above, so the slot is free to reinitialize.
                unsafe {
                    (dyn_def.vtable.set_null)(self.data);
                }
                // Keep is_init = true since we just initialized it to Null
                self.tracker = Tracker::DynamicValue {
                    state: DynamicValueState::Scalar,
                };
                return;
            }

            // Non-DynamicValue borrowed slot: value dropped, slot is now uninitialized.
            self.is_init = false;
            self.tracker = Tracker::Scalar;
            return;
        }

        // Field frames handle their own cleanup in deinit() - no special handling needed here.

        // All other cases: use normal deinit
        self.deinit();
    }
1070
    /// This must be called after (fully) initializing a value.
    ///
    /// This sets `is_init` to `true` to indicate the value is initialized.
    /// Composite types (structs, enums, etc.) might be handled differently.
    ///
    /// # Safety
    ///
    /// This should only be called when `self.data` has been actually initialized.
    /// Once set, `deinit()` will run the shape's drop glue on `self.data`, so a
    /// premature call here leads to dropping uninitialized memory.
    const unsafe fn mark_as_init(&mut self) {
        // Record that self.data now holds a live value; cleanup paths key off this flag.
        self.is_init = true;
    }
1082
1083    /// Deallocate the memory associated with this frame, if it owns it.
1084    ///
1085    /// The memory has to be deinitialized first, see [Frame::deinit]
1086    fn dealloc(self) {
1087        // Only deallocate if this frame owns its memory
1088        if !self.ownership.needs_dealloc() {
1089            return;
1090        }
1091
1092        // If we need to deallocate, the frame must be deinitialized first
1093        if self.is_init {
1094            unreachable!("a frame has to be deinitialized before being deallocated")
1095        }
1096
1097        // Deallocate using the actual allocated size (not derived from shape)
1098        if self.allocated.allocated_size() > 0 {
1099            // Use the shape for alignment, but the stored size for the actual allocation
1100            if let Ok(layout) = self.allocated.shape().layout.sized_layout() {
1101                let actual_layout = core::alloc::Layout::from_size_align(
1102                    self.allocated.allocated_size(),
1103                    layout.align(),
1104                )
1105                .expect("allocated_size must be valid");
1106                unsafe { alloc::alloc::dealloc(self.data.as_mut_byte_ptr(), actual_layout) };
1107            }
1108        }
1109    }
1110
    /// Fill in defaults for any unset fields that have default values.
    ///
    /// This handles:
    /// - Container-level defaults (when no fields set and struct has Default impl)
    /// - Fields with `#[facet(default = ...)]` - uses the explicit default function
    /// - Fields with `#[facet(default)]` - uses the type's Default impl
    /// - `Option<T>` fields - default to None
    ///
    /// Returns Ok(()) if successful, or an error if a field has `#[facet(default)]`
    /// but no default implementation is available.
    fn fill_defaults(&mut self) -> Result<(), ReflectErrorKind> {
        // First, check if we need to upgrade from Scalar to Struct tracker
        // This happens when no fields were visited at all in deferred mode
        if !self.is_init
            && matches!(self.tracker, Tracker::Scalar)
            && let Type::User(UserType::Struct(struct_type)) = self.allocated.shape().ty
        {
            // If no fields were visited and the container has a default, use it
            // SAFETY: We're about to initialize the entire struct with its default value
            if unsafe { self.allocated.shape().call_default_in_place(self.data) }.is_some() {
                self.is_init = true;
                return Ok(());
            }
            // Otherwise initialize the struct tracker with empty iset
            // so the per-field default loop below can run over it.
            self.tracker = Tracker::Struct {
                iset: ISet::new(struct_type.fields.len()),
                current_child: None,
            };
        }

        // Handle Option types with Scalar tracker - default to None
        // This happens in deferred mode when an Option field was never touched
        if !self.is_init
            && matches!(self.tracker, Tracker::Scalar)
            && matches!(self.allocated.shape().def, Def::Option(_))
        {
            // SAFETY: Option<T> always implements Default (as None)
            if unsafe { self.allocated.shape().call_default_in_place(self.data) }.is_some() {
                self.is_init = true;
                return Ok(());
            }
        }

        match &mut self.tracker {
            Tracker::Struct { iset, .. } => {
                if let Type::User(UserType::Struct(struct_type)) = self.allocated.shape().ty {
                    // Fast path: if ALL fields are set, nothing to do
                    if iset.all_set(struct_type.fields.len()) {
                        return Ok(());
                    }

                    // Check if NO fields have been set and the container has a default
                    let no_fields_set = (0..struct_type.fields.len()).all(|i| !iset.get(i));
                    if no_fields_set {
                        // SAFETY: We're about to initialize the entire struct with its default value
                        if unsafe { self.allocated.shape().call_default_in_place(self.data) }
                            .is_some()
                        {
                            // Whole struct is now one initialized value, so the
                            // per-field tracking is no longer meaningful.
                            self.tracker = Tracker::Scalar;
                            self.is_init = true;
                            return Ok(());
                        }
                    }

                    // Check if the container has #[facet(default)] attribute
                    let container_has_default = self.allocated.shape().has_default_attr();

                    // Fill defaults for individual fields
                    for (idx, field) in struct_type.fields.iter().enumerate() {
                        // Skip already-initialized fields
                        if iset.get(idx) {
                            continue;
                        }

                        // Calculate field pointer
                        let field_ptr = unsafe { self.data.field_uninit(field.offset) };

                        // Try to initialize with default
                        if unsafe {
                            Self::try_init_field_default(field, field_ptr, container_has_default)
                        } {
                            // Mark field as initialized
                            iset.set(idx);
                        } else if field.has_default() {
                            // Field has #[facet(default)] but we couldn't find a default function.
                            // This happens with opaque types that don't have default_in_place.
                            return Err(ReflectErrorKind::DefaultAttrButNoDefaultImpl {
                                shape: field.shape(),
                            });
                        }
                    }
                }
            }
            Tracker::Enum { variant, data, .. } => {
                // Fast path: if ALL fields are set, nothing to do
                let num_fields = variant.data.fields.len();
                if num_fields == 0 || data.all_set(num_fields) {
                    return Ok(());
                }

                // Check if the container has #[facet(default)] attribute
                let container_has_default = self.allocated.shape().has_default_attr();

                // Handle enum variant fields
                for (idx, field) in variant.data.fields.iter().enumerate() {
                    // Skip already-initialized fields
                    if data.get(idx) {
                        continue;
                    }

                    // Calculate field pointer within the variant data
                    let field_ptr = unsafe { self.data.field_uninit(field.offset) };

                    // Try to initialize with default
                    if unsafe {
                        Self::try_init_field_default(field, field_ptr, container_has_default)
                    } {
                        // Mark field as initialized
                        data.set(idx);
                    } else if field.has_default() {
                        // Field has #[facet(default)] but we couldn't find a default function.
                        return Err(ReflectErrorKind::DefaultAttrButNoDefaultImpl {
                            shape: field.shape(),
                        });
                    }
                }
            }
            // Other tracker types don't have fields with defaults
            _ => {}
        }
        Ok(())
    }
1243
1244    /// Initialize a field with its default value if one is available.
1245    ///
1246    /// Priority:
1247    /// 1. Explicit field-level default_fn (from `#[facet(default = ...)]`)
1248    /// 2. Type-level default_in_place (from Default impl, including `Option<T>`)
1249    ///    but only if the field has the DEFAULT flag
1250    /// 3. Container-level default: if the container has `#[facet(default)]` and
1251    ///    the field's type implements Default, use that
1252    /// 4. Special cases: `Option<T>` (defaults to None), () (unit type)
1253    ///
1254    /// Returns true if a default was applied, false otherwise.
1255    ///
1256    /// # Safety
1257    ///
1258    /// `field_ptr` must point to uninitialized memory of the appropriate type.
1259    unsafe fn try_init_field_default(
1260        field: &Field,
1261        field_ptr: PtrUninit,
1262        container_has_default: bool,
1263    ) -> bool {
1264        use facet_core::DefaultSource;
1265
1266        // First check for explicit field-level default
1267        if let Some(default_source) = field.default {
1268            match default_source {
1269                DefaultSource::Custom(default_fn) => {
1270                    // Custom default function - it expects PtrUninit
1271                    unsafe { default_fn(field_ptr) };
1272                    return true;
1273                }
1274                DefaultSource::FromTrait => {
1275                    // Use the type's Default trait
1276                    if unsafe { field.shape().call_default_in_place(field_ptr) }.is_some() {
1277                        return true;
1278                    }
1279                }
1280            }
1281        }
1282
1283        // If container has #[facet(default)] and the field's type implements Default,
1284        // use the type's Default impl. This allows `#[facet(default)]` on a struct to
1285        // mean "use Default for any missing fields whose types implement Default".
1286        if container_has_default
1287            && unsafe { field.shape().call_default_in_place(field_ptr) }.is_some()
1288        {
1289            return true;
1290        }
1291
1292        // Special case: Option<T> always defaults to None, even without explicit #[facet(default)]
1293        // This is because Option is fundamentally "optional" - if not set, it should be None
1294        if matches!(field.shape().def, Def::Option(_))
1295            && unsafe { field.shape().call_default_in_place(field_ptr) }.is_some()
1296        {
1297            return true;
1298        }
1299
1300        // Special case: () unit type always defaults to ()
1301        if field.shape().is_type::<()>()
1302            && unsafe { field.shape().call_default_in_place(field_ptr) }.is_some()
1303        {
1304            return true;
1305        }
1306
1307        // Special case: Collection types (Vec, HashMap, HashSet, etc.) default to empty
1308        // These types have obvious "zero values" and it's almost always what you want
1309        // when deserializing data where the collection is simply absent.
1310        if matches!(field.shape().def, Def::List(_) | Def::Map(_) | Def::Set(_))
1311            && unsafe { field.shape().call_default_in_place(field_ptr) }.is_some()
1312        {
1313            return true;
1314        }
1315
1316        false
1317    }
1318
    /// Drain all initialized elements from the rope into the Vec.
    ///
    /// This is called when finalizing a list that used rope storage. Elements were
    /// built in stable rope chunks to allow deferred processing; now we move them
    /// into the actual Vec.
    ///
    /// # Safety
    ///
    /// The rope must contain only initialized elements (via `mark_last_initialized`).
    /// The list_data must point to an initialized Vec with capacity for the elements.
    fn drain_rope_into_vec(
        mut rope: ListRope,
        list_def: &facet_core::ListDef,
        list_data: PtrUninit,
    ) -> Result<(), ReflectErrorKind> {
        // Nothing to move: an empty rope drops harmlessly on return.
        let count = rope.initialized_count();
        if count == 0 {
            return Ok(());
        }

        // Without a push vtable entry we have no way to move elements into the list.
        let push_fn = list_def
            .push()
            .ok_or_else(|| ReflectErrorKind::OperationFailed {
                shape: list_def.t(),
                operation: "List missing push function for rope drain",
            })?;

        // SAFETY: list_data points to initialized Vec (is_init was true)
        let list_ptr = unsafe { list_data.assume_init() };

        // Reserve space if available (optimization, not required) — a single
        // reservation avoids repeated reallocation during the per-element pushes.
        if let Some(reserve_fn) = list_def.reserve() {
            unsafe {
                reserve_fn(list_ptr, count);
            }
        }

        // Move each element from rope to Vec. The push vtable function takes
        // ownership of each element, so the rope must not drop them afterwards.
        // SAFETY: rope contains `count` initialized elements
        unsafe {
            rope.drain_into(|element_ptr| {
                push_fn(
                    facet_core::PtrMut::new(list_ptr.as_mut_byte_ptr()),
                    facet_core::PtrMut::new(element_ptr.as_ptr()),
                );
            });
        }

        Ok(())
    }
1369
1370    /// Insert all pending key-value entries into the map.
1371    ///
1372    /// This is called when finalizing a map that used delayed insertion. Entries were
1373    /// kept in pending_entries to allow deferred processing; now we insert them into
1374    /// the actual map and deallocate the temporary buffers.
1375    fn drain_pending_into_map(
1376        pending_entries: &mut Vec<(PtrUninit, PtrUninit)>,
1377        map_def: &facet_core::MapDef,
1378        map_data: PtrUninit,
1379    ) -> Result<(), ReflectErrorKind> {
1380        let insert_fn = map_def.vtable.insert;
1381
1382        // SAFETY: map_data points to initialized map (is_init was true)
1383        let map_ptr = unsafe { map_data.assume_init() };
1384
1385        for (key_ptr, value_ptr) in pending_entries.drain(..) {
1386            // Insert the key-value pair
1387            unsafe {
1388                insert_fn(
1389                    facet_core::PtrMut::new(map_ptr.as_mut_byte_ptr()),
1390                    facet_core::PtrMut::new(key_ptr.as_mut_byte_ptr()),
1391                    facet_core::PtrMut::new(value_ptr.as_mut_byte_ptr()),
1392                );
1393            }
1394
1395            // Deallocate the temporary buffers (insert moved the data)
1396            if let Ok(key_layout) = map_def.k().layout.sized_layout()
1397                && key_layout.size() > 0
1398            {
1399                unsafe { alloc::alloc::dealloc(key_ptr.as_mut_byte_ptr(), key_layout) };
1400            }
1401            if let Ok(value_layout) = map_def.v().layout.sized_layout()
1402                && value_layout.size() > 0
1403            {
1404                unsafe { alloc::alloc::dealloc(value_ptr.as_mut_byte_ptr(), value_layout) };
1405            }
1406        }
1407
1408        Ok(())
1409    }
1410
    /// Complete an Option by moving the pending inner value into it.
    ///
    /// This is called when finalizing an Option that used deferred init_some.
    /// The inner value was kept in stable memory for deferred processing;
    /// now we move it into the Option and deallocate the temporary buffer.
    ///
    /// # Arguments
    /// * `option_def` - Definition of the Option type (vtable plus inner shape `t`)
    /// * `option_data` - Destination memory where the Option itself is written
    /// * `inner_ptr` - Temporary buffer holding the initialized inner value
    fn complete_pending_option(
        option_def: facet_core::OptionDef,
        option_data: PtrUninit,
        inner_ptr: PtrUninit,
    ) -> Result<(), ReflectErrorKind> {
        let init_some_fn = option_def.vtable.init_some;
        let inner_shape = option_def.t;

        // The inner_ptr contains the initialized inner value
        // SAFETY: per the doc comment above, the inner value was fully built
        // into this buffer before this function is called.
        let inner_value_ptr = unsafe { inner_ptr.assume_init() };

        // Initialize the Option as Some(inner_value)
        // SAFETY: option_data is writable storage for the Option, and
        // inner_value_ptr points to an initialized value of inner_shape;
        // init_some_fn moves the value out of the buffer.
        unsafe {
            init_some_fn(option_data, inner_value_ptr);
        }

        // Deallocate the inner value's memory since init_some_fn moved it.
        // Skip unsized or zero-sized layouts: nothing was heap-allocated for those.
        if let Ok(layout) = inner_shape.layout.sized_layout()
            && layout.size() > 0
        {
            unsafe { alloc::alloc::dealloc(inner_ptr.as_mut_byte_ptr(), layout) };
        }

        Ok(())
    }
1441
    /// Complete a smart pointer by moving the pending inner value into it.
    ///
    /// Two cases are handled:
    /// 1. Sized pointee: the pointer vtable's `new_into_fn` moves the inner
    ///    value out of the temporary buffer into the smart pointer.
    /// 2. Unsized `str` pointee (e.g. `Box<str>`/`Arc<str>`/`Rc<str>`): the
    ///    temporary buffer is read as a `String` and converted.
    ///
    /// In both cases the temporary buffer behind `inner_ptr` is deallocated
    /// after its contents have been moved out. Returns `OperationFailed` when
    /// the pointer definition lacks the information needed for either path.
    fn complete_pending_smart_pointer(
        smart_ptr_shape: &'static Shape,
        smart_ptr_def: facet_core::PointerDef,
        smart_ptr_data: PtrUninit,
        inner_ptr: PtrUninit,
    ) -> Result<(), ReflectErrorKind> {
        // Check for sized pointee case first (uses new_into_fn)
        if let Some(new_into_fn) = smart_ptr_def.vtable.new_into_fn {
            let Some(inner_shape) = smart_ptr_def.pointee else {
                return Err(ReflectErrorKind::OperationFailed {
                    shape: smart_ptr_shape,
                    operation: "SmartPointer missing pointee shape",
                });
            };

            // The inner_ptr contains the initialized inner value
            // (result intentionally discarded; new_into_fn below takes the
            // raw byte pointer directly)
            let _ = unsafe { inner_ptr.assume_init() };

            // Initialize the SmartPointer with the inner value
            // SAFETY: inner_ptr holds an initialized value of inner_shape and
            // smart_ptr_data is writable storage for the smart pointer.
            unsafe {
                new_into_fn(smart_ptr_data, PtrMut::new(inner_ptr.as_mut_byte_ptr()));
            }

            // Deallocate the inner value's memory since new_into_fn moved it
            if let Ok(layout) = inner_shape.layout.sized_layout()
                && layout.size() > 0
            {
                unsafe { alloc::alloc::dealloc(inner_ptr.as_mut_byte_ptr(), layout) };
            }

            return Ok(());
        }

        // Check for unsized pointee case: String -> Arc<str>/Box<str>/Rc<str>
        if let Some(pointee) = smart_ptr_def.pointee()
            && pointee.is_shape(str::SHAPE)
        {
            use alloc::{rc::Rc, string::String, sync::Arc};
            use facet_core::KnownPointer;

            let Some(known) = smart_ptr_def.known else {
                return Err(ReflectErrorKind::OperationFailed {
                    shape: smart_ptr_shape,
                    operation: "SmartPointer<str> missing known pointer type",
                });
            };

            // Read the String value from inner_ptr
            // NOTE(review): assumes the temporary buffer was populated with a
            // `String` whenever the pointee is `str` — confirm against the
            // code that fills pending_inner for these shapes.
            let string_ptr = inner_ptr.as_mut_byte_ptr() as *mut String;
            let string_value = unsafe { core::ptr::read(string_ptr) };

            // Convert to the appropriate smart pointer type
            match known {
                KnownPointer::Box => {
                    let boxed: alloc::boxed::Box<str> = string_value.into_boxed_str();
                    unsafe {
                        core::ptr::write(
                            smart_ptr_data.as_mut_byte_ptr() as *mut alloc::boxed::Box<str>,
                            boxed,
                        );
                    }
                }
                KnownPointer::Arc => {
                    let arc: Arc<str> = Arc::from(string_value.into_boxed_str());
                    unsafe {
                        core::ptr::write(smart_ptr_data.as_mut_byte_ptr() as *mut Arc<str>, arc);
                    }
                }
                KnownPointer::Rc => {
                    let rc: Rc<str> = Rc::from(string_value.into_boxed_str());
                    unsafe {
                        core::ptr::write(smart_ptr_data.as_mut_byte_ptr() as *mut Rc<str>, rc);
                    }
                }
                _ => {
                    return Err(ReflectErrorKind::OperationFailed {
                        shape: smart_ptr_shape,
                        operation: "Unsupported SmartPointer<str> type",
                    });
                }
            }

            // Deallocate the String's memory (we moved the data out via ptr::read)
            // String is a sized type, so sized_layout() cannot fail here.
            let string_layout = alloc::string::String::SHAPE.layout.sized_layout().unwrap();
            if string_layout.size() > 0 {
                unsafe { alloc::alloc::dealloc(inner_ptr.as_mut_byte_ptr(), string_layout) };
            }

            return Ok(());
        }

        // Neither a new_into_fn nor a recognized unsized pointee: give up.
        Err(ReflectErrorKind::OperationFailed {
            shape: smart_ptr_shape,
            operation: "SmartPointer missing new_into_fn and not a supported unsized type",
        })
    }
1538
    /// Returns an error if the value is not fully initialized.
    /// For lists with rope storage, drains the rope into the Vec.
    /// For maps with pending entries, drains the entries into the map.
    /// For options with pending inner values, calls init_some.
    ///
    /// Despite the name, this is not a pure check: several arms perform the
    /// final "flush" of deferred state (rope, pending map entries, pending
    /// Option/SmartPointer inner values, pending dynamic-value entries).
    fn require_full_initialization(&mut self) -> Result<(), ReflectErrorKind> {
        match &mut self.tracker {
            // Plain value: is_init is the single source of truth.
            Tracker::Scalar => {
                if self.is_init {
                    Ok(())
                } else {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::Array { iset, .. } => {
                match self.allocated.shape().ty {
                    Type::Sequence(facet_core::SequenceType::Array(array_def)) => {
                        // Check if all array elements are initialized
                        if (0..array_def.n).all(|idx| iset.get(idx)) {
                            Ok(())
                        } else {
                            Err(ReflectErrorKind::UninitializedValue {
                                shape: self.allocated.shape(),
                            })
                        }
                    }
                    // Array tracker on a non-array shape: treat as uninitialized.
                    _ => Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    }),
                }
            }
            Tracker::Struct { iset, .. } => {
                match self.allocated.shape().ty {
                    Type::User(UserType::Struct(struct_type)) => {
                        if iset.all_set(struct_type.fields.len()) {
                            Ok(())
                        } else {
                            // Find index of the first bit not set, so the error
                            // can name the missing field.
                            let first_missing_idx =
                                (0..struct_type.fields.len()).find(|&idx| !iset.get(idx));
                            if let Some(missing_idx) = first_missing_idx {
                                let field_name = struct_type.fields[missing_idx].name;
                                Err(ReflectErrorKind::UninitializedField {
                                    shape: self.allocated.shape(),
                                    field_name,
                                })
                            } else {
                                // fallback, something went wrong
                                Err(ReflectErrorKind::UninitializedValue {
                                    shape: self.allocated.shape(),
                                })
                            }
                        }
                    }
                    // Struct tracker on a non-struct shape: treat as uninitialized.
                    _ => Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    }),
                }
            }
            Tracker::Enum { variant, data, .. } => {
                // Check if all fields of the variant are initialized
                let num_fields = variant.data.fields.len();
                if num_fields == 0 {
                    // Unit variant, always initialized
                    Ok(())
                } else if (0..num_fields).all(|idx| data.get(idx)) {
                    Ok(())
                } else {
                    // Find the first uninitialized field so the error can name it
                    let first_missing_idx = (0..num_fields).find(|&idx| !data.get(idx));
                    if let Some(missing_idx) = first_missing_idx {
                        let field_name = variant.data.fields[missing_idx].name;
                        Err(ReflectErrorKind::UninitializedField {
                            shape: self.allocated.shape(),
                            field_name,
                        })
                    } else {
                        Err(ReflectErrorKind::UninitializedValue {
                            shape: self.allocated.shape(),
                        })
                    }
                }
            }
            Tracker::SmartPointer {
                building_inner,
                pending_inner,
            } => {
                if *building_inner {
                    // Inner value is still being built
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else if let Some(inner_ptr) = pending_inner.take() {
                    // Finalize the pending inner value: move it into the smart
                    // pointer and free its temporary buffer.
                    let smart_ptr_shape = self.allocated.shape();
                    if let Def::Pointer(smart_ptr_def) = smart_ptr_shape.def {
                        Self::complete_pending_smart_pointer(
                            smart_ptr_shape,
                            smart_ptr_def,
                            self.data,
                            inner_ptr,
                        )?;
                        self.is_init = true;
                        Ok(())
                    } else {
                        Err(ReflectErrorKind::OperationFailed {
                            shape: smart_ptr_shape,
                            operation: "SmartPointer frame without SmartPointer definition",
                        })
                    }
                } else if self.is_init {
                    Ok(())
                } else {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::SmartPointerSlice { building_item, .. } => {
                // Only incomplete while an item is actively being built.
                if *building_item {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else {
                    Ok(())
                }
            }
            Tracker::List {
                current_child,
                rope,
            } => {
                if self.is_init && current_child.is_none() {
                    // Drain rope into Vec if we have elements stored there
                    if let Some(rope) = rope.take()
                        && let Def::List(list_def) = self.allocated.shape().def
                    {
                        Self::drain_rope_into_vec(rope, &list_def, self.data)?;
                    }
                    Ok(())
                } else {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::Map {
                insert_state,
                pending_entries,
                ..
            } => {
                // Only complete when no key/value insert is mid-flight.
                if self.is_init && matches!(insert_state, MapInsertState::Idle) {
                    // Insert all pending entries into the map
                    if !pending_entries.is_empty()
                        && let Def::Map(map_def) = self.allocated.shape().def
                    {
                        Self::drain_pending_into_map(pending_entries, &map_def, self.data)?;
                    }
                    Ok(())
                } else {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::Set { current_child } => {
                // Complete only when initialized and no element is in progress.
                if self.is_init && !*current_child {
                    Ok(())
                } else {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::Option {
                building_inner,
                pending_inner,
            } => {
                if *building_inner {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else {
                    // Finalize pending init_some if we have a pending inner value
                    if let Some(inner_ptr) = pending_inner.take()
                        && let Def::Option(option_def) = self.allocated.shape().def
                    {
                        Self::complete_pending_option(option_def, self.data, inner_ptr)?;
                    }
                    Ok(())
                }
            }
            Tracker::Result { building_inner, .. } => {
                if *building_inner {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else {
                    Ok(())
                }
            }
            Tracker::Inner { building_inner } => {
                if *building_inner {
                    // Inner value is still being built
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else if self.is_init {
                    Ok(())
                } else {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::DynamicValue { state } => {
                if matches!(state, DynamicValueState::Uninit) {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else {
                    // Insert pending entries for Object state
                    if let DynamicValueState::Object {
                        pending_entries,
                        insert_state,
                    } = state
                    {
                        // A non-idle insert means a key or value is mid-build.
                        if !matches!(insert_state, DynamicObjectInsertState::Idle) {
                            return Err(ReflectErrorKind::UninitializedValue {
                                shape: self.allocated.shape(),
                            });
                        }

                        if !pending_entries.is_empty()
                            && let Def::DynamicValue(dyn_def) = self.allocated.shape().def
                        {
                            // SAFETY: state is not Uninit (checked above), so
                            // self.data holds an initialized dynamic value.
                            let object_ptr = unsafe { self.data.assume_init() };
                            let value_shape = self.allocated.shape();

                            for (key, value_ptr) in pending_entries.drain(..) {
                                // Insert the entry
                                unsafe {
                                    (dyn_def.vtable.insert_object_entry)(
                                        object_ptr,
                                        &key,
                                        value_ptr.assume_init(),
                                    );
                                }
                                // Deallocate the value buffer (insert_object_entry moved the value)
                                if let Ok(layout) = value_shape.layout.sized_layout()
                                    && layout.size() > 0
                                {
                                    unsafe {
                                        alloc::alloc::dealloc(value_ptr.as_mut_byte_ptr(), layout);
                                    }
                                }
                            }
                        }
                    }

                    // Insert pending elements for Array state
                    if let DynamicValueState::Array {
                        pending_elements,
                        building_element,
                    } = state
                    {
                        if *building_element {
                            return Err(ReflectErrorKind::UninitializedValue {
                                shape: self.allocated.shape(),
                            });
                        }

                        if !pending_elements.is_empty()
                            && let Def::DynamicValue(dyn_def) = self.allocated.shape().def
                        {
                            // SAFETY: state is not Uninit (checked above), so
                            // self.data holds an initialized dynamic value.
                            let array_ptr = unsafe { self.data.assume_init() };
                            let element_shape = self.allocated.shape();

                            for element_ptr in pending_elements.drain(..) {
                                // Push the element into the array
                                unsafe {
                                    (dyn_def.vtable.push_array_element)(
                                        array_ptr,
                                        element_ptr.assume_init(),
                                    );
                                }
                                // Deallocate the element buffer (push_array_element moved the value)
                                if let Ok(layout) = element_shape.layout.sized_layout()
                                    && layout.size() > 0
                                {
                                    unsafe {
                                        alloc::alloc::dealloc(
                                            element_ptr.as_mut_byte_ptr(),
                                            layout,
                                        );
                                    }
                                }
                            }
                        }
                    }

                    Ok(())
                }
            }
        }
    }
1845
    /// Fill defaults and check required fields in a single pass using precomputed plans.
    ///
    /// This replaces the separate `fill_defaults` + `require_full_initialization` calls
    /// with a single iteration over the precomputed `FieldInitPlan` list.
    ///
    /// # Arguments
    /// * `plans` - Precomputed field initialization plans from TypePlan
    /// * `num_fields` - Total number of fields (from StructPlan/VariantPlanMeta)
    /// * `type_plan_core` - Reference to the TypePlanCore for resolving validators
    ///
    /// # Returns
    /// `Ok(())` if all required fields are set (or filled with defaults), or an error
    /// describing the first missing required field.
    #[allow(unsafe_code)]
    fn fill_and_require_fields(
        &mut self,
        plans: &[FieldInitPlan],
        num_fields: usize,
        type_plan_core: &TypePlanCore,
    ) -> Result<(), ReflectErrorKind> {
        // With lazy tracker initialization, structs start with Tracker::Scalar.
        // If is_init is true with Scalar, the struct was set wholesale - nothing to do.
        // If is_init is false, we need to upgrade to Tracker::Struct to track fields.
        if !self.is_init
            && matches!(self.tracker, Tracker::Scalar)
            && matches!(self.allocated.shape().ty, Type::User(UserType::Struct(_)))
        {
            // Try container-level default first
            // SAFETY: self.data is uninitialized (is_init is false) and
            // call_default_in_place writes a complete value into it.
            if unsafe { self.allocated.shape().call_default_in_place(self.data) }.is_some() {
                self.is_init = true;
                return Ok(());
            }
            // Upgrade to Tracker::Struct for field-by-field tracking
            self.tracker = Tracker::Struct {
                iset: ISet::new(num_fields),
                current_child: None,
            };
        }

        // Get the iset based on tracker type
        let iset = match &mut self.tracker {
            Tracker::Struct { iset, .. } => iset,
            Tracker::Enum { data, .. } => data,
            // Scalar with is_init=true means struct was set wholesale - all fields initialized
            Tracker::Scalar if self.is_init => return Ok(()),
            // Other tracker types don't use field_init_plans
            _ => return Ok(()),
        };

        // Fast path: if all fields are already set, no defaults needed.
        // But validators still need to run.
        let all_fields_set = iset.all_set(num_fields);

        for plan in plans {
            if !all_fields_set && !iset.get(plan.index) {
                // Field not set - handle according to fill rule
                match &plan.fill_rule {
                    FillRule::Defaultable(default) => {
                        // Calculate field pointer
                        // SAFETY: plan.offset is the field's precomputed offset
                        // within this struct/variant (see doc comment).
                        let field_ptr = unsafe { self.data.field_uninit(plan.offset) };

                        // Call the appropriate default function
                        let success = match default {
                            FieldDefault::Custom(default_fn) => {
                                // SAFETY: default_fn writes to uninitialized memory
                                unsafe { default_fn(field_ptr) };
                                true
                            }
                            FieldDefault::FromTrait(shape) => {
                                // SAFETY: call_default_in_place writes to uninitialized memory
                                unsafe { shape.call_default_in_place(field_ptr) }.is_some()
                            }
                        };

                        if success {
                            iset.set(plan.index);
                        } else {
                            // Default construction failed: surface the field name.
                            return Err(ReflectErrorKind::UninitializedField {
                                shape: self.allocated.shape(),
                                field_name: plan.name,
                            });
                        }
                    }
                    FillRule::Required => {
                        return Err(ReflectErrorKind::UninitializedField {
                            shape: self.allocated.shape(),
                            field_name: plan.name,
                        });
                    }
                }
            }

            // Run validators on the (now initialized) field
            if !plan.validators.is_empty() {
                // SAFETY: the field was either set by the caller (iset bit set)
                // or filled with a default above, so it is initialized here.
                let field_ptr = unsafe { self.data.field_init(plan.offset) };
                for validator in type_plan_core.validators(plan.validators) {
                    validator.run(field_ptr.into(), plan.name, self.allocated.shape())?;
                }
            }
        }

        Ok(())
    }
1949
1950    /// Get the [EnumType] of the frame's shape, if it is an enum type
1951    pub(crate) const fn get_enum_type(&self) -> Result<EnumType, ReflectErrorKind> {
1952        match self.allocated.shape().ty {
1953            Type::User(UserType::Enum(e)) => Ok(e),
1954            _ => Err(ReflectErrorKind::WasNotA {
1955                expected: "enum",
1956                actual: self.allocated.shape(),
1957            }),
1958        }
1959    }
1960
1961    pub(crate) fn get_field(&self) -> Option<&Field> {
1962        match self.allocated.shape().ty {
1963            Type::User(user_type) => match user_type {
1964                UserType::Struct(struct_type) => {
1965                    // Try to get currently active field index
1966                    if let Tracker::Struct {
1967                        current_child: Some(idx),
1968                        ..
1969                    } = &self.tracker
1970                    {
1971                        struct_type.fields.get(*idx)
1972                    } else {
1973                        None
1974                    }
1975                }
1976                UserType::Enum(_enum_type) => {
1977                    if let Tracker::Enum {
1978                        variant,
1979                        current_child: Some(idx),
1980                        ..
1981                    } = &self.tracker
1982                    {
1983                        variant.data.fields.get(*idx)
1984                    } else {
1985                        None
1986                    }
1987                }
1988                _ => None,
1989            },
1990            _ => None,
1991        }
1992    }
1993}
1994
1995// Convenience methods on Partial for accessing FrameMode internals.
1996// These help minimize changes to the rest of the codebase during the refactor.
1997impl<'facet, const BORROW: bool> Partial<'facet, BORROW> {
1998    /// Get a reference to the frame stack.
1999    #[inline]
2000    pub(crate) const fn frames(&self) -> &Vec<Frame> {
2001        self.mode.stack()
2002    }
2003
2004    /// Get a mutable reference to the frame stack.
2005    #[inline]
2006    pub(crate) fn frames_mut(&mut self) -> &mut Vec<Frame> {
2007        self.mode.stack_mut()
2008    }
2009
2010    /// Check if we're in deferred mode.
2011    #[inline]
2012    pub const fn is_deferred(&self) -> bool {
2013        self.mode.is_deferred()
2014    }
2015
2016    /// Get the start depth if in deferred mode.
2017    #[inline]
2018    pub(crate) const fn start_depth(&self) -> Option<usize> {
2019        self.mode.start_depth()
2020    }
2021
2022    /// Derive the path from the current frame stack.
2023    ///
2024    /// Compute the navigation path for deferred mode storage and lookup.
2025    /// The returned `Path` is anchored to the root shape for proper type context.
2026    ///
2027    /// This extracts Field steps from struct/enum frames and Index steps from
2028    /// array/list frames. Option wrappers, smart pointers (Box, Rc, etc.), and
2029    /// other transparent types don't add path steps.
2030    ///
2031    /// This MUST match the storage path computation in end() for consistency.
2032    pub(crate) fn derive_path(&self) -> Path {
2033        // Get the root shape from the first frame
2034        let root_shape = self
2035            .frames()
2036            .first()
2037            .map(|f| f.allocated.shape())
2038            .unwrap_or_else(|| {
2039                // Fallback to unit type shape if no frames (shouldn't happen in practice)
2040                <() as facet_core::Facet>::SHAPE
2041            });
2042
2043        let mut path = Path::new(root_shape);
2044
2045        // Walk ALL frames, extracting navigation steps
2046        // This matches the storage path computation in end()
2047        let frames = self.frames();
2048        for (frame_idx, frame) in frames.iter().enumerate() {
2049            match &frame.tracker {
2050                Tracker::Struct {
2051                    current_child: Some(idx),
2052                    ..
2053                } => {
2054                    path.push(PathStep::Field(*idx as u32));
2055                }
2056                Tracker::Enum {
2057                    current_child: Some(idx),
2058                    ..
2059                } => {
2060                    path.push(PathStep::Field(*idx as u32));
2061                }
2062                Tracker::List {
2063                    current_child: Some(idx),
2064                    ..
2065                } => {
2066                    path.push(PathStep::Index(*idx as u32));
2067                }
2068                Tracker::Array {
2069                    current_child: Some(idx),
2070                    ..
2071                } => {
2072                    path.push(PathStep::Index(*idx as u32));
2073                }
2074                Tracker::Option {
2075                    building_inner: true,
2076                    ..
2077                } => {
2078                    // Option with building_inner contributes OptionSome to path
2079                    path.push(PathStep::OptionSome);
2080                }
2081                Tracker::SmartPointer {
2082                    building_inner: true,
2083                    ..
2084                } => {
2085                    // SmartPointer with building_inner contributes Deref to path
2086                    path.push(PathStep::Deref);
2087                }
2088                Tracker::SmartPointerSlice {
2089                    current_child: Some(idx),
2090                    ..
2091                } => {
2092                    // SmartPointerSlice with current_child contributes Index to path
2093                    path.push(PathStep::Index(*idx as u32));
2094                }
2095                Tracker::Inner {
2096                    building_inner: true,
2097                } => {
2098                    // Inner with building_inner contributes Inner to path
2099                    path.push(PathStep::Inner);
2100                }
2101                Tracker::Map {
2102                    current_entry_index: Some(idx),
2103                    building_key,
2104                    ..
2105                } => {
2106                    // Map with active entry contributes MapKey or MapValue with entry index
2107                    if *building_key {
2108                        path.push(PathStep::MapKey(*idx as u32));
2109                    } else {
2110                        path.push(PathStep::MapValue(*idx as u32));
2111                    }
2112                }
2113                // Other tracker types (Set, Result, etc.)
2114                // don't contribute to the storage path - they're transparent wrappers
2115                _ => {}
2116            }
2117
2118            // If the next frame is a proxy frame, add a Proxy step (matches end())
2119            if frame_idx + 1 < frames.len() && frames[frame_idx + 1].using_custom_deserialization {
2120                path.push(PathStep::Proxy);
2121            }
2122        }
2123
2124        path
2125    }
2126}
2127
impl<'facet, const BORROW: bool> Drop for Partial<'facet, BORROW> {
    // Tears down a partially-constructed value: first cleans up stored frames
    // from deferred mode (deepest-first), then unwinds the active frame stack.
    // Throughout, a child's "initialized" bit is cleared in its parent *before*
    // the child is deinitialized, so the parent's own deinit cannot drop the
    // same data a second time.
    fn drop(&mut self) {
        trace!("🧹 Partial is being dropped");

        // With the ownership transfer model:
        // - When we enter a field, parent's iset[idx] is cleared
        // - Parent won't try to drop fields with iset[idx] = false
        // - No double-free possible by construction

        // 1. Clean up stored frames from deferred state
        if let FrameMode::Deferred {
            stored_frames,
            stack,
            ..
        } = &mut self.mode
        {
            // Stored frames have ownership of their data (parent's iset was cleared).
            // IMPORTANT: Process in deepest-first order so children are dropped before parents.
            // Child frames have data pointers into parent memory, so parents must stay valid
            // until all their children are cleaned up.
            //
            // CRITICAL: Before dropping a child frame, we must mark the parent's field as
            // uninitialized. Otherwise, when we later drop the parent, it will try to drop
            // that field again, causing a double-free.
            let mut stored_frames = core::mem::take(stored_frames);
            let mut paths: Vec<_> = stored_frames.keys().cloned().collect();
            // Sort by path depth (number of steps), deepest first
            paths.sort_by_key(|p| core::cmp::Reverse(p.steps.len()));
            for path in paths {
                if let Some(mut frame) = stored_frames.remove(&path) {
                    // Before dropping this frame, update the parent to prevent double-free.
                    // The parent path is everything except the last step.
                    let parent_path = Path {
                        shape: path.shape,
                        steps: path.steps[..path.steps.len().saturating_sub(1)].to_vec(),
                    };

                    // Helper to find parent frame in stored_frames or stack.
                    // The parent either lives in `stored_frames` under the parent
                    // path, or is still on the active stack at depth equal to the
                    // number of parent-path steps. A raw pointer is returned so the
                    // caller can mutate the parent outside the closure's borrows.
                    let find_parent_frame =
                        |stored: &mut alloc::collections::BTreeMap<Path, Frame>,
                         stk: &mut [Frame],
                         pp: &Path|
                         -> Option<*mut Frame> {
                            if let Some(pf) = stored.get_mut(pp) {
                                Some(pf as *mut Frame)
                            } else {
                                let idx = pp.steps.len();
                                stk.get_mut(idx).map(|f| f as *mut Frame)
                            }
                        };

                    // Dispatch on the final path step: it determines which parent
                    // bookkeeping must be cleared before this frame is dropped.
                    match path.steps.last() {
                        Some(PathStep::Field(field_idx)) => {
                            let field_idx = *field_idx as usize;
                            if let Some(parent_ptr) =
                                find_parent_frame(&mut stored_frames, stack, &parent_path)
                            {
                                // SAFETY: parent_ptr is valid for the duration of this block
                                let parent_frame = unsafe { &mut *parent_ptr };
                                // Clear this field's "initialized" bit so the parent's
                                // deinit won't try to drop the same field again.
                                match &mut parent_frame.tracker {
                                    Tracker::Struct { iset, .. } => {
                                        iset.unset(field_idx);
                                    }
                                    Tracker::Enum { data, .. } => {
                                        data.unset(field_idx);
                                    }
                                    _ => {}
                                }
                            }
                        }
                        Some(PathStep::MapKey(entry_idx)) => {
                            // Map key frame - clear from parent's insert_state to prevent
                            // double-free. The key will be dropped by this frame's deinit.
                            let entry_idx = *entry_idx as usize;
                            if let Some(parent_ptr) =
                                find_parent_frame(&mut stored_frames, stack, &parent_path)
                            {
                                // SAFETY: parent_ptr is valid for the duration of this block;
                                // nothing else aliases the parent frame while we mutate it.
                                let parent_frame = unsafe { &mut *parent_ptr };
                                if let Tracker::Map {
                                    insert_state,
                                    pending_entries,
                                    ..
                                } = &mut parent_frame.tracker
                                {
                                    // If key is in insert_state, clear it
                                    if let MapInsertState::PushingKey {
                                        key_frame_on_stack, ..
                                    } = insert_state
                                    {
                                        *key_frame_on_stack = false;
                                    }
                                    // Also check if there's a pending entry with this key
                                    // that needs to have the key nullified
                                    if entry_idx < pending_entries.len() {
                                        // Remove this entry since we're handling cleanup here
                                        // The key will be dropped by this frame's deinit
                                        // The value frame will be handled separately
                                        // Mark the key as already-handled by setting to dangling
                                        // Actually, we'll clear the entire entry - the value
                                        // frame will be processed separately anyway
                                        // NOTE(review): this branch is currently a no-op — the
                                        // comments above describe clearing/removing the entry,
                                        // but no statement does so. Confirm whether the
                                        // MapValue arm below (which removes the entry) makes
                                        // this intentional, or whether cleanup is missing here.
                                    }
                                }
                            }
                        }
                        Some(PathStep::MapValue(entry_idx)) => {
                            // Map value frame - remove the entry from pending_entries.
                            // The value is dropped by this frame's deinit.
                            // The key is dropped by the MapKey frame's deinit (processed separately).
                            let entry_idx = *entry_idx as usize;
                            if let Some(parent_ptr) =
                                find_parent_frame(&mut stored_frames, stack, &parent_path)
                            {
                                // SAFETY: parent_ptr is valid for the duration of this block;
                                // nothing else aliases the parent frame while we mutate it.
                                let parent_frame = unsafe { &mut *parent_ptr };
                                if let Tracker::Map {
                                    pending_entries, ..
                                } = &mut parent_frame.tracker
                                {
                                    // Remove the entry at this index if it exists.
                                    // Don't drop key/value here - they're handled by their
                                    // respective stored frames (MapKey and MapValue).
                                    if entry_idx < pending_entries.len() {
                                        pending_entries.remove(entry_idx);
                                    }
                                }
                            }
                        }
                        Some(PathStep::Index(_)) => {
                            // List element frames with RopeSlot ownership are handled by
                            // the deinit check for RopeSlot - they skip dropping since the
                            // rope owns the data. No parent update needed.
                        }
                        _ => {}
                    }
                    // Parent bookkeeping is updated; now drop this frame's data and
                    // release its allocation.
                    frame.deinit();
                    frame.dealloc();
                }
            }
        }

        // 2. Pop and deinit stack frames
        // CRITICAL: Before deiniting a child frame, we must mark the parent's field as
        // uninitialized. Otherwise, the parent will try to drop the field again.
        loop {
            let stack = self.mode.stack_mut();
            if stack.is_empty() {
                break;
            }

            // Pop top-of-stack; after the pop, `stack.last_mut()` is its parent.
            let mut frame = stack.pop().unwrap();

            // If this frame has Field ownership, mark the parent's bit as unset
            // so the parent won't try to drop it again.
            if let FrameOwnership::Field { field_idx } = frame.ownership
                && let Some(parent_frame) = stack.last_mut()
            {
                match &mut parent_frame.tracker {
                    Tracker::Struct { iset, .. } => {
                        iset.unset(field_idx);
                    }
                    Tracker::Enum { data, .. } => {
                        data.unset(field_idx);
                    }
                    Tracker::Array { iset, .. } => {
                        iset.unset(field_idx);
                    }
                    _ => {}
                }
            }

            frame.deinit();
            frame.dealloc();
        }
    }
}
2302
#[cfg(test)]
mod size_tests {
    use super::*;
    use core::mem::size_of;

    /// Prints the size of every hot internal type to stderr.
    ///
    /// Purely diagnostic: run with `--nocapture` to eyeball how fat `Frame`
    /// and friends are after a refactor. Never fails.
    #[test]
    fn print_type_sizes() {
        // Table of (label, byte count) pairs so new types are a one-line add.
        let report: &[(&str, usize)] = &[
            ("Frame", size_of::<Frame>()),
            ("Tracker", size_of::<Tracker>()),
            ("ISet", size_of::<ISet>()),
            ("AllocatedShape", size_of::<AllocatedShape>()),
            ("FrameOwnership", size_of::<FrameOwnership>()),
            ("PtrUninit", size_of::<facet_core::PtrUninit>()),
            ("Option<usize>", size_of::<Option<usize>>()),
            (
                "Option<&'static facet_core::ProxyDef>",
                size_of::<Option<&'static facet_core::ProxyDef>>(),
            ),
            ("TypePlanNode", size_of::<typeplan::TypePlanNode>()),
            ("Vec<Frame>", size_of::<Vec<Frame>>()),
            ("MapInsertState", size_of::<MapInsertState>()),
            ("DynamicValueState", size_of::<DynamicValueState>()),
        ];

        eprintln!("\n=== Type Sizes ===");
        for (label, bytes) in report {
            eprintln!("{label}: {bytes} bytes");
        }
        eprintln!("===================\n");
    }
}