// facet_reflect/partial/mod.rs
1//! Partial value construction for dynamic reflection
2//!
3//! This module provides APIs for incrementally building values through reflection,
4//! particularly useful when deserializing data from external formats like JSON or YAML.
5//!
6//! # Overview
7//!
8//! The `Partial` type (formerly known as `Wip` - Work In Progress) allows you to:
9//! - Allocate memory for a value based on its `Shape`
10//! - Initialize fields incrementally in a type-safe manner
11//! - Handle complex nested structures including structs, enums, collections, and smart pointers
12//! - Build the final value once all required fields are initialized
13//!
14//! **Note**: This is the only API for partial value construction. The previous `TypedPartial`
15//! wrapper has been removed in favor of using `Partial` directly.
16//!
17//! # Basic Usage
18//!
19//! ```no_run
20//! # use facet_reflect::Partial;
21//! # use facet_core::{Shape, Facet};
22//! # fn example<T: Facet<'static>>() -> Result<(), Box<dyn std::error::Error>> {
23//! // Allocate memory for a struct
24//! let mut partial = Partial::alloc::<T>()?;
25//!
26//! // Set simple fields
27//! partial = partial.set_field("name", "Alice")?;
28//! partial = partial.set_field("age", 30u32)?;
29//!
30//! // Work with nested structures
31//! partial = partial.begin_field("address")?;
32//! partial = partial.set_field("street", "123 Main St")?;
33//! partial = partial.set_field("city", "Springfield")?;
34//! partial = partial.end()?;
35//!
36//! // Build the final value
37//! let value = partial.build()?;
38//! # Ok(())
39//! # }
40//! ```
41//!
42//! # Chaining Style
43//!
44//! The API supports method chaining for cleaner code:
45//!
46//! ```no_run
47//! # use facet_reflect::Partial;
48//! # use facet_core::{Shape, Facet};
49//! # fn example<T: Facet<'static>>() -> Result<(), Box<dyn std::error::Error>> {
50//! let value = Partial::alloc::<T>()?
51//! .set_field("name", "Bob")?
52//! .begin_field("scores")?
53//! .set(vec![95, 87, 92])?
54//! .end()?
55//! .build()?;
56//! # Ok(())
57//! # }
58//! ```
59//!
60//! # Working with Collections
61//!
62//! ```no_run
63//! # use facet_reflect::Partial;
64//! # use facet_core::{Shape, Facet};
65//! # fn example() -> Result<(), Box<dyn std::error::Error>> {
66//! let mut partial = Partial::alloc::<Vec<String>>()?;
67//!
68//! // Add items to a list
69//! partial = partial.begin_list_item()?;
70//! partial = partial.set("first")?;
71//! partial = partial.end()?;
72//!
73//! partial = partial.begin_list_item()?;
74//! partial = partial.set("second")?;
75//! partial = partial.end()?;
76//!
77//! let vec = partial.build()?;
78//! # Ok(())
79//! # }
80//! ```
81//!
82//! # Working with Maps
83//!
84//! ```no_run
85//! # use facet_reflect::Partial;
86//! # use facet_core::{Shape, Facet};
87//! # use std::collections::HashMap;
88//! # fn example() -> Result<(), Box<dyn std::error::Error>> {
89//! let mut partial = Partial::alloc::<HashMap<String, i32>>()?;
90//!
91//! // Insert key-value pairs
92//! partial = partial.begin_key()?;
93//! partial = partial.set("score")?;
94//! partial = partial.end()?;
95//! partial = partial.begin_value()?;
96//! partial = partial.set(100i32)?;
97//! partial = partial.end()?;
98//!
99//! let map = partial.build()?;
100//! # Ok(())
101//! # }
102//! ```
103//!
104//! # Safety and Memory Management
105//!
106//! The `Partial` type ensures memory safety by:
107//! - Tracking initialization state of all fields
108//! - Preventing use-after-build through state tracking
109//! - Properly handling drop semantics for partially initialized values
110//! - Supporting both owned and borrowed values through lifetime parameters
111//!
112//! # Drop-ownership invariant (single-source-of-truth)
113//!
114//! Every heap buffer allocated during partial construction has exactly one drop-and-dealloc
115//! authority at all times. Authority is either the owning frame (its `FrameOwnership`
116//! controls deinit + dealloc) or a parent tracker's pending slot
117//! (`Tracker::Map::pending_entries`, `Tracker::Option::pending_inner`,
118//! `Tracker::SmartPointer::pending_inner`, `DynamicValueState::Object::pending_entries`,
119//! `DynamicValueState::Array::pending_elements`). Authority transfers between the two
//! at well-defined points — never simultaneously held.
121//!
122//! - **Stored frames in deferred mode keep their buffer**: a frame that gets stored in
123//! `stored_frames` for later re-entry retains full `TrackedBuffer`/`Owned` ownership
124//! of its data. Parent tracker pending slots are NOT populated at store-time. On
125//! error (`Partial` dropped mid-build, or `finish_deferred` aborting), the stored
126//! frame's own `deinit` + `dealloc` handles cleanup; no parent pending-slot severing
127//! is required.
128//! - **Pending-slot population is consume-time**: `finish_deferred`'s walk calls
129//! `complete_map_{key,value}_frame` / `complete_option_frame` /
130//! `complete_smart_pointer_frame` AFTER `require_full_initialization` has validated
131//! the child frame. Only then does the buffer pointer move into the parent's pending
132//! slot (or directly into the parent's final storage, for Option/SmartPointer). The
133//! child frame is dropped silently (Frame has no `Drop` impl); the parent's pending
134//! slot becomes the sole owner.
135//! - **Non-stored frames in deferred mode**: when a child frame isn't stored (e.g.
136//! scalar Option inner), its popped `data` pointer is moved into the parent pending
137//! slot directly in `end()`. The frame is silently dropped after the match block, so
138//! single ownership is preserved without any ownership mutation.
139//! - **Map half-entries**: `pending_entries` holds `(key_ptr, Option<value_ptr>)`. A key
140//! pushed without a paired value is a half-entry `(key, None)`; its value-phase
141//! counterpart upgrades the last entry to `(key, Some(value))`. A half-entry left at
142//! finalize is an invariant violation; a half-entry at drop-time drops the orphan key
143//! only.
144//! - **No sever helpers, no ownership-mutation protocol**: the walk's consume-time
145//! ordering guarantees `pending_entries` / `pending_inner` entries only reference
146//! fully-validated buffers. `Tracker::*::deinit` can drain these unconditionally.
147//! If you catch yourself adding a `sever_parent_pending_*` or `untransfer_*` helper,
//! re-read this block — the answer is to push to the pending slot later, not to add
149//! a sever step.
150
151use alloc::{collections::BTreeMap, sync::Arc, vec::Vec};
152
153mod arena;
154mod iset;
155mod rope;
156pub(crate) mod typeplan;
157pub use typeplan::{DeserStrategy, NodeId, TypePlan, TypePlanCore};
158
159mod partial_api;
160
161use crate::{ReflectErrorKind, TrackerKind, trace};
162use facet_core::Facet;
163use facet_path::{Path, PathStep};
164
165use core::marker::PhantomData;
166
167mod heap_value;
168pub use heap_value::*;
169
170use facet_core::{
171 Def, EnumType, Field, PtrMut, PtrUninit, Shape, SliceBuilderVTable, Type, UserType, Variant,
172};
173use iset::ISet;
174use rope::ListRope;
175use typeplan::{FieldDefault, FieldInitPlan, FillRule};
176
/// State of a partial value.
///
/// Prevents a [Partial] from being modified or built again once
/// `build()` has succeeded (use-after-build protection).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum PartialState {
    /// Partial is active and can be modified
    Active,

    /// Partial has been successfully built and cannot be reused
    Built,
}
186
/// Mode of operation for frame management.
///
/// In `Strict` mode, frames must be fully initialized before being popped.
/// In `Deferred` mode, frames can be stored when popped and restored on re-entry,
/// with final validation happening in `finish_deferred()`.
enum FrameMode {
    /// Strict mode: frames must be fully initialized before popping.
    Strict {
        /// Stack of frames for nested initialization.
        stack: Vec<Frame>,
    },

    /// Deferred mode: frames are stored when popped, can be re-entered.
    Deferred {
        /// Stack of frames for nested initialization.
        stack: Vec<Frame>,

        /// The frame depth when deferred mode was started.
        /// Path calculations are relative to this depth.
        start_depth: usize,

        /// Frames saved when popped, keyed by their path (derived from frame stack).
        /// When we re-enter a path, we restore the stored frame.
        /// Uses the full `Path` type which includes the root shape for proper type anchoring.
        ///
        /// Per the drop-ownership invariant (see module docs), stored frames
        /// retain full ownership of their buffers until consumed by
        /// `finish_deferred`; on error their own deinit/dealloc cleans up.
        stored_frames: BTreeMap<Path, Frame>,
    },
}
214
215impl FrameMode {
216 /// Get a reference to the frame stack.
217 const fn stack(&self) -> &Vec<Frame> {
218 match self {
219 FrameMode::Strict { stack } | FrameMode::Deferred { stack, .. } => stack,
220 }
221 }
222
223 /// Get a mutable reference to the frame stack.
224 const fn stack_mut(&mut self) -> &mut Vec<Frame> {
225 match self {
226 FrameMode::Strict { stack } | FrameMode::Deferred { stack, .. } => stack,
227 }
228 }
229
230 /// Check if we're in deferred mode.
231 const fn is_deferred(&self) -> bool {
232 matches!(self, FrameMode::Deferred { .. })
233 }
234
235 /// Get the start depth if in deferred mode.
236 const fn start_depth(&self) -> Option<usize> {
237 match self {
238 FrameMode::Deferred { start_depth, .. } => Some(*start_depth),
239 FrameMode::Strict { .. } => None,
240 }
241 }
242}
243
/// A type-erased, heap-allocated, partially-initialized value.
///
/// [Partial] keeps track of the state of initialization of the underlying
/// value: if we're building `struct S { a: u32, b: String }`, we may
/// have initialized `a`, or `b`, or both, or neither.
///
/// [Partial] allows navigating down nested structs and initializing them
/// progressively: [Partial::begin_field] pushes a frame onto the stack,
/// which then has to be initialized, and popped off with [Partial::end].
///
/// If [Partial::end] is called but the current frame isn't fully initialized,
/// an error is returned: in other words, if you navigate down to a field,
/// you have to fully initialize it in one go. You can't go back up and back down
/// to it again.
pub struct Partial<'facet, const BORROW: bool = true> {
    /// Frame management mode (strict or deferred) and associated state.
    mode: FrameMode,

    /// Current state of the Partial: `Active` while building, `Built`
    /// after a successful `build()` (prevents reuse).
    state: PartialState,

    /// Precomputed deserialization plan for the root type.
    /// Built once at allocation time, navigated in parallel with value construction.
    /// Each Frame holds a NodeId (index) into this plan's arenas.
    root_plan: Arc<TypePlanCore>,

    /// PhantomData marker for the 'facet lifetime.
    /// This is covariant in 'facet, which is safe because 'facet represents
    /// the lifetime of borrowed data FROM the input (deserialization source).
    /// A Partial<'long, ...> can be safely treated as Partial<'short, ...>
    /// because it only needs borrowed data to live at least as long as 'short.
    _marker: PhantomData<&'facet ()>,
}
277
/// Progress of a single key/value insertion into a map frame.
#[derive(Clone, Copy, Debug)]
pub(crate) enum MapInsertState {
    /// Not currently inserting
    Idle,

    /// Pushing key - memory allocated, waiting for initialization.
    ///
    /// The key buffer is owned by the key frame currently on the stack
    /// (a `TrackedBuffer` frame). In non-deferred mode, the key frame's `end()`
    /// transfers the buffer into `pending_entries` as a half-entry
    /// `(key_ptr, None)` before transitioning to `PushingValue`. In deferred
    /// mode, the key frame is stored (retaining ownership) and only the state
    /// transitions here; `pending_entries` is populated at consume-time by
    /// `complete_map_key_frame` during `finish_deferred`.
    PushingKey {
        /// Temporary storage for the key being built
        key_ptr: PtrUninit,
    },

    /// Pushing value after key is done.
    ///
    /// In non-deferred mode, the value frame's `end()` upgrades the last
    /// half-entry in `pending_entries` to `(key_ptr, Some(value_ptr))`. In
    /// deferred mode, the value frame is stored (retaining ownership); the
    /// `finish_deferred` walk's `complete_map_value_frame` upgrades
    /// the half-entry at consume-time.
    PushingValue {
        /// Temporary storage for the key that was built (always initialized)
        key_ptr: PtrUninit,
        /// Temporary storage for the value being built (`None` until allocated)
        value_ptr: Option<PtrUninit>,
    },
}
311
/// Who is responsible for the memory a [Frame] points at — and therefore
/// what the frame may do (deinit and/or dealloc) during cleanup.
#[derive(Debug, Clone, Copy)]
pub(crate) enum FrameOwnership {
    /// This frame owns the allocation and should deallocate it on drop
    Owned,

    /// This frame points to a field/element within a parent's allocation.
    /// The parent's `iset[field_idx]` was CLEARED when this frame was created.
    /// On drop: deinit if initialized, but do NOT deallocate.
    /// On successful end(): parent's `iset[field_idx]` will be SET.
    Field { field_idx: usize },

    /// Temporary buffer tracked by parent's MapInsertState.
    /// Used by begin_key(), begin_value() for map insertions.
    /// Safe to drop on deinit - parent's cleanup respects is_init propagation.
    TrackedBuffer,

    /// Pointer into existing collection entry (Value object, Option inner, etc.)
    /// Used by begin_object_entry() on existing key, begin_some() re-entry.
    /// NOT safe to drop on deinit - parent collection has no per-entry tracking
    /// and would try to drop the freed value again (double-free).
    BorrowedInPlace,

    /// Pointer to externally-owned memory (e.g., caller's stack via MaybeUninit).
    /// Used by `from_raw()` for stack-friendly deserialization.
    /// On drop: deinit if initialized (drop partially constructed values), but do NOT deallocate.
    /// The caller owns the memory and is responsible for its lifetime.
    External,

    /// Points into a stable rope chunk for list element building.
    /// Used by `begin_list_item()` for building Vec elements.
    /// The memory is stable (won't move during Vec growth),
    /// so frames inside can be stored for deferred processing.
    /// On successful end(): element is tracked for later finalization.
    /// On list frame end(): all elements are moved into the real Vec.
    /// On drop/failure: the rope chunk handles cleanup.
    RopeSlot,
}
349
350impl FrameOwnership {
351 /// Returns true if this frame is responsible for deallocating its memory.
352 ///
353 /// Both `Owned` and `TrackedBuffer` frames allocated their memory and need
354 /// to deallocate it. `Field`, `BorrowedInPlace`, `External`, and `RopeSlot`
355 /// frames borrow from somewhere else.
356 const fn needs_dealloc(&self) -> bool {
357 matches!(self, FrameOwnership::Owned | FrameOwnership::TrackedBuffer)
358 }
359}
360
/// Immutable pairing of a shape with its actual allocation size.
///
/// This ensures that the shape and allocated size are always in sync and cannot
/// drift apart, preventing the class of bugs where a frame's shape doesn't match
/// what was actually allocated (see issue #1568).
pub(crate) struct AllocatedShape {
    /// Shape describing the value stored in the allocation.
    shape: &'static Shape,
    /// Number of bytes actually allocated for the value.
    allocated_size: usize,
}
370
371impl AllocatedShape {
372 pub(crate) const fn new(shape: &'static Shape, allocated_size: usize) -> Self {
373 Self {
374 shape,
375 allocated_size,
376 }
377 }
378
379 pub(crate) const fn shape(&self) -> &'static Shape {
380 self.shape
381 }
382
383 pub(crate) const fn allocated_size(&self) -> usize {
384 self.allocated_size
385 }
386}
387
/// Points somewhere in a partially-initialized value. If we're initializing
/// `a.b.c`, then the first frame would point to the beginning of `a`, the
/// second to the beginning of the `b` field of `a`, etc.
///
/// A frame can point to a complex data structure, like a struct or an enum:
/// it keeps track of whether a variant was selected, which fields are initialized,
/// etc. and is able to drop & deinitialize whatever was initialized so far.
#[must_use]
pub(crate) struct Frame {
    /// Address of the value being initialized
    pub(crate) data: PtrUninit,

    /// Shape of the value being initialized, paired with the actual allocation size
    pub(crate) allocated: AllocatedShape,

    /// Whether this frame's data is fully initialized
    pub(crate) is_init: bool,

    /// Tracks building mode and partial initialization state
    pub(crate) tracker: Tracker,

    /// Whether this frame owns the allocation or is just a field pointer
    /// (see [FrameOwnership] for the cleanup rules of each variant)
    pub(crate) ownership: FrameOwnership,

    /// Whether this frame is for a custom deserialization pipeline
    pub(crate) using_custom_deserialization: bool,

    /// Container-level proxy definition (from `#[facet(proxy = ...)]` on the shape).
    /// Used during custom deserialization to convert from proxy type to target type.
    pub(crate) shape_level_proxy: Option<&'static facet_core::ProxyDef>,

    /// Index of the precomputed TypePlan node for this frame's type.
    /// This is navigated in parallel with the value - when we begin_nth_field,
    /// the new frame gets the index for that field's child plan node.
    /// Use `plan.node(type_plan)` to get the actual `&TypePlanNode`.
    /// Always present - TypePlan is built for what we actually deserialize into
    /// (including proxies).
    pub(crate) type_plan: typeplan::NodeId,
}
427
#[derive(Debug)]
pub(crate) enum Tracker {
    /// Simple scalar value - no partial initialization tracking needed.
    /// Whether it's initialized is tracked by `Frame::is_init`.
    Scalar,

    /// Partially initialized array
    Array {
        /// Track which array elements are initialized (up to 63 elements)
        iset: ISet,
        /// If we're pushing another frame, this is set to the array index
        current_child: Option<usize>,
    },

    /// Partially initialized struct/tuple-struct etc.
    Struct {
        /// fields need to be individually tracked — we only
        /// support up to 63 fields.
        iset: ISet,
        /// if we're pushing another frame, this is set to the index of the struct field
        current_child: Option<usize>,
    },

    /// Smart pointer being initialized.
    /// Whether it's initialized is tracked by `Frame::is_init`.
    SmartPointer {
        /// Whether we're currently building the inner value
        building_inner: bool,
        /// Pending inner value pointer to be moved with new_into_fn on finalization.
        /// Deferred processing requires keeping the inner value's memory stable,
        /// so we delay the new_into_fn() call until the SmartPointer frame is finalized.
        /// None = no pending inner, Some = inner value ready to be moved into SmartPointer.
        pending_inner: Option<PtrUninit>,
    },

    /// We're initializing an `Arc<[T]>`, `Box<[T]>`, `Rc<[T]>`, etc.
    ///
    /// We're using the slice builder API to construct the slice
    SmartPointerSlice {
        /// The slice builder vtable
        vtable: &'static SliceBuilderVTable,

        /// Whether we're currently building an item to push
        building_item: bool,

        /// Current element index being built (for path derivation in deferred mode)
        current_child: Option<usize>,
    },

    /// Transparent inner type wrapper (`NonZero<T>`, ByteString, etc.)
    /// Used to distinguish inner frames from their parent for deferred path tracking.
    Inner {
        /// Whether we're currently building the inner value
        building_inner: bool,
    },

    /// Partially initialized enum (but we picked a variant,
    /// so it's not Uninit)
    Enum {
        /// Variant chosen for the enum
        variant: &'static Variant,
        /// Index of the variant in the enum's variants array
        variant_idx: usize,
        /// tracks enum fields (for the given variant)
        data: ISet,
        /// If we're pushing another frame, this is set to the field index
        current_child: Option<usize>,
    },

    /// Partially initialized list (Vec, etc.)
    /// Whether it's initialized is tracked by `Frame::is_init`.
    List {
        /// If we're pushing another frame for an element, this is the element index
        current_child: Option<usize>,
        /// Stable rope storage for elements during list building.
        /// A rope is a list of fixed-size chunks - chunks never reallocate, only new
        /// chunks are added. This keeps element pointers stable, enabling deferred
        /// frame processing for nested structs inside Vec elements.
        /// On finalization, elements are moved into the real Vec.
        rope: Option<ListRope>,
    },

    /// Partially initialized map (HashMap, BTreeMap, etc.)
    /// Whether it's initialized is tracked by `Frame::is_init`.
    Map {
        /// State of the current insertion operation
        insert_state: MapInsertState,
        /// Pending key-value entries to be inserted on map finalization.
        /// Deferred processing requires keeping buffers alive until finish_deferred(),
        /// so we delay actual insertion until the map frame is finalized.
        /// Each entry is (key_ptr, value_ptr); the key is always initialized and
        /// owned by this tracker. A `None` value marks a half-entry (key pushed,
        /// value not yet paired) — see the drop-ownership invariant in the
        /// module docs.
        pending_entries: Vec<(PtrUninit, Option<PtrUninit>)>,
        /// The current entry index, used for building unique paths for deferred frame storage.
        /// Incremented each time we start a new key (in begin_key).
        /// This allows inner frames of different map entries to have distinct paths.
        current_entry_index: Option<usize>,
        /// Whether we're currently building a key (true) or value (false).
        /// Used to determine whether to push MapKey or MapValue to the path.
        building_key: bool,
    },

    /// Partially initialized set (HashSet, BTreeSet, etc.)
    /// Whether it's initialized is tracked by `Frame::is_init`.
    Set {
        /// If we're pushing another frame for an element
        current_child: bool,
    },

    /// Option being initialized with Some(inner_value)
    Option {
        /// Whether we're currently building the inner value
        building_inner: bool,
        /// Pending inner value pointer to be moved with init_some on finalization.
        /// Deferred processing requires keeping the inner value's memory stable,
        /// so we delay the init_some() call until the Option frame is finalized.
        /// None = no pending inner, Some = inner value ready to be moved into Option.
        pending_inner: Option<PtrUninit>,
    },

    /// Result being initialized with Ok or Err
    Result {
        /// Whether we're building Ok (true) or Err (false)
        is_ok: bool,
        /// Whether we're currently building the inner value
        building_inner: bool,
    },

    /// Dynamic value (e.g., facet_value::Value) being initialized
    DynamicValue {
        /// What kind of dynamic value we're building
        state: DynamicValueState,
    },
}
561
/// State for building a dynamic value
#[derive(Debug)]
#[allow(dead_code)] // Some variants are for future use (object support)
pub(crate) enum DynamicValueState {
    /// Not yet initialized - will be set to scalar, array, or object
    Uninit,
    /// Initialized as a scalar (null, bool, number, string, bytes)
    Scalar,
    /// Initialized as an array, currently building an element
    Array {
        /// Whether an element frame is currently in flight
        building_element: bool,
        /// Pending elements to be inserted during finalization (deferred mode)
        pending_elements: alloc::vec::Vec<PtrUninit>,
    },
    /// Initialized as an object
    Object {
        /// Progress of the current key/value insertion
        insert_state: DynamicObjectInsertState,
        /// Pending entries to be inserted during finalization (deferred mode)
        pending_entries: alloc::vec::Vec<(alloc::string::String, PtrUninit)>,
    },
}
583
/// State for inserting into a dynamic object.
///
/// Unlike [MapInsertState], keys here are plain strings built eagerly,
/// so only the value phase needs tracking.
#[derive(Debug)]
#[allow(dead_code)] // For future use (object support)
pub(crate) enum DynamicObjectInsertState {
    /// Idle - ready for a new key-value pair
    Idle,
    /// Currently building the value for a key
    BuildingValue {
        /// The key for the current entry
        key: alloc::string::String,
    },
}
596
597impl Tracker {
598 const fn kind(&self) -> TrackerKind {
599 match self {
600 Tracker::Scalar => TrackerKind::Scalar,
601 Tracker::Array { .. } => TrackerKind::Array,
602 Tracker::Struct { .. } => TrackerKind::Struct,
603 Tracker::SmartPointer { .. } => TrackerKind::SmartPointer,
604 Tracker::SmartPointerSlice { .. } => TrackerKind::SmartPointerSlice,
605 Tracker::Enum { .. } => TrackerKind::Enum,
606 Tracker::List { .. } => TrackerKind::List,
607 Tracker::Map { .. } => TrackerKind::Map,
608 Tracker::Set { .. } => TrackerKind::Set,
609 Tracker::Option { .. } => TrackerKind::Option,
610 Tracker::Result { .. } => TrackerKind::Result,
611 Tracker::DynamicValue { .. } => TrackerKind::DynamicValue,
612 Tracker::Inner { .. } => TrackerKind::Inner,
613 }
614 }
615
616 /// Set the current_child index for trackers that support it
617 const fn set_current_child(&mut self, idx: usize) {
618 match self {
619 Tracker::Struct { current_child, .. }
620 | Tracker::Enum { current_child, .. }
621 | Tracker::Array { current_child, .. } => {
622 *current_child = Some(idx);
623 }
624 _ => {}
625 }
626 }
627
628 /// Clear the current_child index for trackers that support it
629 fn clear_current_child(&mut self) {
630 match self {
631 Tracker::Struct { current_child, .. }
632 | Tracker::Enum { current_child, .. }
633 | Tracker::Array { current_child, .. }
634 | Tracker::List { current_child, .. } => {
635 *current_child = None;
636 }
637 Tracker::Set { current_child } => {
638 *current_child = false;
639 }
640 _ => {}
641 }
642 }
643}
644
645impl Frame {
646 fn new(
647 data: PtrUninit,
648 allocated: AllocatedShape,
649 ownership: FrameOwnership,
650 type_plan: typeplan::NodeId,
651 ) -> Self {
652 // For empty structs (structs with 0 fields), start as initialized since there's nothing to initialize
653 // This includes empty tuples () which are zero-sized types with no fields to initialize
654 let is_init = matches!(
655 allocated.shape().ty,
656 Type::User(UserType::Struct(struct_type)) if struct_type.fields.is_empty()
657 );
658
659 Self {
660 data,
661 allocated,
662 is_init,
663 tracker: Tracker::Scalar,
664 ownership,
665 using_custom_deserialization: false,
666 shape_level_proxy: None,
667 type_plan,
668 }
669 }
670
671 /// Deinitialize any initialized field: calls `drop_in_place` but does not free any
672 /// memory even if the frame owns that memory.
673 ///
674 /// After this call, `is_init` will be false and `tracker` will be [Tracker::Scalar].
675 fn deinit(&mut self) {
676 // For BorrowedInPlace frames, we must NOT drop. These point into existing
677 // collection entries (Value objects, Option inners) where the parent has no
678 // per-entry tracking. Dropping here would cause double-free when parent drops.
679 if matches!(self.ownership, FrameOwnership::BorrowedInPlace) {
680 self.is_init = false;
681 self.tracker = Tracker::Scalar;
682 return;
683 }
684
685 // For RopeSlot frames, `frame.data` points into a ListRope chunk owned by
686 // the parent List's tracker. We DO need to drop the element's contents
687 // (respecting partial init), because with consume-time rope marking, if a
688 // RopeSlot frame is still alive at deinit time the rope's initialized_count
689 // does NOT cover this slot โ so `ListRope::drain_into` won't drop it.
690 // The drop below runs `call_drop_in_place` against `self.data` (in-rope);
691 // `dealloc` is skipped since `needs_dealloc()` is false for RopeSlot, so
692 // the chunk allocation stays intact for the parent rope to reclaim.
693
694 // Field frames are responsible for their value during cleanup.
695 // The ownership model ensures no double-free:
696 // - begin_field: parent's iset[idx] is cleared (parent relinquishes responsibility)
697 // - end: parent's iset[idx] is set (parent reclaims responsibility), frame is popped
698 // So if Field frame is still on stack during cleanup, parent's iset[idx] is false,
699 // meaning the parent won't drop this field - the Field frame must do it.
700
701 match &mut self.tracker {
702 Tracker::Scalar => {
703 // Simple scalar - drop if initialized
704 if self.is_init {
705 unsafe {
706 self.allocated
707 .shape()
708 .call_drop_in_place(self.data.assume_init())
709 };
710 }
711 }
712 Tracker::Array { iset, .. } => {
713 // Drop initialized array elements
714 if let Type::Sequence(facet_core::SequenceType::Array(array_def)) =
715 self.allocated.shape().ty
716 {
717 let element_layout = array_def.t.layout.sized_layout().ok();
718 if let Some(layout) = element_layout {
719 for idx in 0..array_def.n {
720 if iset.get(idx) {
721 let offset = layout.size() * idx;
722 let element_ptr = unsafe { self.data.field_init(offset) };
723 unsafe { array_def.t.call_drop_in_place(element_ptr) };
724 }
725 }
726 }
727 }
728 }
729 Tracker::Struct { iset, .. } => {
730 // Drop initialized struct fields
731 if let Type::User(UserType::Struct(struct_type)) = self.allocated.shape().ty {
732 if iset.all_set(struct_type.fields.len()) {
733 unsafe {
734 self.allocated
735 .shape()
736 .call_drop_in_place(self.data.assume_init())
737 };
738 } else {
739 for (idx, field) in struct_type.fields.iter().enumerate() {
740 if iset.get(idx) {
741 // This field was initialized, drop it
742 let field_ptr = unsafe { self.data.field_init(field.offset) };
743 unsafe { field.shape().call_drop_in_place(field_ptr) };
744 }
745 }
746 }
747 }
748 }
749 Tracker::Enum { variant, data, .. } => {
750 // Drop initialized enum variant fields
751 for (idx, field) in variant.data.fields.iter().enumerate() {
752 if data.get(idx) {
753 // This field was initialized, drop it
754 let field_ptr = unsafe { self.data.field_init(field.offset) };
755 unsafe { field.shape().call_drop_in_place(field_ptr) };
756 }
757 }
758 }
759 Tracker::SmartPointer { pending_inner, .. } => {
760 // If there's a pending inner value, drop it
761 if let Some(inner_ptr) = pending_inner
762 && let Def::Pointer(ptr_def) = self.allocated.shape().def
763 && let Some(inner_shape) = ptr_def.pointee
764 {
765 unsafe {
766 inner_shape.call_drop_in_place(PtrMut::new(inner_ptr.as_mut_byte_ptr()))
767 };
768 }
769 // Drop the initialized SmartPointer
770 if self.is_init {
771 unsafe {
772 self.allocated
773 .shape()
774 .call_drop_in_place(self.data.assume_init())
775 };
776 }
777 }
778 Tracker::SmartPointerSlice { vtable, .. } => {
779 // Free the slice builder
780 let builder_ptr = unsafe { self.data.assume_init() };
781 unsafe {
782 (vtable.free_fn)(builder_ptr);
783 }
784 }
785 Tracker::List { rope, .. } => {
786 // Drain any rope elements first. `is_init` only indicates that the Vec
787 // has been allocated (via `init_in_place_with_capacity`); elements pushed
788 // via `begin_list_item` live in the rope until `drain_rope_into_vec` moves
789 // them into the Vec. A successful drain leaves `rope = None` (via `.take()`),
790 // so if we see `rope = Some(..)` here the elements inside were never moved
791 // into the Vec and they're still owned by the rope. Drop them now.
792 if let Some(mut rope) = rope.take()
793 && let Def::List(list_def) = self.allocated.shape().def
794 {
795 let element_shape = list_def.t;
796 unsafe {
797 rope.drain_into(|ptr| {
798 element_shape.call_drop_in_place(PtrMut::new(ptr.as_ptr()));
799 });
800 }
801 }
802
803 // Now drop the Vec (and whatever elements it already owns).
804 if self.is_init {
805 unsafe {
806 self.allocated
807 .shape()
808 .call_drop_in_place(self.data.assume_init())
809 };
810 }
811 }
812 Tracker::Map {
813 pending_entries, ..
814 } => {
815 // Drop the initialized Map
816 if self.is_init {
817 unsafe {
818 self.allocated
819 .shape()
820 .call_drop_in_place(self.data.assume_init())
821 };
822 }
823
824 // Clean up pending entries. Each entry is `(key_ptr, Option<value_ptr>)`:
825 // a full entry has `Some(value_ptr)`; a half-entry (key pushed but no value
826 // paired yet) has `None`. `pending_entries` is the sole drop-and-dealloc
827 // authority for any buffer it holds; entries only ever reference fully-
828 // initialized buffers (pushed at walk consume-time or non-stored end()
829 // paths, after validation), so drop-in-place is safe.
830 if let Def::Map(map_def) = self.allocated.shape().def {
831 for (key_ptr, value_ptr) in pending_entries.drain(..) {
832 // Drop and deallocate key
833 unsafe { map_def.k().call_drop_in_place(key_ptr.assume_init()) };
834 if let Ok(key_layout) = map_def.k().layout.sized_layout()
835 && key_layout.size() > 0
836 {
837 unsafe { alloc::alloc::dealloc(key_ptr.as_mut_byte_ptr(), key_layout) };
838 }
839 // Drop and deallocate value if present (half-entries have None).
840 if let Some(value_ptr) = value_ptr {
841 unsafe { map_def.v().call_drop_in_place(value_ptr.assume_init()) };
842 if let Ok(value_layout) = map_def.v().layout.sized_layout()
843 && value_layout.size() > 0
844 {
845 unsafe {
846 alloc::alloc::dealloc(value_ptr.as_mut_byte_ptr(), value_layout)
847 };
848 }
849 }
850 }
851 }
852 // Note: insert_state is no longer a cleanup source. Any key/value buffer
853 // for an in-flight frame is still owned by that frame (the stored frame's
854 // own dealloc handles it during `cleanup_stored_frames_on_error`, or the
855 // on-stack frame's dealloc handles it in `Drop::drop`).
856 }
857 Tracker::Set { .. } => {
858 // Drop the initialized Set
859 if self.is_init {
860 unsafe {
861 self.allocated
862 .shape()
863 .call_drop_in_place(self.data.assume_init())
864 };
865 }
866 }
867 Tracker::Option {
868 building_inner,
869 pending_inner,
870 } => {
871 // Clean up pending inner value if it was never finalized
872 let had_pending = pending_inner.is_some();
873 if let Some(inner_ptr) = pending_inner.take()
874 && let Def::Option(option_def) = self.allocated.shape().def
875 {
876 // Drop the inner value
877 unsafe { option_def.t.call_drop_in_place(inner_ptr.assume_init()) };
878 // Deallocate the inner buffer
879 if let Ok(layout) = option_def.t.layout.sized_layout()
880 && layout.size() > 0
881 {
882 unsafe { alloc::alloc::dealloc(inner_ptr.as_mut_byte_ptr(), layout) };
883 }
884 }
885 // If we're building the inner value, it will be handled by the Option vtable
886 // No special cleanup needed here as the Option will either be properly
887 // initialized or remain uninitialized
888 if !*building_inner && !had_pending {
889 // Option is fully initialized (no pending), drop it normally
890 unsafe {
891 self.allocated
892 .shape()
893 .call_drop_in_place(self.data.assume_init())
894 };
895 }
896 }
897 Tracker::Result { building_inner, .. } => {
898 // If we're building the inner value, it will be handled by the Result vtable
899 // No special cleanup needed here as the Result will either be properly
900 // initialized or remain uninitialized
901 if !*building_inner {
902 // Result is fully initialized, drop it normally
903 unsafe {
904 self.allocated
905 .shape()
906 .call_drop_in_place(self.data.assume_init())
907 };
908 }
909 }
910 Tracker::DynamicValue { state } => {
911 // Clean up pending_entries if this is an Object
912 if let DynamicValueState::Object {
913 pending_entries, ..
914 } = state
915 {
916 // Drop and deallocate any pending values that weren't inserted
917 if let Def::DynamicValue(dyn_def) = self.allocated.shape().def {
918 let value_shape = self.allocated.shape(); // Value entries are same shape
919 for (_key, value_ptr) in pending_entries.drain(..) {
920 // Drop the value
921 unsafe {
922 value_shape.call_drop_in_place(value_ptr.assume_init());
923 }
924 // Deallocate the value buffer
925 if let Ok(layout) = value_shape.layout.sized_layout()
926 && layout.size() > 0
927 {
928 unsafe {
929 alloc::alloc::dealloc(value_ptr.as_mut_byte_ptr(), layout);
930 }
931 }
932 }
933 // Note: keys are Strings and will be dropped when pending_entries is dropped
934 let _ = dyn_def; // silence unused warning
935 }
936 }
937
938 // Clean up pending_elements if this is an Array
939 if let DynamicValueState::Array {
940 pending_elements, ..
941 } = state
942 {
943 // Drop and deallocate any pending elements that weren't inserted
944 let element_shape = self.allocated.shape(); // Array elements are same shape
945 for element_ptr in pending_elements.drain(..) {
946 // Drop the element
947 unsafe {
948 element_shape.call_drop_in_place(element_ptr.assume_init());
949 }
950 // Deallocate the element buffer
951 if let Ok(layout) = element_shape.layout.sized_layout()
952 && layout.size() > 0
953 {
954 unsafe {
955 alloc::alloc::dealloc(element_ptr.as_mut_byte_ptr(), layout);
956 }
957 }
958 }
959 }
960
961 // Drop if initialized
962 if self.is_init {
963 let result = unsafe {
964 self.allocated
965 .shape()
966 .call_drop_in_place(self.data.assume_init())
967 };
968 if result.is_none() {
969 // This would be a bug - DynamicValue should always have drop_in_place
970 panic!(
971 "DynamicValue type {} has no drop_in_place implementation",
972 self.allocated.shape()
973 );
974 }
975 }
976 }
977 Tracker::Inner { .. } => {
978 // Inner wrapper - drop if initialized
979 if self.is_init {
980 unsafe {
981 self.allocated
982 .shape()
983 .call_drop_in_place(self.data.assume_init())
984 };
985 }
986 }
987 }
988
989 self.is_init = false;
990 self.tracker = Tracker::Scalar;
991 }
992
    /// Deinitialize any initialized value for REPLACEMENT purposes.
    ///
    /// Unlike `deinit()` which is used during error cleanup, this method is used when
    /// we're about to overwrite a value with a new one (e.g., in `set_shape`).
    ///
    /// The difference is important for Field frames with simple trackers:
    /// - During cleanup: parent struct will drop all initialized fields, so Field frames skip dropping
    /// - During replacement: we're about to overwrite, so we MUST drop the old value
    ///
    /// For BorrowedInPlace frames: same logic applies - we must drop when replacing.
    fn deinit_for_replace(&mut self) {
        // For BorrowedInPlace frames, deinit() skips dropping (parent owns on cleanup).
        // But when REPLACING a value, we must drop the old value first.
        if matches!(self.ownership, FrameOwnership::BorrowedInPlace) && self.is_init {
            // SAFETY: is_init is true, so self.data holds a fully-initialized
            // value of self.allocated.shape() that we are allowed to drop here.
            unsafe {
                self.allocated
                    .shape()
                    .call_drop_in_place(self.data.assume_init());
            }

            // CRITICAL: For DynamicValue (e.g., facet_value::Value), the parent Object's
            // HashMap entry still points to this location. If we just drop and leave garbage,
            // the parent will try to drop that garbage when it's cleaned up, causing
            // use-after-free. We must reinitialize to a safe default (Null) so the parent
            // can safely drop it later.
            if let Def::DynamicValue(dyn_def) = &self.allocated.shape().def {
                // SAFETY: the slot was just deinitialized; set_null writes a
                // valid Null value into it.
                unsafe {
                    (dyn_def.vtable.set_null)(self.data);
                }
                // Keep is_init = true since we just initialized it to Null
                self.tracker = Tracker::DynamicValue {
                    state: DynamicValueState::Scalar,
                };
                return;
            }

            // Non-dynamic borrowed slot: now genuinely uninitialized.
            self.is_init = false;
            self.tracker = Tracker::Scalar;
            return;
        }

        // Field frames handle their own cleanup in deinit() - no special handling needed here.

        // All other cases: use normal deinit
        self.deinit();
    }
1039
    /// This must be called after (fully) initializing a value.
    ///
    /// This sets `is_init` to `true` to indicate the value is initialized.
    /// Composite types (structs, enums, etc.) might be handled differently.
    ///
    /// Note: this only flips the flag — it performs no writes to `self.data`.
    ///
    /// # Safety
    ///
    /// This should only be called when `self.data` has been actually initialized.
    /// Marking uninitialized memory as initialized leads to an invalid drop later.
    const unsafe fn mark_as_init(&mut self) {
        self.is_init = true;
    }
1051
1052 /// Deallocate the memory associated with this frame, if it owns it.
1053 ///
1054 /// The memory has to be deinitialized first, see [Frame::deinit]
1055 fn dealloc(self) {
1056 // Only deallocate if this frame owns its memory
1057 if !self.ownership.needs_dealloc() {
1058 return;
1059 }
1060
1061 // If we need to deallocate, the frame must be deinitialized first
1062 if self.is_init {
1063 unreachable!("a frame has to be deinitialized before being deallocated")
1064 }
1065
1066 // Deallocate using the actual allocated size (not derived from shape)
1067 if self.allocated.allocated_size() > 0 {
1068 // Use the shape for alignment, but the stored size for the actual allocation
1069 if let Ok(layout) = self.allocated.shape().layout.sized_layout() {
1070 let actual_layout = core::alloc::Layout::from_size_align(
1071 self.allocated.allocated_size(),
1072 layout.align(),
1073 )
1074 .expect("allocated_size must be valid");
1075 unsafe { alloc::alloc::dealloc(self.data.as_mut_byte_ptr(), actual_layout) };
1076 }
1077 }
1078 }
1079
1080 /// Fill in defaults for any unset fields that have default values.
1081 ///
1082 /// This handles:
1083 /// - Container-level defaults (when no fields set and struct has Default impl)
1084 /// - Fields with `#[facet(default = ...)]` - uses the explicit default function
1085 /// - Fields with `#[facet(default)]` - uses the type's Default impl
1086 /// - `Option<T>` fields - default to None
1087 ///
1088 /// Returns Ok(()) if successful, or an error if a field has `#[facet(default)]`
1089 /// but no default implementation is available.
1090 fn fill_defaults(&mut self) -> Result<(), ReflectErrorKind> {
1091 // First, check if we need to upgrade from Scalar to Struct tracker
1092 // This happens when no fields were visited at all in deferred mode
1093 if !self.is_init
1094 && matches!(self.tracker, Tracker::Scalar)
1095 && let Type::User(UserType::Struct(struct_type)) = self.allocated.shape().ty
1096 {
1097 // If no fields were visited and the container has a default, use it
1098 // SAFETY: We're about to initialize the entire struct with its default value
1099 if unsafe { self.allocated.shape().call_default_in_place(self.data) }.is_some() {
1100 self.is_init = true;
1101 return Ok(());
1102 }
1103 // Otherwise initialize the struct tracker with empty iset
1104 self.tracker = Tracker::Struct {
1105 iset: ISet::new(struct_type.fields.len()),
1106 current_child: None,
1107 };
1108 }
1109
1110 // Handle Option types with Scalar tracker - default to None
1111 // This happens in deferred mode when an Option field was never touched
1112 if !self.is_init
1113 && matches!(self.tracker, Tracker::Scalar)
1114 && matches!(self.allocated.shape().def, Def::Option(_))
1115 {
1116 // SAFETY: Option<T> always implements Default (as None)
1117 if unsafe { self.allocated.shape().call_default_in_place(self.data) }.is_some() {
1118 self.is_init = true;
1119 return Ok(());
1120 }
1121 }
1122
1123 match &mut self.tracker {
1124 Tracker::Struct { iset, .. } => {
1125 if let Type::User(UserType::Struct(struct_type)) = self.allocated.shape().ty {
1126 // Fast path: if ALL fields are set, nothing to do
1127 if iset.all_set(struct_type.fields.len()) {
1128 return Ok(());
1129 }
1130
1131 // Check if NO fields have been set and the container has a default
1132 let no_fields_set = (0..struct_type.fields.len()).all(|i| !iset.get(i));
1133 if no_fields_set {
1134 // SAFETY: We're about to initialize the entire struct with its default value
1135 if unsafe { self.allocated.shape().call_default_in_place(self.data) }
1136 .is_some()
1137 {
1138 self.tracker = Tracker::Scalar;
1139 self.is_init = true;
1140 return Ok(());
1141 }
1142 }
1143
1144 // Check if the container has #[facet(default)] attribute
1145 let container_has_default = self.allocated.shape().has_default_attr();
1146
1147 // Fill defaults for individual fields
1148 for (idx, field) in struct_type.fields.iter().enumerate() {
1149 // Skip already-initialized fields
1150 if iset.get(idx) {
1151 continue;
1152 }
1153
1154 // Calculate field pointer
1155 let field_ptr = unsafe { self.data.field_uninit(field.offset) };
1156
1157 // Try to initialize with default
1158 if unsafe {
1159 Self::try_init_field_default(field, field_ptr, container_has_default)
1160 } {
1161 // Mark field as initialized
1162 iset.set(idx);
1163 } else if field.has_default() {
1164 // Field has #[facet(default)] but we couldn't find a default function.
1165 // This happens with opaque types that don't have default_in_place.
1166 return Err(ReflectErrorKind::DefaultAttrButNoDefaultImpl {
1167 shape: field.shape(),
1168 });
1169 }
1170 }
1171 }
1172 }
1173 Tracker::Enum { variant, data, .. } => {
1174 // Fast path: if ALL fields are set, nothing to do
1175 let num_fields = variant.data.fields.len();
1176 if num_fields == 0 || data.all_set(num_fields) {
1177 return Ok(());
1178 }
1179
1180 // Check if the container has #[facet(default)] attribute
1181 let container_has_default = self.allocated.shape().has_default_attr();
1182
1183 // Handle enum variant fields
1184 for (idx, field) in variant.data.fields.iter().enumerate() {
1185 // Skip already-initialized fields
1186 if data.get(idx) {
1187 continue;
1188 }
1189
1190 // Calculate field pointer within the variant data
1191 let field_ptr = unsafe { self.data.field_uninit(field.offset) };
1192
1193 // Try to initialize with default
1194 if unsafe {
1195 Self::try_init_field_default(field, field_ptr, container_has_default)
1196 } {
1197 // Mark field as initialized
1198 data.set(idx);
1199 } else if field.has_default() {
1200 // Field has #[facet(default)] but we couldn't find a default function.
1201 return Err(ReflectErrorKind::DefaultAttrButNoDefaultImpl {
1202 shape: field.shape(),
1203 });
1204 }
1205 }
1206 }
1207 // Other tracker types don't have fields with defaults
1208 _ => {}
1209 }
1210 Ok(())
1211 }
1212
1213 /// Initialize a field with its default value if one is available.
1214 ///
1215 /// Priority:
1216 /// 1. Explicit field-level default_fn (from `#[facet(default = ...)]`)
1217 /// 2. Type-level default_in_place (from Default impl, including `Option<T>`)
1218 /// but only if the field has the DEFAULT flag
1219 /// 3. Container-level default: if the container has `#[facet(default)]` and
1220 /// the field's type implements Default, use that
1221 /// 4. Special cases: `Option<T>` (defaults to None), () (unit type)
1222 ///
1223 /// Returns true if a default was applied, false otherwise.
1224 ///
1225 /// # Safety
1226 ///
1227 /// `field_ptr` must point to uninitialized memory of the appropriate type.
1228 unsafe fn try_init_field_default(
1229 field: &Field,
1230 field_ptr: PtrUninit,
1231 container_has_default: bool,
1232 ) -> bool {
1233 use facet_core::DefaultSource;
1234
1235 // First check for explicit field-level default
1236 if let Some(default_source) = field.default {
1237 match default_source {
1238 DefaultSource::Custom(default_fn) => {
1239 // Custom default function - it expects PtrUninit
1240 unsafe { default_fn(field_ptr) };
1241 return true;
1242 }
1243 DefaultSource::FromTrait => {
1244 // Use the type's Default trait
1245 if unsafe { field.shape().call_default_in_place(field_ptr) }.is_some() {
1246 return true;
1247 }
1248 }
1249 }
1250 }
1251
1252 // If container has #[facet(default)] and the field's type implements Default,
1253 // use the type's Default impl. This allows `#[facet(default)]` on a struct to
1254 // mean "use Default for any missing fields whose types implement Default".
1255 if container_has_default
1256 && unsafe { field.shape().call_default_in_place(field_ptr) }.is_some()
1257 {
1258 return true;
1259 }
1260
1261 // Special case: Option<T> always defaults to None, even without explicit #[facet(default)]
1262 // This is because Option is fundamentally "optional" - if not set, it should be None
1263 if matches!(field.shape().def, Def::Option(_))
1264 && unsafe { field.shape().call_default_in_place(field_ptr) }.is_some()
1265 {
1266 return true;
1267 }
1268
1269 // Special case: () unit type always defaults to ()
1270 if field.shape().is_type::<()>()
1271 && unsafe { field.shape().call_default_in_place(field_ptr) }.is_some()
1272 {
1273 return true;
1274 }
1275
1276 // Special case: Collection types (Vec, HashMap, HashSet, etc.) default to empty
1277 // These types have obvious "zero values" and it's almost always what you want
1278 // when deserializing data where the collection is simply absent.
1279 if matches!(field.shape().def, Def::List(_) | Def::Map(_) | Def::Set(_))
1280 && unsafe { field.shape().call_default_in_place(field_ptr) }.is_some()
1281 {
1282 return true;
1283 }
1284
1285 false
1286 }
1287
1288 /// Drain all initialized elements from the rope into the Vec.
1289 ///
1290 /// This is called when finalizing a list that used rope storage. Elements were
1291 /// built in stable rope chunks to allow deferred processing; now we move them
1292 /// into the actual Vec.
1293 ///
1294 /// # Safety
1295 ///
1296 /// The rope must contain only initialized elements (via `mark_last_initialized`).
1297 /// The list_data must point to an initialized Vec with capacity for the elements.
1298 fn drain_rope_into_vec(
1299 mut rope: ListRope,
1300 list_def: &facet_core::ListDef,
1301 list_data: PtrUninit,
1302 ) -> Result<(), ReflectErrorKind> {
1303 let count = rope.initialized_count();
1304 if count == 0 {
1305 return Ok(());
1306 }
1307
1308 let push_fn = list_def
1309 .push()
1310 .ok_or_else(|| ReflectErrorKind::OperationFailed {
1311 shape: list_def.t(),
1312 operation: "List missing push function for rope drain",
1313 })?;
1314
1315 // SAFETY: list_data points to initialized Vec (is_init was true)
1316 let list_ptr = unsafe { list_data.assume_init() };
1317
1318 // Reserve space if available (optimization, not required)
1319 if let Some(reserve_fn) = list_def.reserve() {
1320 unsafe {
1321 reserve_fn(list_ptr, count);
1322 }
1323 }
1324
1325 // Move each element from rope to Vec
1326 // SAFETY: rope contains `count` initialized elements
1327 unsafe {
1328 rope.drain_into(|element_ptr| {
1329 push_fn(
1330 facet_core::PtrMut::new(list_ptr.as_mut_byte_ptr()),
1331 facet_core::PtrMut::new(element_ptr.as_ptr()),
1332 );
1333 });
1334 }
1335
1336 Ok(())
1337 }
1338
    /// Insert all pending key-value entries into the map.
    ///
    /// This is called when finalizing a map that used delayed insertion. Entries were
    /// kept in pending_entries to allow deferred processing; now we insert them into
    /// the actual map and deallocate the temporary buffers.
    fn drain_pending_into_map(
        pending_entries: &mut Vec<(PtrUninit, Option<PtrUninit>)>,
        map_def: &facet_core::MapDef,
        map_data: PtrUninit,
    ) -> Result<(), ReflectErrorKind> {
        let insert_fn = map_def.vtable.insert;

        // SAFETY: map_data points to initialized map (is_init was true)
        let map_ptr = unsafe { map_data.assume_init() };

        for (key_ptr, value_ptr) in pending_entries.drain(..) {
            // Every entry at finalize time MUST be a full (key, value) pair. A half-entry
            // (value is None) means a key was pushed without a paired value — that's an
            // invariant violation.
            let Some(value_ptr) = value_ptr else {
                return Err(ReflectErrorKind::InvariantViolation {
                    invariant: "map pending_entries contains half-entry (key without value) at finalize",
                });
            };
            // Insert the key-value pair
            // SAFETY: both buffers hold fully-initialized values of the map's
            // key/value shapes; insert_fn moves them into the map.
            unsafe {
                insert_fn(
                    facet_core::PtrMut::new(map_ptr.as_mut_byte_ptr()),
                    facet_core::PtrMut::new(key_ptr.as_mut_byte_ptr()),
                    facet_core::PtrMut::new(value_ptr.as_mut_byte_ptr()),
                );
            }

            // Deallocate the temporary buffers (insert moved the data)
            if let Ok(key_layout) = map_def.k().layout.sized_layout()
                && key_layout.size() > 0
            {
                unsafe { alloc::alloc::dealloc(key_ptr.as_mut_byte_ptr(), key_layout) };
            }
            if let Ok(value_layout) = map_def.v().layout.sized_layout()
                && value_layout.size() > 0
            {
                unsafe { alloc::alloc::dealloc(value_ptr.as_mut_byte_ptr(), value_layout) };
            }
        }

        Ok(())
    }
1387
1388 /// Complete an Option by moving the pending inner value into it.
1389 ///
1390 /// This is called when finalizing an Option that used deferred init_some.
1391 /// The inner value was kept in stable memory for deferred processing;
1392 /// now we move it into the Option and deallocate the temporary buffer.
1393 fn complete_pending_option(
1394 option_def: facet_core::OptionDef,
1395 option_data: PtrUninit,
1396 inner_ptr: PtrUninit,
1397 ) -> Result<(), ReflectErrorKind> {
1398 let init_some_fn = option_def.vtable.init_some;
1399 let inner_shape = option_def.t;
1400
1401 // The inner_ptr contains the initialized inner value
1402 let inner_value_ptr = unsafe { inner_ptr.assume_init() };
1403
1404 // Initialize the Option as Some(inner_value)
1405 unsafe {
1406 init_some_fn(option_data, inner_value_ptr);
1407 }
1408
1409 // Deallocate the inner value's memory since init_some_fn moved it
1410 if let Ok(layout) = inner_shape.layout.sized_layout()
1411 && layout.size() > 0
1412 {
1413 unsafe { alloc::alloc::dealloc(inner_ptr.as_mut_byte_ptr(), layout) };
1414 }
1415
1416 Ok(())
1417 }
1418
1419 fn complete_pending_smart_pointer(
1420 smart_ptr_shape: &'static Shape,
1421 smart_ptr_def: facet_core::PointerDef,
1422 smart_ptr_data: PtrUninit,
1423 inner_ptr: PtrUninit,
1424 ) -> Result<(), ReflectErrorKind> {
1425 // Check for sized pointee case first (uses new_into_fn)
1426 if let Some(new_into_fn) = smart_ptr_def.vtable.new_into_fn {
1427 let Some(inner_shape) = smart_ptr_def.pointee else {
1428 return Err(ReflectErrorKind::OperationFailed {
1429 shape: smart_ptr_shape,
1430 operation: "SmartPointer missing pointee shape",
1431 });
1432 };
1433
1434 // The inner_ptr contains the initialized inner value
1435 let _ = unsafe { inner_ptr.assume_init() };
1436
1437 // Initialize the SmartPointer with the inner value
1438 unsafe {
1439 new_into_fn(smart_ptr_data, PtrMut::new(inner_ptr.as_mut_byte_ptr()));
1440 }
1441
1442 // Deallocate the inner value's memory since new_into_fn moved it
1443 if let Ok(layout) = inner_shape.layout.sized_layout()
1444 && layout.size() > 0
1445 {
1446 unsafe { alloc::alloc::dealloc(inner_ptr.as_mut_byte_ptr(), layout) };
1447 }
1448
1449 return Ok(());
1450 }
1451
1452 // Check for unsized pointee case: String -> Arc<str>/Box<str>/Rc<str>
1453 if let Some(pointee) = smart_ptr_def.pointee()
1454 && pointee.is_shape(str::SHAPE)
1455 {
1456 use alloc::{rc::Rc, string::String, sync::Arc};
1457 use facet_core::KnownPointer;
1458
1459 let Some(known) = smart_ptr_def.known else {
1460 return Err(ReflectErrorKind::OperationFailed {
1461 shape: smart_ptr_shape,
1462 operation: "SmartPointer<str> missing known pointer type",
1463 });
1464 };
1465
1466 // Read the String value from inner_ptr
1467 let string_ptr = inner_ptr.as_mut_byte_ptr() as *mut String;
1468 let string_value = unsafe { core::ptr::read(string_ptr) };
1469
1470 // Convert to the appropriate smart pointer type
1471 match known {
1472 KnownPointer::Box => {
1473 let boxed: alloc::boxed::Box<str> = string_value.into_boxed_str();
1474 unsafe {
1475 core::ptr::write(
1476 smart_ptr_data.as_mut_byte_ptr() as *mut alloc::boxed::Box<str>,
1477 boxed,
1478 );
1479 }
1480 }
1481 KnownPointer::Arc => {
1482 let arc: Arc<str> = Arc::from(string_value.into_boxed_str());
1483 unsafe {
1484 core::ptr::write(smart_ptr_data.as_mut_byte_ptr() as *mut Arc<str>, arc);
1485 }
1486 }
1487 KnownPointer::Rc => {
1488 let rc: Rc<str> = Rc::from(string_value.into_boxed_str());
1489 unsafe {
1490 core::ptr::write(smart_ptr_data.as_mut_byte_ptr() as *mut Rc<str>, rc);
1491 }
1492 }
1493 _ => {
1494 return Err(ReflectErrorKind::OperationFailed {
1495 shape: smart_ptr_shape,
1496 operation: "Unsupported SmartPointer<str> type",
1497 });
1498 }
1499 }
1500
1501 // Deallocate the String's memory (we moved the data out via ptr::read)
1502 let string_layout = alloc::string::String::SHAPE.layout.sized_layout().unwrap();
1503 if string_layout.size() > 0 {
1504 unsafe { alloc::alloc::dealloc(inner_ptr.as_mut_byte_ptr(), string_layout) };
1505 }
1506
1507 return Ok(());
1508 }
1509
1510 Err(ReflectErrorKind::OperationFailed {
1511 shape: smart_ptr_shape,
1512 operation: "SmartPointer missing new_into_fn and not a supported unsized type",
1513 })
1514 }
1515
    /// Returns an error if the value is not fully initialized.
    /// For lists with rope storage, drains the rope into the Vec.
    /// For maps with pending entries, drains the entries into the map.
    /// For options with pending inner values, calls init_some.
    ///
    /// Note: despite the name, this method also *finalizes* deferred storage
    /// (rope lists, pending map entries, pending Option/SmartPointer inners),
    /// so it must run before the frame's value is considered built.
    fn require_full_initialization(&mut self) -> Result<(), ReflectErrorKind> {
        match &mut self.tracker {
            // Plain value: the is_init flag alone decides.
            Tracker::Scalar => {
                if self.is_init {
                    Ok(())
                } else {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::Array { iset, .. } => {
                match self.allocated.shape().ty {
                    Type::Sequence(facet_core::SequenceType::Array(array_def)) => {
                        // Check if all array elements are initialized
                        if (0..array_def.n).all(|idx| iset.get(idx)) {
                            Ok(())
                        } else {
                            Err(ReflectErrorKind::UninitializedValue {
                                shape: self.allocated.shape(),
                            })
                        }
                    }
                    // Array tracker on a non-array shape: treat as uninitialized.
                    _ => Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    }),
                }
            }
            Tracker::Struct { iset, .. } => {
                match self.allocated.shape().ty {
                    Type::User(UserType::Struct(struct_type)) => {
                        if iset.all_set(struct_type.fields.len()) {
                            Ok(())
                        } else {
                            // Find index of the first bit not set
                            let first_missing_idx =
                                (0..struct_type.fields.len()).find(|&idx| !iset.get(idx));
                            if let Some(missing_idx) = first_missing_idx {
                                let field_name = struct_type.fields[missing_idx].name;
                                Err(ReflectErrorKind::UninitializedField {
                                    shape: self.allocated.shape(),
                                    field_name,
                                })
                            } else {
                                // fallback, something went wrong
                                Err(ReflectErrorKind::UninitializedValue {
                                    shape: self.allocated.shape(),
                                })
                            }
                        }
                    }
                    // Struct tracker on a non-struct shape: treat as uninitialized.
                    _ => Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    }),
                }
            }
            Tracker::Enum { variant, data, .. } => {
                // Check if all fields of the variant are initialized
                let num_fields = variant.data.fields.len();
                if num_fields == 0 {
                    // Unit variant, always initialized
                    Ok(())
                } else if (0..num_fields).all(|idx| data.get(idx)) {
                    Ok(())
                } else {
                    // Find the first uninitialized field
                    let first_missing_idx = (0..num_fields).find(|&idx| !data.get(idx));
                    if let Some(missing_idx) = first_missing_idx {
                        let field_name = variant.data.fields[missing_idx].name;
                        Err(ReflectErrorKind::UninitializedField {
                            shape: self.allocated.shape(),
                            field_name,
                        })
                    } else {
                        Err(ReflectErrorKind::UninitializedValue {
                            shape: self.allocated.shape(),
                        })
                    }
                }
            }
            Tracker::SmartPointer {
                building_inner,
                pending_inner,
            } => {
                if *building_inner {
                    // Inner value is still being built
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else if let Some(inner_ptr) = pending_inner.take() {
                    // Finalize the pending inner value. Note: `take()` transfers
                    // ownership of the buffer to complete_pending_smart_pointer.
                    let smart_ptr_shape = self.allocated.shape();
                    if let Def::Pointer(smart_ptr_def) = smart_ptr_shape.def {
                        Self::complete_pending_smart_pointer(
                            smart_ptr_shape,
                            smart_ptr_def,
                            self.data,
                            inner_ptr,
                        )?;
                        self.is_init = true;
                        Ok(())
                    } else {
                        Err(ReflectErrorKind::OperationFailed {
                            shape: smart_ptr_shape,
                            operation: "SmartPointer frame without SmartPointer definition",
                        })
                    }
                } else if self.is_init {
                    Ok(())
                } else {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::SmartPointerSlice { building_item, .. } => {
                if *building_item {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else {
                    Ok(())
                }
            }
            Tracker::List {
                current_child,
                rope,
            } => {
                if self.is_init && current_child.is_none() {
                    // Drain rope into Vec if we have elements stored there
                    if let Some(rope) = rope.take()
                        && let Def::List(list_def) = self.allocated.shape().def
                    {
                        Self::drain_rope_into_vec(rope, &list_def, self.data)?;
                    }
                    Ok(())
                } else {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::Map {
                insert_state,
                pending_entries,
                ..
            } => {
                if self.is_init && matches!(insert_state, MapInsertState::Idle) {
                    // Insert all pending entries into the map
                    if !pending_entries.is_empty()
                        && let Def::Map(map_def) = self.allocated.shape().def
                    {
                        Self::drain_pending_into_map(pending_entries, &map_def, self.data)?;
                    }
                    Ok(())
                } else {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::Set { current_child } => {
                if self.is_init && !*current_child {
                    Ok(())
                } else {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::Option {
                building_inner,
                pending_inner,
            } => {
                if *building_inner {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else {
                    // Finalize pending init_some if we have a pending inner value
                    if let Some(inner_ptr) = pending_inner.take()
                        && let Def::Option(option_def) = self.allocated.shape().def
                    {
                        Self::complete_pending_option(option_def, self.data, inner_ptr)?;
                    }
                    Ok(())
                }
            }
            Tracker::Result { building_inner, .. } => {
                if *building_inner {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else {
                    Ok(())
                }
            }
            Tracker::Inner { building_inner } => {
                if *building_inner {
                    // Inner value is still being built
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else if self.is_init {
                    Ok(())
                } else {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                }
            }
            Tracker::DynamicValue { state } => {
                if matches!(state, DynamicValueState::Uninit) {
                    Err(ReflectErrorKind::UninitializedValue {
                        shape: self.allocated.shape(),
                    })
                } else {
                    // Insert pending entries for Object state
                    if let DynamicValueState::Object {
                        pending_entries,
                        insert_state,
                    } = state
                    {
                        if !matches!(insert_state, DynamicObjectInsertState::Idle) {
                            return Err(ReflectErrorKind::UninitializedValue {
                                shape: self.allocated.shape(),
                            });
                        }

                        if !pending_entries.is_empty()
                            && let Def::DynamicValue(dyn_def) = self.allocated.shape().def
                        {
                            // SAFETY: DynamicValue state is not Uninit here, so
                            // self.data holds an initialized object.
                            let object_ptr = unsafe { self.data.assume_init() };
                            // Value entries share the DynamicValue's own shape.
                            let value_shape = self.allocated.shape();

                            for (key, value_ptr) in pending_entries.drain(..) {
                                // Insert the entry
                                unsafe {
                                    (dyn_def.vtable.insert_object_entry)(
                                        object_ptr,
                                        &key,
                                        value_ptr.assume_init(),
                                    );
                                }
                                // Deallocate the value buffer (insert_object_entry moved the value)
                                if let Ok(layout) = value_shape.layout.sized_layout()
                                    && layout.size() > 0
                                {
                                    unsafe {
                                        alloc::alloc::dealloc(value_ptr.as_mut_byte_ptr(), layout);
                                    }
                                }
                            }
                        }
                    }

                    // Insert pending elements for Array state
                    if let DynamicValueState::Array {
                        pending_elements,
                        building_element,
                    } = state
                    {
                        if *building_element {
                            return Err(ReflectErrorKind::UninitializedValue {
                                shape: self.allocated.shape(),
                            });
                        }

                        if !pending_elements.is_empty()
                            && let Def::DynamicValue(dyn_def) = self.allocated.shape().def
                        {
                            // SAFETY: DynamicValue state is not Uninit here, so
                            // self.data holds an initialized array.
                            let array_ptr = unsafe { self.data.assume_init() };
                            // Array elements share the DynamicValue's own shape.
                            let element_shape = self.allocated.shape();

                            for element_ptr in pending_elements.drain(..) {
                                // Push the element into the array
                                unsafe {
                                    (dyn_def.vtable.push_array_element)(
                                        array_ptr,
                                        element_ptr.assume_init(),
                                    );
                                }
                                // Deallocate the element buffer (push_array_element moved the value)
                                if let Ok(layout) = element_shape.layout.sized_layout()
                                    && layout.size() > 0
                                {
                                    unsafe {
                                        alloc::alloc::dealloc(
                                            element_ptr.as_mut_byte_ptr(),
                                            layout,
                                        );
                                    }
                                }
                            }
                        }
                    }

                    Ok(())
                }
            }
        }
    }
1822
    /// Fill defaults and check required fields in a single pass using precomputed plans.
    ///
    /// This replaces the separate `fill_defaults` + `require_full_initialization` calls
    /// with a single iteration over the precomputed `FieldInitPlan` list.
    ///
    /// Per-field validators run for every plan entry in this pass, whether the field
    /// was set by the caller or just filled with a default below.
    ///
    /// # Arguments
    /// * `plans` - Precomputed field initialization plans from TypePlan
    /// * `num_fields` - Total number of fields (from StructPlan/VariantPlanMeta)
    /// * `type_plan_core` - Reference to the TypePlanCore for resolving validators
    ///
    /// # Returns
    /// `Ok(())` if all required fields are set (or filled with defaults), or an error
    /// describing the first missing required field.
    #[allow(unsafe_code)]
    fn fill_and_require_fields(
        &mut self,
        plans: &[FieldInitPlan],
        num_fields: usize,
        type_plan_core: &TypePlanCore,
    ) -> Result<(), ReflectErrorKind> {
        // With lazy tracker initialization, structs start with Tracker::Scalar.
        // If is_init is true with Scalar, the struct was set wholesale - nothing to do.
        // If is_init is false, we need to upgrade to Tracker::Struct to track fields.
        if !self.is_init
            && matches!(self.tracker, Tracker::Scalar)
            && matches!(self.allocated.shape().ty, Type::User(UserType::Struct(_)))
        {
            // Try container-level default first
            // SAFETY: self.data is this frame's (still uninitialized) storage for the
            // shape; call_default_in_place writes a complete value into it on success.
            if unsafe { self.allocated.shape().call_default_in_place(self.data) }.is_some() {
                self.is_init = true;
                // NOTE(review): this early return also skips the per-field validators
                // below for the container-default path - confirm that is intended.
                return Ok(());
            }
            // Upgrade to Tracker::Struct for field-by-field tracking
            self.tracker = Tracker::Struct {
                iset: ISet::new(num_fields),
                current_child: None,
            };
        }

        // Get the iset based on tracker type
        // NOTE(review): for Tracker::Enum, plan.offset presumably already includes the
        // variant payload offset - confirm against TypePlan construction.
        let iset = match &mut self.tracker {
            Tracker::Struct { iset, .. } => iset,
            Tracker::Enum { data, .. } => data,
            // Scalar with is_init=true means struct was set wholesale - all fields initialized
            Tracker::Scalar if self.is_init => return Ok(()),
            // Other tracker types don't use field_init_plans
            _ => return Ok(()),
        };

        // Fast path: if all fields are already set, no defaults needed.
        // But validators still need to run.
        let all_fields_set = iset.all_set(num_fields);

        for plan in plans {
            if !all_fields_set && !iset.get(plan.index) {
                // Field not set - handle according to fill rule
                match &plan.fill_rule {
                    FillRule::Defaultable(default) => {
                        // Calculate field pointer
                        // SAFETY: plan.offset is the field's byte offset inside this
                        // frame's allocation, so the derived pointer stays in-bounds.
                        let field_ptr = unsafe { self.data.field_uninit(plan.offset) };

                        // Call the appropriate default function
                        let success = match default {
                            FieldDefault::Custom(default_fn) => {
                                // SAFETY: default_fn writes to uninitialized memory
                                unsafe { default_fn(field_ptr) };
                                true
                            }
                            FieldDefault::FromTrait(shape) => {
                                // SAFETY: call_default_in_place writes to uninitialized memory
                                unsafe { shape.call_default_in_place(field_ptr) }.is_some()
                            }
                        };

                        if success {
                            iset.set(plan.index);
                        } else {
                            // Shape had no usable Default impl: the field stays
                            // uninitialized, which is an error at build time.
                            return Err(ReflectErrorKind::UninitializedField {
                                shape: self.allocated.shape(),
                                field_name: plan.name,
                            });
                        }
                    }
                    FillRule::Required => {
                        return Err(ReflectErrorKind::UninitializedField {
                            shape: self.allocated.shape(),
                            field_name: plan.name,
                        });
                    }
                }
            }

            // Run validators on the (now initialized) field
            if !plan.validators.is_empty() {
                // SAFETY: the field is initialized here - either it was already set
                // (iset bit / all_fields_set) or a default was just written above.
                let field_ptr = unsafe { self.data.field_init(plan.offset) };
                for validator in type_plan_core.validators(plan.validators) {
                    validator.run(field_ptr.into(), plan.name, self.allocated.shape())?;
                }
            }
        }

        Ok(())
    }
1926
1927 /// Get the [EnumType] of the frame's shape, if it is an enum type
1928 pub(crate) const fn get_enum_type(&self) -> Result<EnumType, ReflectErrorKind> {
1929 match self.allocated.shape().ty {
1930 Type::User(UserType::Enum(e)) => Ok(e),
1931 _ => Err(ReflectErrorKind::WasNotA {
1932 expected: "enum",
1933 actual: self.allocated.shape(),
1934 }),
1935 }
1936 }
1937
1938 pub(crate) fn get_field(&self) -> Option<&Field> {
1939 match self.allocated.shape().ty {
1940 Type::User(user_type) => match user_type {
1941 UserType::Struct(struct_type) => {
1942 // Try to get currently active field index
1943 if let Tracker::Struct {
1944 current_child: Some(idx),
1945 ..
1946 } = &self.tracker
1947 {
1948 struct_type.fields.get(*idx)
1949 } else {
1950 None
1951 }
1952 }
1953 UserType::Enum(_enum_type) => {
1954 if let Tracker::Enum {
1955 variant,
1956 current_child: Some(idx),
1957 ..
1958 } = &self.tracker
1959 {
1960 variant.data.fields.get(*idx)
1961 } else {
1962 None
1963 }
1964 }
1965 _ => None,
1966 },
1967 _ => None,
1968 }
1969 }
1970}
1971
1972// Convenience methods on Partial for accessing FrameMode internals.
1973// These help minimize changes to the rest of the codebase during the refactor.
1974impl<'facet, const BORROW: bool> Partial<'facet, BORROW> {
1975 /// Get a reference to the frame stack.
1976 #[inline]
1977 pub(crate) const fn frames(&self) -> &Vec<Frame> {
1978 self.mode.stack()
1979 }
1980
1981 /// Get a mutable reference to the frame stack.
1982 #[inline]
1983 pub(crate) fn frames_mut(&mut self) -> &mut Vec<Frame> {
1984 self.mode.stack_mut()
1985 }
1986
1987 /// Check if we're in deferred mode.
1988 #[inline]
1989 pub const fn is_deferred(&self) -> bool {
1990 self.mode.is_deferred()
1991 }
1992
1993 /// Get the start depth if in deferred mode.
1994 #[inline]
1995 pub(crate) const fn start_depth(&self) -> Option<usize> {
1996 self.mode.start_depth()
1997 }
1998
1999 /// Derive the path from the current frame stack.
2000 ///
2001 /// Compute the navigation path for deferred mode storage and lookup.
2002 /// The returned `Path` is anchored to the root shape for proper type context.
2003 ///
2004 /// This extracts Field steps from struct/enum frames and Index steps from
2005 /// array/list frames. Option wrappers, smart pointers (Box, Rc, etc.), and
2006 /// other transparent types don't add path steps.
2007 ///
2008 /// This MUST match the storage path computation in end() for consistency.
2009 pub(crate) fn derive_path(&self) -> Path {
2010 // Get the root shape from the first frame
2011 let root_shape = self
2012 .frames()
2013 .first()
2014 .map(|f| f.allocated.shape())
2015 .unwrap_or_else(|| {
2016 // Fallback to unit type shape if no frames (shouldn't happen in practice)
2017 <() as facet_core::Facet>::SHAPE
2018 });
2019
2020 let mut path = Path::new(root_shape);
2021
2022 // Walk ALL frames, extracting navigation steps
2023 // This matches the storage path computation in end()
2024 let frames = self.frames();
2025 for (frame_idx, frame) in frames.iter().enumerate() {
2026 match &frame.tracker {
2027 Tracker::Struct {
2028 current_child: Some(idx),
2029 ..
2030 } => {
2031 path.push(PathStep::Field(*idx as u32));
2032 }
2033 Tracker::Enum {
2034 current_child: Some(idx),
2035 ..
2036 } => {
2037 path.push(PathStep::Field(*idx as u32));
2038 }
2039 Tracker::List {
2040 current_child: Some(idx),
2041 ..
2042 } => {
2043 path.push(PathStep::Index(*idx as u32));
2044 }
2045 Tracker::Array {
2046 current_child: Some(idx),
2047 ..
2048 } => {
2049 path.push(PathStep::Index(*idx as u32));
2050 }
2051 Tracker::Option {
2052 building_inner: true,
2053 ..
2054 } => {
2055 // Option with building_inner contributes OptionSome to path
2056 path.push(PathStep::OptionSome);
2057 }
2058 Tracker::SmartPointer {
2059 building_inner: true,
2060 ..
2061 } => {
2062 // SmartPointer with building_inner contributes Deref to path
2063 path.push(PathStep::Deref);
2064 }
2065 Tracker::SmartPointerSlice {
2066 current_child: Some(idx),
2067 ..
2068 } => {
2069 // SmartPointerSlice with current_child contributes Index to path
2070 path.push(PathStep::Index(*idx as u32));
2071 }
2072 Tracker::Inner {
2073 building_inner: true,
2074 } => {
2075 // Inner with building_inner contributes Inner to path
2076 path.push(PathStep::Inner);
2077 }
2078 Tracker::Map {
2079 current_entry_index: Some(idx),
2080 building_key,
2081 ..
2082 } => {
2083 // Map with active entry contributes MapKey or MapValue with entry index
2084 if *building_key {
2085 path.push(PathStep::MapKey(*idx as u32));
2086 } else {
2087 path.push(PathStep::MapValue(*idx as u32));
2088 }
2089 }
2090 // Other tracker types (Set, Result, etc.)
2091 // don't contribute to the storage path - they're transparent wrappers
2092 _ => {}
2093 }
2094
2095 // If the next frame is a proxy frame, add a Proxy step (matches end())
2096 if frame_idx + 1 < frames.len() && frames[frame_idx + 1].using_custom_deserialization {
2097 path.push(PathStep::Proxy);
2098 }
2099 }
2100
2101 path
2102 }
2103}
2104
/// Drops the Partial, cleaning up any partially-initialized frames without
/// double-freeing memory shared between parent and child frames.
impl<'facet, const BORROW: bool> Drop for Partial<'facet, BORROW> {
    fn drop(&mut self) {
        trace!("๐งน Partial is being dropped");

        // With the ownership transfer model:
        // - When we enter a field, parent's iset[idx] is cleared
        // - Parent won't try to drop fields with iset[idx] = false
        // - No double-free possible by construction

        // 1. Clean up stored frames from deferred state
        if let FrameMode::Deferred {
            stored_frames,
            stack,
            ..
        } = &mut self.mode
        {
            // Stored frames have ownership of their data (parent's iset was cleared).
            // IMPORTANT: Process in deepest-first order so children are dropped before parents.
            // Child frames have data pointers into parent memory, so parents must stay valid
            // until all their children are cleaned up.
            //
            // CRITICAL: Before dropping a child frame, we must mark the parent's field as
            // uninitialized. Otherwise, when we later drop the parent, it will try to drop
            // that field again, causing a double-free.
            //
            // Taking the map out of self.mode lets us mutate it while also
            // borrowing `stack` below.
            let mut stored_frames = core::mem::take(stored_frames);
            let mut paths: Vec<_> = stored_frames.keys().cloned().collect();
            // Sort by path depth (number of steps), deepest first
            paths.sort_by_key(|p| core::cmp::Reverse(p.steps.len()));
            for path in paths {
                if let Some(mut frame) = stored_frames.remove(&path) {
                    // Before dropping this frame, update the parent to prevent double-free.
                    // The parent path is everything except the last step.
                    let parent_path = Path {
                        shape: path.shape,
                        steps: path.steps[..path.steps.len().saturating_sub(1)].to_vec(),
                    };

                    // Helper to find parent frame in stored_frames or stack.
                    // Returns a raw pointer so the closure's &mut borrows end
                    // before we mutate through the result below.
                    // NOTE(review): the stack fallback assumes a frame's stack
                    // index equals its path depth - confirm this invariant.
                    let find_parent_frame =
                        |stored: &mut alloc::collections::BTreeMap<Path, Frame>,
                         stk: &mut [Frame],
                         pp: &Path|
                         -> Option<*mut Frame> {
                            if let Some(pf) = stored.get_mut(pp) {
                                Some(pf as *mut Frame)
                            } else {
                                let idx = pp.steps.len();
                                stk.get_mut(idx).map(|f| f as *mut Frame)
                            }
                        };

                    match path.steps.last() {
                        Some(PathStep::Field(field_idx)) => {
                            let field_idx = *field_idx as usize;
                            if let Some(parent_ptr) =
                                find_parent_frame(&mut stored_frames, stack, &parent_path)
                            {
                                // SAFETY: parent_ptr is valid for the duration of this block
                                let parent_frame = unsafe { &mut *parent_ptr };
                                // Clear the parent's "field initialized" bit so the
                                // parent's own deinit skips this (child-owned) field.
                                match &mut parent_frame.tracker {
                                    Tracker::Struct { iset, .. } => {
                                        iset.unset(field_idx);
                                    }
                                    Tracker::Enum { data, .. } => {
                                        data.unset(field_idx);
                                    }
                                    _ => {}
                                }
                            }
                        }
                        // Stored map key/value frames keep their TrackedBuffer ownership
                        // until consume-time in finish_deferred; pending_entries is only
                        // populated by the walk, so at Partial::drop it contains nothing
                        // referencing these frames. Their own deinit/dealloc handles the
                        // partial-init drop. No parent-side cleanup required.
                        Some(PathStep::Index(_)) => {
                            // List element frames with RopeSlot ownership are handled by
                            // the deinit check for RopeSlot - they skip dropping since the
                            // rope owns the data. No parent update needed.
                        }
                        _ => {}
                    }
                    frame.deinit();
                    frame.dealloc();
                }
            }
        }

        // 2. Pop and deinit stack frames
        // CRITICAL: Before deiniting a child frame, we must mark the parent's field as
        // uninitialized. Otherwise, the parent will try to drop the field again.
        loop {
            let stack = self.mode.stack_mut();
            if stack.is_empty() {
                break;
            }

            let mut frame = stack.pop().unwrap();

            // If this frame has Field ownership, mark the parent's bit as unset
            // so the parent won't try to drop it again.
            if let FrameOwnership::Field { field_idx } = frame.ownership
                && let Some(parent_frame) = stack.last_mut()
            {
                match &mut parent_frame.tracker {
                    Tracker::Struct { iset, .. } => {
                        iset.unset(field_idx);
                    }
                    Tracker::Enum { data, .. } => {
                        data.unset(field_idx);
                    }
                    Tracker::Array { iset, .. } => {
                        iset.unset(field_idx);
                    }
                    _ => {}
                }
            }

            frame.deinit();
            frame.dealloc();
        }
    }
}
2228
#[cfg(test)]
mod size_tests {
    use super::*;
    use core::mem::size_of;

    /// Informational test: print the memory footprint of the core reflection
    /// types to stderr (run with `--nocapture` to see the report).
    #[test]
    fn print_type_sizes() {
        // Local helper: emit "<label>: <n> bytes" for a type.
        macro_rules! show {
            ($label:expr, $ty:ty) => {
                eprintln!("{}: {} bytes", $label, size_of::<$ty>());
            };
        }

        eprintln!("\n=== Type Sizes ===");
        show!("Frame", Frame);
        show!("Tracker", Tracker);
        show!("ISet", ISet);
        show!("AllocatedShape", AllocatedShape);
        show!("FrameOwnership", FrameOwnership);
        show!("PtrUninit", facet_core::PtrUninit);
        show!("Option<usize>", Option<usize>);
        show!(
            "Option<&'static facet_core::ProxyDef>",
            Option<&'static facet_core::ProxyDef>
        );
        show!("TypePlanNode", typeplan::TypePlanNode);
        show!("Vec<Frame>", Vec<Frame>);
        show!("MapInsertState", MapInsertState);
        show!("DynamicValueState", DynamicValueState);
        eprintln!("===================\n");
    }
}