//! facet_reflect/partial/partial_api/misc.rs — miscellaneous `Partial` API methods.
use facet_core::TryFromOutcome;
use facet_path::{Path, PathStep};

use super::*;
use crate::typeplan::{DeserStrategy, TypePlanNodeKind};

////////////////////////////////////////////////////////////////////////////////////////////////////
// Misc.
////////////////////////////////////////////////////////////////////////////////////////////////////
impl<'facet, const BORROW: bool> Partial<'facet, BORROW> {
11    /// Applies a closure to this Partial, enabling chaining with operations that
12    /// take ownership and return `Result<Self, E>`.
13    ///
14    /// This is useful for chaining deserializer methods that need `&mut self`:
15    ///
16    /// ```ignore
17    /// wip = wip
18    ///     .begin_field("name")?
19    ///     .with(|w| deserializer.deserialize_into(w))?
20    ///     .end()?;
21    /// ```
22    #[inline]
23    pub fn with<F, E>(self, f: F) -> Result<Self, E>
24    where
25        F: FnOnce(Self) -> Result<Self, E>,
26    {
27        f(self)
28    }
29
30    /// Returns true if the Partial is in an active state (not built or poisoned).
31    ///
32    /// After `build()` succeeds or after an error causes poisoning, the Partial
33    /// becomes inactive and most operations will fail.
34    #[inline]
35    pub fn is_active(&self) -> bool {
36        self.state == PartialState::Active
37    }
38
    /// Returns the current frame count (depth of nesting).
    ///
    /// The initial frame count is 1 — `begin_field` would push a new frame,
    /// bringing it to 2, then `end` would bring it back to `1`.
    ///
    /// This is an implementation detail of `Partial`, kinda, but deserializers
    /// might use this for debug assertions, to make sure the state is what
    /// they think it is (e.g. that every `begin_*` was matched by an `end`).
    #[inline]
    pub const fn frame_count(&self) -> usize {
        self.frames().len()
    }
51
52    /// Returns the shape of the current frame.
53    ///
54    /// # Panics
55    ///
56    /// Panics if the Partial has been poisoned or built, or if there are no frames
57    /// (which indicates a bug in the Partial implementation).
58    #[inline]
59    pub fn shape(&self) -> &'static Shape {
60        if self.state != PartialState::Active {
61            panic!(
62                "Partial::shape() called on non-active Partial (state: {:?})",
63                self.state
64            );
65        }
66        self.frames()
67            .last()
68            .expect("Partial::shape() called but no frames exist - this is a bug")
69            .allocated
70            .shape()
71    }
72
73    /// Returns the shape of the current frame, or `None` if the Partial is
74    /// inactive (poisoned or built) or has no frames.
75    ///
76    /// This is useful for debugging/logging where you want to inspect the state
77    /// without risking a panic.
78    #[inline]
79    pub fn try_shape(&self) -> Option<&'static Shape> {
80        if self.state != PartialState::Active {
81            return None;
82        }
83        self.frames().last().map(|f| f.allocated.shape())
84    }
85
    /// Returns the TypePlanCore for this Partial.
    ///
    /// This provides access to the arena-based type plan data, useful for
    /// resolving field lookups and accessing precomputed metadata.
    /// The plan is shared by all frames; it is built once for the root type.
    #[inline]
    pub fn type_plan_core(&self) -> &crate::typeplan::TypePlanCore {
        &self.root_plan
    }
94
95    /// Returns the precomputed StructPlan for the current frame, if available.
96    ///
97    /// This provides O(1) or O(log n) field lookup instead of O(n) linear scanning.
98    /// Returns `None` if:
99    /// - The Partial is not active
100    /// - The current frame has no TypePlan (e.g., custom deserialization frames)
101    /// - The current type is not a struct
102    #[inline]
103    pub fn struct_plan(&self) -> Option<&crate::typeplan::StructPlan> {
104        if self.state != PartialState::Active {
105            return None;
106        }
107        let frame = self.frames().last()?;
108        self.root_plan.struct_plan_by_id(frame.type_plan)
109    }
110
111    /// Returns the precomputed EnumPlan for the current frame, if available.
112    ///
113    /// This provides O(1) or O(log n) variant lookup instead of O(n) linear scanning.
114    /// Returns `None` if:
115    /// - The Partial is not active
116    /// - The current type is not an enum
117    #[inline]
118    pub fn enum_plan(&self) -> Option<&crate::typeplan::EnumPlan> {
119        if self.state != PartialState::Active {
120            return None;
121        }
122        let frame = self.frames().last()?;
123        self.root_plan.enum_plan_by_id(frame.type_plan)
124    }
125
126    /// Returns the precomputed field plans for the current frame.
127    ///
128    /// This provides access to precomputed validators and default handling without
129    /// runtime attribute scanning.
130    ///
131    /// Returns `None` if the current type is not a struct or enum variant.
132    #[inline]
133    pub fn field_plans(&self) -> Option<&[crate::typeplan::FieldPlan]> {
134        use crate::typeplan::TypePlanNodeKind;
135        let frame = self.frames().last().unwrap();
136        let node = self.root_plan.node(frame.type_plan);
137        match &node.kind {
138            TypePlanNodeKind::Struct(struct_plan) => {
139                Some(self.root_plan.fields(struct_plan.fields))
140            }
141            TypePlanNodeKind::Enum(enum_plan) => {
142                // For enums, we need the variant index from the tracker
143                if let crate::partial::Tracker::Enum { variant_idx, .. } = &frame.tracker {
144                    self.root_plan
145                        .variants(enum_plan.variants)
146                        .get(*variant_idx)
147                        .map(|v| self.root_plan.fields(v.fields))
148                } else {
149                    None
150                }
151            }
152            _ => None,
153        }
154    }
155
156    /// Returns the precomputed TypePlanNode for the current frame.
157    ///
158    /// This provides access to the precomputed deserialization strategy and
159    /// other metadata computed at Partial allocation time.
160    ///
161    /// Returns `None` if:
162    /// - The Partial is not active
163    /// - There are no frames
164    #[inline]
165    pub fn plan_node(&self) -> Option<&crate::typeplan::TypePlanNode> {
166        if self.state != PartialState::Active {
167            return None;
168        }
169        let frame = self.frames().last()?;
170        Some(self.root_plan.node(frame.type_plan))
171    }
172
173    /// Returns the node ID for the current frame's type plan.
174    ///
175    /// Returns `None` if:
176    /// - The Partial is not active
177    /// - There are no frames
178    #[inline]
179    pub fn plan_node_id(&self) -> Option<crate::typeplan::NodeId> {
180        if self.state != PartialState::Active {
181            return None;
182        }
183        let frame = self.frames().last()?;
184        Some(frame.type_plan)
185    }
186
187    /// Returns the precomputed deserialization strategy for the current frame.
188    ///
189    /// This tells facet-format exactly how to deserialize the current type without
190    /// runtime inspection of Shape/Def/vtable. The strategy is computed once at
191    /// TypePlan build time.
192    ///
193    /// If the current node is a BackRef (recursive type), this automatically
194    /// follows the reference to return the target node's strategy.
195    ///
196    /// Returns `None` if:
197    /// - The Partial is not active
198    /// - There are no frames
199    #[inline]
200    pub fn deser_strategy(&self) -> Option<&DeserStrategy> {
201        let node = self.plan_node()?;
202        // Resolve BackRef if needed - resolve_backref returns the node unchanged if not a BackRef
203        let resolved = self.root_plan.resolve_backref(node);
204        Some(&resolved.strategy)
205    }
206
207    /// Returns the precomputed proxy nodes for the current frame's type.
208    ///
209    /// These contain TypePlan nodes for all proxies (format-agnostic and format-specific)
210    /// on this type, allowing runtime lookup based on format namespace.
211    #[inline]
212    pub fn proxy_nodes(&self) -> Option<&crate::typeplan::ProxyNodes> {
213        let node = self.plan_node()?;
214        let resolved = self.root_plan.resolve_backref(node);
215        Some(&resolved.proxies)
216    }
217
218    /// Returns true if the current frame is building a smart pointer slice (Arc<\[T\]>, Rc<\[T\]>, Box<\[T\]>).
219    ///
220    /// This is used by deserializers to determine if they should deserialize as a list
221    /// rather than recursing into the smart pointer type.
222    #[inline]
223    pub fn is_building_smart_ptr_slice(&self) -> bool {
224        if self.state != PartialState::Active {
225            return false;
226        }
227        self.frames()
228            .last()
229            .is_some_and(|f| matches!(f.tracker, Tracker::SmartPointerSlice { .. }))
230    }
231
232    /// Returns the current path in deferred mode (for debugging/tracing).
233    #[inline]
234    pub fn current_path(&self) -> Option<facet_path::Path> {
235        if self.is_deferred() {
236            Some(self.derive_path())
237        } else {
238            None
239        }
240    }
241
    /// Checks if the current frame should be stored for deferred processing.
    ///
    /// This determines whether a frame can safely be stored and re-entered later
    /// in deferred mode. A frame should be stored if:
    /// 1. It's a re-entrant type (struct, enum, collection, Option)
    /// 2. It has storable ownership (Field or Owned)
    /// 3. It doesn't have a SmartPointer parent that needs immediate completion
    ///
    /// Returns `true` if the frame should be stored, `false` if it should be
    /// validated immediately.
    fn should_store_frame_for_deferred(&self) -> bool {
        // In deferred mode, all frames have stable memory and can be stored.
        // PR #2019 added stable storage for all container elements (ListRope for Vec,
        // pending_entries for Map, pending_inner for Option).
        // The method (and its `&self` receiver) is kept so call sites read as a
        // policy decision and the criteria above can be reinstated if needed.
        true
    }
258
259    /// Enables deferred materialization mode with the given Resolution.
260    ///
261    /// When deferred mode is enabled:
262    /// - `end()` stores frames instead of validating them
263    /// - Re-entering a path restores the stored frame with its state intact
264    /// - `finish_deferred()` performs final validation and materialization
265    ///
266    /// This allows deserializers to handle interleaved fields (e.g., TOML dotted
267    /// keys, flattened structs) where nested fields aren't contiguous in the input.
268    ///
269    /// # Use Cases
270    ///
271    /// - TOML dotted keys: `inner.x = 1` followed by `count = 2` then `inner.y = 3`
272    /// - Flattened structs where nested fields appear at the parent level
273    /// - Any format where field order doesn't match struct nesting
274    ///
275    /// # Errors
276    ///
277    /// Returns an error if already in deferred mode.
278    #[inline]
279    pub fn begin_deferred(mut self) -> Result<Self, ReflectError> {
280        // Cannot enable deferred mode if already in deferred mode
281        if self.is_deferred() {
282            return Err(self.err(ReflectErrorKind::InvariantViolation {
283                invariant: "begin_deferred() called but already in deferred mode",
284            }));
285        }
286
287        // Take the stack out of Strict mode and wrap in Deferred mode
288        let FrameMode::Strict { stack } = core::mem::replace(
289            &mut self.mode,
290            FrameMode::Strict { stack: Vec::new() }, // temporary placeholder
291        ) else {
292            unreachable!("just checked we're not in deferred mode");
293        };
294
295        let start_depth = stack.len();
296        self.mode = FrameMode::Deferred {
297            stack,
298            start_depth,
299            stored_frames: BTreeMap::new(),
300        };
301        Ok(self)
302    }
303
304    /// Finishes deferred mode: validates all stored frames and finalizes.
305    ///
306    /// This method:
307    /// 1. Validates that all stored frames are fully initialized
308    /// 2. Processes frames from deepest to shallowest, updating parent ISets
309    /// 3. Validates the root frame
310    ///
311    /// # Errors
312    ///
313    /// Returns an error if any required fields are missing or if the partial is
314    /// not in deferred mode.
315    pub fn finish_deferred(mut self) -> Result<Self, ReflectError> {
316        // Check if we're in deferred mode first, before extracting state
317        if !self.is_deferred() {
318            return Err(self.err(ReflectErrorKind::InvariantViolation {
319                invariant: "finish_deferred() called but deferred mode is not enabled",
320            }));
321        }
322
323        // Extract deferred state, transitioning back to Strict mode
324        let FrameMode::Deferred {
325            stack,
326            mut stored_frames,
327            ..
328        } = core::mem::replace(&mut self.mode, FrameMode::Strict { stack: Vec::new() })
329        else {
330            unreachable!("just checked is_deferred()");
331        };
332
333        // Restore the stack to self.mode
334        self.mode = FrameMode::Strict { stack };
335
336        // Sort paths by depth (deepest first) so we process children before parents.
337        // For equal-depth paths, we need stable ordering for list elements:
338        // Index(0) must be processed before Index(1) to maintain insertion order.
339        let mut paths: Vec<_> = stored_frames.keys().cloned().collect();
340        paths.sort_by(|a, b| {
341            // Primary: deeper paths first
342            let depth_cmp = b.len().cmp(&a.len());
343            if depth_cmp != core::cmp::Ordering::Equal {
344                return depth_cmp;
345            }
346            // Secondary: for same-depth paths, compare step by step
347            // This ensures Index(0) comes before Index(1) for the same parent
348            for (step_a, step_b) in a.steps.iter().zip(b.steps.iter()) {
349                let step_cmp = step_a.cmp(step_b);
350                if step_cmp != core::cmp::Ordering::Equal {
351                    return step_cmp;
352                }
353            }
354            core::cmp::Ordering::Equal
355        });
356
357        trace!(
358            "finish_deferred: Processing {} stored frames in order: {:?}",
359            paths.len(),
360            paths
361        );
362
363        // Process each stored frame from deepest to shallowest
364        for path in paths {
365            let mut frame = stored_frames.remove(&path).unwrap();
366
367            trace!(
368                "finish_deferred: Processing frame at {:?}, shape {}, tracker {:?}",
369                path,
370                frame.allocated.shape(),
371                frame.tracker.kind()
372            );
373
374            // Special handling for SmartPointerSlice: convert builder to Arc<[T]> before validation
375            if let Tracker::SmartPointerSlice { vtable, .. } = &frame.tracker {
376                let vtable = *vtable;
377                let current_shape = frame.allocated.shape();
378
379                // Convert the builder to Arc<[T]>
380                let builder_ptr = unsafe { frame.data.assume_init() };
381                let arc_ptr = unsafe { (vtable.convert_fn)(builder_ptr) };
382
383                trace!(
384                    "finish_deferred: Converting SmartPointerSlice builder to {}",
385                    current_shape
386                );
387
388                // Handle different ownership cases
389                match frame.ownership {
390                    FrameOwnership::Field { field_idx } => {
391                        // Arc<[T]> is a field in a struct
392                        // Find the parent frame and write the Arc to the field location
393                        let parent_path = facet_path::Path {
394                            shape: path.shape,
395                            steps: path.steps[..path.steps.len() - 1].to_vec(),
396                        };
397
398                        // Paths are absolute from the root, so the parent frame lives at
399                        // stack[parent_path.steps.len()] when it's still on the stack.
400                        let parent_frame_opt =
401                            if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
402                                Some(parent_frame)
403                            } else {
404                                self.frames_mut().get_mut(parent_path.steps.len())
405                            };
406
407                        if let Some(parent_frame) = parent_frame_opt {
408                            // Get the field to find its offset
409                            if let Type::User(UserType::Struct(struct_type)) =
410                                parent_frame.allocated.shape().ty
411                            {
412                                let field = &struct_type.fields[field_idx];
413
414                                // Calculate where the Arc should be written (parent.data + field.offset)
415                                let field_location =
416                                    unsafe { parent_frame.data.field_uninit(field.offset) };
417
418                                // Write the Arc to the parent struct's field location
419                                if let Ok(arc_layout) = current_shape.layout.sized_layout() {
420                                    let arc_size = arc_layout.size();
421                                    unsafe {
422                                        core::ptr::copy_nonoverlapping(
423                                            arc_ptr.as_byte_ptr(),
424                                            field_location.as_mut_byte_ptr(),
425                                            arc_size,
426                                        );
427                                    }
428
429                                    // Free the staging allocation from convert_fn
430                                    unsafe {
431                                        ::alloc::alloc::dealloc(
432                                            arc_ptr.as_byte_ptr() as *mut u8,
433                                            arc_layout,
434                                        );
435                                    }
436
437                                    // Update the frame to point to the correct field location and mark as initialized
438                                    frame.data = field_location;
439                                    frame.tracker = Tracker::Scalar;
440                                    frame.is_init = true;
441
442                                    trace!(
443                                        "finish_deferred: SmartPointerSlice converted and written to field {}",
444                                        field_idx
445                                    );
446                                }
447                            }
448                        }
449                    }
450                    FrameOwnership::Owned => {
451                        // Arc<[T]> is the root - write in place
452                        if let Ok(arc_layout) = current_shape.layout.sized_layout() {
453                            let arc_size = arc_layout.size();
454                            // Allocate new memory for the Arc
455                            let new_ptr = facet_core::alloc_for_layout(arc_layout);
456                            unsafe {
457                                core::ptr::copy_nonoverlapping(
458                                    arc_ptr.as_byte_ptr(),
459                                    new_ptr.as_mut_byte_ptr(),
460                                    arc_size,
461                                );
462                            }
463                            // Free the staging allocation
464                            unsafe {
465                                ::alloc::alloc::dealloc(
466                                    arc_ptr.as_byte_ptr() as *mut u8,
467                                    arc_layout,
468                                );
469                            }
470                            frame.data = new_ptr;
471                            frame.tracker = Tracker::Scalar;
472                            frame.is_init = true;
473                        }
474                    }
475                    _ => {}
476                }
477            }
478
479            // Fill in defaults for unset fields that have defaults
480            if let Err(e) = frame.fill_defaults() {
481                // Before cleanup, clear the parent's iset bit for the frame that failed.
482                // This prevents the parent from trying to drop this field when Partial is dropped.
483                Self::clear_parent_iset_for_path(&path, self.frames_mut(), &mut stored_frames);
484                // If this is a MapValue/Deref/OptionSome frame, a parent tracker holds
485                // a pointer to our frame's memory (pending_entries / pending_inner). Clear
486                // that pointer before we dealloc, so the parent's deinit won't double-drop.
487                Self::sever_parent_pending_for_path(&path, self.frames_mut(), &mut stored_frames);
488                frame.deinit();
489                frame.dealloc();
490                // Clean up remaining stored frames safely (deepest first, clearing parent isets)
491                Self::cleanup_stored_frames_on_error(stored_frames, self.frames_mut());
492                return Err(self.err(e));
493            }
494
495            // Validate the frame is fully initialized
496            if let Err(e) = frame.require_full_initialization() {
497                // Before cleanup, clear the parent's iset bit for the frame that failed.
498                // This prevents the parent from trying to drop this field when Partial is dropped.
499                Self::clear_parent_iset_for_path(&path, self.frames_mut(), &mut stored_frames);
500                // If this is a MapValue/Deref/OptionSome frame, a parent tracker holds
501                // a pointer to our frame's memory (pending_entries / pending_inner). Clear
502                // that pointer before we dealloc, so the parent's deinit won't double-drop.
503                Self::sever_parent_pending_for_path(&path, self.frames_mut(), &mut stored_frames);
504                frame.deinit();
505                frame.dealloc();
506                // Clean up remaining stored frames safely (deepest first, clearing parent isets)
507                Self::cleanup_stored_frames_on_error(stored_frames, self.frames_mut());
508                return Err(self.err(e));
509            }
510
511            // Update parent's ISet to mark this field as initialized.
512            // The parent lives either in stored_frames (if it was ended during deferred mode)
513            // or on the frames stack at index parent_path.steps.len() (paths are absolute).
514            if let Some(last_step) = path.steps.last() {
515                // Construct parent path (same shape, all steps except the last one)
516                let parent_path = facet_path::Path {
517                    shape: path.shape,
518                    steps: path.steps[..path.steps.len() - 1].to_vec(),
519                };
520
521                // Special handling for Option inner values: when path ends with OptionSome,
522                // the parent is an Option frame and we need to complete the Option by
523                // writing the inner value into the Option's memory.
524                if matches!(last_step, PathStep::OptionSome) {
525                    // Find the Option frame (parent)
526                    let option_frame =
527                        if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
528                            Some(parent_frame)
529                        } else {
530                            self.frames_mut().get_mut(parent_path.steps.len())
531                        };
532
533                    if let Some(option_frame) = option_frame {
534                        // The frame contains the inner value - write it into the Option's memory
535                        Self::complete_option_frame(option_frame, frame);
536                        // Frame data has been transferred to Option - don't drop it
537                        continue;
538                    }
539                }
540
541                // Special handling for SmartPointer inner values: when path ends with Deref,
542                // the parent is a SmartPointer frame and we need to complete it by
543                // creating the SmartPointer from the inner value.
544                if matches!(last_step, PathStep::Deref) {
545                    // Find the SmartPointer frame (parent)
546                    let smart_ptr_frame =
547                        if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
548                            Some(parent_frame)
549                        } else {
550                            self.frames_mut().get_mut(parent_path.steps.len())
551                        };
552
553                    if let Some(smart_ptr_frame) = smart_ptr_frame {
554                        // The frame contains the inner value - create the SmartPointer from it
555                        Self::complete_smart_pointer_frame(smart_ptr_frame, frame);
556                        // Frame data has been transferred to SmartPointer - don't drop it
557                        continue;
558                    }
559                }
560
561                // Special handling for Inner values: when path ends with Inner,
562                // the parent is a transparent wrapper (NonZero, ByteString, etc.) and we need
563                // to convert the inner value to the parent type using try_from.
564                if matches!(last_step, PathStep::Inner) {
565                    // Find the parent frame (Inner wrapper)
566                    let parent_frame =
567                        if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
568                            Some(parent_frame)
569                        } else {
570                            self.frames_mut().get_mut(parent_path.steps.len())
571                        };
572
573                    if let Some(inner_wrapper_frame) = parent_frame {
574                        // The frame contains the inner value - convert to parent type using try_from
575                        Self::complete_inner_frame(inner_wrapper_frame, frame);
576                        // Frame data has been transferred - don't drop it
577                        continue;
578                    }
579                }
580
581                // Special handling for Proxy values: when path ends with Proxy,
582                // the parent is the target type (e.g., Inner) and we need to convert
583                // the proxy value (e.g., InnerProxy) using the proxy's convert_in.
584                if matches!(last_step, PathStep::Proxy) {
585                    // Find the parent frame (the proxy target)
586                    let parent_frame =
587                        if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
588                            Some(parent_frame)
589                        } else {
590                            self.frames_mut().get_mut(parent_path.steps.len())
591                        };
592
593                    if let Some(target_frame) = parent_frame {
594                        Self::complete_proxy_frame(target_frame, frame);
595                        continue;
596                    }
597                }
598
599                // Special handling for List/SmartPointerSlice element values: when path ends with Index,
600                // the parent is a List or SmartPointerSlice frame and we need to push the element into it.
601                // RopeSlot frames are already stored in the rope and will be drained during
602                // validation - pushing them here would duplicate the elements.
603                if matches!(last_step, PathStep::Index(_))
604                    && !matches!(frame.ownership, FrameOwnership::RopeSlot)
605                {
606                    // Find the parent frame (List or SmartPointerSlice)
607                    let parent_frame =
608                        if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
609                            Some(parent_frame)
610                        } else {
611                            self.frames_mut().get_mut(parent_path.steps.len())
612                        };
613
614                    if let Some(parent_frame) = parent_frame {
615                        // Check if parent is a SmartPointerSlice (e.g., Arc<[T]>)
616                        if matches!(parent_frame.tracker, Tracker::SmartPointerSlice { .. }) {
617                            Self::complete_smart_pointer_slice_item_frame(parent_frame, frame);
618                            // Frame data has been transferred to slice builder - don't drop it
619                            continue;
620                        }
621                        // Otherwise try List handling
622                        Self::complete_list_item_frame(parent_frame, frame);
623                        // Frame data has been transferred to List - don't drop it
624                        continue;
625                    }
626                }
627
628                // Special handling for Map key values: when path ends with MapKey,
629                // the parent is a Map frame and we need to transition it to PushingValue state.
630                if matches!(last_step, PathStep::MapKey(_)) {
631                    // Find the Map frame (parent)
632                    let map_frame = if let Some(parent_frame) = stored_frames.get_mut(&parent_path)
633                    {
634                        Some(parent_frame)
635                    } else {
636                        self.frames_mut().get_mut(parent_path.steps.len())
637                    };
638
639                    if let Some(map_frame) = map_frame {
640                        // Transition the Map from PushingKey to PushingValue state
641                        Self::complete_map_key_frame(map_frame, frame);
642                        continue;
643                    }
644                }
645
646                // Special handling for Map value values: when path ends with MapValue,
647                // the parent is a Map frame and we need to add the entry to pending_entries.
648                if matches!(last_step, PathStep::MapValue(_)) {
649                    // Find the Map frame (parent)
650                    let map_frame = if let Some(parent_frame) = stored_frames.get_mut(&parent_path)
651                    {
652                        Some(parent_frame)
653                    } else {
654                        self.frames_mut().get_mut(parent_path.steps.len())
655                    };
656
657                    if let Some(map_frame) = map_frame {
658                        // Add the key-value pair to pending_entries
659                        Self::complete_map_value_frame(map_frame, frame);
660                        continue;
661                    }
662                }
663
664                // Only mark field initialized if the step is actually a Field
665                if let PathStep::Field(field_idx) = last_step {
666                    let field_idx = *field_idx as usize;
667                    // Paths are absolute from the root, so the parent frame lives at
668                    // stack[parent_path.steps.len()] when it's still on the stack.
669                    let parent_frame =
670                        if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
671                            Some(parent_frame)
672                        } else {
673                            self.frames_mut().get_mut(parent_path.steps.len())
674                        };
675                    if let Some(parent_frame) = parent_frame {
676                        Self::mark_field_initialized_by_index(parent_frame, field_idx);
677                    }
678                }
679            }
680
681            // Frame is validated and parent is updated - dealloc if needed
682            frame.dealloc();
683        }
684
685        // Invariant check: we must have at least one frame after finish_deferred
686        if self.frames().is_empty() {
687            // No need to poison - returning Err consumes self, Drop will handle cleanup
688            return Err(self.err(ReflectErrorKind::InvariantViolation {
689                invariant: "finish_deferred() left Partial with no frames",
690            }));
691        }
692
693        // Fill defaults and validate the root frame is fully initialized
694        if let Some(frame) = self.frames_mut().last_mut() {
695            // Fill defaults - this can fail if a field has #[facet(default)] but no default impl
696            if let Err(e) = frame.fill_defaults() {
697                return Err(self.err(e));
698            }
699            // Root validation failed. At this point, all stored frames have been
700            // processed and their parent isets updated.
701            // No need to poison - returning Err consumes self, Drop will handle cleanup
702            if let Err(e) = frame.require_full_initialization() {
703                return Err(self.err(e));
704            }
705        }
706
707        Ok(self)
708    }
709
710    /// Mark a field as initialized in a frame's tracker by index
711    fn mark_field_initialized_by_index(frame: &mut Frame, idx: usize) {
712        crate::trace!(
713            "mark_field_initialized_by_index: idx={}, frame shape={}, tracker={:?}",
714            idx,
715            frame.allocated.shape(),
716            frame.tracker.kind()
717        );
718
719        // If the tracker is Scalar but this is a struct type, upgrade to Struct tracker.
720        // This can happen if the frame was deinit'd (e.g., by a failed set_default)
721        // which resets the tracker to Scalar.
722        if matches!(frame.tracker, Tracker::Scalar)
723            && let Type::User(UserType::Struct(struct_type)) = frame.allocated.shape().ty
724        {
725            frame.tracker = Tracker::Struct {
726                iset: ISet::new(struct_type.fields.len()),
727                current_child: None,
728            };
729        }
730
731        match &mut frame.tracker {
732            Tracker::Struct { iset, .. } => {
733                crate::trace!("mark_field_initialized_by_index: setting iset for struct");
734                iset.set(idx);
735            }
736            Tracker::Enum { data, .. } => {
737                crate::trace!(
738                    "mark_field_initialized_by_index: setting data for enum, before={:?}",
739                    data
740                );
741                data.set(idx);
742                crate::trace!(
743                    "mark_field_initialized_by_index: setting data for enum, after={:?}",
744                    data
745                );
746            }
747            Tracker::Array { iset, .. } => {
748                crate::trace!("mark_field_initialized_by_index: setting iset for array");
749                iset.set(idx);
750            }
751            _ => {
752                crate::trace!(
753                    "mark_field_initialized_by_index: no match for tracker {:?}",
754                    frame.tracker.kind()
755                );
756            }
757        }
758    }
759
760    /// Clear a parent frame's iset bit for a given path.
761    /// The parent could be on the stack or in stored_frames.
762    fn clear_parent_iset_for_path(
763        path: &Path,
764        stack: &mut [Frame],
765        stored_frames: &mut ::alloc::collections::BTreeMap<Path, Frame>,
766    ) {
767        let Some(&PathStep::Field(field_idx)) = path.steps.last() else {
768            return;
769        };
770        let field_idx = field_idx as usize;
771        let parent_path = Path {
772            shape: path.shape,
773            steps: path.steps[..path.steps.len() - 1].to_vec(),
774        };
775
776        // Paths are absolute from the root; the frame at a given path lives at
777        // stack[path.steps.len()], so the parent lives at stack[parent_path.steps.len()].
778        let parent_frame = if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
779            Some(parent_frame)
780        } else {
781            stack.get_mut(parent_path.steps.len())
782        };
783        if let Some(parent_frame) = parent_frame {
784            Self::unset_field_in_tracker(&mut parent_frame.tracker, field_idx);
785        }
786    }
787
    /// Sever parent/child pointer ownership when a stored child frame fails validation
    /// in `finish_deferred`.
    ///
    /// When a child frame is stored for deferred processing, the parent may keep a
    /// pointer to the child's buffer so it can finalize later (Map's `pending_entries`,
    /// SmartPointer's / Option's `pending_inner`). If the child then fails validation
    /// and its buffer is deallocated, that parent pointer would dangle and cause a
    /// double-free when the parent is subsequently deinited.
    ///
    /// This helper clears the relevant parent field so the parent's own cleanup leaves
    /// the buffer alone.
    fn sever_parent_pending_for_path(
        path: &Path,
        stack: &mut [Frame],
        stored_frames: &mut ::alloc::collections::BTreeMap<Path, Frame>,
    ) {
        // Only MapValue / Deref / OptionSome children can be referenced by a
        // parent's pending pointer; any other step kind has nothing to sever.
        let Some(last_step) = path.steps.last() else {
            return;
        };
        if !matches!(
            last_step,
            PathStep::MapValue(_) | PathStep::Deref | PathStep::OptionSome
        ) {
            return;
        }

        // The parent's path is this path minus its final step.
        let parent_path = Path {
            shape: path.shape,
            steps: path.steps[..path.steps.len() - 1].to_vec(),
        };

        // Paths are absolute from the root; the frame at a given path lives at
        // stack[path.steps.len()], so the parent lives at stack[parent_path.steps.len()].
        let parent_frame = if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
            Some(parent_frame)
        } else {
            stack.get_mut(parent_path.steps.len())
        };

        let Some(parent_frame) = parent_frame else {
            return;
        };

        let parent_shape = parent_frame.allocated.shape();

        // Dispatch on (parent tracker kind, child step kind) so we only touch the
        // specific parent state that points at the failed child's buffer.
        match (&mut parent_frame.tracker, last_step) {
            (
                Tracker::Map {
                    pending_entries, ..
                },
                PathStep::MapValue(_),
            ) => {
                // The pending entry held both (key_ptr, value_ptr). The value buffer is
                // about to be freed by the caller via frame.dealloc(). The key buffer,
                // however, is solely owned by this pending entry — if we just pop it
                // without dropping, both the key's in-place contents and its allocation
                // leak.
                if let Some((key_ptr, _value_ptr)) = pending_entries.pop()
                    && let Def::Map(map_def) = parent_shape.def
                {
                    unsafe {
                        map_def.k().call_drop_in_place(key_ptr.assume_init());
                    }
                    // Zero-sized keys have no heap allocation to free.
                    if let Ok(key_layout) = map_def.k().layout.sized_layout()
                        && key_layout.size() > 0
                    {
                        unsafe {
                            ::alloc::alloc::dealloc(key_ptr.as_mut_byte_ptr(), key_layout);
                        }
                    }
                }
                // NOTE(review): this trace fires even when pending_entries was empty
                // and nothing was actually popped — message may over-report; confirm.
                trace!(
                    "sever_parent_pending_for_path: popped & dropped map pending_entry for failed MapValue at {:?}",
                    path,
                );
            }
            (
                Tracker::SmartPointer {
                    building_inner,
                    pending_inner,
                },
                PathStep::Deref,
            ) => {
                // Forget the child buffer and put the smart pointer back into the
                // "still building the inner value" state so its own cleanup won't
                // touch the soon-to-be-freed buffer.
                *pending_inner = None;
                *building_inner = true;
                parent_frame.is_init = false;
                trace!(
                    "sever_parent_pending_for_path: cleared SmartPointer pending_inner for failed Deref at {:?}",
                    path,
                );
            }
            (
                Tracker::Option {
                    building_inner,
                    pending_inner,
                },
                PathStep::OptionSome,
            ) => {
                // Same recovery as the SmartPointer case, for Option's pending inner.
                *pending_inner = None;
                *building_inner = true;
                parent_frame.is_init = false;
                trace!(
                    "sever_parent_pending_for_path: cleared Option pending_inner for failed OptionSome at {:?}",
                    path,
                );
            }
            _ => {}
        }
    }
897
898    /// Helper to unset a field index in a tracker's iset
899    fn unset_field_in_tracker(tracker: &mut Tracker, field_idx: usize) {
900        match tracker {
901            Tracker::Struct { iset, .. } => {
902                iset.unset(field_idx);
903            }
904            Tracker::Enum { data, .. } => {
905                data.unset(field_idx);
906            }
907            Tracker::Array { iset, .. } => {
908                iset.unset(field_idx);
909            }
910            _ => {}
911        }
912    }
913
    /// Safely clean up stored frames on error in finish_deferred.
    ///
    /// This mirrors the cleanup logic in Drop: process frames deepest-first and
    /// clear parent's iset bits before deiniting children to prevent double-drops.
    ///
    /// Takes `stored_frames` by value: every frame in it is consumed (deinit +
    /// dealloc) by the time this returns.
    fn cleanup_stored_frames_on_error(
        mut stored_frames: ::alloc::collections::BTreeMap<Path, Frame>,
        stack: &mut [Frame],
    ) {
        // Sort by depth (deepest first) so children are processed before parents
        let mut paths: Vec<_> = stored_frames.keys().cloned().collect();
        paths.sort_by_key(|p| core::cmp::Reverse(p.steps.len()));

        trace!(
            "cleanup_stored_frames_on_error: {} frames to clean, paths: {:?}",
            paths.len(),
            paths
        );

        // First pass: trace-only. Dump every frame's state up front so the log
        // captures a consistent snapshot before any teardown mutates it.
        for path in &paths {
            if let Some(frame) = stored_frames.get(path) {
                trace!(
                    "cleanup: processing path={:?}, shape={}, tracker={:?}, is_init={}, ownership={:?}",
                    path,
                    frame.allocated.shape(),
                    frame.tracker.kind(),
                    frame.is_init,
                    frame.ownership,
                );
                // Dump iset contents for struct/enum trackers
                match &frame.tracker {
                    Tracker::Struct { iset: _iset, .. } => {
                        trace!("cleanup:   Struct iset = {:?}", _iset);
                    }
                    Tracker::Enum {
                        variant: _variant,
                        data: _data,
                        ..
                    } => {
                        trace!("cleanup:   Enum {:?} data = {:?}", _variant.name, _data);
                    }
                    _ => {}
                }
            }
        }

        // Second pass: actual teardown, deepest-first, severing every parent
        // reference to a frame's buffer before deinit/dealloc.
        for path in paths {
            if let Some(mut frame) = stored_frames.remove(&path) {
                trace!(
                    "cleanup: REMOVING path={:?}, shape={}, tracker={:?}",
                    path,
                    frame.allocated.shape(),
                    frame.tracker.kind(),
                );
                // Before dropping this frame, clear the parent's iset bit so the
                // parent won't try to drop this field again.
                Self::clear_parent_iset_for_path(&path, stack, &mut stored_frames);
                // If this frame's buffer is also tracked by a parent Map/SmartPointer/Option
                // pending pointer, clear that reference too — otherwise the parent will try to
                // drop the same buffer we're about to dealloc.
                Self::sever_parent_pending_for_path(&path, stack, &mut stored_frames);
                trace!("cleanup: calling deinit() on path={:?}", path,);
                frame.deinit();
                frame.dealloc();
            }
        }
    }
980
    /// Complete an Option frame by writing the inner value and marking it initialized.
    /// Used in finish_deferred when processing a stored frame at a path ending with "Some".
    ///
    /// `inner_frame` is consumed: `init_some` moves its value into the Option,
    /// after which the inner frame's Owned backing allocation is freed here.
    /// Non-Option shapes fall through without any effect.
    fn complete_option_frame(option_frame: &mut Frame, inner_frame: Frame) {
        if let Def::Option(option_def) = option_frame.allocated.shape().def {
            // Use the Option vtable to initialize Some(inner_value)
            let init_some_fn = option_def.vtable.init_some;

            // The inner frame contains the inner value
            // NOTE(review): assume_init() asserts the inner value is fully built —
            // presumably callers only invoke this for validated frames; confirm.
            let inner_value_ptr = unsafe { inner_frame.data.assume_init() };

            // Initialize the Option as Some(inner_value)
            unsafe {
                init_some_fn(option_frame.data, inner_value_ptr);
            }

            // Deallocate the inner value's memory since init_some_fn moved it
            // (only for Owned buffers; zero-sized layouts have nothing to free)
            if let FrameOwnership::Owned = inner_frame.ownership
                && let Ok(layout) = inner_frame.allocated.shape().layout.sized_layout()
                && layout.size() > 0
            {
                unsafe {
                    ::alloc::alloc::dealloc(inner_frame.data.as_mut_byte_ptr(), layout);
                }
            }

            // Mark the Option as initialized
            option_frame.tracker = Tracker::Option {
                building_inner: false,
                pending_inner: None,
            };
            option_frame.is_init = true;
        }
    }
1014
    /// Complete a SmartPointer frame from its built inner value (for deferred
    /// finalization).
    ///
    /// Two cases are handled:
    /// - sized pointee: the vtable's `new_into_fn` moves the inner value into a
    ///   freshly-created smart pointer;
    /// - unsized `str` pointee built from a `String`: converted via
    ///   `into_boxed_str()` for the known pointer kinds (Box/Arc/Rc).
    ///
    /// `inner_frame` is consumed; its Owned backing allocation is freed once the
    /// value has been moved out. Unhandled shapes/pointer kinds fall through
    /// without touching either frame.
    fn complete_smart_pointer_frame(smart_ptr_frame: &mut Frame, inner_frame: Frame) {
        if let Def::Pointer(smart_ptr_def) = smart_ptr_frame.allocated.shape().def {
            // Use the SmartPointer vtable to create the smart pointer from the inner value
            if let Some(new_into_fn) = smart_ptr_def.vtable.new_into_fn {
                // Sized pointee case: use new_into_fn
                // NOTE(review): the assume_init() result is discarded — presumably it
                // only documents/asserts that `data` is initialized here; confirm.
                let _ = unsafe { inner_frame.data.assume_init() };

                // Create the SmartPointer with the inner value
                unsafe {
                    new_into_fn(
                        smart_ptr_frame.data,
                        PtrMut::new(inner_frame.data.as_mut_byte_ptr()),
                    );
                }

                // Deallocate the inner value's memory since new_into_fn moved it
                // (Owned buffers only; zero-sized layouts have no allocation)
                if let FrameOwnership::Owned = inner_frame.ownership
                    && let Ok(layout) = inner_frame.allocated.shape().layout.sized_layout()
                    && layout.size() > 0
                {
                    unsafe {
                        ::alloc::alloc::dealloc(inner_frame.data.as_mut_byte_ptr(), layout);
                    }
                }

                // Mark the SmartPointer as initialized
                smart_ptr_frame.tracker = Tracker::SmartPointer {
                    building_inner: false,
                    pending_inner: None,
                };
                smart_ptr_frame.is_init = true;
            } else if let Some(pointee) = smart_ptr_def.pointee()
                && pointee.is_shape(str::SHAPE)
                && inner_frame.allocated.shape().is_shape(String::SHAPE)
            {
                // Unsized pointee case: String -> Arc<str>/Box<str>/Rc<str> conversion
                use ::alloc::{rc::Rc, string::String, sync::Arc};
                use facet_core::KnownPointer;

                // Without a known pointer kind we can't pick a conversion; bail.
                let Some(known) = smart_ptr_def.known else {
                    return;
                };

                // Read the String value from the inner frame
                // (ptr::read moves it out; the source buffer is freed below)
                let string_ptr = inner_frame.data.as_mut_byte_ptr() as *mut String;
                let string_value = unsafe { core::ptr::read(string_ptr) };

                // Convert to the appropriate smart pointer type
                match known {
                    KnownPointer::Box => {
                        let boxed: ::alloc::boxed::Box<str> = string_value.into_boxed_str();
                        unsafe {
                            core::ptr::write(
                                smart_ptr_frame.data.as_mut_byte_ptr()
                                    as *mut ::alloc::boxed::Box<str>,
                                boxed,
                            );
                        }
                    }
                    KnownPointer::Arc => {
                        let arc: Arc<str> = Arc::from(string_value.into_boxed_str());
                        unsafe {
                            core::ptr::write(
                                smart_ptr_frame.data.as_mut_byte_ptr() as *mut Arc<str>,
                                arc,
                            );
                        }
                    }
                    KnownPointer::Rc => {
                        let rc: Rc<str> = Rc::from(string_value.into_boxed_str());
                        unsafe {
                            core::ptr::write(
                                smart_ptr_frame.data.as_mut_byte_ptr() as *mut Rc<str>,
                                rc,
                            );
                        }
                    }
                    // NOTE(review): `string_value` was already moved out via ptr::read;
                    // returning here drops it (correct) but leaves the inner buffer and
                    // smart_ptr_frame untouched — confirm callers tolerate this.
                    _ => return,
                }

                // Deallocate the String's memory (we moved the data out via ptr::read)
                if let FrameOwnership::Owned = inner_frame.ownership
                    && let Ok(layout) = inner_frame.allocated.shape().layout.sized_layout()
                    && layout.size() > 0
                {
                    unsafe {
                        ::alloc::alloc::dealloc(inner_frame.data.as_mut_byte_ptr(), layout);
                    }
                }

                // Mark the SmartPointer as initialized
                smart_ptr_frame.tracker = Tracker::SmartPointer {
                    building_inner: false,
                    pending_inner: None,
                };
                smart_ptr_frame.is_init = true;
            }
        }
    }
1114
    /// Complete an Inner frame by converting the inner value to the parent type using try_from
    /// (for deferred finalization)
    ///
    /// Dispatches to the wrapper shape's `try_from` (Direct or Indirect vtable).
    /// On success the inner value has been consumed by the conversion, its Owned
    /// buffer is freed, and the wrapper is marked initialized. If the shape has
    /// no `try_from`, or the conversion fails, the function returns early without
    /// touching the wrapper.
    fn complete_inner_frame(inner_wrapper_frame: &mut Frame, inner_frame: Frame) {
        let wrapper_shape = inner_wrapper_frame.allocated.shape();
        let inner_ptr = PtrConst::new(inner_frame.data.as_byte_ptr());
        let inner_shape = inner_frame.allocated.shape();

        // Handle Direct and Indirect vtables - both return TryFromOutcome
        let result = match wrapper_shape.vtable {
            facet_core::VTableErased::Direct(vt) => {
                if let Some(try_from_fn) = vt.try_from {
                    unsafe {
                        try_from_fn(
                            inner_wrapper_frame.data.as_mut_byte_ptr() as *mut (),
                            inner_shape,
                            inner_ptr,
                        )
                    }
                } else {
                    // No conversion available — leave both frames as-is.
                    return;
                }
            }
            facet_core::VTableErased::Indirect(vt) => {
                if let Some(try_from_fn) = vt.try_from {
                    // Indirect vtables take an OxPtrUninit wrapping the target slot.
                    let ox_uninit =
                        facet_core::OxPtrUninit::new(inner_wrapper_frame.data, wrapper_shape);
                    unsafe { try_from_fn(ox_uninit, inner_shape, inner_ptr) }
                } else {
                    return;
                }
            }
        };

        match result {
            TryFromOutcome::Converted => {
                crate::trace!(
                    "complete_inner_frame: converted {} to {}",
                    inner_shape,
                    wrapper_shape
                );
            }
            TryFromOutcome::Unsupported | TryFromOutcome::Failed(_) => {
                crate::trace!(
                    "complete_inner_frame: conversion failed from {} to {}",
                    inner_shape,
                    wrapper_shape
                );
                // NOTE(review): on failure we return before the dealloc below, so
                // inner_frame is dropped with its buffer still allocated — confirm
                // Frame's Drop (or the caller) reclaims it.
                return;
            }
        }

        // Deallocate the inner value's memory (try_from consumed it)
        if let FrameOwnership::Owned = inner_frame.ownership
            && let Ok(layout) = inner_frame.allocated.shape().layout.sized_layout()
            && layout.size() > 0
        {
            unsafe {
                ::alloc::alloc::dealloc(inner_frame.data.as_mut_byte_ptr(), layout);
            }
        }

        // Mark the wrapper as initialized
        inner_wrapper_frame.tracker = Tracker::Scalar;
        inner_wrapper_frame.is_init = true;
    }
1180
    /// Complete a proxy conversion during deferred finalization.
    ///
    /// This handles proxy types (e.g., `#[facet(proxy = InnerProxy)]`) that were
    /// deferred during flatten deserialization. The proxy frame's children (e.g.,
    /// `Vec<f64>` fields) have already been materialized (ropes drained), so it's
    /// now safe to run the conversion.
    ///
    /// On success the proxy value has been consumed by `convert_in`, the proxy's
    /// Owned buffer is freed, and `target_frame` is marked initialized. Any
    /// failure path returns early, leaving `target_frame` untouched.
    fn complete_proxy_frame(target_frame: &mut Frame, proxy_frame: Frame) {
        // Get the convert_in function from the proxy stored on the frame
        let Some(proxy_def) = proxy_frame.shape_level_proxy else {
            crate::trace!(
                "complete_proxy_frame: no shape_level_proxy on frame {}",
                proxy_frame.allocated.shape()
            );
            return;
        };
        let convert_in = proxy_def.convert_in;

        // Underscore-prefixed so they compile away when tracing is disabled.
        let _proxy_shape = proxy_frame.allocated.shape();
        let _target_shape = target_frame.allocated.shape();

        crate::trace!(
            "complete_proxy_frame: converting {} to {}",
            _proxy_shape,
            _target_shape
        );

        unsafe {
            let inner_value_ptr = proxy_frame.data.assume_init().as_const();
            let res = (convert_in)(inner_value_ptr, target_frame.data);

            match res {
                Ok(rptr) => {
                    // Sanity check: convert_in must have written into the slot we gave it.
                    if rptr.as_uninit() != target_frame.data {
                        crate::trace!(
                            "complete_proxy_frame: convert_in returned unexpected pointer"
                        );
                        return;
                    }
                }
                Err(_message) => {
                    // NOTE(review): on error we return without freeing the proxy
                    // buffer — confirm Frame's Drop (or the caller) reclaims it.
                    crate::trace!("complete_proxy_frame: conversion failed: {}", _message);
                    return;
                }
            }
        }

        // Deallocate the proxy frame's memory (convert_in consumed it via ptr::read)
        if let FrameOwnership::Owned = proxy_frame.ownership
            && let Ok(layout) = proxy_frame.allocated.shape().layout.sized_layout()
            && layout.size() > 0
        {
            unsafe {
                ::alloc::alloc::dealloc(proxy_frame.data.as_mut_byte_ptr(), layout);
            }
        }

        // Mark the target as initialized
        target_frame.is_init = true;
    }
1240
    /// Complete a List frame by pushing an element into it (for deferred finalization)
    ///
    /// `element_frame` is consumed: the list's `push` moves the value in, then
    /// the element's Owned backing allocation is freed. Non-list shapes, or
    /// lists without a push fn, fall through without any effect.
    fn complete_list_item_frame(list_frame: &mut Frame, element_frame: Frame) {
        if let Def::List(list_def) = list_frame.allocated.shape().def
            && let Some(push_fn) = list_def.push()
        {
            // The element frame contains the element value
            let element_ptr = PtrMut::new(element_frame.data.as_mut_byte_ptr());

            // Use push to add element to the list
            unsafe {
                push_fn(PtrMut::new(list_frame.data.as_mut_byte_ptr()), element_ptr);
            }

            crate::trace!(
                "complete_list_item_frame: pushed element into {}",
                list_frame.allocated.shape()
            );

            // Deallocate the element's memory since push moved it
            // (Owned buffers only; zero-sized layouts have no allocation)
            if let FrameOwnership::Owned = element_frame.ownership
                && let Ok(layout) = element_frame.allocated.shape().layout.sized_layout()
                && layout.size() > 0
            {
                unsafe {
                    ::alloc::alloc::dealloc(element_frame.data.as_mut_byte_ptr(), layout);
                }
            }
        }
    }
1270
    /// Complete a SmartPointerSlice element frame by pushing the element into the slice builder
    /// (for deferred finalization)
    ///
    /// Returns `true` if the slice frame's tracker was `SmartPointerSlice` and the
    /// element was pushed (and its Owned buffer freed); `false` otherwise, in
    /// which case neither frame's buffer is touched.
    fn complete_smart_pointer_slice_item_frame(
        slice_frame: &mut Frame,
        element_frame: Frame,
    ) -> bool {
        if let Tracker::SmartPointerSlice { vtable, .. } = &slice_frame.tracker {
            // Copy the vtable out so the tracker borrow ends before we touch data.
            let vtable = *vtable;
            // The slice frame's data pointer IS the builder pointer
            let builder_ptr = slice_frame.data;

            // Push the element into the builder
            unsafe {
                (vtable.push_fn)(
                    PtrMut::new(builder_ptr.as_mut_byte_ptr()),
                    PtrMut::new(element_frame.data.as_mut_byte_ptr()),
                );
            }

            crate::trace!(
                "complete_smart_pointer_slice_item_frame: pushed element into builder for {}",
                slice_frame.allocated.shape()
            );

            // Deallocate the element's memory since push moved it
            // (Owned buffers only; zero-sized layouts have no allocation)
            if let FrameOwnership::Owned = element_frame.ownership
                && let Ok(layout) = element_frame.allocated.shape().layout.sized_layout()
                && layout.size() > 0
            {
                unsafe {
                    ::alloc::alloc::dealloc(element_frame.data.as_mut_byte_ptr(), layout);
                }
            }
            return true;
        }
        false
    }
1308
    /// Complete a Map key frame by transitioning the Map from PushingKey to PushingValue state
    /// (for deferred finalization)
    ///
    /// The key data itself lives at `key_ptr`, which stays owned by the Map's
    /// insert state; only the key frame's own (Owned) buffer is freed here.
    /// If the map frame isn't in the `Map`/`PushingKey` state, nothing happens.
    fn complete_map_key_frame(map_frame: &mut Frame, key_frame: Frame) {
        if let Tracker::Map { insert_state, .. } = &mut map_frame.tracker
            && let MapInsertState::PushingKey { key_ptr, .. } = insert_state
        {
            // Transition to PushingValue state, keeping the key pointer.
            // key_frame_stored = false because the key frame is being finalized here,
            // so after this the Map owns the key buffer.
            *insert_state = MapInsertState::PushingValue {
                key_ptr: *key_ptr,
                value_ptr: None,
                value_initialized: false,
                value_frame_on_stack: false,
                key_frame_stored: false,
            };

            crate::trace!(
                "complete_map_key_frame: transitioned {} to PushingValue",
                map_frame.allocated.shape()
            );

            // Deallocate the key frame's memory (the key data lives at key_ptr which Map owns)
            // (Owned buffers only; zero-sized layouts have no allocation)
            if let FrameOwnership::Owned = key_frame.ownership
                && let Ok(layout) = key_frame.allocated.shape().layout.sized_layout()
                && layout.size() > 0
            {
                unsafe {
                    ::alloc::alloc::dealloc(key_frame.data.as_mut_byte_ptr(), layout);
                }
            }
        }
    }
1342
    /// Complete a Map value frame by adding the key-value pair to pending_entries
    /// (for deferred finalization)
    ///
    /// Requires the map to be in `PushingValue` state with a value pointer set;
    /// the (key_ptr, value_ptr) pair is appended to `pending_entries` and the
    /// insert state resets to `Idle`. The value data lives at `value_ptr` (now
    /// owned by the pending entry); only the value frame's own (Owned) buffer
    /// is freed here. Otherwise nothing happens.
    fn complete_map_value_frame(map_frame: &mut Frame, value_frame: Frame) {
        if let Tracker::Map {
            insert_state,
            pending_entries,
            ..
        } = &mut map_frame.tracker
            && let MapInsertState::PushingValue {
                key_ptr,
                value_ptr: Some(value_ptr),
                ..
            } = insert_state
        {
            // Add the key-value pair to pending_entries
            pending_entries.push((*key_ptr, *value_ptr));

            crate::trace!(
                "complete_map_value_frame: added entry to pending_entries for {}",
                map_frame.allocated.shape()
            );

            // Reset to idle state
            *insert_state = MapInsertState::Idle;

            // Deallocate the value frame's memory (the value data lives at value_ptr which Map owns)
            // (Owned buffers only; zero-sized layouts have no allocation)
            if let FrameOwnership::Owned = value_frame.ownership
                && let Ok(layout) = value_frame.allocated.shape().layout.sized_layout()
                && layout.size() > 0
            {
                unsafe {
                    ::alloc::alloc::dealloc(value_frame.data.as_mut_byte_ptr(), layout);
                }
            }
        }
    }
1379
1380    /// Pops the current frame off the stack, indicating we're done initializing the current field
1381    pub fn end(mut self) -> Result<Self, ReflectError> {
1382        // FAST PATH: Handle the common case of ending a simple scalar field in a struct.
1383        // This avoids all the edge-case checks (SmartPointerSlice, deferred mode, custom
1384        // deserialization, etc.) that dominate the slow path.
1385        if self.frames().len() >= 2 && !self.is_deferred() {
1386            let frames = self.frames_mut();
1387            let top_idx = frames.len() - 1;
1388            let parent_idx = top_idx - 1;
1389
1390            // Check if this is a simple scalar field being returned to a struct parent
1391            if let (
1392                Tracker::Scalar,
1393                true, // is_init
1394                FrameOwnership::Field { field_idx },
1395                false, // not using custom deserialization
1396            ) = (
1397                &frames[top_idx].tracker,
1398                frames[top_idx].is_init,
1399                frames[top_idx].ownership,
1400                frames[top_idx].using_custom_deserialization,
1401            ) && let Tracker::Struct {
1402                iset,
1403                current_child,
1404            } = &mut frames[parent_idx].tracker
1405            {
1406                // Fast path: just update parent's iset and pop
1407                iset.set(field_idx);
1408                *current_child = None;
1409                frames.pop();
1410                return Ok(self);
1411            }
1412        }
1413
1414        // SLOW PATH: Handle all the edge cases
1415
1416        // Strategic tracing: show the frame stack state
1417        #[cfg(feature = "tracing")]
1418        {
1419            use ::alloc::string::ToString;
1420            let frames = self.frames();
1421            let stack_desc: Vec<_> = frames
1422                .iter()
1423                .map(|f| ::alloc::format!("{}({:?})", f.allocated.shape(), f.tracker.kind()))
1424                .collect();
1425            let path = if self.is_deferred() {
1426                ::alloc::format!("{:?}", self.derive_path())
1427            } else {
1428                "N/A".to_string()
1429            };
1430            crate::trace!(
1431                "end() SLOW PATH: stack=[{}], deferred={}, path={}",
1432                stack_desc.join(" > "),
1433                self.is_deferred(),
1434                path
1435            );
1436        }
1437
1438        // Special handling for SmartPointerSlice - convert builder to Arc
1439        // Check if the current (top) frame is a SmartPointerSlice that needs conversion
1440        let needs_slice_conversion = {
1441            let frames = self.frames();
1442            if frames.is_empty() {
1443                false
1444            } else {
1445                let top_idx = frames.len() - 1;
1446                matches!(
1447                    frames[top_idx].tracker,
1448                    Tracker::SmartPointerSlice {
1449                        building_item: false,
1450                        ..
1451                    }
1452                )
1453            }
1454        };
1455
1456        if needs_slice_conversion {
1457            // In deferred mode, don't convert immediately - let finish_deferred handle it.
1458            // Set building_item = true and return early (matching non-deferred behavior).
1459            // The next end() call will store the frame.
1460            if self.is_deferred() {
1461                let frames = self.frames_mut();
1462                let top_idx = frames.len() - 1;
1463                if let Tracker::SmartPointerSlice { building_item, .. } =
1464                    &mut frames[top_idx].tracker
1465                {
1466                    *building_item = true;
1467                }
1468                return Ok(self);
1469            } else {
1470                // Get shape info upfront to avoid borrow conflicts
1471                let current_shape = self.frames().last().unwrap().allocated.shape();
1472
1473                let frames = self.frames_mut();
1474                let top_idx = frames.len() - 1;
1475
1476                if let Tracker::SmartPointerSlice { vtable, .. } = &frames[top_idx].tracker {
1477                    // Convert the builder to Arc<[T]>
1478                    let vtable = *vtable;
1479                    let builder_ptr = unsafe { frames[top_idx].data.assume_init() };
1480                    let arc_ptr = unsafe { (vtable.convert_fn)(builder_ptr) };
1481
1482                    match frames[top_idx].ownership {
1483                        FrameOwnership::Field { field_idx } => {
1484                            // Arc<[T]> is a field in a struct
1485                            // The field frame's original data pointer was overwritten with the builder pointer,
1486                            // so we need to reconstruct where the Arc should be written.
1487
1488                            // Get parent frame and field info
1489                            let parent_idx = top_idx - 1;
1490                            let parent_frame = &frames[parent_idx];
1491
1492                            // Get the field to find its offset
1493                            let field = if let Type::User(UserType::Struct(struct_type)) =
1494                                parent_frame.allocated.shape().ty
1495                            {
1496                                &struct_type.fields[field_idx]
1497                            } else {
1498                                return Err(self.err(ReflectErrorKind::InvariantViolation {
1499                                invariant: "SmartPointerSlice field frame parent must be a struct",
1500                            }));
1501                            };
1502
1503                            // Calculate where the Arc should be written (parent.data + field.offset)
1504                            let field_location =
1505                                unsafe { parent_frame.data.field_uninit(field.offset) };
1506
1507                            // Write the Arc to the parent struct's field location
1508                            let arc_layout = match current_shape.layout.sized_layout() {
1509                                Ok(layout) => layout,
1510                                Err(_) => {
1511                                    return Err(self.err(ReflectErrorKind::Unsized {
1512                                    shape: current_shape,
1513                                    operation: "SmartPointerSlice conversion requires sized Arc",
1514                                }));
1515                                }
1516                            };
1517                            let arc_size = arc_layout.size();
1518                            unsafe {
1519                                core::ptr::copy_nonoverlapping(
1520                                    arc_ptr.as_byte_ptr(),
1521                                    field_location.as_mut_byte_ptr(),
1522                                    arc_size,
1523                                );
1524                            }
1525
1526                            // Free the staging allocation from convert_fn (the Arc was copied to field_location)
1527                            unsafe {
1528                                ::alloc::alloc::dealloc(
1529                                    arc_ptr.as_byte_ptr() as *mut u8,
1530                                    arc_layout,
1531                                );
1532                            }
1533
1534                            // Update the frame to point to the correct field location and mark as initialized
1535                            frames[top_idx].data = field_location;
1536                            frames[top_idx].tracker = Tracker::Scalar;
1537                            frames[top_idx].is_init = true;
1538
1539                            // Return WITHOUT popping - the field frame will be popped by the next end() call
1540                            return Ok(self);
1541                        }
1542                        FrameOwnership::Owned => {
1543                            // Arc<[T]> is the root type or owned independently
1544                            // The frame already has the allocation, we just need to update it with the Arc
1545
1546                            // The frame's data pointer is currently the builder, but we allocated
1547                            // the Arc memory in the convert_fn. Update to point to the Arc.
1548                            frames[top_idx].data = PtrUninit::new(arc_ptr.as_byte_ptr() as *mut u8);
1549                            frames[top_idx].tracker = Tracker::Scalar;
1550                            frames[top_idx].is_init = true;
1551                            // Keep Owned ownership so Guard will properly deallocate
1552
1553                            // Return WITHOUT popping - the frame stays and will be built/dropped normally
1554                            return Ok(self);
1555                        }
1556                        FrameOwnership::TrackedBuffer
1557                        | FrameOwnership::BorrowedInPlace
1558                        | FrameOwnership::External
1559                        | FrameOwnership::RopeSlot => {
1560                            return Err(self.err(ReflectErrorKind::InvariantViolation {
1561                            invariant: "SmartPointerSlice cannot have TrackedBuffer/BorrowedInPlace/External/RopeSlot ownership after conversion",
1562                        }));
1563                        }
1564                    }
1565                }
1566            }
1567        }
1568
1569        if self.frames().len() <= 1 {
1570            // Never pop the last/root frame - this indicates a broken state machine
1571            // No need to poison - returning Err consumes self, Drop will handle cleanup
1572            return Err(self.err(ReflectErrorKind::InvariantViolation {
1573                invariant: "Partial::end() called with only one frame on the stack",
1574            }));
1575        }
1576
1577        // In deferred mode, cannot pop below the start depth
1578        if let Some(start_depth) = self.start_depth()
1579            && self.frames().len() <= start_depth
1580        {
1581            // No need to poison - returning Err consumes self, Drop will handle cleanup
1582            return Err(self.err(ReflectErrorKind::InvariantViolation {
1583                invariant: "Partial::end() called but would pop below deferred start depth",
1584            }));
1585        }
1586
1587        // Require that the top frame is fully initialized before popping.
1588        // In deferred mode, tracked frames (those that will be stored for re-entry)
1589        // defer validation to finish_deferred(). All other frames validate now
1590        // using the TypePlan's FillRule (which knows what's Required vs Defaultable).
1591        let requires_full_init = if !self.is_deferred() {
1592            true
1593        } else {
1594            // If this frame will be stored, defer validation to finish_deferred().
1595            // Otherwise validate now.
1596            !self.should_store_frame_for_deferred()
1597        };
1598
1599        if requires_full_init {
1600            // Try the optimized path using precomputed FieldInitPlan
1601            // Extract frame info first (borrows only self.mode)
1602            let frame_info = self.mode.stack().last().map(|frame| {
1603                let variant_idx = match &frame.tracker {
1604                    Tracker::Enum { variant_idx, .. } => Some(*variant_idx),
1605                    _ => None,
1606                };
1607                (frame.type_plan, variant_idx)
1608            });
1609
1610            // Look up plans from the type plan node - need to resolve NodeId to get the actual node
1611            let plans_info = frame_info.and_then(|(type_plan_id, variant_idx)| {
1612                let type_plan = self.root_plan.node(type_plan_id);
1613                match &type_plan.kind {
1614                    TypePlanNodeKind::Struct(struct_plan) => Some(struct_plan.fields),
1615                    TypePlanNodeKind::Enum(enum_plan) => {
1616                        let variants = self.root_plan.variants(enum_plan.variants);
1617                        variant_idx.and_then(|idx| variants.get(idx).map(|v| v.fields))
1618                    }
1619                    _ => None,
1620                }
1621            });
1622
1623            if let Some(plans_range) = plans_info {
1624                // Resolve the SliceRange to an actual slice
1625                let plans = self.root_plan.fields(plans_range);
1626                // Now mutably borrow mode.stack to get the frame
1627                // (root_plan borrow of `plans` is still active but that's fine -
1628                // mode and root_plan are separate fields)
1629                let frame = self.mode.stack_mut().last_mut().unwrap();
1630                frame
1631                    .fill_and_require_fields(plans, plans.len(), &self.root_plan)
1632                    .map_err(|e| self.err(e))?;
1633            } else {
1634                // Fall back to the old path if optimized path wasn't available
1635                if let Some(frame) = self.frames_mut().last_mut() {
1636                    frame.fill_defaults().map_err(|e| self.err(e))?;
1637                }
1638
1639                let frame = self.frames_mut().last_mut().unwrap();
1640                let result = frame.require_full_initialization();
1641                if result.is_err() {
1642                    crate::trace!(
1643                        "end() VALIDATION FAILED: {} ({:?}) is_init={} - {:?}",
1644                        frame.allocated.shape(),
1645                        frame.tracker.kind(),
1646                        frame.is_init,
1647                        result
1648                    );
1649                }
1650                result.map_err(|e| self.err(e))?
1651            }
1652        }
1653
1654        // In deferred mode, check if we should store this frame for potential re-entry.
1655        // We need to compute the storage path BEFORE popping so we can check it.
1656        //
1657        // Store frames that can be re-entered in deferred mode.
1658        // This includes structs, enums, collections, and Options (which need to be
1659        // stored so finish_deferred can find them when processing their inner values).
1660        let deferred_storage_info = if self.is_deferred() {
1661            let should_store = self.should_store_frame_for_deferred();
1662
1663            if should_store {
1664                // Compute the "field-only" path for storage by finding all Field steps
1665                // from PARENT frames only. The frame being ended shouldn't contribute to
1666                // its own path (its current_child points to ITS children, not to itself).
1667                //
1668                // Note: We include ALL frames in the path computation (including those
1669                // before start_depth) because they contain navigation info. The start_depth
1670                // only determines which frames we STORE, not which frames contribute to paths.
1671                //
1672                // Get the root shape for the Path from the first frame
1673                let root_shape = self
1674                    .frames()
1675                    .first()
1676                    .map(|f| f.allocated.shape())
1677                    .unwrap_or_else(|| <() as facet_core::Facet>::SHAPE);
1678
1679                let mut field_path = facet_path::Path::new(root_shape);
1680                let frames_len = self.frames().len();
1681                // Iterate over all frames EXCEPT the last one (the one being ended)
1682                for (frame_idx, frame) in self.frames().iter().enumerate() {
1683                    // Skip the frame being ended
1684                    if frame_idx == frames_len - 1 {
1685                        continue;
1686                    }
1687                    // Extract navigation steps from frames
1688                    // This MUST match derive_path() for consistency
1689                    match &frame.tracker {
1690                        Tracker::Struct {
1691                            current_child: Some(idx),
1692                            ..
1693                        } => {
1694                            field_path.push(PathStep::Field(*idx as u32));
1695                        }
1696                        Tracker::Enum {
1697                            current_child: Some(idx),
1698                            ..
1699                        } => {
1700                            field_path.push(PathStep::Field(*idx as u32));
1701                        }
1702                        Tracker::List {
1703                            current_child: Some(idx),
1704                            ..
1705                        } => {
1706                            field_path.push(PathStep::Index(*idx as u32));
1707                        }
1708                        Tracker::Array {
1709                            current_child: Some(idx),
1710                            ..
1711                        } => {
1712                            field_path.push(PathStep::Index(*idx as u32));
1713                        }
1714                        Tracker::Option {
1715                            building_inner: true,
1716                            ..
1717                        } => {
1718                            // Option with building_inner contributes OptionSome to path
1719                            field_path.push(PathStep::OptionSome);
1720                        }
1721                        Tracker::SmartPointer {
1722                            building_inner: true,
1723                            ..
1724                        } => {
1725                            // SmartPointer with building_inner contributes Deref to path
1726                            field_path.push(PathStep::Deref);
1727                        }
1728                        Tracker::SmartPointerSlice {
1729                            current_child: Some(idx),
1730                            ..
1731                        } => {
1732                            // SmartPointerSlice with current_child contributes Index to path
1733                            field_path.push(PathStep::Index(*idx as u32));
1734                        }
1735                        Tracker::Inner {
1736                            building_inner: true,
1737                        } => {
1738                            // Inner with building_inner contributes Inner to path
1739                            field_path.push(PathStep::Inner);
1740                        }
1741                        Tracker::Map {
1742                            current_entry_index: Some(idx),
1743                            building_key,
1744                            ..
1745                        } => {
1746                            // Map with active entry contributes MapKey or MapValue with entry index
1747                            if *building_key {
1748                                field_path.push(PathStep::MapKey(*idx as u32));
1749                            } else {
1750                                field_path.push(PathStep::MapValue(*idx as u32));
1751                            }
1752                        }
1753                        _ => {}
1754                    }
1755
1756                    // If the next frame on the stack is a proxy frame, add a Proxy
1757                    // path step. This distinguishes the proxy frame (and its children)
1758                    // from the parent frame that the proxy writes into, preventing path
1759                    // collisions in deferred mode where both frames are stored.
1760                    if frame_idx + 1 < frames_len
1761                        && self.frames()[frame_idx + 1].using_custom_deserialization
1762                    {
1763                        field_path.push(PathStep::Proxy);
1764                    }
1765                }
1766
1767                if !field_path.is_empty() {
1768                    Some(field_path)
1769                } else {
1770                    None
1771                }
1772            } else {
1773                None
1774            }
1775        } else {
1776            None
1777        };
1778
1779        // Pop the frame and save its data pointer for SmartPointer handling
1780        let mut popped_frame = self.frames_mut().pop().unwrap();
1781
1782        // In non-deferred mode, proxy frames are processed immediately.
1783        // In deferred mode, proxy frames are stored (with a PathStep::Proxy
1784        // distinguishing them from their parent) and the conversion is handled
1785        // by finish_deferred after children have been fully materialized.
1786        if popped_frame.using_custom_deserialization && deferred_storage_info.is_none() {
1787            // First check the proxy stored in the frame (used for format-specific proxies
1788            // and container-level proxies), then fall back to field-level proxy.
1789            // This ordering is important because format-specific proxies store their
1790            // proxy in shape_level_proxy, and we want them to take precedence over
1791            // the format-agnostic field.proxy().
1792            let deserialize_with: Option<facet_core::ProxyConvertInFn> =
1793                popped_frame.shape_level_proxy.map(|p| p.convert_in);
1794
1795            // Fall back to field-level proxy (format-agnostic)
1796            let deserialize_with = deserialize_with.or_else(|| {
1797                self.parent_field()
1798                    .and_then(|f| f.proxy().map(|p| p.convert_in))
1799            });
1800
1801            if let Some(deserialize_with) = deserialize_with {
1802                // Get parent shape upfront to avoid borrow conflicts
1803                let parent_shape = self.frames().last().unwrap().allocated.shape();
1804                let parent_frame = self.frames_mut().last_mut().unwrap();
1805
1806                trace!(
1807                    "Detected custom conversion needed from {} to {}",
1808                    popped_frame.allocated.shape(),
1809                    parent_shape
1810                );
1811
1812                unsafe {
1813                    let res = {
1814                        let inner_value_ptr = popped_frame.data.assume_init().as_const();
1815                        (deserialize_with)(inner_value_ptr, parent_frame.data)
1816                    };
1817                    let popped_frame_shape = popped_frame.allocated.shape();
1818
1819                    // Note: We do NOT call deinit() here because deserialize_with uses
1820                    // ptr::read to take ownership of the source value. Calling deinit()
1821                    // would cause a double-free. We mark is_init as false to satisfy
1822                    // dealloc()'s assertion, then deallocate the memory.
1823                    popped_frame.is_init = false;
1824                    popped_frame.dealloc();
1825                    let parent_data = parent_frame.data;
1826                    match res {
1827                        Ok(rptr) => {
1828                            if rptr.as_uninit() != parent_data {
1829                                return Err(self.err(
1830                                    ReflectErrorKind::CustomDeserializationError {
1831                                        message:
1832                                            "deserialize_with did not return the expected pointer"
1833                                                .into(),
1834                                        src_shape: popped_frame_shape,
1835                                        dst_shape: parent_shape,
1836                                    },
1837                                ));
1838                            }
1839                        }
1840                        Err(message) => {
1841                            return Err(self.err(ReflectErrorKind::CustomDeserializationError {
1842                                message,
1843                                src_shape: popped_frame_shape,
1844                                dst_shape: parent_shape,
1845                            }));
1846                        }
1847                    }
1848                    // Re-borrow parent_frame after potential early returns
1849                    let parent_frame = self.frames_mut().last_mut().unwrap();
1850                    parent_frame.mark_as_init();
1851                }
1852                return Ok(self);
1853            }
1854        }
1855
1856        // If we determined this frame should be stored for deferred re-entry, do it now
1857        if let Some(storage_path) = deferred_storage_info {
1858            trace!(
1859                "end(): Storing frame for deferred path {:?}, shape {}",
1860                storage_path,
1861                popped_frame.allocated.shape()
1862            );
1863
1864            if let FrameMode::Deferred {
1865                stack,
1866                stored_frames,
1867                ..
1868            } = &mut self.mode
1869            {
1870                // Mark the field as initialized in the parent frame.
1871                // This is important because the parent might validate before
1872                // finish_deferred runs (e.g., parent is an array element that
1873                // isn't stored). Without this, the parent's validation would
1874                // fail with "missing field".
1875                if let FrameOwnership::Field { field_idx } = popped_frame.ownership
1876                    && let Some(parent_frame) = stack.last_mut()
1877                {
1878                    Self::mark_field_initialized_by_index(parent_frame, field_idx);
1879                }
1880
1881                // For BorrowedInPlace DynamicValue frames (e.g., re-entered pending entries),
1882                // flush pending_elements/pending_entries and return without storing.
1883                // These frames point to memory that's already tracked in the parent's
1884                // pending_entries - storing them would overwrite the entry.
1885                if matches!(popped_frame.ownership, FrameOwnership::BorrowedInPlace) {
1886                    crate::trace!(
1887                        "end(): BorrowedInPlace frame, flushing pending items and returning"
1888                    );
1889                    if let Err(kind) = popped_frame.require_full_initialization() {
1890                        return Err(ReflectError::new(kind, storage_path));
1891                    }
1892                    return Ok(self);
1893                }
1894
1895                // Handle Map state transitions even when storing frames.
1896                // The Map needs to transition states so that subsequent operations work:
1897                // - PushingKey -> PushingValue: so begin_value() can be called
1898                // - PushingValue -> Idle: so begin_key() can be called for the next entry
1899                // The frames are still stored for potential re-entry and finalization.
1900                if let Some(parent_frame) = stack.last_mut() {
1901                    if let Tracker::Map {
1902                        insert_state,
1903                        pending_entries,
1904                        ..
1905                    } = &mut parent_frame.tracker
1906                    {
1907                        match insert_state {
1908                            MapInsertState::PushingKey { key_ptr, .. } => {
1909                                // Transition to PushingValue state.
1910                                // key_frame_stored = true because the key frame is being stored,
1911                                // so the stored frame will handle cleanup (not the Map's deinit).
1912                                *insert_state = MapInsertState::PushingValue {
1913                                    key_ptr: *key_ptr,
1914                                    value_ptr: None,
1915                                    value_initialized: false,
1916                                    value_frame_on_stack: false,
1917                                    key_frame_stored: true,
1918                                };
1919                                crate::trace!(
1920                                    "end(): Map transitioned to PushingValue while storing key frame"
1921                                );
1922                            }
1923                            MapInsertState::PushingValue {
1924                                key_ptr,
1925                                value_ptr: Some(value_ptr),
1926                                ..
1927                            } => {
1928                                // Add entry to pending_entries and reset to Idle
1929                                pending_entries.push((*key_ptr, *value_ptr));
1930                                *insert_state = MapInsertState::Idle;
1931                                crate::trace!(
1932                                    "end(): Map added entry to pending_entries while storing value frame"
1933                                );
1934                            }
1935                            _ => {}
1936                        }
1937                    }
1938
1939                    // Handle Set element insertion immediately.
1940                    // Set elements have no path identity (no index), so they can't be stored
1941                    // and re-entered. We must insert them into the Set now.
1942                    if let Tracker::Set { current_child } = &mut parent_frame.tracker
1943                        && *current_child
1944                        && parent_frame.is_init
1945                        && let Def::Set(set_def) = parent_frame.allocated.shape().def
1946                    {
1947                        let insert = set_def.vtable.insert;
1948                        let element_ptr = PtrMut::new(popped_frame.data.as_mut_byte_ptr());
1949                        unsafe {
1950                            insert(
1951                                PtrMut::new(parent_frame.data.as_mut_byte_ptr()),
1952                                element_ptr,
1953                            );
1954                        }
1955                        crate::trace!("end(): Set element inserted immediately in deferred mode");
1956                        // Insert moved out of popped_frame - don't store it
1957                        popped_frame.tracker = Tracker::Scalar;
1958                        popped_frame.is_init = false;
1959                        popped_frame.dealloc();
1960                        *current_child = false;
1961                        // Don't store this frame - return early
1962                        return Ok(self);
1963                    }
1964
1965                    // Handle DynamicValue object entry - add to pending_entries for deferred insertion.
1966                    // Like Map entries, we store the key-value pair and insert during finalization.
1967                    if let Tracker::DynamicValue {
1968                        state:
1969                            DynamicValueState::Object {
1970                                insert_state,
1971                                pending_entries,
1972                            },
1973                    } = &mut parent_frame.tracker
1974                        && let DynamicObjectInsertState::BuildingValue { key } = insert_state
1975                    {
1976                        // Take ownership of the key from insert_state
1977                        let key = core::mem::take(key);
1978
1979                        // Finalize the child Value before adding to pending_entries.
1980                        // The child might have its own pending_entries/pending_elements
1981                        // that need to be inserted first.
1982                        if let Err(kind) = popped_frame.require_full_initialization() {
1983                            return Err(ReflectError::new(kind, storage_path.clone()));
1984                        }
1985
1986                        // Add to pending_entries for deferred insertion
1987                        pending_entries.push((key, popped_frame.data));
1988                        crate::trace!(
1989                            "end(): DynamicValue object entry added to pending_entries in deferred mode"
1990                        );
1991
1992                        // The value frame's data is now owned by pending_entries
1993                        // Mark frame as not owning the data so it won't be deallocated
1994                        popped_frame.tracker = Tracker::Scalar;
1995                        popped_frame.is_init = false;
1996                        // Don't dealloc - pending_entries owns the pointer now
1997
1998                        // Reset insert state to Idle so more entries can be added
1999                        *insert_state = DynamicObjectInsertState::Idle;
2000
2001                        // Don't store this frame - return early
2002                        return Ok(self);
2003                    }
2004
2005                    // Handle DynamicValue array element - add to pending_elements for deferred insertion.
2006                    if let Tracker::DynamicValue {
2007                        state:
2008                            DynamicValueState::Array {
2009                                building_element,
2010                                pending_elements,
2011                            },
2012                    } = &mut parent_frame.tracker
2013                        && *building_element
2014                    {
2015                        // Finalize the child Value before adding to pending_elements.
2016                        // The child might have its own pending_entries/pending_elements
2017                        // that need to be inserted first.
2018                        if let Err(kind) = popped_frame.require_full_initialization() {
2019                            return Err(ReflectError::new(kind, storage_path.clone()));
2020                        }
2021
2022                        // Add to pending_elements for deferred insertion
2023                        pending_elements.push(popped_frame.data);
2024                        crate::trace!(
2025                            "end(): DynamicValue array element added to pending_elements in deferred mode"
2026                        );
2027
2028                        // The element frame's data is now owned by pending_elements
2029                        // Mark frame as not owning the data so it won't be deallocated
2030                        popped_frame.tracker = Tracker::Scalar;
2031                        popped_frame.is_init = false;
2032                        // Don't dealloc - pending_elements owns the pointer now
2033
2034                        // Reset building_element so more elements can be added
2035                        *building_element = false;
2036
2037                        // Don't store this frame - return early
2038                        return Ok(self);
2039                    }
2040
2041                    // For List elements stored in a rope (RopeSlot ownership), we need to
2042                    // mark the element as initialized in the rope. When the List frame is
2043                    // deinited, the rope will drop all initialized elements.
                    // No dealloc of popped_frame here: the element's memory belongs
                    // to the rope chunk, not to this frame.
2044                    if matches!(popped_frame.ownership, FrameOwnership::RopeSlot)
2045                        && let Tracker::List {
2046                            rope: Some(rope), ..
2047                        } = &mut parent_frame.tracker
2048                    {
2049                        rope.mark_last_initialized();
2050                    }
2051
2052                    // Clear building_item for SmartPointerSlice so the next element can be added
2053                    if let Tracker::SmartPointerSlice { building_item, .. } =
2054                        &mut parent_frame.tracker
2055                    {
2056                        *building_item = false;
2057                        crate::trace!(
2058                            "end(): SmartPointerSlice building_item cleared while storing element"
2059                        );
2060                    }
2061                }
2062
2063                stored_frames.insert(storage_path, popped_frame);
2064
2065                // Clear parent's current_child tracking
2066                if let Some(parent_frame) = stack.last_mut() {
2067                    parent_frame.tracker.clear_current_child();
2068                }
2069            }
2070
2071            return Ok(self);
2072        }
2073
2074        // Update parent frame's tracking when popping from a child
2075        // Get parent shape upfront to avoid borrow conflicts
2076        let parent_shape = self.frames().last().unwrap().allocated.shape();
2077        let is_deferred_mode = self.is_deferred();
2078        let parent_frame = self.frames_mut().last_mut().unwrap();
2079
2080        crate::trace!(
2081            "end(): Popped {} (tracker {:?}), Parent {} (tracker {:?})",
2082            popped_frame.allocated.shape(),
2083            popped_frame.tracker.kind(),
2084            parent_shape,
2085            parent_frame.tracker.kind()
2086        );
2087
2088        // Check if we need to do a conversion - this happens when:
2089        // 1. The parent frame has a builder_shape or inner type that matches the popped frame's shape
2090        // 2. The parent frame has try_from
2091        // 3. The parent frame is not yet initialized
2092        // 4. The parent frame's tracker is Scalar or Inner (not Option, SmartPointer, etc.)
2093        //    This ensures we only do conversion when begin_inner was used, not begin_some
        // NOTE: the `is_some()` checks guard the `unwrap()` calls below — `&&`
        // short-circuits, so `unwrap()` is never evaluated on a `None`.
2094        let needs_conversion = !parent_frame.is_init
2095            && matches!(
2096                parent_frame.tracker,
2097                Tracker::Scalar | Tracker::Inner { .. }
2098            )
2099            && ((parent_shape.builder_shape.is_some()
2100                && parent_shape.builder_shape.unwrap() == popped_frame.allocated.shape())
2101                || (parent_shape.inner.is_some()
2102                    && parent_shape.inner.unwrap() == popped_frame.allocated.shape()))
            // Both vtable flavors expose an optional `try_from`; conversion is
            // only possible when the matched flavor actually provides one.
2103            && match parent_shape.vtable {
2104                facet_core::VTableErased::Direct(vt) => vt.try_from.is_some(),
2105                facet_core::VTableErased::Indirect(vt) => vt.try_from.is_some(),
2106            };
2107
2108        if needs_conversion {
2109            trace!(
2110                "Detected implicit conversion needed from {} to {}",
2111                popped_frame.allocated.shape(),
2112                parent_shape
2113            );
2114
2115            // The conversion requires the source frame to be fully initialized
2116            // (we're about to call assume_init() and pass to try_from)
2117            if let Err(e) = popped_frame.require_full_initialization() {
2118                // Deallocate the memory since the frame wasn't fully initialized
2119                if let FrameOwnership::Owned = popped_frame.ownership
2120                    && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2121                    && layout.size() > 0
2122                {
2123                    trace!(
2124                        "Deallocating uninitialized conversion frame memory: size={}, align={}",
2125                        layout.size(),
2126                        layout.align()
2127                    );
2128                    unsafe {
2129                        ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2130                    }
2131                }
2132                return Err(self.err(e));
2133            }
2134
2135            // Perform the conversion
            // SAFETY: require_full_initialization() succeeded above, so the popped
            // frame's buffer holds a fully-initialized value of `inner_shape`.
2136            let inner_ptr = unsafe { popped_frame.data.assume_init().as_const() };
2137            let inner_shape = popped_frame.allocated.shape();
2138
2139            trace!("Converting from {} to {}", inner_shape, parent_shape);
2140
2141            // Handle Direct and Indirect vtables - both return TryFromOutcome
            // The `else` arms below are defensive: `needs_conversion` already
            // verified `try_from.is_some()` for the matched vtable flavor, so
            // they should be unreachable in practice.
2142            let outcome = match parent_shape.vtable {
2143                facet_core::VTableErased::Direct(vt) => {
2144                    if let Some(try_from_fn) = vt.try_from {
                        // SAFETY: `inner_ptr` points to an initialized value of
                        // `inner_shape`; the destination is the parent frame's
                        // allocation for `parent_shape` (uninitialized is fine —
                        // try_from writes the converted value into it).
2145                        unsafe {
2146                            try_from_fn(
2147                                parent_frame.data.as_mut_byte_ptr() as *mut (),
2148                                inner_shape,
2149                                inner_ptr,
2150                            )
2151                        }
2152                    } else {
2153                        return Err(self.err(ReflectErrorKind::OperationFailed {
2154                            shape: parent_shape,
2155                            operation: "try_from not available for this type",
2156                        }));
2157                    }
2158                }
2159                facet_core::VTableErased::Indirect(vt) => {
2160                    if let Some(try_from_fn) = vt.try_from {
2161                        // parent_frame.data is uninitialized - we're writing the converted
2162                        // value into it
2163                        let ox_uninit =
2164                            facet_core::OxPtrUninit::new(parent_frame.data, parent_shape);
                        // SAFETY: same contract as the Direct arm — the source is
                        // initialized and the destination is writable for
                        // `parent_shape`.
2165                        unsafe { try_from_fn(ox_uninit, inner_shape, inner_ptr) }
2166                    } else {
2167                        return Err(self.err(ReflectErrorKind::OperationFailed {
2168                            shape: parent_shape,
2169                            operation: "try_from not available for this type",
2170                        }));
2171                    }
2172                }
2173            };
2174
2175            // Handle the TryFromOutcome, which explicitly communicates ownership semantics:
2176            // - Converted: source was consumed, conversion succeeded
2177            // - Unsupported: source was NOT consumed, caller retains ownership
2178            // - Failed: source WAS consumed, but conversion failed
2179            match outcome {
2180                facet_core::TryFromOutcome::Converted => {
2181                    trace!("Conversion succeeded, marking parent as initialized");
2182                    parent_frame.is_init = true;
2183                    // Reset Inner tracker to Scalar after successful conversion
2184                    if matches!(parent_frame.tracker, Tracker::Inner { .. }) {
2185                        parent_frame.tracker = Tracker::Scalar;
2186                    }
2187                }
2188                facet_core::TryFromOutcome::Unsupported => {
2189                    trace!("Source type not supported for conversion - source NOT consumed");
2190
2191                    // Source was NOT consumed, so we need to drop it properly
2192                    if let FrameOwnership::Owned = popped_frame.ownership
2193                        && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2194                        && layout.size() > 0
2195                    {
2196                        // Drop the value, then deallocate
2197                        unsafe {
2198                            popped_frame
2199                                .allocated
2200                                .shape()
2201                                .call_drop_in_place(popped_frame.data.assume_init());
2202                            ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2203                        }
2204                    }
2205
2206                    return Err(self.err(ReflectErrorKind::TryFromError {
2207                        src_shape: inner_shape,
2208                        dst_shape: parent_shape,
2209                        inner: facet_core::TryFromError::UnsupportedSourceType,
2210                    }));
2211                }
2212                facet_core::TryFromOutcome::Failed(e) => {
2213                    trace!("Conversion failed after consuming source: {e:?}");
2214
2215                    // Source WAS consumed, so we only deallocate memory (don't drop)
2216                    if let FrameOwnership::Owned = popped_frame.ownership
2217                        && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2218                        && layout.size() > 0
2219                    {
2220                        trace!(
2221                            "Deallocating conversion frame memory after failure: size={}, align={}",
2222                            layout.size(),
2223                            layout.align()
2224                        );
2225                        unsafe {
2226                            ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2227                        }
2228                    }
2229
2230                    return Err(self.err(ReflectErrorKind::TryFromError {
2231                        src_shape: inner_shape,
2232                        dst_shape: parent_shape,
2233                        inner: facet_core::TryFromError::Generic(e.into_owned()),
2234                    }));
2235                }
2236            }
2237
2238            // Deallocate the inner value's memory since try_from consumed it
2239            if let FrameOwnership::Owned = popped_frame.ownership
2240                && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2241                && layout.size() > 0
2242            {
2243                trace!(
2244                    "Deallocating conversion frame memory: size={}, align={}",
2245                    layout.size(),
2246                    layout.align()
2247                );
2248                unsafe {
2249                    ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2250                }
2251            }
2252
2253            return Ok(self);
2254        }
2255
2256        // For Field-owned frames, reclaim responsibility in parent's tracker
2257        // Only mark as initialized if the child frame was actually initialized.
2258        // This prevents double-free when begin_inner/begin_some drops a value via
2259        // prepare_for_reinitialization but then fails, leaving the child uninitialized.
2260        //
2261        // We use require_full_initialization() rather than just is_init because:
2262        // - Scalar frames use is_init as the source of truth
2263        // - Struct/Array/Enum frames use their iset/data as the source of truth
2264        //   (is_init may never be set to true for these tracker types)
2265        if let FrameOwnership::Field { field_idx } = popped_frame.ownership {
2266            // Fill defaults on the child frame before checking initialization:
2267            // this handles structs/enums with optional fields that should auto-fill
2268            // (applies in deferred mode as well).
2269            if let Err(e) = popped_frame.fill_defaults() {
2270                return Err(self.err(e));
2271            }
2272            let child_is_initialized = popped_frame.require_full_initialization().is_ok();
2273            match &mut parent_frame.tracker {
2274                Tracker::Struct {
2275                    iset,
2276                    current_child,
2277                } => {
2278                    if child_is_initialized {
2279                        iset.set(field_idx); // Parent reclaims responsibility only if child was init
2280                    }
2281                    *current_child = None;
2282                }
2283                Tracker::Array {
2284                    iset,
2285                    current_child,
2286                } => {
2287                    if child_is_initialized {
2288                        iset.set(field_idx); // Parent reclaims responsibility only if child was init
2289                    }
2290                    *current_child = None;
2291                }
2292                Tracker::Enum {
2293                    data,
2294                    current_child,
2295                    ..
2296                } => {
2297                    crate::trace!(
2298                        "end(): Enum field {} child_is_initialized={}, data before={:?}",
2299                        field_idx,
2300                        child_is_initialized,
2301                        data
2302                    );
2303                    if child_is_initialized {
2304                        data.set(field_idx); // Parent reclaims responsibility only if child was init
2305                    }
2306                    *current_child = None;
2307                }
                // Other tracker kinds carry no per-field init set here, so
                // there is nothing to reclaim for them.
2308                _ => {}
2309            }
2310            return Ok(self);
2311        }
2312
2313        // For BorrowedInPlace DynamicValue frames (e.g., re-entered pending entries),
2314        // flush any pending_elements/pending_entries that were accumulated during
2315        // this re-entry. This is necessary because BorrowedInPlace frames aren't
2316        // stored for deferred processing - they modify existing memory in-place.
        // NOTE(review): the "flush" described above presumably happens inside
        // require_full_initialization() — this call site only surfaces its error.
        // Confirm against that method's implementation.
2317        if matches!(popped_frame.ownership, FrameOwnership::BorrowedInPlace)
2318            && let Err(e) = popped_frame.require_full_initialization()
2319        {
2320            return Err(self.err(e));
2321        }
2322
2323        match &mut parent_frame.tracker {
2324            Tracker::SmartPointer {
2325                building_inner,
2326                pending_inner,
2327            } => {
2328                crate::trace!(
2329                    "end() SMARTPTR: popped {} into parent {} (building_inner={}, deferred={})",
2330                    popped_frame.allocated.shape(),
2331                    parent_frame.allocated.shape(),
2332                    *building_inner,
2333                    is_deferred_mode
2334                );
2335                // We just popped the inner value frame for a SmartPointer
2336                if *building_inner {
2337                    if matches!(parent_frame.allocated.shape().def, Def::Pointer(_)) {
2338                        // Check if we're in deferred mode - if so, store the inner value pointer
2339                        if is_deferred_mode {
2340                            // Store the inner value pointer for deferred new_into_fn.
2341                            *pending_inner = Some(popped_frame.data);
2342                            *building_inner = false;
2343                            parent_frame.is_init = true;
2344                            crate::trace!(
2345                                "end() SMARTPTR: stored pending_inner, will finalize in finish_deferred"
2346                            );
2347                        } else {
2348                            // Not in deferred mode - complete immediately
2349                            if let Def::Pointer(_) = parent_frame.allocated.shape().def {
2350                                if let Err(e) = popped_frame.require_full_initialization() {
2351                                    popped_frame.deinit();
2352                                    popped_frame.dealloc();
2353                                    return Err(self.err(e));
2354                                }
2355
2356                                // Use complete_smart_pointer_frame which handles both:
2357                                // - Sized pointees (via new_into_fn)
2358                                // - Unsized pointees like str (via String conversion)
2359                                Self::complete_smart_pointer_frame(parent_frame, popped_frame);
2360                                crate::trace!(
2361                                    "end() SMARTPTR: completed smart pointer via complete_smart_pointer_frame"
2362                                );
2363
2364                                // Change tracker to Scalar so the next end() just pops it
2365                                parent_frame.tracker = Tracker::Scalar;
2366                            }
2367                        }
2368                    } else {
2369                        return Err(self.err(ReflectErrorKind::OperationFailed {
2370                            shape: parent_shape,
2371                            operation: "SmartPointer frame without SmartPointer definition",
2372                        }));
2373                    }
2374                } else {
2375                    // building_inner is false - shouldn't happen in normal flow
2376                    return Err(self.err(ReflectErrorKind::OperationFailed {
2377                        shape: parent_shape,
2378                        operation: "SmartPointer end() called with building_inner = false",
2379                    }));
2380                }
2381            }
2382            Tracker::List {
2383                current_child,
2384                rope,
2385                ..
2386            } if parent_frame.is_init => {
2387                if current_child.is_some() {
2388                    // We just popped an element frame, now add it to the list
2389                    if let Def::List(list_def) = parent_shape.def {
2390                        // Check which storage mode we used
2391                        if matches!(popped_frame.ownership, FrameOwnership::RopeSlot) {
2392                            // Rope storage: element lives in a stable chunk.
2393                            // Mark it as initialized; we'll drain to Vec when the list frame pops.
2394                            if let Some(rope) = rope {
2395                                rope.mark_last_initialized();
2396                            }
2397                            // No dealloc needed - memory belongs to rope
2398                        } else {
2399                            // Fallback: element is in separate heap buffer, use push to copy
2400                            let Some(push_fn) = list_def.push() else {
2401                                return Err(self.err(ReflectErrorKind::OperationFailed {
2402                                    shape: parent_shape,
2403                                    operation: "List missing push function",
2404                                }));
2405                            };
2406
2407                            // The child frame contained the element value
2408                            let element_ptr = PtrMut::new(popped_frame.data.as_mut_byte_ptr());
2409
2410                            // Use push to add element to the list
2411                            unsafe {
2412                                push_fn(
2413                                    PtrMut::new(parent_frame.data.as_mut_byte_ptr()),
2414                                    element_ptr,
2415                                );
2416                            }
2417
2418                            // Push moved out of popped_frame
2419                            popped_frame.tracker = Tracker::Scalar;
2420                            popped_frame.is_init = false;
2421                            popped_frame.dealloc();
2422                        }
2423
2424                        *current_child = None;
2425                    }
2426                }
2427            }
2428            Tracker::Map {
2429                insert_state,
2430                pending_entries,
2431                ..
2432            } if parent_frame.is_init => {
2433                match insert_state {
2434                    MapInsertState::PushingKey { key_ptr, .. } => {
2435                        // Fill defaults on the key frame before considering it done.
2436                        // This handles metadata containers and other structs with Option fields.
2437                        if let Err(e) = popped_frame.fill_defaults() {
2438                            return Err(self.err(e));
2439                        }
2440
2441                        // We just popped the key frame - mark key as initialized and transition
2442                        // to PushingValue state. key_frame_on_stack = false because the frame
2443                        // was just popped, so Map now owns the key buffer.
2444                        *insert_state = MapInsertState::PushingValue {
2445                            key_ptr: *key_ptr,
2446                            value_ptr: None,
2447                            value_initialized: false,
2448                            value_frame_on_stack: false, // No value frame yet
2449                            key_frame_stored: false,     // Key frame was popped, Map owns key
2450                        };
2451                    }
2452                    MapInsertState::PushingValue {
2453                        key_ptr, value_ptr, ..
2454                    } => {
2455                        // Fill defaults on the value frame before considering it done.
2456                        // This handles structs with Option fields.
2457                        if let Err(e) = popped_frame.fill_defaults() {
2458                            return Err(self.err(e));
2459                        }
2460
2461                        // We just popped the value frame.
2462                        // Instead of inserting immediately, add to pending_entries.
2463                        // This keeps the buffers alive for deferred processing.
2464                        // Actual insertion happens in require_full_initialization.
2465                        if let Some(value_ptr) = value_ptr {
2466                            pending_entries.push((*key_ptr, *value_ptr));
2467
2468                            // Reset to idle state
2469                            *insert_state = MapInsertState::Idle;
2470                        }
2471                    }
2472                    MapInsertState::Idle => {
2473                        // Nothing to do
2474                    }
2475                }
2476            }
2477            Tracker::Set { current_child } if parent_frame.is_init => {
2478                if *current_child {
2479                    // We just popped an element frame, now insert it into the set
2480                    if let Def::Set(set_def) = parent_frame.allocated.shape().def {
2481                        let insert = set_def.vtable.insert;
2482
2483                        // The child frame contained the element value
2484                        let element_ptr = PtrMut::new(popped_frame.data.as_mut_byte_ptr());
2485
2486                        // Use insert to add element to the set
2487                        unsafe {
2488                            insert(
2489                                PtrMut::new(parent_frame.data.as_mut_byte_ptr()),
2490                                element_ptr,
2491                            );
2492                        }
2493
2494                        // Insert moved out of popped_frame
2495                        popped_frame.tracker = Tracker::Scalar;
2496                        popped_frame.is_init = false;
2497                        popped_frame.dealloc();
2498
2499                        *current_child = false;
2500                    }
2501                }
2502            }
2503            Tracker::Option {
2504                building_inner,
2505                pending_inner,
2506            } => {
2507                crate::trace!(
2508                    "end(): matched Tracker::Option, building_inner={}",
2509                    *building_inner
2510                );
2511                // We just popped the inner value frame for an Option's Some variant
2512                if *building_inner {
2513                    if matches!(parent_frame.allocated.shape().def, Def::Option(_)) {
2514                        // Store the inner value pointer for deferred init_some.
2515                        // This keeps the inner value's memory stable for deferred processing.
2516                        // Actual init_some() happens in require_full_initialization().
2517                        *pending_inner = Some(popped_frame.data);
2518
2519                        // Mark that we're no longer building the inner value
2520                        *building_inner = false;
2521                        crate::trace!("end(): stored pending_inner, set building_inner to false");
2522                        // Mark the Option as initialized (pending finalization)
2523                        parent_frame.is_init = true;
2524                        crate::trace!("end(): set parent_frame.is_init to true");
2525                    } else {
2526                        return Err(self.err(ReflectErrorKind::OperationFailed {
2527                            shape: parent_shape,
2528                            operation: "Option frame without Option definition",
2529                        }));
2530                    }
2531                } else {
2532                    // building_inner is false - the Option was already initialized but
2533                    // begin_some was called again. The popped frame was not used to
2534                    // initialize the Option, so we need to clean it up.
2535                    popped_frame.deinit();
2536                    if let FrameOwnership::Owned = popped_frame.ownership
2537                        && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2538                        && layout.size() > 0
2539                    {
2540                        unsafe {
2541                            ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2542                        }
2543                    }
2544                }
2545            }
2546            Tracker::Result {
2547                is_ok,
2548                building_inner,
2549            } => {
2550                crate::trace!(
2551                    "end(): matched Tracker::Result, is_ok={}, building_inner={}",
2552                    *is_ok,
2553                    *building_inner
2554                );
2555                // We just popped the inner value frame for a Result's Ok or Err variant
2556                if *building_inner {
2557                    if let Def::Result(result_def) = parent_frame.allocated.shape().def {
2558                        // The popped frame contains the inner value
2559                        let inner_value_ptr = unsafe { popped_frame.data.assume_init() };
2560
2561                        // Initialize the Result as Ok(inner_value) or Err(inner_value)
2562                        if *is_ok {
2563                            let init_ok_fn = result_def.vtable.init_ok;
2564                            unsafe {
2565                                init_ok_fn(parent_frame.data, inner_value_ptr);
2566                            }
2567                        } else {
2568                            let init_err_fn = result_def.vtable.init_err;
2569                            unsafe {
2570                                init_err_fn(parent_frame.data, inner_value_ptr);
2571                            }
2572                        }
2573
2574                        // Deallocate the inner value's memory since init_ok/err_fn moved it
2575                        if let FrameOwnership::Owned = popped_frame.ownership
2576                            && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2577                            && layout.size() > 0
2578                        {
2579                            unsafe {
2580                                ::alloc::alloc::dealloc(
2581                                    popped_frame.data.as_mut_byte_ptr(),
2582                                    layout,
2583                                );
2584                            }
2585                        }
2586
2587                        // Mark that we're no longer building the inner value
2588                        *building_inner = false;
2589                        crate::trace!("end(): set building_inner to false");
2590                        // Mark the Result as initialized
2591                        parent_frame.is_init = true;
2592                        crate::trace!("end(): set parent_frame.is_init to true");
2593                    } else {
2594                        return Err(self.err(ReflectErrorKind::OperationFailed {
2595                            shape: parent_shape,
2596                            operation: "Result frame without Result definition",
2597                        }));
2598                    }
2599                } else {
2600                    // building_inner is false - the Result was already initialized but
2601                    // begin_ok/begin_err was called again. The popped frame was not used to
2602                    // initialize the Result, so we need to clean it up.
2603                    popped_frame.deinit();
2604                    if let FrameOwnership::Owned = popped_frame.ownership
2605                        && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2606                        && layout.size() > 0
2607                    {
2608                        unsafe {
2609                            ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2610                        }
2611                    }
2612                }
2613            }
2614            Tracker::Scalar => {
2615                // the main case here is: the popped frame was a `String` and the
2616                // parent frame is an `Arc<str>`, `Box<str>` etc.
2617                match &parent_shape.def {
2618                    Def::Pointer(smart_ptr_def) => {
2619                        let pointee = match smart_ptr_def.pointee() {
2620                            Some(p) => p,
2621                            None => {
2622                                return Err(self.err(ReflectErrorKind::InvariantViolation {
2623                                    invariant: "pointer type doesn't have a pointee",
2624                                }));
2625                            }
2626                        };
2627
2628                        if !pointee.is_shape(str::SHAPE) {
2629                            return Err(self.err(ReflectErrorKind::InvariantViolation {
2630                                invariant: "only T=str is supported when building SmartPointer<T> and T is unsized",
2631                            }));
2632                        }
2633
2634                        if !popped_frame.allocated.shape().is_shape(String::SHAPE) {
2635                            return Err(self.err(ReflectErrorKind::InvariantViolation {
2636                                invariant: "the popped frame should be String when building a SmartPointer<T>",
2637                            }));
2638                        }
2639
2640                        if let Err(e) = popped_frame.require_full_initialization() {
2641                            return Err(self.err(e));
2642                        }
2643
2644                        // if the just-popped frame was a SmartPointerStr, we have some conversion to do:
2645                        // Special-case: SmartPointer<str> (Box<str>, Arc<str>, Rc<str>) via SmartPointerStr tracker
2646                        // Here, popped_frame actually contains a value for String that should be moved into the smart pointer.
2647                        // We convert the String into Box<str>, Arc<str>, or Rc<str> as appropriate and write it to the parent frame.
2648                        use ::alloc::{rc::Rc, string::String, sync::Arc};
2649
2650                        let Some(known) = smart_ptr_def.known else {
2651                            return Err(self.err(ReflectErrorKind::OperationFailed {
2652                                shape: parent_shape,
2653                                operation: "SmartPointerStr for unknown smart pointer kind",
2654                            }));
2655                        };
2656
2657                        parent_frame.deinit();
2658
2659                        // Interpret the memory as a String, then convert and write.
2660                        let string_ptr = popped_frame.data.as_mut_byte_ptr() as *mut String;
2661                        let string_value = unsafe { core::ptr::read(string_ptr) };
2662
2663                        match known {
2664                            KnownPointer::Box => {
2665                                let boxed: Box<str> = string_value.into_boxed_str();
2666                                unsafe {
2667                                    core::ptr::write(
2668                                        parent_frame.data.as_mut_byte_ptr() as *mut Box<str>,
2669                                        boxed,
2670                                    );
2671                                }
2672                            }
2673                            KnownPointer::Arc => {
2674                                let arc: Arc<str> = Arc::from(string_value.into_boxed_str());
2675                                unsafe {
2676                                    core::ptr::write(
2677                                        parent_frame.data.as_mut_byte_ptr() as *mut Arc<str>,
2678                                        arc,
2679                                    );
2680                                }
2681                            }
2682                            KnownPointer::Rc => {
2683                                let rc: Rc<str> = Rc::from(string_value.into_boxed_str());
2684                                unsafe {
2685                                    core::ptr::write(
2686                                        parent_frame.data.as_mut_byte_ptr() as *mut Rc<str>,
2687                                        rc,
2688                                    );
2689                                }
2690                            }
2691                            _ => {
2692                                return Err(self.err(ReflectErrorKind::OperationFailed {
2693                                    shape: parent_shape,
2694                                    operation: "Don't know how to build this pointer type",
2695                                }));
2696                            }
2697                        }
2698
2699                        parent_frame.is_init = true;
2700
2701                        popped_frame.tracker = Tracker::Scalar;
2702                        popped_frame.is_init = false;
2703                        popped_frame.dealloc();
2704                    }
2705                    _ => {
2706                        // This can happen if begin_inner() was called on a type that
2707                        // has shape.inner but isn't a SmartPointer (e.g., Option).
2708                        // In this case, we can't complete the conversion, so return error.
2709                        return Err(self.err(ReflectErrorKind::OperationFailed {
2710                            shape: parent_shape,
2711                            operation: "end() called but parent has Uninit/Init tracker and isn't a SmartPointer",
2712                        }));
2713                    }
2714                }
2715            }
2716            Tracker::SmartPointerSlice {
2717                vtable,
2718                building_item,
2719                ..
2720            } => {
2721                if *building_item {
2722                    // We just popped an element frame, now push it to the slice builder
2723                    let element_ptr = PtrMut::new(popped_frame.data.as_mut_byte_ptr());
2724
2725                    // Use the slice builder's push_fn to add the element
2726                    crate::trace!("Pushing element to slice builder");
2727                    unsafe {
2728                        let parent_ptr = parent_frame.data.assume_init();
2729                        (vtable.push_fn)(parent_ptr, element_ptr);
2730                    }
2731
2732                    popped_frame.tracker = Tracker::Scalar;
2733                    popped_frame.is_init = false;
2734                    popped_frame.dealloc();
2735
2736                    if let Tracker::SmartPointerSlice {
2737                        building_item: bi, ..
2738                    } = &mut parent_frame.tracker
2739                    {
2740                        *bi = false;
2741                    }
2742                }
2743            }
2744            Tracker::DynamicValue {
2745                state:
2746                    DynamicValueState::Array {
2747                        building_element, ..
2748                    },
2749            } => {
2750                if *building_element {
2751                    // Check that the element is initialized before pushing
2752                    if !popped_frame.is_init {
2753                        // Element was never set - clean up and return error
2754                        let shape = parent_frame.allocated.shape();
2755                        popped_frame.dealloc();
2756                        *building_element = false;
2757                        // No need to poison - returning Err consumes self, Drop will handle cleanup
2758                        return Err(self.err(ReflectErrorKind::OperationFailed {
2759                            shape,
2760                            operation: "end() called but array element was never initialized",
2761                        }));
2762                    }
2763
2764                    // We just popped an element frame, now push it to the dynamic array
2765                    if let Def::DynamicValue(dyn_def) = parent_frame.allocated.shape().def {
2766                        // Get mutable pointers - both array and element need PtrMut
2767                        let array_ptr = unsafe { parent_frame.data.assume_init() };
2768                        let element_ptr = unsafe { popped_frame.data.assume_init() };
2769
2770                        // Use push_array_element to add element to the array
2771                        unsafe {
2772                            (dyn_def.vtable.push_array_element)(array_ptr, element_ptr);
2773                        }
2774
2775                        // Push moved out of popped_frame
2776                        popped_frame.tracker = Tracker::Scalar;
2777                        popped_frame.is_init = false;
2778                        popped_frame.dealloc();
2779
2780                        *building_element = false;
2781                    }
2782                }
2783            }
2784            Tracker::DynamicValue {
2785                state: DynamicValueState::Object { insert_state, .. },
2786            } => {
2787                if let DynamicObjectInsertState::BuildingValue { key } = insert_state {
2788                    // Check that the value is initialized before inserting
2789                    if !popped_frame.is_init {
2790                        // Value was never set - clean up and return error
2791                        let shape = parent_frame.allocated.shape();
2792                        popped_frame.dealloc();
2793                        *insert_state = DynamicObjectInsertState::Idle;
2794                        // No need to poison - returning Err consumes self, Drop will handle cleanup
2795                        return Err(self.err(ReflectErrorKind::OperationFailed {
2796                            shape,
2797                            operation: "end() called but object entry value was never initialized",
2798                        }));
2799                    }
2800
2801                    // We just popped a value frame, now insert it into the dynamic object
2802                    if let Def::DynamicValue(dyn_def) = parent_frame.allocated.shape().def {
2803                        // Get mutable pointers - both object and value need PtrMut
2804                        let object_ptr = unsafe { parent_frame.data.assume_init() };
2805                        let value_ptr = unsafe { popped_frame.data.assume_init() };
2806
2807                        // Use insert_object_entry to add the key-value pair
2808                        unsafe {
2809                            (dyn_def.vtable.insert_object_entry)(object_ptr, key, value_ptr);
2810                        }
2811
2812                        // Insert moved out of popped_frame
2813                        popped_frame.tracker = Tracker::Scalar;
2814                        popped_frame.is_init = false;
2815                        popped_frame.dealloc();
2816
2817                        // Reset insert state to Idle
2818                        *insert_state = DynamicObjectInsertState::Idle;
2819                    }
2820                }
2821            }
2822            _ => {}
2823        }
2824
2825        Ok(self)
2826    }
2827
2828    /// Returns a path representing the current traversal in the builder.
2829    ///
2830    /// The returned [`facet_path::Path`] can be formatted as a human-readable string
2831    /// using [`Path::format_with_shape()`](facet_path::Path::format_with_shape),
2832    /// e.g., `fieldName[index].subfield`.
2833    pub fn path(&self) -> Path {
2834        use facet_path::PathStep;
2835
2836        let root_shape = self
2837            .frames()
2838            .first()
2839            .expect("Partial must have at least one frame")
2840            .allocated
2841            .shape();
2842        let mut path = Path::new(root_shape);
2843
2844        for frame in self.frames().iter() {
2845            match frame.allocated.shape().ty {
2846                Type::User(user_type) => match user_type {
2847                    UserType::Struct(_struct_type) => {
2848                        // Add field step if we're currently in a field
2849                        if let Tracker::Struct {
2850                            current_child: Some(idx),
2851                            ..
2852                        } = &frame.tracker
2853                        {
2854                            path.push(PathStep::Field(*idx as u32));
2855                        }
2856                    }
2857                    UserType::Enum(enum_type) => {
2858                        // Add variant and optional field step
2859                        if let Tracker::Enum {
2860                            variant,
2861                            current_child,
2862                            ..
2863                        } = &frame.tracker
2864                        {
2865                            // Find the variant index by comparing pointers
2866                            if let Some(variant_idx) = enum_type
2867                                .variants
2868                                .iter()
2869                                .position(|v| core::ptr::eq(v, *variant))
2870                            {
2871                                path.push(PathStep::Variant(variant_idx as u32));
2872                            }
2873                            if let Some(idx) = *current_child {
2874                                path.push(PathStep::Field(idx as u32));
2875                            }
2876                        }
2877                    }
2878                    UserType::Union(_) => {
2879                        // No structural path steps for unions
2880                    }
2881                    UserType::Opaque => {
2882                        // Opaque types might be lists (e.g., Vec<T>)
2883                        if let Tracker::List {
2884                            current_child: Some(idx),
2885                            ..
2886                        } = &frame.tracker
2887                        {
2888                            path.push(PathStep::Index(*idx as u32));
2889                        }
2890                    }
2891                },
2892                Type::Sequence(facet_core::SequenceType::Array(_array_def)) => {
2893                    // Add index step if we're currently in an element
2894                    if let Tracker::Array {
2895                        current_child: Some(idx),
2896                        ..
2897                    } = &frame.tracker
2898                    {
2899                        path.push(PathStep::Index(*idx as u32));
2900                    }
2901                }
2902                Type::Sequence(_) => {
2903                    // Other sequence types (Slice, etc.) - no index tracking
2904                }
2905                Type::Pointer(_) => {
2906                    path.push(PathStep::Deref);
2907                }
2908                _ => {
2909                    // No structural path for scalars, etc.
2910                }
2911            }
2912        }
2913
2914        path
2915    }
2916
2917    /// Returns the root shape for path formatting.
2918    ///
2919    /// Use this together with [`path()`](Self::path) to format the path:
2920    /// ```ignore
2921    /// let path_str = partial.path().format_with_shape(partial.root_shape());
2922    /// ```
2923    pub fn root_shape(&self) -> &'static Shape {
2924        self.frames()
2925            .first()
2926            .expect("Partial should always have at least one frame")
2927            .allocated
2928            .shape()
2929    }
2930
2931    /// Create a [`ReflectError`] with the current path context.
2932    ///
2933    /// This is a convenience method for constructing errors inside `Partial` methods
2934    /// that automatically captures the current traversal path.
2935    #[inline]
2936    pub fn err(&self, kind: ReflectErrorKind) -> ReflectError {
2937        ReflectError::new(kind, self.path())
2938    }
2939
2940    /// Get the field for the parent frame
2941    pub fn parent_field(&self) -> Option<&Field> {
2942        self.frames()
2943            .iter()
2944            .rev()
2945            .nth(1)
2946            .and_then(|f| f.get_field())
2947    }
2948
2949    /// Gets the field for the current frame
2950    pub fn current_field(&self) -> Option<&Field> {
2951        self.frames().last().and_then(|f| f.get_field())
2952    }
2953
2954    /// Gets the nearest active field when nested wrapper frames are involved.
2955    ///
2956    /// This walks frames from innermost to outermost and returns the first frame
2957    /// that currently points at a struct/enum field.
2958    pub fn nearest_field(&self) -> Option<&Field> {
2959        self.frames().iter().rev().find_map(|f| f.get_field())
2960    }
2961
2962    /// Returns a const pointer to the current frame's data.
2963    ///
2964    /// This is useful for validation - after deserializing a field value,
2965    /// validators can read the value through this pointer.
2966    ///
2967    /// # Safety
2968    ///
2969    /// The returned pointer is valid only while the frame exists.
2970    /// The caller must ensure the frame is fully initialized before
2971    /// reading through this pointer.
2972    #[deprecated(note = "use initialized_data_ptr() instead, which checks initialization")]
2973    pub fn data_ptr(&self) -> Option<facet_core::PtrConst> {
2974        if self.state != PartialState::Active {
2975            return None;
2976        }
2977        self.frames().last().map(|f| {
2978            // SAFETY: We're in active state, so the frame is valid.
2979            // The caller is responsible for ensuring the data is initialized.
2980            unsafe { f.data.assume_init().as_const() }
2981        })
2982    }
2983
2984    /// Returns a const pointer to the current frame's data, but only if fully initialized.
2985    ///
2986    /// This is the safe way to get a pointer for validation - it verifies that
2987    /// the frame is fully initialized before returning the pointer.
2988    ///
2989    /// Returns `None` if:
2990    /// - The partial is not in active state
2991    /// - The current frame is not fully initialized
2992    #[allow(unsafe_code)]
2993    pub fn initialized_data_ptr(&mut self) -> Option<facet_core::PtrConst> {
2994        if self.state != PartialState::Active {
2995            return None;
2996        }
2997        let frame = self.frames_mut().last_mut()?;
2998
2999        // Check if fully initialized (may drain rope for lists)
3000        if frame.require_full_initialization().is_err() {
3001            return None;
3002        }
3003
3004        // SAFETY: We've verified the partial is active and the frame is fully initialized.
3005        Some(unsafe { frame.data.assume_init().as_const() })
3006    }
3007
3008    /// Returns a typed reference to the current frame's data if:
3009    /// 1. The partial is in active state
3010    /// 2. The current frame is fully initialized
3011    /// 3. The shape matches `T::SHAPE`
3012    ///
3013    /// This is the safe way to read a value from a Partial for validation purposes.
3014    #[allow(unsafe_code)]
3015    pub fn read_as<T: facet_core::Facet<'facet>>(&mut self) -> Option<&T> {
3016        if self.state != PartialState::Active {
3017            return None;
3018        }
3019        let frame = self.frames_mut().last_mut()?;
3020
3021        // Check if fully initialized (may drain rope for lists)
3022        if frame.require_full_initialization().is_err() {
3023            return None;
3024        }
3025
3026        // Check shape matches
3027        if frame.allocated.shape() != T::SHAPE {
3028            return None;
3029        }
3030
3031        // SAFETY: We've verified:
3032        // 1. The partial is active (frame is valid)
3033        // 2. The frame is fully initialized
3034        // 3. The shape matches T::SHAPE
3035        unsafe {
3036            let ptr = frame.data.assume_init().as_const();
3037            Some(&*ptr.as_ptr::<T>())
3038        }
3039    }
3040}