Skip to main content

facet_reflect/partial/partial_api/
misc.rs

1use facet_core::TryFromOutcome;
2use facet_path::{Path, PathStep};
3
4use super::*;
5use crate::typeplan::{DeserStrategy, TypePlanNodeKind};
6
7////////////////////////////////////////////////////////////////////////////////////////////////////
8// Misc.
9////////////////////////////////////////////////////////////////////////////////////////////////////
10impl<'facet, const BORROW: bool> Partial<'facet, BORROW> {
11    /// Applies a closure to this Partial, enabling chaining with operations that
12    /// take ownership and return `Result<Self, E>`.
13    ///
14    /// This is useful for chaining deserializer methods that need `&mut self`:
15    ///
16    /// ```ignore
17    /// wip = wip
18    ///     .begin_field("name")?
19    ///     .with(|w| deserializer.deserialize_into(w))?
20    ///     .end()?;
21    /// ```
22    #[inline]
23    pub fn with<F, E>(self, f: F) -> Result<Self, E>
24    where
25        F: FnOnce(Self) -> Result<Self, E>,
26    {
27        f(self)
28    }
29
30    /// Returns true if the Partial is in an active state (not built or poisoned).
31    ///
32    /// After `build()` succeeds or after an error causes poisoning, the Partial
33    /// becomes inactive and most operations will fail.
34    #[inline]
35    pub fn is_active(&self) -> bool {
36        self.state == PartialState::Active
37    }
38
39    /// Returns the current frame count (depth of nesting)
40    ///
41    /// The initial frame count is 1 — `begin_field` would push a new frame,
42    /// bringing it to 2, then `end` would bring it back to `1`.
43    ///
44    /// This is an implementation detail of `Partial`, kinda, but deserializers
45    /// might use this for debug assertions, to make sure the state is what
46    /// they think it is.
47    #[inline]
48    pub const fn frame_count(&self) -> usize {
49        self.frames().len()
50    }
51
52    /// Returns the shape of the current frame.
53    ///
54    /// # Panics
55    ///
56    /// Panics if the Partial has been poisoned or built, or if there are no frames
57    /// (which indicates a bug in the Partial implementation).
58    #[inline]
59    pub fn shape(&self) -> &'static Shape {
60        if self.state != PartialState::Active {
61            panic!(
62                "Partial::shape() called on non-active Partial (state: {:?})",
63                self.state
64            );
65        }
66        self.frames()
67            .last()
68            .expect("Partial::shape() called but no frames exist - this is a bug")
69            .allocated
70            .shape()
71    }
72
73    /// Returns the shape of the current frame, or `None` if the Partial is
74    /// inactive (poisoned or built) or has no frames.
75    ///
76    /// This is useful for debugging/logging where you want to inspect the state
77    /// without risking a panic.
78    #[inline]
79    pub fn try_shape(&self) -> Option<&'static Shape> {
80        if self.state != PartialState::Active {
81            return None;
82        }
83        self.frames().last().map(|f| f.allocated.shape())
84    }
85
    /// Returns the TypePlanCore for this Partial.
    ///
    /// This provides access to the arena-based type plan data, useful for
    /// resolving field lookups and accessing precomputed metadata.
    ///
    /// Unlike most accessors on `Partial`, this does not consult the frame
    /// stack or the active state — the root plan is simply borrowed as-is.
    #[inline]
    pub fn type_plan_core(&self) -> &crate::typeplan::TypePlanCore {
        &self.root_plan
    }
94
95    /// Returns the precomputed StructPlan for the current frame, if available.
96    ///
97    /// This provides O(1) or O(log n) field lookup instead of O(n) linear scanning.
98    /// Returns `None` if:
99    /// - The Partial is not active
100    /// - The current frame has no TypePlan (e.g., custom deserialization frames)
101    /// - The current type is not a struct
102    #[inline]
103    pub fn struct_plan(&self) -> Option<&crate::typeplan::StructPlan> {
104        if self.state != PartialState::Active {
105            return None;
106        }
107        let frame = self.frames().last()?;
108        self.root_plan.struct_plan_by_id(frame.type_plan)
109    }
110
111    /// Returns the precomputed EnumPlan for the current frame, if available.
112    ///
113    /// This provides O(1) or O(log n) variant lookup instead of O(n) linear scanning.
114    /// Returns `None` if:
115    /// - The Partial is not active
116    /// - The current type is not an enum
117    #[inline]
118    pub fn enum_plan(&self) -> Option<&crate::typeplan::EnumPlan> {
119        if self.state != PartialState::Active {
120            return None;
121        }
122        let frame = self.frames().last()?;
123        self.root_plan.enum_plan_by_id(frame.type_plan)
124    }
125
126    /// Returns the precomputed field plans for the current frame.
127    ///
128    /// This provides access to precomputed validators and default handling without
129    /// runtime attribute scanning.
130    ///
131    /// Returns `None` if the current type is not a struct or enum variant.
132    #[inline]
133    pub fn field_plans(&self) -> Option<&[crate::typeplan::FieldPlan]> {
134        use crate::typeplan::TypePlanNodeKind;
135        let frame = self.frames().last().unwrap();
136        let node = self.root_plan.node(frame.type_plan);
137        match &node.kind {
138            TypePlanNodeKind::Struct(struct_plan) => {
139                Some(self.root_plan.fields(struct_plan.fields))
140            }
141            TypePlanNodeKind::Enum(enum_plan) => {
142                // For enums, we need the variant index from the tracker
143                if let crate::partial::Tracker::Enum { variant_idx, .. } = &frame.tracker {
144                    self.root_plan
145                        .variants(enum_plan.variants)
146                        .get(*variant_idx)
147                        .map(|v| self.root_plan.fields(v.fields))
148                } else {
149                    None
150                }
151            }
152            _ => None,
153        }
154    }
155
156    /// Returns the precomputed TypePlanNode for the current frame.
157    ///
158    /// This provides access to the precomputed deserialization strategy and
159    /// other metadata computed at Partial allocation time.
160    ///
161    /// Returns `None` if:
162    /// - The Partial is not active
163    /// - There are no frames
164    #[inline]
165    pub fn plan_node(&self) -> Option<&crate::typeplan::TypePlanNode> {
166        if self.state != PartialState::Active {
167            return None;
168        }
169        let frame = self.frames().last()?;
170        Some(self.root_plan.node(frame.type_plan))
171    }
172
173    /// Returns the node ID for the current frame's type plan.
174    ///
175    /// Returns `None` if:
176    /// - The Partial is not active
177    /// - There are no frames
178    #[inline]
179    pub fn plan_node_id(&self) -> Option<crate::typeplan::NodeId> {
180        if self.state != PartialState::Active {
181            return None;
182        }
183        let frame = self.frames().last()?;
184        Some(frame.type_plan)
185    }
186
187    /// Returns the precomputed deserialization strategy for the current frame.
188    ///
189    /// This tells facet-format exactly how to deserialize the current type without
190    /// runtime inspection of Shape/Def/vtable. The strategy is computed once at
191    /// TypePlan build time.
192    ///
193    /// If the current node is a BackRef (recursive type), this automatically
194    /// follows the reference to return the target node's strategy.
195    ///
196    /// Returns `None` if:
197    /// - The Partial is not active
198    /// - There are no frames
199    #[inline]
200    pub fn deser_strategy(&self) -> Option<&DeserStrategy> {
201        let node = self.plan_node()?;
202        // Resolve BackRef if needed - resolve_backref returns the node unchanged if not a BackRef
203        let resolved = self.root_plan.resolve_backref(node);
204        Some(&resolved.strategy)
205    }
206
207    /// Returns the precomputed proxy nodes for the current frame's type.
208    ///
209    /// These contain TypePlan nodes for all proxies (format-agnostic and format-specific)
210    /// on this type, allowing runtime lookup based on format namespace.
211    #[inline]
212    pub fn proxy_nodes(&self) -> Option<&crate::typeplan::ProxyNodes> {
213        let node = self.plan_node()?;
214        let resolved = self.root_plan.resolve_backref(node);
215        Some(&resolved.proxies)
216    }
217
218    /// Returns true if the current frame is building a smart pointer slice (Arc<\[T\]>, Rc<\[T\]>, Box<\[T\]>).
219    ///
220    /// This is used by deserializers to determine if they should deserialize as a list
221    /// rather than recursing into the smart pointer type.
222    #[inline]
223    pub fn is_building_smart_ptr_slice(&self) -> bool {
224        if self.state != PartialState::Active {
225            return false;
226        }
227        self.frames()
228            .last()
229            .is_some_and(|f| matches!(f.tracker, Tracker::SmartPointerSlice { .. }))
230    }
231
232    /// Returns the current path in deferred mode (for debugging/tracing).
233    #[inline]
234    pub fn current_path(&self) -> Option<facet_path::Path> {
235        if self.is_deferred() {
236            Some(self.derive_path())
237        } else {
238            None
239        }
240    }
241
    /// Checks if the current frame should be stored for deferred processing.
    ///
    /// Historically this gated storage on the frame being a re-entrant type with
    /// storable ownership and no SmartPointer parent needing immediate completion;
    /// since all container elements gained stable storage, every frame qualifies
    /// and this unconditionally returns `true`.
    ///
    /// Returns `true` if the frame should be stored, `false` if it should be
    /// validated immediately.
    fn should_store_frame_for_deferred(&self) -> bool {
        // In deferred mode, all frames have stable memory and can be stored.
        // PR #2019 added stable storage for all container elements (ListRope for Vec,
        // pending_entries for Map, pending_inner for Option).
        true
    }
258
259    /// Enables deferred materialization mode with the given Resolution.
260    ///
261    /// When deferred mode is enabled:
262    /// - `end()` stores frames instead of validating them
263    /// - Re-entering a path restores the stored frame with its state intact
264    /// - `finish_deferred()` performs final validation and materialization
265    ///
266    /// This allows deserializers to handle interleaved fields (e.g., TOML dotted
267    /// keys, flattened structs) where nested fields aren't contiguous in the input.
268    ///
269    /// # Use Cases
270    ///
271    /// - TOML dotted keys: `inner.x = 1` followed by `count = 2` then `inner.y = 3`
272    /// - Flattened structs where nested fields appear at the parent level
273    /// - Any format where field order doesn't match struct nesting
274    ///
275    /// # Errors
276    ///
277    /// Returns an error if already in deferred mode.
278    #[inline]
279    pub fn begin_deferred(mut self) -> Result<Self, ReflectError> {
280        // Cannot enable deferred mode if already in deferred mode
281        if self.is_deferred() {
282            return Err(self.err(ReflectErrorKind::InvariantViolation {
283                invariant: "begin_deferred() called but already in deferred mode",
284            }));
285        }
286
287        // Take the stack out of Strict mode and wrap in Deferred mode
288        let FrameMode::Strict { stack } = core::mem::replace(
289            &mut self.mode,
290            FrameMode::Strict { stack: Vec::new() }, // temporary placeholder
291        ) else {
292            unreachable!("just checked we're not in deferred mode");
293        };
294
295        let start_depth = stack.len();
296        self.mode = FrameMode::Deferred {
297            stack,
298            start_depth,
299            stored_frames: BTreeMap::new(),
300        };
301        Ok(self)
302    }
303
304    /// Finishes deferred mode: validates all stored frames and finalizes.
305    ///
306    /// This method:
307    /// 1. Validates that all stored frames are fully initialized
308    /// 2. Processes frames from deepest to shallowest, updating parent ISets
309    /// 3. Validates the root frame
310    ///
311    /// # Errors
312    ///
313    /// Returns an error if any required fields are missing or if the partial is
314    /// not in deferred mode.
315    pub fn finish_deferred(mut self) -> Result<Self, ReflectError> {
316        // Check if we're in deferred mode first, before extracting state
317        if !self.is_deferred() {
318            return Err(self.err(ReflectErrorKind::InvariantViolation {
319                invariant: "finish_deferred() called but deferred mode is not enabled",
320            }));
321        }
322
323        // Extract deferred state, transitioning back to Strict mode
324        let FrameMode::Deferred {
325            stack,
326            start_depth,
327            mut stored_frames,
328            ..
329        } = core::mem::replace(&mut self.mode, FrameMode::Strict { stack: Vec::new() })
330        else {
331            unreachable!("just checked is_deferred()");
332        };
333
334        // Restore the stack to self.mode
335        self.mode = FrameMode::Strict { stack };
336
337        // Sort paths by depth (deepest first) so we process children before parents.
338        // For equal-depth paths, we need stable ordering for list elements:
339        // Index(0) must be processed before Index(1) to maintain insertion order.
340        let mut paths: Vec<_> = stored_frames.keys().cloned().collect();
341        paths.sort_by(|a, b| {
342            // Primary: deeper paths first
343            let depth_cmp = b.len().cmp(&a.len());
344            if depth_cmp != core::cmp::Ordering::Equal {
345                return depth_cmp;
346            }
347            // Secondary: for same-depth paths, compare step by step
348            // This ensures Index(0) comes before Index(1) for the same parent
349            for (step_a, step_b) in a.steps.iter().zip(b.steps.iter()) {
350                let step_cmp = step_a.cmp(step_b);
351                if step_cmp != core::cmp::Ordering::Equal {
352                    return step_cmp;
353                }
354            }
355            core::cmp::Ordering::Equal
356        });
357
358        trace!(
359            "finish_deferred: Processing {} stored frames in order: {:?}",
360            paths.len(),
361            paths
362        );
363
364        // Process each stored frame from deepest to shallowest
365        for path in paths {
366            let mut frame = stored_frames.remove(&path).unwrap();
367
368            trace!(
369                "finish_deferred: Processing frame at {:?}, shape {}, tracker {:?}",
370                path,
371                frame.allocated.shape(),
372                frame.tracker.kind()
373            );
374
375            // Special handling for SmartPointerSlice: convert builder to Arc<[T]> before validation
376            if let Tracker::SmartPointerSlice { vtable, .. } = &frame.tracker {
377                let vtable = *vtable;
378                let current_shape = frame.allocated.shape();
379
380                // Convert the builder to Arc<[T]>
381                let builder_ptr = unsafe { frame.data.assume_init() };
382                let arc_ptr = unsafe { (vtable.convert_fn)(builder_ptr) };
383
384                trace!(
385                    "finish_deferred: Converting SmartPointerSlice builder to {}",
386                    current_shape
387                );
388
389                // Handle different ownership cases
390                match frame.ownership {
391                    FrameOwnership::Field { field_idx } => {
392                        // Arc<[T]> is a field in a struct
393                        // Find the parent frame and write the Arc to the field location
394                        let parent_path = facet_path::Path {
395                            shape: path.shape,
396                            steps: path.steps[..path.steps.len() - 1].to_vec(),
397                        };
398
399                        // Get the parent frame
400                        let parent_frame_opt = if parent_path.steps.is_empty() {
401                            let parent_index = start_depth.saturating_sub(1);
402                            self.frames_mut().get_mut(parent_index)
403                        } else if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
404                            Some(parent_frame)
405                        } else {
406                            let parent_frame_index = start_depth + parent_path.steps.len() - 1;
407                            self.frames_mut().get_mut(parent_frame_index)
408                        };
409
410                        if let Some(parent_frame) = parent_frame_opt {
411                            // Get the field to find its offset
412                            if let Type::User(UserType::Struct(struct_type)) =
413                                parent_frame.allocated.shape().ty
414                            {
415                                let field = &struct_type.fields[field_idx];
416
417                                // Calculate where the Arc should be written (parent.data + field.offset)
418                                let field_location =
419                                    unsafe { parent_frame.data.field_uninit(field.offset) };
420
421                                // Write the Arc to the parent struct's field location
422                                if let Ok(arc_layout) = current_shape.layout.sized_layout() {
423                                    let arc_size = arc_layout.size();
424                                    unsafe {
425                                        core::ptr::copy_nonoverlapping(
426                                            arc_ptr.as_byte_ptr(),
427                                            field_location.as_mut_byte_ptr(),
428                                            arc_size,
429                                        );
430                                    }
431
432                                    // Free the staging allocation from convert_fn
433                                    unsafe {
434                                        ::alloc::alloc::dealloc(
435                                            arc_ptr.as_byte_ptr() as *mut u8,
436                                            arc_layout,
437                                        );
438                                    }
439
440                                    // Update the frame to point to the correct field location and mark as initialized
441                                    frame.data = field_location;
442                                    frame.tracker = Tracker::Scalar;
443                                    frame.is_init = true;
444
445                                    trace!(
446                                        "finish_deferred: SmartPointerSlice converted and written to field {}",
447                                        field_idx
448                                    );
449                                }
450                            }
451                        }
452                    }
453                    FrameOwnership::Owned => {
454                        // Arc<[T]> is the root - write in place
455                        if let Ok(arc_layout) = current_shape.layout.sized_layout() {
456                            let arc_size = arc_layout.size();
457                            // Allocate new memory for the Arc
458                            let new_ptr = facet_core::alloc_for_layout(arc_layout);
459                            unsafe {
460                                core::ptr::copy_nonoverlapping(
461                                    arc_ptr.as_byte_ptr(),
462                                    new_ptr.as_mut_byte_ptr(),
463                                    arc_size,
464                                );
465                            }
466                            // Free the staging allocation
467                            unsafe {
468                                ::alloc::alloc::dealloc(
469                                    arc_ptr.as_byte_ptr() as *mut u8,
470                                    arc_layout,
471                                );
472                            }
473                            frame.data = new_ptr;
474                            frame.tracker = Tracker::Scalar;
475                            frame.is_init = true;
476                        }
477                    }
478                    _ => {}
479                }
480            }
481
482            // Fill in defaults for unset fields that have defaults
483            if let Err(e) = frame.fill_defaults() {
484                // Before cleanup, clear the parent's iset bit for the frame that failed.
485                // This prevents the parent from trying to drop this field when Partial is dropped.
486                Self::clear_parent_iset_for_path(
487                    &path,
488                    start_depth,
489                    self.frames_mut(),
490                    &mut stored_frames,
491                );
492                frame.deinit();
493                frame.dealloc();
494                // Clean up remaining stored frames safely (deepest first, clearing parent isets)
495                Self::cleanup_stored_frames_on_error(stored_frames, start_depth, self.frames_mut());
496                return Err(self.err(e));
497            }
498
499            // Validate the frame is fully initialized
500            if let Err(e) = frame.require_full_initialization() {
501                // Before cleanup, clear the parent's iset bit for the frame that failed.
502                // This prevents the parent from trying to drop this field when Partial is dropped.
503                Self::clear_parent_iset_for_path(
504                    &path,
505                    start_depth,
506                    self.frames_mut(),
507                    &mut stored_frames,
508                );
509                frame.deinit();
510                frame.dealloc();
511                // Clean up remaining stored frames safely (deepest first, clearing parent isets)
512                Self::cleanup_stored_frames_on_error(stored_frames, start_depth, self.frames_mut());
513                return Err(self.err(e));
514            }
515
516            // Update parent's ISet to mark this field as initialized.
517            // The parent could be:
518            // 1. On the frames stack (if path.steps.len() == 1, parent is at start_depth - 1)
519            // 2. On the frames stack (if parent was pushed but never ended)
520            // 3. In stored_frames (if parent was ended during deferred mode)
521            if let Some(last_step) = path.steps.last() {
522                // Construct parent path (same shape, all steps except the last one)
523                let parent_path = facet_path::Path {
524                    shape: path.shape,
525                    steps: path.steps[..path.steps.len() - 1].to_vec(),
526                };
527
528                // Special handling for Option inner values: when path ends with OptionSome,
529                // the parent is an Option frame and we need to complete the Option by
530                // writing the inner value into the Option's memory.
531                if matches!(last_step, PathStep::OptionSome) {
532                    // Find the Option frame (parent)
533                    let option_frame = if parent_path.steps.is_empty() {
534                        let parent_index = start_depth.saturating_sub(1);
535                        self.frames_mut().get_mut(parent_index)
536                    } else if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
537                        Some(parent_frame)
538                    } else {
539                        let parent_frame_index = start_depth + parent_path.steps.len() - 1;
540                        self.frames_mut().get_mut(parent_frame_index)
541                    };
542
543                    if let Some(option_frame) = option_frame {
544                        // The frame contains the inner value - write it into the Option's memory
545                        Self::complete_option_frame(option_frame, frame);
546                        // Frame data has been transferred to Option - don't drop it
547                        continue;
548                    }
549                }
550
551                // Special handling for SmartPointer inner values: when path ends with Deref,
552                // the parent is a SmartPointer frame and we need to complete it by
553                // creating the SmartPointer from the inner value.
554                if matches!(last_step, PathStep::Deref) {
555                    // Find the SmartPointer frame (parent)
556                    let smart_ptr_frame = if parent_path.steps.is_empty() {
557                        let parent_index = start_depth.saturating_sub(1);
558                        self.frames_mut().get_mut(parent_index)
559                    } else if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
560                        Some(parent_frame)
561                    } else {
562                        let parent_frame_index = start_depth + parent_path.steps.len() - 1;
563                        self.frames_mut().get_mut(parent_frame_index)
564                    };
565
566                    if let Some(smart_ptr_frame) = smart_ptr_frame {
567                        // The frame contains the inner value - create the SmartPointer from it
568                        Self::complete_smart_pointer_frame(smart_ptr_frame, frame);
569                        // Frame data has been transferred to SmartPointer - don't drop it
570                        continue;
571                    }
572                }
573
574                // Special handling for Inner values: when path ends with Inner,
575                // the parent is a transparent wrapper (NonZero, ByteString, etc.) and we need
576                // to convert the inner value to the parent type using try_from.
577                if matches!(last_step, PathStep::Inner) {
578                    // Find the parent frame (Inner wrapper)
579                    let parent_frame = if parent_path.steps.is_empty() {
580                        let parent_index = start_depth.saturating_sub(1);
581                        self.frames_mut().get_mut(parent_index)
582                    } else if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
583                        Some(parent_frame)
584                    } else {
585                        let parent_frame_index = start_depth + parent_path.steps.len() - 1;
586                        self.frames_mut().get_mut(parent_frame_index)
587                    };
588
589                    if let Some(inner_wrapper_frame) = parent_frame {
590                        // The frame contains the inner value - convert to parent type using try_from
591                        Self::complete_inner_frame(inner_wrapper_frame, frame);
592                        // Frame data has been transferred - don't drop it
593                        continue;
594                    }
595                }
596
597                // Special handling for Proxy values: when path ends with Proxy,
598                // the parent is the target type (e.g., Inner) and we need to convert
599                // the proxy value (e.g., InnerProxy) using the proxy's convert_in.
600                if matches!(last_step, PathStep::Proxy) {
601                    // Find the parent frame (the proxy target)
602                    let parent_frame = if parent_path.steps.is_empty() {
603                        let parent_index = start_depth.saturating_sub(1);
604                        self.frames_mut().get_mut(parent_index)
605                    } else if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
606                        Some(parent_frame)
607                    } else {
608                        let parent_frame_index = start_depth + parent_path.steps.len() - 1;
609                        self.frames_mut().get_mut(parent_frame_index)
610                    };
611
612                    if let Some(target_frame) = parent_frame {
613                        Self::complete_proxy_frame(target_frame, frame);
614                        continue;
615                    }
616                }
617
618                // Special handling for List/SmartPointerSlice element values: when path ends with Index,
619                // the parent is a List or SmartPointerSlice frame and we need to push the element into it.
620                // RopeSlot frames are already stored in the rope and will be drained during
621                // validation - pushing them here would duplicate the elements.
622                if matches!(last_step, PathStep::Index(_))
623                    && !matches!(frame.ownership, FrameOwnership::RopeSlot)
624                {
625                    // Find the parent frame (List or SmartPointerSlice)
626                    let parent_frame = if parent_path.steps.is_empty() {
627                        let parent_index = start_depth.saturating_sub(1);
628                        self.frames_mut().get_mut(parent_index)
629                    } else if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
630                        Some(parent_frame)
631                    } else {
632                        let parent_frame_index = start_depth + parent_path.steps.len() - 1;
633                        self.frames_mut().get_mut(parent_frame_index)
634                    };
635
636                    if let Some(parent_frame) = parent_frame {
637                        // Check if parent is a SmartPointerSlice (e.g., Arc<[T]>)
638                        if matches!(parent_frame.tracker, Tracker::SmartPointerSlice { .. }) {
639                            Self::complete_smart_pointer_slice_item_frame(parent_frame, frame);
640                            // Frame data has been transferred to slice builder - don't drop it
641                            continue;
642                        }
643                        // Otherwise try List handling
644                        Self::complete_list_item_frame(parent_frame, frame);
645                        // Frame data has been transferred to List - don't drop it
646                        continue;
647                    }
648                }
649
650                // Special handling for Map key values: when path ends with MapKey,
651                // the parent is a Map frame and we need to transition it to PushingValue state.
652                if matches!(last_step, PathStep::MapKey(_)) {
653                    // Find the Map frame (parent)
654                    let map_frame = if parent_path.steps.is_empty() {
655                        let parent_index = start_depth.saturating_sub(1);
656                        self.frames_mut().get_mut(parent_index)
657                    } else if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
658                        Some(parent_frame)
659                    } else {
660                        let parent_frame_index = start_depth + parent_path.steps.len() - 1;
661                        self.frames_mut().get_mut(parent_frame_index)
662                    };
663
664                    if let Some(map_frame) = map_frame {
665                        // Transition the Map from PushingKey to PushingValue state
666                        Self::complete_map_key_frame(map_frame, frame);
667                        continue;
668                    }
669                }
670
671                // Special handling for Map value values: when path ends with MapValue,
672                // the parent is a Map frame and we need to add the entry to pending_entries.
673                if matches!(last_step, PathStep::MapValue(_)) {
674                    // Find the Map frame (parent)
675                    let map_frame = if parent_path.steps.is_empty() {
676                        let parent_index = start_depth.saturating_sub(1);
677                        self.frames_mut().get_mut(parent_index)
678                    } else if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
679                        Some(parent_frame)
680                    } else {
681                        let parent_frame_index = start_depth + parent_path.steps.len() - 1;
682                        self.frames_mut().get_mut(parent_frame_index)
683                    };
684
685                    if let Some(map_frame) = map_frame {
686                        // Add the key-value pair to pending_entries
687                        Self::complete_map_value_frame(map_frame, frame);
688                        continue;
689                    }
690                }
691
692                // Only mark field initialized if the step is actually a Field
693                if let PathStep::Field(field_idx) = last_step {
694                    let field_idx = *field_idx as usize;
695                    if parent_path.steps.is_empty() {
696                        // Parent is the frame that was current when deferred mode started.
697                        // It's at index (start_depth - 1) because deferred mode stores frames
698                        // relative to the position at start_depth.
699                        let parent_index = start_depth.saturating_sub(1);
700                        if let Some(root_frame) = self.frames_mut().get_mut(parent_index) {
701                            Self::mark_field_initialized_by_index(root_frame, field_idx);
702                        }
703                    } else {
704                        // Try stored_frames first
705                        if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
706                            Self::mark_field_initialized_by_index(parent_frame, field_idx);
707                        } else {
708                            // Parent might still be on the frames stack (never ended).
709                            // The frame at index (start_depth + parent_path.steps.len() - 1) should be the parent.
710                            let parent_frame_index = start_depth + parent_path.steps.len() - 1;
711                            if let Some(parent_frame) =
712                                self.frames_mut().get_mut(parent_frame_index)
713                            {
714                                Self::mark_field_initialized_by_index(parent_frame, field_idx);
715                            }
716                        }
717                    }
718                }
719            }
720
721            // Frame is validated and parent is updated - dealloc if needed
722            frame.dealloc();
723        }
724
725        // Invariant check: we must have at least one frame after finish_deferred
726        if self.frames().is_empty() {
727            // No need to poison - returning Err consumes self, Drop will handle cleanup
728            return Err(self.err(ReflectErrorKind::InvariantViolation {
729                invariant: "finish_deferred() left Partial with no frames",
730            }));
731        }
732
733        // Fill defaults and validate the root frame is fully initialized
734        if let Some(frame) = self.frames_mut().last_mut() {
735            // Fill defaults - this can fail if a field has #[facet(default)] but no default impl
736            if let Err(e) = frame.fill_defaults() {
737                return Err(self.err(e));
738            }
739            // Root validation failed. At this point, all stored frames have been
740            // processed and their parent isets updated.
741            // No need to poison - returning Err consumes self, Drop will handle cleanup
742            if let Err(e) = frame.require_full_initialization() {
743                return Err(self.err(e));
744            }
745        }
746
747        Ok(self)
748    }
749
750    /// Mark a field as initialized in a frame's tracker by index
751    fn mark_field_initialized_by_index(frame: &mut Frame, idx: usize) {
752        crate::trace!(
753            "mark_field_initialized_by_index: idx={}, frame shape={}, tracker={:?}",
754            idx,
755            frame.allocated.shape(),
756            frame.tracker.kind()
757        );
758
759        // If the tracker is Scalar but this is a struct type, upgrade to Struct tracker.
760        // This can happen if the frame was deinit'd (e.g., by a failed set_default)
761        // which resets the tracker to Scalar.
762        if matches!(frame.tracker, Tracker::Scalar)
763            && let Type::User(UserType::Struct(struct_type)) = frame.allocated.shape().ty
764        {
765            frame.tracker = Tracker::Struct {
766                iset: ISet::new(struct_type.fields.len()),
767                current_child: None,
768            };
769        }
770
771        match &mut frame.tracker {
772            Tracker::Struct { iset, .. } => {
773                crate::trace!("mark_field_initialized_by_index: setting iset for struct");
774                iset.set(idx);
775            }
776            Tracker::Enum { data, .. } => {
777                crate::trace!(
778                    "mark_field_initialized_by_index: setting data for enum, before={:?}",
779                    data
780                );
781                data.set(idx);
782                crate::trace!(
783                    "mark_field_initialized_by_index: setting data for enum, after={:?}",
784                    data
785                );
786            }
787            Tracker::Array { iset, .. } => {
788                crate::trace!("mark_field_initialized_by_index: setting iset for array");
789                iset.set(idx);
790            }
791            _ => {
792                crate::trace!(
793                    "mark_field_initialized_by_index: no match for tracker {:?}",
794                    frame.tracker.kind()
795                );
796            }
797        }
798    }
799
800    /// Clear a parent frame's iset bit for a given path.
801    /// The parent could be on the stack or in stored_frames.
802    fn clear_parent_iset_for_path(
803        path: &Path,
804        start_depth: usize,
805        stack: &mut [Frame],
806        stored_frames: &mut ::alloc::collections::BTreeMap<Path, Frame>,
807    ) {
808        if let Some(&PathStep::Field(field_idx)) = path.steps.last() {
809            let field_idx = field_idx as usize;
810            let parent_path = Path {
811                shape: path.shape,
812                steps: path.steps[..path.steps.len() - 1].to_vec(),
813            };
814
815            // Try stored_frames first
816            if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
817                Self::unset_field_in_tracker(&mut parent_frame.tracker, field_idx);
818            } else if parent_path.steps.is_empty() {
819                // Parent is on the stack at (start_depth - 1)
820                let parent_index = start_depth.saturating_sub(1);
821                if let Some(parent_frame) = stack.get_mut(parent_index) {
822                    Self::unset_field_in_tracker(&mut parent_frame.tracker, field_idx);
823                }
824            } else {
825                // Parent is on the stack at (start_depth + parent_path.steps.len() - 1)
826                let parent_index = start_depth + parent_path.steps.len() - 1;
827                if let Some(parent_frame) = stack.get_mut(parent_index) {
828                    Self::unset_field_in_tracker(&mut parent_frame.tracker, field_idx);
829                }
830            }
831        }
832    }
833
834    /// Helper to unset a field index in a tracker's iset
835    fn unset_field_in_tracker(tracker: &mut Tracker, field_idx: usize) {
836        match tracker {
837            Tracker::Struct { iset, .. } => {
838                iset.unset(field_idx);
839            }
840            Tracker::Enum { data, .. } => {
841                data.unset(field_idx);
842            }
843            Tracker::Array { iset, .. } => {
844                iset.unset(field_idx);
845            }
846            _ => {}
847        }
848    }
849
850    /// Safely clean up stored frames on error in finish_deferred.
851    ///
852    /// This mirrors the cleanup logic in Drop: process frames deepest-first and
853    /// clear parent's iset bits before deiniting children to prevent double-drops.
854    fn cleanup_stored_frames_on_error(
855        mut stored_frames: ::alloc::collections::BTreeMap<Path, Frame>,
856        start_depth: usize,
857        stack: &mut [Frame],
858    ) {
859        // Sort by depth (deepest first) so children are processed before parents
860        let mut paths: Vec<_> = stored_frames.keys().cloned().collect();
861        paths.sort_by_key(|p| core::cmp::Reverse(p.steps.len()));
862
863        for path in paths {
864            if let Some(mut frame) = stored_frames.remove(&path) {
865                // Before dropping this frame, clear the parent's iset bit so the
866                // parent won't try to drop this field again.
867                Self::clear_parent_iset_for_path(&path, start_depth, stack, &mut stored_frames);
868                frame.deinit();
869                frame.dealloc();
870            }
871        }
872    }
873
    /// Complete an Option frame by writing the inner value and marking it initialized.
    /// Used in finish_deferred when processing a stored frame at a path ending with "Some".
    ///
    /// Ownership of `inner_frame`'s value is moved into `option_frame` via the
    /// Option vtable's `init_some`; the inner frame's backing allocation is then
    /// freed (deallocated without dropping, since the value was moved out).
    /// If `option_frame`'s shape is not `Def::Option`, this does nothing.
    fn complete_option_frame(option_frame: &mut Frame, inner_frame: Frame) {
        if let Def::Option(option_def) = option_frame.allocated.shape().def {
            // Use the Option vtable to initialize Some(inner_value)
            let init_some_fn = option_def.vtable.init_some;

            // The inner frame contains the inner value
            // SAFETY: the frame was stored only after its value was fully
            // initialized, so asserting init here is sound.
            let inner_value_ptr = unsafe { inner_frame.data.assume_init() };

            // Initialize the Option as Some(inner_value)
            // SAFETY: init_some_fn moves the pointee out of inner_value_ptr into
            // the Option's storage; the source bytes must not be dropped again
            // afterwards (they are only deallocated below).
            unsafe {
                init_some_fn(option_frame.data, inner_value_ptr);
            }

            // Deallocate the inner value's memory since init_some_fn moved it.
            // Only applies when this frame owns its allocation and the type has
            // a non-zero sized layout.
            if let FrameOwnership::Owned = inner_frame.ownership
                && let Ok(layout) = inner_frame.allocated.shape().layout.sized_layout()
                && layout.size() > 0
            {
                unsafe {
                    ::alloc::alloc::dealloc(inner_frame.data.as_mut_byte_ptr(), layout);
                }
            }

            // Mark the Option as initialized
            option_frame.tracker = Tracker::Option {
                building_inner: false,
                pending_inner: None,
            };
            option_frame.is_init = true;
        }
    }
907
    /// Complete a SmartPointer frame (`Box<T>`, `Arc<T>`, `Rc<T>`, …) from a
    /// fully-initialized inner value (for deferred finalization).
    ///
    /// Two supported paths:
    /// - Sized pointee: the pointer vtable's `new_into_fn` moves the inner value
    ///   into a newly constructed smart pointer.
    /// - Unsized `str` pointee sourced from a `String`: converted explicitly for
    ///   the known pointer kinds `Box`, `Arc` and `Rc`.
    ///
    /// Any other combination (no `new_into_fn`, unknown pointer kind, pointee
    /// mismatch) silently leaves `smart_ptr_frame` uninitialized.
    fn complete_smart_pointer_frame(smart_ptr_frame: &mut Frame, inner_frame: Frame) {
        if let Def::Pointer(smart_ptr_def) = smart_ptr_frame.allocated.shape().def {
            // Use the SmartPointer vtable to create the smart pointer from the inner value
            if let Some(new_into_fn) = smart_ptr_def.vtable.new_into_fn {
                // Sized pointee case: use new_into_fn
                // SAFETY: the stored frame's value was fully initialized before it
                // was deferred; this assume_init only asserts that fact.
                let _ = unsafe { inner_frame.data.assume_init() };

                // Create the SmartPointer with the inner value
                // SAFETY: new_into_fn moves the pointee out of the inner frame's
                // buffer into the smart pointer; the source bytes must not be
                // dropped afterwards (they are only deallocated below).
                unsafe {
                    new_into_fn(
                        smart_ptr_frame.data,
                        PtrMut::new(inner_frame.data.as_mut_byte_ptr()),
                    );
                }

                // Deallocate the inner value's memory since new_into_fn moved it
                if let FrameOwnership::Owned = inner_frame.ownership
                    && let Ok(layout) = inner_frame.allocated.shape().layout.sized_layout()
                    && layout.size() > 0
                {
                    unsafe {
                        ::alloc::alloc::dealloc(inner_frame.data.as_mut_byte_ptr(), layout);
                    }
                }

                // Mark the SmartPointer as initialized
                smart_ptr_frame.tracker = Tracker::SmartPointer {
                    building_inner: false,
                    pending_inner: None,
                };
                smart_ptr_frame.is_init = true;
            } else if let Some(pointee) = smart_ptr_def.pointee()
                && pointee.is_shape(str::SHAPE)
                && inner_frame.allocated.shape().is_shape(String::SHAPE)
            {
                // Unsized pointee case: String -> Arc<str>/Box<str>/Rc<str> conversion
                use ::alloc::{rc::Rc, string::String, sync::Arc};
                use facet_core::KnownPointer;

                // Without a known pointer kind we cannot pick a conversion.
                let Some(known) = smart_ptr_def.known else {
                    return;
                };

                // Read the String value from the inner frame
                let string_ptr = inner_frame.data.as_mut_byte_ptr() as *mut String;
                // SAFETY: the inner frame holds an initialized String (checked via
                // is_shape above); ptr::read moves it out, so the buffer is only
                // deallocated (never dropped) below.
                let string_value = unsafe { core::ptr::read(string_ptr) };

                // Convert to the appropriate smart pointer type
                match known {
                    KnownPointer::Box => {
                        let boxed: ::alloc::boxed::Box<str> = string_value.into_boxed_str();
                        // SAFETY: the target frame's storage has the layout of a
                        // Box<str> (its shape is this smart pointer type).
                        unsafe {
                            core::ptr::write(
                                smart_ptr_frame.data.as_mut_byte_ptr()
                                    as *mut ::alloc::boxed::Box<str>,
                                boxed,
                            );
                        }
                    }
                    KnownPointer::Arc => {
                        let arc: Arc<str> = Arc::from(string_value.into_boxed_str());
                        // SAFETY: as above, target storage is an Arc<str> slot.
                        unsafe {
                            core::ptr::write(
                                smart_ptr_frame.data.as_mut_byte_ptr() as *mut Arc<str>,
                                arc,
                            );
                        }
                    }
                    KnownPointer::Rc => {
                        let rc: Rc<str> = Rc::from(string_value.into_boxed_str());
                        // SAFETY: as above, target storage is an Rc<str> slot.
                        unsafe {
                            core::ptr::write(
                                smart_ptr_frame.data.as_mut_byte_ptr() as *mut Rc<str>,
                                rc,
                            );
                        }
                    }
                    // NOTE(review): string_value was already moved out above; on
                    // this early return the inner buffer is not freed and the
                    // target stays uninitialized — confirm other pointer kinds are
                    // unreachable here.
                    _ => return,
                }

                // Deallocate the String's memory (we moved the data out via ptr::read)
                if let FrameOwnership::Owned = inner_frame.ownership
                    && let Ok(layout) = inner_frame.allocated.shape().layout.sized_layout()
                    && layout.size() > 0
                {
                    unsafe {
                        ::alloc::alloc::dealloc(inner_frame.data.as_mut_byte_ptr(), layout);
                    }
                }

                // Mark the SmartPointer as initialized
                smart_ptr_frame.tracker = Tracker::SmartPointer {
                    building_inner: false,
                    pending_inner: None,
                };
                smart_ptr_frame.is_init = true;
            }
        }
    }
1007
    /// Complete an Inner frame by converting the inner value to the parent type using try_from
    /// (for deferred finalization)
    ///
    /// Looks up the wrapper shape's `try_from` through either the Direct or the
    /// Indirect erased vtable. On `Converted`, the wrapper is marked initialized
    /// and the inner value's backing allocation is freed; when no `try_from`
    /// exists or the conversion fails, this silently returns and the wrapper
    /// stays uninitialized.
    fn complete_inner_frame(inner_wrapper_frame: &mut Frame, inner_frame: Frame) {
        let wrapper_shape = inner_wrapper_frame.allocated.shape();
        let inner_ptr = PtrConst::new(inner_frame.data.as_byte_ptr());
        let inner_shape = inner_frame.allocated.shape();

        // Handle Direct and Indirect vtables - both return TryFromOutcome
        let result = match wrapper_shape.vtable {
            facet_core::VTableErased::Direct(vt) => {
                if let Some(try_from_fn) = vt.try_from {
                    // SAFETY: inner_ptr points at a fully-initialized value of
                    // inner_shape; the destination is the wrapper frame's storage,
                    // which try_from_fn is responsible for writing.
                    unsafe {
                        try_from_fn(
                            inner_wrapper_frame.data.as_mut_byte_ptr() as *mut (),
                            inner_shape,
                            inner_ptr,
                        )
                    }
                } else {
                    // No conversion available: leave the wrapper untouched.
                    return;
                }
            }
            facet_core::VTableErased::Indirect(vt) => {
                if let Some(try_from_fn) = vt.try_from {
                    let ox_uninit =
                        facet_core::OxPtrUninit::new(inner_wrapper_frame.data, wrapper_shape);
                    // SAFETY: same contract as the Direct case, expressed through
                    // the OxPtrUninit wrapper.
                    unsafe { try_from_fn(ox_uninit, inner_shape, inner_ptr) }
                } else {
                    return;
                }
            }
        };

        match result {
            TryFromOutcome::Converted => {
                crate::trace!(
                    "complete_inner_frame: converted {} to {}",
                    inner_shape,
                    wrapper_shape
                );
            }
            TryFromOutcome::Unsupported | TryFromOutcome::Failed(_) => {
                // NOTE(review): on failure the inner value is neither dropped nor
                // deallocated here — presumably frame cleanup handles it; confirm.
                crate::trace!(
                    "complete_inner_frame: conversion failed from {} to {}",
                    inner_shape,
                    wrapper_shape
                );
                return;
            }
        }

        // Deallocate the inner value's memory (try_from consumed it)
        if let FrameOwnership::Owned = inner_frame.ownership
            && let Ok(layout) = inner_frame.allocated.shape().layout.sized_layout()
            && layout.size() > 0
        {
            // SAFETY: the frame owned an allocation of this shape's layout, and
            // try_from moved the value out, so freeing the raw bytes cannot
            // double-drop.
            unsafe {
                ::alloc::alloc::dealloc(inner_frame.data.as_mut_byte_ptr(), layout);
            }
        }

        // Mark the wrapper as initialized
        inner_wrapper_frame.tracker = Tracker::Scalar;
        inner_wrapper_frame.is_init = true;
    }
1073
    /// Complete a proxy conversion during deferred finalization.
    ///
    /// This handles proxy types (e.g., `#[facet(proxy = InnerProxy)]`) that were
    /// deferred during flatten deserialization. The proxy frame's children (e.g.,
    /// `Vec<f64>` fields) have already been materialized (ropes drained), so it's
    /// now safe to run the conversion.
    ///
    /// On success the target is marked initialized and the proxy frame's backing
    /// allocation is freed. If the frame carries no `shape_level_proxy`, or the
    /// conversion fails or returns an unexpected pointer, this returns silently
    /// and the target stays uninitialized.
    fn complete_proxy_frame(target_frame: &mut Frame, proxy_frame: Frame) {
        // Get the convert_in function from the proxy stored on the frame
        let Some(proxy_def) = proxy_frame.shape_level_proxy else {
            crate::trace!(
                "complete_proxy_frame: no shape_level_proxy on frame {}",
                proxy_frame.allocated.shape()
            );
            return;
        };
        let convert_in = proxy_def.convert_in;

        // Underscore-prefixed so they're unused when the tracing feature is off.
        let _proxy_shape = proxy_frame.allocated.shape();
        let _target_shape = target_frame.allocated.shape();

        crate::trace!(
            "complete_proxy_frame: converting {} to {}",
            _proxy_shape,
            _target_shape
        );

        // SAFETY: the proxy frame's value was fully initialized before being
        // deferred; convert_in consumes it (via ptr::read per the dealloc note
        // below) and writes the converted value into the target's storage.
        unsafe {
            let inner_value_ptr = proxy_frame.data.assume_init().as_const();
            let res = (convert_in)(inner_value_ptr, target_frame.data);

            match res {
                Ok(rptr) => {
                    // Sanity check: convert_in must have written in place.
                    if rptr.as_uninit() != target_frame.data {
                        crate::trace!(
                            "complete_proxy_frame: convert_in returned unexpected pointer"
                        );
                        return;
                    }
                }
                Err(_message) => {
                    crate::trace!("complete_proxy_frame: conversion failed: {}", _message);
                    return;
                }
            }
        }

        // Deallocate the proxy frame's memory (convert_in consumed it via ptr::read)
        if let FrameOwnership::Owned = proxy_frame.ownership
            && let Ok(layout) = proxy_frame.allocated.shape().layout.sized_layout()
            && layout.size() > 0
        {
            unsafe {
                ::alloc::alloc::dealloc(proxy_frame.data.as_mut_byte_ptr(), layout);
            }
        }

        // Mark the target as initialized
        target_frame.is_init = true;
    }
1133
    /// Complete a List frame by pushing an element into it (for deferred finalization)
    ///
    /// Requires the list shape to expose a `push` vtable entry; otherwise this is
    /// a silent no-op. Ownership of the element value moves into the list, and the
    /// element frame's backing allocation is freed afterwards.
    fn complete_list_item_frame(list_frame: &mut Frame, element_frame: Frame) {
        if let Def::List(list_def) = list_frame.allocated.shape().def
            && let Some(push_fn) = list_def.push()
        {
            // The element frame contains the element value
            let element_ptr = PtrMut::new(element_frame.data.as_mut_byte_ptr());

            // Use push to add element to the list
            // SAFETY: element_ptr points at a fully-initialized element value;
            // push_fn moves it into the list, so the element buffer is only
            // deallocated (never dropped) below.
            unsafe {
                push_fn(PtrMut::new(list_frame.data.as_mut_byte_ptr()), element_ptr);
            }

            crate::trace!(
                "complete_list_item_frame: pushed element into {}",
                list_frame.allocated.shape()
            );

            // Deallocate the element's memory since push moved it
            if let FrameOwnership::Owned = element_frame.ownership
                && let Ok(layout) = element_frame.allocated.shape().layout.sized_layout()
                && layout.size() > 0
            {
                unsafe {
                    ::alloc::alloc::dealloc(element_frame.data.as_mut_byte_ptr(), layout);
                }
            }
        }
    }
1163
    /// Complete a SmartPointerSlice element frame by pushing the element into the slice builder
    /// (for deferred finalization)
    ///
    /// Returns `true` if the element was handed to the builder (the slice frame's
    /// tracker really was `SmartPointerSlice`), `false` otherwise.
    fn complete_smart_pointer_slice_item_frame(
        slice_frame: &mut Frame,
        element_frame: Frame,
    ) -> bool {
        if let Tracker::SmartPointerSlice { vtable, .. } = &slice_frame.tracker {
            let vtable = *vtable;
            // The slice frame's data pointer IS the builder pointer
            let builder_ptr = slice_frame.data;

            // Push the element into the builder
            // SAFETY: the element frame holds a fully-initialized element;
            // push_fn moves it into the builder, so the element buffer is only
            // deallocated (never dropped) below.
            unsafe {
                (vtable.push_fn)(
                    PtrMut::new(builder_ptr.as_mut_byte_ptr()),
                    PtrMut::new(element_frame.data.as_mut_byte_ptr()),
                );
            }

            crate::trace!(
                "complete_smart_pointer_slice_item_frame: pushed element into builder for {}",
                slice_frame.allocated.shape()
            );

            // Deallocate the element's memory since push moved it
            if let FrameOwnership::Owned = element_frame.ownership
                && let Ok(layout) = element_frame.allocated.shape().layout.sized_layout()
                && layout.size() > 0
            {
                unsafe {
                    ::alloc::alloc::dealloc(element_frame.data.as_mut_byte_ptr(), layout);
                }
            }
            return true;
        }
        false
    }
1201
    /// Complete a Map key frame by transitioning the Map from PushingKey to PushingValue state
    /// (for deferred finalization)
    ///
    /// The key bytes stay where `key_ptr` points (the Map keeps ownership); only
    /// the key frame's own allocation is released here. No-op unless the map
    /// tracker is currently in the `PushingKey` state.
    fn complete_map_key_frame(map_frame: &mut Frame, key_frame: Frame) {
        if let Tracker::Map { insert_state, .. } = &mut map_frame.tracker
            && let MapInsertState::PushingKey { key_ptr, .. } = insert_state
        {
            // Transition to PushingValue state, keeping the key pointer.
            // key_frame_stored = false because the key frame is being finalized here,
            // so after this the Map owns the key buffer.
            *insert_state = MapInsertState::PushingValue {
                key_ptr: *key_ptr,
                value_ptr: None,
                value_initialized: false,
                value_frame_on_stack: false,
                key_frame_stored: false,
            };

            crate::trace!(
                "complete_map_key_frame: transitioned {} to PushingValue",
                map_frame.allocated.shape()
            );

            // Deallocate the key frame's memory (the key data lives at key_ptr which Map owns)
            if let FrameOwnership::Owned = key_frame.ownership
                && let Ok(layout) = key_frame.allocated.shape().layout.sized_layout()
                && layout.size() > 0
            {
                // SAFETY: the frame owned an allocation of this layout; the key
                // value itself lives behind key_ptr, so no drop happens here.
                unsafe {
                    ::alloc::alloc::dealloc(key_frame.data.as_mut_byte_ptr(), layout);
                }
            }
        }
    }
1235
    /// Complete a Map value frame by adding the key-value pair to pending_entries
    /// (for deferred finalization)
    ///
    /// Requires the map tracker to be in `PushingValue` with a value pointer set;
    /// the (key_ptr, value_ptr) pair is queued in `pending_entries`, the insert
    /// state returns to `Idle`, and the value frame's own allocation is released
    /// (the value bytes live behind `value_ptr`, owned by the Map).
    fn complete_map_value_frame(map_frame: &mut Frame, value_frame: Frame) {
        if let Tracker::Map {
            insert_state,
            pending_entries,
            ..
        } = &mut map_frame.tracker
            && let MapInsertState::PushingValue {
                key_ptr,
                value_ptr: Some(value_ptr),
                ..
            } = insert_state
        {
            // Add the key-value pair to pending_entries
            pending_entries.push((*key_ptr, *value_ptr));

            crate::trace!(
                "complete_map_value_frame: added entry to pending_entries for {}",
                map_frame.allocated.shape()
            );

            // Reset to idle state
            *insert_state = MapInsertState::Idle;

            // Deallocate the value frame's memory (the value data lives at value_ptr which Map owns)
            if let FrameOwnership::Owned = value_frame.ownership
                && let Ok(layout) = value_frame.allocated.shape().layout.sized_layout()
                && layout.size() > 0
            {
                // SAFETY: the frame owned an allocation of this layout; the value
                // itself lives behind value_ptr, so no drop happens here.
                unsafe {
                    ::alloc::alloc::dealloc(value_frame.data.as_mut_byte_ptr(), layout);
                }
            }
        }
    }
1272
1273    /// Pops the current frame off the stack, indicating we're done initializing the current field
1274    pub fn end(mut self) -> Result<Self, ReflectError> {
1275        // FAST PATH: Handle the common case of ending a simple scalar field in a struct.
1276        // This avoids all the edge-case checks (SmartPointerSlice, deferred mode, custom
1277        // deserialization, etc.) that dominate the slow path.
1278        if self.frames().len() >= 2 && !self.is_deferred() {
1279            let frames = self.frames_mut();
1280            let top_idx = frames.len() - 1;
1281            let parent_idx = top_idx - 1;
1282
1283            // Check if this is a simple scalar field being returned to a struct parent
1284            if let (
1285                Tracker::Scalar,
1286                true, // is_init
1287                FrameOwnership::Field { field_idx },
1288                false, // not using custom deserialization
1289            ) = (
1290                &frames[top_idx].tracker,
1291                frames[top_idx].is_init,
1292                frames[top_idx].ownership,
1293                frames[top_idx].using_custom_deserialization,
1294            ) && let Tracker::Struct {
1295                iset,
1296                current_child,
1297            } = &mut frames[parent_idx].tracker
1298            {
1299                // Fast path: just update parent's iset and pop
1300                iset.set(field_idx);
1301                *current_child = None;
1302                frames.pop();
1303                return Ok(self);
1304            }
1305        }
1306
1307        // SLOW PATH: Handle all the edge cases
1308
1309        // Strategic tracing: show the frame stack state
1310        #[cfg(feature = "tracing")]
1311        {
1312            use ::alloc::string::ToString;
1313            let frames = self.frames();
1314            let stack_desc: Vec<_> = frames
1315                .iter()
1316                .map(|f| ::alloc::format!("{}({:?})", f.allocated.shape(), f.tracker.kind()))
1317                .collect();
1318            let path = if self.is_deferred() {
1319                ::alloc::format!("{:?}", self.derive_path())
1320            } else {
1321                "N/A".to_string()
1322            };
1323            crate::trace!(
1324                "end() SLOW PATH: stack=[{}], deferred={}, path={}",
1325                stack_desc.join(" > "),
1326                self.is_deferred(),
1327                path
1328            );
1329        }
1330
1331        // Special handling for SmartPointerSlice - convert builder to Arc
1332        // Check if the current (top) frame is a SmartPointerSlice that needs conversion
1333        let needs_slice_conversion = {
1334            let frames = self.frames();
1335            if frames.is_empty() {
1336                false
1337            } else {
1338                let top_idx = frames.len() - 1;
1339                matches!(
1340                    frames[top_idx].tracker,
1341                    Tracker::SmartPointerSlice {
1342                        building_item: false,
1343                        ..
1344                    }
1345                )
1346            }
1347        };
1348
1349        if needs_slice_conversion {
1350            // In deferred mode, don't convert immediately - let finish_deferred handle it.
1351            // Set building_item = true and return early (matching non-deferred behavior).
1352            // The next end() call will store the frame.
1353            if self.is_deferred() {
1354                let frames = self.frames_mut();
1355                let top_idx = frames.len() - 1;
1356                if let Tracker::SmartPointerSlice { building_item, .. } =
1357                    &mut frames[top_idx].tracker
1358                {
1359                    *building_item = true;
1360                }
1361                return Ok(self);
1362            } else {
1363                // Get shape info upfront to avoid borrow conflicts
1364                let current_shape = self.frames().last().unwrap().allocated.shape();
1365
1366                let frames = self.frames_mut();
1367                let top_idx = frames.len() - 1;
1368
1369                if let Tracker::SmartPointerSlice { vtable, .. } = &frames[top_idx].tracker {
1370                    // Convert the builder to Arc<[T]>
1371                    let vtable = *vtable;
1372                    let builder_ptr = unsafe { frames[top_idx].data.assume_init() };
1373                    let arc_ptr = unsafe { (vtable.convert_fn)(builder_ptr) };
1374
1375                    match frames[top_idx].ownership {
1376                        FrameOwnership::Field { field_idx } => {
1377                            // Arc<[T]> is a field in a struct
1378                            // The field frame's original data pointer was overwritten with the builder pointer,
1379                            // so we need to reconstruct where the Arc should be written.
1380
1381                            // Get parent frame and field info
1382                            let parent_idx = top_idx - 1;
1383                            let parent_frame = &frames[parent_idx];
1384
1385                            // Get the field to find its offset
1386                            let field = if let Type::User(UserType::Struct(struct_type)) =
1387                                parent_frame.allocated.shape().ty
1388                            {
1389                                &struct_type.fields[field_idx]
1390                            } else {
1391                                return Err(self.err(ReflectErrorKind::InvariantViolation {
1392                                invariant: "SmartPointerSlice field frame parent must be a struct",
1393                            }));
1394                            };
1395
1396                            // Calculate where the Arc should be written (parent.data + field.offset)
1397                            let field_location =
1398                                unsafe { parent_frame.data.field_uninit(field.offset) };
1399
1400                            // Write the Arc to the parent struct's field location
1401                            let arc_layout = match current_shape.layout.sized_layout() {
1402                                Ok(layout) => layout,
1403                                Err(_) => {
1404                                    return Err(self.err(ReflectErrorKind::Unsized {
1405                                    shape: current_shape,
1406                                    operation: "SmartPointerSlice conversion requires sized Arc",
1407                                }));
1408                                }
1409                            };
1410                            let arc_size = arc_layout.size();
1411                            unsafe {
1412                                core::ptr::copy_nonoverlapping(
1413                                    arc_ptr.as_byte_ptr(),
1414                                    field_location.as_mut_byte_ptr(),
1415                                    arc_size,
1416                                );
1417                            }
1418
1419                            // Free the staging allocation from convert_fn (the Arc was copied to field_location)
1420                            unsafe {
1421                                ::alloc::alloc::dealloc(
1422                                    arc_ptr.as_byte_ptr() as *mut u8,
1423                                    arc_layout,
1424                                );
1425                            }
1426
1427                            // Update the frame to point to the correct field location and mark as initialized
1428                            frames[top_idx].data = field_location;
1429                            frames[top_idx].tracker = Tracker::Scalar;
1430                            frames[top_idx].is_init = true;
1431
1432                            // Return WITHOUT popping - the field frame will be popped by the next end() call
1433                            return Ok(self);
1434                        }
1435                        FrameOwnership::Owned => {
1436                            // Arc<[T]> is the root type or owned independently
1437                            // The frame already has the allocation, we just need to update it with the Arc
1438
1439                            // The frame's data pointer is currently the builder, but we allocated
1440                            // the Arc memory in the convert_fn. Update to point to the Arc.
1441                            frames[top_idx].data = PtrUninit::new(arc_ptr.as_byte_ptr() as *mut u8);
1442                            frames[top_idx].tracker = Tracker::Scalar;
1443                            frames[top_idx].is_init = true;
1444                            // Keep Owned ownership so Guard will properly deallocate
1445
1446                            // Return WITHOUT popping - the frame stays and will be built/dropped normally
1447                            return Ok(self);
1448                        }
1449                        FrameOwnership::TrackedBuffer
1450                        | FrameOwnership::BorrowedInPlace
1451                        | FrameOwnership::External
1452                        | FrameOwnership::RopeSlot => {
1453                            return Err(self.err(ReflectErrorKind::InvariantViolation {
1454                            invariant: "SmartPointerSlice cannot have TrackedBuffer/BorrowedInPlace/External/RopeSlot ownership after conversion",
1455                        }));
1456                        }
1457                    }
1458                }
1459            }
1460        }
1461
1462        if self.frames().len() <= 1 {
1463            // Never pop the last/root frame - this indicates a broken state machine
1464            // No need to poison - returning Err consumes self, Drop will handle cleanup
1465            return Err(self.err(ReflectErrorKind::InvariantViolation {
1466                invariant: "Partial::end() called with only one frame on the stack",
1467            }));
1468        }
1469
1470        // In deferred mode, cannot pop below the start depth
1471        if let Some(start_depth) = self.start_depth()
1472            && self.frames().len() <= start_depth
1473        {
1474            // No need to poison - returning Err consumes self, Drop will handle cleanup
1475            return Err(self.err(ReflectErrorKind::InvariantViolation {
1476                invariant: "Partial::end() called but would pop below deferred start depth",
1477            }));
1478        }
1479
1480        // Require that the top frame is fully initialized before popping.
1481        // In deferred mode, tracked frames (those that will be stored for re-entry)
1482        // defer validation to finish_deferred(). All other frames validate now
1483        // using the TypePlan's FillRule (which knows what's Required vs Defaultable).
1484        let requires_full_init = if !self.is_deferred() {
1485            true
1486        } else {
1487            // If this frame will be stored, defer validation to finish_deferred().
1488            // Otherwise validate now.
1489            !self.should_store_frame_for_deferred()
1490        };
1491
1492        if requires_full_init {
1493            // Try the optimized path using precomputed FieldInitPlan
1494            // Extract frame info first (borrows only self.mode)
1495            let frame_info = self.mode.stack().last().map(|frame| {
1496                let variant_idx = match &frame.tracker {
1497                    Tracker::Enum { variant_idx, .. } => Some(*variant_idx),
1498                    _ => None,
1499                };
1500                (frame.type_plan, variant_idx)
1501            });
1502
1503            // Look up plans from the type plan node - need to resolve NodeId to get the actual node
1504            let plans_info = frame_info.and_then(|(type_plan_id, variant_idx)| {
1505                let type_plan = self.root_plan.node(type_plan_id);
1506                match &type_plan.kind {
1507                    TypePlanNodeKind::Struct(struct_plan) => Some(struct_plan.fields),
1508                    TypePlanNodeKind::Enum(enum_plan) => {
1509                        let variants = self.root_plan.variants(enum_plan.variants);
1510                        variant_idx.and_then(|idx| variants.get(idx).map(|v| v.fields))
1511                    }
1512                    _ => None,
1513                }
1514            });
1515
1516            if let Some(plans_range) = plans_info {
1517                // Resolve the SliceRange to an actual slice
1518                let plans = self.root_plan.fields(plans_range);
1519                // Now mutably borrow mode.stack to get the frame
1520                // (root_plan borrow of `plans` is still active but that's fine -
1521                // mode and root_plan are separate fields)
1522                let frame = self.mode.stack_mut().last_mut().unwrap();
1523                frame
1524                    .fill_and_require_fields(plans, plans.len(), &self.root_plan)
1525                    .map_err(|e| self.err(e))?;
1526            } else {
1527                // Fall back to the old path if optimized path wasn't available
1528                if let Some(frame) = self.frames_mut().last_mut() {
1529                    frame.fill_defaults().map_err(|e| self.err(e))?;
1530                }
1531
1532                let frame = self.frames_mut().last_mut().unwrap();
1533                let result = frame.require_full_initialization();
1534                if result.is_err() {
1535                    crate::trace!(
1536                        "end() VALIDATION FAILED: {} ({:?}) is_init={} - {:?}",
1537                        frame.allocated.shape(),
1538                        frame.tracker.kind(),
1539                        frame.is_init,
1540                        result
1541                    );
1542                }
1543                result.map_err(|e| self.err(e))?
1544            }
1545        }
1546
1547        // In deferred mode, check if we should store this frame for potential re-entry.
1548        // We need to compute the storage path BEFORE popping so we can check it.
1549        //
1550        // Store frames that can be re-entered in deferred mode.
1551        // This includes structs, enums, collections, and Options (which need to be
1552        // stored so finish_deferred can find them when processing their inner values).
1553        let deferred_storage_info = if self.is_deferred() {
1554            let should_store = self.should_store_frame_for_deferred();
1555
1556            if should_store {
1557                // Compute the "field-only" path for storage by finding all Field steps
1558                // from PARENT frames only. The frame being ended shouldn't contribute to
1559                // its own path (its current_child points to ITS children, not to itself).
1560                //
1561                // Note: We include ALL frames in the path computation (including those
1562                // before start_depth) because they contain navigation info. The start_depth
1563                // only determines which frames we STORE, not which frames contribute to paths.
1564                //
1565                // Get the root shape for the Path from the first frame
1566                let root_shape = self
1567                    .frames()
1568                    .first()
1569                    .map(|f| f.allocated.shape())
1570                    .unwrap_or_else(|| <() as facet_core::Facet>::SHAPE);
1571
1572                let mut field_path = facet_path::Path::new(root_shape);
1573                let frames_len = self.frames().len();
1574                // Iterate over all frames EXCEPT the last one (the one being ended)
1575                for (frame_idx, frame) in self.frames().iter().enumerate() {
1576                    // Skip the frame being ended
1577                    if frame_idx == frames_len - 1 {
1578                        continue;
1579                    }
1580                    // Extract navigation steps from frames
1581                    // This MUST match derive_path() for consistency
1582                    match &frame.tracker {
1583                        Tracker::Struct {
1584                            current_child: Some(idx),
1585                            ..
1586                        } => {
1587                            field_path.push(PathStep::Field(*idx as u32));
1588                        }
1589                        Tracker::Enum {
1590                            current_child: Some(idx),
1591                            ..
1592                        } => {
1593                            field_path.push(PathStep::Field(*idx as u32));
1594                        }
1595                        Tracker::List {
1596                            current_child: Some(idx),
1597                            ..
1598                        } => {
1599                            field_path.push(PathStep::Index(*idx as u32));
1600                        }
1601                        Tracker::Array {
1602                            current_child: Some(idx),
1603                            ..
1604                        } => {
1605                            field_path.push(PathStep::Index(*idx as u32));
1606                        }
1607                        Tracker::Option {
1608                            building_inner: true,
1609                            ..
1610                        } => {
1611                            // Option with building_inner contributes OptionSome to path
1612                            field_path.push(PathStep::OptionSome);
1613                        }
1614                        Tracker::SmartPointer {
1615                            building_inner: true,
1616                            ..
1617                        } => {
1618                            // SmartPointer with building_inner contributes Deref to path
1619                            field_path.push(PathStep::Deref);
1620                        }
1621                        Tracker::SmartPointerSlice {
1622                            current_child: Some(idx),
1623                            ..
1624                        } => {
1625                            // SmartPointerSlice with current_child contributes Index to path
1626                            field_path.push(PathStep::Index(*idx as u32));
1627                        }
1628                        Tracker::Inner {
1629                            building_inner: true,
1630                        } => {
1631                            // Inner with building_inner contributes Inner to path
1632                            field_path.push(PathStep::Inner);
1633                        }
1634                        Tracker::Map {
1635                            current_entry_index: Some(idx),
1636                            building_key,
1637                            ..
1638                        } => {
1639                            // Map with active entry contributes MapKey or MapValue with entry index
1640                            if *building_key {
1641                                field_path.push(PathStep::MapKey(*idx as u32));
1642                            } else {
1643                                field_path.push(PathStep::MapValue(*idx as u32));
1644                            }
1645                        }
1646                        _ => {}
1647                    }
1648
1649                    // If the next frame on the stack is a proxy frame, add a Proxy
1650                    // path step. This distinguishes the proxy frame (and its children)
1651                    // from the parent frame that the proxy writes into, preventing path
1652                    // collisions in deferred mode where both frames are stored.
1653                    if frame_idx + 1 < frames_len
1654                        && self.frames()[frame_idx + 1].using_custom_deserialization
1655                    {
1656                        field_path.push(PathStep::Proxy);
1657                    }
1658                }
1659
1660                if !field_path.is_empty() {
1661                    Some(field_path)
1662                } else {
1663                    None
1664                }
1665            } else {
1666                None
1667            }
1668        } else {
1669            None
1670        };
1671
1672        // Pop the frame and save its data pointer for SmartPointer handling
1673        let mut popped_frame = self.frames_mut().pop().unwrap();
1674
1675        // In non-deferred mode, proxy frames are processed immediately.
1676        // In deferred mode, proxy frames are stored (with a PathStep::Proxy
1677        // distinguishing them from their parent) and the conversion is handled
1678        // by finish_deferred after children have been fully materialized.
1679        if popped_frame.using_custom_deserialization && deferred_storage_info.is_none() {
1680            // First check the proxy stored in the frame (used for format-specific proxies
1681            // and container-level proxies), then fall back to field-level proxy.
1682            // This ordering is important because format-specific proxies store their
1683            // proxy in shape_level_proxy, and we want them to take precedence over
1684            // the format-agnostic field.proxy().
1685            let deserialize_with: Option<facet_core::ProxyConvertInFn> =
1686                popped_frame.shape_level_proxy.map(|p| p.convert_in);
1687
1688            // Fall back to field-level proxy (format-agnostic)
1689            let deserialize_with = deserialize_with.or_else(|| {
1690                self.parent_field()
1691                    .and_then(|f| f.proxy().map(|p| p.convert_in))
1692            });
1693
1694            if let Some(deserialize_with) = deserialize_with {
1695                // Get parent shape upfront to avoid borrow conflicts
1696                let parent_shape = self.frames().last().unwrap().allocated.shape();
1697                let parent_frame = self.frames_mut().last_mut().unwrap();
1698
1699                trace!(
1700                    "Detected custom conversion needed from {} to {}",
1701                    popped_frame.allocated.shape(),
1702                    parent_shape
1703                );
1704
1705                unsafe {
1706                    let res = {
1707                        let inner_value_ptr = popped_frame.data.assume_init().as_const();
1708                        (deserialize_with)(inner_value_ptr, parent_frame.data)
1709                    };
1710                    let popped_frame_shape = popped_frame.allocated.shape();
1711
1712                    // Note: We do NOT call deinit() here because deserialize_with uses
1713                    // ptr::read to take ownership of the source value. Calling deinit()
1714                    // would cause a double-free. We mark is_init as false to satisfy
1715                    // dealloc()'s assertion, then deallocate the memory.
1716                    popped_frame.is_init = false;
1717                    popped_frame.dealloc();
1718                    let parent_data = parent_frame.data;
1719                    match res {
1720                        Ok(rptr) => {
1721                            if rptr.as_uninit() != parent_data {
1722                                return Err(self.err(
1723                                    ReflectErrorKind::CustomDeserializationError {
1724                                        message:
1725                                            "deserialize_with did not return the expected pointer"
1726                                                .into(),
1727                                        src_shape: popped_frame_shape,
1728                                        dst_shape: parent_shape,
1729                                    },
1730                                ));
1731                            }
1732                        }
1733                        Err(message) => {
1734                            return Err(self.err(ReflectErrorKind::CustomDeserializationError {
1735                                message,
1736                                src_shape: popped_frame_shape,
1737                                dst_shape: parent_shape,
1738                            }));
1739                        }
1740                    }
1741                    // Re-borrow parent_frame after potential early returns
1742                    let parent_frame = self.frames_mut().last_mut().unwrap();
1743                    parent_frame.mark_as_init();
1744                }
1745                return Ok(self);
1746            }
1747        }
1748
1749        // If we determined this frame should be stored for deferred re-entry, do it now
1750        if let Some(storage_path) = deferred_storage_info {
1751            trace!(
1752                "end(): Storing frame for deferred path {:?}, shape {}",
1753                storage_path,
1754                popped_frame.allocated.shape()
1755            );
1756
1757            if let FrameMode::Deferred {
1758                stack,
1759                stored_frames,
1760                ..
1761            } = &mut self.mode
1762            {
1763                // Mark the field as initialized in the parent frame.
1764                // This is important because the parent might validate before
1765                // finish_deferred runs (e.g., parent is an array element that
1766                // isn't stored). Without this, the parent's validation would
1767                // fail with "missing field".
1768                if let FrameOwnership::Field { field_idx } = popped_frame.ownership
1769                    && let Some(parent_frame) = stack.last_mut()
1770                {
1771                    Self::mark_field_initialized_by_index(parent_frame, field_idx);
1772                }
1773
1774                // For BorrowedInPlace DynamicValue frames (e.g., re-entered pending entries),
1775                // flush pending_elements/pending_entries and return without storing.
1776                // These frames point to memory that's already tracked in the parent's
1777                // pending_entries - storing them would overwrite the entry.
1778                if matches!(popped_frame.ownership, FrameOwnership::BorrowedInPlace) {
1779                    crate::trace!(
1780                        "end(): BorrowedInPlace frame, flushing pending items and returning"
1781                    );
1782                    if let Err(kind) = popped_frame.require_full_initialization() {
1783                        return Err(ReflectError::new(kind, storage_path));
1784                    }
1785                    return Ok(self);
1786                }
1787
1788                // Handle Map state transitions even when storing frames.
1789                // The Map needs to transition states so that subsequent operations work:
1790                // - PushingKey -> PushingValue: so begin_value() can be called
1791                // - PushingValue -> Idle: so begin_key() can be called for the next entry
1792                // The frames are still stored for potential re-entry and finalization.
1793                if let Some(parent_frame) = stack.last_mut() {
1794                    if let Tracker::Map {
1795                        insert_state,
1796                        pending_entries,
1797                        ..
1798                    } = &mut parent_frame.tracker
1799                    {
1800                        match insert_state {
1801                            MapInsertState::PushingKey { key_ptr, .. } => {
1802                                // Transition to PushingValue state.
1803                                // key_frame_stored = true because the key frame is being stored,
1804                                // so the stored frame will handle cleanup (not the Map's deinit).
1805                                *insert_state = MapInsertState::PushingValue {
1806                                    key_ptr: *key_ptr,
1807                                    value_ptr: None,
1808                                    value_initialized: false,
1809                                    value_frame_on_stack: false,
1810                                    key_frame_stored: true,
1811                                };
1812                                crate::trace!(
1813                                    "end(): Map transitioned to PushingValue while storing key frame"
1814                                );
1815                            }
1816                            MapInsertState::PushingValue {
1817                                key_ptr,
1818                                value_ptr: Some(value_ptr),
1819                                ..
1820                            } => {
1821                                // Add entry to pending_entries and reset to Idle
1822                                pending_entries.push((*key_ptr, *value_ptr));
1823                                *insert_state = MapInsertState::Idle;
1824                                crate::trace!(
1825                                    "end(): Map added entry to pending_entries while storing value frame"
1826                                );
1827                            }
1828                            _ => {}
1829                        }
1830                    }
1831
1832                    // Handle Set element insertion immediately.
1833                    // Set elements have no path identity (no index), so they can't be stored
1834                    // and re-entered. We must insert them into the Set now.
1835                    if let Tracker::Set { current_child } = &mut parent_frame.tracker
1836                        && *current_child
1837                        && parent_frame.is_init
1838                        && let Def::Set(set_def) = parent_frame.allocated.shape().def
1839                    {
1840                        let insert = set_def.vtable.insert;
1841                        let element_ptr = PtrMut::new(popped_frame.data.as_mut_byte_ptr());
1842                        unsafe {
1843                            insert(
1844                                PtrMut::new(parent_frame.data.as_mut_byte_ptr()),
1845                                element_ptr,
1846                            );
1847                        }
1848                        crate::trace!("end(): Set element inserted immediately in deferred mode");
1849                        // Insert moved out of popped_frame - don't store it
1850                        popped_frame.tracker = Tracker::Scalar;
1851                        popped_frame.is_init = false;
1852                        popped_frame.dealloc();
1853                        *current_child = false;
1854                        // Don't store this frame - return early
1855                        return Ok(self);
1856                    }
1857
1858                    // Handle DynamicValue object entry - add to pending_entries for deferred insertion.
1859                    // Like Map entries, we store the key-value pair and insert during finalization.
1860                    if let Tracker::DynamicValue {
1861                        state:
1862                            DynamicValueState::Object {
1863                                insert_state,
1864                                pending_entries,
1865                            },
1866                    } = &mut parent_frame.tracker
1867                        && let DynamicObjectInsertState::BuildingValue { key } = insert_state
1868                    {
1869                        // Take ownership of the key from insert_state
1870                        let key = core::mem::take(key);
1871
1872                        // Finalize the child Value before adding to pending_entries.
1873                        // The child might have its own pending_entries/pending_elements
1874                        // that need to be inserted first.
1875                        if let Err(kind) = popped_frame.require_full_initialization() {
1876                            return Err(ReflectError::new(kind, storage_path.clone()));
1877                        }
1878
1879                        // Add to pending_entries for deferred insertion
1880                        pending_entries.push((key, popped_frame.data));
1881                        crate::trace!(
1882                            "end(): DynamicValue object entry added to pending_entries in deferred mode"
1883                        );
1884
1885                        // The value frame's data is now owned by pending_entries
1886                        // Mark frame as not owning the data so it won't be deallocated
1887                        popped_frame.tracker = Tracker::Scalar;
1888                        popped_frame.is_init = false;
1889                        // Don't dealloc - pending_entries owns the pointer now
1890
1891                        // Reset insert state to Idle so more entries can be added
1892                        *insert_state = DynamicObjectInsertState::Idle;
1893
1894                        // Don't store this frame - return early
1895                        return Ok(self);
1896                    }
1897
1898                    // Handle DynamicValue array element - add to pending_elements for deferred insertion.
1899                    if let Tracker::DynamicValue {
1900                        state:
1901                            DynamicValueState::Array {
1902                                building_element,
1903                                pending_elements,
1904                            },
1905                    } = &mut parent_frame.tracker
1906                        && *building_element
1907                    {
1908                        // Finalize the child Value before adding to pending_elements.
1909                        // The child might have its own pending_entries/pending_elements
1910                        // that need to be inserted first.
1911                        if let Err(kind) = popped_frame.require_full_initialization() {
1912                            return Err(ReflectError::new(kind, storage_path.clone()));
1913                        }
1914
1915                        // Add to pending_elements for deferred insertion
1916                        pending_elements.push(popped_frame.data);
1917                        crate::trace!(
1918                            "end(): DynamicValue array element added to pending_elements in deferred mode"
1919                        );
1920
1921                        // The element frame's data is now owned by pending_elements
1922                        // Mark frame as not owning the data so it won't be deallocated
1923                        popped_frame.tracker = Tracker::Scalar;
1924                        popped_frame.is_init = false;
1925                        // Don't dealloc - pending_elements owns the pointer now
1926
1927                        // Reset building_element so more elements can be added
1928                        *building_element = false;
1929
1930                        // Don't store this frame - return early
1931                        return Ok(self);
1932                    }
1933
1934                    // For List elements stored in a rope (RopeSlot ownership), we need to
1935                    // mark the element as initialized in the rope. When the List frame is
1936                    // deinited, the rope will drop all initialized elements.
1937                    if matches!(popped_frame.ownership, FrameOwnership::RopeSlot)
1938                        && let Tracker::List {
1939                            rope: Some(rope), ..
1940                        } = &mut parent_frame.tracker
1941                    {
1942                        rope.mark_last_initialized();
1943                    }
1944
1945                    // Clear building_item for SmartPointerSlice so the next element can be added
1946                    if let Tracker::SmartPointerSlice { building_item, .. } =
1947                        &mut parent_frame.tracker
1948                    {
1949                        *building_item = false;
1950                        crate::trace!(
1951                            "end(): SmartPointerSlice building_item cleared while storing element"
1952                        );
1953                    }
1954                }
1955
1956                stored_frames.insert(storage_path, popped_frame);
1957
1958                // Clear parent's current_child tracking
1959                if let Some(parent_frame) = stack.last_mut() {
1960                    parent_frame.tracker.clear_current_child();
1961                }
1962            }
1963
1964            return Ok(self);
1965        }
1966
1967        // Update parent frame's tracking when popping from a child
1968        // Get parent shape upfront to avoid borrow conflicts
1969        let parent_shape = self.frames().last().unwrap().allocated.shape();
1970        let is_deferred_mode = self.is_deferred();
1971        let parent_frame = self.frames_mut().last_mut().unwrap();
1972
1973        crate::trace!(
1974            "end(): Popped {} (tracker {:?}), Parent {} (tracker {:?})",
1975            popped_frame.allocated.shape(),
1976            popped_frame.tracker.kind(),
1977            parent_shape,
1978            parent_frame.tracker.kind()
1979        );
1980
1981        // Check if we need to do a conversion - this happens when:
1982        // 1. The parent frame has a builder_shape or inner type that matches the popped frame's shape
1983        // 2. The parent frame has try_from
1984        // 3. The parent frame is not yet initialized
1985        // 4. The parent frame's tracker is Scalar or Inner (not Option, SmartPointer, etc.)
1986        //    This ensures we only do conversion when begin_inner was used, not begin_some
1987        let needs_conversion = !parent_frame.is_init
1988            && matches!(
1989                parent_frame.tracker,
1990                Tracker::Scalar | Tracker::Inner { .. }
1991            )
1992            && ((parent_shape.builder_shape.is_some()
1993                && parent_shape.builder_shape.unwrap() == popped_frame.allocated.shape())
1994                || (parent_shape.inner.is_some()
1995                    && parent_shape.inner.unwrap() == popped_frame.allocated.shape()))
1996            && match parent_shape.vtable {
1997                facet_core::VTableErased::Direct(vt) => vt.try_from.is_some(),
1998                facet_core::VTableErased::Indirect(vt) => vt.try_from.is_some(),
1999            };
2000
2001        if needs_conversion {
2002            trace!(
2003                "Detected implicit conversion needed from {} to {}",
2004                popped_frame.allocated.shape(),
2005                parent_shape
2006            );
2007
2008            // The conversion requires the source frame to be fully initialized
2009            // (we're about to call assume_init() and pass to try_from)
2010            if let Err(e) = popped_frame.require_full_initialization() {
2011                // Deallocate the memory since the frame wasn't fully initialized
2012                if let FrameOwnership::Owned = popped_frame.ownership
2013                    && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2014                    && layout.size() > 0
2015                {
2016                    trace!(
2017                        "Deallocating uninitialized conversion frame memory: size={}, align={}",
2018                        layout.size(),
2019                        layout.align()
2020                    );
2021                    unsafe {
2022                        ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2023                    }
2024                }
2025                return Err(self.err(e));
2026            }
2027
2028            // Perform the conversion
2029            let inner_ptr = unsafe { popped_frame.data.assume_init().as_const() };
2030            let inner_shape = popped_frame.allocated.shape();
2031
2032            trace!("Converting from {} to {}", inner_shape, parent_shape);
2033
2034            // Handle Direct and Indirect vtables - both return TryFromOutcome
2035            let outcome = match parent_shape.vtable {
2036                facet_core::VTableErased::Direct(vt) => {
2037                    if let Some(try_from_fn) = vt.try_from {
2038                        unsafe {
2039                            try_from_fn(
2040                                parent_frame.data.as_mut_byte_ptr() as *mut (),
2041                                inner_shape,
2042                                inner_ptr,
2043                            )
2044                        }
2045                    } else {
2046                        return Err(self.err(ReflectErrorKind::OperationFailed {
2047                            shape: parent_shape,
2048                            operation: "try_from not available for this type",
2049                        }));
2050                    }
2051                }
2052                facet_core::VTableErased::Indirect(vt) => {
2053                    if let Some(try_from_fn) = vt.try_from {
2054                        // parent_frame.data is uninitialized - we're writing the converted
2055                        // value into it
2056                        let ox_uninit =
2057                            facet_core::OxPtrUninit::new(parent_frame.data, parent_shape);
2058                        unsafe { try_from_fn(ox_uninit, inner_shape, inner_ptr) }
2059                    } else {
2060                        return Err(self.err(ReflectErrorKind::OperationFailed {
2061                            shape: parent_shape,
2062                            operation: "try_from not available for this type",
2063                        }));
2064                    }
2065                }
2066            };
2067
2068            // Handle the TryFromOutcome, which explicitly communicates ownership semantics:
2069            // - Converted: source was consumed, conversion succeeded
2070            // - Unsupported: source was NOT consumed, caller retains ownership
2071            // - Failed: source WAS consumed, but conversion failed
2072            match outcome {
2073                facet_core::TryFromOutcome::Converted => {
2074                    trace!("Conversion succeeded, marking parent as initialized");
2075                    parent_frame.is_init = true;
2076                    // Reset Inner tracker to Scalar after successful conversion
2077                    if matches!(parent_frame.tracker, Tracker::Inner { .. }) {
2078                        parent_frame.tracker = Tracker::Scalar;
2079                    }
2080                }
2081                facet_core::TryFromOutcome::Unsupported => {
2082                    trace!("Source type not supported for conversion - source NOT consumed");
2083
2084                    // Source was NOT consumed, so we need to drop it properly
2085                    if let FrameOwnership::Owned = popped_frame.ownership
2086                        && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2087                        && layout.size() > 0
2088                    {
2089                        // Drop the value, then deallocate
2090                        unsafe {
2091                            popped_frame
2092                                .allocated
2093                                .shape()
2094                                .call_drop_in_place(popped_frame.data.assume_init());
2095                            ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2096                        }
2097                    }
2098
2099                    return Err(self.err(ReflectErrorKind::TryFromError {
2100                        src_shape: inner_shape,
2101                        dst_shape: parent_shape,
2102                        inner: facet_core::TryFromError::UnsupportedSourceType,
2103                    }));
2104                }
2105                facet_core::TryFromOutcome::Failed(e) => {
2106                    trace!("Conversion failed after consuming source: {e:?}");
2107
2108                    // Source WAS consumed, so we only deallocate memory (don't drop)
2109                    if let FrameOwnership::Owned = popped_frame.ownership
2110                        && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2111                        && layout.size() > 0
2112                    {
2113                        trace!(
2114                            "Deallocating conversion frame memory after failure: size={}, align={}",
2115                            layout.size(),
2116                            layout.align()
2117                        );
2118                        unsafe {
2119                            ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2120                        }
2121                    }
2122
2123                    return Err(self.err(ReflectErrorKind::TryFromError {
2124                        src_shape: inner_shape,
2125                        dst_shape: parent_shape,
2126                        inner: facet_core::TryFromError::Generic(e.into_owned()),
2127                    }));
2128                }
2129            }
2130
2131            // Deallocate the inner value's memory since try_from consumed it
2132            if let FrameOwnership::Owned = popped_frame.ownership
2133                && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2134                && layout.size() > 0
2135            {
2136                trace!(
2137                    "Deallocating conversion frame memory: size={}, align={}",
2138                    layout.size(),
2139                    layout.align()
2140                );
2141                unsafe {
2142                    ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2143                }
2144            }
2145
2146            return Ok(self);
2147        }
2148
2149        // For Field-owned frames, reclaim responsibility in parent's tracker
2150        // Only mark as initialized if the child frame was actually initialized.
2151        // This prevents double-free when begin_inner/begin_some drops a value via
2152        // prepare_for_reinitialization but then fails, leaving the child uninitialized.
2153        //
2154        // We use require_full_initialization() rather than just is_init because:
2155        // - Scalar frames use is_init as the source of truth
2156        // - Struct/Array/Enum frames use their iset/data as the source of truth
2157        //   (is_init may never be set to true for these tracker types)
2158        if let FrameOwnership::Field { field_idx } = popped_frame.ownership {
2159            // In deferred mode, fill defaults on the child frame before checking initialization.
2160            // Fill defaults for child frame before checking if it's fully initialized.
2161            // This handles structs/enums with optional fields that should auto-fill.
2162            if let Err(e) = popped_frame.fill_defaults() {
2163                return Err(self.err(e));
2164            }
2165            let child_is_initialized = popped_frame.require_full_initialization().is_ok();
2166            match &mut parent_frame.tracker {
2167                Tracker::Struct {
2168                    iset,
2169                    current_child,
2170                } => {
2171                    if child_is_initialized {
2172                        iset.set(field_idx); // Parent reclaims responsibility only if child was init
2173                    }
2174                    *current_child = None;
2175                }
2176                Tracker::Array {
2177                    iset,
2178                    current_child,
2179                } => {
2180                    if child_is_initialized {
2181                        iset.set(field_idx); // Parent reclaims responsibility only if child was init
2182                    }
2183                    *current_child = None;
2184                }
2185                Tracker::Enum {
2186                    data,
2187                    current_child,
2188                    ..
2189                } => {
2190                    crate::trace!(
2191                        "end(): Enum field {} child_is_initialized={}, data before={:?}",
2192                        field_idx,
2193                        child_is_initialized,
2194                        data
2195                    );
2196                    if child_is_initialized {
2197                        data.set(field_idx); // Parent reclaims responsibility only if child was init
2198                    }
2199                    *current_child = None;
2200                }
2201                _ => {}
2202            }
2203            return Ok(self);
2204        }
2205
2206        // For BorrowedInPlace DynamicValue frames (e.g., re-entered pending entries),
2207        // flush any pending_elements/pending_entries that were accumulated during
2208        // this re-entry. This is necessary because BorrowedInPlace frames aren't
2209        // stored for deferred processing - they modify existing memory in-place.
2210        if matches!(popped_frame.ownership, FrameOwnership::BorrowedInPlace)
2211            && let Err(e) = popped_frame.require_full_initialization()
2212        {
2213            return Err(self.err(e));
2214        }
2215
2216        match &mut parent_frame.tracker {
2217            Tracker::SmartPointer {
2218                building_inner,
2219                pending_inner,
2220            } => {
2221                crate::trace!(
2222                    "end() SMARTPTR: popped {} into parent {} (building_inner={}, deferred={})",
2223                    popped_frame.allocated.shape(),
2224                    parent_frame.allocated.shape(),
2225                    *building_inner,
2226                    is_deferred_mode
2227                );
2228                // We just popped the inner value frame for a SmartPointer
2229                if *building_inner {
2230                    if matches!(parent_frame.allocated.shape().def, Def::Pointer(_)) {
2231                        // Check if we're in deferred mode - if so, store the inner value pointer
2232                        if is_deferred_mode {
2233                            // Store the inner value pointer for deferred new_into_fn.
2234                            *pending_inner = Some(popped_frame.data);
2235                            *building_inner = false;
2236                            parent_frame.is_init = true;
2237                            crate::trace!(
2238                                "end() SMARTPTR: stored pending_inner, will finalize in finish_deferred"
2239                            );
2240                        } else {
2241                            // Not in deferred mode - complete immediately
2242                            if let Def::Pointer(_) = parent_frame.allocated.shape().def {
2243                                if let Err(e) = popped_frame.require_full_initialization() {
2244                                    popped_frame.deinit();
2245                                    popped_frame.dealloc();
2246                                    return Err(self.err(e));
2247                                }
2248
2249                                // Use complete_smart_pointer_frame which handles both:
2250                                // - Sized pointees (via new_into_fn)
2251                                // - Unsized pointees like str (via String conversion)
2252                                Self::complete_smart_pointer_frame(parent_frame, popped_frame);
2253                                crate::trace!(
2254                                    "end() SMARTPTR: completed smart pointer via complete_smart_pointer_frame"
2255                                );
2256
2257                                // Change tracker to Scalar so the next end() just pops it
2258                                parent_frame.tracker = Tracker::Scalar;
2259                            }
2260                        }
2261                    } else {
2262                        return Err(self.err(ReflectErrorKind::OperationFailed {
2263                            shape: parent_shape,
2264                            operation: "SmartPointer frame without SmartPointer definition",
2265                        }));
2266                    }
2267                } else {
2268                    // building_inner is false - shouldn't happen in normal flow
2269                    return Err(self.err(ReflectErrorKind::OperationFailed {
2270                        shape: parent_shape,
2271                        operation: "SmartPointer end() called with building_inner = false",
2272                    }));
2273                }
2274            }
2275            Tracker::List {
2276                current_child,
2277                rope,
2278                ..
2279            } if parent_frame.is_init => {
2280                if current_child.is_some() {
2281                    // We just popped an element frame, now add it to the list
2282                    if let Def::List(list_def) = parent_shape.def {
2283                        // Check which storage mode we used
2284                        if matches!(popped_frame.ownership, FrameOwnership::RopeSlot) {
2285                            // Rope storage: element lives in a stable chunk.
2286                            // Mark it as initialized; we'll drain to Vec when the list frame pops.
2287                            if let Some(rope) = rope {
2288                                rope.mark_last_initialized();
2289                            }
2290                            // No dealloc needed - memory belongs to rope
2291                        } else {
2292                            // Fallback: element is in separate heap buffer, use push to copy
2293                            let Some(push_fn) = list_def.push() else {
2294                                return Err(self.err(ReflectErrorKind::OperationFailed {
2295                                    shape: parent_shape,
2296                                    operation: "List missing push function",
2297                                }));
2298                            };
2299
2300                            // The child frame contained the element value
2301                            let element_ptr = PtrMut::new(popped_frame.data.as_mut_byte_ptr());
2302
2303                            // Use push to add element to the list
2304                            unsafe {
2305                                push_fn(
2306                                    PtrMut::new(parent_frame.data.as_mut_byte_ptr()),
2307                                    element_ptr,
2308                                );
2309                            }
2310
2311                            // Push moved out of popped_frame
2312                            popped_frame.tracker = Tracker::Scalar;
2313                            popped_frame.is_init = false;
2314                            popped_frame.dealloc();
2315                        }
2316
2317                        *current_child = None;
2318                    }
2319                }
2320            }
2321            Tracker::Map {
2322                insert_state,
2323                pending_entries,
2324                ..
2325            } if parent_frame.is_init => {
2326                match insert_state {
2327                    MapInsertState::PushingKey { key_ptr, .. } => {
2328                        // Fill defaults on the key frame before considering it done.
2329                        // This handles metadata containers and other structs with Option fields.
2330                        if let Err(e) = popped_frame.fill_defaults() {
2331                            return Err(self.err(e));
2332                        }
2333
2334                        // We just popped the key frame - mark key as initialized and transition
2335                        // to PushingValue state. key_frame_on_stack = false because the frame
2336                        // was just popped, so Map now owns the key buffer.
2337                        *insert_state = MapInsertState::PushingValue {
2338                            key_ptr: *key_ptr,
2339                            value_ptr: None,
2340                            value_initialized: false,
2341                            value_frame_on_stack: false, // No value frame yet
2342                            key_frame_stored: false,     // Key frame was popped, Map owns key
2343                        };
2344                    }
2345                    MapInsertState::PushingValue {
2346                        key_ptr, value_ptr, ..
2347                    } => {
2348                        // Fill defaults on the value frame before considering it done.
2349                        // This handles structs with Option fields.
2350                        if let Err(e) = popped_frame.fill_defaults() {
2351                            return Err(self.err(e));
2352                        }
2353
2354                        // We just popped the value frame.
2355                        // Instead of inserting immediately, add to pending_entries.
2356                        // This keeps the buffers alive for deferred processing.
2357                        // Actual insertion happens in require_full_initialization.
2358                        if let Some(value_ptr) = value_ptr {
2359                            pending_entries.push((*key_ptr, *value_ptr));
2360
2361                            // Reset to idle state
2362                            *insert_state = MapInsertState::Idle;
2363                        }
2364                    }
2365                    MapInsertState::Idle => {
2366                        // Nothing to do
2367                    }
2368                }
2369            }
2370            Tracker::Set { current_child } if parent_frame.is_init => {
2371                if *current_child {
2372                    // We just popped an element frame, now insert it into the set
2373                    if let Def::Set(set_def) = parent_frame.allocated.shape().def {
2374                        let insert = set_def.vtable.insert;
2375
2376                        // The child frame contained the element value
2377                        let element_ptr = PtrMut::new(popped_frame.data.as_mut_byte_ptr());
2378
2379                        // Use insert to add element to the set
2380                        unsafe {
2381                            insert(
2382                                PtrMut::new(parent_frame.data.as_mut_byte_ptr()),
2383                                element_ptr,
2384                            );
2385                        }
2386
2387                        // Insert moved out of popped_frame
2388                        popped_frame.tracker = Tracker::Scalar;
2389                        popped_frame.is_init = false;
2390                        popped_frame.dealloc();
2391
2392                        *current_child = false;
2393                    }
2394                }
2395            }
2396            Tracker::Option {
2397                building_inner,
2398                pending_inner,
2399            } => {
2400                crate::trace!(
2401                    "end(): matched Tracker::Option, building_inner={}",
2402                    *building_inner
2403                );
2404                // We just popped the inner value frame for an Option's Some variant
2405                if *building_inner {
2406                    if matches!(parent_frame.allocated.shape().def, Def::Option(_)) {
2407                        // Store the inner value pointer for deferred init_some.
2408                        // This keeps the inner value's memory stable for deferred processing.
2409                        // Actual init_some() happens in require_full_initialization().
2410                        *pending_inner = Some(popped_frame.data);
2411
2412                        // Mark that we're no longer building the inner value
2413                        *building_inner = false;
2414                        crate::trace!("end(): stored pending_inner, set building_inner to false");
2415                        // Mark the Option as initialized (pending finalization)
2416                        parent_frame.is_init = true;
2417                        crate::trace!("end(): set parent_frame.is_init to true");
2418                    } else {
2419                        return Err(self.err(ReflectErrorKind::OperationFailed {
2420                            shape: parent_shape,
2421                            operation: "Option frame without Option definition",
2422                        }));
2423                    }
2424                } else {
2425                    // building_inner is false - the Option was already initialized but
2426                    // begin_some was called again. The popped frame was not used to
2427                    // initialize the Option, so we need to clean it up.
2428                    popped_frame.deinit();
2429                    if let FrameOwnership::Owned = popped_frame.ownership
2430                        && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2431                        && layout.size() > 0
2432                    {
2433                        unsafe {
2434                            ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2435                        }
2436                    }
2437                }
2438            }
2439            Tracker::Result {
2440                is_ok,
2441                building_inner,
2442            } => {
2443                crate::trace!(
2444                    "end(): matched Tracker::Result, is_ok={}, building_inner={}",
2445                    *is_ok,
2446                    *building_inner
2447                );
2448                // We just popped the inner value frame for a Result's Ok or Err variant
2449                if *building_inner {
2450                    if let Def::Result(result_def) = parent_frame.allocated.shape().def {
2451                        // The popped frame contains the inner value
2452                        let inner_value_ptr = unsafe { popped_frame.data.assume_init() };
2453
2454                        // Initialize the Result as Ok(inner_value) or Err(inner_value)
2455                        if *is_ok {
2456                            let init_ok_fn = result_def.vtable.init_ok;
2457                            unsafe {
2458                                init_ok_fn(parent_frame.data, inner_value_ptr);
2459                            }
2460                        } else {
2461                            let init_err_fn = result_def.vtable.init_err;
2462                            unsafe {
2463                                init_err_fn(parent_frame.data, inner_value_ptr);
2464                            }
2465                        }
2466
2467                        // Deallocate the inner value's memory since init_ok/err_fn moved it
2468                        if let FrameOwnership::Owned = popped_frame.ownership
2469                            && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2470                            && layout.size() > 0
2471                        {
2472                            unsafe {
2473                                ::alloc::alloc::dealloc(
2474                                    popped_frame.data.as_mut_byte_ptr(),
2475                                    layout,
2476                                );
2477                            }
2478                        }
2479
2480                        // Mark that we're no longer building the inner value
2481                        *building_inner = false;
2482                        crate::trace!("end(): set building_inner to false");
2483                        // Mark the Result as initialized
2484                        parent_frame.is_init = true;
2485                        crate::trace!("end(): set parent_frame.is_init to true");
2486                    } else {
2487                        return Err(self.err(ReflectErrorKind::OperationFailed {
2488                            shape: parent_shape,
2489                            operation: "Result frame without Result definition",
2490                        }));
2491                    }
2492                } else {
2493                    // building_inner is false - the Result was already initialized but
2494                    // begin_ok/begin_err was called again. The popped frame was not used to
2495                    // initialize the Result, so we need to clean it up.
2496                    popped_frame.deinit();
2497                    if let FrameOwnership::Owned = popped_frame.ownership
2498                        && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2499                        && layout.size() > 0
2500                    {
2501                        unsafe {
2502                            ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2503                        }
2504                    }
2505                }
2506            }
2507            Tracker::Scalar => {
2508                // the main case here is: the popped frame was a `String` and the
2509                // parent frame is an `Arc<str>`, `Box<str>` etc.
2510                match &parent_shape.def {
2511                    Def::Pointer(smart_ptr_def) => {
2512                        let pointee = match smart_ptr_def.pointee() {
2513                            Some(p) => p,
2514                            None => {
2515                                return Err(self.err(ReflectErrorKind::InvariantViolation {
2516                                    invariant: "pointer type doesn't have a pointee",
2517                                }));
2518                            }
2519                        };
2520
2521                        if !pointee.is_shape(str::SHAPE) {
2522                            return Err(self.err(ReflectErrorKind::InvariantViolation {
2523                                invariant: "only T=str is supported when building SmartPointer<T> and T is unsized",
2524                            }));
2525                        }
2526
2527                        if !popped_frame.allocated.shape().is_shape(String::SHAPE) {
2528                            return Err(self.err(ReflectErrorKind::InvariantViolation {
2529                                invariant: "the popped frame should be String when building a SmartPointer<T>",
2530                            }));
2531                        }
2532
2533                        if let Err(e) = popped_frame.require_full_initialization() {
2534                            return Err(self.err(e));
2535                        }
2536
2537                        // if the just-popped frame was a SmartPointerStr, we have some conversion to do:
2538                        // Special-case: SmartPointer<str> (Box<str>, Arc<str>, Rc<str>) via SmartPointerStr tracker
2539                        // Here, popped_frame actually contains a value for String that should be moved into the smart pointer.
2540                        // We convert the String into Box<str>, Arc<str>, or Rc<str> as appropriate and write it to the parent frame.
2541                        use ::alloc::{rc::Rc, string::String, sync::Arc};
2542
2543                        let Some(known) = smart_ptr_def.known else {
2544                            return Err(self.err(ReflectErrorKind::OperationFailed {
2545                                shape: parent_shape,
2546                                operation: "SmartPointerStr for unknown smart pointer kind",
2547                            }));
2548                        };
2549
2550                        parent_frame.deinit();
2551
2552                        // Interpret the memory as a String, then convert and write.
2553                        let string_ptr = popped_frame.data.as_mut_byte_ptr() as *mut String;
2554                        let string_value = unsafe { core::ptr::read(string_ptr) };
2555
2556                        match known {
2557                            KnownPointer::Box => {
2558                                let boxed: Box<str> = string_value.into_boxed_str();
2559                                unsafe {
2560                                    core::ptr::write(
2561                                        parent_frame.data.as_mut_byte_ptr() as *mut Box<str>,
2562                                        boxed,
2563                                    );
2564                                }
2565                            }
2566                            KnownPointer::Arc => {
2567                                let arc: Arc<str> = Arc::from(string_value.into_boxed_str());
2568                                unsafe {
2569                                    core::ptr::write(
2570                                        parent_frame.data.as_mut_byte_ptr() as *mut Arc<str>,
2571                                        arc,
2572                                    );
2573                                }
2574                            }
2575                            KnownPointer::Rc => {
2576                                let rc: Rc<str> = Rc::from(string_value.into_boxed_str());
2577                                unsafe {
2578                                    core::ptr::write(
2579                                        parent_frame.data.as_mut_byte_ptr() as *mut Rc<str>,
2580                                        rc,
2581                                    );
2582                                }
2583                            }
2584                            _ => {
2585                                return Err(self.err(ReflectErrorKind::OperationFailed {
2586                                    shape: parent_shape,
2587                                    operation: "Don't know how to build this pointer type",
2588                                }));
2589                            }
2590                        }
2591
2592                        parent_frame.is_init = true;
2593
2594                        popped_frame.tracker = Tracker::Scalar;
2595                        popped_frame.is_init = false;
2596                        popped_frame.dealloc();
2597                    }
2598                    _ => {
2599                        // This can happen if begin_inner() was called on a type that
2600                        // has shape.inner but isn't a SmartPointer (e.g., Option).
2601                        // In this case, we can't complete the conversion, so return error.
2602                        return Err(self.err(ReflectErrorKind::OperationFailed {
2603                            shape: parent_shape,
2604                            operation: "end() called but parent has Uninit/Init tracker and isn't a SmartPointer",
2605                        }));
2606                    }
2607                }
2608            }
2609            Tracker::SmartPointerSlice {
2610                vtable,
2611                building_item,
2612                ..
2613            } => {
2614                if *building_item {
2615                    // We just popped an element frame, now push it to the slice builder
2616                    let element_ptr = PtrMut::new(popped_frame.data.as_mut_byte_ptr());
2617
2618                    // Use the slice builder's push_fn to add the element
2619                    crate::trace!("Pushing element to slice builder");
2620                    unsafe {
2621                        let parent_ptr = parent_frame.data.assume_init();
2622                        (vtable.push_fn)(parent_ptr, element_ptr);
2623                    }
2624
2625                    popped_frame.tracker = Tracker::Scalar;
2626                    popped_frame.is_init = false;
2627                    popped_frame.dealloc();
2628
2629                    if let Tracker::SmartPointerSlice {
2630                        building_item: bi, ..
2631                    } = &mut parent_frame.tracker
2632                    {
2633                        *bi = false;
2634                    }
2635                }
2636            }
2637            Tracker::DynamicValue {
2638                state:
2639                    DynamicValueState::Array {
2640                        building_element, ..
2641                    },
2642            } => {
2643                if *building_element {
2644                    // Check that the element is initialized before pushing
2645                    if !popped_frame.is_init {
2646                        // Element was never set - clean up and return error
2647                        let shape = parent_frame.allocated.shape();
2648                        popped_frame.dealloc();
2649                        *building_element = false;
2650                        // No need to poison - returning Err consumes self, Drop will handle cleanup
2651                        return Err(self.err(ReflectErrorKind::OperationFailed {
2652                            shape,
2653                            operation: "end() called but array element was never initialized",
2654                        }));
2655                    }
2656
2657                    // We just popped an element frame, now push it to the dynamic array
2658                    if let Def::DynamicValue(dyn_def) = parent_frame.allocated.shape().def {
2659                        // Get mutable pointers - both array and element need PtrMut
2660                        let array_ptr = unsafe { parent_frame.data.assume_init() };
2661                        let element_ptr = unsafe { popped_frame.data.assume_init() };
2662
2663                        // Use push_array_element to add element to the array
2664                        unsafe {
2665                            (dyn_def.vtable.push_array_element)(array_ptr, element_ptr);
2666                        }
2667
2668                        // Push moved out of popped_frame
2669                        popped_frame.tracker = Tracker::Scalar;
2670                        popped_frame.is_init = false;
2671                        popped_frame.dealloc();
2672
2673                        *building_element = false;
2674                    }
2675                }
2676            }
2677            Tracker::DynamicValue {
2678                state: DynamicValueState::Object { insert_state, .. },
2679            } => {
2680                if let DynamicObjectInsertState::BuildingValue { key } = insert_state {
2681                    // Check that the value is initialized before inserting
2682                    if !popped_frame.is_init {
2683                        // Value was never set - clean up and return error
2684                        let shape = parent_frame.allocated.shape();
2685                        popped_frame.dealloc();
2686                        *insert_state = DynamicObjectInsertState::Idle;
2687                        // No need to poison - returning Err consumes self, Drop will handle cleanup
2688                        return Err(self.err(ReflectErrorKind::OperationFailed {
2689                            shape,
2690                            operation: "end() called but object entry value was never initialized",
2691                        }));
2692                    }
2693
2694                    // We just popped a value frame, now insert it into the dynamic object
2695                    if let Def::DynamicValue(dyn_def) = parent_frame.allocated.shape().def {
2696                        // Get mutable pointers - both object and value need PtrMut
2697                        let object_ptr = unsafe { parent_frame.data.assume_init() };
2698                        let value_ptr = unsafe { popped_frame.data.assume_init() };
2699
2700                        // Use insert_object_entry to add the key-value pair
2701                        unsafe {
2702                            (dyn_def.vtable.insert_object_entry)(object_ptr, key, value_ptr);
2703                        }
2704
2705                        // Insert moved out of popped_frame
2706                        popped_frame.tracker = Tracker::Scalar;
2707                        popped_frame.is_init = false;
2708                        popped_frame.dealloc();
2709
2710                        // Reset insert state to Idle
2711                        *insert_state = DynamicObjectInsertState::Idle;
2712                    }
2713                }
2714            }
2715            _ => {}
2716        }
2717
2718        Ok(self)
2719    }
2720
2721    /// Returns a path representing the current traversal in the builder.
2722    ///
2723    /// The returned [`facet_path::Path`] can be formatted as a human-readable string
2724    /// using [`Path::format_with_shape()`](facet_path::Path::format_with_shape),
2725    /// e.g., `fieldName[index].subfield`.
2726    pub fn path(&self) -> Path {
2727        use facet_path::PathStep;
2728
2729        let root_shape = self
2730            .frames()
2731            .first()
2732            .expect("Partial must have at least one frame")
2733            .allocated
2734            .shape();
2735        let mut path = Path::new(root_shape);
2736
2737        for frame in self.frames().iter() {
2738            match frame.allocated.shape().ty {
2739                Type::User(user_type) => match user_type {
2740                    UserType::Struct(_struct_type) => {
2741                        // Add field step if we're currently in a field
2742                        if let Tracker::Struct {
2743                            current_child: Some(idx),
2744                            ..
2745                        } = &frame.tracker
2746                        {
2747                            path.push(PathStep::Field(*idx as u32));
2748                        }
2749                    }
2750                    UserType::Enum(enum_type) => {
2751                        // Add variant and optional field step
2752                        if let Tracker::Enum {
2753                            variant,
2754                            current_child,
2755                            ..
2756                        } = &frame.tracker
2757                        {
2758                            // Find the variant index by comparing pointers
2759                            if let Some(variant_idx) = enum_type
2760                                .variants
2761                                .iter()
2762                                .position(|v| core::ptr::eq(v, *variant))
2763                            {
2764                                path.push(PathStep::Variant(variant_idx as u32));
2765                            }
2766                            if let Some(idx) = *current_child {
2767                                path.push(PathStep::Field(idx as u32));
2768                            }
2769                        }
2770                    }
2771                    UserType::Union(_) => {
2772                        // No structural path steps for unions
2773                    }
2774                    UserType::Opaque => {
2775                        // Opaque types might be lists (e.g., Vec<T>)
2776                        if let Tracker::List {
2777                            current_child: Some(idx),
2778                            ..
2779                        } = &frame.tracker
2780                        {
2781                            path.push(PathStep::Index(*idx as u32));
2782                        }
2783                    }
2784                },
2785                Type::Sequence(facet_core::SequenceType::Array(_array_def)) => {
2786                    // Add index step if we're currently in an element
2787                    if let Tracker::Array {
2788                        current_child: Some(idx),
2789                        ..
2790                    } = &frame.tracker
2791                    {
2792                        path.push(PathStep::Index(*idx as u32));
2793                    }
2794                }
2795                Type::Sequence(_) => {
2796                    // Other sequence types (Slice, etc.) - no index tracking
2797                }
2798                Type::Pointer(_) => {
2799                    path.push(PathStep::Deref);
2800                }
2801                _ => {
2802                    // No structural path for scalars, etc.
2803                }
2804            }
2805        }
2806
2807        path
2808    }
2809
2810    /// Returns the root shape for path formatting.
2811    ///
2812    /// Use this together with [`path()`](Self::path) to format the path:
2813    /// ```ignore
2814    /// let path_str = partial.path().format_with_shape(partial.root_shape());
2815    /// ```
2816    pub fn root_shape(&self) -> &'static Shape {
2817        self.frames()
2818            .first()
2819            .expect("Partial should always have at least one frame")
2820            .allocated
2821            .shape()
2822    }
2823
2824    /// Create a [`ReflectError`] with the current path context.
2825    ///
2826    /// This is a convenience method for constructing errors inside `Partial` methods
2827    /// that automatically captures the current traversal path.
2828    #[inline]
2829    pub fn err(&self, kind: ReflectErrorKind) -> ReflectError {
2830        ReflectError::new(kind, self.path())
2831    }
2832
2833    /// Get the field for the parent frame
2834    pub fn parent_field(&self) -> Option<&Field> {
2835        self.frames()
2836            .iter()
2837            .rev()
2838            .nth(1)
2839            .and_then(|f| f.get_field())
2840    }
2841
2842    /// Gets the field for the current frame
2843    pub fn current_field(&self) -> Option<&Field> {
2844        self.frames().last().and_then(|f| f.get_field())
2845    }
2846
2847    /// Gets the nearest active field when nested wrapper frames are involved.
2848    ///
2849    /// This walks frames from innermost to outermost and returns the first frame
2850    /// that currently points at a struct/enum field.
2851    pub fn nearest_field(&self) -> Option<&Field> {
2852        self.frames().iter().rev().find_map(|f| f.get_field())
2853    }
2854
2855    /// Returns a const pointer to the current frame's data.
2856    ///
2857    /// This is useful for validation - after deserializing a field value,
2858    /// validators can read the value through this pointer.
2859    ///
2860    /// # Safety
2861    ///
2862    /// The returned pointer is valid only while the frame exists.
2863    /// The caller must ensure the frame is fully initialized before
2864    /// reading through this pointer.
2865    #[deprecated(note = "use initialized_data_ptr() instead, which checks initialization")]
2866    pub fn data_ptr(&self) -> Option<facet_core::PtrConst> {
2867        if self.state != PartialState::Active {
2868            return None;
2869        }
2870        self.frames().last().map(|f| {
2871            // SAFETY: We're in active state, so the frame is valid.
2872            // The caller is responsible for ensuring the data is initialized.
2873            unsafe { f.data.assume_init().as_const() }
2874        })
2875    }
2876
2877    /// Returns a const pointer to the current frame's data, but only if fully initialized.
2878    ///
2879    /// This is the safe way to get a pointer for validation - it verifies that
2880    /// the frame is fully initialized before returning the pointer.
2881    ///
2882    /// Returns `None` if:
2883    /// - The partial is not in active state
2884    /// - The current frame is not fully initialized
2885    #[allow(unsafe_code)]
2886    pub fn initialized_data_ptr(&mut self) -> Option<facet_core::PtrConst> {
2887        if self.state != PartialState::Active {
2888            return None;
2889        }
2890        let frame = self.frames_mut().last_mut()?;
2891
2892        // Check if fully initialized (may drain rope for lists)
2893        if frame.require_full_initialization().is_err() {
2894            return None;
2895        }
2896
2897        // SAFETY: We've verified the partial is active and the frame is fully initialized.
2898        Some(unsafe { frame.data.assume_init().as_const() })
2899    }
2900
2901    /// Returns a typed reference to the current frame's data if:
2902    /// 1. The partial is in active state
2903    /// 2. The current frame is fully initialized
2904    /// 3. The shape matches `T::SHAPE`
2905    ///
2906    /// This is the safe way to read a value from a Partial for validation purposes.
2907    #[allow(unsafe_code)]
2908    pub fn read_as<T: facet_core::Facet<'facet>>(&mut self) -> Option<&T> {
2909        if self.state != PartialState::Active {
2910            return None;
2911        }
2912        let frame = self.frames_mut().last_mut()?;
2913
2914        // Check if fully initialized (may drain rope for lists)
2915        if frame.require_full_initialization().is_err() {
2916            return None;
2917        }
2918
2919        // Check shape matches
2920        if frame.allocated.shape() != T::SHAPE {
2921            return None;
2922        }
2923
2924        // SAFETY: We've verified:
2925        // 1. The partial is active (frame is valid)
2926        // 2. The frame is fully initialized
2927        // 3. The shape matches T::SHAPE
2928        unsafe {
2929            let ptr = frame.data.assume_init().as_const();
2930            Some(&*ptr.as_ptr::<T>())
2931        }
2932    }
2933}