Skip to main content

facet_reflect/partial/partial_api/
misc.rs

1use facet_core::TryFromOutcome;
2use facet_path::{Path, PathStep};
3
4use super::*;
5use crate::typeplan::{DeserStrategy, TypePlanNodeKind};
6
7////////////////////////////////////////////////////////////////////////////////////////////////////
8// Misc.
9////////////////////////////////////////////////////////////////////////////////////////////////////
10impl<'facet, const BORROW: bool> Partial<'facet, BORROW> {
11    /// Applies a closure to this Partial, enabling chaining with operations that
12    /// take ownership and return `Result<Self, E>`.
13    ///
14    /// This is useful for chaining deserializer methods that need `&mut self`:
15    ///
16    /// ```ignore
17    /// wip = wip
18    ///     .begin_field("name")?
19    ///     .with(|w| deserializer.deserialize_into(w))?
20    ///     .end()?;
21    /// ```
22    #[inline]
23    pub fn with<F, E>(self, f: F) -> Result<Self, E>
24    where
25        F: FnOnce(Self) -> Result<Self, E>,
26    {
27        f(self)
28    }
29
30    /// Returns true if the Partial is in an active state (not built or poisoned).
31    ///
32    /// After `build()` succeeds or after an error causes poisoning, the Partial
33    /// becomes inactive and most operations will fail.
34    #[inline]
35    pub fn is_active(&self) -> bool {
36        self.state == PartialState::Active
37    }
38
39    /// Returns the current frame count (depth of nesting)
40    ///
41    /// The initial frame count is 1 — `begin_field` would push a new frame,
42    /// bringing it to 2, then `end` would bring it back to `1`.
43    ///
44    /// This is an implementation detail of `Partial`, kinda, but deserializers
45    /// might use this for debug assertions, to make sure the state is what
46    /// they think it is.
47    #[inline]
48    pub const fn frame_count(&self) -> usize {
49        self.frames().len()
50    }
51
52    /// Returns the shape of the current frame.
53    ///
54    /// # Panics
55    ///
56    /// Panics if the Partial has been poisoned or built, or if there are no frames
57    /// (which indicates a bug in the Partial implementation).
58    #[inline]
59    pub fn shape(&self) -> &'static Shape {
60        if self.state != PartialState::Active {
61            panic!(
62                "Partial::shape() called on non-active Partial (state: {:?})",
63                self.state
64            );
65        }
66        self.frames()
67            .last()
68            .expect("Partial::shape() called but no frames exist - this is a bug")
69            .allocated
70            .shape()
71    }
72
73    /// Returns the shape of the current frame, or `None` if the Partial is
74    /// inactive (poisoned or built) or has no frames.
75    ///
76    /// This is useful for debugging/logging where you want to inspect the state
77    /// without risking a panic.
78    #[inline]
79    pub fn try_shape(&self) -> Option<&'static Shape> {
80        if self.state != PartialState::Active {
81            return None;
82        }
83        self.frames().last().map(|f| f.allocated.shape())
84    }
85
    /// Returns the TypePlanCore for this Partial.
    ///
    /// This provides access to the arena-based type plan data, useful for
    /// resolving field lookups and accessing precomputed metadata.
    ///
    /// Unlike the frame-based accessors, this is infallible: the root plan is
    /// owned by the Partial and remains available regardless of state.
    #[inline]
    pub fn type_plan_core(&self) -> &crate::typeplan::TypePlanCore {
        &self.root_plan
    }
94
95    /// Returns the precomputed StructPlan for the current frame, if available.
96    ///
97    /// This provides O(1) or O(log n) field lookup instead of O(n) linear scanning.
98    /// Returns `None` if:
99    /// - The Partial is not active
100    /// - The current frame has no TypePlan (e.g., custom deserialization frames)
101    /// - The current type is not a struct
102    #[inline]
103    pub fn struct_plan(&self) -> Option<&crate::typeplan::StructPlan> {
104        if self.state != PartialState::Active {
105            return None;
106        }
107        let frame = self.frames().last()?;
108        self.root_plan.struct_plan_by_id(frame.type_plan)
109    }
110
111    /// Returns the precomputed EnumPlan for the current frame, if available.
112    ///
113    /// This provides O(1) or O(log n) variant lookup instead of O(n) linear scanning.
114    /// Returns `None` if:
115    /// - The Partial is not active
116    /// - The current type is not an enum
117    #[inline]
118    pub fn enum_plan(&self) -> Option<&crate::typeplan::EnumPlan> {
119        if self.state != PartialState::Active {
120            return None;
121        }
122        let frame = self.frames().last()?;
123        self.root_plan.enum_plan_by_id(frame.type_plan)
124    }
125
126    /// Returns the precomputed field plans for the current frame.
127    ///
128    /// This provides access to precomputed validators and default handling without
129    /// runtime attribute scanning.
130    ///
131    /// Returns `None` if the current type is not a struct or enum variant.
132    #[inline]
133    pub fn field_plans(&self) -> Option<&[crate::typeplan::FieldPlan]> {
134        use crate::typeplan::TypePlanNodeKind;
135        let frame = self.frames().last().unwrap();
136        let node = self.root_plan.node(frame.type_plan);
137        match &node.kind {
138            TypePlanNodeKind::Struct(struct_plan) => {
139                Some(self.root_plan.fields(struct_plan.fields))
140            }
141            TypePlanNodeKind::Enum(enum_plan) => {
142                // For enums, we need the variant index from the tracker
143                if let crate::partial::Tracker::Enum { variant_idx, .. } = &frame.tracker {
144                    self.root_plan
145                        .variants(enum_plan.variants)
146                        .get(*variant_idx)
147                        .map(|v| self.root_plan.fields(v.fields))
148                } else {
149                    None
150                }
151            }
152            _ => None,
153        }
154    }
155
156    /// Returns the precomputed TypePlanNode for the current frame.
157    ///
158    /// This provides access to the precomputed deserialization strategy and
159    /// other metadata computed at Partial allocation time.
160    ///
161    /// Returns `None` if:
162    /// - The Partial is not active
163    /// - There are no frames
164    #[inline]
165    pub fn plan_node(&self) -> Option<&crate::typeplan::TypePlanNode> {
166        if self.state != PartialState::Active {
167            return None;
168        }
169        let frame = self.frames().last()?;
170        Some(self.root_plan.node(frame.type_plan))
171    }
172
173    /// Returns the node ID for the current frame's type plan.
174    ///
175    /// Returns `None` if:
176    /// - The Partial is not active
177    /// - There are no frames
178    #[inline]
179    pub fn plan_node_id(&self) -> Option<crate::typeplan::NodeId> {
180        if self.state != PartialState::Active {
181            return None;
182        }
183        let frame = self.frames().last()?;
184        Some(frame.type_plan)
185    }
186
187    /// Returns the precomputed deserialization strategy for the current frame.
188    ///
189    /// This tells facet-format exactly how to deserialize the current type without
190    /// runtime inspection of Shape/Def/vtable. The strategy is computed once at
191    /// TypePlan build time.
192    ///
193    /// If the current node is a BackRef (recursive type), this automatically
194    /// follows the reference to return the target node's strategy.
195    ///
196    /// Returns `None` if:
197    /// - The Partial is not active
198    /// - There are no frames
199    #[inline]
200    pub fn deser_strategy(&self) -> Option<&DeserStrategy> {
201        let node = self.plan_node()?;
202        // Resolve BackRef if needed - resolve_backref returns the node unchanged if not a BackRef
203        let resolved = self.root_plan.resolve_backref(node);
204        Some(&resolved.strategy)
205    }
206
207    /// Returns the precomputed proxy nodes for the current frame's type.
208    ///
209    /// These contain TypePlan nodes for all proxies (format-agnostic and format-specific)
210    /// on this type, allowing runtime lookup based on format namespace.
211    #[inline]
212    pub fn proxy_nodes(&self) -> Option<&crate::typeplan::ProxyNodes> {
213        let node = self.plan_node()?;
214        let resolved = self.root_plan.resolve_backref(node);
215        Some(&resolved.proxies)
216    }
217
218    /// Returns true if the current frame is building a smart pointer slice (Arc<\[T\]>, Rc<\[T\]>, Box<\[T\]>).
219    ///
220    /// This is used by deserializers to determine if they should deserialize as a list
221    /// rather than recursing into the smart pointer type.
222    #[inline]
223    pub fn is_building_smart_ptr_slice(&self) -> bool {
224        if self.state != PartialState::Active {
225            return false;
226        }
227        self.frames()
228            .last()
229            .is_some_and(|f| matches!(f.tracker, Tracker::SmartPointerSlice { .. }))
230    }
231
232    /// Returns the current path in deferred mode (for debugging/tracing).
233    #[inline]
234    pub fn current_path(&self) -> Option<facet_path::Path> {
235        if self.is_deferred() {
236            Some(self.derive_path())
237        } else {
238            None
239        }
240    }
241
    /// Checks if the current frame should be stored for deferred processing.
    ///
    /// Historically this decided between storing a frame for later re-entry
    /// and validating it immediately, based on re-entrancy, ownership, and
    /// smart-pointer-parent constraints. Since PR #2019 every frame has stable
    /// storage (see the body comment), so all frames can safely be stored and
    /// this now always returns `true`.
    ///
    /// Kept as a method (rather than inlined at call sites) so the storage
    /// policy has a single place to change should a non-storable case return.
    fn should_store_frame_for_deferred(&self) -> bool {
        // In deferred mode, all frames have stable memory and can be stored.
        // PR #2019 added stable storage for all container elements (ListRope for Vec,
        // pending_entries for Map, pending_inner for Option).
        true
    }
258
259    /// Enables deferred materialization mode with the given Resolution.
260    ///
261    /// When deferred mode is enabled:
262    /// - `end()` stores frames instead of validating them
263    /// - Re-entering a path restores the stored frame with its state intact
264    /// - `finish_deferred()` performs final validation and materialization
265    ///
266    /// This allows deserializers to handle interleaved fields (e.g., TOML dotted
267    /// keys, flattened structs) where nested fields aren't contiguous in the input.
268    ///
269    /// # Use Cases
270    ///
271    /// - TOML dotted keys: `inner.x = 1` followed by `count = 2` then `inner.y = 3`
272    /// - Flattened structs where nested fields appear at the parent level
273    /// - Any format where field order doesn't match struct nesting
274    ///
275    /// # Errors
276    ///
277    /// Returns an error if already in deferred mode.
278    #[inline]
279    pub fn begin_deferred(mut self) -> Result<Self, ReflectError> {
280        // Cannot enable deferred mode if already in deferred mode
281        if self.is_deferred() {
282            return Err(self.err(ReflectErrorKind::InvariantViolation {
283                invariant: "begin_deferred() called but already in deferred mode",
284            }));
285        }
286
287        // Take the stack out of Strict mode and wrap in Deferred mode
288        let FrameMode::Strict { stack } = core::mem::replace(
289            &mut self.mode,
290            FrameMode::Strict { stack: Vec::new() }, // temporary placeholder
291        ) else {
292            unreachable!("just checked we're not in deferred mode");
293        };
294
295        let start_depth = stack.len();
296        self.mode = FrameMode::Deferred {
297            stack,
298            start_depth,
299            stored_frames: BTreeMap::new(),
300        };
301        Ok(self)
302    }
303
304    /// Finishes deferred mode: validates all stored frames and finalizes.
305    ///
306    /// This method:
307    /// 1. Validates that all stored frames are fully initialized
308    /// 2. Processes frames from deepest to shallowest, updating parent ISets
309    /// 3. Validates the root frame
310    ///
311    /// # Errors
312    ///
313    /// Returns an error if any required fields are missing or if the partial is
314    /// not in deferred mode.
315    pub fn finish_deferred(mut self) -> Result<Self, ReflectError> {
316        // Check if we're in deferred mode first, before extracting state
317        if !self.is_deferred() {
318            return Err(self.err(ReflectErrorKind::InvariantViolation {
319                invariant: "finish_deferred() called but deferred mode is not enabled",
320            }));
321        }
322
323        // Extract deferred state, transitioning back to Strict mode
324        let FrameMode::Deferred {
325            stack,
326            mut stored_frames,
327            ..
328        } = core::mem::replace(&mut self.mode, FrameMode::Strict { stack: Vec::new() })
329        else {
330            unreachable!("just checked is_deferred()");
331        };
332
333        // Restore the stack to self.mode
334        self.mode = FrameMode::Strict { stack };
335
336        // Sort paths by depth (deepest first) so we process children before parents.
337        // For equal-depth paths, we need stable ordering for list elements:
338        // Index(0) must be processed before Index(1) to maintain insertion order.
339        let mut paths: Vec<_> = stored_frames.keys().cloned().collect();
340        paths.sort_by(|a, b| {
341            // Primary: deeper paths first
342            let depth_cmp = b.len().cmp(&a.len());
343            if depth_cmp != core::cmp::Ordering::Equal {
344                return depth_cmp;
345            }
346            // Secondary: for same-depth paths, compare step by step
347            // This ensures Index(0) comes before Index(1) for the same parent
348            for (step_a, step_b) in a.steps.iter().zip(b.steps.iter()) {
349                let step_cmp = step_a.cmp(step_b);
350                if step_cmp != core::cmp::Ordering::Equal {
351                    return step_cmp;
352                }
353            }
354            core::cmp::Ordering::Equal
355        });
356
357        trace!(
358            "finish_deferred: Processing {} stored frames in order: {:?}",
359            paths.len(),
360            paths
361        );
362
363        // Process each stored frame from deepest to shallowest
364        for path in paths {
365            let mut frame = stored_frames.remove(&path).unwrap();
366
367            trace!(
368                "finish_deferred: Processing frame at {:?}, shape {}, tracker {:?}",
369                path,
370                frame.allocated.shape(),
371                frame.tracker.kind()
372            );
373
374            // Special handling for SmartPointerSlice: convert builder to Arc<[T]> before validation
375            if let Tracker::SmartPointerSlice { vtable, .. } = &frame.tracker {
376                let vtable = *vtable;
377                let current_shape = frame.allocated.shape();
378
379                // Convert the builder to Arc<[T]>
380                let builder_ptr = unsafe { frame.data.assume_init() };
381                let arc_ptr = unsafe { (vtable.convert_fn)(builder_ptr) };
382
383                trace!(
384                    "finish_deferred: Converting SmartPointerSlice builder to {}",
385                    current_shape
386                );
387
388                // Handle different ownership cases
389                match frame.ownership {
390                    FrameOwnership::Field { field_idx } => {
391                        // Arc<[T]> is a field in a struct
392                        // Find the parent frame and write the Arc to the field location
393                        let parent_path = facet_path::Path {
394                            shape: path.shape,
395                            steps: path.steps[..path.steps.len() - 1].to_vec(),
396                        };
397
398                        // Paths are absolute from the root, so the parent frame lives at
399                        // stack[parent_path.steps.len()] when it's still on the stack.
400                        let parent_frame_opt =
401                            if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
402                                Some(parent_frame)
403                            } else {
404                                self.frames_mut().get_mut(parent_path.steps.len())
405                            };
406
407                        if let Some(parent_frame) = parent_frame_opt {
408                            // Get the field to find its offset
409                            if let Type::User(UserType::Struct(struct_type)) =
410                                parent_frame.allocated.shape().ty
411                            {
412                                let field = &struct_type.fields[field_idx];
413
414                                // Calculate where the Arc should be written (parent.data + field.offset)
415                                let field_location =
416                                    unsafe { parent_frame.data.field_uninit(field.offset) };
417
418                                // Write the Arc to the parent struct's field location
419                                if let Ok(arc_layout) = current_shape.layout.sized_layout() {
420                                    let arc_size = arc_layout.size();
421                                    unsafe {
422                                        core::ptr::copy_nonoverlapping(
423                                            arc_ptr.as_byte_ptr(),
424                                            field_location.as_mut_byte_ptr(),
425                                            arc_size,
426                                        );
427                                    }
428
429                                    // Free the staging allocation from convert_fn
430                                    unsafe {
431                                        ::alloc::alloc::dealloc(
432                                            arc_ptr.as_byte_ptr() as *mut u8,
433                                            arc_layout,
434                                        );
435                                    }
436
437                                    // Update the frame to point to the correct field location and mark as initialized
438                                    frame.data = field_location;
439                                    frame.tracker = Tracker::Scalar;
440                                    frame.is_init = true;
441
442                                    trace!(
443                                        "finish_deferred: SmartPointerSlice converted and written to field {}",
444                                        field_idx
445                                    );
446                                }
447                            }
448                        }
449                    }
450                    FrameOwnership::Owned => {
451                        // Arc<[T]> is the root - write in place
452                        if let Ok(arc_layout) = current_shape.layout.sized_layout() {
453                            let arc_size = arc_layout.size();
454                            // Allocate new memory for the Arc
455                            let new_ptr = facet_core::alloc_for_layout(arc_layout);
456                            unsafe {
457                                core::ptr::copy_nonoverlapping(
458                                    arc_ptr.as_byte_ptr(),
459                                    new_ptr.as_mut_byte_ptr(),
460                                    arc_size,
461                                );
462                            }
463                            // Free the staging allocation
464                            unsafe {
465                                ::alloc::alloc::dealloc(
466                                    arc_ptr.as_byte_ptr() as *mut u8,
467                                    arc_layout,
468                                );
469                            }
470                            frame.data = new_ptr;
471                            frame.tracker = Tracker::Scalar;
472                            frame.is_init = true;
473                        }
474                    }
475                    _ => {}
476                }
477            }
478
479            // Fill in defaults for unset fields that have defaults
480            if let Err(e) = frame.fill_defaults() {
481                // Before cleanup, clear the parent's iset bit for the frame that failed.
482                // This prevents the parent from trying to drop this field when Partial is dropped.
483                Self::clear_parent_iset_for_path(&path, self.frames_mut(), &mut stored_frames);
484                // Consume-time invariant: pending_entries/pending_inner are only populated
485                // by the walk after validation succeeds. A frame that fails here hasn't
486                // been transferred anywhere, so its own deinit/dealloc is the sole cleanup.
487                frame.deinit();
488                frame.dealloc();
489                // Clean up remaining stored frames safely (deepest first, clearing parent isets)
490                Self::cleanup_stored_frames_on_error(stored_frames, self.frames_mut());
491                return Err(self.err(e));
492            }
493
494            // Validate the frame is fully initialized
495            if let Err(e) = frame.require_full_initialization() {
496                // Before cleanup, clear the parent's iset bit for the frame that failed.
497                // This prevents the parent from trying to drop this field when Partial is dropped.
498                Self::clear_parent_iset_for_path(&path, self.frames_mut(), &mut stored_frames);
499                // Consume-time invariant: frame hasn't been transferred yet.
500                frame.deinit();
501                frame.dealloc();
502                // Clean up remaining stored frames safely (deepest first, clearing parent isets)
503                Self::cleanup_stored_frames_on_error(stored_frames, self.frames_mut());
504                return Err(self.err(e));
505            }
506
507            // Update parent's ISet to mark this field as initialized.
508            // The parent lives either in stored_frames (if it was ended during deferred mode)
509            // or on the frames stack at index parent_path.steps.len() (paths are absolute).
510            if let Some(last_step) = path.steps.last() {
511                // Construct parent path (same shape, all steps except the last one)
512                let parent_path = facet_path::Path {
513                    shape: path.shape,
514                    steps: path.steps[..path.steps.len() - 1].to_vec(),
515                };
516
517                // Special handling for Option inner values: when path ends with OptionSome,
518                // the parent is an Option frame and we need to complete the Option by
519                // writing the inner value into the Option's memory.
520                if matches!(last_step, PathStep::OptionSome) {
521                    // Find the Option frame (parent)
522                    let option_frame =
523                        if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
524                            Some(parent_frame)
525                        } else {
526                            self.frames_mut().get_mut(parent_path.steps.len())
527                        };
528
529                    if let Some(option_frame) = option_frame {
530                        // The frame contains the inner value - write it into the Option's memory
531                        Self::complete_option_frame(option_frame, frame);
532                        // Frame data has been transferred to Option - don't drop it
533                        continue;
534                    }
535                }
536
537                // Special handling for SmartPointer inner values: when path ends with Deref,
538                // the parent is a SmartPointer frame and we need to complete it by
539                // creating the SmartPointer from the inner value.
540                if matches!(last_step, PathStep::Deref) {
541                    // Find the SmartPointer frame (parent)
542                    let smart_ptr_frame =
543                        if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
544                            Some(parent_frame)
545                        } else {
546                            self.frames_mut().get_mut(parent_path.steps.len())
547                        };
548
549                    if let Some(smart_ptr_frame) = smart_ptr_frame {
550                        // The frame contains the inner value - create the SmartPointer from it
551                        Self::complete_smart_pointer_frame(smart_ptr_frame, frame);
552                        // Frame data has been transferred to SmartPointer - don't drop it
553                        continue;
554                    }
555                }
556
557                // Special handling for Inner values: when path ends with Inner,
558                // the parent is a transparent wrapper (NonZero, ByteString, etc.) and we need
559                // to convert the inner value to the parent type using try_from.
560                if matches!(last_step, PathStep::Inner) {
561                    // Find the parent frame (Inner wrapper)
562                    let parent_frame =
563                        if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
564                            Some(parent_frame)
565                        } else {
566                            self.frames_mut().get_mut(parent_path.steps.len())
567                        };
568
569                    if let Some(inner_wrapper_frame) = parent_frame {
570                        // The frame contains the inner value - convert to parent type using try_from
571                        Self::complete_inner_frame(inner_wrapper_frame, frame);
572                        // Frame data has been transferred - don't drop it
573                        continue;
574                    }
575                }
576
577                // Special handling for Proxy values: when path ends with Proxy,
578                // the parent is the target type (e.g., Inner) and we need to convert
579                // the proxy value (e.g., InnerProxy) using the proxy's convert_in.
580                if matches!(last_step, PathStep::Proxy) {
581                    // Find the parent frame (the proxy target)
582                    let parent_frame =
583                        if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
584                            Some(parent_frame)
585                        } else {
586                            self.frames_mut().get_mut(parent_path.steps.len())
587                        };
588
589                    if let Some(target_frame) = parent_frame {
590                        Self::complete_proxy_frame(target_frame, frame);
591                        continue;
592                    }
593                }
594
595                // Special handling for List/SmartPointerSlice element values: when path
596                // ends with Index, the parent is a List or SmartPointerSlice frame and we
597                // need to push the element into it. RopeSlot frames live in the parent
598                // rope's slot: they don't get "pushed" here (the slot is pre-allocated),
599                // but we DO need to mark the slot initialized now that the element has
600                // passed validation, so the rope knows it's safe to drop on error.
601                if matches!(last_step, PathStep::Index(_)) {
602                    // Find the parent frame (List or SmartPointerSlice)
603                    let parent_frame =
604                        if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
605                            Some(parent_frame)
606                        } else {
607                            self.frames_mut().get_mut(parent_path.steps.len())
608                        };
609
610                    if let Some(parent_frame) = parent_frame {
611                        if matches!(frame.ownership, FrameOwnership::RopeSlot) {
612                            // Element already lives in rope slot. Mark it initialized now
613                            // that validation passed (consume-time). Frame is dropped
614                            // silently — no Drop impl, rope owns the buffer.
615                            if let Tracker::List {
616                                rope: Some(rope), ..
617                            } = &mut parent_frame.tracker
618                            {
619                                rope.mark_last_initialized();
620                            }
621                            continue;
622                        }
623                        // Check if parent is a SmartPointerSlice (e.g., Arc<[T]>)
624                        if matches!(parent_frame.tracker, Tracker::SmartPointerSlice { .. }) {
625                            Self::complete_smart_pointer_slice_item_frame(parent_frame, frame);
626                            // Frame data has been transferred to slice builder - don't drop it
627                            continue;
628                        }
629                        // Otherwise try List handling
630                        Self::complete_list_item_frame(parent_frame, frame);
631                        // Frame data has been transferred to List - don't drop it
632                        continue;
633                    }
634                }
635
636                // Special handling for Map key values: when path ends with MapKey,
637                // the parent is a Map frame and we need to push the key into
638                // pending_entries at the matching entry_idx.
639                if let PathStep::MapKey(entry_idx) = last_step {
640                    let entry_idx = *entry_idx;
641                    // Find the Map frame (parent)
642                    let map_frame = if let Some(parent_frame) = stored_frames.get_mut(&parent_path)
643                    {
644                        Some(parent_frame)
645                    } else {
646                        self.frames_mut().get_mut(parent_path.steps.len())
647                    };
648
649                    if let Some(map_frame) = map_frame {
650                        Self::complete_map_key_frame(map_frame, entry_idx, frame);
651                        continue;
652                    }
653                }
654
655                // Special handling for Map value values: when path ends with MapValue,
656                // the parent is a Map frame and we need to fill in the value for the
657                // half-entry at the matching entry_idx.
658                if let PathStep::MapValue(entry_idx) = last_step {
659                    let entry_idx = *entry_idx;
660                    // Find the Map frame (parent)
661                    let map_frame = if let Some(parent_frame) = stored_frames.get_mut(&parent_path)
662                    {
663                        Some(parent_frame)
664                    } else {
665                        self.frames_mut().get_mut(parent_path.steps.len())
666                    };
667
668                    if let Some(map_frame) = map_frame {
669                        Self::complete_map_value_frame(map_frame, entry_idx, frame);
670                        continue;
671                    }
672                }
673
674                // Only mark field initialized if the step is actually a Field
675                if let PathStep::Field(field_idx) = last_step {
676                    let field_idx = *field_idx as usize;
677                    // Paths are absolute from the root, so the parent frame lives at
678                    // stack[parent_path.steps.len()] when it's still on the stack.
679                    let parent_frame =
680                        if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
681                            Some(parent_frame)
682                        } else {
683                            self.frames_mut().get_mut(parent_path.steps.len())
684                        };
685                    if let Some(parent_frame) = parent_frame {
686                        Self::mark_field_initialized_by_index(parent_frame, field_idx);
687                    }
688                }
689            }
690
691            // Frame is validated and parent is updated - dealloc if needed
692            frame.dealloc();
693        }
694
695        // Invariant check: we must have at least one frame after finish_deferred
696        if self.frames().is_empty() {
697            // No need to poison - returning Err consumes self, Drop will handle cleanup
698            return Err(self.err(ReflectErrorKind::InvariantViolation {
699                invariant: "finish_deferred() left Partial with no frames",
700            }));
701        }
702
703        // Fill defaults and validate the root frame is fully initialized
704        if let Some(frame) = self.frames_mut().last_mut() {
705            // Fill defaults - this can fail if a field has #[facet(default)] but no default impl
706            if let Err(e) = frame.fill_defaults() {
707                return Err(self.err(e));
708            }
709            // Root validation failed. At this point, all stored frames have been
710            // processed and their parent isets updated.
711            // No need to poison - returning Err consumes self, Drop will handle cleanup
712            if let Err(e) = frame.require_full_initialization() {
713                return Err(self.err(e));
714            }
715        }
716
717        Ok(self)
718    }
719
720    /// Mark a field as initialized in a frame's tracker by index
721    fn mark_field_initialized_by_index(frame: &mut Frame, idx: usize) {
722        crate::trace!(
723            "mark_field_initialized_by_index: idx={}, frame shape={}, tracker={:?}",
724            idx,
725            frame.allocated.shape(),
726            frame.tracker.kind()
727        );
728
729        // If the tracker is Scalar but this is a struct type, upgrade to Struct tracker.
730        // This can happen if the frame was deinit'd (e.g., by a failed set_default)
731        // which resets the tracker to Scalar.
732        if matches!(frame.tracker, Tracker::Scalar)
733            && let Type::User(UserType::Struct(struct_type)) = frame.allocated.shape().ty
734        {
735            frame.tracker = Tracker::Struct {
736                iset: ISet::new(struct_type.fields.len()),
737                current_child: None,
738            };
739        }
740
741        match &mut frame.tracker {
742            Tracker::Struct { iset, .. } => {
743                crate::trace!("mark_field_initialized_by_index: setting iset for struct");
744                iset.set(idx);
745            }
746            Tracker::Enum { data, .. } => {
747                crate::trace!(
748                    "mark_field_initialized_by_index: setting data for enum, before={:?}",
749                    data
750                );
751                data.set(idx);
752                crate::trace!(
753                    "mark_field_initialized_by_index: setting data for enum, after={:?}",
754                    data
755                );
756            }
757            Tracker::Array { iset, .. } => {
758                crate::trace!("mark_field_initialized_by_index: setting iset for array");
759                iset.set(idx);
760            }
761            _ => {
762                crate::trace!(
763                    "mark_field_initialized_by_index: no match for tracker {:?}",
764                    frame.tracker.kind()
765                );
766            }
767        }
768    }
769
770    /// Clear a parent frame's iset bit for a given path.
771    /// The parent could be on the stack or in stored_frames.
772    fn clear_parent_iset_for_path(
773        path: &Path,
774        stack: &mut [Frame],
775        stored_frames: &mut ::alloc::collections::BTreeMap<Path, Frame>,
776    ) {
777        let Some(&PathStep::Field(field_idx)) = path.steps.last() else {
778            return;
779        };
780        let field_idx = field_idx as usize;
781        let parent_path = Path {
782            shape: path.shape,
783            steps: path.steps[..path.steps.len() - 1].to_vec(),
784        };
785
786        // Paths are absolute from the root; the frame at a given path lives at
787        // stack[path.steps.len()], so the parent lives at stack[parent_path.steps.len()].
788        let parent_frame = if let Some(parent_frame) = stored_frames.get_mut(&parent_path) {
789            Some(parent_frame)
790        } else {
791            stack.get_mut(parent_path.steps.len())
792        };
793        if let Some(parent_frame) = parent_frame {
794            Self::unset_field_in_tracker(&mut parent_frame.tracker, field_idx);
795        }
796    }
797
798    // NOTE: `sever_parent_pending_for_path` has been removed. Under the consume-time
799    // pending-population invariant, parent pending slots (pending_entries, pending_inner,
800    // etc.) are only populated by the walk in `finish_deferred` AFTER a child frame's
801    // validation passes. A failing child frame has its buffer still owned by itself, so
802    // `frame.deinit(); frame.dealloc()` is the complete cleanup. No parent-side sever
803    // is required.
804
805    /// Helper to unset a field index in a tracker's iset
806    fn unset_field_in_tracker(tracker: &mut Tracker, field_idx: usize) {
807        match tracker {
808            Tracker::Struct { iset, .. } => {
809                iset.unset(field_idx);
810            }
811            Tracker::Enum { data, .. } => {
812                data.unset(field_idx);
813            }
814            Tracker::Array { iset, .. } => {
815                iset.unset(field_idx);
816            }
817            _ => {}
818        }
819    }
820
    /// Safely clean up stored frames on error in finish_deferred.
    ///
    /// This mirrors the cleanup logic in Drop: process frames deepest-first and
    /// clear parent's iset bits before deiniting children to prevent double-drops.
    ///
    /// Takes ownership of `stored_frames` (everything in it is consumed); `stack`
    /// is only touched to clear parent iset bits for frames whose parent is still
    /// on the stack.
    fn cleanup_stored_frames_on_error(
        mut stored_frames: ::alloc::collections::BTreeMap<Path, Frame>,
        stack: &mut [Frame],
    ) {
        // Sort by depth (deepest first) so children are processed before parents
        let mut paths: Vec<_> = stored_frames.keys().cloned().collect();
        paths.sort_by_key(|p| core::cmp::Reverse(p.steps.len()));

        trace!(
            "cleanup_stored_frames_on_error: {} frames to clean, paths: {:?}",
            paths.len(),
            paths
        );

        // First pass: diagnostic dump only. Every statement here is a trace!
        // invocation, so this loop is a no-op when tracing is disabled.
        for path in &paths {
            if let Some(frame) = stored_frames.get(path) {
                trace!(
                    "cleanup: processing path={:?}, shape={}, tracker={:?}, is_init={}, ownership={:?}",
                    path,
                    frame.allocated.shape(),
                    frame.tracker.kind(),
                    frame.is_init,
                    frame.ownership,
                );
                // Dump iset contents for struct/enum trackers
                match &frame.tracker {
                    Tracker::Struct { iset: _iset, .. } => {
                        trace!("cleanup:   Struct iset = {:?}", _iset);
                    }
                    Tracker::Enum {
                        variant: _variant,
                        data: _data,
                        ..
                    } => {
                        trace!("cleanup:   Enum {:?} data = {:?}", _variant.name, _data);
                    }
                    _ => {}
                }
            }
        }

        // Second pass: actually tear the frames down, deepest-first (the `paths`
        // ordering from above). Removing a frame from the map before clearing its
        // parent's iset means the parent lookup below never aliases the frame
        // being destroyed.
        for path in paths {
            if let Some(mut frame) = stored_frames.remove(&path) {
                trace!(
                    "cleanup: REMOVING path={:?}, shape={}, tracker={:?}",
                    path,
                    frame.allocated.shape(),
                    frame.tracker.kind(),
                );
                // Before dropping this frame, clear the parent's iset bit so the
                // parent won't try to drop this field again.
                Self::clear_parent_iset_for_path(&path, stack, &mut stored_frames);
                // Under the consume-time SSoT invariant, stored frames always own
                // their own buffer (parent pending slots are only populated at walk
                // consume-time, after validation). Standard deinit + dealloc handles
                // cleanup; no parent pending-slot severing is needed.
                trace!("cleanup: calling deinit() on path={:?}", path,);
                frame.deinit();
                frame.dealloc();
            }
        }
    }
887
888    /// Complete an Option frame by writing the inner value and marking it initialized.
889    /// Used in finish_deferred when processing a stored frame at a path ending with "Some".
890    fn complete_option_frame(option_frame: &mut Frame, inner_frame: Frame) {
891        if let Def::Option(option_def) = option_frame.allocated.shape().def {
892            // Use the Option vtable to initialize Some(inner_value)
893            let init_some_fn = option_def.vtable.init_some;
894
895            // The inner frame contains the inner value
896            let inner_value_ptr = unsafe { inner_frame.data.assume_init() };
897
898            // Initialize the Option as Some(inner_value)
899            unsafe {
900                init_some_fn(option_frame.data, inner_value_ptr);
901            }
902
903            // Deallocate the inner value's memory since init_some_fn moved it
904            if let FrameOwnership::Owned = inner_frame.ownership
905                && let Ok(layout) = inner_frame.allocated.shape().layout.sized_layout()
906                && layout.size() > 0
907            {
908                unsafe {
909                    ::alloc::alloc::dealloc(inner_frame.data.as_mut_byte_ptr(), layout);
910                }
911            }
912
913            // Mark the Option as initialized
914            option_frame.tracker = Tracker::Option {
915                building_inner: false,
916                pending_inner: None,
917            };
918            option_frame.is_init = true;
919        }
920    }
921
    /// Complete a smart-pointer frame by moving the inner value into a
    /// freshly-created smart pointer (for deferred finalization).
    ///
    /// Two paths:
    /// - sized pointee: the pointer vtable's `new_into_fn` moves the value in;
    /// - `str` pointee built from a `String`: manual `into_boxed_str`-based
    ///   conversion for the known pointer kinds (`Box`/`Arc`/`Rc`).
    ///
    /// Does nothing when the frame is not `Def::Pointer` or no conversion path
    /// applies.
    fn complete_smart_pointer_frame(smart_ptr_frame: &mut Frame, inner_frame: Frame) {
        if let Def::Pointer(smart_ptr_def) = smart_ptr_frame.allocated.shape().def {
            // Use the SmartPointer vtable to create the smart pointer from the inner value
            if let Some(new_into_fn) = smart_ptr_def.vtable.new_into_fn {
                // Sized pointee case: use new_into_fn
                // `assume_init` result is discarded — this only asserts the inner
                // frame's buffer is initialized before handing its pointer over.
                let _ = unsafe { inner_frame.data.assume_init() };

                // Create the SmartPointer with the inner value
                unsafe {
                    new_into_fn(
                        smart_ptr_frame.data,
                        PtrMut::new(inner_frame.data.as_mut_byte_ptr()),
                    );
                }

                // Deallocate the inner value's memory since new_into_fn moved it
                // (only when the frame owned its buffer and the type isn't a ZST)
                if let FrameOwnership::Owned = inner_frame.ownership
                    && let Ok(layout) = inner_frame.allocated.shape().layout.sized_layout()
                    && layout.size() > 0
                {
                    unsafe {
                        ::alloc::alloc::dealloc(inner_frame.data.as_mut_byte_ptr(), layout);
                    }
                }

                // Mark the SmartPointer as initialized
                smart_ptr_frame.tracker = Tracker::SmartPointer {
                    building_inner: false,
                    pending_inner: None,
                };
                smart_ptr_frame.is_init = true;
            } else if let Some(pointee) = smart_ptr_def.pointee()
                && pointee.is_shape(str::SHAPE)
                && inner_frame.allocated.shape().is_shape(String::SHAPE)
            {
                // Unsized pointee case: String -> Arc<str>/Box<str>/Rc<str> conversion
                use ::alloc::{rc::Rc, string::String, sync::Arc};
                use facet_core::KnownPointer;

                let Some(known) = smart_ptr_def.known else {
                    return;
                };

                // Read the String value from the inner frame
                // (moves the String's bits out; the frame's buffer keeps stale bytes)
                let string_ptr = inner_frame.data.as_mut_byte_ptr() as *mut String;
                let string_value = unsafe { core::ptr::read(string_ptr) };

                // Convert to the appropriate smart pointer type
                match known {
                    KnownPointer::Box => {
                        let boxed: ::alloc::boxed::Box<str> = string_value.into_boxed_str();
                        unsafe {
                            core::ptr::write(
                                smart_ptr_frame.data.as_mut_byte_ptr()
                                    as *mut ::alloc::boxed::Box<str>,
                                boxed,
                            );
                        }
                    }
                    KnownPointer::Arc => {
                        let arc: Arc<str> = Arc::from(string_value.into_boxed_str());
                        unsafe {
                            core::ptr::write(
                                smart_ptr_frame.data.as_mut_byte_ptr() as *mut Arc<str>,
                                arc,
                            );
                        }
                    }
                    KnownPointer::Rc => {
                        let rc: Rc<str> = Rc::from(string_value.into_boxed_str());
                        unsafe {
                            core::ptr::write(
                                smart_ptr_frame.data.as_mut_byte_ptr() as *mut Rc<str>,
                                rc,
                            );
                        }
                    }
                    // NOTE(review): `string_value` was already moved out via
                    // ptr::read above; returning here drops it while the inner
                    // frame's buffer still holds the stale String bytes — confirm
                    // no caller deinits that buffer afterwards (double-free risk).
                    _ => return,
                }

                // Deallocate the String's memory (we moved the data out via ptr::read)
                if let FrameOwnership::Owned = inner_frame.ownership
                    && let Ok(layout) = inner_frame.allocated.shape().layout.sized_layout()
                    && layout.size() > 0
                {
                    unsafe {
                        ::alloc::alloc::dealloc(inner_frame.data.as_mut_byte_ptr(), layout);
                    }
                }

                // Mark the SmartPointer as initialized
                smart_ptr_frame.tracker = Tracker::SmartPointer {
                    building_inner: false,
                    pending_inner: None,
                };
                smart_ptr_frame.is_init = true;
            }
        }
    }
1021
    /// Complete an Inner frame by converting the inner value to the parent type using try_from
    /// (for deferred finalization)
    ///
    /// Returns silently (leaving the wrapper uninitialized) if the wrapper's
    /// vtable has no `try_from`, or if the conversion reports `Unsupported`
    /// or `Failed`.
    fn complete_inner_frame(inner_wrapper_frame: &mut Frame, inner_frame: Frame) {
        let wrapper_shape = inner_wrapper_frame.allocated.shape();
        let inner_ptr = PtrConst::new(inner_frame.data.as_byte_ptr());
        let inner_shape = inner_frame.allocated.shape();

        // Handle Direct and Indirect vtables - both return TryFromOutcome
        let result = match wrapper_shape.vtable {
            facet_core::VTableErased::Direct(vt) => {
                if let Some(try_from_fn) = vt.try_from {
                    unsafe {
                        try_from_fn(
                            inner_wrapper_frame.data.as_mut_byte_ptr() as *mut (),
                            inner_shape,
                            inner_ptr,
                        )
                    }
                } else {
                    // No conversion available — leave the wrapper untouched.
                    return;
                }
            }
            facet_core::VTableErased::Indirect(vt) => {
                if let Some(try_from_fn) = vt.try_from {
                    let ox_uninit =
                        facet_core::OxPtrUninit::new(inner_wrapper_frame.data, wrapper_shape);
                    unsafe { try_from_fn(ox_uninit, inner_shape, inner_ptr) }
                } else {
                    // No conversion available — leave the wrapper untouched.
                    return;
                }
            }
        };

        match result {
            TryFromOutcome::Converted => {
                crate::trace!(
                    "complete_inner_frame: converted {} to {}",
                    inner_shape,
                    wrapper_shape
                );
            }
            TryFromOutcome::Unsupported | TryFromOutcome::Failed(_) => {
                crate::trace!(
                    "complete_inner_frame: conversion failed from {} to {}",
                    inner_shape,
                    wrapper_shape
                );
                // NOTE(review): on this path the inner frame's buffer is not
                // deallocated here — presumably the caller's cleanup handles it;
                // verify against finish_deferred's error handling.
                return;
            }
        }

        // Deallocate the inner value's memory (try_from consumed it)
        if let FrameOwnership::Owned = inner_frame.ownership
            && let Ok(layout) = inner_frame.allocated.shape().layout.sized_layout()
            && layout.size() > 0
        {
            unsafe {
                ::alloc::alloc::dealloc(inner_frame.data.as_mut_byte_ptr(), layout);
            }
        }

        // Mark the wrapper as initialized
        inner_wrapper_frame.tracker = Tracker::Scalar;
        inner_wrapper_frame.is_init = true;
    }
1087
    /// Complete a proxy conversion during deferred finalization.
    ///
    /// This handles proxy types (e.g., `#[facet(proxy = InnerProxy)]`) that were
    /// deferred during flatten deserialization. The proxy frame's children (e.g.,
    /// `Vec<f64>` fields) have already been materialized (ropes drained), so it's
    /// now safe to run the conversion.
    ///
    /// Returns silently (leaving the target uninitialized) if the proxy frame
    /// carries no `shape_level_proxy`, if `convert_in` fails, or if `convert_in`
    /// returns a pointer other than the target frame's buffer.
    fn complete_proxy_frame(target_frame: &mut Frame, proxy_frame: Frame) {
        // Get the convert_in function from the proxy stored on the frame
        let Some(proxy_def) = proxy_frame.shape_level_proxy else {
            crate::trace!(
                "complete_proxy_frame: no shape_level_proxy on frame {}",
                proxy_frame.allocated.shape()
            );
            return;
        };
        let convert_in = proxy_def.convert_in;

        // Underscore-prefixed so the bindings don't warn when tracing is compiled out.
        let _proxy_shape = proxy_frame.allocated.shape();
        let _target_shape = target_frame.allocated.shape();

        crate::trace!(
            "complete_proxy_frame: converting {} to {}",
            _proxy_shape,
            _target_shape
        );

        unsafe {
            let inner_value_ptr = proxy_frame.data.assume_init().as_const();
            let res = (convert_in)(inner_value_ptr, target_frame.data);

            match res {
                Ok(rptr) => {
                    // convert_in must write in place: the returned pointer has to
                    // be the target buffer we handed it, otherwise bail out.
                    if rptr.as_uninit() != target_frame.data {
                        crate::trace!(
                            "complete_proxy_frame: convert_in returned unexpected pointer"
                        );
                        return;
                    }
                }
                Err(_message) => {
                    crate::trace!("complete_proxy_frame: conversion failed: {}", _message);
                    return;
                }
            }
        }

        // Deallocate the proxy frame's memory (convert_in consumed it via ptr::read)
        if let FrameOwnership::Owned = proxy_frame.ownership
            && let Ok(layout) = proxy_frame.allocated.shape().layout.sized_layout()
            && layout.size() > 0
        {
            unsafe {
                ::alloc::alloc::dealloc(proxy_frame.data.as_mut_byte_ptr(), layout);
            }
        }

        // Mark the target as initialized
        target_frame.is_init = true;
    }
1147
1148    /// Complete a List frame by pushing an element into it (for deferred finalization)
1149    fn complete_list_item_frame(list_frame: &mut Frame, element_frame: Frame) {
1150        if let Def::List(list_def) = list_frame.allocated.shape().def
1151            && let Some(push_fn) = list_def.push()
1152        {
1153            // The element frame contains the element value
1154            let element_ptr = PtrMut::new(element_frame.data.as_mut_byte_ptr());
1155
1156            // Use push to add element to the list
1157            unsafe {
1158                push_fn(PtrMut::new(list_frame.data.as_mut_byte_ptr()), element_ptr);
1159            }
1160
1161            crate::trace!(
1162                "complete_list_item_frame: pushed element into {}",
1163                list_frame.allocated.shape()
1164            );
1165
1166            // Deallocate the element's memory since push moved it
1167            if let FrameOwnership::Owned = element_frame.ownership
1168                && let Ok(layout) = element_frame.allocated.shape().layout.sized_layout()
1169                && layout.size() > 0
1170            {
1171                unsafe {
1172                    ::alloc::alloc::dealloc(element_frame.data.as_mut_byte_ptr(), layout);
1173                }
1174            }
1175        }
1176    }
1177
1178    /// Complete a SmartPointerSlice element frame by pushing the element into the slice builder
1179    /// (for deferred finalization)
1180    fn complete_smart_pointer_slice_item_frame(
1181        slice_frame: &mut Frame,
1182        element_frame: Frame,
1183    ) -> bool {
1184        if let Tracker::SmartPointerSlice { vtable, .. } = &slice_frame.tracker {
1185            let vtable = *vtable;
1186            // The slice frame's data pointer IS the builder pointer
1187            let builder_ptr = slice_frame.data;
1188
1189            // Push the element into the builder
1190            unsafe {
1191                (vtable.push_fn)(
1192                    PtrMut::new(builder_ptr.as_mut_byte_ptr()),
1193                    PtrMut::new(element_frame.data.as_mut_byte_ptr()),
1194                );
1195            }
1196
1197            crate::trace!(
1198                "complete_smart_pointer_slice_item_frame: pushed element into builder for {}",
1199                slice_frame.allocated.shape()
1200            );
1201
1202            // Deallocate the element's memory since push moved it
1203            if let FrameOwnership::Owned = element_frame.ownership
1204                && let Ok(layout) = element_frame.allocated.shape().layout.sized_layout()
1205                && layout.size() > 0
1206            {
1207                unsafe {
1208                    ::alloc::alloc::dealloc(element_frame.data.as_mut_byte_ptr(), layout);
1209                }
1210            }
1211            return true;
1212        }
1213        false
1214    }
1215
1216    /// Complete a Map key frame by transferring the key buffer into `pending_entries`
1217    /// at position `entry_idx` as a half-entry `(key_ptr, None)`
1218    /// (for deferred finalization, walk consume-time).
1219    ///
1220    /// Called only from the `finish_deferred` walk, after `require_full_initialization`
1221    /// has validated the key frame. The `entry_idx` comes from the `PathStep::MapKey(idx)`
1222    /// and matches the map's `current_entry_index` assigned at `begin_key` time.
1223    /// MapKey frames for the same map are visited in ascending idx order, so pushing
1224    /// is equivalent to indexed insertion (asserted below).
1225    ///
1226    /// `key_frame` is dropped silently on return — Frame has no Drop impl, so
1227    /// `pending_entries` becomes the sole owner of this buffer.
1228    fn complete_map_key_frame(map_frame: &mut Frame, entry_idx: u32, key_frame: Frame) {
1229        if let Tracker::Map {
1230            pending_entries, ..
1231        } = &mut map_frame.tracker
1232        {
1233            debug_assert_eq!(
1234                pending_entries.len(),
1235                entry_idx as usize,
1236                "MapKey frames must arrive in ascending entry_idx order"
1237            );
1238            pending_entries.push((key_frame.data, None));
1239            crate::trace!(
1240                "complete_map_key_frame: pushed half-entry at idx {} for {}",
1241                entry_idx,
1242                map_frame.allocated.shape()
1243            );
1244        }
1245    }
1246
1247    /// Complete a Map value frame by upgrading the half-entry at position `entry_idx`
1248    /// `(key_ptr, None)` to a full `(key_ptr, Some(value_ptr))`
1249    /// (for deferred finalization, walk consume-time).
1250    ///
1251    /// Called only from the `finish_deferred` walk, after `require_full_initialization`
1252    /// has validated the value frame. The `entry_idx` comes from
1253    /// `PathStep::MapValue(idx)` and indexes the matching half-entry placed by
1254    /// `complete_map_key_frame`. `value_frame` is dropped silently on return —
1255    /// Frame has no Drop impl, so `pending_entries` becomes the sole owner of the
1256    /// buffer.
1257    fn complete_map_value_frame(map_frame: &mut Frame, entry_idx: u32, value_frame: Frame) {
1258        if let Tracker::Map {
1259            pending_entries, ..
1260        } = &mut map_frame.tracker
1261        {
1262            let slot = pending_entries
1263                .get_mut(entry_idx as usize)
1264                .expect("pending_entries must have a half-entry at entry_idx");
1265            debug_assert!(
1266                slot.1.is_none(),
1267                "pending entry at entry_idx must be a half-entry (None value), got Some"
1268            );
1269            slot.1 = Some(value_frame.data);
1270            crate::trace!(
1271                "complete_map_value_frame: upgraded half-entry at idx {} to full entry for {}",
1272                entry_idx,
1273                map_frame.allocated.shape()
1274            );
1275        }
1276    }
1277
1278    /// Pops the current frame off the stack, indicating we're done initializing the current field
1279    pub fn end(mut self) -> Result<Self, ReflectError> {
1280        // FAST PATH: Handle the common case of ending a simple scalar field in a struct.
1281        // This avoids all the edge-case checks (SmartPointerSlice, deferred mode, custom
1282        // deserialization, etc.) that dominate the slow path.
1283        if self.frames().len() >= 2 && !self.is_deferred() {
1284            let frames = self.frames_mut();
1285            let top_idx = frames.len() - 1;
1286            let parent_idx = top_idx - 1;
1287
1288            // Check if this is a simple scalar field being returned to a struct parent
1289            if let (
1290                Tracker::Scalar,
1291                true, // is_init
1292                FrameOwnership::Field { field_idx },
1293                false, // not using custom deserialization
1294            ) = (
1295                &frames[top_idx].tracker,
1296                frames[top_idx].is_init,
1297                frames[top_idx].ownership,
1298                frames[top_idx].using_custom_deserialization,
1299            ) && let Tracker::Struct {
1300                iset,
1301                current_child,
1302            } = &mut frames[parent_idx].tracker
1303            {
1304                // Fast path: just update parent's iset and pop
1305                iset.set(field_idx);
1306                *current_child = None;
1307                frames.pop();
1308                return Ok(self);
1309            }
1310        }
1311
1312        // SLOW PATH: Handle all the edge cases
1313
1314        // Strategic tracing: show the frame stack state
1315        #[cfg(feature = "tracing")]
1316        {
1317            use ::alloc::string::ToString;
1318            let frames = self.frames();
1319            let stack_desc: Vec<_> = frames
1320                .iter()
1321                .map(|f| ::alloc::format!("{}({:?})", f.allocated.shape(), f.tracker.kind()))
1322                .collect();
1323            let path = if self.is_deferred() {
1324                ::alloc::format!("{:?}", self.derive_path())
1325            } else {
1326                "N/A".to_string()
1327            };
1328            crate::trace!(
1329                "end() SLOW PATH: stack=[{}], deferred={}, path={}",
1330                stack_desc.join(" > "),
1331                self.is_deferred(),
1332                path
1333            );
1334        }
1335
1336        // Special handling for SmartPointerSlice - convert builder to Arc
1337        // Check if the current (top) frame is a SmartPointerSlice that needs conversion
1338        let needs_slice_conversion = {
1339            let frames = self.frames();
1340            if frames.is_empty() {
1341                false
1342            } else {
1343                let top_idx = frames.len() - 1;
1344                matches!(
1345                    frames[top_idx].tracker,
1346                    Tracker::SmartPointerSlice {
1347                        building_item: false,
1348                        ..
1349                    }
1350                )
1351            }
1352        };
1353
1354        if needs_slice_conversion {
1355            // In deferred mode, don't convert immediately - let finish_deferred handle it.
1356            // Set building_item = true and return early (matching non-deferred behavior).
1357            // The next end() call will store the frame.
1358            if self.is_deferred() {
1359                let frames = self.frames_mut();
1360                let top_idx = frames.len() - 1;
1361                if let Tracker::SmartPointerSlice { building_item, .. } =
1362                    &mut frames[top_idx].tracker
1363                {
1364                    *building_item = true;
1365                }
1366                return Ok(self);
1367            } else {
1368                // Get shape info upfront to avoid borrow conflicts
1369                let current_shape = self.frames().last().unwrap().allocated.shape();
1370
1371                let frames = self.frames_mut();
1372                let top_idx = frames.len() - 1;
1373
1374                if let Tracker::SmartPointerSlice { vtable, .. } = &frames[top_idx].tracker {
1375                    // Convert the builder to Arc<[T]>
1376                    let vtable = *vtable;
1377                    let builder_ptr = unsafe { frames[top_idx].data.assume_init() };
1378                    let arc_ptr = unsafe { (vtable.convert_fn)(builder_ptr) };
1379
1380                    match frames[top_idx].ownership {
1381                        FrameOwnership::Field { field_idx } => {
1382                            // Arc<[T]> is a field in a struct
1383                            // The field frame's original data pointer was overwritten with the builder pointer,
1384                            // so we need to reconstruct where the Arc should be written.
1385
1386                            // Get parent frame and field info
1387                            let parent_idx = top_idx - 1;
1388                            let parent_frame = &frames[parent_idx];
1389
1390                            // Get the field to find its offset
1391                            let field = if let Type::User(UserType::Struct(struct_type)) =
1392                                parent_frame.allocated.shape().ty
1393                            {
1394                                &struct_type.fields[field_idx]
1395                            } else {
1396                                return Err(self.err(ReflectErrorKind::InvariantViolation {
1397                                invariant: "SmartPointerSlice field frame parent must be a struct",
1398                            }));
1399                            };
1400
1401                            // Calculate where the Arc should be written (parent.data + field.offset)
1402                            let field_location =
1403                                unsafe { parent_frame.data.field_uninit(field.offset) };
1404
1405                            // Write the Arc to the parent struct's field location
1406                            let arc_layout = match current_shape.layout.sized_layout() {
1407                                Ok(layout) => layout,
1408                                Err(_) => {
1409                                    return Err(self.err(ReflectErrorKind::Unsized {
1410                                    shape: current_shape,
1411                                    operation: "SmartPointerSlice conversion requires sized Arc",
1412                                }));
1413                                }
1414                            };
1415                            let arc_size = arc_layout.size();
1416                            unsafe {
1417                                core::ptr::copy_nonoverlapping(
1418                                    arc_ptr.as_byte_ptr(),
1419                                    field_location.as_mut_byte_ptr(),
1420                                    arc_size,
1421                                );
1422                            }
1423
1424                            // Free the staging allocation from convert_fn (the Arc was copied to field_location)
1425                            unsafe {
1426                                ::alloc::alloc::dealloc(
1427                                    arc_ptr.as_byte_ptr() as *mut u8,
1428                                    arc_layout,
1429                                );
1430                            }
1431
1432                            // Update the frame to point to the correct field location and mark as initialized
1433                            frames[top_idx].data = field_location;
1434                            frames[top_idx].tracker = Tracker::Scalar;
1435                            frames[top_idx].is_init = true;
1436
1437                            // Return WITHOUT popping - the field frame will be popped by the next end() call
1438                            return Ok(self);
1439                        }
1440                        FrameOwnership::Owned => {
1441                            // Arc<[T]> is the root type or owned independently
1442                            // The frame already has the allocation, we just need to update it with the Arc
1443
1444                            // The frame's data pointer is currently the builder, but we allocated
1445                            // the Arc memory in the convert_fn. Update to point to the Arc.
1446                            frames[top_idx].data = PtrUninit::new(arc_ptr.as_byte_ptr() as *mut u8);
1447                            frames[top_idx].tracker = Tracker::Scalar;
1448                            frames[top_idx].is_init = true;
1449                            // Keep Owned ownership so Guard will properly deallocate
1450
1451                            // Return WITHOUT popping - the frame stays and will be built/dropped normally
1452                            return Ok(self);
1453                        }
1454                        FrameOwnership::TrackedBuffer
1455                        | FrameOwnership::BorrowedInPlace
1456                        | FrameOwnership::External
1457                        | FrameOwnership::RopeSlot => {
1458                            return Err(self.err(ReflectErrorKind::InvariantViolation {
1459                            invariant: "SmartPointerSlice cannot have TrackedBuffer/BorrowedInPlace/External/RopeSlot ownership after conversion",
1460                        }));
1461                        }
1462                    }
1463                }
1464            }
1465        }
1466
1467        if self.frames().len() <= 1 {
1468            // Never pop the last/root frame - this indicates a broken state machine
1469            // No need to poison - returning Err consumes self, Drop will handle cleanup
1470            return Err(self.err(ReflectErrorKind::InvariantViolation {
1471                invariant: "Partial::end() called with only one frame on the stack",
1472            }));
1473        }
1474
1475        // In deferred mode, cannot pop below the start depth
1476        if let Some(start_depth) = self.start_depth()
1477            && self.frames().len() <= start_depth
1478        {
1479            // No need to poison - returning Err consumes self, Drop will handle cleanup
1480            return Err(self.err(ReflectErrorKind::InvariantViolation {
1481                invariant: "Partial::end() called but would pop below deferred start depth",
1482            }));
1483        }
1484
1485        // Require that the top frame is fully initialized before popping.
1486        // In deferred mode, tracked frames (those that will be stored for re-entry)
1487        // defer validation to finish_deferred(). All other frames validate now
1488        // using the TypePlan's FillRule (which knows what's Required vs Defaultable).
1489        let requires_full_init = if !self.is_deferred() {
1490            true
1491        } else {
1492            // If this frame will be stored, defer validation to finish_deferred().
1493            // Otherwise validate now.
1494            !self.should_store_frame_for_deferred()
1495        };
1496
1497        if requires_full_init {
1498            // Try the optimized path using precomputed FieldInitPlan
1499            // Extract frame info first (borrows only self.mode)
1500            let frame_info = self.mode.stack().last().map(|frame| {
1501                let variant_idx = match &frame.tracker {
1502                    Tracker::Enum { variant_idx, .. } => Some(*variant_idx),
1503                    _ => None,
1504                };
1505                (frame.type_plan, variant_idx)
1506            });
1507
1508            // Look up plans from the type plan node - need to resolve NodeId to get the actual node
1509            let plans_info = frame_info.and_then(|(type_plan_id, variant_idx)| {
1510                let type_plan = self.root_plan.node(type_plan_id);
1511                match &type_plan.kind {
1512                    TypePlanNodeKind::Struct(struct_plan) => Some(struct_plan.fields),
1513                    TypePlanNodeKind::Enum(enum_plan) => {
1514                        let variants = self.root_plan.variants(enum_plan.variants);
1515                        variant_idx.and_then(|idx| variants.get(idx).map(|v| v.fields))
1516                    }
1517                    _ => None,
1518                }
1519            });
1520
1521            if let Some(plans_range) = plans_info {
1522                // Resolve the SliceRange to an actual slice
1523                let plans = self.root_plan.fields(plans_range);
1524                // Now mutably borrow mode.stack to get the frame
1525                // (root_plan borrow of `plans` is still active but that's fine -
1526                // mode and root_plan are separate fields)
1527                let frame = self.mode.stack_mut().last_mut().unwrap();
1528                frame
1529                    .fill_and_require_fields(plans, plans.len(), &self.root_plan)
1530                    .map_err(|e| self.err(e))?;
1531            } else {
1532                // Fall back to the old path if optimized path wasn't available
1533                if let Some(frame) = self.frames_mut().last_mut() {
1534                    frame.fill_defaults().map_err(|e| self.err(e))?;
1535                }
1536
1537                let frame = self.frames_mut().last_mut().unwrap();
1538                let result = frame.require_full_initialization();
1539                if result.is_err() {
1540                    crate::trace!(
1541                        "end() VALIDATION FAILED: {} ({:?}) is_init={} - {:?}",
1542                        frame.allocated.shape(),
1543                        frame.tracker.kind(),
1544                        frame.is_init,
1545                        result
1546                    );
1547                }
1548                result.map_err(|e| self.err(e))?
1549            }
1550        }
1551
1552        // In deferred mode, check if we should store this frame for potential re-entry.
1553        // We need to compute the storage path BEFORE popping so we can check it.
1554        //
1555        // Store frames that can be re-entered in deferred mode.
1556        // This includes structs, enums, collections, and Options (which need to be
1557        // stored so finish_deferred can find them when processing their inner values).
1558        let deferred_storage_info = if self.is_deferred() {
1559            let should_store = self.should_store_frame_for_deferred();
1560
1561            if should_store {
1562                // Compute the "field-only" path for storage by finding all Field steps
1563                // from PARENT frames only. The frame being ended shouldn't contribute to
1564                // its own path (its current_child points to ITS children, not to itself).
1565                //
1566                // Note: We include ALL frames in the path computation (including those
1567                // before start_depth) because they contain navigation info. The start_depth
1568                // only determines which frames we STORE, not which frames contribute to paths.
1569                //
1570                // Get the root shape for the Path from the first frame
1571                let root_shape = self
1572                    .frames()
1573                    .first()
1574                    .map(|f| f.allocated.shape())
1575                    .unwrap_or_else(|| <() as facet_core::Facet>::SHAPE);
1576
1577                let mut field_path = facet_path::Path::new(root_shape);
1578                let frames_len = self.frames().len();
1579                // Iterate over all frames EXCEPT the last one (the one being ended)
1580                for (frame_idx, frame) in self.frames().iter().enumerate() {
1581                    // Skip the frame being ended
1582                    if frame_idx == frames_len - 1 {
1583                        continue;
1584                    }
1585                    // Extract navigation steps from frames
1586                    // This MUST match derive_path() for consistency
1587                    match &frame.tracker {
1588                        Tracker::Struct {
1589                            current_child: Some(idx),
1590                            ..
1591                        } => {
1592                            field_path.push(PathStep::Field(*idx as u32));
1593                        }
1594                        Tracker::Enum {
1595                            current_child: Some(idx),
1596                            ..
1597                        } => {
1598                            field_path.push(PathStep::Field(*idx as u32));
1599                        }
1600                        Tracker::List {
1601                            current_child: Some(idx),
1602                            ..
1603                        } => {
1604                            field_path.push(PathStep::Index(*idx as u32));
1605                        }
1606                        Tracker::Array {
1607                            current_child: Some(idx),
1608                            ..
1609                        } => {
1610                            field_path.push(PathStep::Index(*idx as u32));
1611                        }
1612                        Tracker::Option {
1613                            building_inner: true,
1614                            ..
1615                        } => {
1616                            // Option with building_inner contributes OptionSome to path
1617                            field_path.push(PathStep::OptionSome);
1618                        }
1619                        Tracker::SmartPointer {
1620                            building_inner: true,
1621                            ..
1622                        } => {
1623                            // SmartPointer with building_inner contributes Deref to path
1624                            field_path.push(PathStep::Deref);
1625                        }
1626                        Tracker::SmartPointerSlice {
1627                            current_child: Some(idx),
1628                            ..
1629                        } => {
1630                            // SmartPointerSlice with current_child contributes Index to path
1631                            field_path.push(PathStep::Index(*idx as u32));
1632                        }
1633                        Tracker::Inner {
1634                            building_inner: true,
1635                        } => {
1636                            // Inner with building_inner contributes Inner to path
1637                            field_path.push(PathStep::Inner);
1638                        }
1639                        Tracker::Map {
1640                            current_entry_index: Some(idx),
1641                            building_key,
1642                            ..
1643                        } => {
1644                            // Map with active entry contributes MapKey or MapValue with entry index
1645                            if *building_key {
1646                                field_path.push(PathStep::MapKey(*idx as u32));
1647                            } else {
1648                                field_path.push(PathStep::MapValue(*idx as u32));
1649                            }
1650                        }
1651                        _ => {}
1652                    }
1653
1654                    // If the next frame on the stack is a proxy frame, add a Proxy
1655                    // path step. This distinguishes the proxy frame (and its children)
1656                    // from the parent frame that the proxy writes into, preventing path
1657                    // collisions in deferred mode where both frames are stored.
1658                    if frame_idx + 1 < frames_len
1659                        && self.frames()[frame_idx + 1].using_custom_deserialization
1660                    {
1661                        field_path.push(PathStep::Proxy);
1662                    }
1663                }
1664
1665                if !field_path.is_empty() {
1666                    Some(field_path)
1667                } else {
1668                    None
1669                }
1670            } else {
1671                None
1672            }
1673        } else {
1674            None
1675        };
1676
1677        // Pop the frame and save its data pointer for SmartPointer handling
1678        let mut popped_frame = self.frames_mut().pop().unwrap();
1679
1680        // In non-deferred mode, proxy frames are processed immediately.
1681        // In deferred mode, proxy frames are stored (with a PathStep::Proxy
1682        // distinguishing them from their parent) and the conversion is handled
1683        // by finish_deferred after children have been fully materialized.
1684        if popped_frame.using_custom_deserialization && deferred_storage_info.is_none() {
1685            // First check the proxy stored in the frame (used for format-specific proxies
1686            // and container-level proxies), then fall back to field-level proxy.
1687            // This ordering is important because format-specific proxies store their
1688            // proxy in shape_level_proxy, and we want them to take precedence over
1689            // the format-agnostic field.proxy().
1690            let deserialize_with: Option<facet_core::ProxyConvertInFn> =
1691                popped_frame.shape_level_proxy.map(|p| p.convert_in);
1692
1693            // Fall back to field-level proxy (format-agnostic)
1694            let deserialize_with = deserialize_with.or_else(|| {
1695                self.parent_field()
1696                    .and_then(|f| f.proxy().map(|p| p.convert_in))
1697            });
1698
1699            if let Some(deserialize_with) = deserialize_with {
1700                // Get parent shape upfront to avoid borrow conflicts
1701                let parent_shape = self.frames().last().unwrap().allocated.shape();
1702                let parent_frame = self.frames_mut().last_mut().unwrap();
1703
1704                trace!(
1705                    "Detected custom conversion needed from {} to {}",
1706                    popped_frame.allocated.shape(),
1707                    parent_shape
1708                );
1709
1710                unsafe {
1711                    let res = {
1712                        let inner_value_ptr = popped_frame.data.assume_init().as_const();
1713                        (deserialize_with)(inner_value_ptr, parent_frame.data)
1714                    };
1715                    let popped_frame_shape = popped_frame.allocated.shape();
1716
1717                    // Note: We do NOT call deinit() here because deserialize_with uses
1718                    // ptr::read to take ownership of the source value. Calling deinit()
1719                    // would cause a double-free. We mark is_init as false to satisfy
1720                    // dealloc()'s assertion, then deallocate the memory.
1721                    popped_frame.is_init = false;
1722                    popped_frame.dealloc();
1723                    let parent_data = parent_frame.data;
1724                    match res {
1725                        Ok(rptr) => {
1726                            if rptr.as_uninit() != parent_data {
1727                                return Err(self.err(
1728                                    ReflectErrorKind::CustomDeserializationError {
1729                                        message:
1730                                            "deserialize_with did not return the expected pointer"
1731                                                .into(),
1732                                        src_shape: popped_frame_shape,
1733                                        dst_shape: parent_shape,
1734                                    },
1735                                ));
1736                            }
1737                        }
1738                        Err(message) => {
1739                            return Err(self.err(ReflectErrorKind::CustomDeserializationError {
1740                                message,
1741                                src_shape: popped_frame_shape,
1742                                dst_shape: parent_shape,
1743                            }));
1744                        }
1745                    }
1746                    // Re-borrow parent_frame after potential early returns
1747                    let parent_frame = self.frames_mut().last_mut().unwrap();
1748                    parent_frame.mark_as_init();
1749                }
1750                return Ok(self);
1751            }
1752        }
1753
1754        // If we determined this frame should be stored for deferred re-entry, do it now
1755        if let Some(storage_path) = deferred_storage_info {
1756            trace!(
1757                "end(): Storing frame for deferred path {:?}, shape {}",
1758                storage_path,
1759                popped_frame.allocated.shape()
1760            );
1761
1762            if let FrameMode::Deferred {
1763                stack,
1764                stored_frames,
1765                ..
1766            } = &mut self.mode
1767            {
1768                // Mark the field as initialized in the parent frame.
1769                // This is important because the parent might validate before
1770                // finish_deferred runs (e.g., parent is an array element that
1771                // isn't stored). Without this, the parent's validation would
1772                // fail with "missing field".
1773                if let FrameOwnership::Field { field_idx } = popped_frame.ownership
1774                    && let Some(parent_frame) = stack.last_mut()
1775                {
1776                    Self::mark_field_initialized_by_index(parent_frame, field_idx);
1777                }
1778
1779                // For BorrowedInPlace DynamicValue frames (e.g., re-entered pending entries),
1780                // flush pending_elements/pending_entries and return without storing.
1781                // These frames point to memory that's already tracked in the parent's
1782                // pending_entries - storing them would overwrite the entry.
1783                if matches!(popped_frame.ownership, FrameOwnership::BorrowedInPlace) {
1784                    crate::trace!(
1785                        "end(): BorrowedInPlace frame, flushing pending items and returning"
1786                    );
1787                    if let Err(kind) = popped_frame.require_full_initialization() {
1788                        return Err(ReflectError::new(kind, storage_path));
1789                    }
1790                    return Ok(self);
1791                }
1792
1793                // Handle Map state transitions even when storing frames.
1794                // The Map needs to transition states so that subsequent operations work:
1795                // - PushingKey -> PushingValue: so begin_value() can be called
1796                // - PushingValue -> Idle: so begin_key() can be called for the next entry
1797                //
1798                // SSoT: we do NOT touch pending_entries here. The key/value buffer stays
1799                // owned by the stored frame. finish_deferred's walk will call
1800                // complete_map_{key,value}_frame at consume-time to transfer buffer
1801                // ownership into pending_entries *after* the frame has passed validation.
1802                if let Some(parent_frame) = stack.last_mut() {
1803                    if let Tracker::Map { insert_state, .. } = &mut parent_frame.tracker {
1804                        match insert_state {
1805                            MapInsertState::PushingKey { key_ptr, .. } => {
1806                                *insert_state = MapInsertState::PushingValue {
1807                                    key_ptr: *key_ptr,
1808                                    value_ptr: None,
1809                                };
1810                                crate::trace!(
1811                                    "end(): Map transitioned to PushingValue while storing key frame"
1812                                );
1813                            }
1814                            MapInsertState::PushingValue { .. } => {
1815                                *insert_state = MapInsertState::Idle;
1816                                crate::trace!(
1817                                    "end(): Map transitioned to Idle while storing value frame"
1818                                );
1819                            }
1820                            _ => {}
1821                        }
1822                    }
1823
1824                    // Handle Set element insertion immediately.
1825                    // Set elements have no path identity (no index), so they can't be stored
1826                    // and re-entered. We must insert them into the Set now.
1827                    if let Tracker::Set { current_child } = &mut parent_frame.tracker
1828                        && *current_child
1829                        && parent_frame.is_init
1830                        && let Def::Set(set_def) = parent_frame.allocated.shape().def
1831                    {
1832                        let insert = set_def.vtable.insert;
1833                        let element_ptr = PtrMut::new(popped_frame.data.as_mut_byte_ptr());
1834                        unsafe {
1835                            insert(
1836                                PtrMut::new(parent_frame.data.as_mut_byte_ptr()),
1837                                element_ptr,
1838                            );
1839                        }
1840                        crate::trace!("end(): Set element inserted immediately in deferred mode");
1841                        // Insert moved out of popped_frame - don't store it
1842                        popped_frame.tracker = Tracker::Scalar;
1843                        popped_frame.is_init = false;
1844                        popped_frame.dealloc();
1845                        *current_child = false;
1846                        // Don't store this frame - return early
1847                        return Ok(self);
1848                    }
1849
1850                    // Handle DynamicValue object entry - add to pending_entries for deferred insertion.
1851                    // Like Map entries, we store the key-value pair and insert during finalization.
1852                    if let Tracker::DynamicValue {
1853                        state:
1854                            DynamicValueState::Object {
1855                                insert_state,
1856                                pending_entries,
1857                            },
1858                    } = &mut parent_frame.tracker
1859                        && let DynamicObjectInsertState::BuildingValue { key } = insert_state
1860                    {
1861                        // Take ownership of the key from insert_state
1862                        let key = core::mem::take(key);
1863
1864                        // Finalize the child Value before adding to pending_entries.
1865                        // The child might have its own pending_entries/pending_elements
1866                        // that need to be inserted first.
1867                        if let Err(kind) = popped_frame.require_full_initialization() {
1868                            return Err(ReflectError::new(kind, storage_path.clone()));
1869                        }
1870
1871                        // Add to pending_entries for deferred insertion.
1872                        // The frame isn't stored — popped_frame is silently dropped after
1873                        // this block returns, and Frame has no Drop impl, so
1874                        // pending_entries is the sole owner of this buffer.
1875                        pending_entries.push((key, popped_frame.data));
1876                        crate::trace!(
1877                            "end(): DynamicValue object entry added to pending_entries in deferred mode"
1878                        );
1879
1880                        // Reset insert state to Idle so more entries can be added
1881                        *insert_state = DynamicObjectInsertState::Idle;
1882
1883                        // Don't store this frame - return early
1884                        return Ok(self);
1885                    }
1886
1887                    // Handle DynamicValue array element - add to pending_elements for deferred insertion.
1888                    if let Tracker::DynamicValue {
1889                        state:
1890                            DynamicValueState::Array {
1891                                building_element,
1892                                pending_elements,
1893                            },
1894                    } = &mut parent_frame.tracker
1895                        && *building_element
1896                    {
1897                        // Finalize the child Value before adding to pending_elements.
1898                        // The child might have its own pending_entries/pending_elements
1899                        // that need to be inserted first.
1900                        if let Err(kind) = popped_frame.require_full_initialization() {
1901                            return Err(ReflectError::new(kind, storage_path.clone()));
1902                        }
1903
1904                        // Add to pending_elements for deferred insertion.
1905                        // The frame isn't stored and Frame has no Drop impl, so
1906                        // pending_elements is the sole owner of this buffer.
1907                        pending_elements.push(popped_frame.data);
1908                        crate::trace!(
1909                            "end(): DynamicValue array element added to pending_elements in deferred mode"
1910                        );
1911
1912                        // Reset building_element so more elements can be added
1913                        *building_element = false;
1914
1915                        // Don't store this frame - return early
1916                        return Ok(self);
1917                    }
1918
1919                    // Note: we intentionally do NOT call `rope.mark_last_initialized()`
1920                    // here for RopeSlot frames in deferred mode. Marking the slot as
1921                    // initialized now would let `ListRope::drain_into` drop it on a
1922                    // later error path — but the stored frame may have partial-init
1923                    // descendants (e.g. a Box<enum> whose fields weren't all set), and
1924                    // dropping those is UB. Instead, the frame stays stored; the
1925                    // `finish_deferred` walk marks the rope slot initialized only after
1926                    // `require_full_initialization` passes for this element. Same
1927                    // consume-time protocol used for Map `pending_entries`.
1928
1929                    // Clear building_item for SmartPointerSlice so the next element can be added
1930                    if let Tracker::SmartPointerSlice { building_item, .. } =
1931                        &mut parent_frame.tracker
1932                    {
1933                        *building_item = false;
1934                        crate::trace!(
1935                            "end(): SmartPointerSlice building_item cleared while storing element"
1936                        );
1937                    }
1938                }
1939
1940                stored_frames.insert(storage_path, popped_frame);
1941
1942                // Clear parent's current_child tracking
1943                if let Some(parent_frame) = stack.last_mut() {
1944                    parent_frame.tracker.clear_current_child();
1945                }
1946            }
1947
1948            return Ok(self);
1949        }
1950
1951        // Update parent frame's tracking when popping from a child
1952        // Get parent shape upfront to avoid borrow conflicts
1953        let parent_shape = self.frames().last().unwrap().allocated.shape();
1954        let is_deferred_mode = self.is_deferred();
1955        let parent_frame = self.frames_mut().last_mut().unwrap();
1956
1957        crate::trace!(
1958            "end(): Popped {} (tracker {:?}), Parent {} (tracker {:?})",
1959            popped_frame.allocated.shape(),
1960            popped_frame.tracker.kind(),
1961            parent_shape,
1962            parent_frame.tracker.kind()
1963        );
1964
1965        // Check if we need to do a conversion - this happens when:
1966        // 1. The parent frame has a builder_shape or inner type that matches the popped frame's shape
1967        // 2. The parent frame has try_from
1968        // 3. The parent frame is not yet initialized
1969        // 4. The parent frame's tracker is Scalar or Inner (not Option, SmartPointer, etc.)
1970        //    This ensures we only do conversion when begin_inner was used, not begin_some
1971        let needs_conversion = !parent_frame.is_init
1972            && matches!(
1973                parent_frame.tracker,
1974                Tracker::Scalar | Tracker::Inner { .. }
1975            )
1976            && ((parent_shape.builder_shape.is_some()
1977                && parent_shape.builder_shape.unwrap() == popped_frame.allocated.shape())
1978                || (parent_shape.inner.is_some()
1979                    && parent_shape.inner.unwrap() == popped_frame.allocated.shape()))
1980            && match parent_shape.vtable {
1981                facet_core::VTableErased::Direct(vt) => vt.try_from.is_some(),
1982                facet_core::VTableErased::Indirect(vt) => vt.try_from.is_some(),
1983            };
1984
1985        if needs_conversion {
1986            trace!(
1987                "Detected implicit conversion needed from {} to {}",
1988                popped_frame.allocated.shape(),
1989                parent_shape
1990            );
1991
1992            // The conversion requires the source frame to be fully initialized
1993            // (we're about to call assume_init() and pass to try_from)
1994            if let Err(e) = popped_frame.require_full_initialization() {
1995                // Deallocate the memory since the frame wasn't fully initialized
1996                if let FrameOwnership::Owned = popped_frame.ownership
1997                    && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
1998                    && layout.size() > 0
1999                {
2000                    trace!(
2001                        "Deallocating uninitialized conversion frame memory: size={}, align={}",
2002                        layout.size(),
2003                        layout.align()
2004                    );
2005                    unsafe {
2006                        ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2007                    }
2008                }
2009                return Err(self.err(e));
2010            }
2011
2012            // Perform the conversion
2013            let inner_ptr = unsafe { popped_frame.data.assume_init().as_const() };
2014            let inner_shape = popped_frame.allocated.shape();
2015
2016            trace!("Converting from {} to {}", inner_shape, parent_shape);
2017
2018            // Handle Direct and Indirect vtables - both return TryFromOutcome
2019            let outcome = match parent_shape.vtable {
2020                facet_core::VTableErased::Direct(vt) => {
2021                    if let Some(try_from_fn) = vt.try_from {
2022                        unsafe {
2023                            try_from_fn(
2024                                parent_frame.data.as_mut_byte_ptr() as *mut (),
2025                                inner_shape,
2026                                inner_ptr,
2027                            )
2028                        }
2029                    } else {
2030                        return Err(self.err(ReflectErrorKind::OperationFailed {
2031                            shape: parent_shape,
2032                            operation: "try_from not available for this type",
2033                        }));
2034                    }
2035                }
2036                facet_core::VTableErased::Indirect(vt) => {
2037                    if let Some(try_from_fn) = vt.try_from {
2038                        // parent_frame.data is uninitialized - we're writing the converted
2039                        // value into it
2040                        let ox_uninit =
2041                            facet_core::OxPtrUninit::new(parent_frame.data, parent_shape);
2042                        unsafe { try_from_fn(ox_uninit, inner_shape, inner_ptr) }
2043                    } else {
2044                        return Err(self.err(ReflectErrorKind::OperationFailed {
2045                            shape: parent_shape,
2046                            operation: "try_from not available for this type",
2047                        }));
2048                    }
2049                }
2050            };
2051
2052            // Handle the TryFromOutcome, which explicitly communicates ownership semantics:
2053            // - Converted: source was consumed, conversion succeeded
2054            // - Unsupported: source was NOT consumed, caller retains ownership
2055            // - Failed: source WAS consumed, but conversion failed
2056            match outcome {
2057                facet_core::TryFromOutcome::Converted => {
2058                    trace!("Conversion succeeded, marking parent as initialized");
2059                    parent_frame.is_init = true;
2060                    // Reset Inner tracker to Scalar after successful conversion
2061                    if matches!(parent_frame.tracker, Tracker::Inner { .. }) {
2062                        parent_frame.tracker = Tracker::Scalar;
2063                    }
2064                }
2065                facet_core::TryFromOutcome::Unsupported => {
2066                    trace!("Source type not supported for conversion - source NOT consumed");
2067
2068                    // Source was NOT consumed, so we need to drop it properly
2069                    if let FrameOwnership::Owned = popped_frame.ownership
2070                        && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2071                        && layout.size() > 0
2072                    {
2073                        // Drop the value, then deallocate
2074                        unsafe {
2075                            popped_frame
2076                                .allocated
2077                                .shape()
2078                                .call_drop_in_place(popped_frame.data.assume_init());
2079                            ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2080                        }
2081                    }
2082
2083                    return Err(self.err(ReflectErrorKind::TryFromError {
2084                        src_shape: inner_shape,
2085                        dst_shape: parent_shape,
2086                        inner: facet_core::TryFromError::UnsupportedSourceType,
2087                    }));
2088                }
2089                facet_core::TryFromOutcome::Failed(e) => {
2090                    trace!("Conversion failed after consuming source: {e:?}");
2091
2092                    // Source WAS consumed, so we only deallocate memory (don't drop)
2093                    if let FrameOwnership::Owned = popped_frame.ownership
2094                        && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2095                        && layout.size() > 0
2096                    {
2097                        trace!(
2098                            "Deallocating conversion frame memory after failure: size={}, align={}",
2099                            layout.size(),
2100                            layout.align()
2101                        );
2102                        unsafe {
2103                            ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2104                        }
2105                    }
2106
2107                    return Err(self.err(ReflectErrorKind::TryFromError {
2108                        src_shape: inner_shape,
2109                        dst_shape: parent_shape,
2110                        inner: facet_core::TryFromError::Generic(e.into_owned()),
2111                    }));
2112                }
2113            }
2114
2115            // Deallocate the inner value's memory since try_from consumed it
2116            if let FrameOwnership::Owned = popped_frame.ownership
2117                && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2118                && layout.size() > 0
2119            {
2120                trace!(
2121                    "Deallocating conversion frame memory: size={}, align={}",
2122                    layout.size(),
2123                    layout.align()
2124                );
2125                unsafe {
2126                    ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2127                }
2128            }
2129
2130            return Ok(self);
2131        }
2132
2133        // For Field-owned frames, reclaim responsibility in parent's tracker
2134        // Only mark as initialized if the child frame was actually initialized.
2135        // This prevents double-free when begin_inner/begin_some drops a value via
2136        // prepare_for_reinitialization but then fails, leaving the child uninitialized.
2137        //
2138        // We use require_full_initialization() rather than just is_init because:
2139        // - Scalar frames use is_init as the source of truth
2140        // - Struct/Array/Enum frames use their iset/data as the source of truth
2141        //   (is_init may never be set to true for these tracker types)
2142        if let FrameOwnership::Field { field_idx } = popped_frame.ownership {
            // Fill defaults on the child frame before checking whether it is fully
            // initialized. This handles structs/enums with optional fields that
            // should auto-fill (applies in deferred mode as well).
2146            if let Err(e) = popped_frame.fill_defaults() {
2147                return Err(self.err(e));
2148            }
2149            let child_is_initialized = popped_frame.require_full_initialization().is_ok();
2150            match &mut parent_frame.tracker {
2151                Tracker::Struct {
2152                    iset,
2153                    current_child,
2154                } => {
2155                    if child_is_initialized {
2156                        iset.set(field_idx); // Parent reclaims responsibility only if child was init
2157                    }
2158                    *current_child = None;
2159                }
2160                Tracker::Array {
2161                    iset,
2162                    current_child,
2163                } => {
2164                    if child_is_initialized {
2165                        iset.set(field_idx); // Parent reclaims responsibility only if child was init
2166                    }
2167                    *current_child = None;
2168                }
2169                Tracker::Enum {
2170                    data,
2171                    current_child,
2172                    ..
2173                } => {
2174                    crate::trace!(
2175                        "end(): Enum field {} child_is_initialized={}, data before={:?}",
2176                        field_idx,
2177                        child_is_initialized,
2178                        data
2179                    );
2180                    if child_is_initialized {
2181                        data.set(field_idx); // Parent reclaims responsibility only if child was init
2182                    }
2183                    *current_child = None;
2184                }
2185                _ => {}
2186            }
2187            return Ok(self);
2188        }
2189
2190        // For BorrowedInPlace DynamicValue frames (e.g., re-entered pending entries),
2191        // flush any pending_elements/pending_entries that were accumulated during
2192        // this re-entry. This is necessary because BorrowedInPlace frames aren't
2193        // stored for deferred processing - they modify existing memory in-place.
2194        if matches!(popped_frame.ownership, FrameOwnership::BorrowedInPlace)
2195            && let Err(e) = popped_frame.require_full_initialization()
2196        {
2197            return Err(self.err(e));
2198        }
2199
2200        match &mut parent_frame.tracker {
2201            Tracker::SmartPointer {
2202                building_inner,
2203                pending_inner,
2204            } => {
2205                crate::trace!(
2206                    "end() SMARTPTR: popped {} into parent {} (building_inner={}, deferred={})",
2207                    popped_frame.allocated.shape(),
2208                    parent_frame.allocated.shape(),
2209                    *building_inner,
2210                    is_deferred_mode
2211                );
2212                // We just popped the inner value frame for a SmartPointer
2213                if *building_inner {
2214                    if matches!(parent_frame.allocated.shape().def, Def::Pointer(_)) {
2215                        // Check if we're in deferred mode - if so, store the inner value pointer
2216                        if is_deferred_mode {
2217                            // Store the inner value pointer for deferred new_into_fn.
2218                            // popped_frame isn't stored — it's silently dropped after this
2219                            // block (Frame has no Drop impl), so pending_inner is the sole
2220                            // owner of this buffer.
2221                            *pending_inner = Some(popped_frame.data);
2222                            *building_inner = false;
2223                            parent_frame.is_init = true;
2224                            crate::trace!(
2225                                "end() SMARTPTR: stored pending_inner, will finalize in finish_deferred"
2226                            );
2227                        } else {
2228                            // Not in deferred mode - complete immediately
2229                            if let Def::Pointer(_) = parent_frame.allocated.shape().def {
2230                                if let Err(e) = popped_frame.require_full_initialization() {
2231                                    popped_frame.deinit();
2232                                    popped_frame.dealloc();
2233                                    return Err(self.err(e));
2234                                }
2235
2236                                // Use complete_smart_pointer_frame which handles both:
2237                                // - Sized pointees (via new_into_fn)
2238                                // - Unsized pointees like str (via String conversion)
2239                                Self::complete_smart_pointer_frame(parent_frame, popped_frame);
2240                                crate::trace!(
2241                                    "end() SMARTPTR: completed smart pointer via complete_smart_pointer_frame"
2242                                );
2243
2244                                // Change tracker to Scalar so the next end() just pops it
2245                                parent_frame.tracker = Tracker::Scalar;
2246                            }
2247                        }
2248                    } else {
2249                        return Err(self.err(ReflectErrorKind::OperationFailed {
2250                            shape: parent_shape,
2251                            operation: "SmartPointer frame without SmartPointer definition",
2252                        }));
2253                    }
2254                } else {
2255                    // building_inner is false - shouldn't happen in normal flow
2256                    return Err(self.err(ReflectErrorKind::OperationFailed {
2257                        shape: parent_shape,
2258                        operation: "SmartPointer end() called with building_inner = false",
2259                    }));
2260                }
2261            }
2262            Tracker::List {
2263                current_child,
2264                rope,
2265                ..
2266            } if parent_frame.is_init => {
2267                if current_child.is_some() {
2268                    // We just popped an element frame, now add it to the list
2269                    if let Def::List(list_def) = parent_shape.def {
2270                        // Check which storage mode we used
2271                        if matches!(popped_frame.ownership, FrameOwnership::RopeSlot) {
2272                            // Rope storage: element lives in a stable chunk.
2273                            // Mark it as initialized; we'll drain to Vec when the list frame pops.
2274                            if let Some(rope) = rope {
2275                                rope.mark_last_initialized();
2276                            }
2277                            // No dealloc needed - memory belongs to rope
2278                        } else {
2279                            // Fallback: element is in separate heap buffer, use push to copy
2280                            let Some(push_fn) = list_def.push() else {
2281                                return Err(self.err(ReflectErrorKind::OperationFailed {
2282                                    shape: parent_shape,
2283                                    operation: "List missing push function",
2284                                }));
2285                            };
2286
2287                            // The child frame contained the element value
2288                            let element_ptr = PtrMut::new(popped_frame.data.as_mut_byte_ptr());
2289
2290                            // Use push to add element to the list
2291                            unsafe {
2292                                push_fn(
2293                                    PtrMut::new(parent_frame.data.as_mut_byte_ptr()),
2294                                    element_ptr,
2295                                );
2296                            }
2297
                            // push_fn moved the value out of popped_frame; reset its
                            // tracker and init flag so dealloc() only frees the buffer
                            // and does not drop the moved-out value.
2299                            popped_frame.tracker = Tracker::Scalar;
2300                            popped_frame.is_init = false;
2301                            popped_frame.dealloc();
2302                        }
2303
2304                        *current_child = None;
2305                    }
2306                }
2307            }
2308            Tracker::Map {
2309                insert_state,
2310                pending_entries,
2311                ..
2312            } if parent_frame.is_init => {
2313                match insert_state {
2314                    MapInsertState::PushingKey { key_ptr, .. } => {
2315                        // Fill defaults on the key frame before considering it done.
2316                        // This handles metadata containers and other structs with Option fields.
2317                        if let Err(e) = popped_frame.fill_defaults() {
2318                            return Err(self.err(e));
2319                        }
2320
2321                        // Transfer key buffer ownership into pending_entries as a
2322                        // half-entry (key_ptr, None). popped_frame is silently dropped
2323                        // after this block (Frame has no Drop impl), so pending_entries
2324                        // becomes the sole owner. The value phase will upgrade the
2325                        // half-entry to a full (key, Some(value)) pair.
2326                        pending_entries.push((*key_ptr, None));
2327
2328                        *insert_state = MapInsertState::PushingValue {
2329                            key_ptr: *key_ptr,
2330                            value_ptr: None,
2331                        };
2332                    }
2333                    MapInsertState::PushingValue { value_ptr, .. } => {
2334                        // Fill defaults on the value frame before considering it done.
2335                        // This handles structs with Option fields.
2336                        if let Err(e) = popped_frame.fill_defaults() {
2337                            return Err(self.err(e));
2338                        }
2339
2340                        // Upgrade the last half-entry (key_ptr, None) to a full entry
2341                        // (key_ptr, Some(value_ptr)).
2342                        if let Some(value_ptr) = value_ptr {
2343                            let last = pending_entries.last_mut().expect(
2344                                "pending_entries must have a half-entry from the PushingKey -> PushingValue transition",
2345                            );
2346                            debug_assert!(
2347                                last.1.is_none(),
2348                                "last pending entry must be a half-entry (None value), got Some — invariant violation"
2349                            );
2350                            last.1 = Some(*value_ptr);
2351
2352                            // Reset to idle state
2353                            *insert_state = MapInsertState::Idle;
2354                        }
2355                    }
2356                    MapInsertState::Idle => {
2357                        // Nothing to do
2358                    }
2359                }
2360            }
2361            Tracker::Set { current_child } if parent_frame.is_init => {
2362                if *current_child {
2363                    // We just popped an element frame, now insert it into the set
2364                    if let Def::Set(set_def) = parent_frame.allocated.shape().def {
2365                        let insert = set_def.vtable.insert;
2366
2367                        // The child frame contained the element value
2368                        let element_ptr = PtrMut::new(popped_frame.data.as_mut_byte_ptr());
2369
2370                        // Use insert to add element to the set
2371                        unsafe {
2372                            insert(
2373                                PtrMut::new(parent_frame.data.as_mut_byte_ptr()),
2374                                element_ptr,
2375                            );
2376                        }
2377
                        // insert moved the value out of popped_frame; reset its
                        // tracker and init flag so dealloc() only frees the buffer
                        // and does not drop the moved-out value.
2379                        popped_frame.tracker = Tracker::Scalar;
2380                        popped_frame.is_init = false;
2381                        popped_frame.dealloc();
2382
2383                        *current_child = false;
2384                    }
2385                }
2386            }
2387            Tracker::Option {
2388                building_inner,
2389                pending_inner,
2390            } => {
2391                crate::trace!(
2392                    "end(): matched Tracker::Option, building_inner={}",
2393                    *building_inner
2394                );
2395                // We just popped the inner value frame for an Option's Some variant
2396                if *building_inner {
2397                    if matches!(parent_frame.allocated.shape().def, Def::Option(_)) {
2398                        // Store the inner value pointer for deferred init_some.
2399                        // This keeps the inner value's memory stable for deferred processing.
2400                        // Actual init_some() happens in require_full_initialization().
2401                        //
2402                        // popped_frame isn't stored — it's silently dropped after this
2403                        // block (Frame has no Drop impl), so pending_inner is the sole
2404                        // owner of this buffer.
2405                        *pending_inner = Some(popped_frame.data);
2406
2407                        // Mark that we're no longer building the inner value
2408                        *building_inner = false;
2409                        crate::trace!("end(): stored pending_inner, set building_inner to false");
2410                        // Mark the Option as initialized (pending finalization)
2411                        parent_frame.is_init = true;
2412                        crate::trace!("end(): set parent_frame.is_init to true");
2413                    } else {
2414                        return Err(self.err(ReflectErrorKind::OperationFailed {
2415                            shape: parent_shape,
2416                            operation: "Option frame without Option definition",
2417                        }));
2418                    }
2419                } else {
2420                    // building_inner is false - the Option was already initialized but
2421                    // begin_some was called again. The popped frame was not used to
2422                    // initialize the Option, so we need to clean it up.
2423                    popped_frame.deinit();
2424                    if let FrameOwnership::Owned = popped_frame.ownership
2425                        && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2426                        && layout.size() > 0
2427                    {
2428                        unsafe {
2429                            ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2430                        }
2431                    }
2432                }
2433            }
2434            Tracker::Result {
2435                is_ok,
2436                building_inner,
2437            } => {
2438                crate::trace!(
2439                    "end(): matched Tracker::Result, is_ok={}, building_inner={}",
2440                    *is_ok,
2441                    *building_inner
2442                );
2443                // We just popped the inner value frame for a Result's Ok or Err variant
2444                if *building_inner {
2445                    if let Def::Result(result_def) = parent_frame.allocated.shape().def {
2446                        // The popped frame contains the inner value
2447                        let inner_value_ptr = unsafe { popped_frame.data.assume_init() };
2448
2449                        // Initialize the Result as Ok(inner_value) or Err(inner_value)
2450                        if *is_ok {
2451                            let init_ok_fn = result_def.vtable.init_ok;
2452                            unsafe {
2453                                init_ok_fn(parent_frame.data, inner_value_ptr);
2454                            }
2455                        } else {
2456                            let init_err_fn = result_def.vtable.init_err;
2457                            unsafe {
2458                                init_err_fn(parent_frame.data, inner_value_ptr);
2459                            }
2460                        }
2461
2462                        // Deallocate the inner value's memory since init_ok/err_fn moved it
2463                        if let FrameOwnership::Owned = popped_frame.ownership
2464                            && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2465                            && layout.size() > 0
2466                        {
2467                            unsafe {
2468                                ::alloc::alloc::dealloc(
2469                                    popped_frame.data.as_mut_byte_ptr(),
2470                                    layout,
2471                                );
2472                            }
2473                        }
2474
2475                        // Mark that we're no longer building the inner value
2476                        *building_inner = false;
2477                        crate::trace!("end(): set building_inner to false");
2478                        // Mark the Result as initialized
2479                        parent_frame.is_init = true;
2480                        crate::trace!("end(): set parent_frame.is_init to true");
2481                    } else {
2482                        return Err(self.err(ReflectErrorKind::OperationFailed {
2483                            shape: parent_shape,
2484                            operation: "Result frame without Result definition",
2485                        }));
2486                    }
2487                } else {
2488                    // building_inner is false - the Result was already initialized but
2489                    // begin_ok/begin_err was called again. The popped frame was not used to
2490                    // initialize the Result, so we need to clean it up.
2491                    popped_frame.deinit();
2492                    if let FrameOwnership::Owned = popped_frame.ownership
2493                        && let Ok(layout) = popped_frame.allocated.shape().layout.sized_layout()
2494                        && layout.size() > 0
2495                    {
2496                        unsafe {
2497                            ::alloc::alloc::dealloc(popped_frame.data.as_mut_byte_ptr(), layout);
2498                        }
2499                    }
2500                }
2501            }
2502            Tracker::Scalar => {
2503                // the main case here is: the popped frame was a `String` and the
2504                // parent frame is an `Arc<str>`, `Box<str>` etc.
2505                match &parent_shape.def {
2506                    Def::Pointer(smart_ptr_def) => {
2507                        let pointee = match smart_ptr_def.pointee() {
2508                            Some(p) => p,
2509                            None => {
2510                                return Err(self.err(ReflectErrorKind::InvariantViolation {
2511                                    invariant: "pointer type doesn't have a pointee",
2512                                }));
2513                            }
2514                        };
2515
2516                        if !pointee.is_shape(str::SHAPE) {
2517                            return Err(self.err(ReflectErrorKind::InvariantViolation {
2518                                invariant: "only T=str is supported when building SmartPointer<T> and T is unsized",
2519                            }));
2520                        }
2521
2522                        if !popped_frame.allocated.shape().is_shape(String::SHAPE) {
2523                            return Err(self.err(ReflectErrorKind::InvariantViolation {
2524                                invariant: "the popped frame should be String when building a SmartPointer<T>",
2525                            }));
2526                        }
2527
2528                        if let Err(e) = popped_frame.require_full_initialization() {
2529                            return Err(self.err(e));
2530                        }
2531
                        // Special-case: the parent is a SmartPointer<str> (Box<str>,
                        // Arc<str>, Rc<str>) and the popped frame holds a String.
                        // Move the String out of the popped frame, convert it into the
                        // appropriate smart pointer, and write the result into the
                        // parent frame.
2536                        use ::alloc::{rc::Rc, string::String, sync::Arc};
2537
2538                        let Some(known) = smart_ptr_def.known else {
2539                            return Err(self.err(ReflectErrorKind::OperationFailed {
2540                                shape: parent_shape,
2541                                operation: "SmartPointerStr for unknown smart pointer kind",
2542                            }));
2543                        };
2544
2545                        parent_frame.deinit();
2546
2547                        // Interpret the memory as a String, then convert and write.
2548                        let string_ptr = popped_frame.data.as_mut_byte_ptr() as *mut String;
2549                        let string_value = unsafe { core::ptr::read(string_ptr) };
2550
2551                        match known {
2552                            KnownPointer::Box => {
2553                                let boxed: Box<str> = string_value.into_boxed_str();
2554                                unsafe {
2555                                    core::ptr::write(
2556                                        parent_frame.data.as_mut_byte_ptr() as *mut Box<str>,
2557                                        boxed,
2558                                    );
2559                                }
2560                            }
2561                            KnownPointer::Arc => {
2562                                let arc: Arc<str> = Arc::from(string_value.into_boxed_str());
2563                                unsafe {
2564                                    core::ptr::write(
2565                                        parent_frame.data.as_mut_byte_ptr() as *mut Arc<str>,
2566                                        arc,
2567                                    );
2568                                }
2569                            }
2570                            KnownPointer::Rc => {
2571                                let rc: Rc<str> = Rc::from(string_value.into_boxed_str());
2572                                unsafe {
2573                                    core::ptr::write(
2574                                        parent_frame.data.as_mut_byte_ptr() as *mut Rc<str>,
2575                                        rc,
2576                                    );
2577                                }
2578                            }
2579                            _ => {
2580                                return Err(self.err(ReflectErrorKind::OperationFailed {
2581                                    shape: parent_shape,
2582                                    operation: "Don't know how to build this pointer type",
2583                                }));
2584                            }
2585                        }
2586
2587                        parent_frame.is_init = true;
2588
2589                        popped_frame.tracker = Tracker::Scalar;
2590                        popped_frame.is_init = false;
2591                        popped_frame.dealloc();
2592                    }
2593                    _ => {
2594                        // This can happen if begin_inner() was called on a type that
2595                        // has shape.inner but isn't a SmartPointer (e.g., Option).
2596                        // In this case, we can't complete the conversion, so return error.
2597                        return Err(self.err(ReflectErrorKind::OperationFailed {
2598                            shape: parent_shape,
2599                            operation: "end() called but parent has Uninit/Init tracker and isn't a SmartPointer",
2600                        }));
2601                    }
2602                }
2603            }
2604            Tracker::SmartPointerSlice {
2605                vtable,
2606                building_item,
2607                ..
2608            } => {
2609                if *building_item {
2610                    // We just popped an element frame, now push it to the slice builder
2611                    let element_ptr = PtrMut::new(popped_frame.data.as_mut_byte_ptr());
2612
2613                    // Use the slice builder's push_fn to add the element
2614                    crate::trace!("Pushing element to slice builder");
2615                    unsafe {
2616                        let parent_ptr = parent_frame.data.assume_init();
2617                        (vtable.push_fn)(parent_ptr, element_ptr);
2618                    }
2619
2620                    popped_frame.tracker = Tracker::Scalar;
2621                    popped_frame.is_init = false;
2622                    popped_frame.dealloc();
2623
2624                    if let Tracker::SmartPointerSlice {
2625                        building_item: bi, ..
2626                    } = &mut parent_frame.tracker
2627                    {
2628                        *bi = false;
2629                    }
2630                }
2631            }
2632            Tracker::DynamicValue {
2633                state:
2634                    DynamicValueState::Array {
2635                        building_element, ..
2636                    },
2637            } => {
2638                if *building_element {
2639                    // Check that the element is initialized before pushing
2640                    if !popped_frame.is_init {
2641                        // Element was never set - clean up and return error
2642                        let shape = parent_frame.allocated.shape();
2643                        popped_frame.dealloc();
2644                        *building_element = false;
2645                        // No need to poison - returning Err consumes self, Drop will handle cleanup
2646                        return Err(self.err(ReflectErrorKind::OperationFailed {
2647                            shape,
2648                            operation: "end() called but array element was never initialized",
2649                        }));
2650                    }
2651
2652                    // We just popped an element frame, now push it to the dynamic array
2653                    if let Def::DynamicValue(dyn_def) = parent_frame.allocated.shape().def {
2654                        // Get mutable pointers - both array and element need PtrMut
2655                        let array_ptr = unsafe { parent_frame.data.assume_init() };
2656                        let element_ptr = unsafe { popped_frame.data.assume_init() };
2657
2658                        // Use push_array_element to add element to the array
2659                        unsafe {
2660                            (dyn_def.vtable.push_array_element)(array_ptr, element_ptr);
2661                        }
2662
2663                        // Push moved out of popped_frame
2664                        popped_frame.tracker = Tracker::Scalar;
2665                        popped_frame.is_init = false;
2666                        popped_frame.dealloc();
2667
2668                        *building_element = false;
2669                    }
2670                }
2671            }
2672            Tracker::DynamicValue {
2673                state: DynamicValueState::Object { insert_state, .. },
2674            } => {
2675                if let DynamicObjectInsertState::BuildingValue { key } = insert_state {
2676                    // Check that the value is initialized before inserting
2677                    if !popped_frame.is_init {
2678                        // Value was never set - clean up and return error
2679                        let shape = parent_frame.allocated.shape();
2680                        popped_frame.dealloc();
2681                        *insert_state = DynamicObjectInsertState::Idle;
2682                        // No need to poison - returning Err consumes self, Drop will handle cleanup
2683                        return Err(self.err(ReflectErrorKind::OperationFailed {
2684                            shape,
2685                            operation: "end() called but object entry value was never initialized",
2686                        }));
2687                    }
2688
2689                    // We just popped a value frame, now insert it into the dynamic object
2690                    if let Def::DynamicValue(dyn_def) = parent_frame.allocated.shape().def {
2691                        // Get mutable pointers - both object and value need PtrMut
2692                        let object_ptr = unsafe { parent_frame.data.assume_init() };
2693                        let value_ptr = unsafe { popped_frame.data.assume_init() };
2694
2695                        // Use insert_object_entry to add the key-value pair
2696                        unsafe {
2697                            (dyn_def.vtable.insert_object_entry)(object_ptr, key, value_ptr);
2698                        }
2699
2700                        // Insert moved out of popped_frame
2701                        popped_frame.tracker = Tracker::Scalar;
2702                        popped_frame.is_init = false;
2703                        popped_frame.dealloc();
2704
2705                        // Reset insert state to Idle
2706                        *insert_state = DynamicObjectInsertState::Idle;
2707                    }
2708                }
2709            }
2710            _ => {}
2711        }
2712
2713        Ok(self)
2714    }
2715
2716    /// Returns a path representing the current traversal in the builder.
2717    ///
2718    /// The returned [`facet_path::Path`] can be formatted as a human-readable string
2719    /// using [`Path::format_with_shape()`](facet_path::Path::format_with_shape),
2720    /// e.g., `fieldName[index].subfield`.
2721    pub fn path(&self) -> Path {
2722        use facet_path::PathStep;
2723
2724        let root_shape = self
2725            .frames()
2726            .first()
2727            .expect("Partial must have at least one frame")
2728            .allocated
2729            .shape();
2730        let mut path = Path::new(root_shape);
2731
2732        for frame in self.frames().iter() {
2733            match frame.allocated.shape().ty {
2734                Type::User(user_type) => match user_type {
2735                    UserType::Struct(_struct_type) => {
2736                        // Add field step if we're currently in a field
2737                        if let Tracker::Struct {
2738                            current_child: Some(idx),
2739                            ..
2740                        } = &frame.tracker
2741                        {
2742                            path.push(PathStep::Field(*idx as u32));
2743                        }
2744                    }
2745                    UserType::Enum(enum_type) => {
2746                        // Add variant and optional field step
2747                        if let Tracker::Enum {
2748                            variant,
2749                            current_child,
2750                            ..
2751                        } = &frame.tracker
2752                        {
2753                            // Find the variant index by comparing pointers
2754                            if let Some(variant_idx) = enum_type
2755                                .variants
2756                                .iter()
2757                                .position(|v| core::ptr::eq(v, *variant))
2758                            {
2759                                path.push(PathStep::Variant(variant_idx as u32));
2760                            }
2761                            if let Some(idx) = *current_child {
2762                                path.push(PathStep::Field(idx as u32));
2763                            }
2764                        }
2765                    }
2766                    UserType::Union(_) => {
2767                        // No structural path steps for unions
2768                    }
2769                    UserType::Opaque => {
2770                        // Opaque types might be lists (e.g., Vec<T>)
2771                        if let Tracker::List {
2772                            current_child: Some(idx),
2773                            ..
2774                        } = &frame.tracker
2775                        {
2776                            path.push(PathStep::Index(*idx as u32));
2777                        }
2778                    }
2779                },
2780                Type::Sequence(facet_core::SequenceType::Array(_array_def)) => {
2781                    // Add index step if we're currently in an element
2782                    if let Tracker::Array {
2783                        current_child: Some(idx),
2784                        ..
2785                    } = &frame.tracker
2786                    {
2787                        path.push(PathStep::Index(*idx as u32));
2788                    }
2789                }
2790                Type::Sequence(_) => {
2791                    // Other sequence types (Slice, etc.) - no index tracking
2792                }
2793                Type::Pointer(_) => {
2794                    path.push(PathStep::Deref);
2795                }
2796                _ => {
2797                    // No structural path for scalars, etc.
2798                }
2799            }
2800        }
2801
2802        path
2803    }
2804
2805    /// Returns the root shape for path formatting.
2806    ///
2807    /// Use this together with [`path()`](Self::path) to format the path:
2808    /// ```ignore
2809    /// let path_str = partial.path().format_with_shape(partial.root_shape());
2810    /// ```
2811    pub fn root_shape(&self) -> &'static Shape {
2812        self.frames()
2813            .first()
2814            .expect("Partial should always have at least one frame")
2815            .allocated
2816            .shape()
2817    }
2818
2819    /// Create a [`ReflectError`] with the current path context.
2820    ///
2821    /// This is a convenience method for constructing errors inside `Partial` methods
2822    /// that automatically captures the current traversal path.
2823    #[inline]
2824    pub fn err(&self, kind: ReflectErrorKind) -> ReflectError {
2825        ReflectError::new(kind, self.path())
2826    }
2827
2828    /// Get the field for the parent frame
2829    pub fn parent_field(&self) -> Option<&Field> {
2830        self.frames()
2831            .iter()
2832            .rev()
2833            .nth(1)
2834            .and_then(|f| f.get_field())
2835    }
2836
2837    /// Gets the field for the current frame
2838    pub fn current_field(&self) -> Option<&Field> {
2839        self.frames().last().and_then(|f| f.get_field())
2840    }
2841
2842    /// Gets the nearest active field when nested wrapper frames are involved.
2843    ///
2844    /// This walks frames from innermost to outermost and returns the first frame
2845    /// that currently points at a struct/enum field.
2846    pub fn nearest_field(&self) -> Option<&Field> {
2847        self.frames().iter().rev().find_map(|f| f.get_field())
2848    }
2849
2850    /// Returns a const pointer to the current frame's data.
2851    ///
2852    /// This is useful for validation - after deserializing a field value,
2853    /// validators can read the value through this pointer.
2854    ///
2855    /// # Safety
2856    ///
2857    /// The returned pointer is valid only while the frame exists.
2858    /// The caller must ensure the frame is fully initialized before
2859    /// reading through this pointer.
2860    #[deprecated(note = "use initialized_data_ptr() instead, which checks initialization")]
2861    pub fn data_ptr(&self) -> Option<facet_core::PtrConst> {
2862        if self.state != PartialState::Active {
2863            return None;
2864        }
2865        self.frames().last().map(|f| {
2866            // SAFETY: We're in active state, so the frame is valid.
2867            // The caller is responsible for ensuring the data is initialized.
2868            unsafe { f.data.assume_init().as_const() }
2869        })
2870    }
2871
2872    /// Returns a const pointer to the current frame's data, but only if fully initialized.
2873    ///
2874    /// This is the safe way to get a pointer for validation - it verifies that
2875    /// the frame is fully initialized before returning the pointer.
2876    ///
2877    /// Returns `None` if:
2878    /// - The partial is not in active state
2879    /// - The current frame is not fully initialized
2880    #[allow(unsafe_code)]
2881    pub fn initialized_data_ptr(&mut self) -> Option<facet_core::PtrConst> {
2882        if self.state != PartialState::Active {
2883            return None;
2884        }
2885        let frame = self.frames_mut().last_mut()?;
2886
2887        // Check if fully initialized (may drain rope for lists)
2888        if frame.require_full_initialization().is_err() {
2889            return None;
2890        }
2891
2892        // SAFETY: We've verified the partial is active and the frame is fully initialized.
2893        Some(unsafe { frame.data.assume_init().as_const() })
2894    }
2895
2896    /// Returns a typed reference to the current frame's data if:
2897    /// 1. The partial is in active state
2898    /// 2. The current frame is fully initialized
2899    /// 3. The shape matches `T::SHAPE`
2900    ///
2901    /// This is the safe way to read a value from a Partial for validation purposes.
2902    #[allow(unsafe_code)]
2903    pub fn read_as<T: facet_core::Facet<'facet>>(&mut self) -> Option<&T> {
2904        if self.state != PartialState::Active {
2905            return None;
2906        }
2907        let frame = self.frames_mut().last_mut()?;
2908
2909        // Check if fully initialized (may drain rope for lists)
2910        if frame.require_full_initialization().is_err() {
2911            return None;
2912        }
2913
2914        // Check shape matches
2915        if frame.allocated.shape() != T::SHAPE {
2916            return None;
2917        }
2918
2919        // SAFETY: We've verified:
2920        // 1. The partial is active (frame is valid)
2921        // 2. The frame is fully initialized
2922        // 3. The shape matches T::SHAPE
2923        unsafe {
2924            let ptr = frame.data.assume_init().as_const();
2925            Some(&*ptr.as_ptr::<T>())
2926        }
2927    }
2928}