// facet_reflect/partial/partial_api/build.rs
1use super::*;
2use crate::HasFields;
3#[cfg(feature = "std")]
4use core::cell::RefCell;
5use hashbrown::{HashMap, HashSet};
6
#[cfg(feature = "std")]
thread_local! {
    // Per-thread memoization of final `shape_subtree_has_invariants` answers,
    // keyed by type id. Shared across builds on the same thread so repeated
    // builds of the same types skip the recursive shape walk entirely.
    // Only final (bool) answers are stored here; in-progress cycle markers
    // live in the per-build cache passed down the recursion.
    static INVARIANT_SUBTREE_CACHE: RefCell<HashMap<facet_core::ConstTypeId, bool>> =
        RefCell::new(HashMap::new());
}
12
/// Returns `true` if `shape` — or any type reachable through its struct/enum
/// fields, list/array/slice/set elements, map keys/values, option/result
/// payloads, or smart-pointer pointees — declares invariants in its vtable.
///
/// Results are memoized two ways:
/// - `cache` (per build): `Some(bool)` is a final answer; `None` marks a shape
///   currently being evaluated on this recursion path, which is how recursive
///   type cycles are detected.
/// - `INVARIANT_SUBTREE_CACHE` (per thread, `std` only): final answers shared
///   across builds.
fn shape_subtree_has_invariants(
    shape: &'static Shape,
    cache: &mut HashMap<facet_core::ConstTypeId, Option<bool>>,
) -> bool {
    // Fast path: final answer already memoized on this thread by an earlier build.
    #[cfg(feature = "std")]
    if let Some(cached) = INVARIANT_SUBTREE_CACHE.with(|memo| memo.borrow().get(&shape.id).copied())
    {
        return cached;
    }

    if let Some(cached) = cache.get(&shape.id) {
        // `None` means we're currently evaluating this shape in a recursive cycle.
        // Returning false here breaks recursion; direct invariants are checked before descent.
        return cached.unwrap_or(false);
    }

    // Mark this shape as in-progress before descending (cycle guard).
    cache.insert(shape.id, None);

    // A shape "has invariants" if its own vtable declares them, or if any
    // reachable child shape does. The vtable check runs first, so a cycle
    // back-edge answering `false` above never hides a direct invariant.
    let has_invariants = if shape.vtable.has_invariants() {
        true
    } else {
        match shape.ty {
            Type::User(UserType::Struct(struct_ty)) => struct_ty
                .fields
                .iter()
                .any(|field| shape_subtree_has_invariants(field.shape.get(), cache)),
            Type::User(UserType::Enum(enum_ty)) => enum_ty.variants.iter().any(|variant| {
                variant
                    .data
                    .fields
                    .iter()
                    .any(|field| shape_subtree_has_invariants(field.shape.get(), cache))
            }),
            // Not a user type: inspect the def-level containers instead.
            _ => match shape.def {
                Def::List(list) => shape_subtree_has_invariants(list.t(), cache),
                Def::Array(array) => shape_subtree_has_invariants(array.t(), cache),
                Def::Slice(slice) => shape_subtree_has_invariants(slice.t(), cache),
                Def::Map(map) => {
                    shape_subtree_has_invariants(map.k(), cache)
                        || shape_subtree_has_invariants(map.v(), cache)
                }
                Def::Set(set) => shape_subtree_has_invariants(set.t(), cache),
                Def::Option(opt) => shape_subtree_has_invariants(opt.t(), cache),
                Def::Result(result) => {
                    shape_subtree_has_invariants(result.t(), cache)
                        || shape_subtree_has_invariants(result.e(), cache)
                }
                Def::Pointer(ptr) => ptr
                    .pointee()
                    .is_some_and(|pointee| shape_subtree_has_invariants(pointee, cache)),
                // Scalars / opaque defs: nothing to descend into.
                _ => false,
            },
        }
    };

    // Persist the final answer in both caches (thread-local is std-only).
    #[cfg(feature = "std")]
    INVARIANT_SUBTREE_CACHE.with(|memo| {
        memo.borrow_mut().insert(shape.id, has_invariants);
    });
    cache.insert(shape.id, Some(has_invariants));
    has_invariants
}
75
/// Walks `value` depth-first and calls every invariant validator found in the
/// tree, returning the first failure as `(offending shape, message)`.
///
/// Subtrees whose shapes provably contain no invariants (per
/// [`shape_subtree_has_invariants`]) are pruned, and `visited` ensures each
/// concrete value is validated at most once — which also terminates traversal
/// of cyclic data (e.g. through shared pointers).
///
/// Safety requirement on the caller: `value` must point at fully initialized
/// data, since validators are invoked on the raw value pointer.
fn validate_invariants_recursive<'mem, 'facet>(
    value: Peek<'mem, 'facet>,
    visited: &mut HashSet<crate::ValueId>,
    shape_cache: &mut HashMap<facet_core::ConstTypeId, Option<bool>>,
) -> Result<(), (&'static Shape, String)> {
    // Prune: nothing anywhere under this shape declares invariants.
    if !shape_subtree_has_invariants(value.shape(), shape_cache) {
        return Ok(());
    }

    // Sharing/cycle guard: mark before descending so self-referential values
    // cannot recurse forever.
    let id = value.id();
    if !visited.insert(id) {
        return Ok(());
    }

    // Validate this value's own invariants before descending into children.
    if let Some(result) = unsafe { value.shape().call_invariants(value.data()) }
        && let Err(message) = result
    {
        return Err((value.shape(), message));
    }

    // Descend into children. Each branch re-checks the per-shape pruning
    // before recursing so we skip building Peeks for invariant-free subtrees.
    match value.shape().ty {
        Type::User(UserType::Struct(_)) => {
            if let Ok(peek_struct) = value.into_struct() {
                for (field, child) in peek_struct.fields() {
                    if shape_subtree_has_invariants(field.shape.get(), shape_cache) {
                        validate_invariants_recursive(child, visited, shape_cache)?;
                    }
                }
            }
        }
        Type::User(UserType::Enum(_)) => {
            if let Ok(peek_enum) = value.into_enum() {
                // Only the active variant's fields are yielded here.
                for (field, child) in peek_enum.fields() {
                    if shape_subtree_has_invariants(field.shape.get(), shape_cache) {
                        validate_invariants_recursive(child, visited, shape_cache)?;
                    }
                }
            }
        }
        _ => match value.shape().def {
            Def::List(_) | Def::Array(_) | Def::Slice(_) => {
                if let Ok(list_like) = value.into_list_like()
                    && shape_subtree_has_invariants(list_like.def.t(), shape_cache)
                {
                    for elem in list_like.iter() {
                        validate_invariants_recursive(elem, visited, shape_cache)?;
                    }
                }
            }
            Def::Map(_) => {
                if let Ok(map) = value.into_map() {
                    // Check key/value shapes once, outside the entry loop.
                    let def = map.def();
                    let key_has_invariants = shape_subtree_has_invariants(def.k(), shape_cache);
                    let value_has_invariants = shape_subtree_has_invariants(def.v(), shape_cache);
                    if key_has_invariants || value_has_invariants {
                        for (key, val) in map.iter() {
                            if key_has_invariants {
                                validate_invariants_recursive(key, visited, shape_cache)?;
                            }
                            if value_has_invariants {
                                validate_invariants_recursive(val, visited, shape_cache)?;
                            }
                        }
                    }
                }
            }
            Def::Set(_) => {
                if let Ok(set) = value.into_set()
                    && shape_subtree_has_invariants(set.def().t(), shape_cache)
                {
                    for elem in set.iter() {
                        validate_invariants_recursive(elem, visited, shape_cache)?;
                    }
                }
            }
            Def::Option(_) => {
                if let Ok(opt) = value.into_option()
                    && let Some(inner) = opt.value()
                    && shape_subtree_has_invariants(inner.shape(), shape_cache)
                {
                    validate_invariants_recursive(inner, visited, shape_cache)?;
                }
            }
            Def::Result(_) => {
                // The recursive call prunes on entry, so no pre-check needed here.
                if let Ok(result) = value.into_result() {
                    if let Some(ok) = result.ok() {
                        validate_invariants_recursive(ok, visited, shape_cache)?;
                    }
                    if let Some(err) = result.err() {
                        validate_invariants_recursive(err, visited, shape_cache)?;
                    }
                }
            }
            Def::Pointer(_) => {
                // Only pointers whose inner value can be borrowed are descended
                // (e.g. Box/Rc/Arc); opaque pointers are skipped.
                if let Ok(ptr) = value.into_pointer()
                    && let Some(inner) = ptr.borrow_inner()
                    && shape_subtree_has_invariants(inner.shape(), shape_cache)
                {
                    validate_invariants_recursive(inner, visited, shape_cache)?;
                }
            }
            // Scalars and opaque defs: no children to validate.
            _ => {}
        },
    }

    Ok(())
}
183
184////////////////////////////////////////////////////////////////////////////////////////////////////
185// Build
186////////////////////////////////////////////////////////////////////////////////////////////////////
187impl<'facet, const BORROW: bool> Partial<'facet, BORROW> {
188    /// Builds the value, consuming the Partial.
189    pub fn build(mut self) -> Result<HeapValue<'facet, BORROW>, ReflectError> {
190        use crate::typeplan::TypePlanNodeKind;
191
192        if self.frames().len() != 1 {
193            return Err(self.err(ReflectErrorKind::InvariantViolation {
194                invariant: "Partial::build() expects a single frame — call end() until that's the case",
195            }));
196        }
197
198        // Try the optimized path using precomputed FieldInitPlan (includes validators)
199        // Extract frame info first (borrows only self.mode)
200        let frame_info = self.mode.stack().last().map(|frame| {
201            let variant_idx = match &frame.tracker {
202                Tracker::Enum { variant_idx, .. } => Some(*variant_idx),
203                _ => None,
204            };
205            (frame.type_plan, variant_idx)
206        });
207
208        // Look up plans from the type plan node - need to resolve NodeId to get the actual node
209        let plans_info = frame_info.and_then(|(type_plan_id, variant_idx)| {
210            let type_plan = self.root_plan.node(type_plan_id);
211            match &type_plan.kind {
212                TypePlanNodeKind::Struct(struct_plan) => Some(struct_plan.fields),
213                TypePlanNodeKind::Enum(enum_plan) => {
214                    let variants = self.root_plan.variants(enum_plan.variants);
215                    variant_idx.and_then(|idx| variants.get(idx).map(|v| v.fields))
216                }
217                _ => None,
218            }
219        });
220
221        if let Some(plans_range) = plans_info {
222            // Resolve the SliceRange to an actual slice
223            let plans = self.root_plan.fields(plans_range);
224            // Now mutably borrow mode.stack to get the frame
225            // (root_plan borrow of `plans` is still active but that's fine -
226            // mode and root_plan are separate fields)
227            let frame = self.mode.stack_mut().last_mut().unwrap();
228            crate::trace!(
229                "build(): Using optimized fill_and_require_fields for {}, tracker={:?}",
230                frame.allocated.shape(),
231                frame.tracker.kind()
232            );
233            frame
234                .fill_and_require_fields(plans, plans.len(), &self.root_plan)
235                .map_err(|e| self.err(e))?;
236        } else {
237            // Fall back to the old path if optimized path wasn't available
238            let frame = self.frames_mut().last_mut().unwrap();
239            crate::trace!(
240                "build(): calling fill_defaults for {}, tracker={:?}, is_init={}",
241                frame.allocated.shape(),
242                frame.tracker.kind(),
243                frame.is_init
244            );
245            if let Err(e) = frame.fill_defaults() {
246                return Err(self.err(e));
247            }
248            crate::trace!(
249                "build(): after fill_defaults, tracker={:?}, is_init={}",
250                frame.tracker.kind(),
251                frame.is_init
252            );
253
254            let frame = self.frames_mut().last_mut().unwrap();
255            crate::trace!(
256                "build(): calling require_full_initialization, tracker={:?}",
257                frame.tracker.kind()
258            );
259            let result = frame.require_full_initialization();
260            crate::trace!(
261                "build(): require_full_initialization result: {:?}",
262                result.is_ok()
263            );
264            result.map_err(|e| self.err(e))?
265        }
266
267        let frame = self.frames_mut().pop().unwrap();
268
269        // Validate invariants on the full value tree (root + nested values).
270        // Safety: the value is fully initialized at this point.
271        let value_ptr = unsafe { frame.data.assume_init().as_const() };
272        let root = unsafe { Peek::unchecked_new(value_ptr, frame.allocated.shape()) };
273        let mut visited = HashSet::new();
274        let mut shape_cache = HashMap::new();
275        if let Err((shape, message)) =
276            validate_invariants_recursive(root, &mut visited, &mut shape_cache)
277        {
278            // Put the frame back so Drop can handle cleanup properly
279            self.frames_mut().push(frame);
280            return Err(self.err(ReflectErrorKind::UserInvariantFailed { message, shape }));
281        }
282
283        // Mark as built to prevent Drop from cleaning up the value
284        self.state = PartialState::Built;
285
286        match frame
287            .allocated
288            .shape()
289            .layout
290            .sized_layout()
291            .map_err(|_layout_err| {
292                self.err(ReflectErrorKind::Unsized {
293                    shape: frame.allocated.shape(),
294                    operation: "build (final check for sized layout)",
295                })
296            }) {
297            Ok(layout) => {
298                // Determine if we should deallocate based on ownership
299                let should_dealloc = frame.ownership.needs_dealloc();
300
301                Ok(HeapValue {
302                    guard: Some(Guard {
303                        ptr: unsafe { NonNull::new_unchecked(frame.data.as_mut_byte_ptr()) },
304                        layout,
305                        should_dealloc,
306                    }),
307                    shape: frame.allocated.shape(),
308                    phantom: PhantomData,
309                })
310            }
311            Err(e) => {
312                // Put the frame back for proper cleanup
313                self.frames_mut().push(frame);
314                Err(e)
315            }
316        }
317    }
318
319    /// Finishes deserialization in-place, validating the value without moving it.
320    ///
321    /// This is intended for use with [`from_raw`](Self::from_raw) where the value
322    /// is deserialized into caller-provided memory (e.g., a `MaybeUninit<T>` on the stack).
323    ///
324    /// On success, the caller can safely assume the memory contains a fully initialized,
325    /// valid value and call `MaybeUninit::assume_init()`.
326    ///
327    /// On failure, any partially initialized data is cleaned up (dropped), and the
328    /// memory should be considered uninitialized.
329    ///
330    /// # Panics
331    ///
332    /// Panics if called with more than one frame on the stack (i.e., if you haven't
333    /// called `end()` enough times to return to the root level).
334    ///
335    /// # Example
336    ///
337    /// ```ignore
338    /// use std::mem::MaybeUninit;
339    /// use facet_core::{Facet, PtrUninit};
340    /// use facet_reflect::Partial;
341    ///
342    /// let mut slot = MaybeUninit::<MyStruct>::uninit();
343    /// let ptr = PtrUninit::new(slot.as_mut_ptr().cast());
344    ///
345    /// let partial = unsafe { Partial::from_raw_with_shape(ptr, MyStruct::SHAPE)? };
346    /// // ... deserialize into partial ...
347    /// partial.finish_in_place()?;
348    ///
349    /// // Now safe to assume initialized
350    /// let value = unsafe { slot.assume_init() };
351    /// ```
352    pub fn finish_in_place(mut self) -> Result<(), ReflectError> {
353        use crate::typeplan::TypePlanNodeKind;
354
355        if self.frames().len() != 1 {
356            return Err(self.err(ReflectErrorKind::InvariantViolation {
357                invariant: "Partial::finish_in_place() expects a single frame — call end() until that's the case",
358            }));
359        }
360
361        // Try the optimized path using precomputed FieldInitPlan (includes validators)
362        // Extract frame info first (borrows only self.mode)
363        let frame_info = self.mode.stack().last().map(|frame| {
364            let variant_idx = match &frame.tracker {
365                Tracker::Enum { variant_idx, .. } => Some(*variant_idx),
366                _ => None,
367            };
368            (frame.type_plan, variant_idx)
369        });
370
371        // Look up plans from the type plan node - need to resolve NodeId to get the actual node
372        let plans_info = frame_info.and_then(|(type_plan_id, variant_idx)| {
373            let type_plan = self.root_plan.node(type_plan_id);
374            match &type_plan.kind {
375                TypePlanNodeKind::Struct(struct_plan) => Some(struct_plan.fields),
376                TypePlanNodeKind::Enum(enum_plan) => {
377                    let variants = self.root_plan.variants(enum_plan.variants);
378                    variant_idx.and_then(|idx| variants.get(idx).map(|v| v.fields))
379                }
380                _ => None,
381            }
382        });
383
384        if let Some(plans_range) = plans_info {
385            // Resolve the SliceRange to an actual slice
386            let plans = self.root_plan.fields(plans_range);
387            // Now mutably borrow mode.stack to get the frame
388            // (root_plan borrow of `plans` is still active but that's fine -
389            // mode and root_plan are separate fields)
390            let frame = self.mode.stack_mut().last_mut().unwrap();
391            crate::trace!(
392                "finish_in_place(): Using optimized fill_and_require_fields for {}, tracker={:?}",
393                frame.allocated.shape(),
394                frame.tracker.kind()
395            );
396            frame
397                .fill_and_require_fields(plans, plans.len(), &self.root_plan)
398                .map_err(|e| self.err(e))?;
399        } else {
400            // Fall back to the old path if optimized path wasn't available
401            let frame = self.frames_mut().last_mut().unwrap();
402            crate::trace!(
403                "finish_in_place(): calling fill_defaults for {}, tracker={:?}, is_init={}",
404                frame.allocated.shape(),
405                frame.tracker.kind(),
406                frame.is_init
407            );
408            if let Err(e) = frame.fill_defaults() {
409                return Err(self.err(e));
410            }
411            crate::trace!(
412                "finish_in_place(): after fill_defaults, tracker={:?}, is_init={}",
413                frame.tracker.kind(),
414                frame.is_init
415            );
416
417            let frame = self.frames_mut().last_mut().unwrap();
418            crate::trace!(
419                "finish_in_place(): calling require_full_initialization, tracker={:?}",
420                frame.tracker.kind()
421            );
422            let result = frame.require_full_initialization();
423            crate::trace!(
424                "finish_in_place(): require_full_initialization result: {:?}",
425                result.is_ok()
426            );
427            result.map_err(|e| self.err(e))?
428        }
429
430        let frame = self.frames_mut().pop().unwrap();
431
432        // Validate invariants on the full value tree (root + nested values).
433        // Safety: the value is fully initialized at this point.
434        let value_ptr = unsafe { frame.data.assume_init().as_const() };
435        let root = unsafe { Peek::unchecked_new(value_ptr, frame.allocated.shape()) };
436        let mut visited = HashSet::new();
437        let mut shape_cache = HashMap::new();
438        if let Err((shape, message)) =
439            validate_invariants_recursive(root, &mut visited, &mut shape_cache)
440        {
441            // Put the frame back so Drop can handle cleanup properly
442            self.frames_mut().push(frame);
443            return Err(self.err(ReflectErrorKind::UserInvariantFailed { message, shape }));
444        }
445
446        // Mark as built to prevent Drop from cleaning up the now-valid value.
447        // The caller owns the memory and will handle the value from here.
448        self.state = PartialState::Built;
449
450        // Frame is dropped here without deallocation (External ownership doesn't dealloc)
451        Ok(())
452    }
453}