// facet_reflect/peek/value.rs

1use core::{cmp::Ordering, marker::PhantomData, ptr::NonNull};
2#[cfg(feature = "alloc")]
3use facet_core::Field;
4use facet_core::{
5    Def, Facet, PointerType, PtrConst, Shape, StructKind, Type, TypeNameOpts, UserType,
6    VTableErased, Variance,
7};
8
9use crate::{PeekNdArray, PeekSet, ReflectError, ReflectErrorKind, ScalarType};
10use facet_path::{Path, PathAccessError, PathStep};
11
12use super::{
13    ListLikeDef, PeekDynamicValue, PeekEnum, PeekList, PeekListLike, PeekMap, PeekOption,
14    PeekPointer, PeekResult, PeekStruct, PeekTuple, tuple::TupleType,
15};
16
17#[cfg(feature = "alloc")]
18use super::OwnedPeek;
19
/// A unique identifier for a peek value
///
/// Combines the value's shape with its address, so two `ValueId`s compare
/// equal only when they refer to the same typed location in memory.
/// Used for cycle detection when traversing nested values.
#[derive(Clone, Copy, PartialEq, PartialOrd, Ord, Eq, Hash)]
pub struct ValueId {
    // Shape (type descriptor) of the identified value.
    pub(crate) shape: &'static Shape,
    // Address of the value in memory (type-erased).
    pub(crate) ptr: *const u8,
}
26
27impl ValueId {
28    #[inline]
29    pub(crate) const fn new(shape: &'static Shape, ptr: *const u8) -> Self {
30        Self { shape, ptr }
31    }
32}
33
34impl core::fmt::Display for ValueId {
35    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
36        write!(f, "{}@{:p}", self.shape, self.ptr)
37    }
38}
39
40impl core::fmt::Debug for ValueId {
41    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
42        core::fmt::Display::fmt(self, f)
43    }
44}
45
/// A read-only view into a value with runtime type information.
///
/// `Peek` provides reflection capabilities for reading values at runtime.
/// If the value is a struct, you can read its fields; if it's an enum,
/// you can determine which variant is selected; if it's a scalar, you can
/// extract a concrete value.
///
/// # Lifetime Parameters
///
/// - `'mem`: The memory lifetime - how long the underlying data is valid
/// - `'facet`: The type's lifetime parameter (for types like `&'a str`)
///
/// # Variance and Soundness
///
/// `Peek` is **invariant** with respect to `'facet`. This is required for soundness:
/// if `Peek` were covariant, it would be possible to launder lifetimes
/// through reflection, leading to use-after-free bugs with types like
/// `fn(&'a str)`. See [issue #1168](https://github.com/facet-rs/facet/issues/1168).
///
/// The underlying type's variance is tracked in [`Shape::variance`], which
/// can be used for future variance-aware APIs.
#[allow(clippy::type_complexity)]
#[derive(Clone, Copy)]
pub struct Peek<'mem, 'facet> {
    /// Type-erased pointer to the underlying data (valid for `'mem`)
    pub(crate) data: PtrConst,

    /// Shape (type descriptor) of the value behind `data`
    pub(crate) shape: &'static Shape,

    // Invariant with respect to 'facet: Peek<'mem, 'a> cannot be cast to Peek<'mem, 'b> even if 'a: 'b.
    //
    // This is REQUIRED for soundness! If Peek were covariant with respect to 'facet, we could:
    // 1. Create Peek<'mem, 'static> from FnWrapper<'static> (contains fn(&'static str))
    // 2. Use covariance to cast it to Peek<'mem, 'short>
    // 3. Call get::<FnWrapper<'short>>() to get &FnWrapper<'short>
    // 4. This would allow calling the function with a &'short str that goes out of scope
    //    while the original function pointer still holds it as 'static
    //
    // The fn(&'a ()) -> &'a () pattern makes this type invariant with respect to 'facet.
    // The &'mem () makes this type covariant with respect to 'mem (safe because we only read through it).
    // See: https://github.com/facet-rs/facet/issues/1168
    _invariant: PhantomData<(&'mem (), fn(&'facet ()) -> &'facet ())>,
}
90
91impl<'mem, 'facet> Peek<'mem, 'facet> {
92    /// Returns a read-only view over a `T` value.
93    pub fn new<T: Facet<'facet> + ?Sized>(t: &'mem T) -> Self {
94        Self {
95            data: PtrConst::new(NonNull::from(t).as_ptr()),
96            shape: T::SHAPE,
97            _invariant: PhantomData,
98        }
99    }
100
101    /// Construct a ReflectError with this peek's shape as the root path.
102    #[inline]
103    pub(crate) fn err(&self, kind: ReflectErrorKind) -> ReflectError {
104        ReflectError::new(kind, Path::new(self.shape))
105    }
106
107    /// Returns a read-only view over a value (given its shape), trusting you
108    /// that those two match.
109    ///
110    /// # Safety
111    ///
112    /// This function is unsafe because it doesn't check if the provided data
113    /// and shape are compatible. The caller must ensure that the data is valid
114    /// for the given shape.
115    pub unsafe fn unchecked_new(data: PtrConst, shape: &'static Shape) -> Self {
116        Self {
117            data,
118            shape,
119            _invariant: PhantomData,
120        }
121    }
122
123    // =============================================================================
124    // Variance-aware lifetime transformation methods
125    // =============================================================================
126
127    /// Returns the computed variance of the underlying type.
128    ///
129    /// This walks the type's fields to determine if the type is covariant,
130    /// contravariant, or invariant with respect to its lifetime parameter.
131    #[inline]
132    pub fn variance(&self) -> Variance {
133        self.shape.computed_variance()
134    }
135
136    /// Shrinks the `'facet` lifetime parameter.
137    ///
138    /// This is safe for covariant and bivariant types: if data is valid for `'static`,
139    /// it's also valid for any shorter lifetime `'shorter`.
140    ///
141    /// From the [Rust Reference](https://doc.rust-lang.org/reference/subtyping.html):
142    /// - Covariant types can shrink lifetimes (`'static` → `'a`)
143    /// - Bivariant types can go either direction (no lifetime constraints)
144    ///
145    /// # Panics
146    ///
147    /// Panics if the type cannot shrink lifetimes (i.e., if it's contravariant or invariant).
148    #[inline]
149    pub fn shrink_lifetime<'shorter>(self) -> Peek<'mem, 'shorter>
150    where
151        'facet: 'shorter,
152    {
153        self.try_shrink_lifetime()
154            .expect("shrink_lifetime requires a covariant type")
155    }
156
157    /// Tries to shrink the `'facet` lifetime parameter.
158    ///
159    /// Returns `Some` if the type can shrink lifetimes (covariant or bivariant),
160    /// or `None` if the type is invariant or contravariant.
161    ///
162    /// See [`Variance::can_shrink`] for details.
163    #[inline]
164    pub fn try_shrink_lifetime<'shorter>(self) -> Option<Peek<'mem, 'shorter>>
165    where
166        'facet: 'shorter,
167    {
168        if self.variance().can_shrink() {
169            Some(Peek {
170                data: self.data,
171                shape: self.shape,
172                _invariant: PhantomData,
173            })
174        } else {
175            None
176        }
177    }
178
179    /// Grows the `'facet` lifetime parameter.
180    ///
181    /// This is safe for contravariant and bivariant types: if a function accepts `'short`,
182    /// it can also accept `'longer` (a longer lifetime is more restrictive).
183    ///
184    /// From the [Rust Reference](https://doc.rust-lang.org/reference/subtyping.html):
185    /// - Contravariant types can grow lifetimes (`'a` → `'static`)
186    /// - Bivariant types can go either direction (no lifetime constraints)
187    ///
188    /// # Panics
189    ///
190    /// Panics if the type cannot grow lifetimes (i.e., if it's covariant or invariant).
191    #[inline]
192    pub fn grow_lifetime<'longer>(self) -> Peek<'mem, 'longer>
193    where
194        'longer: 'facet,
195    {
196        self.try_grow_lifetime()
197            .expect("grow_lifetime requires a contravariant type")
198    }
199
200    /// Tries to grow the `'facet` lifetime parameter.
201    ///
202    /// Returns `Some` if the type can grow lifetimes (contravariant or bivariant),
203    /// or `None` if the type is invariant or covariant.
204    ///
205    /// See [`Variance::can_grow`] for details.
206    #[inline]
207    pub fn try_grow_lifetime<'longer>(self) -> Option<Peek<'mem, 'longer>>
208    where
209        'longer: 'facet,
210    {
211        if self.variance().can_grow() {
212            Some(Peek {
213                data: self.data,
214                shape: self.shape,
215                _invariant: PhantomData,
216            })
217        } else {
218            None
219        }
220    }
221
222    /// Returns the vtable
223    #[inline(always)]
224    pub const fn vtable(&self) -> VTableErased {
225        self.shape.vtable
226    }
227
228    /// Returns a unique identifier for this value, usable for cycle detection
229    #[inline]
230    pub fn id(&self) -> ValueId {
231        ValueId::new(self.shape, self.data.raw_ptr())
232    }
233
234    /// Returns true if the two values are pointer-equal
235    #[inline]
236    pub fn ptr_eq(&self, other: &Peek<'_, '_>) -> bool {
237        self.data.raw_ptr() == other.data.raw_ptr()
238    }
239
240    /// Returns true if this scalar is equal to the other scalar
241    ///
242    /// # Returns
243    ///
244    /// `false` if equality comparison is not supported for this scalar type
245    #[inline]
246    pub fn partial_eq(&self, other: &Peek<'_, '_>) -> Result<bool, ReflectError> {
247        if self.shape != other.shape {
248            return Err(self.err(ReflectErrorKind::WrongShape {
249                expected: self.shape,
250                actual: other.shape,
251            }));
252        }
253
254        if let Some(result) = unsafe { self.shape.call_partial_eq(self.data, other.data) } {
255            return Ok(result);
256        }
257
258        Err(self.err(ReflectErrorKind::OperationFailed {
259            shape: self.shape(),
260            operation: "partial_eq",
261        }))
262    }
263
264    /// Compares this scalar with another and returns their ordering
265    ///
266    /// # Returns
267    ///
268    /// `None` if comparison is not supported for this scalar type
269    #[inline]
270    pub fn partial_cmp(&self, other: &Peek<'_, '_>) -> Result<Option<Ordering>, ReflectError> {
271        if self.shape != other.shape {
272            return Err(self.err(ReflectErrorKind::WrongShape {
273                expected: self.shape,
274                actual: other.shape,
275            }));
276        }
277
278        if let Some(result) = unsafe { self.shape.call_partial_cmp(self.data, other.data) } {
279            return Ok(result);
280        }
281
282        Err(self.err(ReflectErrorKind::OperationFailed {
283            shape: self.shape(),
284            operation: "partial_cmp",
285        }))
286    }
287
288    /// Hashes this scalar using the vtable hash function.
289    ///
290    /// # Returns
291    ///
292    /// `Err` if hashing is not supported for this scalar type, `Ok` otherwise
293    #[inline(always)]
294    pub fn hash(&self, hasher: &mut dyn core::hash::Hasher) -> Result<(), ReflectError> {
295        let mut proxy = facet_core::HashProxy::new(hasher);
296        if unsafe { self.shape.call_hash(self.data, &mut proxy) }.is_some() {
297            return Ok(());
298        }
299
300        Err(self.err(ReflectErrorKind::OperationFailed {
301            shape: self.shape(),
302            operation: "hash",
303        }))
304    }
305
    /// Computes a structural hash of this value.
    ///
    /// Unlike [`hash`](Self::hash), this method recursively traverses the structure
    /// and hashes each component, making it work for types that don't implement `Hash`.
    ///
    /// For scalars with a vtable hash function, it uses that. For compound types
    /// (structs, enums, lists, etc.), it recursively hashes the structure.
    ///
    /// This is useful for Merkle-tree style hashing where you want to compare
    /// subtrees for equality based on their structural content.
    ///
    /// # Panics
    ///
    /// Panics if a leaf type has no vtable `Hash` implementation and is not one
    /// of the structurally-traversable kinds handled below (f32/f64 are
    /// special-cased by hashing their bit representation).
    pub fn structural_hash<H: core::hash::Hasher>(&self, hasher: &mut H) {
        use core::hash::Hash;

        // First, hash the shape's type identifier for type discrimination
        self.shape.id.hash(hasher);

        // Try vtable hash first for scalars
        let mut proxy = facet_core::HashProxy::new(hasher);
        if unsafe { self.shape.call_hash(self.data, &mut proxy) }.is_some() {
            return;
        }

        // Otherwise, traverse the structure recursively
        match self.shape.ty {
            Type::User(UserType::Struct(struct_type)) => {
                // Hash struct kind
                (struct_type.kind as u8).hash(hasher);

                // Hash each field, skipping metadata fields
                for field in struct_type.fields {
                    // Skip metadata fields - they don't affect structural identity
                    if field.is_metadata() {
                        continue;
                    }

                    // Hash field name
                    field.name.hash(hasher);

                    // Get field value and hash it recursively
                    let field_offset = field.offset;
                    let field_shape = field.shape();
                    // SAFETY: field_offset/field_shape come from this struct's own
                    // shape description, so the projected pointer is a valid view
                    // of the field within self.data.
                    let field_ptr = unsafe { self.data.field(field_offset) };
                    let field_peek = unsafe { Peek::unchecked_new(field_ptr, field_shape) };
                    field_peek.structural_hash(hasher);
                }
            }

            Type::User(UserType::Enum(_enum_type)) => {
                // Get the discriminant and variant.
                // If the active variant cannot be determined, the enum payload
                // is silently skipped (only the shape id above is hashed).
                if let Ok(peek_enum) = self.into_enum()
                    && let Ok(variant) = peek_enum.active_variant()
                {
                    // Hash variant name
                    variant.name.hash(hasher);

                    // Hash variant payload based on kind
                    match variant.data.kind {
                        StructKind::Unit => {
                            // No payload to hash
                        }
                        StructKind::TupleStruct | StructKind::Tuple => {
                            // Hash tuple fields (no names)
                            use super::HasFields;
                            for (_field, peek) in peek_enum.fields() {
                                peek.structural_hash(hasher);
                            }
                        }
                        StructKind::Struct => {
                            // Hash named fields
                            use super::HasFields;
                            for (field, peek) in peek_enum.fields() {
                                field.name.hash(hasher);
                                peek.structural_hash(hasher);
                            }
                        }
                    }
                }
            }

            _ => {
                // Handle Def-based types
                match self.shape.def {
                    Def::List(_) | Def::Array(_) | Def::Slice(_) => {
                        if let Ok(list_like) = self.into_list_like() {
                            // Hash length
                            list_like.len().hash(hasher);

                            // Hash each element
                            for elem in list_like.iter() {
                                elem.structural_hash(hasher);
                            }
                        }
                    }

                    Def::Map(_) => {
                        if let Ok(map) = self.into_map() {
                            // Hash length
                            map.len().hash(hasher);

                            // Hash each key-value pair
                            // NOTE(review): iteration order of the underlying map
                            // determines the hash — presumably deterministic per
                            // map type; confirm for unordered maps.
                            for (key, value) in map.iter() {
                                key.structural_hash(hasher);
                                value.structural_hash(hasher);
                            }
                        }
                    }

                    Def::Set(_) => {
                        if let Ok(set) = self.into_set() {
                            // Hash length
                            set.len().hash(hasher);

                            // Hash each element
                            for elem in set.iter() {
                                elem.structural_hash(hasher);
                            }
                        }
                    }

                    Def::Option(_) => {
                        if let Ok(opt) = self.into_option() {
                            // Discriminant bit first, then the payload if present
                            if let Some(inner) = opt.value() {
                                true.hash(hasher);
                                inner.structural_hash(hasher);
                            } else {
                                false.hash(hasher);
                            }
                        }
                    }

                    Def::Result(_) => {
                        if let Ok(result) = self.into_result() {
                            // 0 tags Ok, 1 tags Err, then the payload
                            if result.is_ok() {
                                0u8.hash(hasher);
                                if let Some(ok_val) = result.ok() {
                                    ok_val.structural_hash(hasher);
                                }
                            } else {
                                1u8.hash(hasher);
                                if let Some(err_val) = result.err() {
                                    err_val.structural_hash(hasher);
                                }
                            }
                        }
                    }

                    Def::Pointer(_) => {
                        // Smart pointers hash as their pointee (transparent)
                        if let Ok(ptr) = self.into_pointer()
                            && let Some(inner) = ptr.borrow_inner()
                        {
                            inner.structural_hash(hasher);
                        }
                    }

                    Def::DynamicValue(_) => {
                        if let Ok(dyn_val) = self.into_dynamic_value() {
                            // Hash based on dynamic value kind
                            dyn_val.structural_hash_inner(hasher);
                        }
                    }

                    Def::NdArray(_) => {
                        // For ndarray, hash the dimensions and data
                        if let Ok(arr) = self.into_ndarray() {
                            let n_dim = arr.n_dim();
                            n_dim.hash(hasher);
                            for i in 0..n_dim {
                                if let Some(dim) = arr.dim(i) {
                                    dim.hash(hasher);
                                }
                            }
                            // Hash each element
                            let count = arr.count();
                            for i in 0..count {
                                if let Some(elem) = arr.get(i) {
                                    elem.structural_hash(hasher);
                                }
                            }
                        }
                    }

                    // NOTE(review): the explicit Scalar/Undefined patterns are
                    // redundant with `_` (clippy would flag them); kept here for
                    // documentation value only.
                    Def::Scalar | Def::Undefined | _ => {
                        // Try to handle f32/f64 by hashing their bit representation
                        // (floats don't implement Hash, but their bit pattern is stable)
                        match self.scalar_type() {
                            Some(ScalarType::F32) => {
                                if let Ok(v) = self.get::<f32>() {
                                    v.to_bits().hash(hasher);
                                    return;
                                }
                            }
                            Some(ScalarType::F64) => {
                                if let Ok(v) = self.get::<f64>() {
                                    v.to_bits().hash(hasher);
                                    return;
                                }
                            }
                            _ => {}
                        }
                        panic!(
                            "structural_hash: type {} has no Hash impl and cannot be structurally hashed",
                            self.shape
                        );
                    }
                }
            }
        }
    }
513
514    /// Returns the type name of this scalar
515    ///
516    /// # Arguments
517    ///
518    /// * `f` - A mutable reference to a `core::fmt::Formatter`
519    /// * `opts` - The `TypeNameOpts` to use for formatting
520    ///
521    /// # Returns
522    ///
523    /// The result of the type name formatting
524    #[inline(always)]
525    pub fn type_name(
526        &self,
527        f: &mut core::fmt::Formatter<'_>,
528        opts: TypeNameOpts,
529    ) -> core::fmt::Result {
530        self.shape.write_type_name(f, opts)
531    }
532
533    /// Returns the shape
534    #[inline(always)]
535    pub const fn shape(&self) -> &'static Shape {
536        self.shape
537    }
538
539    /// Returns the data
540    #[inline(always)]
541    pub const fn data(&self) -> PtrConst {
542        self.data
543    }
544
545    /// Get the scalar type if set.
546    #[inline]
547    pub fn scalar_type(&self) -> Option<ScalarType> {
548        ScalarType::try_from_shape(self.shape)
549    }
550
551    /// Read the value from memory into a Rust value.
552    ///
553    /// # Panics
554    ///
555    /// Panics if the shape doesn't match the type `T`.
556    #[inline]
557    pub fn get<T: Facet<'facet> + ?Sized>(&self) -> Result<&'mem T, ReflectError> {
558        if self.shape != T::SHAPE {
559            Err(self.err(ReflectErrorKind::WrongShape {
560                expected: self.shape,
561                actual: T::SHAPE,
562            }))
563        } else {
564            Ok(unsafe { self.data.get::<T>() })
565        }
566    }
567
    /// Try to get the value as a string if it's a string type
    /// Returns None if the value is not a string or couldn't be extracted
    ///
    /// Handles, in order: bare `str`, `String`, `Cow<str>`, `&str` and `&&str`
    /// references, and smart pointers whose `Def::Pointer` pointee is `str`
    /// and which expose a `borrow_fn` (e.g. `Box<str>`, `Arc<str>`, `Rc<str>`).
    /// Wrappers are unwrapped first via `innermost_peek`.
    pub fn as_str(&self) -> Option<&'mem str> {
        let peek = self.innermost_peek();
        // ScalarType::Str matches both bare `str` and `&str`.
        // For bare `str` (not a pointer), data points to str bytes directly.
        // For `&str`, let it fall through to the pointer handler below.
        if let Some(ScalarType::Str) = peek.scalar_type()
            && !matches!(peek.shape.ty, Type::Pointer(_))
        {
            // Bare `str`: data is a wide pointer to str bytes.
            // get::<str>() creates a &str reference to that data.
            return unsafe { Some(peek.data.get::<str>()) };
        }
        #[cfg(feature = "alloc")]
        if let Some(ScalarType::String) = peek.scalar_type() {
            // SAFETY: ScalarType::String guarantees the data is a String.
            return unsafe { Some(peek.data.get::<alloc::string::String>().as_str()) };
        }
        #[cfg(feature = "alloc")]
        if let Some(ScalarType::CowStr) = peek.scalar_type() {
            // SAFETY: ScalarType::CowStr guarantees the data is a Cow<str>.
            return unsafe { Some(peek.data.get::<alloc::borrow::Cow<'mem, str>>().as_ref()) };
        }

        // Handle references, including nested references like &&str
        if let Type::Pointer(PointerType::Reference(vpt)) = peek.shape.ty {
            let target_shape = vpt.target;

            // Check if this is a nested reference (&&str) first
            if let Type::Pointer(PointerType::Reference(inner_vpt)) = target_shape.ty {
                let inner_target_shape = inner_vpt.target;
                if let Some(ScalarType::Str) = ScalarType::try_from_shape(inner_target_shape) {
                    // For &&str, we need to dereference twice.
                    // Read the outer reference (8 bytes) as a pointer to &str, then dereference
                    // SAFETY: shape says this is &&str, so data is a thin pointer
                    // to a &str; both dereferences stay within 'mem.
                    let outer_ptr: *const *const &str =
                        unsafe { peek.data.as_ptr::<*const &str>() };
                    let inner_ref: &str = unsafe { **outer_ptr };
                    return Some(inner_ref);
                }
            } else if let Some(ScalarType::Str) = ScalarType::try_from_shape(target_shape)
                && !matches!(target_shape.ty, Type::Pointer(_))
            {
                // Simple case: &str (but only if target is not a pointer itself)
                return unsafe { Some(peek.data.get::<&str>()) };
            }
        }

        // Handle smart pointer types like Box<str>, Arc<str>, Rc<str>
        // These have Def::Pointer with pointee = str::SHAPE and a borrow_fn
        #[cfg(feature = "alloc")]
        if let Def::Pointer(ptr_def) = peek.shape.def
            && let Some(pointee_shape) = ptr_def.pointee
            && let Some(ScalarType::Str) = ScalarType::try_from_shape(pointee_shape)
            && let Some(borrow_fn) = ptr_def.vtable.borrow_fn
        {
            // borrow_fn returns a PtrConst pointing to the inner str
            let inner_ptr = unsafe { borrow_fn(peek.data) };
            // The inner ptr is a wide pointer to str
            return unsafe { Some(inner_ptr.get::<str>()) };
        }

        None
    }
630
631    /// Try to get the value as a byte slice if it's a &[u8] type
632    /// Returns None if the value is not a byte slice or couldn't be extracted
633    #[inline]
634    pub fn as_bytes(&self) -> Option<&'mem [u8]> {
635        // Check if it's a direct &[u8]
636        if let Type::Pointer(PointerType::Reference(vpt)) = self.shape.ty {
637            let target_shape = vpt.target;
638            if let Def::Slice(sd) = target_shape.def
639                && sd.t().is_type::<u8>()
640            {
641                unsafe { return Some(self.data.get::<&[u8]>()) }
642            }
643        }
644        None
645    }
646
647    /// Tries to identify this value as a struct
648    #[inline]
649    pub fn into_struct(self) -> Result<PeekStruct<'mem, 'facet>, ReflectError> {
650        if let Type::User(UserType::Struct(ty)) = self.shape.ty {
651            Ok(PeekStruct { value: self, ty })
652        } else {
653            Err(self.err(ReflectErrorKind::WasNotA {
654                expected: "struct",
655                actual: self.shape,
656            }))
657        }
658    }
659
660    /// Tries to identify this value as an enum
661    #[inline]
662    pub fn into_enum(self) -> Result<PeekEnum<'mem, 'facet>, ReflectError> {
663        if let Type::User(UserType::Enum(ty)) = self.shape.ty {
664            Ok(PeekEnum { value: self, ty })
665        } else {
666            Err(self.err(ReflectErrorKind::WasNotA {
667                expected: "enum",
668                actual: self.shape,
669            }))
670        }
671    }
672
673    /// Tries to identify this value as a map
674    #[inline]
675    pub fn into_map(self) -> Result<PeekMap<'mem, 'facet>, ReflectError> {
676        if let Def::Map(def) = self.shape.def {
677            // SAFETY: The MapDef comes from self.shape.def, where self.shape is obtained
678            // from a trusted source (either T::SHAPE from the Facet trait, or validated
679            // through other safe constructors). The vtable is therefore trusted.
680            Ok(unsafe { PeekMap::new(self, def) })
681        } else {
682            Err(self.err(ReflectErrorKind::WasNotA {
683                expected: "map",
684                actual: self.shape,
685            }))
686        }
687    }
688
689    /// Tries to identify this value as a set
690    #[inline]
691    pub fn into_set(self) -> Result<PeekSet<'mem, 'facet>, ReflectError> {
692        if let Def::Set(def) = self.shape.def {
693            // SAFETY: The SetDef comes from self.shape.def, where self.shape is obtained
694            // from a trusted source (either T::SHAPE from the Facet trait, or validated
695            // through other safe constructors). The vtable is therefore trusted.
696            Ok(unsafe { PeekSet::new(self, def) })
697        } else {
698            Err(self.err(ReflectErrorKind::WasNotA {
699                expected: "set",
700                actual: self.shape,
701            }))
702        }
703    }
704
705    /// Tries to identify this value as a list
706    #[inline]
707    pub fn into_list(self) -> Result<PeekList<'mem, 'facet>, ReflectError> {
708        if let Def::List(def) = self.shape.def {
709            // SAFETY: The ListDef comes from self.shape.def, where self.shape is obtained
710            // from a trusted source (either T::SHAPE from the Facet trait, or validated
711            // through other safe constructors). The vtable is therefore trusted.
712            return Ok(unsafe { PeekList::new(self, def) });
713        }
714
715        Err(self.err(ReflectErrorKind::WasNotA {
716            expected: "list",
717            actual: self.shape,
718        }))
719    }
720
721    /// Tries to identify this value as a ndarray
722    #[inline]
723    pub fn into_ndarray(self) -> Result<PeekNdArray<'mem, 'facet>, ReflectError> {
724        if let Def::NdArray(def) = self.shape.def {
725            // SAFETY: The NdArrayDef comes from self.shape.def, where self.shape is obtained
726            // from a trusted source (either T::SHAPE from the Facet trait, or validated
727            // through other safe constructors). The vtable is therefore trusted.
728            return Ok(unsafe { PeekNdArray::new(self, def) });
729        }
730
731        Err(self.err(ReflectErrorKind::WasNotA {
732            expected: "ndarray",
733            actual: self.shape,
734        }))
735    }
736
    /// Tries to identify this value as a list, array or slice
    ///
    /// Accepts values whose `Def` is `List`, `Array` or `Slice`, and also
    /// references / raw pointers to slices (e.g. `&[i32]`), which are
    /// dereferenced so the returned view peeks at the pointed-to slice.
    /// Anything else yields a `WasNotA` error.
    #[inline]
    pub fn into_list_like(self) -> Result<PeekListLike<'mem, 'facet>, ReflectError> {
        match self.shape.def {
            Def::List(def) => {
                // SAFETY: The ListDef comes from self.shape.def, where self.shape is obtained
                // from a trusted source (either T::SHAPE from the Facet trait, or validated
                // through other safe constructors). The vtable is therefore trusted.
                Ok(unsafe { PeekListLike::new(self, ListLikeDef::List(def)) })
            }
            Def::Array(def) => {
                // SAFETY: The ArrayDef comes from self.shape.def, where self.shape is obtained
                // from a trusted source (either T::SHAPE from the Facet trait, or validated
                // through other safe constructors). The vtable is therefore trusted.
                Ok(unsafe { PeekListLike::new(self, ListLikeDef::Array(def)) })
            }
            Def::Slice(def) => {
                // When we have a bare slice shape with a wide pointer,
                // it means we have a reference to a slice (e.g., from Arc<[T]>::borrow_inner)
                // SAFETY: The SliceDef comes from self.shape.def, where self.shape is obtained
                // from a trusted source (either T::SHAPE from the Facet trait, or validated
                // through other safe constructors). The vtable is therefore trusted.
                Ok(unsafe { PeekListLike::new(self, ListLikeDef::Slice(def)) })
            }
            _ => {
                // &[i32] is actually a _pointer_ to a slice.
                match self.shape.ty {
                    Type::Pointer(ptr) => match ptr {
                        PointerType::Reference(vpt) | PointerType::Raw(vpt) => {
                            let target = vpt.target;
                            match target.def {
                                Def::Slice(def) => {
                                    // Our data is a pointer-to-slice: read the wide
                                    // pointer out of it, then peek at the slice itself.
                                    // SAFETY: the shape says data holds a (wide)
                                    // pointer to a slice, so reading it as
                                    // *const [()] preserves address + length metadata.
                                    let ptr = unsafe { self.data.as_ptr::<*const [()]>() };
                                    let ptr = PtrConst::new(unsafe {
                                        NonNull::new_unchecked((*ptr) as *mut [()]).as_ptr()
                                    });
                                    let peek = unsafe { Peek::unchecked_new(ptr, def.t) };

                                    // SAFETY: The SliceDef comes from target.def, where target is obtained
                                    // from self.shape which comes from a trusted source. The vtable is therefore trusted.
                                    return Ok(unsafe {
                                        PeekListLike::new(peek, ListLikeDef::Slice(def))
                                    });
                                }
                                _ => {
                                    // well it's not list-like then
                                }
                            }
                        }
                        PointerType::Function(_) => {
                            // well that's not a list-like
                        }
                    },
                    _ => {
                        // well that's not a list-like either
                    }
                }

                Err(self.err(ReflectErrorKind::WasNotA {
                    expected: "list, array or slice",
                    actual: self.shape,
                }))
            }
        }
    }
802
803    /// Tries to identify this value as a pointer
804    #[inline]
805    pub fn into_pointer(self) -> Result<PeekPointer<'mem, 'facet>, ReflectError> {
806        if let Def::Pointer(def) = self.shape.def {
807            Ok(PeekPointer { value: self, def })
808        } else {
809            Err(self.err(ReflectErrorKind::WasNotA {
810                expected: "smart pointer",
811                actual: self.shape,
812            }))
813        }
814    }
815
816    /// Tries to identify this value as an option
817    #[inline]
818    pub fn into_option(self) -> Result<PeekOption<'mem, 'facet>, ReflectError> {
819        if let Def::Option(def) = self.shape.def {
820            Ok(PeekOption { value: self, def })
821        } else {
822            Err(self.err(ReflectErrorKind::WasNotA {
823                expected: "option",
824                actual: self.shape,
825            }))
826        }
827    }
828
829    /// Tries to identify this value as a result
830    #[inline]
831    pub fn into_result(self) -> Result<PeekResult<'mem, 'facet>, ReflectError> {
832        if let Def::Result(def) = self.shape.def {
833            Ok(PeekResult { value: self, def })
834        } else {
835            Err(self.err(ReflectErrorKind::WasNotA {
836                expected: "result",
837                actual: self.shape,
838            }))
839        }
840    }
841
842    /// Tries to identify this value as a tuple
843    #[inline]
844    pub fn into_tuple(self) -> Result<PeekTuple<'mem, 'facet>, ReflectError> {
845        if let Type::User(UserType::Struct(struct_type)) = self.shape.ty {
846            if struct_type.kind == StructKind::Tuple {
847                Ok(PeekTuple {
848                    value: self,
849                    ty: TupleType {
850                        fields: struct_type.fields,
851                    },
852                })
853            } else {
854                Err(self.err(ReflectErrorKind::WasNotA {
855                    expected: "tuple",
856                    actual: self.shape,
857                }))
858            }
859        } else {
860            Err(self.err(ReflectErrorKind::WasNotA {
861                expected: "tuple",
862                actual: self.shape,
863            }))
864        }
865    }
866
867    /// Tries to identify this value as a dynamic value (like `facet_value::Value`)
868    #[inline]
869    pub fn into_dynamic_value(self) -> Result<PeekDynamicValue<'mem, 'facet>, ReflectError> {
870        if let Def::DynamicValue(def) = self.shape.def {
871            Ok(PeekDynamicValue { value: self, def })
872        } else {
873            Err(self.err(ReflectErrorKind::WasNotA {
874                expected: "dynamic value",
875                actual: self.shape,
876            }))
877        }
878    }
879
880    /// Tries to return the innermost value — useful for serialization. For example, we serialize a `NonZero<u8>` the same
881    /// as a `u8`. Similarly, we serialize a `Utf8PathBuf` the same as a `String.
882    ///
883    /// Returns a `Peek` to the innermost value, unwrapping transparent wrappers recursively.
884    /// For example, this will peel through newtype wrappers or smart pointers that have an `inner`.
885    pub fn innermost_peek(self) -> Self {
886        let mut current_peek = self;
887        loop {
888            // First, try to dereference if this is a pointer type (Box, Arc, etc.)
889            if let Ok(ptr) = current_peek.into_pointer()
890                && let Some(target) = ptr.borrow_inner()
891            {
892                current_peek = target;
893                continue;
894            }
895
896            // Then, try to unwrap transparent wrappers via shape.inner
897            if let Some(inner_shape) = current_peek.shape.inner {
898                let result = unsafe { current_peek.shape.call_try_borrow_inner(current_peek.data) };
899                match result {
900                    Some(Ok(inner_data)) => {
901                        current_peek = Peek {
902                            data: inner_data.as_const(),
903                            shape: inner_shape,
904                            _invariant: PhantomData,
905                        };
906                        continue;
907                    }
908                    Some(Err(e)) => {
909                        panic!(
910                            "innermost_peek: try_borrow_inner returned an error! was trying to go from {} to {}. error: {e}",
911                            current_peek.shape, inner_shape
912                        );
913                    }
914                    None => {
915                        // No try_borrow_inner function - this might be a pointer type
916                        // that we already tried above, so we're done
917                    }
918                }
919            }
920
921            // No more unwrapping possible
922            break;
923        }
924        current_peek
925    }
926
927    /// Performs custom serialization of the current peek using the provided field's metadata.
928    ///
929    /// Returns an `OwnedPeek` that points to the final type that should be serialized in place
930    /// of the current peek.
931    #[cfg(feature = "alloc")]
932    pub fn custom_serialization(&self, field: Field) -> Result<OwnedPeek<'mem>, ReflectError> {
933        let Some(proxy_def) = field.proxy() else {
934            return Err(self.err(ReflectErrorKind::OperationFailed {
935                shape: self.shape,
936                operation: "field does not have a proxy definition",
937            }));
938        };
939
940        let target_shape = proxy_def.shape;
941        let tptr = target_shape.allocate().map_err(|_| {
942            self.err(ReflectErrorKind::Unsized {
943                shape: target_shape,
944                operation: "Not a Sized type",
945            })
946        })?;
947        let ser_res = unsafe { (proxy_def.convert_out)(self.data(), tptr) };
948        let err = match ser_res {
949            Ok(rptr) => {
950                if rptr.as_uninit() != tptr {
951                    ReflectErrorKind::CustomSerializationError {
952                        message: "convert_out did not return the expected pointer".into(),
953                        src_shape: self.shape,
954                        dst_shape: target_shape,
955                    }
956                } else {
957                    return Ok(OwnedPeek {
958                        shape: target_shape,
959                        data: rptr,
960                        _phantom: PhantomData,
961                    });
962                }
963            }
964            Err(message) => ReflectErrorKind::CustomSerializationError {
965                message,
966                src_shape: self.shape,
967                dst_shape: target_shape,
968            },
969        };
970        // if we reach here we have an error and we need to deallocate the target allocation
971        unsafe {
972            // SAFETY: unwrap should be ok since the allocation was ok
973            target_shape.deallocate_uninit(tptr).unwrap()
974        };
975        Err(self.err(err))
976    }
977
978    /// Performs custom serialization using a specific proxy definition.
979    ///
980    /// This is a lower-level method that takes a `ProxyDef` directly, useful when
981    /// the caller has already resolved which proxy to use (e.g., via `effective_proxy()`).
982    #[cfg(feature = "alloc")]
983    pub fn custom_serialization_with_proxy(
984        &self,
985        proxy_def: &'static facet_core::ProxyDef,
986    ) -> Result<OwnedPeek<'mem>, ReflectError> {
987        let target_shape = proxy_def.shape;
988        let tptr = target_shape.allocate().map_err(|_| {
989            self.err(ReflectErrorKind::Unsized {
990                shape: target_shape,
991                operation: "Not a Sized type",
992            })
993        })?;
994        let ser_res = unsafe { (proxy_def.convert_out)(self.data(), tptr) };
995        let err = match ser_res {
996            Ok(rptr) => {
997                if rptr.as_uninit() != tptr {
998                    ReflectErrorKind::CustomSerializationError {
999                        message: "convert_out did not return the expected pointer".into(),
1000                        src_shape: self.shape,
1001                        dst_shape: target_shape,
1002                    }
1003                } else {
1004                    return Ok(OwnedPeek {
1005                        shape: target_shape,
1006                        data: rptr,
1007                        _phantom: PhantomData,
1008                    });
1009                }
1010            }
1011            Err(message) => ReflectErrorKind::CustomSerializationError {
1012                message,
1013                src_shape: self.shape,
1014                dst_shape: target_shape,
1015            },
1016        };
1017        // if we reach here we have an error and we need to deallocate the target allocation
1018        unsafe {
1019            // SAFETY: unwrap should be ok since the allocation was ok
1020            target_shape.deallocate_uninit(tptr).unwrap()
1021        };
1022        Err(self.err(err))
1023    }
1024
    /// Returns an `OwnedPeek` using the shape's container-level proxy for serialization.
    ///
    /// This is used when a type has `#[facet(proxy = ProxyType)]` at the container level.
    /// Unlike field-level proxies which are checked via `custom_serialization(field)`,
    /// this method checks the Shape itself for a proxy definition.
    ///
    /// Returns `None` if the shape has no container-level proxy.
    #[cfg(feature = "alloc")]
    pub fn custom_serialization_from_shape(&self) -> Result<Option<OwnedPeek<'mem>>, ReflectError> {
        // Equivalent to the format-aware variant with no format namespace, so only
        // the format-agnostic proxy (if any) applies.
        self.custom_serialization_from_shape_with_format(None)
    }
1036
1037    /// Returns an `OwnedPeek` using the shape's container-level proxy for serialization,
1038    /// with support for format-specific proxies.
1039    ///
1040    /// If `format_namespace` is provided (e.g., `Some("xml")`), looks for a format-specific
1041    /// proxy first, falling back to the format-agnostic proxy.
1042    ///
1043    /// Returns `None` if no applicable proxy is found.
1044    #[cfg(feature = "alloc")]
1045    pub fn custom_serialization_from_shape_with_format(
1046        &self,
1047        format_namespace: Option<&str>,
1048    ) -> Result<Option<OwnedPeek<'mem>>, ReflectError> {
1049        let Some(proxy_def) = self.shape.effective_proxy(format_namespace) else {
1050            return Ok(None);
1051        };
1052
1053        let target_shape = proxy_def.shape;
1054        let tptr = target_shape.allocate().map_err(|_| {
1055            self.err(ReflectErrorKind::Unsized {
1056                shape: target_shape,
1057                operation: "Not a Sized type",
1058            })
1059        })?;
1060
1061        let ser_res = unsafe { (proxy_def.convert_out)(self.data(), tptr) };
1062        let err = match ser_res {
1063            Ok(rptr) => {
1064                if rptr.as_uninit() != tptr {
1065                    ReflectErrorKind::CustomSerializationError {
1066                        message: "proxy convert_out did not return the expected pointer".into(),
1067                        src_shape: self.shape,
1068                        dst_shape: target_shape,
1069                    }
1070                } else {
1071                    return Ok(Some(OwnedPeek {
1072                        shape: target_shape,
1073                        data: rptr,
1074                        _phantom: PhantomData,
1075                    }));
1076                }
1077            }
1078            Err(message) => ReflectErrorKind::CustomSerializationError {
1079                message,
1080                src_shape: self.shape,
1081                dst_shape: target_shape,
1082            },
1083        };
1084
1085        // if we reach here we have an error and we need to deallocate the target allocation
1086        unsafe {
1087            // SAFETY: unwrap should be ok since the allocation was ok
1088            target_shape.deallocate_uninit(tptr).unwrap()
1089        };
1090        Err(self.err(err))
1091    }
1092
1093    /// Navigate to a nested value by following a [`Path`].
1094    ///
1095    /// Each [`PathStep`] in the path is applied in order, descending into
1096    /// structs, enums, lists, maps, options, pointers, etc. If any step
1097    /// cannot be applied, a [`PathAccessError`] is returned with the step
1098    /// index and context about what went wrong.
1099    ///
1100    /// # Errors
1101    ///
1102    /// Returns [`PathAccessError`] if:
1103    /// - The path's root shape doesn't match this value's shape
1104    /// - A step kind doesn't apply to the current shape
1105    /// - A field/list index is out of bounds
1106    /// - An enum variant doesn't match the runtime variant
1107    /// - A deref/inner/proxy target is missing
1108    /// - An option is `None` when `OptionSome` is requested
1109    pub fn at_path(self, path: &Path) -> Result<Peek<'mem, 'facet>, PathAccessError> {
1110        if self.shape != path.shape {
1111            return Err(PathAccessError::RootShapeMismatch {
1112                expected: path.shape,
1113                actual: self.shape,
1114            });
1115        }
1116
1117        let mut current = self;
1118
1119        for (step_index, step) in path.steps().iter().enumerate() {
1120            current = current.apply_step(*step, step_index)?;
1121        }
1122
1123        Ok(current)
1124    }
1125
    /// Apply a single [`PathStep`] to this value, returning the resulting [`Peek`].
    ///
    /// `step_index` is only used to enrich error values so callers can report
    /// *which* step of the path failed.
    fn apply_step(
        self,
        step: PathStep,
        step_index: usize,
    ) -> Result<Peek<'mem, 'facet>, PathAccessError> {
        match step {
            PathStep::Field(idx) => {
                let idx = idx as usize;
                match self.shape.ty {
                    // Struct field access
                    Type::User(UserType::Struct(sd)) => {
                        if idx >= sd.fields.len() {
                            return Err(PathAccessError::IndexOutOfBounds {
                                step,
                                step_index,
                                shape: self.shape,
                                index: idx,
                                bound: sd.fields.len(),
                            });
                        }
                        let field = &sd.fields[idx];
                        // SAFETY: `field.offset` comes from the shape's own field table,
                        // so it lies within this value's allocation.
                        let field_data = unsafe { self.data.field(field.offset) };
                        Ok(unsafe { Peek::unchecked_new(field_data, field.shape()) })
                    }
                    // Enum variant field access — a preceding Variant step verified
                    // which variant is active and returned the enum Peek as-is.
                    // Now we read the active variant's field by index.
                    Type::User(UserType::Enum(_)) => {
                        let peek_enum =
                            self.into_enum()
                                .map_err(|_| PathAccessError::WrongStepKind {
                                    step,
                                    step_index,
                                    shape: self.shape,
                                })?;
                        let variant = peek_enum.active_variant().map_err(|_| {
                            PathAccessError::WrongStepKind {
                                step,
                                step_index,
                                shape: self.shape,
                            }
                        })?;
                        if idx >= variant.data.fields.len() {
                            return Err(PathAccessError::IndexOutOfBounds {
                                step,
                                step_index,
                                shape: self.shape,
                                index: idx,
                                bound: variant.data.fields.len(),
                            });
                        }
                        peek_enum
                            .field(idx)
                            .map_err(|_| PathAccessError::WrongStepKind {
                                step,
                                step_index,
                                shape: self.shape,
                            })?
                            .ok_or(PathAccessError::IndexOutOfBounds {
                                step,
                                step_index,
                                shape: self.shape,
                                index: idx,
                                bound: variant.data.fields.len(),
                            })
                    }
                    // Field steps only make sense on structs and enums.
                    _ => Err(PathAccessError::WrongStepKind {
                        step,
                        step_index,
                        shape: self.shape,
                    }),
                }
            }

            PathStep::Variant(expected_idx) => {
                let expected_idx = expected_idx as usize;
                let peek_enum = self
                    .into_enum()
                    .map_err(|_| PathAccessError::WrongStepKind {
                        step,
                        step_index,
                        shape: self.shape,
                    })?;

                // The requested variant index must at least exist in the enum's definition.
                if expected_idx >= peek_enum.variants().len() {
                    return Err(PathAccessError::IndexOutOfBounds {
                        step,
                        step_index,
                        shape: self.shape,
                        index: expected_idx,
                        bound: peek_enum.variants().len(),
                    });
                }

                let actual_idx =
                    peek_enum
                        .variant_index()
                        .map_err(|_| PathAccessError::WrongStepKind {
                            step,
                            step_index,
                            shape: self.shape,
                        })?;

                // The path was recorded against a specific variant; a different
                // runtime variant means the path no longer applies to this value.
                if actual_idx != expected_idx {
                    return Err(PathAccessError::VariantMismatch {
                        step_index,
                        shape: self.shape,
                        expected_variant: expected_idx,
                        actual_variant: actual_idx,
                    });
                }

                // After verifying the variant matches, we return the enum Peek
                // unchanged. The next Field step will use the active variant's
                // fields (handled in the Field arm's Enum branch).
                Ok(self)
            }

            PathStep::Index(idx) => {
                let idx = idx as usize;
                match self.shape.def {
                    Def::List(def) => {
                        let list = unsafe { super::PeekList::new(self, def) };
                        let len = list.len();
                        list.get(idx).ok_or(PathAccessError::IndexOutOfBounds {
                            step,
                            step_index,
                            shape: self.shape,
                            index: idx,
                            bound: len,
                        })
                    }
                    Def::Array(def) => {
                        let list_like =
                            unsafe { super::PeekListLike::new(self, ListLikeDef::Array(def)) };
                        let len = list_like.len();
                        list_like.get(idx).ok_or(PathAccessError::IndexOutOfBounds {
                            step,
                            step_index,
                            shape: self.shape,
                            index: idx,
                            bound: len,
                        })
                    }
                    // Anything else (including bare slice defs) is rejected here.
                    _ => Err(PathAccessError::WrongStepKind {
                        step,
                        step_index,
                        shape: self.shape,
                    }),
                }
            }

            PathStep::MapKey(entry_idx) => {
                let entry_idx = entry_idx as usize;
                if let Def::Map(def) = self.shape.def {
                    let map = unsafe { super::PeekMap::new(self, def) };
                    let len = map.len();
                    if entry_idx >= len {
                        return Err(PathAccessError::IndexOutOfBounds {
                            step,
                            step_index,
                            shape: self.shape,
                            index: entry_idx,
                            bound: len,
                        });
                    }
                    // Iterate to the nth entry and return the key.
                    // Linear scan: the map's iteration order defines which entry is "nth".
                    for (i, (key, _value)) in map.iter().enumerate() {
                        if i == entry_idx {
                            return Ok(key);
                        }
                    }
                    // Should be unreachable given the bounds check above
                    Err(PathAccessError::IndexOutOfBounds {
                        step,
                        step_index,
                        shape: self.shape,
                        index: entry_idx,
                        bound: len,
                    })
                } else {
                    Err(PathAccessError::WrongStepKind {
                        step,
                        step_index,
                        shape: self.shape,
                    })
                }
            }

            PathStep::MapValue(entry_idx) => {
                // Mirrors the MapKey arm, but yields the entry's value instead.
                let entry_idx = entry_idx as usize;
                if let Def::Map(def) = self.shape.def {
                    let map = unsafe { super::PeekMap::new(self, def) };
                    let len = map.len();
                    if entry_idx >= len {
                        return Err(PathAccessError::IndexOutOfBounds {
                            step,
                            step_index,
                            shape: self.shape,
                            index: entry_idx,
                            bound: len,
                        });
                    }
                    for (i, (_key, value)) in map.iter().enumerate() {
                        if i == entry_idx {
                            return Ok(value);
                        }
                    }
                    // Should be unreachable given the bounds check above.
                    Err(PathAccessError::IndexOutOfBounds {
                        step,
                        step_index,
                        shape: self.shape,
                        index: entry_idx,
                        bound: len,
                    })
                } else {
                    Err(PathAccessError::WrongStepKind {
                        step,
                        step_index,
                        shape: self.shape,
                    })
                }
            }

            PathStep::OptionSome => {
                // Descend into the option's payload; a `None` value is its own error.
                if let Def::Option(def) = self.shape.def {
                    let opt = PeekOption { value: self, def };
                    opt.value().ok_or(PathAccessError::OptionIsNone {
                        step_index,
                        shape: self.shape,
                    })
                } else {
                    Err(PathAccessError::WrongStepKind {
                        step,
                        step_index,
                        shape: self.shape,
                    })
                }
            }

            PathStep::Deref => {
                // Smart-pointer deref: only pointers whose pointee can be borrowed succeed.
                if let Def::Pointer(def) = self.shape.def {
                    let ptr = PeekPointer { value: self, def };
                    ptr.borrow_inner().ok_or(PathAccessError::MissingTarget {
                        step,
                        step_index,
                        shape: self.shape,
                    })
                } else {
                    Err(PathAccessError::WrongStepKind {
                        step,
                        step_index,
                        shape: self.shape,
                    })
                }
            }

            PathStep::Inner => {
                // Transparent wrapper: needs both a declared inner shape and a working
                // `try_borrow_inner`; anything missing is a MissingTarget.
                let inner_shape = self.shape.inner.ok_or(PathAccessError::MissingTarget {
                    step,
                    step_index,
                    shape: self.shape,
                })?;

                let result = unsafe { self.shape.call_try_borrow_inner(self.data) };
                match result {
                    Some(Ok(inner_data)) => Ok(Peek {
                        data: inner_data.as_const(),
                        shape: inner_shape,
                        _invariant: PhantomData,
                    }),
                    _ => Err(PathAccessError::MissingTarget {
                        step,
                        step_index,
                        shape: self.shape,
                    }),
                }
            }

            PathStep::Proxy => {
                let proxy_def =
                    self.shape
                        .effective_proxy(None)
                        .ok_or(PathAccessError::MissingTarget {
                            step,
                            step_index,
                            shape: self.shape,
                        })?;
                // Proxy navigation requires converting out, which allocates.
                // For read-only path access, we can't do that without ownership.
                // Return MissingTarget since proxy traversal isn't supported in at_path.
                Err(PathAccessError::MissingTarget {
                    step,
                    step_index,
                    shape: proxy_def.shape,
                })
            }
        }
    }
1426}
1427
1428impl<'mem, 'facet> core::fmt::Display for Peek<'mem, 'facet> {
1429    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
1430        if let Some(result) = unsafe { self.shape.call_display(self.data, f) } {
1431            return result;
1432        }
1433        write!(f, "⟨{}⟩", self.shape)
1434    }
1435}
1436
1437impl<'mem, 'facet> core::fmt::Debug for Peek<'mem, 'facet> {
1438    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
1439        if let Some(result) = unsafe { self.shape.call_debug(self.data, f) } {
1440            return result;
1441        }
1442
1443        write!(f, "⟨{}⟩", self.shape)
1444    }
1445}
1446
impl<'mem, 'facet> core::cmp::PartialEq for Peek<'mem, 'facet> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        // Delegates to the fallible inherent `partial_eq`; shapes that don't
        // support equality (or whose comparison fails) compare as not-equal.
        self.partial_eq(other).unwrap_or(false)
    }
}
1453
impl<'mem, 'facet> core::cmp::PartialOrd for Peek<'mem, 'facet> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        // NOTE: this call resolves to the *inherent* `Peek::partial_cmp` (inherent
        // methods take precedence over trait methods), which is fallible; shapes
        // without ordering support compare as incomparable (`None`).
        self.partial_cmp(other).unwrap_or(None)
    }
}
1460
impl<'mem, 'facet> core::hash::Hash for Peek<'mem, 'facet> {
    fn hash<H: core::hash::Hasher>(&self, hasher: &mut H) {
        // NOTE: this call resolves to the *inherent* `Peek::hash` (inherent methods
        // take precedence over trait methods), which is fallible — shapes without
        // hashing support make this trait impl panic.
        self.hash(hasher)
            .expect("Hashing is not supported for this shape");
    }
}
1467
/// A covariant wrapper around [`Peek`] for types that can safely shrink lifetimes.
///
/// Unlike [`Peek`], which is invariant with respect to `'facet` for soundness reasons,
/// `CovariantPeek` is **covariant** with respect to `'facet`. This means a `CovariantPeek<'mem, 'static>`
/// can be used where a `CovariantPeek<'mem, 'a>` is expected.
///
/// # Variance Background
///
/// From the [Rust Reference on Subtyping](https://doc.rust-lang.org/reference/subtyping.html):
/// - **Covariant** types can shrink lifetimes (`'static` → `'a`)
/// - **Bivariant** types have no lifetime constraints and can go either direction
/// - **Contravariant** types can only grow lifetimes
/// - **Invariant** types cannot change lifetimes at all
///
/// `CovariantPeek` accepts both covariant and bivariant types, since both can
/// safely shrink lifetimes.
///
/// # When to Use
///
/// Use `CovariantPeek` when you need to:
/// - Store multiple `Peek` values with different lifetimes in a single collection
/// - Pass `Peek` values to functions expecting shorter lifetimes
/// - Build data structures that wrap `Peek` without forcing invariance on the wrapper
///
/// # Safety
///
/// `CovariantPeek` can only be constructed from types that can safely shrink lifetimes
/// (covariant or bivariant). The constructor verifies this at runtime by checking
/// [`Variance::can_shrink`]. This ensures that lifetime shrinking is always safe.
///
/// # Example
///
/// ```
/// use facet::Facet;
/// use facet_reflect::{Peek, CovariantPeek};
///
/// #[derive(Facet)]
/// struct Data<'a> {
///     value: &'a str,
/// }
///
/// // Data<'a> is covariant with respect to 'a because &'a str is covariant
/// let data = Data { value: "hello" };
/// let peek: Peek<'_, 'static> = Peek::new(&data);
///
/// // Convert to CovariantPeek - this verifies the type can shrink lifetimes
/// let covariant = CovariantPeek::new(peek).expect("Data can shrink lifetimes");
///
/// // Now we can use it where shorter lifetimes are expected
/// fn use_shorter<'a>(p: CovariantPeek<'_, 'a>) {
///     let _ = p;
/// }
/// use_shorter(covariant);
/// ```
#[derive(Clone, Copy)]
pub struct CovariantPeek<'mem, 'facet> {
    /// Underlying data pointer (type-erased, read-only)
    data: PtrConst,

    /// Shape (runtime type description) of the value
    shape: &'static Shape,

    // Covariant with respect to both 'mem and 'facet: CovariantPeek<'mem, 'static> can be used where
    // CovariantPeek<'mem, 'a> is expected.
    //
    // This is safe ONLY because we verify at construction time that the underlying
    // type can shrink lifetimes (is covariant or bivariant).
    // See: https://doc.rust-lang.org/reference/subtyping.html
    _covariant: PhantomData<(&'mem (), &'facet ())>,
}
1538
1539impl<'mem, 'facet> CovariantPeek<'mem, 'facet> {
1540    /// Creates a new `CovariantPeek` from a `Peek`, verifying that the underlying type
1541    /// can be used in covariant contexts.
1542    ///
1543    /// Returns `None` if the type cannot safely shrink lifetimes (i.e., it's contravariant
1544    /// or invariant). Both covariant and bivariant types are accepted.
1545    ///
1546    /// From the [Rust Reference](https://doc.rust-lang.org/reference/subtyping.html):
1547    /// - Covariant types can shrink lifetimes (`'static` → `'a`)
1548    /// - Bivariant types have no lifetime constraints and can go either direction
1549    /// - Both are safe to use in covariant contexts
1550    ///
1551    /// # Example
1552    ///
1553    /// ```
1554    /// use facet::Facet;
1555    /// use facet_reflect::{Peek, CovariantPeek};
1556    ///
1557    /// // i32 has no lifetime parameters, so it's bivariant (can be used as covariant)
1558    /// let value = 42i32;
1559    /// let peek = Peek::new(&value);
1560    /// let covariant = CovariantPeek::new(peek);
1561    /// assert!(covariant.is_some());
1562    /// ```
1563    #[inline]
1564    pub fn new(peek: Peek<'mem, 'facet>) -> Option<Self> {
1565        // Accept types that can shrink lifetimes: Covariant and Bivariant
1566        // See: https://doc.rust-lang.org/reference/subtyping.html
1567        if peek.variance().can_shrink() {
1568            Some(Self {
1569                data: peek.data,
1570                shape: peek.shape,
1571                _covariant: PhantomData,
1572            })
1573        } else {
1574            None
1575        }
1576    }
1577
1578    /// Creates a new `CovariantPeek` from a `Peek`, panicking if the type cannot be
1579    /// used in covariant contexts.
1580    ///
1581    /// # Panics
1582    ///
1583    /// Panics if the underlying type is contravariant or invariant.
1584    ///
1585    /// # Example
1586    ///
1587    /// ```
1588    /// use facet::Facet;
1589    /// use facet_reflect::{Peek, CovariantPeek};
1590    ///
1591    /// let value = "hello";
1592    /// let peek = Peek::new(&value);
1593    /// let covariant = CovariantPeek::new_unchecked(peek); // Will succeed
1594    /// ```
1595    #[inline]
1596    pub fn new_unchecked(peek: Peek<'mem, 'facet>) -> Self {
1597        Self::new(peek).unwrap_or_else(|| {
1598            panic!(
1599                "CovariantPeek::new_unchecked called on type that cannot shrink lifetimes: {} (variance: {:?})",
1600                peek.shape,
1601                peek.variance()
1602            )
1603        })
1604    }
1605
1606    /// Creates a `CovariantPeek` directly from a `Facet` type that can be used
1607    /// in covariant contexts.
1608    ///
1609    /// Returns `None` if the type is contravariant or invariant.
1610    ///
1611    /// # Example
1612    ///
1613    /// ```
1614    /// use facet::Facet;
1615    /// use facet_reflect::CovariantPeek;
1616    ///
1617    /// let value = 42i32;
1618    /// let covariant = CovariantPeek::from_ref(&value);
1619    /// assert!(covariant.is_some());
1620    /// ```
1621    #[inline]
1622    pub fn from_ref<T: Facet<'facet> + ?Sized>(t: &'mem T) -> Option<Self> {
1623        Self::new(Peek::new(t))
1624    }
1625
1626    /// Returns the underlying `Peek`.
1627    ///
1628    /// Note that the returned `Peek` is invariant, so you cannot use it to
1629    /// shrink lifetimes directly. Use `CovariantPeek` for lifetime flexibility.
1630    #[inline]
1631    pub fn into_peek(self) -> Peek<'mem, 'facet> {
1632        Peek {
1633            data: self.data,
1634            shape: self.shape,
1635            _invariant: PhantomData,
1636        }
1637    }
1638
1639    /// Returns the shape of the underlying value.
1640    #[inline]
1641    pub const fn shape(&self) -> &'static Shape {
1642        self.shape
1643    }
1644
1645    /// Returns the data pointer.
1646    #[inline]
1647    pub const fn data(&self) -> PtrConst {
1648        self.data
1649    }
1650}
1651
impl<'mem, 'facet> core::ops::Deref for CovariantPeek<'mem, 'facet> {
    type Target = Peek<'mem, 'facet>;

    #[inline]
    fn deref(&self) -> &Self::Target {
        // SAFETY: CovariantPeek and Peek have the same memory layout for the
        // data and shape fields. The PhantomData fields don't affect layout.
        // We're creating a reference to a Peek that views the same data.
        //
        // This is safe because:
        // 1. We only construct CovariantPeek from covariant types
        // 2. The Peek reference we return has the same lifetime bounds
        // 3. We're not allowing mutation through this reference
        //
        // NOTE(review): both structs use the default `repr(Rust)`, which does
        // not formally guarantee identical layout across distinct types even
        // when their field lists match. The cast below relies on the compiler
        // laying both out the same way; adding `#[repr(C)]` to both `Peek`
        // and `CovariantPeek` would make this guaranteed-sound — TODO confirm
        // (e.g. run under Miri).
        unsafe { &*(self as *const CovariantPeek<'mem, 'facet> as *const Peek<'mem, 'facet>) }
    }
}
1668
1669impl<'mem, 'facet> core::fmt::Debug for CovariantPeek<'mem, 'facet> {
1670    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
1671        f.debug_struct("CovariantPeek")
1672            .field("shape", &self.shape)
1673            .field("data", &self.data)
1674            .finish()
1675    }
1676}
1677
1678impl<'mem, 'facet> core::fmt::Display for CovariantPeek<'mem, 'facet> {
1679    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
1680        core::fmt::Display::fmt(&**self, f)
1681    }
1682}
1683
#[cfg(test)]
mod tests {
    use super::*;

    /// Regression test for issue #1082: UB in `Peek("").as_str()`
    /// Previously, `as_str()` used `get::<&str>()` which tried to read a fat pointer
    /// from the str data, causing UB for empty strings (reading 16 bytes from 0-byte allocation).
    #[test]
    fn test_peek_as_str_empty_string() {
        let peek = Peek::new("");
        assert_eq!(peek.as_str(), Some(""));
    }

    /// Happy path: a non-empty `&str` round-trips through `as_str()`.
    #[test]
    fn test_peek_as_str_non_empty_string() {
        let peek = Peek::new("hello");
        assert_eq!(peek.as_str(), Some("hello"));
    }

    /// `as_str()` also works on an owned `String` (alloc-only type).
    #[test]
    #[cfg(feature = "alloc")]
    fn test_peek_as_str_owned_string() {
        let s = alloc::string::String::from("owned string");
        let peek = Peek::new(&s);
        assert_eq!(peek.as_str(), Some("owned string"));
    }

    /// Regression test for issue #794: Peek::as_str() with double reference
    /// Previously, this would cause UB when trying to read &&str as &str
    #[test]
    fn test_peek_as_str_double_reference() {
        let value = &"hello";
        let peek = Peek::new(&value);
        assert_eq!(peek.as_str(), Some("hello"));
    }

    #[test]
    fn test_covariant_peek_from_covariant_type() {
        // i32 has no lifetime parameters, so it's bivariant (usable in covariant contexts)
        let value = 42i32;
        let peek = Peek::new(&value);
        let covariant = CovariantPeek::new(peek);
        assert!(covariant.is_some());

        // Verify we can access Peek methods through Deref
        let covariant = covariant.unwrap();
        assert_eq!(covariant.shape(), peek.shape());
    }

    /// `from_ref` is the one-step equivalent of `Peek::new` + `CovariantPeek::new`.
    #[test]
    fn test_covariant_peek_from_ref() {
        let value = 42i32;
        let covariant = CovariantPeek::from_ref(&value);
        assert!(covariant.is_some());
    }

    #[test]
    fn test_covariant_peek_deref_to_peek() {
        let value = "hello";
        let peek = Peek::new(&value);
        let covariant = CovariantPeek::new(peek).unwrap();

        // Test that Deref works - we can call Peek methods directly
        assert_eq!(covariant.as_str(), Some("hello"));
        assert_eq!(covariant.shape(), peek.shape());
    }

    /// `into_peek` recovers a `Peek` viewing the same shape/data.
    #[test]
    fn test_covariant_peek_into_peek() {
        let value = 42i32;
        let original_peek = Peek::new(&value);
        let covariant = CovariantPeek::new(original_peek).unwrap();
        let recovered_peek = covariant.into_peek();

        assert_eq!(recovered_peek.shape(), original_peek.shape());
    }

    #[test]
    fn test_covariant_peek_lifetime_covariance() {
        // This test verifies that CovariantPeek is actually covariant with respect to 'facet
        // by passing a CovariantPeek<'_, 'static> to a function expecting CovariantPeek<'_, 'a>
        fn use_shorter<'a>(_p: CovariantPeek<'_, 'a>) {}

        let value = 42i32;
        let covariant: CovariantPeek<'_, 'static> = CovariantPeek::from_ref(&value).unwrap();

        // This compiles because CovariantPeek is covariant with respect to 'facet
        use_shorter(covariant);
    }

    #[test]
    #[cfg(feature = "alloc")]
    fn test_covariant_peek_vec_type() {
        // Vec<T> is covariant with respect to T
        let vec = alloc::vec![1i32, 2, 3];
        let peek = Peek::new(&vec);
        let covariant = CovariantPeek::new(peek);
        assert!(covariant.is_some());
    }

    #[test]
    #[cfg(feature = "alloc")]
    fn test_covariant_peek_option_type() {
        // Option<T> is covariant with respect to T
        let opt = Some(42i32);
        let peek = Peek::new(&opt);
        let covariant = CovariantPeek::new(peek);
        assert!(covariant.is_some());
    }

    /// Local Spanned<T> for testing metadata_container behavior.
    /// Users define their own version using #[facet(metadata_container)].
    #[derive(Debug, Clone, facet::Facet)]
    #[facet(metadata_container)]
    struct Spanned<T> {
        // The payload the structural hash should reflect.
        value: T,
        // Marked as metadata: excluded from structural comparisons/hashes.
        #[facet(metadata = "span")]
        span: Option<crate::Span>,
    }

    impl<T> Spanned<T> {
        // Convenience constructor wrapping a value with a concrete span.
        fn new(value: T, span: crate::Span) -> Self {
            Self {
                value,
                span: Some(span),
            }
        }
    }

    /// Metadata fields (the span) must not contribute to the structural hash.
    #[test]
    fn test_spanned_structural_hash_ignores_span() {
        use crate::Span;
        use core::hash::Hasher;
        use std::hash::DefaultHasher;

        // Two Spanned values with same inner value but different spans
        let a = Spanned::new(42i32, Span::new(0, 10));
        let b = Spanned::new(42i32, Span::new(100, 20));

        // They should have the same structural hash
        let mut hasher_a = DefaultHasher::new();
        Peek::new(&a).structural_hash(&mut hasher_a);
        let hash_a = hasher_a.finish();

        let mut hasher_b = DefaultHasher::new();
        Peek::new(&b).structural_hash(&mut hasher_b);
        let hash_b = hasher_b.finish();

        assert_eq!(
            hash_a, hash_b,
            "Spanned values with same inner value should have same structural hash"
        );
    }

    /// Conversely, differing inner values must yield differing structural hashes.
    #[test]
    fn test_spanned_structural_hash_differs_for_different_values() {
        use crate::Span;
        use core::hash::Hasher;
        use std::hash::DefaultHasher;

        // Two Spanned values with different inner values
        let a = Spanned::new(42i32, Span::new(0, 10));
        let b = Spanned::new(99i32, Span::new(0, 10));

        // They should have different structural hashes
        let mut hasher_a = DefaultHasher::new();
        Peek::new(&a).structural_hash(&mut hasher_a);
        let hash_a = hasher_a.finish();

        let mut hasher_b = DefaultHasher::new();
        Peek::new(&b).structural_hash(&mut hasher_b);
        let hash_b = hasher_b.finish();

        assert_ne!(
            hash_a, hash_b,
            "Spanned values with different inner values should have different structural hashes"
        );
    }

    /// The derive must record metadata info on the field descriptors themselves.
    #[test]
    fn test_spanned_field_metadata() {
        use facet_core::{Type, UserType};

        // Get the shape for Spanned<i32>
        let shape = <Spanned<i32> as facet_core::Facet>::SHAPE;

        // Extract the struct type
        let struct_type = match shape.ty {
            Type::User(UserType::Struct(st)) => st,
            _ => panic!("Expected struct type"),
        };

        // Find the span field and verify it has metadata = "span"
        let span_field = struct_type
            .fields
            .iter()
            .find(|f| f.name == "span")
            .expect("Should have span field");

        assert!(
            span_field.is_metadata(),
            "span field should be marked as metadata"
        );
        assert_eq!(
            span_field.metadata_kind(),
            Some("span"),
            "span field should have metadata kind 'span'"
        );

        // Verify the value field is NOT metadata
        let value_field = struct_type
            .fields
            .iter()
            .find(|f| f.name == "value")
            .expect("Should have value field");

        assert!(
            !value_field.is_metadata(),
            "value field should not be marked as metadata"
        );
    }
}