use core::{cmp::Ordering, marker::PhantomData, mem::{size_of, size_of_val, transmute}};
use facet_core::{
    Def, Facet, PointerType, PtrConst, PtrConstWide, PtrMut, Shape, StructKind, Type, TypeNameOpts,
    UserType, ValueVTable,
};

use crate::{ReflectError, ScalarType};

use super::{
    ListLikeDef, PeekEnum, PeekList, PeekListLike, PeekMap, PeekSmartPointer, PeekStruct,
    PeekTuple, tuple::TupleType,
};

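/// Identifies a value encountered during reflection: its [`Shape`] paired with its address.
///
/// Two `ValueId`s are equal only when both the shape and the pointer match; the
/// `Display` impl renders them as `shape@address`.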
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct ValueId<'shape> {
    pub(crate) shape: &'shape Shape<'shape>,
    pub(crate) ptr: *const u8,
}

impl<'shape> ValueId<'shape> {
    pub(crate) fn new(shape: &'shape Shape<'shape>, ptr: *const u8) -> Self {
        Self { shape, ptr }
    }
}

impl core::fmt::Display for ValueId<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        write!(f, "{}@{:p}", self.shape, self.ptr)
    }
}

impl core::fmt::Debug for ValueId<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        core::fmt::Display::fmt(self, f)
    }
}

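/// A constant pointer to a value that is either thin or wide.
///
/// `Thin` wraps a [`PtrConst`] and is used for sized types; `Wide` wraps a
/// [`PtrConstWide`] and carries the extra metadata needed for unsized types
/// such as slices and trait objects.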
#[derive(Clone, Copy)]
pub enum GenericPtr<'mem> {
    Thin(PtrConst<'mem>),
    Wide(PtrConstWide<'mem>),
}

impl<'a> From<PtrConst<'a>> for GenericPtr<'a> {
    fn from(value: PtrConst<'a>) -> Self {
        GenericPtr::Thin(value)
    }
}

impl<'a> From<PtrConstWide<'a>> for GenericPtr<'a> {
    fn from(value: PtrConstWide<'a>) -> Self {
        GenericPtr::Wide(value)
    }
}

impl<'mem> GenericPtr<'mem> {
    #[inline(always)]
    fn new<T: ?Sized>(ptr: *const T) -> Self {
        if size_of_val(&ptr) == size_of::<PtrConst>() {
            GenericPtr::Thin(PtrConst::new(ptr.cast::<()>()))
        } else if size_of_val(&ptr) == size_of::<PtrConstWide>() {
            GenericPtr::Wide(PtrConstWide::new(ptr))
        } else {
            panic!("Couldn't determine if pointer to T is thin or wide");
        }
    }

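    /// Returns the thin pointer, or `None` if this is a wide pointer.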
    #[inline(always)]
    pub fn thin(self) -> Option<PtrConst<'mem>> {
        match self {
            GenericPtr::Thin(ptr) => Some(ptr),
            GenericPtr::Wide(_ptr) => None,
        }
    }

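    /// Borrows the pointee as a `&T`.
    ///
    /// # Safety
    ///
    /// The pointer must point to a valid, initialized value of type `T`, and the
    /// pointer kind (thin or wide) must match what references to `T` require.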
    #[inline(always)]
    unsafe fn get<T: ?Sized>(self) -> &'mem T {
        match self {
            GenericPtr::Thin(ptr) => {
                // `T` is thin here, so `&T` has the same layout as `*const u8`:
                // reinterpret a reference to the stored byte pointer as `&&T`,
                // then go from `&&T` to `&T`.
                let ptr = ptr.as_byte_ptr();
                let ptr_ref = &ptr;

                (unsafe { transmute::<&*const u8, &&T>(ptr_ref) }) as _
            }
            GenericPtr::Wide(ptr) => unsafe { ptr.get() },
        }
    }

    #[inline(always)]
    fn as_byte_ptr(self) -> *const u8 {
        match self {
            GenericPtr::Thin(ptr) => ptr.as_byte_ptr(),
            GenericPtr::Wide(ptr) => ptr.as_byte_ptr(),
        }
    }

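    /// Returns a pointer offset by `offset` bytes, e.g. to reach a struct field.
    ///
    /// # Safety
    ///
    /// `offset` must stay within the bounds of the pointed-to value.
    ///
    /// # Panics
    ///
    /// Panics when called on a wide pointer.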
    #[inline(always)]
    pub unsafe fn field(self, offset: usize) -> GenericPtr<'mem> {
        match self {
            GenericPtr::Thin(ptr) => GenericPtr::Thin(unsafe { ptr.field(offset) }),
            GenericPtr::Wide(_ptr) => {
                panic!("Field access on wide pointers is not supported")
            }
        }
    }
}

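/// A read-only, type-erased view of a value: a [`GenericPtr`] to the data plus
/// the [`Shape`] describing its type, used to inspect the value at runtime.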
#[derive(Clone, Copy)]
pub struct Peek<'mem, 'facet, 'shape> {
    pub(crate) data: GenericPtr<'mem>,

    pub(crate) shape: &'shape Shape<'shape>,

    invariant: PhantomData<fn(&'facet ()) -> &'facet ()>,
}

impl<'mem, 'facet, 'shape> Peek<'mem, 'facet, 'shape> {
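    /// Creates a `Peek` that borrows `t` for read-only reflection.
    ///
    /// A minimal usage sketch, assuming `Peek` is in scope and the peeked type
    /// implements [`Facet`]:
    ///
    /// ```ignore
    /// let value = 42u32;
    /// let peek = Peek::new(&value);
    /// assert_eq!(peek.get::<u32>().unwrap(), &42);
    /// ```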
    pub fn new<T: Facet<'facet> + ?Sized>(t: &'mem T) -> Self {
        Self {
            data: GenericPtr::new(t),
            shape: T::SHAPE,
            invariant: PhantomData,
        }
    }

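    /// Builds a `Peek` from a raw pointer and a shape without any checking.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `data` points to a valid, initialized value
    /// whose type is described by `shape`, and that the value outlives `'mem`.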
    pub unsafe fn unchecked_new(
        data: impl Into<GenericPtr<'mem>>,
        shape: &'shape Shape<'shape>,
    ) -> Self {
        Self {
            data: data.into(),
            shape,
            invariant: PhantomData,
        }
    }

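    /// Returns the [`ValueVTable`] of this value's shape.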
    #[inline(always)]
    pub fn vtable(&self) -> &'shape ValueVTable {
        self.shape.vtable
    }

    pub fn id(&self) -> ValueId<'shape> {
        ValueId::new(self.shape, self.data.as_byte_ptr())
    }

    #[inline]
    pub fn ptr_eq(&self, other: &Peek<'_, '_, '_>) -> bool {
        self.data.as_byte_ptr() == other.data.as_byte_ptr()
    }

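    /// Compares this value with another using the shape's `PartialEq` support.
    ///
    /// Returns `None` if the shape does not implement equality or if one pointer
    /// is thin and the other wide.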
    #[inline]
    pub fn partial_eq(&self, other: &Peek<'_, '_, '_>) -> Option<bool> {
        match (self.data, other.data) {
            (GenericPtr::Thin(a), GenericPtr::Thin(b)) => unsafe {
                (self.vtable().sized().unwrap().partial_eq)().map(|f| f(a, b))
            },
            (GenericPtr::Wide(a), GenericPtr::Wide(b)) => unsafe {
                (self.vtable().r#unsized().unwrap().partial_eq)().map(|f| f(a, b))
            },
            _ => None,
        }
    }

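    /// Compares this value with another using the shape's `PartialOrd` support.
    ///
    /// The outer `Option` is `None` when ordering is unsupported or the pointer
    /// kinds differ; the inner `Option<Ordering>` is the comparison result.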
    #[inline]
    pub fn partial_cmp(&self, other: &Peek<'_, '_, '_>) -> Option<Option<Ordering>> {
        match (self.data, other.data) {
            (GenericPtr::Thin(a), GenericPtr::Thin(b)) => unsafe {
                (self.vtable().sized().unwrap().partial_ord)().map(|f| f(a, b))
            },
            (GenericPtr::Wide(a), GenericPtr::Wide(b)) => unsafe {
                (self.vtable().r#unsized().unwrap().partial_ord)().map(|f| f(a, b))
            },
            _ => None,
        }
    }
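
    /// Feeds this value into `hasher` using the shape's hashing support.
    ///
    /// Returns an error if the shape does not support hashing.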
    #[inline(always)]
    pub fn hash<H: core::hash::Hasher>(&self, hasher: &mut H) -> Result<(), ReflectError<'_>> {
        match self.data {
            GenericPtr::Thin(ptr) => {
                if let Some(hash_fn) = (self.vtable().sized().unwrap().hash)() {
                    let hasher_opaque = PtrMut::new(hasher);
                    unsafe {
                        hash_fn(ptr, hasher_opaque, |opaque, bytes| {
                            opaque.as_mut::<H>().write(bytes)
                        })
                    };
                    return Ok(());
                }
            }
            GenericPtr::Wide(ptr) => {
                if let Some(hash_fn) = (self.vtable().r#unsized().unwrap().hash)() {
                    let hasher_opaque = PtrMut::new(hasher);
                    unsafe {
                        hash_fn(ptr, hasher_opaque, |opaque, bytes| {
                            opaque.as_mut::<H>().write(bytes)
                        })
                    };
                    return Ok(());
                }
            }
        }
        Err(ReflectError::OperationFailed {
            shape: self.shape(),
            operation: "hash",
        })
    }

    #[inline(always)]
    pub fn type_name(
        &self,
        f: &mut core::fmt::Formatter<'_>,
        opts: TypeNameOpts,
    ) -> core::fmt::Result {
        (self.shape.vtable.type_name())(f, opts)
    }

    #[inline(always)]
    pub const fn shape(&self) -> &'shape Shape<'shape> {
        self.shape
    }

    #[inline(always)]
    pub const fn data(&self) -> GenericPtr<'mem> {
        self.data
    }

    pub fn scalar_type(&self) -> Option<ScalarType> {
        ScalarType::try_from_shape(self.shape)
    }

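    /// Borrows the value as a concrete `&T`.
    ///
    /// Returns a [`ReflectError::WrongShape`] error when `T`'s shape differs from
    /// this value's shape.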
    #[inline]
    pub fn get<T: Facet<'facet>>(&self) -> Result<&T, ReflectError<'shape>> {
        if self.shape != T::SHAPE {
            Err(ReflectError::WrongShape {
                expected: self.shape,
                actual: T::SHAPE,
            })
        } else {
            Ok(unsafe { self.data.get::<T>() })
        }
    }

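    /// Returns the value as a `&str` if it is a string slice, a `String`, or a
    /// reference to a string slice, after drilling down with
    /// [`Self::innermost_peek`].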
    pub fn as_str(&self) -> Option<&'mem str> {
        let peek = self.innermost_peek();
        if let Some(ScalarType::Str) = peek.scalar_type() {
            unsafe { Some(peek.data.get::<&str>()) }
        } else if let Some(ScalarType::String) = peek.scalar_type() {
            unsafe { Some(peek.data.get::<alloc::string::String>().as_str()) }
        } else if let Type::Pointer(PointerType::Reference(vpt)) = peek.shape.ty {
            let target_shape = (vpt.target)();
            if let Some(ScalarType::Str) = ScalarType::try_from_shape(target_shape) {
                unsafe { Some(peek.data.get::<&str>()) }
            } else {
                None
            }
        } else {
            None
        }
    }

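    /// Returns the value as a byte slice if it is a reference to a `[u8]` slice.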
    pub fn as_bytes(&self) -> Option<&'mem [u8]> {
        if let Type::Pointer(PointerType::Reference(vpt)) = self.shape.ty {
            let target_shape = (vpt.target)();
            if let Def::Slice(sd) = target_shape.def {
                if sd.t().is_type::<u8>() {
                    unsafe { return Some(self.data.get::<&[u8]>()) }
                }
            }
        }
        None
    }

    pub fn into_struct(self) -> Result<PeekStruct<'mem, 'facet, 'shape>, ReflectError<'shape>> {
        if let Type::User(UserType::Struct(ty)) = self.shape.ty {
            Ok(PeekStruct { value: self, ty })
        } else {
            Err(ReflectError::WasNotA {
                expected: "struct",
                actual: self.shape,
            })
        }
    }

    pub fn into_enum(self) -> Result<PeekEnum<'mem, 'facet, 'shape>, ReflectError<'shape>> {
        if let Type::User(UserType::Enum(ty)) = self.shape.ty {
            Ok(PeekEnum { value: self, ty })
        } else {
            Err(ReflectError::WasNotA {
                expected: "enum",
                actual: self.shape,
            })
        }
    }

    pub fn into_map(self) -> Result<PeekMap<'mem, 'facet, 'shape>, ReflectError<'shape>> {
        if let Def::Map(def) = self.shape.def {
            Ok(PeekMap { value: self, def })
        } else {
            Err(ReflectError::WasNotA {
                expected: "map",
                actual: self.shape,
            })
        }
    }

    pub fn into_list(self) -> Result<PeekList<'mem, 'facet, 'shape>, ReflectError<'shape>> {
        if let Def::List(def) = self.shape.def {
            return Ok(PeekList { value: self, def });
        }

        Err(ReflectError::WasNotA {
            expected: "list",
            actual: self.shape,
        })
    }

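    /// Converts into a [`PeekListLike`] if the value is a list, an array, or a
    /// reference/pointer to a slice.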
    pub fn into_list_like(
        self,
    ) -> Result<PeekListLike<'mem, 'facet, 'shape>, ReflectError<'shape>> {
        match self.shape.def {
            Def::List(def) => Ok(PeekListLike::new(self, ListLikeDef::List(def))),
            Def::Array(def) => Ok(PeekListLike::new(self, ListLikeDef::Array(def))),
            _ => {
                // Not a list or array by `def`; a reference or raw pointer to a
                // slice still counts as list-like.
                match self.shape.ty {
                    Type::Pointer(ptr) => match ptr {
                        PointerType::Reference(vpt) | PointerType::Raw(vpt) => {
                            let target = (vpt.target)();
                            match target.def {
                                Def::Slice(def) => {
                                    return Ok(PeekListLike::new(self, ListLikeDef::Slice(def)));
                                }
                                _ => {
                                    // The pointee is not a slice; fall through to the error.
                                }
                            }
                        }
                        PointerType::Function(_) => {
                            // Function pointers are never list-like; fall through to the error.
                        }
                    },
                    _ => {
                        // Not a pointer type either; fall through to the error.
                    }
                }

                Err(ReflectError::WasNotA {
                    expected: "list, array or slice",
                    actual: self.shape,
                })
            }
        }
    }

    pub fn into_smart_pointer(
        self,
    ) -> Result<PeekSmartPointer<'mem, 'facet, 'shape>, ReflectError<'shape>> {
        if let Def::SmartPointer(def) = self.shape.def {
            Ok(PeekSmartPointer { value: self, def })
        } else {
            Err(ReflectError::WasNotA {
                expected: "smart pointer",
                actual: self.shape,
            })
        }
    }

    pub fn into_option(
        self,
    ) -> Result<super::PeekOption<'mem, 'facet, 'shape>, ReflectError<'shape>> {
        if let Def::Option(def) = self.shape.def {
            Ok(super::PeekOption { value: self, def })
        } else {
            Err(ReflectError::WasNotA {
                expected: "option",
                actual: self.shape,
            })
        }
    }

    pub fn into_tuple(self) -> Result<PeekTuple<'mem, 'facet, 'shape>, ReflectError<'shape>> {
        if let Type::User(UserType::Struct(struct_type)) = self.shape.ty {
            if struct_type.kind == StructKind::Tuple {
                Ok(PeekTuple {
                    value: self,
                    ty: TupleType {
                        fields: struct_type.fields,
                    },
                })
            } else {
                Err(ReflectError::WasNotA {
                    expected: "tuple",
                    actual: self.shape,
                })
            }
        } else {
            Err(ReflectError::WasNotA {
                expected: "tuple",
                actual: self.shape,
            })
        }
    }

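    /// Repeatedly follows the shape's `try_borrow_inner` to return a `Peek` at the
    /// innermost value, e.g. the value wrapped by transparent wrapper types.
    ///
    /// # Panics
    ///
    /// Panics if an inner value cannot be borrowed or the pointer is wide.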
    pub fn innermost_peek(self) -> Self {
        let mut current_peek = self;
        while let (Some(try_borrow_inner_fn), Some(inner_shape)) = (
            current_peek
                .shape
                .vtable
                .sized()
                .and_then(|v| (v.try_borrow_inner)()),
            current_peek.shape.inner,
        ) {
            unsafe {
                let inner_data = try_borrow_inner_fn(current_peek.data.thin().unwrap())
                    .unwrap_or_else(|e| {
                        panic!(
                            "innermost_peek: try_borrow_inner returned an error! was trying to go from {} to {}. error: {e}",
                            current_peek.shape,
                            inner_shape()
                        )
                    });

                current_peek = Peek {
                    data: inner_data.into(),
                    shape: inner_shape(),
                    invariant: PhantomData,
                };
            }
        }
        current_peek
    }
}

impl<'mem, 'facet, 'shape> core::fmt::Display for Peek<'mem, 'facet, 'shape> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        match self.data {
            GenericPtr::Thin(ptr) => {
                if let Some(display_fn) = (self.vtable().sized().unwrap().display)() {
                    return unsafe { display_fn(ptr, f) };
                }
            }
            GenericPtr::Wide(ptr) => {
                if let Some(display_fn) = (self.vtable().r#unsized().unwrap().display)() {
                    return unsafe { display_fn(ptr, f) };
                }
            }
        }
        write!(f, "⟨{}⟩", self.shape)
    }
}

impl<'mem, 'facet, 'shape> core::fmt::Debug for Peek<'mem, 'facet, 'shape> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        match self.data {
            GenericPtr::Thin(ptr) => {
                if let Some(debug_fn) = (self.vtable().sized().unwrap().debug)() {
                    return unsafe { debug_fn(ptr, f) };
                }
            }
            GenericPtr::Wide(ptr) => {
                if let Some(debug_fn) = (self.vtable().r#unsized().unwrap().debug)() {
                    return unsafe { debug_fn(ptr, f) };
                }
            }
        }
        write!(f, "⟨{}⟩", self.shape)
    }
}

impl<'mem, 'facet, 'shape> core::cmp::PartialEq for Peek<'mem, 'facet, 'shape> {
    fn eq(&self, other: &Self) -> bool {
        // The inherent `partial_eq` is selected here (inherent methods win over
        // trait methods); shapes without equality support compare as not equal.
        self.partial_eq(other).unwrap_or(false)
    }
}

impl<'mem, 'facet, 'shape> core::cmp::PartialOrd for Peek<'mem, 'facet, 'shape> {
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        // Calls the inherent `partial_cmp`, which returns `Option<Option<Ordering>>`,
        // so this does not recurse into the trait method.
        self.partial_cmp(other).unwrap_or(None)
    }
}

impl<'mem, 'facet, 'shape> core::hash::Hash for Peek<'mem, 'facet, 'shape> {
    fn hash<H: core::hash::Hasher>(&self, hasher: &mut H) {
        self.hash(hasher)
            .expect("Hashing is not supported for this shape");
    }
}