facet_reflect/wip/heap_value.rs

use crate::ReflectError;
use core::{alloc::Layout, marker::PhantomData};
use facet_ansi::Stylize as _;
use facet_core::{Facet, PtrConst, PtrMut, Shape};

/// A type-erased value stored on the heap, described by its [`Shape`].
pub struct HeapValue<'a> {
    pub(crate) guard: Option<Guard>,
    pub(crate) shape: &'static Shape,
    pub(crate) phantom: PhantomData<&'a ()>,
}

impl Drop for HeapValue<'_> {
    fn drop(&mut self) {
        if let Some(guard) = self.guard.take() {
            // Drop the value in place first (if the shape provides a drop
            // fn), then let the guard free the backing allocation.
            if let Some(drop_fn) = self.shape.vtable.drop_in_place {
                unsafe { drop_fn(PtrMut::new(guard.ptr)) };
            }
            drop(guard);
        }
    }
}

impl<'a> HeapValue<'a> {
    /// Converts this `HeapValue` back into a concrete `T`, after checking
    /// that the stored shape matches `T::SHAPE`.
    pub fn materialize<T: Facet + 'a>(mut self) -> Result<T, ReflectError> {
        if self.shape != T::SHAPE {
            return Err(ReflectError::WrongShape {
                expected: self.shape,
                actual: T::SHAPE,
            });
        }

        let guard = self.guard.take().unwrap();
        let data = PtrConst::new(guard.ptr);
        let res = unsafe { data.read::<T>() };
        drop(guard); // free the allocation, but don't drop in place: `res` now owns the bits
        Ok(res)
    }
}
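
// Usage sketch (illustrative, not part of the original file): a `HeapValue`
// is typically produced by this crate's builder API and then materialized
// back into a concrete type. The builder names below (`Wip::alloc`, `put`,
// `build`) are assumptions about the surrounding crate, not confirmed here:
//
//     let hv = Wip::alloc::<u64>().put(42u64)?.build()?;
//     let n: u64 = hv.materialize()?; // shapes match, so this succeeds
//     // materializing as any other type returns ReflectError::WrongShape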

impl HeapValue<'_> {
    /// Formats the value via the shape's `display` vtable entry, falling
    /// back to the shape name when none is provided.
    pub fn fmt_display(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        if let Some(display_fn) = self.shape.vtable.display {
            unsafe { display_fn(PtrConst::new(self.guard.as_ref().unwrap().ptr), f) }
        } else {
            write!(f, "⟨{}⟩", self.shape)
        }
    }

    /// Formats the value via the shape's `debug` vtable entry, falling
    /// back to the shape name when none is provided.
    pub fn fmt_debug(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        if let Some(debug_fn) = self.shape.vtable.debug {
            unsafe { debug_fn(PtrConst::new(self.guard.as_ref().unwrap().ptr), f) }
        } else {
            write!(f, "⟨{}⟩", self.shape)
        }
    }
}

impl core::fmt::Display for HeapValue<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        self.fmt_display(f)
    }
}

impl core::fmt::Debug for HeapValue<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        self.fmt_debug(f)
    }
}
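
// Sketch of the fallback behavior (illustrative, not from the original
// file): for a shape whose vtable has no `display` entry, `format!("{}",
// hv)` renders the shape itself between angle quotes, e.g. something like
// `⟨u64⟩`; the exact text depends on `Shape`'s own Display impl.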

impl PartialEq for HeapValue<'_> {
    fn eq(&self, other: &Self) -> bool {
        if self.shape != other.shape {
            return false;
        }
        if let Some(eq_fn) = self.shape.vtable.eq {
            unsafe {
                eq_fn(
                    PtrConst::new(self.guard.as_ref().unwrap().ptr),
                    PtrConst::new(other.guard.as_ref().unwrap().ptr),
                )
            }
        } else {
            // No `eq` in the vtable: values of this shape never compare equal.
            false
        }
    }
}

impl PartialOrd for HeapValue<'_> {
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        if self.shape != other.shape {
            return None;
        }
        if let Some(partial_ord_fn) = self.shape.vtable.partial_ord {
            unsafe {
                partial_ord_fn(
                    PtrConst::new(self.guard.as_ref().unwrap().ptr),
                    PtrConst::new(other.guard.as_ref().unwrap().ptr),
                )
            }
        } else {
            // No `partial_ord` in the vtable: this shape is unordered.
            None
        }
    }
}
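
// Sketch (illustrative): comparisons are only defined within a single
// shape. Given two hypothetical values `hv_a` (holding a u64) and `hv_b`
// (holding a String):
//
//     assert!(hv_a != hv_b);                      // different shapes: always false
//     assert!(hv_a.partial_cmp(&hv_b).is_none()); // never ordered across shapes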

/// Owns a raw heap allocation and frees it on drop. It does *not* drop the
/// value stored inside; `HeapValue`'s own `Drop` handles that first.
pub struct Guard {
    pub(crate) ptr: *mut u8,
    pub(crate) layout: Layout,
}

impl Drop for Guard {
    fn drop(&mut self) {
        // Zero-sized layouts were never actually allocated, so there is
        // nothing to deallocate.
        if self.layout.size() != 0 {
            log::trace!(
                "Deallocating memory at ptr: {:p}, size: {}, align: {}",
                self.ptr.cyan(),
                self.layout.size().yellow(),
                self.layout.align().green()
            );
            unsafe { alloc::alloc::dealloc(self.ptr, self.layout) };
        }
    }
}
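
// A minimal test sketch (added for illustration, not in the original file):
// exercises `Guard`'s Drop against the global allocator. Assumes the crate
// links `alloc` (it already uses `alloc::alloc::dealloc` above) and that
// tests run with std available.
#[cfg(test)]
mod guard_tests {
    use super::Guard;
    use core::alloc::Layout;

    #[test]
    fn guard_frees_its_allocation() {
        let layout = Layout::new::<u64>();
        // SAFETY: `layout` has non-zero size.
        let ptr = unsafe { alloc::alloc::alloc(layout) };
        assert!(!ptr.is_null());
        // Dropping the guard must deallocate exactly once; running this
        // under Miri would catch a leak or a double free.
        drop(Guard { ptr, layout });
    }
}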