facet_reflect/wip/
heap_value.rs1use crate::ReflectError;
2use core::{alloc::Layout, marker::PhantomData};
3
4use facet_core::{Facet, PtrConst, PtrMut, Shape};
5use yansi::Paint as _;
6
/// An owned, type-erased value stored on the heap.
///
/// `shape` describes the runtime type of the bytes behind `guard`'s pointer.
/// `guard` is `Some` while the value is live; it is taken out by
/// `materialize` (transferring ownership to the caller) and by `Drop`
/// (destroying the value), so `None` means the allocation has already
/// been consumed or released.
pub struct HeapValue<'a> {
    /// Owns the raw allocation (pointer + layout); `None` once consumed.
    pub(crate) guard: Option<Guard>,
    /// Runtime type description of the pointed-to value.
    pub(crate) shape: &'static Shape,
    /// Ties this value to lifetime `'a` without storing a reference.
    pub(crate) phantom: PhantomData<&'a ()>,
}
13
14impl Drop for HeapValue<'_> {
15 fn drop(&mut self) {
16 if let Some(guard) = self.guard.take() {
17 if let Some(drop_fn) = self.shape.vtable.drop_in_place {
18 unsafe { drop_fn(PtrMut::new(guard.ptr)) };
19 }
20 drop(guard);
21 }
22 }
23}
24
25impl<'a> HeapValue<'a> {
26 pub fn materialize<T: Facet + 'a>(mut self) -> Result<T, ReflectError> {
28 if self.shape != T::SHAPE {
29 return Err(ReflectError::WrongShape {
30 expected: self.shape,
31 actual: T::SHAPE,
32 });
33 }
34
35 let guard = self.guard.take().unwrap();
36 let data = PtrConst::new(guard.ptr);
37 let res = unsafe { data.read::<T>() };
38 drop(guard); Ok(res)
40 }
41}
42
43impl HeapValue<'_> {
44 pub fn fmt_display(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
46 if let Some(display_fn) = self.shape.vtable.display {
47 unsafe { display_fn(PtrConst::new(self.guard.as_ref().unwrap().ptr), f) }
48 } else {
49 write!(f, "⟨{}⟩", self.shape)
50 }
51 }
52
53 pub fn fmt_debug(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
55 if let Some(debug_fn) = self.shape.vtable.debug {
56 unsafe { debug_fn(PtrConst::new(self.guard.as_ref().unwrap().ptr), f) }
57 } else {
58 write!(f, "⟨{}⟩", self.shape)
59 }
60 }
61}
62
/// Delegates to [`HeapValue::fmt_display`], which uses the shape's
/// `display` vtable entry when present and otherwise prints the shape.
impl core::fmt::Display for HeapValue<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        self.fmt_display(f)
    }
}
68
/// Delegates to [`HeapValue::fmt_debug`], which uses the shape's
/// `debug` vtable entry when present and otherwise prints the shape.
impl core::fmt::Debug for HeapValue<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        self.fmt_debug(f)
    }
}
74
75impl PartialEq for HeapValue<'_> {
76 fn eq(&self, other: &Self) -> bool {
77 if self.shape != other.shape {
78 return false;
79 }
80 if let Some(eq_fn) = self.shape.vtable.eq {
81 unsafe {
82 eq_fn(
83 PtrConst::new(self.guard.as_ref().unwrap().ptr),
84 PtrConst::new(other.guard.as_ref().unwrap().ptr),
85 )
86 }
87 } else {
88 false
89 }
90 }
91}
92
93impl PartialOrd for HeapValue<'_> {
94 fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
95 if self.shape != other.shape {
96 return None;
97 }
98 if let Some(partial_ord_fn) = self.shape.vtable.partial_ord {
99 unsafe {
100 partial_ord_fn(
101 PtrConst::new(self.guard.as_ref().unwrap().ptr),
102 PtrConst::new(other.guard.as_ref().unwrap().ptr),
103 )
104 }
105 } else {
106 None
107 }
108 }
109}
110
/// RAII owner of a raw heap allocation.
///
/// Holds the pointer together with the `Layout` it was allocated with;
/// `Drop` returns the memory to the allocator using that exact layout
/// (skipping deallocation entirely when `layout.size() == 0`). It does
/// NOT run the pointee's destructor — callers (see `HeapValue`) must
/// drop the contents first.
pub struct Guard {
    /// Start of the allocation.
    pub(crate) ptr: *mut u8,
    /// Layout the allocation was made with; reused for deallocation.
    pub(crate) layout: Layout,
}
122
123impl Drop for Guard {
124 fn drop(&mut self) {
125 if self.layout.size() != 0 {
126 log::trace!(
127 "Deallocating memory at ptr: {:p}, size: {}, align: {}",
128 self.ptr.cyan(),
129 self.layout.size().yellow(),
130 self.layout.align().green()
131 );
132 unsafe { alloc::alloc::dealloc(self.ptr, self.layout) };
134 }
135 }
136}