facet_reflect/wip/heap_value.rs

use crate::ReflectError;
use crate::trace;
use core::{alloc::Layout, marker::PhantomData};
use facet_core::{Facet, PtrConst, PtrMut, Shape};
#[cfg(feature = "log")]
use owo_colors::OwoColorize as _;

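/// A heap-allocated, type-erased value: the allocation is owned by `guard` and
/// its runtime type is described by `shape`; the `'a` lifetime (carried by
/// `phantom`) ties the value to any data it borrows.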
pub struct HeapValue<'a> {
    pub(crate) guard: Option<Guard>,
    pub(crate) shape: &'static Shape,
    pub(crate) phantom: PhantomData<&'a ()>,
}

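// Dropping a `HeapValue` first runs the shape's `drop_in_place` (if the vtable
// provides one) on the value, then lets the `Guard` free the allocation.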
impl Drop for HeapValue<'_> {
    fn drop(&mut self) {
        if let Some(guard) = self.guard.take() {
            if let Some(drop_fn) = self.shape.vtable.drop_in_place {
                unsafe { drop_fn(PtrMut::new(guard.ptr)) };
            }
            drop(guard);
        }
    }
}

impl<'a> HeapValue<'a> {
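    /// Moves the value out as a concrete `T`. Fails with
    /// `ReflectError::WrongShape` if `T`'s shape does not match the stored
    /// shape; on success the backing allocation is freed after the read.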
    pub fn materialize<T: Facet + 'a>(mut self) -> Result<T, ReflectError> {
        if self.shape != T::SHAPE {
            return Err(ReflectError::WrongShape {
                expected: self.shape,
                actual: T::SHAPE,
            });
        }

        let guard = self.guard.take().unwrap();
        let data = PtrConst::new(guard.ptr);
        let res = unsafe { data.read::<T>() };
        drop(guard);
        Ok(res)
    }
}

impl HeapValue<'_> {
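    /// Formats the value using the shape's `display` vtable entry, or falls
    /// back to writing `⟨shape⟩` when no display function is provided.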
    pub fn fmt_display(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        if let Some(display_fn) = self.shape.vtable.display {
            unsafe { display_fn(PtrConst::new(self.guard.as_ref().unwrap().ptr), f) }
        } else {
            write!(f, "⟨{}⟩", self.shape)
        }
    }

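    /// Formats the value using the shape's `debug` vtable entry, or falls back
    /// to writing `⟨shape⟩` when no debug function is provided.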
    pub fn fmt_debug(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        if let Some(debug_fn) = self.shape.vtable.debug {
            unsafe { debug_fn(PtrConst::new(self.guard.as_ref().unwrap().ptr), f) }
        } else {
            write!(f, "⟨{}⟩", self.shape)
        }
    }
}

impl core::fmt::Display for HeapValue<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        self.fmt_display(f)
    }
}

impl core::fmt::Debug for HeapValue<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        self.fmt_debug(f)
    }
}

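// Two `HeapValue`s compare equal only when they share a shape and that shape's
// `eq` vtable entry says so; shapes without an `eq` function never compare equal.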
impl PartialEq for HeapValue<'_> {
    fn eq(&self, other: &Self) -> bool {
        if self.shape != other.shape {
            return false;
        }
        if let Some(eq_fn) = self.shape.vtable.eq {
            unsafe {
                eq_fn(
                    PtrConst::new(self.guard.as_ref().unwrap().ptr),
                    PtrConst::new(other.guard.as_ref().unwrap().ptr),
                )
            }
        } else {
            false
        }
    }
}

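// Ordering follows the same pattern: values of different shapes, or shapes
// without a `partial_ord` function, are unordered.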
impl PartialOrd for HeapValue<'_> {
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        if self.shape != other.shape {
            return None;
        }
        if let Some(partial_ord_fn) = self.shape.vtable.partial_ord {
            unsafe {
                partial_ord_fn(
                    PtrConst::new(self.guard.as_ref().unwrap().ptr),
                    PtrConst::new(other.guard.as_ref().unwrap().ptr),
                )
            }
        } else {
            None
        }
    }
}

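/// Owns a raw heap allocation described by `layout`. Dropping a `Guard` only
/// frees the memory; running the value's destructor is `HeapValue`'s job.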
pub struct Guard {
    pub(crate) ptr: *mut u8,
    pub(crate) layout: Layout,
}

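// Zero-sized layouts are skipped because nothing was allocated for them. The
// colorized `trace!` arguments use the `owo_colors` import, which is gated on
// the `log` feature.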
impl Drop for Guard {
    fn drop(&mut self) {
        if self.layout.size() != 0 {
            trace!(
                "Deallocating memory at ptr: {:p}, size: {}, align: {}",
                self.ptr.cyan(),
                self.layout.size().yellow(),
                self.layout.align().green()
            );
            unsafe { alloc::alloc::dealloc(self.ptr, self.layout) };
        }
    }
}
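
// A minimal usage sketch (illustrative only; the builder that actually produces
// a `HeapValue` lives elsewhere in this crate, and its exact API may differ):
//
//     let hv: HeapValue<'_> = /* built by the WIP machinery */;
//     println!("{hv}");                  // goes through fmt_display
//     let n = hv.materialize::<u64>()?;  // moves the value out if shapes match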