1#[cfg(feature = "unstable")]
5use crate::raw::basic_allocation::mi_free;
6use crate::raw::{heap::*, types::mi_heap_t};
7#[cfg(feature = "unstable")]
8use core::{
9 alloc::*,
10 ptr::{slice_from_raw_parts_mut, NonNull},
11};
12use core::{ffi::c_void, fmt::Debug, ops::Deref};
13
/// Wrapper around a raw mimalloc heap handle (`*mut mi_heap_t`).
///
/// `T` is any type that dereferences to the raw heap pointer, so the
/// wrapper can either own the pointer directly (e.g. [`GlobalHeap`]) or
/// hold it through some smart-pointer-like handle.
pub struct MiMallocHeap<T: Deref<Target = *mut mi_heap_t>> {
    // Public raw handle; callers elsewhere in the crate access it directly.
    pub heap: T,
}
18
impl<T: Deref<Target = *mut mi_heap_t>> MiMallocHeap<T> {
    /// Wraps an existing heap handle in a `MiMallocHeap`.
    #[inline]
    pub fn new(heap: T) -> Self {
        Self { heap }
    }
}
25
26impl<T> Debug for MiMallocHeap<T>
27where
28 T: Deref<Target = *mut mi_heap_t> + Debug,
29{
30 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
31 f.write_fmt(format_args!("{:?}", self.heap))
32 }
33}
34
impl<T: Deref<Target = *mut mi_heap_t>> From<T> for MiMallocHeap<T> {
    /// Converts any heap-pointer handle into a `MiMallocHeap` by wrapping it.
    fn from(heap: T) -> Self {
        Self { heap }
    }
}
40
#[cfg(feature = "unstable")]
// Backs the (nightly-only) `core::alloc::Allocator` API with the raw
// mimalloc heap functions, so a `MiMallocHeap` can be used with
// `allocator_api` containers.
unsafe impl<T: Deref<Target = *mut mi_heap_t>> Allocator for MiMallocHeap<T> {
    /// Allocates `layout.size()` bytes with `layout.align()` alignment from
    /// this heap; returns `AllocError` when mimalloc returns null.
    #[inline]
    fn allocate(
        &self,
        layout: Layout,
    ) -> Result<core::ptr::NonNull<[u8]>, core::alloc::AllocError> {
        unsafe {
            let mem = mi_heap_malloc_aligned(*self.heap.deref(), layout.size(), layout.align());
            match NonNull::new(mem) {
                // SAFETY: `mem` is non-null on this branch; the fat pointer
                // covers exactly the `layout.size()` bytes requested.
                Some(mem) => Ok(NonNull::new_unchecked(slice_from_raw_parts_mut(
                    mem.as_ptr() as *mut _,
                    layout.size(),
                ))),
                None => Err(AllocError),
            }
        }
    }

    /// Frees `ptr` through the global `mi_free`. mimalloc's free accepts a
    /// pointer allocated from any of its heaps, so neither the heap handle
    /// nor the layout is needed here.
    #[inline]
    unsafe fn deallocate(&self, ptr: core::ptr::NonNull<u8>, _layout: Layout) {
        mi_free(ptr.as_ptr() as *mut _)
    }

    /// Same as [`Allocator::allocate`] but the returned memory is
    /// zero-initialized (`mi_heap_zalloc_aligned`).
    #[inline]
    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, core::alloc::AllocError> {
        unsafe {
            let mem = mi_heap_zalloc_aligned(*self.heap.deref(), layout.size(), layout.align());
            match NonNull::new(mem) {
                // SAFETY: `mem` is non-null on this branch.
                Some(mem) => Ok(NonNull::new_unchecked(slice_from_raw_parts_mut(
                    mem.as_ptr() as *mut _,
                    layout.size(),
                ))),
                None => Err(AllocError),
            }
        }
    }

    /// Grows the allocation at `ptr` to `new_layout` via mimalloc realloc,
    /// which copies the old contents if the block has to move. Bytes beyond
    /// the old size are left uninitialized.
    #[inline]
    unsafe fn grow(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // Precondition check mirroring the `Allocator::grow` contract.
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        let mem = mi_heap_realloc_aligned(
            *self.heap.deref(),
            ptr.as_ptr() as *mut _,
            new_layout.size(),
            new_layout.align(),
        );
        match NonNull::new(mem) {
            // SAFETY: `mem` is non-null on this branch.
            Some(mem) => Ok(NonNull::new_unchecked(slice_from_raw_parts_mut(
                mem.as_ptr() as *mut _,
                new_layout.size(),
            ))),
            None => Err(AllocError),
        }
    }

    /// Like [`Allocator::grow`] but uses `mi_heap_rezalloc_aligned` so the
    /// newly added tail is zero-filled.
    #[inline]
    unsafe fn grow_zeroed(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // Precondition check mirroring the `Allocator::grow_zeroed` contract.
        debug_assert!(
            new_layout.size() >= old_layout.size(),
            "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
        );

        let mem = mi_heap_rezalloc_aligned(
            *self.heap.deref(),
            ptr.as_ptr() as *mut _,
            new_layout.size(),
            new_layout.align(),
        );
        match NonNull::new(mem) {
            // SAFETY: `mem` is non-null on this branch.
            Some(mem) => Ok(NonNull::new_unchecked(slice_from_raw_parts_mut(
                mem.as_ptr() as *mut _,
                new_layout.size(),
            ))),
            None => Err(AllocError),
        }
    }

    /// Shrinks the allocation at `ptr` to `new_layout`, also via mimalloc
    /// realloc (the block may still move even when shrinking).
    #[inline]
    unsafe fn shrink(
        &self,
        ptr: NonNull<u8>,
        old_layout: Layout,
        new_layout: Layout,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // Precondition check mirroring the `Allocator::shrink` contract.
        debug_assert!(
            new_layout.size() <= old_layout.size(),
            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
        );

        let mem = mi_heap_realloc_aligned(
            *self.heap.deref(),
            ptr.as_ptr() as *mut _,
            new_layout.size(),
            new_layout.align(),
        );
        match NonNull::new(mem) {
            // SAFETY: `mem` is non-null on this branch.
            Some(mem) => Ok(NonNull::new_unchecked(slice_from_raw_parts_mut(
                mem.as_ptr() as *mut _,
                new_layout.size(),
            ))),
            None => Err(AllocError),
        }
    }

    /// Identical to the trait's default implementation; kept explicit here.
    #[inline]
    fn by_ref(&self) -> &Self
    where
        Self: Sized,
    {
        self
    }
}
168
/// Callback-style visitor over the memory areas/blocks of a mimalloc heap.
///
/// `VisitorName` is a marker type parameter whose only purpose is to let a
/// single type implement several distinct visitors without impl conflicts.
pub trait HeapVisitor<VisitorName, T: Deref<Target = *mut mi_heap_t>>
where
    Self: Sized,
{
    /// Invoked for each visited area/block; the returned `bool` is handed
    /// back to `mi_heap_visit_blocks` (per mimalloc's callback contract,
    /// `false` stops the traversal — confirm against mimalloc docs).
    fn visitor(
        &mut self,
        heap: &mi_heap_t,
        area: &mi_heap_area_t,
        block: *mut c_void,
        size: usize,
    ) -> bool;

    /// Walks `heap`, forwarding each callback to [`Self::visitor`] through
    /// the C-ABI trampoline `visit_handler`, with `self` smuggled through
    /// the `void*` user-data argument.
    fn visit(&mut self, heap: &MiMallocHeap<T>) {
        unsafe {
            let heap: *mut mi_heap_t = *heap.heap.deref();
            // NOTE(review): the second argument (`visit_all_blocks`) is
            // `false`, which per mimalloc's API means individual blocks are
            // not visited (the callback's `block` may be null, reporting
            // whole areas only) — confirm this is intended.
            mi_heap_visit_blocks(
                heap as *const mi_heap_t,
                false,
                Some(visit_handler::<VisitorName, T, Self>),
                self as *mut Self as *mut c_void,
            );
        }
    }
}
194
/// Thin owning wrapper around a raw `*mut mi_heap_t`, used as the `Deref`
/// target type for [`MiMallocHeapGlobal`].
#[derive(Debug, PartialEq, Eq)]
pub struct GlobalHeap {
    // Raw mimalloc heap pointer; public so sibling code can compare handles.
    pub heap: *mut mi_heap_t,
}
200
impl Deref for GlobalHeap {
    type Target = *mut mi_heap_t;

    /// Exposes the raw heap pointer so `GlobalHeap` satisfies the
    /// `Deref<Target = *mut mi_heap_t>` bound used throughout this module.
    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.heap
    }
}
/// A `MiMallocHeap` that stores its raw heap pointer directly via [`GlobalHeap`].
pub type MiMallocHeapGlobal = MiMallocHeap<GlobalHeap>;
211
212#[inline]
213unsafe extern "C" fn visit_handler<
214 VisitorName,
215 T: Deref<Target = *mut mi_heap_t>,
216 Visitor: HeapVisitor<VisitorName, T>,
217>(
218 heap: *const mi_heap_t,
219 area: *const mi_heap_area_t,
220 block: *mut c_void,
221 size: usize,
222 args: *mut c_void,
223) -> bool {
224 let visitor = &mut *(args as *mut Visitor);
225 Visitor::visitor(visitor, &*heap, &*area, block, size)
226}
227
/// Runs `$do` while a freshly created heap of type `$heap` is installed as
/// the global mimalloc heap, then restores the previous global heap.
///
/// Expands to `(result_of_do, temporary_heap)` — the temporary heap is
/// returned so allocations made from it can outlive the swap-back.
#[macro_export]
macro_rules! with_heap {
    ($heap: ty, $do: expr) => {{
        let heap = MiMallocHeap::from(<$heap>::new());
        // `replace_by` swaps the new heap in and returns the previous
        // global heap handle so it can be restored below.
        let global = GlobalMiMalloc::replace_by(&heap);
        debug_assert!(GlobalMiMalloc::get().heap != global.heap);
        let res = { $do };
        // Restore the original global heap before handing back results.
        GlobalMiMalloc::replace_by(&global);
        debug_assert!(GlobalMiMalloc::get().heap == global.heap);
        (res, heap)
    }};
}