1#![no_std]
2
3use core::{alloc::*, ffi::c_void, ptr::null_mut, sync::atomic::*};
4
5pub mod raw;
8use raw::*;
9
// Binding to the C library `memcpy`; used by the `realloc` fall-back paths
// below to move user data from the old allocation into a fresh block.
unsafe extern "C"
{
    fn memcpy(dest:*mut c_void,src:*const c_void,cb:usize)->*mut c_void;
}
14
/// Zero-sized handle implementing [`GlobalAlloc`] on top of the process-wide
/// dlmalloc heap (`dlmemalign` / `dlfree` / `dlrealloc_in_place`).
pub struct DLMalloc;
18
19unsafe impl GlobalAlloc for DLMalloc
20{
21 unsafe fn alloc(&self, layout: Layout) -> *mut u8
22 {
23 unsafe
24 {
25 dlmemalign(layout.align(),layout.size()).cast()
26 }
27 }
28
29 unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout)
30 {
31 unsafe
32 {
33 dlfree(ptr.cast())
34 }
35 }
36
37 unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8
38 {
39 let p=unsafe{dlrealloc_in_place(ptr.cast(),new_size)};
41 if p==ptr.cast()
42 {
43 ptr
45 }
46 else
47 {
48 assert!(p.is_null(),"dlrealloc_in_place returned Non-Null pointer on failure!");
50 let p=unsafe{dlmemalign(layout.align(),new_size)};
51 if !p.is_null()
52 {
53 unsafe
54 {
55 memcpy(p,ptr.cast(),layout.size());
57 dlfree(ptr.cast());
58 }
59 }
60 p.cast()
61 }
62 }
63}
64
/// [`GlobalAlloc`] backed by a private dlmalloc mspace that is created
/// lazily on the first allocation.
pub struct MspaceAlloc
{
    // Handle returned by `create_mspace`; null until first initialized.
    mspace:AtomicPtr<c_void>,
    // Set to `true` by the thread that wins the one-time initialization race.
    init:AtomicBool,
    // Initial capacity (bytes) passed to `create_mspace` on first use.
    capacity:usize
}
73
74unsafe impl GlobalAlloc for MspaceAlloc
75{
76 unsafe fn alloc(&self, layout: Layout) -> *mut u8
77 {
78 if self.init.compare_exchange(false,true,Ordering::Acquire,Ordering::Relaxed).is_ok()
80 {
81 self.mspace.store(unsafe{create_mspace(self.capacity,1)},Ordering::Release);
82 }
83 unsafe{mspace_memalign(self.mspace.load(Ordering::Acquire),layout.align(),layout.size()).cast()}
84 }
85
86 unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout)
87 {
88 unsafe{mspace_free(self.mspace.load(Ordering::Acquire),ptr.cast())}
89 }
90
91 unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8
92 {
93 let p=unsafe{mspace_realloc_in_place(self.mspace.load(Ordering::Acquire),ptr.cast(),new_size)};
95 if p==ptr.cast()
96 {
97 ptr
99 }
100 else
101 {
102 assert!(p.is_null(),"mspace_realloc_in_place returned Non-Null pointer on failure!");
104 let p=unsafe{mspace_memalign(self.mspace.load(Ordering::Acquire),layout.size(),new_size)};
105 if !p.is_null()
106 {
107 unsafe
108 {
109 memcpy(p,ptr.cast(),layout.size());
111 mspace_free(self.mspace.load(Ordering::Acquire),ptr.cast());
112 }
113 }
114 p.cast()
115 }
116 }
117}
118
119impl MspaceAlloc
120{
121 pub const fn new(capacity:usize)->Self
124 {
125 Self
126 {
127 mspace:AtomicPtr::new(null_mut()),
129 init:AtomicBool::new(false),
130 capacity
131 }
132 }
133
134 pub unsafe fn destroy(&self)
140 {
141 if self.init.compare_exchange(true,false,Ordering::Acquire,Ordering::Relaxed).is_ok()
142 {
143 unsafe
144 {
145 destroy_mspace(self.mspace.load(Ordering::Acquire));
146 }
147 }
148 }
149}