// buddy_system_allocator/lib.rs
#![no_std]
2
3#[cfg(test)]
4#[macro_use]
5extern crate std;
6
7#[cfg(feature = "use_spin")]
8extern crate spin;
9
10#[cfg(feature = "alloc")]
11extern crate alloc;
12
13#[cfg(feature = "use_spin")]
14use core::alloc::GlobalAlloc;
15use core::alloc::Layout;
16use core::cmp::{max, min};
17use core::fmt;
18use core::mem::size_of;
19#[cfg(feature = "use_spin")]
20use core::ops::Deref;
21use core::ptr::NonNull;
22#[cfg(feature = "use_spin")]
23use spin::Mutex;
24
25#[cfg(feature = "alloc")]
26mod frame;
27pub mod linked_list;
28#[cfg(test)]
29mod test;
30
31#[cfg(feature = "alloc")]
32pub use frame::*;
33
/// A buddy-system heap with a configurable maximum order.
pub struct Heap<const ORDER: usize> {
    // `free_list[i]` holds free blocks of size 2^i bytes; each block is
    // naturally aligned to its own size and stores its free-list link
    // inside itself (hence blocks are at least one `usize` wide).
    free_list: [linked_list::LinkedList; ORDER],

    // Statistics.
    // Sum of `layout.size()` over live allocations (bytes the user asked for).
    user: usize,
    // Bytes actually handed out (each request rounded up to its block size).
    allocated: usize,
    // Total bytes ever added to the heap via `add_to_heap`.
    total: usize,
}
62
impl<const ORDER: usize> Heap<ORDER> {
    /// Create an empty heap.
    pub const fn new() -> Self {
        Heap {
            free_list: [linked_list::LinkedList::new(); ORDER],
            user: 0,
            allocated: 0,
            total: 0,
        }
    }

    /// Create an empty heap (same as [`Heap::new`]).
    pub const fn empty() -> Self {
        Self::new()
    }

    /// Add a range of memory `[start, end)` to the heap.
    ///
    /// The range is aligned inward to `usize` boundaries and carved into
    /// power-of-two blocks, each pushed onto the free list of its order.
    ///
    /// # Safety
    ///
    /// `[start, end)` must be valid, otherwise-unused memory for the whole
    /// lifetime of the heap; the allocator stores free-list links inside
    /// the free blocks themselves.
    pub unsafe fn add_to_heap(&mut self, mut start: usize, mut end: usize) {
        // Align `start` up and `end` down to `size_of::<usize>()`.
        // `!size_of::<usize>() + 1` is the two's-complement negation of the
        // word size, i.e. the word-alignment mask.
        start = (start + size_of::<usize>() - 1) & (!size_of::<usize>() + 1);
        end &= !size_of::<usize>() + 1;
        assert!(start <= end);

        let mut total = 0;
        let mut current_start = start;

        // Greedily carve off the biggest block that is both naturally
        // aligned at `current_start` and still fits before `end`.
        while current_start + size_of::<usize>() <= end {
            // Lowest set bit of `current_start` = largest power-of-two
            // alignment of the current address (`x & -x` idiom).
            let lowbit = current_start & (!current_start + 1);
            let mut size = min(lowbit, prev_power_of_two(end - current_start));

            // Clamp the block to the largest order this heap supports.
            let mut order = size.trailing_zeros() as usize;
            if order > ORDER - 1 {
                order = ORDER - 1;
                size = 1 << order;
            }
            total += size;

            self.free_list[order].push(current_start as *mut usize);
            current_start += size;
        }

        self.total += total;
    }

    /// Add a range of memory `[start, start + size)` to the heap.
    ///
    /// # Safety
    ///
    /// Same requirements as [`Heap::add_to_heap`].
    pub unsafe fn init(&mut self, start: usize, size: usize) {
        self.add_to_heap(start, start + size);
    }

    /// Allocate a block satisfying `layout`, or `Err(())` if no suitable
    /// free block exists.
    pub fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> {
        // Actual block size: requested size rounded up to a power of two,
        // but at least the alignment and at least one machine word (a free
        // block must be able to hold its free-list link).
        let size = max(
            layout.size().next_power_of_two(),
            max(layout.align(), size_of::<usize>()),
        );
        let class = size.trailing_zeros() as usize;
        // Find the smallest non-empty class that can satisfy the request.
        for i in class..self.free_list.len() {
            if !self.free_list[i].is_empty() {
                // Split blocks down from class `i` to `class`: at each step
                // pop one block and push its two buddy halves one class
                // lower (upper half first, so the lower half is on top).
                for j in (class + 1..i + 1).rev() {
                    if let Some(block) = self.free_list[j].pop() {
                        unsafe {
                            self.free_list[j - 1]
                                .push((block as usize + (1 << (j - 1))) as *mut usize);
                            self.free_list[j - 1].push(block);
                        }
                    } else {
                        return Err(());
                    }
                }

                let result = NonNull::new(
                    self.free_list[class]
                        .pop()
                        .expect("current block should have free space now")
                        as *mut u8,
                );
                if let Some(result) = result {
                    self.user += layout.size();
                    self.allocated += size;
                    return Ok(result);
                } else {
                    return Err(());
                }
            }
        }
        Err(())
    }

    /// Deallocate a block previously returned by [`Heap::alloc`] with the
    /// same `layout`.
    pub fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
        // Recompute the block size exactly as `alloc` did.
        let size = max(
            layout.size().next_power_of_two(),
            max(layout.align(), size_of::<usize>()),
        );
        let class = size.trailing_zeros() as usize;

        unsafe {
            // Put the freed block back on its class's free list.
            self.free_list[class].push(ptr.as_ptr() as *mut usize);

            // Merge with free buddies: while the block's buddy is also free,
            // remove both halves and push the combined block one class up.
            let mut current_ptr = ptr.as_ptr() as usize;
            let mut current_class = class;

            while current_class < self.free_list.len() - 1 {
                // The buddy address differs only in bit `current_class`.
                let buddy = current_ptr ^ (1 << current_class);
                let mut flag = false;
                for block in self.free_list[current_class].iter_mut() {
                    if block.value() as usize == buddy {
                        block.pop();
                        flag = true;
                        break;
                    }
                }

                // Free buddy found: merge the two halves.
                if flag {
                    // Pop the block we pushed above (it sits at the head).
                    self.free_list[current_class].pop();
                    current_ptr = min(current_ptr, buddy);
                    current_class += 1;
                    self.free_list[current_class].push(current_ptr as *mut usize);
                } else {
                    break;
                }
            }
        }

        self.user -= layout.size();
        self.allocated -= size;
    }

    /// Number of bytes requested by users (sum of `layout.size()`).
    pub fn stats_alloc_user(&self) -> usize {
        self.user
    }

    /// Number of bytes actually allocated (rounded up to block sizes).
    pub fn stats_alloc_actual(&self) -> usize {
        self.allocated
    }

    /// Total number of bytes added to the heap.
    pub fn stats_total_bytes(&self) -> usize {
        self.total
    }
}
213
214impl<const ORDER: usize> fmt::Debug for Heap<ORDER> {
215 fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
216 fmt.debug_struct("Heap")
217 .field("user", &self.user)
218 .field("allocated", &self.allocated)
219 .field("total", &self.total)
220 .finish()
221 }
222}
223
/// A spin-locked wrapper around [`Heap`], suitable for use as a
/// `#[global_allocator]` (see the `GlobalAlloc` impl below).
#[cfg(feature = "use_spin")]
pub struct LockedHeap<const ORDER: usize>(Mutex<Heap<ORDER>>);
245
#[cfg(feature = "use_spin")]
impl<const ORDER: usize> LockedHeap<ORDER> {
    /// Creates an empty locked heap.
    pub const fn new() -> Self {
        LockedHeap(Mutex::new(Heap::<ORDER>::new()))
    }

    /// Creates an empty locked heap (same as [`LockedHeap::new`]).
    pub const fn empty() -> Self {
        Self::new()
    }
}
258
// Deref exposes the inner `Mutex<Heap>` so callers can lock the heap
// directly (e.g. `heap.lock().init(...)`).
#[cfg(feature = "use_spin")]
impl<const ORDER: usize> Deref for LockedHeap<ORDER> {
    type Target = Mutex<Heap<ORDER>>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
267
#[cfg(feature = "use_spin")]
unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeap<ORDER> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        // Per the `GlobalAlloc` contract, allocation failure is reported
        // to the caller as a null pointer.
        match self.0.lock().alloc(layout) {
            Ok(allocation) => allocation.as_ptr(),
            Err(()) => core::ptr::null_mut(),
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        // `dealloc` is only called with pointers previously returned by
        // `alloc`, which are never null.
        self.0.lock().dealloc(NonNull::new_unchecked(ptr), layout)
    }
}
282
/// A spin-locked [`Heap`] that invokes a user-supplied `rescue` hook when
/// an allocation fails, giving the caller a chance to add more memory
/// before the allocation is retried.
#[cfg(feature = "use_spin")]
pub struct LockedHeapWithRescue<const ORDER: usize> {
    inner: Mutex<Heap<ORDER>>,
    // Called with the (locked) heap and the failing layout on alloc failure.
    rescue: fn(&mut Heap<ORDER>, &Layout),
}
299
#[cfg(feature = "use_spin")]
impl<const ORDER: usize> LockedHeapWithRescue<ORDER> {
    /// Creates an empty heap whose `rescue` hook is invoked whenever an
    /// allocation fails, before the allocation is retried once.
    pub const fn new(rescue: fn(&mut Heap<ORDER>, &Layout)) -> Self {
        LockedHeapWithRescue {
            inner: Mutex::new(Heap::<ORDER>::new()),
            rescue,
        }
    }
}
310
// Deref exposes the inner `Mutex<Heap>` so callers can lock the heap
// directly (e.g. `heap.lock().init(...)`).
#[cfg(feature = "use_spin")]
impl<const ORDER: usize> Deref for LockedHeapWithRescue<ORDER> {
    type Target = Mutex<Heap<ORDER>>;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
319
#[cfg(feature = "use_spin")]
unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeapWithRescue<ORDER> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let mut inner = self.inner.lock();
        // Fast path: the heap can satisfy the request as-is.
        if let Ok(allocation) = inner.alloc(layout) {
            return allocation.as_ptr();
        }
        // Slow path: let the rescue hook top up the heap, then retry once.
        // Failure on the retry is reported as a null pointer, per the
        // `GlobalAlloc` contract.
        (self.rescue)(&mut inner, &layout);
        match inner.alloc(layout) {
            Ok(allocation) => allocation.as_ptr(),
            Err(()) => core::ptr::null_mut(),
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        // Pointers handed to `dealloc` came from `alloc` and are non-null.
        self.inner
            .lock()
            .dealloc(NonNull::new_unchecked(ptr), layout)
    }
}
342
/// Returns the largest power of two less than or equal to `num`.
///
/// `num` must be non-zero: for `num == 0`, `leading_zeros()` equals
/// `usize::BITS` and the shift amount underflows (panic in debug builds,
/// meaningless value in release). Callers (`add_to_heap`) only pass
/// values `>= size_of::<usize>()`, so the precondition is asserted rather
/// than handled.
pub(crate) fn prev_power_of_two(num: usize) -> usize {
    debug_assert!(num > 0, "prev_power_of_two called with 0");
    1 << (usize::BITS as usize - num.leading_zeros() as usize - 1)
}