buddy_system_allocator/lib.rs

#![no_std]

#[cfg(test)]
#[macro_use]
extern crate std;

#[cfg(feature = "use_spin")]
extern crate spin;

#[cfg(feature = "alloc")]
extern crate alloc;

#[cfg(feature = "use_spin")]
use core::alloc::GlobalAlloc;
use core::alloc::Layout;
use core::cmp::{max, min};
use core::fmt;
use core::mem::size_of;
#[cfg(feature = "use_spin")]
use core::ops::Deref;
use core::ptr::NonNull;
#[cfg(feature = "use_spin")]
use spin::Mutex;

#[cfg(feature = "alloc")]
mod frame;
pub mod linked_list;
#[cfg(test)]
mod test;

#[cfg(feature = "alloc")]
pub use frame::*;

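/// A heap that allocates memory with the buddy system.
///
/// The heap keeps `ORDER` free lists, one per power-of-two size class, so
/// the largest supported block is `2^(ORDER - 1)` bytes.
///
/// A minimal sketch of standalone use; the order `32` and the 100-word
/// backing region are illustrative assumptions, not requirements:
///
/// ```
/// use buddy_system_allocator::Heap;
/// use core::alloc::Layout;
/// use core::mem::size_of;
///
/// let mut heap = Heap::<32>::empty();
///
/// // Hand the heap a region of memory to manage.
/// let mut space = [0usize; 100];
/// let begin = space.as_mut_ptr() as usize;
/// let size = 100 * size_of::<usize>();
/// unsafe {
///     heap.init(begin, size);
/// }
///
/// // Allocate and free a small block.
/// let layout = Layout::from_size_align(8, 8).unwrap();
/// let ptr = heap.alloc(layout).expect("allocation should succeed");
/// unsafe {
///     heap.dealloc(ptr, layout);
/// }
/// ```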
pub struct Heap<const ORDER: usize> {
    // buddy system with max order of `ORDER - 1`
    free_list: [linked_list::LinkedList; ORDER],

    // statistics: bytes the user requested, bytes actually handed out
    // (after rounding), and bytes managed by the heap in total
    user: usize,
    allocated: usize,
    total: usize,
}

impl<const ORDER: usize> Heap<ORDER> {
    /// Create an empty heap
    pub const fn new() -> Self {
        Heap {
            free_list: [linked_list::LinkedList::new(); ORDER],
            user: 0,
            allocated: 0,
            total: 0,
        }
    }

    /// Create an empty heap
    pub const fn empty() -> Self {
        Self::new()
    }

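    /// Add a range of memory `[start, end)` to the heap.
    ///
    /// The range is trimmed to `usize` alignment, then carved into the
    /// largest blocks that are both a power of two in size and aligned to
    /// their own size; each block is pushed onto the free list of the
    /// matching order. For example (illustrative addresses), the range
    /// `[0x1000, 0x1600)` becomes a 0x400-byte block at 0x1000 followed by
    /// a 0x200-byte block at 0x1400.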
    pub unsafe fn add_to_heap(&mut self, mut start: usize, mut end: usize) {
        // avoid unaligned access on some platforms
        start = (start + size_of::<usize>() - 1) & (!size_of::<usize>() + 1);
        end &= !size_of::<usize>() + 1;
        assert!(start <= end);

        let mut total = 0;
        let mut current_start = start;

        while current_start + size_of::<usize>() <= end {
            // The largest self-aligned block that starts at `current_start`
            // and still fits in the remaining range
            let lowbit = current_start & (!current_start + 1);
            let mut size = min(lowbit, prev_power_of_two(end - current_start));

            // Cap the block at the largest order the heap supports
            let mut order = size.trailing_zeros() as usize;
            if order > ORDER - 1 {
                order = ORDER - 1;
                size = 1 << order;
            }
            total += size;

            self.free_list[order].push(current_start as *mut usize);
            current_start += size;
        }

        self.total += total;
    }

    /// Add a range of memory `[start, start + size)` to the heap
    pub unsafe fn init(&mut self, start: usize, size: usize) {
        self.add_to_heap(start, start + size);
    }

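    /// Allocate a range of memory from the heap satisfying the
    /// requirements of `layout`.
    ///
    /// The request is rounded up to the next power of two that also covers
    /// the alignment and is at least one `usize` wide; a block is then
    /// taken from the smallest non-empty size class that fits, splitting
    /// larger blocks in half on the way down.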
    pub fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> {
        let size = max(
            layout.size().next_power_of_two(),
            max(layout.align(), size_of::<usize>()),
        );
        let class = size.trailing_zeros() as usize;
        for i in class..self.free_list.len() {
            // Find the first non-empty size class
            if !self.free_list[i].is_empty() {
                // Split buffers from class `i` down to the requested
                // class, pushing both halves one class below
                for j in (class + 1..i + 1).rev() {
                    if let Some(block) = self.free_list[j].pop() {
                        unsafe {
                            self.free_list[j - 1]
                                .push((block as usize + (1 << (j - 1))) as *mut usize);
                            self.free_list[j - 1].push(block);
                        }
                    } else {
                        return Err(());
                    }
                }

                let result = NonNull::new(
                    self.free_list[class]
                        .pop()
                        .expect("current block should have free space now")
                        as *mut u8,
                );
                if let Some(result) = result {
                    self.user += layout.size();
                    self.allocated += size;
                    return Ok(result);
                } else {
                    return Err(());
                }
            }
        }
        Err(())
    }

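    /// Deallocate a range of memory previously returned by [`Heap::alloc`]
    /// with the same `layout`.
    ///
    /// The block is pushed back onto its free list, then repeatedly merged
    /// with its buddy (the block whose address differs only in the bit of
    /// the current order, i.e. `ptr ^ size`) for as long as the buddy is
    /// also free, moving the merged block up one size class each time.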
    pub unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
        let size = max(
            layout.size().next_power_of_two(),
            max(layout.align(), size_of::<usize>()),
        );
        let class = size.trailing_zeros() as usize;

        // Put the block back into its free list
        self.free_list[class].push(ptr.as_ptr() as *mut usize);

        // Merge free buddy lists
        let mut current_ptr = ptr.as_ptr() as usize;
        let mut current_class = class;

        while current_class < self.free_list.len() - 1 {
            let buddy = current_ptr ^ (1 << current_class);
            let mut flag = false;
            for block in self.free_list[current_class].iter_mut() {
                if block.value() as usize == buddy {
                    block.pop();
                    flag = true;
                    break;
                }
            }

            // Free buddy found: merge the two blocks into one block of
            // the next class up
            if flag {
                self.free_list[current_class].pop();
                current_ptr = min(current_ptr, buddy);
                current_class += 1;
                self.free_list[current_class].push(current_ptr as *mut usize);
            } else {
                break;
            }
        }

        self.user -= layout.size();
        self.allocated -= size;
    }

    /// Return the number of bytes the user has requested
    pub fn stats_alloc_user(&self) -> usize {
        self.user
    }

    /// Return the number of bytes actually allocated (after rounding)
    pub fn stats_alloc_actual(&self) -> usize {
        self.allocated
    }

    /// Return the total number of bytes managed by the heap
    pub fn stats_total_bytes(&self) -> usize {
        self.total
    }
}

impl<const ORDER: usize> Default for Heap<ORDER> {
    fn default() -> Self {
        Self::new()
    }
}

impl<const ORDER: usize> fmt::Debug for Heap<ORDER> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("Heap")
            .field("user", &self.user)
            .field("allocated", &self.allocated)
            .field("total", &self.total)
            .finish()
    }
}

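/// A spinlock-protected version of [`Heap`].
///
/// Because it implements [`GlobalAlloc`], it can also be registered as the
/// global allocator with `#[global_allocator]`. A minimal sketch of direct
/// use; the order `32` and the 100-word backing region are illustrative
/// assumptions:
///
/// ```
/// use buddy_system_allocator::LockedHeap;
/// use core::mem::size_of;
///
/// let heap = LockedHeap::<32>::new();
/// let mut space = [0usize; 100];
/// unsafe {
///     heap.lock()
///         .init(space.as_mut_ptr() as usize, 100 * size_of::<usize>());
/// }
/// ```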
#[cfg(feature = "use_spin")]
#[derive(Default)]
pub struct LockedHeap<const ORDER: usize>(Mutex<Heap<ORDER>>);

#[cfg(feature = "use_spin")]
impl<const ORDER: usize> LockedHeap<ORDER> {
    /// Creates an empty heap
    pub const fn new() -> Self {
        LockedHeap(Mutex::new(Heap::<ORDER>::new()))
    }

    /// Creates an empty heap
    pub const fn empty() -> Self {
        LockedHeap(Mutex::new(Heap::<ORDER>::new()))
    }
}

#[cfg(feature = "use_spin")]
impl<const ORDER: usize> Deref for LockedHeap<ORDER> {
    type Target = Mutex<Heap<ORDER>>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

#[cfg(feature = "use_spin")]
unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeap<ORDER> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.0
            .lock()
            .alloc(layout)
            .ok()
            .map_or(core::ptr::null_mut(), |allocation| allocation.as_ptr())
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.0.lock().dealloc(NonNull::new_unchecked(ptr), layout)
    }
}

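/// A spinlock-protected [`Heap`] with a rescue hook that runs before the
/// allocator reports out-of-memory.
///
/// On allocation failure, the `rescue` function is called with the locked
/// heap and the failing [`Layout`] (for example, to feed the heap more
/// memory with `add_to_heap`), and the allocation is retried once. A
/// minimal sketch; the order `32` and the empty rescue body are
/// illustrative placeholders:
///
/// ```
/// use buddy_system_allocator::{Heap, LockedHeapWithRescue};
/// use core::alloc::Layout;
///
/// let heap = LockedHeapWithRescue::new(|_heap: &mut Heap<32>, _layout: &Layout| {
///     // e.g. add a fresh memory region to `_heap` here
/// });
/// ```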
#[cfg(feature = "use_spin")]
pub struct LockedHeapWithRescue<const ORDER: usize> {
    inner: Mutex<Heap<ORDER>>,
    rescue: fn(&mut Heap<ORDER>, &Layout),
}

#[cfg(feature = "use_spin")]
impl<const ORDER: usize> LockedHeapWithRescue<ORDER> {
    /// Creates an empty heap with a rescue function
    pub const fn new(rescue: fn(&mut Heap<ORDER>, &Layout)) -> Self {
        LockedHeapWithRescue {
            inner: Mutex::new(Heap::<ORDER>::new()),
            rescue,
        }
    }
}

#[cfg(feature = "use_spin")]
impl<const ORDER: usize> Deref for LockedHeapWithRescue<ORDER> {
    type Target = Mutex<Heap<ORDER>>;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

#[cfg(feature = "use_spin")]
unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeapWithRescue<ORDER> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let mut inner = self.inner.lock();
        match inner.alloc(layout) {
            Ok(allocation) => allocation.as_ptr(),
            Err(_) => {
                // Out of memory: give the rescue function a chance to add
                // more memory to the heap, then retry once
                (self.rescue)(&mut inner, &layout);
                inner
                    .alloc(layout)
                    .ok()
                    .map_or(core::ptr::null_mut(), |allocation| allocation.as_ptr())
            }
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.inner
            .lock()
            .dealloc(NonNull::new_unchecked(ptr), layout)
    }
}

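/// Largest power of two less than or equal to `num`
/// (e.g. `prev_power_of_two(100) == 64`); `num` must be non-zero,
/// or the shift amount underflows.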
pub(crate) fn prev_power_of_two(num: usize) -> usize {
    1 << (usize::BITS as usize - num.leading_zeros() as usize - 1)
}