// buddy_system_allocator/lib.rs

1#![no_std]
2
3#[cfg(test)]
4#[macro_use]
5extern crate std;
6
7#[cfg(feature = "use_spin")]
8extern crate spin;
9
10#[cfg(feature = "alloc")]
11extern crate alloc;
12
13#[cfg(feature = "use_spin")]
14use core::alloc::GlobalAlloc;
15use core::alloc::Layout;
16use core::cmp::{max, min};
17use core::fmt;
18use core::mem::size_of;
19#[cfg(feature = "use_spin")]
20use core::ops::Deref;
21use core::ptr::NonNull;
22#[cfg(feature = "use_spin")]
23use spin::Mutex;
24
25#[cfg(feature = "alloc")]
26mod frame;
27pub mod linked_list;
28#[cfg(test)]
29mod test;
30
31#[cfg(feature = "alloc")]
32pub use frame::*;
33
/// A heap that uses buddy system with configurable order.
///
/// # Usage
///
/// Create a heap and add a memory region to it:
/// ```
/// use buddy_system_allocator::*;
/// # use core::mem::size_of;
/// // The max order of the buddy system is `ORDER - 1`.
/// // For example, to create a heap with a maximum block size of 2^32 bytes,
/// // you should define the heap with `ORDER = 33`.
/// let mut heap = Heap::<33>::empty();
/// # let space: [usize; 100] = [0; 100];
/// # let begin: usize = space.as_ptr() as usize;
/// # let end: usize = begin + 100 * size_of::<usize>();
/// # let size: usize = 100 * size_of::<usize>();
/// unsafe {
///     heap.init(begin, size);
///     // or
///     heap.add_to_heap(begin, end);
/// }
/// ```
pub struct Heap<const ORDER: usize> {
    // buddy system with max order of `ORDER - 1`:
    // `free_list[k]` is an intrusive list of free blocks of size `2^k` bytes
    free_list: [linked_list::LinkedList; ORDER],

    // statistics
    // total bytes requested by callers (sum of `layout.size()` of live allocations)
    user: usize,
    // total bytes actually handed out, after rounding up to a power of two
    allocated: usize,
    // total bytes of memory added to the heap via `init`/`add_to_heap`
    total: usize,
}
65
impl<const ORDER: usize> Heap<ORDER> {
    /// Create an empty heap
    pub const fn new() -> Self {
        Heap {
            free_list: [linked_list::LinkedList::new(); ORDER],
            user: 0,
            allocated: 0,
            total: 0,
        }
    }

    /// Create an empty heap
    pub const fn empty() -> Self {
        Self::new()
    }

    /// Add a range of memory [start, end) to the heap
    ///
    /// # Safety
    ///
    /// `[start, end)` must be valid, unused memory that outlives the heap and
    /// is not added to it more than once.
    pub unsafe fn add_to_heap(&mut self, mut start: usize, mut end: usize) {
        // avoid unaligned access on some platforms
        // `!size_of::<usize>() + 1` is the two's-complement negation of the
        // word size; since the word size is a power of two this is the mask
        // that clears the low alignment bits. Round `start` up and `end` down
        // to a word boundary.
        start = (start + size_of::<usize>() - 1) & (!size_of::<usize>() + 1);
        end &= !size_of::<usize>() + 1;
        assert!(start <= end);

        let mut total = 0;
        let mut current_start = start;

        // Greedily carve the range into power-of-two blocks, each aligned to
        // its own size (the buddy-system invariant).
        while current_start + size_of::<usize>() <= end {
            // Lowest set bit of `current_start`: the largest power-of-two
            // block size that keeps the block aligned to its own size.
            let lowbit = current_start & (!current_start + 1);
            // Also cap the block so it fits into the remaining range.
            let mut size = min(lowbit, prev_power_of_two(end - current_start));

            // If the order of size is larger than the max order,
            // split it into smaller blocks.
            let mut order = size.trailing_zeros() as usize;
            if order > ORDER - 1 {
                order = ORDER - 1;
                size = 1 << order;
            }
            total += size;

            // The free block's first word is used as the intrusive list link.
            self.free_list[order].push(current_start as *mut usize);
            current_start += size;
        }

        self.total += total;
    }

    /// Add a range of memory [start, start+size) to the heap
    ///
    /// # Safety
    ///
    /// Same contract as [`Heap::add_to_heap`].
    pub unsafe fn init(&mut self, start: usize, size: usize) {
        self.add_to_heap(start, start + size);
    }

    /// Alloc a range of memory from the heap satisfying `layout` requirements
    pub fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> {
        // Round the request up to a power of two that also satisfies the
        // alignment, and is at least one word so a freed block can hold the
        // free-list link.
        let size = max(
            layout.size().next_power_of_two(),
            max(layout.align(), size_of::<usize>()),
        );
        // Size class = log2(size), an index into `free_list`.
        let class = size.trailing_zeros() as usize;
        for i in class..self.free_list.len() {
            // Find the first non-empty size class
            if !self.free_list[i].is_empty() {
                // Split buffers
                // Walk back down from class `i` to `class`, splitting one
                // block per level: each split pushes the upper half (the
                // buddy) and then the lower half onto the next-smaller list.
                for j in (class + 1..i + 1).rev() {
                    if let Some(block) = self.free_list[j].pop() {
                        unsafe {
                            self.free_list[j - 1]
                                .push((block as usize + (1 << (j - 1))) as *mut usize);
                            self.free_list[j - 1].push(block);
                        }
                    } else {
                        // Unreachable in practice: level `j` is either `i`
                        // (checked non-empty) or was refilled by the previous
                        // split iteration.
                        return Err(());
                    }
                }

                let result = NonNull::new(
                    self.free_list[class]
                        .pop()
                        .expect("current block should have free space now")
                        as *mut u8,
                );
                if let Some(result) = result {
                    self.user += layout.size();
                    self.allocated += size;
                    return Ok(result);
                } else {
                    return Err(());
                }
            }
        }
        // No free block of any sufficient class: out of memory.
        Err(())
    }

    /// Dealloc a range of memory from the heap
    ///
    /// # Safety
    ///
    /// `ptr` must have been returned by a previous [`Heap::alloc`] on this
    /// heap with the same `layout`, and must not be freed twice.
    pub unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
        // Recompute the same rounded size/class that `alloc` used.
        let size = max(
            layout.size().next_power_of_two(),
            max(layout.align(), size_of::<usize>()),
        );
        let class = size.trailing_zeros() as usize;

        // Put back into free list
        self.free_list[class].push(ptr.as_ptr() as *mut usize);

        // Merge free buddy lists
        let mut current_ptr = ptr.as_ptr() as usize;
        let mut current_class = class;

        while current_class < self.free_list.len() - 1 {
            // The buddy of a size-2^k block differs from it only in bit k.
            let buddy = current_ptr ^ (1 << current_class);
            let mut flag = false;
            for block in self.free_list[current_class].iter_mut() {
                if block.value() as usize == buddy {
                    // Unlink the buddy so the two halves can be merged.
                    block.pop();
                    flag = true;
                    break;
                }
            }

            // Free buddy found
            if flag {
                // Remove the block inserted above (`push`/`pop` operate on
                // the list head, so this pops the block being merged), then
                // insert the merged, lower-addressed block one class up and
                // keep trying to merge.
                self.free_list[current_class].pop();
                current_ptr = min(current_ptr, buddy);
                current_class += 1;
                self.free_list[current_class].push(current_ptr as *mut usize);
            } else {
                break;
            }
        }

        self.user -= layout.size();
        self.allocated -= size;
    }

    /// Return the number of bytes that user requests
    pub fn stats_alloc_user(&self) -> usize {
        self.user
    }

    /// Return the number of bytes that are actually allocated
    pub fn stats_alloc_actual(&self) -> usize {
        self.allocated
    }

    /// Return the total number of bytes in the heap
    pub fn stats_total_bytes(&self) -> usize {
        self.total
    }
}
214
215impl<const ORDER: usize> Default for Heap<ORDER> {
216    fn default() -> Self {
217        Self::new()
218    }
219}
220
221impl<const ORDER: usize> fmt::Debug for Heap<ORDER> {
222    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
223        fmt.debug_struct("Heap")
224            .field("user", &self.user)
225            .field("allocated", &self.allocated)
226            .field("total", &self.total)
227            .finish()
228    }
229}
230
/// A locked version of `Heap`
///
/// # Usage
///
/// Create a locked heap and add a memory region to it:
/// ```
/// use buddy_system_allocator::*;
/// # use core::mem::size_of;
/// // The max order of the buddy system is `ORDER - 1`.
/// // For example, to create a heap with a maximum block size of 2^32 bytes,
/// // you should define the heap with `ORDER = 33`.
/// let mut heap = LockedHeap::<33>::new();
/// # let space: [usize; 100] = [0; 100];
/// # let begin: usize = space.as_ptr() as usize;
/// # let end: usize = begin + 100 * size_of::<usize>();
/// # let size: usize = 100 * size_of::<usize>();
/// unsafe {
///     heap.lock().init(begin, size);
///     // or
///     heap.lock().add_to_heap(begin, end);
/// }
/// ```
#[cfg(feature = "use_spin")]
#[derive(Default)]
// Newtype over a spin lock; `Deref` below exposes the `Mutex` directly.
pub struct LockedHeap<const ORDER: usize>(Mutex<Heap<ORDER>>);
256
#[cfg(feature = "use_spin")]
impl<const ORDER: usize> LockedHeap<ORDER> {
    /// Creates an empty heap
    pub const fn new() -> Self {
        Self(Mutex::new(Heap::new()))
    }

    /// Creates an empty heap
    pub const fn empty() -> Self {
        Self::new()
    }
}
269
#[cfg(feature = "use_spin")]
impl<const ORDER: usize> Deref for LockedHeap<ORDER> {
    type Target = Mutex<Heap<ORDER>>;

    /// Gives callers direct access to the inner lock, e.g. `heap.lock()`.
    fn deref(&self) -> &Mutex<Heap<ORDER>> {
        &self.0
    }
}
278
#[cfg(feature = "use_spin")]
unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeap<ORDER> {
    /// Allocates under the spin lock; returns a null pointer on failure, as
    /// the `GlobalAlloc` contract requires.
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        match self.0.lock().alloc(layout) {
            Ok(allocation) => allocation.as_ptr(),
            Err(()) => core::ptr::null_mut(),
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        let ptr = NonNull::new_unchecked(ptr);
        self.0.lock().dealloc(ptr, layout)
    }
}
293
/// A locked version of `Heap` with rescue before oom
///
/// # Usage
///
/// Create a locked heap:
/// ```
/// use buddy_system_allocator::*;
/// let heap = LockedHeapWithRescue::new(|heap: &mut Heap<33>, layout: &core::alloc::Layout| {});
/// ```
///
/// Before oom, the allocator will try to call rescue function and try for one more time.
#[cfg(feature = "use_spin")]
pub struct LockedHeapWithRescue<const ORDER: usize> {
    // the lock-protected heap
    inner: Mutex<Heap<ORDER>>,
    // called with the lock held when an allocation fails, so it can add
    // memory to the heap before the single retry
    rescue: fn(&mut Heap<ORDER>, &Layout),
}
310
#[cfg(feature = "use_spin")]
impl<const ORDER: usize> LockedHeapWithRescue<ORDER> {
    /// Creates an empty heap with the given out-of-memory rescue callback.
    pub const fn new(rescue: fn(&mut Heap<ORDER>, &Layout)) -> Self {
        Self {
            inner: Mutex::new(Heap::new()),
            rescue,
        }
    }
}
321
#[cfg(feature = "use_spin")]
impl<const ORDER: usize> Deref for LockedHeapWithRescue<ORDER> {
    type Target = Mutex<Heap<ORDER>>;

    /// Gives callers direct access to the inner lock, e.g. `heap.lock()`.
    fn deref(&self) -> &Mutex<Heap<ORDER>> {
        &self.inner
    }
}
330
#[cfg(feature = "use_spin")]
unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeapWithRescue<ORDER> {
    /// Allocates under the spin lock. If the first attempt fails, the rescue
    /// hook runs (with the lock still held) and the allocation is retried
    /// exactly once; a null pointer is returned if that also fails.
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let mut inner = self.inner.lock();
        if let Ok(allocation) = inner.alloc(layout) {
            return allocation.as_ptr();
        }
        // Out of memory: let the rescue callback add memory, then retry once.
        (self.rescue)(&mut inner, &layout);
        match inner.alloc(layout) {
            Ok(allocation) => allocation.as_ptr(),
            Err(()) => core::ptr::null_mut(),
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        let ptr = NonNull::new_unchecked(ptr);
        self.inner.lock().dealloc(ptr, layout)
    }
}
353
/// Returns the largest power of two less than or equal to `num`.
///
/// # Panics
///
/// `num` must be non-zero: no power of two is `<= 0`, and with `num == 0`
/// the exponent below would underflow (a panic in debug builds, an
/// overflowing shift in release builds). The `debug_assert!` makes the
/// precondition explicit; within this crate every caller passes a value of
/// at least `size_of::<usize>()`.
pub(crate) fn prev_power_of_two(num: usize) -> usize {
    debug_assert!(num > 0, "prev_power_of_two: argument must be non-zero");
    // The highest set bit of `num` is at index `BITS - leading_zeros - 1`;
    // a lone bit at that index is the answer.
    1 << (usize::BITS as usize - num.leading_zeros() as usize - 1)
}