buddy_system_allocator/
lib.rs

#![no_std]

#[cfg(test)]
#[macro_use]
extern crate std;

#[cfg(feature = "use_spin")]
extern crate spin;

#[cfg(feature = "alloc")]
extern crate alloc;

#[cfg(feature = "use_spin")]
use core::alloc::GlobalAlloc;
use core::alloc::Layout;
use core::cmp::{max, min};
use core::fmt;
use core::mem::size_of;
#[cfg(feature = "use_spin")]
use core::ops::Deref;
use core::ptr::NonNull;
#[cfg(feature = "use_spin")]
use spin::Mutex;

#[cfg(feature = "alloc")]
mod frame;
pub mod linked_list;
#[cfg(test)]
mod test;

#[cfg(feature = "alloc")]
pub use frame::*;

/// A heap that uses the buddy system with a configurable order.
///
/// # Usage
///
/// Create a heap and add a memory region to it:
/// ```
/// use buddy_system_allocator::*;
/// # use core::mem::size_of;
/// let mut heap = Heap::<32>::empty();
/// # let space: [usize; 100] = [0; 100];
/// # let begin: usize = space.as_ptr() as usize;
/// # let end: usize = begin + 100 * size_of::<usize>();
/// # let size: usize = 100 * size_of::<usize>();
/// unsafe {
///     heap.init(begin, size);
///     // or
///     heap.add_to_heap(begin, end);
/// }
/// ```
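///
/// Once the heap has memory, blocks can be allocated and freed through
/// [`Heap::alloc`] and [`Heap::dealloc`] (a minimal sketch reusing the setup
/// above; any `Layout` works, and sizes are rounded up to a power of two):
/// ```
/// # use buddy_system_allocator::*;
/// # use core::alloc::Layout;
/// # use core::mem::size_of;
/// # let mut heap = Heap::<32>::empty();
/// # let space: [usize; 100] = [0; 100];
/// # unsafe { heap.init(space.as_ptr() as usize, 100 * size_of::<usize>()) };
/// let layout = Layout::from_size_align(16, 8).unwrap();
/// let ptr = heap.alloc(layout).expect("the heap has free space");
/// heap.dealloc(ptr, layout);
/// ```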
pub struct Heap<const ORDER: usize> {
    // buddy system with max order of `ORDER`
    free_list: [linked_list::LinkedList; ORDER],

    // statistics
    user: usize,
    allocated: usize,
    total: usize,
}

impl<const ORDER: usize> Heap<ORDER> {
    /// Create an empty heap
    pub const fn new() -> Self {
        Heap {
            free_list: [linked_list::LinkedList::new(); ORDER],
            user: 0,
            allocated: 0,
            total: 0,
        }
    }

    /// Create an empty heap
    pub const fn empty() -> Self {
        Self::new()
    }

    /// Add a range of memory [start, end) to the heap
    pub unsafe fn add_to_heap(&mut self, mut start: usize, mut end: usize) {
        // Avoid unaligned access on some platforms: round `start` up and
        // `end` down to a `usize` boundary. For the power-of-two word size,
        // `!size_of::<usize>() + 1` is the mask that clears the low bits.
        start = (start + size_of::<usize>() - 1) & (!size_of::<usize>() + 1);
        end &= !size_of::<usize>() + 1;
        assert!(start <= end);

        let mut total = 0;
        let mut current_start = start;

        while current_start + size_of::<usize>() <= end {
            // `lowbit` is the lowest set bit of `current_start`, i.e. the
            // largest power-of-two block that keeps `current_start` aligned;
            // the block must also fit in the remaining range.
            let lowbit = current_start & (!current_start + 1);
            let mut size = min(lowbit, prev_power_of_two(end - current_start));

            // If the order of size is larger than the max order,
            // split it into smaller blocks.
            let mut order = size.trailing_zeros() as usize;
            if order > ORDER - 1 {
                order = ORDER - 1;
                size = 1 << order;
            }
            total += size;

            self.free_list[order].push(current_start as *mut usize);
            current_start += size;
        }

        self.total += total;
    }

    /// Add a range of memory [start, start + size) to the heap
    pub unsafe fn init(&mut self, start: usize, size: usize) {
        self.add_to_heap(start, start + size);
    }

    /// Alloc a range of memory from the heap satisfying `layout` requirements
    pub fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> {
        // Round the size up to a power of two that is at least the alignment
        // and at least one machine word (each free block must be able to hold
        // the free list's next pointer).
        let size = max(
            layout.size().next_power_of_two(),
            max(layout.align(), size_of::<usize>()),
        );
        let class = size.trailing_zeros() as usize;
        for i in class..self.free_list.len() {
            // Find the first non-empty size class
            if !self.free_list[i].is_empty() {
                // Split buffers: walk back down from class `i`, splitting one
                // block of each class into two buddies of the class below.
                for j in (class + 1..i + 1).rev() {
                    if let Some(block) = self.free_list[j].pop() {
                        unsafe {
                            self.free_list[j - 1]
                                .push((block as usize + (1 << (j - 1))) as *mut usize);
                            self.free_list[j - 1].push(block);
                        }
                    } else {
                        return Err(());
                    }
                }

                let result = NonNull::new(
                    self.free_list[class]
                        .pop()
                        .expect("current block should have free space now")
                        as *mut u8,
                );
                if let Some(result) = result {
                    self.user += layout.size();
                    self.allocated += size;
                    return Ok(result);
                } else {
                    return Err(());
                }
            }
        }
        Err(())
    }

    /// Dealloc a range of memory from the heap
    pub fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
        let size = max(
            layout.size().next_power_of_two(),
            max(layout.align(), size_of::<usize>()),
        );
        let class = size.trailing_zeros() as usize;

        unsafe {
            // Put back into free list
            self.free_list[class].push(ptr.as_ptr() as *mut usize);

            // Merge free buddy lists
            let mut current_ptr = ptr.as_ptr() as usize;
            let mut current_class = class;

            while current_class < self.free_list.len() - 1 {
                // A block's buddy differs from it only in the bit selecting
                // which half of the parent block it occupies.
                let buddy = current_ptr ^ (1 << current_class);
                let mut flag = false;
                for block in self.free_list[current_class].iter_mut() {
                    if block.value() as usize == buddy {
                        block.pop();
                        flag = true;
                        break;
                    }
                }

                // Free buddy found: remove the block we just pushed (it is at
                // the head of the list) and push the merged parent one class up.
                if flag {
                    self.free_list[current_class].pop();
                    current_ptr = min(current_ptr, buddy);
                    current_class += 1;
                    self.free_list[current_class].push(current_ptr as *mut usize);
                } else {
                    break;
                }
            }
        }

        self.user -= layout.size();
        self.allocated -= size;
    }

    /// Return the number of bytes that the user has requested
    pub fn stats_alloc_user(&self) -> usize {
        self.user
    }

    /// Return the number of bytes that are actually allocated
    pub fn stats_alloc_actual(&self) -> usize {
        self.allocated
    }

    /// Return the total number of bytes in the heap
    pub fn stats_total_bytes(&self) -> usize {
        self.total
    }
}

impl<const ORDER: usize> fmt::Debug for Heap<ORDER> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("Heap")
            .field("user", &self.user)
            .field("allocated", &self.allocated)
            .field("total", &self.total)
            .finish()
    }
}

/// A locked version of `Heap`
///
/// # Usage
///
/// Create a locked heap and add a memory region to it:
/// ```
/// use buddy_system_allocator::*;
/// # use core::mem::size_of;
/// let mut heap = LockedHeap::<32>::new();
/// # let space: [usize; 100] = [0; 100];
/// # let begin: usize = space.as_ptr() as usize;
/// # let end: usize = begin + 100 * size_of::<usize>();
/// # let size: usize = 100 * size_of::<usize>();
/// unsafe {
///     heap.lock().init(begin, size);
///     // or
///     heap.lock().add_to_heap(begin, end);
/// }
/// ```
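///
/// `LockedHeap` implements [`GlobalAlloc`], so it can also be installed as
/// the global allocator (a minimal sketch; real code must feed the heap
/// actual memory, e.g. via `lock().init(..)`, before the first allocation):
/// ```no_run
/// use buddy_system_allocator::LockedHeap;
///
/// #[global_allocator]
/// static HEAP_ALLOCATOR: LockedHeap<32> = LockedHeap::<32>::new();
/// # fn main() {}
/// ```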
#[cfg(feature = "use_spin")]
pub struct LockedHeap<const ORDER: usize>(Mutex<Heap<ORDER>>);

#[cfg(feature = "use_spin")]
impl<const ORDER: usize> LockedHeap<ORDER> {
    /// Creates an empty heap
    pub const fn new() -> Self {
        LockedHeap(Mutex::new(Heap::<ORDER>::new()))
    }

    /// Creates an empty heap
    pub const fn empty() -> Self {
        LockedHeap(Mutex::new(Heap::<ORDER>::new()))
    }
}

#[cfg(feature = "use_spin")]
impl<const ORDER: usize> Deref for LockedHeap<ORDER> {
    type Target = Mutex<Heap<ORDER>>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

#[cfg(feature = "use_spin")]
unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeap<ORDER> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.0
            .lock()
            .alloc(layout)
            .ok()
            .map_or(core::ptr::null_mut(), |allocation| allocation.as_ptr())
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.0.lock().dealloc(NonNull::new_unchecked(ptr), layout)
    }
}

/// A locked version of `Heap` with a rescue function that is called before OOM
///
/// # Usage
///
/// Create a locked heap:
/// ```
/// use buddy_system_allocator::*;
/// let heap = LockedHeapWithRescue::new(|heap: &mut Heap<32>, layout: &core::alloc::Layout| {});
/// ```
///
/// On OOM, the allocator calls the rescue function once and then retries the
/// allocation a single time.
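///
/// A rescue function typically adds more memory to the heap (a minimal
/// sketch; here the region comes from a leaked `Vec` purely for illustration,
/// while real code would obtain it from the platform):
/// ```
/// use buddy_system_allocator::*;
/// # use core::alloc::Layout;
/// # use core::mem::size_of;
/// fn rescue(heap: &mut Heap<32>, _layout: &Layout) {
///     // Leaking keeps the region valid for the heap's whole lifetime.
///     let space: &'static mut [usize] = Vec::leak(vec![0; 128]);
///     let begin = space.as_ptr() as usize;
///     unsafe { heap.add_to_heap(begin, begin + 128 * size_of::<usize>()) };
/// }
/// let heap = LockedHeapWithRescue::new(rescue);
/// ```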
#[cfg(feature = "use_spin")]
pub struct LockedHeapWithRescue<const ORDER: usize> {
    inner: Mutex<Heap<ORDER>>,
    rescue: fn(&mut Heap<ORDER>, &Layout),
}

#[cfg(feature = "use_spin")]
impl<const ORDER: usize> LockedHeapWithRescue<ORDER> {
    /// Creates an empty heap
    pub const fn new(rescue: fn(&mut Heap<ORDER>, &Layout)) -> Self {
        LockedHeapWithRescue {
            inner: Mutex::new(Heap::<ORDER>::new()),
            rescue,
        }
    }
}

#[cfg(feature = "use_spin")]
impl<const ORDER: usize> Deref for LockedHeapWithRescue<ORDER> {
    type Target = Mutex<Heap<ORDER>>;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

#[cfg(feature = "use_spin")]
unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeapWithRescue<ORDER> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let mut inner = self.inner.lock();
        match inner.alloc(layout) {
            Ok(allocation) => allocation.as_ptr(),
            Err(_) => {
                // Out of memory: let the rescue function add more memory,
                // then retry the allocation once.
                (self.rescue)(&mut inner, &layout);
                inner
                    .alloc(layout)
                    .ok()
                    .map_or(core::ptr::null_mut(), |allocation| allocation.as_ptr())
            }
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        self.inner
            .lock()
            .dealloc(NonNull::new_unchecked(ptr), layout)
    }
}

/// Returns the largest power of two less than or equal to `num`.
///
/// `num` must be non-zero; the expression underflows for zero.
pub(crate) fn prev_power_of_two(num: usize) -> usize {
    1 << (usize::BITS as usize - num.leading_zeros() as usize - 1)
}