#![no_std]
#![deny(warnings, unstable_features, missing_docs)]
mod avl;
mod bitmap;
mod linked_list;
pub use avl::AvlBuddy;
pub use bitmap::UsizeBuddy;
pub use linked_list::LinkedListBuddy;
use core::{alloc::Layout, fmt, num::NonZeroUsize, ptr::NonNull};
/// Common behavior of one layer ("line") of the buddy system.
///
/// A line tracks free blocks of a single order by their index
/// (`address >> order`, relative to the base passed to [`BuddyLine::init`]).
pub trait BuddyLine {
    /// The empty value a line starts from before [`BuddyLine::init`] is called.
    const EMPTY: Self;
    /// Bytes of metadata the collection stores intrusively inside each free
    /// block; `0` (the default) when the collection keeps no in-block state.
    const INTRUSIVE_META_SIZE: usize = 0;
    /// Initializes the line for blocks of `_order`, with `_base` as the index
    /// of the managed region's base address at that order.
    #[inline]
    fn init(&mut self, _order: usize, _base: usize) {}
    /// Removes the block `_idx` from the line, returning whether it was free.
    ///
    /// The default implementation panics; implementors that support targeted
    /// removal must override it.
    #[inline]
    fn take(&mut self, _idx: usize) -> bool {
        unimplemented!()
    }
}
/// Collection for the top layer of the allocator, whose blocks are the
/// largest managed order and therefore have no buddy to merge with.
pub trait OligarchyCollection: BuddyLine {
    /// Takes `count` contiguous free blocks whose first index is aligned to
    /// `1 << align_order`, returning that first index, or `None` when no such
    /// run is available.
    fn take_any(&mut self, align_order: usize, count: usize) -> Option<usize>;
    /// Returns block `idx` to the collection.
    fn put(&mut self, idx: usize);
}
/// Collection for an intermediate buddy layer, whose blocks pair up with a
/// sibling and can merge into a block of the next order.
pub trait BuddyCollection: BuddyLine {
    /// Takes any free block aligned to `1 << align_order`, returning its
    /// index, or `None` when the layer has no suitable block.
    fn take_any(&mut self, align_order: usize) -> Option<usize>;
    /// Puts block `idx` into the layer.
    ///
    /// If the block's buddy was already free, both are removed and the merged
    /// parent's index (at the next order up) is returned; otherwise `None`.
    fn put(&mut self, idx: usize) -> Option<usize>;
}
/// Error returned when an allocation request cannot be satisfied.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
#[repr(transparent)]
pub struct BuddyError;
/// A buddy allocator with `N` buddy layers topped by one oligarchy layer.
///
/// Layer `i` manages blocks of `1 << (min_order + i)` bytes; the oligarchy
/// manages the largest blocks (`1 << (min_order + N)` bytes), which cannot
/// merge any further.
pub struct BuddyAllocator<const N: usize, O: OligarchyCollection, B: BuddyCollection> {
    /// Top layer holding the largest, unmergeable blocks.
    oligarchy: O,
    /// One collection per buddy layer, smallest order first.
    buddies: [B; N],
    /// Order of the smallest block handed out (block size `1 << min_order`).
    min_order: usize,
    /// Bytes currently free for allocation.
    free: usize,
    /// Total bytes currently owned by the allocator.
    capacity: usize,
}
impl<const N: usize, O: OligarchyCollection, B: BuddyCollection> BuddyAllocator<N, O, B> {
    /// Number of buddy layers; the oligarchy layer sits just above them.
    const MAX_LAYER: usize = N;
    /// Smallest order whose block size can hold `O`'s intrusive metadata.
    const O_MIN_ORDER: usize = O::INTRUSIVE_META_SIZE.next_power_of_two().trailing_zeros() as _;
    /// Smallest order whose block size can hold `B`'s intrusive metadata.
    const B_MIN_ORDER: usize = B::INTRUSIVE_META_SIZE.next_power_of_two().trailing_zeros() as _;
    /// Creates an empty allocator.
    ///
    /// Call [`Self::init`] to configure it before transferring memory in.
    #[inline]
    pub const fn new() -> Self {
        Self {
            oligarchy: O::EMPTY,
            buddies: [B::EMPTY; N],
            min_order: 0,
            free: 0,
            capacity: 0,
        }
    }
}
impl<const N: usize, O: OligarchyCollection, B: BuddyCollection> Default
for BuddyAllocator<N, O, B>
{
fn default() -> Self {
Self::new()
}
}
impl<const N: usize, O: OligarchyCollection, B: BuddyCollection> BuddyAllocator<N, O, B> {
    /// Total number of bytes currently owned by the allocator
    /// (grown by [`Self::transfer`], shrunk by [`Self::snatch`]).
    #[inline]
    pub fn capacity(&self) -> usize {
        self.capacity
    }
    /// Number of bytes currently free for allocation.
    #[inline]
    pub fn free(&self) -> usize {
        self.free
    }
    /// Order handled by the oligarchy layer: `min_order + N`.
    #[inline]
    const fn max_order(&self) -> usize {
        self.min_order + Self::MAX_LAYER
    }
    /// Initializes the allocator with the smallest managed block order
    /// (blocks of `1 << min_order` bytes) and the base address used to turn
    /// pointers into per-layer block indices.
    ///
    /// # Panics
    ///
    /// Panics if memory was already transferred in, or if a collection's
    /// intrusive metadata does not fit in its layer's block size.
    #[inline]
    pub fn init<T>(&mut self, min_order: usize, base: NonNull<T>) {
        assert_eq!(
            0, self.capacity,
            "init is not allowed after any transfering"
        );
        self.min_order = min_order;
        let max_order = self.max_order();
        // Every layer's blocks must be large enough to hold the collection's
        // intrusive metadata.
        assert!(Self::O_MIN_ORDER <= max_order);
        assert!(Self::B_MIN_ORDER <= min_order);
        let base = base.as_ptr() as usize;
        // Layer `i` manages order `min_order + i`; its index base is the base
        // address shifted down by that order.
        self.buddies.iter_mut().enumerate().for_each(|(i, c)| {
            let o = self.min_order + i;
            c.init(o, base >> o)
        });
        self.oligarchy.init(max_order, base >> max_order);
    }
    /// Transfers ownership of `size` bytes at `ptr` into the allocator,
    /// growing both capacity and free space.
    ///
    /// # Safety
    ///
    /// `ptr..ptr + size` must be valid, otherwise-unused memory that the
    /// allocator may write intrusive metadata into; `ptr` and `size` must be
    /// aligned to the minimum order.
    #[inline]
    pub unsafe fn transfer<T>(&mut self, ptr: NonNull<T>, size: usize) {
        self.capacity += size;
        self.deallocate(ptr, size)
    }
    /// Allocates a block and permanently removes it from the allocator's
    /// capacity, i.e. it is not expected back via [`Self::deallocate`].
    ///
    /// # Errors
    ///
    /// Returns [`BuddyError`] when no suitable block is available.
    #[inline]
    pub fn snatch<T>(
        &mut self,
        align_order: usize,
        size: NonZeroUsize,
    ) -> Result<(NonNull<T>, usize), BuddyError> {
        let ans = self.allocate(align_order, size);
        if let Ok((_, size)) = ans {
            self.capacity -= size;
        }
        ans
    }
    /// Allocates memory sized and aligned for a value of type `T`.
    ///
    /// # Errors
    ///
    /// Returns [`BuddyError`] when no suitable block is available.
    #[inline]
    pub fn allocate_type<T>(&mut self) -> Result<(NonNull<T>, usize), BuddyError> {
        self.allocate_layout(Layout::new::<T>())
    }
    /// Allocates memory for `layout`.
    ///
    /// A zero-sized layout yields a non-null pointer and size 0 without
    /// touching the free lists.
    ///
    /// # Errors
    ///
    /// Returns [`BuddyError`] when no suitable block is available.
    #[inline]
    pub fn allocate_layout<T>(
        &mut self,
        layout: Layout,
    ) -> Result<(NonNull<T>, usize), BuddyError> {
        #[inline]
        const fn allocated<T, U>(ptr: *mut T, size: usize) -> (NonNull<U>, usize) {
            (unsafe { NonNull::new_unchecked(ptr) }.cast(), size)
        }
        if let Some(size) = NonZeroUsize::new(layout.size()) {
            self.allocate(layout.align().trailing_zeros() as _, size)
        } else {
            // Zero-sized request: `self` serves as a convenient non-null,
            // well-aligned address that is never dereferenced.
            Ok(allocated(self, 0))
        }
    }
    /// Allocates at least `size` bytes aligned to `1 << align_order`,
    /// returning the pointer and the number of bytes actually handed out
    /// (`size` rounded up to a multiple of the minimum block size).
    ///
    /// # Errors
    ///
    /// Returns [`BuddyError`] when no suitable block is available.
    pub fn allocate<T>(
        &mut self,
        align_order: usize,
        size: NonZeroUsize,
    ) -> Result<(NonNull<T>, usize), BuddyError> {
        let max_order = self.max_order();
        #[inline]
        const fn allocated<T, U>(ptr: *mut T, size: usize) -> (NonNull<U>, usize) {
            (unsafe { NonNull::new_unchecked(ptr) }.cast(), size)
        }
        // Round the request up to a multiple of the minimum block size.
        let page_mask = (1usize << self.min_order) - 1;
        let ans_size = (size.get() + page_mask) & !page_mask;
        // Order of the smallest power-of-two block covering the request.
        let size_order = nonzero(ans_size.next_power_of_two()).trailing_zeros() as usize;
        let (ptr, alloc_size) = if size_order >= max_order {
            // Too large for the buddy layers: take contiguous max-order
            // blocks from the oligarchy. `count` is ans_size / 2^max_order,
            // rounded up.
            let count = ((ans_size >> (max_order - 1)) + 1) >> 1;
            let align_offset = align_order.saturating_sub(max_order);
            match self.oligarchy.take_any(align_offset, count) {
                Some(idx) => (idx << max_order, count << max_order),
                None => Err(BuddyError)?,
            }
        } else {
            // Search upward from the layer matching the request until a free
            // block is found, falling back to the oligarchy at the top.
            let layer0 = size_order - self.min_order;
            let mut layer = layer0;
            let mut idx = loop {
                if layer == Self::MAX_LAYER {
                    let align_offset = align_order.saturating_sub(max_order);
                    match self.oligarchy.take_any(align_offset, 1) {
                        Some(idx) => break idx,
                        None => Err(BuddyError)?,
                    }
                }
                let align_offset = align_order.saturating_sub(self.min_order + layer);
                match self.buddies[layer].take_any(align_offset) {
                    Some(idx) => break idx,
                    None => layer += 1,
                }
            };
            // Split the found block back down to `layer0`, giving the
            // right-hand buddy of each split back to its layer. The closure
            // intentionally has side effects, so this is `assert!` (kept in
            // release builds), not `debug_assert!`; each `put` must report
            // "no merge" because the other half is being kept.
            assert!(self.buddies[layer0..layer].iter_mut().rev().all(|b| {
                idx <<= 1;
                b.put(idx + 1).is_none()
            }));
            (idx << size_order, 1 << size_order)
        };
        self.free -= alloc_size;
        // Return any tail beyond the rounded request to the free lists.
        if alloc_size > ans_size {
            self.deallocate(
                unsafe { NonNull::new_unchecked((ptr + ans_size) as *mut u8) },
                alloc_size - ans_size,
            );
        }
        Ok(allocated(ptr as *mut (), ans_size))
    }
    /// Deallocates memory previously allocated with the same `layout`.
    ///
    /// # Safety
    ///
    /// `ptr` must have been returned by this allocator for `layout` and must
    /// not be used afterwards.
    pub unsafe fn deallocate_layout<T>(&mut self, ptr: NonNull<T>, layout: Layout) {
        debug_assert!((1 << (ptr.as_ptr() as usize).trailing_zeros()) >= layout.align());
        // Round the size up to the minimum order, mirroring `allocate`.
        let mask = (1 << self.min_order) - 1;
        self.deallocate(ptr, (layout.size() + mask) & !mask)
    }
    /// Returns `size` bytes at `ptr` to the free lists, merging buddies
    /// upward whenever a layer reports the sibling was also free.
    ///
    /// # Panics
    ///
    /// Panics if the resulting free total would exceed the capacity.
    pub fn deallocate<T>(&mut self, ptr: NonNull<T>, size: usize) {
        debug_assert!(
            size.trailing_zeros() as usize >= self.min_order,
            "size must align to minium order"
        );
        let max_order = self.max_order();
        let mut ptr = ptr.as_ptr() as usize;
        let end = ptr + size;
        // Carve `[ptr, end)` into the largest blocks allowed by both the
        // remaining length and the current pointer's alignment.
        while ptr < end {
            let len = nonzero(end - ptr);
            // Largest order `ptr` is aligned to, and largest order fitting in
            // `len` (floor(log2(len))); the usable order is the smaller one.
            let order_ptr = nonzero(ptr).trailing_zeros();
            let order_len = usize::BITS - len.leading_zeros() - 1;
            let order = order_ptr.min(order_len) as usize;
            if order >= max_order {
                // Hand all whole max-order blocks straight to the oligarchy.
                let idx = ptr >> max_order;
                let count = len.get() >> max_order;
                ptr += count << max_order;
                (idx..).take(count).for_each(|idx| self.oligarchy.put(idx));
            } else {
                // Insert into the matching buddy layer, then keep merging
                // upward while `put` returns the merged parent's index.
                let mut idx = ptr >> order;
                ptr += 1 << order;
                for layer in (order - self.min_order).. {
                    if layer == Self::MAX_LAYER {
                        self.oligarchy.put(idx);
                        break;
                    }
                    match self.buddies[layer].put(idx) {
                        Some(parent) => idx = parent,
                        None => break,
                    }
                }
            }
        }
        self.free += size;
        assert!(
            self.free <= self.capacity,
            "something wrong with the free bytes, it is larger than the capacity: {} > {}",
            self.free,
            self.capacity
        );
    }
}
impl<const N: usize, O: OligarchyCollection + fmt::Debug, B: BuddyCollection + fmt::Debug>
    fmt::Debug for BuddyAllocator<N, O, B>
{
    /// Dumps the allocator's own address, then one line per buddy layer
    /// (prefixed with its order) and finally the oligarchy layer.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let addr = self as *const _ as usize;
        writeln!(f, "BuddyAllocator@{:#018x}", addr)?;
        writeln!(f, "---------------------------------")?;
        let mut order = self.min_order;
        for line in &self.buddies {
            writeln!(f, "{:>2}> {line:?}", order)?;
            order += 1;
        }
        // After the loop, `order` equals `self.max_order()`.
        writeln!(f, "{:>2}> {:?}", order, self.oligarchy)
    }
}
/// Wraps `val` in a [`NonZeroUsize`].
///
/// All call sites pass values that are provably non-zero, but a checked
/// conversion costs one branch and turns a broken invariant into a defined
/// panic instead of the undefined behavior `new_unchecked` would cause.
///
/// # Panics
///
/// Panics if `val` is zero.
#[inline]
const fn nonzero(val: usize) -> NonZeroUsize {
    // `match` (not `Option::expect`) keeps this usable in `const fn` on older
    // stable toolchains; const panics are stable since Rust 1.57.
    match NonZeroUsize::new(val) {
        Some(nz) => nz,
        None => panic!("value must be non-zero"),
    }
}
/// A block order (log2 of the block size), converting between block indices
/// and raw pointers.
struct Order(usize);
impl Order {
    /// Wraps a raw order value.
    #[inline]
    const fn new(order: usize) -> Self {
        Order(order)
    }
    /// Converts a block index to its pointer (`idx * 2^order`), or `None`
    /// when the resulting address is null (i.e. `idx` shifts to zero).
    #[inline]
    fn idx_to_ptr<T>(&self, idx: usize) -> Option<NonNull<T>> {
        let addr = idx << self.0;
        NonNull::new(addr as *mut T)
    }
    /// Converts a pointer back to its block index (`addr / 2^order`).
    #[inline]
    fn ptr_to_idx<T>(&self, ptr: NonNull<T>) -> usize {
        let addr = ptr.as_ptr() as usize;
        addr >> self.0
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{LinkedListBuddy, UsizeBuddy};
    // Allocator flavor used by all tests: usize-bitmap oligarchy over
    // linked-list buddy layers.
    type TestAllocator<const N: usize> = BuddyAllocator<N, UsizeBuddy, LinkedListBuddy>;
    // 4 KiB, page-aligned unit of backing storage.
    #[repr(C, align(4096))]
    #[derive(Clone, Copy)]
    struct TestPage([u8; 4096]);
    // NOTE(review): this static is shared by every test, and the default test
    // harness runs tests in parallel; each test's allocator writes intrusive
    // free-list metadata into the same memory, which can race and corrupt
    // sibling tests. Consider a per-test buffer (as in
    // `test_transfer_low_address_crossing_boundary`) — TODO confirm.
    static mut TEST_MEMORY: [TestPage; 16] = [TestPage([0; 4096]); 16];
    // A freshly constructed allocator owns nothing and has order 0.
    #[test]
    fn test_allocator_new() {
        let allocator: TestAllocator<4> = BuddyAllocator::new();
        assert_eq!(allocator.capacity(), 0);
        assert_eq!(allocator.free(), 0);
        assert_eq!(allocator.min_order, 0);
    }
    // `Default` must match `new`.
    #[test]
    fn test_allocator_default() {
        let allocator: TestAllocator<4> = BuddyAllocator::default();
        assert_eq!(allocator.capacity(), 0);
        assert_eq!(allocator.free(), 0);
    }
    // `init` sets the order but transfers no memory by itself.
    #[test]
    fn test_allocator_init() {
        let mut allocator: TestAllocator<4> = BuddyAllocator::new();
        let ptr = NonNull::new(core::ptr::addr_of_mut!(TEST_MEMORY).cast::<u8>()).unwrap();
        allocator.init(12, ptr);
        assert_eq!(allocator.min_order, 12);
        assert_eq!(allocator.capacity(), 0);
        assert_eq!(allocator.free(), 0);
    }
    // Transferred memory shows up as both capacity and free space.
    #[test]
    fn test_allocator_transfer() {
        let mut allocator: TestAllocator<4> = BuddyAllocator::new();
        let ptr = NonNull::new(core::ptr::addr_of_mut!(TEST_MEMORY).cast::<u8>()).unwrap();
        // NOTE(review): forming `&*addr_of!(TEST_MEMORY)` while other tests
        // may be mutating the same static is racy — see the note above.
        let len = core::mem::size_of_val(unsafe { &*core::ptr::addr_of!(TEST_MEMORY) });
        allocator.init(12, ptr);
        unsafe {
            allocator.transfer(ptr, len);
        }
        assert_eq!(allocator.capacity(), len);
        assert_eq!(allocator.free(), len);
    }
    // Allocate one page, then deallocating restores the free count exactly.
    #[test]
    fn test_allocator_allocate_deallocate_basic() {
        let mut allocator: TestAllocator<8> = BuddyAllocator::new();
        let ptr = NonNull::new(core::ptr::addr_of_mut!(TEST_MEMORY).cast::<u8>()).unwrap();
        let len = core::mem::size_of_val(unsafe { &*core::ptr::addr_of!(TEST_MEMORY) });
        allocator.init(12, ptr);
        unsafe {
            allocator.transfer(ptr, len);
        }
        let initial_free = allocator.free();
        let size = NonZeroUsize::new(4096).unwrap();
        let (alloc_ptr, alloc_size) = allocator.allocate::<u8>(0, size).unwrap();
        assert!(alloc_size >= 4096);
        assert!(allocator.free() < initial_free);
        allocator.deallocate(alloc_ptr, alloc_size);
        assert_eq!(allocator.free(), initial_free);
    }
    // Typed allocation round-trips through deallocate.
    #[test]
    fn test_allocator_allocate_type() {
        let mut allocator: TestAllocator<8> = BuddyAllocator::new();
        let ptr = NonNull::new(core::ptr::addr_of_mut!(TEST_MEMORY).cast::<u8>()).unwrap();
        let len = core::mem::size_of_val(unsafe { &*core::ptr::addr_of!(TEST_MEMORY) });
        allocator.init(12, ptr);
        unsafe {
            allocator.transfer(ptr, len);
        }
        let initial_free = allocator.free();
        let (alloc_ptr, alloc_size) = allocator.allocate_type::<usize>().unwrap();
        assert!(alloc_size >= core::mem::size_of::<usize>());
        allocator.deallocate(alloc_ptr, alloc_size);
        assert_eq!(allocator.free(), initial_free);
    }
    // Layout-based allocate/deallocate round-trips as well.
    #[test]
    fn test_allocator_allocate_layout() {
        let mut allocator: TestAllocator<8> = BuddyAllocator::new();
        let ptr = NonNull::new(core::ptr::addr_of_mut!(TEST_MEMORY).cast::<u8>()).unwrap();
        let len = core::mem::size_of_val(unsafe { &*core::ptr::addr_of!(TEST_MEMORY) });
        allocator.init(12, ptr);
        unsafe {
            allocator.transfer(ptr, len);
        }
        let initial_free = allocator.free();
        let layout = Layout::from_size_align(64, 8).unwrap();
        let (alloc_ptr, alloc_size) = allocator.allocate_layout::<u8>(layout).unwrap();
        assert!(alloc_size >= 64);
        unsafe {
            allocator.deallocate_layout(alloc_ptr, layout);
        }
        assert_eq!(allocator.free(), initial_free);
    }
    // Multiple live allocations, freed out of order, still merge back fully.
    #[test]
    fn test_allocator_multiple_allocations() {
        let mut allocator: TestAllocator<8> = BuddyAllocator::new();
        let ptr = NonNull::new(core::ptr::addr_of_mut!(TEST_MEMORY).cast::<u8>()).unwrap();
        let len = core::mem::size_of_val(unsafe { &*core::ptr::addr_of!(TEST_MEMORY) });
        allocator.init(12, ptr);
        unsafe {
            allocator.transfer(ptr, len);
        }
        let initial_free = allocator.free();
        let size1 = NonZeroUsize::new(1024).unwrap();
        let size2 = NonZeroUsize::new(2048).unwrap();
        let size3 = NonZeroUsize::new(4096).unwrap();
        let (ptr1, size1) = allocator.allocate::<u8>(0, size1).unwrap();
        let (ptr2, size2) = allocator.allocate::<u8>(0, size2).unwrap();
        let (ptr3, size3) = allocator.allocate::<u8>(0, size3).unwrap();
        allocator.deallocate(ptr2, size2);
        allocator.deallocate(ptr1, size1);
        allocator.deallocate(ptr3, size3);
        assert_eq!(allocator.free(), initial_free);
    }
    // `snatch` removes the block from capacity as well as from free space.
    #[test]
    fn test_allocator_snatch() {
        let mut allocator: TestAllocator<8> = BuddyAllocator::new();
        let ptr = NonNull::new(core::ptr::addr_of_mut!(TEST_MEMORY).cast::<u8>()).unwrap();
        let len = core::mem::size_of_val(unsafe { &*core::ptr::addr_of!(TEST_MEMORY) });
        allocator.init(12, ptr);
        unsafe {
            allocator.transfer(ptr, len);
        }
        let initial_capacity = allocator.capacity();
        let initial_free = allocator.free();
        let size = NonZeroUsize::new(4096).unwrap();
        let (_alloc_ptr, alloc_size) = allocator.snatch::<u8>(0, size).unwrap();
        assert!(allocator.capacity() < initial_capacity);
        assert_eq!(allocator.capacity(), initial_capacity - alloc_size);
        assert_eq!(allocator.free(), initial_free - alloc_size);
    }
    // Requests beyond the managed memory must fail, not panic.
    #[test]
    fn test_allocator_allocate_failure() {
        let mut allocator: TestAllocator<8> = BuddyAllocator::new();
        let ptr = NonNull::new(core::ptr::addr_of_mut!(TEST_MEMORY).cast::<u8>()).unwrap();
        let len = core::mem::size_of_val(unsafe { &*core::ptr::addr_of!(TEST_MEMORY) });
        allocator.init(12, ptr);
        unsafe {
            allocator.transfer(ptr, len);
        }
        let huge_size = NonZeroUsize::new(len * 2).unwrap();
        assert!(allocator.allocate::<u8>(0, huge_size).is_err());
    }
    // A zero-sized layout succeeds with size 0 and no free-list traffic.
    #[test]
    fn test_allocator_zero_size_allocation() {
        let mut allocator: TestAllocator<8> = BuddyAllocator::new();
        let ptr = NonNull::new(core::ptr::addr_of_mut!(TEST_MEMORY).cast::<u8>()).unwrap();
        let len = core::mem::size_of_val(unsafe { &*core::ptr::addr_of!(TEST_MEMORY) });
        allocator.init(12, ptr);
        unsafe {
            allocator.transfer(ptr, len);
        }
        let layout = Layout::from_size_align(0, 1).unwrap();
        let (_alloc_ptr, alloc_size) = allocator.allocate_layout::<u8>(layout).unwrap();
        assert_eq!(alloc_size, 0);
    }
    // max_order = min_order + N.
    #[test]
    fn test_max_order() {
        let mut allocator: TestAllocator<4> = BuddyAllocator::new();
        let ptr = NonNull::new(core::ptr::addr_of_mut!(TEST_MEMORY).cast::<u8>()).unwrap();
        allocator.init(3, ptr);
        assert_eq!(allocator.max_order(), 7);
    }
    // Index 0 maps to the null address and so yields `None`.
    #[test]
    fn test_order_idx_to_ptr() {
        let order = Order::new(12);
        assert!(order.idx_to_ptr::<u8>(0).is_none());
        let ptr = order.idx_to_ptr::<u8>(1).unwrap();
        assert_eq!(ptr.as_ptr() as usize, 4096);
        let ptr = order.idx_to_ptr::<u8>(2).unwrap();
        assert_eq!(ptr.as_ptr() as usize, 8192);
        let ptr = order.idx_to_ptr::<u8>(3).unwrap();
        assert_eq!(ptr.as_ptr() as usize, 12288);
    }
    // Pointer → index is the inverse shift.
    #[test]
    fn test_order_ptr_to_idx() {
        let order = Order::new(12);
        let ptr = NonNull::new(4096 as *mut u8).unwrap();
        assert_eq!(order.ptr_to_idx(ptr), 1);
        let ptr = NonNull::new(8192 as *mut u8).unwrap();
        assert_eq!(order.ptr_to_idx(ptr), 2);
    }
    // idx → ptr → idx is the identity for non-zero indices.
    #[test]
    fn test_order_roundtrip() {
        let order = Order::new(12);
        for i in [1, 5, 100, 1000] {
            let ptr = order.idx_to_ptr::<u8>(i).unwrap();
            let idx = order.ptr_to_idx(ptr);
            assert_eq!(idx, i);
        }
    }
    // Regression-style test with a small min order and its own private
    // buffer, exercising carving across power-of-two boundaries.
    #[test]
    fn test_transfer_low_address_crossing_boundary() {
        #[repr(C, align(4096))]
        struct Buf {
            _data: [u8; 128 * 1024],
        }
        static mut BUF: Buf = Buf {
            _data: [0; 128 * 1024],
        };
        type A = BuddyAllocator<32, UsizeBuddy, LinkedListBuddy>;
        let mut alloc = A::new();
        let ptr = NonNull::new((&raw mut BUF).cast::<u8>()).unwrap();
        let len = 128 * 1024;
        alloc.init(3, ptr);
        unsafe { alloc.transfer(ptr, len) };
        let size = NonZeroUsize::new(4096).unwrap();
        let (p, s) = alloc.allocate::<u8>(0, size).unwrap();
        assert!(s >= 4096);
        alloc.deallocate(p, s);
    }
}