#![cfg(any(feature = "sptr", feature = "unstable"))]
use core::{
iter::{self, Peekable},
mem::ManuallyDrop,
ops::Range,
};
use crate::core::{
alloc::{AllocError, Layout},
cmp, fmt,
mem::{self, MaybeUninit},
num::NonZeroUsize,
ptr::NonNull,
};
#[cfg(feature = "unstable")]
use crate::core::alloc::Allocator;
#[cfg(not(feature = "unstable"))]
use crate::core::{
alloc::LayoutExt,
num::UsizeExt,
ptr::{NonNullStrict, Strict},
};
#[cfg(all(any(feature = "alloc", test), feature = "unstable"))]
use alloc::alloc::Global;
use crate::{bitmap::Bitmap, AllocInitError, BackingAllocator, BasePtr, DoubleBlockLink, Raw};
#[cfg(all(any(feature = "alloc", test), not(feature = "unstable")))]
use crate::Global;
/// Per-level bookkeeping for one block size of the buddy allocator.
struct BuddyLevel {
    /// Size in bytes of every block at this level.
    block_size: usize,
    /// `log2(block_size)`; offsets are shifted right by this to obtain block
    /// indices at this level.
    block_pow: u32,
    /// Address of the first free block at this level (head of the intrusive
    /// doubly-linked free list), or `None` if the level has no free blocks.
    free_list: Option<NonZeroUsize>,
    /// One bit per buddy *pair*, toggled as either half changes state; used
    /// by `deallocate` to decide whether a freed block's buddy is also free
    /// and the pair can coalesce.
    buddies: Bitmap,
    /// One bit per block marking blocks that have been split into two
    /// smaller children. `None` at the deepest (smallest-block) level, which
    /// can never split.
    splits: Option<Bitmap>,
}
impl BuddyLevel {
    /// Converts a byte offset from the region base into this level's block
    /// index.
    #[inline]
    fn index_of(&self, block_ofs: usize) -> usize {
        block_ofs >> self.block_pow as usize
    }

    /// Returns the `buddies` bit index of the buddy pair containing the
    /// block at `block_ofs`.
    #[inline]
    fn buddy_bit(&self, block_ofs: usize) -> usize {
        self.index_of(block_ofs) >> 1
    }

    /// Returns the byte offset of the buddy of the block at `block_ofs`,
    /// i.e. the other half of the same pair.
    #[inline]
    fn buddy_ofs(&self, block_ofs: usize) -> usize {
        // Flipping the block-size bit moves to the sibling half of the pair.
        block_ofs ^ self.block_size
    }

    /// Test helper: walks the free list, asserting that every `prev`
    /// back-link is consistent, and returns the list length.
    #[cfg(test)]
    fn enumerate_free_list(&self, base: BasePtr) -> usize {
        let mut item = self.free_list;
        let mut num = 0;
        let mut prev = None;
        while let Some(it) = item {
            num += 1;
            let link = unsafe { base.double_link_mut(it) };
            assert_eq!(link.prev, prev);
            prev = item;
            item = link.next;
        }
        num
    }

    /// Pushes `block` onto the head of this level's free list.
    ///
    /// # Safety
    ///
    /// `block` must be an unused block within `base`'s region that is valid
    /// for writing a `DoubleBlockLink`.
    unsafe fn free_list_push(&mut self, base: BasePtr, block: NonZeroUsize) {
        // The link lives inline in the free block, so the block address must
        // be aligned for `DoubleBlockLink`.
        assert_eq!(block.get() & (mem::align_of::<DoubleBlockLink>() - 1), 0);
        let new_head = block;
        if let Some(old_head) = self.free_list {
            let old_head_mut = unsafe { base.double_link_mut(old_head) };
            old_head_mut.prev = Some(new_head);
        }
        let old_head = self.free_list;
        unsafe {
            base.init_double_link_at(
                new_head,
                DoubleBlockLink {
                    next: old_head,
                    prev: None,
                },
            )
        };
        self.free_list = Some(new_head);
    }

    /// Unlinks `block` from this level's free list.
    ///
    /// # Safety
    ///
    /// `block` must currently be linked into this level's free list.
    unsafe fn free_list_remove(&mut self, base: BasePtr, block: NonZeroUsize) {
        unsafe {
            let removed = base.double_link_mut(block);
            debug_assert!(removed.next.map_or(true, |next| base.contains_addr(next)));
            match removed.prev {
                Some(p) => {
                    base.double_link_mut(p).next = removed.next;
                }
                // No predecessor means `block` was the list head.
                None => self.free_list = removed.next,
            }
            if let Some(n) = removed.next {
                base.double_link_mut(n).prev = removed.prev;
            }
        }
    }

    /// Pops one free block from this level, toggling the pair's parity bit.
    /// Returns `None` if the level has no free blocks.
    ///
    /// # Safety
    ///
    /// `base` must be the region this level's free list points into.
    unsafe fn allocate(&mut self, base: BasePtr) -> Option<NonZeroUsize> {
        let block = self.free_list?;
        unsafe { self.free_list_remove(base, block) };
        let ofs = base.offset_to(block);
        self.buddies.toggle(self.buddy_bit(ofs));
        Some(block)
    }

    /// Adds `block` to this level as the free half of a freshly split pair.
    ///
    /// # Safety
    ///
    /// `block` must be an unused block belonging to this level.
    unsafe fn assign(&mut self, base: BasePtr, block: NonZeroUsize) {
        let ofs = base.offset_to(block);
        let buddy_bit = self.buddy_bit(ofs);
        // The pair's parity bit must be clear before assignment.
        assert!(!self.buddies.get(buddy_bit));
        self.buddies.set(buddy_bit, true);
        unsafe { self.free_list_push(base, block) };
    }

    /// Frees `block` at this level. When `coalesce` is set and the block's
    /// buddy is also free, both halves are merged and the parent block's
    /// pointer is returned so the caller can free it one level up; otherwise
    /// the block is pushed onto this level's free list and `None` is
    /// returned.
    ///
    /// # Safety
    ///
    /// `block` must be an outstanding block belonging to this level.
    unsafe fn deallocate(
        &mut self,
        base: BasePtr,
        block: NonNull<u8>,
        coalesce: bool,
    ) -> Option<NonNull<u8>> {
        let block = block.addr();
        let block_ofs = base.offset_to(block);
        let buddy_bit = self.buddy_bit(block_ofs);
        self.buddies.toggle(buddy_bit);
        let split_bit = self.index_of(block_ofs);
        // The freed block is whole again, so clear its split mark.
        if let Some(splits) = self.splits.as_mut() {
            splits.set(split_bit, false);
        }
        if !coalesce || self.buddies.get(buddy_bit) {
            // Parity bit still set after the toggle: the buddy is not free,
            // so the block stays at this level.
            unsafe { self.free_list_push(base, block) };
            None
        } else {
            // Buddy is free too: unlink it and hand the merged block up.
            let buddy_ofs = self.buddy_ofs(block_ofs);
            let buddy =
                NonZeroUsize::new(base.addr().get().checked_add(buddy_ofs).unwrap()).unwrap();
            unsafe { self.free_list_remove(base, buddy) };
            // Clearing the size bit yields the offset of the pair's first
            // half, which is the merged (parent) block's offset.
            let coalesced_ofs = buddy_ofs & !self.block_size;
            Some(base.with_offset(coalesced_ofs).unwrap())
        }
    }
}
/// A binary-buddy allocator with `LEVELS` levels whose largest blocks are
/// `BLK_SIZE` bytes, backed by memory obtained from `A`.
pub struct Buddy<const BLK_SIZE: usize, const LEVELS: usize, A: BackingAllocator> {
    // The level/bitmap machinery; owns pointers into region and metadata.
    raw: RawBuddy<BLK_SIZE, LEVELS>,
    // Used by `Drop` to return the region and metadata allocations.
    backing_allocator: A,
}
// SAFETY: NOTE(review): `Buddy` holds raw pointers into region/metadata
// memory that it appears to own exclusively (all alloc/dealloc paths take
// `&mut self`), so moving it across threads looks sound when `A: Send` —
// confirm no thread-affine state hides behind `BasePtr`/`Bitmap`.
unsafe impl<const BLK_SIZE: usize, const LEVELS: usize, A: BackingAllocator + Send> Send
    for Buddy<BLK_SIZE, LEVELS, A>
{
}
// SAFETY: NOTE(review): all mutation goes through `&mut self`, so shared
// `&Buddy` references permit no concurrent mutation; sharing looks sound
// when `A: Sync` — confirm `Bitmap`/`BasePtr` expose no interior mutability.
unsafe impl<const BLK_SIZE: usize, const LEVELS: usize, A: BackingAllocator + Sync> Sync
    for Buddy<BLK_SIZE, LEVELS, A>
{
}
impl<const BLK_SIZE: usize, const LEVELS: usize> Buddy<BLK_SIZE, LEVELS, Raw> {
    /// Constructs a `Buddy` over caller-provided memory with the entire
    /// region initially available.
    ///
    /// # Safety
    ///
    /// `metadata` and `region` must be valid for the layouts returned by
    /// [`Self::metadata_layout`] and [`Self::region_layout`] for
    /// `num_blocks`, and must not be accessed elsewhere while the allocator
    /// is live.
    pub unsafe fn new_raw(
        metadata: NonNull<u8>,
        region: NonNull<u8>,
        num_blocks: usize,
    ) -> Result<Buddy<BLK_SIZE, LEVELS, Raw>, AllocInitError> {
        unsafe {
            RawBuddy::try_new_with_address_gaps(metadata, region, num_blocks, iter::empty())
                .map(|p| p.with_backing_allocator(Raw))
        }
    }

    /// Constructs a `Buddy` over caller-provided memory with the entire
    /// region initially *unavailable*; make parts of it allocatable with
    /// [`Self::add_region`].
    ///
    /// # Safety
    ///
    /// Same requirements as [`Self::new_raw`].
    pub unsafe fn new_raw_unpopulated(
        metadata: NonNull<u8>,
        region: NonNull<u8>,
        num_blocks: usize,
    ) -> Result<Buddy<BLK_SIZE, LEVELS, Raw>, AllocInitError> {
        unsafe {
            RawBuddy::try_new_unpopulated(metadata, region, num_blocks)
                .map(|p| p.with_backing_allocator(Raw))
        }
    }

    /// Makes the addresses in `addr_range` available for allocation.
    ///
    /// # Safety
    ///
    /// The range must lie within the managed region, be aligned to the
    /// minimum block size, and must not already be available or contain any
    /// outstanding allocation.
    pub unsafe fn add_region(&mut self, addr_range: Range<NonZeroUsize>) {
        unsafe { self.raw.add_region(addr_range) };
    }

    /// Decomposes the allocator into its raw memory regions without freeing
    /// them; the caller becomes responsible for that memory.
    pub unsafe fn into_raw_parts(self) -> RawBuddyParts {
        let metadata = self.raw.metadata;
        let metadata_layout = Self::metadata_layout(self.raw.num_blocks.get()).unwrap();
        let region = self.raw.base.ptr();
        let region_layout = Self::region_layout(self.raw.num_blocks.get()).unwrap();
        // Suppress `Drop`, which would otherwise hand the memory back to the
        // backing allocator.
        let _ = ManuallyDrop::new(self);
        RawBuddyParts {
            metadata,
            metadata_layout,
            region,
            region_layout,
        }
    }
}
#[cfg(all(any(feature = "alloc", test), not(feature = "unstable")))]
impl<const BLK_SIZE: usize, const LEVELS: usize> Buddy<BLK_SIZE, LEVELS, Global> {
    /// Constructs a `Buddy` backed by the global allocator with the entire
    /// region available.
    pub fn try_new(num_blocks: usize) -> Result<Buddy<BLK_SIZE, LEVELS, Global>, AllocInitError> {
        Self::try_new_with_offset_gaps(num_blocks, iter::empty())
    }

    /// Constructs a `Buddy` backed by the global allocator, leaving the byte
    /// ranges in `gaps` (offsets from the region start) unavailable.
    #[doc(hidden)]
    pub fn try_new_with_offset_gaps<I>(
        num_blocks: usize,
        gaps: I,
    ) -> Result<Buddy<BLK_SIZE, LEVELS, Global>, AllocInitError>
    where
        I: IntoIterator<Item = Range<usize>>,
    {
        // Compute both layouts up front so invalid configurations fail
        // before touching the allocator.
        let region_layout = Self::region_layout(num_blocks)?;
        let metadata_layout = Self::metadata_layout(num_blocks)?;
        let num_blocks = NonZeroUsize::new(num_blocks).ok_or(AllocInitError::InvalidConfig)?;
        unsafe {
            let region_ptr = {
                let raw = alloc::alloc::alloc(region_layout);
                NonNull::new(raw).ok_or(AllocInitError::AllocFailed(region_layout))?
            };
            let metadata_ptr = {
                let raw = alloc::alloc::alloc(metadata_layout);
                // If the metadata allocation fails, release the region
                // before bailing out.
                NonNull::new(raw).ok_or_else(|| {
                    alloc::alloc::dealloc(region_ptr.as_ptr(), region_layout);
                    AllocInitError::AllocFailed(metadata_layout)
                })?
            };
            match RawBuddy::<BLK_SIZE, LEVELS>::try_new_with_offset_gaps(
                metadata_ptr,
                region_ptr,
                num_blocks.get(),
                gaps,
            ) {
                Ok(b) => Ok(b.with_backing_allocator(Global)),
                // Initialization failed: free both allocations.
                Err(e) => {
                    alloc::alloc::dealloc(region_ptr.as_ptr(), region_layout);
                    alloc::alloc::dealloc(metadata_ptr.as_ptr(), metadata_layout);
                    Err(e)
                }
            }
        }
    }
}
#[cfg(all(any(feature = "alloc", test), feature = "unstable"))]
impl<const BLK_SIZE: usize, const LEVELS: usize> Buddy<BLK_SIZE, LEVELS, Global> {
    /// Constructs a `Buddy` backed by the global allocator with the entire
    /// region available.
    #[cfg_attr(docs_rs, doc(cfg(feature = "alloc")))]
    pub fn try_new(num_blocks: usize) -> Result<Buddy<BLK_SIZE, LEVELS, Global>, AllocInitError> {
        // Delegate to the allocator-generic constructor with `Global`.
        Self::try_new_in(num_blocks, Global)
    }

    /// Constructs a `Buddy` backed by the global allocator, leaving the byte
    /// ranges in `gaps` (offsets from the region start) unavailable.
    #[doc(hidden)]
    pub fn try_new_with_offset_gaps<I>(
        num_blocks: usize,
        gaps: I,
    ) -> Result<Buddy<BLK_SIZE, LEVELS, Global>, AllocInitError>
    where
        I: IntoIterator<Item = Range<usize>>,
    {
        // Delegate to the allocator-generic constructor with `Global`.
        Self::try_new_with_offset_gaps_in(num_blocks, gaps, Global)
    }
}
#[cfg(feature = "unstable")]
impl<const BLK_SIZE: usize, const LEVELS: usize, A: Allocator> Buddy<BLK_SIZE, LEVELS, A> {
    /// Constructs a `Buddy` whose region and metadata are obtained from
    /// `allocator`, with the entire region available.
    #[cfg_attr(docs_rs, doc(cfg(feature = "unstable")))]
    pub fn try_new_in(
        num_blocks: usize,
        allocator: A,
    ) -> Result<Buddy<BLK_SIZE, LEVELS, A>, AllocInitError> {
        Self::try_new_with_offset_gaps_in(num_blocks, iter::empty(), allocator)
    }

    /// Constructs a `Buddy` whose memory comes from `allocator`, leaving the
    /// byte ranges in `gaps` (offsets from the region start) unavailable.
    #[doc(hidden)]
    #[cfg_attr(docs_rs, doc(cfg(feature = "unstable")))]
    pub fn try_new_with_offset_gaps_in<I>(
        num_blocks: usize,
        gaps: I,
        allocator: A,
    ) -> Result<Buddy<BLK_SIZE, LEVELS, A>, AllocInitError>
    where
        I: IntoIterator<Item = Range<usize>>,
    {
        // Validate the configuration before allocating anything.
        let region_layout = Self::region_layout(num_blocks)?;
        let metadata_layout = Self::metadata_layout(num_blocks)?;
        let num_blocks = NonZeroUsize::new(num_blocks).ok_or(AllocInitError::InvalidConfig)?;
        let region = allocator
            .allocate(region_layout)
            .map_err(|_| AllocInitError::AllocFailed(region_layout))?;
        let metadata = match allocator.allocate(metadata_layout) {
            Ok(m) => m,
            // Metadata allocation failed: return the region first.
            Err(_) => unsafe {
                let region_ptr = NonNull::new_unchecked(region.as_ptr() as *mut u8);
                allocator.deallocate(region_ptr, region_layout);
                return Err(AllocInitError::AllocFailed(metadata_layout));
            },
        };
        unsafe {
            // Thin the fat slice pointers down to their data pointers.
            let region_ptr = NonNull::new_unchecked(region.as_ptr() as *mut u8);
            let metadata_ptr = NonNull::new_unchecked(metadata.as_ptr() as *mut u8);
            RawBuddy::<BLK_SIZE, LEVELS>::try_new_with_offset_gaps(
                metadata_ptr,
                region_ptr,
                num_blocks.get(),
                gaps,
            )
            .map(|p| p.with_backing_allocator(allocator))
        }
    }
}
impl<const BLK_SIZE: usize, const LEVELS: usize, A: BackingAllocator> Buddy<BLK_SIZE, LEVELS, A> {
    /// Returns the size in bytes of the smallest block this configuration
    /// can hand out, or `Err(AllocInitError::InvalidConfig)` if the const
    /// parameters are unusable.
    ///
    /// The configuration is invalid when there are zero levels, `BLK_SIZE`
    /// is not a power of two, `LEVELS` is at least the address width, or the
    /// smallest block would be too small to hold an inline free-list link.
    pub const fn min_block_size() -> Result<usize, AllocInitError> {
        if LEVELS == 0 || !BLK_SIZE.is_power_of_two() || LEVELS >= usize::BITS as usize {
            return Err(AllocInitError::InvalidConfig);
        }
        let min_block_size = BLK_SIZE >> (LEVELS - 1);
        if min_block_size < mem::size_of::<DoubleBlockLink>() {
            return Err(AllocInitError::InvalidConfig);
        }
        Ok(min_block_size)
    }

    /// Returns the layout of the managed region for `num_blocks` top-level
    /// blocks.
    ///
    /// # Errors
    ///
    /// Returns `AllocInitError::InvalidConfig` if `num_blocks` is zero, the
    /// const parameters are invalid, or the region size overflows.
    pub fn region_layout(num_blocks: usize) -> Result<Layout, AllocInitError> {
        let num_blocks = NonZeroUsize::new(num_blocks).ok_or(AllocInitError::InvalidConfig)?;
        Self::region_layout_impl(num_blocks)
    }

    fn region_layout_impl(num_blocks: NonZeroUsize) -> Result<Layout, AllocInitError> {
        // Validates the const parameters as a side effect.
        let min_block_size = Self::min_block_size()?;
        let levels: u32 = LEVELS
            .try_into()
            .map_err(|_| AllocInitError::InvalidConfig)?;
        // num_blocks top-level blocks of 2^(LEVELS - 1) * min_block_size
        // (== BLK_SIZE) bytes each, with checked arithmetic throughout.
        let size = 2usize
            .pow(levels - 1)
            .checked_mul(min_block_size)
            .ok_or(AllocInitError::InvalidConfig)?
            .checked_mul(num_blocks.get())
            .ok_or(AllocInitError::InvalidConfig)?;
        let align = BLK_SIZE;
        Layout::from_size_align(size, align).map_err(|_| AllocInitError::InvalidConfig)
    }

    /// Returns the layout required for the allocator's metadata (the
    /// per-level buddy and split bitmaps) for `num_blocks` top-level blocks.
    ///
    /// # Errors
    ///
    /// Returns `AllocInitError::InvalidConfig` if `num_blocks` is zero, the
    /// const parameters are invalid, or the metadata size overflows.
    pub fn metadata_layout(num_blocks: usize) -> Result<Layout, AllocInitError> {
        let num_blocks = NonZeroUsize::new(num_blocks).ok_or(AllocInitError::InvalidConfig)?;
        Self::metadata_layout_impl(num_blocks)
    }

    fn metadata_layout_impl(num_blocks: NonZeroUsize) -> Result<Layout, AllocInitError> {
        /// Computes 2^0 + 2^1 + … + 2^max == 2^(max + 1) - 1.
        const fn sum_of_powers_of_2(max: u32) -> usize {
            2_usize.pow(max + 1) - 1
        }
        // Validate the const parameters first; afterwards LEVELS is known to
        // be in 1..usize::BITS, so the arithmetic below cannot overflow.
        // (Previously an oversized LEVELS could panic on this public path.)
        Self::min_block_size()?;
        let levels: u32 = LEVELS
            .try_into()
            .map_err(|_| AllocInitError::InvalidConfig)?;
        let num_pairs = (num_blocks.get() + 1) / 2;
        let buddy_l0_layout = Bitmap::map_layout(num_pairs);
        // Map layout overflow to InvalidConfig instead of panicking, for
        // consistency with the split-bitmap path below.
        let (buddy_layout, _) = buddy_l0_layout
            .repeat(sum_of_powers_of_2(levels - 1))
            .map_err(|_| AllocInitError::InvalidConfig)?;
        // A single-level allocator never splits, so it needs no split
        // bitmaps.
        if LEVELS == 1 {
            return Ok(buddy_layout);
        }
        let split_l0_layout = Bitmap::map_layout(num_blocks.get());
        let (split_layout, _) = split_l0_layout
            .repeat(sum_of_powers_of_2(levels - 1))
            .map_err(|_| AllocInitError::InvalidConfig)?;
        let (full_layout, _) = buddy_layout
            .extend(split_layout)
            .map_err(|_| AllocInitError::InvalidConfig)?;
        Ok(full_layout)
    }

    /// Test helper: counts the entries in the free list at `level`.
    #[cfg(test)]
    #[inline]
    fn enumerate_free_list(&self, level: usize) -> usize {
        self.raw.enumerate_free_list(level)
    }

    /// Attempts to allocate a block of memory satisfying `layout`.
    pub fn allocate(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        self.raw.allocate(layout)
    }

    /// Deallocates the block of memory at `ptr`.
    ///
    /// # Safety
    ///
    /// `ptr` must have been returned by [`Self::allocate`] on this allocator
    /// and must not already have been deallocated.
    pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>) {
        unsafe { self.raw.deallocate(ptr) }
    }
}
impl<const BLK_SIZE: usize, const LEVELS: usize, A: BackingAllocator> fmt::Debug
    for Buddy<BLK_SIZE, LEVELS, A>
{
    /// Formats the allocator's identity (metadata pointer, region base, and
    /// block count) without walking its bitmaps or free lists.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut out = f.debug_struct("Buddy");
        out.field("metadata", &self.raw.metadata);
        out.field("base", &self.raw.base.ptr());
        out.field("num_blocks", &self.raw.num_blocks);
        out.finish()
    }
}
impl<const BLK_SIZE: usize, const LEVELS: usize, A: BackingAllocator> Drop
    for Buddy<BLK_SIZE, LEVELS, A>
{
    /// Returns the region and metadata allocations to the backing allocator.
    ///
    /// Skipped by [`Buddy::into_raw_parts`], which wraps `self` in
    /// `ManuallyDrop` to hand ownership of the memory back to the caller.
    fn drop(&mut self) {
        let region = self.raw.base.ptr();
        let metadata = self.raw.metadata;
        let num_blocks = self.raw.num_blocks;
        // These layouts were computed successfully at construction time, so
        // recomputing them here cannot fail.
        let region_layout = Self::region_layout_impl(num_blocks).unwrap();
        let metadata_layout = Self::metadata_layout_impl(num_blocks).unwrap();
        unsafe {
            self.backing_allocator.deallocate(region, region_layout);
            self.backing_allocator.deallocate(metadata, metadata_layout);
        }
    }
}
/// The raw memory regions of a decomposed [`Buddy`], as produced by
/// [`Buddy::into_raw_parts`]; the holder owns this memory.
#[derive(Debug)]
pub struct RawBuddyParts {
    /// Pointer to the metadata (bitmap) allocation.
    pub metadata: NonNull<u8>,
    /// Layout of the metadata allocation.
    pub metadata_layout: Layout,
    /// Pointer to the managed region.
    pub region: NonNull<u8>,
    /// Layout of the managed region.
    pub region_layout: Layout,
}
/// The allocator-agnostic core of [`Buddy`]: owns the level table and raw
/// pointers, but not the memory's provenance.
struct RawBuddy<const BLK_SIZE: usize, const LEVELS: usize> {
    /// Base pointer of the managed region, carrying its size/limit.
    base: BasePtr,
    /// Pointer to the metadata allocation holding all per-level bitmaps.
    metadata: NonNull<u8>,
    /// Number of top-level (`BLK_SIZE`-byte) blocks in the region.
    num_blocks: NonZeroUsize,
    /// Per-level state, ordered from largest blocks (index 0) to smallest.
    levels: [BuddyLevel; LEVELS],
}
impl<const BLK_SIZE: usize, const LEVELS: usize> RawBuddy<BLK_SIZE, LEVELS> {
    /// Pairs this raw allocator with the backing allocator that provided its
    /// memory, producing the public [`Buddy`] handle.
    fn with_backing_allocator<A: BackingAllocator>(
        self,
        backing_allocator: A,
    ) -> Buddy<BLK_SIZE, LEVELS, A> {
        Buddy {
            raw: self,
            backing_allocator,
        }
    }

    /// Like [`Self::try_new_with_address_gaps`], but with gaps given as byte
    /// offsets from the region start rather than absolute addresses.
    ///
    /// # Safety
    ///
    /// Same requirements as [`Self::try_new_with_address_gaps`].
    #[cfg(any(feature = "unstable", feature = "alloc", test))]
    unsafe fn try_new_with_offset_gaps<I>(
        metadata: NonNull<u8>,
        base: NonNull<u8>,
        num_blocks: usize,
        gaps: I,
    ) -> Result<RawBuddy<BLK_SIZE, LEVELS>, AllocInitError>
    where
        I: IntoIterator<Item = Range<usize>>,
    {
        let region_addr = base.addr().get();
        // Translate each offset range into an absolute address range;
        // overflow here means the caller passed a gap beyond the address
        // space.
        let gaps = gaps.into_iter().map(|ofs| {
            let start = NonZeroUsize::new(region_addr.checked_add(ofs.start).unwrap()).unwrap();
            let end = NonZeroUsize::new(region_addr.checked_add(ofs.end).unwrap()).unwrap();
            start..end
        });
        unsafe { Self::try_new_with_address_gaps(metadata, base, num_blocks, gaps) }
    }

    /// Creates a buddy allocator over `base`, making the whole region
    /// available except the addresses covered by `gaps`.
    ///
    /// # Safety
    ///
    /// `metadata` and `base` must satisfy the requirements of
    /// [`Self::try_new_unpopulated`]. `gaps` must be sorted and
    /// non-overlapping (asserted by [`AlignedRanges`]).
    unsafe fn try_new_with_address_gaps<I>(
        metadata: NonNull<u8>,
        base: NonNull<u8>,
        num_blocks: usize,
        gaps: I,
    ) -> Result<RawBuddy<BLK_SIZE, LEVELS>, AllocInitError>
    where
        I: IntoIterator<Item = Range<NonZeroUsize>>,
    {
        let mut buddy = unsafe { Self::try_new_unpopulated(metadata, base, num_blocks) }?;
        let min_block_size = Buddy::<BLK_SIZE, LEVELS, Raw>::min_block_size().unwrap();
        // Expand each gap to minimum-block granularity (merging gaps that
        // become adjacent), then free every stretch between the gaps.
        let gaps = AlignedRanges::new(gaps.into_iter(), min_block_size);
        let mut start = buddy.base.addr();
        for gap in gaps {
            let end = gap.start;
            unsafe { buddy.add_region(start..end) };
            start = gap.end;
        }
        // Free the tail after the final gap.
        let end = buddy.base.limit();
        unsafe { buddy.add_region(start..end) };
        Ok(buddy)
    }

    /// Makes the addresses in `addr_range` available for allocation by
    /// freeing the maximal aligned power-of-two blocks that tile the range.
    ///
    /// # Safety
    ///
    /// The range must lie within the managed region, be aligned to the
    /// minimum block size, and must not already be available or contain any
    /// outstanding allocation.
    unsafe fn add_region(&mut self, addr_range: Range<NonZeroUsize>) {
        assert!(self.base.addr() <= addr_range.start);
        assert!(addr_range.end <= self.base.limit());
        let min_block_size = Self::min_block_size().unwrap();
        assert_eq!(addr_range.start.get() % min_block_size, 0);
        assert_eq!(addr_range.end.get() % min_block_size, 0);
        let min_pow = min_block_size.trailing_zeros();
        let max_pow = BLK_SIZE.trailing_zeros();
        let mut curs = addr_range.start;
        while curs < addr_range.end {
            // Largest block alignment permitted by the cursor address.
            let curs_pow = curs.trailing_zeros().min(max_pow);
            assert!(curs_pow >= min_pow);
            let curs_align = 1 << curs_pow;
            let curs_ofs = curs.get() - self.base.addr().get();
            // Round the remaining length down to a power of two; this
            // iteration frees the smaller of that and the alignment bound.
            let remaining = addr_range.end.get() - curs.get();
            let remaining_po2 = 1 << (usize::BITS - (remaining.leading_zeros() + 1));
            let block_size: usize = cmp::min(remaining_po2, curs_align);
            let init_level = (max_pow - curs_pow) as usize;
            let target_level = (max_pow - block_size.trailing_zeros()) as usize;
            // Mark every ancestor of the freed block as split so a later
            // deallocation resolves to the correct level.
            for lv in self.levels.iter_mut().take(target_level).skip(init_level) {
                let split_bit = lv.index_of(curs_ofs);
                if let Some(s) = lv.splits.as_mut() {
                    s.set(split_bit, true);
                }
            }
            unsafe { self.deallocate(self.base.with_addr(curs)) };
            curs = curs
                .get()
                .checked_add(block_size)
                .and_then(NonZeroUsize::new)
                .unwrap();
        }
    }

    /// Creates a buddy allocator whose entire region is initially
    /// *unavailable*; make parts of it allocatable with
    /// [`Self::add_region`].
    ///
    /// # Safety
    ///
    /// `metadata` and `base` must be valid for the metadata and region
    /// layouts implied by the const parameters and `num_blocks`, and must
    /// not be accessed elsewhere while the allocator is live.
    pub unsafe fn try_new_unpopulated(
        metadata: NonNull<u8>,
        base: NonNull<u8>,
        num_blocks: usize,
    ) -> Result<RawBuddy<BLK_SIZE, LEVELS>, AllocInitError> {
        let num_blocks = NonZeroUsize::new(num_blocks).ok_or(AllocInitError::InvalidConfig)?;
        let min_block_size = Buddy::<BLK_SIZE, LEVELS, Raw>::min_block_size()?;
        let meta_layout = Buddy::<BLK_SIZE, LEVELS, Raw>::metadata_layout_impl(num_blocks)?;
        let region_layout = Buddy::<BLK_SIZE, LEVELS, Raw>::region_layout_impl(num_blocks)?;
        // Both allocations must fit below the end of the address space.
        let meta_end = metadata
            .addr()
            .get()
            .checked_add(meta_layout.size())
            .ok_or(AllocInitError::InvalidLocation)?;
        base.addr()
            .get()
            .checked_add(region_layout.size())
            .ok_or(AllocInitError::InvalidLocation)?;
        // SAFETY: an array of `MaybeUninit` needs no initialization.
        let mut levels: [MaybeUninit<BuddyLevel>; LEVELS] = unsafe {
            MaybeUninit::<[MaybeUninit<BuddyLevel>; LEVELS]>::uninit().assume_init()
        };
        // Carve each level's bitmaps out of the metadata allocation in
        // order: buddy bitmap, then (except at the deepest level) the split
        // bitmap.
        let mut meta_curs = metadata.as_ptr();
        for (li, level) in levels.iter_mut().enumerate() {
            let block_size = 2_usize.pow((LEVELS - li) as u32 - 1) * min_block_size;
            let block_factor = 2_usize.pow(li as u32);
            let num_blocks = block_factor * num_blocks.get();
            let num_pairs = (num_blocks + 1) / 2;
            let buddy_size = Bitmap::map_layout(num_pairs).size();
            let buddy_bitmap = unsafe { Bitmap::new(num_pairs, meta_curs as *mut u64) };
            meta_curs = unsafe {
                meta_curs.offset(
                    buddy_size
                        .try_into()
                        .expect("buddy bitmap layout size overflows isize"),
                )
            };
            // The deepest level never splits, so it carries no split bitmap.
            let split_bitmap = if li < LEVELS - 1 {
                let split_size = Bitmap::map_layout(num_blocks).size();
                let split_bitmap = unsafe { Bitmap::new(num_blocks, meta_curs as *mut u64) };
                meta_curs = unsafe {
                    meta_curs.offset(
                        split_size
                            .try_into()
                            .expect("split bitmap layout size overflows isize"),
                    )
                };
                Some(split_bitmap)
            } else {
                None
            };
            unsafe {
                level.as_mut_ptr().write(BuddyLevel {
                    block_size,
                    block_pow: block_size.trailing_zeros(),
                    free_list: None,
                    buddies: buddy_bitmap,
                    splits: split_bitmap,
                });
            }
        }
        if meta_curs.addr() > meta_end {
            // Report both the overrunning cursor and the limit; previously
            // this message printed the limit under the `curs` label.
            panic!(
                "metadata cursor overran layout size: curs = {:#x}, end = {meta_end:#x}, layout = {meta_layout:?}",
                meta_curs.addr()
            );
        }
        // SAFETY: every element of `levels` was written in the loop above.
        let levels = unsafe {
            (&levels as *const _ as *const [BuddyLevel; LEVELS]).read()
        };
        let base = BasePtr::new(base, region_layout.size());
        Ok(RawBuddy {
            base,
            metadata,
            num_blocks,
            levels,
        })
    }

    /// Returns the minimum block size implied by the const parameters.
    /// Delegates to [`Buddy::min_block_size`] so the validation logic lives
    /// in exactly one place.
    const fn min_block_size() -> Result<usize, AllocInitError> {
        Buddy::<BLK_SIZE, LEVELS, Raw>::min_block_size()
    }

    /// Test helper: counts the entries in the free list at `level`.
    #[cfg(test)]
    #[inline]
    fn enumerate_free_list(&self, level: usize) -> usize {
        self.levels[level].enumerate_free_list(self.base)
    }

    /// Returns the level whose blocks are the smallest that can satisfy an
    /// allocation of `size` bytes, or `None` when `size` is zero or exceeds
    /// the largest block.
    fn level_for(&self, size: usize) -> Option<usize> {
        let min_block_size = self.levels[LEVELS - 1].block_size;
        let max_block_size = self.levels[0].block_size;
        if size == 0 || size > max_block_size {
            return None;
        }
        // `next_power_of_two` cannot overflow here because `size` is at most
        // `max_block_size`, which is itself a power of two. (The previous
        // hand-rolled rounding hard-coded a `1 << 63` bound, which is a
        // shift overflow on 32-bit targets.)
        let alloc_size = cmp::max(size.next_power_of_two(), min_block_size);
        let level: usize = (max_block_size.ilog2() - alloc_size.ilog2())
            .try_into()
            .unwrap();
        Some(level)
    }

    /// Returns the shallowest level at which a block could legally start at
    /// `block_ofs`, based on the offset's alignment.
    fn min_free_level(&self, block_ofs: usize) -> usize {
        let min_block_size = self.levels[LEVELS - 1].block_size;
        let max_block_size = self.levels[0].block_size;
        // Offset zero is aligned for every level.
        if block_ofs == 0 {
            return 0;
        }
        let max_size = 1 << block_ofs.trailing_zeros();
        if max_size > max_block_size {
            return 0;
        }
        assert!(max_size >= min_block_size);
        (max_block_size.ilog2() - max_size.ilog2())
            .try_into()
            .unwrap()
    }

    /// Attempts to allocate a block satisfying `layout`.
    ///
    /// Fails for zero-size layouts, alignments larger than the requested
    /// size, sizes larger than the top-level block, and exhaustion.
    pub fn allocate(&mut self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        if layout.size() == 0 || layout.align() > layout.size() {
            return Err(AllocError);
        }
        let target_level = self.level_for(layout.size()).ok_or(AllocError)?;
        // Fast path: a block of exactly the right size is already free.
        if let Some(block) = unsafe { self.levels[target_level].allocate(self.base) } {
            return Ok(self.base.with_addr_and_size(block, layout.size()));
        }
        // Otherwise take a block from the deepest ancestor level that has
        // one free...
        let (block, init_level) = (0..target_level)
            .rev()
            .find_map(|level| unsafe {
                self.levels[level]
                    .allocate(self.base)
                    .map(|block| (block, level))
            })
            .ok_or(AllocError)?;
        // ...and split it repeatedly, keeping the front half and parking
        // each back half on the next level's free list.
        let block_ofs = self.base.offset_to(block);
        for level in init_level..target_level {
            let half_block_size = self.levels[level].block_size / 2;
            let back_half = NonZeroUsize::new(block.get() + half_block_size).unwrap();
            let split_bit = self.levels[level].index_of(block_ofs);
            if let Some(s) = self.levels[level].splits.as_mut() {
                s.set(split_bit, true);
            }
            unsafe { self.levels[level + 1].assign(self.base, back_half) };
        }
        Ok(self.base.with_addr_and_size(block, layout.size()))
    }

    /// Deallocates the block at `ptr`, coalescing buddies upward as far as
    /// possible.
    ///
    /// # Safety
    ///
    /// `ptr` must be an outstanding allocation returned by
    /// [`Self::allocate`] on this allocator.
    pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>) {
        let block_ofs = self.base.offset_to(ptr.addr());
        let min_level = self.min_free_level(block_ofs);
        // The block was handed out at the first level (searching downward)
        // where it is not marked split; a split mark means the allocation
        // actually lives at a deeper level.
        let mut at_level = None;
        for level in min_level..self.levels.len() {
            if self.levels[level]
                .splits
                .as_ref()
                .map(|s| !s.get(self.levels[level].index_of(block_ofs)))
                .unwrap_or(true)
            {
                at_level = Some(level);
                break;
            }
        }
        let at_level = at_level.expect("no level found to free block");
        // Free at `at_level`, then keep freeing the coalesced parent at each
        // shallower level until coalescing stops. The top level is told not
        // to coalesce, as it has no parent.
        let mut block = Some(ptr);
        for level in (0..=at_level).rev() {
            match block.take() {
                Some(b) => unsafe {
                    block = self.levels[level].deallocate(self.base, b, level != 0);
                },
                None => break,
            }
        }
        assert!(block.is_none(), "top level coalesced a block");
    }
}
/// Iterator adapter that widens each input range to `align` boundaries and
/// merges input ranges whose widened forms would overlap.
struct AlignedRanges<I>
where
    I: Iterator<Item = Range<NonZeroUsize>>,
{
    /// Power-of-two alignment to which range bounds are widened.
    align: usize,
    /// Underlying ranges; peeked to detect mergeable neighbors.
    inner: Peekable<I>,
}
impl<I> AlignedRanges<I>
where
    I: Iterator<Item = Range<NonZeroUsize>>,
{
    /// Wraps `iter`, widening its ranges to `align` boundaries and merging
    /// neighbors that come to overlap.
    ///
    /// `align` must be a power of two.
    fn new(iter: I, align: usize) -> AlignedRanges<I> {
        // The mask arithmetic in `next` relies on a power-of-two alignment.
        assert!(align.is_power_of_two());
        AlignedRanges {
            inner: iter.peekable(),
            align,
        }
    }
}
impl<I> Iterator for AlignedRanges<I>
where
    I: Iterator<Item = Range<NonZeroUsize>>,
{
    type Item = Range<NonZeroUsize>;

    /// Yields the next range with its start aligned down and its end aligned
    /// up to `self.align`, absorbing any following input ranges whose
    /// aligned start would touch the current aligned end. Input ranges are
    /// required to be sorted and non-overlapping (asserted below).
    fn next(&mut self) -> Option<Self::Item> {
        // Skip empty input ranges entirely.
        let mut cur;
        loop {
            cur = self.inner.next()?;
            if !cur.is_empty() {
                break;
            }
        }
        let align = self.align;
        // Align the start downward to an `align` boundary.
        let start = cur.start.get() & !(align - 1);
        let mut end;
        loop {
            // Align the exclusive end upward: step just past the last
            // contained address, then mask down.
            end = {
                let less_one = cur.end.get() - 1;
                let above = less_one
                    .checked_add(align)
                    .expect("end overflowed when aligned up");
                above & !(align - 1)
            };
            let next = match self.inner.peek() {
                Some(next) => next.clone(),
                None => break,
            };
            // Inputs must be sorted and non-overlapping.
            assert!(next.start >= cur.end);
            let next_start = next.start.get() & !(align - 1);
            if next_start <= end {
                // The next range's aligned form touches ours: absorb it and
                // recompute the aligned end on the following iteration.
                assert!(next.end >= cur.end);
                cur.end = next.end;
                let _ = self.inner.next();
            } else {
                break;
            }
        }
        cur.start = NonZeroUsize::new(start).expect("start aligned down to null");
        cur.end = NonZeroUsize::new(end).unwrap();
        Some(cur)
    }
}
#[cfg(test)]
mod tests {
    //! Tests covering configuration validation, allocation/deallocation,
    //! buddy coalescing, and regions with unavailable gaps.
    use super::*;
    extern crate std;
    use crate::core::{alloc::Layout, ptr::NonNull, slice};
    use std::prelude::rust_2021::*;
    #[cfg(all(any(feature = "alloc", test), not(feature = "unstable")))]
    use crate::Global;
    #[cfg(all(any(feature = "alloc", test), feature = "unstable"))]
    use alloc::alloc::Global;

    // A configuration with zero levels is invalid.
    #[test]
    fn zero_levels_errors() {
        let _ = Buddy::<256, 0, Global>::try_new(8).unwrap_err();
    }

    // A region size that overflows the address space is rejected.
    #[test]
    fn overflow_address_space_errors() {
        let _ = Buddy::<256, 1, Global>::try_new(usize::MAX).unwrap_err();
    }

    // LEVELS must be strictly less than the pointer width.
    #[test]
    fn too_many_levels_errors() {
        const LEVELS: usize = usize::BITS as usize;
        let _ = Buddy::<256, LEVELS, Global>::try_new(8).unwrap_err();
    }

    // BLK_SIZE must be a nonzero power of two.
    #[test]
    fn non_power_of_two_block_size_errors() {
        let _ = Buddy::<0, 1, Global>::try_new(8).unwrap_err();
        let _ = Buddy::<255, 4, Global>::try_new(8).unwrap_err();
    }

    // The smallest block must be able to hold a free-list link.
    #[test]
    fn too_small_min_block_size_errors() {
        const LEVELS: usize = 8;
        const MIN_SIZE: usize = core::mem::size_of::<usize>() / 2;
        const BLK_SIZE: usize = MIN_SIZE << (LEVELS - 1);
        const NUM_BLOCKS: usize = 8;
        let _ = Buddy::<BLK_SIZE, LEVELS, Global>::try_new(NUM_BLOCKS).unwrap_err();
    }

    // A zero-block region is invalid.
    #[test]
    fn zero_blocks_errors() {
        Buddy::<128, 4, Global>::try_new(0).unwrap_err();
    }

    // Degenerate single-level configurations still construct.
    #[test]
    fn one_level() {
        Buddy::<16, 1, Global>::try_new(1).unwrap();
        Buddy::<128, 1, Global>::try_new(1).unwrap();
        Buddy::<4096, 1, Global>::try_new(1).unwrap();
    }

    // Construction followed by drop must not leak or crash.
    #[test]
    fn create_and_destroy() {
        const LEVELS: usize = 8;
        const MIN_SIZE: usize = 16;
        const BLK_SIZE: usize = MIN_SIZE << (LEVELS - 1);
        const NUM_BLOCKS: usize = 8;
        let allocator = Buddy::<BLK_SIZE, LEVELS, Global>::try_new(NUM_BLOCKS).unwrap();
        drop(allocator);
    }

    // Zero-size allocations are rejected.
    #[test]
    fn alloc_empty() {
        const LEVELS: usize = 4;
        const MIN_SIZE: usize = 16;
        const BLK_SIZE: usize = MIN_SIZE << (LEVELS - 1);
        const NUM_BLOCKS: usize = 8;
        let mut allocator = Buddy::<BLK_SIZE, LEVELS, Global>::try_new(NUM_BLOCKS).unwrap();
        let layout = Layout::from_size_align(0, 1).unwrap();
        allocator.allocate(layout).unwrap_err();
    }

    // Minimum-size allocations, with interleaved deallocation.
    #[test]
    fn alloc_min_size() {
        const LEVELS: usize = 4;
        const MIN_SIZE: usize = 16;
        const BLK_SIZE: usize = MIN_SIZE << (LEVELS - 1);
        const NUM_BLOCKS: usize = 8;
        let mut allocator = Buddy::<BLK_SIZE, LEVELS, Global>::try_new(NUM_BLOCKS).unwrap();
        let layout = Layout::from_size_align(1, 1).unwrap();
        let a = allocator.allocate(layout).unwrap();
        let _b = allocator.allocate(layout).unwrap();
        let c = allocator.allocate(layout).unwrap();
        unsafe {
            allocator.deallocate(a.cast());
            allocator.deallocate(c.cast());
        }
    }

    // Allocated memory must be writable across its full size.
    #[test]
    fn alloc_write_and_free() {
        const LEVELS: usize = 8;
        const MIN_SIZE: usize = 16;
        const BLK_SIZE: usize = MIN_SIZE << (LEVELS - 1);
        const NUM_BLOCKS: usize = 8;
        let mut allocator = Buddy::<BLK_SIZE, LEVELS, Global>::try_new(NUM_BLOCKS).unwrap();
        unsafe {
            let layout = Layout::from_size_align(64, MIN_SIZE).unwrap();
            let ptr: NonNull<u8> = allocator.allocate(layout).unwrap().cast();
            {
                let buf: &mut [u8] = slice::from_raw_parts_mut(ptr.as_ptr(), layout.size());
                for (i, byte) in buf.iter_mut().enumerate() {
                    *byte = i as u8;
                }
            }
            allocator.deallocate(ptr);
        }
    }

    // Two freed halves must coalesce back into a full block, repeatedly.
    #[test]
    fn coalesce_one() {
        const LEVELS: usize = 2;
        const MIN_SIZE: usize = 16;
        const BLK_SIZE: usize = MIN_SIZE << (LEVELS - 1);
        const NUM_BLOCKS: usize = 1;
        let mut allocator = Buddy::<BLK_SIZE, LEVELS, Global>::try_new(NUM_BLOCKS).unwrap();
        let full_layout = Layout::from_size_align(2 * MIN_SIZE, MIN_SIZE).unwrap();
        let half_layout = Layout::from_size_align(MIN_SIZE, MIN_SIZE).unwrap();
        unsafe {
            let a = allocator.allocate(half_layout).unwrap();
            let b = allocator.allocate(half_layout).unwrap();
            allocator.deallocate(a.cast());
            allocator.deallocate(b.cast());
            let c = allocator.allocate(full_layout).unwrap();
            allocator.deallocate(c.cast());
            let a = allocator.allocate(half_layout).unwrap();
            let b = allocator.allocate(half_layout).unwrap();
            allocator.deallocate(a.cast());
            allocator.deallocate(b.cast());
            let c = allocator.allocate(full_layout).unwrap();
            allocator.deallocate(c.cast());
        }
    }

    // Exhaust the allocator at every level, then free everything; later
    // (larger) iterations only succeed if coalescing fully restores state.
    #[test]
    fn coalesce_many() {
        const LEVELS: usize = 4;
        const MIN_SIZE: usize = 16;
        const BLK_SIZE: usize = MIN_SIZE << (LEVELS - 1);
        const NUM_BLOCKS: usize = 8;
        let mut allocator = Buddy::<BLK_SIZE, LEVELS, Global>::try_new(NUM_BLOCKS).unwrap();
        for lvl in (0..LEVELS).rev() {
            let alloc_size = 2usize.pow((LEVELS - lvl - 1) as u32) * MIN_SIZE;
            let layout = Layout::from_size_align(alloc_size, MIN_SIZE).unwrap();
            let num_allocs = 2usize.pow(lvl as u32) * NUM_BLOCKS;
            let mut allocs = Vec::with_capacity(num_allocs);
            for _ in 0..num_allocs {
                let ptr = allocator.allocate(layout).unwrap();
                {
                    let buf: &mut [u8] =
                        unsafe { slice::from_raw_parts_mut(ptr.as_ptr().cast(), layout.size()) };
                    for (i, byte) in buf.iter_mut().enumerate() {
                        *byte = (i % 256) as u8;
                    }
                }
                allocs.push(ptr);
            }
            for alloc in allocs {
                unsafe {
                    allocator.deallocate(alloc.cast());
                }
            }
        }
    }

    // Gap handling in a single-level allocator: gaps covering all blocks
    // leave nothing allocatable; gaps sparing a block leave it usable.
    #[test]
    fn one_level_gaps() {
        type Alloc = Buddy<16, 1, Global>;
        let layout = Layout::from_size_align(1, 1).unwrap();
        for gaps in std::vec![[0..1], [15..16], [0..16], [15..32]] {
            let mut allocator = Alloc::try_new_with_offset_gaps(1, gaps).unwrap();
            allocator.allocate(layout).unwrap_err();
        }
        for gaps in std::vec![[0..32], [0..17], [15..32], [15..17]] {
            let mut allocator = Alloc::try_new_with_offset_gaps(2, gaps).unwrap();
            allocator.allocate(layout).unwrap_err();
        }
        for gaps in std::vec![[0..48], [0..33], [15..48], [15..33]] {
            let mut allocator = Alloc::try_new_with_offset_gaps(3, gaps).unwrap();
            allocator.allocate(layout).unwrap_err();
        }
        for gaps in std::vec![[0..32], [0..17], [15..32], [15..17], [16..48], [16..33]] {
            let mut allocator = Alloc::try_new_with_offset_gaps(3, gaps).unwrap();
            let a = allocator.allocate(layout).unwrap();
            unsafe { allocator.deallocate(a.cast()) };
        }
        let mut allocator = Alloc::try_new_with_offset_gaps(1, [16..32]).unwrap();
        let a = allocator.allocate(layout).unwrap();
        unsafe { allocator.deallocate(a.cast()) };
    }

    // With two levels, a gap in one half must block full-size allocations
    // while still permitting a single half-size allocation.
    #[test]
    fn two_level_gaps() {
        type Alloc = Buddy<32, 2, Global>;
        let one_byte = Layout::from_size_align(1, 1).unwrap();
        let half = Layout::from_size_align(16, 16).unwrap();
        let full = Layout::from_size_align(32, 32).unwrap();
        for gaps in std::vec![[0..32], [0..17], [15..32], [8..24], [0..48], [15..48]] {
            let mut allocator = Alloc::try_new_with_offset_gaps(1, gaps).unwrap();
            allocator.allocate(one_byte).unwrap_err();
        }
        for gaps in std::vec![[0..16], [16..32], [16..48]] {
            let mut allocator = Alloc::try_new_with_offset_gaps(1, gaps).unwrap();
            allocator.allocate(full).unwrap_err();
            let a = allocator.allocate(half).unwrap();
            allocator.allocate(one_byte).unwrap_err();
            unsafe { allocator.deallocate(a.cast()) };
            allocator.allocate(full).unwrap_err();
        }
    }

    // Gap spanning split boundaries: check free-list occupancy per level
    // after each allocation and after coalescing deallocations.
    #[test]
    fn three_level_gaps() {
        type Alloc = Buddy<128, 4, Global>;
        let layout = Layout::from_size_align(16, 4).unwrap();
        let mut allocator = Alloc::try_new_with_offset_gaps(2, [0..72]).unwrap();
        std::println!("base = {:08x}", allocator.raw.base.addr().get());
        let a = allocator.allocate(layout).unwrap();
        std::println!("a = {:08x}", a.as_ptr() as *mut () as usize);
        assert_eq!(allocator.enumerate_free_list(0), 1);
        assert_eq!(allocator.enumerate_free_list(1), 0);
        assert_eq!(allocator.enumerate_free_list(2), 1);
        assert_eq!(allocator.enumerate_free_list(3), 0);
        let b = allocator.allocate(layout).unwrap();
        std::println!("b = {:08x}", b.as_ptr() as *mut () as usize);
        assert_eq!(allocator.enumerate_free_list(0), 1);
        assert_eq!(allocator.enumerate_free_list(1), 0);
        assert_eq!(allocator.enumerate_free_list(2), 0);
        assert_eq!(allocator.enumerate_free_list(3), 1);
        unsafe { allocator.deallocate(a.cast()) };
        assert_eq!(allocator.enumerate_free_list(0), 1);
        assert_eq!(allocator.enumerate_free_list(1), 0);
        assert_eq!(allocator.enumerate_free_list(2), 0);
        assert_eq!(allocator.enumerate_free_list(3), 2);
        for lev in allocator.raw.levels.iter() {
            for bit in lev.buddies.iter() {
                let s = if bit { "1" } else { "0" };
                std::print!("{s}");
            }
            std::println!();
        }
        unsafe { allocator.deallocate(b.cast()) };
    }

    // Regions added separately via add_region must coalesce into a full
    // block once both halves are available.
    #[test]
    fn add_coalesce() {
        const BLK_SIZE: usize = 128;
        const LEVELS: usize = 2;
        type Alloc = Buddy<BLK_SIZE, LEVELS, Raw>;
        const NUM_BLOCKS: usize = 1;
        let region_layout = Alloc::region_layout(NUM_BLOCKS).unwrap();
        let metadata_layout = Alloc::metadata_layout(NUM_BLOCKS).unwrap();
        let region = NonNull::new(unsafe { std::alloc::alloc(region_layout) }).unwrap();
        let metadata = NonNull::new(unsafe { std::alloc::alloc(metadata_layout) }).unwrap();
        let mut buddy =
            unsafe { Alloc::new_raw_unpopulated(metadata, region, NUM_BLOCKS).unwrap() };
        let base_addr = buddy.raw.base.addr();
        let middle = base_addr
            .get()
            .checked_add(BLK_SIZE / 2)
            .and_then(NonZeroUsize::new)
            .unwrap();
        let limit = buddy.raw.base.limit();
        let left = base_addr..middle;
        let right = middle..limit;
        let half_layout = Layout::from_size_align(BLK_SIZE / 2, BLK_SIZE / 2).unwrap();
        let full_layout = Layout::from_size_align(BLK_SIZE, BLK_SIZE).unwrap();
        // Nothing is allocatable before any region is added.
        buddy.allocate(half_layout).unwrap_err();
        unsafe { buddy.add_region(left) };
        let left_blk = buddy.allocate(half_layout).unwrap();
        unsafe { buddy.deallocate(left_blk.cast()) };
        unsafe { buddy.add_region(right) };
        let full_blk = buddy.allocate(full_layout).unwrap();
        unsafe { buddy.deallocate(full_blk.cast()) };
        // `Raw` backing: the memory must be freed manually.
        drop(buddy);
        unsafe {
            std::alloc::dealloc(region.as_ptr(), region_layout);
            std::alloc::dealloc(metadata.as_ptr(), metadata_layout);
        }
    }
}