#![cfg_attr(feature = "const_mut_refs", feature(const_mut_refs))]
#![cfg_attr(
feature = "alloc_ref",
feature(allocator_api, alloc_layout_extra, nonnull_slice_from_raw_parts)
)]
#![no_std]
#[cfg(any(test, fuzzing))]
#[macro_use]
extern crate std;
#[cfg(feature = "use_spin")]
extern crate spinning_top;
#[cfg(feature = "use_spin")]
use core::alloc::GlobalAlloc;
use core::alloc::Layout;
#[cfg(feature = "alloc_ref")]
use core::alloc::{AllocError, Allocator};
use core::mem::MaybeUninit;
#[cfg(feature = "use_spin")]
use core::ops::Deref;
use core::ptr::NonNull;
#[cfg(test)]
use hole::Hole;
use hole::HoleList;
#[cfg(feature = "use_spin")]
use spinning_top::Spinlock;
pub mod hole;
#[cfg(test)]
mod test;
/// A first-fit heap allocator backed by a linked list of free memory holes.
pub struct Heap {
    // Number of bytes currently allocated: `allocate_first_fit` adds the
    // aligned layout size, `deallocate` subtracts the freed layout size.
    used: usize,
    // The list of free memory regions ("holes") that backs this heap.
    holes: HoleList,
}
#[cfg(fuzzing)]
impl Heap {
    /// Fuzzing-only diagnostics: prints the heap's bottom/top pointers, its
    /// size, and the size field of the hole list's first node (labelled
    /// `pending`), then dumps the hole list via `HoleList::debug`.
    /// Only compiled under `cfg(fuzzing)`, where `std`'s `println!` is
    /// available (see the `extern crate std` at the top of the file).
    pub fn debug(&mut self) {
        println!(
            "bottom: {:?}, top: {:?}, size: {}, pending: {}",
            self.bottom(),
            self.top(),
            self.size(),
            self.holes.first.size,
        );
        self.holes.debug();
    }
}
unsafe impl Send for Heap {}
impl Heap {
#[cfg(not(feature = "const_mut_refs"))]
pub fn empty() -> Heap {
Heap {
used: 0,
holes: HoleList::empty(),
}
}
#[cfg(feature = "const_mut_refs")]
pub const fn empty() -> Heap {
Heap {
used: 0,
holes: HoleList::empty(),
}
}
pub unsafe fn init(&mut self, heap_bottom: *mut u8, heap_size: usize) {
self.used = 0;
self.holes = HoleList::new(heap_bottom, heap_size);
}
pub fn init_from_slice(&mut self, mem: &'static mut [MaybeUninit<u8>]) {
assert!(
self.bottom().is_null(),
"The heap has already been initialized."
);
let size = mem.len();
let address = mem.as_mut_ptr().cast();
unsafe { self.init(address, size) }
}
pub unsafe fn new(heap_bottom: *mut u8, heap_size: usize) -> Heap {
Heap {
used: 0,
holes: HoleList::new(heap_bottom, heap_size),
}
}
pub fn from_slice(mem: &'static mut [MaybeUninit<u8>]) -> Heap {
let size = mem.len();
let address = mem.as_mut_ptr().cast();
unsafe { Self::new(address, size) }
}
#[allow(clippy::result_unit_err)]
pub fn allocate_first_fit(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> {
match self.holes.allocate_first_fit(layout) {
Ok((ptr, aligned_layout)) => {
self.used += aligned_layout.size();
Ok(ptr)
}
Err(err) => Err(err),
}
}
pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
self.used -= self.holes.deallocate(ptr, layout).size();
}
pub fn bottom(&self) -> *mut u8 {
self.holes.bottom
}
pub fn size(&self) -> usize {
unsafe { self.holes.top.offset_from(self.holes.bottom) as usize }
}
pub fn top(&self) -> *mut u8 {
unsafe { self.holes.top.add(self.holes.pending_extend as usize) }
}
pub fn used(&self) -> usize {
self.used
}
pub fn free(&self) -> usize {
self.size() - self.used
}
pub unsafe fn extend(&mut self, by: usize) {
self.holes.extend(by);
}
}
#[cfg(all(feature = "alloc_ref", feature = "use_spin"))]
unsafe impl Allocator for LockedHeap {
    /// Allocates via the locked heap; zero-sized layouts get a dangling,
    /// well-aligned slice without touching the heap at all.
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        if layout.size() == 0 {
            return Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0));
        }
        self.0
            .lock()
            .allocate_first_fit(layout)
            .map(|ptr| NonNull::slice_from_raw_parts(ptr, layout.size()))
            .map_err(|()| AllocError)
    }

    /// Frees via the locked heap; zero-sized allocations were never backed by
    /// heap memory, so they are ignored.
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        if layout.size() != 0 {
            self.0.lock().deallocate(ptr, layout);
        }
    }
}
/// A [`Heap`] protected by a spinlock, making it usable as a
/// `#[global_allocator]`. Only available with the `use_spin` feature.
#[cfg(feature = "use_spin")]
pub struct LockedHeap(Spinlock<Heap>);
#[cfg(feature = "use_spin")]
impl LockedHeap {
    /// Creates an empty locked heap (`const` with the `use_spin_nightly`
    /// feature). Allocations fail until the inner heap is initialized.
    #[cfg(feature = "use_spin_nightly")]
    pub const fn empty() -> LockedHeap {
        LockedHeap(Spinlock::new(Heap::empty()))
    }

    /// Creates an empty locked heap. Allocations fail until the inner heap
    /// is initialized.
    #[cfg(not(feature = "use_spin_nightly"))]
    pub fn empty() -> LockedHeap {
        LockedHeap(Spinlock::new(Heap::empty()))
    }

    /// Creates a locked heap over the given memory region.
    ///
    /// # Safety
    ///
    /// Same contract as [`Heap::new`]: the region must be valid, writable,
    /// otherwise unused, and live for the heap's whole lifetime.
    pub unsafe fn new(heap_bottom: *mut u8, heap_size: usize) -> LockedHeap {
        LockedHeap(Spinlock::new(Heap::new(heap_bottom, heap_size)))
    }
}
#[cfg(feature = "use_spin")]
impl Deref for LockedHeap {
    type Target = Spinlock<Heap>;

    /// Exposes the inner `Spinlock<Heap>`, so callers can `lock()` directly.
    fn deref(&self) -> &Spinlock<Heap> {
        let LockedHeap(inner) = self;
        inner
    }
}
#[cfg(feature = "use_spin")]
unsafe impl GlobalAlloc for LockedHeap {
    /// Allocates via first-fit under the lock; a failed allocation becomes a
    /// null pointer as the `GlobalAlloc` contract requires.
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        match self.0.lock().allocate_first_fit(layout) {
            Ok(allocation) => allocation.as_ptr(),
            Err(()) => core::ptr::null_mut(),
        }
    }

    /// Frees under the lock. `GlobalAlloc` guarantees `ptr` came from
    /// `alloc` and is non-null, so `new_unchecked` is sound here.
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        let mut heap = self.0.lock();
        heap.deallocate(NonNull::new_unchecked(ptr), layout)
    }
}
/// Rounds `size` down to the nearest multiple of `align`.
///
/// An `align` of 0 leaves `size` unchanged.
///
/// # Panics
///
/// Panics if `align` is neither 0 nor a power of two.
pub fn align_down_size(size: usize, align: usize) -> usize {
    match align {
        0 => size,
        a if a.is_power_of_two() => size & !(a - 1),
        _ => panic!("`align` must be a power of 2"),
    }
}
/// Rounds `size` up to the nearest multiple of `align`.
///
/// # Panics
///
/// Panics if `align` is neither 0 nor a power of two (via
/// [`align_down_size`]), or if `size + align - 1` overflows/underflows in a
/// debug build.
pub fn align_up_size(size: usize, align: usize) -> usize {
    let padded = size + align - 1;
    align_down_size(padded, align)
}
/// Rounds `addr` up to the nearest address aligned to `align` (which must be
/// a power of two, per `align_offset`'s contract). Uses `wrapping_add` so the
/// adjustment itself never triggers an overflow panic.
pub fn align_up(addr: *mut u8, align: usize) -> *mut u8 {
    addr.wrapping_add(addr.align_offset(align))
}