//! A bare-metal, `no_std` buddy system allocator.
//!
//! The core type is [`Heap`], a fixed-order buddy allocator. With the
//! `use_spin` feature enabled, `LockedHeap` and `LockedHeapWithRescue`
//! wrap it in a spin lock and implement `core::alloc::GlobalAlloc`.
#![no_std]
#[cfg(test)]
#[macro_use]
extern crate std;
#[cfg(feature = "use_spin")]
extern crate spin;
#[cfg(feature = "alloc")]
extern crate alloc;
#[cfg(feature = "use_spin")]
use core::alloc::GlobalAlloc;
use core::alloc::Layout;
use core::cmp::{max, min};
use core::fmt;
use core::mem::size_of;
#[cfg(feature = "use_spin")]
use core::ops::Deref;
use core::ptr::NonNull;
#[cfg(feature = "use_spin")]
use spin::Mutex;
#[cfg(feature = "alloc")]
mod frame;
pub mod linked_list;
#[cfg(test)]
mod test;
#[cfg(feature = "alloc")]
pub use frame::*;
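/// A heap that manages memory with the buddy system, using `ORDER`
/// power-of-two size classes: `free_list[class]` holds free blocks of
/// `1 << class` bytes, for `class < ORDER`.
///
/// # Usage
///
/// A minimal sketch, assuming this file is the root of the
/// `buddy_system_allocator` crate and that `begin`/`size` describe a
/// free, word-aligned memory region owned by the caller:
///
/// ```ignore
/// use buddy_system_allocator::Heap;
/// use core::alloc::Layout;
///
/// let mut heap = Heap::<32>::empty();
/// unsafe { heap.init(begin, size) }; // or: heap.add_to_heap(begin, end)
///
/// let layout = Layout::from_size_align(64, 8).unwrap();
/// if let Ok(ptr) = heap.alloc(layout) {
///     // ... use the block ...
///     unsafe { heap.dealloc(ptr, layout) };
/// }
/// ```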
pub struct Heap<const ORDER: usize> {
    /// Intrusive free lists, one per size class; `free_list[i]` links
    /// free blocks of `1 << i` bytes.
    free_list: [linked_list::LinkedList; ORDER],

    /// Bytes requested by callers, before rounding.
    user: usize,

    /// Bytes actually handed out, after rounding up to a block size.
    allocated: usize,

    /// Total bytes added to the heap.
    total: usize,
}
impl<const ORDER: usize> Heap<ORDER> {
    /// Create an empty heap.
    pub const fn new() -> Self {
        Heap {
            free_list: [linked_list::LinkedList::new(); ORDER],
            user: 0,
            allocated: 0,
            total: 0,
        }
    }

    /// Create an empty heap. Alias of [`new`](Self::new).
    pub const fn empty() -> Self {
        Self::new()
    }
    /// Add the memory range `[start, end)` to the heap.
    ///
    /// Both bounds are rounded inward to `usize` alignment, and the range
    /// is carved into the largest naturally aligned power-of-two blocks
    /// that fit.
    ///
    /// # Safety
    ///
    /// The range must describe valid, unused memory that outlives the
    /// heap and is not added twice.
    pub unsafe fn add_to_heap(&mut self, mut start: usize, mut end: usize) {
        // `!size_of::<usize>() + 1` is `-size_of::<usize>()` in two's
        // complement, i.e. a mask that clears the low alignment bits.
        start = (start + size_of::<usize>() - 1) & (!size_of::<usize>() + 1);
        end &= !size_of::<usize>() + 1;
        assert!(start <= end);

        let mut total = 0;
        let mut current_start = start;

        while current_start + size_of::<usize>() <= end {
            // The block at `current_start` can be no larger than its
            // alignment (lowest set bit) or the space that remains.
            let lowbit = current_start & (!current_start + 1);
            let mut size = min(lowbit, prev_power_of_two(end - current_start));

            // Clamp to the largest size class this heap supports.
            let mut order = size.trailing_zeros() as usize;
            if order > ORDER - 1 {
                order = ORDER - 1;
                size = 1 << order;
            }
            total += size;

            unsafe {
                self.free_list[order].push(current_start as *mut usize);
            }
            current_start += size;
        }

        self.total += total;
    }
    /// Add the memory range `[start, start + size)` to the heap.
    ///
    /// # Safety
    ///
    /// Same requirements as [`add_to_heap`](Self::add_to_heap).
    pub unsafe fn init(&mut self, start: usize, size: usize) {
        unsafe {
            self.add_to_heap(start, start + size);
        }
    }
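    /// Allocate a block satisfying `layout`, or return `Err(())` if no
    /// free block is large enough.
    ///
    /// The request is rounded up to the next power of two that also
    /// covers the alignment and the word size; the sizes in this sketch
    /// are illustrative and `heap` is assumed to be an initialized heap:
    ///
    /// ```ignore
    /// // max(24.next_power_of_two(), max(8, size_of::<usize>())) = 32,
    /// // so a 24-byte, 8-byte-aligned request is served from class 5.
    /// let layout = Layout::from_size_align(24, 8).unwrap();
    /// let ptr = heap.alloc(layout);
    /// ```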
    pub fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> {
        // Round the request up to a power of two that also satisfies the
        // alignment and the word-size minimum.
        let size = max(
            layout.size().next_power_of_two(),
            max(layout.align(), size_of::<usize>()),
        );
        let class = size.trailing_zeros() as usize;

        // Find the smallest class with a free block, then split it down
        // to the requested class, pushing the upper halves (buddies) onto
        // the smaller free lists.
        for i in class..self.free_list.len() {
            if !self.free_list[i].is_empty() {
                for j in (class + 1..i + 1).rev() {
                    if let Some(block) = self.free_list[j].pop() {
                        unsafe {
                            self.free_list[j - 1]
                                .push((block as usize + (1 << (j - 1))) as *mut usize);
                            self.free_list[j - 1].push(block);
                        }
                    } else {
                        return Err(());
                    }
                }

                let result = NonNull::new(
                    self.free_list[class]
                        .pop()
                        .expect("current block should have free space now")
                        as *mut u8,
                );
                if let Some(result) = result {
                    self.user += layout.size();
                    self.allocated += size;
                    return Ok(result);
                } else {
                    return Err(());
                }
            }
        }

        Err(())
    }
    /// Deallocate a block previously returned by [`alloc`](Self::alloc)
    /// with the same `layout`.
    ///
    /// # Safety
    ///
    /// `ptr` must point to a live allocation made by this heap with
    /// `layout`.
    pub unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
        // Recompute the size class exactly as `alloc` did.
        let size = max(
            layout.size().next_power_of_two(),
            max(layout.align(), size_of::<usize>()),
        );
        let class = size.trailing_zeros() as usize;

        // Return the block to its free list.
        unsafe {
            self.free_list[class].push(ptr.as_ptr() as *mut usize);
        }

        // Merge with free buddies: while the buddy of the current block
        // is also free, remove both halves and push the merged block one
        // class up.
        let mut current_ptr = ptr.as_ptr() as usize;
        let mut current_class = class;

        while current_class < self.free_list.len() - 1 {
            let buddy = current_ptr ^ (1 << current_class);
            let mut flag = false;
            for block in self.free_list[current_class].iter_mut() {
                if block.value() as usize == buddy {
                    block.pop();
                    flag = true;
                    break;
                }
            }

            if flag {
                // The current block sits at the head of its list; remove
                // it too, then push the merged block into the next class.
                self.free_list[current_class].pop();
                current_ptr = min(current_ptr, buddy);
                current_class += 1;
                unsafe {
                    self.free_list[current_class].push(current_ptr as *mut usize);
                }
            } else {
                break;
            }
        }

        self.user -= layout.size();
        self.allocated -= size;
    }
    /// Return the number of bytes requested by callers.
    pub fn stats_alloc_user(&self) -> usize {
        self.user
    }

    /// Return the number of bytes actually allocated, after rounding.
    pub fn stats_alloc_actual(&self) -> usize {
        self.allocated
    }

    /// Return the total number of bytes managed by the heap.
    pub fn stats_total_bytes(&self) -> usize {
        self.total
    }
}
impl<const ORDER: usize> Default for Heap<ORDER> {
    fn default() -> Self {
        Self::new()
    }
}
impl<const ORDER: usize> fmt::Debug for Heap<ORDER> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("Heap")
            .field("user", &self.user)
            .field("allocated", &self.allocated)
            .field("total", &self.total)
            .finish()
    }
}
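/// A spin-locked version of [`Heap`] that implements
/// `core::alloc::GlobalAlloc`, so it can be installed as the global
/// allocator.
///
/// # Usage
///
/// A minimal sketch, assuming the crate is named
/// `buddy_system_allocator` and that `heap_start`/`heap_size` describe a
/// free memory region provided by the platform:
///
/// ```ignore
/// use buddy_system_allocator::LockedHeap;
///
/// #[global_allocator]
/// static HEAP: LockedHeap<32> = LockedHeap::<32>::new();
///
/// unsafe fn init_heap(heap_start: usize, heap_size: usize) {
///     // `LockedHeap` derefs to `Mutex<Heap>`, so lock and initialize.
///     HEAP.lock().init(heap_start, heap_size);
/// }
/// ```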
#[cfg(feature = "use_spin")]
#[derive(Default)]
pub struct LockedHeap<const ORDER: usize>(Mutex<Heap<ORDER>>);
#[cfg(feature = "use_spin")]
impl<const ORDER: usize> LockedHeap<ORDER> {
    /// Create an empty locked heap.
    pub const fn new() -> Self {
        LockedHeap(Mutex::new(Heap::<ORDER>::new()))
    }

    /// Create an empty locked heap. Alias of [`new`](Self::new).
    pub const fn empty() -> Self {
        LockedHeap(Mutex::new(Heap::<ORDER>::new()))
    }
}
#[cfg(feature = "use_spin")]
impl<const ORDER: usize> Deref for LockedHeap<ORDER> {
    type Target = Mutex<Heap<ORDER>>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[cfg(feature = "use_spin")]
unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeap<ORDER> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        self.0
            .lock()
            .alloc(layout)
            .ok()
            .map_or(core::ptr::null_mut(), |allocation| allocation.as_ptr())
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        unsafe { self.0.lock().dealloc(NonNull::new_unchecked(ptr), layout) }
    }
}
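/// A spin-locked version of [`Heap`] with a rescue hook: when an
/// allocation fails, `rescue` is called with the heap and the failing
/// `Layout` so more memory can be added, and the allocation is retried
/// once. Note that `rescue` runs while the heap lock is held, so it must
/// not try to lock this heap again.
///
/// A minimal sketch; the crate name and the region addresses below are
/// assumptions for illustration:
///
/// ```ignore
/// use buddy_system_allocator::{Heap, LockedHeapWithRescue};
/// use core::alloc::Layout;
///
/// fn rescue(heap: &mut Heap<32>, _layout: &Layout) {
///     // Grow the heap from a reserved region on demand
///     // (hypothetical addresses).
///     unsafe { heap.add_to_heap(0x8800_0000, 0x8810_0000) };
/// }
///
/// static HEAP: LockedHeapWithRescue<32> = LockedHeapWithRescue::new(rescue);
/// ```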
#[cfg(feature = "use_spin")]
pub struct LockedHeapWithRescue<const ORDER: usize> {
    inner: Mutex<Heap<ORDER>>,
    /// Called when an allocation fails, before the single retry.
    rescue: fn(&mut Heap<ORDER>, &Layout),
}

#[cfg(feature = "use_spin")]
impl<const ORDER: usize> LockedHeapWithRescue<ORDER> {
    /// Create an empty heap with the given rescue function.
    pub const fn new(rescue: fn(&mut Heap<ORDER>, &Layout)) -> Self {
        LockedHeapWithRescue {
            inner: Mutex::new(Heap::<ORDER>::new()),
            rescue,
        }
    }
}
#[cfg(feature = "use_spin")]
impl<const ORDER: usize> Deref for LockedHeapWithRescue<ORDER> {
    type Target = Mutex<Heap<ORDER>>;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
#[cfg(feature = "use_spin")]
unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeapWithRescue<ORDER> {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        let mut inner = self.inner.lock();
        match inner.alloc(layout) {
            Ok(allocation) => allocation.as_ptr(),
            Err(_) => {
                // Let the rescue hook add memory, then retry exactly once.
                (self.rescue)(&mut inner, &layout);
                inner
                    .alloc(layout)
                    .ok()
                    .map_or(core::ptr::null_mut(), |allocation| allocation.as_ptr())
            }
        }
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        unsafe {
            self.inner
                .lock()
                .dealloc(NonNull::new_unchecked(ptr), layout)
        }
    }
}
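/// Return the largest power of two less than or equal to `num`.
///
/// `num` must be non-zero; `prev_power_of_two(0)` would underflow the
/// shift amount. For example:
///
/// ```
/// assert_eq!(prev_power_of_two(10), 8);
/// assert_eq!(prev_power_of_two(16), 16);
/// ```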
pub(crate) fn prev_power_of_two(num: usize) -> usize {
    1 << (usize::BITS as usize - num.leading_zeros() as usize - 1)
}