/// Global handle to the kernel heap; `None` until the heap is initialised.
pub static HEAP: Mutex<Option<Heap<'static>>> = Mutex::new(None);
/// Virtual address where the kernel heap is mapped.
pub const HEAP_START: usize = 0x4444_4444_0000;
/// Total size of the kernel heap in bytes (100 KiB).
// NOTE(review): 100 * 1024 is not a power of two, but `Heap::new` asserts a
// power-of-two heap size — confirm the buddy heap is built over a rounded size.
pub const HEAP_SIZE: usize = 100 * 1024;
/// Required alignment of the heap base, and the maximum allocation alignment
/// the buddy allocator will honour (see `Heap::allocationSize`).
const MIN_HEAP_ALIGN: usize = 4096;
/// Maps the kernel heap's virtual page range to freshly allocated physical
/// frames, marked present and writable.
///
/// # Errors
///
/// Returns [`MapToError::FrameAllocationFailed`] when the frame allocator is
/// exhausted, or whatever error `mapper.map_to` reports for a page.
///
/// # Safety
///
/// The caller must guarantee that the virtual range
/// `HEAP_START..HEAP_START + HEAP_SIZE` is unused and that `mapper` and
/// `allocator` operate on the active page tables.
pub unsafe fn build_heap(
    mapper: &mut impl Mapper<Size4KiB>,
    allocator: &mut impl FrameAllocator<Size4KiB>,
) -> Result<(), MapToError<Size4KiB>> {
    // Pages covering [HEAP_START, HEAP_START + HEAP_SIZE - 1], inclusive.
    let page_range = {
        let heap_start = VirtAddr::new(HEAP_START as u64);
        let heap_end = heap_start + HEAP_SIZE - 1u64;
        let first_page = Page::containing_address(heap_start);
        let last_page = Page::containing_address(heap_end);
        Page::range_inclusive(first_page, last_page)
    };
    for page in page_range {
        // Back each heap page with its own physical frame.
        let frame = allocator
            .allocate_frame()
            .ok_or(MapToError::FrameAllocationFailed)?;
        let flags = PageTableFlags::PRESENT | PageTableFlags::WRITABLE;
        // SAFETY: the caller guarantees this virtual range is otherwise
        // unused, so creating the mapping cannot alias existing memory.
        unsafe { mapper.map_to(page, frame, flags, allocator)?.flush() };
    }
    Ok(())
}
/// A node in an intrusive singly-linked free list.
///
/// The allocator writes one of these into the first bytes of every free
/// block, so free blocks themselves form the list with no extra storage.
pub struct FreeBlock {
    next: *mut FreeBlock,
}

impl FreeBlock {
    /// Creates a list node whose successor is `next` (null for the tail).
    pub fn new(next: *mut FreeBlock) -> Self {
        Self { next }
    }
}
/// A buddy-system allocator managing a power-of-two-sized memory region.
pub struct Heap<'a> {
    // Lowest address of the managed region; asserted in `Heap::new` to be
    // aligned to MIN_HEAP_ALIGN.
    heapBase: *mut u8,
    // Total bytes managed; asserted in `Heap::new` to be a power of two.
    heapSize: usize,
    // One intrusive free-list head per block order; index 0 holds blocks of
    // the minimum size, the last index a single heap-spanning block.
    freeLists: &'a mut [*mut FreeBlock],
    // Block size of order 0, in bytes (heapSize >> (freeLists.len() - 1)).
    minBlockSize: usize,
    // log2(minBlockSize), cached for order <-> size conversions.
    minBlockSizeLog2: u8,
}
// SAFETY: NOTE(review): the raw pointers make `Heap` !Send by default. This
// appears sound because the only shared instance lives behind the `HEAP`
// mutex, which serialises all access — confirm no other aliases of the heap
// memory exist before relying on this.
unsafe impl<'a> Send for Heap<'a> {}
impl<'a> Heap<'a> {
    /// Builds a buddy allocator over the `heap_size` bytes at `heap_base`.
    ///
    /// `free_lists` supplies one list head per block order: index 0 holds
    /// blocks of the minimum size, and the last index holds the single
    /// heap-sized block. The lists are cleared, then seeded with one maximal
    /// block covering the whole heap.
    ///
    /// # Safety
    ///
    /// `heap_base` must point to `heap_size` bytes valid for reads and writes
    /// for the lifetime of the returned `Heap`, and nothing else may touch
    /// that memory while the heap owns it.
    ///
    /// # Panics
    ///
    /// Panics when `heap_base` is null or not `MIN_HEAP_ALIGN`-aligned,
    /// `free_lists` is empty, `heap_size` is not a power of two, or the
    /// derived minimum block size cannot hold a `FreeBlock` header.
    pub unsafe fn new(heap_base: *mut u8, heap_size: usize, free_lists: &mut [*mut FreeBlock]) -> Heap {
        assert!(!heap_base.is_null());
        assert!(!free_lists.is_empty());
        // One order per free list: the top order spans the whole heap, so the
        // smallest block is the heap size halved once per additional order.
        let min_block_size = heap_size >> (free_lists.len() - 1);
        assert_eq!(heap_base as usize & (MIN_HEAP_ALIGN - 1), 0);
        assert!(heap_size >= min_block_size);
        // Every free block must be able to hold the intrusive list header.
        assert!(min_block_size >= size_of::<FreeBlock>());
        assert!(heap_size.powerOf2());
        // Cross-check: the orders must tile the heap exactly.
        assert_eq!(
            min_block_size * (2u32.pow(free_lists.len() as u32 - 1)) as usize,
            heap_size
        );
        // Start with every list empty...
        for head in free_lists.iter_mut() {
            *head = ptr::null_mut();
        }
        let mut result = Heap {
            heapBase: heap_base,
            heapSize: heap_size,
            freeLists: free_lists,
            minBlockSize: min_block_size,
            minBlockSizeLog2: min_block_size.log2(),
        };
        // ...then donate the entire heap as one block of the largest order.
        let order = result
            .allocationOrder(heap_size, 1)
            .expect("Failed to calculate order for root heap block");
        result.freeListInsert(order, heap_base);
        result
    }

    /// Rounds a `(size, align)` request up to the block size the allocator
    /// would actually hand out, or `None` when the request cannot be served.
    pub fn allocationSize(&self, mut size: usize, align: usize) -> Option<usize> {
        // Only power-of-two alignments up to the heap base alignment can be
        // guaranteed.
        if !align.powerOf2() || align > MIN_HEAP_ALIGN {
            return None;
        }
        // A power-of-two block of at least `align` bytes is automatically
        // that strongly aligned, so alignment folds into the size request.
        if align > size {
            size = align;
        }
        // Clamp below by the smallest block, round up to a power of two...
        size = max(size, self.minBlockSize);
        size = size.nextPowerOf2();
        // ...and reject anything larger than the heap itself.
        if size > self.heapSize {
            return None;
        }
        Some(size)
    }

    /// Maps a request to its order: 0 for the minimum block size, rising by
    /// one per doubling. `None` when the request cannot be served.
    pub fn allocationOrder(&self, size: usize, align: usize) -> Option<usize> {
        self.allocationSize(size, align)
            .map(|s| (s.log2() - self.minBlockSizeLog2) as usize)
    }

    /// Size in bytes of a block of the given order.
    fn orderSize(&self, order: usize) -> usize {
        1 << (self.minBlockSizeLog2 as usize + order)
    }

    /// Pops the head block off the free list for `order`, if any.
    unsafe fn freeListPop(&mut self, order: usize) -> Option<*mut u8> {
        let candidate = self.freeLists[order];
        if candidate.is_null() {
            None
        } else {
            // Unlink the head: its successor becomes the new head.
            self.freeLists[order] = (*candidate).next;
            Some(candidate as *mut u8)
        }
    }

    /// Pushes `block` onto the free list for `order`, overwriting the block's
    /// first bytes with the intrusive list header.
    unsafe fn freeListInsert(&mut self, order: usize, block: *mut u8) {
        let free_pointer = block as *mut FreeBlock;
        *free_pointer = FreeBlock::new(self.freeLists[order]);
        self.freeLists[order] = free_pointer;
    }

    /// Removes `block` from the free list for `order`, returning whether it
    /// was present.
    unsafe fn freeListRemove(&mut self, order: usize, block: *mut u8) -> bool {
        let block_pointer = block as *mut FreeBlock;
        // `checking` points at the link (the head slot, or some node's `next`
        // field) under inspection, so head and interior removal share one
        // splice path.
        let mut checking: *mut *mut FreeBlock = &mut self.freeLists[order];
        while !(*checking).is_null() {
            if *checking == block_pointer {
                // Splice the node out by redirecting the link past it.
                *checking = (*(*checking)).next;
                return true;
            }
            checking = &mut ((*(*checking)).next);
        }
        false
    }

    /// Repeatedly halves a block of order `order` down to `order_needed`,
    /// pushing each split-off upper half onto its free list. The lower half
    /// (still at `block`) remains with the caller.
    unsafe fn splitFreeBlock(&mut self, block: *mut u8, mut order: usize, order_needed: usize) {
        let mut size_to_split = self.orderSize(order);
        while order > order_needed {
            size_to_split >>= 1;
            order -= 1;
            // The upper half becomes a free block of the next order down.
            let split = block.add(size_to_split);
            self.freeListInsert(order, split);
        }
    }

    /// Allocates a block of at least `size` bytes aligned to `align`.
    /// Returns a null pointer when the request cannot be satisfied.
    ///
    /// # Safety
    ///
    /// The heap must have been built over valid memory (see [`Heap::new`]).
    pub unsafe fn allocate(&mut self, size: usize, align: usize) -> *mut u8 {
        let order_needed = match self.allocationOrder(size, align) {
            Some(order) => order,
            None => return ptr::null_mut(),
        };
        // Take the smallest free block that fits, searching upward through
        // ever-larger orders.
        for order in order_needed..self.freeLists.len() {
            if let Some(block) = self.freeListPop(order) {
                // An oversized block is split; the surplus halves go back on
                // the free lists and the lower half is returned.
                if order > order_needed {
                    self.splitFreeBlock(block, order, order_needed);
                }
                return block;
            }
        }
        // No free block of any sufficient order: out of memory.
        ptr::null_mut()
    }

    /// Computes the address of `block`'s buddy — the adjacent block of the
    /// same order it merges with — or `None` for a heap-spanning block,
    /// which has no buddy.
    pub unsafe fn buddy(&self, order: usize, block: *mut u8) -> Option<*mut u8> {
        let relative = (block as usize) - (self.heapBase as usize);
        let size = self.orderSize(order);
        if size >= self.heapSize {
            None
        } else {
            // Buddies differ exactly in the bit equal to their block size.
            Some(self.heapBase.add(relative ^ size))
        }
    }

    /// Returns a block to the heap, coalescing it with any free buddies into
    /// the largest possible block before inserting it on a free list.
    ///
    /// # Safety
    ///
    /// `pointer` must come from a previous [`Heap::allocate`] on this heap
    /// with the same `old_size`/`align`, and must not be used afterwards.
    ///
    /// # Panics
    ///
    /// Panics when `(old_size, align)` does not describe a valid allocation.
    pub unsafe fn deallocate(&mut self, pointer: *mut u8, old_size: usize, align: usize) {
        let initial_order = self
            .allocationOrder(old_size, align)
            .expect("Tried to dispose of invalid block");
        let mut block = pointer;
        for order in initial_order..self.freeLists.len() {
            // If this block's buddy is also free, the pair merges into one
            // block of the next order up, and coalescing continues there.
            if let Some(buddy) = self.buddy(order, block) {
                if self.freeListRemove(order, buddy) {
                    // The merged block starts at the lower of the two halves.
                    block = min(block, buddy);
                    continue;
                }
            }
            // Buddy busy, or top order reached: park the block here.
            self.freeListInsert(order, block);
            return;
        }
    }
}
use {
super::AllocationError,
crate::math::PowersOf2,
core::{
cmp::{max, min},
mem::size_of,
ptr,
},
spin::Mutex,
x86_64::{
structures::paging::{
mapper::MapToError, FrameAllocator, Mapper, Page, PageTableFlags, Size4KiB,
},
VirtAddr,
},
};