/// Global heap slot for the kernel allocator, holding up to 32 size classes.
/// Starts as `None`; presumably populated during early boot — the
/// initializer is not visible in this file (TODO confirm).
pub static HEAP: Mutex<Option<Heap<32>>> = Mutex::new(None);
/// Virtual address at which the heap region is placed.
pub const HEAP_START: usize = 0x4444_4444_0000;
/// Size of the heap region in bytes (100 KiB).
pub const HEAP_SIZE: usize = 100 * 1024;
/// A buddy-system heap with `ORDER` size classes: class `i` tracks free
/// blocks of `1 << i` bytes (see `add_to_heap` / `allocate` below).
pub struct Heap<const ORDER: usize> {
    /// Bytes currently handed out, counted at rounded-up (power-of-two) block size.
    pub allocated: usize,
    /// One free list per size class; `freeList[i]` links free blocks of size `1 << i`.
    pub freeList: [LinkedList; ORDER],
    /// Total usable bytes ever added via `add_to_heap`.
    pub total: usize,
    /// Bytes currently in use as requested by callers (un-rounded `layout.size` sum).
    pub user: usize,
}
impl<const ORDER: usize> Heap<ORDER> {
    /// Creates an empty heap with no attached memory; call `add_to_heap`
    /// before allocating.
    pub const fn new() -> Self {
        Heap {
            allocated: 0,
            freeList: [LinkedList::new(); ORDER],
            total: 0,
            user: 0,
        }
    }

    /// Adds the memory range `[start, end)` to the heap's free lists.
    ///
    /// The range is first shrunk to word alignment, then carved into maximal
    /// power-of-two chunks, each pushed onto the free list of its class.
    ///
    /// # Safety
    /// The range must be valid, otherwise-unused memory that outlives the
    /// heap, and must not overlap any range previously added.
    pub unsafe fn add_to_heap(&mut self, mut start: usize, mut end: usize) {
        // Align `start` up and `end` down to the word size; `!n + 1` is the
        // two's-complement negation of `n`, i.e. the alignment mask.
        start = (start + size_of::<usize>() - 1) & (!size_of::<usize>() + 1);
        end &= !size_of::<usize>() + 1;
        assert!(start <= end);

        let mut total = 0;
        let mut current_start = start;
        while current_start + size_of::<usize>() <= end {
            // Largest power-of-two block that is both naturally aligned at
            // `current_start` (its lowest set bit) and still fits before `end`.
            let lowbit = current_start & (!current_start + 1);
            let size = min(lowbit, previous_po2(end - current_start));
            total += size;
            self.freeList[size.trailing_zeros() as usize].push(current_start as *mut usize);
            current_start += size;
        }
        self.total += total;
    }

    /// Allocates a block satisfying `layout`, or `Err(AllocationError)` when
    /// no large-enough block is free.
    ///
    /// The effective block size is `layout.size` rounded up to a power of
    /// two, and never smaller than `layout.align` or one machine word.
    pub unsafe fn allocate(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocationError> {
        let size = max(
            layout.size.nextPowerOf2(),
            max(layout.align, size_of::<usize>()),
        );
        let class = size.trailing_zeros() as usize;

        // Find the smallest class with a free block, then split it down,
        // buddy by buddy, until a block of the requested class exists.
        for i in class..self.freeList.len() {
            if !self.freeList[i].empty() {
                for j in (class + 1..i + 1).rev() {
                    if let Some(block) = self.freeList[j].pop() {
                        unsafe {
                            // Push both halves (buddies) of `block` one class down.
                            self.freeList[j - 1]
                                .push((block as usize + (1 << (j - 1))) as *mut usize);
                            self.freeList[j - 1].push(block);
                        }
                    } else {
                        return Err(AllocationError);
                    }
                }

                let result = NonNull::new(
                    self.freeList[class]
                        .pop()
                        .expect("current block should have free space") as *mut u8,
                );
                return if let Some(result) = result {
                    self.user += layout.size;
                    self.allocated += size;
                    Ok(result)
                } else {
                    Err(AllocationError)
                };
            }
        }
        Err(AllocationError)
    }

    /// Returns the block at `ptr` to the heap, merging it with its free
    /// buddies into larger blocks as far as possible.
    ///
    /// # Safety
    /// `ptr` must have been returned by `allocate` on this heap with an
    /// equivalent `layout`, and must not be used after this call.
    pub unsafe fn deallocate(&mut self, ptr: NonNull<u8>, layout: Layout) {
        let size = max(
            layout.size.nextPowerOf2(),
            max(layout.align, size_of::<usize>()),
        );
        let class = size.trailing_zeros() as usize;

        self.freeList[class].push(ptr.as_ptr() as *mut usize);

        // Merge the freed block upward while its buddy is also free.
        // BUGFIX: the loop must stop at the top class (`len() - 1`). The
        // previous bound of `len()` allowed a merge at the highest class,
        // after which `currentClass += 1` indexed `freeList[ORDER]` — out of
        // bounds.
        let mut currentPointer = ptr.as_ptr() as usize;
        let mut currentClass = class;
        while currentClass < self.freeList.len() - 1 {
            let buddy = currentPointer ^ (1 << currentClass);
            let mut flag = false;
            for block in self.freeList[currentClass].iterator_mut() {
                if block.value() as usize == buddy {
                    block.pop();
                    flag = true;
                    break;
                }
            }
            if flag {
                // Both halves are free: drop the block pushed above and
                // re-insert the merged block one class up.
                self.freeList[currentClass].pop();
                currentPointer = min(currentPointer, buddy);
                currentClass += 1;
                self.freeList[currentClass].push(currentPointer as *mut usize);
            } else {
                break;
            }
        }

        self.user -= layout.size;
        self.allocated -= size;
    }
}
impl<const ORDER: usize> Debug for Heap<ORDER> {
    /// Formats the bookkeeping counters; the free lists themselves are omitted.
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
        let mut out = f.debug_struct("Heap");
        out.field("user", &self.user);
        out.field("allocated", &self.allocated);
        out.field("total", &self.total);
        out.finish()
    }
}
/// A `Heap` guarded by a spinlock so it can be shared as a global allocator.
pub struct LockedHeap<const ORDER: usize>(Mutex<Heap<ORDER>>);

impl<const ORDER: usize> LockedHeap<ORDER> {
    /// Creates an empty locked heap; memory must still be added to the inner
    /// `Heap` before any allocation can succeed.
    pub const fn new() -> Self {
        LockedHeap(Mutex::new(Heap::<ORDER>::new()))
    }
}
unsafe impl<const ORDER: usize> GlobalAlloc for LockedHeap<ORDER> {
    /// Allocates from the inner heap; a failed allocation yields a null
    /// pointer, as the `GlobalAlloc` contract requires.
    unsafe fn alloc(&self, layout: StdLayout) -> *mut u8 {
        match self.0.lock().allocate(Layout::from(layout)) {
            Ok(block) => block.as_ptr(),
            Err(_) => core::ptr::null_mut(),
        }
    }

    /// Hands `ptr` back to the inner heap. `ptr` must be non-null and must
    /// have come from `alloc` with the same layout.
    unsafe fn dealloc(&self, ptr: *mut u8, layout: StdLayout) {
        self.0
            .lock()
            .deallocate(NonNull::new_unchecked(ptr), Layout::from(layout));
    }
}
use {
crate::{
alloc::{AllocationError, Layout},
array::linked_list::LinkedList,
math::{previous_po2, PowersOf2},
},
std_alloc::alloc::{GlobalAlloc, Layout as StdLayout},
core::{
cmp::{min, max},
fmt::{Debug, Formatter, Result as FmtResult},
mem::size_of,
ops::Deref,
ptr::NonNull,
},
spin::Mutex,
x86_64::{
structures::paging::{
mapper::MapToError, FrameAllocator, Mapper, Page, PageTableFlags, Size4KiB,
},
VirtAddr,
},
};