extern crate alloc;
use crate::spinlock::Spinlock;
use crate::task::get_cur_task;
use alloc::boxed::Box;
use alloc::vec::Vec;
use core::alloc::{GlobalAlloc, Layout};
use core::mem::size_of;
use core::ptr::null_mut;
use fe_osi::allocator::LayoutFFI;
// Every block is bracketed by a one-word header and an identical one-word
// footer. The word packs the block size (upper bits) together with the flag
// bits below; block sizes are multiples of two machine words, so the low
// bits are free to hold the flags.
const HEADER_ALIGN: usize = (2 * size_of::<usize>()) - 1;
// Masks a header/footer word down to the block size, clearing the flag bits.
const SIZE_MASK: usize = !HEADER_ALIGN;
// Flag: block is currently allocated.
const ALLOC_MASK: usize = 0x4;
// Flag: first block of the heap (has no predecessor to coalesce with).
const FIRST_MASK: usize = 0x2;
// Flag: last block of the heap (has no successor to coalesce with).
const LAST_MASK: usize = 0x1;
// Serializes access to the heap block list and to ALLOC_LIST.
static ALLOC_LOCK: Spinlock = Spinlock::new();
// Heap base address; null until init_heap() runs on the first allocation.
static mut HEAP: *mut u8 = null_mut();
// Total heap size in bytes, set once by init_heap().
static mut HEAP_SIZE: usize = 0;
// Bytes currently unallocated; maintained by alloc_block() and dealloc().
static mut HEAP_REMAINING: usize = 0;
// Ownership record for one live allocation, used to reclaim all memory of a
// deleted task (see clear_deleted_task).
struct AllocData {
    ptr: *mut u8, // data pointer as returned to the caller
    pid: usize,   // pid of the task that made the allocation
}
// One entry per live allocation; guarded by ALLOC_LOCK.
static mut ALLOC_LIST: Vec<AllocData> = Vec::new();
/// Zero-sized adapter that exposes the kernel heap through the standard
/// `GlobalAlloc` interface by translating `Layout` into the FFI-safe
/// `LayoutFFI` used by the kernel-internal allocator entry points.
pub struct KernelAllocator;

unsafe impl GlobalAlloc for KernelAllocator {
    /// Forwards the request to the kernel allocator.
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        alloc(LayoutFFI {
            size: layout.size(),
            align: layout.align(),
        })
    }

    /// Returns a block previously handed out by `alloc` to the kernel heap.
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        dealloc(
            ptr,
            LayoutFFI {
                size: layout.size(),
                align: layout.align(),
            },
        );
    }
}
/// Allocates a block satisfying `layout` from the kernel heap using a
/// first-fit walk over the implicit block list. Returns null when no free
/// block is large enough. Also records the allocation against the current
/// task's pid so it can be reclaimed if the task is deleted.
pub(crate) unsafe fn alloc(layout: LayoutFFI) -> *mut u8 {
    // Re-entrancy guard: the ALLOC_LIST.push() below may itself allocate
    // (Vec growth) and recurse into alloc(); the nested call must not try to
    // record itself in the list again.
    static mut ADDING_TO_LIST: bool = false;
    // Lazily initialize the heap on the very first allocation.
    if (HEAP as usize) == 0 {
        init_heap();
    }
    let mut header_ptr = HEAP as usize;
    let mut data_ptr = null_mut();
    let heap_max = header_ptr + HEAP_SIZE;
    // Total block size needed: header word, padding up to the requested
    // alignment, payload, footer word, rounded up to block granularity.
    // NOTE(review): assumes layout.align is a nonzero power of two — the
    // mask arithmetic is wrong otherwise; confirm callers guarantee this.
    let size_needed = {
        let mut intermediate = size_of::<usize>();
        intermediate = (intermediate + layout.align - 1) & !(layout.align - 1);
        intermediate += layout.size + size_of::<usize>();
        intermediate = (intermediate + HEADER_ALIGN) & !HEADER_ALIGN;
        intermediate
    };
    ALLOC_LOCK.take();
    // First fit: step block-to-block via each header's size field.
    while header_ptr < heap_max {
        let header_data = *(header_ptr as *const usize);
        if (header_data & ALLOC_MASK) == 0 && (header_data & SIZE_MASK) >= size_needed {
            data_ptr = alloc_block(header_ptr, size_needed, layout);
            break;
        }
        header_ptr += header_data & SIZE_MASK;
    }
    // Record the owner, unless we are already inside the nested push-driven
    // allocation (that allocation is the list's own buffer; it is handled
    // specially in dealloc()).
    // NOTE(review): the push can recurse into alloc() while ALLOC_LOCK is
    // still held — sound only if Spinlock tolerates a nested take() from the
    // same context; confirm against spinlock.rs.
    if !data_ptr.is_null() && !ADDING_TO_LIST {
        ADDING_TO_LIST = true;
        ALLOC_LIST.push(AllocData {
            ptr: data_ptr,
            pid: get_cur_task().pid,
        });
        ADDING_TO_LIST = false;
    }
    ALLOC_LOCK.give();
    data_ptr
}
/// Frees the allocation at `ptr`, coalescing with adjacent free blocks.
/// The block's size and FIRST/LAST flags are read back from its header;
/// `layout` is consulted only to undo the over-alignment back-pointer
/// indirection set up by alloc_block().
pub(crate) unsafe fn dealloc(ptr: *mut u8, layout: LayoutFFI) {
    // Recover the header address: over-aligned allocations store it in the
    // word just before the data; otherwise the header directly precedes it.
    let header = if layout.align > size_of::<usize>() {
        *(((ptr as usize) - size_of::<usize>()) as *mut usize)
    } else {
        (ptr as usize) - size_of::<usize>()
    };
    let header_data = *(header as *const usize);
    let old_size = header_data & SIZE_MASK;
    let is_first = (header_data & FIRST_MASK) == FIRST_MASK;
    let is_last = (header_data & LAST_MASK) == LAST_MASK;
    ALLOC_LOCK.take();
    // Drop the ownership record — unless `ptr` is ALLOC_LIST's own backing
    // buffer being released during a Vec reallocation, in which case the
    // list must not be mutated while it is freeing itself.
    if ptr != ALLOC_LIST.as_mut_ptr() as *mut u8 {
        let mut index: usize = usize::MAX;
        for (i, entry) in ALLOC_LIST.iter().enumerate() {
            if entry.ptr == ptr {
                index = i;
                break;
            }
        }
        if index != usize::MAX {
            ALLOC_LIST.swap_remove(index);
        }
    }
    // Peek at both neighbors' boundary tags (0 stands in at a heap edge).
    let next_header = header + old_size;
    let next_hdr_data = if is_last {
        0
    } else {
        *(next_header as *const usize)
    };
    let prev_footer = header - size_of::<usize>();
    let prev_ftr_data = if is_first {
        0
    } else {
        *(prev_footer as *const usize)
    };
    // A neighbor is merged in when it exists and is free.
    let left_coalesce = !is_first && (prev_ftr_data & ALLOC_MASK) == 0;
    let right_coalesce = !is_last && (next_hdr_data & ALLOC_MASK) == 0;
    // Extend the resulting block's bounds over any merged neighbor.
    let new_header = if left_coalesce {
        header - (prev_ftr_data & SIZE_MASK)
    } else {
        header
    };
    let new_footer = if right_coalesce {
        next_header + (next_hdr_data & SIZE_MASK) - size_of::<usize>()
    } else {
        next_header - size_of::<usize>()
    };
    // Write the merged block's tag: new size (ALLOC bit left clear) plus the
    // FIRST/LAST flags inherited from whichever block now forms each edge.
    let new_size = new_footer - new_header + size_of::<usize>();
    *(new_header as *mut usize) = new_size & SIZE_MASK;
    if left_coalesce {
        *(new_header as *mut usize) |= prev_ftr_data & FIRST_MASK;
    } else {
        *(new_header as *mut usize) |= header_data & FIRST_MASK;
    }
    if right_coalesce {
        *(new_header as *mut usize) |= next_hdr_data & LAST_MASK;
    } else {
        *(new_header as *mut usize) |= header_data & LAST_MASK;
    }
    *(new_footer as *mut usize) = *(new_header as *mut usize);
    // Return exactly the bytes alloc_block() subtracted for this block.
    HEAP_REMAINING += old_size;
    ALLOC_LOCK.give();
}
/// One-time heap initialization: adopts the linker-provided heap region and
/// formats it as a single free block spanning the whole heap.
///
/// The raw region size is aligned DOWN to the block granularity before it is
/// stored. The original code masked the size only inside the initial block's
/// header/footer while storing the unmasked value in HEAP_SIZE; since alloc()
/// computes `heap_max = HEAP + HEAP_SIZE`, an unaligned region let the
/// first-fit walk step past the real last block and read a garbage header.
/// Masking once here keeps HEAP_SIZE, HEAP_REMAINING, and the block list
/// mutually consistent.
unsafe fn init_heap() {
    extern "C" {
        static mut _sheap: u8; // linker symbol: start of heap region
        static mut _eheap: u8; // linker symbol: end of heap region
    }
    HEAP = &mut _sheap as *mut u8;
    // Align down so the walk in alloc() terminates exactly at the last footer.
    HEAP_SIZE = ((&mut _eheap as *mut u8 as usize) - (HEAP as usize)) & SIZE_MASK;
    HEAP_REMAINING = HEAP_SIZE;
    ALLOC_LOCK.take();
    let heap_ptr = HEAP as usize;
    let header = heap_ptr as *mut usize;
    let footer = (heap_ptr + HEAP_SIZE - size_of::<usize>()) as *mut usize;
    // A single free block that is both the first and the last block.
    *header = HEAP_SIZE | FIRST_MASK | LAST_MASK;
    *footer = HEAP_SIZE | FIRST_MASK | LAST_MASK;
    ALLOC_LOCK.give();
}
/// Carves an allocated block of `size` bytes out of the free block starting
/// at `header_ptr`, splitting the remainder off as a new free block when the
/// fit is not exact. Returns the aligned data pointer for the caller.
/// Caller must hold ALLOC_LOCK and guarantee the block is free and at least
/// `size` bytes.
///
/// Fix: on an exact fit (`old_size == size`) the original rewrote the header
/// as `size | ALLOC_MASK | is_first`, silently dropping LAST_MASK when the
/// chosen block was the heap's last. A later dealloc() of that block then saw
/// `is_last == false` and dereferenced a "next header" past the end of the
/// heap. The LAST flag is now kept on the allocated block whenever no tail
/// block is split off to inherit it.
unsafe fn alloc_block(header_ptr: usize, size: usize, layout: LayoutFFI) -> *mut u8 {
    let alloc_header = header_ptr as *mut usize;
    let alloc_footer = (header_ptr + size - size_of::<usize>()) as *mut usize;
    let old_size = *alloc_header & SIZE_MASK;
    let is_first = *alloc_header & FIRST_MASK;
    let is_last = *alloc_header & LAST_MASK;
    if old_size > size {
        // Split: the tail becomes a free block and inherits the LAST flag.
        let unalloc_header = (header_ptr + size) as *mut usize;
        let unalloc_footer = (header_ptr + old_size - size_of::<usize>()) as *mut usize;
        *unalloc_header = ((old_size - size) & SIZE_MASK) | is_last;
        *unalloc_footer = *unalloc_header;
        *alloc_header = size | ALLOC_MASK | is_first;
    } else {
        // Exact fit: the allocated block stays the heap's last block (when it
        // was), so dealloc() will not look for a successor beyond the heap.
        *alloc_header = size | ALLOC_MASK | is_first | is_last;
    }
    *alloc_footer = *alloc_header;
    // Data pointer aligned past the header word; for over-aligned requests
    // stash a back-pointer to the header in the word before the data so
    // dealloc() can find the header again.
    let data_ptr = (header_ptr + size_of::<usize>() + layout.align - 1) & !(layout.align - 1);
    if layout.align > size_of::<usize>() {
        let ptr = (data_ptr - size_of::<usize>()) as *mut usize;
        *ptr = alloc_header as usize;
    }
    HEAP_REMAINING -= size;
    data_ptr as *mut u8
}
/// Frees every live allocation owned by the task with the given `pid`.
///
/// Dropping the reconstituted Box routes through dealloc(), which also
/// removes the matching ALLOC_LIST entry via swap_remove; that is why `i`
/// is deliberately NOT advanced after a free — a different entry now
/// occupies slot `i` and must be re-examined.
///
/// NOTE(review): the Box is dropped while ALLOC_LOCK is held, and dealloc()
/// takes the same lock — sound only if Spinlock tolerates a nested take()
/// from the same context; confirm against spinlock.rs.
/// NOTE(review): Box::from_raw(ptr) reconstructs a Box<u8> regardless of the
/// original layout; this works here only because dealloc() reads the true
/// size from the block header — verify no over-aligned allocations end up
/// in ALLOC_LIST, as those need the back-pointer path keyed on align.
pub(crate) unsafe fn clear_deleted_task(pid: usize) {
    let mut i = 0;
    ALLOC_LOCK.take();
    while i < ALLOC_LIST.len() {
        if ALLOC_LIST[i].pid == pid {
            // Reconstitute and immediately drop to free through dealloc().
            Box::from_raw(ALLOC_LIST[i].ptr);
        } else {
            i += 1;
        }
    }
    ALLOC_LOCK.give();
}
/// Reports how many bytes of the kernel heap are currently unallocated.
pub fn get_heap_remaining() -> usize {
    // Single word read of the allocator's free-byte tally.
    let remaining = unsafe { HEAP_REMAINING };
    remaining
}