use crate::*;
// Compile-time minimum of two `usize`s, for builds with the "unstable" feature.
//
// The branchless array-index form is used instead of `if a < b { a } else { b }`
// because `if`/`else` in `const fn` was not allowed on the older nightly
// toolchains this feature targets — presumably; confirm against the crate's
// minimum supported toolchain before simplifying.
#[cfg(feature = "unstable")]
const fn cmin(a: usize, b: usize) -> usize {
    // (a > b) as usize is 0 when a is the min, 1 when b is the min.
    [a, b][(a > b) as usize]
}
// Stable fallback: plain (non-`const`) minimum via the standard library.
// Must stay behaviorally identical to the `const fn` variant above.
#[cfg(not(feature = "unstable"))]
fn cmin(a: usize, b: usize) -> usize {
    core::cmp::min(a, b)
}
/// A size-class allocator: hands out fixed-size objects of `size` bytes,
/// carved out of pages of type `P`.
///
/// Pages are filed into one of three lists according to occupancy so that
/// allocation can quickly find a page with free slots.
pub struct SCAllocator<'a, P: AllocablePage> {
    /// Object size (and maximum allocation size) served by this allocator.
    pub(crate) size: usize,
    /// Allocations since the last rebalance; compared against
    /// `REBALANCE_COUNT` to trigger `check_page_assignments`.
    pub(crate) allocation_count: usize,
    /// How many objects fit in a single page of type `P`.
    pub(crate) obj_per_page: usize,
    /// Pages with no live allocations.
    pub(crate) empty_slabs: PageList<'a, P>,
    /// Pages with at least one allocation and at least one free slot.
    pub(crate) slabs: PageList<'a, P>,
    /// Pages with no free slots left.
    pub(crate) full_slabs: PageList<'a, P>,
}
// Shared constructor body for both the `const fn new` ("unstable" feature)
// and the plain `fn new` (stable). A macro is used so the exact same
// expression serves both cfg variants without duplication.
macro_rules! new_sc_allocator {
    ($size:expr) => {
        SCAllocator {
            size: $size,
            allocation_count: 0,
            // Objects per page: usable payload (page size minus metadata)
            // divided by object size, capped at 8 * 64 = 512 — presumably
            // the number of bits the page's occupancy bitfield can track;
            // confirm against the `AllocablePage` bitfield implementation.
            obj_per_page: cmin((P::SIZE - OBJECT_PAGE_METADATA_OVERHEAD) / $size, 8 * 64),
            empty_slabs: PageList::new(),
            slabs: PageList::new(),
            full_slabs: PageList::new(),
        }
    };
}
impl<'a, P: AllocablePage> SCAllocator<'a, P> {
    /// After this many allocations, the page lists are scanned and any page
    /// sitting in the wrong occupancy list is re-filed
    /// (see `check_page_assignments`).
    const REBALANCE_COUNT: usize = 64;

    /// Creates a new allocator for objects of `size` bytes
    /// (`const` variant, "unstable" feature).
    #[cfg(feature = "unstable")]
    pub const fn new(size: usize) -> SCAllocator<'a, P> {
        new_sc_allocator!(size)
    }

    /// Creates a new allocator for objects of `size` bytes (stable fallback).
    #[cfg(not(feature = "unstable"))]
    pub fn new(size: usize) -> SCAllocator<'a, P> {
        new_sc_allocator!(size)
    }

    /// Returns the object size served by this allocator.
    pub fn size(&self) -> usize {
        self.size
    }

    /// Files a page with remaining free slots into the partial list.
    fn insert_partial_slab(&mut self, new_head: &'a mut P) {
        self.slabs.insert_front(new_head);
    }

    /// Files a page with no allocations into the empty list.
    ///
    /// The page must be aligned to `P::SIZE`: `deallocate` recovers the page
    /// base address from an object pointer by masking low bits.
    fn insert_empty(&mut self, new_head: &'a mut P) {
        assert_eq!(
            new_head as *const P as usize % P::SIZE,
            0,
            "Inserted page is not aligned to page-size."
        );
        self.empty_slabs.insert_front(new_head);
    }

    /// Re-files pages whose occupancy no longer matches their list.
    ///
    /// Deallocations do not eagerly move pages between lists, so pages drift
    /// out of place; this periodic sweep moves no-longer-full pages back to
    /// the partial list and fully-freed pages to the empty list.
    pub(crate) fn check_page_assignments(&mut self) {
        for slab_page in self.full_slabs.iter_mut() {
            if !slab_page.is_full() {
                trace!("move {:p} full -> partial", slab_page);
                self.move_full_to_partial(slab_page);
            }
        }
        for slab_page in self.slabs.iter_mut() {
            if slab_page.is_empty(self.obj_per_page) {
                trace!("move {:p} partial -> empty", slab_page);
                self.move_to_empty(slab_page);
            }
        }
    }

    /// Moves `page` from the partial list to the empty list.
    fn move_to_empty(&mut self, page: &'a mut P) {
        let page_ptr = page as *const P;
        debug_assert!(self.slabs.contains(page_ptr));
        debug_assert!(
            !self.empty_slabs.contains(page_ptr),
            // Fixed typo in the panic message ("emtpy_slabs").
            "Page {:p} already in empty_slabs",
            page_ptr
        );
        self.slabs.remove_from_list(page);
        self.empty_slabs.insert_front(page);
        debug_assert!(!self.slabs.contains(page_ptr));
        debug_assert!(self.empty_slabs.contains(page_ptr));
    }

    /// Moves `page` from the partial list to the full list.
    fn move_partial_to_full(&mut self, page: &'a mut P) {
        let page_ptr = page as *const P;
        debug_assert!(self.slabs.contains(page_ptr));
        debug_assert!(!self.full_slabs.contains(page_ptr));
        self.slabs.remove_from_list(page);
        self.full_slabs.insert_front(page);
        debug_assert!(!self.slabs.contains(page_ptr));
        debug_assert!(self.full_slabs.contains(page_ptr));
    }

    /// Moves `page` from the full list back to the partial list.
    fn move_full_to_partial(&mut self, page: &'a mut P) {
        let page_ptr = page as *const P;
        debug_assert!(!self.slabs.contains(page_ptr));
        debug_assert!(self.full_slabs.contains(page_ptr));
        self.full_slabs.remove_from_list(page);
        self.slabs.insert_front(page);
        debug_assert!(self.slabs.contains(page_ptr));
        debug_assert!(!self.full_slabs.contains(page_ptr));
    }

    /// Tries to satisfy an allocation from the partial list.
    ///
    /// Returns a null pointer if no partial page could serve the request.
    /// On that failure path, if enough allocations have happened since the
    /// last sweep, the page lists are rebalanced and the counter reset.
    fn try_allocate_from_pagelist(&mut self, sc_layout: Layout) -> *mut u8 {
        for slab_page in self.slabs.iter_mut() {
            let ptr = slab_page.allocate(sc_layout);
            if !ptr.is_null() {
                // The allocation may have used the page's last free slot.
                if slab_page.is_full() {
                    trace!("move {:p} partial -> full", slab_page);
                    self.move_partial_to_full(slab_page);
                }
                self.allocation_count += 1;
                return ptr;
            }
        }
        if self.allocation_count > SCAllocator::<P>::REBALANCE_COUNT {
            self.check_page_assignments();
            self.allocation_count = 0;
        }
        ptr::null_mut()
    }

    /// Reclaims up to `to_reclaim` empty pages, handing each page pointer to
    /// `dealloc`. Returns the number of pages actually reclaimed (may be
    /// fewer if not enough empty pages exist).
    pub fn try_reclaim_pages<F>(&mut self, to_reclaim: usize, dealloc: &mut F) -> usize
    where
        F: FnMut(*mut P),
    {
        // Re-file pages first so every fully-freed page is on the empty list.
        self.check_page_assignments();
        let mut reclaimed = 0;
        while reclaimed < to_reclaim {
            if let Some(page) = self.empty_slabs.pop() {
                dealloc(page as *mut P);
                reclaimed += 1;
            } else {
                break;
            }
        }
        reclaimed
    }

    /// Adds a fresh page to this allocator.
    ///
    /// # Safety
    ///
    /// The caller must hand over exclusive ownership of a valid, `P::SIZE`-
    /// aligned page that is not tracked by any other allocator; its metadata
    /// (bitfield, list links) is reinitialized here.
    pub unsafe fn refill(&mut self, page: &'a mut P) {
        page.bitfield_mut()
            .initialize(self.size, P::SIZE - OBJECT_PAGE_METADATA_OVERHEAD);
        *page.prev() = Rawlink::none();
        *page.next() = Rawlink::none();
        trace!("adding page to SCAllocator {:p}", page);
        self.insert_empty(page);
    }

    /// Allocates a single object of this allocator's size class.
    ///
    /// `layout.size()` must be `<= self.size`; the request is rounded up to
    /// the size class. Returns `AllocationError::OutOfMemory` when no partial
    /// or empty page can serve the request (the caller may `refill` and retry).
    pub fn allocate(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocationError> {
        trace!(
            "SCAllocator({}) is trying to allocate {:?}",
            self.size,
            layout
        );
        assert!(layout.size() <= self.size);
        assert!(self.size <= (P::SIZE - OBJECT_PAGE_METADATA_OVERHEAD));
        // Allocate a full size-class slot regardless of the requested size.
        let new_layout = unsafe { Layout::from_size_align_unchecked(self.size, layout.align()) };
        assert!(new_layout.size() >= layout.size());
        let ptr = {
            let ptr = self.try_allocate_from_pagelist(new_layout);
            if ptr.is_null() && self.empty_slabs.head.is_some() {
                // No partial page worked; promote an empty page to partial.
                let empty_page = self.empty_slabs.pop().expect("We checked head.is_some()");
                debug_assert!(!self.empty_slabs.contains(empty_page));
                // NOTE(review): this path passes the caller's `layout` while
                // the pagelist path passes `new_layout`; slot size is fixed by
                // the bitfield initialization in `refill`, but confirm the
                // two layouts are interchangeable for `P::allocate`.
                let ptr = empty_page.allocate(layout);
                debug_assert!(!ptr.is_null(), "Allocation must have succeeded here.");
                trace!(
                    "move {:p} empty -> partial empty count {}",
                    empty_page,
                    self.empty_slabs.elements
                );
                self.insert_partial_slab(empty_page);
                ptr
            } else {
                ptr
            }
        };
        let res = NonNull::new(ptr).ok_or(AllocationError::OutOfMemory);
        if !ptr.is_null() {
            trace!(
                "SCAllocator({}) allocated ptr=0x{:x}",
                self.size,
                ptr as usize
            );
        }
        res
    }

    /// Deallocates `ptr`, previously returned by `allocate` on this allocator.
    ///
    /// The owning page is found by masking `ptr` down to its `P::SIZE`
    /// boundary (assumes `P::SIZE` is a power of two; `insert_empty` asserts
    /// page alignment). The page is not re-filed eagerly; list membership is
    /// corrected later by `check_page_assignments`.
    ///
    /// NOTE(review): this takes `&self` yet mutates page state through the
    /// transmuted `&mut P` — presumably sound only under the crate's external
    /// locking discipline; verify callers serialize access.
    pub fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) -> Result<(), AllocationError> {
        assert!(layout.size() <= self.size);
        assert!(self.size <= (P::SIZE - OBJECT_PAGE_METADATA_OVERHEAD));
        trace!(
            "SCAllocator({}) is trying to deallocate ptr = {:p} layout={:?} P.size= {}",
            self.size,
            ptr,
            layout,
            P::SIZE
        );
        // Round the object pointer down to its page base address.
        let page = (ptr.as_ptr() as usize) & !(P::SIZE - 1) as usize;
        // SAFETY(review): relies on `page` being the base of a live page owned
        // by this allocator (guaranteed if `ptr` came from `allocate`).
        let slab_page = unsafe { mem::transmute::<VAddr, &'a mut P>(page) };
        let new_layout = unsafe { Layout::from_size_align_unchecked(self.size, layout.align()) };
        let ret = slab_page.deallocate(ptr, new_layout);
        debug_assert!(ret.is_ok(), "Slab page deallocate won't fail at the moment");
        ret
    }
}