use crate::alloc::{AllocError, Allocator, Global};
use std::{
alloc::Layout,
cell::RefCell,
marker::PhantomData,
mem,
ptr::{NonNull, null, slice_from_raw_parts_mut},
};
// One allocator pair per thread. `RefCell` provides single-threaded interior
// mutability; the public handles embed `PhantomData<NonNull<()>>` and so are
// not `Send`/`Sync`, keeping access on the owning thread.
thread_local! {
// Backing store for `Temp` handles: bump allocator that resets wholesale
// once its live-allocation count returns to zero.
static TA: RefCell<BumpAllocator> = RefCell::new(BumpAllocator::new());
// Backing store for `Local` handles: size-classed free-list allocator.
static LA: RefCell<ChainAllocator> = RefCell::new(ChainAllocator::new());
}
// Under Miri the custom arena paths are disabled and every request is
// forwarded to `Global` (see `allocate`/`deallocate` below).
const NOT_MIRI: bool = !cfg!(miri);
const K: usize = 1024;
// Alignment of every backing block; the largest alignment the arenas can serve.
const MAX_ALIGN: usize = 128;
// Block size (1 MiB), per-request size cap (1/16 of a block), free-list bin count.
const BSIZE: usize = 1024 * K; const MAX_SIZE: usize = BSIZE / 16; const MAX_SC: usize = 14;
// Smallest chunk: a freed chunk must be able to hold a `FreeMem` link.
const MIN_SIZE: usize = mem::size_of::<FreeMem>();
// log2(MIN_SIZE); used to bias size-class indices so the smallest class is 0.
const L2_MIN_SIZE: usize = MIN_SIZE.ilog2() as usize;
/// Zero-sized handle to the current thread's bump allocator (`TA`).
///
/// The `PhantomData<NonNull<()>>` marker makes the handle non-`Send`/`Sync`,
/// so it cannot escape the thread whose allocator it refers to.
#[derive(Default, Clone)]
pub struct Temp {
    pd: PhantomData<NonNull<()>>,
}

impl Temp {
    /// Creates a handle to this thread's bump allocator.
    pub const fn new() -> Self {
        Temp { pd: PhantomData }
    }
}

// SAFETY: all calls are forwarded to the one thread-local `BumpAllocator`,
// which hands out distinct, live memory for every allocation it serves.
unsafe impl Allocator for Temp {
    fn allocate(&self, lay: Layout) -> Result<NonNull<[u8]>, AllocError> {
        TA.with_borrow_mut(|bump| bump.allocate(lay))
    }

    unsafe fn deallocate(&self, p: NonNull<u8>, lay: Layout) {
        TA.with_borrow_mut(|bump| bump.deallocate(p, lay));
    }
}
/// Zero-sized handle to the current thread's chain allocator (`LA`).
///
/// Like [`Temp`], the `PhantomData<NonNull<()>>` marker keeps the handle
/// non-`Send`/`Sync` so it stays on its owning thread.
#[derive(Default, Clone)]
pub struct Local {
    pd: PhantomData<NonNull<()>>,
}

impl Local {
    /// Creates a handle to this thread's chain allocator.
    pub const fn new() -> Self {
        Local { pd: PhantomData }
    }
}

// SAFETY: all calls are forwarded to the one thread-local `ChainAllocator`,
// which hands out distinct, live memory for every allocation it serves.
unsafe impl Allocator for Local {
    fn allocate(&self, lay: Layout) -> Result<NonNull<[u8]>, AllocError> {
        LA.with_borrow_mut(|chain| chain.allocate(lay))
    }

    unsafe fn deallocate(&self, p: NonNull<u8>, lay: Layout) {
        LA.with_borrow_mut(|chain| chain.deallocate(p, lay));
    }
}
struct Block(NonNull<u8>);
impl Block {
fn new() -> Self {
let lay = Layout::from_size_align(BSIZE, MAX_ALIGN).unwrap();
let p = Global::allocate(&Global, lay).unwrap();
let p = p.as_ptr().cast::<u8>();
let nn = unsafe { NonNull::new_unchecked(p) };
Self(nn)
}
fn alloc(&self, i: usize, n: usize) -> NonNull<[u8]> {
let p = unsafe { self.0.as_ptr().add(i) };
let p = slice_from_raw_parts_mut(p, n);
unsafe { NonNull::new_unchecked(p) }
}
fn drop(&mut self) {
if self.0 != NonNull::dangling() {
let lay = Layout::from_size_align(BSIZE, MAX_ALIGN).unwrap();
unsafe { Global::deallocate(&Global, self.0, lay) }
self.0 = NonNull::dangling();
}
}
}
/// Arena that bumps a cursor through 1 MiB blocks; nothing is reclaimed
/// until the live-allocation count returns to zero, at which point the
/// whole arena resets (see `deallocate`).
struct BumpAllocator {
// `alloc_count`: live allocations; `idx`: bump cursor into `cur`;
// `overflow`: retired full blocks kept alive until the next reset.
// `_`-prefixed fields are statistics, only updated with feature "log-bump".
alloc_count: u64, idx: usize, cur: Block, overflow: std::vec::Vec<Block>, _alloc_bytes: usize, _max_alloc: usize,
_reset_count: usize,
_total_count: usize,
_total_alloc: usize,
}
impl BumpAllocator {
fn new() -> Self {
Self {
alloc_count: 0,
idx: 0,
cur: Block::new(),
overflow: Vec::new(),
_alloc_bytes: 0,
_max_alloc: 0,
_reset_count: 0,
_total_count: 0,
_total_alloc: 0,
}
}
fn allocate(&mut self, lay: Layout) -> Result<NonNull<[u8]>, AllocError> {
self.alloc_count += 1;
let (n, m) = (lay.size(), lay.align());
if NOT_MIRI && n < MAX_SIZE {
#[cfg(feature = "log-bump")]
{
self._alloc_bytes += n;
self._total_count += 1;
self._total_alloc += n;
}
let mut i = self.idx.checked_next_multiple_of(m).unwrap();
let e = i + n;
if e >= BSIZE && (e > BSIZE || n == 0) {
let old = mem::replace(&mut self.cur, Block::new());
self.overflow.push(old);
i = 0;
}
self.idx = i + n;
Ok(self.cur.alloc(i, n))
} else {
Global::allocate(&Global, lay)
}
}
fn deallocate(&mut self, p: NonNull<u8>, lay: Layout) {
self.alloc_count -= 1;
if NOT_MIRI && lay.size() < MAX_SIZE {
if self.alloc_count == 0 {
#[cfg(feature = "log-bump")]
{
self._reset_count += 1;
self._max_alloc = std::cmp::max(self._max_alloc, self._alloc_bytes);
self._alloc_bytes = 0;
}
self.idx = 0;
self.reset_overflow();
}
} else {
unsafe {
Global::deallocate(&Global, p, lay);
}
}
}
fn reset_overflow(&mut self) {
while let Some(mut b) = self.overflow.pop() {
b.drop();
}
}
}
impl Drop for BumpAllocator {
    /// Tears the arena down when its thread exits.
    ///
    /// If any allocation is still live, freeing the blocks would leave
    /// dangling pointers behind, so the process is aborted instead.
    fn drop(&mut self) {
        #[cfg(feature = "log-bump")]
        println!(
            "Bump Allocator Dropped total_count={} total_alloc={} max_alloc={} reset_count={}",
            self._total_count, self._total_alloc, self._max_alloc, self._reset_count
        );
        let outstanding = self.alloc_count;
        if outstanding != 0 {
            println!(
                "BumpAllocator has {} outstanding allocations, aborting",
                outstanding,
            );
            std::process::abort();
        }
        // Release the retired blocks first, then the current one.
        self.reset_overflow();
        self.cur.drop();
    }
}
/// Intrusive free-list node written into the first bytes of a freed chunk.
struct FreeMem {
next: *const FreeMem, }
/// Arena layered over a bump allocator that recycles freed chunks through
/// per-size-class intrusive free lists ("chains") instead of waiting for a
/// full reset.
struct ChainAllocator {
// `free`: heads of the intrusive free lists, one per size class.
// Remaining fields mirror `BumpAllocator`; `_`-prefixed fields are
// statistics, only updated with feature "log-bump".
free: [*const FreeMem; MAX_SC], alloc_count: u64, idx: usize, cur: Block, overflow: std::vec::Vec<Block>, _alloc_bytes: usize, _max_alloc: usize,
_reset_count: usize,
_total_count: usize,
_total_alloc: usize,
}
impl ChainAllocator {
fn new() -> Self {
Self {
free: [null(); MAX_SC],
alloc_count: 0,
idx: 0,
cur: Block::new(),
overflow: Vec::new(),
_alloc_bytes: 0,
_max_alloc: 0,
_reset_count: 0,
_total_count: 0,
_total_alloc: 0,
}
}
const fn sc(mut n: usize) -> (usize, usize) {
if n < MIN_SIZE { n = MIN_SIZE; }
let sc = (((n - 1).ilog2() + 1) as usize) - L2_MIN_SIZE;
let xn = 2 << (sc + L2_MIN_SIZE);
(sc,xn)
}
fn allocate(&mut self, lay: Layout) -> Result<NonNull<[u8]>, AllocError> {
self.alloc_count += 1;
let n = lay.size();
if NOT_MIRI && n <= MAX_SIZE {
#[cfg(feature = "log-bump")]
{
self._alloc_bytes += n;
self._total_count += 1;
self._total_alloc += n;
};
let (sc,xn) = Self::sc(n);
let p = self.free[sc];
if !p.is_null() {
let next = unsafe { (*p).next };
self.free[sc] = next;
let p = p as *mut u8;
let p = slice_from_raw_parts_mut(p, n);
Ok(unsafe { NonNull::new_unchecked(p) })
} else {
let m = lay.align();
let mut i = self.idx.checked_next_multiple_of(m).unwrap();
let e = i + xn;
if e > BSIZE {
let old = mem::replace(&mut self.cur, Block::new());
self.overflow.push(old);
i = 0;
}
self.idx = i + xn;
Ok(self.cur.alloc(i, n))
}
} else {
Global::allocate(&Global, lay)
}
}
fn deallocate(&mut self, p: NonNull<u8>, lay: Layout) {
self.alloc_count -= 1;
if NOT_MIRI && lay.size() <= MAX_SIZE {
if self.alloc_count == 0 {
#[cfg(feature = "log-bump")]
{
self._reset_count += 1;
self._max_alloc = std::cmp::max(self._max_alloc, self._alloc_bytes);
self._alloc_bytes = 0;
}
self.idx = 0;
self.reset_overflow();
self.free = [null(); MAX_SC];
} else {
let sc = Self::sc(lay.size()).0;
let p = p.as_ptr() as *mut FreeMem;
unsafe {
(*p).next = self.free[sc];
}
self.free[sc] = p;
}
} else {
unsafe {
Global::deallocate(&Global, p, lay);
}
}
}
fn reset_overflow(&mut self) {
while let Some(mut b) = self.overflow.pop() {
b.drop();
}
}
}
impl Drop for ChainAllocator {
    /// Tears the arena down when its thread exits.
    ///
    /// If any allocation is still live, freeing the blocks would leave
    /// dangling pointers behind, so the process is aborted instead.
    fn drop(&mut self) {
        #[cfg(feature = "log-bump")]
        println!(
            "Chain Allocator Dropped total_count={} total_alloc={} max_alloc={} reset_count={}",
            self._total_count, self._total_alloc, self._max_alloc, self._reset_count
        );
        let outstanding = self.alloc_count;
        if outstanding != 0 {
            println!(
                "ChainAllocator has {} outstanding allocations, aborting",
                outstanding,
            );
            std::process::abort();
        }
        // Release the retired blocks first, then the current one.
        self.reset_overflow();
        self.cur.drop();
    }
}
#[test]
fn test_alloc() {
    // Keep one allocation live for the whole test so the thread-local
    // ChainAllocator's live count never returns to zero mid-test.
    let outer = crate::Box::new_in(99, Local::new());
    assert!(*outer == 99);
    {
        // This temporary is freed while `outer` is still live, so its chunk
        // presumably lands on a free list rather than triggering a reset.
        let inner = crate::Box::new_in(99, Local::new());
        assert!(*inner == 99);
    }
    // A same-sized allocation that can be served from the recycled chunk.
    let reused = crate::Box::new_in(99, Local::new());
    assert!(*reused == 99);
}