#[cfg(not(feature = "std"))]
use core::{
alloc::{GlobalAlloc, Layout},
cell::UnsafeCell,
ptr::{self, NonNull},
sync::atomic::{AtomicUsize, Ordering},
};
#[cfg(feature = "std")]
use std::{
alloc::{GlobalAlloc, Layout},
cell::UnsafeCell,
ptr::{self, NonNull},
sync::atomic::{AtomicUsize, Ordering},
};
/// Fixed-size block pool backed by an inline byte buffer.
///
/// Allocation advances a monotonic watermark; `dealloc` simply moves the
/// watermark back one block, so blocks must be released in strict LIFO
/// order (or all at once via [`FixedPool::reset`]). There is no per-block
/// free list.
///
/// NOTE(review): the buffer is a plain `[u8; SIZE]` field, so returned
/// pointers are only as aligned as the field's address (at least
/// `align_of::<usize>()` in practice, via the atomic field). ALIGN values
/// above that are not guaranteed by this layout — confirm for targets that
/// need stricter alignment.
pub struct FixedPool<const SIZE: usize, const ALIGN: usize> {
    /// Backing storage shared by all blocks.
    buffer: UnsafeCell<[u8; SIZE]>,
    /// Watermark: index of the next unclaimed block (LIFO semantics).
    free_list: AtomicUsize,
    /// Block size rounded up to a multiple of ALIGN.
    block_size: usize,
    /// Total number of blocks that fit in `buffer`.
    num_blocks: usize,
}

impl<const SIZE: usize, const ALIGN: usize> FixedPool<SIZE, ALIGN> {
    /// Creates a pool of `SIZE / round_up(block_size, ALIGN)` blocks.
    ///
    /// A `block_size` of 0 yields an empty pool (capacity 0) instead of
    /// panicking with a division by zero during const evaluation.
    #[allow(clippy::manual_div_ceil)] // div_ceil is not const-stable on all supported toolchains
    pub const fn new(block_size: usize) -> Self {
        let aligned_block_size = ((block_size + ALIGN - 1) / ALIGN) * ALIGN;
        // Guard: block_size == 0 rounds to 0 and would divide by zero below.
        let num_blocks = if aligned_block_size == 0 {
            0
        } else {
            SIZE / aligned_block_size
        };
        Self {
            buffer: UnsafeCell::new([0u8; SIZE]),
            free_list: AtomicUsize::new(0),
            block_size: aligned_block_size,
            num_blocks,
        }
    }

    /// Claims the next free block, or `None` when the pool is exhausted.
    pub fn alloc(&self) -> Option<NonNull<u8>> {
        loop {
            let free_idx = self.free_list.load(Ordering::Acquire);
            if free_idx >= self.num_blocks {
                return None;
            }
            // CAS so concurrent callers cannot claim the same index; on
            // failure another thread won the slot and we retry.
            if self
                .free_list
                .compare_exchange(free_idx, free_idx + 1, Ordering::AcqRel, Ordering::Acquire)
                .is_ok()
            {
                let offset = free_idx * self.block_size;
                // SAFETY: free_idx < num_blocks, so offset + block_size <= SIZE
                // and the resulting pointer stays inside `buffer`.
                let ptr = unsafe { (*self.buffer.get()).as_ptr().add(offset) as *mut u8 };
                return NonNull::new(ptr);
            }
        }
    }

    /// Releases the most recently allocated block.
    ///
    /// # Safety
    /// `_ptr` is ignored: the pool keeps no per-block bookkeeping and only
    /// rewinds its watermark by one. Callers must release blocks in strict
    /// LIFO order (or use [`FixedPool::reset`]); out-of-order release makes
    /// a still-live block claimable again.
    pub unsafe fn dealloc(&self, _ptr: NonNull<u8>) {
        // CAS loop instead of the original load + fetch_sub: a separate
        // check-then-act could underflow the counter when two threads
        // dealloc concurrently with only one block outstanding.
        let mut current = self.free_list.load(Ordering::Acquire);
        while current > 0 {
            match self.free_list.compare_exchange_weak(
                current,
                current - 1,
                Ordering::AcqRel,
                Ordering::Acquire,
            ) {
                Ok(_) => return,
                Err(observed) => current = observed,
            }
        }
    }

    /// Marks every block free again.
    ///
    /// # Safety
    /// All pointers previously returned by [`FixedPool::alloc`] become
    /// reusable; the caller must ensure none of them are used afterwards.
    pub unsafe fn reset(&self) {
        self.free_list.store(0, Ordering::Release);
    }

    /// Number of blocks currently unclaimed.
    pub fn available(&self) -> usize {
        let used = self.free_list.load(Ordering::Acquire);
        self.num_blocks.saturating_sub(used)
    }

    /// Total number of blocks in the pool.
    pub const fn capacity(&self) -> usize {
        self.num_blocks
    }

    /// Size of each block in bytes (rounded up to a multiple of ALIGN).
    pub const fn block_size(&self) -> usize {
        self.block_size
    }
}

// SAFETY: all cross-thread bookkeeping goes through the atomic `free_list`;
// buffer bytes are only touched through pointers whose exclusivity the
// alloc/dealloc contract places on the caller.
unsafe impl<const SIZE: usize, const ALIGN: usize> Send for FixedPool<SIZE, ALIGN> {}
unsafe impl<const SIZE: usize, const ALIGN: usize> Sync for FixedPool<SIZE, ALIGN> {}
/// Lock-free bump (arena) allocator over an inline buffer.
///
/// Allocation monotonically advances an offset; individual allocations are
/// never freed — only [`BumpAllocator::reset`] reclaims space.
///
/// NOTE(review): returned pointers are aligned relative to the start of the
/// inline buffer; alignments larger than the struct's own alignment (at
/// least `align_of::<usize>()`) are not guaranteed — confirm for targets
/// that need stricter alignment.
pub struct BumpAllocator<const SIZE: usize> {
    /// Arena storage.
    buffer: UnsafeCell<[u8; SIZE]>,
    /// Next free byte offset into `buffer`.
    offset: AtomicUsize,
}

impl<const SIZE: usize> BumpAllocator<SIZE> {
    /// Creates an empty arena.
    pub const fn new() -> Self {
        Self {
            buffer: UnsafeCell::new([0u8; SIZE]),
            offset: AtomicUsize::new(0),
        }
    }

    /// Reserves `layout.size()` bytes at `layout.align()` alignment, or
    /// returns `None` when the arena cannot satisfy the request.
    pub fn alloc(&self, layout: Layout) -> Option<NonNull<u8>> {
        let size = layout.size();
        // `Layout` guarantees align is a power of two >= 1.
        let align = layout.align();
        loop {
            let current_offset = self.offset.load(Ordering::Acquire);
            // Round the offset up to the requested alignment. checked_add
            // hardens against arithmetic wrap-around, which would otherwise
            // slip past the bounds check below and hand out an out-of-range
            // pointer.
            let aligned_offset = current_offset.checked_add(align - 1)? & !(align - 1);
            let new_offset = aligned_offset.checked_add(size)?;
            if new_offset > SIZE {
                return None;
            }
            // CAS: exactly one thread claims [aligned_offset, new_offset).
            if self
                .offset
                .compare_exchange(
                    current_offset,
                    new_offset,
                    Ordering::AcqRel,
                    Ordering::Acquire,
                )
                .is_ok()
            {
                // SAFETY: new_offset <= SIZE, so the range lies inside `buffer`.
                let ptr = unsafe { (*self.buffer.get()).as_ptr().add(aligned_offset) as *mut u8 };
                return NonNull::new(ptr);
            }
        }
    }

    /// Frees the whole arena at once.
    ///
    /// # Safety
    /// Every pointer previously returned by [`BumpAllocator::alloc`] points
    /// into reclaimed space; the caller must ensure none are used afterwards.
    pub unsafe fn reset(&self) {
        self.offset.store(0, Ordering::Release);
    }

    /// Bytes consumed so far (including alignment padding).
    pub fn used(&self) -> usize {
        self.offset.load(Ordering::Acquire)
    }

    /// Bytes still unclaimed.
    pub fn available(&self) -> usize {
        SIZE.saturating_sub(self.used())
    }

    /// Total arena size in bytes.
    pub const fn capacity(&self) -> usize {
        SIZE
    }
}

// SAFETY: the only shared mutable state is the atomic `offset`; buffer bytes
// are handed out in disjoint ranges, and exclusivity over those ranges is
// the caller's responsibility.
unsafe impl<const SIZE: usize> Send for BumpAllocator<SIZE> {}
unsafe impl<const SIZE: usize> Sync for BumpAllocator<SIZE> {}

impl<const SIZE: usize> Default for BumpAllocator<SIZE> {
    fn default() -> Self {
        Self::new()
    }
}
/// Stack (LIFO) allocator: like a bump allocator, but each push returns the
/// prior offset so the caller can pop back to it later.
pub struct StackAllocator<const SIZE: usize> {
    /// Stack storage.
    buffer: UnsafeCell<[u8; SIZE]>,
    /// Current top-of-stack byte offset into `buffer`.
    offset: AtomicUsize,
}

impl<const SIZE: usize> StackAllocator<SIZE> {
    /// Creates an empty stack.
    pub const fn new() -> Self {
        Self {
            buffer: UnsafeCell::new([0u8; SIZE]),
            offset: AtomicUsize::new(0),
        }
    }

    /// Pushes an allocation; returns the pointer together with the offset
    /// that must later be passed to [`StackAllocator::pop`] to release it.
    /// Returns `None` when the request does not fit.
    pub fn push(&self, layout: Layout) -> Option<(NonNull<u8>, usize)> {
        let size = layout.size();
        // `Layout` guarantees align is a power of two >= 1.
        let align = layout.align();
        loop {
            let current_offset = self.offset.load(Ordering::Acquire);
            // Round up to alignment; checked_add hardens against arithmetic
            // wrap-around slipping past the bounds check below.
            let aligned_offset = current_offset.checked_add(align - 1)? & !(align - 1);
            let new_offset = aligned_offset.checked_add(size)?;
            if new_offset > SIZE {
                return None;
            }
            // CAS: exactly one thread claims [aligned_offset, new_offset).
            if self
                .offset
                .compare_exchange(
                    current_offset,
                    new_offset,
                    Ordering::AcqRel,
                    Ordering::Acquire,
                )
                .is_ok()
            {
                // SAFETY: new_offset <= SIZE keeps the range inside `buffer`.
                let ptr = unsafe { (*self.buffer.get()).as_ptr().add(aligned_offset) as *mut u8 };
                return NonNull::new(ptr).map(|p| (p, current_offset));
            }
        }
    }

    /// Rewinds the stack to `saved_offset`.
    ///
    /// # Safety
    /// `saved_offset` must be the value returned by the matching, most
    /// recent [`StackAllocator::push`]. Popping out of order — or passing an
    /// arbitrary value — makes live allocations above the offset claimable
    /// again.
    pub unsafe fn pop(&self, saved_offset: usize) {
        self.offset.store(saved_offset, Ordering::Release);
    }

    /// Current top-of-stack offset in bytes (0 when empty).
    pub fn depth(&self) -> usize {
        self.offset.load(Ordering::Acquire)
    }

    /// Bytes still unclaimed.
    pub fn available(&self) -> usize {
        SIZE.saturating_sub(self.depth())
    }

    /// Total stack size in bytes.
    pub const fn capacity(&self) -> usize {
        SIZE
    }
}

// SAFETY: the only shared mutable state is the atomic `offset`; buffer bytes
// are handed out in disjoint ranges under the push/pop LIFO contract.
unsafe impl<const SIZE: usize> Send for StackAllocator<SIZE> {}
unsafe impl<const SIZE: usize> Sync for StackAllocator<SIZE> {}

impl<const SIZE: usize> Default for StackAllocator<SIZE> {
    fn default() -> Self {
        Self::new()
    }
}
/// Two-tier allocator for embedded targets: requests that fit a pool block
/// are served from a fixed pool, everything else from a bump arena.
pub struct EmbeddedAllocator<const SIZE: usize, const POOL_SIZE: usize> {
    /// Fallback arena for large (or pool-exhausted) requests.
    bump: BumpAllocator<SIZE>,
    /// Fast path for small fixed-size requests; blocks are 8-aligned.
    pool: FixedPool<POOL_SIZE, 8>,
}

impl<const SIZE: usize, const POOL_SIZE: usize> EmbeddedAllocator<SIZE, POOL_SIZE> {
    /// Creates the allocator; `block_size` configures the pool tier.
    pub const fn new(block_size: usize) -> Self {
        Self {
            bump: BumpAllocator::new(),
            pool: FixedPool::new(block_size),
        }
    }

    /// Returns true when `ptr` points into the pool's backing buffer, i.e.
    /// the allocation was served by the pool tier rather than the bump tier.
    fn owns_pool_ptr(&self, ptr: *mut u8) -> bool {
        let start = self.pool.buffer.get() as *mut u8 as usize;
        let addr = ptr as usize;
        addr >= start && addr < start + POOL_SIZE
    }

    /// Reclaims both tiers at once.
    ///
    /// # Safety
    /// Every outstanding pointer from either tier becomes reusable; the
    /// caller must ensure none of them are used afterwards.
    pub unsafe fn reset(&self) {
        self.bump.reset();
        self.pool.reset();
    }

    /// Approximate bytes in use across both tiers (pool usage counted in
    /// whole blocks).
    pub fn used(&self) -> usize {
        self.bump.used() + (self.pool.capacity() - self.pool.available()) * self.pool.block_size()
    }

    /// Combined capacity of both tiers in bytes.
    pub const fn capacity(&self) -> usize {
        SIZE + POOL_SIZE
    }
}

unsafe impl<const SIZE: usize, const POOL_SIZE: usize> GlobalAlloc
    for EmbeddedAllocator<SIZE, POOL_SIZE>
{
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        // Pool blocks are at best 8-aligned, so route to the pool only when
        // both the size and the alignment fit (the original ignored the
        // alignment and could hand back an under-aligned pool block).
        if layout.size() <= self.pool.block_size() && layout.align() <= 8 {
            if let Some(ptr) = self.pool.alloc() {
                return ptr.as_ptr();
            }
        }
        self.bump
            .alloc(layout)
            .map(|p| p.as_ptr())
            .unwrap_or(ptr::null_mut())
    }

    unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) {
        // Only return pointers that actually came from the pool tier. The
        // original routed every non-null pointer to pool.dealloc, which
        // decremented the pool counter for bump-tier allocations and
        // corrupted the pool's accounting. Bump allocations are reclaimed
        // only via reset().
        if let Some(nonnull) = NonNull::new(ptr) {
            if self.owns_pool_ptr(ptr) {
                self.pool.dealloc(nonnull);
            }
        }
    }
}

// SAFETY: both tiers use only atomic bookkeeping and are themselves
// Send + Sync; this type adds no further shared state.
unsafe impl<const SIZE: usize, const POOL_SIZE: usize> Send for EmbeddedAllocator<SIZE, POOL_SIZE> {}
unsafe impl<const SIZE: usize, const POOL_SIZE: usize> Sync for EmbeddedAllocator<SIZE, POOL_SIZE> {}
/// RAII guard that rewinds its `StackAllocator` to a saved offset on drop.
pub struct StackGuard<'a, const SIZE: usize> {
    allocator: &'a StackAllocator<SIZE>,
    saved_offset: usize,
}

impl<const SIZE: usize> Drop for StackGuard<'_, SIZE> {
    fn drop(&mut self) {
        // The offset was produced by the push() inside scoped_alloc, so
        // popping back to it releases exactly this guard's allocation.
        unsafe { self.allocator.pop(self.saved_offset) };
    }
}

impl<const SIZE: usize> StackAllocator<SIZE> {
    /// Pushes an allocation and pairs it with a guard that automatically
    /// pops it when the guard goes out of scope.
    pub fn scoped_alloc(&self, layout: Layout) -> Option<(NonNull<u8>, StackGuard<'_, SIZE>)> {
        let (ptr, saved_offset) = self.push(layout)?;
        let guard = StackGuard {
            allocator: self,
            saved_offset,
        };
        Some((ptr, guard))
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_fixed_pool_basic() {
        let pool: FixedPool<1024, 8> = FixedPool::new(64);
        // 1024 bytes / 64-byte blocks = 16 blocks, all free initially.
        assert_eq!(pool.capacity(), 16);
        assert_eq!(pool.available(), 16);
        let first = pool.alloc().expect("Failed to allocate");
        assert_eq!(pool.available(), 15);
        let second = pool.alloc().expect("Failed to allocate");
        assert_eq!(pool.available(), 14);
        // Distinct blocks must not alias.
        assert_ne!(first.as_ptr(), second.as_ptr());
        unsafe {
            pool.dealloc(first);
            pool.dealloc(second);
        }
    }

    #[test]
    fn test_fixed_pool_exhaustion() {
        let pool: FixedPool<128, 8> = FixedPool::new(32);
        assert_eq!(pool.capacity(), 4);
        // Claim every block, keeping the pointers alive.
        let blocks: Vec<_> = (0..4)
            .map(|_| pool.alloc().expect("Failed to allocate"))
            .collect();
        assert_eq!(blocks.len(), 4);
        // A fifth request must fail.
        assert!(pool.alloc().is_none());
        assert_eq!(pool.available(), 0);
    }

    #[test]
    fn test_bump_allocator_basic() {
        let bump: BumpAllocator<1024> = BumpAllocator::new();
        assert_eq!(bump.capacity(), 1024);
        assert_eq!(bump.used(), 0);
        let layout = Layout::from_size_align(64, 8).unwrap();
        let first = bump.alloc(layout).expect("Failed to allocate");
        assert_eq!(bump.used(), 64);
        let second = bump.alloc(layout).expect("Failed to allocate");
        assert_eq!(bump.used(), 128);
        assert_ne!(first.as_ptr(), second.as_ptr());
    }

    #[test]
    fn test_bump_allocator_alignment() {
        let bump: BumpAllocator<1024> = BumpAllocator::new();
        // Push the offset to an odd value, then request 8-byte alignment.
        let _unaligned = bump
            .alloc(Layout::from_size_align(1, 1).unwrap())
            .expect("Failed to allocate");
        let aligned = bump
            .alloc(Layout::from_size_align(8, 8).unwrap())
            .expect("Failed to allocate");
        assert_eq!(aligned.as_ptr() as usize % 8, 0);
    }

    #[test]
    fn test_bump_allocator_reset() {
        let bump: BumpAllocator<1024> = BumpAllocator::new();
        let layout = Layout::from_size_align(64, 8).unwrap();
        let _ptr = bump.alloc(layout).expect("Failed to allocate");
        assert_eq!(bump.used(), 64);
        // Reset reclaims the whole arena at once.
        unsafe { bump.reset() };
        assert_eq!(bump.used(), 0);
    }

    #[test]
    fn test_stack_allocator_basic() {
        let stack: StackAllocator<1024> = StackAllocator::new();
        assert_eq!(stack.capacity(), 1024);
        assert_eq!(stack.depth(), 0);
        let layout = Layout::from_size_align(64, 8).unwrap();
        let (first, mark1) = stack.push(layout).expect("Failed to allocate");
        assert!(stack.depth() >= 64);
        let (second, mark2) = stack.push(layout).expect("Failed to allocate");
        assert!(stack.depth() >= 128);
        assert_ne!(first.as_ptr(), second.as_ptr());
        // Pop in LIFO order back to empty.
        unsafe {
            stack.pop(mark2);
            assert!(stack.depth() < 128);
            stack.pop(mark1);
            assert_eq!(stack.depth(), 0);
        }
    }

    #[test]
    fn test_stack_allocator_scoped() {
        let stack: StackAllocator<1024> = StackAllocator::new();
        let layout = Layout::from_size_align(64, 8).unwrap();
        {
            let (_outer_ptr, _outer_guard) =
                stack.scoped_alloc(layout).expect("Failed to allocate");
            assert!(stack.depth() >= 64);
            {
                let (_inner_ptr, _inner_guard) =
                    stack.scoped_alloc(layout).expect("Failed to allocate");
                assert!(stack.depth() >= 128);
            }
            // Inner guard dropped: its allocation is released.
            assert!(stack.depth() < 128);
        }
    }

    #[test]
    fn test_embedded_allocator_basic() {
        let allocator: EmbeddedAllocator<1024, 512> = EmbeddedAllocator::new(32);
        assert_eq!(allocator.capacity(), 1536);
        let layout = Layout::from_size_align(16, 8).unwrap();
        unsafe {
            let first = allocator.alloc(layout);
            assert!(!first.is_null());
            let second = allocator.alloc(layout);
            assert!(!second.is_null());
            assert_ne!(first, second);
            allocator.dealloc(first, layout);
            allocator.dealloc(second, layout);
        }
    }

    #[test]
    fn test_embedded_allocator_small_large() {
        let allocator: EmbeddedAllocator<2048, 1024> = EmbeddedAllocator::new(64);
        unsafe {
            // Small request: fits the 64-byte pool tier.
            let small_layout = Layout::from_size_align(32, 8).unwrap();
            let small_ptr = allocator.alloc(small_layout);
            assert!(!small_ptr.is_null());
            // Large request: falls through to the bump tier.
            let large_layout = Layout::from_size_align(128, 8).unwrap();
            let large_ptr = allocator.alloc(large_layout);
            assert!(!large_ptr.is_null());
            assert_ne!(small_ptr, large_ptr);
            allocator.dealloc(small_ptr, small_layout);
            allocator.dealloc(large_ptr, large_layout);
        }
    }

    #[test]
    fn test_fixed_pool_reset() {
        let pool: FixedPool<1024, 8> = FixedPool::new(64);
        let _first = pool.alloc().expect("Failed to allocate");
        let _second = pool.alloc().expect("Failed to allocate");
        assert_eq!(pool.available(), 14);
        unsafe { pool.reset() };
        assert_eq!(pool.available(), 16);
    }
}