#![no_std]
#![forbid(warnings)]
#![forbid(missing_docs)]
extern crate alloc;
/// A wrapper around a [`GlobalAlloc`](alloc::alloc::GlobalAlloc) that can
/// temporarily deny allocations via [`NAlloc::deny`].
///
/// While at least one [`AllocationLocker`] guard obtained from [`NAlloc::deny`]
/// is alive, `alloc`/`alloc_zeroed`/`realloc` requests are rejected.
#[derive(Debug)]
pub struct NAlloc<T> {
    // The inner allocator that every permitted request is forwarded to.
    wrapped: T,
    // Count of outstanding `AllocationLocker` guards; allocation requests are
    // forwarded only while this is 0 (see the `GlobalAlloc` impl below).
    state: core::sync::atomic::AtomicU64,
}
impl<T> NAlloc<T> {
    /// Creates a new wrapper around `wrapped`, with allocations initially
    /// permitted (the deny counter starts at zero).
    pub const fn new(wrapped: T) -> NAlloc<T> {
        Self {
            wrapped,
            state: core::sync::atomic::AtomicU64::new(0),
        }
    }

    /// Denies allocations until the returned guard is dropped.
    ///
    /// Calls may be nested: allocations are re-enabled only once every
    /// outstanding [`AllocationLocker`] has been dropped.
    ///
    /// # Panics
    ///
    /// Panics if the deny counter would overflow `u64::MAX`.
    #[must_use = "The lock must stay alive as long as no allocations are allowed."]
    pub fn deny<'a>(&'a self) -> AllocationLocker<'a, T> {
        // `checked_add` makes the RMW fail *before* wrapping. The previous
        // code inspected `fetch_add`'s returned old value, so on overflow the
        // counter had already wrapped to 0 — momentarily re-enabling
        // allocations before the panic could take effect.
        if self
            .state
            .fetch_update(
                core::sync::atomic::Ordering::Release,
                core::sync::atomic::Ordering::Relaxed,
                |s| s.checked_add(1),
            )
            .is_err()
        {
            panic!("Allocation counter wrapped around");
        }
        AllocationLocker { allocator: self }
    }

    // Decrements the deny counter; called from `AllocationLocker::drop`.
    fn unlock<'a>(&'a self) {
        // `checked_sub` makes the RMW fail *before* wrapping (see `deny`):
        // the old code checked `fetch_sub`'s return value only after the
        // counter had already wrapped from 0 to `u64::MAX`.
        if self
            .state
            .fetch_update(
                core::sync::atomic::Ordering::Release,
                core::sync::atomic::Ordering::Relaxed,
                |s| s.checked_sub(1),
            )
            .is_err()
        {
            panic!("Allocation counter wrapped around");
        }
    }
}
// Forwards allocation requests to the wrapped allocator, but aborts any
// request made while at least one `AllocationLocker` guard is alive.
unsafe impl<T: alloc::alloc::GlobalAlloc> alloc::alloc::GlobalAlloc for NAlloc<T> {
    unsafe fn alloc(&self, layout: alloc::alloc::Layout) -> *mut u8 {
        use core::sync::atomic::Ordering;
        // Guard clause: a non-zero counter means allocation is denied.
        // NOTE(review): the `Relaxed` load is sufficient for same-thread
        // deny/alloc sequences; cross-thread visibility of a fresh deny is
        // best-effort — confirm that is the intended contract.
        if self.state.load(Ordering::Relaxed) != 0 {
            alloc::alloc::handle_alloc_error(layout);
        }
        self.wrapped.alloc(layout)
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: alloc::alloc::Layout) {
        // Deallocation is always forwarded, even while denied: memory handed
        // out earlier must remain freeable or it would leak.
        self.wrapped.dealloc(ptr, layout)
    }

    unsafe fn alloc_zeroed(&self, layout: alloc::alloc::Layout) -> *mut u8 {
        use core::sync::atomic::Ordering;
        // Same deny check as `alloc`; `handle_alloc_error` diverges.
        if self.state.load(Ordering::Relaxed) != 0 {
            alloc::alloc::handle_alloc_error(layout);
        }
        self.wrapped.alloc_zeroed(layout)
    }

    unsafe fn realloc(&self, ptr: *mut u8, layout: alloc::alloc::Layout, new_size: usize) -> *mut u8 {
        use core::sync::atomic::Ordering;
        // Reallocation may grow the block, so it counts as an allocation
        // and is subject to the same deny check.
        if self.state.load(Ordering::Relaxed) != 0 {
            alloc::alloc::handle_alloc_error(layout);
        }
        self.wrapped.realloc(ptr, layout, new_size)
    }
}
/// Guard returned by [`NAlloc::deny`]: allocations on the originating
/// [`NAlloc`] stay denied for as long as this value is alive, and the deny
/// counter is decremented again when it is dropped.
pub struct AllocationLocker<'a, T> {
    // The allocator whose deny counter this guard decrements on drop.
    allocator: &'a NAlloc<T>,
}
impl<'a, T> Drop for AllocationLocker<'a, T> {
fn drop(&mut self) {
self.allocator.unlock()
}
}