use core::{
    alloc::{Layout, LayoutError},
    cell::{Cell, RefCell},
    marker::PhantomData,
    ptr::null_mut,
};

#[cfg(not(feature = "std"))]
use portable_atomic::{AtomicUsize, Ordering};
#[cfg(feature = "std")]
use std::sync::atomic::{AtomicUsize, Ordering};

use crate::{
    error::{ContiguousMemoryError, LockSource, LockingError},
    range::ByteRange,
    refs::{sealed::*, ContiguousMemoryRef, SyncContiguousMemoryRef},
    tracker::AllocationTracker,
    types::*,
    BaseLocation, ContiguousMemoryState,
};
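
/// Base implementation details shared by all implementation markers: how the
/// container state is shared, how stored values are referenced, and whether
/// accessing the state can fail due to locking.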
pub trait ImplBase: Sized {
    /// Shared handle to the container state.
    type StorageState: Clone;
    /// Type of reference returned for values stored with this implementation.
    type ReferenceType<T: ?Sized>: Clone;
    /// Result of operations that can be blocked by a lock; plain `T` for
    /// lock-free implementations.
    type LockResult<T>;
    /// Whether this implementation synchronizes access through locks.
    const USES_LOCKS: bool = false;
}
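
/// Marker for the default, single-threaded implementation: state is shared
/// through `Rc` and no locking is performed.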
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct ImplDefault;

impl ImplBase for ImplDefault {
    type StorageState = Rc<ContiguousMemoryState<Self>>;
    type ReferenceType<T: ?Sized> = ContiguousMemoryRef<T>;
    type LockResult<T> = T;
}
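
/// Marker for the thread-safe implementation: state is shared through `Arc`,
/// the base address and allocation tracker are guarded by locks, and
/// operations that touch them can fail with a `LockingError`.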
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct ImplConcurrent;

impl ImplBase for ImplConcurrent {
    type StorageState = Arc<ContiguousMemoryState<Self>>;
    type ReferenceType<T: ?Sized> = SyncContiguousMemoryRef<T>;
    type LockResult<T> = Result<T, LockingError>;
    const USES_LOCKS: bool = true;
}
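
/// Marker for the unsafe implementation: state is owned directly, stored
/// values are handed out as raw pointers, and the caller is responsible for
/// upholding aliasing and lifetime rules.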
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct ImplUnsafe;

impl ImplBase for ImplUnsafe {
    type StorageState = ContiguousMemoryState<Self>;
    type ReferenceType<T: ?Sized> = *mut T;
    type LockResult<T> = T;
}
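
/// Implementation details of how the backing memory block and its
/// `AllocationTracker` are stored, grown, shrunk, and queried.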
pub trait StorageDetails: ImplBase {
    /// Storage for the base address of the allocated memory block.
    type Base;
    /// Storage for the `AllocationTracker` of the container.
    type AllocationTracker;
    /// Storage for the current capacity of the container.
    type SizeType;
    /// Result of storing a value of type `T` in the container.
    type StoreResult<T>;

    /// Builds the container state for a memory block at `base` with the
    /// given `capacity` and `align`ment.
    fn build_state(
        base: *mut u8,
        capacity: usize,
        align: usize,
    ) -> Result<Self::StorageState, LayoutError>;

    /// Dereferences the shared `state` into the inner state struct.
    fn deref_state(state: &Self::StorageState) -> &ContiguousMemoryState<Self>;

    /// Retrieves the base address of the memory block.
    fn get_base(base: &Self::Base) -> Self::LockResult<*mut u8>;

    /// Retrieves the current capacity of the memory block.
    fn get_capacity(capacity: &Self::SizeType) -> usize;

    /// Reallocates the memory block to `new_capacity`, returning the new
    /// base address if the block was moved.
    fn resize_container(
        state: &mut Self::StorageState,
        new_capacity: usize,
    ) -> Result<Option<*mut u8>, ContiguousMemoryError>;

    /// Deallocates the memory block described by `layout`.
    fn deallocate(base: &Self::Base, layout: Layout);

    /// Resizes the allocation tracker to cover `new_capacity` bytes.
    fn resize_tracker(
        state: &mut Self::StorageState,
        new_capacity: usize,
    ) -> Result<(), ContiguousMemoryError>;

    /// Shrinks the allocation tracker to fit the stored allocations,
    /// returning the reduced capacity, if any.
    fn shrink_tracker(state: &mut Self::StorageState) -> Result<Option<usize>, LockingError>;

    /// Finds the next free region able to fit `layout`, marks it as taken,
    /// and returns it.
    fn store_next(
        state: &mut Self::StorageState,
        layout: Layout,
    ) -> Result<ByteRange, ContiguousMemoryError>;

    /// Returns the next free region able to fit `layout` without reserving
    /// it, if one exists.
    fn peek_next(
        state: &Self::StorageState,
        layout: Layout,
    ) -> Result<Option<ByteRange>, ContiguousMemoryError>;
}

impl StorageDetails for ImplConcurrent {
    type Base = RwLock<*mut u8>;
    type AllocationTracker = Mutex<AllocationTracker>;
    type SizeType = AtomicUsize;
    type StoreResult<T> = Result<Self::ReferenceType<T>, LockingError>;

    fn build_state(
        base: *mut u8,
        capacity: usize,
        align: usize,
    ) -> Result<Self::StorageState, LayoutError> {
        let layout = Layout::from_size_align(capacity, align)?;
        Ok(Arc::new(ContiguousMemoryState {
            base: BaseLocation(RwLock::new(base)),
            capacity: AtomicUsize::new(layout.size()),
            alignment: layout.align(),
            tracker: Mutex::new(AllocationTracker::new(capacity)),
        }))
    }

    fn deref_state(state: &Self::StorageState) -> &ContiguousMemoryState<Self> {
        state
    }

    fn get_base(base: &Self::Base) -> Self::LockResult<*mut u8> {
        base.read_named(LockSource::BaseAddress)
            .map(|result| *result)
    }

    fn get_capacity(capacity: &Self::SizeType) -> usize {
        capacity.load(Ordering::Acquire)
    }

    fn resize_container(
        state: &mut Self::StorageState,
        new_capacity: usize,
    ) -> Result<Option<*mut u8>, ContiguousMemoryError> {
        let layout = Layout::from_size_align(Self::get_capacity(&state.capacity), state.alignment)?;
        let mut base_addr = state.base.write_named(LockSource::BaseAddress)?;
        let prev_addr = *base_addr;
        *base_addr = unsafe { allocator::realloc(*base_addr, layout, new_capacity) };
        state.capacity.store(new_capacity, Ordering::Release);
        // Report the new base address only if reallocation moved the block.
        Ok(if *base_addr != prev_addr {
            Some(*base_addr)
        } else {
            None
        })
    }

    fn deallocate(base: &Self::Base, layout: Layout) {
        if let Ok(mut lock) = base.write_named(LockSource::BaseAddress) {
            unsafe { allocator::dealloc(*lock, layout) };
            *lock = null_mut();
        }
    }

    fn resize_tracker(
        state: &mut Self::StorageState,
        new_capacity: usize,
    ) -> Result<(), ContiguousMemoryError> {
        let mut lock = state.tracker.lock_named(LockSource::AllocationTracker)?;
        lock.resize(new_capacity)?;
        Ok(())
    }

    fn shrink_tracker(state: &mut Self::StorageState) -> Result<Option<usize>, LockingError> {
        let mut lock = state.tracker.lock_named(LockSource::AllocationTracker)?;
        Ok(lock.shrink_to_fit())
    }

    fn store_next(
        state: &mut Self::StorageState,
        layout: Layout,
    ) -> Result<ByteRange, ContiguousMemoryError> {
        let mut lock = state.tracker.lock_named(LockSource::AllocationTracker)?;
        lock.take_next(layout)
    }

    fn peek_next(
        state: &Self::StorageState,
        layout: Layout,
    ) -> Result<Option<ByteRange>, ContiguousMemoryError> {
        let lock = state.tracker.lock_named(LockSource::AllocationTracker)?;
        Ok(lock.peek_next(layout))
    }
}

impl StorageDetails for ImplDefault {
    type Base = Cell<*mut u8>;
    type AllocationTracker = RefCell<AllocationTracker>;
    type SizeType = Cell<usize>;
    type StoreResult<T> = ContiguousMemoryRef<T>;

    fn build_state(
        base: *mut u8,
        capacity: usize,
        align: usize,
    ) -> Result<Self::StorageState, LayoutError> {
        let layout = Layout::from_size_align(capacity, align)?;
        Ok(Rc::new(ContiguousMemoryState {
            base: BaseLocation(Cell::new(base)),
            capacity: Cell::new(layout.size()),
            alignment: layout.align(),
            tracker: RefCell::new(AllocationTracker::new(capacity)),
        }))
    }

    fn deref_state(state: &Self::StorageState) -> &ContiguousMemoryState<Self> {
        state
    }

    fn get_base(base: &Self::Base) -> Self::LockResult<*mut u8> {
        base.get()
    }

    fn get_capacity(capacity: &Self::SizeType) -> usize {
        capacity.get()
    }

    fn resize_container(
        state: &mut Self::StorageState,
        new_capacity: usize,
    ) -> Result<Option<*mut u8>, ContiguousMemoryError> {
        let layout = Layout::from_size_align(Self::get_capacity(&state.capacity), state.alignment)?;
        let prev_base = state.base.get();
        let new_base = unsafe { allocator::realloc(prev_base, layout, new_capacity) };
        state.base.set(new_base);
        state.capacity.set(new_capacity);
        // Report the new base address only if reallocation moved the block.
        Ok(if new_base != prev_base {
            Some(new_base)
        } else {
            None
        })
    }

    fn deallocate(base: &Self::Base, layout: Layout) {
        unsafe { allocator::dealloc(base.get(), layout) };
        base.set(null_mut())
    }

    fn resize_tracker(
        state: &mut Self::StorageState,
        new_capacity: usize,
    ) -> Result<(), ContiguousMemoryError> {
        state.tracker.borrow_mut().resize(new_capacity)
    }

    fn shrink_tracker(state: &mut Self::StorageState) -> Result<Option<usize>, LockingError> {
        Ok(state.tracker.borrow_mut().shrink_to_fit())
    }

    fn store_next(
        state: &mut Self::StorageState,
        layout: Layout,
    ) -> Result<ByteRange, ContiguousMemoryError> {
        // A concurrent borrow of the tracker is reported instead of panicking.
        let mut tracker = state
            .tracker
            .try_borrow_mut()
            .map_err(|_| ContiguousMemoryError::TrackerInUse)?;
        tracker.take_next(layout)
    }

    fn peek_next(
        state: &Self::StorageState,
        layout: Layout,
    ) -> Result<Option<ByteRange>, ContiguousMemoryError> {
        let tracker = state
            .tracker
            .try_borrow()
            .map_err(|_| ContiguousMemoryError::TrackerInUse)?;
        Ok(tracker.peek_next(layout))
    }
}

impl StorageDetails for ImplUnsafe {
    type Base = *mut u8;
    type AllocationTracker = AllocationTracker;
    type SizeType = usize;
    type StoreResult<T> = Result<*mut T, ContiguousMemoryError>;

    fn build_state(
        base: *mut u8,
        capacity: usize,
        align: usize,
    ) -> Result<Self::StorageState, LayoutError> {
        let layout = Layout::from_size_align(capacity, align)?;
        Ok(ContiguousMemoryState {
            base: BaseLocation(base),
            capacity: layout.size(),
            alignment: layout.align(),
            tracker: AllocationTracker::new(capacity),
        })
    }

    fn deref_state(state: &Self::StorageState) -> &ContiguousMemoryState<Self> {
        state
    }

    fn get_base(base: &Self::Base) -> Self::LockResult<*mut u8> {
        *base
    }

    fn get_capacity(capacity: &Self::SizeType) -> usize {
        *capacity
    }

    fn resize_container(
        state: &mut Self::StorageState,
        new_capacity: usize,
    ) -> Result<Option<*mut u8>, ContiguousMemoryError> {
        let layout = Layout::from_size_align(state.capacity, state.alignment)?;
        let prev_base = *state.base;
        state.base = BaseLocation(unsafe { allocator::realloc(prev_base, layout, new_capacity) });
        state.capacity = new_capacity;
        // Report the new base address only if reallocation moved the block.
        Ok(if *state.base != prev_base {
            Some(*state.base)
        } else {
            None
        })
    }

    fn deallocate(base: &Self::Base, layout: Layout) {
        // Unlike the other implementations, the stored base pointer is left
        // dangling; the container must not be used after this call.
        unsafe {
            allocator::dealloc(*base, layout);
        }
    }

    fn resize_tracker(
        state: &mut Self::StorageState,
        new_capacity: usize,
    ) -> Result<(), ContiguousMemoryError> {
        state.tracker.resize(new_capacity)
    }

    fn shrink_tracker(state: &mut Self::StorageState) -> Result<Option<usize>, LockingError> {
        Ok(state.tracker.shrink_to_fit())
    }

    fn store_next(
        state: &mut Self::StorageState,
        layout: Layout,
    ) -> Result<ByteRange, ContiguousMemoryError> {
        state.tracker.take_next(layout)
    }

    fn peek_next(
        state: &Self::StorageState,
        layout: Layout,
    ) -> Result<Option<ByteRange>, ContiguousMemoryError> {
        Ok(state.tracker.peek_next(layout))
    }
}
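
/// Implementation details of references to stored values: how borrow state
/// is tracked and how regions are released back to the tracker.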
pub trait ReferenceDetails: ImplBase {
    /// Internal state of a reference to a stored value.
    type RefState<T: ?Sized>: Clone;
    /// Type used to synchronize or count borrows of a stored value.
    type BorrowLock;
    /// Guard type held while a stored value is read.
    type ReadGuard<'a>: DebugReq;
    /// Guard type held while a stored value is mutated.
    type WriteGuard<'a>: DebugReq;

    /// Releases `range` back to the allocation tracker, returning a pointer
    /// to the start of the freed region if the state was accessible.
    fn free_region(state: &mut Self::StorageState, range: ByteRange) -> Option<*mut ()>;

    /// Builds a reference for a value of type `T` stored at `addr` within
    /// `range`.
    fn build_ref<T: StoreRequirements>(
        state: &Self::StorageState,
        addr: *mut T,
        range: &ByteRange,
    ) -> Self::ReferenceType<T>;

    /// Marks the end of a borrow of `_kind`. A no-op by default, for
    /// implementations that don't track borrows.
    fn unborrow_ref<T: ?Sized>(_state: &Self::RefState<T>, _kind: BorrowKind) {}
}

impl ReferenceDetails for ImplConcurrent {
    type RefState<T: ?Sized> = Arc<ReferenceState<T, Self>>;
    type BorrowLock = RwLock<()>;
    type ReadGuard<'a> = RwLockReadGuard<'a, ()>;
    type WriteGuard<'a> = RwLockWriteGuard<'a, ()>;

    fn free_region(state: &mut Self::StorageState, range: ByteRange) -> Option<*mut ()> {
        // `None` is returned if either lock can't be acquired.
        if let Ok(mut lock) = state.tracker.lock_named(LockSource::AllocationTracker) {
            let _ = lock.release(range);
            if let Ok(base) = state.base.read_named(LockSource::BaseAddress) {
                unsafe { Some(base.add(range.0) as *mut ()) }
            } else {
                None
            }
        } else {
            None
        }
    }

    fn build_ref<T: StoreRequirements>(
        state: &Self::StorageState,
        _addr: *mut T,
        range: &ByteRange,
    ) -> Self::ReferenceType<T> {
        SyncContiguousMemoryRef {
            inner: Arc::new(ReferenceState {
                state: state.clone(),
                range: range.clone(),
                borrow_kind: RwLock::new(()),
                #[cfg(feature = "ptr_metadata")]
                drop_metadata: static_metadata::<T, dyn HandleDrop>(),
                _phantom: PhantomData,
            }),
            #[cfg(feature = "ptr_metadata")]
            metadata: (),
            #[cfg(not(feature = "ptr_metadata"))]
            _phantom: PhantomData,
        }
    }
}

impl ReferenceDetails for ImplDefault {
    type RefState<T: ?Sized> = Rc<ReferenceState<T, Self>>;
    type BorrowLock = Cell<BorrowState>;
    type ReadGuard<'a> = ();
    type WriteGuard<'a> = ();

    fn free_region(state: &mut Self::StorageState, range: ByteRange) -> Option<*mut ()> {
        // `None` is returned if the tracker is already borrowed.
        if let Ok(mut tracker) = state.tracker.try_borrow_mut() {
            let _ = tracker.release(range);
            let base = state.base.get();
            unsafe { Some(base.add(range.0) as *mut ()) }
        } else {
            None
        }
    }

    fn build_ref<T: StoreRequirements>(
        state: &Self::StorageState,
        _addr: *mut T,
        range: &ByteRange,
    ) -> Self::ReferenceType<T> {
        ContiguousMemoryRef {
            inner: Rc::new(ReferenceState {
                state: state.clone(),
                range: range.clone(),
                borrow_kind: Cell::new(BorrowState::Read(0)),
                #[cfg(feature = "ptr_metadata")]
                drop_metadata: static_metadata::<T, dyn HandleDrop>(),
                _phantom: PhantomData,
            }),
            #[cfg(feature = "ptr_metadata")]
            metadata: (),
            #[cfg(not(feature = "ptr_metadata"))]
            _phantom: PhantomData,
        }
    }

    fn unborrow_ref<T: ?Sized>(state: &Self::RefState<T>, _kind: BorrowKind) {
        // Assumes a matching active borrow exists when this is called.
        let next = match state.borrow_kind.get() {
            BorrowState::Read(count) => BorrowState::Read(count - 1),
            BorrowState::Write => BorrowState::Read(0),
        };
        state.borrow_kind.set(next)
    }
}

impl ReferenceDetails for ImplUnsafe {
    type RefState<T: ?Sized> = ();
    type BorrowLock = ();
    type ReadGuard<'a> = ();
    type WriteGuard<'a> = ();

    fn free_region(state: &mut Self::StorageState, range: ByteRange) -> Option<*mut ()> {
        let _ = state.tracker.release(range);
        unsafe { Some(state.base.add(range.0) as *mut ()) }
    }

    fn build_ref<T: StoreRequirements>(
        _base: &Self::StorageState,
        addr: *mut T,
        _range: &ByteRange,
    ) -> Self::ReferenceType<T> {
        // Raw pointers carry no tracking state; the address is the reference.
        addr
    }
}
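
/// Full set of implementation details required by the container and its
/// reference types; blanket-implemented for any type providing all of the
/// component traits.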
pub trait ImplDetails: ImplBase + StorageDetails + ReferenceDetails + DebugReq {}

impl<Impl: ImplBase + StorageDetails + ReferenceDetails + DebugReq> ImplDetails for Impl {}
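
// A minimal sketch (not part of the original module) showing how the marker
// types select behavior at the type level. It only inspects `USES_LOCKS`,
// the one associated item this check needs, so it stays independent of the
// storage and reference machinery above; the module and function names are
// illustrative.
#[cfg(test)]
mod impl_marker_tests {
    use super::*;

    // Generic over any implementation marker; monomorphized per marker type.
    fn uses_locks<Impl: ImplBase>() -> bool {
        Impl::USES_LOCKS
    }

    #[test]
    fn markers_declare_locking() {
        assert!(!uses_locks::<ImplDefault>());
        assert!(uses_locks::<ImplConcurrent>());
        assert!(!uses_locks::<ImplUnsafe>());
    }
}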