use core::{
alloc::Layout,
cell::Cell,
fmt::{self, Debug},
marker::PhantomData,
mem::{ManuallyDrop, MaybeUninit},
num::NonZeroUsize,
ops::Range,
panic::{RefUnwindSafe, UnwindSafe},
ptr::NonNull,
};
use allocator_api2::alloc::{AllocError, Allocator};
#[cfg(feature = "alloc")]
use allocator_api2::alloc::Global;
use crate::{
bump_align_guard::BumpAlignGuard,
bump_common_methods, bump_scope_methods,
chunk_size::ChunkSize,
const_param_assert, doc_align_cant_decrease,
polyfill::{nonnull, pointer},
stats::UninitStats,
ArrayLayout, BumpScopeGuard, Checkpoint, Chunk, ErrorBehavior, LayoutTrait, MinimumAlignment, RawChunk,
SizedTypeProperties, Stats, SupportedMinimumAlignment, WithoutDealloc, WithoutShrink, DEFAULT_START_CHUNK_SIZE,
};
#[cfg(not(no_global_oom_handling))]
use crate::infallible;
#[cfg(test)]
use crate::WithDrop;
/// A bump-allocation scope bound to the lifetime `'a`.
///
/// `#[repr(transparent)]`: this type has the same layout as its single
/// `Cell<RawChunk<UP, A>>` field. The `cast_align_mut` / `cast_init_ref` /
/// `cast_init_mut` methods rely on this to reinterpret a reference to one
/// instantiation of `BumpScope` as another that differs only in const
/// parameters.
#[repr(transparent)]
pub struct BumpScope<
    'a,
    // `A` only has a `Global` default when the `alloc` feature is on.
    #[cfg(feature = "alloc")] A = Global,
    #[cfg(not(feature = "alloc"))] A,
    const MIN_ALIGN: usize = 1,
    const UP: bool = true,
    const INIT: bool = true,
> {
    // The chunk currently being bumped into; replaced when a chunk runs out
    // of space (see `do_custom_alloc_in_another_chunk`).
    pub(crate) chunk: Cell<RawChunk<UP, A>>,
    // Ties the scope to `'a` without owning any data of that lifetime.
    marker: PhantomData<&'a ()>,
}
// Manual impl: `BumpScope` contains a `Cell`, which suppresses the automatic
// unwind-safety impls, so unwind safety is opted back in here conditional on
// the allocator itself being `UnwindSafe`.
impl<const MIN_ALIGN: usize, const UP: bool, const INIT: bool, A> UnwindSafe for BumpScope<'_, A, MIN_ALIGN, UP, INIT>
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    A: Allocator + Clone + UnwindSafe,
{
}
impl<const MIN_ALIGN: usize, const UP: bool, const INIT: bool, A> RefUnwindSafe for BumpScope<'_, A, MIN_ALIGN, UP, INIT>
where
MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
A: Allocator + Clone + UnwindSafe,
{
}
/// Debug-formats the scope by printing its chunk statistics under the
/// type name `"BumpScope"`.
impl<A: Allocator + Clone, const MIN_ALIGN: usize, const UP: bool, const INIT: bool> Debug
    for BumpScope<'_, A, MIN_ALIGN, UP, INIT>
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let stats = self.uninit_stats();
        stats.debug_format("BumpScope", f)
    }
}
// The methods generated by `bump_scope_methods!` (defined elsewhere in the
// crate) are only provided for `INIT = true` scopes, parameterized here with
// `BumpScopeGuard` as the guard type.
impl<'a, A: Allocator + Clone, const MIN_ALIGN: usize, const UP: bool> BumpScope<'a, A, MIN_ALIGN, UP, true>
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
{
    bump_scope_methods!(BumpScopeGuard, true);
}
impl<'a, A: Allocator + Clone, const MIN_ALIGN: usize, const UP: bool, const INIT: bool>
    BumpScope<'a, A, MIN_ALIGN, UP, INIT>
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
{
    /// Wraps a raw chunk in a scope.
    ///
    /// # Safety
    /// `chunk` must be a valid chunk for this scope's `UP`/`A` parameters and
    /// remain valid for `'a`.
    #[inline(always)]
    pub(crate) unsafe fn new_unchecked(chunk: RawChunk<UP, A>) -> Self {
        Self {
            chunk: Cell::new(chunk),
            marker: PhantomData,
        }
    }

    /// Returns a statistics view that does not assume the first real chunk has
    /// been allocated yet (cf. `stats` on the `INIT = true` impl below, which
    /// uses `Chunk::new_init`).
    #[must_use]
    #[inline(always)]
    pub(crate) fn uninit_stats(&self) -> UninitStats<'a, UP> {
        UninitStats {
            current: Chunk::new(self),
        }
    }

    /// Ensures a real chunk exists.
    ///
    /// A no-op when `INIT` is `true` (the type then guarantees an allocated
    /// chunk). Otherwise allocates the first chunk if we still point at the
    /// shared placeholder "empty chunk".
    #[inline(always)]
    pub(crate) fn ensure_init<E: ErrorBehavior>(&self) -> Result<(), E> {
        if INIT {
            return Ok(());
        }
        if self.chunk.get().is_the_empty_chunk() {
            self.allocate_first_chunk()?;
        }
        Ok(())
    }

    /// Cold path of `ensure_init`: allocates the first chunk with the default
    /// starting size and installs it.
    #[cold]
    #[inline(never)]
    fn allocate_first_chunk<E: ErrorBehavior>(&self) -> Result<(), E> {
        debug_assert!(self.chunk.get().is_the_empty_chunk());
        // Only reachable for `INIT = false` scopes (the `INIT = true` branch
        // of `ensure_init` returns early).
        assert!(!INIT);
        // NOTE(review): conjuring `A` out of thin air is only sound if `A` is
        // a zero-sized type that is valid when uninitialized — presumably
        // enforced by whatever constructs an `INIT = false` scope. TODO
        // confirm that invariant where such scopes are created.
        #[allow(clippy::uninit_assumed_init)]
        let allocator: A = unsafe { MaybeUninit::uninit().assume_init() };
        let chunk = RawChunk::new_in(ChunkSize::new(DEFAULT_START_CHUNK_SIZE)?, None, allocator)?;
        self.chunk.set(chunk);
        Ok(())
    }

    /// Shrinks a greedy allocation of `cap` elements down to its final `len`
    /// elements and gives the unused capacity back to the bump position.
    ///
    /// For a downward-bumping allocator the live elements are moved to the
    /// high end of the reserved region (when the source and destination don't
    /// overlap) so that the low end can be reclaimed.
    ///
    /// # Safety
    /// `start..start + cap` must be the region returned by a matching greedy
    /// allocation on this scope, with the first `len <= cap` elements
    /// initialized.
    #[inline(always)]
    pub(crate) unsafe fn consolidate_greed<T>(&mut self, mut start: NonNull<T>, len: usize, cap: usize) -> NonNull<[T]> {
        let end = nonnull::add(start, len);
        if UP {
            // Upward bump: the used prefix already sits at the low end; just
            // move the bump position down to right past the last element.
            self.set_pos(nonnull::addr(end), T::ALIGN);
            nonnull::slice_from_raw_parts(start, len)
        } else {
            {
                // Downward bump: relocate the `len` elements to the high end
                // of the `cap` region, but only when source and destination
                // do not overlap (`dst >= end`), since the copy is
                // non-overlapping.
                let dst_end = nonnull::add(start, cap);
                let dst = nonnull::sub(dst_end, len);
                if dst >= end {
                    nonnull::copy_nonoverlapping(start, dst, len);
                    start = dst;
                }
            }
            self.set_pos(nonnull::addr(start), T::ALIGN);
            nonnull::slice_from_raw_parts(start, len)
        }
    }

    /// Mirror of `consolidate_greed` for allocations that were filled from the
    /// high end (`end` points one past the last element).
    ///
    /// For an upward-bumping allocator the live elements are moved to the low
    /// end of the reserved region when that can be done without overlap.
    ///
    /// # Safety
    /// `end - cap .. end` must be the region returned by a matching greedy
    /// allocation on this scope, with the last `len <= cap` elements
    /// initialized.
    #[inline(always)]
    pub(crate) unsafe fn consolidate_greed_rev<T>(&self, mut end: NonNull<T>, len: usize, cap: usize) -> NonNull<[T]> {
        let mut start = nonnull::sub(end, len);
        if UP {
            {
                // Relocate to the low end of the region; only when the new
                // range ends at or before the old one starts (no overlap).
                let dst = nonnull::sub(end, cap);
                let dst_end = nonnull::add(dst, len);
                if dst_end <= start {
                    nonnull::copy_nonoverlapping(start, dst, len);
                    start = dst;
                    end = dst_end;
                }
            }
            self.set_pos(nonnull::addr(end), T::ALIGN);
            nonnull::slice_from_raw_parts(start, len)
        } else {
            // Downward bump: the elements already occupy the high end; just
            // move the bump position up to `start`.
            self.set_pos(nonnull::addr(start), T::ALIGN);
            nonnull::slice_from_raw_parts(start, len)
        }
    }

    /// Sets the current chunk's bump position to `pos`.
    ///
    /// `current_align` is the alignment the caller's data guaranteed; if it
    /// is smaller than `MIN_ALIGN` the position is re-aligned so the
    /// minimum-alignment invariant keeps holding.
    #[inline(always)]
    fn set_pos(&self, pos: NonZeroUsize, current_align: usize) {
        let chunk = self.chunk.get();
        // Sanity check: the *old* position already satisfied the claimed
        // alignment.
        debug_assert_eq!(nonnull::addr(chunk.pos()).get() % current_align, 0);
        unsafe { chunk.set_pos_addr(pos.get()) }
        if current_align < MIN_ALIGN {
            chunk.align_pos_to::<MIN_ALIGN>();
        }
    }

    /// Greedily allocates room for at least `cap` elements of `T`, returning
    /// the start pointer and the actual (possibly larger) capacity in
    /// elements.
    #[inline(always)]
    pub(crate) fn alloc_greedy<B: ErrorBehavior, T>(&mut self, cap: usize) -> Result<(NonNull<T>, usize), B> {
        let Range { start, end } = self.alloc_greedy_range::<B, T>(cap)?;
        // Actual capacity = byte distance / element size.
        let capacity = unsafe { nonnull::byte_sub_ptr(end, start) } / T::SIZE;
        Ok((start, capacity))
    }

    /// Like `alloc_greedy` but returns the *end* pointer, for callers that
    /// fill the region from the high end (cf. `consolidate_greed_rev`).
    #[inline(always)]
    pub(crate) fn alloc_greedy_rev<B: ErrorBehavior, T>(&mut self, cap: usize) -> Result<(NonNull<T>, usize), B> {
        let Range { start, end } = self.alloc_greedy_range::<B, T>(cap)?;
        let capacity = unsafe { nonnull::byte_sub_ptr(end, start) } / T::SIZE;
        Ok((end, capacity))
    }

    /// Shared implementation of the greedy allocators: computes the array
    /// layout, tries the current chunk, and falls back to the cold
    /// another-chunk path.
    #[inline(always)]
    fn alloc_greedy_range<B: ErrorBehavior, T>(&mut self, cap: usize) -> Result<Range<NonNull<T>>, B> {
        let layout = match ArrayLayout::array::<T>(cap) {
            Ok(ok) => ok,
            // `cap * size_of::<T>()` overflowed.
            Err(_) => return Err(B::capacity_overflow()),
        };
        let range = match self.chunk.get().alloc_greedy::<MIN_ALIGN, true>(layout) {
            Some(ptr) => ptr,
            None => self.alloc_greedy_in_another_chunk(layout)?,
        };
        Ok(range.start.cast::<T>()..range.end.cast::<T>())
    }

    /// Cold fallback for greedy allocation when the current chunk is full.
    #[cold]
    #[inline(never)]
    pub(crate) fn alloc_greedy_in_another_chunk<E: ErrorBehavior>(
        &self,
        layout: ArrayLayout,
    ) -> Result<Range<NonNull<u8>>, E> {
        // SAFETY: `RawChunk::alloc_greedy` on a fresh chunk sized for `layout`
        // is expected to succeed, which is what
        // `do_custom_alloc_in_another_chunk` requires of `allocate`.
        unsafe { self.do_custom_alloc_in_another_chunk(layout, RawChunk::alloc_greedy::<MIN_ALIGN, false>) }
    }

    /// Tries to allocate `layout` from the current chunk only; `None` means
    /// the chunk is full.
    #[inline(always)]
    pub(crate) fn alloc_in_current_chunk(&self, layout: Layout) -> Option<NonNull<u8>> {
        self.chunk.get().alloc::<MIN_ALIGN, false, false, _>(layout)
    }

    /// Cold fallback: allocate `layout` from a subsequent (possibly newly
    /// appended) chunk.
    #[cold]
    #[inline(never)]
    pub(crate) fn alloc_in_another_chunk<E: ErrorBehavior>(&self, layout: Layout) -> Result<NonNull<u8>, E> {
        unsafe { self.do_custom_alloc_in_another_chunk(layout, RawChunk::alloc::<MIN_ALIGN, false, false, _>) }
    }

    /// Allocates space for a single `T` (sized path: the layout is known at
    /// compile time, hence the `true, true` const args to `alloc`).
    #[inline(always)]
    pub(crate) fn do_alloc_sized<E: ErrorBehavior, T>(&self) -> Result<NonNull<T>, E> {
        let result = match self.chunk.get().alloc::<MIN_ALIGN, true, true, _>(ArrayLayout::new::<T>()) {
            Some(ptr) => Ok(ptr),
            None => self.do_alloc_sized_in_another_chunk::<E, T>(),
        };
        match result {
            Ok(ptr) => Ok(ptr.cast()),
            Err(error) => Err(error),
        }
    }

    /// Cold fallback of `do_alloc_sized`.
    #[cold]
    #[inline(never)]
    pub(crate) fn do_alloc_sized_in_another_chunk<E: ErrorBehavior, T>(&self) -> Result<NonNull<u8>, E>
    where
        MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    {
        let layout = Layout::new::<T>();
        self.alloc_in_another_chunk(layout)
    }

    /// Allocates space for a slice of `len` elements of `T`.
    #[inline(always)]
    pub(crate) fn do_alloc_slice<E: ErrorBehavior, T>(&self, len: usize) -> Result<NonNull<T>, E> {
        let layout = match ArrayLayout::array::<T>(len) {
            Ok(layout) => layout,
            Err(_) => return Err(E::capacity_overflow()),
        };
        let result = match self.chunk.get().alloc::<MIN_ALIGN, false, true, _>(layout) {
            Some(ptr) => Ok(ptr),
            // SAFETY: same `len`, so the fallback computes the same layout.
            None => unsafe { self.do_alloc_slice_in_another_chunk::<E, T>(len) },
        };
        match result {
            Ok(ptr) => Ok(ptr.cast()),
            Err(error) => Err(error),
        }
    }

    /// Allocates space for a copy of `value` (layout taken directly from the
    /// existing slice, so it cannot overflow).
    #[inline(always)]
    pub(crate) fn do_alloc_slice_for<E: ErrorBehavior, T>(&self, value: &[T]) -> Result<NonNull<[T]>, E> {
        let layout = ArrayLayout::for_value(value);
        let result = match self.chunk.get().alloc::<MIN_ALIGN, false, true, _>(layout) {
            Some(ptr) => Ok(ptr),
            None => unsafe { self.do_alloc_slice_in_another_chunk::<E, T>(value.len()) },
        };
        match result {
            Ok(ptr) => Ok(nonnull::slice_from_raw_parts(ptr.cast(), value.len())),
            Err(error) => Err(error),
        }
    }

    /// Cold fallback of the slice allocators; recomputes the layout from
    /// `len` and defers to the generic another-chunk path.
    #[cold]
    #[inline(never)]
    pub(crate) unsafe fn do_alloc_slice_in_another_chunk<E: ErrorBehavior, T>(&self, len: usize) -> Result<NonNull<u8>, E>
    where
        MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    {
        let layout = match Layout::array::<T>(len) {
            Ok(layout) => layout,
            Err(_) => return Err(E::capacity_overflow()),
        };
        self.alloc_in_another_chunk(layout)
    }

    /// Aligns the bump position to `ALIGN`; a no-op when `ALIGN` does not
    /// exceed the already-guaranteed `MIN_ALIGN`.
    #[inline(always)]
    pub(crate) fn align<const ALIGN: usize>(&self)
    where
        MinimumAlignment<ALIGN>: SupportedMinimumAlignment,
    {
        if ALIGN > MIN_ALIGN {
            self.chunk.get().align_pos_to::<ALIGN>();
        }
    }

    /// Like `align` but statically rejects `NEW_MIN_ALIGN < MIN_ALIGN` at
    /// compile time (used by `into_aligned` / `as_aligned_mut`).
    #[inline(always)]
    pub(crate) fn must_align_more<const NEW_MIN_ALIGN: usize>(&self)
    where
        MinimumAlignment<NEW_MIN_ALIGN>: SupportedMinimumAlignment,
    {
        const_param_assert! {
            (const MIN_ALIGN: usize, const NEW_MIN_ALIGN: usize) => NEW_MIN_ALIGN >= MIN_ALIGN, "`into_aligned` or `as_aligned_mut` can't decrease the minimum alignment"
        }
        self.align::<NEW_MIN_ALIGN>();
    }

    /// Allocates room for a `T` without advancing the bump position
    /// (`alloc_no_bump_for`); falls back to another chunk via a closure
    /// adapter because the const-generic method can't be passed as a fn item
    /// with the `L` layout parameter.
    pub(crate) fn do_alloc_no_bump_for<E: ErrorBehavior, T>(&self) -> Result<NonNull<T>, E> {
        let result = match self.chunk.get().alloc_no_bump_for::<MIN_ALIGN, T>() {
            Some(ptr) => Ok(ptr),
            None => unsafe {
                self.do_custom_alloc_in_another_chunk(Layout::new::<T>(), |chunk, _| {
                    chunk.alloc_no_bump_for::<MIN_ALIGN, T>()
                })
            },
        };
        match result {
            Ok(ptr) => Ok(ptr.cast()),
            Err(error) => Err(error),
        }
    }

    /// Generic slow path shared by all "current chunk is full" fallbacks.
    ///
    /// Strategy: if this is an uninitialized (`INIT = false`) scope still on
    /// the placeholder chunk, allocate a first chunk sized for `layout`.
    /// Otherwise walk already-appended successor chunks (resetting each) and
    /// retry `allocate`; if none fits, append a fresh chunk sized for
    /// `layout`. The new chunk is installed as current either way.
    ///
    /// # Safety
    /// `allocate` must succeed on a freshly created/appended chunk that was
    /// sized for `layout`; the `else` branch is `unreachable_unchecked` on
    /// that assumption.
    #[inline(always)]
    pub(crate) unsafe fn do_custom_alloc_in_another_chunk<E: ErrorBehavior, L: LayoutTrait, R>(
        &self,
        layout: L,
        mut allocate: impl FnMut(RawChunk<UP, A>, L) -> Option<R>,
    ) -> Result<R, E> {
        let new_chunk = if !INIT && self.chunk.get().is_the_empty_chunk() {
            // NOTE(review): same zero-sized-allocator assumption as in
            // `allocate_first_chunk` — TODO confirm.
            #[allow(clippy::uninit_assumed_init)]
            let allocator: A = MaybeUninit::uninit().assume_init();
            RawChunk::new_in(ChunkSize::for_capacity(layout.layout())?, None, allocator)
        } else {
            // Reuse chunks that were appended by a previous, since-reset
            // scope before paying for a brand new allocation.
            while let Some(chunk) = self.chunk.get().next() {
                chunk.reset();
                self.chunk.set(chunk);
                if let Some(ptr) = allocate(chunk, layout) {
                    return Ok(ptr);
                }
            }
            self.chunk.get().append_for(layout.layout())
        }?;
        self.chunk.set(new_chunk);
        if let Some(ptr) = allocate(new_chunk, layout) {
            Ok(ptr)
        } else {
            // SAFETY: per this function's contract, `allocate` cannot fail on
            // a chunk created for `layout`.
            core::hint::unreachable_unchecked()
        }
    }

    bump_common_methods!();

    /// Identity helper so `Bump`-like wrappers and scopes share call sites.
    #[inline(always)]
    pub fn as_scope(&self) -> &Self {
        self
    }

    /// Mutable counterpart of `as_scope`.
    #[inline(always)]
    pub fn as_mut_scope(&mut self) -> &mut Self {
        self
    }

    /// Converts into a scope with a larger `MIN_ALIGN` (checked at compile
    /// time via `must_align_more`), aligning the bump position accordingly.
    #[doc = doc_align_cant_decrease!()]
    #[inline(always)]
    pub fn into_aligned<const NEW_MIN_ALIGN: usize>(self) -> BumpScope<'a, A, NEW_MIN_ALIGN, UP, INIT>
    where
        MinimumAlignment<NEW_MIN_ALIGN>: SupportedMinimumAlignment,
    {
        self.must_align_more::<NEW_MIN_ALIGN>();
        // SAFETY: position was just aligned to `NEW_MIN_ALIGN`.
        unsafe { self.cast_align() }
    }

    /// Borrowing counterpart of `into_aligned`.
    #[doc = doc_align_cant_decrease!()]
    #[inline(always)]
    pub fn as_aligned_mut<const NEW_MIN_ALIGN: usize>(&mut self) -> &mut BumpScope<'a, A, NEW_MIN_ALIGN, UP, INIT>
    where
        MinimumAlignment<NEW_MIN_ALIGN>: SupportedMinimumAlignment,
    {
        self.must_align_more::<NEW_MIN_ALIGN>();
        // SAFETY: position was just aligned to `NEW_MIN_ALIGN`.
        unsafe { self.cast_align_mut() }
    }

    /// Reinterprets `self` with a different `MIN_ALIGN` by moving the field.
    ///
    /// # Safety
    /// The bump position must already satisfy `NEW_MIN_ALIGN`.
    #[inline(always)]
    pub(crate) unsafe fn cast_align<const NEW_MIN_ALIGN: usize>(self) -> BumpScope<'a, A, NEW_MIN_ALIGN, UP, INIT>
    where
        MinimumAlignment<NEW_MIN_ALIGN>: SupportedMinimumAlignment,
    {
        BumpScope {
            chunk: self.chunk,
            marker: PhantomData,
        }
    }

    /// Reference version of `cast_align`; relies on `#[repr(transparent)]`
    /// making the two instantiations layout-identical.
    ///
    /// # Safety
    /// The bump position must already satisfy `NEW_MIN_ALIGN`.
    #[inline(always)]
    pub(crate) unsafe fn cast_align_mut<const NEW_MIN_ALIGN: usize>(
        &mut self,
    ) -> &mut BumpScope<'a, A, NEW_MIN_ALIGN, UP, INIT>
    where
        MinimumAlignment<NEW_MIN_ALIGN>: SupportedMinimumAlignment,
    {
        &mut *pointer::from_mut(self).cast::<BumpScope<'a, A, NEW_MIN_ALIGN, UP, INIT>>()
    }

    /// Converts into an `INIT = true` scope, allocating the first chunk if
    /// needed; panics/aborts on allocation failure per crate policy.
    #[cfg(not(no_global_oom_handling))]
    pub fn into_init(self) -> BumpScope<'a, A, MIN_ALIGN, UP, true> {
        infallible(self.generic_into_init())
    }

    /// Fallible version of `into_init`.
    pub fn try_into_init(self) -> Result<BumpScope<'a, A, MIN_ALIGN, UP, true>, AllocError> {
        self.generic_into_init()
    }

    // Shared implementation of `into_init` / `try_into_init`.
    fn generic_into_init<E: ErrorBehavior>(self) -> Result<BumpScope<'a, A, MIN_ALIGN, UP, true>, E> {
        self.as_scope().ensure_init()?;
        // SAFETY: `ensure_init` just guaranteed an allocated chunk.
        Ok(unsafe { self.cast_init() })
    }

    /// Shared-reference counterpart of `into_init`.
    #[cfg(not(no_global_oom_handling))]
    pub fn as_init(&self) -> &BumpScope<'a, A, MIN_ALIGN, UP, true> {
        infallible(self.generic_as_init())
    }

    /// Fallible version of `as_init`.
    pub fn try_as_init(&self) -> Result<&BumpScope<'a, A, MIN_ALIGN, UP, true>, AllocError> {
        self.generic_as_init()
    }

    // Shared implementation of `as_init` / `try_as_init`.
    fn generic_as_init<E: ErrorBehavior>(&self) -> Result<&BumpScope<'a, A, MIN_ALIGN, UP, true>, E> {
        self.as_scope().ensure_init()?;
        // SAFETY: `ensure_init` just guaranteed an allocated chunk.
        Ok(unsafe { self.cast_init_ref() })
    }

    /// Mutable counterpart of `as_init`.
    #[cfg(not(no_global_oom_handling))]
    pub fn as_init_mut(&mut self) -> &mut BumpScope<'a, A, MIN_ALIGN, UP, true> {
        infallible(self.generic_as_init_mut())
    }

    /// Fallible version of `as_init_mut`.
    pub fn try_as_init_mut(&mut self) -> Result<&mut BumpScope<'a, A, MIN_ALIGN, UP, true>, AllocError> {
        self.generic_as_init_mut()
    }

    // Shared implementation of `as_init_mut` / `try_as_init_mut`.
    fn generic_as_init_mut<E: ErrorBehavior>(&mut self) -> Result<&mut BumpScope<'a, A, MIN_ALIGN, UP, true>, E> {
        self.as_scope().ensure_init()?;
        // SAFETY: `ensure_init` just guaranteed an allocated chunk.
        Ok(unsafe { self.cast_init_mut() })
    }

    /// Reinterprets `self` as `INIT = true` by moving the field.
    ///
    /// # Safety
    /// A real (non-placeholder) chunk must have been allocated.
    #[inline(always)]
    pub(crate) unsafe fn cast_init(self) -> BumpScope<'a, A, MIN_ALIGN, UP, true> {
        BumpScope {
            chunk: self.chunk,
            marker: PhantomData,
        }
    }

    /// Reference version of `cast_init`; relies on `#[repr(transparent)]`.
    ///
    /// # Safety
    /// A real (non-placeholder) chunk must have been allocated.
    #[inline(always)]
    pub(crate) unsafe fn cast_init_ref(&self) -> &BumpScope<'a, A, MIN_ALIGN, UP, true> {
        &*pointer::from_ref(self).cast::<BumpScope<'a, A, MIN_ALIGN, UP, true>>()
    }

    /// Mutable-reference version of `cast_init`.
    ///
    /// # Safety
    /// A real (non-placeholder) chunk must have been allocated.
    #[inline(always)]
    pub(crate) unsafe fn cast_init_mut(&mut self) -> &mut BumpScope<'a, A, MIN_ALIGN, UP, true> {
        &mut *pointer::from_mut(self).cast::<BumpScope<'a, A, MIN_ALIGN, UP, true>>()
    }

    /// Duplicates the scope handle.
    ///
    /// # Safety
    /// The caller must ensure the two handles are not used in ways that
    /// violate the scope's exclusivity assumptions (both share one chunk
    /// list).
    #[inline(always)]
    pub(crate) unsafe fn clone_unchecked(&self) -> BumpScope<'a, A, MIN_ALIGN, UP, INIT> {
        BumpScope::new_unchecked(self.chunk.get())
    }

    /// Leaks the scope as an opaque pointer to its chunk header, suppressing
    /// drop; reconstruct with `from_raw`.
    #[inline]
    #[must_use]
    pub fn into_raw(self) -> NonNull<()> {
        let this = ManuallyDrop::new(self);
        this.chunk.get().header_ptr().cast()
    }

    /// Rebuilds a scope from a pointer produced by `into_raw`.
    ///
    /// # Safety
    /// `ptr` must have come from `into_raw` of a scope with identical type
    /// parameters, and must not be used to create more than one live scope.
    #[inline]
    #[must_use]
    pub unsafe fn from_raw(ptr: NonNull<()>) -> Self {
        let chunk = Cell::new(RawChunk::from_header(ptr.cast()));
        Self {
            chunk,
            marker: PhantomData,
        }
    }
}
// `INIT = true` scopes are known to have an allocated chunk, so they can hand
// out the richer `Stats` (built with `Chunk::new_init`).
impl<'a, const MIN_ALIGN: usize, const UP: bool, A> BumpScope<'a, A, MIN_ALIGN, UP, true>
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    A: Allocator + Clone,
{
    #[doc = crate::doc_fn_stats!(Stats)]
    #[must_use]
    #[inline(always)]
    pub fn stats(&self) -> Stats<'a, UP> {
        Stats {
            current: Chunk::new_init(self),
        }
    }
}
// `INIT = false` scopes may still be on the placeholder chunk, so `stats`
// returns the weaker `UninitStats` view (same as `uninit_stats` above).
impl<'a, const MIN_ALIGN: usize, const UP: bool, A> BumpScope<'a, A, MIN_ALIGN, UP, false>
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    A: Allocator + Clone,
{
    #[doc = crate::doc_fn_stats!(UninitStats)]
    #[must_use]
    #[inline(always)]
    pub fn stats(&self) -> UninitStats<'a, UP> {
        UninitStats {
            current: Chunk::new(self),
        }
    }
}