#[cfg(test)]
use crate::WithDrop;
use crate::{
bump_align_guard::BumpAlignGuard,
bump_common_methods, bump_scope_methods,
bumping::{bump_down, bump_up, BumpUp},
chunk_size::ChunkSize,
const_param_assert, doc_align_cant_decrease, down_align_usize, exact_size_iterator_bad_len,
layout::{ArrayLayout, CustomLayout, LayoutProps, SizedLayout},
polyfill::{nonnull, pointer},
up_align_usize_unchecked, BaseAllocator, BumpBox, BumpScopeGuard, BumpString, BumpVec, Checkpoint, ErrorBehavior,
FixedBumpString, FixedBumpVec, GuaranteedAllocatedStats, MinimumAlignment, MutBumpString, MutBumpVec, MutBumpVecRev,
NoDrop, RawChunk, SizedTypeProperties, Stats, SupportedMinimumAlignment, WithoutDealloc, WithoutShrink,
};
#[cfg(not(no_global_oom_handling))]
use crate::{infallible, Infallibly};
use allocator_api2::alloc::AllocError;
use core::{
alloc::Layout,
cell::Cell,
fmt::{self, Debug},
marker::PhantomData,
mem::{ManuallyDrop, MaybeUninit},
num::NonZeroUsize,
ops::Range,
panic::{RefUnwindSafe, UnwindSafe},
ptr::NonNull,
};
macro_rules! bump_scope_declaration {
($($allocator_parameter:tt)*) => {
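        /// A bump allocation scope whose allocations live for the lifetime `'a`.
        ///
        /// This is a `#[repr(transparent)]` wrapper around the current chunk,
        /// which is what allows the `cast_*` methods below to reinterpret the
        /// const parameters in place.
        ///
        /// A minimal usage sketch:
        ///
        /// ```
        /// use bump_scope::Bump;
        ///
        /// let mut bump: Bump = Bump::new();
        /// bump.scoped(|scope| {
        ///     let hello = scope.alloc_str("hello");
        ///     assert_eq!(&*hello, "hello");
        /// });
        /// ```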
#[repr(transparent)]
pub struct BumpScope<
'a,
$($allocator_parameter)*,
const MIN_ALIGN: usize = 1,
const UP: bool = true,
const GUARANTEED_ALLOCATED: bool = true,
> {
pub(crate) chunk: Cell<RawChunk<UP, A>>,
marker: PhantomData<&'a ()>,
}
};
}
crate::maybe_default_allocator!(bump_scope_declaration);
impl<const MIN_ALIGN: usize, const UP: bool, const GUARANTEED_ALLOCATED: bool, A> UnwindSafe
for BumpScope<'_, A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED>
where
MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
A: BaseAllocator<GUARANTEED_ALLOCATED> + UnwindSafe,
{
}
impl<const MIN_ALIGN: usize, const UP: bool, const GUARANTEED_ALLOCATED: bool, A> RefUnwindSafe
for BumpScope<'_, A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED>
where
MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
A: BaseAllocator<GUARANTEED_ALLOCATED> + UnwindSafe,
{
}
impl<A, const MIN_ALIGN: usize, const UP: bool, const GUARANTEED_ALLOCATED: bool> Debug
for BumpScope<'_, A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED>
where
MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
A: BaseAllocator<GUARANTEED_ALLOCATED>,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.stats().debug_format("BumpScope", f)
}
}
impl<'a, A, const MIN_ALIGN: usize, const UP: bool> BumpScope<'a, A, MIN_ALIGN, UP>
where
MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
A: BaseAllocator<true>,
{
bump_scope_methods!(BumpScopeGuard, true);
}
impl<'a, A, const MIN_ALIGN: usize, const UP: bool, const GUARANTEED_ALLOCATED: bool>
BumpScope<'a, A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED>
where
MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
A: BaseAllocator<GUARANTEED_ALLOCATED>,
{
#[inline(always)]
pub(crate) unsafe fn new_unchecked(chunk: RawChunk<UP, A>) -> Self {
Self {
chunk: Cell::new(chunk),
marker: PhantomData,
}
}
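    /// Ensures this scope has a chunk, allocating the first one if the scope
    /// was created unallocated.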
#[inline(always)]
pub(crate) fn ensure_allocated<E: ErrorBehavior>(&self) -> Result<(), E> {
if self.is_unallocated() {
self.allocate_first_chunk()?;
}
Ok(())
}
#[cold]
#[inline(never)]
fn allocate_first_chunk<B: ErrorBehavior>(&self) -> Result<(), B> {
debug_assert!(self.chunk.get().is_unallocated());
let allocator = A::default_or_panic();
let chunk = RawChunk::new_in(ChunkSize::DEFAULT_START, None, allocator)?;
self.chunk.set(chunk);
Ok(())
}
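    /// Shrinks a greedy allocation of `cap` elements down to its initialized
    /// `len`, returning the elements as a slice and handing the unused
    /// capacity back to the bump allocator.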
#[inline(always)]
pub(crate) unsafe fn consolidate_greed<T>(&mut self, mut start: NonNull<T>, len: usize, cap: usize) -> NonNull<[T]> {
let end = nonnull::add(start, len);
if UP {
self.set_pos(nonnull::addr(end), T::ALIGN);
nonnull::slice_from_raw_parts(start, len)
} else {
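            // Bumping downwards: move the elements to the upper end of the
            // allocation so the unused lower part can be reclaimed.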
{
let dst_end = nonnull::add(start, cap);
let dst = nonnull::sub(dst_end, len);
nonnull::copy(start, dst, len);
start = dst;
}
self.set_pos(nonnull::addr(start), T::ALIGN);
nonnull::slice_from_raw_parts(start, len)
}
}
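    /// Like `consolidate_greed`, but for collections that were filled from
    /// `end` downwards (e.g. `MutBumpVecRev`).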
#[inline(always)]
pub(crate) unsafe fn consolidate_greed_rev<T>(&self, mut end: NonNull<T>, len: usize, cap: usize) -> NonNull<[T]> {
let mut start = nonnull::sub(end, len);
if UP {
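            // Bumping upwards: move the elements to the lower end of the
            // allocation so the unused upper part can be reclaimed.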
{
let dst = nonnull::sub(end, cap);
let dst_end = nonnull::add(dst, len);
nonnull::copy(start, dst, len);
start = dst;
end = dst_end;
}
self.set_pos(nonnull::addr(end), T::ALIGN);
nonnull::slice_from_raw_parts(start, len)
} else {
self.set_pos(nonnull::addr(start), T::ALIGN);
nonnull::slice_from_raw_parts(start, len)
}
}
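    /// Sets the chunk's bump position to `pos`, which must be aligned to
    /// `current_align`; re-establishes `MIN_ALIGN` if `current_align` is
    /// smaller.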
#[inline(always)]
fn set_pos(&self, pos: NonZeroUsize, current_align: usize) {
let chunk = self.chunk.get();
debug_assert_eq!(pos.get() % current_align, 0);
unsafe { chunk.set_pos_addr(pos.get()) }
if current_align < MIN_ALIGN {
chunk.align_pos_to::<MIN_ALIGN>();
}
}
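    /// Allocates room for at least `cap` elements of `T`, returning the start
    /// pointer and the actual capacity, which may exceed `cap`.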
#[inline(always)]
pub(crate) fn alloc_greedy<B: ErrorBehavior, T>(&mut self, cap: usize) -> Result<(NonNull<T>, usize), B> {
let Range { start, end } = self.alloc_greedy_range::<B, T>(cap)?;
let capacity = unsafe { nonnull::byte_sub_ptr(end, start) } / T::SIZE;
Ok((start, capacity))
}
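    /// Like `alloc_greedy`, but returns the *end* pointer of the allocation,
    /// for collections that are filled from back to front.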
#[inline(always)]
pub(crate) fn alloc_greedy_rev<B: ErrorBehavior, T>(&mut self, cap: usize) -> Result<(NonNull<T>, usize), B> {
let Range { start, end } = self.alloc_greedy_range::<B, T>(cap)?;
let capacity = unsafe { nonnull::byte_sub_ptr(end, start) } / T::SIZE;
Ok((end, capacity))
}
#[inline(always)]
fn alloc_greedy_range<B: ErrorBehavior, T>(&mut self, cap: usize) -> Result<Range<NonNull<T>>, B> {
let layout = match ArrayLayout::array::<T>(cap) {
Ok(ok) => ok,
Err(_) => return Err(B::capacity_overflow()),
};
let range = match self.chunk.get().alloc_greedy(MinimumAlignment::<MIN_ALIGN>, layout) {
Some(ptr) => ptr,
None => self.alloc_greedy_in_another_chunk(*layout)?,
};
Ok(range.start.cast::<T>()..range.end.cast::<T>())
}
#[cold]
#[inline(never)]
pub(crate) fn alloc_greedy_in_another_chunk<E: ErrorBehavior>(&self, layout: Layout) -> Result<Range<NonNull<u8>>, E> {
let layout = CustomLayout(layout);
unsafe {
self.do_custom_alloc_in_another_chunk(layout, |chunk, layout| {
chunk.alloc_greedy(MinimumAlignment::<MIN_ALIGN>, layout)
})
}
}
#[inline(always)]
pub(crate) fn alloc_in_current_chunk(&self, layout: Layout) -> Option<NonNull<u8>> {
self.chunk.get().alloc(MinimumAlignment::<MIN_ALIGN>, CustomLayout(layout))
}
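    /// Cold path for when `layout` does not fit in the current chunk: moves to
    /// (or appends) another chunk and allocates there.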
#[cold]
#[inline(never)]
pub(crate) fn alloc_in_another_chunk<E: ErrorBehavior>(&self, layout: Layout) -> Result<NonNull<u8>, E> {
unsafe {
self.do_custom_alloc_in_another_chunk(CustomLayout(layout), |chunk, layout| {
chunk.alloc(MinimumAlignment::<MIN_ALIGN>, layout)
})
}
}
#[cold]
#[inline(never)]
pub(crate) fn reserve_in_another_chunk<E: ErrorBehavior>(&self, layout: Layout) -> Result<NonNull<u8>, E> {
unsafe {
self.do_custom_alloc_in_another_chunk(CustomLayout(layout), |chunk, layout| {
chunk.reserve(MinimumAlignment::<MIN_ALIGN>, layout)
})
}
}
#[inline(always)]
pub(crate) fn do_alloc_sized<E: ErrorBehavior, T>(&self) -> Result<NonNull<T>, E> {
E::alloc_or_else(
self.chunk.get(),
MinimumAlignment::<MIN_ALIGN>,
SizedLayout::new::<T>(),
|| self.do_alloc_sized_in_another_chunk::<E, T>(),
)
.map(NonNull::cast)
}
#[inline(always)]
pub(crate) fn do_reserve_sized<E: ErrorBehavior, T>(&self) -> Result<NonNull<T>, E> {
E::reserve_or_else(
self.chunk.get(),
MinimumAlignment::<MIN_ALIGN>,
SizedLayout::new::<T>(),
|| self.do_reserve_sized_in_another_chunk::<E, T>(),
)
.map(NonNull::cast)
}
#[cold]
#[inline(never)]
pub(crate) fn do_alloc_sized_in_another_chunk<E: ErrorBehavior, T>(&self) -> Result<NonNull<u8>, E>
where
MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
{
self.alloc_in_another_chunk(Layout::new::<T>())
}
#[cold]
#[inline(never)]
pub(crate) fn do_reserve_sized_in_another_chunk<E: ErrorBehavior, T>(&self) -> Result<NonNull<u8>, E>
where
MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
{
self.reserve_in_another_chunk(Layout::new::<T>())
}
#[inline(always)]
pub(crate) fn do_alloc_slice<E: ErrorBehavior, T>(&self, len: usize) -> Result<NonNull<T>, E> {
let layout = match ArrayLayout::array::<T>(len) {
Ok(layout) => layout,
Err(_) => return Err(E::capacity_overflow()),
};
E::alloc_or_else(self.chunk.get(), MinimumAlignment::<MIN_ALIGN>, layout, || unsafe {
self.do_alloc_slice_in_another_chunk::<E, T>(len)
})
.map(NonNull::cast)
}
#[inline(always)]
pub(crate) fn do_alloc_slice_for<E: ErrorBehavior, T>(&self, value: &[T]) -> Result<NonNull<T>, E> {
let layout = ArrayLayout::for_value(value);
E::alloc_or_else(self.chunk.get(), MinimumAlignment::<MIN_ALIGN>, layout, || unsafe {
self.do_alloc_slice_in_another_chunk::<E, T>(value.len())
})
.map(NonNull::cast)
}
#[cold]
#[inline(never)]
pub(crate) unsafe fn do_alloc_slice_in_another_chunk<E: ErrorBehavior, T>(&self, len: usize) -> Result<NonNull<u8>, E>
where
MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
{
let layout = match Layout::array::<T>(len) {
Ok(layout) => layout,
Err(_) => return Err(E::capacity_overflow()),
};
self.alloc_in_another_chunk(layout)
}
#[inline(always)]
pub(crate) fn align<const ALIGN: usize>(&self)
where
MinimumAlignment<ALIGN>: SupportedMinimumAlignment,
{
if ALIGN > MIN_ALIGN {
self.chunk.get().align_pos_to::<ALIGN>();
}
}
#[inline(always)]
pub(crate) fn must_align_more<const NEW_MIN_ALIGN: usize>(&self)
where
MinimumAlignment<NEW_MIN_ALIGN>: SupportedMinimumAlignment,
{
const_param_assert! {
(const MIN_ALIGN: usize, const NEW_MIN_ALIGN: usize) => NEW_MIN_ALIGN >= MIN_ALIGN, "`into_aligned` or `as_aligned_mut` can't decrease the minimum alignment"
}
self.align::<NEW_MIN_ALIGN>();
}
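    /// Shared slow path of the `*_in_another_chunk` methods.
    ///
    /// If the scope is unallocated, a first chunk sized for `layout` is
    /// created. Otherwise every already-allocated successor chunk is reset and
    /// tried in turn before a new chunk is appended.
    ///
    /// # Safety
    ///
    /// `allocate` must succeed on a fresh chunk that was sized to fit
    /// `layout`; the final call relies on this via `unreachable_unchecked`.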
#[inline(always)]
pub(crate) unsafe fn do_custom_alloc_in_another_chunk<B: ErrorBehavior, L: LayoutProps, R>(
&self,
layout: L,
mut allocate: impl FnMut(RawChunk<UP, A>, L) -> Option<R>,
) -> Result<R, B> {
let new_chunk = if self.is_unallocated() {
let allocator = A::default_or_panic();
RawChunk::new_in(
ChunkSize::for_capacity(*layout).ok_or_else(B::capacity_overflow)?,
None,
allocator,
)
} else {
while let Some(chunk) = self.chunk.get().next() {
chunk.reset();
self.chunk.set(chunk);
if let Some(ptr) = allocate(chunk, layout) {
return Ok(ptr);
}
}
self.chunk.get().append_for(*layout)
}?;
self.chunk.set(new_chunk);
if let Some(ptr) = allocate(new_chunk, layout) {
Ok(ptr)
} else {
core::hint::unreachable_unchecked()
}
}
bump_common_methods!(true);
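    /// Returns this scope as a shared reference; `Bump` provides the same
    /// method, so code can be written uniformly against both types.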
#[inline(always)]
pub fn as_scope(&self) -> &Self {
self
}
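    /// Returns this scope as a mutable reference.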
#[inline(always)]
pub fn as_mut_scope(&mut self) -> &mut Self {
self
}
#[doc = doc_align_cant_decrease!()]
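    ///
    /// A minimal usage sketch (the scope is consumed and returned with the new
    /// minimum alignment):
    ///
    /// ```
    /// use bump_scope::Bump;
    ///
    /// let mut bump: Bump = Bump::new();
    /// bump.scoped(|bump| {
    ///     let bump = bump.into_aligned::<8>();
    ///     let one = bump.alloc(1u64);
    ///     assert_eq!(*one, 1);
    /// });
    /// ```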
#[inline(always)]
pub fn into_aligned<const NEW_MIN_ALIGN: usize>(self) -> BumpScope<'a, A, NEW_MIN_ALIGN, UP, GUARANTEED_ALLOCATED>
where
MinimumAlignment<NEW_MIN_ALIGN>: SupportedMinimumAlignment,
{
self.must_align_more::<NEW_MIN_ALIGN>();
unsafe { self.cast_align() }
}
#[doc = doc_align_cant_decrease!()]
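    ///
    /// Unlike `into_aligned`, this borrows the scope instead of consuming it.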
#[inline(always)]
pub fn as_aligned_mut<const NEW_MIN_ALIGN: usize>(
&mut self,
) -> &mut BumpScope<'a, A, NEW_MIN_ALIGN, UP, GUARANTEED_ALLOCATED>
where
MinimumAlignment<NEW_MIN_ALIGN>: SupportedMinimumAlignment,
{
self.must_align_more::<NEW_MIN_ALIGN>();
unsafe { self.cast_align_mut() }
}
#[inline(always)]
pub(crate) unsafe fn cast_align<const NEW_MIN_ALIGN: usize>(
self,
) -> BumpScope<'a, A, NEW_MIN_ALIGN, UP, GUARANTEED_ALLOCATED>
where
MinimumAlignment<NEW_MIN_ALIGN>: SupportedMinimumAlignment,
{
BumpScope {
chunk: self.chunk,
marker: PhantomData,
}
}
#[inline(always)]
pub(crate) unsafe fn cast_align_mut<const NEW_MIN_ALIGN: usize>(
&mut self,
) -> &mut BumpScope<'a, A, NEW_MIN_ALIGN, UP, GUARANTEED_ALLOCATED>
where
MinimumAlignment<NEW_MIN_ALIGN>: SupportedMinimumAlignment,
{
&mut *pointer::from_mut(self).cast::<BumpScope<'a, A, NEW_MIN_ALIGN, UP, GUARANTEED_ALLOCATED>>()
}
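    /// Converts this scope into a guaranteed-allocated one, allocating the
    /// first chunk if none has been allocated yet.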
#[cfg(not(no_global_oom_handling))]
pub fn into_guaranteed_allocated(self) -> BumpScope<'a, A, MIN_ALIGN, UP> {
infallible(self.generic_into_guaranteed_allocated())
}
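    /// Fallible variant of `into_guaranteed_allocated`; errors if allocating
    /// the first chunk fails.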
pub fn try_into_guaranteed_allocated(self) -> Result<BumpScope<'a, A, MIN_ALIGN, UP>, AllocError> {
self.generic_into_guaranteed_allocated()
}
fn generic_into_guaranteed_allocated<E: ErrorBehavior>(self) -> Result<BumpScope<'a, A, MIN_ALIGN, UP>, E> {
self.as_scope().ensure_allocated()?;
Ok(unsafe { self.cast_allocated() })
}
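    /// Borrows this scope as guaranteed-allocated, allocating the first chunk
    /// if none has been allocated yet.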
#[cfg(not(no_global_oom_handling))]
pub fn as_guaranteed_allocated(&self) -> &BumpScope<'a, A, MIN_ALIGN, UP> {
infallible(self.generic_as_guaranteed_allocated())
}
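    /// Fallible variant of `as_guaranteed_allocated`.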
pub fn try_as_guaranteed_allocated(&self) -> Result<&BumpScope<'a, A, MIN_ALIGN, UP>, AllocError> {
self.generic_as_guaranteed_allocated()
}
fn generic_as_guaranteed_allocated<E: ErrorBehavior>(&self) -> Result<&BumpScope<'a, A, MIN_ALIGN, UP>, E> {
self.as_scope().ensure_allocated()?;
Ok(unsafe { self.cast_allocated_ref() })
}
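    /// Mutably borrows this scope as guaranteed-allocated, allocating the
    /// first chunk if none has been allocated yet.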
#[cfg(not(no_global_oom_handling))]
pub fn as_guaranteed_allocated_mut(&mut self) -> &mut BumpScope<'a, A, MIN_ALIGN, UP> {
infallible(self.generic_as_guaranteed_allocated_mut())
}
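    /// Fallible variant of `as_guaranteed_allocated_mut`.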
pub fn try_as_guaranteed_allocated_mut(&mut self) -> Result<&mut BumpScope<'a, A, MIN_ALIGN, UP>, AllocError> {
self.generic_as_guaranteed_allocated_mut()
}
fn generic_as_guaranteed_allocated_mut<E: ErrorBehavior>(&mut self) -> Result<&mut BumpScope<'a, A, MIN_ALIGN, UP>, E> {
self.as_scope().ensure_allocated()?;
Ok(unsafe { self.cast_allocated_mut() })
}
#[inline(always)]
pub(crate) unsafe fn cast_allocated(self) -> BumpScope<'a, A, MIN_ALIGN, UP> {
BumpScope {
chunk: self.chunk,
marker: PhantomData,
}
}
#[inline(always)]
pub(crate) unsafe fn cast_allocated_ref(&self) -> &BumpScope<'a, A, MIN_ALIGN, UP> {
&*pointer::from_ref(self).cast::<BumpScope<'a, A, MIN_ALIGN, UP>>()
}
#[inline(always)]
pub(crate) unsafe fn cast_allocated_mut(&mut self) -> &mut BumpScope<'a, A, MIN_ALIGN, UP> {
&mut *pointer::from_mut(self).cast::<BumpScope<'a, A, MIN_ALIGN, UP>>()
}
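    /// Creates a second handle to the same scope.
    ///
    /// # Safety
    ///
    /// The caller must ensure the clone and the original are not used in ways
    /// that break the aliasing assumptions of `&mut self` methods.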
#[inline(always)]
pub(crate) unsafe fn clone_unchecked(&self) -> BumpScope<'a, A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED> {
BumpScope::new_unchecked(self.chunk.get())
}
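    /// Turns this scope into a raw pointer to its chunk header.
    ///
    /// The scope can be reconstructed with [`from_raw`](Self::from_raw).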
#[inline]
#[must_use]
pub fn into_raw(self) -> NonNull<()> {
let this = ManuallyDrop::new(self);
this.chunk.get().header_ptr().cast()
}
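    /// Reconstructs a scope from a pointer previously returned by
    /// [`into_raw`](Self::into_raw).
    ///
    /// # Safety
    ///
    /// `ptr` must have come from `into_raw` of a scope with identical type
    /// parameters, and it must not be turned back into a scope more than once.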
#[inline]
#[must_use]
pub unsafe fn from_raw(ptr: NonNull<()>) -> Self {
let chunk = Cell::new(RawChunk::from_header(ptr.cast()));
Self {
chunk,
marker: PhantomData,
}
}
}
impl<A, const MIN_ALIGN: usize, const UP: bool> NoDrop for BumpScope<'_, A, MIN_ALIGN, UP> {}
impl<'a, A, const MIN_ALIGN: usize, const UP: bool, const GUARANTEED_ALLOCATED: bool>
BumpScope<'a, A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED>
where
MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
A: BaseAllocator<GUARANTEED_ALLOCATED>,
{
#[inline(always)]
pub(crate) fn generic_alloc<B: ErrorBehavior, T>(&self, value: T) -> Result<BumpBox<'a, T>, B> {
self.generic_alloc_with(|| value)
}
#[inline(always)]
pub(crate) fn generic_alloc_with<B: ErrorBehavior, T>(&self, f: impl FnOnce() -> T) -> Result<BumpBox<'a, T>, B> {
if T::IS_ZST {
let value = f();
return Ok(BumpBox::zst(value));
}
let chunk = self.chunk.get();
let props = chunk.bump_props(MinimumAlignment::<MIN_ALIGN>, crate::layout::SizedLayout::new::<T>());
unsafe {
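            // Fast path: bump within the current chunk; fall back to the cold
            // path that switches to (or allocates) another chunk.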
let ptr = if UP {
if let Some(BumpUp { new_pos, ptr }) = bump_up(props) {
chunk.set_pos_addr(new_pos);
chunk.with_addr(ptr)
} else {
self.do_alloc_sized_in_another_chunk::<B, T>()?
}
} else {
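                // When bumping downwards, the new position doubles as the
                // allocation's address.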
if let Some(addr) = bump_down(props) {
chunk.set_pos_addr(addr);
chunk.with_addr(addr)
} else {
self.do_alloc_sized_in_another_chunk::<B, T>()?
}
};
let ptr = ptr.cast::<T>();
nonnull::write_with(ptr, f);
Ok(BumpBox::from_raw(ptr))
}
}
#[inline(always)]
pub(crate) fn generic_alloc_default<B: ErrorBehavior, T: Default>(&self) -> Result<BumpBox<'a, T>, B> {
self.generic_alloc_with(Default::default)
}
#[inline(always)]
pub(crate) fn generic_alloc_slice_copy<B: ErrorBehavior, T: Copy>(&self, slice: &[T]) -> Result<BumpBox<'a, [T]>, B> {
if T::IS_ZST {
return Ok(BumpBox::zst_slice_clone(slice));
}
let len = slice.len();
let src = slice.as_ptr();
let dst = self.do_alloc_slice_for(slice)?;
unsafe {
core::ptr::copy_nonoverlapping(src, dst.as_ptr(), len);
Ok(BumpBox::from_raw(nonnull::slice_from_raw_parts(dst, len)))
}
}
#[inline(always)]
pub(crate) fn generic_alloc_slice_clone<B: ErrorBehavior, T: Clone>(&self, slice: &[T]) -> Result<BumpBox<'a, [T]>, B> {
if T::IS_ZST {
return Ok(BumpBox::zst_slice_clone(slice));
}
Ok(self.generic_alloc_uninit_slice_for(slice)?.init_clone(slice))
}
#[inline(always)]
pub(crate) fn generic_alloc_slice_fill<B: ErrorBehavior, T: Clone>(
&self,
len: usize,
value: T,
) -> Result<BumpBox<'a, [T]>, B> {
if T::IS_ZST {
return Ok(BumpBox::zst_slice_fill(len, value));
}
Ok(self.generic_alloc_uninit_slice(len)?.init_fill(value))
}
#[inline(always)]
pub(crate) fn generic_alloc_slice_fill_with<B: ErrorBehavior, T>(
&self,
len: usize,
f: impl FnMut() -> T,
) -> Result<BumpBox<'a, [T]>, B> {
if T::IS_ZST {
return Ok(BumpBox::zst_slice_fill_with(len, f));
}
Ok(self.generic_alloc_uninit_slice(len)?.init_fill_with(f))
}
#[inline(always)]
pub(crate) fn generic_alloc_str<B: ErrorBehavior>(&self, src: &str) -> Result<BumpBox<'a, str>, B> {
let slice = self.generic_alloc_slice_copy(src.as_bytes())?;
Ok(unsafe { slice.into_boxed_str_unchecked() })
}
#[inline(always)]
pub(crate) fn generic_alloc_fmt<B: ErrorBehavior>(&self, args: fmt::Arguments) -> Result<BumpBox<'a, str>, B> {
if let Some(string) = args.as_str() {
return self.generic_alloc_str(string);
}
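        // Otherwise, format into a bump string and freeze it into a boxed `str`.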
let mut string = BumpString::new_in(self);
let string = if B::IS_FALLIBLE {
if fmt::Write::write_fmt(&mut string, args).is_err() {
return Err(B::format_trait_error());
}
string
} else {
#[cfg(not(no_global_oom_handling))]
{
let mut string = Infallibly(string);
if fmt::Write::write_fmt(&mut string, args).is_err() {
return Err(B::format_trait_error());
}
string.0
}
#[cfg(no_global_oom_handling)]
{
unreachable!()
}
};
Ok(string.into_boxed_str())
}
#[inline(always)]
pub(crate) fn generic_alloc_fmt_mut<B: ErrorBehavior>(&mut self, args: fmt::Arguments) -> Result<BumpBox<'a, str>, B> {
if let Some(string) = args.as_str() {
return self.generic_alloc_str(string);
}
let mut string = MutBumpString::generic_with_capacity_in(0, self)?;
let string = if B::IS_FALLIBLE {
if fmt::Write::write_fmt(&mut string, args).is_err() {
return Err(B::format_trait_error());
}
string
} else {
#[cfg(not(no_global_oom_handling))]
{
let mut string = Infallibly(string);
if fmt::Write::write_fmt(&mut string, args).is_err() {
return Err(B::format_trait_error());
}
string.0
}
#[cfg(no_global_oom_handling)]
{
unreachable!()
}
};
Ok(string.into_boxed_str())
}
#[inline(always)]
pub(crate) fn generic_alloc_iter<B: ErrorBehavior, T>(
&self,
iter: impl IntoIterator<Item = T>,
) -> Result<BumpBox<'a, [T]>, B> {
let iter = iter.into_iter();
let capacity = iter.size_hint().0;
let mut vec = BumpVec::<T, A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED>::generic_with_capacity_in(capacity, self)?;
for value in iter {
vec.generic_push(value)?;
}
Ok(vec.into_boxed_slice())
}
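    /// Allocates exactly `iter.len()` elements up front, diverging via
    /// `exact_size_iterator_bad_len` if the iterator yields fewer items than
    /// its `ExactSizeIterator` implementation promised.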
#[inline(always)]
pub(crate) fn generic_alloc_iter_exact<B: ErrorBehavior, T, I>(
&self,
iter: impl IntoIterator<Item = T, IntoIter = I>,
) -> Result<BumpBox<'a, [T]>, B>
where
I: ExactSizeIterator<Item = T>,
{
let mut iter = iter.into_iter();
let len = iter.len();
let uninit = self.generic_alloc_uninit_slice(len)?;
let mut initializer = uninit.initializer();
while !initializer.is_full() {
let value = match iter.next() {
Some(value) => value,
None => exact_size_iterator_bad_len(),
};
initializer.push(value);
}
Ok(initializer.into_init())
}
#[inline(always)]
pub(crate) fn generic_alloc_iter_mut<B: ErrorBehavior, T>(
&mut self,
iter: impl IntoIterator<Item = T>,
) -> Result<BumpBox<'a, [T]>, B> {
let iter = iter.into_iter();
let capacity = iter.size_hint().0;
let mut vec = MutBumpVec::<T, A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED>::generic_with_capacity_in(capacity, self)?;
for value in iter {
vec.generic_push(value)?;
}
Ok(vec.into_boxed_slice())
}
#[inline(always)]
pub(crate) fn generic_alloc_iter_mut_rev<B: ErrorBehavior, T>(
&mut self,
iter: impl IntoIterator<Item = T>,
) -> Result<BumpBox<'a, [T]>, B> {
let iter = iter.into_iter();
let capacity = iter.size_hint().0;
let mut vec = MutBumpVecRev::<T, A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED>::generic_with_capacity_in(capacity, self)?;
for value in iter {
vec.generic_push(value)?;
}
Ok(vec.into_boxed_slice())
}
#[inline(always)]
pub(crate) fn generic_alloc_uninit<B: ErrorBehavior, T>(&self) -> Result<BumpBox<'a, MaybeUninit<T>>, B> {
if T::IS_ZST {
return Ok(BumpBox::zst(MaybeUninit::uninit()));
}
let ptr = self.do_alloc_sized::<B, T>()?.cast::<MaybeUninit<T>>();
unsafe { Ok(BumpBox::from_raw(ptr)) }
}
#[inline(always)]
pub(crate) fn generic_alloc_uninit_slice<B: ErrorBehavior, T>(
&self,
len: usize,
) -> Result<BumpBox<'a, [MaybeUninit<T>]>, B> {
if T::IS_ZST {
return Ok(BumpBox::uninit_zst_slice(len));
}
let ptr = self.do_alloc_slice::<B, T>(len)?.cast::<MaybeUninit<T>>();
unsafe {
let ptr = nonnull::slice_from_raw_parts(ptr, len);
Ok(BumpBox::from_raw(ptr))
}
}
#[inline(always)]
pub(crate) fn generic_alloc_uninit_slice_for<B: ErrorBehavior, T>(
&self,
slice: &[T],
) -> Result<BumpBox<'a, [MaybeUninit<T>]>, B> {
if T::IS_ZST {
return Ok(BumpBox::uninit_zst_slice(slice.len()));
}
let ptr = self.do_alloc_slice_for::<B, T>(slice)?.cast::<MaybeUninit<T>>();
unsafe {
let ptr = nonnull::slice_from_raw_parts(ptr, slice.len());
Ok(BumpBox::from_raw(ptr))
}
}
#[inline(always)]
pub(crate) fn generic_alloc_fixed_vec<B: ErrorBehavior, T>(&self, capacity: usize) -> Result<FixedBumpVec<'a, T>, B> {
Ok(FixedBumpVec::from_uninit(self.generic_alloc_uninit_slice(capacity)?))
}
#[inline(always)]
pub(crate) fn generic_alloc_fixed_string<B: ErrorBehavior>(&self, capacity: usize) -> Result<FixedBumpString<'a>, B> {
Ok(FixedBumpString::from_uninit(self.generic_alloc_uninit_slice(capacity)?))
}
#[inline(always)]
pub(crate) fn generic_alloc_layout<B: ErrorBehavior>(&self, layout: Layout) -> Result<NonNull<u8>, B> {
match self.chunk.get().alloc(MinimumAlignment::<MIN_ALIGN>, CustomLayout(layout)) {
Some(ptr) => Ok(ptr),
None => self.alloc_in_another_chunk(layout),
}
}
#[inline(always)]
    pub(crate) fn generic_reserve_bytes<B: ErrorBehavior>(&self, mut additional: usize) -> Result<(), B> {
let layout = match Layout::from_size_align(additional, 1) {
Ok(ok) => ok,
Err(_) => return Err(B::capacity_overflow()),
};
if self.is_unallocated() {
let allocator = A::default_or_panic();
let new_chunk = RawChunk::new_in(
ChunkSize::for_capacity(layout).ok_or_else(B::capacity_overflow)?,
None,
allocator,
)?;
self.chunk.set(new_chunk);
return Ok(());
}
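        // Walk the chunk list, deducting each chunk's remaining capacity from
        // `additional`; if the existing chunks don't cover it, append a new
        // chunk for the rest.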
let mut chunk = self.chunk.get();
loop {
if let Some(rest) = additional.checked_sub(chunk.remaining()) {
additional = rest;
} else {
return Ok(());
}
if let Some(next) = chunk.next() {
chunk = next;
} else {
break;
}
}
chunk.append_for(layout).map(drop)
}
}
impl<'a, A, const MIN_ALIGN: usize, const UP: bool> BumpScope<'a, A, MIN_ALIGN, UP>
where
MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
A: BaseAllocator,
{
#[inline(always)]
pub(crate) fn generic_alloc_try_with<B: ErrorBehavior, T, E>(
&self,
f: impl FnOnce() -> Result<T, E>,
) -> Result<Result<BumpBox<'a, T>, E>, B> {
if T::IS_ZST {
return match f() {
Ok(value) => Ok(Ok(BumpBox::zst(value))),
Err(error) => Ok(Err(error)),
};
}
let checkpoint_before_alloc = self.checkpoint();
let uninit = self.generic_alloc_uninit::<B, Result<T, E>>()?;
let ptr = BumpBox::into_raw(uninit).cast::<Result<T, E>>();
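        // Remember the bump position before running `f`: if `f` performed no
        // other allocation in this chunk, the `Result<T, E>` allocation can be
        // shrunk to just `T` on `Ok`, or rolled back entirely on `Err`.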
let pos = if UP { self.chunk.get().pos() } else { ptr.cast() };
Ok(unsafe {
nonnull::write_with(ptr, f);
let can_shrink = pos == self.chunk.get().pos();
match nonnull::result(ptr) {
Ok(value) => Ok({
if can_shrink {
let new_pos = if UP {
let pos = nonnull::addr(nonnull::add(value, 1)).get();
up_align_usize_unchecked(pos, MIN_ALIGN)
} else {
let pos = nonnull::addr(value).get();
down_align_usize(pos, MIN_ALIGN)
};
self.chunk.get().set_pos_addr(new_pos);
}
BumpBox::from_raw(value)
}),
Err(error) => Err({
let error = error.as_ptr().read();
if can_shrink {
self.reset_to(checkpoint_before_alloc);
}
error
}),
}
})
}
#[inline(always)]
pub(crate) fn generic_alloc_try_with_mut<B: ErrorBehavior, T, E>(
&mut self,
f: impl FnOnce() -> Result<T, E>,
) -> Result<Result<BumpBox<'a, T>, E>, B> {
if T::IS_ZST {
return match f() {
Ok(value) => Ok(Ok(BumpBox::zst(value))),
Err(error) => Ok(Err(error)),
};
}
let checkpoint = self.checkpoint();
let ptr = self.do_reserve_sized::<B, Result<T, E>>()?;
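        // `do_reserve_sized` makes room for the `Result<T, E>` without
        // permanently committing the bump position: on `Ok` the position is
        // advanced just past `T` below, on `Err` it is rewound to the checkpoint.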
Ok(unsafe {
nonnull::write_with(ptr, f);
match nonnull::result(ptr) {
Ok(value) => Ok({
let new_pos = if UP {
let pos = nonnull::addr(nonnull::add(value, 1)).get();
up_align_usize_unchecked(pos, MIN_ALIGN)
} else {
let pos = nonnull::addr(value).get();
down_align_usize(pos, MIN_ALIGN)
};
self.chunk.get().set_pos_addr(new_pos);
BumpBox::from_raw(value)
}),
Err(error) => Err({
let error = error.as_ptr().read();
self.reset_to(checkpoint);
error
}),
}
})
}
}