use core::{
alloc::Layout,
cell::Cell,
marker::PhantomData,
num::NonZeroUsize,
ops::{Deref, Range},
ptr::{self, NonNull},
};
use crate::{
BaseAllocator, Checkpoint, SizedTypeProperties, align_pos,
alloc::{AllocError, Allocator},
bumping::{BumpProps, BumpUp, MIN_CHUNK_ALIGN, bump_down, bump_prepare_down, bump_prepare_up, bump_up},
chunk::{ChunkHeader, ChunkSize, ChunkSizeHint},
error_behavior::{self, ErrorBehavior},
layout::{ArrayLayout, CustomLayout, LayoutProps, SizedLayout},
polyfill::non_null,
settings::{BumpAllocatorSettings, False, MinimumAlignment, SupportedMinimumAlignment},
stats::Stats,
};
/// The core state of a bump allocator: a cell holding the chunk that
/// allocations are currently served from.
///
/// The chunk may be one of two sentinel ("dummy") headers — `CLAIMED` or
/// `UNALLOCATED` — instead of a real chunk; see [`ChunkClass`].
pub(crate) struct RawBump<A, S> {
/// The currently active chunk (or a dummy sentinel).
pub(crate) chunk: Cell<RawChunk<A, S>>,
}
impl<A, S> Clone for RawBump<A, S> {
fn clone(&self) -> Self {
Self {
chunk: self.chunk.clone(),
}
}
}
impl<A, S> RawBump<A, S>
where
S: BumpAllocatorSettings,
{
/// Creates a bump allocator that has not allocated any chunk yet.
///
/// Only available when the settings do not guarantee an allocated chunk
/// (`GuaranteedAllocated = False`), since the initial chunk is the
/// `UNALLOCATED` sentinel.
#[inline(always)]
pub(crate) const fn new() -> Self
where
S: BumpAllocatorSettings<GuaranteedAllocated = False>,
{
Self {
chunk: Cell::new(RawChunk::UNALLOCATED),
}
}
/// Returns `true` if this allocator's chunk is the `CLAIMED` sentinel.
#[inline(always)]
pub(crate) fn is_claimed(&self) -> bool {
self.chunk.get().is_claimed()
}
/// Returns the allocator stored in the current chunk's header, or `None`
/// when the current chunk is a dummy (claimed or unallocated).
///
/// NOTE(review): the unconstrained lifetime `'a` relies on the chunk
/// header outliving the returned borrow — confirm callers uphold this.
#[inline(always)]
pub(crate) fn allocator<'a>(&self) -> Option<&'a A> {
match self.chunk.get().classify() {
ChunkClass::Claimed | ChunkClass::Unallocated => None,
ChunkClass::NonDummy(chunk) => Some(chunk.allocator()),
}
}
}
impl<A, S> RawBump<A, S>
where
A: Allocator,
S: BumpAllocatorSettings,
{
/// Creates a bump allocator whose first chunk is allocated with `size`
/// from `allocator`.
#[inline(always)]
pub(crate) fn with_size<E: ErrorBehavior>(size: ChunkSize<A, S>, allocator: A) -> Result<Self, E> {
Ok(Self {
chunk: Cell::new(NonDummyChunk::new::<E>(size, None, allocator)?.raw),
})
}
/// Deallocates every chunk except the last one in the list and resets
/// that surviving chunk's bump position, making its full capacity
/// available again. Does nothing when the current chunk is a dummy.
#[inline(always)]
pub(crate) fn reset(&self) {
let Some(mut chunk) = self.chunk.get().as_non_dummy() else {
return;
};
unsafe {
// Free every chunk before the current one.
chunk.for_each_prev(|chunk| chunk.deallocate());
// Walk forward, freeing each chunk that has a successor. The
// successor is read *before* the predecessor is deallocated.
while let Some(next) = chunk.next() {
chunk.deallocate();
chunk = next;
}
// The surviving chunk is now the only one; unlink its `prev`.
chunk.header.as_ref().prev.set(None);
}
chunk.reset();
self.chunk.set(chunk.raw);
}
/// Rewinds to the first chunk in the list and resets its bump position.
/// Unlike [`reset`](Self::reset) this deallocates nothing.
#[inline]
pub(crate) fn reset_to_start(&self) {
if let Some(mut chunk) = self.chunk.get().as_non_dummy() {
while let Some(prev) = chunk.prev() {
chunk = prev;
}
chunk.reset();
self.chunk.set(chunk.raw);
}
}
/// Deallocates all chunks of this bump allocator.
///
/// # Safety
/// NOTE(review): `self` must not be used afterwards — the chunk cell
/// still points at freed memory. Confirm this matches the callers.
pub(crate) unsafe fn manually_drop(&mut self) {
match self.chunk.get().classify() {
// Claimed: nothing is freed here (presumably the claimant owns
// the chunks — see `claim`; TODO confirm).
ChunkClass::Claimed => {
}
ChunkClass::Unallocated => (),
ChunkClass::NonDummy(chunk) => unsafe {
chunk.for_each_prev(|chunk| chunk.deallocate());
chunk.for_each_next(|chunk| chunk.deallocate());
chunk.deallocate();
},
}
}
}
impl<A, S> RawBump<A, S>
where
A: BaseAllocator<S::GuaranteedAllocated>,
S: BumpAllocatorSettings,
{
/// Takes this allocator's chunk list, leaving the `CLAIMED` sentinel in
/// its place. Panics if the allocator is already claimed.
#[inline(always)]
pub(crate) fn claim(&self) -> RawBump<A, S> {
    const {
        assert!(S::CLAIMABLE, "`claim` is only available with the setting `CLAIMABLE = true`");
    }

    // Kept out of line so the happy path stays small.
    #[cold]
    #[inline(never)]
    fn claim_failure() -> ! {
        panic!("bump allocator is already claimed");
    }

    if self.chunk.get().is_claimed() {
        claim_failure();
    }

    let taken = self.chunk.replace(RawChunk::<A, S>::CLAIMED);
    RawBump {
        chunk: Cell::new(taken),
    }
}
/// Puts the chunk taken by a prior [`claim`](Self::claim) back into `self`.
#[inline(always)]
pub(crate) fn reclaim(&self, claimant: &RawBump<A, S>) {
    let chunk = claimant.chunk.get();
    self.chunk.set(chunk);
}
/// Records the current chunk and bump position as a [`Checkpoint`].
#[inline(always)]
pub(crate) fn checkpoint(&self) -> Checkpoint {
    let chunk = self.chunk.get();
    Checkpoint::new(chunk)
}
/// Rewinds the bump position to a previously taken
/// [`checkpoint`](Self::checkpoint).
///
/// # Safety
/// NOTE(review): the checkpoint must belong to this allocator's chunk
/// list and still be valid — the debug assertions below spell out the
/// exact requirements; confirm they match the public contract.
#[inline]
pub(crate) unsafe fn reset_to(&self, checkpoint: Checkpoint) {
// A checkpoint taken while unallocated means "before anything was
// allocated": rewind to the very first chunk.
if !S::GUARANTEED_ALLOCATED && checkpoint.chunk == ChunkHeader::unallocated::<S>() {
self.reset_to_start();
return;
}
#[cfg(debug_assertions)]
{
assert_ne!(
checkpoint.chunk,
ChunkHeader::claimed::<S>(),
"the checkpoint must not have been created by a claimed bump allocator"
);
assert_ne!(
self.chunk.get().header.cast(),
ChunkHeader::claimed::<S>(),
"this function must not be called on a claimed bump allocator"
);
assert_ne!(
checkpoint.chunk,
ChunkHeader::unallocated::<S>(),
"the checkpoint must not have been created by a `!GUARANTEED_ALLOCATED` when self is `GUARANTEED_ALLOCATED`"
);
// The checkpoint's chunk must be one of ours, and its address
// must lie within that chunk's content region.
let chunk = self
.stats()
.small_to_big()
.find(|chunk| chunk.header() == checkpoint.chunk.cast())
.expect("this checkpoint does not refer to any chunk in this bump allocator");
assert!(
chunk.chunk.contains_addr_or_end(checkpoint.address.get()),
"checkpoint address does not point within its chunk"
);
}
unsafe {
// Restore the chunk's bump position, then make it current again.
checkpoint.reset_within_chunk();
self.chunk.set(RawChunk {
header: checkpoint.chunk.cast(),
marker: PhantomData,
});
}
}
/// Ensures that at least `additional` more bytes can be allocated
/// without acquiring a new chunk at allocation time, appending one
/// bigger chunk up front if the existing chunks don't suffice.
///
/// Returns `E::claimed()` on a claimed allocator and
/// `E::capacity_overflow()` when `additional` exceeds the maximum
/// representable size.
///
/// Note: the method-level `where A: BaseAllocator<S::GuaranteedAllocated>`
/// bound was removed — it merely duplicated the bound on this `impl` block.
#[inline(always)]
pub(crate) fn reserve<E: ErrorBehavior>(&self, additional: usize) -> Result<(), E> {
    let chunk = self.chunk.get();
    match chunk.classify() {
        ChunkClass::Claimed => Err(E::claimed()),
        ChunkClass::Unallocated => {
            // No chunk yet: allocate a fresh one big enough for
            // `additional` bytes.
            let Ok(layout) = Layout::from_size_align(additional, 1) else {
                return Err(E::capacity_overflow());
            };
            let new_chunk = NonDummyChunk::<A, S>::new(
                ChunkSize::<A, S>::from_capacity(layout).ok_or_else(E::capacity_overflow)?,
                None,
                A::default_or_panic(),
            )?;
            self.chunk.set(new_chunk.raw);
            Ok(())
        }
        ChunkClass::NonDummy(mut chunk) => {
            let mut additional = additional;
            // The current chunk contributes only its *remaining* bytes …
            if let Some(rest) = additional.checked_sub(chunk.remaining()) {
                additional = rest;
            } else {
                return Ok(());
            }
            // … while already-allocated following chunks contribute their
            // full capacity (they are reset before being used).
            while let Some(next) = chunk.next() {
                chunk = next;
                if let Some(rest) = additional.checked_sub(chunk.capacity()) {
                    additional = rest;
                } else {
                    return Ok(());
                }
            }
            // Exact fit: the existing chunks cover `additional` bytes.
            if additional == 0 {
                return Ok(());
            }
            let Ok(layout) = Layout::from_size_align(additional, 1) else {
                return Err(E::capacity_overflow());
            };
            // Append one chunk covering the shortfall; `reserve` only
            // guarantees capacity, so the current chunk stays as-is.
            chunk.append_for(layout).map(drop)
        }
    }
}
/// Allocates `layout`, falling back to the cold cross-chunk path when
/// the current chunk is full (or a dummy).
#[inline(always)]
pub(crate) fn alloc<B: ErrorBehavior>(&self, layout: Layout) -> Result<NonNull<u8>, B> {
    if let Some(ptr) = self.chunk.get().alloc(CustomLayout(layout)) {
        Ok(ptr)
    } else {
        self.alloc_in_another_chunk(layout)
    }
}
/// Allocates space for a single `T`, using the statically known layout.
#[inline(always)]
pub(crate) fn alloc_sized<E: ErrorBehavior, T>(&self) -> Result<NonNull<T>, E> {
    if let Some(ptr) = self.chunk.get().alloc(SizedLayout::new::<T>()) {
        return Ok(ptr.cast());
    }
    self.alloc_sized_in_another_chunk::<E, T>().map(|ptr| ptr.cast())
}
/// Allocates space for `len` elements of `T`.
/// Fails with `capacity_overflow` when the array layout cannot be formed.
#[inline(always)]
pub(crate) fn alloc_slice<E: ErrorBehavior, T>(&self, len: usize) -> Result<NonNull<T>, E> {
    let layout = ArrayLayout::array::<T>(len).map_err(|_| E::capacity_overflow())?;
    if let Some(ptr) = self.chunk.get().alloc(layout) {
        return Ok(ptr.cast());
    }
    self.alloc_slice_in_another_chunk::<E, T>(len).map(|ptr| ptr.cast())
}
/// Allocates space sized for the given slice `value` (layout taken from
/// the slice itself, so it cannot overflow).
#[inline(always)]
pub(crate) fn alloc_slice_for<E: ErrorBehavior, T>(&self, value: &[T]) -> Result<NonNull<T>, E> {
    let layout = ArrayLayout::for_value(value);
    if let Some(ptr) = self.chunk.get().alloc(layout) {
        return Ok(ptr.cast());
    }
    self.alloc_slice_in_another_chunk::<E, T>(value.len()).map(|ptr| ptr.cast())
}
/// Returns the pointer an allocation of a single `T` would get, without
/// committing the allocation.
#[inline(always)]
pub(crate) fn prepare_sized_allocation<B: ErrorBehavior, T>(&self) -> Result<NonNull<T>, B> {
    if let Some(ptr) = self.chunk.get().prepare_allocation(SizedLayout::new::<T>()) {
        return Ok(ptr.cast());
    }
    self.prepare_allocation_in_another_chunk::<B, T>().map(|ptr| ptr.cast())
}
/// Prepares room for at least `min_cap` elements of `T` without
/// committing an allocation; returns a slice pointer over the whole
/// usable capacity.
#[inline(always)]
pub(crate) fn prepare_slice_allocation<B: ErrorBehavior, T>(&self, min_cap: usize) -> Result<NonNull<[T]>, B> {
let range = self.prepare_allocation_range::<B, T>(min_cap)?;
// Usable capacity in whole elements.
// NOTE(review): assumes `T::SIZE != 0` — confirm ZSTs take another path.
let cap = unsafe { non_null::byte_offset_from_unsigned(range.end, range.start) } / T::SIZE;
// When bumping downwards the slice occupies the *end* of the range.
let ptr = if S::UP { range.start } else { unsafe { range.end.sub(cap) } };
Ok(NonNull::slice_from_raw_parts(ptr, cap))
}
/// Like [`prepare_slice_allocation`](Self::prepare_slice_allocation),
/// but returns the slice's end pointer and capacity instead.
#[inline(always)]
pub(crate) fn prepare_slice_allocation_rev<B: ErrorBehavior, T>(
&self,
min_cap: usize,
) -> Result<(NonNull<T>, usize), B> {
let range = self.prepare_allocation_range::<B, T>(min_cap)?;
// NOTE(review): assumes `T::SIZE != 0` — confirm ZSTs take another path.
let cap = unsafe { non_null::byte_offset_from_unsigned(range.end, range.start) } / T::SIZE;
let end = if S::UP { unsafe { range.start.add(cap) } } else { range.end };
Ok((end, cap))
}
/// Returns the free region able to hold at least `cap` elements of `T`,
/// trying the current chunk first and then the cold cross-chunk path.
#[inline(always)]
fn prepare_allocation_range<B: ErrorBehavior, T>(&self, cap: usize) -> Result<Range<NonNull<T>>, B> {
let Ok(layout) = ArrayLayout::array::<T>(cap) else {
return Err(B::capacity_overflow());
};
let range = match self.chunk.get().prepare_allocation_range(layout) {
Some(ptr) => ptr,
None => self.prepare_allocation_range_in_another_chunk(layout)?,
};
Ok(range.start.cast::<T>()..range.end.cast::<T>())
}
/// Cold path of [`alloc`](Self::alloc): allocate in a following chunk,
/// appending a new one if necessary.
#[cold]
#[inline(never)]
pub(crate) fn alloc_in_another_chunk<E: ErrorBehavior>(&self, layout: Layout) -> Result<NonNull<u8>, E> {
unsafe { self.in_another_chunk(CustomLayout(layout), RawChunk::alloc) }
}
/// Cold path of [`alloc_sized`](Self::alloc_sized).
#[cold]
#[inline(never)]
fn alloc_sized_in_another_chunk<E: ErrorBehavior, T>(&self) -> Result<NonNull<u8>, E> {
self.alloc_in_another_chunk(Layout::new::<T>())
}
/// Cold path of [`alloc_slice`](Self::alloc_slice): build the array
/// layout and delegate to the generic cross-chunk allocation.
#[cold]
#[inline(never)]
fn alloc_slice_in_another_chunk<E: ErrorBehavior, T>(&self, len: usize) -> Result<NonNull<u8>, E> {
    match Layout::array::<T>(len) {
        Ok(layout) => self.alloc_in_another_chunk(layout),
        Err(_) => Err(E::capacity_overflow()),
    }
}
/// Cold path of [`prepare_sized_allocation`](Self::prepare_sized_allocation).
#[cold]
#[inline(never)]
pub(crate) fn prepare_allocation_in_another_chunk<E: ErrorBehavior, T>(&self) -> Result<NonNull<u8>, E> {
let layout = CustomLayout(Layout::new::<T>());
unsafe { self.in_another_chunk(layout, RawChunk::prepare_allocation) }
}
/// Cold path of `prepare_allocation_range`.
#[cold]
#[inline(never)]
fn prepare_allocation_range_in_another_chunk<E: ErrorBehavior>(
&self,
layout: ArrayLayout,
) -> Result<Range<NonNull<u8>>, E> {
unsafe { self.in_another_chunk(layout, RawChunk::prepare_allocation_range) }
}
/// Retries the bump operation `f` on the chunks after the current one,
/// appending a fresh chunk (or allocating the very first one) when no
/// existing chunk fits. The current chunk is advanced as chunks are tried.
///
/// # Safety
/// NOTE(review): `f` must succeed on a chunk freshly sized for `layout`
/// (see the `unreachable_unchecked` below) — confirm all callers pass
/// such an `f`.
#[inline(always)]
pub(crate) unsafe fn in_another_chunk<E: ErrorBehavior, R, L: LayoutProps>(
&self,
layout: L,
mut f: impl FnMut(RawChunk<A, S>, L) -> Option<R>,
) -> Result<R, E> {
let new_chunk: NonDummyChunk<A, S> = match self.chunk.get().classify() {
ChunkClass::Claimed => Err(E::claimed()),
// No chunk yet: allocate the first chunk sized for `layout`.
ChunkClass::Unallocated => NonDummyChunk::new(
ChunkSize::from_capacity(*layout).ok_or_else(E::capacity_overflow)?,
None,
A::default_or_panic(),
),
ChunkClass::NonDummy(mut chunk) => {
// Try each already-allocated following chunk, resetting it
// to empty before attempting the operation.
while let Some(next_chunk) = chunk.next() {
chunk = next_chunk;
chunk.reset();
self.chunk.set(chunk.raw);
if let Some(ptr) = f(chunk.raw, layout) {
return Ok(ptr);
}
}
// No following chunk fits: allocate a new, bigger one.
chunk.append_for(*layout)
}
}?;
self.chunk.set(new_chunk.raw);
match f(new_chunk.raw, layout) {
Some(ptr) => Ok(ptr),
_ => {
// SAFETY: the new chunk was sized for exactly this layout via
// `from_capacity`/`append_for`, so `f` cannot fail here.
unsafe { core::hint::unreachable_unchecked() }
}
}
}
/// Ensures the allocator has a real chunk, allocating a minimum-size
/// chunk when currently unallocated; fails if claimed.
pub(crate) fn make_allocated<E: ErrorBehavior>(&self) -> Result<(), E> {
match self.chunk.get().classify() {
ChunkClass::Claimed => Err(E::claimed()),
ChunkClass::Unallocated => {
let new_chunk = NonDummyChunk::new(ChunkSize::MINIMUM, None, A::default_or_panic())?;
self.chunk.set(new_chunk.raw);
Ok(())
}
ChunkClass::NonDummy(_) => Ok(()),
}
}
}
impl<A, S> RawBump<A, S>
where
S: BumpAllocatorSettings,
{
/// Returns a statistics view over this allocator's chunk list.
#[must_use]
#[inline(always)]
pub fn stats<'a>(&self) -> Stats<'a, A, S> {
    let chunk = self.chunk.get();
    Stats::from_raw_chunk(chunk)
}
/// Aligns the bump position to the const `ALIGN`.
#[inline(always)]
pub(crate) fn align<const ALIGN: usize>(&self)
where
MinimumAlignment<ALIGN>: SupportedMinimumAlignment,
{
self.align_to::<MinimumAlignment<ALIGN>>();
}
/// Aligns the bump position to `MinimumAlignment`.
/// A no-op when the settings' minimum alignment already covers it, or
/// when the current chunk is a dummy.
#[inline(always)]
pub(crate) fn align_to<MinimumAlignment>(&self)
where
MinimumAlignment: SupportedMinimumAlignment,
{
if MinimumAlignment::VALUE > S::MIN_ALIGN {
if let Some(chunk) = self.chunk.get().as_non_dummy() {
let pos = chunk.pos().addr().get();
// Direction-aware rounding of the position.
let addr = align_pos(S::UP, MinimumAlignment::VALUE, pos);
unsafe { chunk.set_pos_addr(addr) };
}
}
}
/// Checks that this allocator may be reinterpreted with the settings
/// `NewS` (compile-time where possible, runtime for the claimed and
/// unallocated states), then re-aligns the position.
pub(crate) fn ensure_satisfies_settings<NewS>(&self)
where
NewS: BumpAllocatorSettings,
{
const {
assert!(NewS::UP == S::UP, "can't change `UP` setting of `Bump(Scope)`");
}
if !NewS::CLAIMABLE && self.chunk.get().is_claimed() {
error_behavior::panic::claimed();
}
if NewS::GUARANTEED_ALLOCATED && self.chunk.get().is_unallocated() {
error_behavior::panic::unallocated();
}
self.align_to::<NewS::MinimumAlignment>();
}
/// Like [`ensure_satisfies_settings`](Self::ensure_satisfies_settings),
/// but for creating a scope: the minimum alignment may only grow and the
/// guaranteed-allocated state is not checked.
pub(crate) fn ensure_scope_satisfies_settings<NewS>(&self)
where
NewS: BumpAllocatorSettings,
{
const {
assert!(NewS::UP == S::UP, "can't change `UP` setting of `Bump(Scope)`");
assert!(
NewS::MIN_ALIGN >= S::MIN_ALIGN,
"can't decrease minimum alignment using `BumpScope::with_settings`"
);
}
if !NewS::CLAIMABLE && self.chunk.get().is_claimed() {
error_behavior::panic::claimed();
}
self.align_to::<NewS::MinimumAlignment>();
}
/// Compile-time-only checks for a shared borrow with settings `NewS`;
/// a shared borrow must not mutate state, hence the unused `self`.
#[expect(clippy::unused_self)]
pub(crate) fn ensure_satisfies_settings_for_borrow<NewS>(&self)
where
NewS: BumpAllocatorSettings,
{
const {
assert!(NewS::UP == S::UP, "can't change `UP` setting of `Bump(Scope)`");
assert!(
NewS::MIN_ALIGN == S::MIN_ALIGN,
"can't change minimum alignment using `Bump(Scope)::borrow_with_settings`"
);
assert!(
NewS::CLAIMABLE == S::CLAIMABLE,
"can't change claimable property using `Bump(Scope)::borrow_with_settings`"
);
assert!(
NewS::GUARANTEED_ALLOCATED <= S::GUARANTEED_ALLOCATED,
"can't increase guaranteed-allocated property using `Bump(Scope)::borrow_with_settings`"
);
}
}
/// Checks for a mutable borrow with settings `NewS`; the minimum
/// alignment may grow, so the position is re-aligned afterwards.
pub(crate) fn ensure_satisfies_settings_for_borrow_mut<NewS>(&self)
where
NewS: BumpAllocatorSettings,
{
const {
assert!(NewS::UP == S::UP, "can't change `UP` setting of `Bump(Scope)`");
assert!(
NewS::MIN_ALIGN >= S::MIN_ALIGN,
"can't decrease minimum alignment using `Bump(Scope)::borrow_mut_with_settings`"
);
assert!(
NewS::CLAIMABLE == S::CLAIMABLE,
"can't change claimable property using `Bump(Scope)::borrow_mut_with_settings`"
);
assert!(
NewS::GUARANTEED_ALLOCATED == S::GUARANTEED_ALLOCATED,
"can't change guaranteed-allocated property using `Bump(Scope)::borrow_mut_with_settings`"
);
}
self.align_to::<NewS::MinimumAlignment>();
}
/// Disassembles this `RawBump` into its chunk-header pointer; pairs with
/// [`from_raw`](Self::from_raw).
#[inline]
pub(crate) fn into_raw(self) -> NonNull<()> {
    let chunk = self.chunk.get();
    chunk.header.cast()
}
/// Reassembles a `RawBump` from a pointer produced by
/// [`into_raw`](Self::into_raw).
///
/// # Safety
/// NOTE(review): `ptr` must originate from `into_raw` of an allocator
/// with identical `A` and `S` parameters — confirm the caller contract.
#[inline]
pub(crate) unsafe fn from_raw(ptr: NonNull<()>) -> Self {
Self {
chunk: Cell::new(RawChunk {
header: ptr.cast(),
marker: PhantomData,
}),
}
}
}
/// A pointer to a chunk header, which may be one of the static sentinel
/// headers (`claimed`/`unallocated`) rather than a real chunk.
pub(crate) struct RawChunk<A, S> {
/// The (possibly sentinel) chunk header.
pub(crate) header: NonNull<ChunkHeader<A>>,
/// Ties the otherwise unused `A`/`S` parameters to this type without
/// affecting ownership (`fn() -> …` keeps it `Send`/`Sync`-neutral).
pub(crate) marker: PhantomData<fn() -> (A, S)>,
}
/// `RawChunk` is a plain pointer wrapper, so it is freely copyable.
impl<A, S> Clone for RawChunk<A, S> {
fn clone(&self) -> Self {
*self
}
}
impl<A, S> Copy for RawChunk<A, S> {}
/// A `RawChunk` known to point at a real (non-sentinel) chunk header.
pub(crate) struct NonDummyChunk<A, S> {
raw: RawChunk<A, S>,
}
impl<A, S> Copy for NonDummyChunk<A, S> {}
impl<A, S> Clone for NonDummyChunk<A, S> {
fn clone(&self) -> Self {
*self
}
}
/// Lets a `NonDummyChunk` be used wherever a `RawChunk` is expected.
impl<A, S> Deref for NonDummyChunk<A, S> {
type Target = RawChunk<A, S>;
fn deref(&self) -> &Self::Target {
&self.raw
}
}
impl<A, S> RawChunk<A, S>
where
S: BumpAllocatorSettings,
{
/// Sentinel chunk used while no real chunk has been allocated yet; its
/// header is a statically known dummy, so no allocation happens.
pub(crate) const UNALLOCATED: Self = {
assert!(!S::GUARANTEED_ALLOCATED);
Self {
header: ChunkHeader::unallocated::<S>().cast(),
marker: PhantomData,
}
};
/// Sentinel chunk left behind when the allocator is claimed.
const CLAIMED: Self = {
assert!(S::CLAIMABLE);
Self {
header: ChunkHeader::claimed::<S>().cast(),
marker: PhantomData,
}
};
/// Returns the raw chunk-header pointer.
#[inline(always)]
pub(crate) fn header(self) -> NonNull<ChunkHeader<A>> {
self.header
}
/// Whether this is the `CLAIMED` sentinel (only possible when claimable).
#[inline(always)]
fn is_claimed(self) -> bool {
S::CLAIMABLE && self.header.cast() == ChunkHeader::claimed::<S>()
}
/// Whether this is the `UNALLOCATED` sentinel (only possible when the
/// settings don't guarantee an allocated chunk).
#[inline(always)]
pub(crate) fn is_unallocated(self) -> bool {
!S::GUARANTEED_ALLOCATED && self.header.cast() == ChunkHeader::unallocated::<S>()
}
/// Determines whether this chunk is a sentinel or a real chunk.
#[inline(always)]
pub(crate) fn classify(self) -> ChunkClass<A, S> {
    if self.is_claimed() {
        ChunkClass::Claimed
    } else if self.is_unallocated() {
        ChunkClass::Unallocated
    } else {
        ChunkClass::NonDummy(NonDummyChunk { raw: self })
    }
}
/// Returns this chunk as a `NonDummyChunk`, or `None` for the sentinels.
#[inline(always)]
pub(crate) fn as_non_dummy(self) -> Option<NonDummyChunk<A, S>> {
    if let ChunkClass::NonDummy(chunk) = self.classify() {
        Some(chunk)
    } else {
        None
    }
}
/// Attempts to allocate `layout` in this chunk, advancing the bump
/// position on success. Returns `None` when the chunk is full (or a
/// sentinel, whose header makes the bump math always fail — see
/// `bump_props`).
#[inline(always)]
pub(crate) fn alloc(self, layout: impl LayoutProps) -> Option<NonNull<u8>> {
let props = self.bump_props(layout);
if S::UP {
let BumpUp { new_pos, ptr } = bump_up(props)?;
unsafe {
// SAFETY: a successful bump proves this is a real chunk.
let chunk = self.as_non_dummy_unchecked();
chunk.set_pos_addr(new_pos);
Some(chunk.content_ptr_from_addr(ptr))
}
} else {
let ptr = bump_down(props)?;
unsafe {
// SAFETY: a successful bump proves this is a real chunk.
let chunk = self.as_non_dummy_unchecked();
chunk.set_pos_addr(ptr);
Some(chunk.content_ptr_from_addr(ptr))
}
}
}
/// Like [`alloc`](Self::alloc) but does not advance the bump position;
/// returns the pointer the allocation would get.
#[inline(always)]
pub(crate) fn prepare_allocation(self, layout: impl LayoutProps) -> Option<NonNull<u8>> {
let props = self.bump_props(layout);
let ptr = if S::UP { bump_up(props)?.ptr } else { bump_down(props)? };
unsafe {
// SAFETY: a successful bump proves this is a real chunk.
let chunk = self.as_non_dummy_unchecked();
Some(chunk.content_ptr_from_addr(ptr))
}
}
/// Returns the whole free region an allocation of `layout` could use,
/// without committing anything.
#[inline(always)]
pub(crate) fn prepare_allocation_range(self, layout: impl LayoutProps) -> Option<Range<NonNull<u8>>> {
let props = self.bump_props(layout);
let range = if S::UP {
bump_prepare_up(props)
} else {
bump_prepare_down(props)
}?;
unsafe {
// SAFETY: a successful bump proves this is a real chunk.
let chunk = self.as_non_dummy_unchecked();
Some(chunk.content_ptr_from_addr_range(range))
}
}
/// Gathers the inputs for the shared bump arithmetic in `bumping`.
#[inline(always)]
fn bump_props<L>(self, layout: L) -> BumpProps
where
L: LayoutProps,
{
let pos = self.pos().addr().get();
let end = unsafe { self.header.as_ref() }.end.addr().get();
// `start`/`end` are oriented in the bump direction.
let start = if S::UP { pos } else { end };
let end = if S::UP { end } else { pos };
#[cfg(debug_assertions)]
if !matches!(self.classify(), ChunkClass::NonDummy(_)) {
// Sentinel headers are set up with `start > end`, so every bump
// attempt on them fails and falls into the slow path.
assert!(start > end);
}
BumpProps {
start,
end,
layout: *layout,
min_align: S::MIN_ALIGN,
align_is_const: L::ALIGN_IS_CONST,
size_is_const: L::SIZE_IS_CONST,
size_is_multiple_of_align: L::SIZE_IS_MULTIPLE_OF_ALIGN,
}
}
/// The current bump position.
#[inline(always)]
pub(crate) fn pos(self) -> NonNull<u8> {
unsafe { self.header.as_ref().pos.get() }
}
/// Wraps this chunk as non-dummy without checking (debug-asserted).
///
/// # Safety
/// `self` must not be one of the sentinel chunks.
#[inline(always)]
pub(crate) unsafe fn as_non_dummy_unchecked(self) -> NonDummyChunk<A, S> {
debug_assert!(matches!(self.classify(), ChunkClass::NonDummy(_)));
NonDummyChunk { raw: self }
}
}
impl<A, S> NonDummyChunk<A, S>
where
S: BumpAllocatorSettings,
{
/// Allocates a new chunk of `chunk_size` from `allocator` and writes a
/// `ChunkHeader` into it, linked after `prev`.
///
/// The header lives at the low end of the allocation when bumping
/// upwards and at the high end when bumping downwards; `pos` starts at
/// the content boundary next to the header and `end` marks the opposite
/// boundary of the allocation.
pub(crate) fn new<E>(
chunk_size: ChunkSize<A, S>,
prev: Option<NonDummyChunk<A, S>>,
allocator: A,
) -> Result<NonDummyChunk<A, S>, E>
where
A: Allocator,
E: ErrorBehavior,
{
let layout = chunk_size.layout().ok_or_else(E::capacity_overflow)?;
let allocation = match allocator.allocate(layout) {
Ok(ok) => ok,
Err(AllocError) => return Err(E::allocation(layout)),
};
let ptr = non_null::as_non_null_ptr(allocation);
// The allocator may return more than requested; bring the usable size
// to a multiple of the chunk alignment (asserts check it still covers
// the request).
let size = allocation.len();
let size = ChunkSize::<A, S>::align_allocation_size(size);
debug_assert!(size >= layout.size());
debug_assert!(size % MIN_CHUNK_ALIGN == 0);
let prev = Cell::new(prev.map(|c| c.header));
let next = Cell::new(None);
let header = unsafe {
if S::UP {
// Header at the start; content grows upward after it.
let header = ptr.cast::<ChunkHeader<A>>();
header.write(ChunkHeader {
pos: Cell::new(header.add(1).cast()),
end: ptr.add(size),
prev,
next,
allocator,
});
header
} else {
// Header at the end; content grows downward before it.
let header = ptr.add(size).cast::<ChunkHeader<A>>().sub(1);
header.write(ChunkHeader {
pos: Cell::new(header.cast()),
end: ptr,
prev,
next,
allocator,
});
header
}
};
Ok(NonDummyChunk {
raw: RawChunk {
header,
marker: PhantomData,
},
})
}
/// Allocates a new chunk big enough for `layout` (and at least double
/// this chunk's size) and links it after this chunk.
///
/// Must only be called on the last chunk in the list.
pub(crate) fn append_for<B: ErrorBehavior>(self, layout: Layout) -> Result<Self, B>
where
A: Allocator + Clone,
{
debug_assert!(self.next().is_none());
let required_size = ChunkSizeHint::for_capacity(layout).ok_or_else(B::capacity_overflow)?;
let grown_size = self.grow_size()?;
let size = required_size.max(grown_size).calc_size().ok_or_else(B::capacity_overflow)?;
let allocator = unsafe { self.header.as_ref().allocator.clone() };
let new_chunk = Self::new::<B>(size, Some(self), allocator)?;
unsafe {
self.header.as_ref().next.set(Some(new_chunk.header));
}
Ok(new_chunk)
}
/// Suggests a size for the next chunk: double this chunk's size, or a
/// capacity-overflow error when doubling would overflow `usize`.
#[inline(always)]
fn grow_size<B: ErrorBehavior>(self) -> Result<ChunkSizeHint<A, S>, B> {
    match self.size().get().checked_mul(2) {
        Some(size) => Ok(ChunkSizeHint::new(size)),
        None => Err(B::capacity_overflow()),
    }
}
/// Returns a reference to the allocator stored in this chunk's header.
///
/// NOTE(review): the unconstrained lifetime `'a` relies on the header
/// outliving the returned borrow — confirm callers uphold this.
#[inline(always)]
pub(crate) fn allocator<'a>(self) -> &'a A {
unsafe { &self.header.as_ref().allocator }
}
/// Returns the previous chunk in the list, if any.
#[inline(always)]
pub(crate) fn prev(self) -> Option<NonDummyChunk<A, S>> {
unsafe {
Some(NonDummyChunk {
raw: RawChunk {
header: self.header.as_ref().prev.get()?,
marker: PhantomData,
},
})
}
}
/// Returns the next chunk in the list, if any.
#[inline(always)]
pub(crate) fn next(self) -> Option<NonDummyChunk<A, S>> {
unsafe {
Some(NonDummyChunk {
raw: RawChunk {
header: self.header.as_ref().next.get()?,
marker: PhantomData,
},
})
}
}
/// Total size of this chunk's allocation in bytes, header included.
#[inline(always)]
pub(crate) fn size(self) -> NonZeroUsize {
let start = self.chunk_start().addr().get();
let end = self.chunk_end().addr().get();
// SAFETY: a real chunk spans at least its header, so `end > start`.
unsafe { NonZeroUsize::new_unchecked(end - start) }
}
/// Number of usable content bytes in this chunk (size minus header).
#[inline(always)]
pub(crate) fn capacity(self) -> usize {
    self.content_end().addr().get() - self.content_start().addr().get()
}
/// Number of content bytes already bumped in this chunk.
#[inline(always)]
pub(crate) fn allocated(self) -> usize {
    let range = self.allocated_range();
    range.end.addr().get() - range.start.addr().get()
}
/// Number of content bytes still free in this chunk.
#[inline(always)]
pub(crate) fn remaining(self) -> usize {
    let range = self.remaining_range();
    range.end.addr().get() - range.start.addr().get()
}
/// Resets the bump position to the empty state: the content start when
/// bumping upwards, the content end when bumping downwards.
#[inline(always)]
fn reset(self) {
    let empty_pos = if S::UP { self.content_start() } else { self.content_end() };
    // SAFETY: both boundaries lie within this chunk's content region.
    unsafe { self.set_pos(empty_pos) };
}
/// Lowest address of the chunk allocation. Upwards: the header sits at
/// the start; downwards: the header's `end` field is the start.
#[inline(always)]
pub(crate) fn chunk_start(self) -> NonNull<u8> {
unsafe { if S::UP { self.header.cast() } else { self.header.as_ref().end } }
}
/// One-past-the-end address of the chunk allocation.
#[inline(always)]
pub(crate) fn chunk_end(self) -> NonNull<u8> {
unsafe {
if S::UP {
self.header.as_ref().end
} else {
self.after_header()
}
}
}
/// Start of the usable (content) region — the chunk minus its header.
#[inline(always)]
pub(crate) fn content_start(self) -> NonNull<u8> {
if S::UP { self.after_header() } else { self.chunk_start() }
}
/// End of the usable (content) region.
#[inline(always)]
pub(crate) fn content_end(self) -> NonNull<u8> {
if S::UP { self.chunk_end() } else { self.header.cast() }
}
/// Sets the bump position to `ptr`.
///
/// # Safety
/// `ptr` must lie within this chunk's content region (or at its end).
#[inline(always)]
pub(crate) unsafe fn set_pos(self, ptr: NonNull<u8>) {
unsafe { self.set_pos_addr(ptr.addr().get()) };
}
/// Sets the bump position to the address `addr`.
///
/// # Safety
/// `addr` must lie within this chunk's content region (or at its end).
#[inline(always)]
pub(crate) unsafe fn set_pos_addr(self, addr: usize) {
unsafe { self.header.as_ref().pos.set(self.content_ptr_from_addr(addr)) };
}
/// Aligns `pos` to the minimum alignment (direction-aware) and sets it.
#[inline(always)]
pub(crate) unsafe fn set_pos_addr_and_align(self, pos: usize) {
unsafe {
let addr = align_pos(S::UP, S::MIN_ALIGN, pos);
self.set_pos_addr(addr);
}
}
/// Sets the bump position to `pos`, re-aligning only when `pos`'s known
/// alignment (`pos_align`) is below the settings' minimum alignment.
#[inline(always)]
pub(crate) unsafe fn set_pos_addr_and_align_from(self, mut pos: usize, pos_align: usize) {
debug_assert_eq!(pos % pos_align, 0);
if pos_align < S::MIN_ALIGN {
pos = align_pos(S::UP, S::MIN_ALIGN, pos);
}
unsafe { self.set_pos_addr(pos) };
}
/// Reconstructs a pointer into this chunk from a raw address, reusing
/// the chunk header pointer's provenance.
#[inline(always)]
unsafe fn content_ptr_from_addr(self, addr: usize) -> NonNull<u8> {
unsafe {
debug_assert!(self.contains_addr_or_end(addr));
let ptr = self.header.cast();
// SAFETY: `addr` is inside the chunk, which cannot sit at address 0.
let addr = NonZeroUsize::new_unchecked(addr);
ptr.with_addr(addr)
}
}
/// Range version of `content_ptr_from_addr`.
#[inline(always)]
pub(crate) unsafe fn content_ptr_from_addr_range(self, range: Range<usize>) -> Range<NonNull<u8>> {
unsafe {
debug_assert!(range.start <= range.end);
let start = self.content_ptr_from_addr(range.start);
let end = self.content_ptr_from_addr(range.end);
start..end
}
}
/// Whether `addr` lies within the content region, the one-past-the-end
/// address included.
#[inline(always)]
fn contains_addr_or_end(self, addr: usize) -> bool {
    let content = self.content_start().addr().get()..=self.content_end().addr().get();
    content.contains(&addr)
}
/// The already-bumped part of the content region: from the content start
/// up to `pos` when bumping upwards, from `pos` to the content end when
/// bumping downwards.
#[inline(always)]
fn allocated_range(self) -> Range<NonNull<u8>> {
if S::UP {
self.content_start()..self.pos()
} else {
self.pos()..self.content_end()
}
}
/// The still-free part of the content region — the complement of
/// `allocated_range` relative to the bump direction.
#[inline(always)]
fn remaining_range(self) -> Range<NonNull<u8>> {
    if S::UP {
        self.pos()..self.content_end()
    } else {
        self.content_start()..self.pos()
    }
}
/// The address immediately after this chunk's header.
#[inline(always)]
fn after_header(self) -> NonNull<u8> {
unsafe { self.header.add(1).cast() }
}
/// Calls `f` on every chunk before this one, nearest first.
/// The link is followed *before* `f` runs, so `f` may deallocate its argument.
fn for_each_prev(self, mut f: impl FnMut(NonDummyChunk<A, S>)) {
    let mut cursor = self.prev();
    loop {
        let Some(chunk) = cursor else { break };
        cursor = chunk.prev();
        f(chunk);
    }
}
/// Calls `f` on every chunk after this one, nearest first.
/// The link is followed *before* `f` runs, so `f` may deallocate its argument.
fn for_each_next(self, mut f: impl FnMut(NonDummyChunk<A, S>)) {
    let mut cursor = self.next();
    loop {
        let Some(chunk) = cursor else { break };
        cursor = chunk.next();
        f(chunk);
    }
}
/// Frees this chunk's memory using the allocator stored in its header.
///
/// # Safety
/// The chunk must not be accessed afterwards. The allocator is moved out
/// of the header with `ptr::read`, so it must not be dropped elsewhere.
unsafe fn deallocate(self)
where
A: Allocator,
{
// Move the allocator out of the header *before* freeing the memory
// the header itself lives in.
let allocator = unsafe { ptr::read(&raw const self.header.as_ref().allocator) };
let ptr = self.chunk_start();
let layout = self.layout();
unsafe {
allocator.deallocate(ptr, layout);
}
}
/// The layout used to deallocate this chunk: its full size with the
/// chunk header's alignment.
/// NOTE(review): relies on this matching the original allocation's
/// size/alignment — confirm against `ChunkSize::layout`.
#[inline(always)]
pub(crate) fn layout(self) -> Layout {
unsafe { Layout::from_size_align_unchecked(self.size().get(), align_of::<ChunkHeader<A>>()) }
}
}
/// The three states a [`RawChunk`] can be in.
pub(crate) enum ChunkClass<A, S: BumpAllocatorSettings> {
/// The `CLAIMED` sentinel: the chunk list was taken by a claimant.
Claimed,
/// The `UNALLOCATED` sentinel: no chunk has been allocated yet.
Unallocated,
/// A real chunk with a live header.
NonDummy(NonDummyChunk<A, S>),
}