use core::{alloc::Layout, num::NonZeroUsize, ptr::NonNull};
use crate::{
BaseAllocator, alloc::AllocError, bump_down, layout::CustomLayout, polyfill::non_null, raw_bump::RawBump,
settings::BumpAllocatorSettings, up_align_usize_unchecked,
};
#[inline(always)]
/// Allocates a block of memory for `layout` and returns it as a fat pointer
/// whose length is `layout.size()`.
///
/// Errors with [`AllocError`] when the bump allocator cannot satisfy the
/// request.
pub fn allocate<A, S>(bump: &RawBump<A, S>, layout: Layout) -> Result<NonNull<[u8]>, AllocError>
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
    let ptr = bump.alloc::<AllocError>(layout)?;
    Ok(NonNull::slice_from_raw_parts(ptr, layout.size()))
}
#[inline(always)]
/// Deallocates `ptr` if deallocation is enabled by the settings and `ptr` is
/// the most recent allocation; otherwise this is a no-op (bump allocators
/// only reclaim the last allocation).
///
/// # Safety
///
/// `ptr` must denote a live allocation of this bump allocator made with
/// `layout`.
pub unsafe fn deallocate<A, S>(bump: &RawBump<A, S>, ptr: NonNull<u8>, layout: Layout)
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
    if S::DEALLOCATES {
        // SAFETY: the caller upholds this function's contract, which covers
        // the contracts of `is_last` and `deallocate_assume_last`.
        unsafe {
            if is_last(bump, ptr, layout) {
                deallocate_assume_last(bump, ptr, layout);
            }
        }
    }
}
#[inline(always)]
/// Pops the allocation at `ptr` by resetting the chunk's bump position,
/// assuming `ptr` is the most recent allocation. No-op unless the settings
/// enable deallocation.
///
/// # Safety
///
/// `ptr` must be the most recent live allocation of this bump allocator,
/// made with `layout`.
unsafe fn deallocate_assume_last<A, S>(bump: &RawBump<A, S>, ptr: NonNull<u8>, layout: Layout)
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
    if !S::DEALLOCATES {
        return;
    }
    unsafe {
        let chunk = bump.chunk.get().as_non_dummy_unchecked();
        let base = ptr.addr().get();
        // An upward-bumping allocator retracts to the allocation's start;
        // a downward-bumping one retracts to its end.
        let new_pos = if S::UP { base } else { base + layout.size() };
        chunk.set_pos_addr_and_align(new_pos);
    }
}
#[inline(always)]
/// Returns whether the allocation at `ptr` with `layout` is the most recent
/// one, i.e. whether it borders the chunk's current bump position.
///
/// # Safety
///
/// `ptr` must denote a live allocation of this bump allocator made with
/// `layout`, so that `ptr + layout.size()` stays in bounds.
unsafe fn is_last<A, S>(bump: &RawBump<A, S>, ptr: NonNull<u8>, layout: Layout) -> bool
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
    let pos = bump.chunk.get().pos();
    if S::UP {
        // Bumping upward: the last allocation ends exactly at the position.
        // SAFETY: `ptr + layout.size()` is in bounds per this function's contract.
        let alloc_end = unsafe { ptr.add(layout.size()) };
        alloc_end == pos
    } else {
        // Bumping downward: the last allocation starts exactly at the position.
        ptr == pos
    }
}
/// Grows the allocation at `old_ptr` from `old_layout` to `new_layout`.
///
/// Extends in place when `old_ptr` is the most recent allocation and the new
/// layout fits there; otherwise allocates fresh memory (possibly in another
/// chunk) and copies the old contents over.
///
/// # Safety
///
/// - `old_ptr` must denote a live allocation of this bump allocator made
///   with `old_layout`.
/// - `new_layout.size()` must be greater than or equal to `old_layout.size()`.
#[inline(always)]
pub unsafe fn grow<A, S>(
bump: &RawBump<A, S>,
old_ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError>
where
A: BaseAllocator<S::GuaranteedAllocated>,
S: BumpAllocatorSettings,
{
debug_assert!(
new_layout.size() >= old_layout.size(),
"`new_layout.size()` must be greater than or equal to `old_layout.size()`"
);
unsafe {
if S::UP {
// Non-short-circuiting `&`: both operands are evaluated unconditionally.
if is_last(bump, old_ptr, old_layout) & align_fits(old_ptr, old_layout, new_layout) {
let chunk = bump.chunk.get().as_non_dummy_unchecked();
let chunk_end = chunk.content_end();
// Bytes available from the allocation's start to the chunk's content end.
let remaining = chunk_end.addr().get() - old_ptr.addr().get();
if new_layout.size() <= remaining {
// Grow in place: only the bump position moves; the data stays put.
let old_addr = old_ptr.addr();
let new_pos = up_align_usize_unchecked(old_addr.get() + new_layout.size(), S::MIN_ALIGN);
chunk.set_pos_addr(new_pos);
Ok(NonNull::slice_from_raw_parts(old_ptr, new_layout.size()))
} else {
// Doesn't fit in the current chunk: allocate elsewhere and copy.
let new_ptr = bump.alloc_in_another_chunk::<AllocError>(new_layout)?;
old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
}
} else {
// Not extendable in place: plain allocate-and-copy.
let new_ptr = bump.alloc::<AllocError>(new_layout)?;
old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
}
} else {
// Downward-bumping: the last allocation begins at the bump position, so
// growing in place moves the allocation's start further down.
if is_last(bump, old_ptr, old_layout) {
let chunk = bump.chunk.get().as_non_dummy_unchecked();
let additional_size = new_layout.size() - old_layout.size();
let old_addr = old_ptr.addr();
// Candidate start address for the grown allocation, aligned for the
// new layout (at least `MIN_ALIGN`).
let new_addr = bump_down(old_addr, additional_size, new_layout.align().max(S::MIN_ALIGN));
let very_start = chunk.content_start().addr();
if new_addr >= very_start.get() {
let new_addr = NonZeroUsize::new_unchecked(new_addr);
let new_addr_end = new_addr.get() + new_layout.size();
let new_ptr = old_ptr.with_addr(new_addr);
// The shifted range may overlap the original bytes; use the cheaper
// nonoverlapping copy only when the new range ends below the old start.
if new_addr_end < old_addr.get() {
old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
} else {
old_ptr.copy_to(new_ptr, old_layout.size());
}
chunk.set_pos_addr(new_addr.get());
Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
} else {
// Not enough room below the allocation: move to another chunk.
let new_ptr = bump.alloc_in_another_chunk::<AllocError>(new_layout)?;
old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
}
} else {
// Not the last allocation: plain allocate-and-copy.
let new_ptr = bump.alloc::<AllocError>(new_layout)?;
old_ptr.copy_to_nonoverlapping(new_ptr, old_layout.size());
Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
}
}
}
}
#[inline(always)]
/// Grows the allocation like [`grow`], then zero-fills the newly added tail
/// bytes (the region past the old size up to the new size).
///
/// # Safety
///
/// Same contract as [`grow`]: `old_ptr` must denote a live allocation of
/// this bump allocator made with `old_layout`, and `new_layout.size()` must
/// be at least `old_layout.size()`.
pub unsafe fn grow_zeroed<A, S>(
    bump: &RawBump<A, S>,
    old_ptr: NonNull<u8>,
    old_layout: Layout,
    new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError>
where
    A: BaseAllocator<S::GuaranteedAllocated>,
    S: BumpAllocatorSettings,
{
    unsafe {
        let grown = grow(bump, old_ptr, old_layout, new_layout)?;
        let old_size = old_layout.size();
        // Only the freshly exposed bytes need zeroing; the copied prefix is
        // left untouched.
        let tail = grown.cast::<u8>().add(old_size);
        tail.write_bytes(0, new_layout.size() - old_size);
        Ok(grown)
    }
}
/// Shrinks the allocation at `old_ptr` from `old_layout` to `new_layout`.
///
/// When shrinking is enabled (`S::SHRINKS`) and `old_ptr` is the most recent
/// allocation, the freed tail is returned to the bump position; when the
/// pointer no longer satisfies the new alignment the data is moved instead.
/// Otherwise the allocation is left where it is.
///
/// # Safety
///
/// - `old_ptr` must denote a live allocation of this bump allocator made
///   with `old_layout`.
/// - `new_layout.size()` must be less than or equal to `old_layout.size()`.
#[inline(always)]
pub unsafe fn shrink<A, S>(
bump: &RawBump<A, S>,
old_ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError>
where
A: BaseAllocator<S::GuaranteedAllocated>,
S: BumpAllocatorSettings,
{
// Cold path taken when `old_ptr` does not satisfy `new_layout`'s alignment
// (see the `align_fits` check below): the data has to move to a suitably
// aligned address.
#[cold]
#[inline(never)]
unsafe fn shrink_unfit<A, S>(
bump: &RawBump<A, S>,
old_ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError>
where
A: BaseAllocator<S::GuaranteedAllocated>,
S: BumpAllocatorSettings,
{
unsafe {
if S::SHRINKS && is_last(bump, old_ptr, old_layout) {
// Remember the bump position so it can be restored if reallocation
// within this chunk fails below.
let old_pos = bump.chunk.get().pos();
// Pop the old allocation first so the new one can reuse its space.
deallocate_assume_last(bump, old_ptr, old_layout);
let overlaps;
let new_ptr;
if let Some(in_chunk) = bump.chunk.get().alloc(CustomLayout(new_layout)) {
new_ptr = in_chunk;
// The in-chunk reallocation may overlap the old bytes that still
// need copying; pick the copy primitive accordingly.
overlaps = if S::UP {
let old_ptr_end = old_ptr.add(new_layout.size());
old_ptr_end > new_ptr
} else {
let new_ptr_end = new_ptr.add(new_layout.size());
new_ptr_end > old_ptr
}
} else {
// In-chunk allocation failed: undo the pop, then fall back to
// another chunk, which cannot overlap the old bytes.
bump.chunk.get().as_non_dummy_unchecked().set_pos(old_pos);
new_ptr = bump.alloc_in_another_chunk::<AllocError>(new_layout)?;
overlaps = false;
}
if overlaps {
old_ptr.copy_to(new_ptr, new_layout.size());
} else {
old_ptr.copy_to_nonoverlapping(new_ptr, new_layout.size());
}
Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
} else {
// Nothing to reclaim in place: plain allocate-and-copy.
let new_ptr = bump.alloc::<AllocError>(new_layout)?;
old_ptr.copy_to_nonoverlapping(new_ptr, new_layout.size());
Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
}
}
}
debug_assert!(
new_layout.size() <= old_layout.size(),
"`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
);
unsafe {
if !align_fits(old_ptr, old_layout, new_layout) {
return shrink_unfit(bump, old_ptr, old_layout, new_layout);
}
// Either shrinking is disabled or this isn't the last allocation; keep
// the block where it is and report the old (larger) size.
if !S::SHRINKS || !is_last(bump, old_ptr, old_layout) {
return Ok(NonNull::slice_from_raw_parts(old_ptr, old_layout.size()));
}
if S::UP {
// Upward bump: retract the position to just past the shrunken end.
let end = old_ptr.addr().get() + new_layout.size();
let new_pos = up_align_usize_unchecked(end, S::MIN_ALIGN);
bump.chunk.get().as_non_dummy_unchecked().set_pos_addr(new_pos);
Ok(NonNull::slice_from_raw_parts(old_ptr, new_layout.size()))
} else {
// Downward bump: slide the allocation upward so the bump position rises.
let old_addr = old_ptr.addr();
let old_end_addr = NonZeroUsize::new_unchecked(old_addr.get() + old_layout.size());
// New, aligned start address for the shrunken allocation.
let new_addr = bump_down(old_end_addr, new_layout.size(), new_layout.align().max(S::MIN_ALIGN));
let new_addr = NonZeroUsize::new_unchecked(new_addr);
let new_ptr = old_ptr.with_addr(new_addr);
let copy_src_end = NonZeroUsize::new_unchecked(old_addr.get() + new_layout.size());
let copy_dst_start = new_addr;
// Source and destination may overlap when the move distance is smaller
// than the number of bytes copied.
let overlaps = copy_src_end > copy_dst_start;
if overlaps {
old_ptr.copy_to(new_ptr, new_layout.size());
} else {
old_ptr.copy_to_nonoverlapping(new_ptr, new_layout.size());
}
bump.chunk.get().as_non_dummy_unchecked().set_pos(new_ptr);
Ok(NonNull::slice_from_raw_parts(new_ptr, new_layout.size()))
}
}
}
#[inline(always)]
/// Returns whether the existing allocation's address already satisfies the
/// alignment required by `new_layout`, i.e. whether it can be reused in
/// place without moving the data.
fn align_fits(old_ptr: NonNull<u8>, _old_layout: Layout, new_layout: Layout) -> bool {
    let required = new_layout.align();
    non_null::is_aligned_to(old_ptr, required)
}