use super::ALLOC_OVERHEAD;
use crate::util::{self, TypeProps};
use alloc::alloc::Layout;
use core::cell::Cell;
use core::marker::PhantomData;
use core::ptr::{self, NonNull};
/// Bookkeeping record stored at the high end of every chunk allocation.
///
/// Raw memory is cast to `*mut ChunkFooter` (see `Chunk::init`), and the
/// test module below asserts this struct's exact size on 64-bit targets,
/// so the field set and order are layout-sensitive — do not reorder.
#[derive(Debug)]
pub(crate) struct ChunkFooter {
    // Lowest address of the chunk allocation (start of the element buffer).
    data: NonNull<u8>,
    // Current bump pointer. Allocation proceeds downward: live elements
    // occupy `ptr .. footer`, and `ptr == data` means the chunk is full.
    ptr: Cell<NonNull<u8>>,
    // Layout the chunk was allocated with; presumably needed to deallocate
    // it later — the dealloc path is outside this view, confirm at caller.
    layout: Layout,
    // Intrusive doubly-linked list of chunks. Freshly initialized chunks
    // point both links at the `DEAD_CHUNK` sentinel instead of using null.
    prev: Cell<NonNull<ChunkFooter>>,
    next: Cell<NonNull<ChunkFooter>>,
}
pub(crate) struct Chunk<T>(NonNull<ChunkFooter>, PhantomData<*mut T>);
/// Wraps a raw footer pointer in a typed [`Chunk`] handle.
///
/// # Safety
///
/// NOTE(review): presumably `footer` must point to a live `ChunkFooter`
/// (or the `DEAD_CHUNK` sentinel) belonging to a chunk of `T` elements —
/// confirm against the callers; this function itself only rewraps the
/// pointer and performs no dereference.
pub(crate) unsafe fn wrap<T>(footer: NonNull<ChunkFooter>) -> Chunk<T> {
    let chunk: Chunk<T> = Chunk(footer, PhantomData);
    chunk
}
impl<T> Chunk<T> {
    /// Alignment the footer must be placed at: the stricter of the footer's
    /// own alignment and `T`'s, so a footer can directly follow a `T` buffer.
    const FOOTER_ALIGN: usize = util::max(ChunkFooter::ALIGN, T::ALIGN);
    /// Size in bytes of [`ChunkFooter`].
    const FOOTER_SIZE: usize = ChunkFooter::SIZE;
    /// True when the footer always sits flush against the end of the element
    /// buffer (no padding gap between the last element slot and the footer).
    /// Holds when one of `T::SIZE` / `FOOTER_ALIGN` divides the other; note
    /// `0` is a multiple of everything, so this is `true` for ZSTs.
    pub(crate) const FOOTER_IS_END: bool = {
        // Compile-time sanity checks: the chunk allocation alignment must
        // satisfy both the element type and the footer placement.
        assert!(util::is_aligned_to(Self::CHUNK_ALIGN, T::ALIGN));
        assert!(util::is_aligned_to(Self::CHUNK_ALIGN, Self::FOOTER_ALIGN));
        Self::FOOTER_ALIGN.is_multiple_of(T::SIZE) || T::SIZE.is_multiple_of(Self::FOOTER_ALIGN)
    };
    /// Alignment requested for the whole chunk allocation.
    pub(crate) const CHUNK_ALIGN: usize = util::max(T::ALIGN, Self::FOOTER_ALIGN);
    /// Smallest chunk size: room for one element (rounded up to two inside
    /// `chunk_size_for`) plus the footer and allocator overhead.
    pub(crate) const CHUNK_MIN_SIZE: usize = Self::chunk_size_for(1);
    /// Size of the first chunk: ~512 bytes minus allocator overhead, or room
    /// for 8 elements, whichever is larger; types bigger than 1 KiB start
    /// with room for just 2 elements.
    pub(crate) const CHUNK_FIRST_SIZE: usize = {
        let chunk_512b = 0x200 - ALLOC_OVERHEAD;
        let size = if T::SIZE > 1024 {
            Self::chunk_size_for(2)
        } else {
            util::max(chunk_512b, Self::chunk_size_for(8))
        };
        // Keep the total request (size + overhead) a power of two so the
        // underlying allocation is allocator-friendly.
        assert!((size + ALLOC_OVERHEAD).is_power_of_two());
        size
    };
    /// Upper bound on chunk growth: the smaller of a power-of-two fraction
    /// of the address space (`isize::MAX >> 2` rounded down) and 4 GiB,
    /// minus the allocator overhead.
    pub(crate) const CHUNK_MAX_SIZE: usize = {
        let part_of_entire_memory = util::round_down_to_pow2(isize::MAX as usize >> 2);
        let common_sensible_in_2025 = 4 << 30;
        let size = util::min(part_of_entire_memory, common_sensible_in_2025);
        assert!(size.is_power_of_two());
        assert!(size <= isize::MAX as usize);
        size - ALLOC_OVERHEAD
    };
    /// Computes the chunk size (excluding `ALLOC_OVERHEAD`) needed to hold at
    /// least `elements_count` elements plus the footer, rounded so that the
    /// total allocation request is a power of two.
    pub(crate) const fn chunk_size_for(mut elements_count: usize) -> usize {
        // Always size for at least two elements.
        if elements_count < 2 {
            elements_count = 2;
        }
        let mut chunk_size = elements_count * T::SIZE;
        assert!(util::is_aligned_to(chunk_size, T::ALIGN));
        // Reserve space for the footer (padded to its alignment) and the
        // allocator's own bookkeeping, then round the total up to a power
        // of two and hand the overhead back to the allocator.
        let overhead = ALLOC_OVERHEAD + Self::FOOTER_SIZE;
        chunk_size += overhead.next_multiple_of(Self::FOOTER_ALIGN);
        chunk_size.next_power_of_two() - ALLOC_OVERHEAD
    }
    /// Initializes a fresh chunk inside the allocation `data`/`layout`:
    /// places the footer at the high end, computes the element capacity, and
    /// returns the footer pointer together with that capacity.
    ///
    /// # Safety
    ///
    /// NOTE(review): callers must pass an allocation that satisfies the
    /// debug assertions below — aligned to `T::ALIGN` and large enough that
    /// the footer lands strictly inside it (at least `CHUNK_MIN_SIZE`);
    /// confirm against the allocation sites.
    pub(crate) unsafe fn init(data: NonNull<u8>, layout: Layout) -> (NonNull<ChunkFooter>, usize) {
        let chunk_start = data.as_ptr();
        let chunk_end = chunk_start.wrapping_byte_add(layout.size());
        // Tentative footer position: highest FOOTER_ALIGN-aligned address
        // that leaves FOOTER_SIZE bytes before the end of the allocation.
        let mut footer_addr = {
            let addr = chunk_end.wrapping_byte_sub(Chunk::<T>::FOOTER_SIZE);
            util::round_mut_ptr_down_to(addr, Chunk::<T>::FOOTER_ALIGN)
        };
        debug_assert!(chunk_start < footer_addr);
        debug_assert!(footer_addr < chunk_end);
        debug_assert!(util::ptr_is_aligned_to(chunk_start, T::ALIGN));
        // Capacity is the number of whole elements that fit below the footer;
        // the bump pointer starts at the top of that element buffer (chunks
        // fill downward, so a fresh chunk has ptr == buffer end).
        let chunk_cap_in_bytes = footer_addr as usize - chunk_start as usize;
        let chunk_capacity = chunk_cap_in_bytes / T::SIZE;
        let buffer_size = chunk_capacity * T::SIZE;
        let new_ptr = chunk_start.wrapping_byte_add(buffer_size);
        debug_assert!(new_ptr <= footer_addr);
        // If element slots are themselves footer-aligned, pull the footer
        // down flush against the buffer end (it still fits: buffer_size is
        // at most the tentative footer offset).
        if const { T::SIZE.is_multiple_of(Chunk::<T>::FOOTER_ALIGN) } {
            footer_addr = chunk_start.wrapping_byte_add(buffer_size);
        }
        if const { Chunk::<T>::FOOTER_IS_END } {
            debug_assert!(new_ptr == footer_addr);
        }
        unsafe {
            let footer_ptr = footer_addr as *mut ChunkFooter;
            // Both list links start at the dead-chunk sentinel, never null.
            util::write_with(footer_ptr, || ChunkFooter {
                data: NonNull::new_unchecked(chunk_start),
                ptr: Cell::new(NonNull::new_unchecked(new_ptr)),
                layout,
                prev: Cell::new(DEAD_CHUNK.footer()),
                next: Cell::new(DEAD_CHUNK.footer()),
            });
            (NonNull::new_unchecked(footer_ptr), chunk_capacity)
        }
    }
    /// Drops every live element in this chunk in place, resets the bump
    /// pointer to "empty", and returns how many elements were dropped.
    ///
    /// NOTE(review): divides by `T::SIZE` — assumes `T` is not a ZST here;
    /// confirm ZST chunks never reach this path.
    pub(crate) unsafe fn drop(&self) -> usize {
        unsafe {
            // Live elements occupy ptr .. footer (the footer address is the
            // end of the element region).
            let ptr = self.ptr().cast::<T>().as_ptr();
            let end = self.0.as_ptr();
            let len = (end as usize - ptr as usize) / T::SIZE;
            ptr::drop_in_place(ptr::slice_from_raw_parts_mut(ptr, len));
            // Move the bump pointer back up to the buffer end: chunk empty.
            let new_ptr = NonNull::new_unchecked(ptr.wrapping_add(len));
            self.set_ptr(new_ptr.cast());
            len
        }
    }
    /// Raw pointer to this chunk's footer (also identifies the chunk).
    pub(crate) fn footer(&self) -> NonNull<ChunkFooter> {
        self.0
    }
    /// Layout this chunk was allocated with.
    pub(crate) fn layout(&self) -> Layout {
        unsafe { self.0.as_ref().layout }
    }
    /// Total size in bytes of the chunk allocation.
    pub(crate) fn size(&self) -> usize {
        unsafe { self.0.as_ref().layout.size() }
    }
    /// Lowest address of the chunk allocation.
    pub(crate) fn data(&self) -> NonNull<u8> {
        unsafe { self.0.as_ref().data.cast() }
    }
    /// Start of the element buffer, typed.
    pub(crate) fn start(&self) -> NonNull<T> {
        self.data().cast()
    }
    /// End of the element region == address of the footer.
    pub(crate) fn end(&self) -> NonNull<u8> {
        self.0.cast()
    }
    /// End of the element buffer rounded to whole elements. Equals `end()`
    /// when the footer is flush; otherwise recomputed from the capacity.
    pub(crate) fn end_aligned(&self) -> NonNull<T> {
        if const { Chunk::<T>::FOOTER_IS_END } {
            self.end().cast()
        } else {
            let buffer_size = self.capacity() * T::SIZE;
            unsafe { self.start().byte_add(buffer_size) }
        }
    }
    /// Current bump pointer (address of the most recently pushed element).
    pub(crate) fn ptr(&self) -> NonNull<T> {
        unsafe { self.0.as_ref().ptr.get().cast() }
    }
    /// Overwrites the bump pointer.
    pub(crate) fn set_ptr(&self, ptr: NonNull<T>) {
        unsafe { self.0.as_ref().ptr.set(ptr.cast()) }
    }
    /// Next chunk in the intrusive list (the sentinel when at the end).
    pub(crate) fn next(&self) -> NonNull<ChunkFooter> {
        unsafe { self.0.as_ref().next.get().cast() }
    }
    pub(crate) fn set_next(&self, chunk_ptr: NonNull<ChunkFooter>) {
        unsafe { self.0.as_ref().next.set(chunk_ptr) }
    }
    /// Previous chunk in the intrusive list (the sentinel when at the front).
    pub(crate) fn prev(&self) -> NonNull<ChunkFooter> {
        unsafe { self.0.as_ref().prev.get().cast() }
    }
    pub(crate) fn set_prev(&self, chunk_ptr: NonNull<ChunkFooter>) {
        unsafe { self.0.as_ref().prev.set(chunk_ptr) }
    }
    /// True when this handle points at the static `DEAD_CHUNK` sentinel.
    pub(crate) fn is_dead(&self) -> bool {
        ptr::eq(self.0.as_ptr(), &DEAD_CHUNK.0)
    }
    /// Number of elements the chunk can hold in total.
    ///
    /// NOTE(review): divides by `T::SIZE` — assumes `T` is not a ZST here.
    pub(crate) fn capacity(&self) -> usize {
        let footer = unsafe { self.0.as_ref() };
        let end = self.0.as_ptr() as usize;
        let start = footer.data.as_ptr() as usize;
        debug_assert!(start <= end);
        (end - start) / T::SIZE
    }
    /// Full when the bump pointer has reached the bottom of the buffer.
    pub(crate) fn is_full(&self) -> bool {
        let footer = unsafe { self.0.as_ref() };
        let start = footer.data.as_ptr() as usize;
        let ptr = footer.ptr.get().as_ptr() as usize;
        debug_assert!(start <= ptr);
        start == ptr
    }
    /// Empty when the bump pointer is back at the top of the buffer.
    pub(crate) fn is_empty(&self) -> bool {
        let end = self.0.as_ptr() as usize;
        let ptr = self.ptr().as_ptr() as usize;
        debug_assert!(ptr <= end);
        if const { Self::FOOTER_IS_END } {
            end == ptr
        } else {
            // The footer may sit above the buffer end by up to
            // (end - start) % T::SIZE < T::SIZE bytes of padding, so a gap
            // smaller than one element means no elements remain.
            end - ptr < T::SIZE
        }
    }
    /// Borrows the live elements `ptr .. end` as a slice; the caller picks
    /// the lifetime, so it must not outlive the chunk's contents.
    pub(crate) unsafe fn slice<'a>(&self) -> &'a [T] {
        let ptr = self.ptr().as_ptr();
        let end = self.0.as_ptr();
        let len = (end as usize - ptr as usize) / T::SIZE;
        unsafe { core::slice::from_raw_parts(ptr, len) }
    }
    /// Mutable variant of [`slice`]; same lifetime caveat, plus the usual
    /// exclusive-access requirement.
    pub(crate) unsafe fn slice_mut<'a>(&self) -> &'a mut [T] {
        let ptr = self.ptr().as_ptr();
        let end = self.0.as_ptr();
        let len = (end as usize - ptr as usize) / T::SIZE;
        unsafe { core::slice::from_raw_parts_mut(ptr, len) }
    }
    /// Pointer to the *oldest* element. Since the chunk fills downward, that
    /// is the highest-addressed live element (`ptr + len - 1`). The chunk
    /// must be non-empty for non-ZST `T` (debug-asserted).
    pub(crate) unsafe fn first_unchecked(&self) -> NonNull<T> {
        if const { T::IS_ZST } {
            util::zst_ptr::<T>()
        } else {
            debug_assert!(!self.is_empty());
            let ptr = self.ptr().as_ptr();
            let end = self.0.as_ptr();
            let elems_num = (end as usize - ptr as usize) / T::SIZE;
            unsafe { NonNull::new_unchecked(ptr).add(elems_num - 1) }
        }
    }
    /// Pointer to the *newest* element — the bump pointer itself.
    /// The chunk must be non-empty for non-ZST `T` (debug-asserted).
    pub(crate) unsafe fn last_unchecked(&self) -> NonNull<T> {
        if const { T::IS_ZST } {
            util::zst_ptr::<T>()
        } else {
            debug_assert!(!self.is_empty());
            self.ptr()
        }
    }
    /// Bumps downward to carve out one element slot; `None` when full.
    #[inline(always)]
    pub(crate) fn alloc_element(&self) -> Option<NonNull<T>> {
        debug_assert!(!T::IS_ZST);
        if self.is_full() {
            return None;
        }
        // Not full => ptr > start, and (ptr - start) is a whole number of
        // element slots, so there is room for at least one more below ptr.
        let ptr = self.ptr().as_ptr();
        let new_ptr = unsafe {
            let ptr = ptr.wrapping_byte_sub(T::SIZE);
            NonNull::new_unchecked(ptr)
        };
        self.set_ptr(new_ptr);
        Some(new_ptr.cast())
    }
    /// Pops the most recently allocated element slot, returning a pointer to
    /// it; `None` when empty. The element itself is not dropped here.
    #[inline(always)]
    pub(crate) unsafe fn dealloc_element(&self) -> Option<NonNull<T>> {
        debug_assert!(!T::IS_ZST);
        if self.is_empty() {
            return None;
        }
        let ptr = self.ptr().as_ptr();
        let end = self.0.as_ptr().cast();
        let new_ptr = ptr.wrapping_byte_add(T::SIZE);
        debug_assert!(new_ptr <= end);
        let new_ptr = unsafe { NonNull::new_unchecked(new_ptr) };
        self.set_ptr(new_ptr);
        Some(unsafe { NonNull::new_unchecked(ptr.cast()) })
    }
}
/// Newtype around [`ChunkFooter`] used for the static sentinel chunk.
/// `#[repr(transparent)]` guarantees its address is also the address of the
/// inner footer, which `footer()` relies on when casting.
#[repr(transparent)]
pub(crate) struct DeadChunk(ChunkFooter);
// SAFETY-NOTE(review): `ChunkFooter` contains `Cell`s, so `Sync` is manually
// asserted here; presumably sound because the sentinel's cells are never
// actually mutated — confirm no code path writes through `DEAD_CHUNK`.
unsafe impl Sync for DeadChunk {}
impl DeadChunk {
    /// Returns the address of the global sentinel as a footer pointer.
    ///
    /// `DeadChunk` is `#[repr(transparent)]` over `ChunkFooter`, so the
    /// sentinel's own address doubles as the address of its footer.
    pub(crate) const fn footer(&'static self) -> NonNull<ChunkFooter> {
        let sentinel: *const DeadChunk = &DEAD_CHUNK;
        let raw = sentinel as *const ChunkFooter as *mut ChunkFooter;
        // SAFETY: `DEAD_CHUNK` is a static item, so its address is never null.
        unsafe { NonNull::new_unchecked(raw) }
    }
}
/// Self-referential sentinel chunk used instead of null links: `data`, `ptr`,
/// `prev`, and `next` all point back at this footer itself, so the sentinel
/// has zero capacity and reports as both empty and full. Referring to
/// `DEAD_CHUNK` inside its own initializer is legal because only its address
/// is taken, not its value.
pub(crate) static DEAD_CHUNK: DeadChunk = DeadChunk(ChunkFooter {
    data: DEAD_CHUNK.footer().cast(),
    ptr: Cell::new(DEAD_CHUNK.footer().cast()),
    layout: Layout::new::<ChunkFooter>(),
    prev: Cell::new(DEAD_CHUNK.footer()),
    next: Cell::new(DEAD_CHUNK.footer()),
});
// Compile-time layout regression tests. Gated to two concrete 64-bit
// targets because the asserted constants (pointer sizes, `Layout` size,
// `ALLOC_OVERHEAD`) are target-dependent.
#[cfg(test)]
#[cfg(any(
    all(target_os = "linux", target_arch = "x86_64"),
    all(target_os = "macos", target_arch = "aarch64"),
))]
mod static_test {
    use super::*;
    // Shadow `assert!` with a const-evaluated version: if the expression is
    // false, `0 - 1` underflows in the array length and compilation fails.
    macro_rules! assert {
        ($expr:expr $(,)?) => {
            const _: [(); 0 - !{ $expr } as usize] = [];
        };
    }
    macro_rules! assert_eq {
        ($l_expr:expr, $r_expr:expr $(,)?) => {
            assert!(($l_expr) == ($r_expr));
        };
    }
    // Probe types whose names encode their payload byte count; the padded
    // sizes are asserted below (e.g. T9 -> 9 payload bytes padded to 16).
    struct T9 {
        _m1: u64, _m2: u8, }
    struct T35 {
        _m1: u16, _m2: u64, _m3: u32, _m4: [u8; 16], _m5: u32, _m6: u8, }
    struct T115 {
        _m1: u16, _m2: u64, _m3: u32, _m4: [u32; 24], _m5: u32, _m6: u8, }
    assert_eq!(u8::ALIGN, 1);
    assert_eq!(u8::SIZE, 1);
    assert_eq!(Chunk::<u8>::FOOTER_ALIGN, 8);
    assert_eq!(Chunk::<u8>::FOOTER_SIZE, 48);
    assert_eq!(Chunk::<u8>::CHUNK_ALIGN, 8);
    assert_eq!(Chunk::<u8>::CHUNK_MIN_SIZE, 128 - ALLOC_OVERHEAD);
    assert_eq!(Chunk::<u8>::CHUNK_FIRST_SIZE, 512 - ALLOC_OVERHEAD);
    assert_eq!(Chunk::<u8>::CHUNK_MAX_SIZE, (4 << 30) - ALLOC_OVERHEAD);
    assert_eq!(u16::ALIGN, 2);
    assert_eq!(u16::SIZE, 2);
    assert_eq!(Chunk::<u16>::FOOTER_ALIGN, 8);
    assert_eq!(Chunk::<u16>::FOOTER_SIZE, 48);
    assert_eq!(Chunk::<u16>::CHUNK_ALIGN, 8);
    assert_eq!(Chunk::<u16>::CHUNK_MIN_SIZE, 128 - ALLOC_OVERHEAD);
    assert_eq!(Chunk::<u16>::CHUNK_FIRST_SIZE, 512 - ALLOC_OVERHEAD);
    assert_eq!(Chunk::<u16>::CHUNK_MAX_SIZE, (4 << 30) - ALLOC_OVERHEAD);
    assert_eq!(u32::ALIGN, 4);
    assert_eq!(u32::SIZE, 4);
    assert_eq!(Chunk::<u32>::FOOTER_ALIGN, 8);
    assert_eq!(Chunk::<u32>::FOOTER_SIZE, 48);
    assert_eq!(Chunk::<u32>::CHUNK_ALIGN, 8);
    assert_eq!(Chunk::<u32>::CHUNK_MIN_SIZE, 128 - ALLOC_OVERHEAD);
    assert_eq!(Chunk::<u32>::CHUNK_FIRST_SIZE, 512 - ALLOC_OVERHEAD);
    assert_eq!(Chunk::<u32>::CHUNK_MAX_SIZE, (4 << 30) - ALLOC_OVERHEAD);
    assert_eq!(u64::ALIGN, 8);
    assert_eq!(u64::SIZE, 8);
    assert_eq!(Chunk::<u64>::FOOTER_ALIGN, 8);
    assert_eq!(Chunk::<u64>::FOOTER_SIZE, 48);
    assert_eq!(Chunk::<u64>::CHUNK_ALIGN, 8);
    assert_eq!(Chunk::<u64>::CHUNK_MIN_SIZE, 128 - ALLOC_OVERHEAD);
    assert_eq!(Chunk::<u64>::CHUNK_FIRST_SIZE, 512 - ALLOC_OVERHEAD);
    assert_eq!(Chunk::<u64>::CHUNK_MAX_SIZE, (4 << 30) - ALLOC_OVERHEAD);
    assert_eq!(T9::ALIGN, 8);
    assert_eq!(T9::SIZE, 16);
    assert_eq!(Chunk::<T9>::FOOTER_ALIGN, 8);
    assert_eq!(Chunk::<T9>::FOOTER_SIZE, 48);
    assert_eq!(Chunk::<T9>::CHUNK_ALIGN, 8);
    assert_eq!(Chunk::<T9>::CHUNK_MIN_SIZE, 128 - ALLOC_OVERHEAD);
    assert_eq!(Chunk::<T9>::CHUNK_FIRST_SIZE, 512 - ALLOC_OVERHEAD);
    assert_eq!(Chunk::<T9>::CHUNK_MAX_SIZE, (4 << 30) - ALLOC_OVERHEAD);
    assert!(T9::SIZE.is_multiple_of(T9::ALIGN));
    assert_eq!(T35::ALIGN, 8);
    assert_eq!(T35::SIZE, 40);
    assert_eq!(Chunk::<T35>::FOOTER_ALIGN, 8);
    assert_eq!(Chunk::<T35>::FOOTER_SIZE, 48);
    assert_eq!(Chunk::<T35>::CHUNK_ALIGN, 8);
    assert_eq!(Chunk::<T35>::CHUNK_MIN_SIZE, 256 - ALLOC_OVERHEAD);
    assert_eq!(Chunk::<T35>::CHUNK_FIRST_SIZE, 512 - ALLOC_OVERHEAD);
    assert_eq!(Chunk::<T35>::CHUNK_MAX_SIZE, (4 << 30) - ALLOC_OVERHEAD);
    assert!(T35::SIZE.is_multiple_of(T35::ALIGN));
    assert_eq!(T115::ALIGN, 8);
    assert_eq!(T115::SIZE, 120);
    assert_eq!(Chunk::<T115>::FOOTER_ALIGN, 8);
    assert_eq!(Chunk::<T115>::FOOTER_SIZE, 48);
    assert_eq!(Chunk::<T115>::CHUNK_ALIGN, 8);
    assert_eq!(Chunk::<T115>::CHUNK_MIN_SIZE, 512 - ALLOC_OVERHEAD);
    assert_eq!(Chunk::<T115>::CHUNK_FIRST_SIZE, 1024 - ALLOC_OVERHEAD);
    assert_eq!(Chunk::<T115>::CHUNK_MAX_SIZE, (4 << 30) - ALLOC_OVERHEAD);
    assert!(T115::SIZE.is_multiple_of(T115::ALIGN));
}