use core::alloc::Layout;
use core::cmp;
use core::hint;
use core::marker::PhantomData;
use core::num::NonZero;
use core::ptr::NonNull;
use crate::alloc::{Allocator,Global};
use crate::error::TryReserveError;
use crate::error::TryReserveErrorKind::*;
use crate::fields::Fields;
/// Central diverging helper for capacity-arithmetic overflow.
///
/// `#[track_caller]` makes the panic report the location of the public API
/// call that overflowed, not this helper.
#[track_caller]
fn capacity_overflow() -> ! {
    panic!("capacity overflow")
}
/// Whether freshly allocated memory should be zero-initialized.
/// Dispatched on in `try_allocate_in` to choose between
/// `Allocator::allocate` and `Allocator::allocate_zeroed`.
enum AllocInit {
    /// Leave the new memory uninitialized.
    Uninitialized,
    /// Zero the new memory.
    Zeroed,
}
/// Low-level buffer for a column-oriented vector of `T` with `N` fields,
/// allocated from `A`. Owns the raw allocation only; element count and
/// initialization bookkeeping are the caller's responsibility
/// (mirrors the role std's `RawVec` plays for `Vec`).
pub struct RawColVec<const N:usize, T, A: Allocator> {
    inner: RawColVecInner<N, A>,
    // Ties ownership of `T` to this type for variance/drop-check purposes
    // without storing a `T` directly (the buffer is untyped `u8`).
    _marker: PhantomData<T>,
}
// SAFETY: the buffer logically owns its `T`s, so thread transfer/sharing is
// gated on `T: Send`/`T: Sync`.
// NOTE(review): `A` is not bounded by `Send`/`Sync` here, so a non-Send
// allocator would be moved across threads with the vector — std's Vec bounds
// the allocator too (`A: Allocator + Send`). Confirm this is intentional.
unsafe impl<const N:usize, T: Send, A: Allocator> Send for RawColVec<N, T, A> {}
unsafe impl<const N:usize, T: Sync, A: Allocator> Sync for RawColVec<N, T, A> {}
/// Type-erased core of `RawColVec`: raw pointer, capacity, allocator.
/// Keeping it `T`-free lets the growth machinery be monomorphized once per
/// `(N, A)` instead of once per element type.
struct RawColVecInner<const N:usize, A: Allocator> {
    // Start of the allocation; a dangling, aligned, provenance-free pointer
    // while nothing is allocated (see `new_in`).
    ptr: NonNull<u8>,
    // Capacity in elements; kept a multiple of the element alignment by
    // `try_allocate_in`/`grow_amortized`.
    cap: usize,
    alloc: A,
}
/// Compile-time description of an element type: its overall memory `Layout`
/// plus the `N` per-field descriptors used when growing/repacking the buffer.
pub trait StructInfo<const N:usize> {
    /// Size/alignment of one element.
    const LAYOUT:Layout;
    /// Per-field (column) metadata consumed by `Fields::grow_fields`.
    const FIELDS:Fields::<N>;
}
impl<const N:usize, T: StructInfo<N>> RawColVec<N, T, Global> {
    /// Creates an empty vector in the global allocator.
    /// Does not allocate until elements are reserved.
    #[inline]
    #[must_use]
    pub const fn new() -> Self {
        Self::new_in(Global)
    }
}
// NOTE(review): empty impl block — either a placeholder for future
// Global-specific helpers or leftover dead scaffolding; confirm and remove
// if unused.
impl<const N:usize> RawColVecInner<N, Global> {
}
/// First non-zero capacity to jump to, chosen by element size: small elements
/// start with more slots so early pushes don't reallocate repeatedly, large
/// elements start with just one (same heuristic as std's `RawVec`).
const fn min_non_zero_cap(size: usize) -> usize {
    match size {
        1 => 8,
        s if s <= 1024 => 4,
        _ => 1,
    }
}
// Public, typed facade: every method forwards to `RawColVecInner`, supplying
// the element's compile-time `LAYOUT`/`FIELDS` so the inner machinery stays
// monomorphized per-(N, A) rather than per element type.
impl<const N:usize, T: StructInfo<N>, A: Allocator> RawColVec<N, T, A> {
    /// Creates an empty vector in `alloc`. Does not allocate.
    #[inline]
    pub const fn new_in(alloc: A) -> Self {
        Self {
            // A Layout's alignment is always non-zero, so `unwrap` cannot fail.
            inner: RawColVecInner::new_in(alloc, NonZero::new(T::LAYOUT.align()).unwrap()),
            _marker: PhantomData,
        }
    }

    /// Allocates room for at least `capacity` elements in `alloc`; diverges
    /// via the error handler on overflow or allocation failure.
    #[inline]
    #[track_caller]
    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
        Self {
            inner: RawColVecInner::with_capacity_in(capacity, alloc, T::LAYOUT),
            _marker: PhantomData,
        }
    }

    /// Current capacity in elements (`usize::MAX` when `T` is zero-sized).
    #[inline]
    pub const fn capacity(&self) -> usize {
        self.inner.capacity(T::LAYOUT.size())
    }

    /// Ensures space for at least `additional` elements beyond `len`,
    /// growing amortized when needed; diverges on failure.
    #[inline]
    #[track_caller]
    pub fn reserve(&mut self, len: usize, additional: usize) {
        self.inner.reserve(len, additional, T::LAYOUT, &T::FIELDS)
    }

    /// Raw pointer to the start of the buffer; dangling (but aligned) while
    /// nothing is allocated.
    #[inline]
    pub const fn ptr(&self) -> *mut u8 {
        self.inner.ptr.as_ptr()
    }

    /// Grows for exactly one more element. `#[inline(never)]` keeps this
    /// cold path out of push-like callers' hot code.
    #[inline(never)]
    #[track_caller]
    pub fn grow_one(&mut self) {
        self.inner.grow_one(T::LAYOUT,&T::FIELDS)
    }
}
impl<const N:usize, A: Allocator> RawColVecInner<N, A> {
    /// Empty inner vec: a dangling, well-aligned pointer and zero capacity.
    /// Does not allocate.
    #[inline]
    const fn new_in(alloc: A, align: NonZero<usize>) -> Self {
        // Provenance-free address equal to the alignment: a valid dangling
        // placeholder while `cap == 0`.
        let ptr = NonNull::without_provenance(align);
        Self { ptr, cap: 0, alloc }
    }

    /// Allocates for at least `capacity` elements; diverges through
    /// `handle_error` on overflow or allocation failure.
    #[inline]
    #[track_caller]
    fn with_capacity_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
        match Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout) {
            Ok(this) => {
                unsafe {
                    // SAFETY: allocation just succeeded for `capacity`
                    // elements, so no further growth is needed; this lets the
                    // optimizer drop redundant grow checks in callers.
                    hint::assert_unchecked(!this.needs_to_grow(0, capacity, elem_layout));
                }
                this
            }
            Err(err) => handle_error(err),
        }
    }

    /// Fallible allocation of a buffer holding at least `capacity` elements
    /// of `elem_layout`, optionally zeroed.
    fn try_allocate_in(
        capacity: usize,
        init: AllocInit,
        alloc: A,
        elem_layout: Layout,
    ) -> Result<Self, TryReserveError> {
        // Round the capacity up so column boundaries stay aligned.
        // NOTE(review): `next_multiple_of` panics on overflow instead of
        // returning `CapacityOverflow` — confirm that is acceptable here.
        let capacity = capacity.next_multiple_of(elem_layout.align());
        let layout = match layout_colvec(capacity, elem_layout) {
            Ok(layout) => layout,
            Err(_) => return Err(CapacityOverflow.into()),
        };
        // Zero-sized request (ZST element and/or zero capacity): stay
        // unallocated with a dangling pointer.
        if layout.size() == 0 {
            // SAFETY: a Layout's alignment is always non-zero.
            return Ok(Self::new_in(alloc, unsafe{NonZero::new_unchecked(elem_layout.align())}));
        }
        // Reject sizes above isize::MAX on sub-64-bit targets before asking
        // the allocator.
        if let Err(err) = alloc_guard(layout.size()) {
            return Err(err);
        }
        let result = match init {
            AllocInit::Uninitialized => alloc.allocate(layout),
            AllocInit::Zeroed => alloc.allocate_zeroed(layout),
        };
        let ptr = match result {
            Ok(ptr) => ptr,
            Err(_) => return Err(AllocError { layout }.into()),
        };
        Ok(Self {
            ptr: ptr.cast(),
            cap: capacity,
            alloc,
        })
    }

    /// Capacity in elements; unbounded (`usize::MAX`) for zero-sized elements.
    #[inline]
    const fn capacity(&self, elem_size: usize) -> usize {
        if elem_size == 0 { usize::MAX } else { self.cap }
    }

    /// Ensures space for `len + additional` elements; diverges on failure.
    #[inline]
    #[track_caller]
    fn reserve(&mut self, len: usize, additional: usize, elem_layout: Layout, fields: &Fields<N>) {
        // The growth path is outlined and marked cold so `reserve` itself
        // stays small enough to inline into hot callers.
        #[cold]
        fn do_reserve_and_handle<const N:usize, A: Allocator>(
            slf: &mut RawColVecInner<N, A>,
            len: usize,
            additional: usize,
            elem_layout: Layout,
            fields: &Fields<N>,
        ) {
            if let Err(err) = slf.grow_amortized(len, additional, elem_layout, fields) {
                handle_error(err);
            }
        }
        if self.needs_to_grow(len, additional, elem_layout) {
            do_reserve_and_handle(self, len, additional, elem_layout, fields);
        }
    }

    /// Grows for exactly one more element, treating the buffer as full
    /// (passes `self.cap` as the current length).
    #[inline]
    #[track_caller]
    fn grow_one(&mut self, elem_layout: Layout, fields: &Fields<N>) {
        if let Err(err) = self.grow_amortized(self.cap, 1, elem_layout, fields) {
            handle_error(err);
        }
    }

    /// True when `len + additional` exceeds the current capacity.
    /// The `wrapping_sub` assumes callers maintain `len <= capacity`
    /// (same invariant std's RawVec relies on).
    #[inline]
    fn needs_to_grow(&self, len: usize, additional: usize, elem_layout: Layout) -> bool {
        additional > self.capacity(elem_layout.size()).wrapping_sub(len)
    }

    /// Pointer and layout of the live allocation, or `None` when nothing is
    /// allocated (zero-sized elements or zero capacity).
    #[inline]
    fn current_memory(&self, elem_layout: Layout) -> Option<(NonNull<u8>, Layout)> {
        if elem_layout.size() == 0 || self.cap == 0 {
            None
        } else {
            unsafe {
                // SAFETY: an allocation of exactly this size already exists,
                // so the multiplication cannot overflow and the reconstructed
                // layout is valid.
                let alloc_size = elem_layout.size().unchecked_mul(self.cap);
                let layout = Layout::from_size_align_unchecked(alloc_size, elem_layout.align());
                Some((self.ptr.into(), layout))
            }
        }
    }

    /// Installs a new allocation.
    /// Caller must guarantee `ptr` is a live allocation from `self.alloc`
    /// large enough for `cap` elements.
    #[inline]
    unsafe fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) {
        self.ptr = ptr.cast();
        self.cap = cap;
    }

    /// Amortized growth to hold `len + additional` elements: at least doubles
    /// the capacity (keeping push amortized O(1)), bounded below by
    /// `min_non_zero_cap`, rounded up to the element alignment.
    fn grow_amortized(
        &mut self,
        len: usize,
        additional: usize,
        elem_layout: Layout,
        fields: &Fields<N>,
    ) -> Result<(), TryReserveError> {
        debug_assert!(additional > 0);
        // Zero-sized elements report usize::MAX capacity, so reaching this
        // point means the length itself overflowed.
        if elem_layout.size() == 0 {
            return Err(CapacityOverflow.into());
        }
        let required_cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
        let cap = cmp::max(self.cap * 2, required_cap);
        let cap = cmp::max(min_non_zero_cap(elem_layout.size()), cap);
        let cap = cap.next_multiple_of(elem_layout.align());
        let new_layout = layout_colvec(cap, elem_layout)?;
        let ptr = finish_grow(
            new_layout,
            self.current_memory(elem_layout),
            &mut self.alloc,
            fields,
            self.cap,
            cap,
            len,
        )?;
        // SAFETY: finish_grow returned a live allocation of `new_layout`,
        // which covers `cap` elements.
        unsafe { self.set_ptr_and_cap(ptr, cap) };
        Ok(())
    }
}
/// Grows (or, when `current_memory` is `None`, freshly allocates) the backing
/// buffer to `new_layout`, returning the new region.
///
/// When an old region exists it is grown in place or relocated via
/// `Allocator::grow`, after which the per-field columns are repacked for the
/// new capacity via `Fields::grow_fields`. `#[cold]` keeps this off callers'
/// hot paths.
#[cold]
fn finish_grow<const N:usize,A>(
    new_layout: Layout,
    current_memory: Option<(NonNull<u8>, Layout)>,
    alloc: &mut A,
    fields: &Fields<N>,
    old_capacity: usize,
    new_capacity: usize,
    len: usize,
) -> Result<NonNull<[u8]>, TryReserveError>
where
    A: Allocator,
{
    alloc_guard(new_layout.size())?;
    if let Some((ptr, old_layout)) = current_memory {
        debug_assert_eq!(old_layout.align(), new_layout.align());
        let memory = unsafe {
            // SAFETY: `ptr` denotes a live allocation of `old_layout` from
            // this allocator, and the alignments match (checked above; the
            // hint lets the allocator skip its own runtime check).
            hint::assert_unchecked(old_layout.align() == new_layout.align());
            alloc.grow(ptr, old_layout, new_layout)
        };
        let Ok(region) = memory else{
            return Err(AllocError { layout: new_layout }.into());
        };
        // BUGFIX: repack the columns inside the *new* region. A successful
        // `grow` may have relocated the data and freed the old block, so the
        // old `ptr` must not be used past this point.
        unsafe{ fields.grow_fields(region.cast::<u8>().as_ptr(), old_capacity, new_capacity, len) }
        Ok(region)
    } else {
        // No existing allocation (old capacity was zero): nothing to repack.
        alloc.allocate(new_layout)
            .map_err(|_| AllocError { layout: new_layout }.into())
    }
}
/// Converts a `TryReserveError` into the conventional diverging response:
/// a panic for capacity overflow, the allocator error hook (which may abort)
/// for allocation failure. `#[cold]` keeps it off hot paths.
#[cold]
#[track_caller]
fn handle_error(e: TryReserveError) -> ! {
    match e.kind() {
        CapacityOverflow => capacity_overflow(),
        AllocError { layout, .. } => allocator_api2::alloc::handle_alloc_error(layout),
    }
}
/// Rejects allocation sizes above `isize::MAX` bytes on 16/32-bit targets.
/// On 64-bit targets such sizes are unreachable in practice, so the check
/// compiles away entirely.
#[inline]
fn alloc_guard(alloc_size: usize) -> Result<(), TryReserveError> {
    let fits = usize::BITS >= 64 || alloc_size <= isize::MAX as usize;
    if fits { Ok(()) } else { Err(CapacityOverflow.into()) }
}
/// Local stand-in for `core::alloc::LayoutError` (whose constructor is not
/// public), signalling size overflow in `repeat_packed`.
struct LayoutError;

/// Layout for `n` back-to-back copies of `layout` with no padding between
/// elements, failing on `usize` overflow of the total size.
#[inline]
const fn repeat_packed(layout: &Layout, n: usize) -> Result<Layout, LayoutError> {
    match layout.size().checked_mul(n) {
        // SAFETY: the alignment comes from an existing valid `Layout`, and
        // the checked multiplication rules out size overflow.
        Some(size) => Ok(unsafe { Layout::from_size_align_unchecked(size, layout.align()) }),
        None => Err(LayoutError),
    }
}
/// Packed buffer layout for `cap` elements of `elem_layout`, translating
/// size overflow into `CapacityOverflow`.
#[inline]
fn layout_colvec(cap: usize, elem_layout: Layout) -> Result<Layout, TryReserveError> {
    match repeat_packed(&elem_layout, cap) {
        Ok(layout) => Ok(layout),
        Err(LayoutError) => Err(CapacityOverflow.into()),
    }
}