#[cfg(not(no_global_oom_handling))]
use crate::infallible;
#[cfg(test)]
use crate::WithDrop;
use crate::{
    bump_align_guard::BumpAlignGuard,
    bump_common_methods, bump_scope_methods,
    chunk_size::ChunkSize,
    const_param_assert, doc_align_cant_decrease,
    layout::{ArrayLayout, CustomLayout, LayoutProps, SizedLayout},
    polyfill::{nonnull, pointer},
    BaseAllocator, BumpScopeGuard, Checkpoint, ErrorBehavior, GuaranteedAllocatedStats, MinimumAlignment, NoDrop, RawChunk,
    SizedTypeProperties, Stats, SupportedMinimumAlignment, WithoutDealloc, WithoutShrink, DEFAULT_START_CHUNK_SIZE,
};
use allocator_api2::alloc::AllocError;
use core::{
    alloc::Layout,
    cell::Cell,
    fmt::{self, Debug},
    marker::PhantomData,
    mem::ManuallyDrop,
    num::NonZeroUsize,
    ops::Range,
    panic::{RefUnwindSafe, UnwindSafe},
    ptr::NonNull,
};
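// `BumpScope` is declared via a macro so that `maybe_default_allocator!` can
// inject the allocator type parameter `A`, supplying a default allocator type
// when the enabled crate features provide one.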
macro_rules! bump_scope_declaration {
    ($($allocator_parameter:tt)*) => {
        #[repr(transparent)]
        pub struct BumpScope<
            'a,
            $($allocator_parameter)*,
            const MIN_ALIGN: usize = 1,
            const UP: bool = true,
            const GUARANTEED_ALLOCATED: bool = true,
        > {
            /// The chunk currently being allocated from.
            pub(crate) chunk: Cell<RawChunk<UP, A>>,

            /// Ties this scope to the lifetime `'a` for which the parent
            /// `Bump` or scope is borrowed.
            marker: PhantomData<&'a ()>,
        }
    };
}
crate::maybe_default_allocator!(bump_scope_declaration);
impl<const MIN_ALIGN: usize, const UP: bool, const GUARANTEED_ALLOCATED: bool, A> UnwindSafe
    for BumpScope<'_, A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED>
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    A: BaseAllocator<GUARANTEED_ALLOCATED> + UnwindSafe,
{
}
impl<const MIN_ALIGN: usize, const UP: bool, const GUARANTEED_ALLOCATED: bool, A> RefUnwindSafe
    for BumpScope<'_, A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED>
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    A: BaseAllocator<GUARANTEED_ALLOCATED> + RefUnwindSafe,
{
}
impl<A, const MIN_ALIGN: usize, const UP: bool, const GUARANTEED_ALLOCATED: bool> Debug
    for BumpScope<'_, A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED>
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    A: BaseAllocator<GUARANTEED_ALLOCATED>,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.stats().debug_format("BumpScope", f)
    }
}
impl<'a, A, const MIN_ALIGN: usize, const UP: bool> BumpScope<'a, A, MIN_ALIGN, UP>
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    A: BaseAllocator<true>,
{
    bump_scope_methods!(BumpScopeGuard, true);
}
impl<'a, A, const MIN_ALIGN: usize, const UP: bool, const GUARANTEED_ALLOCATED: bool>
    BumpScope<'a, A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED>
where
    MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    A: BaseAllocator<GUARANTEED_ALLOCATED>,
{
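    /// Constructs a `BumpScope` from a raw chunk.
    ///
    /// # Safety
    ///
    /// `chunk` must uphold the invariants implied by this scope's type
    /// parameters, e.g. it must not be the unallocated chunk when
    /// `GUARANTEED_ALLOCATED` is true.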
    #[inline(always)]
    pub(crate) unsafe fn new_unchecked(chunk: RawChunk<UP, A>) -> Self {
        Self {
            chunk: Cell::new(chunk),
            marker: PhantomData,
        }
    }
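    /// Allocates the first chunk if this scope does not have one yet.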
    #[inline(always)]
    pub(crate) fn ensure_allocated<E: ErrorBehavior>(&self) -> Result<(), E> {
        if self.is_unallocated() {
            self.allocate_first_chunk()?;
        }
        Ok(())
    }
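    /// Cold path of [`ensure_allocated`](Self::ensure_allocated): creates the
    /// first chunk with the default start chunk size using the allocator's
    /// default.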
    #[cold]
    #[inline(never)]
    fn allocate_first_chunk<B: ErrorBehavior>(&self) -> Result<(), B> {
        debug_assert!(self.chunk.get().is_unallocated());
        let allocator = A::default_or_panic();
        let chunk = RawChunk::new_in(
            ChunkSize::new(DEFAULT_START_CHUNK_SIZE).ok_or_else(B::capacity_overflow)?,
            None,
            allocator,
        )?;
        self.chunk.set(chunk);
        Ok(())
    }
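    /// Shrinks a greedy allocation of `cap` elements down to the `len`
    /// elements that were actually initialized, returning the retained slice
    /// and handing the unused space back to the bump allocator.
    ///
    /// # Safety
    ///
    /// `start..start + cap` must be the most recent (greedy) allocation, with
    /// its first `len` elements initialized.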
    #[inline(always)]
    pub(crate) unsafe fn consolidate_greed<T>(&mut self, mut start: NonNull<T>, len: usize, cap: usize) -> NonNull<[T]> {
        let end = nonnull::add(start, len);
        if UP {
            // Bumping upwards: the `len` initialized elements already sit at
            // the start of the reserved region, so retracting the bump
            // position to `end` releases the unused `cap - len` tail.
            self.set_pos(nonnull::addr(end), T::ALIGN);
            nonnull::slice_from_raw_parts(start, len)
        } else {
            // Bumping downwards: the bump position sits below the region, so
            // move the elements to the top of the reservation and retract the
            // bump position to their new start, releasing the bytes below.
            let dst_end = nonnull::add(start, cap);
            let dst = nonnull::sub(dst_end, len);
            nonnull::copy(start, dst, len);
            start = dst;

            self.set_pos(nonnull::addr(start), T::ALIGN);
            nonnull::slice_from_raw_parts(start, len)
        }
    }
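    /// Like [`consolidate_greed`](Self::consolidate_greed), but for elements
    /// that were written downwards so that they *end* at `end`.
    ///
    /// # Safety
    ///
    /// `end - cap..end` must be the most recent (greedy) allocation, with its
    /// last `len` elements initialized.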
    #[inline(always)]
    pub(crate) unsafe fn consolidate_greed_rev<T>(&self, mut end: NonNull<T>, len: usize, cap: usize) -> NonNull<[T]> {
        let mut start = nonnull::sub(end, len);
        if UP {
            // Bumping upwards: the elements sit at the top of the reserved
            // region, so move them down to its start and retract the bump
            // position to just past their new end.
            let dst = nonnull::sub(end, cap);
            let dst_end = nonnull::add(dst, len);
            nonnull::copy(start, dst, len);
            start = dst;
            end = dst_end;

            self.set_pos(nonnull::addr(end), T::ALIGN);
            nonnull::slice_from_raw_parts(start, len)
        } else {
            // Bumping downwards: the elements already end where the
            // reservation ends, so just retract the bump position to their start.
            self.set_pos(nonnull::addr(start), T::ALIGN);
            nonnull::slice_from_raw_parts(start, len)
        }
    }
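    /// Sets the bump position of the current chunk to `pos`, which must
    /// already be aligned to `current_align`, then restores the `MIN_ALIGN`
    /// invariant if `current_align` is smaller.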
    #[inline(always)]
    fn set_pos(&self, pos: NonZeroUsize, current_align: usize) {
        let chunk = self.chunk.get();
        debug_assert_eq!(pos.get() % current_align, 0);
        unsafe { chunk.set_pos_addr(pos.get()) }
        if current_align < MIN_ALIGN {
            chunk.align_pos_to::<MIN_ALIGN>();
        }
    }
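    /// Allocates space for at least `cap` elements of `T`, returning the start
    /// of the allocation together with the number of elements that actually
    /// fit, which may exceed `cap`. The capacity is computed by dividing by
    /// `T::SIZE`, so this is presumably not meant for zero-sized `T`.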
    #[inline(always)]
    pub(crate) fn alloc_greedy<B: ErrorBehavior, T>(&mut self, cap: usize) -> Result<(NonNull<T>, usize), B> {
        let Range { start, end } = self.alloc_greedy_range::<B, T>(cap)?;
        let capacity = unsafe { nonnull::byte_sub_ptr(end, start) } / T::SIZE;
        Ok((start, capacity))
    }
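    /// Like [`alloc_greedy`](Self::alloc_greedy), but returns the *end* of the
    /// allocation, for callers that write elements downwards.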
    #[inline(always)]
    pub(crate) fn alloc_greedy_rev<B: ErrorBehavior, T>(&mut self, cap: usize) -> Result<(NonNull<T>, usize), B> {
        let Range { start, end } = self.alloc_greedy_range::<B, T>(cap)?;
        let capacity = unsafe { nonnull::byte_sub_ptr(end, start) } / T::SIZE;
        Ok((end, capacity))
    }
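    /// Allocates space for at least `cap` elements of `T`, returning the whole
    /// usable range; shared by both greedy variants above.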
    #[inline(always)]
    fn alloc_greedy_range<B: ErrorBehavior, T>(&mut self, cap: usize) -> Result<Range<NonNull<T>>, B> {
        let layout = match ArrayLayout::array::<T>(cap) {
            Ok(ok) => ok,
            Err(_) => return Err(B::capacity_overflow()),
        };
        let range = match self.chunk.get().alloc_greedy(MinimumAlignment::<MIN_ALIGN>, layout) {
            Some(ptr) => ptr,
            None => self.alloc_greedy_in_another_chunk(*layout)?,
        };
        Ok(range.start.cast::<T>()..range.end.cast::<T>())
    }
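    /// Cold path of the greedy allocation: falls back to another chunk when
    /// the current one cannot fit `layout`.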
    #[cold]
    #[inline(never)]
    pub(crate) fn alloc_greedy_in_another_chunk<E: ErrorBehavior>(&self, layout: Layout) -> Result<Range<NonNull<u8>>, E> {
        let layout = CustomLayout(layout);
        unsafe {
            self.do_custom_alloc_in_another_chunk(layout, |chunk, layout| {
                chunk.alloc_greedy(MinimumAlignment::<MIN_ALIGN>, layout)
            })
        }
    }
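    /// Attempts to allocate `layout` from the current chunk only.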
    #[inline(always)]
    pub(crate) fn alloc_in_current_chunk(&self, layout: Layout) -> Option<NonNull<u8>> {
        self.chunk.get().alloc(MinimumAlignment::<MIN_ALIGN>, CustomLayout(layout))
    }
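    /// Cold path: allocates `layout` from a successor or newly created chunk.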
    #[cold]
    #[inline(never)]
    pub(crate) fn alloc_in_another_chunk<E: ErrorBehavior>(&self, layout: Layout) -> Result<NonNull<u8>, E> {
        unsafe {
            self.do_custom_alloc_in_another_chunk(CustomLayout(layout), |chunk, layout| {
                chunk.alloc(MinimumAlignment::<MIN_ALIGN>, layout)
            })
        }
    }
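    /// Cold path: like [`alloc_in_another_chunk`](Self::alloc_in_another_chunk),
    /// but uses the chunk's `reserve` rather than `alloc`.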
    #[cold]
    #[inline(never)]
    pub(crate) fn reserve_in_another_chunk<E: ErrorBehavior>(&self, layout: Layout) -> Result<NonNull<u8>, E> {
        unsafe {
            self.do_custom_alloc_in_another_chunk(CustomLayout(layout), |chunk, layout| {
                chunk.reserve(MinimumAlignment::<MIN_ALIGN>, layout)
            })
        }
    }
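    /// Allocates a single `T`, taking the cold path to another chunk if the
    /// current one is full.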
    #[inline(always)]
    pub(crate) fn do_alloc_sized<E: ErrorBehavior, T>(&self) -> Result<NonNull<T>, E> {
        E::alloc_or_else(
            self.chunk.get(),
            MinimumAlignment::<MIN_ALIGN>,
            SizedLayout::new::<T>(),
            || self.do_alloc_sized_in_another_chunk::<E, T>(),
        )
        .map(NonNull::cast)
    }
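    /// Reserves space for a single `T`, taking the cold path to another chunk
    /// if the current one is full.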
    #[inline(always)]
    pub(crate) fn do_reserve_sized<E: ErrorBehavior, T>(&self) -> Result<NonNull<T>, E> {
        E::reserve_or_else(
            self.chunk.get(),
            MinimumAlignment::<MIN_ALIGN>,
            SizedLayout::new::<T>(),
            || self.do_reserve_sized_in_another_chunk::<E, T>(),
        )
        .map(NonNull::cast)
    }
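    /// Cold path of [`do_alloc_sized`](Self::do_alloc_sized).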
    #[cold]
    #[inline(never)]
    pub(crate) fn do_alloc_sized_in_another_chunk<E: ErrorBehavior, T>(&self) -> Result<NonNull<u8>, E>
    where
        MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    {
        self.alloc_in_another_chunk(Layout::new::<T>())
    }
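    /// Cold path of [`do_reserve_sized`](Self::do_reserve_sized).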
    #[cold]
    #[inline(never)]
    pub(crate) fn do_reserve_sized_in_another_chunk<E: ErrorBehavior, T>(&self) -> Result<NonNull<u8>, E>
    where
        MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    {
        self.reserve_in_another_chunk(Layout::new::<T>())
    }
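    /// Allocates an uninitialized slice of `len` elements of `T`.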
    #[inline(always)]
    pub(crate) fn do_alloc_slice<E: ErrorBehavior, T>(&self, len: usize) -> Result<NonNull<T>, E> {
        let layout = match ArrayLayout::array::<T>(len) {
            Ok(layout) => layout,
            Err(_) => return Err(E::capacity_overflow()),
        };
        E::alloc_or_else(self.chunk.get(), MinimumAlignment::<MIN_ALIGN>, layout, || unsafe {
            self.do_alloc_slice_in_another_chunk::<E, T>(len)
        })
        .map(NonNull::cast)
    }
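    /// Allocates an uninitialized slice with the same layout as `value`.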
    #[inline(always)]
    pub(crate) fn do_alloc_slice_for<E: ErrorBehavior, T>(&self, value: &[T]) -> Result<NonNull<T>, E> {
        let layout = ArrayLayout::for_value(value);
        E::alloc_or_else(self.chunk.get(), MinimumAlignment::<MIN_ALIGN>, layout, || unsafe {
            self.do_alloc_slice_in_another_chunk::<E, T>(value.len())
        })
        .map(NonNull::cast)
    }
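    /// Cold path of the slice allocations above.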
    #[cold]
    #[inline(never)]
    pub(crate) unsafe fn do_alloc_slice_in_another_chunk<E: ErrorBehavior, T>(&self, len: usize) -> Result<NonNull<u8>, E>
    where
        MinimumAlignment<MIN_ALIGN>: SupportedMinimumAlignment,
    {
        let layout = match Layout::array::<T>(len) {
            Ok(layout) => layout,
            Err(_) => return Err(E::capacity_overflow()),
        };
        self.alloc_in_another_chunk(layout)
    }
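    /// Aligns the bump position to `ALIGN` if it exceeds `MIN_ALIGN`;
    /// otherwise the position is already sufficiently aligned.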
    #[inline(always)]
    pub(crate) fn align<const ALIGN: usize>(&self)
    where
        MinimumAlignment<ALIGN>: SupportedMinimumAlignment,
    {
        if ALIGN > MIN_ALIGN {
            self.chunk.get().align_pos_to::<ALIGN>();
        }
    }
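    /// Aligns the bump position for a transition to a larger minimum
    /// alignment; compilation fails if `NEW_MIN_ALIGN` is smaller than
    /// `MIN_ALIGN`.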
    #[inline(always)]
    pub(crate) fn must_align_more<const NEW_MIN_ALIGN: usize>(&self)
    where
        MinimumAlignment<NEW_MIN_ALIGN>: SupportedMinimumAlignment,
    {
        const_param_assert! {
            (const MIN_ALIGN: usize, const NEW_MIN_ALIGN: usize) => NEW_MIN_ALIGN >= MIN_ALIGN, "`into_aligned` or `as_aligned_mut` can't decrease the minimum alignment"
        }
        self.align::<NEW_MIN_ALIGN>();
    }
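    /// Allocates `layout` by moving on to another chunk: reuses an
    /// already-allocated successor chunk if possible, otherwise appends a new
    /// one big enough for `layout`.
    ///
    /// # Safety
    ///
    /// `allocate` must succeed on a chunk that was freshly created to fit
    /// `layout`; otherwise the `unreachable_unchecked` below is reached.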
    #[inline(always)]
    pub(crate) unsafe fn do_custom_alloc_in_another_chunk<B: ErrorBehavior, L: LayoutProps, R>(
        &self,
        layout: L,
        mut allocate: impl FnMut(RawChunk<UP, A>, L) -> Option<R>,
    ) -> Result<R, B> {
        let new_chunk = if self.is_unallocated() {
            // No chunk has been allocated yet; create a first one sized to
            // fit `layout`.
            let allocator = A::default_or_panic();

            RawChunk::new_in(
                ChunkSize::for_capacity(*layout).ok_or_else(B::capacity_overflow)?,
                None,
                allocator,
            )
        } else {
            // Try any already-allocated successor chunks, resetting each as
            // we advance, before appending a brand-new chunk.
            while let Some(chunk) = self.chunk.get().next() {
                chunk.reset();
                self.chunk.set(chunk);

                if let Some(ptr) = allocate(chunk, layout) {
                    return Ok(ptr);
                }
            }

            self.chunk.get().append_for(*layout)
        }?;

        self.chunk.set(new_chunk);

        if let Some(ptr) = allocate(new_chunk, layout) {
            Ok(ptr)
        } else {
            // A chunk that was just created or appended to fit `layout` must
            // be able to serve the allocation.
            core::hint::unreachable_unchecked()
        }
    }
    bump_common_methods!(true);
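    /// Returns this scope by reference; handy for code that is generic over
    /// `Bump` and `BumpScope`.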
    #[inline(always)]
    pub fn as_scope(&self) -> &Self {
        self
    }
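    /// Returns this scope by mutable reference.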
    #[inline(always)]
    pub fn as_mut_scope(&mut self) -> &mut Self {
        self
    }
    #[doc = doc_align_cant_decrease!()]
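    ///
    /// A usage sketch (assumes this crate's `Bump::scoped` entry point):
    ///
    /// ```ignore
    /// let mut bump: Bump = Bump::new();
    /// bump.scoped(|scope| {
    ///     let scope = scope.into_aligned::<8>();
    ///     // allocations here keep the bump position 8-byte aligned
    /// });
    /// ```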
    #[inline(always)]
    pub fn into_aligned<const NEW_MIN_ALIGN: usize>(self) -> BumpScope<'a, A, NEW_MIN_ALIGN, UP, GUARANTEED_ALLOCATED>
    where
        MinimumAlignment<NEW_MIN_ALIGN>: SupportedMinimumAlignment,
    {
        self.must_align_more::<NEW_MIN_ALIGN>();
        unsafe { self.cast_align() }
    }
    #[doc = doc_align_cant_decrease!()]
    #[inline(always)]
    pub fn as_aligned_mut<const NEW_MIN_ALIGN: usize>(
        &mut self,
    ) -> &mut BumpScope<'a, A, NEW_MIN_ALIGN, UP, GUARANTEED_ALLOCATED>
    where
        MinimumAlignment<NEW_MIN_ALIGN>: SupportedMinimumAlignment,
    {
        self.must_align_more::<NEW_MIN_ALIGN>();
        unsafe { self.cast_align_mut() }
    }
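    /// Reinterprets this scope with a different `MIN_ALIGN`.
    ///
    /// # Safety
    ///
    /// The bump position must already satisfy `NEW_MIN_ALIGN`.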
    #[inline(always)]
    pub(crate) unsafe fn cast_align<const NEW_MIN_ALIGN: usize>(
        self,
    ) -> BumpScope<'a, A, NEW_MIN_ALIGN, UP, GUARANTEED_ALLOCATED>
    where
        MinimumAlignment<NEW_MIN_ALIGN>: SupportedMinimumAlignment,
    {
        BumpScope {
            chunk: self.chunk,
            marker: PhantomData,
        }
    }
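    /// Mutable-reference version of [`cast_align`](Self::cast_align); the same
    /// safety requirement applies.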
    #[inline(always)]
    pub(crate) unsafe fn cast_align_mut<const NEW_MIN_ALIGN: usize>(
        &mut self,
    ) -> &mut BumpScope<'a, A, NEW_MIN_ALIGN, UP, GUARANTEED_ALLOCATED>
    where
        MinimumAlignment<NEW_MIN_ALIGN>: SupportedMinimumAlignment,
    {
        &mut *pointer::from_mut(self).cast::<BumpScope<'a, A, NEW_MIN_ALIGN, UP, GUARANTEED_ALLOCATED>>()
    }
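    /// Converts this scope into one whose `GUARANTEED_ALLOCATED` parameter is
    /// `true`, allocating the first chunk if none exists yet.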
    #[cfg(not(no_global_oom_handling))]
    pub fn into_guaranteed_allocated(self) -> BumpScope<'a, A, MIN_ALIGN, UP> {
        infallible(self.generic_into_guaranteed_allocated())
    }
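    /// Fallible counterpart of `into_guaranteed_allocated`.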
    pub fn try_into_guaranteed_allocated(self) -> Result<BumpScope<'a, A, MIN_ALIGN, UP>, AllocError> {
        self.generic_into_guaranteed_allocated()
    }
    fn generic_into_guaranteed_allocated<E: ErrorBehavior>(self) -> Result<BumpScope<'a, A, MIN_ALIGN, UP>, E> {
        self.as_scope().ensure_allocated()?;
        Ok(unsafe { self.cast_allocated() })
    }
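    /// Borrowing version of `into_guaranteed_allocated`: ensures a chunk is
    /// allocated and reinterprets `self` accordingly.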
    #[cfg(not(no_global_oom_handling))]
    pub fn as_guaranteed_allocated(&self) -> &BumpScope<'a, A, MIN_ALIGN, UP> {
        infallible(self.generic_as_guaranteed_allocated())
    }
    pub fn try_as_guaranteed_allocated(&self) -> Result<&BumpScope<'a, A, MIN_ALIGN, UP>, AllocError> {
        self.generic_as_guaranteed_allocated()
    }
    fn generic_as_guaranteed_allocated<E: ErrorBehavior>(&self) -> Result<&BumpScope<'a, A, MIN_ALIGN, UP>, E> {
        self.as_scope().ensure_allocated()?;
        Ok(unsafe { self.cast_allocated_ref() })
    }
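    /// Mutable counterpart of `as_guaranteed_allocated`.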
    #[cfg(not(no_global_oom_handling))]
    pub fn as_guaranteed_allocated_mut(&mut self) -> &mut BumpScope<'a, A, MIN_ALIGN, UP> {
        infallible(self.generic_as_guaranteed_allocated_mut())
    }
    pub fn try_as_guaranteed_allocated_mut(&mut self) -> Result<&mut BumpScope<'a, A, MIN_ALIGN, UP>, AllocError> {
        self.generic_as_guaranteed_allocated_mut()
    }
    fn generic_as_guaranteed_allocated_mut<E: ErrorBehavior>(&mut self) -> Result<&mut BumpScope<'a, A, MIN_ALIGN, UP>, E> {
        self.as_scope().ensure_allocated()?;
        Ok(unsafe { self.cast_allocated_mut() })
    }
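    /// Reinterprets this scope as guaranteed-allocated.
    ///
    /// # Safety
    ///
    /// The scope must actually have an allocated chunk.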
    #[inline(always)]
    pub(crate) unsafe fn cast_allocated(self) -> BumpScope<'a, A, MIN_ALIGN, UP> {
        BumpScope {
            chunk: self.chunk,
            marker: PhantomData,
        }
    }
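    /// Reference version of [`cast_allocated`](Self::cast_allocated); the same
    /// safety requirement applies.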
    #[inline(always)]
    pub(crate) unsafe fn cast_allocated_ref(&self) -> &BumpScope<'a, A, MIN_ALIGN, UP> {
        &*pointer::from_ref(self).cast::<BumpScope<'a, A, MIN_ALIGN, UP>>()
    }
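    /// Mutable version of [`cast_allocated`](Self::cast_allocated); the same
    /// safety requirement applies.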
    #[inline(always)]
    pub(crate) unsafe fn cast_allocated_mut(&mut self) -> &mut BumpScope<'a, A, MIN_ALIGN, UP> {
        &mut *pointer::from_mut(self).cast::<BumpScope<'a, A, MIN_ALIGN, UP>>()
    }
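    /// Creates a second handle to the same chunk list.
    ///
    /// # Safety
    ///
    /// The clone aliases `self`; callers must ensure the two handles are not
    /// used in ways that would violate the aliasing rules the borrowed APIs
    /// normally enforce.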
    #[inline(always)]
    pub(crate) unsafe fn clone_unchecked(&self) -> BumpScope<'a, A, MIN_ALIGN, UP, GUARANTEED_ALLOCATED> {
        BumpScope::new_unchecked(self.chunk.get())
    }
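    /// Turns this scope into a raw pointer to its current chunk header.
    ///
    /// A sketch of a round trip through the raw form (hypothetical usage; the
    /// type parameters must match on both ends):
    ///
    /// ```ignore
    /// let raw = scope.into_raw();
    /// // ... carry `raw` across a type-erased boundary ...
    /// let scope = unsafe { BumpScope::from_raw(raw) };
    /// ```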
    #[inline]
    #[must_use]
    pub fn into_raw(self) -> NonNull<()> {
        let this = ManuallyDrop::new(self);
        this.chunk.get().header_ptr().cast()
    }
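    /// Rebuilds a scope from a pointer previously returned by
    /// [`into_raw`](Self::into_raw).
    ///
    /// # Safety
    ///
    /// `ptr` must have been returned by `into_raw` of a `BumpScope` with
    /// identical type parameters, and the original scope's lifetime `'a` must
    /// still be valid.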
    #[inline]
    #[must_use]
    pub unsafe fn from_raw(ptr: NonNull<()>) -> Self {
        let chunk = Cell::new(RawChunk::from_header(ptr.cast()));
        Self {
            chunk,
            marker: PhantomData,
        }
    }
}
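// A `BumpScope` has no drop glue of its own; its chunks are owned and freed
// by the `Bump` or scope guard it was created from, so it can opt into `NoDrop`.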
impl<A, const MIN_ALIGN: usize, const UP: bool> NoDrop for BumpScope<'_, A, MIN_ALIGN, UP> {}