use crate::{
    bumping::{bump_down, bump_greedy_down, bump_greedy_up, bump_up, BumpProps, BumpUp},
    down_align_usize,
    layout::LayoutProps,
    polyfill::{const_unwrap, nonnull, pointer},
    unallocated_chunk_header, up_align_usize_unchecked, ChunkHeader, ChunkSize, ErrorBehavior, MinimumAlignment,
    SupportedMinimumAlignment, CHUNK_ALIGN_MIN,
};
use allocator_api2::alloc::{AllocError, Allocator};
use core::{alloc::Layout, cell::Cell, mem::align_of, num::NonZeroUsize, ops::Range, ptr::NonNull};
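
/// A reference to an allocated chunk, represented as a pointer to its header.
///
/// `UP` selects the bump direction: `true` bumps from low to high addresses,
/// `false` from high to low. `A` is the allocator the chunk was allocated
/// with; it is stored inside the header.
///
/// This is a `Copy` handle; all mutation happens through the header it points to.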
#[repr(transparent)]
pub(crate) struct RawChunk<const UP: bool, A> {
    header: NonNull<ChunkHeader<A>>,
}

impl<const UP: bool, A> Copy for RawChunk<UP, A> {}
#[allow(clippy::expl_impl_clone_on_copy)]
impl<const UP: bool, A> Clone for RawChunk<UP, A> {
    #[inline(always)]
    fn clone(&self) -> Self {
        *self
    }
}

impl<const UP: bool, A> PartialEq for RawChunk<UP, A> {
    #[inline(always)]
    fn eq(&self, other: &Self) -> bool {
        self.header == other.header
    }

    #[inline(always)]
    fn ne(&self, other: &Self) -> bool {
        self.header != other.header
    }
}
impl<const UP: bool, A> Eq for RawChunk<UP, A> {}

impl<const UP: bool, A> RawChunk<UP, A> {
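    /// Allocates a new chunk of the given `size` from `allocator` and writes
    /// its header: at the start of the chunk when bumping upwards, at the end
    /// when bumping downwards. `prev` links the new chunk to the chunk it was
    /// appended to, if any.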
    pub(crate) fn new_in<E: ErrorBehavior>(size: ChunkSize<UP, A>, prev: Option<Self>, allocator: A) -> Result<Self, E>
    where
        A: Allocator,
        for<'a> &'a A: Allocator,
    {
        let layout = size.layout();
        let allocation = match allocator.allocate(layout) {
            Ok(ok) => ok,
            Err(AllocError) => return Err(E::allocation(layout)),
        };
        // When bumping downwards, the header is written at the end of the chunk,
        // so the truncated size must also keep the header's alignment.
        let downwards_align = if UP {
            CHUNK_ALIGN_MIN
        } else {
            CHUNK_ALIGN_MIN.max(align_of::<ChunkHeader<A>>())
        };
        let size = down_align_usize(allocation.len(), downwards_align);
        debug_assert!(size >= layout.size());
        debug_assert!(size % CHUNK_ALIGN_MIN == 0);
        let ptr = allocation.cast::<u8>();
        let prev = prev.map(|c| c.header);
        let next = Cell::new(None);
        let header = unsafe {
            if UP {
                // Bumping upwards: the header sits at the start of the chunk
                // and the bump position starts just past it.
                let header = ptr.cast::<ChunkHeader<A>>();
                header.as_ptr().write(ChunkHeader {
                    pos: Cell::new(nonnull::add(header, 1).cast()),
                    end: nonnull::add(ptr, size),
                    prev,
                    next,
                    allocator,
                });
                header
            } else {
                // Bumping downwards: the header sits at the end of the chunk,
                // the bump position starts at the header, and the `end` field
                // stores the chunk's lowest address.
                let header = nonnull::sub(nonnull::add(ptr, size).cast::<ChunkHeader<A>>(), 1);
                header.as_ptr().write(ChunkHeader {
                    pos: Cell::new(header.cast()),
                    end: ptr,
                    prev,
                    next,
                    allocator,
                });
                header
            }
        };
        Ok(RawChunk { header })
    }
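
    /// Returns the pointer to this chunk's header.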
    pub(crate) fn header_ptr(self) -> NonNull<ChunkHeader<A>> {
        self.header
    }
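
    /// Creates a `RawChunk` from a raw header pointer.
    ///
    /// # Safety
    ///
    /// `header` must point to a valid chunk header, or be the statically
    /// allocated "unallocated" header.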
    pub(crate) const unsafe fn from_header(header: NonNull<ChunkHeader<A>>) -> Self {
        Self { header }
    }
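
    /// Returns `true` if this is the sentinel header used before any real
    /// chunk has been allocated.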
    pub(crate) fn is_unallocated(self) -> bool {
        self.header.cast() == unallocated_chunk_header()
    }
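
    /// Attempts to allocate `layout` in this chunk, advancing the bump
    /// position. Returns `None` if the remaining space is insufficient.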
    #[inline(always)]
    pub(crate) fn alloc<M, L>(self, minimum_alignment: M, layout: L) -> Option<NonNull<u8>>
    where
        M: SupportedMinimumAlignment,
        L: LayoutProps,
    {
        self.alloc_or_else(minimum_alignment, layout, || Err(())).ok()
    }
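
    /// Like [`alloc`](Self::alloc), but calls `f` as a fallback when this
    /// chunk does not have enough space.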
    #[inline(always)]
    pub(crate) fn alloc_or_else<M, L, E, F>(self, minimum_alignment: M, layout: L, f: F) -> Result<NonNull<u8>, E>
    where
        M: SupportedMinimumAlignment,
        L: LayoutProps,
        F: FnOnce() -> Result<NonNull<u8>, E>,
    {
        let props = self.bump_props(minimum_alignment, layout);
        unsafe {
            if UP {
                match bump_up(props) {
                    Some(BumpUp { new_pos, ptr }) => {
                        // Upwards: the allocation starts at `ptr` and the bump
                        // position advances past it to `new_pos`.
                        self.set_pos(self.with_addr(new_pos));
                        Ok(self.with_addr(ptr))
                    }
                    None => f(),
                }
            } else {
                match bump_down(props) {
                    Some(ptr) => {
                        // Downwards: the new bump position is the start of the
                        // allocation itself.
                        let ptr = self.with_addr(ptr);
                        self.set_pos(ptr);
                        Ok(ptr)
                    }
                    None => f(),
                }
            }
        }
    }
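
    /// Like [`alloc`](Self::alloc), but only computes the pointer the
    /// allocation would get, without advancing the bump position.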
    #[inline(always)]
    pub(crate) fn reserve<M, L>(self, minimum_alignment: M, layout: L) -> Option<NonNull<u8>>
    where
        M: SupportedMinimumAlignment,
        L: LayoutProps,
    {
        self.reserve_or_else(minimum_alignment, layout, || Err(())).ok()
    }
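
    /// Like [`reserve`](Self::reserve), but calls `f` as a fallback when this
    /// chunk does not have enough space.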
    #[inline(always)]
    pub(crate) fn reserve_or_else<M, L, E, F>(self, minimum_alignment: M, layout: L, f: F) -> Result<NonNull<u8>, E>
    where
        M: SupportedMinimumAlignment,
        L: LayoutProps,
        F: FnOnce() -> Result<NonNull<u8>, E>,
    {
        let props = self.bump_props(minimum_alignment, layout);
        unsafe {
            if UP {
                match bump_up(props) {
                    Some(BumpUp { ptr, .. }) => Ok(self.with_addr(ptr)),
                    None => f(),
                }
            } else {
                match bump_down(props) {
                    Some(ptr) => Ok(self.with_addr(ptr)),
                    None => f(),
                }
            }
        }
    }
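
    /// Gathers this chunk's remaining address range and the compile-time
    /// properties of `layout` into the `BumpProps` consumed by the bumping
    /// functions.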
    #[inline(always)]
    pub(crate) fn bump_props<M, L>(self, _: M, layout: L) -> BumpProps
    where
        M: SupportedMinimumAlignment,
        L: LayoutProps,
    {
        debug_assert!(nonnull::is_aligned_to(self.pos(), M::MIN_ALIGN));
        let remaining = self.remaining_range();
        BumpProps {
            start: nonnull::addr(remaining.start).get(),
            end: nonnull::addr(remaining.end).get(),
            layout: *layout,
            min_align: M::MIN_ALIGN,
            align_is_const: L::ALIGN_IS_CONST,
            size_is_const: L::SIZE_IS_CONST,
            size_is_multiple_of_align: L::SIZE_IS_MULTIPLE_OF_ALIGN,
        }
    }
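
    /// Like [`reserve`](Self::reserve), but returns the entire remaining
    /// range usable for `layout` instead of a pointer sized to it, without
    /// advancing the bump position. Returns `None` if `layout` does not fit.
    ///
    /// `layout.size()` must not be zero.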
    #[inline(always)]
    pub(crate) fn alloc_greedy<M, L>(self, minimum_alignment: M, layout: L) -> Option<Range<NonNull<u8>>>
    where
        M: SupportedMinimumAlignment,
        L: LayoutProps,
    {
        debug_assert_ne!(layout.size(), 0);
        let props = self.bump_props(minimum_alignment, layout);
        unsafe {
            if UP {
                let range = bump_greedy_up(props)?;
                Some(self.with_addr_range(range))
            } else {
                let range = bump_greedy_down(props)?;
                Some(self.with_addr_range(range))
            }
        }
    }
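
    /// Aligns the bump position to `ALIGN`: rounding up when bumping upwards
    /// and down when bumping downwards. Rounding in the bump direction cannot
    /// leave the chunk, because the chunk's boundaries are aligned to
    /// `CHUNK_ALIGN_MIN`.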
    #[inline(always)]
    pub(crate) fn align_pos_to<const ALIGN: usize>(self)
    where
        MinimumAlignment<ALIGN>: SupportedMinimumAlignment,
    {
        let mut pos = nonnull::addr(self.pos()).get();
        if UP {
            pos = up_align_usize_unchecked(pos, ALIGN);
        } else {
            pos = down_align_usize(pos, ALIGN);
        }
        unsafe { self.set_pos(self.with_addr(pos)) }
    }
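
    /// Returns the pointer just past this chunk's header.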
    #[inline(always)]
    fn after_header(self) -> NonNull<u8> {
        unsafe { nonnull::add(self.header, 1).cast() }
    }
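
    /// Returns the lowest address of the chunk, including the header. When
    /// bumping downwards this is stored in the header's `end` field.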
    #[inline(always)]
    pub(crate) fn chunk_start(self) -> NonNull<u8> {
        unsafe {
            if UP {
                self.header.cast()
            } else {
                self.header.as_ref().end
            }
        }
    }
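
    /// Returns one past the highest address of the chunk, including the
    /// header.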
    #[inline(always)]
    pub(crate) fn chunk_end(self) -> NonNull<u8> {
        unsafe {
            if UP {
                self.header.as_ref().end
            } else {
                self.after_header()
            }
        }
    }
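
    /// Returns the start of the region available for allocations, i.e. the
    /// chunk minus its header.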
    #[inline(always)]
    pub(crate) fn content_start(self) -> NonNull<u8> {
        if UP {
            self.after_header()
        } else {
            self.chunk_start()
        }
    }
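
    /// Returns the end of the region available for allocations.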
    #[inline(always)]
    pub(crate) fn content_end(self) -> NonNull<u8> {
        if UP {
            self.chunk_end()
        } else {
            self.header.cast()
        }
    }
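
    /// Returns the current bump position.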
    #[inline(always)]
    pub(crate) fn pos(self) -> NonNull<u8> {
        unsafe { self.header.as_ref().pos.get() }
    }
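
    /// Sets the current bump position.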
    #[inline(always)]
    pub(crate) fn set_pos(self, ptr: NonNull<u8>) {
        unsafe { self.header.as_ref().pos.set(ptr) }
    }
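
    /// Sets the current bump position from an address.
    ///
    /// # Safety
    ///
    /// `addr` must lie within this chunk's content range (end inclusive).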
    #[inline(always)]
    pub(crate) unsafe fn set_pos_addr(self, addr: usize) {
        let ptr = self.with_addr(addr);
        self.set_pos(ptr);
    }
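
    /// Creates a pointer into this chunk from an address.
    ///
    /// # Safety
    ///
    /// `addr` must be nonzero and lie within this chunk's content range
    /// (end inclusive).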
    #[inline(always)]
    pub(crate) unsafe fn with_addr(self, addr: usize) -> NonNull<u8> {
        debug_assert!(self.contains_addr_or_end(addr));
        let ptr = self.header.cast();
        let addr = NonZeroUsize::new_unchecked(addr);
        nonnull::with_addr(ptr, addr)
    }
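
    /// Creates a pointer range into this chunk from an address range.
    ///
    /// # Safety
    ///
    /// Same as [`with_addr`](Self::with_addr), for both ends of the range;
    /// `range.start` must not exceed `range.end`.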
    #[inline(always)]
    pub(crate) unsafe fn with_addr_range(self, range: Range<usize>) -> Range<NonNull<u8>> {
        debug_assert!(range.start <= range.end);
        let start = self.with_addr(range.start);
        let end = self.with_addr(range.end);
        start..end
    }
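
    /// Returns `true` if `addr` lies within this chunk's content range,
    /// including the one-past-the-end address.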
    #[inline(always)]
    pub(crate) fn contains_addr_or_end(self, addr: usize) -> bool {
        let start = nonnull::addr(self.content_start()).get();
        let end = nonnull::addr(self.content_end()).get();
        addr >= start && addr <= end
    }
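
    /// Returns the previous chunk in the list, if any.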
    #[inline(always)]
    pub(crate) fn prev(self) -> Option<Self> {
        unsafe { Some(Self::from_header(self.header.as_ref().prev?)) }
    }
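
    /// Returns the next chunk in the list, if any.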
    #[inline(always)]
    pub(crate) fn next(self) -> Option<Self> {
        unsafe { Some(Self::from_header(self.header.as_ref().next.get()?)) }
    }
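
    /// Returns the size of the content range, i.e. the total number of bytes
    /// this chunk can hold, used or not.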
    #[inline(always)]
    pub(crate) fn capacity(self) -> usize {
        let start = nonnull::addr(self.content_start()).get();
        let end = nonnull::addr(self.content_end()).get();
        end - start
    }
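
    /// Returns the part of the content range that currently holds
    /// allocations: below `pos` when bumping upwards, above it when bumping
    /// downwards.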
    #[inline(always)]
    fn allocated_range(self) -> Range<NonNull<u8>> {
        if UP {
            self.content_start()..self.pos()
        } else {
            self.pos()..self.content_end()
        }
    }
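
    /// Returns the number of currently allocated bytes in this chunk.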
    #[inline(always)]
    pub(crate) fn allocated(self) -> usize {
        let range = self.allocated_range();
        let start = nonnull::addr(range.start).get();
        let end = nonnull::addr(range.end).get();
        end - start
    }
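
    /// Returns the number of bytes still free in this chunk.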
    #[inline(always)]
    pub(crate) fn remaining(self) -> usize {
        let range = self.remaining_range();
        let start = nonnull::addr(range.start).get();
        let end = nonnull::addr(range.end).get();
        end - start
    }
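
    /// Returns the part of the content range that is still free.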
    pub(crate) fn remaining_range(self) -> Range<NonNull<u8>> {
        if UP {
            let start = self.pos();
            let end = self.content_end();
            start..end
        } else {
            let start = self.content_start();
            let end = self.pos();
            start..end
        }
    }
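
    /// Returns the size in bytes of the chunk's allocation, including the
    /// header.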
    #[inline(always)]
    pub(crate) fn size(self) -> NonZeroUsize {
        let start = nonnull::addr(self.chunk_start()).get();
        let end = nonnull::addr(self.chunk_end()).get();
        unsafe { NonZeroUsize::new_unchecked(end - start) }
    }
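
    /// Returns the layout with which this chunk can be deallocated.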
    #[inline(always)]
    pub(crate) fn layout(self) -> Layout {
        unsafe { Layout::from_size_align_unchecked(self.size().get(), align_of::<ChunkHeader<A>>()) }
    }
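
    /// Returns twice this chunk's size as the size for the next chunk,
    /// erroring on overflow, so that chunk sizes grow geometrically.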
    #[inline(always)]
    fn grow_size<B: ErrorBehavior>(self) -> Result<ChunkSize<UP, A>, B> {
        const TWO: NonZeroUsize = const_unwrap(NonZeroUsize::new(2));
        let size = match self.size().checked_mul(TWO) {
            Some(size) => size,
            None => return Err(B::capacity_overflow()),
        };
        ChunkSize::<UP, A>::new(size.get()).ok_or_else(B::capacity_overflow)
    }
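
    /// Allocates a new chunk that can hold `layout`, links it after this one
    /// and returns it. The new chunk is at least twice the size of this one,
    /// which must currently be the last chunk in the list.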
    pub(crate) fn append_for<B: ErrorBehavior>(self, layout: Layout) -> Result<Self, B>
    where
        A: Allocator + Clone,
    {
        debug_assert!(self.next().is_none());
        let required_size = ChunkSize::for_capacity(layout).ok_or_else(B::capacity_overflow)?;
        let grown_size = self.grow_size()?;
        let size = required_size.max(grown_size);
        let allocator = unsafe { self.header.as_ref().allocator.clone() };
        let new_chunk = RawChunk::new_in::<B>(size, Some(self), allocator)?;
        self.set_next(Some(new_chunk));
        Ok(new_chunk)
    }
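
    /// Resets the bump position to the start of the content range (upwards)
    /// or its end (downwards), making the whole chunk free again.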
    #[inline(always)]
    pub(crate) fn reset(self) {
        if UP {
            self.set_pos(self.content_start());
        } else {
            self.set_pos(self.content_end());
        }
    }
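
    /// Resets the bump position to `addr`.
    ///
    /// # Safety
    ///
    /// `addr` must lie within this chunk's content range (end inclusive).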
    #[inline(always)]
    pub(crate) unsafe fn reset_to(self, addr: usize) {
        let ptr = self.with_addr(addr);
        self.set_pos(ptr);
    }
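
    /// Deallocates this chunk.
    ///
    /// # Safety
    ///
    /// This chunk must not be used afterwards. The allocator is read out of
    /// the header by value before the memory is freed, so the header's copy
    /// of it must never be used or dropped again.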
    pub(crate) unsafe fn deallocate(self)
    where
        A: Allocator,
    {
        let ptr = self.chunk_start();
        let layout = self.layout();
        let allocator_ptr = pointer::from_ref(&self.header.as_ref().allocator);
        let allocator = allocator_ptr.read();
        allocator.deallocate(ptr, layout);
    }
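
    /// Sets the previous chunk. Unlike `next`, `prev` is a plain field rather
    /// than a `Cell`, so it is written through a mutable reference to the
    /// header.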
    #[inline(always)]
    pub(crate) fn set_prev(mut self, value: Option<Self>) {
        unsafe {
            self.header.as_mut().prev = value.map(|c| c.header);
        }
    }
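
    /// Sets the next chunk.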
    #[inline(always)]
    pub(crate) fn set_next(self, value: Option<Self>) {
        unsafe {
            self.header.as_ref().next.set(value.map(|c| c.header));
        }
    }
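
    /// Calls `f` for every chunk before this one, from newest to oldest.
    /// The link is read before `f` runs, so `f` may deallocate the chunk it
    /// is given.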
    pub(crate) fn for_each_prev(self, mut f: impl FnMut(Self)) {
        let mut iter = self.prev();
        while let Some(chunk) = iter {
            iter = chunk.prev();
            f(chunk);
        }
    }
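
    /// Calls `f` for every chunk after this one, from oldest to newest.
    /// The link is read before `f` runs, so `f` may deallocate the chunk it
    /// is given.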
    pub(crate) fn for_each_next(self, mut f: impl FnMut(Self)) {
        let mut iter = self.next();
        while let Some(chunk) = iter {
            iter = chunk.next();
            f(chunk);
        }
    }
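
    /// Returns a pointer to the allocator stored in this chunk's header.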
    #[inline(always)]
    pub(crate) fn allocator(self) -> NonNull<A> {
        unsafe { NonNull::from(&self.header.as_ref().allocator) }
    }
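
    /// Returns this chunk with the allocator type erased.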
    #[inline(always)]
    pub(crate) fn without_allocator(self) -> RawChunk<UP, ()> {
        RawChunk {
            header: self.header.cast(),
        }
    }
}