use super::types::OgreAllocator;
use std::{
    sync::atomic::{
            self,
            AtomicU32,
            Ordering::{Acquire, Relaxed, Release},
        },
    ops::{Deref, DerefMut},
    fmt::{Debug, Display, Formatter},
    marker::PhantomData,
    ptr::NonNull,
};
/// A reference-counted smart pointer -- in the spirit of [`std::sync::Arc`] -- whose data lives
/// in a slot of an [`OgreAllocator`] (identified by a `u32` slot id) rather than in its own heap
/// allocation. Cloning bumps an atomic count in a shared, heap-leaked control block
/// ([`InnerOgreArc`]); when the last clone is dropped, the slot is returned to the allocator
/// and the control block is reclaimed.
pub struct OgreArc<DataType:          Debug + Send + Sync,
                   OgreAllocatorType: OgreAllocator<DataType> + Send + Sync + 'static> {
    // Shared control block, created in `from_allocated_with_clones()` (via `Box::leak()`)
    // and reclaimed in `Drop` when the reference count reaches zero.
    inner:    NonNull<InnerOgreArc<DataType, OgreAllocatorType>>,
}
impl<DataType:          Debug + Send + Sync,
     OgreAllocatorType: OgreAllocator<DataType> + Send + Sync>
OgreArc<DataType, OgreAllocatorType> {

    /// Allocates a slot from `allocator` and wraps it in a new [`OgreArc`] (reference count = 1),
    /// returning the arc together with a mutable reference for the caller to initialize the data.\
    /// Returns `None` when the allocator has no free slots.
    #[inline(always)]
    pub fn new(allocator: &OgreAllocatorType) -> Option<(Self, &mut DataType)> {
        allocator.alloc_ref()
            .map(|(reference, data_id)| {
                ( Self::from_allocated(data_id, allocator),
                  reference )
        })
    }

    /// Allocates a slot, initializes it with `setter` and returns a single [`OgreArc`] for it.\
    /// Returns `None` when the allocator has no free slots.
    #[inline(always)]
    pub fn new_with<F: FnOnce(&mut DataType)>(setter: F, allocator: &OgreAllocatorType) -> Option<Self> {
        Self::new_with_clones::<1, F>(setter, allocator)
            // SAFETY: `new_with_clones::<1, _>` always yields exactly 1 element on success
            .map(|ogre_arcs| unsafe { ogre_arcs.into_iter().next().unwrap_unchecked() })
    }

    /// Allocates a single slot, initializes it with `setter` and returns `COUNT` [`OgreArc`]s
    /// all sharing it (the shared reference count starts at `COUNT`).\
    /// Returns `None` when the allocator has no free slots.
    #[inline(always)]
    pub fn new_with_clones<const COUNT: usize,
                           F:           FnOnce(&mut DataType)>
                          (setter:    F,
                           allocator: &OgreAllocatorType)
                          -> Option<[Self; COUNT]> {
        allocator.alloc_ref()
            .map(|(slot_ref, slot_id)| {
                // initialize the slot before any `OgreArc` can observe it
                setter(slot_ref);
                Self::from_allocated_with_clones::<COUNT>(slot_id, allocator)
            })
    }

    /// Wraps the already-allocated slot `data_id` (obtained from `allocator`) into a single
    /// [`OgreArc`] with a reference count of 1.
    #[inline(always)]
    pub fn from_allocated(data_id: u32, allocator: &OgreAllocatorType) -> Self {
        // SAFETY: `from_allocated_with_clones::<1>` always yields exactly 1 element
        unsafe { Self::from_allocated_with_clones::<1>(data_id, allocator).into_iter().next().unwrap_unchecked() }
    }

    /// Wraps the already-allocated slot `data_id` (obtained from `allocator`) into `COUNT`
    /// [`OgreArc`]s sharing a single control block whose reference count starts at `COUNT`.
    #[inline(always)]
    pub fn from_allocated_with_clones<const COUNT: usize>(data_id: u32, allocator: &OgreAllocatorType) -> [Self; COUNT] {
        // FIX: with zero clones requested, no instance would ever exist to run `Drop`, so the
        // control block and the allocator slot would leak forever. Return the slot to the
        // allocator instead and skip creating the control block entirely.
        if COUNT == 0 {
            allocator.dealloc_id(data_id);
            // the closure is never called for a zero-length array
            return std::array::from_fn(|_| unreachable!());
        }
        // NOTE(review): the allocator reference has its lifetime extended to `'static` here;
        // this assumes the allocator outlives every `OgreArc` handed out from it -- TODO confirm
        let inner = Box::new(InnerOgreArc {     allocator: unsafe { &*(allocator as *const OgreAllocatorType) },
            data_id,
            references_count: AtomicU32::new(COUNT as u32),
            _phantom:         PhantomData,
        });
        // leak the Box: ownership is reclaimed in `Drop` when the count reaches zero
        let inner: NonNull<InnerOgreArc<DataType, OgreAllocatorType>> = Box::leak(inner).into();
        [0; COUNT].map(|_| Self {
            inner,
        })
    }

    /// Adds `count` to the shared reference count in a single atomic operation -- cheaper than
    /// `count` individual `clone()`s -- returning `&self` for chaining.
    ///
    /// # Safety
    ///
    /// Each unit of `count` must eventually be matched by an extra instance (e.g. produced via
    /// [`Self::raw_copy()`]) being dropped -- otherwise the slot and control block leak.
    #[inline(always)]
    pub unsafe fn increment_references(&self, count: u32) -> &Self {
        let inner = unsafe { self.inner.as_ref() };
        // Relaxed suffices for increments: this live reference already keeps the data alive
        inner.references_count.fetch_add(count, Relaxed);
        self
    }

    /// Produces a bitwise copy of this instance *without* touching the reference count.
    ///
    /// # Safety
    ///
    /// The copy must have been pre-paid for with [`Self::increment_references()`]; otherwise the
    /// data will be freed while copies are still alive.
    #[inline(always)]
    pub unsafe fn raw_copy(&self) -> Self {
        Self {
            inner: self.inner,
        }
    }

    /// Returns the number of [`OgreArc`]s currently sharing the data -- intended for
    /// metrics/debugging: the value may be stale as soon as it is read.
    #[inline(always)]
    pub fn references_count(&self) -> u32 {
        let inner = unsafe { self.inner.as_ref() };
        inner.references_count.load(Relaxed)
    }
}
impl<DataType:          Debug + Send + Sync,
     OgreAllocatorType: OgreAllocator<DataType> + Send + Sync>
Default for
OgreArc<DataType, OgreAllocatorType> {
    /// Builds a placeholder instance whose control-block pointer is *dangling*.
    ///
    /// NOTE(review): any use of such an instance other than overwriting it -- dereferencing,
    /// cloning, `Debug`/`Display` formatting, or letting it drop -- follows the dangling
    /// pointer and is undefined behavior. Verify that every `default()` call site replaces
    /// the value (e.g. via `std::mem::replace`) before it can be used or dropped.
    #[inline(always)]
    fn default() -> Self {
        Self { inner: NonNull::dangling() }
    }
}
impl<DataType:          Debug + Send + Sync,
     OgreAllocatorType: OgreAllocator<DataType> + Send + Sync>
Deref for
OgreArc<DataType, OgreAllocatorType> {
    type Target = DataType;
    /// Resolves the stored slot id through the allocator to reach the shared data.
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        // SAFETY: for any instance built through the constructors, `inner` points to the
        // leaked control block, which stays alive until the last reference is dropped
        let shared = unsafe { self.inner.as_ref() };
        shared.allocator.ref_from_id(shared.data_id)
    }
}
impl<DataType:          Debug + Send + Sync,
     OgreAllocatorType: OgreAllocator<DataType> + Send + Sync>
DerefMut for
OgreArc<DataType, OgreAllocatorType> {
    /// Resolves the stored slot id through the allocator, yielding mutable access.
    ///
    /// NOTE(review): this hands out `&mut` to data that other clones may be reading
    /// concurrently (the reference count is not checked) -- confirm callers only use
    /// this while the arc is uniquely held.
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: `inner` points to the leaked control block, alive until the last drop
        let shared = unsafe { self.inner.as_ref() };
        shared.allocator.ref_from_id(shared.data_id)
    }
}
impl<DataType:         Debug + Send + Sync,
    OgreAllocatorType: OgreAllocator<DataType> + Send + Sync>
Clone for
OgreArc<DataType, OgreAllocatorType> {
    /// Produces another handle to the same slot, bumping the shared reference count.
    #[inline(always)]
    fn clone(&self) -> Self {
        // Relaxed suffices for increments (same protocol as `std::sync::Arc::clone()`):
        // this live reference already keeps the data alive
        unsafe { self.inner.as_ref() }
            .references_count
            .fetch_add(1, Relaxed);
        Self { inner: self.inner }
    }
}
impl<DataType:          Debug + Send + Sync + Display,
     OgreAllocatorType: OgreAllocator<DataType> + Send + Sync>
Display for
OgreArc<DataType, OgreAllocatorType> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(&self.deref(), f)
    }
}
impl<DataType:          Debug + Send + Sync,
     OgreAllocatorType: OgreAllocator<DataType> + Send + Sync>
Debug for
OgreArc<DataType, OgreAllocatorType> {
    /// Delegates to the control block's `Debug`, which also shows the pointed-to data.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // SAFETY: `inner` points to the leaked control block, alive until the last drop
        Debug::fmt(unsafe { self.inner.as_ref() }, f)
    }
}
impl<DataType:          Debug + Send + Sync,
     OgreAllocatorType: OgreAllocator<DataType> + Send + Sync>
PartialEq<Self> for
OgreArc<DataType, OgreAllocatorType>
where DataType: PartialEq {
    fn eq(&self, other: &Self) -> bool {
        self.deref().eq(other)
    }
}
impl<DataType:          Debug + Send + Sync,
     OgreAllocatorType: OgreAllocator<DataType> + Send + Sync>
Drop for
OgreArc<DataType, OgreAllocatorType> {
    #[inline(always)]
    fn drop(&mut self) {
        let inner = unsafe { self.inner.as_mut() };
        let references = inner.references_count.fetch_sub(1, Release);
        if references != 1 {
            return;
        }
        atomic::fence(Acquire);
        inner.allocator.dealloc_id(inner.data_id);
        let boxed = unsafe { Box::from_raw(inner) };
        drop(boxed);
    }
}
// SAFETY: NOTE(review): moving an `OgreArc` across threads is claimed safe because the reference
// count is an `AtomicU32` and both the data and the allocator are required to be `Send + Sync`;
// this also relies on the allocator's slot access being thread-safe -- confirm against the
// `OgreAllocator` implementations.
unsafe impl<DataType:          Debug + Send + Sync,
            OgreAllocatorType: OgreAllocator<DataType> + Send + Sync>
Send for
OgreArc<DataType, OgreAllocatorType> {}
// SAFETY: NOTE(review): sharing `&OgreArc` across threads is claimed safe for the same reasons;
// note that `DerefMut` can still hand out `&mut` to shared data from a single thread -- the
// soundness of that pattern depends on caller discipline.
unsafe impl<DataType:          Debug + Send + Sync,
            OgreAllocatorType: OgreAllocator<DataType> + Send + Sync>
Sync for
OgreArc<DataType, OgreAllocatorType> {}
/// The control block shared by all clones of one [`OgreArc`]: it is heap-allocated once
/// (then leaked) in `from_allocated_with_clones()` and reclaimed by the last `Drop`.
struct InnerOgreArc<DataType:          Debug + Send + Sync,
                    OgreAllocatorType: OgreAllocator<DataType> + Send + Sync + 'static> {
    // The allocator owning the slot. NOTE(review): the `'static` lifetime is produced by an
    // unsafe cast at construction time -- the allocator must outlive every clone.
    allocator:        &'static OgreAllocatorType,
    // Id of the slot (within `allocator`) holding the shared data.
    data_id:          u32,
    // Number of live `OgreArc` clones sharing `data_id`.
    references_count: AtomicU32,
    // Ties `DataType` to this type for variance/drop purposes without storing one.
    _phantom:         PhantomData<DataType>,
}
impl<DataType:         Debug + Send + Sync,
    OgreAllocatorType: OgreAllocator<DataType> + Send + Sync>
Debug for
InnerOgreArc<DataType, OgreAllocatorType> {
    /// Shows the allocator, the slot id with its current data, and the live reference count.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // resolve the slot so the pointed-to data can be shown next to its id
        let data = self.allocator.ref_from_id(self.data_id);
        let references = self.references_count.load(Relaxed);
        write!(f, "InnerOgreArc {{allocator: {:?}, data_id: #{}:{:?}, references_count: {}}}",
               self.allocator, self.data_id, data, references)
    }
}
// Compiled for `cargo test` and also for `cargo doc`, so the example shows up in documentation.
#[cfg(any(test,doc))]
mod tests {
    use super::*;
    use crate::prelude::advanced::AllocatorAtomicArray;
    /// Exercises the full lifecycle: allocate + initialize, clone (count goes up),
    /// drop one handle (count goes down), drop the last (slot returns to the allocator).
    #[cfg_attr(not(doc),test)]
    pub fn happy_path_usage() {
        let allocator = AllocatorAtomicArray::<u128, 128>::new();
        // allocate a slot and initialize it; the reference count starts at 1
        let arc1 = OgreArc::new_with(|slot| *slot = 128, &allocator).expect("Allocation should have been done");
        println!("arc1 is {arc1} -- {:?}", arc1);
        assert_eq!((*arc1.deref(), arc1.references_count()), (128, 1), "Starting Value and Reference Counts don't match for `arc1`");
        // cloning shares the same slot and bumps the count to 2
        let arc2 = arc1.clone();
        println!("arc2 is {arc2} -- {:?}", arc2);
        assert_eq!((*arc2.deref(), arc2.references_count()), (128, 2), "Cloned Value and Reference Counts don't match for `arc2`");
        // dropping one handle must leave the data intact and the count back at 1
        drop(arc1);
        println!("arc2 is still {arc2} -- {:?}", arc2);
        assert_eq!((*arc2.deref(), arc2.references_count()), (128, 1), "Value and Reference Counts don't match for `arc2` after dropping `arc1`");
        // dropping the last handle returns the slot to the allocator
        drop(arc2);
        println!("all is free:");
        println!("{:?}", allocator);
    }
}