use crate::{Allocator, Boxed, GenericAlloc, PoolAlloc, Result};
use core::alloc::Layout;
use core::any::Any;
use core::cmp::{Eq, Ord, Ordering, PartialEq, PartialOrd};
use core::fmt::{self, Debug, Display, Formatter};
use core::hash::{Hash, Hasher};
use core::marker::PhantomData;
use core::mem::ManuallyDrop;
use core::ops::Deref;
use core::ptr::{self, NonNull};
use core::sync::atomic::{AtomicIsize, Ordering::Relaxed};
/// Heap-allocated control block shared by every `Arc` handle to one value.
///
/// The value itself lives in a separate allocation; this block records the
/// layout and allocator handles needed to release both on final drop.
#[repr(C)]
struct ArcInner<'a, A: Allocator> {
    /// Strong reference count; initialized to 1 in `from_boxed_in`.
    cnt: AtomicIsize,
    /// Layout of the separately-allocated value, kept for `release_with`.
    data_layout: Layout,
    /// Allocator that owns the value allocation.
    data_alloc: &'a A,
    /// Allocator that owns this `ArcInner` allocation itself.
    alloc: &'a A,
}
/// Atomically reference-counted shared pointer whose allocations are tied
/// to an allocator borrowed for lifetime `'a` (defaulting to `PoolAlloc`).
///
/// Unlike `std::sync::Arc`, the control block and the value are two
/// separate allocations, so `data` carries its own (possibly fat) pointer.
#[repr(C)]
pub struct Arc<'a, T: ?Sized + 'a, A: Allocator + 'a = PoolAlloc> {
    /// Shared control block (count + allocator/layout bookkeeping).
    inner: NonNull<ArcInner<'a, A>>,
    /// Pointer to the shared value.
    data: NonNull<T>,
    /// Marks logical ownership of both allocations for variance/drop check.
    mark: PhantomData<(ArcInner<'a, A>, T)>,
}
impl<T: ?Sized + 'static> Arc<'static, T, PoolAlloc> {
    /// Converts an owned `Boxed` value into an `Arc` backed by the global
    /// `PoolAlloc`, starting with a reference count of one.
    pub fn from_boxed(data: Boxed<'static, T, PoolAlloc>) -> Result<Self> {
        Self::from_boxed_in(&PoolAlloc, data)
    }
}
impl<T: Sized + 'static> Arc<'static, T, PoolAlloc> {
    /// Allocates `val` in the global `PoolAlloc` and wraps it in an `Arc`.
    pub fn new(val: T) -> Result<Self> {
        Self::new_in(&PoolAlloc, val)
    }
}
impl<T: Clone + 'static> Arc<'static, [T], PoolAlloc> {
    /// Builds an `Arc<[T]>` of `len` clones of `val` in the global pool.
    pub fn new_slice(len: usize, val: T) -> Result<Self> {
        Self::new_slice_in(&PoolAlloc, len, val)
    }
}
impl Arc<'static, (), PoolAlloc> {
    /// Runs `f` once and stores its result in the global pool as an `Arc<T>`.
    pub fn new_then<T, F>(f: F) -> Result<Arc<'static, T, PoolAlloc>>
    where
        F: FnOnce() -> Result<T>,
    {
        Self::new_then_in(&PoolAlloc, f)
    }

    /// Builds an `Arc<[T]>` of length `len` in the global pool, producing
    /// element `i` by calling `f(i)`; the first error aborts construction.
    pub fn new_slice_then<T, F>(len: usize, f: F) -> Result<Arc<'static, [T], PoolAlloc>>
    where
        F: FnMut(usize) -> Result<T>,
    {
        Self::new_slice_then_in(&PoolAlloc, len, f)
    }
}
impl<'a, T: ?Sized, A: Allocator + 'a> Arc<'a, T, A> {
    /// Takes ownership of `data` and allocates the shared control block in
    /// `alloc`, returning an `Arc` with a reference count of one.
    ///
    /// # Errors
    /// Fails if the control block cannot be allocated; `data` is then
    /// dropped normally.
    pub fn from_boxed_in(alloc: &'a A, data: Boxed<'a, T, A>) -> Result<Self> {
        let boxed = Boxed::new_in(
            alloc,
            ArcInner {
                cnt: AtomicIsize::new(1),
                data_layout: data.layout(),
                data_alloc: data.allocator(),
                alloc,
            },
        )?;
        // Both `leak` calls hand raw ownership to the new Arc; `Drop`
        // reconstructs and releases the two allocations.
        Ok(Self {
            inner: boxed.leak().0.into(),
            data: data.leak().0.into(),
            mark: PhantomData,
        })
    }

    /// Returns a raw pointer to the shared value, valid for as long as
    /// any `Arc` handle to it is alive.
    pub fn as_ptr(&self) -> *const T {
        self.data.as_ptr()
    }

    /// Returns a mutable reference iff this is the only handle.
    ///
    /// The uniqueness check uses `Acquire` (std::sync::Arc::get_mut does
    /// the same): it must synchronize with the decrement other handles
    /// perform in `Drop`, otherwise writes made through a just-dropped
    /// handle on another thread might not be visible. The previous
    /// `Relaxed` load did not establish that edge.
    pub fn get_mut(&mut self) -> Option<&mut T> {
        if self.get_inner().cnt.load(core::sync::atomic::Ordering::Acquire) == 1 {
            // SAFETY: the count is 1 and we hold `&mut self`, so no other
            // reference to the value can exist.
            Some(unsafe { self.get_mut_unchecked() })
        } else {
            None
        }
    }

    /// Returns a mutable reference without checking uniqueness.
    ///
    /// # Safety
    /// The caller must guarantee that no other `Arc` handle (or borrow
    /// derived from one) can access the value while the returned
    /// reference is alive.
    pub unsafe fn get_mut_unchecked(&mut self) -> &mut T {
        self.data.as_mut()
    }

    /// Shared access to the control block.
    fn get_inner(&self) -> &ArcInner<'a, A> {
        // SAFETY: `inner` is valid while any `Arc` handle is alive.
        unsafe { self.inner.as_ref() }
    }
}
impl<'a, T: Sized, A: Allocator + 'a> Arc<'a, T, A> {
    /// Allocates `val` in `alloc` and wraps it in a new `Arc`.
    pub fn new_in(alloc: &'a A, val: T) -> Result<Self> {
        Self::from_boxed_in(alloc, Boxed::new_in(alloc, val)?)
    }
}
impl<'a, T: Clone + 'a, A: Allocator + 'a> Arc<'a, [T], A> {
    /// Allocates a slice of `len` clones of `val` in `alloc` and shares it.
    pub fn new_slice_in(alloc: &'a A, len: usize, val: T) -> Result<Self> {
        let slice = Boxed::new_slice_in(alloc, len, val)?;
        Self::from_boxed_in(alloc, slice)
    }
}
impl<'a, A: Allocator + 'a> Arc<'a, (), A> {
    /// Runs `f` once and stores its result in `alloc` as an `Arc<T>`.
    pub fn new_then_in<T, F>(alloc: &'a A, f: F) -> Result<Arc<'a, T, A>>
    where
        F: FnOnce() -> Result<T>,
    {
        Arc::from_boxed_in(alloc, Boxed::new_then_in(alloc, f)?)
    }

    /// Builds an `Arc<[T]>` of length `len` in `alloc`, producing element
    /// `i` with `f(i)`; the first error aborts construction.
    pub fn new_slice_then_in<T, F>(alloc: &'a A, len: usize, f: F) -> Result<Arc<'a, [T], A>>
    where
        F: FnMut(usize) -> Result<T>,
    {
        Arc::from_boxed_in(alloc, Boxed::new_slice_then_in(alloc, len, f)?)
    }
}
impl<'a, A: Allocator> Arc<'a, dyn Any, A> {
    /// Attempts to downcast `Arc<dyn Any>` to a concrete `Arc<T>`.
    ///
    /// On success the same two allocations are reused (no copy, count
    /// unchanged); on type mismatch the original handle is returned in
    /// `Err` untouched.
    pub fn downcast<T: Any>(self) -> core::result::Result<Arc<'a, T, A>, Self> {
        if <dyn Any>::is::<T>(self.as_ref()) {
            // Type matches: strip the vtable, keep the data address.
            let data = self.data.cast::<T>();
            // Suppress `self`'s Drop — ownership of both allocations
            // transfers into the handle constructed below.
            let this = ManuallyDrop::new(self);
            Ok(Arc {
                inner: this.inner,
                data,
                mark: PhantomData,
            })
        } else {
            Err(self)
        }
    }
}
impl<'a, T: Any, A: Allocator> Arc<'a, T, A> {
    /// Erases the concrete type, converting `Arc<T>` into `Arc<dyn Any>`.
    ///
    /// Reuses the existing allocations; only the fat-pointer metadata
    /// (vtable) is added, and the reference count is unchanged.
    pub fn to_any(self) -> Arc<'a, dyn Any, A> {
        // Build a fat `dyn Any` pointer from the stored data pointer.
        let any: &dyn Any = self.as_ref();
        let data = NonNull::from(any);
        // Suppress Drop: the new handle takes over both allocations.
        let other = ManuallyDrop::new(self);
        Arc {
            inner: other.inner,
            data,
            mark: PhantomData,
        }
    }
}
impl<'a, T: ?Sized, A: Allocator> Arc<'a, T, A> {
    /// Converts `Arc<T>` into `Arc<U>` using `f` to produce the `U`
    /// reference — typically an unsizing coercion like `|v| v as &dyn Trait`.
    ///
    /// The conversion is accepted only when `f` returns a reference with
    /// the SAME address as the stored value; otherwise `Drop` would later
    /// release the wrong pointer, so `Err(self)` is returned instead.
    pub fn upcast<U: ?Sized>(
        self,
        f: impl FnOnce(&T) -> &U,
    ) -> core::result::Result<Arc<'a, U, A>, Self> {
        let data = NonNull::from(f(unsafe { self.data.as_ref() }));
        // Reject any `f` that changed the address (e.g. returned a
        // reference to a field, or to some unrelated static).
        if !ptr::eq(data.cast::<u8>().as_ptr(), self.data.cast::<u8>().as_ptr()) {
            return Err(self);
        }
        // Suppress Drop: ownership transfers to the new handle.
        let this = ManuallyDrop::new(self);
        Ok(Arc {
            inner: this.inner,
            data,
            mark: PhantomData,
        })
    }

    /// Reinterprets the value pointer as `U` without any check.
    ///
    /// # Safety
    /// The caller must guarantee the stored value really is a valid `U`;
    /// note the ORIGINAL allocation layout is still used on drop.
    pub unsafe fn cast_unchecked<U>(self) -> Arc<'a, U, A> {
        let this = ManuallyDrop::new(self);
        Arc {
            inner: this.inner,
            data: this.data.cast::<U>(),
            mark: PhantomData,
        }
    }
}
impl<'a, T: ?Sized, A: Allocator> Drop for Arc<'a, T, A> {
    /// Decrements the reference count and, on the last handle, drops the
    /// value and returns both allocations to their allocators.
    fn drop(&mut self) {
        // Release + Acquire fence mirrors std::sync::Arc: the thread that
        // performs the final decrement must observe every other thread's
        // writes to the value before destroying it. The previous
        // `Relaxed` decrement provided no such happens-before edge, which
        // is unsound given the Send/Sync impls below.
        if self.get_inner().cnt.fetch_sub(1, core::sync::atomic::Ordering::Release) != 1 {
            return;
        }
        core::sync::atomic::fence(core::sync::atomic::Ordering::Acquire);
        // Copy everything needed out of the control block BEFORE dropping
        // it: the original code read `inner.data_alloc` / `inner.alloc`
        // through a reference to an already-dropped `ArcInner`.
        let inner = self.get_inner();
        let data_layout = inner.data_layout;
        let data_alloc = inner.data_alloc;
        let alloc = inner.alloc;
        unsafe {
            // Drop and release the value first, then the control block.
            self.data.as_ptr().drop_in_place();
            data_alloc.release_with(self.data, data_layout);
            self.inner.as_ptr().drop_in_place();
            alloc.release_with(self.inner, Layout::new::<ArcInner<'a, A>>());
        }
    }
}
impl<T: ?Sized, A: Allocator> AsRef<T> for Arc<'_, T, A> {
    /// Shared access to the stored value.
    #[inline(always)]
    fn as_ref(&self) -> &T {
        // SAFETY: `data` stays valid while any `Arc` handle is alive.
        unsafe { self.data.as_ref() }
    }
}
impl<T: ?Sized, A: Allocator> Clone for Arc<'_, T, A> {
    /// Creates another handle to the same value by bumping the count.
    fn clone(&self) -> Self {
        // Relaxed suffices for the increment (as in std::sync::Arc): a
        // new handle can only be created through an existing one.
        let old = self.get_inner().cnt.fetch_add(1, Relaxed);
        // Guard against count overflow (e.g. `mem::forget` in a loop),
        // which would wrap the signed count and lead to use-after-free.
        // std::sync::Arc aborts in this situation; panicking is the
        // closest available response in a core-only crate.
        assert!(old > 0 && old < isize::MAX, "Arc reference count overflow");
        Self {
            inner: self.inner,
            data: self.data,
            mark: PhantomData,
        }
    }
}
// SAFETY: an `Arc` sent to another thread may turn out to be the last
// handle and drop the value THERE, and it also grants shared `&T`
// access, so `T` must be both `Send` and `Sync` — the same bounds
// `std::sync::Arc` requires. (The previous `T: Sync` alone was unsound:
// it allowed sending, and hence dropping, a `!Send` value cross-thread.)
unsafe impl<T: Send + Sync + ?Sized, A: Allocator + crate::Pool> Send for Arc<'static, T, A> {}
// SAFETY: `&Arc` lets other threads read `&T` and clone owned handles,
// so `T: Send + Sync` is required here as well.
// NOTE(review): a clone made through `&Arc` on another thread will run
// its Drop (and thus the allocator) on that thread — consider whether
// `A: crate::Pool` should also bound this impl; confirm `Pool`'s contract.
unsafe impl<T: Send + Sync + ?Sized, A: Allocator> Sync for Arc<'static, T, A> {}
// `Arc` adds no pinning obligations of its own.
impl<T: Unpin + ?Sized, A: Allocator> Unpin for Arc<'_, T, A> {}
impl<T: ?Sized, A: Allocator> Deref for Arc<'_, T, A> {
    type Target = T;

    /// Dereferences to the shared value.
    #[inline]
    fn deref(&self) -> &Self::Target {
        <Self as AsRef<T>>::as_ref(self)
    }
}
impl<T: Display + ?Sized, A: Allocator> Display for Arc<'_, T, A> {
    /// Formats the shared value transparently.
    #[inline]
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        self.as_ref().fmt(f)
    }
}
impl<T: Debug + ?Sized, A: Allocator> Debug for Arc<'_, T, A> {
    /// Debug-formats the shared value transparently.
    #[inline]
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        self.as_ref().fmt(f)
    }
}
impl<T: ?Sized, A: Allocator> fmt::Pointer for Arc<'_, T, A> {
    /// Formats the address of the shared value.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&self.as_ptr(), f)
    }
}
impl<T: PartialEq + ?Sized, A: Allocator> PartialEq for Arc<'_, T, A> {
    /// Compares the shared values, not the pointers.
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.as_ref() == other.as_ref()
    }
}
impl<T: PartialOrd + ?Sized, A: Allocator> PartialOrd for Arc<'_, T, A> {
    /// Orders by the shared values.
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.as_ref().partial_cmp(other.as_ref())
    }

    #[inline]
    fn lt(&self, other: &Self) -> bool {
        self.as_ref() < other.as_ref()
    }

    #[inline]
    fn le(&self, other: &Self) -> bool {
        self.as_ref() <= other.as_ref()
    }

    #[inline]
    fn gt(&self, other: &Self) -> bool {
        self.as_ref() > other.as_ref()
    }

    #[inline]
    fn ge(&self, other: &Self) -> bool {
        self.as_ref() >= other.as_ref()
    }
}
impl<T: Ord + ?Sized, A: Allocator> Ord for Arc<'_, T, A> {
    /// Total order on the shared values.
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        self.as_ref().cmp(other.as_ref())
    }
}
/// `Arc<T>` is `Eq` whenever the value type is (equality delegates to `T`).
impl<T: Eq + ?Sized, A: Allocator> Eq for Arc<'_, T, A> {}
impl<T: Hash + ?Sized, A: Allocator> Hash for Arc<'_, T, A> {
    /// Hashes the shared value, so `Arc<T>` hashes exactly like `T`.
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.as_ref().hash(state)
    }
}
#[cfg(test)]
mod test {
    use crate::{Arc, MemPool};
    use core::sync::atomic::{AtomicUsize, Ordering::Relaxed};

    /// Cloning makes `get_mut` refuse to hand out a unique reference.
    #[test]
    fn test() {
        let rc = Arc::new(100).unwrap();
        let cnt = *rc + 100;
        assert_eq!(200, cnt);
        let mut rc2 = rc.clone();
        assert_eq!(rc2.get_mut(), None);
    }

    /// The value is dropped exactly once, when the last handle goes away.
    #[test]
    fn test_pool() {
        // Atomic counter instead of the original `static mut`, whose
        // unsynchronized reads/writes are UB when tests run in parallel
        // (and a hard error in edition 2024).
        static DROP: AtomicUsize = AtomicUsize::new(0);
        struct Foo;
        impl Drop for Foo {
            fn drop(&mut self) {
                DROP.fetch_add(1, Relaxed);
            }
        }
        let pool = MemPool::new_boxed(0).unwrap();
        DROP.store(0, Relaxed);
        {
            let arc = Arc::new_in(&pool, Foo).unwrap();
            {
                let _rc1 = arc.clone();
            }
            // Still one live handle: nothing dropped yet.
            assert_eq!(DROP.load(Relaxed), 0);
        }
        assert_eq!(DROP.load(Relaxed), 1);
    }

    /// Round-trips a value through `to_any`, `downcast`, `upcast`, and
    /// `cast_unchecked` without copying or dropping it.
    #[test]
    fn test_cast() {
        trait Trait {}
        impl Trait for i8 {}
        let arc = Arc::new(100_i8).unwrap();
        let arc = arc.to_any();
        let arc = arc.downcast::<i8>().unwrap();
        let arc = arc.upcast::<dyn Trait>(|val| val);
        assert!(arc.is_ok());
        let _ = unsafe { arc.unwrap().cast_unchecked::<i8>() };
    }
}