use std::{
alloc::{self, Layout},
cmp::Ordering,
hash::{Hash, Hasher},
marker::PhantomData,
mem::{self, ManuallyDrop},
ops::Deref,
ptr,
sync::atomic::{
self,
Ordering::{Acquire, Relaxed, Release},
},
};
use memoffset::offset_of;
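/// A soft limit on the number of references that may be made to an `Arc`;
/// `clone` aborts the process rather than let the count overflow past it.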
const MAX_REFCOUNT: usize = (isize::MAX) as usize;
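/// The allocation backing an [`Arc`]: the reference count immediately followed
/// by the data. `#[repr(C)]` pins the field order so that the offset of `data`
/// can be computed with `offset_of!` and is identical for the thin (`[T; 0]`)
/// and fat (`[T]`) `HeaderSlice` variants used below.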
#[repr(C)]
pub(crate) struct ArcInner<T: ?Sized> {
pub(crate) count: atomic::AtomicUsize,
pub(crate) data: T,
}
unsafe impl<T: ?Sized + Sync + Send> Send for ArcInner<T> {}
unsafe impl<T: ?Sized + Sync + Send> Sync for ArcInner<T> {}
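/// A simplified `Arc` without weak-reference support; `#[repr(transparent)]`
/// over a `NonNull` pointer keeps it exactly one pointer wide.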
#[repr(transparent)]
pub(crate) struct Arc<T: ?Sized> {
pub(crate) p: ptr::NonNull<ArcInner<T>>,
pub(crate) phantom: PhantomData<T>,
}
unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}
impl<T> Arc<T> {
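    /// Reconstructs an `Arc` from a raw pointer to its `data` field by
    /// stepping back over the reference count.
    ///
    /// # Safety
    ///
    /// `ptr` must point at the `data` field of a live `ArcInner<T>`, and the
    /// caller must own a reference that this call takes over (the count is
    /// not incremented).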
#[inline]
pub(crate) unsafe fn from_raw(ptr: *const T) -> Self {
let ptr = (ptr as *const u8).sub(offset_of!(ArcInner<T>, data));
Arc { p: ptr::NonNull::new_unchecked(ptr as *mut ArcInner<T>), phantom: PhantomData }
}
}
impl<T: ?Sized> Arc<T> {
#[inline]
fn inner(&self) -> &ArcInner<T> {
unsafe { &*self.ptr() }
}
#[inline(never)]
unsafe fn drop_slow(&mut self) {
let _ = Box::from_raw(self.ptr());
}
#[inline]
pub(crate) fn ptr_eq(this: &Self, other: &Self) -> bool {
this.ptr() == other.ptr()
}
pub(crate) fn ptr(&self) -> *mut ArcInner<T> {
self.p.as_ptr()
}
}
impl<T: ?Sized> Clone for Arc<T> {
#[inline]
fn clone(&self) -> Self {
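        // A relaxed increment is sufficient: we already hold a reference, so
        // the count cannot concurrently reach zero (same reasoning as
        // `std::sync::Arc`). The abort below keeps the count from overflowing.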
let old_size = self.inner().count.fetch_add(1, Relaxed);
if old_size > MAX_REFCOUNT {
std::process::abort();
}
unsafe { Arc { p: ptr::NonNull::new_unchecked(self.ptr()), phantom: PhantomData } }
}
}
impl<T: ?Sized> Deref for Arc<T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
&self.inner().data
}
}
impl<T: ?Sized> Arc<T> {
#[inline]
pub(crate) fn get_mut(this: &mut Self) -> Option<&mut T> {
if this.is_unique() {
unsafe {
Some(&mut (*this.ptr()).data)
}
} else {
None
}
}
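    /// An `Acquire` load pairs with the `Release` decrements in `drop`, so a
    /// count of 1 here means every other reference has already been dropped
    /// and its effects are visible to us.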
pub(crate) fn is_unique(&self) -> bool {
self.inner().count.load(Acquire) == 1
}
}
impl<T: ?Sized> Drop for Arc<T> {
#[inline]
fn drop(&mut self) {
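        // Dropping a reference is a `Release` decrement: everything done
        // through this reference happens-before the count can reach zero.
        // Any drop other than the final one stops here.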
if self.inner().count.fetch_sub(1, Release) != 1 {
return;
}
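        // The final drop pairs this `Acquire` load (acting as a fence) with
        // the `Release` decrements above, so all writes made through other
        // references are visible before the allocation is freed.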
self.inner().count.load(Acquire);
unsafe {
self.drop_slow();
}
}
}
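// Equality takes a pointer-identity fast path before comparing the payloads.
// This assumes `==` is reflexive for `T`, which does not hold for floats
// containing NaN.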
impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
fn eq(&self, other: &Arc<T>) -> bool {
Self::ptr_eq(self, other) || *(*self) == *(*other)
}
fn ne(&self, other: &Arc<T>) -> bool {
!Self::ptr_eq(self, other) && *(*self) != *(*other)
}
}
impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
(**self).partial_cmp(&**other)
}
fn lt(&self, other: &Arc<T>) -> bool {
*(*self) < *(*other)
}
fn le(&self, other: &Arc<T>) -> bool {
*(*self) <= *(*other)
}
fn gt(&self, other: &Arc<T>) -> bool {
*(*self) > *(*other)
}
fn ge(&self, other: &Arc<T>) -> bool {
*(*self) >= *(*other)
}
}
impl<T: ?Sized + Ord> Ord for Arc<T> {
fn cmp(&self, other: &Arc<T>) -> Ordering {
(**self).cmp(&**other)
}
}
impl<T: ?Sized + Eq> Eq for Arc<T> {}
impl<T: ?Sized + Hash> Hash for Arc<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
(**self).hash(state)
}
}
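/// A header followed by a slice stored inline in the same allocation.
/// `#[repr(C)]` fixes the field order so `length` and `slice` sit at the same
/// offsets whether the slice type is `[T]` or the zero-length `[T; 0]` stand-in.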
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd)]
#[repr(C)]
pub(crate) struct HeaderSlice<H, T: ?Sized> {
pub(crate) header: H,
length: usize,
slice: T,
}
impl<H, T> HeaderSlice<H, [T]> {
pub(crate) fn slice(&self) -> &[T] {
&self.slice
}
}
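// The `[T; 0]` form is the thin view stored behind a `ThinArc`. Deref rebuilds
// the fat `[T]` view by pairing the struct's own address with the length kept
// in the allocation; only the slice metadata of the fake pointer is used.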
impl<H, T> Deref for HeaderSlice<H, [T; 0]> {
type Target = HeaderSlice<H, [T]>;
fn deref(&self) -> &Self::Target {
unsafe {
let len = self.length;
let fake_slice: *const [T] =
ptr::slice_from_raw_parts(self as *const _ as *const T, len);
&*(fake_slice as *const HeaderSlice<H, [T]>)
}
}
}
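/// A pointer-sized handle to a header plus a slice in a single allocation.
/// The slice length lives inside the allocation (see [`HeaderSlice`]), so the
/// handle stays one word wide instead of carrying fat-pointer metadata.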
#[repr(transparent)]
pub(crate) struct ThinArc<H, T> {
ptr: ptr::NonNull<ArcInner<HeaderSlice<H, [T; 0]>>>,
phantom: PhantomData<(H, T)>,
}
unsafe impl<H: Sync + Send, T: Sync + Send> Send for ThinArc<H, T> {}
unsafe impl<H: Sync + Send, T: Sync + Send> Sync for ThinArc<H, T> {}
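// Rebuilds a fat pointer from a thin one: the length stored in the allocation
// is read back and attached as slice metadata, while the address of the fake
// slice pointer doubles as the address of the `ArcInner` itself.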
fn thin_to_thick<H, T>(
thin: *mut ArcInner<HeaderSlice<H, [T; 0]>>,
) -> *mut ArcInner<HeaderSlice<H, [T]>> {
let len = unsafe { (*thin).data.length };
let fake_slice: *mut [T] = ptr::slice_from_raw_parts_mut(thin as *mut T, len);
fake_slice as *mut ArcInner<HeaderSlice<H, [T]>>
}
impl<H, T> ThinArc<H, T> {
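    /// Borrows `self` as a fat [`Arc`] for the duration of the callback,
    /// without touching the reference count, e.g.
    /// `thin.with_arc(|arc| arc.slice().len())` (illustrative only).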
#[inline]
pub(crate) fn with_arc<F, U>(&self, f: F) -> U
where
F: FnOnce(&Arc<HeaderSlice<H, [T]>>) -> U,
{
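        // Build the fat Arc in place without bumping the count; ManuallyDrop
        // keeps its destructor from giving back the reference this ThinArc
        // still owns.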
let transient = unsafe {
ManuallyDrop::new(Arc {
p: ptr::NonNull::new_unchecked(thin_to_thick(self.ptr.as_ptr())),
phantom: PhantomData,
})
};
let result = f(&transient);
result
}
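    /// Allocates a `ThinArc` with the given header, followed inline by the
    /// items of an exact-size iterator.
    ///
    /// A minimal usage sketch (not a doctest, since the API is crate-private;
    /// the header and item types are purely illustrative):
    ///
    /// ```ignore
    /// let thin = ThinArc::from_header_and_iter(0u32, [1u8, 2, 3].into_iter());
    /// assert_eq!(thin.header, 0);
    /// assert_eq!(thin.slice(), &[1, 2, 3]);
    /// ```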
pub(crate) fn from_header_and_iter<I>(header: H, mut items: I) -> Self
where
I: Iterator<Item = T> + ExactSizeIterator,
{
        assert_ne!(mem::size_of::<T>(), 0, "ThinArc does not support zero-sized items");
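        // Compute the backing allocation's layout by hand: the `ArcInner`
        // header fields, then `num_items` trailing `T`s, all rounded up to
        // the allocation's alignment.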
let num_items = items.len();
let inner_to_data_offset = offset_of!(ArcInner<HeaderSlice<H, [T; 0]>>, data);
let data_to_slice_offset = offset_of!(HeaderSlice<H, [T; 0]>, slice);
let slice_offset = inner_to_data_offset + data_to_slice_offset;
let slice_size = mem::size_of::<T>().checked_mul(num_items).expect("size overflows");
let usable_size = slice_offset.checked_add(slice_size).expect("size overflows");
let align = mem::align_of::<ArcInner<HeaderSlice<H, [T; 0]>>>();
let size = usable_size.wrapping_add(align - 1) & !(align - 1);
assert!(size >= usable_size, "size overflows");
let layout = Layout::from_size_align(size, align).expect("invalid layout");
let ptr: *mut ArcInner<HeaderSlice<H, [T; 0]>>;
unsafe {
let buffer = alloc::alloc(layout);
if buffer.is_null() {
alloc::handle_alloc_error(layout);
}
ptr = buffer as *mut _;
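            // The buffer is uninitialized, so every field is written with
            // `ptr::write`; a plain assignment would try to drop whatever
            // garbage happens to be there.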
let count = atomic::AtomicUsize::new(1);
ptr::write(ptr::addr_of_mut!((*ptr).count), count);
ptr::write(ptr::addr_of_mut!((*ptr).data.header), header);
ptr::write(ptr::addr_of_mut!((*ptr).data.length), num_items);
if num_items != 0 {
let mut current = ptr::addr_of_mut!((*ptr).data.slice) as *mut T;
debug_assert_eq!(current as usize - buffer as usize, slice_offset);
for _ in 0..num_items {
ptr::write(
current,
items.next().expect("ExactSizeIterator over-reported length"),
);
current = current.offset(1);
}
assert!(items.next().is_none(), "ExactSizeIterator under-reported length");
debug_assert_eq!(current as *mut u8, buffer.add(usable_size));
}
assert!(items.next().is_none(), "ExactSizeIterator under-reported length");
}
ThinArc { ptr: unsafe { ptr::NonNull::new_unchecked(ptr) }, phantom: PhantomData }
}
}
impl<H, T> Deref for ThinArc<H, T> {
type Target = HeaderSlice<H, [T]>;
#[inline]
fn deref(&self) -> &Self::Target {
unsafe { &(*thin_to_thick(self.ptr.as_ptr())).data }
}
}
impl<H, T> Clone for ThinArc<H, T> {
#[inline]
fn clone(&self) -> Self {
ThinArc::with_arc(self, |a| Arc::into_thin(a.clone()))
}
}
impl<H, T> Drop for ThinArc<H, T> {
#[inline]
fn drop(&mut self) {
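        // Rebuild a fat `Arc` that takes over our single reference and let its
        // `Drop` do the real work: decrement the count and, if this was the
        // last reference, free the allocation.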
let _ = Arc::from_thin(ThinArc { ptr: self.ptr, phantom: PhantomData });
}
}
impl<H, T> Arc<HeaderSlice<H, [T]>> {
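    /// Converts a fat `Arc` into a [`ThinArc`] by discarding the fat-pointer
    /// metadata; the `length` stored in the allocation must match the slice
    /// length so that it can be recovered again later.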
#[inline]
pub(crate) fn into_thin(a: Self) -> ThinArc<H, T> {
assert_eq!(a.length, a.slice.len(), "Length needs to be correct for ThinArc to work");
let fat_ptr: *mut ArcInner<HeaderSlice<H, [T]>> = a.ptr();
mem::forget(a);
let thin_ptr = fat_ptr as *mut [usize] as *mut usize;
ThinArc {
ptr: unsafe {
ptr::NonNull::new_unchecked(thin_ptr as *mut ArcInner<HeaderSlice<H, [T; 0]>>)
},
phantom: PhantomData,
}
}
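    /// The inverse of `into_thin`: reads the length back out of the allocation
    /// and reattaches it as fat-pointer metadata.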
#[inline]
pub(crate) fn from_thin(a: ThinArc<H, T>) -> Self {
let ptr = thin_to_thick(a.ptr.as_ptr());
mem::forget(a);
unsafe { Arc { p: ptr::NonNull::new_unchecked(ptr), phantom: PhantomData } }
}
}
impl<H: PartialEq, T: PartialEq> PartialEq for ThinArc<H, T> {
#[inline]
fn eq(&self, other: &ThinArc<H, T>) -> bool {
**self == **other
}
}
impl<H: Eq, T: Eq> Eq for ThinArc<H, T> {}
impl<H: Hash, T: Hash> Hash for ThinArc<H, T> {
fn hash<HSR: Hasher>(&self, state: &mut HSR) {
(**self).hash(state)
}
}