#![cfg_attr(feature = "std", doc = "[arc]: std::sync::Arc")]
#![cfg_attr(
not(feature = "std"),
doc = "[arc]: https://doc.rust-lang.org/stable/std/sync/struct.Arc.html"
)]
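//! A single-threaded reference-counting pointer, modeled on `std::rc::Rc`, in
//! which every allocation also carries intrusive-list `Links` alongside the
//! value.
//!
//! Unlike [`Arc`][arc], the counts here are plain `Cell`s, so `Rc` and `Weak`
//! are neither `Send` nor `Sync`.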
use core::borrow;
use core::cell::{Cell, RefCell};
use core::cmp::Ordering;
use core::convert::From;
use core::fmt;
use core::hash::{Hash, Hasher};
use core::intrinsics::abort;
use core::marker::{PhantomData, Unpin};
use core::mem::{self, ManuallyDrop, MaybeUninit};
use core::ops::Deref;
use core::pin::Pin;
use core::ptr::{self, NonNull};
use alloc::alloc::handle_alloc_error;
use alloc::alloc::{AllocError, Allocator, Global, Layout};
use alloc::boxed::Box;
use crate::link::Links;
#[cfg(test)]
mod tests;
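/// The heap allocation behind `Rc<T>` and `Weak<T>`: the strong and weak
/// counts, the intrusive-list `Links`, and the value itself. `#[repr(C)]`
/// keeps the field order (and therefore the offset of `value`) predictable,
/// which `data_offset` and the raw-pointer conversions rely on.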
#[repr(C)]
pub(crate) struct RcBox<T> {
strong: Cell<usize>,
weak: Cell<usize>,
pub links: MaybeUninit<RefCell<Links<T>>>,
pub value: MaybeUninit<T>,
}
impl<T> RcBox<T> {
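    /// Returns a reference to the intrusive-list links stored in this
    /// `RcBox`.
    ///
    /// # Safety
    ///
    /// The `links` field must be initialized; every constructor in this
    /// module initializes it before the `RcBox` is shared.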
#[inline]
pub(crate) unsafe fn links(&self) -> &RefCell<Links<T>> {
let links = &self.links;
let pointer_to_links = links as *const MaybeUninit<RefCell<Links<T>>>;
&*(pointer_to_links.cast::<RefCell<Links<T>>>())
}
}
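/// A single-threaded reference-counting pointer over a shared `RcBox<T>`
/// whose allocation also carries intrusive-list links.
///
/// A sketch of the basic sharing behavior (marked `ignore`, so it is not run
/// as a doctest; assumes `Rc` is imported from this crate):
///
/// ```ignore
/// let a = Rc::new(5);
/// let b = Rc::clone(&a);
/// assert!(Rc::ptr_eq(&a, &b));
/// assert_eq!(Rc::strong_count(&a), 2);
/// ```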
pub struct Rc<T> {
pub(crate) ptr: NonNull<RcBox<T>>,
phantom: PhantomData<RcBox<T>>,
}
mod rc_is_not_send {}
mod rc_is_not_sync {}
impl<T> Rc<T> {
#[inline(always)]
pub(crate) fn inner(&self) -> &RcBox<T> {
unsafe { self.ptr.as_ref() }
}
fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
Self {
ptr,
phantom: PhantomData,
}
}
unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
Self::from_inner(NonNull::new_unchecked(ptr))
}
}
impl<T> Rc<T> {
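    /// Constructs a new `Rc<T>`, allocating the reference counts, the
    /// intrusive-list `Links`, and `value` together in one `RcBox`.
    ///
    /// A minimal sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let five = Rc::new(5);
    /// assert_eq!(*five, 5);
    /// assert_eq!(Rc::strong_count(&five), 1);
    /// ```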
pub fn new(value: T) -> Rc<T> {
Self::from_inner(
Box::leak(Box::new(RcBox {
strong: Cell::new(1),
weak: Cell::new(1),
links: MaybeUninit::new(RefCell::new(Links::new())),
value: MaybeUninit::new(value),
}))
.into(),
)
}
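    /// Constructs a new `Rc` with uninitialized contents; the counts and
    /// `links` are set up, but the value must be written before calling
    /// `assume_init`.
    ///
    /// A sketch of the write-then-init flow (illustrative only, not run as a
    /// doctest):
    ///
    /// ```ignore
    /// let mut rc = Rc::<u32>::new_uninit();
    /// Rc::get_mut(&mut rc).unwrap().write(7);
    /// let rc = unsafe { rc.assume_init() };
    /// assert_eq!(*rc, 7);
    /// ```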
#[must_use]
pub fn new_uninit() -> Rc<MaybeUninit<T>> {
unsafe {
Rc::from_ptr(Rc::allocate_for_layout(
Layout::new::<T>(),
|layout| Global.allocate(layout),
<*mut u8>::cast,
))
}
}
pub fn pin(value: T) -> Pin<Rc<T>> {
unsafe { Pin::new_unchecked(Rc::new(value)) }
}
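    /// Returns the inner value if this `Rc` is the only strong reference to
    /// it; otherwise returns the same `Rc` back as an error.
    ///
    /// Sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let x = Rc::new(3);
    /// assert_eq!(Rc::try_unwrap(x), Ok(3));
    ///
    /// let y = Rc::new(4);
    /// let _also_y = Rc::clone(&y);
    /// assert!(Rc::try_unwrap(y).is_err());
    /// ```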
#[inline]
    pub fn try_unwrap(this: Self) -> Result<T, Self> {
        if Rc::strong_count(&this) == 1 {
            unsafe {
                // This is the only strong reference, so the value can be
                // moved out with a bitwise read.
                let val = ptr::read(&*this);
                // Release the strong count, then hand the implicit weak
                // reference to a temporary `Weak`: dropping it deallocates
                // the `RcBox` once no other `Weak`s remain.
                this.inner().dec_strong();
                let _weak = Weak {
                    ptr: this.ptr,
                    phantom: PhantomData,
                };
                // `this` must not run its own destructor now that the value
                // has been moved out.
                mem::forget(this);
                Ok(val)
            }
        } else {
            Err(this)
        }
    }
}
impl<T> Rc<MaybeUninit<T>> {
#[inline]
#[must_use]
pub unsafe fn assume_init(self) -> Rc<T> {
Rc::from_inner(ManuallyDrop::new(self).ptr.cast())
}
}
impl<T> Rc<T> {
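    /// Consumes the `Rc`, returning a raw pointer to the value. The pointer
    /// must eventually be passed back to [`Rc::from_raw`] to release the
    /// allocation; otherwise it is leaked.
    ///
    /// Round-trip sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let rc = Rc::new(42u32);
    /// let ptr = Rc::into_raw(rc);
    /// // SAFETY: `ptr` came from `into_raw` above and is reclaimed exactly once.
    /// let rc = unsafe { Rc::from_raw(ptr) };
    /// assert_eq!(*rc, 42);
    /// ```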
#[must_use]
pub fn into_raw(this: Self) -> *const T {
let ptr = Self::as_ptr(&this);
mem::forget(this);
ptr
}
#[must_use]
pub fn as_ptr(this: &Self) -> *const T {
let ptr: *mut RcBox<T> = NonNull::as_ptr(this.ptr);
unsafe {
ptr::addr_of_mut!((*ptr).value).cast::<T>()
}
}
pub unsafe fn from_raw(ptr: *const T) -> Self {
let offset = data_offset(ptr);
let rc_ptr = (ptr as *mut u8)
.offset(-offset)
.with_metadata_of(ptr as *mut RcBox<T>);
Self::from_ptr(rc_ptr)
}
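    /// Creates a new [`Weak`] pointer to this allocation, incrementing the
    /// weak count.
    ///
    /// Sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let strong = Rc::new(1);
    /// let weak = Rc::downgrade(&strong);
    /// assert_eq!(Rc::weak_count(&strong), 1);
    /// assert_eq!(weak.upgrade().as_deref(), Some(&1));
    /// ```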
#[must_use]
pub fn downgrade(this: &Self) -> Weak<T> {
this.inner().inc_weak();
debug_assert!(!is_dangling(this.ptr.as_ptr()));
Weak {
ptr: this.ptr,
phantom: PhantomData,
}
}
#[inline]
#[must_use]
pub fn weak_count(this: &Self) -> usize {
this.inner().weak() - 1
}
#[inline]
#[must_use]
pub fn strong_count(this: &Self) -> usize {
this.inner().strong()
}
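    /// Increments the strong count of the allocation that `ptr` points into,
    /// without taking ownership of an `Rc`.
    ///
    /// # Safety
    ///
    /// `ptr` must have been returned by [`Rc::into_raw`], and the allocation's
    /// strong count must be at least one when this is called.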
#[inline]
pub unsafe fn increment_strong_count(ptr: *const T) {
let rc = ManuallyDrop::new(Rc::<T>::from_raw(ptr));
let _rc_clone: ManuallyDrop<_> = rc.clone();
}
#[inline]
pub unsafe fn decrement_strong_count(ptr: *const T) {
drop(Rc::from_raw(ptr));
}
#[inline]
fn is_unique(this: &Self) -> bool {
Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
}
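    /// Returns a mutable reference to the value if there are no other `Rc` or
    /// [`Weak`] pointers to the same allocation, and `None` otherwise.
    ///
    /// Sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut rc = Rc::new(3);
    /// *Rc::get_mut(&mut rc).unwrap() += 1;
    /// assert_eq!(*rc, 4);
    ///
    /// let _shared = Rc::clone(&rc);
    /// assert!(Rc::get_mut(&mut rc).is_none());
    /// ```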
#[inline]
pub fn get_mut(this: &mut Self) -> Option<&mut T> {
if Rc::is_unique(this) {
unsafe { Some(Rc::get_mut_unchecked(this)) }
} else {
None
}
}
#[inline]
pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
debug_assert!(!this.inner().is_dead());
let value = &mut (*this.ptr.as_ptr()).value;
let pointer_to_value = (value as *mut MaybeUninit<T>).cast::<T>();
&mut *(pointer_to_value)
}
#[inline]
#[must_use]
pub fn ptr_eq(this: &Self, other: &Self) -> bool {
this.ptr.as_ptr() == other.ptr.as_ptr()
}
}
impl<T: Clone> Rc<T> {
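    /// Makes a mutable reference into the given `Rc`, cloning the inner value
    /// first if other `Rc` pointers share it (clone-on-write), or moving it to
    /// a fresh allocation if only `Weak` pointers remain attached to the old
    /// one.
    ///
    /// Sketch of the clone-on-write behavior (not compiled as a doctest):
    ///
    /// ```ignore
    /// let mut data = Rc::new(5);
    /// *Rc::make_mut(&mut data) += 1; // sole owner: mutated in place
    /// let other = Rc::clone(&data);
    /// *Rc::make_mut(&mut data) += 1; // shared: `data` now points at a clone
    /// assert_eq!(*data, 7);
    /// assert_eq!(*other, 6);
    /// ```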
#[inline]
    pub fn make_mut(this: &mut Self) -> &mut T {
        if Rc::strong_count(this) != 1 {
            // Other `Rc`s share this allocation: clone the value into a
            // fresh allocation and point `this` at the clone.
            let mut rc = Self::new_uninit();
            unsafe {
                let data = Rc::get_mut_unchecked(&mut rc);
                data.as_mut_ptr().write((**this).clone());
                *this = rc.assume_init();
            }
        } else if Rc::weak_count(this) != 0 {
            // This is the last strong reference, but `Weak`s remain: move the
            // value (bitwise) into a fresh allocation, then release our strong
            // count and our share of the implicit weak count so the old
            // allocation is freed once the outstanding `Weak`s go away.
            let mut rc = Self::new_uninit();
            unsafe {
                let data = Rc::get_mut_unchecked(&mut rc);
                data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
                this.inner().dec_strong();
                this.inner().dec_weak();
                ptr::write(this, rc.assume_init());
            }
        }
        // At this point `this` is the unique owner, so handing out a mutable
        // reference to the value is sound.
        unsafe {
            let value = &mut this.ptr.as_mut().value;
            let pointer_to_value = (value as *mut MaybeUninit<T>).cast::<T>();
            &mut *(pointer_to_value)
        }
    }
}
impl<T> Rc<T> {
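    /// Allocates an `RcBox<T>` for a value with the given layout, calling
    /// `handle_alloc_error` on failure. `mem_to_rcbox` turns the raw
    /// allocation into a typed `*mut RcBox<T>`; the counts and `links` are
    /// initialized, but the value is left uninitialized.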
unsafe fn allocate_for_layout(
value_layout: Layout,
allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
) -> *mut RcBox<T> {
let layout = Layout::new::<RcBox<()>>()
.extend(value_layout)
.unwrap()
.0
.pad_to_align();
Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rcbox)
.unwrap_or_else(|_| handle_alloc_error(layout))
}
#[inline]
unsafe fn try_allocate_for_layout(
value_layout: Layout,
allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
) -> Result<*mut RcBox<T>, AllocError> {
let layout = Layout::new::<RcBox<()>>()
.extend(value_layout)
.unwrap()
.0
.pad_to_align();
let ptr = allocate(layout)?;
let inner = mem_to_rcbox(ptr.as_non_null_ptr().as_ptr());
debug_assert_eq!(Layout::for_value(&*inner), layout);
ptr::write(&mut (*inner).strong, Cell::new(1));
ptr::write(&mut (*inner).weak, Cell::new(1));
ptr::write(
&mut (*inner).links,
MaybeUninit::new(RefCell::new(Links::new())),
);
Ok(inner)
}
unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
Self::allocate_for_layout(
Layout::for_value(&*ptr),
|layout| Global.allocate(layout),
|mem| mem.with_metadata_of(ptr as *mut RcBox<T>),
)
}
fn from_box(v: Box<T>) -> Rc<T> {
unsafe {
let (box_unique, alloc) = Box::into_raw_with_allocator(v);
let box_unique = NonNull::new_unchecked(box_unique);
let box_ptr = box_unique.as_ptr();
let value_size = mem::size_of_val(&*box_ptr);
let ptr = Self::allocate_for_ptr(box_ptr);
ptr::copy_nonoverlapping(
(box_ptr as *const T).cast::<u8>(),
ptr::addr_of_mut!((*ptr).value).cast::<u8>(),
value_size,
);
box_free(box_unique, alloc);
Self::from_ptr(ptr)
}
}
}
impl<T> Deref for Rc<T> {
type Target = T;
#[inline(always)]
fn deref(&self) -> &T {
unsafe {
let value = &self.inner().value;
let pointer_to_value = (value as *const MaybeUninit<T>).cast::<T>();
&*(pointer_to_value)
}
}
}
impl<T> Clone for Rc<T> {
#[inline]
fn clone(&self) -> Rc<T> {
self.inner().inc_strong();
Self::from_inner(self.ptr)
}
}
impl<T: Default> Default for Rc<T> {
#[inline]
fn default() -> Rc<T> {
Rc::new(Default::default())
}
}
impl<T: PartialEq> PartialEq for Rc<T> {
#[inline]
fn eq(&self, other: &Rc<T>) -> bool {
**self == **other
}
#[inline]
#[allow(clippy::partialeq_ne_impl)]
fn ne(&self, other: &Rc<T>) -> bool {
**self != **other
}
}
impl<T: Eq> Eq for Rc<T> {}
impl<T: PartialOrd> PartialOrd for Rc<T> {
#[inline(always)]
fn partial_cmp(&self, other: &Rc<T>) -> Option<Ordering> {
(**self).partial_cmp(&**other)
}
#[inline(always)]
fn lt(&self, other: &Rc<T>) -> bool {
**self < **other
}
#[inline(always)]
fn le(&self, other: &Rc<T>) -> bool {
**self <= **other
}
#[inline(always)]
fn gt(&self, other: &Rc<T>) -> bool {
**self > **other
}
#[inline(always)]
fn ge(&self, other: &Rc<T>) -> bool {
**self >= **other
}
}
impl<T: Ord> Ord for Rc<T> {
#[inline]
fn cmp(&self, other: &Rc<T>) -> Ordering {
(**self).cmp(&**other)
}
}
impl<T: Hash> Hash for Rc<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
(**self).hash(state);
}
}
impl<T: fmt::Display> fmt::Display for Rc<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
impl<T: fmt::Debug> fmt::Debug for Rc<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&**self, f)
}
}
impl<T> fmt::Pointer for Rc<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&ptr::addr_of!(**self), f)
}
}
impl<T> From<T> for Rc<T> {
fn from(t: T) -> Self {
Rc::new(t)
}
}
impl<T> From<Box<T>> for Rc<T> {
#[inline]
fn from(v: Box<T>) -> Rc<T> {
Rc::from_box(v)
}
}
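/// A non-owning reference to an allocation managed by [`Rc`]; it does not
/// keep the value alive, and access goes through [`Weak::upgrade`].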
pub struct Weak<T> {
ptr: NonNull<RcBox<T>>,
phantom: PhantomData<RcBox<T>>,
}
mod weak_is_not_send {}
mod weak_is_not_sync {}
impl<T> Weak<T> {
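    /// Constructs a `Weak<T>` that is not attached to any allocation; calling
    /// [`Weak::upgrade`] on it always returns `None`.
    ///
    /// Sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let empty: Weak<i32> = Weak::new();
    /// assert!(empty.upgrade().is_none());
    /// assert_eq!(empty.strong_count(), 0);
    /// ```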
#[must_use]
pub fn new() -> Weak<T> {
Weak {
ptr: NonNull::new(usize::MAX as *mut RcBox<T>).expect("MAX is not 0"),
phantom: PhantomData,
}
}
}
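/// Returns `true` if `ptr` is the sentinel address (`usize::MAX`) used by
/// `Weak::new` for a `Weak` that was never attached to an allocation.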
pub(crate) fn is_dangling<T: ?Sized>(ptr: *mut T) -> bool {
let address = ptr.cast::<()>() as usize;
address == usize::MAX
}
struct WeakInner<'a> {
weak: &'a Cell<usize>,
strong: &'a Cell<usize>,
}
impl<T> Weak<T> {
#[must_use]
pub fn as_ptr(&self) -> *const T {
let ptr: *mut RcBox<T> = NonNull::as_ptr(self.ptr);
if is_dangling(ptr) {
ptr as *const T
} else {
unsafe { ptr::addr_of_mut!((*ptr).value) as *const T }
}
}
#[must_use]
pub fn into_raw(self) -> *const T {
let result = self.as_ptr();
mem::forget(self);
result
}
pub unsafe fn from_raw(ptr: *const T) -> Self {
let ptr = if is_dangling(ptr as *mut T) {
ptr as *mut RcBox<T>
} else {
let offset = data_offset(ptr);
(ptr as *mut u8)
.offset(-offset)
.with_metadata_of(ptr as *mut RcBox<T>)
};
Weak {
ptr: NonNull::new_unchecked(ptr),
phantom: PhantomData,
}
}
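    /// Attempts to obtain an `Rc` to the value, returning `None` if the value
    /// has already been dropped (or was never initialized).
    ///
    /// Sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let strong = Rc::new("hi");
    /// let weak = Rc::downgrade(&strong);
    /// let also_strong = weak.upgrade().expect("value is still alive");
    /// assert!(Rc::ptr_eq(&strong, &also_strong));
    /// ```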
#[must_use]
pub fn upgrade(&self) -> Option<Rc<T>> {
let inner = self.inner()?;
if inner.is_dead() {
None
} else {
inner.inc_strong();
Some(Rc::from_inner(self.ptr))
}
}
#[must_use]
pub fn strong_count(&self) -> usize {
if let Some(inner) = self.inner() {
if inner.is_uninit() {
0
} else {
inner.strong()
}
} else {
0
}
}
#[must_use]
pub fn weak_count(&self) -> usize {
self.inner().map_or(0, |inner| {
if inner.is_uninit() {
0
} else if inner.strong() > 0 {
                inner.weak() - 1
            } else {
0
}
})
}
#[inline]
#[must_use]
fn inner(&self) -> Option<WeakInner<'_>> {
if is_dangling(self.ptr.as_ptr()) {
None
} else {
Some(unsafe {
let ptr = self.ptr.as_ptr();
WeakInner {
strong: &(*ptr).strong,
weak: &(*ptr).weak,
}
})
}
}
#[inline]
#[must_use]
pub fn ptr_eq(&self, other: &Self) -> bool {
self.ptr.as_ptr() == other.ptr.as_ptr()
}
}
unsafe impl<#[may_dangle] T> Drop for Weak<T> {
fn drop(&mut self) {
let inner = if let Some(inner) = self.inner() {
inner
} else {
return;
};
inner.dec_weak();
if inner.weak() == 0 {
unsafe {
let layout = Layout::for_value_raw(self.ptr.as_ptr());
Global.deallocate(self.ptr.cast(), layout);
}
}
}
}
impl<T> Clone for Weak<T> {
#[inline]
fn clone(&self) -> Weak<T> {
if let Some(inner) = self.inner() {
inner.inc_weak();
}
Weak {
ptr: self.ptr,
phantom: PhantomData,
}
}
}
impl<T: fmt::Debug> fmt::Debug for Weak<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "(Weak)")
}
}
impl<T> Default for Weak<T> {
fn default() -> Weak<T> {
Weak::new()
}
}
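/// Shared reference-count bookkeeping for `RcBox<T>` and `WeakInner<'_>`.
///
/// The strong count reserves two sentinels: `0` marks a value that has been
/// dropped, and `usize::MAX` marks an allocation whose value is not yet
/// initialized (`make_uninit`/`is_uninit`); `is_dead` reports either state,
/// and `inc_strong`/`inc_weak` abort rather than wrap or collide with a
/// sentinel.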
#[doc(hidden)]
pub(crate) trait RcInnerPtr {
fn weak_ref(&self) -> &Cell<usize>;
fn strong_ref(&self) -> &Cell<usize>;
#[inline]
fn strong(&self) -> usize {
self.strong_ref().get()
}
#[inline]
fn inc_strong(&self) {
let strong = self.strong();
if strong == 0 || strong == usize::MAX {
abort();
}
if strong + 1 == usize::MAX {
abort();
}
self.strong_ref().set(strong + 1);
}
#[inline]
fn dec_strong(&self) {
self.strong_ref().set(self.strong() - 1);
}
#[inline]
fn weak(&self) -> usize {
self.weak_ref().get()
}
#[inline]
fn inc_weak(&self) {
let weak = self.weak();
if weak == 0 || weak == usize::MAX {
abort();
}
self.weak_ref().set(weak + 1);
}
#[inline]
fn dec_weak(&self) {
self.weak_ref().set(self.weak() - 1);
}
#[inline]
fn kill(&self) {
self.strong_ref().set(0);
}
#[inline]
fn is_dead(&self) -> bool {
self.strong() == 0 || self.is_uninit()
}
#[inline]
fn is_uninit(&self) -> bool {
self.strong() == usize::MAX
}
#[inline]
fn make_uninit(&self) {
self.strong_ref().set(usize::MAX);
}
}
impl<T> RcInnerPtr for RcBox<T> {
#[inline(always)]
fn weak_ref(&self) -> &Cell<usize> {
&self.weak
}
#[inline(always)]
fn strong_ref(&self) -> &Cell<usize> {
&self.strong
}
}
impl<'a> RcInnerPtr for WeakInner<'a> {
#[inline(always)]
fn weak_ref(&self) -> &Cell<usize> {
self.weak
}
#[inline(always)]
fn strong_ref(&self) -> &Cell<usize> {
self.strong
}
}
impl<T> borrow::Borrow<T> for Rc<T> {
fn borrow(&self) -> &T {
self
}
}
impl<T> AsRef<T> for Rc<T> {
fn as_ref(&self) -> &T {
self
}
}
impl<T> Unpin for Rc<T> {}
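/// Computes the byte offset of the `value` field within `RcBox<T>` by probing
/// an uninitialized `RcBox` on the stack. `Rc::from_raw` and `Weak::from_raw`
/// subtract this offset from a value pointer to recover the `RcBox` pointer.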
unsafe fn data_offset<T>(ptr: *const T) -> isize {
let _ = ptr;
let rcbox = MaybeUninit::<RcBox<T>>::uninit();
let base_ptr = rcbox.as_ptr();
let base_ptr = base_ptr as usize;
let field_ptr = ptr::addr_of!((*(base_ptr as *const RcBox<T>)).value);
let field_ptr = field_ptr as usize;
(field_ptr - base_ptr) as isize
}
#[inline]
unsafe fn box_free<T, A: Allocator>(ptr: NonNull<T>, alloc: A) {
let layout = Layout::for_value_raw(ptr.as_ptr());
alloc.deallocate(ptr.cast(), layout);
}