//! Reference-counted storage for garbage-collectible instances.
use std::mem::ManuallyDrop;
use std::ops::Deref;
use std::ptr::NonNull;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::{self, Relaxed};
use super::Collectible;
/// [`RefCounted`] stores an instance of type `T`, and a union of the link to the next
/// [`RefCounted`] or the reference counter.
pub(super) struct RefCounted<T> {
    // The stored instance; exposed read-only through the `Deref` implementation.
    instance: T,
    // Holds the atomic reference counter while the instance is live, or the link to the
    // next garbage instance once it has been retired for collection.
    next_or_refcnt: LinkOrRefCnt,
}
impl<T> RefCounted<T> {
    /// Creates a new underlying instance with a reference count of `1`.
    #[inline]
    pub(super) const fn new(t: T) -> RefCounted<T> {
        RefCounted {
            instance: t,
            next_or_refcnt: LinkOrRefCnt::new(),
        }
    }

    /// Tries to add a strong reference to the underlying instance.
    ///
    /// `order` must be as strong as `Acquire` for the caller to correctly validate the newest
    /// state of the pointer. Note that `order` is also used as the failure ordering of the
    /// internal read-modify-write operation, so it must not be `Release` or `AcqRel`.
    ///
    /// Returns `false` if the instance is no longer alive (the counter is even).
    #[inline]
    pub(super) fn try_add_ref(&self, order: Ordering) -> bool {
        self.ref_cnt()
            .fetch_update(
                order,
                order,
                |r| {
                    // An odd counter means the instance is still alive; each strong
                    // reference contributes `2`, preserving the parity.
                    if r % 2 == 1 {
                        Some(r + 2)
                    } else {
                        None
                    }
                },
            )
            .is_ok()
    }

    /// Returns a mutable reference to the instance if it is owned exclusively.
    #[inline]
    pub(super) fn get_mut(&mut self) -> Option<&mut T> {
        // A count of exactly `1` (odd, single reference) means the caller is the sole owner.
        if self.ref_cnt().load(Relaxed) == 1 {
            Some(&mut self.instance)
        } else {
            None
        }
    }

    /// Adds a strong reference to the underlying instance.
    #[inline]
    pub(super) fn add_ref(&self) {
        // The caller already holds a reference, therefore the counter is odd and cannot
        // concurrently drop to zero; a single wait-free `fetch_add` is sufficient — no
        // compare-exchange retry loop is needed.
        let prev = self.ref_cnt().fetch_add(2, Relaxed);
        debug_assert_eq!(prev % 2, 1);
        debug_assert!(prev <= usize::MAX - 2, "reference count overflow");
    }

    /// Drops a strong reference to the underlying instance.
    ///
    /// Returns `true` if the last reference was dropped.
    #[inline]
    pub(super) fn drop_ref(&self) -> bool {
        // It does not have to be a load-acquire as everything's synchronized via the global
        // epoch. In addition to that, it also does not have to be read-modify-write as a
        // reference count increment is guaranteed to be observed by the one that decrements
        // the last reference.
        let mut current = self.ref_cnt().load(Relaxed);
        debug_assert_ne!(current, 0);
        loop {
            // Dropping the last reference (`current == 1`) clears the counter to `0`,
            // an even value that makes `try_add_ref` fail from now on; otherwise the
            // count is decremented by `2`. A plain `fetch_sub(2)` would be wrong for
            // the last-reference case, hence the compare-exchange loop.
            let new = if current <= 1 { 0 } else { current - 2 };
            match self
                .ref_cnt()
                .compare_exchange(current, new, Relaxed, Relaxed)
            {
                Ok(_) => break,
                Err(actual) => current = actual,
            }
        }
        current == 1
    }

    /// Returns a reference to its reference count.
    #[inline]
    pub(super) fn ref_cnt(&self) -> &AtomicUsize {
        // SAFETY: the union is initialized with the `refcnt` variant, and the counter
        // occupies the first pointer-sized word of both variants; the `next` variant is
        // presumably only written once the instance has been retired for collection
        // (via `next_ptr_mut`, which requires exclusive access) — at which point the
        // counter is no longer read. TODO(review): confirm against the collector logic
        // in the parent module.
        unsafe { &self.next_or_refcnt.refcnt.0 }
    }
}
// Allows transparent, read-only access to the stored instance.
impl<T> Deref for RefCounted<T> {
    type Target = T;
    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.instance
    }
}
impl<T> Collectible for RefCounted<T> {
    #[inline]
    fn next_ptr_mut(&mut self) -> &mut Option<NonNull<dyn Collectible>> {
        // SAFETY: exclusive access is guaranteed by `&mut self`. Reinterpreting the
        // union as the `next` link presumably only happens once the instance has been
        // handed to the collector and the reference counter is no longer in use —
        // TODO(review): confirm against the reclamation logic in the parent module.
        unsafe { &mut self.next_or_refcnt.next }
    }
}
/// [`LinkOrRefCnt`] is a union of a dynamic pointer to [`Collectible`] and a reference count.
pub(super) union LinkOrRefCnt {
    // Link to the next garbage instance once this one has been retired for collection.
    next: Option<NonNull<dyn Collectible>>,
    // The atomic reference counter, paired with a `usize` so the variant matches the
    // size of `next` (a fat pointer); `ManuallyDrop` is required because union fields
    // with non-`Copy` types must not be dropped implicitly.
    refcnt: ManuallyDrop<(AtomicUsize, usize)>,
}
impl LinkOrRefCnt {
    // Creates a new `LinkOrRefCnt` holding a reference count of `1`: an odd value marks
    // the instance as alive with a single strong reference; the trailing `0` pads the
    // variant to the size of the `next` fat pointer.
    #[inline]
    const fn new() -> Self {
        LinkOrRefCnt {
            refcnt: ManuallyDrop::new((AtomicUsize::new(1), 0)),
        }
    }
}