use std::mem::ManuallyDrop;
use std::ops::Deref;
use std::ptr::NonNull;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::{self, Relaxed};
use super::Collectible;
/// [`RefCounted`] pairs an instance of `T` with a word that is either a live
/// reference count or, once the object is retired, an intrusive link to the
/// next garbage entry (see [`LinkOrRefCnt`]).
pub(super) struct RefCounted<T> {
// The wrapped value; exposed immutably through `Deref` and mutably via
// `get_mut` when the holder is the sole owner.
instance: T,
// Overlaps the reference counter with the garbage-list link: while the
// object is alive this holds the counter (initialized by `Default`), and
// `Collectible::next_ptr_mut` repurposes it as the `next` pointer.
next_or_refcnt: LinkOrRefCnt,
}
impl<T> RefCounted<T> {
/// Creates a new [`RefCounted`] wrapping `t`.
///
/// The counter starts at `1` (see [`LinkOrRefCnt::default`]): the encoding
/// used throughout this `impl` keeps the count odd while the object is
/// live, and every additional strong reference adds `2`.
#[inline]
pub(super) fn new(t: T) -> RefCounted<T> {
RefCounted {
instance: t,
next_or_refcnt: LinkOrRefCnt::default(),
}
}
/// Tries to acquire one more strong reference.
///
/// Succeeds (increments the counter by `2`) only while the counter is odd,
/// i.e. the object has not been retired; returns `false` otherwise.
///
/// NOTE(review): `order` is passed as both the success and the failure
/// ordering of `fetch_update`; per `AtomicUsize::fetch_update`, a failure
/// ordering of `Release` or `AcqRel` panics — callers must not pass those.
#[inline]
pub(super) fn try_add_ref(&self, order: Ordering) -> bool {
self.ref_cnt()
.fetch_update(
order,
order,
|r| {
// Odd counter value == still live; even presumably means the
// object was retired — TODO confirm against the retire path.
if r % 2 == 1 {
Some(r + 2)
} else {
None
}
},
)
.is_ok()
}
/// Returns a mutable reference to the instance if the caller holds the
/// only reference (counter exactly `1`, the initial value).
///
/// `Relaxed` suffices here only under the caller's guarantee of exclusive
/// access implied by `&mut self`.
#[inline]
pub(super) fn get_mut(&mut self) -> Option<&mut T> {
if self.ref_cnt().load(Relaxed) == 1 {
Some(&mut self.instance)
} else {
None
}
}
/// Unconditionally acquires one more strong reference (counter `+= 2`).
///
/// Unlike [`Self::try_add_ref`], this assumes the object is known to be
/// live (counter odd) — debug builds assert it.
#[inline]
pub(super) fn add_ref(&self) {
let mut current = self.ref_cnt().load(Relaxed);
debug_assert_eq!(current % 2, 1);
// NOTE(review): overflow is only asserted against the initially loaded
// value, not re-checked after a CAS failure refreshes `current`.
debug_assert!(current <= usize::MAX - 2, "reference count overflow");
// Classic CAS loop: on contention, retry with the freshly observed value.
while let Err(actual) =
self.ref_cnt()
.compare_exchange(current, current + 2, Relaxed, Relaxed)
{
current = actual;
}
}
/// Drops one strong reference (counter `-= 2`).
///
/// Returns `true` if this was the last reference (the counter was exactly
/// `1`), signalling that the caller should retire the object.
#[inline]
pub(super) fn drop_ref(&self) -> bool {
let mut current = self.ref_cnt().load(Relaxed);
debug_assert_ne!(current, 0);
loop {
// `current == 1` means this is the final reference: drop to `0`
// (an even value, so `try_add_ref` can no longer succeed).
let new = if current <= 1 { 0 } else { current - 2 };
if let Err(actual) = self
.ref_cnt()
.compare_exchange(current, new, Relaxed, Relaxed)
{
current = actual;
} else {
break;
}
}
// `current` holds the value the successful CAS replaced.
current == 1
}
/// Returns the reference counter.
///
/// SAFETY: this interprets the union as its `refcnt` variant. That is the
/// variant `Default` initializes, and it must still be active — callers
/// must not use this after `next_ptr_mut` has repurposed the union as the
/// garbage-list link.
#[inline]
pub(super) fn ref_cnt(&self) -> &AtomicUsize {
unsafe { &self.next_or_refcnt.refcnt.0 }
}
}
impl<T> Deref for RefCounted<T> {
    type Target = T;

    /// Borrows the wrapped instance.
    #[inline]
    fn deref(&self) -> &Self::Target {
        let Self { instance, .. } = self;
        instance
    }
}
impl<T> Collectible for RefCounted<T> {
/// Exposes the intrusive garbage-list link stored in the union.
///
/// SAFETY: reinterprets the union as its `next` variant. The `&mut self`
/// receiver guarantees exclusive access; presumably this is only invoked
/// once the object has been retired and the counter is no longer in use —
/// TODO confirm against the collector in `super`.
#[inline]
fn next_ptr_mut(&mut self) -> &mut Option<NonNull<dyn Collectible>> {
unsafe { &mut self.next_or_refcnt.next }
}
}
/// A two-word union overlaying the live reference counter with the intrusive
/// garbage-list link used after the object is retired.
pub(super) union LinkOrRefCnt {
// Fat pointer to the next collectible: two words (data pointer + vtable).
// `Option<NonNull<..>>` is pointer-sized per niche optimization.
next: Option<NonNull<dyn Collectible>>,
// The counter plus a trailing `usize`, presumably padding the variant to
// the same two-word size as the fat pointer — TODO confirm intent.
// `ManuallyDrop` is required because `AtomicUsize` is not `Copy`.
refcnt: ManuallyDrop<(AtomicUsize, usize)>,
}
impl Default for LinkOrRefCnt {
#[inline]
fn default() -> Self {
LinkOrRefCnt {
refcnt: ManuallyDrop::new((AtomicUsize::new(1), 0)),
}
}
}