use crate::inner::Inner;
use std::cmp::Ordering;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::ops::Deref;
use std::ptr::NonNull;
use std::sync::atomic::{self, AtomicUsize};
// Weight handed to every freshly minted handle. Cloning halves a handle's
// local weight instead of touching the shared counter, so a large starting
// weight (1 << 16 = 65_536) allows many cheap clones before a handle runs
// out and must add fresh weight to the shared allocation.
const DEFAULT_WEIGHT: usize = 1 << 16;
/// A thread-safe, reference-counted pointer using *weighted* reference
/// counting: each handle carries a local weight that is a share of the
/// shared strong weight kept in [`Inner`]. Cloning usually just splits the
/// local weight (no shared-counter traffic); the pointee is dropped once the
/// shared strong weight reaches zero.
pub struct Wrc<T: ?Sized> {
    // This handle's share of the shared strong weight.
    weight: AtomicUsize,
    // Shared allocation holding the data and the strong/weak weight counters.
    ptr: NonNull<Inner<T>>,
}
impl<T> Wrc<T> {
    /// Allocates a shared `Inner` holding `data` and returns the first
    /// strong handle, carrying the full starting weight.
    #[inline]
    pub fn new(data: T) -> Wrc<T> {
        let boxed = Box::new(Inner::new(data, DEFAULT_WEIGHT, DEFAULT_WEIGHT));
        Wrc {
            weight: AtomicUsize::new(DEFAULT_WEIGHT),
            // `Box::leak` yields a valid (never-null) reference, so the
            // fallible `NonNull::new(Box::into_raw(..)).unwrap()` round-trip
            // is unnecessary.
            ptr: NonNull::from(Box::leak(boxed)),
        }
    }

    /// Returns a mutable reference to the data when `this` is the only
    /// handle of any kind, `None` otherwise.
    ///
    /// Uniqueness holds when this handle's local weight equals the shared
    /// strong weight (no other strong handle holds any weight) and the weak
    /// weight is exactly `DEFAULT_WEIGHT` — the share held collectively by
    /// the strong handles — meaning no `Weak` is outstanding.
    #[inline]
    pub fn get_mut(this: &mut Wrc<T>) -> Option<&mut T> {
        let local_weight = this.weight.load(atomic::Ordering::Acquire);
        let total_weight = this.inner().strong_weight();
        let weak_weight = this.inner().weak_weight();
        if local_weight == total_weight && weak_weight == DEFAULT_WEIGHT {
            // SAFETY: the checks above prove this is the only handle, and
            // `&mut self` rules out concurrent access through it.
            Some(unsafe { &mut this.ptr.as_mut().data })
        } else {
            None
        }
    }

    /// Creates a [`Weak`] handle, registering a fresh share of weak weight
    /// on the shared allocation.
    pub fn downgrade(this: &Wrc<T>) -> Weak<T> {
        this.inner().add_weak_weight(DEFAULT_WEIGHT);
        Weak {
            weight: AtomicUsize::new(DEFAULT_WEIGHT),
            ptr: this.ptr,
        }
    }

    /// Total strong weight currently recorded on the shared allocation.
    /// Note this is a *weight*, not a handle count.
    pub fn total_weight(this: &Wrc<T>) -> usize {
        this.inner().strong_weight()
    }

    /// Whether two handles point at the same allocation.
    pub fn ptr_eq(this: &Wrc<T>, other: &Wrc<T>) -> bool {
        this.ptr == other.ptr
    }

    /// Shared allocation behind this handle.
    #[inline]
    fn inner(&self) -> &Inner<T> {
        // SAFETY: the strong weight held by this handle keeps the
        // allocation alive for as long as `self` exists.
        unsafe { self.ptr.as_ref() }
    }
}
// SAFETY: `Wrc<T>` hands out shared access to `T` from multiple threads, so
// it may cross threads only when `T` is both `Send` and `Sync` — the same
// bounds `std::sync::Arc` uses. The weight bookkeeping itself is atomic.
unsafe impl<T: ?Sized + Sync + Send> Send for Wrc<T> {}
// SAFETY: see the `Send` impl above; `&Wrc<T>` exposes only `&T` plus
// atomic counter operations.
unsafe impl<T: ?Sized + Sync + Send> Sync for Wrc<T> {}
impl<T: ?Sized> Wrc<T> {
    /// This handle's local share of the shared strong weight.
    #[inline]
    pub(crate) fn local_weight(&self) -> usize {
        self.weight.load(atomic::Ordering::Relaxed)
    }

    /// Gives up `weight` from the local share and returns what remains.
    #[inline]
    fn drop_local_weight(&self, weight: usize) -> usize {
        let previous = self.weight.fetch_sub(weight, atomic::Ordering::Relaxed);
        debug_assert!(previous >= weight, "local weight underflow");
        previous - weight
    }
}
impl<T> Clone for Wrc<T> {
    /// Clones the handle by splitting its local weight in half when there is
    /// enough to split; otherwise mints a fresh `DEFAULT_WEIGHT` share on
    /// the shared strong counter for the new handle.
    fn clone(&self) -> Self {
        let current = self.local_weight();
        let spawned_weight = if current > 1 {
            // Cheap path: hand half of our local share to the new handle
            // without touching the shared allocation.
            let half = current >> 1;
            self.drop_local_weight(half);
            half
        } else {
            // Our share is exhausted; register new weight globally.
            self.inner().add_strong_weight(DEFAULT_WEIGHT);
            DEFAULT_WEIGHT
        };
        Wrc {
            weight: AtomicUsize::new(spawned_weight),
            ptr: self.ptr,
        }
    }
}
impl<T: ?Sized> Drop for Wrc<T> {
    /// Returns this handle's entire local weight to the shared strong
    /// counter; the last strong handle drops the data, and — if no `Weak`
    /// remains — frees the allocation.
    fn drop(&mut self) {
        let local = self.local_weight();
        // Scope each shared borrow to a single call: the original code kept
        // one `&Inner` alive across the `self.ptr.as_mut()` below, which
        // creates a mutable reference from the same pointer and invalidates
        // the shared borrow (undefined behavior under Stacked Borrows).
        let remaining = unsafe { self.ptr.as_ref() }.drop_strong_weight(local);
        if remaining > 0 {
            return;
        }
        // We observed the strong weight hit zero; synchronize with every
        // other strong handle's release before touching the data.
        atomic::fence(atomic::Ordering::Acquire);
        // SAFETY: strong weight is zero, so no other strong handle exists
        // and we have exclusive access to `data`.
        unsafe {
            std::mem::ManuallyDrop::drop(&mut self.ptr.as_mut().data);
        }
        // Strong handles collectively hold DEFAULT_WEIGHT of weak weight;
        // release it now that the data is gone.
        let weak_remaining =
            unsafe { self.ptr.as_ref() }.drop_weak_weight(DEFAULT_WEIGHT);
        if weak_remaining > 0 {
            return;
        }
        atomic::fence(atomic::Ordering::Acquire);
        // SAFETY: both counters reached zero, so no handle of any kind can
        // reach the allocation; reconstituting the Box frees it.
        unsafe {
            drop(Box::from_raw(self.ptr.as_ptr()));
        }
    }
}
impl<T> Deref for Wrc<T> {
    type Target = T;

    /// Borrows the pointed-to value; valid for the lifetime of this handle.
    #[inline]
    fn deref(&self) -> &Self::Target {
        let shared = self.inner();
        &shared.data
    }
}
impl<T: fmt::Display> fmt::Display for Wrc<T> {
    /// Formats the pointed-to value transparently, like `Arc` does.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.inner().data.fmt(f)
    }
}

impl<T: fmt::Debug> fmt::Debug for Wrc<T> {
    /// Debug-formats the pointed-to value transparently.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.inner().data.fmt(f)
    }
}
impl<T: PartialEq> PartialEq for Wrc<T> {
    /// Compares the pointed-to values, not the pointers.
    fn eq(&self, other: &Wrc<T>) -> bool {
        let lhs: &T = &self.inner().data;
        let rhs: &T = &other.inner().data;
        lhs == rhs
    }
}

impl<T: PartialOrd> PartialOrd for Wrc<T> {
    /// Orders by the pointed-to values.
    fn partial_cmp(&self, other: &Wrc<T>) -> Option<Ordering> {
        let lhs: &T = &self.inner().data;
        let rhs: &T = &other.inner().data;
        lhs.partial_cmp(rhs)
    }
}

impl<T: Ord> Ord for Wrc<T> {
    /// Totally orders by the pointed-to values.
    fn cmp(&self, other: &Wrc<T>) -> Ordering {
        let lhs: &T = &self.inner().data;
        let rhs: &T = &other.inner().data;
        lhs.cmp(rhs)
    }
}

// Equality is delegated to `T`, so `Wrc<T>` is `Eq` whenever `T` is.
impl<T: Eq> Eq for Wrc<T> {}
impl<T: Default> Default for Wrc<T> {
    /// Wraps `T`'s default value in a fresh allocation.
    fn default() -> Wrc<T> {
        Wrc::new(T::default())
    }
}

impl<T: Hash> Hash for Wrc<T> {
    /// Hashes the pointed-to value, so `Wrc<T>` hashes exactly like `T`.
    fn hash<H: Hasher>(&self, state: &mut H) {
        let value: &T = &self.inner().data;
        value.hash(state);
    }
}
impl<T> AsRef<T> for Wrc<T> {
fn as_ref(&self) -> &T {
self.deref()
}
}
impl<T> std::borrow::Borrow<T> for Wrc<T> {
fn borrow(&self) -> &T {
self.deref()
}
}
impl<T> From<T> for Wrc<T> {
fn from(value: T) -> Self {
Wrc::new(value)
}
}
/// A non-owning handle to a [`Wrc`] allocation. It holds *weak* weight that
/// keeps the allocation (but not the data) alive; call `upgrade` to try to
/// obtain a strong handle while the value still exists.
pub struct Weak<T: ?Sized> {
    // This handle's share of the shared weak weight.
    weight: AtomicUsize,
    // Shared allocation; the data inside may already have been dropped.
    ptr: NonNull<Inner<T>>,
}
// SAFETY: `Weak<T>` can produce a `Wrc<T>` via `upgrade`, so it must carry
// the same `Send`/`Sync` bounds as `Wrc<T>` (mirroring `std::sync::Weak`).
unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
// SAFETY: see the `Send` impl above; shared access only performs atomic
// counter operations.
unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}
impl<T: ?Sized> Weak<T> {
    /// Shared allocation behind this handle.
    #[inline]
    fn inner(&self) -> &Inner<T> {
        // SAFETY: the weak weight held by this handle keeps the allocation
        // alive for as long as `self` exists.
        unsafe { self.ptr.as_ref() }
    }

    /// This handle's local share of the shared weak weight.
    #[inline]
    fn get_weight(&self) -> usize {
        self.weight.load(atomic::Ordering::Relaxed)
    }

    /// Gives up `weight` from the local share and returns what remains.
    #[inline]
    fn drop_weight(&self, weight: usize) -> usize {
        let previous = self.weight.fetch_sub(weight, atomic::Ordering::Relaxed);
        debug_assert!(previous >= weight, "weak local weight underflow");
        previous - weight
    }
}
impl<T> Weak<T> {
    /// Attempts to obtain a strong handle; returns `None` once the value
    /// has already been dropped.
    pub fn upgrade(&self) -> Option<Wrc<T>> {
        // Guard clause: the shared counter refuses new strong weight after
        // the strong weight has reached zero.
        if !self.inner().try_add_strong_weight(DEFAULT_WEIGHT) {
            return None;
        }
        Some(Wrc {
            weight: AtomicUsize::new(DEFAULT_WEIGHT),
            ptr: self.ptr,
        })
    }
}
impl<T: ?Sized> Clone for Weak<T> {
    /// Clones the handle by splitting its local weak weight in half when
    /// possible; otherwise mints a fresh `DEFAULT_WEIGHT` share on the
    /// shared weak counter for the new handle.
    fn clone(&self) -> Self {
        let current = self.get_weight();
        let spawned_weight = if current > 1 {
            // Cheap path: split our share without touching shared state.
            let half = current >> 1;
            self.drop_weight(half);
            half
        } else {
            // Our share is exhausted; register new weight globally.
            self.inner().add_weak_weight(DEFAULT_WEIGHT);
            DEFAULT_WEIGHT
        };
        Weak {
            weight: AtomicUsize::new(spawned_weight),
            ptr: self.ptr,
        }
    }
}
impl<T: ?Sized> Drop for Weak<T> {
    /// Returns this handle's weak weight to the shared counter and frees
    /// the allocation if that weight was the last of it.
    fn drop(&mut self) {
        let released = self.get_weight();
        if self.inner().drop_weak_weight(released) == 0 {
            // Synchronize with every earlier weight release before freeing.
            atomic::fence(atomic::Ordering::Acquire);
            // SAFETY: no weak (or strong) weight remains anywhere, so this
            // was the last handle; reconstituting the Box frees the memory.
            unsafe {
                drop(Box::from_raw(self.ptr.as_ptr()));
            }
        }
    }
}
impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
    /// The pointed-to value may already be gone, so only a fixed
    /// placeholder is printed (mirroring `std::sync::Weak`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("(Weak)")
    }
}