use std::{fmt::Debug, ops::Deref, sync::atomic::AtomicU32};
use crate::TinyPtr;
/// Heap slot shared by every `TinyArc`/`TinyWeak` pointing at the same value:
/// the strong reference count stored inline next to the value.
/// (Field order matters: the derived `Debug` output is relied on by
/// `TinyArc`'s `Debug` impl below.)
#[derive(Debug)]
struct RefCounted<T> {
    // Number of live strong (`TinyArc`) owners; 0 only while `new_cyclic`
    // is still constructing the value.
    count: AtomicU32,
    value: T,
}
/// Non-owning companion handle to a [`TinyArc`]: carries the slot pointer but
/// does not contribute to the strong count, so it never keeps the value alive.
#[derive(Debug)]
pub struct TinyWeak<T>(TinyPtr<RefCounted<T>>);
// SAFETY: mirrors std's bounds for `sync::Weak` — a weak handle can be sent or
// shared across threads only when `T: Send + Sync`, because any thread holding
// it may upgrade and then access the value. NOTE(review): assumes `TinyPtr`
// itself carries no thread-affine state — TODO confirm against its definition.
unsafe impl<T: Send + Sync> Send for TinyWeak<T> {}
unsafe impl<T: Send + Sync> Sync for TinyWeak<T> {}
impl<T> Clone for TinyWeak<T> {
fn clone(&self) -> Self {
Self(self.0)
}
}
// Presumably generates the shared smart-pointer trait boilerplate for
// `TinyArc` (see `crate::boxed::impl_traits!` for the exact impls) —
// macro body not visible from this file.
crate::boxed::impl_traits!(TinyArc);
impl<T> TinyWeak<T> {
    /// Promotes this weak handle into a strong `TinyArc`, bumping the strong
    /// count of the shared slot.
    ///
    /// NOTE(review): unlike `std::sync::Weak::upgrade`, this performs no
    /// liveness check — upgrading after the last strong owner freed the slot
    /// touches reclaimed storage. Presumably `TinyPtr` detects stale ids;
    /// verify against its `get` implementation.
    pub fn upgrade(&self) -> TinyArc<T> {
        let strong = TinyArc(self.0);
        TinyArc::increase_count(&strong);
        strong
    }
}
/// A compact `Arc`-style shared pointer built on `TinyPtr` slot ids instead of
/// raw addresses; the strong count lives in the pointed-to `RefCounted` slot.
pub struct TinyArc<T>(TinyPtr<RefCounted<T>>);
// SAFETY: mirrors std's bounds for `Arc` — the handle is transferable/shareable
// across threads only when `T: Send + Sync`, since clones on other threads can
// both access the value and be the one to drop it. NOTE(review): assumes
// `TinyPtr` has no thread-affine state — TODO confirm.
unsafe impl<T: Send + Sync> Send for TinyArc<T> {}
unsafe impl<T: Send + Sync> Sync for TinyArc<T> {}
impl<T> TinyArc<T> {
    /// Allocates `value` in a fresh slot with a strong count of 1.
    pub fn new(value: T) -> Self {
        Self(TinyPtr::new(RefCounted {
            count: AtomicU32::new(1),
            value,
        }))
    }
    /// Builds a self-referential value: the slot is allocated first with a
    /// strong count of 0, a `TinyWeak` to it is handed to `data_fn`, and the
    /// returned value is then written into the slot before the count is raised
    /// to 1. `Deref` panics while the count is still 0, so the half-built
    /// value cannot be observed through an upgraded weak inside `data_fn`.
    pub fn new_cyclic<F>(data_fn: F) -> Self where F: FnOnce(TinyWeak<T>) -> T {
        // NOTE(review): conjuring an uninitialized `T` via `assume_init` is
        // undefined behavior for any `T` with invalid bit patterns (bools,
        // references, most enums, ...). The slot should store `MaybeUninit<T>`
        // instead — TODO rework once `TinyPtr` can express that.
        let mut ptr = TinyPtr::new(RefCounted {
            count: AtomicU32::new(0),
            value: unsafe { std::mem::MaybeUninit::<T>::uninit().assume_init() },
        });
        let data = data_fn(TinyWeak(ptr));
        // SAFETY: `ptr` refers to the slot allocated above, and the raw
        // `write` overwrites the placeholder without reading or dropping it.
        unsafe {
            let ptr = ptr.get_mut();
            std::ptr::addr_of_mut!(ptr.value).write(data);
        }
        let this = Self(ptr);
        // Value is initialized: take the first strong reference (0 -> 1).
        Self::increase_count(&this);
        this
    }
    /// Raw pointer to the shared value; does not affect the count.
    pub fn as_ptr(this: &Self) -> *const T {
        &this.get().value
    }
    /// True when both handles refer to the same slot.
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        this.0.id() == other.0.id()
    }
    /// Creates a non-owning handle to the same slot (no count change — weak
    /// references are untracked in this design).
    pub fn downgrade(this: &Self) -> TinyWeak<T> {
        TinyWeak(this.0)
    }
    fn get(&self) -> &RefCounted<T> {
        // SAFETY: a live `&self` is a strong owner, which keeps the slot
        // allocated for the duration of the borrow.
        unsafe { &*self.0.get() }
    }
    /// Bumps the strong count, returning the previous value. Relaxed suffices
    /// here, as in `std::sync::Arc::clone`: a new owner can only be created
    /// through an existing one, which already keeps the slot alive.
    fn increase_count(this: &Self) -> u32 {
        this.get()
            .count
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed)
    }
    /// Drops one strong count, returning the previous value.
    ///
    /// AcqRel is required (this was Relaxed, which let the final drop race
    /// with other owners' last uses of `T`): the Release half publishes this
    /// owner's writes to whichever thread ends up freeing the slot, and the
    /// Acquire half makes the last owner — the one that observes 1 — see every
    /// other owner's writes before `T` is dropped. This matches the ordering
    /// std's `Arc` achieves with `fetch_sub(Release)` + an Acquire fence.
    fn decrease_count(this: &Self) -> u32 {
        this.get()
            .count
            .fetch_sub(1, std::sync::atomic::Ordering::AcqRel)
    }
}
impl<T: Debug> Debug for TinyArc<T> {
    /// Renders as `TinyArc { refcount: RefCounted { .. } }`, exposing the
    /// shared count alongside the value.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let inner = self.get();
        let mut builder = f.debug_struct("TinyArc");
        builder.field("refcount", inner);
        builder.finish()
    }
}
impl<T> Deref for TinyArc<T> {
    type Target = T;
    /// Borrows the shared value.
    ///
    /// Panics while the strong count is still 0, i.e. when a weak upgraded
    /// inside `new_cyclic`'s closure is dereferenced before the value exists.
    fn deref(&self) -> &Self::Target {
        let inner = self.get();
        let built = inner.count.load(std::sync::atomic::Ordering::Relaxed) != 0;
        assert!(built, "Attempted to dereference a TinyArc before it was built");
        &inner.value
    }
}
impl<T> Clone for TinyArc<T> {
    /// Creates another strong owner of the same slot by copying the pointer
    /// and bumping the shared count.
    fn clone(&self) -> Self {
        let duplicate = Self(self.0);
        Self::increase_count(&duplicate);
        duplicate
    }
}
impl<T> std::ops::Drop for TinyArc<T> {
    /// Releases one strong reference. The owner that observes a previous
    /// count of 1 is the last one and reclaims the slot via `take`.
    fn drop(&mut self) {
        if Self::decrease_count(self) == 1 {
            self.0.take();
        }
    }
}
#[cfg(test)]
mod tests {
    // NOTE(review): `AtomicBool` appears unused in this module body — possibly
    // consumed by the `make_drop_indicator!` macro expansion; confirm before
    // removing.
    use std::sync::atomic::AtomicBool;
    use super::*;
    use crate::tests::{*, make_drop_indicator};
    // Two threads each hold a clone; both must read the value, and the slot
    // must be dropped exactly once after both are gone.
    #[test]
    fn multiple_thread_access() {
        make_drop_indicator!(__ind, p2, 42);
        let p2 = TinyArc::new(p2);
        let p1 = p2.clone();
        let t1 = std::thread::spawn(move || {
            assert_eq!(*p1, 42);
        });
        let t2 = std::thread::spawn(move || {
            assert_eq!(*p2, 42);
        });
        t1.join().unwrap();
        t2.join().unwrap();
        assert_dropped!(__ind);
    }
    // `Option<TinyArc<_>>` should be pointer-sized: relies on a niche in
    // `TinyPtr` (presumably a NonZero id — confirm in TinyPtr's definition).
    #[test]
    fn assert_optimization_test() {
        assert_eq!(
            std::mem::size_of::<Option<TinyArc<u8>>>(),
            std::mem::size_of::<TinyArc<u8>>()
        );
    }
    // Single owner: value readable, dropped exactly once on drop.
    #[test]
    fn single_arc_test() {
        make_drop_indicator!(__ind, b, 42);
        let b = TinyArc::new(b);
        assert_eq!(*b, 42);
        std::mem::drop(b);
        assert_dropped!(__ind)
    }
    // Many sequential allocations; ignored under "1byteid" where the id space
    // is too small for this many live-then-freed slots.
    #[test]
    #[cfg_attr(feature = "1byteid", ignore = "uses too much memory")]
    fn multiple_arc_test() {
        for i in 0..100 {
            make_drop_indicator!(__ind, val, i);
            {
                let b = TinyArc::new(val);
                assert_eq!(*b, i);
            }
            assert_dropped!(__ind)
        }
    }
    // Repeated clone/drop cycles must leave the count balanced: value drops
    // only after the original handle is dropped.
    #[test]
    fn multiple_refs_test() {
        make_drop_indicator!(__ind, v, 30);
        let i = TinyArc::new(v);
        for _x in 0..200 {
            let j = i.clone();
            assert_eq!(*j, 30);
        }
        std::mem::drop(i);
        assert_dropped!(__ind)
    }
    // `new_cyclic`: the value holds a weak to itself; the weak must upgrade to
    // the same slot, and the cycle must not prevent the drop.
    #[test]
    fn make_cyclic_test() {
        #[derive(Debug)]
        struct Narcissus {
            _drop_indicator: DropIndicator<()>,
            self_: TinyWeak<Narcissus>,
        }
        make_drop_indicator!(__ind, ind, ());
        let narc = TinyArc::new_cyclic(|weak| {
            Narcissus{self_: weak, _drop_indicator: ind}
        });
        assert!(TinyArc::ptr_eq(&narc, &narc.self_.upgrade()));
        std::mem::drop(narc);
        assert_dropped!(__ind);
    }
    // Upgrading (and immediately dropping) the weak inside the closure drives
    // the count 0 -> 1 -> 0, freeing the half-built slot; construction must
    // then panic rather than write into reclaimed storage.
    #[test]
    #[should_panic]
    fn make_cyclic_panic_test() {
        TinyArc::<()>::new_cyclic(|weak| {
            weak.upgrade();
        });
    }
}