#![cfg_attr(not(test), no_std)]
#![warn(clippy::pedantic, clippy::cargo)]
use core::cell::UnsafeCell;
use core::marker::PhantomData;
use core::mem::MaybeUninit;
use core::ops::Deref;
use core::sync::atomic::{AtomicU8, AtomicUsize, Ordering};
use core::ptr::drop_in_place;
/// A cell holding a value that can be destroyed (dropped) at any time through
/// a shared reference, while outstanding [`Value`] guards keep it alive.
#[derive(Debug)]
pub struct AtomicDestroy<T> {
    // Number of `Value` guards currently borrowing the payload.
    held_count: AtomicUsize,
    // State machine: 0 = alive, 1 = destroy requested while guards were held,
    // 2 = payload dropped (or never initialized — see `empty`).
    drop_state: AtomicU8,
    // The payload; only initialized while drop_state < 2.
    value: UnsafeCell<MaybeUninit<T>>,
}
impl<T> AtomicDestroy<T> {
    /// Creates a live cell holding `value`.
    #[must_use]
    pub const fn new(value: T) -> Self {
        Self {
            held_count: AtomicUsize::new(0),
            drop_state: AtomicU8::new(0),
            value: UnsafeCell::new(MaybeUninit::new(value)),
        }
    }

    /// Creates a cell that is already destroyed; [`Self::get`] always returns
    /// `None` and the (uninitialized) payload is never dropped.
    #[must_use]
    pub const fn empty() -> Self {
        Self {
            held_count: AtomicUsize::new(0),
            // State 2 marks the payload as gone so `Drop` won't touch it.
            drop_state: AtomicU8::new(2),
            value: UnsafeCell::new(MaybeUninit::uninit()),
        }
    }

    /// `Some(v)` becomes [`Self::new`]`(v)`; `None` becomes [`Self::empty`].
    #[must_use]
    pub fn maybe_new(value: Option<T>) -> Self {
        match value {
            Some(v) => Self::new(v),
            None => Self::empty(),
        }
    }

    /// Returns a guard that keeps the value alive, or `None` if the value has
    /// been (or is being) destroyed.
    pub fn get(&self) -> Option<Value<T, &Self>> {
        Value::new(self)
    }

    /// Runs `f` on the value if it is still alive, returning `f`'s result.
    pub fn with<R>(&self, f: impl FnOnce(&T) -> R) -> Option<R> {
        self.get().map(|v| f(&*v))
    }

    /// Requests destruction of the value. If no guards are outstanding the
    /// value is dropped immediately; otherwise the last guard to be released
    /// drops it. Subsequent `destroy` calls are no-ops.
    pub fn destroy(&self) {
        // `compare_exchange` replaces the deprecated `compare_and_swap`
        // (deprecated since Rust 1.50); success == old value was 0, i.e. we
        // made the 0 (alive) -> 1 (destroy pending) transition.
        if self
            .drop_state
            .compare_exchange(0, 1, Ordering::SeqCst, Ordering::SeqCst)
            .is_ok()
            // No guards observed: try to finish the destroy ourselves. The
            // `swap` returning 1 means no racing guard drop beat us to state 2.
            && self.held_count.load(Ordering::SeqCst) == 0
            && self.drop_state.swap(2, Ordering::SeqCst) == 1
        {
            // SAFETY: we performed the unique 1 -> 2 transition, so the payload
            // is initialized and no other party will (or did) drop it.
            unsafe {
                self.drop_value();
            }
        }
    }

    /// Drops the payload in place.
    ///
    /// # Safety
    /// The payload must be initialized, not yet dropped, and not concurrently
    /// referenced; callers guarantee this via the `drop_state` transitions.
    unsafe fn drop_value(&self) {
        drop_in_place((*self.value.get()).as_mut_ptr());
    }
}
// SAFETY: the cell coordinates all cross-thread access through SeqCst atomics,
// and `&AtomicDestroy<T>` can hand out `&T` (via `Value`) and can drop a `T`
// from any thread, hence both bounds require `T: Send + Sync`.
// NOTE(review): `Send` alone may be derivable with only `T: Send` — the
// stricter bound here is kept as-is; confirm before loosening.
unsafe impl<T: Send + Sync> Send for AtomicDestroy<T> {}
unsafe impl<T: Send + Sync> Sync for AtomicDestroy<T> {}
impl<T> Drop for AtomicDestroy<T> {
    /// Drops the payload unless it is already gone.
    fn drop(&mut self) {
        // State 2 means the payload was already dropped (or never existed, as
        // in `empty`); states 0 and 1 still own an initialized value.
        let state = self.drop_state.load(Ordering::SeqCst);
        if state < 2 {
            // SAFETY: `&mut self` proves no guards are alive, and the payload
            // is initialized because `drop_state` never reached 2.
            unsafe { self.drop_value() };
        }
    }
}
impl<T: Clone> Clone for AtomicDestroy<T> {
    /// Clones the current value into a fresh live cell; a destroyed cell
    /// clones to an [`AtomicDestroy::empty`] one.
    fn clone(&self) -> Self {
        Self::maybe_new(self.with(T::clone))
    }
}
/// A guard that keeps the value inside an [`AtomicDestroy`] alive for as long
/// as the guard itself is held; dereferences to `T`.
#[derive(Debug)]
pub struct Value<T, R: Deref<Target = AtomicDestroy<T>>> {
    // Handle (typically `&AtomicDestroy<T>`) to the owning cell.
    inner: R,
    // Expresses the logical borrow of a `T` without storing one.
    phantom: PhantomData<T>,
}
impl<T, R: Deref<Target = AtomicDestroy<T>>> Value<T, R> {
    /// Attempts to acquire a guard on the cell's value, returning `None` if
    /// the value has been (or is being) destroyed.
    pub fn new(inner: R) -> Option<Self> {
        // Register the hold BEFORE inspecting drop_state: a concurrent
        // `destroy` then either sees our count (and defers the drop to us) or
        // has already advanced drop_state (and we bail out below). Checking
        // first would open a window where the value is dropped under us.
        inner.held_count.fetch_add(1, Ordering::SeqCst);
        let this = Self {
            inner,
            phantom: PhantomData,
        };
        if this.inner.drop_state.load(Ordering::SeqCst) > 0 {
            // Destroy already requested: dropping `this` here releases the
            // count and, if we were the last holder, finishes the destroy.
            None
        } else {
            Some(this)
        }
    }
}
impl<T, R: Deref<Target = AtomicDestroy<T>>> Deref for Value<T, R> {
    type Target = T;

    /// Borrows the protected value for the lifetime of this guard.
    fn deref(&self) -> &Self::Target {
        let slot = self.inner.value.get();
        // SAFETY: a live guard contributes to `held_count`, which blocks
        // destruction, so the payload is initialized and not yet dropped.
        unsafe { &*(*slot).as_ptr() }
    }
}
impl<T, R: Deref<Target = AtomicDestroy<T>>> Drop for Value<T, R> {
    /// Releases this guard's hold; if it was the last guard and a destroy is
    /// pending, drops the value on the cell's behalf.
    fn drop(&mut self) {
        // Old count == 1 means we were the last holder. `compare_exchange`
        // replaces the deprecated `compare_and_swap` (deprecated since Rust
        // 1.50); success == we made the unique 1 (pending) -> 2 (destroyed)
        // transition, losing no race against `destroy` or another guard.
        if self.inner.held_count.fetch_sub(1, Ordering::SeqCst) == 1
            && self
                .inner
                .drop_state
                .compare_exchange(1, 2, Ordering::SeqCst, Ordering::SeqCst)
                .is_ok()
        {
            // SAFETY: the successful 1 -> 2 transition guarantees the payload
            // is initialized and that nobody else will drop it.
            unsafe {
                self.inner.drop_value();
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::AtomicDestroy;
    use std::sync::Arc;
    use std::thread;
    use std::time::{Duration, Instant};

    /// After `destroy`, `get` yields `None`.
    #[test]
    fn test_simple() {
        let cell = AtomicDestroy::new(Box::new(5));
        assert_eq!(**cell.get().unwrap(), 5);
        assert_eq!(**cell.get().unwrap(), 5);
        cell.destroy();
        assert!(cell.get().is_none());
    }

    /// Outstanding guards keep the value readable across `destroy`.
    #[test]
    fn test_keep_alive() {
        let cell = AtomicDestroy::new(Box::new(5));
        let first = cell.get().unwrap();
        let second = cell.get().unwrap();
        assert_eq!(**first, 5);
        assert_eq!(**second, 5);
        cell.destroy();
        // Guards taken before the destroy still see the value...
        assert_eq!(**first, 5);
        assert_eq!(**second, 5);
        // ...but no new guard can be acquired.
        assert!(cell.get().is_none());
        drop(first);
        assert_eq!(**second, 5);
        assert!(cell.get().is_none());
        drop(second);
        assert!(cell.get().is_none());
    }

    /// An `empty` cell never hands out a guard.
    #[test]
    fn test_empty() {
        assert!(<AtomicDestroy<()>>::empty().get().is_none());
    }

    /// Hammers concurrent `get` readers against concurrent `destroy` callers.
    #[test]
    fn stress_test() {
        let deadline = Instant::now() + Duration::from_secs(3);
        let cell = Arc::new(AtomicDestroy::new(Box::new(5)));
        // Five reader threads spin until the value disappears or time runs out.
        let mut handles: Vec<_> = (0..5)
            .map(|_| {
                let cell = cell.clone();
                thread::spawn(move || {
                    while Instant::now() < deadline {
                        match cell.get() {
                            Some(guard) => assert_eq!(**guard, 5),
                            None => break,
                        }
                    }
                })
            })
            .collect();
        thread::sleep(Duration::from_secs(1));
        // Then five destroyer threads race to destroy it repeatedly.
        for _ in 0..5 {
            let cell = cell.clone();
            handles.push(thread::spawn(move || {
                for _ in 0..800 {
                    cell.destroy();
                }
            }));
        }
        for handle in handles {
            handle.join().unwrap();
        }
    }
}