#![no_std]
#![deny(missing_docs)]
#[cfg(not(feature = "std"))]
mod imports {
extern crate alloc;
pub(super) use alloc::boxed::Box;
}
#[cfg(feature = "std")]
mod imports {
extern crate std;
pub(super) use std::boxed::Box;
pub(super) use std::fmt;
}
use imports::*;
use core::mem;
use core::ops;
use core::pin::Pin;
use core::ptr;
use core::ptr::NonNull;
use core::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
/// Result of [`ParentArc::try_unwrap`]: the inner value on success, or the
/// rejected `ParentArc` (wrapped in a [`TryUnwrapError`]) handed back on failure.
pub type TryUnwrapResult<T> = Result<T, TryUnwrapError<T>>;
/// Reasons why [`ParentArc::try_unwrap`] refused to take the value out.
/// The original `ParentArc` is returned inside each variant so the caller
/// can retry.
pub enum TryUnwrapError<T> {
    /// The parent was not locked while children were still alive; unwrapping
    /// would first have to acquire the lock.
    WouldLock(ParentArc<T>),
    /// Child references were still alive; unwrapping would have to block
    /// (spin) until they are dropped.
    WouldBlock(ParentArc<T>),
}
#[cfg(feature = "std")]
impl<T> fmt::Debug for TryUnwrapError<T> {
    /// Prints only the variant name; the contained `ParentArc` is elided
    /// because `T` carries no `Debug` bound.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let variant = match self {
            TryUnwrapError::WouldLock(_) => "WouldLock(...)",
            TryUnwrapError::WouldBlock(_) => "WouldBlock(...)",
        };
        f.write_str(variant)
    }
}
/// Owner side of the shared allocation. The parent can hand out
/// [`LockWeak`] handles, lock out new upgrades, and eventually reclaim the
/// value once every [`ChildArc`] is gone.
pub struct ParentArc<T> {
    // Raw pointer to the heap control block (value + lock + child counter).
    ptr: NonNull<Womb<T>>,
}
impl<T> ParentArc<T> {
    /// Heap-allocates `data` together with its lock and child counter.
    pub fn new(data: T) -> Self {
        Self {
            ptr: Womb::as_nnptr(data),
        }
    }

    /// Convenience constructor returning the parent pinned.
    pub fn pin(data: T) -> Pin<ParentArc<T>> {
        // SAFETY: the value lives on the heap behind `ptr` and `ParentArc`
        // never moves it.
        unsafe { Pin::new_unchecked(ParentArc::new(data)) }
    }

    /// Acquires the lock, spinning until it is free. While the lock is held,
    /// [`LockWeak::upgrade`] refuses to create new children.
    pub fn lock(&self) {
        let lock = &self.inner().lock;
        // `compare_and_swap` is deprecated; spin on `compare_exchange_weak`.
        // Success ordering is `Acquire` so nothing after the acquisition can
        // be reordered before it (the original used `Release` here, which
        // provides no acquire semantics on lock entry).
        while lock
            .compare_exchange_weak(false, true, Ordering::Acquire, Ordering::Relaxed)
            .is_err()
        {}
    }

    /// Returns `true` while the parent holds the lock.
    pub fn is_locked(&self) -> bool {
        self.inner().lock.load(Ordering::Relaxed)
    }

    /// Releases the lock so weak handles may upgrade again.
    pub fn unlock(&self) {
        // A plain release store suffices; the original spun on a CAS loop
        // even though there is nothing to read back.
        self.inner().lock.store(false, Ordering::Release);
    }

    /// Creates a weak handle without checking the lock.
    pub fn downgrade(other: &Self) -> LockWeak<T> {
        LockWeak { ptr: other.ptr }
    }

    /// Creates a weak handle only while the parent is unlocked; returns
    /// `None` otherwise.
    pub fn try_downgrade(other: &Self) -> Option<LockWeak<T>> {
        if other.inner().lock.load(Ordering::Relaxed) {
            return None;
        }
        Some(LockWeak { ptr: other.ptr })
    }

    /// Takes the value out, locking first and then spinning until every
    /// [`ChildArc`] has been dropped.
    pub fn block_into_inner(self) -> T {
        let this = self.inner();
        self.lock();
        // Spin until all children have released their reference.
        while this.strong.load(Ordering::Acquire) != 0 {}
        unsafe {
            // Move the value out by bitwise copy, then skip `Drop`.
            // NOTE(review): the `Womb` allocation is leaked on purpose(?) —
            // outstanding `LockWeak`s still hold this pointer and would
            // dangle if it were freed; the lock stays set so upgrades keep
            // returning `None`. TODO confirm the leak is by design.
            let elem = ptr::read(&this.data);
            mem::forget(self);
            elem
        }
    }

    /// Non-blocking variant of [`Self::block_into_inner`].
    ///
    /// # Errors
    /// [`TryUnwrapError::WouldLock`] when unlocked with live children;
    /// [`TryUnwrapError::WouldBlock`] when children are still alive.
    pub fn try_unwrap(other: Self) -> TryUnwrapResult<T> {
        let this = other.inner();
        // Unlocked with live children: the caller would first need `lock()`.
        if !this.lock.load(Ordering::Relaxed) && this.strong.load(Ordering::Relaxed) > 0 {
            return Err(TryUnwrapError::WouldLock(other));
        }
        // Children still alive: unwrapping would have to spin.
        if this.strong.load(Ordering::Relaxed) != 0 {
            return Err(TryUnwrapError::WouldBlock(other));
        }
        // NOTE(review): if the parent is unlocked at this point, a `LockWeak`
        // could still upgrade between the checks above and the read below —
        // callers are presumably expected to `lock()` first. TODO confirm.
        unsafe {
            let elem = ptr::read(&this.data);
            mem::forget(other);
            Ok(elem)
        }
    }

    /// Shared access to the control block.
    fn inner(&self) -> &Womb<T> {
        // SAFETY: `ptr` originates from `Womb::as_nnptr` and the allocation
        // is never freed while a `ParentArc` exists.
        unsafe { self.ptr.as_ref() }
    }
}
impl<T> AsRef<T> for ParentArc<T> {
    /// Borrows the stored value straight out of the shared allocation.
    fn as_ref(&self) -> &T {
        let womb = self.inner();
        &womb.data
    }
}
impl<T> ops::Deref for ParentArc<T> {
    type Target = T;

    /// Lets `*parent` read the stored value; delegates to [`AsRef`].
    fn deref(&self) -> &Self::Target {
        self.as_ref()
    }
}
impl<T> Drop for ParentArc<T> {
    // Blocks (spins) until every ChildArc has been dropped before the parent
    // handle disappears.
    fn drop(&mut self) {
        let this = self.inner();
        // NOTE(review): the lock is NOT taken here, so a LockWeak may still
        // upgrade while we wait; and the heap allocation (and the value) is
        // never freed — apparently leaked so outstanding LockWeaks never
        // dangle. TODO confirm both behaviors are intended.
        while this.strong.load(Ordering::Acquire) != 0 {}
    }
}
// Heap control block shared by parent, weak and child handles.
struct Womb<T> {
    // The protected value; moved out by `ptr::read` when the parent unwraps.
    data: T,
    // `true` while the parent is locking out new upgrades.
    lock: AtomicBool,
    // Number of live `ChildArc`s.
    strong: AtomicUsize,
}
impl<T> Womb<T> {
    /// Heap-allocates a fresh control block (unlocked, zero children) and
    /// hands ownership over as a raw non-null pointer.
    fn as_nnptr(data: T) -> NonNull<Self> {
        let womb = Box::new(Self {
            data,
            lock: AtomicBool::new(false),
            strong: AtomicUsize::new(0),
        });
        // Leak the box: lifetime is managed manually through raw pointers.
        NonNull::from(Box::leak(womb))
    }
}
/// Non-owning handle created by [`ParentArc::downgrade`]. It can be sent to
/// other threads and upgraded into a [`ChildArc`] while the parent is
/// unlocked.
pub struct LockWeak<T> {
    // Copy of the parent's control-block pointer; carries no ownership.
    ptr: NonNull<Womb<T>>,
}
impl<T> LockWeak<T> {
    /// Attempts to create a [`ChildArc`], registering a new child reference.
    ///
    /// Returns `None` when the parent holds the lock (it is preparing to
    /// reclaim the value).
    pub fn upgrade(&self) -> Option<ChildArc<T>> {
        let this = self.inner()?;
        // Fast path: parent is locked, no new children allowed.
        if this.lock.load(Ordering::Relaxed) {
            return None;
        }
        // Publish our reference first. A single `fetch_add` replaces the
        // original hand-rolled CAS loop (identical effect).
        this.strong.fetch_add(1, Ordering::SeqCst);
        // Re-check after publishing: if the parent locked in the meantime it
        // may already be spinning on `strong`, so back out. The original
        // checked the lock only *before* incrementing, leaving a window in
        // which a child could register after the parent had observed zero
        // children.
        if this.lock.load(Ordering::SeqCst) {
            this.strong.fetch_sub(1, Ordering::SeqCst);
            return None;
        }
        Some(ChildArc::from(self.ptr))
    }

    /// Returns the control block, or `None` for the all-ones sentinel pointer.
    fn inner(&self) -> Option<&Womb<T>> {
        let address = self.ptr.as_ptr() as *mut () as usize;
        // NOTE(review): nothing in this file ever sets the pointer to
        // `usize::MAX`, so this sentinel check looks vestigial — TODO confirm.
        if address == usize::MAX {
            None
        } else {
            // SAFETY: non-sentinel pointers originate from `Womb::as_nnptr`.
            Some(unsafe { self.ptr.as_ref() })
        }
    }
}
// SAFETY: a LockWeak is only a pointer to the shared Womb; all access to the
// control block goes through atomics. NOTE(review): there is no `T: Send`
// bound here, so a LockWeak can move non-Send data across threads via
// `upgrade()` — TODO confirm this is intentional.
unsafe impl<T> Send for LockWeak<T> {}
/// Reading handle produced by [`LockWeak::upgrade`]. While any `ChildArc` is
/// alive the parent's unwrap operations keep spinning.
pub struct ChildArc<T> {
    // Pointer to the shared control block; counted in `Womb::strong`.
    ptr: NonNull<Womb<T>>,
}
impl<T> ChildArc<T> {
fn from(ptr: NonNull<Womb<T>>) -> Self {
Self { ptr }
}
fn inner(&self) -> &Womb<T> {
unsafe { self.ptr.as_ref() }
}
}
impl<T> AsRef<T> for ChildArc<T> {
    /// Borrows the shared value.
    fn as_ref(&self) -> &T {
        let data = &self.inner().data;
        data
    }
}
impl<T> ops::Deref for ChildArc<T> {
    type Target = T;

    /// Lets `*child` read the shared value; delegates to [`AsRef`].
    fn deref(&self) -> &Self::Target {
        self.as_ref()
    }
}
impl<T> Drop for ChildArc<T> {
    /// Unregisters this child so a waiting parent can make progress.
    fn drop(&mut self) {
        // A single atomic decrement; equivalent to the original hand-rolled
        // `compare_exchange_weak` loop. Cannot underflow: a live ChildArc
        // implies `strong >= 1` (incremented in `LockWeak::upgrade`).
        self.inner().strong.fetch_sub(1, Ordering::SeqCst);
    }
}
#[cfg(all(test, not(feature = "no_std")))]
// NOTE(review): every other cfg in this file gates on `feature = "std"`; a
// `no_std` feature is referenced nowhere else, so this is effectively plain
// `#[cfg(test)]` — TODO confirm the intended feature name.
mod tests {
    extern crate std;
    use super::*;
    use std::sync;
    use std::thread;
    use std::vec::Vec;

    // Smoke test: construction alone must not panic.
    #[test]
    fn new() {
        let _ = ParentArc::new(2);
    }

    // One child thread increments through an upgraded weak handle; the
    // parent then reclaims the value, spinning until the child has dropped.
    #[test]
    fn one_simple_thread() {
        let m = ParentArc::new(sync::Mutex::new(0));
        let _ = thread::spawn({
            let weak = ParentArc::downgrade(&m);
            move || match weak.upgrade() {
                Some(mutex) => *mutex.lock().unwrap() += 1,
                None => {}
            }
        })
        .join();
        let _: sync::Mutex<usize> = m.block_into_inner();
    }

    // Same as above but the join happens only after the parent has already
    // taken the value back — block_into_inner must not deadlock.
    #[test]
    fn join_after_thread() {
        let m = ParentArc::new(sync::Mutex::new(0));
        let h = thread::spawn({
            let weak = ParentArc::downgrade(&m);
            move || match weak.upgrade() {
                Some(mutex) => *mutex.lock().unwrap() += 1,
                None => {}
            }
        });
        let _: sync::Mutex<usize> = m.block_into_inner();
        let _ = h.join();
    }

    // Ten sequential spawn+join rounds against the same parent.
    #[test]
    fn multiple_threads() {
        let m = ParentArc::new(sync::Mutex::new(0));
        for _ in 0..10 {
            let _ = thread::spawn({
                let weak = ParentArc::downgrade(&m);
                move || match weak.upgrade() {
                    Some(mutex) => *mutex.lock().unwrap() += 1,
                    None => {}
                }
            })
            .join();
        }
        let _: sync::Mutex<usize> = m.block_into_inner();
    }

    // A thread upgrades in a loop until the parent locks (upgrade returns
    // None), which is its signal to exit.
    #[test]
    fn loop_read_thread() {
        let m = ParentArc::new(sync::Mutex::new(0));
        let h = thread::spawn({
            let weak = ParentArc::downgrade(&m);
            move || loop {
                match weak.upgrade() {
                    Some(mutex) => *mutex.lock().unwrap() += 1,
                    None => break,
                }
            }
        });
        let _: sync::Mutex<usize> = m.block_into_inner();
        let _ = h.join();
    }

    // Ten concurrent looping readers, all expected to exit once the parent
    // starts reclaiming the value.
    #[test]
    fn many_loop_read_threads() {
        let m = ParentArc::new(sync::Mutex::new(0));
        let mut vh = Vec::new();
        for _ in 0..10 {
            let h = thread::spawn({
                let weak = ParentArc::downgrade(&m);
                move || loop {
                    match weak.upgrade() {
                        Some(mutex) => *mutex.lock().unwrap() += 1,
                        None => break,
                    }
                }
            });
            vh.push(h);
        }
        let _: sync::Mutex<usize> = m.block_into_inner();
        for h in vh {
            let _ = h.join();
        }
    }

    // Thread i == 1 panics; the panic surfaces through `join().unwrap()` in
    // the main thread, satisfying `#[should_panic]`.
    #[test]
    #[should_panic]
    fn one_panic_read_threads() {
        let m = ParentArc::new(sync::atomic::AtomicUsize::new(0));
        let mut vh = Vec::new();
        for i in 0..10 {
            let h = thread::spawn({
                let weak = ParentArc::downgrade(&m);
                move || loop {
                    match weak.upgrade() {
                        Some(at) => {
                            if i != 1 {
                                at.store(1, sync::atomic::Ordering::SeqCst);
                            } else {
                                panic!()
                            }
                        }
                        None => break,
                    }
                }
            });
            vh.push(h);
        }
        // NOTE(review): this sleeps 100 *nanoseconds* — presumably meant to
        // let the threads start; possibly intended to be milliseconds.
        thread::sleep(std::time::Duration::new(0, 100));
        let _: sync::atomic::AtomicUsize = m.block_into_inner();
        for h in vh {
            h.join().unwrap();
        }
    }
}