//! basin2_lib/ilib/atomic_ref.rs

use std::fmt::{self, Debug};
use std::sync::atomic::{AtomicPtr, Ordering};
use std::sync::Arc;
use std::ptr::null_mut;
use std::ops::Deref;

7// n-settable, any-readonly-gettable option
8pub struct AtomicRef<T: Send + Sync + ?Sized + 'static> {
9    item: AtomicPtr<Arc<T>>,
10}
11
12impl<T: Send + Sync + ?Sized + 'static> Default for AtomicRef<T> {
13    fn default() -> AtomicRef<T> {
14        AtomicRef::new()
15    }
16}
17
18impl<T: Debug + Send + Sync + ?Sized + 'static> Debug for AtomicRef<T> {
19    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
20        (*self.get()).fmt(f)
21    }
22}
23
24impl<T: Clone + Send + Sync + ?Sized + 'static> Clone for AtomicRef<T> {
25    fn clone(&self) -> Self {
26        let ptr = self.item.load(Ordering::Relaxed);
27        let ptr = if ptr == null_mut::<Arc<T>>() {
28            ptr
29        } else {
30            Box::into_raw(Box::new(self.get())) as *mut Arc<T>
31        };
32        AtomicRef {
33            item: AtomicPtr::new(ptr),
34        }
35    }
36}
37
38impl<T: PartialEq + Send + Sync + ?Sized + 'static> PartialEq for AtomicRef<T> {
39    fn eq(&self, other: &Self) -> bool {
40        return (*self.get()) == (*other.get());
41    }
42}
43
44impl<T: Send + Sync + ?Sized + 'static> Drop for AtomicRef<T> {
45    fn drop(&mut self) {
46        let ptr = self.item.load(Ordering::Relaxed);
47        if ptr != null_mut() {
48            drop(unsafe { Box::from_raw(ptr) });
49        }
50    }
51}
52
53impl<T: Send + Sync + ?Sized + 'static> From<Arc<T>> for AtomicRef<T> {
54    fn from(item: Arc<T>) -> AtomicRef<T> {
55        return AtomicRef {
56            item: AtomicPtr::new(Box::into_raw(Box::new(item)) as *mut Arc<T>),
57        };
58    }
59}
60
61impl<T: Send + Sync + ?Sized + 'static> Deref for AtomicRef<T> {
62    type Target = T;
63
64    fn deref(&self) -> &T {
65        let ptr = self.item.load(Ordering::Relaxed);
66        if ptr == null_mut::<Arc<T>>() {
67            panic!("attempt to get value of unset AtomicRef!");
68        }
69        unsafe { ptr.as_ref().unwrap() }
70    }
71}
72
73impl<T: Send + Sync + ?Sized + 'static> AtomicRef<T> {
74    
75    pub fn new() -> AtomicRef<T> {
76        AtomicRef {
77            item: AtomicPtr::new(null_mut::<Arc<T>>()),
78        }
79    }
80
81    pub fn set(&self, item: Arc<T>) {
82        let boxed_item = Box::into_raw(Box::new(item)) as *mut Arc<T>;
83        let old_item = self.item.swap(boxed_item, Ordering::Relaxed);
84        if old_item != null_mut() {
85            drop(unsafe { Box::from_raw(old_item) });
86        }
87    }
88
89    pub fn get(&self) -> Arc<T> {
90        let ptr = self.item.load(Ordering::Relaxed);
91        if ptr == null_mut::<Arc<T>>() {
92            panic!("attempt to get value of unset AtomicRef!");
93        }
94        let original_arc = unsafe { Box::from_raw(ptr) };
95        let returning_arc = (*original_arc).clone();
96        Box::into_raw(original_arc);
97        return returning_arc;
98    }
99
100    pub fn is_set(&self) -> bool {
101        let ptr = self.item.load(Ordering::Relaxed);
102        ptr != null_mut::<Arc<T>>()
103    }
104}
105
106#[cfg(test)]
107mod tests {
108    use super::*;
109
110    #[derive(Debug, PartialEq, Eq)]
111    struct TestStruct {
112        value: u32,
113    }
114
115    #[test]
116    fn test_ref_can_set() {
117        let setter = AtomicRef::<TestStruct>::new();
118        assert_eq!(setter.is_set(), false);
119        setter.set(Arc::new(TestStruct { value: 243523 }));
120        assert_eq!(setter.is_set(), true);
121
122        assert_eq!(*setter.get(), TestStruct { value: 243523 });
123        assert_eq!(setter.is_set(), true);
124        let gotten = setter.get();
125        drop(setter);
126        assert_eq!(*gotten, TestStruct { value: 243523 });
127        assert_eq!(Arc::strong_count(&gotten), 1);
128    }
129
130}