use crate::sync::{RawMutex, WatchGuardMut, WatchGuardRef};
use crossbeam_utils::CachePadded;
use std::cell::UnsafeCell;
use std::mem::{self, MaybeUninit};
use std::panic::{RefUnwindSafe, UnwindSafe};
use std::sync::atomic::{self, AtomicUsize, Ordering};

/// Heap-allocated state shared by every handle pointing at the same cell.
struct Inner<T> {
    /// The protected value. `MaybeUninit` lets `into_inner` and `Drop`
    /// control exactly when the value's destructor runs.
    val: UnsafeCell<MaybeUninit<T>>,
    /// Reader-writer lock guarding access to `val`.
    state: RawMutex,
    /// Number of live handles; cache-padded to avoid false sharing with the lock.
    ref_count: CachePadded<AtomicUsize>,
}

impl<T> Inner<T> {
    fn new(val: T) -> Self {
        Self {
            val: UnsafeCell::new(MaybeUninit::new(val)),
            state: RawMutex::new(),
            ref_count: CachePadded::new(AtomicUsize::new(1)),
        }
    }
}

// `AtomicCell` hands out `&T` to concurrent readers and `&mut T` to a single
// writer, and any clone can drop the value from any thread, so both impls
// require `T: Send + Sync` (the same bounds `Arc<RwLock<T>>` imposes).
unsafe impl<T: Send + Sync> Send for AtomicCell<T> {}
unsafe impl<T: Send + Sync> Sync for AtomicCell<T> {}

impl<T> UnwindSafe for AtomicCell<T> {}
impl<T> RefUnwindSafe for AtomicCell<T> {}

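/// A shared, pointer-sized handle to a lock-protected value, comparable in
/// spirit to an `Arc<RwLock<T>>` that hands out guards instead of poisoning.
///
/// A minimal usage sketch (marked `ignore` since the surrounding crate paths
/// are not shown here); it assumes the `WatchGuard*` types deref to `T` and
/// release the lock when dropped:
///
/// ```ignore
/// let cell = AtomicCell::new(1);
/// cell.store(2);
/// assert_eq!(*cell.get(), 2);
/// assert_eq!(cell.swap(3), 2);
/// ```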
#[repr(transparent)]
pub struct AtomicCell<T> {
    ptr: *const Inner<T>,
}

impl<T> AtomicCell<T> {
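    /// Allocates a new cell owning `val`, with a reference count of one.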
    pub fn new(val: T) -> Self {
        let inner = Box::new(Inner::new(val));
        let ptr = Box::into_raw(inner);
        Self { ptr }
    }

    #[inline(always)]
    fn inner(&self) -> &Inner<T> {
        // SAFETY: `ptr` comes from `Box::into_raw` in `new` and is only freed
        // once the last handle drops, so it is valid for `&self`'s lifetime.
        unsafe { &*self.ptr }
    }

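    /// Acquires the shared (read) lock and returns a guard dereferencing to
    /// the value. Multiple readers may hold guards at once; the lock is
    /// expected to be released when the guard drops (see `WatchGuardRef`).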
    pub fn get(&self) -> WatchGuardRef<'_, T> {
        let lock = &self.inner().state;
        lock.lock_shared();
        // SAFETY: the shared lock is held, so no exclusive reference exists,
        // and the value was initialized in `new`.
        let val = unsafe { (*self.inner().val.get()).assume_init_ref() };
        WatchGuardRef::new(val, lock)
    }

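    /// Runs `f` with shared access to the value, releasing the lock before
    /// returning `f`'s result.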
    pub fn with<R>(&self, f: impl FnOnce(&T) -> R) -> R {
        let guard = self.get();
        f(&guard)
    }

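    /// Acquires the exclusive (write) lock and returns a guard with mutable
    /// access to the value; the lock is presumably held until the guard
    /// drops (see `WatchGuardMut`).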
    pub fn get_mut(&self) -> WatchGuardMut<'_, T> {
        let lock = &self.inner().state;
        lock.lock_exclusive();
        // SAFETY: the exclusive lock is held, so this is the only reference,
        // and the value was initialized in `new`.
        let val = unsafe { (*self.inner().val.get()).assume_init_mut() };
        WatchGuardMut::new(val, lock)
    }

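    /// Runs `f` with exclusive access to the value, releasing the lock before
    /// returning `f`'s result.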
    pub fn with_mut<R>(&self, f: impl FnOnce(&mut T) -> R) -> R {
        let mut guard = self.get_mut();
        f(&mut guard)
    }

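    /// Returns a raw pointer to the underlying value, bypassing the lock.
    /// The caller is responsible for synchronizing any access through it.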
    #[inline]
    pub fn as_ptr(&self) -> *mut T {
        self.inner().val.get().cast::<T>()
    }

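    /// Consumes this handle and returns the value.
    ///
    /// # Panics
    ///
    /// Panics if other handles to the same cell are still alive.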
    pub fn into_inner(self) -> T {
        // `self` is consumed by value, so if the count is 1 no other handle
        // exists and none can be created; the check cannot race with `clone`.
        let ref_count = self.inner().ref_count.load(Ordering::Acquire);
        assert_eq!(
            ref_count, 1,
            "cannot call into_inner with multiple references"
        );

        let ptr = self.ptr as *mut Inner<T>;
        // Skip `Drop`, which would otherwise drop the value and free the box.
        mem::forget(self);

        // SAFETY: `ptr` came from `Box::into_raw` and we are the sole owner.
        let boxed = unsafe { Box::from_raw(ptr) };

        // SAFETY: the value was initialized in `new` and has not been moved
        // out. Reading it transfers ownership; dropping `boxed` afterwards
        // frees the allocation without running `T`'s destructor again,
        // because `MaybeUninit` never drops its contents.
        unsafe { boxed.val.get().read().assume_init() }
    }

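    /// Applies `f` to the value under the exclusive lock.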
    pub fn update<F>(&self, f: F)
    where
        F: FnOnce(&mut T),
    {
        let mut guard = self.get_mut();
        f(&mut *guard);
    }

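    /// Replaces the value with `val` and returns the previous value.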
    pub fn swap(&self, val: T) -> T {
        let mut guard = self.get_mut();
        mem::replace(&mut *guard, val)
    }

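    /// Overwrites the value with `val`, dropping the previous value.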
    pub fn store(&self, val: T) {
        let mut guard = self.get_mut();
        *guard = val;
    }
}

impl<T: Copy + Eq> AtomicCell<T> {
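    /// Stores `new` if the current value equals `current`, returning the old
    /// value on success. On failure the write guard is returned still locked,
    /// so the caller can inspect or update the value without a second
    /// acquisition.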
    pub fn compare_exchange(&self, current: T, new: T) -> Result<T, WatchGuardMut<'_, T>> {
        let mut guard = self.get_mut();
        if *guard == current {
            let old = mem::replace(&mut *guard, new);
            Ok(old)
        } else {
            Err(guard)
        }
    }
}

impl<T> Clone for AtomicCell<T> {
    fn clone(&self) -> Self {
        // Relaxed suffices here, as in `Arc::clone`: the new handle only
        // reaches another thread through some other synchronized operation.
        self.inner().ref_count.fetch_add(1, Ordering::Relaxed);
        Self { ptr: self.ptr }
    }
}

impl<T> Drop for AtomicCell<T> {
    fn drop(&mut self) {
        let inner = unsafe { &*self.ptr };
        if inner.ref_count.fetch_sub(1, Ordering::Release) == 1 {
            // Synchronize with the `Release` decrements of all other handles
            // before touching the value (the same fence `Arc` uses).
            atomic::fence(Ordering::Acquire);

            // SAFETY: this was the last handle, so nothing else can reach
            // `ptr`. Drop the value explicitly (which `MaybeUninit` will not
            // do for us), then free the allocation.
            unsafe {
                let val_ptr = (*self.ptr).val.get();
                std::ptr::drop_in_place((*val_ptr).assume_init_mut());

                drop(Box::from_raw(self.ptr as *mut Inner<T>));
            }
        }
    }
}
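
// A minimal test sketch. It assumes the `WatchGuard*` guards release their
// locks on drop and that `RawMutex` behaves like a blocking rwlock; adjust
// paths to match the crate layout.
#[cfg(test)]
mod tests {
    use super::AtomicCell;

    #[test]
    fn store_swap_and_read() {
        let cell = AtomicCell::new(1);
        cell.store(2);
        assert_eq!(*cell.get(), 2);
        assert_eq!(cell.swap(3), 2);
        assert_eq!(cell.into_inner(), 3);
    }

    #[test]
    fn compare_exchange_success_and_failure() {
        let cell = AtomicCell::new(10u32);
        assert!(matches!(cell.compare_exchange(10, 11), Ok(10)));
        // On failure the guard still holds the write lock; drop it promptly.
        let err = cell.compare_exchange(99, 0).unwrap_err();
        assert_eq!(*err, 11);
        drop(err);
    }

    #[test]
    fn clones_share_state_across_threads() {
        let cell = AtomicCell::new(0usize);
        let clone = cell.clone();
        std::thread::spawn(move || clone.update(|v| *v += 1))
            .join()
            .unwrap();
        assert_eq!(*cell.get(), 1);
    }
}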