// potential_well/atomic.rs
1//! Atomic containers.
2use core::{
3    fmt,
4    mem::ManuallyDrop,
5    ops::Deref,
6    pin::Pin,
7    ptr::{NonNull, null_mut},
8    sync::atomic::{AtomicPtr, Ordering, fence},
9};
10
11use crate::traits::{Well, WellMut};
12
/// Potentially empty atomic potential well.
///
/// Internally, this just wraps a pointer to `<T as Deref>::Target` and uses atomic pointer
/// operations to access it. However, the number of operations on the pointer is limited to
/// ensure correctness in safe code.
///
/// Invariant: a null pointer represents the empty (`None`) state; a non-null pointer is
/// always one obtained from [`Well::remove`] and is owned by this container.
pub struct AtomicOption<T: Well>(AtomicPtr<<T as Deref>::Target>);
19
20/// By default, nothing is stored in the atomic.
21impl<T: Well> Default for AtomicOption<T> {
22    #[inline]
23    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
24    fn default() -> Self {
25        AtomicOption::none()
26    }
27}
impl<T: Well> AtomicOption<T> {
    /// Creates atomic without anything inside.
    #[inline]
    pub fn none() -> AtomicOption<T> {
        // A null pointer represents the empty state.
        AtomicOption(AtomicPtr::new(null_mut()))
    }

    /// Creates atomic with something inside.
    #[inline]
    pub fn some(well: T) -> AtomicOption<T> {
        // Take ownership of the well's pointer; it is given back out via `take`/`swap`/drop.
        AtomicOption(AtomicPtr::new(well.remove().as_ptr()))
    }

    /// Gives access to the underlying [`AtomicPtr`].
    ///
    /// # Safety
    ///
    /// The pointer inside the atomic must always be null, or a valid pointer from [`Well::remove`].
    /// Additionally, keep in mind that this atomic *owns* the pointer, and if you want to move it
    /// out, you must put a different pointer in its place first.
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    pub unsafe fn as_raw_unchecked(&self) -> &AtomicPtr<<T as Deref>::Target> {
        &self.0
    }

    /// Loads the inner data as a value.
    ///
    /// Uses [`ManuallyDrop`] to avoid accidentally dropping the value
    /// while the atomic is still in use.
    #[inline]
    fn load_value(&self, ordering: Ordering) -> ManuallyDrop<Option<T>> {
        let loaded = self.0.load(ordering);

        // SAFETY: Data was originally `remove`d from a well.
        ManuallyDrop::new(NonNull::new(loaded).map(|ptr| unsafe { T::insert(ptr) }))
    }

    /// Loads the inner data as an immutable reference.
    ///
    /// This is equivalent to an atomic [`load`].
    ///
    /// NOTE(review): the returned reference borrows `&self`, yet `take`/`swap` also only
    /// need `&self` and hand ownership of the stored value to their caller, who may drop it
    /// while this reference is still live. Soundness presumably relies on guarantees made
    /// by the `Well` impls in `crate::traits` (not visible in this file) — TODO confirm.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load(&self, ordering: Ordering) -> Option<&<T as Deref>::Target> {
        let loaded = self.0.load(ordering);
        match NonNull::new(loaded) {
            // SAFETY: We can read the data from a `Well`.
            Some(ptr) => Some(unsafe { ptr.as_ref() }),
            None => None,
        }
    }

    /// Atomically swaps the data inside the well.
    ///
    /// This is equivalent to an atomic [`swap`].
    ///
    /// [`swap`]: AtomicPtr::swap
    #[inline]
    pub fn swap(&self, well: T, ordering: Ordering) -> Option<T> {
        // Publish the new pointer and reclaim ownership of the old one in a single step.
        let old = self.0.swap(well.remove().as_ptr(), ordering);

        // SAFETY: Data was originally `remove`d from a well.
        NonNull::new(old).map(|old| unsafe { T::insert(old) })
    }

    /// Takes the data out of the well.
    ///
    /// This is equivalent to an atomic [`swap`] with a null pointer.
    ///
    /// [`swap`]: AtomicPtr::swap
    #[inline]
    pub fn take(&self, ordering: Ordering) -> Option<T> {
        // Leave the empty (null) state behind and reclaim whatever was stored.
        let old = self.0.swap(null_mut(), ordering);

        // SAFETY: Data was originally `remove`d from a well.
        NonNull::new(old).map(|old| unsafe { T::insert(old) })
    }

    /// Inserts data into the well.
    ///
    /// This uses [`compare_exchange`] to avoid inserting into the well if it's already full. If you
    /// want to use [`compare_exchange_weak`] instead, use [`insert_weak`].
    ///
    /// On failure, ownership of the value is returned to the caller in the `Err` variant.
    ///
    /// [`insert_weak`]: AtomicOption::insert_weak
    /// [`compare_exchange`]: AtomicPtr::compare_exchange
    /// [`compare_exchange_weak`]: AtomicPtr::compare_exchange_weak
    #[inline]
    pub fn insert(&self, well: T, success: Ordering, failure: Ordering) -> Result<(), T> {
        let ptr = well.remove();
        // Only install the pointer if the well is currently empty (null).
        if self
            .0
            .compare_exchange(null_mut(), ptr.as_ptr(), success, failure)
            .is_err()
        {
            // SAFETY: This is just the `Well` we passed in.
            Err(unsafe { T::insert(ptr) })
        } else {
            Ok(())
        }
    }
    /// Inserts data into the well, sometimes failing spuriously.
    ///
    /// This uses [`compare_exchange_weak`] to avoid inserting into the well if it's already full,
    /// which may spuriously fail. If you want to use [`compare_exchange`] instead, use [`insert`].
    ///
    /// On failure (including spurious failure), ownership of the value is returned to the
    /// caller in the `Err` variant; callers typically retry in a loop.
    ///
    /// [`insert`]: AtomicOption::insert
    /// [`compare_exchange`]: AtomicPtr::compare_exchange
    /// [`compare_exchange_weak`]: AtomicPtr::compare_exchange_weak
    #[inline]
    pub fn insert_weak(&self, well: T, success: Ordering, failure: Ordering) -> Result<(), T> {
        let ptr = well.remove();
        // Weak variant: may fail even when the well is empty.
        if self
            .0
            .compare_exchange_weak(null_mut(), ptr.as_ptr(), success, failure)
            .is_err()
        {
            // SAFETY: This is just the `Well` we passed in.
            Err(unsafe { T::insert(ptr) })
        } else {
            Ok(())
        }
    }
}
152impl<T: WellMut<Target: Unpin>> AtomicOption<T> {
153    /// Loads the inner data as a mutable reference.
154    ///
155    /// This performs a non-atomic access since the atomic is mutably borrowed.
156    #[inline]
157    pub fn load_mut(&mut self) -> Option<&mut <T as Deref>::Target> {
158        let loaded = self.0.get_mut();
159        match NonNull::new(*loaded) {
160            // SAFETY: We can mutate the data from a `WellMut`.
161            Some(mut ptr) => Some(unsafe { ptr.as_mut() }),
162            None => None,
163        }
164    }
165}
166impl<T: WellMut> AtomicOption<Pin<T>>
167where
168    Pin<T>: Well,
169{
170    /// Loads the inner data as a pinned mutable reference.
171    ///
172    /// This is a version of [`load_mut`] that works with pinned values.
173    ///
174    /// [`load_mut`]: AtomicOption::load_mut
175    #[inline]
176    pub fn load_mut_pinned(&mut self) -> Option<Pin<&mut <Pin<T> as Deref>::Target>> {
177        let loaded = self.0.get_mut();
178        match NonNull::new(*loaded) {
179            // SAFETY: We can pull out a pinned pointer safely.
180            Some(mut ptr) => Some(unsafe { Pin::new_unchecked(ptr.as_mut()) }),
181            None => None,
182        }
183    }
184}
185impl<T: Well> From<T> for AtomicOption<T> {
186    #[inline]
187    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
188    fn from(well: T) -> Self {
189        AtomicOption::some(well)
190    }
191}
192impl<T: Well> From<Option<T>> for AtomicOption<T> {
193    #[inline]
194    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
195    fn from(well: Option<T>) -> Self {
196        match well {
197            Some(well) => AtomicOption::some(well),
198            None => AtomicOption::none(),
199        }
200    }
201}
impl<T: Well + Clone> AtomicOption<T> {
    /// Loads a clone of the inner data.
    ///
    /// This still performs an atomic [`load`], but instead of offering a reference, the smart
    /// pointer is cloned instead.
    ///
    /// NOTE(review): the clone happens after the load, so a concurrent `take`/`swap` could
    /// hand the original to another thread (which may drop it) in between; soundness here
    /// presumably relies on guarantees of the `Well` impls — TODO confirm against
    /// `crate::traits`.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load_clone(&self, ordering: Ordering) -> Option<T> {
        // `load_value` wraps the reconstructed well in `ManuallyDrop`, so only the clone is
        // returned and the stored value stays inside the atomic untouched.
        (*self.load_value(ordering)).clone()
    }
}
impl<T: Well> Drop for AtomicOption<T> {
    #[inline]
    fn drop(&mut self) {
        // Synchronize with all prior atomic operations on the pointer before freeing it.
        fence(Ordering::SeqCst);

        // SAFETY: We're dropping, so, it's okay to drop.
        unsafe {
            // `load_value` yields `ManuallyDrop<Option<T>>`; explicitly dropping it here
            // releases the owned pointer (a no-op when the atomic is empty).
            ManuallyDrop::drop(&mut self.load_value(Ordering::SeqCst));
        }
    }
}
225impl<T: Well<Target: fmt::Debug>> fmt::Debug for AtomicOption<T> {
226    #[inline]
227    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
228    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
229        fmt::Debug::fmt(&self.load(Ordering::Relaxed), f)
230    }
231}
232
/// Atomic potential well.
///
/// Internally, this just wraps a pointer to `<T as Deref>::Target` and uses atomic pointer
/// operations to access it. However, the number of operations on the pointer is limited to
/// ensure correctness in safe code.
///
/// Invariant: unlike [`AtomicOption`], the stored pointer is never null — it is always one
/// obtained from [`Well::remove`] and is owned by this container.
pub struct Atomic<T: Well>(AtomicPtr<<T as Deref>::Target>);
239impl<T: Well + Default> Default for Atomic<T> {
240    #[inline]
241    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
242    fn default() -> Self {
243        Atomic::new(Default::default())
244    }
245}
impl<T: Well> Atomic<T> {
    /// Creates atomic with a value.
    #[inline]
    pub fn new(well: T) -> Atomic<T> {
        // Take ownership of the well's pointer; it is given back out via `swap`/drop.
        Atomic(AtomicPtr::new(well.remove().as_ptr()))
    }

    /// Gives access to the underlying [`AtomicPtr`].
    ///
    /// # Safety
    ///
    /// The pointer inside the atomic must always be a valid pointer from [`Well::remove`] and
    /// therefore must not be null. Additionally, keep in mind that this atomic *owns* the
    /// pointer, and if you want to move it out, you must put a different pointer in its place
    /// first.
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    pub unsafe fn as_raw_unchecked(&self) -> &AtomicPtr<<T as Deref>::Target> {
        &self.0
    }

    /// Loads the inner data as a value.
    ///
    /// Uses [`ManuallyDrop`] to avoid accidentally dropping the value
    /// while the atomic is still in use.
    #[inline]
    fn load_value(&self, ordering: Ordering) -> ManuallyDrop<T> {
        let loaded = self.0.load(ordering);

        // SAFETY: Data was originally `remove`d from a well, and is never null.
        ManuallyDrop::new(unsafe { T::insert(NonNull::new_unchecked(loaded)) })
    }

    /// Loads the inner data as an immutable reference.
    ///
    /// This is equivalent to an atomic [`load`].
    ///
    /// NOTE(review): the returned reference borrows `&self`, yet `swap` also only needs
    /// `&self` and hands ownership of the old value to its caller, who may drop it while
    /// this reference is still live. Soundness presumably relies on guarantees made by the
    /// `Well` impls in `crate::traits` (not visible in this file) — TODO confirm.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load(&self, ordering: Ordering) -> &<T as Deref>::Target {
        let loaded = self.0.load(ordering);

        // SAFETY: We can read the data from a `Well`, and the data is never null.
        unsafe { NonNull::new_unchecked(loaded).as_ref() }
    }

    /// Atomically swaps the data inside the well.
    ///
    /// This is equivalent to an atomic [`swap`].
    ///
    /// [`swap`]: AtomicPtr::swap
    #[inline]
    pub fn swap(&self, well: T, ordering: Ordering) -> T {
        // Publish the new pointer and reclaim ownership of the old one in a single step.
        let old = self.0.swap(well.remove().as_ptr(), ordering);

        // SAFETY: Data was originally `remove`d from a well, and is never null.
        unsafe { T::insert(NonNull::new_unchecked(old)) }
    }
}
305impl<T: WellMut<Target: Unpin>> Atomic<T> {
306    /// Loads the inner data as a mutable reference.
307    ///
308    /// This performs a non-atomic access since the atomic is mutably borrowed.
309    #[inline]
310    pub fn load_mut(&mut self) -> &mut <T as Deref>::Target {
311        let loaded = self.0.get_mut();
312
313        // SAFETY: We can mutate the data from a `WellMut`, and it is never null.
314        unsafe { NonNull::new_unchecked(*loaded).as_mut() }
315    }
316}
impl<T: WellMut> Atomic<Pin<T>>
where
    Pin<T>: Well,
{
    /// Loads the inner data as a pinned mutable reference.
    ///
    /// This is a version of [`load_mut`] that works with pinned values.
    ///
    /// [`load_mut`]: Atomic::load_mut
    #[inline]
    pub fn load_mut_pinned(&mut self) -> Pin<&mut <Pin<T> as Deref>::Target> {
        let loaded = self.0.get_mut();

        // SAFETY: We can mutate the data from a `WellMut` as long as it's pinned.
        // The pointer came from `Well::remove` and is therefore never null.
        unsafe { Pin::new_unchecked(NonNull::new_unchecked(*loaded).as_mut()) }
    }
}
334impl<T: Well> From<T> for Atomic<T> {
335    #[inline]
336    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
337    fn from(well: T) -> Self {
338        Atomic::new(well)
339    }
340}
impl<T: Well + Clone> Atomic<T> {
    /// Loads a clone of the inner data.
    ///
    /// This still performs an atomic [`load`], but instead of offering a reference, the smart
    /// pointer is cloned instead.
    ///
    /// NOTE(review): the clone happens after the load, so a concurrent `swap` could hand the
    /// original to another thread (which may drop it) in between; soundness here presumably
    /// relies on guarantees of the `Well` impls — TODO confirm against `crate::traits`.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load_clone(&self, ordering: Ordering) -> T {
        // `load_value` wraps the reconstructed well in `ManuallyDrop`, so only the clone is
        // returned and the stored value stays inside the atomic untouched.
        (*self.load_value(ordering)).clone()
    }
}
impl<T: Well> Drop for Atomic<T> {
    #[inline]
    fn drop(&mut self) {
        // Synchronize with all prior atomic operations on the pointer before freeing it.
        fence(Ordering::SeqCst);

        // SAFETY: We're dropping, so, it's okay to drop.
        unsafe {
            // Reconstruct the owned well and explicitly drop it to release the pointer.
            ManuallyDrop::drop(&mut self.load_value(Ordering::SeqCst));
        }
    }
}
364impl<T: Well<Target: fmt::Debug>> fmt::Debug for Atomic<T> {
365    #[inline]
366    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
367    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
368        fmt::Debug::fmt(&self.load(Ordering::Relaxed), f)
369    }
370}