// potential_well/atomic.rs

1//! Atomic containers.
2use core::{
3    fmt,
4    marker::PhantomData,
5    mem::ManuallyDrop,
6    ops::Deref,
7    pin::Pin,
8    ptr::{NonNull, null_mut},
9    sync::atomic::{AtomicPtr, Ordering, fence},
10};
11
12use crate::traits::{KineticWell, PotentialWell, Well, WellMut};
13
/// Potentially empty atomic potential well.
///
/// Internally, this just wraps a pointer to `<T as Deref>::Target` and uses atomic pointer
/// operations to access it. However, the number of operations on the pointer is limited to
/// ensure correctness in safe code. A null pointer encodes the "empty" state.
pub struct AtomicOption<T: Well>(AtomicPtr<<T as Deref>::Target>);
20
21/// By default, nothing is stored in the atomic.
22impl<T: Well> Default for AtomicOption<T> {
23    #[inline]
24    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
25    fn default() -> Self {
26        AtomicOption::none()
27    }
28}
29impl<T: Well> AtomicOption<T> {
30    /// Creates atomic without anything inside.
31    #[inline]
32    pub fn none() -> AtomicOption<T> {
33        AtomicOption(AtomicPtr::new(null_mut()))
34    }
35
36    /// Creates atomic with something inside.
37    #[inline]
38    pub fn some(well: T) -> AtomicOption<T> {
39        AtomicOption(AtomicPtr::new(well.remove().as_ptr()))
40    }
41
42    /// Gives access to the underlying [`AtomicPtr`].
43    ///
44    /// # Safety
45    ///
46    /// The pointer inside the atomic must always be null, or a valid pointer from [`Well::remove`].
47    /// Additionally, keep in mind that this atomic *owns* the pointer, and if you want to move it
48    /// out, you must put a different pointer in its place first.
49    #[inline]
50    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
51    pub unsafe fn as_raw_unchecked(&self) -> &AtomicPtr<<T as Deref>::Target> {
52        &self.0
53    }
54
55    /// Loads the inner data as a value.
56    ///
57    /// Uses [`ManuallyDrop`] to avoid accidentally dropping the value
58    /// while the atomic is still in use.
59    #[inline]
60    fn load_value(&self, ordering: Ordering) -> ManuallyDrop<Option<T>> {
61        let loaded = self.0.load(ordering);
62
63        // SAFETY: Data was originally `remove`d from a well.
64        ManuallyDrop::new(NonNull::new(loaded).map(|ptr| unsafe { T::insert(ptr) }))
65    }
66
67    /// Loads the inner data as an immutable reference.
68    ///
69    /// This is equivalent to an atomic [`load`].
70    ///
71    /// [`load`]: AtomicPtr::load
72    #[inline]
73    pub fn load(&self, ordering: Ordering) -> Option<&<T as Deref>::Target> {
74        let loaded = self.0.load(ordering);
75        match NonNull::new(loaded) {
76            // SAFETY: We can read the data from a `Well`.
77            Some(ptr) => Some(unsafe { ptr.as_ref() }),
78            None => None,
79        }
80    }
81
82    /// Atomically swaps the data inside the well.
83    ///
84    /// This is equivalent to an atomic [`swap`].
85    ///
86    /// [`swap`]: AtomicPtr::swap
87    #[inline]
88    pub fn swap(&self, well: T, ordering: Ordering) -> Option<T> {
89        let old = self.0.swap(well.remove().as_ptr(), ordering);
90
91        // SAFETY: Data was originally `remove`d from a well.
92        NonNull::new(old).map(|old| unsafe { T::insert(old) })
93    }
94
95    /// Takes the data out of the well.
96    ///
97    /// This is equivalent to an atomic [`swap`] with a null pointer.
98    ///
99    /// [`swap`]: AtomicPtr::swap
100    #[inline]
101    pub fn take(&self, ordering: Ordering) -> Option<T> {
102        let old = self.0.swap(null_mut(), ordering);
103
104        // SAFETY: Data was originally `remove`d from a well.
105        NonNull::new(old).map(|old| unsafe { T::insert(old) })
106    }
107
108    /// Inserts data into the well.
109    ///
110    /// This uses [`compare_exchange`] to avoid inserting into the well if it's already full. If you
111    /// want to use [`compare_exchange_weak`] instead, use [`insert_weak`].
112    ///
113    /// [`insert_weak`]: AtomicOption::insert_weak
114    /// [`compare_exchange`]: AtomicPtr::compare_exchange
115    /// [`compare_exchange_weak`]: AtomicPtr::compare_exchange_weak
116    #[inline]
117    pub fn insert(&self, well: T, success: Ordering, failure: Ordering) -> Result<(), T> {
118        let ptr = well.remove();
119        if self
120            .0
121            .compare_exchange(null_mut(), ptr.as_ptr(), success, failure)
122            .is_err()
123        {
124            // SAFETY: This is just the `Well` we passed in.
125            Err(unsafe { T::insert(ptr) })
126        } else {
127            Ok(())
128        }
129    }
130    /// Inserts data into the well, sometimes failing spuriously.
131    ///
132    /// This uses [`compare_exchange_weak`] to avoid inserting into the well if it's already full,
133    /// which may spuriously fail. If you want to use [`compare_exchange`] instead, use [`insert`].
134    ///
135    /// [`insert`]: AtomicOption::insert
136    /// [`compare_exchange`]: AtomicPtr::compare_exchange
137    /// [`compare_exchange_weak`]: AtomicPtr::compare_exchange_weak
138    #[inline]
139    pub fn insert_weak(&self, well: T, success: Ordering, failure: Ordering) -> Result<(), T> {
140        let ptr = well.remove();
141        if self
142            .0
143            .compare_exchange_weak(null_mut(), ptr.as_ptr(), success, failure)
144            .is_err()
145        {
146            // SAFETY: This is just the `Well` we passed in.
147            Err(unsafe { T::insert(ptr) })
148        } else {
149            Ok(())
150        }
151    }
152}
153impl<T: WellMut<Target: Unpin>> AtomicOption<T> {
154    /// Loads the inner data as a mutable reference.
155    ///
156    /// This performs a non-atomic access since the atomic is mutably borrowed.
157    #[inline]
158    pub fn load_mut(&mut self) -> Option<&mut <T as Deref>::Target> {
159        let loaded = self.0.get_mut();
160        match NonNull::new(*loaded) {
161            // SAFETY: We can mutate the data from a `WellMut`.
162            Some(mut ptr) => Some(unsafe { ptr.as_mut() }),
163            None => None,
164        }
165    }
166}
167impl<T: WellMut> AtomicOption<Pin<T>>
168where
169    Pin<T>: Well,
170{
171    /// Loads the inner data as a pinned mutable reference.
172    ///
173    /// This is a version of [`load_mut`] that works with pinned values.
174    ///
175    /// [`load_mut`]: AtomicOption::load_mut
176    #[inline]
177    pub fn load_mut_pinned(&mut self) -> Option<Pin<&mut <Pin<T> as Deref>::Target>> {
178        let loaded = self.0.get_mut();
179        match NonNull::new(*loaded) {
180            // SAFETY: We can pull out a pinned pointer safely.
181            Some(mut ptr) => Some(unsafe { Pin::new_unchecked(ptr.as_mut()) }),
182            None => None,
183        }
184    }
185}
186impl<T: Well> From<T> for AtomicOption<T> {
187    #[inline]
188    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
189    fn from(well: T) -> Self {
190        AtomicOption::some(well)
191    }
192}
193impl<T: Well> From<Option<T>> for AtomicOption<T> {
194    #[inline]
195    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
196    fn from(well: Option<T>) -> Self {
197        match well {
198            Some(well) => AtomicOption::some(well),
199            None => AtomicOption::none(),
200        }
201    }
202}
impl<T: Well + Clone> AtomicOption<T> {
    /// Loads a clone of the inner data.
    ///
    /// This still performs an atomic [`load`], but instead of offering a reference, the smart
    /// pointer is cloned instead.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load_clone(&self, ordering: Ordering) -> Option<T> {
        // The transient value stays wrapped in `ManuallyDrop`; only the clone
        // escapes, so the atomic retains ownership of the original.
        (*self.load_value(ordering)).clone()
    }
}
impl<T: Well> Drop for AtomicOption<T> {
    #[inline]
    fn drop(&mut self) {
        // Synchronize with accesses on other threads before the owned value
        // is reclaimed below.
        fence(Ordering::SeqCst);

        // SAFETY: We're dropping, so, it's okay to drop.
        unsafe {
            ManuallyDrop::drop(&mut self.load_value(Ordering::SeqCst));
        }
    }
}
impl<T: Well<Target: fmt::Debug>> fmt::Debug for AtomicOption<T> {
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegates to the inner value's `Debug` impl with the same formatter,
        // so flags such as `{:#?}` are forwarded unchanged.
        fmt::Debug::fmt(&self.load(Ordering::Relaxed), f)
    }
}
233
/// Atomic potential well.
///
/// Internally, this just wraps a pointer to `<T as Deref>::Target` and uses atomic pointer
/// operations to access it. However, the number of operations on the pointer is limited to
/// ensure correctness in safe code. Unlike [`AtomicOption`], the stored pointer is never null.
pub struct Atomic<T: Well>(AtomicPtr<<T as Deref>::Target>);
240impl<T: Well + Default> Default for Atomic<T> {
241    #[inline]
242    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
243    fn default() -> Self {
244        Atomic::new(Default::default())
245    }
246}
impl<T: Well> Atomic<T> {
    /// Creates atomic with a value.
    #[inline]
    pub fn new(well: T) -> Atomic<T> {
        Atomic(AtomicPtr::new(well.remove().as_ptr()))
    }

    /// Gives access to the underlying [`AtomicPtr`].
    ///
    /// # Safety
    ///
    /// The pointer inside the atomic must always be a valid pointer from [`Well::remove`] and
    /// therefore must not be null. Additionally, keep in mind that this atomic *owns* the
    /// pointer, and if you want to move it out, you must put a different pointer in its place
    /// first.
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    pub unsafe fn as_raw_unchecked(&self) -> &AtomicPtr<<T as Deref>::Target> {
        &self.0
    }

    /// Loads the inner data as a value.
    ///
    /// Uses [`ManuallyDrop`] to avoid accidentally dropping the value
    /// while the atomic is still in use.
    #[inline]
    fn load_value(&self, ordering: Ordering) -> ManuallyDrop<T> {
        let loaded = self.0.load(ordering);

        // SAFETY: Data was originally `remove`d from a well, and is never null.
        ManuallyDrop::new(unsafe { T::insert(NonNull::new_unchecked(loaded)) })
    }

    /// Loads the inner data as an immutable reference.
    ///
    /// This is equivalent to an atomic [`load`].
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load(&self, ordering: Ordering) -> &<T as Deref>::Target {
        let loaded = self.0.load(ordering);

        // SAFETY: We can read the data from a `Well`, and the data is never null.
        unsafe { NonNull::new_unchecked(loaded).as_ref() }
    }

    /// Atomically swaps the data inside the well.
    ///
    /// This is equivalent to an atomic [`swap`].
    ///
    /// [`swap`]: AtomicPtr::swap
    #[inline]
    pub fn swap(&self, well: T, ordering: Ordering) -> T {
        let old = self.0.swap(well.remove().as_ptr(), ordering);

        // SAFETY: Data was originally `remove`d from a well, and is never null.
        unsafe { T::insert(NonNull::new_unchecked(old)) }
    }
}
306impl<T: WellMut<Target: Unpin>> Atomic<T> {
307    /// Loads the inner data as a mutable reference.
308    ///
309    /// This performs a non-atomic access since the atomic is mutably borrowed.
310    #[inline]
311    pub fn load_mut(&mut self) -> &mut <T as Deref>::Target {
312        let loaded = self.0.get_mut();
313
314        // SAFETY: We can mutate the data from a `WellMut`, and it is never null.
315        unsafe { NonNull::new_unchecked(*loaded).as_mut() }
316    }
317}
impl<T: WellMut> Atomic<Pin<T>>
where
    Pin<T>: Well,
{
    /// Loads the inner data as a pinned mutable reference.
    ///
    /// This is a version of [`load_mut`] that works with pinned values.
    ///
    /// [`load_mut`]: Atomic::load_mut
    #[inline]
    pub fn load_mut_pinned(&mut self) -> Pin<&mut <Pin<T> as Deref>::Target> {
        let loaded = self.0.get_mut();

        // SAFETY: We can mutate the data from a `WellMut` as long as it's pinned.
        unsafe { Pin::new_unchecked(NonNull::new_unchecked(*loaded).as_mut()) }
    }
}
335impl<T: Well> From<T> for Atomic<T> {
336    #[inline]
337    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
338    fn from(well: T) -> Self {
339        Atomic::new(well)
340    }
341}
impl<T: Well + Clone> Atomic<T> {
    /// Loads a clone of the inner data.
    ///
    /// This still performs an atomic [`load`], but instead of offering a reference, the smart
    /// pointer is cloned instead.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load_clone(&self, ordering: Ordering) -> T {
        // The transient value stays wrapped in `ManuallyDrop`; only the clone
        // escapes, so the atomic retains ownership of the original.
        (*self.load_value(ordering)).clone()
    }
}
impl<T: Well> Drop for Atomic<T> {
    #[inline]
    fn drop(&mut self) {
        // Synchronize with accesses on other threads before the owned value
        // is reclaimed below.
        fence(Ordering::SeqCst);

        // SAFETY: We're dropping, so, it's okay to drop.
        unsafe {
            ManuallyDrop::drop(&mut self.load_value(Ordering::SeqCst));
        }
    }
}
impl<T: Well<Target: fmt::Debug>> fmt::Debug for Atomic<T> {
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegates to the inner value's `Debug` impl with the same formatter,
        // so flags such as `{:#?}` are forwarded unchanged.
        fmt::Debug::fmt(&self.load(Ordering::Relaxed), f)
    }
}
372
/// Type-hoisted [`AtomicOption`].
///
/// Uses [`PotentialWell`] to allow for recursive structures at the cost of some usability.
/// See the documentation for [`PotentialWell`] for more information.
pub struct PotentialAtomicOption<T, W: PotentialWell> {
    /// Type-erased atomic pointer to the stored `T`; null when empty.
    pointer: AtomicPtr<()>,

    /// Marker recording conceptual ownership of an `Option<KineticWell<T, W>>`.
    marker: PhantomData<Option<KineticWell<T, W>>>,
}
384
385/// By default, nothing is stored in the atomic.
386impl<T, W: PotentialWell> Default for PotentialAtomicOption<T, W> {
387    #[inline]
388    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
389    fn default() -> Self {
390        PotentialAtomicOption::none()
391    }
392}
393impl<T, W: PotentialWell> PotentialAtomicOption<T, W> {
394    /// Creates atomic without anything inside.
395    #[inline]
396    pub fn none() -> PotentialAtomicOption<T, W> {
397        PotentialAtomicOption {
398            pointer: AtomicPtr::new(null_mut()),
399            marker: PhantomData,
400        }
401    }
402
403    /// Creates atomic with something inside.
404    #[inline]
405    pub fn some(well: KineticWell<T, W>) -> PotentialAtomicOption<T, W> {
406        PotentialAtomicOption {
407            pointer: AtomicPtr::new(well.remove().as_ptr().cast()),
408            marker: PhantomData,
409        }
410    }
411
412    /// Gives access to the underlying [`AtomicPtr`].
413    ///
414    /// # Safety
415    ///
416    /// The pointer inside the atomic must always be null, or a valid pointer from [`Well::remove`].
417    /// Additionally, keep in mind that this atomic *owns* the pointer, and if you want to move it
418    /// out, you must put a different pointer in its place first.
419    #[inline]
420    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
421    pub unsafe fn as_raw_unchecked(&self) -> &AtomicPtr<()> {
422        &self.pointer
423    }
424
425    /// Loads the inner data as a value.
426    ///
427    /// Uses [`ManuallyDrop`] to avoid accidentally dropping the value
428    /// while the atomic is still in use.
429    #[inline]
430    fn load_value(&self, ordering: Ordering) -> ManuallyDrop<Option<KineticWell<T, W>>> {
431        let loaded = self.pointer.load(ordering).cast::<T>();
432
433        // SAFETY: Data was originally `remove`d from a well.
434        ManuallyDrop::new(
435            NonNull::new(loaded).map(|ptr| unsafe { <KineticWell<T, W>>::insert(ptr) }),
436        )
437    }
438
439    /// Loads the inner data as an immutable reference.
440    ///
441    /// This is equivalent to an atomic [`load`].
442    ///
443    /// [`load`]: AtomicPtr::load
444    #[inline]
445    pub fn load(&self, ordering: Ordering) -> Option<&T> {
446        let loaded = self.pointer.load(ordering).cast::<T>();
447        match NonNull::new(loaded) {
448            // SAFETY: We can read the data from a `Well`.
449            Some(ptr) => Some(unsafe { ptr.as_ref() }),
450            None => None,
451        }
452    }
453
454    /// Atomically swaps the data inside the well.
455    ///
456    /// This is equivalent to an atomic [`swap`].
457    ///
458    /// [`swap`]: AtomicPtr::swap
459    #[inline]
460    pub fn swap(&self, well: KineticWell<T, W>, ordering: Ordering) -> Option<KineticWell<T, W>> {
461        let old = self
462            .pointer
463            .swap(well.remove().as_ptr().cast(), ordering)
464            .cast::<T>();
465
466        // SAFETY: Data was originally `remove`d from a well.
467        NonNull::new(old).map(|old| unsafe { <KineticWell<T, W>>::insert(old) })
468    }
469
470    /// Takes the data out of the well.
471    ///
472    /// This is equivalent to an atomic [`swap`] with a null pointer.
473    ///
474    /// [`swap`]: AtomicPtr::swap
475    #[inline]
476    pub fn take(&self, ordering: Ordering) -> Option<KineticWell<T, W>> {
477        let old = self.pointer.swap(null_mut(), ordering).cast::<T>();
478
479        // SAFETY: Data was originally `remove`d from a well.
480        NonNull::new(old).map(|old| unsafe { <KineticWell<T, W>>::insert(old) })
481    }
482
483    /// Inserts data into the well.
484    ///
485    /// This uses [`compare_exchange`] to avoid inserting into the well if it's already full. If you
486    /// want to use [`compare_exchange_weak`] instead, use [`insert_weak`].
487    ///
488    /// [`insert_weak`]: PotentialAtomicOption::insert_weak
489    /// [`compare_exchange`]: AtomicPtr::compare_exchange
490    /// [`compare_exchange_weak`]: AtomicPtr::compare_exchange_weak
491    #[inline]
492    pub fn insert(
493        &self,
494        well: KineticWell<T, W>,
495        success: Ordering,
496        failure: Ordering,
497    ) -> Result<(), KineticWell<T, W>> {
498        let ptr = well.remove();
499        if self
500            .pointer
501            .compare_exchange(null_mut(), ptr.as_ptr().cast(), success, failure)
502            .is_err()
503        {
504            // SAFETY: This is just the `Well` we passed in.
505            Err(unsafe { <KineticWell<T, W>>::insert(ptr) })
506        } else {
507            Ok(())
508        }
509    }
510    /// Inserts data into the well, sometimes failing spuriously.
511    ///
512    /// This uses [`compare_exchange_weak`] to avoid inserting into the well if it's already full,
513    /// which may spuriously fail. If you want to use [`compare_exchange`] instead, use [`insert`].
514    ///
515    /// [`insert`]: PotentialAtomicOption::insert
516    /// [`compare_exchange`]: AtomicPtr::compare_exchange
517    /// [`compare_exchange_weak`]: AtomicPtr::compare_exchange_weak
518    #[inline]
519    pub fn insert_weak(
520        &self,
521        well: KineticWell<T, W>,
522        success: Ordering,
523        failure: Ordering,
524    ) -> Result<(), KineticWell<T, W>> {
525        let ptr = well.remove();
526        if self
527            .pointer
528            .compare_exchange_weak(null_mut(), ptr.as_ptr().cast(), success, failure)
529            .is_err()
530        {
531            // SAFETY: This is just the `Well` we passed in.
532            Err(unsafe { <KineticWell<T, W>>::insert(ptr) })
533        } else {
534            Ok(())
535        }
536    }
537}
538impl<T: Unpin, W: PotentialWell> PotentialAtomicOption<T, W>
539where
540    KineticWell<T, W>: WellMut,
541{
542    /// Loads the inner data as a mutable reference.
543    ///
544    /// This performs a non-atomic access since the atomic is mutably borrowed.
545    #[inline]
546    pub fn load_mut(&mut self) -> Option<&mut T> {
547        let loaded = self.pointer.get_mut();
548        match NonNull::new((*loaded).cast::<T>()) {
549            // SAFETY: We can mutate the data from a `WellMut`.
550            Some(mut ptr) => Some(unsafe { ptr.as_mut() }),
551            None => None,
552        }
553    }
554}
555impl<T, W: PotentialWell> PotentialAtomicOption<T, Pin<W>>
556where
557    Pin<W>: PotentialWell,
558    KineticWell<T, W>: WellMut,
559{
560    /// Loads the inner data as a pinned mutable reference.
561    ///
562    /// This is a version of [`load_mut`] that works with pinned values.
563    ///
564    /// [`load_mut`]: PotentialAtomicOption::load_mut
565    #[inline]
566    pub fn load_mut_pinned(&mut self) -> Option<Pin<&mut T>> {
567        let loaded = self.pointer.get_mut();
568        match NonNull::new((*loaded).cast::<T>()) {
569            // SAFETY: We can pull out a pinned pointer safely.
570            Some(mut ptr) => Some(unsafe { Pin::new_unchecked(ptr.as_mut()) }),
571            None => None,
572        }
573    }
574}
575impl<T, W: PotentialWell> From<Option<KineticWell<T, W>>> for PotentialAtomicOption<T, W> {
576    #[inline]
577    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
578    fn from(well: Option<KineticWell<T, W>>) -> Self {
579        match well {
580            Some(well) => PotentialAtomicOption::some(well),
581            None => PotentialAtomicOption::none(),
582        }
583    }
584}
impl<T, W: PotentialWell> PotentialAtomicOption<T, W>
where
    KineticWell<T, W>: Clone,
{
    /// Loads a clone of the inner data.
    ///
    /// This still performs an atomic [`load`], but instead of offering a reference, the smart
    /// pointer is cloned instead.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load_clone(&self, ordering: Ordering) -> Option<KineticWell<T, W>> {
        // The transient value stays wrapped in `ManuallyDrop`; only the clone
        // escapes, so the atomic retains ownership of the original.
        (*self.load_value(ordering)).clone()
    }
}
impl<T, W: PotentialWell> Drop for PotentialAtomicOption<T, W> {
    #[inline]
    fn drop(&mut self) {
        // Synchronize with accesses on other threads before the owned value
        // is reclaimed below.
        fence(Ordering::SeqCst);

        // SAFETY: We're dropping, so, it's okay to drop.
        unsafe {
            ManuallyDrop::drop(&mut self.load_value(Ordering::SeqCst));
        }
    }
}
impl<T: fmt::Debug, W: PotentialWell> fmt::Debug for PotentialAtomicOption<T, W> {
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegates to the inner value's `Debug` impl with the same formatter,
        // so flags such as `{:#?}` are forwarded unchanged.
        fmt::Debug::fmt(&self.load(Ordering::Relaxed), f)
    }
}
618
/// Type-hoisted [`Atomic`].
///
/// Uses [`PotentialWell`] to allow for recursive structures at the cost of some usability.
/// See the documentation for [`PotentialWell`] for more information.
pub struct PotentialAtomic<T, W: PotentialWell> {
    /// Type-erased atomic pointer to the stored `T`; never null.
    pointer: AtomicPtr<()>,

    /// Marker recording conceptual ownership of a `KineticWell<T, W>`.
    marker: PhantomData<KineticWell<T, W>>,
}
630impl<T: Default, W: PotentialWell> Default for PotentialAtomic<T, W>
631where
632    KineticWell<T, W>: Default,
633{
634    #[inline]
635    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
636    fn default() -> Self {
637        PotentialAtomic::new(Default::default())
638    }
639}
impl<T, W: PotentialWell> PotentialAtomic<T, W> {
    /// Creates atomic with a value.
    #[inline]
    pub fn new(well: KineticWell<T, W>) -> PotentialAtomic<T, W> {
        PotentialAtomic {
            pointer: AtomicPtr::new(well.remove().as_ptr().cast()),
            marker: PhantomData,
        }
    }

    /// Gives access to the underlying [`AtomicPtr`].
    ///
    /// # Safety
    ///
    /// The pointer inside the atomic must always be a valid pointer from [`Well::remove`] and
    /// therefore must not be null. Additionally, keep in mind that this atomic *owns* the
    /// pointer, and if you want to move it out, you must put a different pointer in its place
    /// first.
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    pub unsafe fn as_raw_unchecked(&self) -> &AtomicPtr<()> {
        &self.pointer
    }

    /// Loads the inner data as a value.
    ///
    /// Uses [`ManuallyDrop`] to avoid accidentally dropping the value
    /// while the atomic is still in use.
    #[inline]
    fn load_value(&self, ordering: Ordering) -> ManuallyDrop<KineticWell<T, W>> {
        let loaded = self.pointer.load(ordering).cast::<T>();

        // SAFETY: Data was originally `remove`d from a well, and is never null.
        ManuallyDrop::new(unsafe { <KineticWell<T, W>>::insert(NonNull::new_unchecked(loaded)) })
    }

    /// Loads the inner data as an immutable reference.
    ///
    /// This is equivalent to an atomic [`load`].
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load(&self, ordering: Ordering) -> &T {
        let loaded = self.pointer.load(ordering).cast::<T>();

        // SAFETY: We can read the data from a `Well`, and the data is never null.
        unsafe { NonNull::new_unchecked(loaded).as_ref() }
    }

    /// Atomically swaps the data inside the well.
    ///
    /// This is equivalent to an atomic [`swap`].
    ///
    /// [`swap`]: AtomicPtr::swap
    #[inline]
    pub fn swap(&self, well: KineticWell<T, W>, ordering: Ordering) -> KineticWell<T, W> {
        let old = self
            .pointer
            .swap(well.remove().as_ptr().cast(), ordering)
            .cast::<T>();

        // SAFETY: Data was originally `remove`d from a well, and is never null.
        unsafe { <KineticWell<T, W>>::insert(NonNull::new_unchecked(old)) }
    }
}
705impl<T: Unpin, W: PotentialWell> PotentialAtomic<T, W>
706where
707    KineticWell<T, W>: WellMut,
708{
709    /// Loads the inner data as a mutable reference.
710    ///
711    /// This performs a non-atomic access since the atomic is mutably borrowed.
712    #[inline]
713    pub fn load_mut(&mut self) -> &mut T {
714        let loaded = self.pointer.get_mut();
715
716        // SAFETY: We can mutate the data from a `WellMut`, and it is never null.
717        unsafe { NonNull::new_unchecked((*loaded).cast::<T>()).as_mut() }
718    }
719}
impl<T, W: PotentialWell> PotentialAtomic<T, Pin<W>>
where
    Pin<W>: PotentialWell,
    KineticWell<T, W>: WellMut,
{
    /// Loads the inner data as a pinned mutable reference.
    ///
    /// This is a version of [`load_mut`] that works with pinned values.
    ///
    /// [`load_mut`]: PotentialAtomic::load_mut
    #[inline]
    pub fn load_mut_pinned(&mut self) -> Pin<&mut T> {
        let loaded = self.pointer.get_mut();

        // SAFETY: We can mutate the data from a `WellMut` as long as it's pinned.
        unsafe { Pin::new_unchecked(NonNull::new_unchecked((*loaded).cast::<T>()).as_mut()) }
    }
}
impl<T, W: PotentialWell> PotentialAtomic<T, W>
where
    KineticWell<T, W>: Clone,
{
    /// Loads a clone of the inner data.
    ///
    /// This still performs an atomic [`load`], but instead of offering a reference, the smart
    /// pointer is cloned instead.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load_clone(&self, ordering: Ordering) -> KineticWell<T, W> {
        // The transient value stays wrapped in `ManuallyDrop`; only the clone
        // escapes, so the atomic retains ownership of the original.
        (*self.load_value(ordering)).clone()
    }
}
impl<T, W: PotentialWell> Drop for PotentialAtomic<T, W> {
    #[inline]
    fn drop(&mut self) {
        // Synchronize with accesses on other threads before the owned value
        // is reclaimed below.
        fence(Ordering::SeqCst);

        // SAFETY: We're dropping, so, it's okay to drop.
        unsafe {
            ManuallyDrop::drop(&mut self.load_value(Ordering::SeqCst));
        }
    }
}
impl<T: fmt::Debug, W: PotentialWell> fmt::Debug for PotentialAtomic<T, W> {
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegates to the inner value's `Debug` impl with the same formatter,
        // so flags such as `{:#?}` are forwarded unchanged.
        fmt::Debug::fmt(&self.load(Ordering::Relaxed), f)
    }
}