// potential_well/atomic.rs
1//! Atomic primitives.
2use core::{
3    fmt,
4    marker::PhantomData,
5    mem::ManuallyDrop,
6    ops::{Deref, DerefMut},
7    pin::Pin,
8    sync::atomic::{AtomicPtr, Ordering},
9};
10
11use crate::{
12    inner,
13    traits::{Bucket, KineticWell, PotentialWell, StrongWell, StrongWellMut, WeakWell, Well},
14};
15
/// Potentially empty atomic potential well.
///
/// Internally, this just wraps a pointer to `Bucket<T>` and uses atomic pointer
/// operations to access it. However, the number of operations on the pointer is limited to
/// ensure correctness in safe code.
#[repr(transparent)]
pub struct AtomicOption<W: Well> {
    /// Inner pointer. Null encodes the empty state; otherwise this is a pointer
    /// obtained from [`Well::remove`] that this atomic owns.
    ptr: inner::AtomicOption<Bucket<W>>,

    /// Data marker: this type logically owns an `Option<W>`.
    marker: PhantomData<Option<W>>,
}
29
30/// By default, nothing is stored in the atomic.
31impl<W: Well> Default for AtomicOption<W> {
32    #[inline]
33    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
34    fn default() -> Self {
35        AtomicOption::none()
36    }
37}
impl<W: Well> AtomicOption<W> {
    /// Creates atomic without anything inside.
    #[inline]
    pub fn none() -> AtomicOption<W> {
        AtomicOption::new(None)
    }

    /// Creates atomic with something inside.
    #[inline]
    pub fn some(well: W) -> AtomicOption<W> {
        AtomicOption::new(Some(well))
    }

    /// Creates atomic.
    ///
    /// The well is immediately decomposed into its raw pointer via [`Well::remove`];
    /// the atomic owns that pointer until the value is taken back out or the atomic
    /// is dropped.
    #[inline]
    pub fn new(well: Option<W>) -> AtomicOption<W> {
        AtomicOption {
            ptr: inner::AtomicOption::new(well.map(Well::remove)),
            marker: PhantomData,
        }
    }

    /// Gives access to the underlying [`AtomicPtr`].
    ///
    /// # Safety
    ///
    /// The pointer inside the atomic must always be null, or a valid pointer from [`Well::remove`].
    /// Additionally, keep in mind that this atomic *owns* the pointer, and if you want to move it
    /// out, you must put a different pointer in its place first.
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    pub unsafe fn as_raw_unchecked(&self) -> &AtomicPtr<Bucket<W>> {
        self.ptr.as_raw()
    }

    /// Atomically swaps the data inside the well.
    ///
    /// Returns the previously stored well, if any; `well` is owned by the atomic afterwards.
    ///
    /// This is equivalent to an atomic [`swap`].
    ///
    /// [`swap`]: AtomicPtr::swap
    #[inline]
    pub fn swap(&self, well: W, ordering: Ordering) -> Option<W> {
        let ptr = self.ptr.swap(Some(well.remove()), ordering)?;

        // SAFETY: We only insert pointers that were `remove`d from wells.
        Some(unsafe { Well::insert(ptr) })
    }

    /// Takes the data out of the well.
    ///
    /// This is equivalent to an atomic [`swap`] with a null pointer.
    ///
    /// [`swap`]: AtomicPtr::swap
    #[inline]
    pub fn take(&self, ordering: Ordering) -> Option<W> {
        let ptr = self.ptr.swap(None, ordering)?;

        // SAFETY: We only insert pointers that were `remove`d from wells.
        Some(unsafe { Well::insert(ptr) })
    }

    /// Inserts data into the well.
    ///
    /// On failure (the well was already full), ownership of `well` is handed back in
    /// the `Err` variant.
    ///
    /// This uses [`compare_exchange`] to avoid inserting into the well if it's already full. If you
    /// want to use [`compare_exchange_weak`] instead, use [`insert_weak`].
    ///
    /// [`insert_weak`]: AtomicOption::insert_weak
    /// [`compare_exchange`]: AtomicPtr::compare_exchange
    /// [`compare_exchange_weak`]: AtomicPtr::compare_exchange_weak
    #[inline]
    pub fn insert(&self, well: W, success: Ordering, failure: Ordering) -> Result<(), W> {
        // Decompose first; on CAS failure we still own `ptr` and must rebuild the well.
        let ptr = well.remove();
        if self
            .ptr
            .compare_exchange(None, Some(ptr), success, failure)
            .is_ok()
        {
            Ok(())
        } else {
            // SAFETY: We just `remove`d this from a well, and since it wasn't stored,
            //   we can re`insert` it.
            Err(unsafe { Well::insert(ptr) })
        }
    }

    /// Inserts data into the well, sometimes failing spuriously.
    ///
    /// This uses [`compare_exchange_weak`] to avoid inserting into the well if it's already full,
    /// which may spuriously fail. If you want to use [`compare_exchange`] instead, use [`insert`].
    ///
    /// [`insert`]: AtomicOption::insert
    /// [`compare_exchange`]: AtomicPtr::compare_exchange
    /// [`compare_exchange_weak`]: AtomicPtr::compare_exchange_weak
    #[inline]
    pub fn insert_weak(&self, well: W, success: Ordering, failure: Ordering) -> Result<(), W> {
        // Same ownership discipline as `insert`, but with the weak CAS.
        let ptr = well.remove();
        if self
            .ptr
            .compare_exchange_weak(None, Some(ptr), success, failure)
            .is_ok()
        {
            Ok(())
        } else {
            // SAFETY: We just `remove`d this from a well, and since it wasn't stored,
            //   we can re`insert` it.
            Err(unsafe { Well::insert(ptr) })
        }
    }
}
impl<W: WeakWell> AtomicOption<W> {
    /// Tries to load the inner data.
    ///
    /// This is equivalent to an atomic [`load`], but it may fail due to the weak reference. If
    /// the reference fails to upgrade, it will still remain inside the well.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn try_load(&self, ordering: Ordering) -> Option<<W as WeakWell>::Access> {
        let ptr = self.ptr.load(ordering)?;

        // SAFETY: This was `remove`d from a well.
        // `ManuallyDrop` keeps us from dropping the well, which is still owned by the atomic.
        let ptr = unsafe { ManuallyDrop::new(W::insert(ptr)) };

        // Upgrade attempt; `None` if the weak reference is dead.
        WeakWell::access(&*ptr)
    }
}
impl<W: StrongWell> AtomicOption<W> {
    /// Loads the inner data as an immutable reference.
    ///
    /// This is equivalent to an atomic [`load`].
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load(&self, ordering: Ordering) -> Option<&Bucket<W>> {
        let ptr = self.ptr.load(ordering)?;

        // SAFETY: The pointer is stable.
        //   (Presumably guaranteed by the `StrongWell` bound even across concurrent
        //   removal — confirm against the trait's documentation.)
        Some(unsafe { ptr.as_ref() })
    }

    /// Atomically swaps the data inside the well and returns a reference to the new data.
    ///
    /// This is [`swap`], but with the unsafe deref hidden behind a safe API.
    ///
    /// [`swap`]: AtomicOption::swap
    #[inline]
    pub fn swap_get(&self, well: W, ordering: Ordering) -> (Option<W>, &Bucket<W>) {
        // Keep the raw pointer so we can hand back a reference to the value we stored.
        let new = well.remove();
        let old = self.ptr.swap(Some(new), ordering);

        // SAFETY: The pointer is stable.
        let new = unsafe { new.as_ref() };

        (
            // SAFETY: This was `remove`d from a well.
            old.map(|old| unsafe { Well::insert(old) }),
            new,
        )
    }

    /// Inserts data into the well and returns a reference to the new data.
    ///
    /// This is [`insert`], but with the unsafe deref hidden behind a safe API.
    ///
    /// [`insert`]: AtomicOption::insert
    #[inline]
    pub fn insert_get(
        &self,
        well: W,
        success: Ordering,
        failure: Ordering,
    ) -> Result<&Bucket<W>, W> {
        let new = well.remove();
        if self
            .ptr
            .compare_exchange(None, Some(new), success, failure)
            .is_ok()
        {
            // SAFETY: The pointer is stable.
            Ok(unsafe { new.as_ref() })
        } else {
            // SAFETY: We just `remove`d this from a well, and since it wasn't stored,
            //   we can re`insert` it.
            Err(unsafe { Well::insert(new) })
        }
    }

    /// Inserts data into the well, returns reference to the new data, sometimes failing spuriously.
    ///
    /// This is [`insert_weak`], but with the unsafe deref hidden behind a safe API.
    ///
    /// [`insert_weak`]: AtomicOption::insert_weak
    #[inline]
    pub fn insert_weak_get(
        &self,
        well: W,
        success: Ordering,
        failure: Ordering,
    ) -> Result<&Bucket<W>, W> {
        let new = well.remove();
        if self
            .ptr
            .compare_exchange_weak(None, Some(new), success, failure)
            .is_ok()
        {
            // SAFETY: The pointer is stable.
            Ok(unsafe { new.as_ref() })
        } else {
            // SAFETY: We just `remove`d this from a well, and since it wasn't stored,
            //   we can re`insert` it.
            Err(unsafe { Well::insert(new) })
        }
    }
}
252impl<W: StrongWellMut + DerefMut<Target: Unpin>> AtomicOption<W> {
253    /// Loads the inner data as a mutable reference.
254    ///
255    /// This performs a non-atomic access since the atomic is mutably borrowed.
256    #[inline]
257    pub fn load_mut(&mut self) -> Option<&mut Bucket<W>> {
258        // SAFETY: The pointer is stable.
259        Some(unsafe { self.ptr.get_mut()?.as_mut() })
260    }
261}
impl<W: StrongWellMut> AtomicOption<Pin<W>> {
    /// Loads the inner data as a pinned mutable reference.
    ///
    /// This is a version of [`load_mut`] that works with pinned values.
    ///
    /// [`load_mut`]: AtomicOption::load_mut
    #[inline]
    pub fn load_mut_pinned(&mut self) -> Option<Pin<&mut <Pin<W> as Well>::Target>> {
        // SAFETY: The pointer is stable, and we don't disrupt the pin:
        //   the target is never moved, only re-wrapped in `Pin`.
        Some(unsafe { Pin::new_unchecked(self.ptr.get_mut()?.as_mut()) })
    }
}
274impl<W: Well> From<W> for AtomicOption<W> {
275    #[inline]
276    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
277    fn from(well: W) -> Self {
278        AtomicOption::some(well)
279    }
280}
281impl<W: Well> From<Option<W>> for AtomicOption<W> {
282    #[inline]
283    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
284    fn from(well: Option<W>) -> Self {
285        AtomicOption::new(well)
286    }
287}
impl<W: Well + Clone> AtomicOption<W> {
    /// Loads a clone of the inner data.
    ///
    /// This still performs an atomic [`load`], but instead of offering a reference, the smart
    /// pointer is cloned instead.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load_clone(&self, ordering: Ordering) -> Option<W> {
        let ptr = self.ptr.load(ordering)?;

        // SAFETY: This was `remove`d from a well.
        // `ManuallyDrop` keeps the original inside the atomic; only the clone escapes.
        let ptr = unsafe { ManuallyDrop::new(W::insert(ptr)) };

        Some((*ptr).clone())
    }
}
impl<W: Well> Drop for AtomicOption<W> {
    #[inline]
    fn drop(&mut self) {
        // Reconstruct the stored well (if any) so its own destructor runs.
        if let Some(ptr) = self.ptr.load_drop() {
            // SAFETY: This was `remove`d from a well.
            unsafe {
                drop(W::insert(ptr));
            }
        }
    }
}
impl<W: Well + fmt::Debug> fmt::Debug for AtomicOption<W> {
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut tuple = f.debug_tuple("AtomicOption");
        // Empty atomic renders as `AtomicOption(None)`.
        let Some(ptr) = self.ptr.load_debug() else {
            return tuple.field(&None::<W>).finish();
        };

        // SAFETY: This was `remove`d from a well.
        // `ManuallyDrop` keeps us from dropping a well the atomic still owns.
        let ptr = unsafe { ManuallyDrop::new(W::insert(ptr)) };

        tuple.field(&Some(&*ptr)).finish()
    }
}
331
/// Atomic potential well.
///
/// Unlike [`AtomicOption`], this is never empty: the stored pointer is always non-null.
///
/// Internally, this just wraps a pointer to `Bucket<T>` and uses atomic pointer
/// operations to access it. However, the number of operations on the pointer is limited to
/// ensure correctness in safe code.
#[repr(transparent)]
pub struct Atomic<W: Well>(inner::Atomic<Bucket<W>>);
339impl<W: Well + Default> Default for Atomic<W> {
340    #[inline]
341    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
342    fn default() -> Self {
343        Atomic::new(Default::default())
344    }
345}
impl<W: Well> Atomic<W> {
    /// Creates atomic with a value.
    #[inline]
    pub fn new(well: W) -> Atomic<W> {
        Atomic(inner::Atomic::new(well.remove()))
    }

    /// Gives access to the underlying [`AtomicPtr`].
    ///
    /// # Safety
    ///
    /// The pointer inside the atomic must always be a valid pointer from [`Well::remove`] and
    /// therefore must not be null. Additionally, keep in mind that this atomic *owns* the
    /// pointer, and if you want to move it out, you must put a different pointer in its place
    /// first.
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    pub unsafe fn as_raw_unchecked(&self) -> &AtomicPtr<Bucket<W>> {
        self.0.as_raw()
    }

    /// Atomically swaps the data inside the well.
    ///
    /// Returns the previously stored well; `well` is owned by the atomic afterwards.
    ///
    /// This is equivalent to an atomic [`swap`].
    ///
    /// [`swap`]: AtomicPtr::swap
    #[inline]
    pub fn swap(&self, well: W, ordering: Ordering) -> W {
        let ptr = self.0.swap(well.remove(), ordering);

        // SAFETY: This was `remove`d from a well.
        unsafe { Well::insert(ptr) }
    }
}
impl<W: WeakWell> Atomic<W> {
    /// Tries to load the inner data.
    ///
    /// This is equivalent to an atomic [`load`], but it may fail due to the weak reference.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn try_load(&self, ordering: Ordering) -> Option<<W as WeakWell>::Access> {
        let ptr = self.0.load(ordering);

        // SAFETY: This was `remove`d from a well.
        // `ManuallyDrop` keeps us from dropping the well, which is still owned by the atomic.
        let ptr = unsafe { ManuallyDrop::new(W::insert(ptr)) };

        // Upgrade attempt; `None` if the weak reference is dead.
        WeakWell::access(&*ptr)
    }
}
impl<W: StrongWell + Deref<Target: Sized>> Atomic<W> {
    /// Loads the inner data as an immutable reference.
    ///
    /// This is equivalent to an atomic [`load`].
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load(&self, ordering: Ordering) -> &Bucket<W> {
        let ptr = self.0.load(ordering);

        // SAFETY: The pointer is stable.
        //   (Presumably guaranteed by the `StrongWell` bound even across concurrent
        //   swaps — confirm against the trait's documentation.)
        unsafe { ptr.as_ref() }
    }

    /// Atomically swaps the data inside the well and returns a reference to the new data.
    ///
    /// This is [`swap`], but with the unsafe deref hidden behind a safe API.
    ///
    /// [`swap`]: Atomic::swap
    #[inline]
    pub fn swap_get(&self, well: W, ordering: Ordering) -> (W, &Bucket<W>) {
        // Keep the raw pointer so we can hand back a reference to the value we stored.
        let new = well.remove();
        let old = self.0.swap(new, ordering);

        // SAFETY: The pointer is stable.
        let new = unsafe { new.as_ref() };

        (
            // SAFETY: This was `remove`d from a well.
            unsafe { Well::insert(old) },
            new,
        )
    }
}
impl<W: StrongWellMut + DerefMut<Target: Unpin>> Atomic<W> {
    /// Loads the inner data as a mutable reference.
    ///
    /// This performs a non-atomic access since the atomic is mutably borrowed.
    #[inline]
    pub fn load_mut(&mut self) -> &mut Bucket<W> {
        // `&mut self` guarantees exclusive access, so no atomic operation is needed.
        let mut ptr = self.0.get_mut();

        // SAFETY: The pointer is stable.
        unsafe { ptr.as_mut() }
    }
}
impl<W: StrongWellMut> Atomic<Pin<W>> {
    /// Loads the inner data as a pinned mutable reference.
    ///
    /// This is a version of [`load_mut`] that works with pinned values.
    ///
    /// [`load_mut`]: Atomic::load_mut
    #[inline]
    pub fn load_mut_pinned(&mut self) -> Pin<&mut <Pin<W> as Well>::Target> {
        let mut ptr = self.0.get_mut();

        // SAFETY: The pointer is stable, and we don't disrupt the pin:
        //   the target is never moved, only re-wrapped in `Pin`.
        unsafe { Pin::new_unchecked(ptr.as_mut()) }
    }
}
456impl<W: Well> From<W> for Atomic<W> {
457    #[inline]
458    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
459    fn from(well: W) -> Self {
460        Atomic::new(well)
461    }
462}
impl<W: Well + Clone> Atomic<W> {
    /// Loads a clone of the inner data.
    ///
    /// This still performs an atomic [`load`], but instead of offering a reference, the smart
    /// pointer is cloned instead.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load_clone(&self, ordering: Ordering) -> W {
        let ptr = self.0.load(ordering);

        // SAFETY: This was `remove`d from a well.
        // `ManuallyDrop` keeps the original inside the atomic; only the clone escapes.
        let ptr = unsafe { ManuallyDrop::new(W::insert(ptr)) };

        (*ptr).clone()
    }
}
impl<W: Well> Drop for Atomic<W> {
    #[inline]
    fn drop(&mut self) {
        let ptr = self.0.load_drop();

        // SAFETY: This was `remove`d from a well.
        // Reconstruct it so its own destructor runs.
        unsafe {
            drop(W::insert(ptr));
        }
    }
}
impl<W: Well + fmt::Debug> fmt::Debug for Atomic<W> {
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let ptr = self.0.load_debug();

        // SAFETY: This was `remove`d from a well.
        // `ManuallyDrop` keeps us from dropping a well the atomic still owns.
        let ptr = unsafe { ManuallyDrop::new(W::insert(ptr)) };

        f.debug_tuple("Atomic").field(&*ptr).finish()
    }
}
503
/// Type-hoisted [`AtomicOption`].
///
/// Uses [`PotentialWell`] to allow for recursive structures at the cost of some usability.
/// See the documentation for [`PotentialWell`] for more information.
///
/// All operations delegate to the wrapped [`AtomicOption`].
#[repr(transparent)]
pub struct PotentialAtomicOption<T, W: PotentialWell>(AtomicOption<KineticWell<T, W>>);
510
511/// By default, nothing is stored in the atomic.
512impl<T, W: PotentialWell> Default for PotentialAtomicOption<T, W> {
513    #[inline]
514    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
515    fn default() -> Self {
516        PotentialAtomicOption::none()
517    }
518}
impl<T, W: PotentialWell> PotentialAtomicOption<T, W> {
    /// Creates atomic without anything inside.
    #[inline]
    pub fn none() -> PotentialAtomicOption<T, W> {
        PotentialAtomicOption::new(None)
    }

    /// Creates atomic with something inside.
    #[inline]
    pub fn some(well: KineticWell<T, W>) -> PotentialAtomicOption<T, W> {
        PotentialAtomicOption::new(Some(well))
    }

    /// Creates atomic.
    // NOTE(review): missing `#[inline]`, unlike the sibling constructors — confirm intentional.
    pub fn new(well: Option<KineticWell<T, W>>) -> PotentialAtomicOption<T, W> {
        PotentialAtomicOption(AtomicOption::new(well))
    }

    /// Gives access to the underlying [`AtomicPtr`].
    ///
    /// # Safety
    ///
    /// The pointer inside the atomic must always be null, or a valid pointer from [`Well::remove`].
    /// Additionally, keep in mind that this atomic *owns* the pointer, and if you want to move it
    /// out, you must put a different pointer in its place first.
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    pub unsafe fn as_raw_unchecked(&self) -> &AtomicPtr<T> {
        // SAFETY: Ensured by caller.
        unsafe { self.0.as_raw_unchecked() }
    }

    /// Atomically swaps the data inside the well.
    ///
    /// This is equivalent to an atomic [`swap`].
    ///
    /// [`swap`]: AtomicPtr::swap
    #[inline]
    pub fn swap(&self, well: KineticWell<T, W>, ordering: Ordering) -> Option<KineticWell<T, W>> {
        self.0.swap(well, ordering)
    }

    /// Takes the data out of the well.
    ///
    /// This is equivalent to an atomic [`swap`] with a null pointer.
    ///
    /// [`swap`]: AtomicPtr::swap
    #[inline]
    pub fn take(&self, ordering: Ordering) -> Option<KineticWell<T, W>> {
        self.0.take(ordering)
    }

    /// Inserts data into the well.
    ///
    /// This uses [`compare_exchange`] to avoid inserting into the well if it's already full. If you
    /// want to use [`compare_exchange_weak`] instead, use [`insert_weak`].
    ///
    /// [`insert_weak`]: PotentialAtomicOption::insert_weak
    /// [`compare_exchange`]: AtomicPtr::compare_exchange
    /// [`compare_exchange_weak`]: AtomicPtr::compare_exchange_weak
    #[inline]
    pub fn insert(
        &self,
        well: KineticWell<T, W>,
        success: Ordering,
        failure: Ordering,
    ) -> Result<(), KineticWell<T, W>> {
        self.0.insert(well, success, failure)
    }

    /// Inserts data into the well, sometimes failing spuriously.
    ///
    /// This uses [`compare_exchange_weak`] to avoid inserting into the well if it's already full,
    /// which may spuriously fail. If you want to use [`compare_exchange`] instead, use [`insert`].
    ///
    /// [`insert`]: PotentialAtomicOption::insert
    /// [`compare_exchange`]: AtomicPtr::compare_exchange
    /// [`compare_exchange_weak`]: AtomicPtr::compare_exchange_weak
    #[inline]
    pub fn insert_weak(
        &self,
        well: KineticWell<T, W>,
        success: Ordering,
        failure: Ordering,
    ) -> Result<(), KineticWell<T, W>> {
        self.0.insert_weak(well, success, failure)
    }
}
impl<T, W: PotentialWell> PotentialAtomicOption<T, W>
where
    KineticWell<T, W>: StrongWell + Deref<Target = T>,
{
    /// Loads the inner data as an immutable reference.
    ///
    /// This is equivalent to an atomic [`load`].
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load(&self, ordering: Ordering) -> Option<&T> {
        self.0.load(ordering)
    }

    /// Atomically swaps the data inside the well and returns a reference to the new data.
    ///
    /// This is [`swap`], but with the unsafe deref hidden behind a safe API.
    ///
    /// [`swap`]: PotentialAtomicOption::swap
    #[inline]
    pub fn swap_get(
        &self,
        well: KineticWell<T, W>,
        ordering: Ordering,
    ) -> (Option<KineticWell<T, W>>, &T) {
        self.0.swap_get(well, ordering)
    }

    /// Inserts data into the well and returns a reference to the new data.
    ///
    /// This is [`insert`], but with the unsafe deref hidden behind a safe API.
    ///
    /// [`insert`]: PotentialAtomicOption::insert
    #[inline]
    pub fn insert_get(
        &self,
        well: KineticWell<T, W>,
        success: Ordering,
        failure: Ordering,
    ) -> Result<&T, KineticWell<T, W>> {
        self.0.insert_get(well, success, failure)
    }

    /// Inserts data into the well, returns reference to the new data, sometimes failing spuriously.
    ///
    /// This is [`insert_weak`], but with the unsafe deref hidden behind a safe API.
    ///
    /// [`insert_weak`]: PotentialAtomicOption::insert_weak
    #[inline]
    pub fn insert_weak_get(
        &self,
        well: KineticWell<T, W>,
        success: Ordering,
        failure: Ordering,
    ) -> Result<&T, KineticWell<T, W>> {
        self.0.insert_weak_get(well, success, failure)
    }
}
665impl<T: Unpin, W: PotentialWell> PotentialAtomicOption<T, W>
666where
667    KineticWell<T, W>: StrongWellMut + Deref<Target = T>,
668{
669    /// Loads the inner data as a mutable reference.
670    ///
671    /// This performs a non-atomic access since the atomic is mutably borrowed.
672    #[inline]
673    pub fn load_mut(&mut self) -> Option<&mut T> {
674        self.0.load_mut()
675    }
676}
impl<T, W: PotentialWell> PotentialAtomicOption<T, Pin<W>>
where
    KineticWell<T, W>: StrongWellMut + Deref<Target = T>,
    Pin<W>: PotentialWell<Well<T> = Pin<KineticWell<T, W>>>,
{
    /// Loads the inner data as a pinned mutable reference.
    ///
    /// This is a version of [`load_mut`] that works with pinned values.
    ///
    /// [`load_mut`]: PotentialAtomicOption::load_mut
    #[inline]
    pub fn load_mut_pinned(&mut self) -> Option<Pin<&mut T>> {
        // Delegates to `AtomicOption::load_mut_pinned` on the pinned well type.
        self.0.load_mut_pinned()
    }
}
692impl<T, W: PotentialWell> From<Option<KineticWell<T, W>>> for PotentialAtomicOption<T, W> {
693    #[inline]
694    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
695    fn from(well: Option<KineticWell<T, W>>) -> Self {
696        match well {
697            Some(well) => PotentialAtomicOption::some(well),
698            None => PotentialAtomicOption::none(),
699        }
700    }
701}
impl<T, W: PotentialWell> PotentialAtomicOption<T, W>
where
    KineticWell<T, W>: Clone,
{
    /// Loads a clone of the inner data.
    ///
    /// This still performs an atomic [`load`], but instead of offering a reference, the smart
    /// pointer is cloned instead.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load_clone(&self, ordering: Ordering) -> Option<KineticWell<T, W>> {
        // Delegates to `AtomicOption::load_clone`.
        self.0.load_clone(ordering)
    }
}
impl<T, W: PotentialWell> fmt::Debug for PotentialAtomicOption<T, W>
where
    KineticWell<T, W>: fmt::Debug,
{
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // NOTE(review): delegates to the inner impl, so the rendered tuple name is
        //   `AtomicOption(..)`, not `PotentialAtomicOption(..)` — confirm this is intended.
        fmt::Debug::fmt(&self.0, f)
    }
}
727
/// Type-hoisted [`Atomic`].
///
/// Uses [`PotentialWell`] to allow for recursive structures at the cost of some usability.
/// See the documentation for [`PotentialWell`] for more information.
///
/// All operations delegate to the wrapped [`Atomic`].
#[repr(transparent)]
pub struct PotentialAtomic<T, W: PotentialWell>(Atomic<KineticWell<T, W>>);
734impl<T: Default, W: PotentialWell> Default for PotentialAtomic<T, W>
735where
736    KineticWell<T, W>: Default,
737{
738    #[inline]
739    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
740    fn default() -> Self {
741        PotentialAtomic::new(Default::default())
742    }
743}
impl<T, W: PotentialWell> PotentialAtomic<T, W> {
    /// Creates atomic with a value.
    #[inline]
    pub fn new(well: KineticWell<T, W>) -> PotentialAtomic<T, W> {
        PotentialAtomic(Atomic::new(well))
    }

    /// Gives access to the underlying [`AtomicPtr`].
    ///
    /// # Safety
    ///
    /// The pointer inside the atomic must always be a valid pointer from [`Well::remove`] and
    /// therefore must not be null. Additionally, keep in mind that this atomic *owns* the
    /// pointer, and if you want to move it out, you must put a different pointer in its place
    /// first.
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    pub unsafe fn as_raw_unchecked(&self) -> &AtomicPtr<T> {
        // SAFETY: Ensured by caller.
        unsafe { self.0.as_raw_unchecked() }
    }

    /// Atomically swaps the data inside the well.
    ///
    /// This is equivalent to an atomic [`swap`].
    ///
    /// [`swap`]: AtomicPtr::swap
    #[inline]
    pub fn swap(&self, well: KineticWell<T, W>, ordering: Ordering) -> KineticWell<T, W> {
        self.0.swap(well, ordering)
    }
}
impl<T, W: PotentialWell> PotentialAtomic<T, W>
where
    KineticWell<T, W>: StrongWell + Deref<Target = T>,
{
    /// Loads the inner data as an immutable reference.
    ///
    /// This is equivalent to an atomic [`load`].
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load(&self, ordering: Ordering) -> &T {
        self.0.load(ordering)
    }

    /// Atomically swaps the data inside the well and returns a reference to the new data.
    ///
    /// This is [`swap`], but with the unsafe deref hidden behind a safe API.
    ///
    /// [`swap`]: PotentialAtomic::swap
    #[inline]
    pub fn swap_get(&self, well: KineticWell<T, W>, ordering: Ordering) -> (KineticWell<T, W>, &T) {
        self.0.swap_get(well, ordering)
    }
}
800impl<T: Unpin, W: PotentialWell> PotentialAtomic<T, W>
801where
802    KineticWell<T, W>: StrongWellMut + Deref<Target = T>,
803{
804    /// Loads the inner data as a mutable reference.
805    ///
806    /// This performs a non-atomic access since the atomic is mutably borrowed.
807    #[inline]
808    pub fn load_mut(&mut self) -> &mut T {
809        self.0.load_mut()
810    }
811}
impl<T, W: PotentialWell> PotentialAtomic<T, Pin<W>>
where
    KineticWell<T, W>: StrongWellMut + Deref<Target = T>,
    Pin<W>: PotentialWell<Well<T> = Pin<KineticWell<T, W>>>,
{
    /// Loads the inner data as a pinned mutable reference.
    ///
    /// This is a version of [`load_mut`] that works with pinned values.
    ///
    /// [`load_mut`]: PotentialAtomic::load_mut
    #[inline]
    pub fn load_mut_pinned(&mut self) -> Pin<&mut T> {
        // Delegates to `Atomic::load_mut_pinned` on the pinned well type.
        self.0.load_mut_pinned()
    }
}
impl<T, W: PotentialWell> PotentialAtomic<T, W>
where
    KineticWell<T, W>: Clone,
{
    /// Loads a clone of the inner data.
    ///
    /// This still performs an atomic [`load`], but instead of offering a reference, the smart
    /// pointer is cloned instead.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load_clone(&self, ordering: Ordering) -> KineticWell<T, W> {
        // Delegates to `Atomic::load_clone`.
        self.0.load_clone(ordering)
    }
}
impl<T, W: PotentialWell> fmt::Debug for PotentialAtomic<T, W>
where
    KineticWell<T, W>: fmt::Debug,
{
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // NOTE(review): delegates to the inner impl, so the rendered tuple name is
        //   `Atomic(..)`, not `PotentialAtomic(..)` — confirm this is intended.
        fmt::Debug::fmt(&self.0, f)
    }
}