potential_well/atomic.rs
1//! Atomic primitives.
2use core::{
3 fmt,
4 marker::PhantomData,
5 mem::ManuallyDrop,
6 ops::{Deref, DerefMut},
7 pin::Pin,
8 sync::atomic::{AtomicPtr, Ordering},
9};
10
11use crate::{
12 inner,
13 traits::{PotentialWell, StrongWell, StrongWellMut, Target, WeakWell, Well},
14};
15
/// Potentially empty atomic potential well.
///
/// Internally, this just wraps a pointer to `Bucket<T>` and uses atomic pointer
/// operations to access it. However, the number of operations on the pointer is limited to
/// ensure correctness in safe code.
#[repr(transparent)]
pub struct AtomicOption<W: Well> {
    /// Inner pointer. Null when empty, otherwise a pointer obtained from [`Well::remove`].
    ptr: inner::AtomicOption<Target<W>>,

    /// Data marker: expresses that this type logically owns an `Option<W>`.
    marker: PhantomData<Option<W>>,
}
29
30/// By default, nothing is stored in the atomic.
31impl<W: Well> Default for AtomicOption<W> {
32 #[inline]
33 #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
34 fn default() -> Self {
35 AtomicOption::none()
36 }
37}
impl<W: Well> AtomicOption<W> {
    /// Creates atomic without anything inside.
    #[inline]
    pub fn none() -> AtomicOption<W> {
        AtomicOption::new(None)
    }

    /// Creates atomic with something inside.
    #[inline]
    pub fn some(well: W) -> AtomicOption<W> {
        AtomicOption::new(Some(well))
    }

    /// Creates atomic.
    ///
    /// A `Some` well is converted into its raw pointer form via [`Well::remove`];
    /// `None` is stored as a null pointer.
    #[inline]
    pub fn new(well: Option<W>) -> AtomicOption<W> {
        AtomicOption {
            ptr: inner::AtomicOption::new(well.map(Well::remove)),
            marker: PhantomData,
        }
    }

    /// Gives access to the underlying [`AtomicPtr`].
    ///
    /// # Safety
    ///
    /// The pointer inside the atomic must always be null, or a valid pointer from [`Well::remove`].
    /// Additionally, keep in mind that this atomic *owns* the pointer, and if you want to move it
    /// out, you must put a different pointer in its place first.
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    pub unsafe fn as_raw_unchecked(&self) -> &AtomicPtr<Target<W>> {
        self.ptr.as_raw()
    }

    /// Atomically swaps the data inside the well.
    ///
    /// This is equivalent to an atomic [`swap`]. Returns the previously stored well,
    /// or `None` if the atomic was empty.
    ///
    /// [`swap`]: AtomicPtr::swap
    #[inline]
    pub fn swap(&self, well: W, ordering: Ordering) -> Option<W> {
        let ptr = self.ptr.swap(Some(well.remove()), ordering)?;

        // SAFETY: We only insert pointers that were `remove`d from wells.
        Some(unsafe { Well::insert(ptr) })
    }

    /// Takes the data out of the well.
    ///
    /// This is equivalent to an atomic [`swap`] with a null pointer. Returns the
    /// previously stored well, or `None` if the atomic was already empty.
    ///
    /// [`swap`]: AtomicPtr::swap
    #[inline]
    pub fn take(&self, ordering: Ordering) -> Option<W> {
        let ptr = self.ptr.swap(None, ordering)?;

        // SAFETY: We only insert pointers that were `remove`d from wells.
        Some(unsafe { Well::insert(ptr) })
    }

    /// Inserts data into the well.
    ///
    /// This uses [`compare_exchange`] to avoid inserting into the well if it's already full. If you
    /// want to use [`compare_exchange_weak`] instead, use [`insert_weak`].
    ///
    /// Returns `Err` carrying the original well back if the atomic was already full.
    ///
    /// [`insert_weak`]: AtomicOption::insert_weak
    /// [`compare_exchange`]: AtomicPtr::compare_exchange
    /// [`compare_exchange_weak`]: AtomicPtr::compare_exchange_weak
    #[inline]
    pub fn insert(&self, well: W, success: Ordering, failure: Ordering) -> Result<(), W> {
        let ptr = well.remove();
        if self
            .ptr
            .compare_exchange(None, Some(ptr), success, failure)
            .is_ok()
        {
            Ok(())
        } else {
            // SAFETY: We just `remove`d this from a well, and since it wasn't stored,
            // we can re`insert` it.
            Err(unsafe { Well::insert(ptr) })
        }
    }

    /// Inserts data into the well, sometimes failing spuriously.
    ///
    /// This uses [`compare_exchange_weak`] to avoid inserting into the well if it's already full,
    /// which may spuriously fail. If you want to use [`compare_exchange`] instead, use [`insert`].
    ///
    /// Returns `Err` carrying the original well back if the exchange did not succeed
    /// (including spurious failures).
    ///
    /// [`insert`]: AtomicOption::insert
    /// [`compare_exchange`]: AtomicPtr::compare_exchange
    /// [`compare_exchange_weak`]: AtomicPtr::compare_exchange_weak
    #[inline]
    pub fn insert_weak(&self, well: W, success: Ordering, failure: Ordering) -> Result<(), W> {
        let ptr = well.remove();
        if self
            .ptr
            .compare_exchange_weak(None, Some(ptr), success, failure)
            .is_ok()
        {
            Ok(())
        } else {
            // SAFETY: We just `remove`d this from a well, and since it wasn't stored,
            // we can re`insert` it.
            Err(unsafe { Well::insert(ptr) })
        }
    }
}
impl<W: WeakWell> AtomicOption<W> {
    /// Tries to load the inner data.
    ///
    /// This is equivalent to an atomic [`load`], but it may fail due to the weak reference. If
    /// the reference fails to upgrade, it will still remain inside the well.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn try_load(&self, ordering: Ordering) -> Option<<W as WeakWell>::Strong> {
        let ptr = self.ptr.load(ordering)?;

        // SAFETY: This was `remove`d from a well.
        // `ManuallyDrop` keeps the reconstructed well from being dropped here: the
        // atomic still owns the pointer, we only borrow it to call `access`.
        let ptr = unsafe { ManuallyDrop::new(W::insert(ptr)) };

        WeakWell::access(&*ptr)
    }
}
impl<W: StrongWell> AtomicOption<W> {
    /// Loads the inner data as an immutable reference.
    ///
    /// This is equivalent to an atomic [`load`].
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load(&self, ordering: Ordering) -> Option<&Target<W>> {
        let ptr = self.ptr.load(ordering)?;

        // SAFETY: The pointer is stable.
        Some(unsafe { ptr.as_ref() })
    }

    /// Atomically swaps the data inside the well and returns a reference to the new data.
    ///
    /// This is [`swap`], but with the unsafe deref hidden behind a safe API.
    ///
    /// [`swap`]: AtomicOption::swap
    #[inline]
    pub fn swap_get(&self, well: W, ordering: Ordering) -> (Option<W>, &Target<W>) {
        let new = well.remove();
        let old = self.ptr.swap(Some(new), ordering);

        // SAFETY: The pointer is stable.
        // Note: `new` is a raw pointer (hence `Copy`), so it can still be dereferenced
        // after a copy of it was stored in the atomic.
        let new = unsafe { new.as_ref() };

        (
            // SAFETY: This was `remove`d from a well.
            old.map(|old| unsafe { Well::insert(old) }),
            new,
        )
    }

    /// Inserts data into the well and returns a reference to the new data.
    ///
    /// This is [`insert`], but with the unsafe deref hidden behind a safe API.
    ///
    /// [`insert`]: AtomicOption::insert
    #[inline]
    pub fn insert_get(
        &self,
        well: W,
        success: Ordering,
        failure: Ordering,
    ) -> Result<&Target<W>, W> {
        let new = well.remove();
        if self
            .ptr
            .compare_exchange(None, Some(new), success, failure)
            .is_ok()
        {
            // SAFETY: The pointer is stable.
            Ok(unsafe { new.as_ref() })
        } else {
            // SAFETY: We just `remove`d this from a well, and since it wasn't stored,
            // we can re`insert` it.
            Err(unsafe { Well::insert(new) })
        }
    }

    /// Inserts data into the well, returns reference to the new data, sometimes failing spuriously.
    ///
    /// This is [`insert_weak`], but with the unsafe deref hidden behind a safe API.
    ///
    /// [`insert_weak`]: AtomicOption::insert_weak
    #[inline]
    pub fn insert_weak_get(
        &self,
        well: W,
        success: Ordering,
        failure: Ordering,
    ) -> Result<&Target<W>, W> {
        let new = well.remove();
        if self
            .ptr
            .compare_exchange_weak(None, Some(new), success, failure)
            .is_ok()
        {
            // SAFETY: The pointer is stable.
            Ok(unsafe { new.as_ref() })
        } else {
            // SAFETY: We just `remove`d this from a well, and since it wasn't stored,
            // we can re`insert` it.
            Err(unsafe { Well::insert(new) })
        }
    }
}
252impl<W: StrongWellMut + DerefMut<Target: Unpin>> AtomicOption<W> {
253 /// Loads the inner data as a mutable reference.
254 ///
255 /// This performs a non-atomic access since the atomic is mutably borrowed.
256 #[inline]
257 pub fn load_mut(&mut self) -> Option<&mut Target<W>> {
258 // SAFETY: The pointer is stable.
259 Some(unsafe { self.ptr.get_mut()?.as_mut() })
260 }
261}
262impl<W: StrongWellMut> AtomicOption<Pin<W>> {
263 /// Loads the inner data as a pinned mutable reference.
264 ///
265 /// This is a version of [`load_mut`] that works with pinned values.
266 ///
267 /// [`load_mut`]: AtomicOption::load_mut
268 #[inline]
269 pub fn load_mut_pinned(&mut self) -> Option<Pin<&mut <Pin<W> as Well>::Target>> {
270 // SAFETY: The pointer is stable, and we don't disrupt the pin.
271 Some(unsafe { Pin::new_unchecked(self.ptr.get_mut()?.as_mut()) })
272 }
273}
274impl<W: Well> From<W> for AtomicOption<W> {
275 #[inline]
276 #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
277 fn from(well: W) -> Self {
278 AtomicOption::some(well)
279 }
280}
281impl<W: Well> From<Option<W>> for AtomicOption<W> {
282 #[inline]
283 #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
284 fn from(well: Option<W>) -> Self {
285 AtomicOption::new(well)
286 }
287}
impl<W: Well + Clone> AtomicOption<W> {
    /// Loads a clone of the inner data.
    ///
    /// This still performs an atomic [`load`], but instead of offering a reference, the smart
    /// pointer is cloned instead.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load_clone(&self, ordering: Ordering) -> Option<W> {
        let ptr = self.ptr.load(ordering)?;

        // SAFETY: This was `remove`d from a well.
        // `ManuallyDrop` prevents dropping the reconstructed well: the atomic still
        // owns the pointer, we only borrow it long enough to clone.
        let ptr = unsafe { ManuallyDrop::new(W::insert(ptr)) };

        Some((*ptr).clone())
    }
}
impl<W: Well> Drop for AtomicOption<W> {
    #[inline]
    fn drop(&mut self) {
        // If a pointer is stored, reconstruct the owning well and drop it so the
        // underlying allocation is released.
        if let Some(ptr) = self.ptr.load_drop() {
            // SAFETY: This was `remove`d from a well.
            unsafe {
                drop(W::insert(ptr));
            }
        }
    }
}
impl<W: Well + fmt::Debug> fmt::Debug for AtomicOption<W> {
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Formats as `AtomicOption(None)` or `AtomicOption(Some(..))`.
        let mut tuple = f.debug_tuple("AtomicOption");
        let Some(ptr) = self.ptr.load_debug() else {
            return tuple.field(&None::<W>).finish();
        };

        // SAFETY: This was `remove`d from a well.
        // `ManuallyDrop` prevents dropping the reconstructed well: the atomic still owns it.
        let ptr = unsafe { ManuallyDrop::new(W::insert(ptr)) };

        tuple.field(&Some(&*ptr)).finish()
    }
}
331
/// Atomic potential well.
///
/// Internally, this just wraps a pointer to `Bucket<T>` and uses atomic pointer
/// operations to access it. However, the number of operations on the pointer is limited to
/// ensure correctness in safe code.
///
/// Unlike [`AtomicOption`], this always holds exactly one well, so the stored
/// pointer is never null.
#[repr(transparent)]
pub struct Atomic<W: Well>(inner::Atomic<Target<W>>);
339impl<W: Well + Default> Default for Atomic<W> {
340 #[inline]
341 #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
342 fn default() -> Self {
343 Atomic::new(Default::default())
344 }
345}
346impl<W: Well> Atomic<W> {
347 /// Creates atomic with a value.
348 #[inline]
349 pub fn new(well: W) -> Atomic<W> {
350 Atomic(inner::Atomic::new(well.remove()))
351 }
352
353 /// Gives access to the underlying [`AtomicPtr`].
354 ///
355 /// # Safety
356 ///
357 /// The pointer inside the atomic must always a valid pointer from [`Well::remove`] and
358 /// therefore must not be null. Additionally, keep in mind that this atomic *owns* the
359 /// pointer, and if you want to move it out, you must put a different pointer in its place
360 /// first.
361 #[inline]
362 #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
363 pub unsafe fn as_raw_unchecked(&self) -> &AtomicPtr<Target<W>> {
364 self.0.as_raw()
365 }
366
367 /// Atomically swaps the data inside the well.
368 ///
369 /// This is equivalent to an atomic [`swap`].
370 ///
371 /// [`swap`]: AtomicPtr::swap
372 #[inline]
373 pub fn swap(&self, well: W, ordering: Ordering) -> W {
374 let ptr = self.0.swap(well.remove(), ordering);
375
376 // SAFETY: This was `remove`d from a well.
377 unsafe { Well::insert(ptr) }
378 }
379}
impl<W: WeakWell> Atomic<W> {
    /// Tries to load the inner data.
    ///
    /// This is equivalent to an atomic [`load`], but it may fail due to the weak reference.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn try_load(&self, ordering: Ordering) -> Option<<W as WeakWell>::Strong> {
        let ptr = self.0.load(ordering);

        // SAFETY: This was `remove`d from a well.
        // `ManuallyDrop` keeps the reconstructed well from being dropped here: the
        // atomic still owns the pointer, we only borrow it to call `access`.
        let ptr = unsafe { ManuallyDrop::new(W::insert(ptr)) };

        WeakWell::access(&*ptr)
    }
}
396impl<W: StrongWell + Deref<Target: Sized>> Atomic<W> {
397 /// Loads the inner data as an immutable reference.
398 ///
399 /// This is equivalent to an atomic [`load`].
400 ///
401 /// [`load`]: AtomicPtr::load
402 #[inline]
403 pub fn load(&self, ordering: Ordering) -> &Target<W> {
404 let ptr = self.0.load(ordering);
405
406 // SAFETY: The pointer is stable.
407 unsafe { ptr.as_ref() }
408 }
409
410 /// Atomically swaps the data inside the well and returns a reference to the new data.
411 ///
412 /// This is [`swap`], but with the unsafe deref hidden behind a safe API.
413 ///
414 /// [`swap`]: AtomicOption::swap
415 #[inline]
416 pub fn swap_get(&self, well: W, ordering: Ordering) -> (W, &Target<W>) {
417 let new = well.remove();
418 let old = self.0.swap(new, ordering);
419
420 // SAFETY: The pointer is stable.
421 let new = unsafe { new.as_ref() };
422
423 (
424 // SAFETY: This was `remove`d from a well.
425 unsafe { Well::insert(old) },
426 new,
427 )
428 }
429}
430impl<W: StrongWellMut + DerefMut<Target: Unpin>> Atomic<W> {
431 /// Loads the inner data as a mutable reference.
432 ///
433 /// This performs a non-atomic access since the atomic is mutably borrowed.
434 #[inline]
435 pub fn load_mut(&mut self) -> &mut Target<W> {
436 let mut ptr = self.0.get_mut();
437
438 // SAFETY: The pointer is stable.
439 unsafe { ptr.as_mut() }
440 }
441}
442impl<W: StrongWellMut> Atomic<Pin<W>> {
443 /// Loads the inner data as a mutable reference.
444 ///
445 /// This is a version of [`load_mut`] that works with pinned values.
446 ///
447 /// [`load_mut`]: AtomicOption::load_mut
448 #[inline]
449 pub fn load_mut_pinned(&mut self) -> Pin<&mut <Pin<W> as Well>::Target> {
450 let mut ptr = self.0.get_mut();
451
452 // SAFETY: The pointer is stable, and we don't disrupt the pin.
453 unsafe { Pin::new_unchecked(ptr.as_mut()) }
454 }
455}
456impl<W: Well> From<W> for Atomic<W> {
457 #[inline]
458 #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
459 fn from(well: W) -> Self {
460 Atomic::new(well)
461 }
462}
impl<W: Well + Clone> Atomic<W> {
    /// Loads a clone of the inner data.
    ///
    /// This still performs an atomic [`load`], but instead of offering a reference, the smart
    /// pointer is cloned instead.
    ///
    /// [`load`]: AtomicPtr::load
    #[inline]
    pub fn load_clone(&self, ordering: Ordering) -> W {
        let ptr = self.0.load(ordering);

        // SAFETY: This was `remove`d from a well.
        // `ManuallyDrop` prevents dropping the reconstructed well: the atomic still
        // owns the pointer, we only borrow it long enough to clone.
        let ptr = unsafe { ManuallyDrop::new(W::insert(ptr)) };

        (*ptr).clone()
    }
}
impl<W: Well> Drop for Atomic<W> {
    #[inline]
    fn drop(&mut self) {
        // Reconstruct the owning well and drop it so the allocation is released;
        // unlike `AtomicOption`, a pointer is always present here.
        let ptr = self.0.load_drop();

        // SAFETY: This was `remove`d from a well.
        unsafe {
            drop(W::insert(ptr));
        }
    }
}
impl<W: Well + fmt::Debug> fmt::Debug for Atomic<W> {
    #[inline]
    #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let ptr = self.0.load_debug();

        // SAFETY: This was `remove`d from a well.
        // `ManuallyDrop` prevents dropping the reconstructed well: the atomic still owns it.
        let ptr = unsafe { ManuallyDrop::new(W::insert(ptr)) };

        f.debug_tuple("Atomic").field(&*ptr).finish()
    }
}
503
/// Type-hoisted [`AtomicOption`].
///
/// Uses [`PotentialWell`] to allow for recursive structures at the cost of some usability.
/// See the documentation for [`PotentialWell`] for more information.
///
/// This is a thin wrapper around an [`AtomicOption`] over the hoisted well type `W::Well<T>`.
#[repr(transparent)]
pub struct PotentialAtomicOption<T, W: PotentialWell>(AtomicOption<W::Well<T>>);
510
511/// By default, nothing is stored in the atomic.
512impl<T, W: PotentialWell> Default for PotentialAtomicOption<T, W> {
513 #[inline]
514 #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
515 fn default() -> Self {
516 PotentialAtomicOption::none()
517 }
518}
519impl<T, W: PotentialWell> PotentialAtomicOption<T, W> {
520 /// Creates atomic without anything inside.
521 #[inline]
522 pub fn none() -> PotentialAtomicOption<T, W> {
523 PotentialAtomicOption::new(None)
524 }
525
526 /// Creates atomic with something inside.
527 #[inline]
528 pub fn some(well: W::Well<T>) -> PotentialAtomicOption<T, W> {
529 PotentialAtomicOption::new(Some(well))
530 }
531
532 /// Creates atomic.
533 pub fn new(well: Option<W::Well<T>>) -> PotentialAtomicOption<T, W> {
534 PotentialAtomicOption(AtomicOption::new(well))
535 }
536
537 /// Gives access to the underlying [`AtomicPtr`].
538 ///
539 /// # Safety
540 ///
541 /// The pointer inside the atomic must always be null, or a valid pointer from [`Well::remove`].
542 /// Additionally, keep in mind that this atomic *owns* the pointer, and if you want to move it
543 /// out, you must put a different pointer in its place first.
544 #[inline]
545 #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
546 pub unsafe fn as_raw_unchecked(&self) -> &AtomicPtr<T> {
547 // SAFETY: Ensured by caller.
548 unsafe { self.0.as_raw_unchecked() }
549 }
550
551 /// Atomically swaps the data inside the well.
552 ///
553 /// This is equivalent to an atomic [`swap`].
554 ///
555 /// [`swap`]: AtomicPtr::swap
556 #[inline]
557 pub fn swap(&self, well: W::Well<T>, ordering: Ordering) -> Option<W::Well<T>> {
558 self.0.swap(well, ordering)
559 }
560
561 /// Takes the data out of the well.
562 ///
563 /// This is equivalent to an atomic [`swap`] with a null pointer.
564 ///
565 /// [`swap`]: AtomicPtr::swap
566 #[inline]
567 pub fn take(&self, ordering: Ordering) -> Option<W::Well<T>> {
568 self.0.take(ordering)
569 }
570
571 /// Inserts data into the well.
572 ///
573 /// This uses [`compare_exchange`] to avoid inserting into the well if it's already full. If you
574 /// want to use [`compare_exchange_weak`] instead, use [`insert_weak`].
575 ///
576 /// [`insert_weak`]: PotentialAtomicOption::insert_weak
577 /// [`compare_exchange`]: AtomicPtr::compare_exchange
578 /// [`compare_exchange_weak`]: AtomicPtr::compare_exchange_weak
579 #[inline]
580 pub fn insert(
581 &self,
582 well: W::Well<T>,
583 success: Ordering,
584 failure: Ordering,
585 ) -> Result<(), W::Well<T>> {
586 self.0.insert(well, success, failure)
587 }
588
589 /// Inserts data into the well, sometimes failing spuriously.
590 ///
591 /// This uses [`compare_exchange_weak`] to avoid inserting into the well if it's already full,
592 /// which may spuriously fail. If you want to use [`compare_exchange`] instead, use [`insert`].
593 ///
594 /// [`insert`]: PotentialAtomicOption::insert
595 /// [`compare_exchange`]: AtomicPtr::compare_exchange
596 /// [`compare_exchange_weak`]: AtomicPtr::compare_exchange_weak
597 #[inline]
598 pub fn insert_weak(
599 &self,
600 well: W::Well<T>,
601 success: Ordering,
602 failure: Ordering,
603 ) -> Result<(), W::Well<T>> {
604 self.0.insert_weak(well, success, failure)
605 }
606}
607impl<T, W: PotentialWell<Well<T>: StrongWell + Deref<Target = T>>> PotentialAtomicOption<T, W> {
608 /// Loads the inner data as an immutable reference.
609 ///
610 /// This is equivalent to an atomic [`load`].
611 ///
612 /// [`load`]: AtomicPtr::load
613 #[inline]
614 pub fn load(&self, ordering: Ordering) -> Option<&T> {
615 self.0.load(ordering)
616 }
617
618 /// Atomically swaps the data inside the well and returns a reference to the new data.
619 ///
620 /// This is [`swap`], but with the unsafe deref hidden behind a safe API.
621 ///
622 /// [`swap`]: AtomicOption::swap
623 #[inline]
624 pub fn swap_get(&self, well: W::Well<T>, ordering: Ordering) -> (Option<W::Well<T>>, &T) {
625 self.0.swap_get(well, ordering)
626 }
627
628 /// Inserts data into the well and returns a reference to the new data.
629 ///
630 /// This is [`insert`], but with the unsafe deref hidden behind a safe API.
631 ///
632 /// [`insert`]: AtomicOption::insert
633 #[inline]
634 pub fn insert_get(
635 &self,
636 well: W::Well<T>,
637 success: Ordering,
638 failure: Ordering,
639 ) -> Result<&T, W::Well<T>> {
640 self.0.insert_get(well, success, failure)
641 }
642
643 /// Inserts data into the well, returns reference to the new data, sometimes failing spuriously.
644 ///
645 /// This is [`insert_weak`], but with the unsafe deref hidden behind a safe API.
646 ///
647 /// [`insert_weak`]: AtomicOption::insert_weak
648 #[inline]
649 pub fn insert_weak_get(
650 &self,
651 well: W::Well<T>,
652 success: Ordering,
653 failure: Ordering,
654 ) -> Result<&T, W::Well<T>> {
655 self.0.insert_weak_get(well, success, failure)
656 }
657}
658impl<T: Unpin, W: PotentialWell<Well<T>: StrongWellMut + Deref<Target = T>>>
659 PotentialAtomicOption<T, W>
660{
661 /// Loads the inner data as a mutable reference.
662 ///
663 /// This performs a non-atomic access since the atomic is mutably borrowed.
664 #[inline]
665 pub fn load_mut(&mut self) -> Option<&mut T> {
666 self.0.load_mut()
667 }
668}
669impl<T, W: PotentialWell<Well<T>: StrongWellMut + Deref<Target = T>>>
670 PotentialAtomicOption<T, Pin<W>>
671where
672 Pin<W>: PotentialWell<Well<T> = Pin<W::Well<T>>>,
673{
674 /// Loads the inner data as a pinned mutable reference.
675 ///
676 /// This is a version of [`load_mut`] that works with pinned values.
677 ///
678 /// [`load_mut`]: PotentialAtomicOption::load_mut
679 #[inline]
680 pub fn load_mut_pinned(&mut self) -> Option<Pin<&mut T>> {
681 self.0.load_mut_pinned()
682 }
683}
684impl<T, W: PotentialWell> From<Option<W::Well<T>>> for PotentialAtomicOption<T, W> {
685 #[inline]
686 #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
687 fn from(well: Option<W::Well<T>>) -> Self {
688 match well {
689 Some(well) => PotentialAtomicOption::some(well),
690 None => PotentialAtomicOption::none(),
691 }
692 }
693}
694impl<T, W: PotentialWell<Well<T>: Clone>> PotentialAtomicOption<T, W> {
695 /// Loads a clone of the inner data.
696 ///
697 /// This still performs an atomic [`load`], but instead of offering a reference, the smart
698 /// pointer is cloned instead.
699 ///
700 /// [`load`]: AtomicPtr::load
701 #[inline]
702 pub fn load_clone(&self, ordering: Ordering) -> Option<W::Well<T>> {
703 self.0.load_clone(ordering)
704 }
705}
706impl<T, W: PotentialWell<Well<T>: fmt::Debug>> fmt::Debug for PotentialAtomicOption<T, W> {
707 #[inline]
708 #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
709 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
710 fmt::Debug::fmt(&self.0, f)
711 }
712}
713
/// Type-hoisted [`Atomic`].
///
/// Uses [`PotentialWell`] to allow for recursive structures at the cost of some usability.
/// See the documentation for [`PotentialWell`] for more information.
///
/// This is a thin wrapper around an [`Atomic`] over the hoisted well type `W::Well<T>`.
#[repr(transparent)]
pub struct PotentialAtomic<T, W: PotentialWell>(Atomic<W::Well<T>>);
720impl<T: Default, W: PotentialWell<Well<T>: Default>> Default for PotentialAtomic<T, W> {
721 #[inline]
722 #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
723 fn default() -> Self {
724 PotentialAtomic::new(Default::default())
725 }
726}
727impl<T, W: PotentialWell> PotentialAtomic<T, W> {
728 /// Creates atomic with a value.
729 #[inline]
730 pub fn new(well: W::Well<T>) -> PotentialAtomic<T, W> {
731 PotentialAtomic(Atomic::new(well))
732 }
733
734 /// Gives access to the underlying [`AtomicPtr`].
735 ///
736 /// # Safety
737 ///
738 /// The pointer inside the atomic must always a valid pointer from [`Well::remove`] and
739 /// therefore must not be null. Additionally, keep in mind that this atomic *owns* the
740 /// pointer, and if you want to move it out, you must put a different pointer in its place
741 /// first.
742 #[inline]
743 #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
744 pub unsafe fn as_raw_unchecked(&self) -> &AtomicPtr<T> {
745 // SAFETY: Ensured by caller.
746 unsafe { self.0.as_raw_unchecked() }
747 }
748
749 /// Atomically swaps the data inside the well.
750 ///
751 /// This is equivalent to an atomic [`swap`].
752 ///
753 /// [`swap`]: AtomicPtr::swap
754 #[inline]
755 pub fn swap(&self, well: W::Well<T>, ordering: Ordering) -> W::Well<T> {
756 self.0.swap(well, ordering)
757 }
758}
759impl<T, W: PotentialWell<Well<T>: StrongWell + Deref<Target = T>>> PotentialAtomic<T, W> {
760 /// Loads the inner data as an immutable reference.
761 ///
762 /// This is equivalent to an atomic [`load`].
763 ///
764 /// [`load`]: AtomicPtr::load
765 #[inline]
766 pub fn load(&self, ordering: Ordering) -> &T {
767 self.0.load(ordering)
768 }
769
770 /// Atomically swaps the data inside the well and returns a reference to the new data.
771 ///
772 /// This is [`swap`], but with the unsafe deref hidden behind a safe API.
773 ///
774 /// [`swap`]: AtomicOption::swap
775 #[inline]
776 pub fn swap_get(&self, well: W::Well<T>, ordering: Ordering) -> (W::Well<T>, &T) {
777 self.0.swap_get(well, ordering)
778 }
779}
780impl<T: Unpin, W: PotentialWell<Well<T>: StrongWellMut + Deref<Target = T>>> PotentialAtomic<T, W> {
781 /// Loads the inner data as a mutable reference.
782 ///
783 /// This performs a non-atomic access since the atomic is mutably borrowed.
784 #[inline]
785 pub fn load_mut(&mut self) -> &mut T {
786 self.0.load_mut()
787 }
788}
789impl<T, W: PotentialWell<Well<T>: StrongWellMut + Deref<Target = T>>> PotentialAtomic<T, Pin<W>>
790where
791 Pin<W>: PotentialWell<Well<T> = Pin<W::Well<T>>>,
792{
793 /// Loads the inner data as a mutable reference.
794 ///
795 /// This is a version of [`load_mut`] that works with pinned values.
796 ///
797 /// [`load_mut`]: PotentialAtomicOption::load_mut
798 #[inline]
799 pub fn load_mut_pinned(&mut self) -> Pin<&mut T> {
800 self.0.load_mut_pinned()
801 }
802}
803impl<T, W: PotentialWell<Well<T>: Clone>> PotentialAtomic<T, W> {
804 /// Loads a clone of the inner data.
805 ///
806 /// This still performs an atomic [`load`], but instead of offering a reference, the smart
807 /// pointer is cloned instead.
808 ///
809 /// [`load`]: AtomicPtr::load
810 #[inline]
811 pub fn load_clone(&self, ordering: Ordering) -> W::Well<T> {
812 self.0.load_clone(ordering)
813 }
814}
815impl<T, W: PotentialWell<Well<T>: fmt::Debug>> fmt::Debug for PotentialAtomic<T, W> {
816 #[inline]
817 #[cfg_attr(any(coverage_nightly, feature = "nightly"), coverage(off))]
818 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
819 fmt::Debug::fmt(&self.0, f)
820 }
821}