// sdd/atomic_shared.rs
1use std::mem::forget;
2use std::panic::UnwindSafe;
3use std::ptr::{NonNull, null, null_mut};
4#[cfg(not(feature = "loom"))]
5use std::sync::atomic::AtomicPtr;
6use std::sync::atomic::Ordering::{self, Acquire, Relaxed};
7
8#[cfg(feature = "loom")]
9use loom::sync::atomic::AtomicPtr;
10
11use super::ref_counted::RefCounted;
12use super::{Guard, Ptr, Shared, Tag};
13
/// [`AtomicShared`] owns the underlying instance, and allows users to perform atomic operations
/// on the pointer to it.
#[derive(Debug)]
pub struct AtomicShared<T> {
    // Tagged pointer to the reference-counted instance: the low-order bits carry a [`Tag`]
    // (see `Tag::update_tag`/`Tag::unset_tag`), and a null untagged pointer means no
    // instance is currently owned.
    ptr: AtomicPtr<RefCounted<T>>,
}
20
/// A pair of [`Shared`] and [`Ptr`] of the same type.
///
/// Returned by the swap/compare-exchange family of methods: the `Option<Shared<T>>` carries
/// the previously owned (or, on failure, the rejected) strong reference, and the [`Ptr`] is
/// the pointer value observed in the [`AtomicShared`].
pub type SharedPtrPair<'g, T> = (Option<Shared<T>>, Ptr<'g, T>);
23
24impl<T: 'static> AtomicShared<T> {
25 /// Creates a new [`AtomicShared`] from an instance of `T`.
26 ///
27 /// The type of the instance must be determined at compile-time, must not contain non-static
28 /// references, and must not be a non-static reference since the instance can theoretically
29 /// live as long as the process. For instance, `struct Disallowed<'l, T>(&'l T)` is not
30 /// allowed, because an instance of the type cannot outlive `'l` whereas the garbage collector
31 /// does not guarantee that the instance is dropped within `'l`.
32 ///
33 /// # Examples
34 ///
35 /// ```
36 /// use sdd::AtomicShared;
37 ///
38 /// let atomic_shared: AtomicShared<usize> = AtomicShared::new(10);
39 /// ```
40 #[inline]
41 pub fn new(t: T) -> Self {
42 Self {
43 ptr: AtomicPtr::new(RefCounted::new_shared(|| t).as_ptr()),
44 }
45 }
46}
47
48impl<T> AtomicShared<T> {
49 /// Creates a new [`AtomicShared`] from a [`Shared`] of `T`.
50 ///
51 /// # Examples
52 ///
53 /// ```
54 /// use sdd::{AtomicShared, Shared};
55 ///
56 /// let shared: Shared<usize> = Shared::new(10);
57 /// let atomic_shared: AtomicShared<usize> = AtomicShared::from(shared);
58 /// ```
59 #[cfg(not(feature = "loom"))]
60 #[inline]
61 #[must_use]
62 pub const fn from(shared: Shared<T>) -> Self {
63 let ptr = shared.underlying_ptr();
64 forget(shared);
65 let ptr: std::sync::atomic::AtomicPtr<RefCounted<T>> = AtomicPtr::new(ptr.cast_mut());
66 Self { ptr }
67 }
68
69 /// Creates a new [`AtomicShared`] from a [`Shared`] of `T`.
70 #[cfg(feature = "loom")]
71 #[inline]
72 #[must_use]
73 pub fn from(shared: Shared<T>) -> Self {
74 let ptr = shared.underlying_ptr();
75 forget(shared);
76 let ptr: loom::sync::atomic::AtomicPtr<RefCounted<T>> = AtomicPtr::new(ptr.cast_mut());
77 Self { ptr }
78 }
79
80 /// Creates a null [`AtomicShared`].
81 ///
82 /// # Examples
83 ///
84 /// ```
85 /// use sdd::AtomicShared;
86 ///
87 /// let atomic_shared: AtomicShared<usize> = AtomicShared::null();
88 /// ```
89 #[cfg(not(feature = "loom"))]
90 #[inline]
91 #[must_use]
92 pub const fn null() -> Self {
93 let ptr: std::sync::atomic::AtomicPtr<RefCounted<T>> = AtomicPtr::new(null_mut());
94 Self { ptr }
95 }
96
97 /// Creates a null [`AtomicShared`].
98 #[cfg(feature = "loom")]
99 #[inline]
100 #[must_use]
101 pub fn null() -> Self {
102 let ptr: loom::sync::atomic::AtomicPtr<RefCounted<T>> = AtomicPtr::new(null_mut());
103 Self { ptr }
104 }
105
106 /// Returns `true` if the [`AtomicShared`] is null.
107 ///
108 /// # Examples
109 ///
110 /// ```
111 /// use std::sync::atomic::Ordering::Relaxed;
112 ///
113 /// use sdd::{AtomicShared, Tag};
114 ///
115 /// let atomic_shared: AtomicShared<usize> = AtomicShared::null();
116 /// atomic_shared.update_tag_if(Tag::Both, |p| p.tag() == Tag::None, Relaxed, Relaxed);
117 /// assert!(atomic_shared.is_null(Relaxed));
118 /// ```
119 #[inline]
120 #[must_use]
121 pub fn is_null(&self, order: Ordering) -> bool {
122 Tag::unset_tag(self.ptr.load(order)).is_null()
123 }
124
125 /// Loads a pointer value from the [`AtomicShared`].
126 ///
127 /// # Examples
128 ///
129 /// ```
130 /// use std::sync::atomic::Ordering::Relaxed;
131 ///
132 /// use sdd::{AtomicShared, Guard};
133 ///
134 /// let atomic_shared: AtomicShared<usize> = AtomicShared::new(11);
135 /// let guard = Guard::new();
136 /// let ptr = atomic_shared.load(Relaxed, &guard);
137 /// assert_eq!(*ptr.as_ref().unwrap(), 11);
138 /// ```
139 #[inline]
140 #[must_use]
141 pub fn load<'g>(&self, order: Ordering, _guard: &'g Guard) -> Ptr<'g, T> {
142 Ptr::from(self.ptr.load(order))
143 }
144
    /// Stores the given value into the [`AtomicShared`] and returns the original value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::Ordering::Relaxed;
    ///
    /// use sdd::{AtomicShared, Guard, Shared, Tag};
    ///
    /// let atomic_shared: AtomicShared<usize> = AtomicShared::new(14);
    /// let guard = Guard::new();
    /// let (old, tag) = atomic_shared.swap((Some(Shared::new(15)), Tag::Second), Relaxed);
    /// assert_eq!(tag, Tag::None);
    /// assert_eq!(*old.unwrap(), 14);
    /// let (old, tag) = atomic_shared.swap((None, Tag::First), Relaxed);
    /// assert_eq!(tag, Tag::Second);
    /// assert_eq!(*old.unwrap(), 15);
    /// let (old, tag) = atomic_shared.swap((None, Tag::None), Relaxed);
    /// assert_eq!(tag, Tag::First);
    /// assert!(old.is_none());
    /// ```
    #[inline]
    pub fn swap(&self, new: (Option<Shared<T>>, Tag), order: Ordering) -> (Option<Shared<T>>, Tag) {
        // Compose the desired value: the raw pointer of the new `Shared` (or null)
        // with the new tag embedded into its low-order bits.
        let desired = Tag::update_tag(
            new.0.as_ref().map_or_else(null, Shared::underlying_ptr),
            new.1,
        )
        .cast_mut();
        let prev = self.ptr.swap(desired, order);
        let tag = Tag::into_tag(prev);
        let prev_ptr = Tag::unset_tag(prev).cast_mut();
        // The strong reference held by `new` has been transferred into `self`, so
        // `new` must not run its destructor (that would decrement the count once
        // too many).
        forget(new);
        // Hand the previously owned reference back to the caller as a `Shared`.
        (NonNull::new(prev_ptr).map(Shared::from), tag)
    }
179
180 /// Returns its [`Tag`].
181 ///
182 /// # Examples
183 ///
184 /// ```
185 /// use std::sync::atomic::Ordering::Relaxed;
186 ///
187 /// use sdd::{AtomicShared, Tag};
188 ///
189 /// let atomic_shared: AtomicShared<usize> = AtomicShared::null();
190 /// assert_eq!(atomic_shared.tag(Relaxed), Tag::None);
191 /// ```
192 #[inline]
193 #[must_use]
194 pub fn tag(&self, order: Ordering) -> Tag {
195 Tag::into_tag(self.ptr.load(order))
196 }
197
198 /// Sets a new [`Tag`] if the given condition is met.
199 ///
200 /// Returns `true` if the new [`Tag`] has been successfully set.
201 ///
202 /// # Examples
203 ///
204 /// ```
205 /// use std::sync::atomic::Ordering::Relaxed;
206 ///
207 /// use sdd::{AtomicShared, Tag};
208 ///
209 /// let atomic_shared: AtomicShared<usize> = AtomicShared::null();
210 /// assert!(atomic_shared.update_tag_if(Tag::Both, |p| p.tag() == Tag::None, Relaxed, Relaxed));
211 /// assert_eq!(atomic_shared.tag(Relaxed), Tag::Both);
212 /// ```
213 #[inline]
214 pub fn update_tag_if<F: FnMut(Ptr<T>) -> bool>(
215 &self,
216 tag: Tag,
217 mut condition: F,
218 set_order: Ordering,
219 fetch_order: Ordering,
220 ) -> bool {
221 self.ptr
222 .fetch_update(set_order, fetch_order, |ptr| {
223 if condition(Ptr::from(ptr)) {
224 Some(Tag::update_tag(ptr, tag).cast_mut())
225 } else {
226 None
227 }
228 })
229 .is_ok()
230 }
231
    /// Stores `new` into the [`AtomicShared`] if the current value is the same as `current`.
    ///
    /// Returns the previously held value and the updated [`Ptr`].
    ///
    /// # Errors
    ///
    /// Returns `Err` with the supplied [`Shared`] and the current [`Ptr`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::Ordering::Relaxed;
    ///
    /// use sdd::{AtomicShared, Guard, Shared, Tag};
    ///
    /// let atomic_shared: AtomicShared<usize> = AtomicShared::new(17);
    /// let guard = Guard::new();
    ///
    /// let mut ptr = atomic_shared.load(Relaxed, &guard);
    /// assert_eq!(*ptr.as_ref().unwrap(), 17);
    ///
    /// atomic_shared.update_tag_if(Tag::Both, |_| true, Relaxed, Relaxed);
    /// assert!(atomic_shared.compare_exchange(
    ///     ptr, (Some(Shared::new(18)), Tag::First), Relaxed, Relaxed, &guard).is_err());
    ///
    /// ptr.set_tag(Tag::Both);
    /// let old: Shared<usize> = atomic_shared.compare_exchange(
    ///     ptr,
    ///     (Some(Shared::new(18)), Tag::First),
    ///     Relaxed,
    ///     Relaxed,
    ///     &guard).unwrap().0.unwrap();
    /// assert_eq!(*old, 17);
    /// drop(old);
    ///
    /// assert!(atomic_shared.compare_exchange(
    ///     ptr, (Some(Shared::new(19)), Tag::None), Relaxed, Relaxed, &guard).is_err());
    /// assert_eq!(*ptr.as_ref().unwrap(), 17);
    /// ```
    #[inline]
    pub fn compare_exchange<'g>(
        &self,
        current: Ptr<'g, T>,
        new: (Option<Shared<T>>, Tag),
        success: Ordering,
        failure: Ordering,
        _guard: &'g Guard,
    ) -> Result<SharedPtrPair<'g, T>, SharedPtrPair<'g, T>> {
        // Compose the desired value: the raw pointer of the new `Shared` (or null)
        // with the new tag embedded into its low-order bits.
        let desired = Tag::update_tag(
            new.0.as_ref().map_or_else(null, Shared::underlying_ptr),
            new.1,
        )
        .cast_mut();
        // The comparison includes the tag bits: `current` must match both pointer and tag.
        match self.ptr.compare_exchange(
            current.underlying_ptr().cast_mut(),
            desired,
            success,
            failure,
        ) {
            Ok(prev) => {
                // Success: the previously owned reference goes back to the caller as
                // a `Shared`, and the reference held by `new` now belongs to `self`,
                // so `new` must not run its destructor.
                let prev_shared = NonNull::new(Tag::unset_tag(prev).cast_mut()).map(Shared::from);
                forget(new);
                Ok((prev_shared, Ptr::from(desired)))
            }
            // Failure: give the supplied `Shared` back untouched, together with the
            // pointer value actually observed.
            Err(actual) => Err((new.0, Ptr::from(actual))),
        }
    }
299
    /// Stores `new` into the [`AtomicShared`] if the current value is the same as `current`.
    ///
    /// This method is allowed to spuriously fail even when the comparison succeeds.
    ///
    /// Returns the previously held value and the updated [`Ptr`].
    ///
    /// # Errors
    ///
    /// Returns `Err` with the supplied [`Shared`] and the current [`Ptr`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::Ordering::Relaxed;
    ///
    /// use sdd::{AtomicShared, Guard, Shared, Tag};
    ///
    /// let atomic_shared: AtomicShared<usize> = AtomicShared::new(17);
    /// let guard = Guard::new();
    ///
    /// let mut ptr = atomic_shared.load(Relaxed, &guard);
    /// assert_eq!(*ptr.as_ref().unwrap(), 17);
    ///
    /// while let Err((_, actual)) = atomic_shared.compare_exchange_weak(
    ///     ptr,
    ///     (Some(Shared::new(18)), Tag::First),
    ///     Relaxed,
    ///     Relaxed,
    ///     &guard) {
    ///     ptr = actual;
    /// }
    ///
    /// let mut ptr = atomic_shared.load(Relaxed, &guard);
    /// assert_eq!(*ptr.as_ref().unwrap(), 18);
    /// ```
    #[inline]
    pub fn compare_exchange_weak<'g>(
        &self,
        current: Ptr<'g, T>,
        new: (Option<Shared<T>>, Tag),
        success: Ordering,
        failure: Ordering,
        _guard: &'g Guard,
    ) -> Result<SharedPtrPair<'g, T>, SharedPtrPair<'g, T>> {
        // Compose the desired value: the raw pointer of the new `Shared` (or null)
        // with the new tag embedded into its low-order bits.
        let desired = Tag::update_tag(
            new.0.as_ref().map_or_else(null, Shared::underlying_ptr),
            new.1,
        )
        .cast_mut();
        // `compare_exchange_weak` may fail spuriously; callers are expected to retry
        // in a loop (see the example above).
        match self.ptr.compare_exchange_weak(
            current.underlying_ptr().cast_mut(),
            desired,
            success,
            failure,
        ) {
            Ok(prev) => {
                // Success: the previously owned reference goes back to the caller as
                // a `Shared`, and the reference held by `new` now belongs to `self`,
                // so `new` must not run its destructor.
                let prev_shared = NonNull::new(Tag::unset_tag(prev).cast_mut()).map(Shared::from);
                forget(new);
                Ok((prev_shared, Ptr::from(desired)))
            }
            // Failure (genuine or spurious): give the supplied `Shared` back
            // untouched, together with the pointer value actually observed.
            Err(actual) => Err((new.0, Ptr::from(actual))),
        }
    }
363
364 /// Clones `self` including tags.
365 ///
366 /// If `self` is not supposed to be an `AtomicShared::null`, this will never return an
367 /// `AtomicShared::null`.
368 ///
369 /// # Examples
370 ///
371 /// ```
372 /// use std::sync::atomic::Ordering::Relaxed;
373 ///
374 /// use sdd::{AtomicShared, Guard};
375 ///
376 /// let atomic_shared: AtomicShared<usize> = AtomicShared::new(59);
377 /// let guard = Guard::new();
378 /// let atomic_shared_clone = atomic_shared.clone(Relaxed, &guard);
379 /// let ptr = atomic_shared_clone.load(Relaxed, &guard);
380 /// assert_eq!(*ptr.as_ref().unwrap(), 59);
381 /// ```
382 #[inline]
383 #[must_use]
384 pub fn clone(&self, order: Ordering, guard: &Guard) -> AtomicShared<T> {
385 self.get_shared(order, guard)
386 .map_or_else(Self::null, |s| Self::from(s))
387 }
388
    /// Tries to create a [`Shared`] out of `self`.
    ///
    /// If `self` is not supposed to be an `AtomicShared::null`, this will never return `None`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::atomic::Ordering::Relaxed;
    ///
    /// use sdd::{AtomicShared, Guard, Shared};
    ///
    /// let atomic_shared: AtomicShared<usize> = AtomicShared::new(47);
    /// let guard = Guard::new();
    /// let shared: Shared<usize> = atomic_shared.get_shared(Relaxed, &guard).unwrap();
    /// assert_eq!(*shared, 47);
    /// ```
    #[inline]
    #[must_use]
    pub fn get_shared(&self, order: Ordering, _guard: &Guard) -> Option<Shared<T>> {
        let mut ptr = Tag::unset_tag(self.ptr.load(order));
        while !ptr.is_null() {
            // SAFETY: `ptr` is non-null here; the `_guard` parameter presumably keeps
            // the pointee from being deallocated while it is dereferenced — TODO
            // confirm the guard's protection semantics.
            if unsafe { (*ptr).try_add_ref(Acquire) } {
                // Successfully acquired an additional strong reference.
                return NonNull::new(ptr.cast_mut()).map(Shared::from);
            }
            // `try_add_ref` failed (the reference count could not be incremented);
            // re-read the pointer and retry with whatever is stored now.
            ptr = Tag::unset_tag(self.ptr.load(order));
        }
        // A null pointer means no instance is owned.
        None
    }
417
418 /// Converts `self` into a [`Shared`].
419 ///
420 /// Returns `None` if `self` did not hold a strong reference.
421 ///
422 /// # Examples
423 ///
424 /// ```
425 /// use std::sync::atomic::Ordering::Relaxed;
426 ///
427 /// use sdd::{AtomicShared, Shared};
428 ///
429 /// let atomic_shared: AtomicShared<usize> = AtomicShared::new(55);
430 /// let shared: Shared<usize> = atomic_shared.into_shared(Relaxed).unwrap();
431 /// assert_eq!(*shared, 55);
432 /// ```
433 #[inline]
434 #[must_use]
435 pub fn into_shared(self, order: Ordering) -> Option<Shared<T>> {
436 let ptr = self.ptr.swap(null_mut(), order);
437 if let Some(underlying_ptr) = NonNull::new(Tag::unset_tag(ptr).cast_mut()) {
438 return Some(Shared::from(underlying_ptr));
439 }
440 None
441 }
442}
443
444impl<T> Clone for AtomicShared<T> {
445 #[inline]
446 fn clone(&self) -> AtomicShared<T> {
447 self.clone(Acquire, &Guard::new())
448 }
449}
450
451impl<T> Default for AtomicShared<T> {
452 #[inline]
453 fn default() -> Self {
454 Self::null()
455 }
456}
457
impl<T> Drop for AtomicShared<T> {
    #[inline]
    fn drop(&mut self) {
        // Re-materialize the owned reference as a `Shared` and drop it so the strong
        // reference count is decremented. A null pointer means no reference is held
        // (e.g. a null `AtomicShared`, or one consumed by `into_shared`), in which
        // case there is nothing to release. `Relaxed` suffices because `&mut self`
        // guarantees exclusive access here.
        if let Some(ptr) = NonNull::new(Tag::unset_tag(self.ptr.load(Relaxed)).cast_mut()) {
            drop(Shared::from(ptr));
        }
    }
}
466
// SAFETY: sending an `AtomicShared<T>` to another thread moves its strong reference to the
// underlying instance along with it; this is sound as long as `T` itself is `Send`.
unsafe impl<T: Send> Send for AtomicShared<T> {}

// SAFETY: shared access to an `AtomicShared<T>` only performs atomic operations on the
// internal pointer and hands out references to the underlying instance, so concurrent use
// is sound when `T` is both `Send` and `Sync`.
unsafe impl<T: Send + Sync> Sync for AtomicShared<T> {}

impl<T: UnwindSafe> UnwindSafe for AtomicShared<T> {}