// prism3_atomic/atomic/atomic_ref.rs

1/*******************************************************************************
2 *
3 *    Copyright (c) 2025.
4 *    3-Prism Co. Ltd.
5 *
6 *    All rights reserved.
7 *
8 ******************************************************************************/
9
10//! # Atomic Reference
11//!
12//! Provides an easy-to-use atomic reference type with sensible default memory
13//! orderings. Uses `Arc<T>` for thread-safe reference counting.
14//!
15//! # Author
16//!
17//! Haixing Hu
18
19use std::fmt;
20use std::sync::atomic::AtomicPtr;
21use std::sync::atomic::Ordering;
22use std::sync::Arc;
23
24use crate::atomic::traits::Atomic;
25
26/// Atomic reference type.
27///
28/// Provides easy-to-use atomic operations on references with automatic memory
29/// ordering selection. Uses `Arc<T>` for thread-safe reference counting.
30///
31/// # Memory Ordering Strategy
32///
33/// This type uses the same memory ordering strategy as other atomic types:
34///
35/// - **Read operations** (`load`): Use `Acquire` ordering to ensure that
36///   all writes from other threads that happened before a `Release` store
37///   are visible after this load.
38///
39/// - **Write operations** (`store`): Use `Release` ordering to ensure that
40///   all prior writes in this thread are visible to other threads that
41///   perform an `Acquire` load.
42///
43/// - **Read-Modify-Write operations** (`swap`, `compare_set`): Use
44///   `AcqRel` ordering to combine both `Acquire` and `Release` semantics.
45///
46/// - **CAS failure**: Use `Acquire` ordering on failure to observe the
47///   actual value written by another thread.
48///
49/// # Implementation Details
50///
51/// This type stores an `Arc<T>` as a raw pointer in `AtomicPtr<T>`. All
52/// operations properly manage reference counts to prevent memory leaks or
53/// use-after-free errors.
54///
55/// # Features
56///
57/// - Automatic memory ordering selection
58/// - Thread-safe reference counting via `Arc`
59/// - Functional update operations
60/// - Zero-cost abstraction with inline methods
61///
62/// # Example
63///
64/// ```rust
65/// use prism3_rust_concurrent::atomic::AtomicRef;
66/// use std::sync::Arc;
67///
68/// #[derive(Debug, Clone)]
69/// struct Config {
70///     timeout: u64,
71///     max_retries: u32,
72/// }
73///
74/// let config = Arc::new(Config {
75///     timeout: 1000,
76///     max_retries: 3,
77/// });
78///
79/// let atomic_config = AtomicRef::new(config);
80///
81/// // Update configuration
82/// let new_config = Arc::new(Config {
83///     timeout: 2000,
84///     max_retries: 5,
85/// });
86///
87/// let old_config = atomic_config.swap(new_config);
88/// assert_eq!(old_config.timeout, 1000);
89/// assert_eq!(atomic_config.load().timeout, 2000);
90/// ```
91///
92/// # Author
93///
94/// Haixing Hu
pub struct AtomicRef<T> {
    // Raw pointer produced by `Arc::into_raw`. While stored here it owns
    // exactly one strong reference to the pointee; that reference is
    // released by `Drop` or by whichever operation swaps the pointer out.
    inner: AtomicPtr<T>,
}
98
impl<T> AtomicRef<T> {
    /// Creates a new atomic reference.
    ///
    /// # Parameters
    ///
    /// * `value` - The initial reference.
    ///
    /// # Example
    ///
    /// ```rust
    /// use prism3_rust_concurrent::atomic::AtomicRef;
    /// use std::sync::Arc;
    ///
    /// let data = Arc::new(42);
    /// let atomic = AtomicRef::new(data);
    /// assert_eq!(*atomic.load(), 42);
    /// ```
    #[inline]
    pub fn new(value: Arc<T>) -> Self {
        // Transfer ownership of one strong reference into a raw pointer.
        // The pointer is always non-null (it comes from a live `Arc`).
        let ptr = Arc::into_raw(value) as *mut T;
        Self {
            inner: AtomicPtr::new(ptr),
        }
    }

    /// Gets the current reference.
    ///
    /// # Memory Ordering
    ///
    /// Uses `Acquire` ordering. This ensures that all writes from other
    /// threads that happened before a `Release` store are visible after
    /// this load.
    ///
    /// # Returns
    ///
    /// A cloned `Arc` pointing to the current value.
    ///
    /// # Example
    ///
    /// ```rust
    /// use prism3_rust_concurrent::atomic::AtomicRef;
    /// use std::sync::Arc;
    ///
    /// let atomic = AtomicRef::new(Arc::new(42));
    /// let value = atomic.load();
    /// assert_eq!(*value, 42);
    /// ```
    #[inline]
    pub fn load(&self) -> Arc<T> {
        let ptr = self.inner.load(Ordering::Acquire);
        // NOTE(review): there is a reclamation window here. Between the
        // pointer load above and the refcount increment below, a concurrent
        // `store`/`swap` could release the last strong reference, after
        // which touching `ptr` is a use-after-free. This is only sound if
        // readers and reclaimers are externally synchronized — confirm the
        // intended usage (deferred-reclamation schemes such as the
        // `arc-swap` crate exist to close exactly this gap).
        unsafe {
            // SAFETY (assuming `ptr` is still live, see note above): `ptr`
            // came from `Arc::into_raw`, so we may temporarily reconstruct
            // the Arc, clone it (bumping the count), and leak the original
            // back so the count owned by `self.inner` is unchanged.
            // Increment reference count but don't drop the original pointer
            let arc = Arc::from_raw(ptr);
            let cloned = arc.clone();
            let _ = Arc::into_raw(arc); // Prevent dropping
            cloned
        }
    }

    /// Sets a new reference.
    ///
    /// # Memory Ordering
    ///
    /// Implemented as an `AcqRel` swap rather than a plain `Release` store:
    /// the `Release` half publishes prior writes of this thread to `Acquire`
    /// loaders, and the `Acquire` half synchronizes with the writer of the
    /// previous value, whose strong reference this call takes over and drops.
    ///
    /// # Parameters
    ///
    /// * `value` - The new reference to set.
    ///
    /// # Example
    ///
    /// ```rust
    /// use prism3_rust_concurrent::atomic::AtomicRef;
    /// use std::sync::Arc;
    ///
    /// let atomic = AtomicRef::new(Arc::new(42));
    /// atomic.store(Arc::new(100));
    /// assert_eq!(*atomic.load(), 100);
    /// ```
    #[inline]
    pub fn store(&self, value: Arc<T>) {
        let new_ptr = Arc::into_raw(value) as *mut T;
        let old_ptr = self.inner.swap(new_ptr, Ordering::AcqRel);
        unsafe {
            // Defensive check: every setter stores a non-null pointer, so
            // `old_ptr` is not expected to be null in practice.
            if !old_ptr.is_null() {
                // SAFETY: `old_ptr` came from `Arc::into_raw` and the swap
                // transferred its strong reference to us; reconstructing the
                // Arc (and letting it fall out of scope) releases it.
                // Drop the old value
                Arc::from_raw(old_ptr);
            }
        }
    }

    /// Swaps the current reference with a new reference, returning the old
    /// reference.
    ///
    /// # Memory Ordering
    ///
    /// Uses `AcqRel` ordering. This provides full synchronization for this
    /// read-modify-write operation.
    ///
    /// # Parameters
    ///
    /// * `value` - The new reference to swap in.
    ///
    /// # Returns
    ///
    /// The old reference.
    ///
    /// # Example
    ///
    /// ```rust
    /// use prism3_rust_concurrent::atomic::AtomicRef;
    /// use std::sync::Arc;
    ///
    /// let atomic = AtomicRef::new(Arc::new(10));
    /// let old = atomic.swap(Arc::new(20));
    /// assert_eq!(*old, 10);
    /// assert_eq!(*atomic.load(), 20);
    /// ```
    #[inline]
    pub fn swap(&self, value: Arc<T>) -> Arc<T> {
        let new_ptr = Arc::into_raw(value) as *mut T;
        let old_ptr = self.inner.swap(new_ptr, Ordering::AcqRel);
        // SAFETY: the swap transferred ownership of the strong reference
        // previously held by `self.inner` to us; hand it to the caller.
        unsafe { Arc::from_raw(old_ptr) }
    }

    /// Compares and sets the reference atomically.
    ///
    /// If the current reference equals `current` (by pointer equality), sets
    /// it to `new` and returns `Ok(())`. Otherwise, returns `Err(actual)`
    /// where `actual` is the current reference.
    ///
    /// # Memory Ordering
    ///
    /// - **Success**: Uses `AcqRel` ordering to ensure full synchronization
    ///   when the exchange succeeds.
    /// - **Failure**: Uses `Acquire` ordering to observe the actual value
    ///   written by another thread.
    ///
    /// # Parameters
    ///
    /// * `current` - The expected current reference.
    /// * `new` - The new reference to set if current matches.
    ///
    /// # Returns
    ///
    /// `Ok(())` on success, or `Err(actual)` on failure.
    ///
    /// # Note
    ///
    /// Comparison uses pointer equality (`Arc::ptr_eq`), not value equality.
    ///
    /// # Example
    ///
    /// ```rust
    /// use prism3_rust_concurrent::atomic::AtomicRef;
    /// use std::sync::Arc;
    ///
    /// let atomic = AtomicRef::new(Arc::new(10));
    /// let current = atomic.load();
    ///
    /// assert!(atomic.compare_set(&current, Arc::new(20)).is_ok());
    /// assert_eq!(*atomic.load(), 20);
    /// ```
    #[inline]
    pub fn compare_set(&self, current: &Arc<T>, new: Arc<T>) -> Result<(), Arc<T>> {
        let current_ptr = Arc::as_ptr(current) as *mut T;
        let new_ptr = Arc::into_raw(new) as *mut T;

        match self
            .inner
            .compare_exchange(current_ptr, new_ptr, Ordering::AcqRel, Ordering::Acquire)
        {
            // On success the strong reference carried by `new_ptr` is now
            // owned by `self.inner`; the displaced reference is logically
            // still owned by the caller's `current` snapshot, so nothing to
            // release here.
            Ok(_) => Ok(()),
            Err(actual_ptr) => unsafe {
                // CAS failed, need to restore the new Arc and return actual
                // SAFETY: `new_ptr` was leaked by `Arc::into_raw` above and
                // was never installed, so reclaiming (and dropping) it here
                // balances the count.
                let _new_arc = Arc::from_raw(new_ptr);
                // NOTE(review): dereferencing `actual_ptr` has the same
                // reclamation window as `load` — a concurrent writer may
                // already have dropped the value it points to.
                let actual_arc = Arc::from_raw(actual_ptr);
                let cloned = actual_arc.clone();
                let _ = Arc::into_raw(actual_arc); // Prevent dropping
                Err(cloned)
            },
        }
    }

    /// Weak version of compare-and-set.
    ///
    /// May spuriously fail even when the comparison succeeds. Should be used
    /// in a loop.
    ///
    /// Uses `AcqRel` ordering on success and `Acquire` ordering on failure.
    ///
    /// # Parameters
    ///
    /// * `current` - The expected current reference.
    /// * `new` - The new reference to set if current matches.
    ///
    /// # Returns
    ///
    /// `Ok(())` on success, or `Err(actual)` on failure.
    ///
    /// # Example
    ///
    /// ```rust
    /// use prism3_rust_concurrent::atomic::AtomicRef;
    /// use std::sync::Arc;
    ///
    /// let atomic = AtomicRef::new(Arc::new(10));
    /// let mut current = atomic.load();
    /// loop {
    ///     match atomic.compare_set_weak(&current, Arc::new(20)) {
    ///         Ok(_) => break,
    ///         Err(actual) => current = actual,
    ///     }
    /// }
    /// assert_eq!(*atomic.load(), 20);
    /// ```
    #[inline]
    pub fn compare_set_weak(&self, current: &Arc<T>, new: Arc<T>) -> Result<(), Arc<T>> {
        let current_ptr = Arc::as_ptr(current) as *mut T;
        let new_ptr = Arc::into_raw(new) as *mut T;

        match self.inner.compare_exchange_weak(
            current_ptr,
            new_ptr,
            Ordering::AcqRel,
            Ordering::Acquire,
        ) {
            // Success: `new_ptr`'s strong reference now lives in `self.inner`.
            Ok(_) => Ok(()),
            Err(actual_ptr) => unsafe {
                // CAS failed, need to restore the new Arc and return actual
                // SAFETY: `new_ptr` was never installed; reclaim and drop it.
                let _new_arc = Arc::from_raw(new_ptr);
                // NOTE(review): same reclamation window as `load` applies to
                // `actual_ptr` — see that method.
                let actual_arc = Arc::from_raw(actual_ptr);
                let cloned = actual_arc.clone();
                let _ = Arc::into_raw(actual_arc); // Prevent dropping
                Err(cloned)
            },
        }
    }

    /// Compares and exchanges the reference atomically, returning the
    /// previous reference.
    ///
    /// If the current reference equals `current` (by pointer equality), sets
    /// it to `new` and returns the old reference. Otherwise, returns the
    /// actual current reference.
    ///
    /// Uses `AcqRel` ordering on success and `Acquire` ordering on failure.
    ///
    /// # Parameters
    ///
    /// * `current` - The expected current reference.
    /// * `new` - The new reference to set if current matches.
    ///
    /// # Returns
    ///
    /// The reference before the operation.
    ///
    /// # Note
    ///
    /// Comparison uses pointer equality (`Arc::ptr_eq`), not value equality.
    ///
    /// # Example
    ///
    /// ```rust
    /// use prism3_rust_concurrent::atomic::AtomicRef;
    /// use std::sync::Arc;
    ///
    /// let atomic = AtomicRef::new(Arc::new(10));
    /// let current = atomic.load();
    ///
    /// let prev = atomic.compare_and_exchange(&current, Arc::new(20));
    /// assert!(Arc::ptr_eq(&prev, &current));
    /// assert_eq!(*atomic.load(), 20);
    /// ```
    #[inline]
    pub fn compare_and_exchange(&self, current: &Arc<T>, new: Arc<T>) -> Arc<T> {
        let current_ptr = Arc::as_ptr(current) as *mut T;
        let new_ptr = Arc::into_raw(new) as *mut T;

        match self
            .inner
            .compare_exchange(current_ptr, new_ptr, Ordering::AcqRel, Ordering::Acquire)
        {
            // SAFETY: on success the exchange transferred ownership of the
            // strong reference previously stored in `self.inner` to us; it
            // is handed back to the caller as an owned `Arc`.
            Ok(prev_ptr) => unsafe { Arc::from_raw(prev_ptr) },
            Err(actual_ptr) => unsafe {
                // CAS failed, need to restore the new Arc and return actual
                // SAFETY: `new_ptr` was never installed; reclaim and drop it.
                let _ = Arc::from_raw(new_ptr);
                // NOTE(review): same reclamation window as `load` applies to
                // `actual_ptr` — see that method.
                let actual_arc = Arc::from_raw(actual_ptr);
                let cloned = actual_arc.clone();
                let _ = Arc::into_raw(actual_arc); // Prevent dropping
                cloned
            },
        }
    }

    /// Weak version of compare-and-exchange.
    ///
    /// May spuriously fail even when the comparison succeeds. Should be used
    /// in a loop.
    ///
    /// Uses `AcqRel` ordering on success and `Acquire` ordering on failure.
    ///
    /// # Parameters
    ///
    /// * `current` - The expected current reference.
    /// * `new` - The new reference to set if current matches.
    ///
    /// # Returns
    ///
    /// The reference before the operation.
    ///
    /// # Example
    ///
    /// ```rust
    /// use prism3_rust_concurrent::atomic::AtomicRef;
    /// use std::sync::Arc;
    ///
    /// let atomic = AtomicRef::new(Arc::new(10));
    /// let mut current = atomic.load();
    /// loop {
    ///     let prev =
    ///         atomic.compare_and_exchange_weak(&current, Arc::new(20));
    ///     if Arc::ptr_eq(&prev, &current) {
    ///         break;
    ///     }
    ///     current = prev;
    /// }
    /// assert_eq!(*atomic.load(), 20);
    /// ```
    #[inline]
    pub fn compare_and_exchange_weak(&self, current: &Arc<T>, new: Arc<T>) -> Arc<T> {
        let current_ptr = Arc::as_ptr(current) as *mut T;
        let new_ptr = Arc::into_raw(new) as *mut T;

        match self.inner.compare_exchange_weak(
            current_ptr,
            new_ptr,
            Ordering::AcqRel,
            Ordering::Acquire,
        ) {
            // SAFETY: on success we own the displaced strong reference.
            Ok(prev_ptr) => unsafe { Arc::from_raw(prev_ptr) },
            Err(actual_ptr) => unsafe {
                // CAS failed, need to restore the new Arc and return actual
                // SAFETY: `new_ptr` was never installed; reclaim and drop it.
                let _ = Arc::from_raw(new_ptr);
                // NOTE(review): same reclamation window as `load` applies to
                // `actual_ptr` — see that method.
                let actual_arc = Arc::from_raw(actual_ptr);
                let cloned = actual_arc.clone();
                let _ = Arc::into_raw(actual_arc); // Prevent dropping
                cloned
            },
        }
    }

    /// Updates the reference using a function, returning the old reference.
    ///
    /// # Memory Ordering
    ///
    /// Internally uses a CAS loop with `compare_set_weak`, which uses
    /// `AcqRel` on success and `Acquire` on failure. The loop ensures
    /// eventual consistency even under high contention.
    ///
    /// # Parameters
    ///
    /// * `f` - A function that takes the current reference and returns the
    ///   new reference. May be called multiple times under contention, so it
    ///   should be side-effect free.
    ///
    /// # Returns
    ///
    /// The old reference before the update.
    ///
    /// # Example
    ///
    /// ```rust
    /// use prism3_rust_concurrent::atomic::AtomicRef;
    /// use std::sync::Arc;
    ///
    /// let atomic = AtomicRef::new(Arc::new(10));
    /// let old = atomic.fetch_update(|x| Arc::new(*x * 2));
    /// assert_eq!(*old, 10);
    /// assert_eq!(*atomic.load(), 20);
    /// ```
    #[inline]
    pub fn fetch_update<F>(&self, f: F) -> Arc<T>
    where
        F: Fn(&Arc<T>) -> Arc<T>,
    {
        let mut current = self.load();
        loop {
            let new = f(&current);
            match self.compare_set_weak(&current, new) {
                // Success: `current` was the value before the update.
                Ok(_) => return current,
                // Retry with the freshly observed value (also covers
                // spurious failures of the weak CAS).
                Err(actual) => current = actual,
            }
        }
    }

    /// Gets a reference to the underlying standard library atomic type.
    ///
    /// This allows direct access to the standard library's atomic operations
    /// for advanced use cases that require fine-grained control over memory
    /// ordering.
    ///
    /// # Memory Ordering
    ///
    /// When using the returned reference, you have full control over memory
    /// ordering. Choose the appropriate ordering based on your specific
    /// synchronization requirements.
    ///
    /// # Returns
    ///
    /// A reference to the underlying `std::sync::atomic::AtomicPtr<T>`.
    ///
    /// # Warning
    ///
    /// Direct manipulation of the underlying pointer requires careful
    /// management of Arc reference counts to avoid memory leaks or
    /// use-after-free bugs.
    #[inline]
    pub fn inner(&self) -> &AtomicPtr<T> {
        &self.inner
    }
}
521
522impl<T> Atomic for AtomicRef<T> {
523    type Value = Arc<T>;
524
525    #[inline]
526    fn load(&self) -> Arc<T> {
527        self.load()
528    }
529
530    #[inline]
531    fn store(&self, value: Arc<T>) {
532        self.store(value);
533    }
534
535    #[inline]
536    fn swap(&self, value: Arc<T>) -> Arc<T> {
537        self.swap(value)
538    }
539
540    #[inline]
541    fn compare_set(&self, current: Arc<T>, new: Arc<T>) -> Result<(), Arc<T>> {
542        self.compare_set(&current, new)
543    }
544
545    #[inline]
546    fn compare_set_weak(&self, current: Arc<T>, new: Arc<T>) -> Result<(), Arc<T>> {
547        self.compare_set_weak(&current, new)
548    }
549
550    #[inline]
551    fn compare_exchange(&self, current: Arc<T>, new: Arc<T>) -> Arc<T> {
552        match self.compare_set(&current, new.clone()) {
553            Ok(_) => current,
554            Err(_) => self.load(),
555        }
556    }
557
558    #[inline]
559    fn compare_exchange_weak(&self, current: Arc<T>, new: Arc<T>) -> Arc<T> {
560        match self.compare_set_weak(&current, new.clone()) {
561            Ok(_) => current,
562            Err(_) => self.load(),
563        }
564    }
565
566    #[inline]
567    fn fetch_update<F>(&self, f: F) -> Arc<T>
568    where
569        F: Fn(Arc<T>) -> Arc<T>,
570    {
571        self.fetch_update(|x| f(x.clone()))
572    }
573}
574
575impl<T> Clone for AtomicRef<T> {
576    /// Clones the atomic reference.
577    ///
578    /// Creates a new `AtomicRef` that initially points to the same value as
579    /// the original, but subsequent atomic operations are independent.
580    fn clone(&self) -> Self {
581        Self::new(self.load())
582    }
583}
584
585impl<T> Drop for AtomicRef<T> {
586    fn drop(&mut self) {
587        let ptr = self.inner.load(Ordering::Acquire);
588        unsafe {
589            if !ptr.is_null() {
590                Arc::from_raw(ptr);
591            }
592        }
593    }
594}
595
// SAFETY: `AtomicRef<T>` hands out `Arc<T>` clones across threads, so it
// needs the same `T: Send + Sync` bound under which `Arc<T>` itself is
// `Send`/`Sync`; the internal `AtomicPtr` operations are thread-safe.
unsafe impl<T: Send + Sync> Send for AtomicRef<T> {}
unsafe impl<T: Send + Sync> Sync for AtomicRef<T> {}
598
599impl<T: fmt::Debug> fmt::Debug for AtomicRef<T> {
600    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
601        f.debug_struct("AtomicRef")
602            .field("value", &self.load())
603            .finish()
604    }
605}
606
607impl<T: fmt::Display> fmt::Display for AtomicRef<T> {
608    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
609        write!(f, "{}", self.load())
610    }
611}