Skip to main content

xiaoyong_value/sync/
atomic_once.rs

1//! Lightweight, lock-free alternative to `std::sync::OnceLock`.
2
3use std::{
4    ptr,
5    sync::atomic::{
6        AtomicPtr,
7        Ordering,
8    },
9};
10
/// Lock-free, single-assignment cell.
///
/// Allows multiple threads to race to initialize a value. The
/// winning thread stores its value, while losing threads silently discard their
/// work.
pub struct AtomicOnce<T> {
    // Invariant: null while uninitialized; once a CAS succeeds it holds a
    // pointer produced by `Box::into_raw` and is never changed again until
    // `into_inner` or `Drop` reclaims the allocation.
    ptr: AtomicPtr<T>,
}
19
// SAFETY: `AtomicOnce` is safe to share across threads if the underlying data
// `T` is safe to share across threads. `T: Send` is also required: a thread
// holding only `&AtomicOnce` can move a `T` into the cell via `init`, and that
// value may later be dropped (or extracted) on a different thread.
unsafe impl<T: Sync + Send> Sync for AtomicOnce<T> {}
// SAFETY: `AtomicOnce` is safe to send across threads if `T` is safe to send;
// sending the cell sends ownership of the boxed `T` along with it.
unsafe impl<T: Send> Send for AtomicOnce<T> {}
25
26impl<T> Drop for AtomicOnce<T> {
27    fn drop(&mut self) {
28        let p = *self.ptr.get_mut();
29        if !p.is_null() {
30            // SAFETY: Reassert ownership of the heap allocation to drop it.
31            // This is only called once when the AtomicOnce itself goes out of scope.
32            unsafe { drop(Box::from_raw(p)) };
33        }
34    }
35}
36
37impl<T> AtomicOnce<T> {
38    /// Create a new instance.
39    pub const fn new() -> Self {
40        Self {
41            ptr: AtomicPtr::new(ptr::null_mut()),
42        }
43    }
44
45    /// Creates a new instance already initialized with the given value.
46    pub fn new_initialized(val: Box<T>) -> Self {
47        let ptr = Box::into_raw(val);
48        Self {
49            ptr: AtomicPtr::new(ptr),
50        }
51    }
52
53    /// Returns a reference to the initialized value, or `None` if
54    /// uninitialized.
55    pub fn get(&self) -> Option<&T> {
56        let p = self.ptr.load(Ordering::Acquire);
57        if p.is_null() {
58            None
59        } else {
60            // SAFETY: The pointer is either null or a valid pointer resulting
61            // from Box::into_raw. It is never mutated after initialization,
62            // and lives until the AtomicOnce is dropped.
63            Some(unsafe { &*p })
64        }
65    }
66
67    pub unsafe fn get_unchecked(&self) -> &T {
68        unsafe {
69            &**self.ptr.as_ptr()
70        }
71    }
72
73    /// Attempts to initialize the cell with the provided value.
74    ///
75    /// If the cell was already initialized or we lost the CAS race, returns
76    /// the reference to the initialized value and the owned value `val`.
77    pub fn init(&self, val: Box<T>) -> Result<(), (&T, Box<T>)> {
78        if let Some(existing) = self.get() {
79            return Err((existing, val));
80        }
81
82        let val_ptr = Box::into_raw(val);
83
84        match self
85            .ptr
86            .compare_exchange(ptr::null_mut(), val_ptr, Ordering::Release, Ordering::Acquire)
87        {
88            | Ok(_) => Ok(()),
89            | Err(existing_ptr) => {
90                // SAFETY: We just created this raw pointer from a Box. Since we lost
91                // the CAS race, we still have exclusive ownership over this specific
92                // allocation.
93                let this_candidate = unsafe { Box::from_raw(val_ptr) };
94
95                // SAFETY: `existing_ptr` was successfully written by the winning thread.
96                let existing = unsafe { &*existing_ptr };
97
98                Err((existing, this_candidate))
99            },
100        }
101    }
102
103    /// Returns a reference to the value, initializing it with `f` if necessary.
104    /// If the cell was already initialized or we lost the CAS race, returns
105    /// the reference to the initialized value and the owned value that was
106    /// computed by the `f`.
107    ///
108    /// Note that `f` may be executed multiple times concurrently if multiple
109    /// threads attempt initialization simultaneously. Only one result will
110    /// be retained.
111    pub fn get_or_init<F>(&self, f: F) -> Result<&T, (&T, Box<T>)>
112    where
113        F: FnOnce() -> Box<T>,
114    {
115        if let Some(val) = self.get() {
116            return Ok(val);
117        }
118
119        let val_ptr = Box::into_raw(f());
120
121        match self
122            .ptr
123            .compare_exchange(ptr::null_mut(), val_ptr, Ordering::Release, Ordering::Acquire)
124        {
125            | Ok(_) => {
126                // SAFETY: We won the race and successfully stored the pointer.
127                Ok(unsafe { &*val_ptr })
128            },
129            | Err(existing_ptr) => {
130                // SAFETY: We just created this raw pointer from a Box. Since we lost
131                // the CAS race, we still have exclusive ownership over this specific
132                // allocation.
133                let this_candidate = unsafe { Box::from_raw(val_ptr) };
134
135                // SAFETY: `existing_ptr` was successfully written by the winning thread.
136                let existing = unsafe { &*existing_ptr };
137
138                Err((existing, this_candidate))
139            },
140        }
141    }
142
143    /// Consumes the `AtomicOnce`, returning the inner value if initialized.
144    pub fn into_inner(mut self) -> Option<T> {
145        // Bypass atomics since we have an exclusive mutable reference.
146        let p = *self.ptr.get_mut();
147        if p.is_null() {
148            None
149        } else {
150            // Set the pointer to null so `Drop` doesn't double-free.
151            *self.ptr.get_mut() = ptr::null_mut();
152
153            // SAFETY: The pointer was valid, and we now take ownership.
154            Some(*unsafe { Box::from_raw(p) })
155        }
156    }
157}