embassy_sync/lazy_lock.rs

//! Synchronization primitive for initializing a value once, allowing others to get a reference to the value.

use core::cell::UnsafeCell;
use core::mem::ManuallyDrop;
use core::sync::atomic::{AtomicBool, Ordering};

/// The `LazyLock` is a synchronization primitive that allows initializing
/// a value once and lets others obtain a reference to that value. This is
/// useful for lazy initialization of a static value.
///
/// # Example
/// ```
/// use embassy_sync::lazy_lock::LazyLock;
///
/// // Define a static value that will be lazily initialized
/// // at runtime on the first access.
/// static VALUE: LazyLock<u32> = LazyLock::new(|| 20);
///
/// let reference = VALUE.get();
/// assert_eq!(reference, &20);
/// ```
#[derive(Debug)]
pub struct LazyLock<T, F = fn() -> T> {
    init: AtomicBool,
    data: UnsafeCell<Data<T, F>>,
}

union Data<T, F> {
    value: ManuallyDrop<T>,
    f: ManuallyDrop<F>,
}

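// SAFETY: the inner `UnsafeCell` is only mutated during the one-time
// initialization, which runs inside a critical section and is published via
// the `init` flag (a `Release` store paired with `Acquire` loads); a shared
// `&LazyLock` otherwise only hands out `&T`, hence the `Sync` bounds below.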
unsafe impl<T, F> Sync for LazyLock<T, F>
where
    T: Sync,
    F: Sync,
{
}

impl<T, F: FnOnce() -> T> LazyLock<T, F> {
    /// Create a new uninitialized `LazyLock`.
    pub const fn new(init_fn: F) -> Self {
        Self {
            init: AtomicBool::new(false),
            data: UnsafeCell::new(Data {
                f: ManuallyDrop::new(init_fn),
            }),
        }
    }

    /// Get a reference to the underlying value, initializing it if it
    /// has not been done already.
    #[inline]
    pub fn get(&self) -> &T {
        self.ensure_init_fast();
        unsafe { &(*self.data.get()).value }
    }

    /// Get a mutable reference to the underlying value, initializing it if it
    /// has not been done already.
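    ///
    /// # Example
    /// Overwriting the lazily initialized value through a mutable reference:
    /// ```
    /// use embassy_sync::lazy_lock::LazyLock;
    ///
    /// let mut value: LazyLock<u32> = LazyLock::new(|| 20);
    /// *value.get_mut() = 21;
    /// assert_eq!(value.get(), &21);
    /// ```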
    #[inline]
    pub fn get_mut(&mut self) -> &mut T {
        self.ensure_init_fast();
        unsafe { &mut (*self.data.get()).value }
    }

    /// Consume the `LazyLock`, returning the underlying value. The
    /// initialization function will be called if it has not already run.
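    ///
    /// # Example
    /// Taking ownership of the initialized value:
    /// ```
    /// use embassy_sync::lazy_lock::LazyLock;
    ///
    /// let lazy: LazyLock<u32> = LazyLock::new(|| 20);
    /// assert_eq!(lazy.into_inner(), 20);
    /// ```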
    #[inline]
    pub fn into_inner(self) -> T {
        self.ensure_init_fast();
        let this = ManuallyDrop::new(self);
        let data = unsafe { core::ptr::read(&this.data) }.into_inner();

        ManuallyDrop::into_inner(unsafe { data.value })
    }

    /// Initialize the `LazyLock` if it has not been initialized yet.
    /// This function is a fast path around [`Self::ensure_init`] that avoids
    /// taking a critical section in the common case where the value has
    /// already been initialized.
    /// When this function returns, `self.data` is guaranteed to be
    /// initialized and visible on the current core.
    #[inline]
    fn ensure_init_fast(&self) {
        if !self.init.load(Ordering::Acquire) {
            self.ensure_init();
        }
    }

    /// Initialize the `LazyLock` if it has not been initialized yet.
    /// When this function returns, `self.data` is guaranteed to be
    /// initialized and visible on the current core.
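    /// The `init` flag is re-checked inside the critical section, so an
    /// initialization that raced in between a caller's fast-path check and
    /// entering the critical section is not performed twice; the `Release`
    /// store pairs with the `Acquire` loads to publish the written value.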
    fn ensure_init(&self) {
        critical_section::with(|_| {
            if !self.init.load(Ordering::Acquire) {
                let data = unsafe { &mut *self.data.get() };
                let f = unsafe { ManuallyDrop::take(&mut data.f) };
                let value = f();
                data.value = ManuallyDrop::new(value);

                self.init.store(true, Ordering::Release);
            }
        });
    }
}

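// Drop whichever union variant is currently live: the initialized value if
// `init` is set, otherwise the never-called initializer function.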
impl<T, F> Drop for LazyLock<T, F> {
    fn drop(&mut self) {
        if self.init.load(Ordering::Acquire) {
            unsafe { ManuallyDrop::drop(&mut self.data.get_mut().value) };
        } else {
            unsafe { ManuallyDrop::drop(&mut self.data.get_mut().f) };
        }
    }
}

#[cfg(test)]
mod tests {
    use core::sync::atomic::{AtomicU32, Ordering};

    use super::*;

    #[test]
    fn test_lazy_lock() {
        static VALUE: LazyLock<u32> = LazyLock::new(|| 20);
        let reference = VALUE.get();
        assert_eq!(reference, &20);
    }

    #[test]
    fn test_lazy_lock_mutation() {
        let mut value: LazyLock<u32> = LazyLock::new(|| 20);
        *value.get_mut() = 21;
        let reference = value.get();
        assert_eq!(reference, &21);
    }

    #[test]
    fn test_lazy_lock_into_inner() {
        let lazy: LazyLock<u32> = LazyLock::new(|| 20);
        let value = lazy.into_inner();
        assert_eq!(value, 20);
    }
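
    // Additional check (sketch): the initializer closure should run exactly
    // once, no matter how many times the value is read afterwards.
    #[test]
    fn test_lazy_lock_init_once() {
        static CALLS: AtomicU32 = AtomicU32::new(0);
        let lazy: LazyLock<u32> = LazyLock::new(|| {
            CALLS.fetch_add(1, Ordering::Relaxed);
            20
        });
        assert_eq!(lazy.get(), &20);
        assert_eq!(lazy.get(), &20);
        assert_eq!(CALLS.load(Ordering::Relaxed), 1);
    }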

    static DROP_CHECKER: AtomicU32 = AtomicU32::new(0);

    #[derive(Debug)]
    struct DropCheck;

    impl Drop for DropCheck {
        fn drop(&mut self) {
            DROP_CHECKER.fetch_add(1, Ordering::Acquire);
        }
    }

    #[test]
    fn test_lazy_drop() {
        let lazy: LazyLock<DropCheck> = LazyLock::new(|| DropCheck);
        assert_eq!(DROP_CHECKER.load(Ordering::Acquire), 0);
        lazy.get();
        drop(lazy);
        assert_eq!(DROP_CHECKER.load(Ordering::Acquire), 1);

        let dropper = DropCheck;
        let lazy_fn: LazyLock<u32, _> = LazyLock::new(move || {
            let _a = dropper;
            20
        });
        assert_eq!(DROP_CHECKER.load(Ordering::Acquire), 1);
        drop(lazy_fn);
        assert_eq!(DROP_CHECKER.load(Ordering::Acquire), 2);
    }
}