use core::cell::UnsafeCell;
use core::mem::ManuallyDrop;
use core::sync::atomic::{AtomicBool, Ordering};

/// A value that is lazily initialized on first access, at most once.
///
/// `init` records which field of the `data` union is live: `false` means
/// the union still holds the initializer closure `f`; `true` means it
/// holds the computed `value`.
// NOTE(review): the derived `Debug` requires `UnsafeCell<Data<T, F>>: Debug`,
// but `Data` is a union and cannot derive `Debug` — confirm this attribute
// actually compiles in-tree.
#[derive(Debug)]
pub struct LazyLock<T, F = fn() -> T> {
    init: AtomicBool,
    data: UnsafeCell<Data<T, F>>,
}
29
/// Storage for either the initializer closure or the produced value.
///
/// The union carries no discriminant of its own; which field is live is
/// tracked externally by `LazyLock::init`. Both fields are wrapped in
/// `ManuallyDrop` so that neither is dropped implicitly — `LazyLock`'s
/// `Drop` impl drops the correct one by hand.
union Data<T, F> {
    /// Live once initialization has completed.
    value: ManuallyDrop<T>,
    /// Live until the first initialization consumes it.
    f: ManuallyDrop<F>,
}
34
// SAFETY: sharing a `LazyLock` across contexts only ever hands out `&T`
// (hence `T: Sync`), and the stored closure may be referenced from several
// contexts before initialization (hence `F: Sync`); the initializer itself
// runs at most once, guarded by a critical section in `ensure_init`.
// NOTE(review): whichever context calls `get()` first *executes* `F`, which
// resembles a `Send`-style requirement on `F` — confirm these bounds match
// upstream's intent.
unsafe impl<T, F> Sync for LazyLock<T, F>
where
    T: Sync,
    F: Sync,
{
}
41
42impl<T, F: FnOnce() -> T> LazyLock<T, F> {
43 pub const fn new(init_fn: F) -> Self {
45 Self {
46 init: AtomicBool::new(false),
47 data: UnsafeCell::new(Data {
48 f: ManuallyDrop::new(init_fn),
49 }),
50 }
51 }
52
53 #[inline]
56 pub fn get(&self) -> &T {
57 self.ensure_init_fast();
58 unsafe { &(*self.data.get()).value }
59 }
60
61 #[inline]
64 pub fn get_mut(&mut self) -> &mut T {
65 self.ensure_init_fast();
66 unsafe { &mut (*self.data.get()).value }
67 }
68
69 #[inline]
73 pub fn into_inner(self) -> T {
74 self.ensure_init_fast();
75 let this = ManuallyDrop::new(self);
76 let data = unsafe { core::ptr::read(&this.data) }.into_inner();
77
78 ManuallyDrop::into_inner(unsafe { data.value })
79 }
80
81 #[inline]
88 fn ensure_init_fast(&self) {
89 if !self.init.load(Ordering::Acquire) {
90 self.ensure_init();
91 }
92 }
93
94 fn ensure_init(&self) {
98 critical_section::with(|_| {
99 if !self.init.load(Ordering::Acquire) {
100 let data = unsafe { &mut *self.data.get() };
101 let f = unsafe { ManuallyDrop::take(&mut data.f) };
102 let value = f();
103 data.value = ManuallyDrop::new(value);
104
105 self.init.store(true, Ordering::Release);
106 }
107 });
108 }
109}
110
111impl<T, F> Drop for LazyLock<T, F> {
112 fn drop(&mut self) {
113 if self.init.load(Ordering::Acquire) {
114 unsafe { ManuallyDrop::drop(&mut self.data.get_mut().value) };
115 } else {
116 unsafe { ManuallyDrop::drop(&mut self.data.get_mut().f) };
117 }
118 }
119}
120
#[cfg(test)]
mod tests {
    use core::sync::atomic::{AtomicU32, Ordering};

    use super::*;

    #[test]
    fn test_lazy_lock() {
        // The primary use case: a const-constructed static, initialized on
        // the first `get`.
        static VALUE: LazyLock<u32> = LazyLock::new(|| 20);
        assert_eq!(VALUE.get(), &20);
    }

    #[test]
    fn test_lazy_lock_mutation() {
        let mut cell: LazyLock<u32> = LazyLock::new(|| 20);
        *cell.get_mut() = 21;
        assert_eq!(cell.get(), &21);
    }

    #[test]
    fn test_lazy_lock_into_inner() {
        let cell: LazyLock<u32> = LazyLock::new(|| 20);
        assert_eq!(cell.into_inner(), 20);
    }

    // Counts how many `DropCheck` values have been dropped so far.
    static DROP_CHECKER: AtomicU32 = AtomicU32::new(0);

    #[derive(Debug)]
    struct DropCheck;

    impl Drop for DropCheck {
        fn drop(&mut self) {
            DROP_CHECKER.fetch_add(1, Ordering::Acquire);
        }
    }

    #[test]
    fn test_lazy_drop() {
        // Dropping an *initialized* lock must drop the value exactly once.
        let lazy: LazyLock<DropCheck> = LazyLock::new(|| DropCheck);
        assert_eq!(DROP_CHECKER.load(Ordering::Acquire), 0);
        lazy.get();
        drop(lazy);
        assert_eq!(DROP_CHECKER.load(Ordering::Acquire), 1);

        // Dropping an *uninitialized* lock must instead drop the closure —
        // and with it the captured `DropCheck` — also exactly once.
        let dropper = DropCheck;
        let lazy_fn: LazyLock<u32, _> = LazyLock::new(move || {
            let _a = dropper;
            20
        });
        assert_eq!(DROP_CHECKER.load(Ordering::Acquire), 1);
        drop(lazy_fn);
        assert_eq!(DROP_CHECKER.load(Ordering::Acquire), 2);
    }
}