1#![cfg_attr(not(test), no_std)]
2
3use core::sync::atomic::AtomicUsize;
4
/// Marker bit OR-ed into `value` while an unconditional `store` is in flight.
const STORE_POISON_BIT: usize = 1;
/// Marker bit OR-ed into `value` while a `store_conditional` is in flight;
/// readers spin while this bit is visible.
const COND_STORE_POISON_BIT: usize = 2;
/// Union of both in-flight markers; stored values must leave these bits clear.
const POISON_BITS: usize = STORE_POISON_BIT | COND_STORE_POISON_BIT;
8
/// Emulated load-linked / store-conditional cell over a `usize`.
///
/// The two low bits of `value` are reserved as transient "poison" markers
/// (see `STORE_POISON_BIT` / `COND_STORE_POISON_BIT`), so stored values must
/// keep those bits clear.
pub struct LlscUsize {
    // Current value; may transiently carry a poison marker in its low bits.
    value: AtomicUsize,
    // Modification counter, bumped around every mutation so an outstanding
    // link can detect that it was interrupted.
    counter: AtomicUsize,
}
13impl LlscUsize {
14 pub fn new(value: usize) -> Self {
15 Self {
16 value: AtomicUsize::new(value),
17 counter: AtomicUsize::new(0),
18 }
19 }
20 pub fn store(&self, new_value: usize) {
21 assert_eq!(new_value & POISON_BITS, 0);
22
23 self.counter
28 .fetch_add(1, core::sync::atomic::Ordering::SeqCst);
29
30 self.value.store(
35 new_value | STORE_POISON_BIT,
36 core::sync::atomic::Ordering::SeqCst,
37 );
38
39 self.counter
44 .fetch_add(1, core::sync::atomic::Ordering::SeqCst);
45
46 let _ = self.value.compare_exchange(
51 new_value | STORE_POISON_BIT,
52 new_value,
53 core::sync::atomic::Ordering::SeqCst,
54 core::sync::atomic::Ordering::SeqCst,
55 );
56
57 }
61 pub fn load(&self) -> usize {
62 loop {
63 let value = self.value.load(core::sync::atomic::Ordering::SeqCst);
64 if value & COND_STORE_POISON_BIT == 0 {
65 return value & (!STORE_POISON_BIT);
66 }
67 }
68 }
69 pub fn load_link(&self) -> LoadLink {
70 loop {
71 let result = LoadLink {
72 counter: self.counter.load(core::sync::atomic::Ordering::SeqCst),
73 raw_value: self.value.load(core::sync::atomic::Ordering::SeqCst),
74 };
75 if result.raw_value & COND_STORE_POISON_BIT == 0 {
76 return result;
77 }
78 }
79 }
80 pub fn store_conditional(&self, link: LoadLink, new_value: usize) -> bool {
82 if self
84 .value
85 .compare_exchange(
86 link.raw_value,
87 new_value | COND_STORE_POISON_BIT,
88 core::sync::atomic::Ordering::SeqCst,
89 core::sync::atomic::Ordering::SeqCst,
90 )
91 .is_err()
92 {
93 return false;
94 }
95 if self
97 .counter
98 .compare_exchange(
99 link.counter,
100 link.counter + 1,
101 core::sync::atomic::Ordering::SeqCst,
102 core::sync::atomic::Ordering::SeqCst,
103 )
104 .is_err()
105 {
106 let _ = self.value.compare_exchange(
108 new_value | COND_STORE_POISON_BIT,
109 link.raw_value,
110 core::sync::atomic::Ordering::SeqCst,
111 core::sync::atomic::Ordering::SeqCst,
112 );
113 return false;
114 }
115 let _ = self.value.compare_exchange(
117 new_value | COND_STORE_POISON_BIT,
118 new_value,
119 core::sync::atomic::Ordering::SeqCst,
120 core::sync::atomic::Ordering::SeqCst,
121 );
122 true
123 }
124}
125
/// Snapshot returned by [`LlscUsize::load_link`], consumed by
/// [`LlscUsize::store_conditional`].
pub struct LoadLink {
    // Value as read, possibly still carrying STORE_POISON_BIT.
    raw_value: usize,
    // Counter as read; a mismatch at store time means an interruption.
    counter: usize,
}
130impl LoadLink {
131 pub fn value(&self) -> usize {
132 self.raw_value & (!STORE_POISON_BIT)
133 }
134}
135
#[cfg(test)]
mod tests {
    use core::{sync::atomic::AtomicBool, time::Duration};
    use std::{sync::Arc, thread::JoinHandle};

    use super::*;

    /// Plain store/load round-trips.
    #[test]
    fn store_load() {
        let llsc = LlscUsize::new(0);
        for i in 0..3 {
            llsc.store(i * 4);
            assert_eq!(llsc.load(), i * 4);
        }
    }

    /// An uninterrupted link / store-conditional pair must succeed.
    #[test]
    fn llsc_no_interruption() {
        let llsc = LlscUsize::new(0);
        for i in 0..3 {
            let link = llsc.load_link();
            assert_eq!(link.value(), i * 4);
            assert!(llsc.store_conditional(link, (i + 1) * 4));
        }
    }

    /// An intervening `store` of the *same* value must still invalidate the link.
    #[test]
    fn llsc_sync_store_same_value_interruption() {
        let llsc = LlscUsize::new(0);
        for i in 0..3 {
            let link = llsc.load_link();
            assert_eq!(link.value(), i * 4);
            llsc.store(i * 4);
            assert!(!llsc.store_conditional(link, (i + 1) * 4));
            llsc.store((i + 1) * 4);
        }
    }

    /// An intervening `store` of a different value must invalidate the link.
    #[test]
    fn llsc_sync_store_different_value_interruption() {
        let llsc = LlscUsize::new(0);
        for i in 0..3 {
            let link = llsc.load_link();
            assert_eq!(link.value(), i * 4);
            llsc.store((i + 1) * 4);
            assert!(!llsc.store_conditional(link, (i + 1) * 4));
        }
    }

    /// A successful inner store-conditional of the same value must invalidate
    /// the outer link.
    #[test]
    fn llsc_sync_llsc_same_value_interruption() {
        let llsc = LlscUsize::new(0);
        for i in 0..3 {
            let link = llsc.load_link();
            assert_eq!(link.value(), i * 4);
            let sub_link = llsc.load_link();
            assert!(llsc.store_conditional(sub_link, i * 4));
            assert!(!llsc.store_conditional(link, (i + 1) * 4));
            llsc.store((i + 1) * 4);
        }
    }

    /// A successful inner store-conditional of a different value must
    /// invalidate the outer link.
    #[test]
    fn llsc_sync_llsc_different_value_interruption() {
        let llsc = LlscUsize::new(0);
        for i in 0..3 {
            let link = llsc.load_link();
            assert_eq!(link.value(), i * 4);
            let sub_link = llsc.load_link();
            assert!(llsc.store_conditional(sub_link, (i + 1) * 4));
            assert!(!llsc.store_conditional(link, (i + 1) * 4));
        }
    }

    /// Of two overlapping links, only the first store-conditional (same value)
    /// may succeed.
    #[test]
    fn llsc_alternating_same_value() {
        let llsc = LlscUsize::new(0);
        for i in 0..3 {
            let link_a = llsc.load_link();
            assert_eq!(link_a.value(), i * 4);
            let link_b = llsc.load_link();
            assert_eq!(link_b.value(), i * 4);
            assert!(llsc.store_conditional(link_a, i * 4));
            assert!(!llsc.store_conditional(link_b, i * 4));
            llsc.store((i + 1) * 4);
        }
    }

    /// Of two overlapping links, only the first store-conditional (different
    /// value) may succeed.
    #[test]
    fn llsc_alternating_different_value() {
        let llsc = LlscUsize::new(0);
        for i in 0..3 {
            let link_a = llsc.load_link();
            assert_eq!(link_a.value(), i * 4);
            let link_b = llsc.load_link();
            assert_eq!(link_b.value(), i * 4);
            assert!(llsc.store_conditional(link_a, (i + 1) * 4));
            assert!(!llsc.store_conditional(link_b, i * 4));
        }
    }

    /// A failed store-conditional (same-value interruption) must leave no
    /// trace of its tentative value.
    #[test]
    fn llsc_failed_store_not_visible_sync_same_value() {
        let llsc = LlscUsize::new(0);
        for i in 0..3 {
            let link = llsc.load_link();
            assert_eq!(link.value(), i * 4);
            llsc.store(i * 4);
            assert!(!llsc.store_conditional(link, (i + 1) * 4));
            assert_eq!(llsc.load(), i * 4);
            llsc.store((i + 1) * 4);
        }
    }

    /// A failed store-conditional (different-value interruption) must leave
    /// the interrupting store's value intact.
    #[test]
    fn llsc_failed_store_not_visible_sync_different_value() {
        let llsc = LlscUsize::new(0);
        for i in 0..3 {
            let link = llsc.load_link();
            assert_eq!(link.value(), i * 4);
            llsc.store((i + 1) * 4);
            assert!(!llsc.store_conditional(link, i * 4));
            assert_eq!(llsc.load(), (i + 1) * 4);
        }
    }

    /// Runs several closures in parallel loops against a shared cell.
    struct AsyncLlscTester {
        llsc: Arc<LlscUsize>,
        // One stop flag per thread so shutdown can be ordered (see
        // `run_for_duration`).
        threads: Vec<(Arc<AtomicBool>, JoinHandle<()>)>,
    }
    impl AsyncLlscTester {
        fn new(value: usize) -> Self {
            Self {
                llsc: Arc::new(LlscUsize::new(value)),
                threads: Vec::new(),
            }
        }
        /// Spawns a thread that calls `f` in a loop until its stop flag is set.
        fn spawn_loop<F: FnMut(&LlscUsize) + Send + 'static>(&mut self, mut f: F) {
            let llsc = self.llsc.clone();
            let stop_running = Arc::new(AtomicBool::new(false));
            let thread = {
                let stop_running = stop_running.clone();
                std::thread::spawn(move || {
                    while !stop_running.load(core::sync::atomic::Ordering::Relaxed) {
                        f(&llsc)
                    }
                })
            };
            self.threads.push((stop_running, thread));
        }
        fn run_for_duration(&mut self, duration: Duration) {
            std::thread::sleep(duration);
            // Race fix: stop threads one at a time in *reverse* spawn order,
            // so earlier-spawned threads (e.g. the interrupting `store` loop
            // in `llsc_failed_store_not_visible_async`) keep running while
            // later, asserting threads finish their final iteration. With a
            // single shared flag, the store loop could exit just before the
            // store_conditional thread took its link, leaving the 100ms
            // window uninterrupted and making `assert!(!store_conditional)`
            // fail spuriously.
            for (stop_running, t) in self.threads.drain(..).rev() {
                stop_running.store(true, core::sync::atomic::Ordering::Relaxed);
                t.join().unwrap()
            }
        }
        fn run_for_default_duration(&mut self) {
            self.run_for_duration(Duration::from_secs(3));
        }
    }

    /// Under a constant stream of `store(4)` interruptions, a deliberately
    /// slow store-conditional must always fail and its tentative value (8)
    /// must never become observable.
    #[test]
    fn llsc_failed_store_not_visible_async() {
        let mut llsc_tester = AsyncLlscTester::new(0);
        // Spawned first => stopped last, so it keeps interrupting while the
        // asserting threads below run their final iterations.
        llsc_tester.spawn_loop(|llsc| llsc.store(4));
        llsc_tester.spawn_loop(|llsc| {
            let link = llsc.load_link();
            std::thread::sleep(Duration::from_millis(100));
            assert!(!llsc.store_conditional(link, 8));
        });
        llsc_tester.spawn_loop(|llsc| assert_ne!(llsc.load(), 8));
        llsc_tester.spawn_loop(|llsc| {
            let link = llsc.load_link();
            assert_ne!(link.value(), 8)
        });
        llsc_tester.run_for_default_duration()
    }
}