1use std::ptr::null_mut;
2use std::sync::atomic::{Ordering::*, *};
/// A lock-free (Treiber-style) stack of heap-allocated nodes.
///
/// NOTE(review): the name says it — this type is an intentionally *buggy*
/// fixture (see `main`, which stress-tests it under `cobb`): its `pop` has a
/// use-after-free/ABA window. Do not reuse it as a real data structure.
pub struct BuggyStack<T> {
    // Top of the stack; null when empty. Swung by CAS in push/pop.
    head: AtomicPtr<BuggyNode<T>>,
    // Tells the drop checker this struct owns `T` values: they live in heap
    // nodes reached through `head`, ownership `AtomicPtr` does not express.
    _boo: core::marker::PhantomData<T>,
}
9
/// One heap node of `BuggyStack`: a payload plus the link to the next node.
struct BuggyNode<T> {
    data: T,
    // Next node further down the stack; null at the bottom. Atomic because
    // it is read and written while the node is shared between threads.
    next: AtomicPtr<BuggyNode<T>>,
}
14impl<T> Drop for BuggyStack<T> {
15 fn drop(&mut self) {
16 while self.pop().is_some() {}
17 }
18}
19
20impl<T> BuggyStack<T> {
21 pub const fn new() -> Self {
22 Self {
23 head: AtomicPtr::new(null_mut()),
24 _boo: core::marker::PhantomData,
25 }
26 }
27}
impl<T> BuggyStack<T> {
    /// Lock-free push: allocate a node, then CAS-loop to make it the new head.
    ///
    /// This half looks sound: the `Release` success ordering on the CAS
    /// publishes the node's `data` and `next` stores to whichever thread
    /// later `Acquire`-loads `head` and observes the node.
    pub fn push(&self, data: T) {
        let n = Box::into_raw(Box::new(BuggyNode {
            next: AtomicPtr::new(null_mut()),
            data,
        }));
        let mut next = self.head.load(Relaxed);
        loop {
            // Point the new node at the currently observed head. `Relaxed`
            // suffices: the `Release` CAS below orders this store before the
            // node becomes reachable to other threads.
            unsafe {
                (*n).next.store(next, Relaxed);
            }
            // Weak CAS in a retry loop: a spurious failure just retries with
            // the head value returned on failure.
            match self.head.compare_exchange_weak(next, n, Release, Relaxed) {
                Ok(_) => break,
                Err(new) => next = new,
            }
        }
    }

    /// Lock-free pop — THE bug lives here.
    ///
    /// Between loading `head` into `n` and the CAS below, a concurrent `pop`
    /// can win its own CAS and free the node `n` points to (see the
    /// `Box::from_raw` at the bottom). The `(*n).next` dereference then reads
    /// freed memory (use-after-free), and if the allocator reuses that
    /// address, the CAS can succeed against a recycled node and install a
    /// stale `next` (classic ABA), corrupting the list. A correct
    /// implementation needs deferred reclamation (hazard pointers / epochs).
    /// NOTE(review): presumably intentional — this is the fixture that
    /// `main` stress-tests so cobb/Miri can surface the race.
    pub fn pop(&self) -> Option<T> {
        let mut n = self.head.load(Acquire);
        loop {
            if n.is_null() {
                return None;
            }
            // UNSOUND: nothing pins the node while we read it; `n` may have
            // been popped and freed by another thread already.
            let next = unsafe { (*n).next.load(Relaxed) };
            match self.head.compare_exchange_weak(n, next, Acquire, Acquire) {
                Ok(_) => break,
                Err(h) => n = h,
            }
        }
        debug_assert!(!n.is_null());
        // Having won the CAS, this thread claims ownership and frees the
        // node — even though concurrent poppers may still be holding (and
        // about to dereference) this same pointer.
        let n = unsafe { Box::from_raw(n) };
        Some(n.data)
    }
}
// SAFETY: the stack only ever *moves* `T` values across threads (pushed on
// one, popped/dropped on another), so `T: Send` is required; no `&T` is
// handed out across threads through the stack, so `T: Sync` is not needed.
// NOTE(review): the `'static` bound looks stricter than necessary for these
// impls — `T: Send` alone is the customary bound; confirm nothing relies on it.
unsafe impl<T> Send for BuggyStack<T> where T: Send + 'static {}
// SAFETY: all cross-thread mutation goes through the atomics `head`/`next`;
// this impl is only as sound as the push/pop protocol itself.
unsafe impl<T> Sync for BuggyStack<T> where T: Send + 'static {}
65
/// Stress driver: hammers one shared `BuggyStack` from many threads via the
/// `cobb` test harness so the race in `pop` can be surfaced (natively or
/// under Miri).
fn main() {
    cobb::run_test(cobb::TestCfg::<BuggyStack<usize>> {
        // Shrink the workload under Miri, which runs orders of magnitude
        // slower than native execution.
        threads: if cfg!(miri) { 8 } else { 16 },
        iterations: if cfg!(miri) { 100 } else { 1000 },
        sub_iterations: if cfg!(miri) { 10 } else { 20 },
        setup: || BuggyStack::new(),
        // Each thread repeatedly pushes its own index and pops one value.
        test: |stk, tctx| {
            stk.push(tctx.thread_index());
            let _ = stk.pop();
        },
        ..Default::default()
    });
}