lock_free/
stack_optimized.rs

1//! Stack with optimizations for reduced contention.
2
3use std::sync::atomic::{AtomicPtr, Ordering};
4use std::ptr;
5use std::cell::Cell;
6
// 128-byte alignment pads each node to at least two typical 64-byte
// cache lines, so adjacent nodes never share a line and threads touching
// neighboring nodes do not false-share.
// NOTE(review): this inflates every node allocation to >=128 bytes even
// for tiny T — confirm the contention win justifies the memory overhead.
#[repr(align(128))] // Prevent false sharing
struct Node<T> {
    data: T,            // payload handed back to the caller by `pop`
    next: *mut Node<T>, // raw link to the next node; null terminates the chain
}
12
/// A lock-free stack with exponential backoff
///
/// Treiber-style stack: the only shared state is the atomic `head`
/// pointer. `push`/`pop` retry with CAS loops, using per-thread
/// exponential backoff between failed attempts to reduce contention.
pub struct Stack<T> {
    head: AtomicPtr<Node<T>>, // null when empty; owns the entire node chain
}
17
18impl<T> Stack<T> {
19    thread_local! {
20        static BACKOFF: Cell<u32> = Cell::new(1);
21    }
22    
23    #[inline]
24    fn backoff() {
25        Self::BACKOFF.with(|b| {
26            let current = b.get();
27            for _ in 0..current {
28                std::hint::spin_loop();
29            }
30            b.set((current * 2).min(1024));
31        });
32    }
33    
34    #[inline]
35    fn reset_backoff() {
36        Self::BACKOFF.with(|b| b.set(1));
37    }
38    
39    /// Creates a new empty stack
40    pub fn new() -> Self {
41        Stack {
42            head: AtomicPtr::new(ptr::null_mut()),
43        }
44    }
45
46    /// Pushes a value onto the stack
47    pub fn push(&self, data: T) {
48        let new_node = Box::into_raw(Box::new(Node {
49            data,
50            next: ptr::null_mut(),
51        }));
52
53        loop {
54            let head = self.head.load(Ordering::Acquire);
55            unsafe {
56                (*new_node).next = head;
57            }
58            
59            match self.head.compare_exchange_weak(
60                head,
61                new_node,
62                Ordering::Release,
63                Ordering::Acquire,
64            ) {
65                Ok(_) => {
66                    Self::reset_backoff();
67                    return;
68                }
69                Err(_) => Self::backoff(),
70            }
71        }
72    }
73
74    /// Pops a value from the stack
75    pub fn pop(&self) -> Option<T> {
76        loop {
77            let head_ptr = self.head.load(Ordering::Acquire);
78            
79            if head_ptr.is_null() {
80                return None;
81            }
82
83            let head_node = unsafe { &*head_ptr };
84            let next = head_node.next;
85
86            match self.head.compare_exchange_weak(
87                head_ptr,
88                next,
89                Ordering::Release,
90                Ordering::Acquire,
91            ) {
92                Ok(_) => {
93                    let data = unsafe { Box::from_raw(head_ptr).data };
94                    Self::reset_backoff();
95                    return Some(data);
96                }
97                Err(_) => Self::backoff(),
98            }
99        }
100    }
101
102    /// Returns true if the stack is empty
103    pub fn is_empty(&self) -> bool {
104        self.head.load(Ordering::Acquire).is_null()
105    }
106}
107
108impl<T> Default for Stack<T> {
109    fn default() -> Self {
110        Self::new()
111    }
112}
113
114impl<T> Drop for Stack<T> {
115    fn drop(&mut self) {
116        while self.pop().is_some() {}
117    }
118}
119
// SAFETY: a `Stack<T>` owns its `T` values outright and moving the whole
// stack to another thread moves them with it, so `Send` requires only
// `T: Send`.
unsafe impl<T: Send> Send for Stack<T> {}
// SAFETY: all shared mutation goes through the atomic `head` pointer;
// concurrent `push`/`pop` transfer `T` values between threads by value,
// which requires `T: Send` (not `T: Sync` — no `&T` is ever shared).
unsafe impl<T: Send> Sync for Stack<T> {}