use std::sync::atomic::{AtomicU64, AtomicUsize, Ordering};
use std::sync::Mutex;
// Head word layout: low 48 bits hold the node address, high 16 bits hold a
// version counter that `push` bumps on every successful CAS (ABA mitigation
// for the packed head word).
// NOTE(review): assumes user-space pointers fit in 48 bits (4-level paging on
// x86-64/AArch64) — confirm targets; 57-bit/5-level paging would break this.
const PTR_MASK: u64 = (1u64 << 48) - 1;
const VER_SHIFT: u32 = 48;

/// Packs a 16-bit version tag and a pointer into a single tagged word.
///
/// Debug builds assert the pointer actually fits in 48 bits instead of
/// silently truncating it (the release-mode mask would corrupt the address
/// without any diagnostic).
#[inline]
fn pack(version: u16, ptr: *mut u8) -> u64 {
    debug_assert_eq!(
        ptr as u64 & !PTR_MASK,
        0,
        "pointer does not fit in 48 bits"
    );
    ((version as u64) << VER_SHIFT) | (ptr as u64 & PTR_MASK)
}

/// Extracts the node pointer from a tagged head word.
#[inline]
fn unpack_ptr<T>(raw: u64) -> *mut Node<T> {
    let addr = raw & PTR_MASK;
    addr as *mut Node<T>
}

/// Extracts the version tag from a tagged head word.
#[inline]
fn unpack_version(raw: u64) -> u16 {
    (raw >> VER_SHIFT) as u16
}

/// Intrusive singly linked node. `next` stores the full packed head word
/// observed at push time (version + pointer), not a bare pointer.
struct Node<T> {
    value: T,
    next: u64,
}
/// A Treiber-style lock-free stack.
///
/// Reclamation scheme: popped nodes are never freed while the stack is alive —
/// they are parked on `retired` and only deallocated in `Drop`. That makes it
/// safe for `pop` to dereference a node pointer it read from `head` even if a
/// concurrent `pop` wins the race, at the cost of memory growth until drop.
pub struct LockFreeStack<T> {
    // Marks logical ownership of `T` for drop-check/variance; no `T` is
    // stored inline in this struct.
    _phantom: std::marker::PhantomData<T>,
    // Packed head word: high 16 bits version tag, low 48 bits `*mut Node<T>`.
    // A zero pointer field means the stack is empty.
    head: AtomicU64,
    // Approximate element count, maintained with relaxed atomics.
    len: AtomicUsize,
    // Nodes popped but not yet deallocated; freed in `Drop`.
    retired: Mutex<Vec<*mut u8>>,
}
// SAFETY: the stack owns its `T`s and transfers them across threads by value,
// so `T: Send` suffices for both impls. All shared state (`head`, `len`) is
// accessed through atomics, and `retired` through a `Mutex`, so sharing
// `&LockFreeStack<T>` between threads introduces no data races. The raw
// `*mut u8` pointers in `retired` are what blocks the auto-derive; they are
// uniquely owned by the stack.
unsafe impl<T: Send> Send for LockFreeStack<T> {}
unsafe impl<T: Send> Sync for LockFreeStack<T> {}
impl<T> LockFreeStack<T> {
    /// Creates an empty stack. A head word of 0 (null pointer, version 0)
    /// denotes the empty stack.
    pub fn new() -> Self {
        LockFreeStack {
            _phantom: std::marker::PhantomData,
            head: AtomicU64::new(0),
            len: AtomicUsize::new(0),
            retired: Mutex::new(Vec::new()),
        }
    }

    /// Pushes `val` onto the stack, allocating one node.
    ///
    /// Lock-free: loops on `compare_exchange_weak` until the CAS wins.
    pub fn push(&self, val: T) {
        let node = Box::into_raw(Box::new(Node {
            value: val,
            next: 0,
        }));
        loop {
            let old_head = self.head.load(Ordering::Acquire);
            // Link stores the full packed head word (version + pointer); this
            // write is published by the Release CAS below.
            unsafe { (*node).next = old_head };
            // Bump the version tag so a recurring pointer value still yields
            // a distinct head word (ABA mitigation).
            let old_version = unpack_version(old_head);
            let new_version = old_version.wrapping_add(1);
            let new_head = pack(new_version, node as *mut u8);
            match self.head.compare_exchange_weak(
                old_head,
                new_head,
                // Release on success publishes the node's contents to any
                // thread that later acquires this head value.
                Ordering::Release,
                // Relaxed on failure is fine: the loop reloads with Acquire.
                Ordering::Relaxed,
            ) {
                Ok(_) => {
                    self.len.fetch_add(1, Ordering::Relaxed);
                    return;
                }
                // Lost the race (or spurious weak-CAS failure): retry.
                Err(_) => {}
            }
        }
    }

    /// Pops the most recently pushed value, or `None` if the stack is empty.
    ///
    /// The popped node is moved to `retired` rather than freed, so a
    /// concurrent `pop` that already read the same node pointer never touches
    /// freed memory. Retired nodes are deallocated in `Drop`.
    pub fn pop(&self) -> Option<T> {
        loop {
            let old_head = self.head.load(Ordering::Acquire);
            if old_head & PTR_MASK == 0 {
                return None;
            }
            let node_ptr = unpack_ptr::<T>(old_head);
            // Dereferencing under contention is sound: nodes are only
            // deallocated in `Drop`, never while the stack is shared.
            let next = unsafe { (*node_ptr).next };
            match self.head.compare_exchange_weak(
                old_head,
                next,
                Ordering::Release,
                Ordering::Relaxed,
            ) {
                Ok(_) => {
                    self.len.fetch_sub(1, Ordering::Relaxed);
                    // Winning the CAS grants exclusive ownership of the node,
                    // so moving the value out is sound and happens once.
                    let value = unsafe { std::ptr::read(&(*node_ptr).value) };
                    // Recover from a poisoned mutex instead of skipping the
                    // retire step: the old `if let Ok(..)` path leaked the
                    // node on poison (it was never freed by `Drop`). A panic
                    // elsewhere cannot leave the Vec in an invalid state.
                    self.retired
                        .lock()
                        .unwrap_or_else(|poisoned| poisoned.into_inner())
                        .push(node_ptr as *mut u8);
                    return Some(value);
                }
                Err(_) => {}
            }
        }
    }

    /// Returns `true` if the stack was empty at the instant of the load.
    pub fn is_empty(&self) -> bool {
        self.head.load(Ordering::Relaxed) & PTR_MASK == 0
    }

    /// Approximate length: the relaxed counter may lag concurrent pushes and
    /// pops; it is exact only when the stack is quiescent.
    pub fn len(&self) -> usize {
        self.len.load(Ordering::Relaxed)
    }
}
impl<T> Default for LockFreeStack<T> {
fn default() -> Self {
Self::new()
}
}
impl<T> Drop for LockFreeStack<T> {
    /// Drops any values still on the stack, then frees every retired node.
    fn drop(&mut self) {
        // Drain remaining elements; each `pop` drops its value and moves the
        // node onto the retired list.
        while self.pop().is_some() {}
        // `&mut self` guarantees exclusive access, so bypass locking with
        // `get_mut`. Recover the list even if the mutex was poisoned — the
        // previous `if let Ok(..)` silently leaked every retired node when a
        // thread had panicked while holding the lock.
        let retired = self
            .retired
            .get_mut()
            .unwrap_or_else(|poisoned| poisoned.into_inner());
        // Nodes were allocated as `Box<Node<T>>`; their `value` was already
        // moved out by `pop`, so only the raw memory needs freeing.
        let layout = std::alloc::Layout::new::<Node<T>>();
        for ptr in retired.drain(..) {
            // SAFETY: each pointer came from `Box::into_raw` of a `Node<T>`
            // and is deallocated exactly once with the matching layout.
            unsafe {
                std::alloc::dealloc(ptr, layout);
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::atomic::{AtomicUsize, Ordering};
    use std::sync::Arc;
    use std::thread;

    #[test]
    fn test_basic_push_pop() {
        let s: LockFreeStack<i32> = LockFreeStack::new();
        assert!(s.is_empty());
        s.push(1);
        s.push(2);
        s.push(3);
        assert_eq!(s.len(), 3);
        assert_eq!(s.pop(), Some(3));
        assert_eq!(s.pop(), Some(2));
        assert_eq!(s.pop(), Some(1));
        assert_eq!(s.pop(), None);
        assert!(s.is_empty());
    }

    #[test]
    fn test_lifo_ordering() {
        let s: LockFreeStack<usize> = LockFreeStack::new();
        for i in 0..100 {
            s.push(i);
        }
        for i in (0..100).rev() {
            assert_eq!(s.pop(), Some(i));
        }
    }

    #[test]
    fn test_concurrent_push_pop() {
        const THREADS: usize = 8;
        const OPS: usize = 10_000;
        let stack = Arc::new(LockFreeStack::<usize>::new());
        let counter = Arc::new(AtomicUsize::new(0));
        let handles: Vec<_> = (0..THREADS)
            .map(|_| {
                let s = Arc::clone(&stack);
                let c = Arc::clone(&counter);
                thread::spawn(move || {
                    for _ in 0..OPS {
                        s.push(1);
                    }
                    while let Some(v) = s.pop() {
                        c.fetch_add(v, Ordering::Relaxed);
                        thread::yield_now();
                    }
                })
            })
            .collect();
        for h in handles {
            h.join().expect("thread panicked");
        }
        // Drain anything left behind by threads that observed a momentarily
        // empty stack, counting it too.
        while let Some(v) = stack.pop() {
            counter.fetch_add(v, Ordering::Relaxed);
        }
        // Conservation: every pushed 1 must be popped exactly once. (The
        // previous version of this test accumulated `counter` but never
        // asserted on it, so lost or duplicated elements went undetected.)
        assert_eq!(counter.load(Ordering::Relaxed), THREADS * OPS);
        assert!(stack.is_empty());
        assert_eq!(stack.len(), 0);
    }

    #[test]
    fn test_drop_runs_destructors() {
        let drops = Arc::new(AtomicUsize::new(0));
        // Counts its own drops so we can verify Drop reaches every element.
        struct Bomb(Arc<AtomicUsize>);
        impl Drop for Bomb {
            fn drop(&mut self) {
                self.0.fetch_add(1, Ordering::Relaxed);
            }
        }
        {
            let s: LockFreeStack<Bomb> = LockFreeStack::new();
            for _ in 0..5 {
                s.push(Bomb(Arc::clone(&drops)));
            }
        }
        assert_eq!(drops.load(Ordering::Relaxed), 5);
    }

    #[test]
    fn test_default_is_empty() {
        let s: LockFreeStack<String> = LockFreeStack::default();
        assert!(s.is_empty());
        assert_eq!(s.len(), 0);
    }

    #[test]
    fn test_single_element_roundtrip() {
        let s: LockFreeStack<String> = LockFreeStack::new();
        s.push("hello".to_string());
        assert_eq!(s.len(), 1);
        assert_eq!(s.pop(), Some("hello".to_string()));
        assert_eq!(s.pop(), None);
    }
}