integer_atomics/
atomic.rs

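//! Atomic versions of the narrow integer types (u8/i8 up to u64/i64),
//! emulated by running CAS loops against the aligned `AtomicUsize` word
//! that contains the value.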
use std::mem;
use std::panic::RefUnwindSafe;
use std::ops::{Add, BitAnd, BitOr, BitXor, Sub};
use std::num::Wrapping;
use std::cell::UnsafeCell;
use std::sync::atomic::{AtomicUsize, Ordering};

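/// Glue between a storage type and the `usize` word it is packed into:
/// `from`/`to` convert to and from the word representation, while
/// `new`/`unwrap` convert to and from the public integer type (`Public`).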
pub trait IntCast: Copy + Eq + Add<Output=Self> + BitAnd<Output=Self>
    + BitOr<Output=Self> + BitXor<Output=Self> + Sub<Output=Self> {
    type Public: PartialEq + Copy;

    fn from(u: usize) -> Self;
    fn to(self) -> usize;

    fn new(p: Self::Public) -> Self;
    fn unwrap(self) -> Self::Public;
}

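// Implements IntCast for each supported width. The storage type is
// Wrapping<_> so that fetch_add/fetch_sub wrap around like the std atomics
// instead of tripping the bare integers' overflow checks.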
macro_rules! intcast {
    ($($type:ident)+) => {
        $(
            impl IntCast for Wrapping<$type> {
                type Public = $type;

                fn from(u: usize) -> Self {
                    Wrapping(u as $type)
                }
                fn to(self) -> usize {
                    self.0 as usize
                }

                fn new(p: $type) -> Self {
                    Wrapping(p)
                }

                fn unwrap(self) -> $type {
                    self.0
                }
            }
        )+
    }
}
intcast! { u8 i8 u16 i16 u32 i32 u64 i64 }

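/// The single generic implementation behind the `AtomicI*`/`AtomicU*`
/// aliases below: `T` is the `Wrapping<_>` storage type, and the public
/// API speaks `T::Public`, the bare integer.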
pub struct Template<T> {
    v: UnsafeCell<T>,
}

impl<T: Default + IntCast> Default for Template<T> {
    fn default() -> Self {
        Self::new(T::default().unwrap())
    }
}

// TODO: impl Debug

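// Safety: once constructed, the cell is only touched through the aligned
// `AtomicUsize` built by `proxy` (or via `&mut self`/by value), so sharing
// a `&Template` across threads cannot produce a data race.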
unsafe impl<T> Sync for Template<T> {}
impl<T> RefUnwindSafe for Template<T> {}

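// Returns word `a` with its size_of::<T>()-byte field at bit `offset`
// replaced by `b`, leaving the surrounding bytes unchanged.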
fn inject<T>(a: usize, b: usize, offset: usize) -> usize {
    let bits = mem::size_of::<T>() * 8;
    // Avoid shift overflow when T is as wide as usize (e.g. u64 on 64-bit).
    let mask = if bits >= mem::size_of::<usize>() * 8 { !0 } else { ((1 << bits) - 1) << offset };
    // Mask `b` as well: `IntCast::to` sign-extends negative signed values.
    (a & !mask) | ((b << offset) & mask)
}

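// Maps an RMW ordering to the strongest ordering that may legally be used
// as the corresponding compare_exchange failure ordering.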
// straight from libcore's atomic.rs
#[inline]
fn strongest_failure_ordering(order: Ordering) -> Ordering {
    use self::Ordering::*;
    match order {
        Release => Relaxed,
        Relaxed => Relaxed,
        SeqCst => SeqCst,
        Acquire => Acquire,
        AcqRel => Acquire,
        _ => unreachable!(),
    }
}

impl<T: IntCast> Template<T> {
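    // Finds the word-aligned AtomicUsize containing this value, plus the
    // value's bit offset within that word. The `byte_delta * 8` arithmetic
    // assumes a little-endian target.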
    #[inline]
    fn proxy(&self) -> (&AtomicUsize, usize) {
        let ptr = self.v.get() as usize;
        let aligned = ptr & !(mem::size_of::<usize>() - 1);
        (unsafe { &*(aligned as *const AtomicUsize) }, (ptr - aligned) * 8)
    }

    // TODO: make this a `const fn` once const fn is stable
    #[inline]
    pub /*const*/ fn new(v: T::Public) -> Self {
        Template { v: UnsafeCell::new(T::new(v)) }
    }

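    // `&mut self` guarantees exclusive access, so no atomics are needed;
    // the cast relies on Wrapping<T> being a transparent wrapper around T.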
    #[inline]
    pub fn get_mut(&mut self) -> &mut T::Public {
        unsafe { &mut *(self.v.get() as *mut T::Public) }
    }

    #[inline]
    pub fn into_inner(self) -> T::Public {
        self.v.into_inner().unwrap()
    }

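    // Loads the whole containing word, then shifts our field down out of it.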
    #[inline]
    pub fn load(&self, order: Ordering) -> T::Public {
        let (p, o) = self.proxy();
        T::from(p.load(order) >> o).unwrap()
    }

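    // Every read-modify-write operation funnels through this CAS loop:
    // `f` maps the current value to `Some(new)`, or `None` to bail out
    // early. The value observed before the operation is returned, matching
    // the fetch_* convention.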
    #[inline]
    fn op<F: Fn(T) -> Option<T>>(&self, f: F, order: Ordering) -> T::Public {
        self.op_new(f, order, strongest_failure_ordering(order))
    }

    #[inline]
    fn op_new<F: Fn(T) -> Option<T>>(&self, f: F, success: Ordering, failure: Ordering) -> T::Public {
        let (p, o) = self.proxy();
        let mut old = p.load(Ordering::Relaxed);
        loop {
            let old_t = T::from(old >> o);
            let new_t = match f(old_t) {
                Some(x) => x,
                None => return old_t.unwrap(),
            };

            match Self::op_weak(p, o, old, new_t, success, failure) {
                Ok(()) => return old_t.unwrap(),
                Err(prev) => old = prev,
            };
        }
    }

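    // A single weak CAS attempt: splice the new value into its slot in the
    // containing word and try to publish it; on failure, Err carries the
    // word that was actually observed.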
    #[inline]
    fn op_weak(p: &AtomicUsize, o: usize, old: usize, new_t: T, success: Ordering, failure: Ordering) -> Result<(), usize> {
        let new = inject::<T>(old, new_t.to(), o);
        p.compare_exchange_weak(old, new, success, failure).map(|_| ())
    }

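    // store and swap are the same unconditional exchange; store merely
    // discards the previous value.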
    #[inline]
    pub fn store(&self, val: T::Public, order: Ordering) {
        self.op(|_| Some(T::new(val)), order);
    }

    #[inline]
    pub fn swap(&self, val: T::Public, order: Ordering) -> T::Public {
        self.op(|_| Some(T::new(val)), order)
    }

    #[inline]
    pub fn compare_and_swap(&self, current: T::Public, new: T::Public, order: Ordering) -> T::Public {
        self.op(|x| if x == T::new(current) { Some(T::new(new)) } else { None }, order)
    }

    #[inline]
    pub fn compare_exchange(&self, current: T::Public, new: T::Public, success: Ordering, failure: Ordering) -> Result<T::Public, T::Public> {
        match self.op_new(|x| if x == T::new(current) { Some(T::new(new)) } else { None }, success, failure) {
            x if x == current => Ok(x),
            x => Err(x),
        }
    }

    #[inline]
    pub fn compare_exchange_weak(&self, current: T::Public, new: T::Public, success: Ordering, failure: Ordering) -> Result<T::Public, T::Public> {
        let (p, o) = self.proxy();
        // Load with the failure ordering: if the comparison fails here,
        // this load is the failed exchange the caller's ordering applies to.
        let old = p.load(failure);
        let old_t = T::from(old >> o).unwrap();
        if old_t != current {
            return Err(old_t);
        }

        Self::op_weak(p, o, old, T::new(new), success, failure)
            .map(|()| current)
            .map_err(|x| T::from(x >> o).unwrap())
    }

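    // The fetch_* family mirrors std::sync::atomic: each applies a wrapping
    // arithmetic or bitwise update and returns the previous value.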
    #[inline]
    pub fn fetch_add(&self, val: T::Public, order: Ordering) -> T::Public {
        self.op(|x| Some(x + T::new(val)), order)
    }

    #[inline]
    pub fn fetch_sub(&self, val: T::Public, order: Ordering) -> T::Public {
        self.op(|x| Some(x - T::new(val)), order)
    }

    #[inline]
    pub fn fetch_and(&self, val: T::Public, order: Ordering) -> T::Public {
        self.op(|x| Some(x & T::new(val)), order)
    }

    #[inline]
    pub fn fetch_or(&self, val: T::Public, order: Ordering) -> T::Public {
        self.op(|x| Some(x | T::new(val)), order)
    }

    #[inline]
    pub fn fetch_xor(&self, val: T::Public, order: Ordering) -> T::Public {
        self.op(|x| Some(x ^ T::new(val)), order)
    }
}

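// The 64-bit aliases assume a target where `usize` is 64 bits wide; on a
// 32-bit target a u64 does not fit inside a single `AtomicUsize` word.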
pub type AtomicI8 = Template<Wrapping<i8>>;
pub type AtomicU8 = Template<Wrapping<u8>>;
pub type AtomicI16 = Template<Wrapping<i16>>;
pub type AtomicU16 = Template<Wrapping<u16>>;
pub type AtomicI32 = Template<Wrapping<i32>>;
pub type AtomicU32 = Template<Wrapping<u32>>;
pub type AtomicI64 = Template<Wrapping<i64>>;
pub type AtomicU64 = Template<Wrapping<u64>>;

#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::atomic::Ordering;

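    // Each swap/fetch_* call returns the value *before* the operation; the
    // fetch_sub/fetch_add pair around zero exercises the wrapping behaviour
    // at the u16 boundaries.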
    #[test]
    fn basics() {
        let v = AtomicU16::new(1337);
        let o = Ordering::Relaxed;
        assert_eq!(v.swap(42, o), 1337);
        assert_eq!(v.fetch_add(1, o), 42);
        assert_eq!(v.fetch_sub(1, o), 43);
        assert_eq!(v.fetch_and(0x20, o), 42);
        assert_eq!(v.fetch_or(0x0a, o), 0x20);
        assert_eq!(v.fetch_xor(42, o), 42);
        assert_eq!(v.fetch_sub(1, o), 0);
        assert_eq!(v.fetch_add(1, o), u16::MAX);
        assert_eq!(v.compare_and_swap(1, 2, o), 0);
        assert_eq!(v.compare_and_swap(0, 3, o), 0);
        assert_eq!(v.load(o), 3);
    }
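
    // Not part of the original suite: a sketch of a neighbour-clobbering
    // check. Two AtomicU8s laid out next to each other will typically share
    // one usize word; concurrent updates must not disturb each other.
    #[test]
    fn adjacent_values_do_not_clobber() {
        use std::sync::Arc;
        use std::thread;

        let pair = Arc::new((AtomicU8::new(0), AtomicU8::new(0)));
        let handles: Vec<_> = (0..2).map(|i| {
            let pair = Arc::clone(&pair);
            thread::spawn(move || {
                for _ in 0..100 {
                    if i == 0 {
                        pair.0.fetch_add(1, Ordering::SeqCst);
                    } else {
                        pair.1.fetch_add(1, Ordering::SeqCst);
                    }
                }
            })
        }).collect();
        for h in handles {
            h.join().unwrap();
        }
        assert_eq!(pair.0.load(Ordering::SeqCst), 100);
        assert_eq!(pair.1.load(Ordering::SeqCst), 100);
    }
}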
231}