#![warn(missing_copy_implementations, missing_debug_implementations, missing_docs)]
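
//! Hazard pointers: each thread publishes the pointers it is currently
//! reading as hazardous, and a retired pointer is reclaimed only once no
//! thread holds it hazardous.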

use std::fmt;
use std::ops;
use std::ptr;
use std::cell::RefCell;
use std::sync::atomic::AtomicPtr;
use std::sync::atomic::Ordering::*;

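/// An allocator for the objects protected by hazard pointers.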
pub trait Memory {
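    /// Moves `value` to the heap and returns a raw pointer to it.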
    fn allocate<T>(&self, value: T) -> *mut T;
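
    /// Frees the memory behind `pointer`.
    ///
    /// # Safety
    ///
    /// `pointer` must have come from `allocate` on the same implementation
    /// and must not be used afterwards.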
    unsafe fn deallocate<T>(&self, pointer: *mut T);
}

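// On 32-bit and 64-bit targets alike this is 128 bytes' worth of
// pointer-sized words: the unit of padding used below to keep values on
// separate cache lines.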
#[cfg(target_pointer_width="32")]
const POINTERS: usize = 32;
#[cfg(target_pointer_width="64")]
const POINTERS: usize = 16;

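/// A `Vec` padded out to 128 bytes so that neighbouring values do not
/// share a cache line (which would cause false sharing between threads).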
#[repr(C)]
pub struct AlignVec<T> {
    vec: Vec<T>,
    // A `Vec` is three words (pointer, capacity, length); pad out the
    // remainder of the 128 bytes.
    _padding: [usize; POINTERS - 3],
}

impl<T> AlignVec<T> {
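    /// Wraps `vec` together with its cache-line padding.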
    pub fn new(vec: Vec<T>) -> Self {
        AlignVec { vec, _padding: [0; POINTERS - 3] }
    }
}

impl<T> fmt::Debug for AlignVec<T> where T: fmt::Debug {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "{:?}", &self.vec)
    }
}

impl<T> ops::Deref for AlignVec<T> {
    type Target = Vec<T>;

    fn deref(&self) -> &Self::Target {
        &self.vec
    }
}

impl<T> ops::DerefMut for AlignVec<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.vec
    }
}

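/// A `Memory` implementation that allocates and frees through `Box`.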
#[derive(Copy, Clone, Debug)]
pub struct BoxMemory;

impl Memory for BoxMemory {
    fn allocate<T>(&self, value: T) -> *mut T {
        Box::into_raw(Box::new(value))
    }

    unsafe fn deallocate<T>(&self, pointer: *mut T) {
        assert!(!pointer.is_null());
        // Re-own the pointer as a `Box` so the value is dropped and freed.
        drop(Box::from_raw(pointer));
    }
}

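/// Shared hazard pointer state: one row of hazard slots and one list of
/// retired pointers per thread.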
#[repr(C)]
pub struct Pointers<T, M> where M: Memory {
    hazardous: AlignVec<Vec<AtomicPtr<T>>>,
    retired: AlignVec<RefCell<Vec<*mut T>>>,
    threshold: usize,
    memory: M,
}

impl<T, M> Pointers<T, M> where M: Memory {
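    /// Creates state for `threads` threads with `domains` hazard slots
    /// each; a thread's retired list is swept for reclaimable pointers once
    /// it reaches `threshold` entries.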
    pub fn new(memory: M, threads: usize, domains: usize, threshold: usize) -> Self {
        let hazardous = (0..threads).map(|_| {
            (0..domains).map(|_| AtomicPtr::new(ptr::null_mut())).collect()
        }).collect();
        let retired = vec![RefCell::new(vec![]); threads];
        Pointers {
            hazardous: AlignVec::new(hazardous),
            retired: AlignVec::new(retired),
            threshold,
            memory,
        }
    }

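    /// Marks the value behind `pointer` as hazardous for `thread` and
    /// returns it; the returned pointer is safe to dereference until
    /// `clear` is called on the same slot.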
    pub fn mark(&self, thread: usize, domain: usize, pointer: &AtomicPtr<T>) -> *mut T {
        loop {
            let value = pointer.load(Acquire);
            self.hazardous[thread][domain].store(value, Release);
            // Re-read to confirm the published value is still current; if it
            // changed, the old value may already have been retired, so retry.
            if value == pointer.load(Acquire) {
                return value;
            }
        }
    }

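    /// Marks `pointer` itself as hazardous for `thread` and returns it.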
    pub fn mark_ptr(&self, thread: usize, domain: usize, pointer: *mut T) -> *mut T {
        self.hazardous[thread][domain].store(pointer, Release);
        pointer
    }

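    /// Clears the hazard slot of `thread` for `domain`.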
    pub fn clear(&self, thread: usize, domain: usize) {
        self.hazardous[thread][domain].store(ptr::null_mut(), Release);
    }

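    /// Returns `true` if any thread currently marks `pointer` as hazardous.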
    pub fn hazardous(&self, pointer: *mut T) -> bool {
        self.hazardous.iter().any(|h| h.iter().any(|p| pointer == p.load(Acquire)))
    }

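    // Deallocates `pointer` and reports success, unless it is still marked
    // hazardous by some thread.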
    fn kill(&self, pointer: *mut T) -> bool {
        if self.hazardous(pointer) {
            false
        } else {
            unsafe { self.memory.deallocate(pointer); }
            true
        }
    }

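    /// Retires `pointer` on behalf of `thread`: it is queued and freed once
    /// no thread marks it as hazardous. The queue is swept whenever it
    /// reaches the threshold passed to `new`.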
    pub fn retire(&self, thread: usize, pointer: *mut T) {
        let mut retired = self.retired[thread].borrow_mut();
        retired.push(pointer);
        if retired.len() >= self.threshold {
            retired.retain(|p| !self.kill(*p));
        }
    }
}

impl<T, M> Drop for Pointers<T, M> where M: Memory {
    fn drop(&mut self) {
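        // Exclusive access here means no thread can still hold a hazard
        // mark, so every retired pointer can be freed.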
        for retired in &*self.retired {
            for pointer in &*retired.borrow() {
                unsafe { self.memory.deallocate(*pointer); }
            }
        }
    }
}

impl<T, M> fmt::Debug for Pointers<T, M> where M: Memory {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.debug_struct("Pointers").field("hazardous", &self.hazardous).finish()
    }
}
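
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::atomic::{AtomicPtr, Ordering};

    // A minimal single-threaded sketch of the mark/clear/retire protocol
    // (illustrative, not part of the original crate); the thread indices
    // stand in for real threads.
    #[test]
    fn mark_clear_retire() {
        let pointers = Pointers::new(BoxMemory, 2, 1, 64);
        let shared = AtomicPtr::new(BoxMemory.allocate(42));

        // "Thread" 0 protects the pointer before dereferencing it.
        let value = pointers.mark(0, 0, &shared);
        assert_eq!(unsafe { *value }, 42);
        pointers.clear(0, 0);

        // "Thread" 1 unlinks the value and retires it; it is reclaimed once
        // no hazard slot holds it (at the latest when `pointers` drops).
        let unlinked = shared.swap(std::ptr::null_mut(), Ordering::AcqRel);
        pointers.retire(1, unlinked);
    }
}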