#![warn(missing_copy_implementations, missing_debug_implementations, missing_docs)]
use std::fmt;
use std::ops;
use std::ptr;
use std::cell::{RefCell};
use std::sync::atomic::{AtomicPtr};
use std::sync::atomic::Ordering::*;
/// Abstraction over the allocator used to create and destroy the values
/// whose reclamation is deferred through `Pointers`.
pub trait Memory {
/// Moves `value` onto a stable allocation and returns a raw pointer to it.
fn allocate<T>(&self, value: T) -> *mut T;
/// Frees an allocation previously produced by `allocate`.
///
/// # Safety
/// `pointer` must have been returned by `allocate` on this same
/// implementation, must be non-null, and must not be used (or freed)
/// again after this call.
unsafe fn deallocate<T>(&self, pointer: *mut T);
}
// Number of machine words in a 128-byte block: 32 * 4 bytes on 32-bit
// targets, 16 * 8 bytes on 64-bit targets. `AlignVec` pads itself out to
// exactly this many words, presumably so adjacent entries land on distinct
// cache lines (128 B = two 64-byte lines, covering adjacent-line prefetch)
// and avoid false sharing — TODO confirm intended targets.
#[cfg(target_pointer_width="32")]
const POINTERS: usize = 32;
#[cfg(target_pointer_width="64")]
const POINTERS: usize = 16;
/// A `Vec<T>` padded out to exactly `POINTERS` machine words (128 bytes)
/// so that neighbouring `AlignVec`s do not share cache lines.
#[repr(C)]
pub struct AlignVec<T> {
vec: Vec<T>,
// A `Vec` header is 3 words (ptr, capacity, length); the padding fills
// the remainder of the 128-byte block.
_padding: [usize; POINTERS - 3],
}
impl<T> AlignVec<T> {
    /// Wraps `vec`, zero-filling the padding that rounds the struct up to
    /// a full `POINTERS`-word block.
    pub fn new(vec: Vec<T>) -> Self {
        Self {
            vec,
            _padding: [0; POINTERS - 3],
        }
    }
}
impl<T: fmt::Debug> fmt::Debug for AlignVec<T> {
    /// Formats exactly like the inner `Vec`; the padding is invisible.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&self.vec, f)
    }
}
impl<T> ops::Deref for AlignVec<T> {
    type Target = Vec<T>;

    /// Shared access to the wrapped vector.
    #[inline]
    fn deref(&self) -> &Vec<T> {
        &self.vec
    }
}
impl<T> ops::DerefMut for AlignVec<T> {
    /// Mutable access to the wrapped vector.
    #[inline]
    fn deref_mut(&mut self) -> &mut Vec<T> {
        &mut self.vec
    }
}
/// A `Memory` implementation backed by the global allocator via `Box`.
#[derive(Copy, Clone, Debug)]
pub struct BoxMemory;
impl Memory for BoxMemory {
    /// Heap-allocates `value` and leaks it as a raw pointer.
    fn allocate<T>(&self, value: T) -> *mut T {
        Box::into_raw(Box::new(value))
    }

    /// Rebuilds the `Box` from `pointer` and drops it, freeing the
    /// allocation.
    ///
    /// # Safety
    /// `pointer` must come from `Self::allocate` and must not be
    /// dereferenced or freed again afterwards.
    unsafe fn deallocate<T>(&self, pointer: *mut T) {
        assert!(!pointer.is_null());
        // Drop explicitly: a bare `Box::from_raw(pointer);` statement is
        // `#[must_use]` and triggers an `unused_must_use` warning on
        // modern compilers, besides hiding the intent to free.
        drop(Box::from_raw(pointer));
    }
}
/// Shared hazard-pointer state: per-thread hazard slots plus per-thread
/// lists of retired pointers awaiting reclamation.
#[repr(C)]
pub struct Pointers<T, M> where M: Memory {
// hazardous[thread][domain]: the pointer that thread is currently
// protecting in that domain, or null. Stored by the owning thread,
// loaded by any thread scanning for hazards.
hazardous: AlignVec<Vec<AtomicPtr<T>>>,
// retired[thread]: pointers retired by that thread and not yet freed.
// NOTE(review): `RefCell` gives no cross-thread protection — the design
// appears to assume each slot is only ever borrowed by its owning
// thread; confirm against the callers.
retired: AlignVec<RefCell<Vec<*mut T>>>,
// When a thread's retired list reaches this length, it is scanned and
// non-hazardous entries are freed.
threshold: usize,
// Allocator used to deallocate retired pointers.
memory: M,
}
impl<T, M> Pointers<T, M> where M: Memory {
/// Creates hazard-pointer state for `threads` threads with `domains`
/// hazard slots each; a thread's retired list is scanned for
/// reclaimable pointers once it holds `threshold` entries.
pub fn new(memory: M, threads: usize, domains: usize, threshold: usize) -> Self {
let hazardous = (0..threads).map(|_| {
(0..domains).map(|_| AtomicPtr::new(ptr::null_mut())).collect()
}).collect();
let retired = vec![RefCell::new(vec![]); threads];
Pointers {
hazardous: AlignVec::new(hazardous),
retired: AlignVec::new(retired),
threshold: threshold,
memory: memory,
}
}
/// Protects the pointer currently stored in `pointer` by publishing it
/// in `thread`'s hazard slot for `domain`, and returns it.
///
/// Acquire loop: publish the candidate, then re-read the source and
/// retry until the two agree — once they do, the published value cannot
/// have been retired-and-freed in between.
///
/// NOTE(review): the textbook hazard-pointer algorithm requires a
/// store-load fence (e.g. SeqCst) between the hazard store and the
/// validating re-load; Release followed by Acquire may be insufficient
/// on weakly ordered hardware — confirm the intended memory model.
pub fn mark(&self, thread: usize, domain: usize, pointer: &AtomicPtr<T>) -> *mut T {
loop {
let value = pointer.load(Acquire);
self.hazardous[thread][domain].store(value, Release);
if value == pointer.load(Acquire) {
return value;
}
}
}
/// Publishes `pointer` directly in `thread`'s hazard slot for `domain`,
/// without the validation loop of `mark`; returns `pointer` unchanged.
/// Only sound when the caller otherwise knows the pointer cannot be
/// reclaimed concurrently.
pub fn mark_ptr(&self, thread: usize, domain: usize, pointer: *mut T) -> *mut T {
self.hazardous[thread][domain].store(pointer, Release);
pointer
}
/// Clears `thread`'s hazard slot for `domain` (stores null), ending any
/// protection it provided.
pub fn clear(&self, thread: usize, domain: usize) {
self.hazardous[thread][domain].store(ptr::null_mut(), Release);
}
/// Returns `true` if any thread currently has `pointer` published in
/// any hazard slot. Linear scan over all threads × domains.
pub fn hazardous(&self, pointer: *mut T) -> bool {
self.hazardous.iter().any(|h| h.iter().any(|p| pointer == p.load(Acquire)))
}
// Frees `pointer` unless some thread still holds it as hazardous.
// Returns `true` when the pointer was actually deallocated.
fn kill(&self, pointer: *mut T) -> bool {
if self.hazardous(pointer) {
false
} else {
unsafe { self.memory.deallocate(pointer); }
true
}
}
/// Queues `pointer` for deferred deallocation on behalf of `thread`.
/// Once the per-thread list reaches `threshold`, every queued entry
/// that is no longer hazardous is freed; survivors stay queued for a
/// later scan.
///
/// Panics if re-entered for the same `thread` index while the list is
/// borrowed (`RefCell` runtime check).
pub fn retire(&self, thread: usize, pointer: *mut T) {
let mut retired = self.retired[thread].borrow_mut();
retired.push(pointer);
if retired.len() >= self.threshold {
retired.retain(|p| !self.kill(*p));
}
}
}
impl<T, M> Drop for Pointers<T, M> where M: Memory {
    /// Frees every pointer still queued for reclamation; holding
    /// `&mut self` means no thread can still be protecting any of them.
    fn drop(&mut self) {
        for cell in self.retired.iter() {
            for &pointer in cell.borrow().iter() {
                unsafe { self.memory.deallocate(pointer); }
            }
        }
    }
}
impl<T, M> fmt::Debug for Pointers<T, M> where M: Memory {
    /// Shows only the hazard-slot table; the retired lists hold raw
    /// pointers and `memory` need not implement `Debug`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Pointers")
            .field("hazardous", &self.hazardous)
            .finish()
    }
}