use std::fmt;
use std::marker::PhantomData;
use std::sync::atomic::{AtomicPtr, Ordering};
use crate::raw::{self, Reservation, Thread};
use crate::Collector;
/// A common interface over guards that protect access to memory managed by a
/// [`Collector`].
///
/// The pointer-access helpers below forward to the corresponding `AtomicPtr`
/// operation with the ordering passed through `raw::Collector::protect`.
/// NOTE(review): `protect` presumably upgrades the caller's ordering to one
/// strong enough for the reclamation scheme — confirm against `crate::raw`.
pub trait Guard {
    /// Refreshes the guard.
    ///
    /// NOTE(review): semantics live in the implementations; for `LocalGuard`
    /// this only takes effect when it is the sole guard on the thread.
    fn refresh(&mut self);

    /// Flushes retired objects, attempting to reclaim a pending batch.
    fn flush(&self);

    /// Returns the [`Collector`] this guard belongs to.
    fn collector(&self) -> &Collector;

    /// Returns an identifier for the thread this guard operates under.
    fn thread_id(&self) -> usize;

    /// Loads the pointer with the (protected) ordering derived from `order`.
    fn protect<T>(&self, ptr: &AtomicPtr<T>, order: Ordering) -> *mut T {
        ptr.load(raw::Collector::protect(order))
    }

    /// Swaps in `value` with the (protected) ordering derived from `order`,
    /// returning the previous pointer.
    fn swap<T>(&self, ptr: &AtomicPtr<T>, value: *mut T, order: Ordering) -> *mut T {
        ptr.swap(value, raw::Collector::protect(order))
    }

    /// Compare-exchange on `ptr`, with both orderings routed through
    /// `raw::Collector::protect`.
    fn compare_exchange<T>(
        &self,
        ptr: &AtomicPtr<T>,
        current: *mut T,
        new: *mut T,
        success: Ordering,
        failure: Ordering,
    ) -> Result<*mut T, *mut T> {
        ptr.compare_exchange(
            current,
            new,
            raw::Collector::protect(success),
            raw::Collector::protect(failure),
        )
    }

    /// Weak compare-exchange (may fail spuriously), with both orderings routed
    /// through `raw::Collector::protect`.
    fn compare_exchange_weak<T>(
        &self,
        ptr: &AtomicPtr<T>,
        current: *mut T,
        new: *mut T,
        success: Ordering,
        failure: Ordering,
    ) -> Result<*mut T, *mut T> {
        ptr.compare_exchange_weak(
            current,
            new,
            raw::Collector::protect(success),
            raw::Collector::protect(failure),
        )
    }

    /// Defers running `reclaim` on `ptr` until it is no longer protected.
    ///
    /// # Safety
    ///
    /// NOTE(review): the caller must guarantee `ptr` is no longer reachable by
    /// new readers and is retired at most once — confirm the exact contract
    /// against `raw::Collector::add`.
    unsafe fn defer_retire<T>(&self, ptr: *mut T, reclaim: unsafe fn(*mut T, &Collector));
}
/// A guard tied to the current thread's reservation in a [`Collector`].
pub struct LocalGuard<'a> {
    // The collector this guard was entered on.
    collector: &'a Collector,
    // The current thread's slot, captured in `enter`.
    thread: Thread,
    // Raw pointer into collector-owned reservation state; valid while the
    // guard (and thread slot) is live — see the SAFETY reasoning at each deref.
    reservation: *const Reservation,
    // `*mut ()` makes the type `!Send`/`!Sync`: the guard is bound to the
    // thread whose reservation it holds.
    _unsend: PhantomData<*mut ()>,
}
impl LocalGuard<'_> {
    /// Enters the collector on the current thread, returning a guard.
    ///
    /// Guards may be nested: only the outermost one actually marks the
    /// thread as active in the collector.
    #[inline]
    pub(crate) fn enter(collector: &Collector) -> LocalGuard<'_> {
        let thread = Thread::current();
        let reservation = unsafe { collector.raw.reservation(thread) };

        // Bump the nesting count; `active` holds the pre-increment value.
        let active = reservation.guards.get();
        reservation.guards.set(active + 1);
        if active == 0 {
            // Outermost guard on this thread: mark the thread active.
            unsafe { collector.raw.enter(reservation) };
        }

        LocalGuard {
            collector,
            thread,
            reservation,
            _unsend: PhantomData,
        }
    }
}
impl Guard for LocalGuard<'_> {
    #[inline]
    fn refresh(&mut self) {
        // SAFETY: `self.reservation` was obtained from the collector in
        // `enter` and stays valid for the guard's lifetime on this thread.
        let reservation = unsafe { &*self.reservation };
        let guards = reservation.guards.get();
        // Only refresh when this is the sole guard on the thread — presumably
        // because refreshing under a nested guard would drop protection an
        // outer guard still relies on (NOTE(review): confirm against raw).
        if guards == 1 {
            unsafe { self.collector.raw.refresh(reservation) }
        }
    }

    #[inline]
    fn flush(&self) {
        // Attempt to reclaim the current thread's pending retirement batch.
        unsafe { self.collector.raw.try_retire_batch(self.thread) }
    }

    #[inline]
    fn collector(&self) -> &Collector {
        self.collector
    }

    #[inline]
    fn thread_id(&self) -> usize {
        // The id of the thread slot captured in `enter`.
        self.thread.id
    }

    #[inline]
    unsafe fn defer_retire<T>(&self, ptr: *mut T, reclaim: unsafe fn(*mut T, &Collector)) {
        // Queue `ptr` for reclamation on this thread's retirement list.
        unsafe { self.collector.raw.add(ptr, reclaim, self.thread) }
    }
}
impl Drop for LocalGuard<'_> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: the reservation pointer was handed out by the collector in
        // `enter` and is still valid here (the guard is bound to this thread).
        let reservation = unsafe { &*self.reservation };
        // Decrement the nesting count; `guards` keeps the pre-decrement value.
        let guards = reservation.guards.get();
        reservation.guards.set(guards - 1);
        if guards == 1 {
            // This was the outermost guard on the thread: mark it inactive.
            unsafe { self.collector.raw.leave(reservation) };
        }
    }
}
impl fmt::Debug for LocalGuard<'_> {
    /// Formats as the bare type name; the guard's internals are raw
    /// collector state and not useful to print.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("LocalGuard")
    }
}
/// A guard that owns its own thread slot in the collector, independent of the
/// OS thread it was created on.
pub struct OwnedGuard<'a> {
    // The collector this guard was entered on.
    collector: &'a Collector,
    // A dedicated slot allocated via `Thread::create` in `enter`; freed when
    // the guard is dropped.
    thread: Thread,
    // Raw pointer into collector-owned reservation state for `thread`.
    reservation: *const Reservation,
}
// SAFETY: `OwnedGuard` owns a dedicated thread slot (`Thread::create` in
// `enter`) rather than borrowing the current OS thread's, and the mutating
// entry points (`flush`, `defer_retire`) serialize through
// `reservation.lock`. NOTE(review): confirm that covers every concurrent
// path into the shared reservation.
unsafe impl Sync for OwnedGuard<'_> {}
unsafe impl Send for OwnedGuard<'_> {}
impl OwnedGuard<'_> {
    /// Enters the collector, returning a guard that owns a dedicated
    /// thread slot (so it may move between OS threads).
    #[inline]
    pub(crate) fn enter(collector: &Collector) -> OwnedGuard<'_> {
        // Allocate a fresh slot instead of using the current thread's;
        // owned guards are never nested, so enter unconditionally.
        let slot = Thread::create();
        let reservation = unsafe { collector.raw.reservation(slot) };
        unsafe { collector.raw.enter(reservation) };

        OwnedGuard {
            thread: slot,
            reservation,
            collector,
        }
    }
}
impl Guard for OwnedGuard<'_> {
    #[inline]
    fn refresh(&mut self) {
        // SAFETY: `self.reservation` was obtained from the collector in
        // `enter` and stays valid until the guard is dropped.
        let reservation = unsafe { &*self.reservation };
        // No nesting check here: owned guards are entered exactly once.
        unsafe { self.collector.raw.refresh(reservation) }
    }

    #[inline]
    fn flush(&self) {
        // SAFETY: see `refresh`.
        let reservation = unsafe { &*self.reservation };
        // The guard may be shared across threads (`Sync`), so serialize
        // access to the owned slot's retirement batch.
        let _lock = reservation.lock.lock().unwrap();
        unsafe { self.collector.raw.try_retire_batch(self.thread) }
    }

    #[inline]
    fn collector(&self) -> &Collector {
        self.collector
    }

    #[inline]
    fn thread_id(&self) -> usize {
        // Returns the *calling* thread's id, not the guard's owned slot —
        // the guard can be used from multiple threads concurrently.
        Thread::current().id
    }

    #[inline]
    unsafe fn defer_retire<T>(&self, ptr: *mut T, reclaim: unsafe fn(*mut T, &Collector)) {
        // SAFETY: see `refresh`.
        let reservation = unsafe { &*self.reservation };
        // Serialize with other threads retiring through this shared guard.
        let _lock = reservation.lock.lock().unwrap();
        unsafe { self.collector.raw.add(ptr, reclaim, self.thread) }
    }
}
impl Drop for OwnedGuard<'_> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: the reservation was created in `enter` and remains valid
        // until the owned slot is freed below; leave *before* freeing.
        let reservation = unsafe { &*self.reservation };
        unsafe { self.collector.raw.leave(reservation) };
        // SAFETY: `self.thread` was allocated by `Thread::create` in `enter`
        // and is owned exclusively by this guard, so freeing it here is sound.
        unsafe { Thread::free(self.thread.id) };
    }
}