use core::ops::Deref;
use core::ptr::NonNull;
use crossbeam_epoch::{pin, Guard};
use crossbeam_queue::SegQueue;
use std::{cell::RefCell, mem::ManuallyDrop};
#[cfg(not(loom))]
use core::sync::atomic::{AtomicUsize, Ordering};
#[cfg(loom)]
use loom::sync::atomic::{AtomicUsize, Ordering};
/// A generation counter is a leaked, process-lifetime atomic; only references
/// to it circulate, and they are recycled rather than freed.
type GenerationCounter = &'static AtomicUsize;
/// Cross-thread overflow store: whole blocks of recyclable counters spilled
/// from thread-local caches.
static GLOBAL_RECYCLER: SegQueue<[GenerationCounter; BLOCK_SIZE]> = SegQueue::new();
thread_local! {
    /// Per-thread cache of recyclable counters. Sized to two blocks so a full
    /// `BLOCK_SIZE` chunk can be spilled to `GLOBAL_RECYCLER` without the
    /// `Vec` ever reallocating (see `recycle_generation_counter`).
    static LOCAL_RECYCLER: RefCell<Vec<GenerationCounter>> = RefCell::new(Vec::with_capacity(BLOCK_SIZE*2));
}
// Smaller blocks under loom keep the model-checked state space tractable.
#[cfg(loom)]
const BLOCK_SIZE: usize = 16;
#[cfg(not(loom))]
const BLOCK_SIZE: usize = 256;
/// Hand out a `GenerationCounter`, preferring recycled ones.
///
/// Lookup order: the thread-local cache, then a whole block popped from the
/// global queue (keeping the leftovers locally), and finally a freshly leaked
/// block of `BLOCK_SIZE` zero-initialized atomics. Counters are intentionally
/// leaked so the `&'static` references stay valid forever.
pub(crate) fn new_generation_counter() -> GenerationCounter {
    LOCAL_RECYCLER.with_borrow_mut(|cache| {
        if let Some(counter) = cache.pop() {
            counter
        } else if let Some([first, rest @ ..]) = GLOBAL_RECYCLER.pop() {
            // Keep the remainder of the global block in the local cache.
            cache.extend(rest);
            first
        } else {
            // Both pools are empty: mint a new block and leak it.
            let minted: &'static [AtomicUsize; BLOCK_SIZE] =
                Box::leak(Box::new(std::array::from_fn(|_| AtomicUsize::new(0))));
            let (last, init) = minted.split_last().unwrap();
            cache.extend(init);
            last
        }
    })
}
/// Return a counter to the recycling pool.
///
/// The counter goes into the thread-local cache; when that cache is full, a
/// whole block is first spilled to `GLOBAL_RECYCLER` so the push below never
/// forces the `Vec` to reallocate.
pub(crate) fn recycle_generation_counter(counter: GenerationCounter) {
    LOCAL_RECYCLER.with_borrow_mut(|cache| {
        let cache_is_full = cache.len() == cache.capacity();
        if cache_is_full {
            let spill: [GenerationCounter; BLOCK_SIZE] =
                std::array::from_fn(|_| cache.pop().unwrap());
            GLOBAL_RECYCLER.push(spill);
        }
        cache.push(counter);
    })
}
/// Drain both recycling pools (thread-local and global).
///
/// The counters themselves remain leaked and alive; this only empties the
/// caches, so subsequently requested counters come from fresh blocks.
#[allow(unused)]
pub(crate) fn empty_recycler() {
    LOCAL_RECYCLER.with_borrow_mut(|cache| cache.clear());
    loop {
        if GLOBAL_RECYCLER.pop().is_none() {
            break;
        }
    }
}
/// Test hook: number of counters currently cached by this thread.
#[cfg(test)]
pub(crate) fn local_recycler_len() -> usize {
    LOCAL_RECYCLER.with(|cache| cache.borrow().len())
}
/// Test hook: number of full blocks parked in the global queue.
#[cfg(test)]
pub(crate) fn global_recycler_len() -> usize {
    GLOBAL_RECYCLER.len()
}
/// Abstraction over owning smart pointers that can round-trip through a raw
/// `NonNull` pointer (used by [`Own`] to stash and later reclaim the pointee).
pub trait IsPtr {
    /// The pointee type.
    type T: ?Sized;
    /// Consume the pointer, returning its raw address without dropping the
    /// pointee.
    fn into_raw_ptr(this: Self) -> NonNull<Self::T>;
    /// Rebuild the owning pointer from a raw address.
    ///
    /// # Safety
    /// `ptr` must have been produced by [`IsPtr::into_raw_ptr`] on the same
    /// pointer type (this is how `Own::clone_ptr` and the kill path use it),
    /// and ownership must not be reconstructed more than once concurrently.
    unsafe fn from_raw_ptr(ptr: NonNull<Self::T>) -> Self;
    /// Hook invoked when the owning [`Own`] is killed, just before the pointee
    /// is scheduled for deferred destruction. Default: no-op.
    fn on_killed(_this: &Self) {}
}
/// Unique owner of a pointee, layout-compatible with the [`Ref`] it wraps
/// (`#[repr(transparent)]`).
///
/// Killing or dropping an `Own` bumps its generation counter — invalidating
/// every outstanding [`Ref`] at once — and defers destruction of the pointee
/// through the crossbeam epoch reclaimer.
#[repr(transparent)]
pub struct Own<P: IsPtr + Send + 'static> {
    // NOTE(review): public but doc-hidden — presumably for internal/macro use.
    // Mutating it breaks the kill invariant (see the panic in `kill_mut`).
    #[doc(hidden)]
    pub _weak: Ref<P::T>,
}
impl<P: IsPtr + Send + 'static> Own<P> {
pub fn new(ptr: P) -> Self {
Self::new_reuse(new_generation_counter(), ptr)
}
pub fn new_from<R: IsPtr + Send + 'static>(ptr: P, other: Own<R>) -> Self {
Self::new_reuse(other.kill(&pin()).unwrap(), ptr)
}
pub fn refer(&self) -> Ref<P::T> {
self._weak
}
pub fn clone_ptr(this: &Self) -> P
where
P: Clone,
{
let original = ManuallyDrop::new(unsafe { P::from_raw_ptr(this._weak.pointer.unwrap()) });
(*original).clone()
}
fn new_reuse(current_gen: GenerationCounter, ptr: P) -> Self {
let pointer = Some(P::into_raw_ptr(ptr));
let expected_gen = current_gen.load(Ordering::Acquire);
Own {
_weak: Ref {
current_gen,
expected_gen,
pointer,
},
}
}
fn kill(self, guard: &Guard) -> Option<GenerationCounter> {
let mut this = ManuallyDrop::new(self);
unsafe { this.kill_mut(guard) }
}
unsafe fn kill_mut(&mut self, guard: &Guard) -> Option<GenerationCounter> {
let new_gen = self._weak.expected_gen + 1;
if self
._weak
.current_gen
.compare_exchange(
self._weak.expected_gen,
new_gen,
Ordering::AcqRel,
Ordering::Relaxed,
)
.is_err()
{
panic!("Tried to drop a dead reference. Did you mutate Own._weak?");
}
let ptr = unsafe { P::from_raw_ptr(self._weak.pointer.take().unwrap()) };
P::on_killed(&ptr);
guard.defer(move || drop(ptr));
if new_gen != usize::MAX {
Some(self._weak.current_gen)
} else {
None
}
}
}
impl<P: IsPtr + Send + 'static> Drop for Own<P> {
    /// Dropping kills the reference and recycles its counter when possible.
    fn drop(&mut self) {
        let guard = pin();
        // SAFETY: `drop` runs at most once, satisfying `kill_mut`'s contract.
        let recycled = unsafe { self.kill_mut(&guard) };
        if let Some(counter) = recycled {
            recycle_generation_counter(counter);
        }
    }
}
impl<P: IsPtr + Send + 'static> Deref for Own<P> {
    type Target = P::T;

    fn deref(&self) -> &Self::Target {
        let ptr = self._weak.pointer.unwrap();
        // SAFETY: while the `Own` is alive, `pointer` is `Some` (it is only
        // taken in `kill_mut`) and the pointee has not been reclaimed.
        unsafe { ptr.as_ref() }
    }
}
// SAFETY(review): `Own` hands out `&P::T` via `Deref`, so sharing it across
// threads needs `P::T: Sync`; `P: Send` lets the deferred destructor run on
// another thread. NOTE: `Send` here requires only `P::T: Sync`, not
// `P::T: Send` — presumably because destruction is deferred via the epoch
// reclaimer rather than run on the moving thread; confirm this is intentional.
unsafe impl<P: IsPtr + Send> Send for Own<P> where P::T: Sync {}
unsafe impl<P: IsPtr + Send> Sync for Own<P> where P::T: Sync {}
/// Convert a shared reference into a `NonNull` pointer.
///
/// References are never null, so this conversion is infallible by
/// construction — no runtime check needed.
fn non_null_from_ref<T: ?Sized>(r: &T) -> NonNull<T> {
    NonNull::from(r)
}
/// A copyable weak reference: a pointer paired with the generation it expects.
///
/// The `Ref` dereferences (through [`Ref::get`]) only while the shared counter
/// still holds `expected_gen`; killing the owning [`Own`] increments the
/// counter, invalidating every copy of this `Ref` at once.
#[repr(C)]
pub struct Ref<T: ?Sized> {
    // Shared, leaked atomic holding the pointee's current generation.
    current_gen: GenerationCounter,
    // Snapshot of the generation at creation time.
    expected_gen: usize,
    // `None` marks a null `Ref` or a projection that returned no value.
    pointer: Option<NonNull<T>>,
}
// SAFETY(review): a `Ref` only ever yields `&T`, so `T: Sync` suffices for
// both `Send` and `Sync`; the generation counter is an atomic and is safe to
// access from any thread.
unsafe impl<T: Sync + ?Sized> Send for Ref<T> {}
unsafe impl<T: Sync + ?Sized> Sync for Ref<T> {}
// `Ref` is plain data (a static reference, a word, and an optional pointer),
// so it is freely copyable; copies share the same counter and are invalidated
// together. Manual impls because `derive` would wrongly require `T: Clone`.
impl<T: ?Sized> Clone for Ref<T> {
    fn clone(&self) -> Self {
        *self
    }
}
impl<T: ?Sized> Copy for Ref<T> {}
impl<T: ?Sized> Ref<T> {
    /// Dereference the `Ref` if it is still alive.
    ///
    /// The returned borrow is tied (via lifetime elision) to `_guard`: the
    /// pinned epoch guard is what keeps a concurrently-killed pointee from
    /// being reclaimed while the `&T` is in use (the `Own` kill path defers
    /// destruction with `guard.defer`).
    pub fn get(self, _guard: &Guard) -> Option<&T> {
        let current_gen = self.current_gen.load(Ordering::Acquire);
        if current_gen == self.expected_gen {
            // Generation matched under the pinned guard, so destruction of
            // the pointee is deferred past this borrow.
            Some(unsafe { self.pointer?.as_ref() })
        } else {
            None
        }
    }
    /// Run `func` on the pointee if it is alive, pinning a guard internally
    /// for the duration of the call.
    pub fn inspect<O>(self, func: impl FnOnce(&T) -> O) -> Option<O> {
        self.get(&pin()).map(func)
    }
    /// Project this `Ref` to a component of the pointee (e.g. a field).
    /// The result shares this `Ref`'s generation and dies together with it.
    pub fn map<R: ?Sized>(self, func: impl FnOnce(&T) -> &R) -> Ref<R> {
        self.map_with(func, &pin())
    }
    /// Project via raw pointers without checking liveness first.
    ///
    /// # Safety
    /// `func` must return a pointer that remains valid exactly as long as the
    /// original pointee (e.g. an in-bounds field projection): the result is
    /// dereferenced later under the same generation check as `self`. A null
    /// pointer from `func` collapses the result into a null `Ref`.
    pub unsafe fn map_unchecked<R: ?Sized>(
        self,
        func: impl FnOnce(*const T) -> *const R,
    ) -> Ref<R> {
        Ref {
            current_gen: self.current_gen,
            expected_gen: self.expected_gen,
            pointer: match self.pointer {
                Some(ptr) => NonNull::new(func(ptr.as_ptr()).cast_mut()),
                None => None,
            },
        }
    }
    /// Like [`Ref::map`], but under a caller-supplied guard. If `self` is
    /// already dead, the result is a null (never-alive) `Ref`.
    pub fn map_with<R: ?Sized>(&self, func: impl FnOnce(&T) -> &R, guard: &Guard) -> Ref<R> {
        Ref {
            current_gen: self.current_gen,
            expected_gen: self.expected_gen,
            pointer: match self.get(guard) {
                Some(value) => Some(non_null_from_ref(func(value))),
                None => None,
            },
        }
    }
    /// Like [`Ref::map`], but `func` may decline the projection by returning
    /// `None`, producing a null `Ref`.
    pub fn filter_map<R: ?Sized>(self, func: impl FnOnce(&T) -> Option<&R>) -> Ref<R> {
        self.filter_map_with(func, &pin())
    }
    /// Like [`Ref::filter_map`], but under a caller-supplied guard.
    pub fn filter_map_with<R: ?Sized>(
        &self,
        func: impl FnOnce(&T) -> Option<&R>,
        guard: &Guard,
    ) -> Ref<R> {
        Ref {
            current_gen: self.current_gen,
            expected_gen: self.expected_gen,
            pointer: match self.get(guard) {
                Some(value) => func(value).map(non_null_from_ref),
                None => None,
            },
        }
    }
    /// Snapshot of liveness: the generation still matches and the pointer is
    /// present. Relaxed load — the answer may be stale by the time it is used;
    /// use [`Ref::get`] for an access that is actually protected.
    pub fn is_alive(&self) -> bool {
        let current_gen = self.current_gen.load(Ordering::Relaxed);
        current_gen == self.expected_gen && self.pointer.is_some()
    }
    /// True when this `Ref` carries no pointer at all (e.g. [`Ref::null`] or a
    /// declined projection).
    pub fn is_null(&self) -> bool {
        self.pointer.is_none()
    }
    /// A `Ref` that is never alive: its expected generation (0) can never
    /// match the shared static counter, which is pinned at `usize::MAX`.
    #[cfg(not(loom))]
    pub const fn null() -> Self {
        static STATIC_GEN: AtomicUsize = AtomicUsize::new(usize::MAX);
        Ref {
            current_gen: &STATIC_GEN,
            expected_gen: 0,
            pointer: None,
        }
    }
    /// Raw introspection: the stored pointer (if any) and the expected
    /// generation. NOTE(review): presumably for debugging/identity checks —
    /// the pointer must not be dereferenced without a liveness check.
    pub fn about(&self) -> (Option<NonNull<T>>, usize) {
        (self.pointer, self.expected_gen)
    }
}