use std::ptr::NonNull;
use std::marker::PhantomData;
use std::sync::atomic::{self, AtomicPtr, AtomicUsize, Ordering};
use zerogc::{Trace, GcSafe, GcErase, GcRebrand, GcVisitor, NullTrace, TraceImmutable, GcHandleSystem, GcBindHandle};
use crate::{Gc, WeakCollectorRef, CollectorId, CollectorContext, CollectorRef, CollectionManager};
use crate::collector::RawCollectorImpl;
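/// The initial number of slots in a handle list's first bucket.
///
/// Each subsequent bucket doubles the capacity of the last.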
const INITIAL_HANDLE_CAPACITY: usize = 64;
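/// A collector implementation that supports handles,
/// exposing the type information and handle list they require.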
pub unsafe trait RawHandleImpl: RawCollectorImpl {
type TypeInfo: Sized;
fn type_info_of<T: GcSafe>() -> &'static Self::TypeInfo;
fn handle_list(&self) -> &GcHandleList<Self>;
}
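/// A thread-safe list of the handles registered with a collector.
///
/// Storage is a linked list of progressively larger buckets,
/// with released slots chained into a lock-free free list.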
pub struct GcHandleList<C: RawHandleImpl> {
last_bucket: AtomicPtr<GcHandleBucket<C>>,
last_free_slot: AtomicPtr<HandleSlot<C>>
}
impl<C: RawHandleImpl> GcHandleList<C> {
pub fn new() -> Self {
use std::ptr::null_mut;
GcHandleList {
last_bucket: AtomicPtr::new(null_mut()),
last_free_slot: AtomicPtr::new(null_mut()),
}
}
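    /// Push the specified slot onto the lock-free free list.
    ///
    /// The slot's value must already be null (debug-asserted below),
    /// and the caller must have exclusive logical ownership of the slot.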
unsafe fn append_free_slot(&self, slot: *mut HandleSlot<C>) {
debug_assert_eq!(
(*slot).valid.value
.load(Ordering::SeqCst),
std::ptr::null_mut()
);
let mut last_free = self.last_free_slot
.load(Ordering::Acquire);
loop {
(*slot).freed.prev_free_slot
.store(last_free, Ordering::Release);
match self.last_free_slot.compare_exchange(
last_free, slot,
Ordering::AcqRel,
Ordering::Acquire
) {
Ok(actual) => {
debug_assert_eq!(actual, last_free);
return;
},
Err(actual) => {
last_free = actual;
}
}
}
}
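    /// Allocate a raw handle that points to the given value.
    ///
    /// First attempts to pop a slot off the free list;
    /// if that is empty, falls back to bucket allocation.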
#[inline]
pub(crate) unsafe fn alloc_raw_handle(&self, value: *mut ()) -> &GcRawHandle<C> {
let mut slot = self.last_free_slot.load(Ordering::Acquire);
while !slot.is_null() {
let prev = (*slot).freed.prev_free_slot.load(Ordering::Acquire);
match self.last_free_slot.compare_exchange(
slot, prev,
Ordering::AcqRel,
Ordering::Acquire
) {
Ok(actual_slot) => {
debug_assert_eq!(actual_slot, slot);
debug_assert_eq!(
(*slot).valid.value
.load(Ordering::SeqCst),
std::ptr::null_mut()
);
(*slot).valid.value
.store(value, Ordering::Release);
return &(*slot).valid;
},
Err(actual_slot) => {
slot = actual_slot;
}
}
}
self.alloc_handle_fallback(value)
}
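    /// Fallback used when the free list is empty:
    /// allocate a slot from the bucket list, growing it if needed.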
#[cold]
#[inline(never)]
unsafe fn alloc_handle_fallback(&self, value: *mut ()) -> &GcRawHandle<C> {
let mut bucket = self.last_bucket.load(Ordering::Acquire);
loop {
let new_size: usize;
if bucket.is_null() {
new_size = INITIAL_HANDLE_CAPACITY;
} else {
if let Some(slot) = (*bucket).blindly_alloc_slot(value) {
return &slot.valid;
}
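                // Grow: each new bucket doubles the previous capacity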
new_size = (*bucket).slots.len() * 2;
}
match self.init_bucket(bucket, new_size) {
Ok(new_bucket) | Err(new_bucket) => {
bucket = new_bucket as *const _
as *mut _;
}
}
}
}
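    /// Allocate a new bucket with `desired_size` slots and race to
    /// install it as `last_bucket`.
    ///
    /// Returns `Ok` with our bucket if we won the race, or `Err` with
    /// the winning thread's bucket (our allocation is freed).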
unsafe fn init_bucket(
&self,
prev_bucket: *mut GcHandleBucket<C>,
desired_size: usize
) -> Result<&GcHandleBucket<C>, &GcHandleBucket<C>> {
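        // Zero-fill the slots before marking them initialized:
        // all-zero bytes are a valid `HandleSlot`
        // (null pointers and a zero refcount).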
let mut slots: Vec<HandleSlot<C>> = Vec::with_capacity(desired_size);
slots.as_mut_ptr().write_bytes(0, desired_size);
slots.set_len(desired_size);
let allocated_bucket = Box::into_raw(Box::new(GcHandleBucket {
slots: slots.into_boxed_slice(),
last_alloc: AtomicUsize::new(0),
prev: AtomicPtr::new(prev_bucket),
}));
match self.last_bucket.compare_exchange(
prev_bucket, allocated_bucket,
Ordering::SeqCst,
Ordering::SeqCst
) {
Ok(actual_bucket) => {
assert_eq!(actual_bucket, prev_bucket);
Ok(&*actual_bucket)
},
Err(actual_bucket) => {
drop(Box::from_raw(allocated_bucket));
Err(&*actual_bucket)
}
}
}
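    /// Trace every valid handle in this list, treating them as roots.
    ///
    /// The Acquire/Release fences bracket the relaxed loads below,
    /// synchronizing with handle allocation and release on other threads.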
pub unsafe fn trace<F, E>(&mut self, mut visitor: F) -> Result<(), E>
where F: FnMut(*mut (), &C::TypeInfo) -> Result<(), E> {
atomic::fence(Ordering::Acquire);
let mut bucket = self.last_bucket.load(Ordering::Relaxed);
while !bucket.is_null() {
let slots = &mut *(*bucket).slots;
for slot in slots {
if slot.is_valid(Ordering::Relaxed) {
slot.valid.trace_inner(&mut visitor)?;
}
}
bucket = (*bucket).prev.load(Ordering::Relaxed);
}
atomic::fence(Ordering::Release);
Ok(())
}
}
impl<C: RawHandleImpl> Drop for GcHandleList<C> {
fn drop(&mut self) {
let mut bucket = self.last_bucket.load(Ordering::Acquire);
while !bucket.is_null() {
            unsafe {
                // Load the previous bucket *before* freeing this one,
                // so we never read from freed memory.
                let prev = (*bucket).prev.load(Ordering::Acquire);
                drop(Box::from_raw(bucket));
                bucket = prev;
            }
}
}
}
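/// A fixed-size array of handle slots, linked to the previous bucket.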
struct GcHandleBucket<C: RawHandleImpl> {
slots: Box<[HandleSlot<C>]>,
last_alloc: AtomicUsize,
prev: AtomicPtr<GcHandleBucket<C>>
}
impl<C: RawHandleImpl> GcHandleBucket<C> {
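    /// Claim a free slot in this bucket, or return `None` if it is full.
    ///
    /// Scans forward from the last successful allocation,
    /// "blindly" ignoring the global free list.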
unsafe fn blindly_alloc_slot(&self, value: *mut ()) -> Option<&HandleSlot<C>> {
let last_alloc = self.last_alloc.load(Ordering::Relaxed);
for (i, slot) in self.slots.iter().enumerate()
.skip(last_alloc) {
if slot.valid.value.compare_exchange(
std::ptr::null_mut(), value,
Ordering::AcqRel,
Ordering::Relaxed
).is_ok() {
self.last_alloc.fetch_max(i, Ordering::AcqRel);
return Some(&*slot);
}
}
None
}
}
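/// A handle slot, which is either freed (on the free list) or valid.
///
/// Both variants are `#[repr(C)]` with an `AtomicPtr` as their first
/// field, which serves as the discriminant: null means freed.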
#[repr(C)]
pub union HandleSlot<C: RawHandleImpl> {
freed: FreedHandleSlot<C>,
valid: GcRawHandle<C>
}
impl<C: RawHandleImpl> HandleSlot<C> {
#[inline]
fn is_valid(&self, ord: Ordering) -> bool {
unsafe { !self.valid.value.load(ord).is_null() }
}
}
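/// A slot sitting on the free list.
///
/// `_invalid_value` overlays `GcRawHandle::value`
/// and must remain null while the slot is free.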
#[repr(C)]
pub struct FreedHandleSlot<C: RawHandleImpl> {
_invalid_value: AtomicPtr<()>,
prev_free_slot: AtomicPtr<HandleSlot<C>>
}
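/// The type-erased internals of a handle: the pointer to the value,
/// its type information, and the number of `GcHandle`s referring to it.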
#[repr(C)]
pub struct GcRawHandle<C: RawHandleImpl> {
value: AtomicPtr<()>,
pub(crate) type_info: AtomicPtr<C::TypeInfo>,
pub(crate) refcnt: AtomicUsize
}
impl<C: RawHandleImpl> GcRawHandle<C> {
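    /// Trace this handle, skipping it if it has been freed.
    ///
    /// Relaxed loads suffice here: callers bracket this call
    /// in fences (see `GcHandleList::trace`).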
unsafe fn trace_inner<F, E>(&self, trace: &mut F) -> Result<(), E>
where F: FnMut(*mut (), &C::TypeInfo) -> Result<(), E> {
let value = self.value.load(Ordering::Relaxed);
if value.is_null() {
debug_assert_eq!(
self.refcnt.load(Ordering::Relaxed),
0
);
return Ok(());
}
debug_assert_ne!(
self.refcnt.load(Ordering::Relaxed),
0
);
let type_info = &*self.type_info.load(Ordering::Relaxed);
trace(value, type_info)
}
}
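/// A strong handle to a garbage-collected object,
/// keeping it alive even without an active context.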
pub struct GcHandle<T: GcSafe, C: RawHandleImpl> {
inner: NonNull<GcRawHandle<C>>,
collector: WeakCollectorRef<C>,
marker: PhantomData<*mut T>
}
impl<T: GcSafe, C: RawHandleImpl> GcHandle<T, C> {
#[inline]
pub(crate) unsafe fn new(
inner: NonNull<GcRawHandle<C>>,
collector: WeakCollectorRef<C>
) -> Self {
GcHandle {
inner, collector,
marker: PhantomData
}
}
}
unsafe impl<T: GcSafe, C: RawHandleImpl> ::zerogc::GcHandle<T> for GcHandle<T, C> {
type System = CollectorRef<C>;
type Id = CollectorId<C>;
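    // Access the value while blocking collection,
    // so the pointer cannot be invalidated mid-use.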
fn use_critical<R>(&self, func: impl FnOnce(&T) -> R) -> R {
self.collector.ensure_valid(|collector| unsafe {
C::Manager::prevent_collection(collector.as_ref(), || {
let value = self.inner.as_ref().value
.load(Ordering::Acquire) as *mut T;
func(&*value)
})
})
}
}
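// Binding re-reads the current pointer and rebrands it with the new
// context's lifetime; the handle must belong to the same collector.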
unsafe impl<'new_gc, T, C> GcBindHandle<'new_gc, T> for GcHandle<T, C>
where T: GcSafe, T: GcRebrand<'new_gc, CollectorId<C>>,
T::Branded: GcSafe, C: RawHandleImpl {
#[inline]
fn bind_to(&self, context: &'new_gc CollectorContext<C>) -> Gc<'new_gc, T::Branded, CollectorId<C>> {
unsafe {
let collector = self.collector.assume_valid();
assert_eq!(
collector.as_ref() as *const C,
context.collector() as *const C,
"Collectors mismatch"
);
let inner = self.inner.as_ref();
let value = inner.value.load(Ordering::Acquire)
as *mut T as *mut T::Branded;
debug_assert!(!value.is_null());
Gc::from_raw(
collector,
NonNull::new_unchecked(value)
)
}
}
}
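// Handles are traced as roots through the collector's handle list,
// never through `Trace`, so these impls are no-ops.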
unsafe impl<T: GcSafe, C: RawHandleImpl> Trace for GcHandle<T, C> {
const NEEDS_TRACE: bool = false;
#[inline(always)]
fn visit<V>(&mut self, _visitor: &mut V) -> Result<(), V::Err>
where V: zerogc::GcVisitor {
Ok(())
}
}
unsafe impl<T: GcSafe, C: RawHandleImpl> TraceImmutable for GcHandle<T, C> {
#[inline(always)]
fn visit_immutable<V>(&self, _visitor: &mut V) -> Result<(), V::Err>
where V: GcVisitor {
Ok(())
}
}
unsafe impl<T: GcSafe, C: RawHandleImpl> NullTrace for GcHandle<T, C> {}
impl<T: GcSafe, C: RawHandleImpl> Clone for GcHandle<T, C> {
fn clone(&self) -> Self {
let collector = self.collector
.ensure_valid(|id| unsafe { id.weak_ref() });
let inner = unsafe { self.inner.as_ref() };
debug_assert!(
!inner.value
.load(Ordering::SeqCst)
.is_null(),
"Pointer is invalid"
);
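        // Increment the reference count,
        // guarding against overflow past isize::MAX.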
let mut old_refcnt = inner.refcnt.load(Ordering::Relaxed);
loop {
            assert_ne!(
                old_refcnt, isize::MAX as usize,
                "Reference count overflow"
            );
match inner.refcnt.compare_exchange_weak(
old_refcnt, old_refcnt + 1,
Ordering::AcqRel,
Ordering::Relaxed
) {
Ok(_) => break,
Err(val) => {
old_refcnt = val;
}
}
}
GcHandle {
inner: self.inner,
collector, marker: PhantomData
}
}
}
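// Dropping a handle decrements the reference count; the last handle
// invalidates the value and returns the slot to the free list.
// If the collector itself is dead, there is nothing left to free.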
impl<T: GcSafe, C: RawHandleImpl> Drop for GcHandle<T, C> {
fn drop(&mut self) {
self.collector.try_ensure_valid(|id| {
let collector = match id {
None => {
return;
},
Some(ref id) => unsafe { id.as_ref() },
};
let inner = unsafe { self.inner.as_ref() };
debug_assert!(!inner.value
.load(Ordering::SeqCst)
.is_null(),
"Pointer already invalid"
);
let prev = inner.refcnt
.fetch_sub(1, Ordering::AcqRel);
            match prev {
                0 => {
                    // The refcnt was already zero: this should be impossible
                    eprintln!("GcHandle refcnt underflow");
                    std::process::abort();
                },
                1 => {
                    // We were the last handle: fall through and free the slot
                },
                _ => return, // Other handles remain: the slot stays live
            }
            // Mark the value as invalid (freed)
            inner.value.store(
                std::ptr::null_mut(),
                Ordering::Release
            );
unsafe {
collector.handle_list().append_free_slot(
self.inner.as_ptr() as *mut HandleSlot<C>
);
}
});
}
}
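// Safety: all access to a slot goes through atomics, and `use_critical`
// hands out `&T` on any thread, so both `T` and the collector must be `Sync`.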
unsafe impl<T: GcSafe + Sync, C: RawHandleImpl + Sync> Send for GcHandle<T, C> {}
unsafe impl<T: GcSafe + Sync, C: RawHandleImpl + Sync> Sync for GcHandle<T, C> {}
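// Creating a handle allocates a raw slot, then initializes its
// type info and reference count before wrapping it.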
unsafe impl<'gc, 'a, T, C> GcHandleSystem<'gc, 'a, T> for CollectorRef<C>
where C: RawHandleImpl,
T: GcSafe + 'gc,
T: GcErase<'a, CollectorId<C>>,
T::Erased: GcSafe {
type Handle = GcHandle<T::Erased, C>;
#[inline]
fn create_handle(gc: Gc<'gc, T, CollectorId<C>>) -> Self::Handle {
unsafe {
let collector = gc.collector_id();
let value = gc.as_raw_ptr();
let raw = collector.as_ref().handle_list()
.alloc_raw_handle(value as *mut ());
raw.type_info.store(
C::type_info_of::<T>()
as *const C::TypeInfo
as *mut C::TypeInfo,
Ordering::Release
);
raw.refcnt.store(1, Ordering::Release);
let weak_collector = collector.weak_ref();
GcHandle::new(NonNull::from(raw), weak_collector)
}
}
}