use std::alloc::{Layout, alloc, dealloc};
use std::mem::forget;
use std::panic::catch_unwind;
use std::ptr::null_mut;
use std::sync::atomic::AtomicPtr;
use std::sync::atomic::Ordering::{Acquire, Relaxed, Release};
use super::collector::Collector;
use super::link::Link;
use super::ref_counted::RefCounted;
use super::{Guard, Owned, Shared};
/// A per-instance (private) garbage collector front-end.
///
/// Retired [`Owned`]/[`Shared`] instances are handed to a [`Guard`] for
/// deferred reclamation, keyed by a unique identifier derived from a
/// heap allocation owned by this collector.
#[derive(Debug)]
pub struct PrivateCollector {
    // Lazily allocated 1-byte heap cell whose *address* serves as this
    // collector's unique identifier (see `alloc_id`). Null until first use.
    id: AtomicPtr<u8>,
    // Head of a singly-linked list of `Link`s that could not be handed to
    // the `Guard` (e.g., `collect_private` panicked); drained on the next
    // `push` via `export_then_splice`, or deallocated on drop.
    backup_garbage_bag: AtomicPtr<Link>,
}
impl PrivateCollector {
    /// Creates a new collector with no identifier allocated and an empty
    /// backup garbage bag.
    #[inline]
    #[must_use]
    pub const fn new() -> Self {
        Self {
            id: AtomicPtr::new(null_mut()),
            backup_garbage_bag: AtomicPtr::new(null_mut()),
        }
    }

    /// Retires an [`Owned`] instance: ownership of the underlying allocation
    /// is transferred to this collector for deferred reclamation.
    ///
    /// # Safety
    ///
    /// The caller must uphold whatever invariants `Guard::collect_private`
    /// and `RefCounted::link_ptr` require for the retired instance —
    /// presumably that no other references to it remain; confirm against
    /// those definitions.
    #[inline]
    pub unsafe fn collect_owned<T>(&self, owned: Owned<T>, guard: &Guard) {
        let ptr = RefCounted::link_ptr(owned.underlying_ptr()).cast_mut();
        // Suppress `Owned`'s destructor: the allocation now lives on as a
        // `Link` in the collector's garbage chain.
        forget(owned);
        self.push(ptr, guard);
    }

    /// Retires a [`Shared`] instance by dropping one reference; if that was
    /// the last reference (as reported by `drop_ref`), the allocation is
    /// pushed for deferred reclamation and `true` is returned.
    ///
    /// # Safety
    ///
    /// The caller must uphold the same invariants as [`Self::collect_owned`];
    /// confirm against `RefCounted::drop_ref`.
    #[inline]
    #[must_use]
    pub unsafe fn collect_shared<T>(&self, shared: Shared<T>, guard: &Guard) -> bool {
        let ptr = shared.underlying_ptr();
        // Suppress `Shared`'s destructor; the reference count is adjusted
        // manually via `drop_ref` below.
        forget(shared);
        // NOTE(review): `drop_ref` presumably returns `true` when the last
        // reference was dropped — verify against `RefCounted`.
        if unsafe { (*ptr).drop_ref() } {
            self.push(RefCounted::link_ptr(ptr).cast_mut(), guard);
            true
        } else {
            false
        }
    }

    /// Hands a chain of garbage `Link`s to the guard; any links left over
    /// from a previous failed attempt (the backup bag) are spliced in first.
    #[inline]
    fn push(&self, ptr: *mut Link, guard: &Guard) {
        // Fast path: no leftover garbage, so the chain is just `ptr` itself.
        let (head, tail, len) = if self.backup_garbage_bag.load(Relaxed).is_null() {
            (ptr, ptr, 1)
        } else {
            self.export_then_splice(ptr)
        };
        // `collect_private` may panic (e.g., on allocation failure inside the
        // guard — TODO confirm); if it does, stash the chain in the backup
        // bag instead of leaking it.
        if catch_unwind(|| {
            let id = self.id();
            guard.collect_private(head, tail, len, id);
        })
        .is_err()
        {
            self.fallback(head, tail);
        }
    }

    /// Returns this collector's unique identifier, allocating it on first use.
    #[inline]
    fn id(&self) -> usize {
        let id = self.id.load(Relaxed);
        if id.is_null() {
            self.alloc_id()
        } else {
            // The address of the allocated byte *is* the identifier.
            id.addr()
        }
    }

    /// Slow path of [`Self::id`]: allocates a 1-byte heap cell and publishes
    /// its address as the identifier, racing against concurrent callers.
    ///
    /// # Panics
    ///
    /// Panics if the global allocator fails.
    #[cold]
    #[inline(never)]
    fn alloc_id(&self) -> usize {
        // SAFETY: `Layout::new::<u8>()` has non-zero size, satisfying the
        // `alloc` contract; the null check below handles allocation failure.
        let new_id = unsafe { alloc(Layout::new::<u8>()) };
        assert!(!new_id.is_null(), "Memory allocation failed");
        // SAFETY: `new_id` is non-null and valid for a one-byte write.
        unsafe {
            new_id.write(0);
        }
        // Publish with `Release` so the initialized byte is visible to any
        // thread that later observes the pointer.
        match self
            .id
            .compare_exchange(null_mut(), new_id, Release, Relaxed)
        {
            Ok(_) => new_id.addr(),
            Err(id) => {
                // Lost the race: free our allocation and adopt the winner's id.
                // SAFETY: `new_id` was allocated above with this exact layout
                // and was never published.
                unsafe {
                    dealloc(new_id, Layout::new::<u8>());
                }
                id.addr()
            }
        }
    }

    /// Prepends the chain `[head, .., tail]` onto the backup garbage bag,
    /// preserving it for a later retry after `collect_private` panicked.
    #[cold]
    #[inline(never)]
    fn fallback(&self, head: *mut Link, tail: *mut Link) {
        let result = self
            .backup_garbage_bag
            .fetch_update(Release, Relaxed, |ptr| unsafe {
                // Hook the current bag head onto the chain's tail, then make
                // `head` the new bag head.
                (*tail).set_next_ptr(ptr, Relaxed);
                Some(head)
            })
            .is_ok();
        // The closure always returns `Some`, so `fetch_update` cannot fail.
        debug_assert!(result);
    }

    /// Atomically takes the backup bag, appends `ptr` at its tail, and
    /// returns the combined chain as `(head, tail, length)`.
    #[cold]
    #[inline(never)]
    fn export_then_splice(&self, ptr: *mut Link) -> (*mut Link, *mut Link, usize) {
        // Swap the bag to null unless it is already empty; the `Acquire`
        // fetch ordering pairs with the `Release` publish in `fallback`.
        let Ok(head) = self
            .backup_garbage_bag
            .fetch_update(Relaxed, Acquire, |ptr| {
                if ptr.is_null() {
                    None
                } else {
                    Some(null_mut())
                }
            })
        else {
            // Bag was emptied concurrently — TODO confirm whether another
            // thread can race here; fall back to a single-node chain.
            return (ptr, ptr, 1);
        };
        let mut tail = head;
        // Starts at 2: one for `head`, one for `ptr` (appended below); each
        // intermediate node adds one more.
        let mut link_len = 2;
        loop {
            let next = unsafe { (*tail).next_ptr(Relaxed) };
            if next.is_null() {
                break;
            }
            link_len += 1;
            tail = next;
        }
        // SAFETY: `tail` is the last node of a chain exclusively owned by
        // this call (the bag was swapped to null above).
        unsafe {
            (*tail).set_next_ptr(ptr, Relaxed);
            // `ptr` is now the new tail of the combined chain.
            (head, ptr, link_len)
        }
    }
}
impl Default for PrivateCollector {
#[inline]
fn default() -> Self {
Self::new()
}
}
impl Drop for PrivateCollector {
    /// Releases the collector's identifier — purging any garbage still
    /// registered under it — and deallocates any links left in the backup bag.
    #[inline]
    fn drop(&mut self) {
        // Take exclusive ownership of the id cell; `Acquire` pairs with the
        // `Release` publish in `alloc_id`.
        let id = self.id.swap(null_mut(), Acquire);
        if !id.is_null() {
            let guard = Guard::new();
            // Flush/reclaim garbage registered under this id — presumably
            // so the id's address can be safely reused; confirm against
            // `Guard::purge`.
            guard.purge(id.addr());
            // SAFETY: `id` was allocated in `alloc_id` with this exact layout
            // and is no longer published.
            unsafe {
                dealloc(id, Layout::new::<u8>());
            }
        }
        // Deallocate any chain stranded in the backup bag; `Collector::dealloc`
        // presumably accepts a null pointer for the empty case — confirm.
        let link = self.backup_garbage_bag.swap(null_mut(), Acquire);
        Collector::dealloc(link);
    }
}