use super::ref_count::ThreadedRefCount;
use super::ThreadedCc;
use crate::cc::CcDummy;
use crate::cc::CcDyn;
use crate::collect;
use crate::collect::AbstractObjectSpace;
use crate::collect::Linked;
use crate::debug;
use crate::Trace;
use parking_lot::Mutex;
use parking_lot::RwLock;
use std::cell::Cell;
use std::mem;
use std::ops::Deref;
use std::pin::Pin;
use std::sync::Arc;
/// Intrusive list node that precedes each tracked value in memory.
///
/// `#[repr(C)]` pins the layout so the tracked value can be found at a
/// fixed offset directly after the header — `Linked::value` computes the
/// value's address as `(self as *const Self).offset(1)`.
#[repr(C)]
pub struct Header {
    /// Next node in the circular list. Null while unlinked: `empty_header`
    /// starts it null, `remove` nulls it, and `insert` asserts it is null.
    next: Cell<*const Header>,
    /// Previous node in the circular list. Left stale (non-null) by
    /// `remove`; only `next` serves as the "unlinked" marker.
    prev: Cell<*const Header>,
    /// Vtable half of the `&dyn CcDyn` fat pointer for the value stored
    /// after this header; saved by `insert`, recombined by `Linked::value`.
    ccdyn_vptr: *const (),
    /// Lock shared with the owning space; serializes all list mutation.
    linked_list_lock: Arc<Mutex<()>>,
}
/// Thread-safe object space: owns the circular linked list of tracked
/// objects plus the locks that guard list mutation and cycle collection.
pub struct ThreadedObjectSpace {
    /// Pinned sentinel head of the circular list. Pinned because linked
    /// nodes hold raw pointers back into it (wired up in `Default`).
    list: Pin<Box<Header>>,
    /// Write-locked for the duration of `collect_cycles`; cloned into
    /// every ref count via `new_ref_count` so drops can synchronize with
    /// a running collection.
    collector_lock: Arc<RwLock<()>>,
}
// SAFETY: the raw `Cell` pointers inside `Header` are only read or written
// while `linked_list_lock` is held (see `remove`, `count_tracked`,
// `collect_cycles`, `create`; `insert` asserts the caller holds it), so
// sharing the space across threads does not race on the list.
// NOTE(review): soundness also requires every external mutation path to
// take that lock — confirm against `ThreadedCc` / `collect` internals.
unsafe impl Send for ThreadedObjectSpace {}
unsafe impl Sync for ThreadedObjectSpace {}
impl AbstractObjectSpace for ThreadedObjectSpace {
    type RefCount = ThreadedRefCount;
    type Header = Header;

    /// Link `header` (whose value is `value`) into the list right after
    /// the sentinel head.
    ///
    /// The caller must already hold `linked_list_lock` (see `create`);
    /// this is what the `try_lock().is_none()` assertion below checks.
    fn insert(&self, header: &mut Self::Header, value: &dyn CcDyn) {
        // The header must have been created by this space — it shares
        // the same list lock (see `empty_header`).
        debug_assert!(Arc::ptr_eq(
            &header.linked_list_lock,
            &self.list.linked_list_lock
        ));
        // parking_lot mutexes are not reentrant, so a failed try_lock
        // here means the lock is held — expected to be by our caller.
        debug_assert!(self.list.linked_list_lock.try_lock().is_none());
        let prev: &Header = &self.list;
        debug_assert!(!collect::is_collecting(prev));
        // Null `next` marks an unlinked header (see `remove` /
        // `empty_header`); the node must not already be in a list.
        debug_assert!(header.next.get().is_null());
        let next = prev.next.get();
        header.prev.set(prev.deref());
        header.next.set(next);
        unsafe {
            // SAFETY: `next` is non-null and live — the list is circular,
            // pointing back at the sentinel head when otherwise empty.
            (&*next).prev.set(header);
            // Split the `&dyn CcDyn` fat pointer into its two halves and
            // stash the vtable half; `Linked::value` recombines it with
            // the data address derived from the header's location.
            // NOTE(review): relies on the (data, vtable) fat-pointer
            // layout, which the language does not guarantee — same
            // assumption as `Linked::value` below.
            let fat_ptr: [*mut (); 2] = mem::transmute(value);
            header.ccdyn_vptr = fat_ptr[1];
        }
        // Publish the node only after its own links are set.
        prev.next.set(header);
    }

    /// Unlink `header` from the list, leaving a null `next` as the
    /// "not linked" marker. Unlike `insert`, takes the list lock itself.
    #[inline]
    fn remove(header: &Self::Header) {
        let _linked_list_lock = header.linked_list_lock.lock();
        let header: &Header = &header;
        debug_assert!(!collect::is_collecting(header));
        debug_assert!(!header.next.get().is_null());
        debug_assert!(!header.prev.get().is_null());
        let next = header.next.get();
        let prev = header.prev.get();
        unsafe {
            // SAFETY: both neighbors are non-null (asserted above) and
            // the list lock keeps them alive during the splice.
            (*prev).next.set(next);
            (*next).prev.set(prev);
        }
        // `prev` is deliberately left stale; null `next` alone marks the
        // header as unlinked.
        header.next.set(std::ptr::null_mut());
    }

    /// Create a ref count that shares this space's collector lock, so a
    /// decref can coordinate with a concurrent `collect_cycles`.
    #[inline]
    fn new_ref_count(&self, tracked: bool) -> Self::RefCount {
        ThreadedRefCount::new(tracked, self.collector_lock.clone())
    }

    /// Produce an unlinked header (null links) bound to this space's list
    /// lock, with a dummy vtable until `insert` records the real one.
    fn empty_header(&self) -> Self::Header {
        let linked_list_lock = self.list.linked_list_lock.clone();
        Self::Header {
            linked_list_lock,
            next: Cell::new(std::ptr::null()),
            prev: Cell::new(std::ptr::null()),
            ccdyn_vptr: CcDummy::ccdyn_vptr(),
        }
    }
}
impl Default for ThreadedObjectSpace {
fn default() -> Self {
let linked_list_lock = Arc::new(Mutex::new(()));
let pinned = Box::pin(Header {
prev: Cell::new(std::ptr::null()),
next: Cell::new(std::ptr::null()),
ccdyn_vptr: CcDummy::ccdyn_vptr(),
linked_list_lock,
});
let header: &Header = &pinned;
header.prev.set(header);
header.next.set(header);
ThreadedObjectSpace {
list: pinned,
collector_lock: Default::default(),
}
}
}
impl ThreadedObjectSpace {
    /// Count the objects currently linked into this space.
    pub fn count_tracked(&self) -> usize {
        // Hold the list lock so the list cannot change mid-traversal.
        let _linked_list_lock = self.list.linked_list_lock.lock();
        let list: &Header = &self.list;
        let mut count = 0;
        collect::visit_list(list, |_| count += 1);
        count
    }

    /// Run a cycle collection over this space's list.
    ///
    /// Returns `collect::collect_list`'s result — presumably the number
    /// of objects collected; confirm against `collect`'s documentation.
    pub fn collect_cycles(&self) -> usize {
        // Lock order matters: collector write lock first, then the list
        // lock. Both guards are moved into `collect_list`, which decides
        // when to release them during collection.
        let collector_lock = self.collector_lock.write();
        let linked_list_lock = self.list.linked_list_lock.lock();
        debug::log(|| ("ThreadedObjectSpace", "start collect_cycles"));
        let list: &Header = &self.list;
        let result = collect::collect_list(list, (linked_list_lock, collector_lock));
        debug::log(|| ("ThreadedObjectSpace", "end collect_cycles"));
        result
    }

    /// Allocate a tracked `ThreadedCc<T>` inside this space.
    ///
    /// Holds the list lock across construction; `insert` asserts the
    /// lock is already held by its caller, which this satisfies.
    /// NOTE(review): assumes `new_in_space` routes through `insert`
    /// without re-locking — confirm in `ThreadedCc`.
    pub fn create<T: Trace + Send + Sync>(&self, value: T) -> ThreadedCc<T> {
        let _linked_list_lock = self.list.linked_list_lock.lock();
        ThreadedCc::new_in_space(value, self)
    }
}
impl Linked for Header {
    #[inline]
    fn next(&self) -> *const Self {
        self.next.get()
    }
    #[inline]
    fn prev(&self) -> *const Self {
        self.prev.get()
    }
    #[inline]
    fn set_prev(&self, other: *const Self) {
        self.prev.set(other)
    }
    /// Reconstruct the `&dyn CcDyn` trait object for the value stored
    /// immediately after this header.
    #[inline]
    fn value(&self) -> &dyn CcDyn {
        unsafe {
            // SAFETY: `Header` is `#[repr(C)]` and is laid out directly in
            // front of its value, so `offset(1)` yields the value's
            // address; `ccdyn_vptr` is the matching vtable saved by
            // `insert`.
            // NOTE(review): like `insert`, this assumes the (data, vtable)
            // fat-pointer layout, which the language does not guarantee.
            let fat_ptr: (*const (), *const ()) =
                ((self as *const Self).offset(1) as _, self.ccdyn_vptr);
            mem::transmute(fat_ptr)
        }
    }
}