use std::marker::PhantomData;
use std::mem;
use std::ptr::drop_in_place;
use std::sync::atomic::{AtomicPtr, Ordering};
use std::sync::Arc;
use crate::collector::{GcData, InternalGcRef, COLLECTOR};
use crate::marker::{GcDeref, GcSafe};
use crate::{Finalize, Gc, Scan, Scanner};
/// A garbage-collected pointer whose target can be read and replaced
/// atomically.
///
/// Internally this stores an `AtomicPtr` to the target's `GcData` allocation,
/// shared (via `Arc`) with the global `COLLECTOR` through a registered
/// `backing_handle` so the collector can always see the current target.
///
/// NOTE(review): `Clone` is derived while `Drop` invalidates
/// `backing_handle`. This is only sound if `InternalGcRef::clone` yields an
/// independently invalidatable registration — confirm against the collector
/// implementation.
#[derive(Clone, Debug)]
pub struct AtomicGc<T: Scan> {
    // Shared with the collector (see `COLLECTOR.new_handle_for_atomic` in
    // `new`) so it observes pointer swaps made through this value.
    atomic_ptr: Arc<AtomicPtr<GcData>>,
    // Collector-side registration for `atomic_ptr`; invalidated in `Drop`.
    backing_handle: InternalGcRef,
    // Express logical ownership of a `Gc<T>` without storing one.
    _mark: PhantomData<Gc<T>>,
}
impl<T: Scan> AtomicGc<T> {
/// Creates an `AtomicGc` initially pointing at the same data as `data`.
///
/// Panics (via `assert_live`) if `data` has already been collected.
#[must_use]
pub fn new(data: &Gc<T>) -> Self {
    data.assert_live();
    // Raw pointer to the underlying `GcData` allocation; the collector
    // registration below keeps it traceable.
    let raw = Arc::as_ptr(data.internal_handle_ref().data());
    let shared = Arc::new(AtomicPtr::new(raw as _));
    // Register the shared atomic with the collector before assembling the
    // struct, so the backing handle and the stored pointer refer to the
    // same `Arc`.
    let handle = COLLECTOR.new_handle_for_atomic(Arc::clone(&shared));
    Self {
        atomic_ptr: shared,
        backing_handle: handle,
        _mark: PhantomData,
    }
}
/// Returns a fresh clone of the collector-registered backing handle.
pub(crate) fn internal_handle(&self) -> InternalGcRef {
    let backing = &self.backing_handle;
    backing.clone()
}
/// Atomically loads the current target and returns it as a new `Gc<T>`.
#[must_use]
pub fn load(&self, ordering: Ordering) -> Gc<T> {
    let ptr;
    let internal_handle;
    {
        // NOTE(review): presumably this spinlock prevents a collection from
        // running while we hold a raw `GcData` pointer that is not yet
        // backed by a handle — confirm with the collector implementation.
        let _collection_blocker = COLLECTOR.get_collection_blocker_spinlock();
        let gc_data_ptr = self.atomic_ptr.load(ordering);
        // Reconstitute the `Arc` without taking ownership of a strong
        // reference: `from_raw` claims one, so we clone (incrementing the
        // strong count) and `forget` the temporary to cancel the claim.
        let gc_data_temp = unsafe { Arc::from_raw(gc_data_ptr) };
        let new_gc_data_ref = gc_data_temp.clone();
        mem::forget(gc_data_temp);
        ptr = new_gc_data_ref.scan_ptr().cast();
        internal_handle = COLLECTOR.handle_from_data(new_gc_data_ref);
    }
    Gc::new_raw(internal_handle, ptr)
}
/// Atomically retargets this `AtomicGc` at the data behind `v`.
///
/// Panics (via `assert_live`) if `v` has already been collected.
pub fn store(&self, v: &Gc<T>, ordering: Ordering) {
    v.assert_live();
    let new_target = Arc::as_ptr(v.internal_handle_ref().data());
    // Hold the blocker across the store; it is released when the guard
    // drops at the end of the function. Presumably this keeps a collection
    // from racing the pointer update — confirm with the collector impl.
    let _collection_blocker = COLLECTOR.get_collection_blocker_spinlock();
    self.atomic_ptr.store(new_target as _, ordering);
}
/// Atomically replaces the target with the data behind `v`, returning the
/// previous target as a new `Gc<T>`.
///
/// Panics (via `assert_live`) if `v` has already been collected.
#[must_use]
pub fn swap(&self, v: &Gc<T>, ordering: Ordering) -> Gc<T> {
    v.assert_live();
    let data = v.internal_handle_ref().data();
    let raw_data_ptr = Arc::as_ptr(data);
    let ptr;
    let internal_handle;
    {
        // NOTE(review): presumably blocks collection while we hold the raw
        // pointer to the old target without a handle — confirm.
        let _collection_blocker = COLLECTOR.get_collection_blocker_spinlock();
        let old_data_ptr = self.atomic_ptr.swap(raw_data_ptr as _, ordering);
        // Same refcount dance as `load`: clone to gain a strong reference,
        // then `forget` the `from_raw` temporary so we don't steal the one
        // it claimed.
        let old_data_arc = unsafe { Arc::from_raw(old_data_ptr) };
        let gc_data = old_data_arc.clone();
        mem::forget(old_data_arc);
        ptr = gc_data.scan_ptr().cast();
        internal_handle = COLLECTOR.handle_from_data(gc_data);
    }
    Gc::new_raw(internal_handle, ptr)
}
/// Atomically replaces the target with `new` if it currently equals
/// `current`, returning `true` on success.
///
/// Kept for API compatibility; internally this now uses
/// `AtomicPtr::compare_exchange`, since `AtomicPtr::compare_and_swap` has
/// been deprecated since Rust 1.50. Panics (via `assert_live`) if `new`
/// has already been collected.
#[allow(clippy::must_use_candidate)]
pub fn compare_and_swap(&self, current: &Gc<T>, new: &Gc<T>, ordering: Ordering) -> bool {
    new.assert_live();
    let guess_data = current.internal_handle_ref().data();
    let guess_data_raw = Arc::as_ptr(guess_data) as _;
    let new_data = new.internal_handle_ref().data();
    let new_data_raw = Arc::as_ptr(new_data) as _;
    // Failure ordering mapping documented in the std migration notes for
    // `compare_and_swap` -> `compare_exchange`: the failure ordering may
    // not be `Release` or `AcqRel`.
    let failure_ordering = match ordering {
        Ordering::AcqRel => Ordering::Acquire,
        Ordering::Release => Ordering::Relaxed,
        other => other,
    };
    let exchange_result;
    {
        // NOTE(review): presumably blocks a concurrent collection during
        // the CAS — confirm with the collector implementation.
        let _collection_blocker = COLLECTOR.get_collection_blocker_spinlock();
        exchange_result = self.atomic_ptr.compare_exchange(
            guess_data_raw,
            new_data_raw,
            ordering,
            failure_ordering,
        );
    }
    // `Ok` iff the observed value equalled `guess_data_raw` — exactly the
    // old `compare_res == guess_data_raw` check.
    exchange_result.is_ok()
}
/// Atomically replaces the target with `new` if it currently equals
/// `current`, using `success`/`failure` orderings; returns `true` on
/// success.
///
/// Panics (via `assert_live`) if `new` has already been collected.
#[allow(clippy::must_use_candidate)]
pub fn compare_exchange(
    &self,
    current: &Gc<T>,
    new: &Gc<T>,
    success: Ordering,
    failure: Ordering,
) -> bool {
    new.assert_live();
    let expected_raw = Arc::as_ptr(current.internal_handle_ref().data()) as _;
    let replacement_raw = Arc::as_ptr(new.internal_handle_ref().data()) as _;
    let result = {
        // Blocker held only for the duration of the CAS itself.
        let _collection_blocker = COLLECTOR.get_collection_blocker_spinlock();
        self.atomic_ptr
            .compare_exchange(expected_raw, replacement_raw, success, failure)
    };
    result.is_ok()
}
}
unsafe impl<T: Scan> Scan for AtomicGc<T> {
    fn scan(&self, scanner: &mut Scanner<'_>) {
        // Report our backing handle so the scanner treats whatever this
        // atomic currently points at as reachable.
        let handle = self.internal_handle();
        scanner.add_internal_handle(handle);
    }
}
// SAFETY(review): marker impls — confirm the exact invariants required by
// `GcSafe` in `crate::marker`; nothing in this file demonstrates them.
unsafe impl<T: Scan> GcSafe for AtomicGc<T> {}
// `GcDeref` is only offered when `T: Send + Sync`; presumably because the
// pointee may be reached from multiple threads through the atomic — confirm.
unsafe impl<T: Scan + Send + Sync> GcDeref for AtomicGc<T> {}
unsafe impl<T: Scan> Finalize for AtomicGc<T> {
    /// Finalizes by running this value's destructor in place, which (via
    /// `Drop`) also invalidates the backing handle.
    unsafe fn finalize(&mut self) {
        // Standard `drop_in_place` contract: `self` must not be used again
        // after this call.
        drop_in_place(self)
    }
}
impl<T: Scan> Drop for AtomicGc<T> {
    fn drop(&mut self) {
        // Invalidate the collector-side registration created in `new`, so
        // the collector stops tracing through this atomic's handle.
        self.backing_handle.invalidate();
    }
}