use crate::vm::slot::{MemorySlice, Slot};
use crate::vm::ObjectModel;
use crate::{
util::{metadata::MetadataSpec, *},
vm::VMBinding,
};
use atomic::Ordering;
use downcast_rs::Downcast;
/// Selects which write barrier a plan requires, so the VM binding can compile
/// the matching fast path.
///
/// `Eq` is derived alongside `PartialEq` because equality over these fieldless
/// variants is total (clippy: `derive_partial_eq_without_eq`).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum BarrierSelector {
    /// No write barrier is needed.
    NoBarrier,
    /// Object-granularity remembering barrier (see `ObjectBarrier` below).
    ObjectBarrier,
    /// Snapshot-at-the-beginning barrier (see `SATBBarrier` below).
    SATBBarrier,
}

impl BarrierSelector {
    /// Const equality test. `PartialEq::eq` is not callable in `const`
    /// contexts, so this cast-based comparison is provided for compile-time
    /// configuration code.
    pub const fn equals(&self, other: BarrierSelector) -> bool {
        *self as u8 == other as u8
    }
}
/// Mutator-side barrier interface. Methods come in `*_pre`/`*_post` pairs
/// that run before and after the underlying memory operation, plus a `*_slow`
/// out-of-line path taken when a fast-path check fires. All hooks default to
/// no-ops so a barrier implementation only overrides what it needs.
pub trait Barrier<VM: VMBinding>: 'static + Send + Downcast {
    /// Flush any barrier-local buffered state to the plan. Default: no-op.
    fn flush(&mut self) {}

    /// Hook invoked when a weak reference's referent is loaded.
    /// Default: no-op.
    fn load_weak_reference(&mut self, _referent: ObjectReference) {}

    /// Perform the full write of `target` into `slot` of `src`: runs the
    /// `pre` hook, does the store, then runs the `post` hook. Bindings that
    /// do not inline the fast path can call this instead of storing directly.
    fn object_reference_write(
        &mut self,
        src: ObjectReference,
        slot: VM::VMSlot,
        target: ObjectReference,
    ) {
        self.object_reference_write_pre(src, slot, Some(target));
        slot.store(target);
        self.object_reference_write_post(src, slot, Some(target));
    }

    /// Hook that runs before an object-reference field write. A `None`
    /// target indicates the written value is not supplied by the caller.
    /// Default: no-op.
    fn object_reference_write_pre(
        &mut self,
        _src: ObjectReference,
        _slot: VM::VMSlot,
        _target: Option<ObjectReference>,
    ) {
    }

    /// Hook that runs after an object-reference field write. Default: no-op.
    fn object_reference_write_post(
        &mut self,
        _src: ObjectReference,
        _slot: VM::VMSlot,
        _target: Option<ObjectReference>,
    ) {
    }

    /// Out-of-line slow path for an object-reference field write.
    /// Default: no-op.
    fn object_reference_write_slow(
        &mut self,
        _src: ObjectReference,
        _slot: VM::VMSlot,
        _target: Option<ObjectReference>,
    ) {
    }

    /// Bulk copy of a region of reference slots from `src` to `dst`,
    /// bracketed by the `pre`/`post` hooks (slices are cloned for the hooks
    /// because `copy` consumes borrows of both).
    fn memory_region_copy(&mut self, src: VM::VMMemorySlice, dst: VM::VMMemorySlice) {
        self.memory_region_copy_pre(src.clone(), dst.clone());
        VM::VMMemorySlice::copy(&src, &dst);
        self.memory_region_copy_post(src, dst);
    }

    /// Hook that runs before a bulk slot copy. Default: no-op.
    fn memory_region_copy_pre(&mut self, _src: VM::VMMemorySlice, _dst: VM::VMMemorySlice) {}

    /// Hook that runs after a bulk slot copy. Default: no-op.
    fn memory_region_copy_post(&mut self, _src: VM::VMMemorySlice, _dst: VM::VMMemorySlice) {}

    /// Hint that `_obj` is about to be written (possibly many times), letting
    /// the barrier act once for the whole object. Default: no-op.
    fn object_probable_write(&mut self, _obj: ObjectReference) {}
}

// Allow downcasting a `dyn Barrier<VM>` to a concrete barrier type.
impl_downcast!(Barrier<VM> where VM: VMBinding);
/// A barrier that does nothing: every `Barrier` hook keeps its no-op default.
/// Used by plans that select `BarrierSelector::NoBarrier`.
pub struct NoBarrier;

impl<VM: VMBinding> Barrier<VM> for NoBarrier {}
/// Plan-specific slow-path behavior backing a generic barrier shell
/// (`ObjectBarrier`, `SATBBarrier`). The shells do the fast-path filtering;
/// implementations of this trait do the actual slow-path work.
pub trait BarrierSemantics: 'static + Send {
    type VM: VMBinding;

    /// Per-object "unlog" bit, taken from the VM's global log bit spec.
    /// Non-zero = unlogged (the barrier must act); zero = already logged
    /// (see `ObjectBarrier::object_is_unlogged`/`log_object`).
    const UNLOG_BIT_SPEC: MetadataSpec =
        *<Self::VM as VMBinding>::VMObjectModel::GLOBAL_LOG_BIT_SPEC.as_spec();

    /// Flush buffered state to the plan.
    fn flush(&mut self);

    /// Slow path for a single object-reference field write.
    fn object_reference_write_slow(
        &mut self,
        src: ObjectReference,
        slot: <Self::VM as VMBinding>::VMSlot,
        target: Option<ObjectReference>,
    );

    /// Slow path for a bulk slot copy.
    fn memory_region_copy_slow(
        &mut self,
        src: <Self::VM as VMBinding>::VMMemorySlice,
        dst: <Self::VM as VMBinding>::VMMemorySlice,
    );

    /// Slow path for the "object will probably be written" hint.
    /// Default: no-op.
    fn object_probable_write_slow(&mut self, _obj: ObjectReference) {}

    /// Slow path for a weak-reference load. Default: no-op.
    fn load_weak_reference(&mut self, _o: ObjectReference) {}
}
/// Object-remembering barrier shell: the first post-write on an "unlogged"
/// object takes the slow path and clears the object's unlog bit; later writes
/// to the same object are filtered out. Plan-specific slow-path work is
/// delegated to `S`.
pub struct ObjectBarrier<S: BarrierSemantics> {
    semantics: S,
}
impl<S: BarrierSemantics> ObjectBarrier<S> {
    pub fn new(semantics: S) -> Self {
        Self { semantics }
    }

    /// Fast-path check: `true` if the object's unlog bit is set, i.e. the
    /// object has not been logged yet and the barrier must act.
    fn object_is_unlogged(&self, object: ObjectReference) -> bool {
        // SAFETY: intentionally non-atomic load used as a racy fast-path
        // filter; on the write path a stale read is re-checked atomically in
        // `log_object` before any slow-path work happens.
        // NOTE(review): soundness also relies on the metadata load being
        // valid for `object` — confirm against the `MetadataSpec` contract.
        unsafe { S::UNLOG_BIT_SPEC.load::<S::VM, u8>(object, None) != 0 }
    }

    /// Atomically clears the object's unlog bit. Returns `true` iff this
    /// call performed the 1 -> 0 transition (we won the race to log the
    /// object); `false` if another thread already logged it.
    fn log_object(&self, object: ObjectReference) -> bool {
        // With extreme assertions on, check the object is a valid, allocated
        // object (valid-object bit set) before touching its metadata.
        #[cfg(all(feature = "vo_bit", feature = "extreme_assertions"))]
        debug_assert!(
            crate::util::metadata::vo_bit::is_vo_bit_set(object),
            "object bit is unset"
        );
        loop {
            let old_value =
                S::UNLOG_BIT_SPEC.load_atomic::<S::VM, u8>(object, None, Ordering::SeqCst);
            // Already logged by a concurrent thread: nothing to do.
            if old_value == 0 {
                return false;
            }
            // Attempt the 1 -> 0 transition; on CAS failure loop back and
            // re-read, since another thread may have logged the object (or
            // a neighboring bit in the same metadata byte changed).
            if S::UNLOG_BIT_SPEC
                .compare_exchange_metadata::<S::VM, u8>(
                    object,
                    1,
                    0,
                    None,
                    Ordering::SeqCst,
                    Ordering::SeqCst,
                )
                .is_ok()
            {
                return true;
            }
        }
    }
}
impl<S: BarrierSemantics> Barrier<S::VM> for ObjectBarrier<S> {
    fn flush(&mut self) {
        self.semantics.flush();
    }

    /// After a field write: only still-unlogged source objects proceed to the
    /// slow path.
    fn object_reference_write_post(
        &mut self,
        src: ObjectReference,
        slot: <S::VM as VMBinding>::VMSlot,
        target: Option<ObjectReference>,
    ) {
        if !self.object_is_unlogged(src) {
            return;
        }
        self.object_reference_write_slow(src, slot, target);
    }

    /// Slow path: atomically log the object; only the winner of the logging
    /// race forwards the write to the plan-specific semantics.
    fn object_reference_write_slow(
        &mut self,
        src: ObjectReference,
        slot: <S::VM as VMBinding>::VMSlot,
        target: Option<ObjectReference>,
    ) {
        if !self.log_object(src) {
            return;
        }
        self.semantics.object_reference_write_slow(src, slot, target);
    }

    /// Bulk copies always go straight to the plan-specific slow path.
    fn memory_region_copy_post(
        &mut self,
        src: <S::VM as VMBinding>::VMMemorySlice,
        dst: <S::VM as VMBinding>::VMMemorySlice,
    ) {
        self.semantics.memory_region_copy_slow(src, dst);
    }

    /// Probable-write hint: notify the semantics only for unlogged objects.
    fn object_probable_write(&mut self, obj: ObjectReference) {
        if self.object_is_unlogged(obj) {
            self.semantics.object_probable_write_slow(obj);
        }
    }
}
/// Snapshot-at-the-beginning (SATB) barrier shell: it acts *before* the
/// write (see `object_reference_write_pre`) and can optionally intercept
/// weak-reference loads. Plan-specific slow-path work is delegated to `S`.
pub struct SATBBarrier<S: BarrierSemantics> {
    // When true, `load_weak_reference` forwards to the semantics; toggled by
    // the plan via `set_weak_ref_barrier_enabled`.
    weak_ref_barrier_enabled: bool,
    semantics: S,
}
impl<S: BarrierSemantics> SATBBarrier<S> {
pub fn new(semantics: S) -> Self {
Self {
weak_ref_barrier_enabled: false,
semantics,
}
}
pub(crate) fn set_weak_ref_barrier_enabled(&mut self, value: bool) {
self.weak_ref_barrier_enabled = value;
}
fn object_is_unlogged(&self, object: ObjectReference) -> bool {
S::UNLOG_BIT_SPEC.load_atomic::<S::VM, u8>(object, None, Ordering::SeqCst) != 0
}
}
impl<S: BarrierSemantics> Barrier<S::VM> for SATBBarrier<S> {
    fn flush(&mut self) {
        self.semantics.flush();
    }

    /// Forward weak-reference loads only while the plan has enabled the hook.
    fn load_weak_reference(&mut self, o: ObjectReference) {
        if !self.weak_ref_barrier_enabled {
            return;
        }
        self.semantics.load_weak_reference(o)
    }

    /// Probable-write hints go straight to the semantics (no unlog filter).
    fn object_probable_write(&mut self, obj: ObjectReference) {
        self.semantics.object_probable_write_slow(obj);
    }

    /// SATB acts *before* the store: unlogged source objects take the slow
    /// path.
    fn object_reference_write_pre(
        &mut self,
        src: ObjectReference,
        slot: <S::VM as VMBinding>::VMSlot,
        target: Option<ObjectReference>,
    ) {
        if !self.object_is_unlogged(src) {
            return;
        }
        self.semantics.object_reference_write_slow(src, slot, target);
    }

    /// Deliberately unimplemented: this barrier does its work in the `pre`
    /// hook, and callers are expected not to invoke the `post` hook.
    /// NOTE(review): the default `Barrier::object_reference_write` calls this
    /// hook and would panic — confirm bindings using SATB never take that
    /// path.
    fn object_reference_write_post(
        &mut self,
        _src: ObjectReference,
        _slot: <S::VM as VMBinding>::VMSlot,
        _target: Option<ObjectReference>,
    ) {
        unimplemented!()
    }

    fn object_reference_write_slow(
        &mut self,
        src: ObjectReference,
        slot: <S::VM as VMBinding>::VMSlot,
        target: Option<ObjectReference>,
    ) {
        self.semantics.object_reference_write_slow(src, slot, target);
    }

    /// Bulk copies are handled before the copy happens.
    fn memory_region_copy_pre(
        &mut self,
        src: <S::VM as VMBinding>::VMMemorySlice,
        dst: <S::VM as VMBinding>::VMMemorySlice,
    ) {
        self.semantics.memory_region_copy_slow(src, dst);
    }

    /// Deliberately unimplemented — same reasoning as
    /// `object_reference_write_post`: `Barrier::memory_region_copy` would
    /// panic here.
    fn memory_region_copy_post(
        &mut self,
        _src: <S::VM as VMBinding>::VMMemorySlice,
        _dst: <S::VM as VMBinding>::VMMemorySlice,
    ) {
        unimplemented!()
    }
}