use crate::util::Address;
use crate::util::ObjectReference;
use crate::vm::ObjectModel;
use crate::vm::VMBinding;
use crate::vm::VMLocalMarkBitSpec;
use std::sync::atomic::Ordering;
impl VMLocalMarkBitSpec {
    /// Set the mark bit of `object` to 1, using the given atomic ordering.
    pub fn mark<VM: VMBinding>(&self, object: ObjectReference, ordering: Ordering) {
        self.store_atomic::<VM, u8>(object, 1, None, ordering);
    }

    /// Return `true` if the mark bit of `object` is 1, loading it with the
    /// given atomic ordering.
    pub fn is_marked<VM: VMBinding>(&self, object: ObjectReference, ordering: Ordering) -> bool {
        let bit = self.load_atomic::<VM, u8>(object, None, ordering);
        bit == 1
    }
}
/// The current "marked" value of the per-object mark bit.
///
/// The mark bit is a single bit, so `state` is always 0 or 1. Objects whose
/// mark bit equals `state` are considered marked in the current GC.
#[derive(Debug)]
pub struct MarkState {
    // Invariant: only the lowest bit is used (0 or 1).
    state: u8,
}
impl MarkState {
    /// Create a mark state whose initial "marked" value is 1.
    pub fn new() -> Self {
        Self { state: 1 }
    }

    /// The value that represents "unmarked" for the current GC: since the
    /// mark bit is a single bit, this is just the flipped marked value.
    fn unmarked_state(&self) -> u8 {
        self.state ^ 1
    }

    /// Return `true` if `object` is marked in the current GC, i.e. its mark
    /// bit equals the current marked value.
    pub fn is_marked<VM: VMBinding>(&self, object: ObjectReference) -> bool {
        let state = VM::VMObjectModel::LOCAL_MARK_BIT_SPEC.load_atomic::<VM, u8>(
            object,
            None,
            Ordering::SeqCst,
        );
        state == self.state
    }

    /// Atomically attempt to mark `object`.
    ///
    /// Returns `true` if this call transitioned the object to marked, or
    /// `false` if the object was already marked (possibly because another
    /// thread won the race).
    pub fn test_and_mark<VM: VMBinding>(&self, object: ObjectReference) -> bool {
        loop {
            let old_value = VM::VMObjectModel::LOCAL_MARK_BIT_SPEC.load_atomic::<VM, u8>(
                object,
                None,
                Ordering::SeqCst,
            );
            if old_value == self.state {
                // Already marked for this GC.
                return false;
            }
            // CAS so that exactly one racing thread observes the transition.
            // On failure (another thread changed the bit) we reload and retry.
            if VM::VMObjectModel::LOCAL_MARK_BIT_SPEC
                .compare_exchange_metadata::<VM, u8>(
                    object,
                    old_value,
                    self.state,
                    None,
                    Ordering::SeqCst,
                    Ordering::SeqCst,
                )
                .is_ok()
            {
                return true;
            }
        }
    }

    /// Initialize the mark bit of a newly allocated `object`.
    ///
    /// Only needed when the mark bit lives in the object header: header bits
    /// are not bulk-cleared, so each object must start out explicitly
    /// unmarked. Side metadata is cleared in bulk (see `on_block_reset`).
    pub fn on_object_metadata_initialization<VM: VMBinding>(&self, object: ObjectReference) {
        if VM::VMObjectModel::LOCAL_MARK_BIT_SPEC.is_in_header() {
            VM::VMObjectModel::LOCAL_MARK_BIT_SPEC.store_atomic::<VM, u8>(
                object,
                self.unmarked_state(),
                None,
                Ordering::SeqCst,
            );
        }
    }

    /// Hook invoked at the start of a GC. Currently a no-op; `&mut self` is
    /// kept so state changes can be added without breaking callers.
    pub fn on_global_prepare<VM: VMBinding>(&mut self) {}

    /// Reset the mark bits for the block of memory starting at `start`
    /// spanning `size` bytes, by zeroing the corresponding side metadata.
    /// Does nothing when the mark bit is in the object header (header bits
    /// are handled per object instead).
    pub fn on_block_reset<VM: VMBinding>(&self, start: Address, size: usize) {
        if let crate::util::metadata::MetadataSpec::OnSide(side) =
            *VM::VMObjectModel::LOCAL_MARK_BIT_SPEC
        {
            side.bzero_metadata(start, size);
        }
    }

    /// Hook invoked at the end of a GC.
    ///
    /// When the mark bit is in the header, flip the meaning of the marked
    /// value instead of clearing every object's bit: all objects marked in
    /// this GC automatically become unmarked for the next GC.
    pub fn on_global_release<VM: VMBinding>(&mut self) {
        if VM::VMObjectModel::LOCAL_MARK_BIT_SPEC.is_in_header() {
            self.state = self.unmarked_state()
        }
    }
}

// `MarkState::new` has a natural no-argument constructor, so provide the
// conventional `Default` impl (clippy: `new_without_default`). A derived
// `Default` would be wrong here: it would zero `state`, while the initial
// marked value must be 1.
impl Default for MarkState {
    fn default() -> Self {
        Self::new()
    }
}