use crate::plan::global::CommonPlan;
use crate::plan::ObjectQueue;
use crate::plan::Plan;
use crate::plan::PlanConstraints;
use crate::policy::copyspace::CopySpace;
use crate::policy::space::Space;
use crate::scheduler::*;
use crate::util::conversions;
use crate::util::copy::CopySemantics;
use crate::util::heap::layout::heap_layout::Mmapper;
use crate::util::heap::layout::heap_layout::VMMap;
use crate::util::heap::HeapMeta;
use crate::util::heap::VMRequest;
use crate::util::metadata::side_metadata::SideMetadataSanity;
use crate::util::metadata::side_metadata::SideMetadataSpec;
use crate::util::options::Options;
use crate::util::statistics::counter::EventCounter;
use crate::util::ObjectReference;
use crate::util::VMWorkerThread;
use crate::vm::{ObjectModel, VMBinding};
use mmtk_macros::PlanTraceObject;
use std::sync::atomic::AtomicBool;
use std::sync::atomic::Ordering;
use std::sync::{Arc, Mutex};
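
/// Common implementation shared by generational plans. A concrete generational plan embeds
/// this struct and forwards calls to it where possible.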
#[derive(PlanTraceObject)]
pub struct Gen<VM: VMBinding> {
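    /// The nursery space.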
#[trace(CopySemantics::PromoteToMature)]
pub nursery: CopySpace<VM>,
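    /// The common plan, which includes the spaces shared by most plans (e.g. the large
    /// object space).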
#[fallback_trace]
pub common: CommonPlan<VM>,
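    /// Whether the current GC is a full heap GC.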
pub gc_full_heap: AtomicBool,
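    /// Whether the next GC is forced to be a full heap GC.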
pub next_gc_full_heap: AtomicBool,
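    /// A counter for full heap GCs, reported as "majorGC" in the statistics output.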
pub full_heap_gc_count: Arc<Mutex<EventCounter>>,
}

impl<VM: VMBinding> Gen<VM> {
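    /// Create the common generational structure, including the nursery space and the
    /// common plan.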
pub fn new(
mut heap: HeapMeta,
global_metadata_specs: Vec<SideMetadataSpec>,
constraints: &'static PlanConstraints,
vm_map: &'static VMMap,
mmapper: &'static Mmapper,
options: Arc<Options>,
) -> Self {
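        // Create the nursery as a copy space whose maximum extent is fixed by the
        // nursery size option.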
let nursery = CopySpace::new(
"nursery",
false,
true,
VMRequest::fixed_extent(options.get_max_nursery(), false),
global_metadata_specs.clone(),
vm_map,
mmapper,
&mut heap,
);
let common = CommonPlan::new(
vm_map,
mmapper,
options,
heap,
constraints,
global_metadata_specs,
);
let full_heap_gc_count = common.base.stats.new_event_counter("majorGC", true, true);
Gen {
nursery,
common,
            gc_full_heap: AtomicBool::new(false),
            next_gc_full_heap: AtomicBool::new(false),
full_heap_gc_count,
}
}
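    /// Verify the side metadata specs used by the spaces in this plan.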
pub fn verify_side_metadata_sanity(&self, sanity: &mut SideMetadataSanity) {
self.common.verify_side_metadata_sanity(sanity);
self.nursery.verify_side_metadata_sanity(sanity);
}
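    /// Return all spaces in this plan: the common plan spaces plus the nursery.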
pub fn get_spaces(&self) -> Vec<&dyn Space<VM>> {
let mut ret = self.common.get_spaces();
ret.push(&self.nursery);
ret
}
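    /// Prepare for a collection. This should be called by a single thread during GC preparation.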
pub fn prepare(&mut self, tls: VMWorkerThread) {
let full_heap = !self.is_current_gc_nursery();
if full_heap {
self.full_heap_gc_count.lock().unwrap().inc();
}
self.common.prepare(tls, full_heap);
self.nursery.prepare(true);
self.nursery
.set_copy_for_sft_trace(Some(CopySemantics::PromoteToMature));
}
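    /// Release resources after a collection. This should be called by a single thread during
    /// GC release.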
pub fn release(&mut self, tls: VMWorkerThread) {
let full_heap = !self.is_current_gc_nursery();
self.common.release(tls, full_heap);
self.nursery.release();
}
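    /// Independent of how many pages remain in the page budget, we must never exhaust virtual
    /// memory: the nursery must never grow beyond what can be copied into the mature space.
    ///
    /// Returns `true` if the worst-case copy reserve exceeds the physical pages available in
    /// the mature space.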
fn virtual_memory_exhausted<P: Plan>(&self, plan: &P) -> bool {
((plan.get_collection_reserved_pages() as f64
* VM::VMObjectModel::VM_WORST_CASE_COPY_EXPANSION) as usize)
> plan.get_mature_physical_pages_available()
}
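    /// Check if a collection is required: a GC is needed when the nursery is full or virtual
    /// memory is exhausted. This may also mark the next GC as a full heap GC when a space
    /// other than the nursery is full.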
pub fn collection_required<P: Plan>(
&self,
plan: &P,
space_full: bool,
space: Option<&dyn Space<VM>>,
) -> bool {
let nursery_full = self.nursery.reserved_pages()
>= (conversions::bytes_to_pages_up(self.common.base.options.get_max_nursery()));
if nursery_full {
return true;
}
if self.virtual_memory_exhausted(plan) {
return true;
}
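        // Is the GC triggered by the nursery?
        // - if `space` is None, it is not;
        // - otherwise, compare the space descriptor against the nursery's.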
let is_triggered_by_nursery = space.map_or(false, |s| {
s.common().descriptor == self.nursery.common().descriptor
});
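        // If a space other than the nursery is full, the next GC will be a full heap GC.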
if space_full && !is_triggered_by_nursery {
self.next_gc_full_heap.store(true, Ordering::SeqCst);
}
self.common.base.collection_required(plan, space_full)
}
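    /// Force the next collection to be a full heap collection.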
pub fn force_full_heap_collection(&self) {
self.next_gc_full_heap.store(true, Ordering::Relaxed);
}
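    /// Return true if the last collection was a full heap collection.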
pub fn last_collection_full_heap(&self) -> bool {
self.gc_full_heap.load(Ordering::Relaxed)
}
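    /// Decide whether the upcoming GC should be a full heap GC, record the decision in
    /// `gc_full_heap`, and return it.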
pub fn requires_full_heap_collection<P: Plan>(&self, plan: &P) -> bool {
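        // The conditions below are complex; separate branches with identical bodies read more
        // clearly, hence the clippy allow.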
#[allow(clippy::if_same_then_else)]
let is_full_heap = if crate::plan::generational::FULL_NURSERY_GC {
true
} else if self
.common
.base
.user_triggered_collection
.load(Ordering::SeqCst)
&& *self.common.base.options.full_heap_system_gc
{
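            // The user triggered this collection, and the options force user-triggered
            // collections to be full heap.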
true
} else if self.next_gc_full_heap.load(Ordering::SeqCst)
|| self
.common
.base
.cur_collection_attempts
.load(Ordering::SeqCst)
> 1
{
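            // A full heap GC was explicitly requested, or this collection has already been
            // attempted more than once.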
true
} else if self.virtual_memory_exhausted(plan) {
true
} else {
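            // Otherwise, only do a full heap GC when the reserved pages have reached the total
            // heap pages (the heap is effectively exhausted); default to a nursery GC.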
plan.get_total_pages() <= plan.get_reserved_pages()
};
self.gc_full_heap.store(is_full_heap, Ordering::SeqCst);
info!(
"{}",
if is_full_heap {
"Full heap GC"
} else {
"Nursery GC"
}
);
is_full_heap
}
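    /// Trace an object in a full heap GC. Nursery objects are promoted to the mature space;
    /// all other objects are handled by the common plan.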
pub fn trace_object_full_heap<Q: ObjectQueue>(
&self,
queue: &mut Q,
object: ObjectReference,
worker: &mut GCWorker<VM>,
) -> ObjectReference {
if self.nursery.in_space(object) {
return self.nursery.trace_object::<Q>(
queue,
object,
Some(CopySemantics::PromoteToMature),
worker,
);
}
self.common.trace_object::<Q>(queue, object, worker)
}
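    /// Trace an object in a nursery GC. Only the nursery (and nursery objects in the large
    /// object space) are traced; mature objects are returned unchanged.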
pub fn trace_object_nursery<Q: ObjectQueue>(
&self,
queue: &mut Q,
object: ObjectReference,
worker: &mut GCWorker<VM>,
) -> ObjectReference {
if self.nursery.in_space(object) {
return self.nursery.trace_object::<Q>(
queue,
object,
Some(CopySemantics::PromoteToMature),
worker,
);
}
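        // Large objects may be allocated into the LOS as nursery objects. Trace them here.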
if self.common.get_los().in_space(object) {
return self.common.get_los().trace_object::<Q>(queue, object);
}
object
}
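    /// Return true if the current GC is a nursery GC.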
pub fn is_current_gc_nursery(&self) -> bool {
!self.gc_full_heap.load(Ordering::SeqCst)
}
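    /// Check whether the next GC should be a full heap GC: returns true if the available pages
    /// cannot accommodate the minimum nursery size.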
pub fn should_next_gc_be_full_heap(plan: &dyn Plan<VM = VM>) -> bool {
plan.get_available_pages()
< conversions::bytes_to_pages_up(plan.base().options.get_min_nursery())
}
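    /// Set whether the next GC will be a full heap GC.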
pub fn set_next_gc_full_heap(&self, next_gc_full_heap: bool) {
self.next_gc_full_heap
.store(next_gc_full_heap, Ordering::SeqCst);
}
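    /// Get the pages reserved for the collection by the generational part of the plan, i.e. the
    /// copy reserve for the nursery.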
pub fn get_collection_reserved_pages(&self) -> usize {
self.nursery.reserved_pages()
}
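    /// Get the pages used by the generational part of the plan, i.e. the nursery pages plus the
    /// pages used by the common plan.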
pub fn get_used_pages(&self) -> usize {
self.nursery.reserved_pages() + self.common.get_used_pages()
}
}