use crate::set_data_ptr;
use crate::trace::Trace;
use std::alloc::{alloc, dealloc, Layout};
use std::cell::{Cell, RefCell};
use std::mem;
use std::ptr::{self, NonNull};
#[cfg(feature = "nightly")]
use std::marker::Unsize;
/// Per-thread collector state: allocation statistics, user tunables, and the
/// head of the intrusive singly-linked list of every live `GcBox` owned by
/// this thread (each box links to the next via its header).
struct GcState {
    stats: GcStats,
    config: GcConfig,
    // Head of the intrusive allocation list; `None` when the heap is empty.
    boxes_start: Option<NonNull<GcBox<dyn Trace>>>,
}
impl Drop for GcState {
    /// Runs one last collection when the thread-local state is torn down,
    /// unless the user opted into leaking via `config.leak_on_drop`.
    fn drop(&mut self) {
        if self.config.leak_on_drop {
            return;
        }
        collect_garbage(self);
    }
}
// True while the collector is running drop glue during a sweep; finalizers
// consult this flag (via `finalizer_safe`) to know whether it is safe to
// touch Gc pointers.
thread_local!(pub static GC_DROPPING: Cell<bool> = const { Cell::new(false) });
/// RAII flag: while a `DropGuard` is alive, `GC_DROPPING` reads true, so
/// `finalizer_safe()` reports false. The flag is cleared again on drop.
struct DropGuard;

impl DropGuard {
    fn new() -> DropGuard {
        GC_DROPPING.with(|flag| flag.set(true));
        DropGuard
    }
}

impl Drop for DropGuard {
    fn drop(&mut self) {
        GC_DROPPING.with(|flag| flag.set(false));
    }
}
#[must_use]
pub fn finalizer_safe() -> bool {
GC_DROPPING.with(|dropping| !dropping.get())
}
// The entire collector is thread-local: each thread has its own allocation
// list, stats, and config, and no tracing happens across threads.
thread_local!(static GC_STATE: RefCell<GcState> = RefCell::new(GcState {
    stats: GcStats::default(),
    config: GcConfig::default(),
    boxes_start: None,
}));
// The root count and the mark flag share a single word in `GcBoxHeader`:
// the top bit is the mark flag, all remaining bits hold the root count.
const MARK_MASK: usize = 1 << (usize::BITS - 1);
const ROOTS_MASK: usize = !MARK_MASK;
// Largest representable root count (all non-mark bits set).
const ROOTS_MAX: usize = ROOTS_MASK;
/// Metadata stored in front of every GC allocation.
pub(crate) struct GcBoxHeader {
    // `roots` packs the root count (low bits) with the mark flag (top bit,
    // see MARK_MASK); `next` is the intrusive allocation-list link.
    roots: Cell<usize>, next: Cell<Option<NonNull<GcBox<dyn Trace>>>>,
}
impl GcBoxHeader {
    /// Header for a freshly allocated box: rooted once, unmarked, unlinked.
    #[inline]
    pub fn new() -> Self {
        GcBoxHeader {
            roots: Cell::new(1),
            next: Cell::new(None),
        }
    }

    /// Current root count, with the mark bit stripped off.
    #[inline]
    pub fn roots(&self) -> usize {
        self.roots.get() & ROOTS_MASK
    }

    /// Increments the root count, panicking if it would spill into the
    /// mark bit.
    #[inline]
    pub fn inc_roots(&self) {
        let packed = self.roots.get();
        assert!((packed & ROOTS_MASK) < ROOTS_MAX, "roots counter overflow");
        // The mark flag lives above the counter, so a plain +1 cannot
        // disturb it once the bound above holds.
        self.roots.set(packed + 1);
    }

    /// Decrements the root count. No underflow check: callers must pair
    /// this with a prior `inc_roots`.
    #[inline]
    pub fn dec_roots(&self) {
        self.roots.set(self.roots.get() - 1);
    }

    /// Whether the mark flag is set (reached during the current mark phase).
    #[inline]
    pub fn is_marked(&self) -> bool {
        self.roots.get() & MARK_MASK != 0
    }

    /// Sets the mark flag, leaving the root count untouched.
    #[inline]
    pub fn mark(&self) {
        self.roots.set(self.roots.get() | MARK_MASK);
    }

    /// Clears the mark flag, leaving the root count untouched.
    #[inline]
    pub fn unmark(&self) {
        self.roots.set(self.roots.get() & !MARK_MASK);
    }
}
/// A garbage-collected allocation: header first, then the (possibly unsized)
/// payload. `#[repr(C)]` pins this field order so `from_box` can lay out the
/// allocation manually with `Layout::extend`.
#[repr(C)] pub(crate) struct GcBox<T: ?Sized + 'static> {
    header: GcBoxHeader,
    data: T,
}
impl<T: Trace> GcBox<T> {
    /// Allocates a new `GcBox` holding `value` and registers it with the
    /// thread-local collector state. The box starts with one root.
    pub(crate) fn new(value: T) -> NonNull<Self> {
        let boxed = Box::new(GcBox {
            header: GcBoxHeader::new(),
            data: value,
        });
        let ptr = NonNull::from(Box::leak(boxed));
        // SAFETY: `ptr` is a valid, fully initialized box not yet in the list.
        unsafe { insert_gcbox(ptr) };
        ptr
    }
}
impl<
    #[cfg(not(feature = "nightly"))] T: Trace,
    #[cfg(feature = "nightly")] T: Trace + Unsize<dyn Trace> + ?Sized,
> GcBox<T>
{
    /// Moves an already-boxed value into a fresh `GcBox` allocation and
    /// registers it with the thread-local collector state.
    ///
    /// The allocation is laid out by hand (`#[repr(C)]`: header, then data)
    /// because on nightly `T` may be unsized, where `Box::new(GcBox { .. })`
    /// is not possible.
    pub(crate) fn from_box(value: Box<T>) -> NonNull<Self> {
        let header_layout = Layout::new::<GcBoxHeader>();
        // Layout of the (possibly unsized) payload, taken from the live value.
        let value_layout = Layout::for_value::<T>(&*value);
        // Combined layout: header followed by data, padded to full alignment.
        let gcbox_layout = header_layout.extend(value_layout).unwrap().0.pad_to_align();
        unsafe {
            let gcbox_addr = alloc(gcbox_layout);
            // BUGFIX: `alloc` returns null on allocation failure; the original
            // wrote through it and wrapped it in `NonNull::new_unchecked`,
            // which is UB on OOM. Abort via the standard hook instead.
            if gcbox_addr.is_null() {
                std::alloc::handle_alloc_error(gcbox_layout);
            }
            let value = Box::into_raw(value);
            // Re-point the (possibly fat) pointer at the new allocation while
            // preserving its metadata (vtable/length).
            let gcbox = set_data_ptr(value as *mut GcBox<T>, gcbox_addr);
            ptr::addr_of_mut!((*gcbox).header).write(GcBoxHeader::new());
            // Byte-wise move of the payload into place; the value is NOT
            // dropped — ownership transfers into the GcBox.
            ptr::addr_of_mut!((*gcbox).data)
                .cast::<u8>()
                .copy_from_nonoverlapping(value.cast::<u8>(), value_layout.size());
            // Free the original box's storage without dropping the moved
            // value. A ZST box owns no storage, so skip `dealloc` for it.
            if value_layout.size() != 0 {
                dealloc(value.cast::<u8>(), value_layout);
            }
            let gcbox = NonNull::new_unchecked(gcbox);
            insert_gcbox(gcbox);
            gcbox
        }
    }
}
/// Links a freshly created box into the thread-local allocation list and
/// charges its size against the collection threshold. May run a collection
/// first when `bytes_allocated` exceeds the threshold; if the surviving heap
/// is still mostly full afterwards, the threshold is raised so collections
/// stay proportional to live data.
///
/// # Safety
/// `gcbox` must point to a valid, fully initialized `GcBox` that is not yet
/// in the list. NOTE(review): the new box is presumably created with
/// roots == 1 (see `GcBoxHeader::new`), so the collection below cannot free
/// anything it still references — confirm all callers uphold this.
unsafe fn insert_gcbox(gcbox: NonNull<GcBox<dyn Trace>>) {
    GC_STATE.with(|st| {
        let mut st = st.borrow_mut();
        // Collect before linking, so the half-inserted box is never scanned.
        if st.stats.bytes_allocated > st.config.threshold {
            collect_garbage(&mut st);
            // Heap still more than `used_space_ratio` full after collecting:
            // grow the threshold to avoid thrashing.
            if st.stats.bytes_allocated as f64
                > st.config.threshold as f64 * st.config.used_space_ratio
            {
                st.config.threshold =
                    (st.stats.bytes_allocated as f64 / st.config.used_space_ratio) as usize;
            }
        }
        // Push onto the front of the intrusive list.
        let next = st.boxes_start.replace(gcbox);
        gcbox.as_ref().header.next.set(next);
        // Account for the full allocation (header + payload).
        st.stats.bytes_allocated += mem::size_of_val::<GcBox<_>>(gcbox.as_ref());
    });
}
impl<T: ?Sized> GcBox<T> {
pub(crate) fn ptr_eq(this: &GcBox<T>, other: &GcBox<T>) -> bool {
ptr::eq(&this.header, &other.header)
}
}
impl<T: Trace + ?Sized> GcBox<T> {
    /// Marks this box and traces through its payload; a no-op if already
    /// marked, which is what terminates tracing on cycles.
    ///
    /// # Safety
    /// NOTE(review): relies on the `Trace` contract — `self.data.trace()`
    /// must only visit validly owned Gc pointers.
    pub(crate) unsafe fn trace_inner(&self) {
        if self.header.is_marked() {
            return;
        }
        self.header.mark();
        self.data.trace();
    }
}
impl<T: ?Sized> GcBox<T> {
    /// Increments this box's root count.
    ///
    /// # Safety
    /// Must be balanced by a later `unroot_inner`; the count itself is
    /// overflow-checked inside `inc_roots`.
    pub(crate) unsafe fn root_inner(&self) {
        self.header.inc_roots();
    }
    /// Decrements this box's root count.
    ///
    /// # Safety
    /// No underflow check — the caller must have a matching `root_inner`.
    pub(crate) unsafe fn unroot_inner(&self) {
        self.header.dec_roots();
    }
    /// Raw pointer to the payload, computed without materializing an
    /// intermediate reference (hence `addr_of!`).
    pub(crate) fn value_ptr(this: *const GcBox<T>) -> *const T {
        unsafe { ptr::addr_of!((*this).data) }
    }
    /// Borrows the payload.
    pub(crate) fn value(&self) -> &T {
        &self.data
    }
}
/// Runs a full mark-and-sweep collection over `st`'s allocation list.
///
/// Phases: (1) mark everything reachable from rooted boxes and partition the
/// list into survivors/garbage, (2) run finalizers on the garbage, (3) mark
/// again in case finalizers resurrected anything, (4) sweep: free what is
/// still garbage and splice it out of the intrusive list.
fn collect_garbage(st: &mut GcState) {
    /// A node found unreachable during marking, together with the incoming
    /// link (`head` or a predecessor's `next` cell) that points at it, so
    /// sweep can unlink it in O(1).
    struct Unmarked<'a> {
        incoming: &'a Cell<Option<NonNull<GcBox<dyn Trace>>>>,
        this: NonNull<GcBox<dyn Trace>>,
    }
    // Safety: every pointer reachable from `head` must be a valid GcBox.
    unsafe fn mark(head: &Cell<Option<NonNull<GcBox<dyn Trace>>>>) -> Vec<Unmarked<'_>> {
        // Pass 1: trace from every rooted box, setting mark bits.
        let mut mark_head = head.get();
        while let Some(node) = mark_head {
            if node.as_ref().header.roots() > 0 {
                node.as_ref().trace_inner();
            }
            mark_head = node.as_ref().header.next.get();
        }
        // Pass 2: clear the mark bit on survivors and collect the unmarked
        // (garbage) nodes along with the link pointing at each.
        let mut unmarked = Vec::new();
        let mut unmark_head = head;
        while let Some(node) = unmark_head.get() {
            if node.as_ref().header.is_marked() {
                node.as_ref().header.unmark();
            } else {
                unmarked.push(Unmarked {
                    incoming: unmark_head,
                    this: node,
                });
            }
            unmark_head = &node.as_ref().header.next;
        }
        unmarked
    }
    // Safety: `finalized` must come from `mark` over the same, unmodified list.
    unsafe fn sweep(finalized: Vec<Unmarked<'_>>, bytes_allocated: &mut usize) {
        // While the guard is alive, finalizer_safe() is false: Drop impls
        // running below must not dereference Gc pointers.
        let _guard = DropGuard::new();
        // Reverse order: a node's `incoming` cell may live inside an earlier
        // garbage node, so tail-first freeing keeps every `incoming` alive
        // until it is used.
        for node in finalized.into_iter().rev() {
            if node.this.as_ref().header.is_marked() {
                // Intended as a resurrection check. NOTE(review): the second
                // `mark(head)` below also UNMARKS everything in its second
                // pass, so no node can be marked here — verify whether
                // finalizer resurrection is actually handled correctly.
                continue;
            }
            let incoming = node.incoming;
            // Reconstitute the Box so the allocation is dropped and freed.
            let node = Box::from_raw(node.this.as_ptr());
            *bytes_allocated -= mem::size_of_val::<GcBox<_>>(&*node);
            // Splice the dead node out of the intrusive list.
            incoming.set(node.header.next.take());
        }
    }
    st.stats.collections_performed += 1;
    unsafe {
        let head = Cell::from_mut(&mut st.boxes_start);
        let unmarked = mark(head);
        if unmarked.is_empty() {
            return;
        }
        // Run finalizers on everything about to die, before any frees.
        for node in &unmarked {
            Trace::finalize_glue(&node.this.as_ref().data);
        }
        // Re-mark: finalizers may have rooted ("resurrected") some garbage.
        // NOTE(review): see the matching note in `sweep` about whether the
        // resurrection state survives this call.
        mark(head);
        sweep(unmarked, &mut st.stats.bytes_allocated);
    }
}
/// Immediately runs a collection on the current thread, regardless of the
/// allocation threshold.
pub fn force_collect() {
    GC_STATE.with(|state| collect_garbage(&mut state.borrow_mut()));
}
/// User-tunable collector settings, adjusted via `configure`.
pub struct GcConfig {
    // A collection is considered once `bytes_allocated` exceeds this.
    pub threshold: usize,
    // If live bytes still exceed this fraction of the threshold after a
    // collection, the threshold is raised (see `insert_gcbox`).
    pub used_space_ratio: f64,
    // When true, skip the final collection in `GcState::drop`, leaking all
    // remaining allocations.
    pub leak_on_drop: bool,
}
impl Default for GcConfig {
fn default() -> Self {
Self {
used_space_ratio: 0.7,
threshold: 100,
leak_on_drop: false,
}
}
}
/// Applies `configurer` to this thread's collector configuration.
#[allow(dead_code)]
pub fn configure(configurer: impl FnOnce(&mut GcConfig)) {
    GC_STATE.with(|state| configurer(&mut state.borrow_mut().config));
}
/// Running totals for this thread's collector, snapshot via `stats()`.
#[derive(Clone, Default)]
pub struct GcStats {
    // Bytes currently held by live GC allocations (headers included).
    pub bytes_allocated: usize,
    pub collections_performed: usize,
}
/// Returns a snapshot of the current thread's collector statistics.
#[allow(dead_code)]
#[must_use]
pub fn stats() -> GcStats {
    GC_STATE.with(|state| {
        let state = state.borrow();
        state.stats.clone()
    })
}