use super::Color;
use super::arena::GcRef;
use crate::vm::closure::{Closure, Upvalue, UpvalueState};
use crate::vm::proto::Proto;
use crate::vm::state::{Gc, LuaState, LuaThread};
use crate::vm::string::LuaString;
use crate::vm::table::Table;
use crate::vm::value::{Userdata, Val};
use crate::{LuaError, LuaResult};
/// A work item on the gray list: an object that has been marked reachable
/// but whose children still need to be traversed.
#[derive(Clone, Copy)]
pub enum GrayItem {
    /// A table whose contents (metatable, keys, values) await marking.
    Table(GcRef<Table>),
    /// A closure whose env/upvalues/constants await marking.
    Closure(GcRef<Closure>),
    /// A coroutine whose stack and upvalues await marking.
    Thread(GcRef<LuaThread>),
    /// The main thread. It lives on `LuaState` rather than in an arena, so
    /// the propagation loop must ask the state itself to traverse it.
    MainThread,
}
/// Outcome of a single propagation step (see [`Gc::propagate_one`]).
pub enum PropagateResult {
    /// One object was traversed; payload is its approximate cost in bytes.
    Done(usize),
    /// The popped item was `GrayItem::MainThread`; the caller (which owns
    /// the `LuaState`) must traverse the main thread itself.
    NeedMainThread,
    /// The gray list is empty — marking can move to the atomic phase.
    Empty,
}
/// The phases of the incremental collection cycle, in order:
/// `Pause` → `Propagate` → (atomic) → `SweepString` → `Sweep` → `Finalize` → `Pause`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum GcPhase {
    /// No collection in progress; next step starts a new cycle.
    Pause,
    /// Incrementally marking reachable objects from the gray list.
    Propagate,
    /// Incrementally sweeping dead strings (strings sweep first so the
    /// intern table can be purged before other arenas).
    SweepString,
    /// Incrementally sweeping the table/closure/upvalue/userdata/thread arenas.
    Sweep,
    /// Running queued `__gc` finalizers, one per step.
    Finalize,
}
/// Resume point for incremental sweeping across steps.
#[derive(Clone, Copy, Debug)]
pub struct SweepCursor {
    /// Which arena is being swept during `GcPhase::Sweep`
    /// (0 = tables, 1 = closures, 2 = upvalues, 3 = userdata, 4+ = threads).
    pub arena_index: u8,
    /// Slot index within the current arena where the next step resumes.
    pub slot_position: u32,
}
/// Default `gc_pause`: new threshold = estimate * pause / 100 (200 = 2x).
pub const DEFAULT_GC_PAUSE: u32 = 200;
/// Default `gc_stepmul`: scales the work budget of automatic steps.
pub const DEFAULT_GC_STEPMUL: u32 = 200;
/// Heap size at which the very first collection triggers.
const INITIAL_GC_THRESHOLD: usize = 64 * 1024;
/// Base unit (in bytes-equivalent cost) for one incremental GC step.
pub const GCSTEPSIZE: usize = 1024;
/// Maximum arena slots visited per incremental sweep step.
const GCSWEEPMAX: u32 = 80;
/// Cost charged per swept slot.
const GCSWEEPCOST: usize = 10;
/// Cost charged for running one `__gc` finalizer.
pub const GCFINALIZECOST: usize = 100;
// Rough per-object size estimates (bytes) used for accounting only; the
// collector never relies on these being exact.
pub const EST_STRING_SIZE: usize = 48;
pub const EST_TABLE_SIZE: usize = 64;
pub const EST_CLOSURE_SIZE: usize = 48;
pub const EST_UPVALUE_SIZE: usize = 24;
pub const EST_USERDATA_SIZE: usize = 48;
pub const EST_THREAD_SIZE: usize = 256;
/// Bookkeeping for the incremental collector, separate from the arenas
/// themselves (which live on `Gc`).
pub struct GcState {
    // Objects marked gray, awaiting traversal.
    pub gray: Vec<GrayItem>,
    // Objects to re-traverse during the atomic phase (barrier hits, threads).
    pub grayagain: Vec<GrayItem>,
    // Weak tables found during traversal; cleared after the white flip.
    pub weak_tables: Vec<GcRef<Table>>,
    // Estimated bytes currently allocated (maintained via `track_alloc`).
    pub total_bytes: usize,
    // Allocation level at which the next automatic step triggers.
    pub gc_threshold: usize,
    // Pause tuning knob (percent); see `DEFAULT_GC_PAUSE`.
    pub gc_pause: u32,
    // Step-multiplier tuning knob (percent); see `DEFAULT_GC_STEPMUL`.
    pub gc_stepmul: u32,
    // Re-entrancy guard: true while a collection step/cycle is executing.
    pub gc_running: bool,
    // Current phase of the cycle.
    pub phase: GcPhase,
    // Resume point for incremental sweeping.
    pub sweep_cursor: SweepCursor,
    // Estimate of live bytes computed at the end of the atomic phase.
    pub estimate: usize,
    // Accumulated work debt for automatic stepping.
    pub gc_debt: i64,
    // Dead userdata queued for `__gc` finalization (popped from the back).
    pub tmudata: Vec<GcRef<Userdata>>,
    // Monotonic sequence number assigned to userdata at allocation,
    // used to order finalization.
    pub ud_alloc_seq: u64,
    // Hard allocation cap; exceeding it makes `gc_check` return an error.
    pub alloc_limit: usize,
    // High-water mark of `total_bytes` (statistics only).
    pub max_bytes: usize,
}
impl GcState {
    /// Creates fresh GC bookkeeping with default tuning parameters and an
    /// effectively unlimited allocation cap.
    pub fn new() -> Self {
        let cursor = SweepCursor {
            arena_index: 0,
            slot_position: 0,
        };
        Self {
            gray: Vec::new(),
            grayagain: Vec::new(),
            weak_tables: Vec::new(),
            total_bytes: 0,
            gc_threshold: INITIAL_GC_THRESHOLD,
            gc_pause: DEFAULT_GC_PAUSE,
            gc_stepmul: DEFAULT_GC_STEPMUL,
            gc_running: false,
            phase: GcPhase::Pause,
            sweep_cursor: cursor,
            estimate: 0,
            gc_debt: 0,
            tmudata: Vec::new(),
            ud_alloc_seq: 0,
            alloc_limit: usize::MAX,
            max_bytes: 0,
        }
    }

    /// Records `size` freshly allocated bytes and refreshes the
    /// high-water mark.
    #[inline]
    pub fn track_alloc(&mut self, size: usize) {
        self.total_bytes += size;
        self.max_bytes = self.max_bytes.max(self.total_bytes);
    }
}
impl Default for GcState {
    /// Equivalent to [`GcState::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl Gc {
#[inline]
/// Marks a value reachable, dispatching to the marker for its arena.
/// Non-collectable values (nil, booleans, numbers, light userdata) are
/// ignored.
pub fn mark_value(&mut self, val: Val) {
    match val {
        Val::Nil | Val::Bool(_) | Val::Num(_) | Val::LightUserdata(_) => {}
        Val::Str(s) => self.mark_string(s),
        Val::Table(t) => self.mark_table(t),
        Val::Function(f) => self.mark_closure(f),
        Val::Userdata(u) => self.mark_userdata(u),
        Val::Thread(t) => self.mark_thread(t),
    }
}
#[inline]
/// Marks a string. Strings have no children, so a white string is painted
/// black directly without ever visiting the gray list.
fn mark_string(&mut self, r: GcRef<LuaString>) {
    let is_white = self.string_arena.color(r).is_some_and(|c| c.is_white());
    if is_white {
        self.string_arena.set_color(r, Color::Black);
    }
}
#[inline]
/// Marks a table: a white table turns gray and is queued for traversal.
fn mark_table(&mut self, r: GcRef<Table>) {
    if self.tables.color(r).is_some_and(|c| c.is_white()) {
        self.tables.set_color(r, Color::Gray);
        self.gc_state.gray.push(GrayItem::Table(r));
    }
}
#[inline]
/// Marks a closure: a white closure turns gray and is queued for traversal.
fn mark_closure(&mut self, r: GcRef<Closure>) {
    if self.closures.color(r).is_some_and(|c| c.is_white()) {
        self.closures.set_color(r, Color::Gray);
        self.gc_state.gray.push(GrayItem::Closure(r));
    }
}
#[inline]
/// Marks a coroutine thread: a white thread turns gray and is queued.
fn mark_thread(&mut self, r: GcRef<LuaThread>) {
    if self.threads.color(r).is_some_and(|c| c.is_white()) {
        self.threads.set_color(r, Color::Gray);
        self.gc_state.gray.push(GrayItem::Thread(r));
    }
}
/// Marks a userdata black immediately (it has no arbitrary children) and
/// marks its metatable and environment table, if any.
fn mark_userdata(&mut self, r: GcRef<Userdata>) {
    if let Some(color) = self.userdata.color(r)
        && color.is_white()
    {
        // Copy the two table refs out first so the immutable borrow of
        // `self.userdata` ends before we recolor and mark children.
        let (mt, env) = {
            if let Some(ud) = self.userdata.get(r) {
                (ud.metatable(), ud.env())
            } else {
                return;
            }
        };
        self.userdata.set_color(r, Color::Black);
        if let Some(mt) = mt {
            self.mark_table(mt);
        }
        if let Some(env) = env {
            self.mark_table(env);
        }
    }
}
#[inline]
/// Marks an upvalue black; if it is closed, also marks the captured value.
/// Open upvalues point into a live thread stack, which is marked when the
/// owning thread is traversed, so only the closed value matters here.
fn mark_upvalue(&mut self, r: GcRef<Upvalue>) {
    if let Some(color) = self.upvalues.color(r)
        && color.is_white()
    {
        // Copy the closed value out so the borrow of `self.upvalues` ends
        // before recoloring and recursive marking.
        let closed_val = {
            if let Some(uv) = self.upvalues.get(r) {
                match &uv.state {
                    UpvalueState::Closed { value } => Some(*value),
                    UpvalueState::Open { .. } => None,
                }
            } else {
                return;
            }
        };
        self.upvalues.set_color(r, Color::Black);
        if let Some(val) = closed_val {
            self.mark_value(val);
        }
    }
}
/// Marks GC-internal roots: the per-type metatables and the cached
/// tag-method name strings.
fn mark_gc_roots(&mut self) {
    // Copy the (Copy) root arrays into locals so `self` can be borrowed
    // mutably by the markers below.
    let metatables = self.type_metatables;
    let names = self.tm_names;
    for mt in metatables.into_iter().flatten() {
        self.mark_table(mt);
    }
    for name in names.into_iter().flatten() {
        self.mark_string(name);
    }
}
/// Traverses a gray table: marks its metatable and its non-weak keys and
/// values, and registers it for weak-entry clearing if `__mode` requests
/// weakness.
fn traverse_table(&mut self, r: GcRef<Table>) {
    // Snapshot metatable, sizes and weak mode first; the immutable borrow
    // of the table must end before recoloring and child marking.
    let (metatable, array_len, hash_count, weak_keys, weak_values) = {
        let Some(table) = self.tables.get(r) else {
            return;
        };
        let mt = table.metatable();
        let (wk, wv) = self.check_weak_mode(mt);
        (mt, table.array_len(), table.hash_node_count(), wk, wv)
    };
    self.tables.set_color(r, Color::Black);
    if let Some(mt) = metatable {
        self.mark_table(mt);
    }
    let is_weak = weak_keys || weak_values;
    if is_weak {
        // Remembered so the atomic phase can clear dead entries.
        self.gc_state.weak_tables.push(r);
    }
    if weak_keys && weak_values {
        // Fully weak table: neither keys nor values keep anything alive.
        return;
    }
    if !weak_values {
        for i in 0..array_len {
            // Re-fetch each element: marking may touch the arenas, so no
            // borrow of the table can be held across `mark_value`.
            let val = self.tables.get(r).and_then(|t| t.array_get(i));
            if let Some(val) = val {
                self.mark_value(val);
            }
        }
    }
    for i in 0..hash_count {
        let kv = self.tables.get(r).and_then(|t| t.hash_node_kv(i));
        let Some((key, val)) = kv else { continue };
        if val.is_nil() {
            // Empty/dead node; its key does not keep anything alive.
            continue;
        }
        if !weak_keys {
            self.mark_value(key);
        }
        if !weak_values {
            self.mark_value(val);
        }
    }
}
/// Reads a metatable's `__mode` string and returns `(weak_keys, weak_values)`
/// — i.e. whether it contains `'k'` and `'v'`. Returns `(false, false)`
/// when there is no metatable, no `__mode` field, or it is not a string.
fn check_weak_mode(&self, mt: Option<GcRef<Table>>) -> (bool, bool) {
    let Some(mt_ref) = mt else {
        return (false, false);
    };
    let Some(mt) = self.tables.get(mt_ref) else {
        return (false, false);
    };
    if let Some(mode_str_ref) = self.find_interned_string(b"__mode") {
        let mode_val = mt.get(Val::Str(mode_str_ref), &self.string_arena);
        if let Val::Str(s) = mode_val
            && let Some(ls) = self.string_arena.get(s)
        {
            let data = ls.data();
            return (data.contains(&b'k'), data.contains(&b'v'));
        }
    }
    (false, false)
}
/// Finds an interned string by byte content: checks the cached tag-method
/// names first, then falls back to a full scan of the string arena.
/// NOTE(review): the fallback is O(total strings) per call; a hash lookup
/// through the intern table would avoid the scan — verify feasibility.
fn find_interned_string(&self, name: &[u8]) -> Option<GcRef<LuaString>> {
    for tm_name in &self.tm_names {
        if let Some(r) = tm_name
            && let Some(ls) = self.string_arena.get(*r)
            && ls.data() == name
        {
            return Some(*r);
        }
    }
    for (r, ls, _) in &self.string_arena {
        if ls.data() == name {
            return Some(r);
        }
    }
    None
}
/// Traverses a gray closure: marks its environment, its upvalues (Lua) or
/// captured values (Rust), and — for Lua closures — the constants of its
/// entire prototype tree.
fn traverse_closure(&mut self, r: GcRef<Closure>) {
    // Owned snapshot of what needs marking, so the borrow of
    // `self.closures` ends before marking begins.
    enum ClosureData {
        Lua {
            env: GcRef<Table>,
            upvalue_count: usize,
            proto: crate::vm::proto::ProtoRef,
        },
        Rust {
            env: Option<GcRef<Table>>,
            upvalue_count: usize,
        },
    }
    let data = {
        let Some(cl) = self.closures.get(r) else {
            return;
        };
        match cl {
            Closure::Lua(lc) => ClosureData::Lua {
                env: lc.env,
                upvalue_count: lc.upvalues.len(),
                proto: crate::vm::proto::ProtoRef::clone(&lc.proto),
            },
            Closure::Rust(rc) => ClosureData::Rust {
                env: rc.env,
                upvalue_count: rc.upvalues.len(),
            },
        }
    };
    self.closures.set_color(r, Color::Black);
    match data {
        ClosureData::Lua {
            env,
            upvalue_count,
            proto,
        } => {
            self.mark_table(env);
            for i in 0..upvalue_count {
                // Re-fetch per upvalue: marking may mutate the arenas.
                let uv_ref = self.closures.get(r).and_then(|cl| match cl {
                    Closure::Lua(lc) => lc.upvalues.get(i).copied(),
                    Closure::Rust(_) => None,
                });
                if let Some(uv_ref) = uv_ref {
                    self.mark_upvalue(uv_ref);
                }
            }
            self.mark_proto_constants(&proto);
        }
        ClosureData::Rust { env, upvalue_count } => {
            if let Some(env) = env {
                self.mark_table(env);
            }
            for i in 0..upvalue_count {
                // Rust closures capture plain values rather than upvalue refs.
                let val = self.closures.get(r).and_then(|cl| match cl {
                    Closure::Rust(rc) => rc.upvalues.get(i).copied(),
                    Closure::Lua(_) => None,
                });
                if let Some(val) = val {
                    self.mark_value(val);
                }
            }
        }
    }
}
/// Recursively marks every constant in `proto` and in all of its nested
/// prototypes.
fn mark_proto_constants(&mut self, proto: &Proto) {
    for &constant in &proto.constants {
        self.mark_value(constant);
    }
    for child in &proto.protos {
        self.mark_proto_constants(child);
    }
}
/// Traverses a coroutine thread: its globals table, hook function, live
/// stack slots, and open/suspended upvalues. The thread is then queued on
/// `grayagain` because its stack can mutate before the atomic phase.
fn traverse_thread(&mut self, r: GcRef<LuaThread>) {
    // Snapshot counts and refs so the borrow of `self.threads` ends
    // before recoloring and marking.
    let (stack_top, open_uv_len, suspended_uv_len, thread_global, hook_func) = {
        if let Some(thread) = self.threads.get(r) {
            (
                thread.top.min(thread.stack.len()),
                thread.open_upvalues.len(),
                thread.suspended_upvals.len(),
                thread.global,
                thread.hook.hook_func,
            )
        } else {
            return;
        }
    };
    self.threads.set_color(r, Color::Black);
    self.mark_table(thread_global);
    self.mark_value(hook_func);
    for i in 0..stack_top {
        // Re-fetch per slot: marking may touch the arenas.
        let val = self.threads.get(r).map(|t| t.stack[i]);
        if let Some(val) = val {
            self.mark_value(val);
        }
    }
    for i in 0..open_uv_len {
        let uv_ref = self.threads.get(r).map(|t| t.open_upvalues[i]);
        if let Some(uv_ref) = uv_ref {
            self.mark_upvalue(uv_ref);
        }
    }
    for i in 0..suspended_uv_len {
        let uv_ref = self.threads.get(r).map(|t| t.suspended_upvals[i].0);
        if let Some(uv_ref) = uv_ref {
            self.mark_upvalue(uv_ref);
        }
    }
    // Re-traverse at the atomic point to catch later stack writes.
    self.gc_state.grayagain.push(GrayItem::Thread(r));
}
/// Pops one item off the gray list and traverses it, returning its
/// approximate size so the incremental stepper can budget work.
/// `MainThread` cannot be traversed here (it lives on `LuaState`), so it
/// is bounced back to the caller as `NeedMainThread`.
pub fn propagate_one(&mut self) -> PropagateResult {
    let Some(item) = self.gc_state.gray.pop() else {
        return PropagateResult::Empty;
    };
    match item {
        GrayItem::Table(r) => {
            // Estimate the footprint before traversal mutates colors.
            let size = if let Some(t) = self.tables.get(r) {
                EST_TABLE_SIZE + t.array_slice().len() * 16 + t.hash_size() as usize * 32
            } else {
                EST_TABLE_SIZE
            };
            self.traverse_table(r);
            PropagateResult::Done(size)
        }
        GrayItem::Closure(r) => {
            let size = if let Some(cl) = self.closures.get(r) {
                match cl {
                    Closure::Lua(lc) => EST_CLOSURE_SIZE + lc.upvalues.len() * 8,
                    Closure::Rust(rc) => EST_CLOSURE_SIZE + rc.upvalues.len() * 16,
                }
            } else {
                EST_CLOSURE_SIZE
            };
            self.traverse_closure(r);
            PropagateResult::Done(size)
        }
        GrayItem::Thread(r) => {
            let size = if let Some(th) = self.threads.get(r) {
                EST_THREAD_SIZE + th.stack.len() * 16
            } else {
                EST_THREAD_SIZE
            };
            self.traverse_thread(r);
            PropagateResult::Done(size)
        }
        GrayItem::MainThread => PropagateResult::NeedMainThread,
    }
}
/// Drains the gray list completely, returning the accumulated traversal
/// cost. `NeedMainThread` is ignored here: in the atomic phase (the only
/// caller) the main thread has already been re-traversed by `LuaState`.
fn propagate_all(&mut self) -> usize {
    let mut work_done = 0usize;
    loop {
        let cost = match self.propagate_one() {
            PropagateResult::Empty => return work_done,
            PropagateResult::NeedMainThread => 0,
            PropagateResult::Done(cost) => cost,
        };
        work_done += cost;
    }
}
/// The atomic phase: finishes marking in one indivisible step, separates
/// finalizable userdata, flips the white color, clears weak tables, and
/// transitions to string sweeping.
fn atomic(&mut self) {
    // Re-traverse everything deferred to `grayagain`: tables hit by the
    // backward barrier, and threads (whose stacks may have changed).
    let grayagain = std::mem::take(&mut self.gc_state.grayagain);
    for item in grayagain {
        match item {
            GrayItem::Thread(r) => {
                // Inline re-traversal of the thread; unlike
                // `traverse_thread` this must NOT push onto `grayagain`.
                let (stack_top, open_uv_len, suspended_uv_len, thread_global) = {
                    if let Some(thread) = self.threads.get(r) {
                        (
                            thread.top.min(thread.stack.len()),
                            thread.open_upvalues.len(),
                            thread.suspended_upvals.len(),
                            thread.global,
                        )
                    } else {
                        continue;
                    }
                };
                self.threads.set_color(r, Color::Black);
                self.mark_table(thread_global);
                for i in 0..stack_top {
                    let val = self.threads.get(r).map(|t| t.stack[i]);
                    if let Some(val) = val {
                        self.mark_value(val);
                    }
                }
                for i in 0..open_uv_len {
                    let uv_ref = self.threads.get(r).map(|t| t.open_upvalues[i]);
                    if let Some(uv_ref) = uv_ref {
                        self.mark_upvalue(uv_ref);
                    }
                }
                for i in 0..suspended_uv_len {
                    let uv_ref = self.threads.get(r).map(|t| t.suspended_upvals[i].0);
                    if let Some(uv_ref) = uv_ref {
                        self.mark_upvalue(uv_ref);
                    }
                }
            }
            GrayItem::Table(r) => {
                self.tables.set_color(r, Color::Gray);
                self.traverse_table(r);
            }
            GrayItem::Closure(r) => {
                self.closures.set_color(r, Color::Gray);
                self.traverse_closure(r);
            }
            GrayItem::MainThread => {
                // Already re-scanned by the caller via
                // `traverse_main_thread_for_atomic` just before `atomic`.
            }
        }
    }
    self.propagate_all();
    // Resurrect dead finalizable userdata and mark what they reference,
    // so they stay valid until their `__gc` has run.
    let ud_size = self.separate_userdata();
    self.mark_tmudata();
    self.propagate_all();
    // Flip whites: anything still carrying the old white is now dead.
    self.current_white = self.current_white.other_white();
    self.clear_weak_tables();
    self.gc_state.estimate = self.gc_state.total_bytes.saturating_sub(ud_size);
    self.gc_state.sweep_cursor = SweepCursor {
        arena_index: 0,
        slot_position: 0,
    };
    self.gc_state.phase = GcPhase::SweepString;
}
/// Separates dead userdata that need finalization, returning the
/// estimated bytes they occupy.
///
/// Called from `atomic` *before* the white flip, so unreached (dead)
/// objects still carry the current white. Each dead, not-yet-finalized
/// userdata whose metatable defines `__gc` is painted gray (keeping it
/// alive through the coming sweep), flagged as finalized, and queued on
/// `tmudata` in allocation order; `call_gc_finalizer` pops from the back,
/// so finalizers run newest-first.
fn separate_userdata(&mut self) -> usize {
    // The white flip happens later in `atomic`, so "dead" here means
    // still carrying the *current* white.
    let dead_white = self.current_white;
    let mut dead_size = 0usize;
    let mut to_finalize = Vec::new();
    for (r, ud, color) in &self.userdata {
        if color != dead_white || ud.finalized() {
            continue;
        }
        if let Some(mt_ref) = ud.metatable()
            && self.has_gc_metamethod(mt_ref)
        {
            to_finalize.push(r);
            dead_size += EST_USERDATA_SIZE;
        }
    }
    // Stable sort by allocation sequence; `sort_by_key` replaces the
    // previous hand-written comparator that extracted the key twice.
    to_finalize.sort_by_key(|r| self.userdata.get(*r).map_or(0, Userdata::alloc_seq));
    for r in &to_finalize {
        if let Some(ud) = self.userdata.get_mut(*r) {
            ud.set_finalized(true);
        }
        // Gray = resurrected until the finalizer has run.
        self.userdata.set_color(*r, Color::Gray);
        self.gc_state.tmudata.push(*r);
    }
    dead_size
}
/// Returns whether the given metatable defines a non-nil `__gc` field.
fn has_gc_metamethod(&self, mt_ref: GcRef<Table>) -> bool {
    let Some(gc_name) = self.find_interned_string(b"__gc") else {
        // `__gc` was never interned, so no table can reference it.
        return false;
    };
    let Some(mt) = self.tables.get(mt_ref) else {
        return false;
    };
    !mt.get(Val::Str(gc_name), &self.string_arena).is_nil()
}
/// Marks every userdata queued for finalization, plus its metatable and
/// environment, so everything a `__gc` handler may touch stays alive.
fn mark_tmudata(&mut self) {
    // Clone the queue: the markers below need `&mut self`.
    let tmudata: Vec<GcRef<Userdata>> = self.gc_state.tmudata.clone();
    for r in &tmudata {
        let (mt, env) = {
            if let Some(ud) = self.userdata.get(*r) {
                (ud.metatable(), ud.env())
            } else {
                continue;
            }
        };
        self.userdata.set_color(*r, Color::Black);
        if let Some(mt) = mt {
            self.mark_table(mt);
        }
        if let Some(env) = env {
            self.mark_table(env);
        }
    }
}
/// Post-flip pass over tables registered as weak during traversal:
/// keeps their strings alive, then nils out entries whose weak key or
/// value died this cycle.
fn clear_weak_tables(&mut self) {
    let weak_refs = std::mem::take(&mut self.gc_state.weak_tables);
    // After the flip: `other_white` is the old white (dead objects),
    // `current_white` is the new white (survives the coming sweep).
    let other_white = self.current_white.other_white();
    let current_white = self.current_white;
    // Pass 1: repaint strings referenced from weak tables with the live
    // white so the string sweep cannot free them while the table still
    // holds them (strings are never treated as dead weak references —
    // see `is_dead_collectable`).
    for &table_ref in &weak_refs {
        let Some(table) = self.tables.get(table_ref) else {
            continue;
        };
        let array_len = table.array_len();
        let hash_count = table.hash_node_count();
        for i in 0..array_len {
            if let Some(Val::Str(s)) = table.array_get(i) {
                self.string_arena.set_color(s, current_white);
            }
        }
        for i in 0..hash_count {
            if let Some((key, val)) = table.hash_node_kv(i) {
                if let Val::Str(s) = key {
                    self.string_arena.set_color(s, current_white);
                }
                if let Val::Str(s) = val {
                    self.string_arena.set_color(s, current_white);
                }
            }
        }
    }
    // Pass 2: find dead entries per table, then nil them out in one
    // mutable borrow at the end.
    for table_ref in weak_refs {
        let (weak_keys, weak_values) = {
            if let Some(table) = self.tables.get(table_ref) {
                self.get_weak_mode(table.metatable())
            } else {
                continue;
            }
        };
        if !weak_keys && !weak_values {
            continue;
        }
        // The array part stores only values, so it is affected only by
        // weak-values mode.
        let dead_array_indices: Vec<usize> = if weak_values {
            if let Some(table) = self.tables.get(table_ref) {
                table
                    .array_slice()
                    .iter()
                    .enumerate()
                    .filter(|(_, val)| self.is_dead_collectable(**val, other_white, false))
                    .map(|(i, _)| i)
                    .collect()
            } else {
                continue;
            }
        } else {
            Vec::new()
        };
        let dead_hash_indices: Vec<usize> = {
            if let Some(table) = self.tables.get(table_ref) {
                table.find_dead_hash_entries(weak_keys, weak_values, |val, is_key| {
                    self.is_dead_collectable(val, other_white, is_key)
                })
            } else {
                continue;
            }
        };
        if let Some(table) = self.tables.get_mut(table_ref) {
            for idx in &dead_array_indices {
                table.nil_array_entry(*idx);
            }
            table.nil_hash_entries(&dead_hash_indices);
        }
    }
}
/// Returns whether `val` is a collectable object that died this cycle
/// (i.e. still carries the old white). Strings never count as dead here:
/// `clear_weak_tables` keeps weak-table strings alive instead. A
/// finalized userdata in value position also counts as dead so weak
/// values drop resurrected objects.
fn is_dead_collectable(&self, val: Val, other_white: Color, is_key: bool) -> bool {
    match val {
        Val::Table(r) => self.tables.color(r) == Some(other_white),
        Val::Function(r) => self.closures.color(r) == Some(other_white),
        Val::Thread(r) => self.threads.color(r) == Some(other_white),
        Val::Userdata(r) => {
            if self.userdata.color(r) == Some(other_white) {
                return true;
            }
            !is_key && self.userdata.get(r).is_some_and(|ud| ud.finalized())
        }
        // Strings and non-collectable values are never weak-dead.
        _ => false,
    }
}
fn get_weak_mode(&self, mt: Option<GcRef<Table>>) -> (bool, bool) {
let Some(mt_ref) = mt else {
return (false, false);
};
let Some(mt) = self.tables.get(mt_ref) else {
return (false, false);
};
if let Some(mode_str_ref) = self.find_interned_string(b"__mode") {
let mode_val = mt.get(Val::Str(mode_str_ref), &self.string_arena);
if let Val::Str(s) = mode_val
&& let Some(ls) = self.string_arena.get(s)
{
let data = ls.data();
return (data.contains(&b'k'), data.contains(&b'v'));
}
}
(false, false)
}
/// One incremental step of sweeping the string arena: frees up to
/// `GCSWEEPMAX` slots of dead (old-white) strings and repaints survivors
/// with the new white. When the arena is fully swept, purges the intern
/// table and advances to the object-sweep phase. Returns the step cost.
fn sweep_strings_step(&mut self) -> usize {
    let dead = self.current_white.other_white();
    let new_white = self.current_white;
    let start = self.gc_state.sweep_cursor.slot_position;
    let (freed, next_pos, is_done) = self
        .string_arena
        .sweep_partial(dead, new_white, start, GCSWEEPMAX);
    let freed_bytes = freed as usize * EST_STRING_SIZE;
    self.gc_state.total_bytes = self.gc_state.total_bytes.saturating_sub(freed_bytes);
    self.gc_state.estimate = self.gc_state.estimate.saturating_sub(freed_bytes);
    if is_done {
        // Drop intern-table entries that now point at freed slots.
        self.strings.sweep_dead(&self.string_arena);
        self.gc_state.phase = GcPhase::Sweep;
        self.gc_state.sweep_cursor = SweepCursor {
            arena_index: 0,
            slot_position: 0,
        };
    } else {
        self.gc_state.sweep_cursor.slot_position = next_pos;
    }
    GCSWEEPCOST
}
/// One incremental step of sweeping the non-string arenas, visited in a
/// fixed order: 0 = tables, 1 = closures, 2 = upvalues, 3 = userdata,
/// 4 (and beyond) = threads. Frees dead (old-white) slots, repaints
/// survivors with the new white, and advances to `Finalize` once the
/// last arena completes. Returns the step cost.
fn sweep_other_step(&mut self) -> usize {
    let dead = self.current_white.other_white();
    let new_white = self.current_white;
    let arena_idx = self.gc_state.sweep_cursor.arena_index;
    let start = self.gc_state.sweep_cursor.slot_position;
    let (freed, est_size, next_pos, is_arena_done) = match arena_idx {
        0 => {
            let (f, np, done) = self
                .tables
                .sweep_partial(dead, new_white, start, GCSWEEPMAX);
            (f, EST_TABLE_SIZE, np, done)
        }
        1 => {
            let (f, np, done) = self
                .closures
                .sweep_partial(dead, new_white, start, GCSWEEPMAX);
            (f, EST_CLOSURE_SIZE, np, done)
        }
        2 => {
            let (f, np, done) = self
                .upvalues
                .sweep_partial(dead, new_white, start, GCSWEEPMAX);
            (f, EST_UPVALUE_SIZE, np, done)
        }
        3 => {
            let (f, np, done) = self
                .userdata
                .sweep_partial(dead, new_white, start, GCSWEEPMAX);
            (f, EST_USERDATA_SIZE, np, done)
        }
        _ => {
            let (f, np, done) = self
                .threads
                .sweep_partial(dead, new_white, start, GCSWEEPMAX);
            (f, EST_THREAD_SIZE, np, done)
        }
    };
    let freed_bytes = freed as usize * est_size;
    self.gc_state.total_bytes = self.gc_state.total_bytes.saturating_sub(freed_bytes);
    self.gc_state.estimate = self.gc_state.estimate.saturating_sub(freed_bytes);
    if is_arena_done {
        if arena_idx >= 4 {
            // Threads were the last arena; move on to finalizers.
            self.gc_state.phase = GcPhase::Finalize;
        } else {
            self.gc_state.sweep_cursor.arena_index = arena_idx + 1;
            self.gc_state.sweep_cursor.slot_position = 0;
        }
    } else {
        self.gc_state.sweep_cursor.slot_position = next_pos;
    }
    // Flat cost per step regardless of how many slots were actually swept.
    GCSWEEPMAX as usize * GCSWEEPCOST
}
/// Backward write barrier: a black table that has just been mutated is
/// pushed back to gray (via `grayagain`) so the atomic phase re-traverses
/// it and picks up the newly stored references.
pub fn barrier_back(&mut self, parent: GcRef<Table>) {
    // Barriers only matter while marking is in progress.
    if self.gc_state.phase != GcPhase::Propagate {
        return;
    }
    if self.tables.color(parent).is_some_and(|c| c.is_black()) {
        self.tables.set_color(parent, Color::Gray);
        self.gc_state.grayagain.push(GrayItem::Table(parent));
    }
}
/// Forward write barrier: when a black parent stores a white child during
/// propagation, mark the child immediately so the tricolor invariant
/// (black never points to white) is preserved.
pub fn barrier_forward_val(&mut self, parent_color: Color, child: Val) {
    if self.gc_state.phase != GcPhase::Propagate {
        return;
    }
    if !parent_color.is_black() {
        return;
    }
    let child_is_white = match child {
        Val::Str(r) => self.string_arena.color(r) == Some(self.current_white),
        Val::Table(r) => self.tables.color(r) == Some(self.current_white),
        Val::Function(r) => self.closures.color(r) == Some(self.current_white),
        Val::Userdata(r) => self.userdata.color(r) == Some(self.current_white),
        Val::Thread(r) => self.threads.color(r) == Some(self.current_white),
        // Non-collectable values need no barrier.
        _ => false,
    };
    if child_is_white {
        self.mark_value(child);
    }
}
/// Rough estimate of live heap usage: per-arena object count times the
/// per-object size constant for that arena.
pub fn estimate_memory(&self) -> usize {
    let tallies = [
        (self.string_arena.len() as usize, EST_STRING_SIZE),
        (self.tables.len() as usize, EST_TABLE_SIZE),
        (self.closures.len() as usize, EST_CLOSURE_SIZE),
        (self.upvalues.len() as usize, EST_UPVALUE_SIZE),
        (self.userdata.len() as usize, EST_USERDATA_SIZE),
        (self.threads.len() as usize, EST_THREAD_SIZE),
    ];
    tallies.iter().map(|&(count, size)| count * size).sum()
}
/// Returns whether an automatic collection step should run: allocation
/// has reached the threshold and no collection is already in progress.
pub fn should_collect(&self) -> bool {
    if self.gc_state.gc_running {
        return false;
    }
    self.gc_state.total_bytes >= self.gc_state.gc_threshold
}
/// Recomputes the collection threshold after a completed cycle:
/// threshold = max(estimate, live) * gc_pause / 100, floored at 4096.
/// Also resets `total_bytes` to the freshly computed live estimate.
pub fn update_threshold(&mut self) {
    let live = self.estimate_memory();
    let estimate = self.gc_state.estimate.max(live);
    self.gc_state.total_bytes = live;
    self.gc_state.gc_threshold = (estimate / 100)
        .saturating_mul(self.gc_state.gc_pause as usize)
        .max(4096);
}
}
impl LuaState {
/// Advances the collector by one step of its current phase and returns
/// the step's approximate cost in byte-equivalent units.
///
/// # Errors
/// Propagates any error raised by a `__gc` finalizer.
pub fn gc_singlestep(&mut self) -> LuaResult<usize> {
    let result = match self.gc.gc_state.phase {
        GcPhase::Pause => {
            // Start a new cycle: reset mark state and seed the roots.
            self.gc.gc_state.gray.clear();
            self.gc.gc_state.grayagain.clear();
            self.gc.gc_state.weak_tables.clear();
            self.mark_roots();
            self.gc.gc_state.phase = GcPhase::Propagate;
            0
        }
        GcPhase::Propagate => {
            match self.gc.propagate_one() {
                PropagateResult::Done(cost) => cost,
                PropagateResult::NeedMainThread => {
                    // The main thread lives on `self`, not in an arena, so
                    // only `LuaState` can traverse it.
                    self.traverse_main_thread()
                }
                PropagateResult::Empty => {
                    // Gray list drained: re-scan the main thread, then run
                    // the atomic phase to finish marking.
                    self.traverse_main_thread_for_atomic();
                    self.gc.atomic();
                    0
                }
            }
        }
        GcPhase::SweepString => self.gc.sweep_strings_step(),
        GcPhase::Sweep => self.gc.sweep_other_step(),
        GcPhase::Finalize => {
            if self.call_gc_finalizer()? {
                if self.gc.gc_state.estimate > GCFINALIZECOST {
                    self.gc.gc_state.estimate -= GCFINALIZECOST;
                }
                GCFINALIZECOST
            } else {
                // No finalizers left: the cycle is complete.
                self.gc.gc_state.phase = GcPhase::Pause;
                self.gc.gc_state.gc_debt = 0;
                0
            }
        }
    };
    Ok(result)
}
/// Runs collector steps until `budget` cost units are spent or the cycle
/// completes. Returns `Ok(true)` when a full cycle finished. Re-entrant
/// calls (while a collection is already running) are no-ops returning
/// `Ok(false)`.
///
/// # Errors
/// Propagates errors raised by `__gc` finalizers.
pub fn gc_step(&mut self, mut budget: i64) -> LuaResult<bool> {
    if self.gc.gc_state.gc_running {
        return Ok(false);
    }
    self.gc.gc_state.gc_running = true;
    // Closure so `gc_running` is always cleared, even on an early `?`.
    let result = (|| {
        loop {
            let cost = self.gc_singlestep()?;
            budget -= cost as i64;
            if self.gc.gc_state.phase == GcPhase::Pause {
                // Cycle finished: recompute the trigger threshold.
                self.gc.update_threshold();
                return Ok(true);
            }
            if budget <= 0 {
                break;
            }
        }
        // Budget exhausted mid-cycle: adjust threshold/debt so the next
        // allocation burst resumes the collection.
        if self.gc.gc_state.gc_debt < GCSTEPSIZE as i64 {
            self.gc.gc_state.gc_threshold =
                self.gc.gc_state.total_bytes.saturating_add(GCSTEPSIZE);
        } else {
            self.gc.gc_state.gc_debt -= GCSTEPSIZE as i64;
            self.gc.gc_state.gc_threshold = self.gc.gc_state.total_bytes;
        }
        Ok(false)
    })();
    self.gc.gc_state.gc_running = false;
    result
}
/// Runs a complete collection cycle synchronously. A no-op if a
/// collection step is already in progress.
///
/// # Errors
/// Propagates errors raised by `__gc` finalizers.
pub fn full_gc(&mut self) -> LuaResult<()> {
    if self.gc.gc_state.gc_running {
        return Ok(());
    }
    self.gc.gc_state.gc_running = true;
    let result = self.full_gc_inner();
    // Clear the guard even when `full_gc_inner` errored.
    self.gc.gc_state.gc_running = false;
    result
}
fn full_gc_inner(&mut self) -> LuaResult<()> {
let phase = self.gc.gc_state.phase;
if phase == GcPhase::Pause || phase == GcPhase::Propagate {
self.gc.gc_state.gray.clear();
self.gc.gc_state.grayagain.clear();
self.gc.gc_state.weak_tables.clear();
self.gc.gc_state.phase = GcPhase::SweepString;
self.gc.gc_state.sweep_cursor = SweepCursor {
arena_index: 0,
slot_position: 0,
};
}
while self.gc.gc_state.phase != GcPhase::Pause
&& self.gc.gc_state.phase != GcPhase::Propagate
{
self.gc.gc_state.gc_running = false;
self.gc_singlestep()?;
self.gc.gc_state.gc_running = true;
}
self.gc.gc_state.gray.clear();
self.gc.gc_state.grayagain.clear();
self.gc.gc_state.weak_tables.clear();
self.mark_roots();
self.gc.gc_state.phase = GcPhase::Propagate;
while self.gc.gc_state.phase != GcPhase::Pause {
self.gc.gc_state.gc_running = false;
self.gc_singlestep()?;
self.gc.gc_state.gc_running = true;
}
self.gc.update_threshold();
Ok(())
}
/// Pops one userdata from the finalization queue and invokes its `__gc`
/// metamethod. Returns `Ok(false)` when the queue is empty and `Ok(true)`
/// after handling one entry (including entries whose `__gc` turned out to
/// be absent).
///
/// # Errors
/// Propagates an error raised inside the finalizer, after restoring the
/// saved call-stack position and stack top.
fn call_gc_finalizer(&mut self) -> LuaResult<bool> {
    let Some(ud_ref) = self.gc.gc_state.tmudata.pop() else {
        return Ok(false);
    };
    // Look up the `__gc` function on the userdata's metatable.
    let gc_fn = {
        let mt_ref = self
            .gc
            .userdata
            .get(ud_ref)
            .and_then(super::super::value::Userdata::metatable);
        if let Some(mt_ref) = mt_ref {
            if let Some(gc_name) = self.gc.find_interned_string(b"__gc") {
                if let Some(mt) = self.gc.tables.get(mt_ref) {
                    let v = mt.get(Val::Str(gc_name), &self.gc.string_arena);
                    if v.is_nil() { None } else { Some(v) }
                } else {
                    None
                }
            } else {
                None
            }
        } else {
            None
        }
    };
    let Some(gc_fn) = gc_fn else {
        return Ok(true);
    };
    // Push the threshold up so allocations inside the finalizer do not
    // immediately re-trigger a collection step.
    self.gc.gc_state.gc_threshold = self.gc.gc_state.total_bytes.saturating_add(GCSTEPSIZE);
    let saved_ci = self.ci;
    let saved_top = self.top;
    let base = self.top;
    self.push(gc_fn);
    self.push(Val::Userdata(ud_ref));
    let result = self.call_function(base, 0);
    if result.is_err() {
        // Unwind to the state before the finalizer call so the error can
        // be reported from a consistent VM state.
        self.ci = saved_ci;
        self.base = self.call_stack[saved_ci].base;
        self.top = saved_top;
    }
    result?;
    // Restore a pause-scaled threshold now that the finalizer is done.
    self.gc.gc_state.gc_threshold =
        self.gc.gc_state.total_bytes * self.gc.gc_state.gc_pause as usize / 100;
    Ok(true)
}
#[inline]
/// Cheap allocation-site hook: runs an automatic GC step once the
/// threshold is reached, then enforces the hard allocation cap.
///
/// The threshold test previously duplicated the `Gc::should_collect`
/// predicate inline; it now delegates so the two sites cannot drift.
///
/// # Errors
/// Returns `LuaError::Memory` when `total_bytes` exceeds `alloc_limit`;
/// also propagates errors raised by `__gc` finalizers during the step.
pub fn gc_check(&mut self) -> LuaResult<()> {
    if self.gc.should_collect() {
        self.gc_step_auto()?;
    }
    // Hard cap (configured via `alloc_limit`): fail outright.
    if self.gc.gc_state.total_bytes > self.gc.gc_state.alloc_limit {
        return Err(LuaError::Memory);
    }
    Ok(())
}
/// Performs an automatic collection step with a work budget scaled by the
/// `gc_stepmul` tuning knob, after updating the accumulated debt.
///
/// # Errors
/// Propagates errors raised by `__gc` finalizers.
pub fn gc_step_auto(&mut self) -> LuaResult<()> {
    let stepmul = i64::from(self.gc.gc_state.gc_stepmul);
    // stepmul == 0 means "unbounded step": effectively run to completion.
    let budget = if stepmul == 0 {
        i64::MAX / 2
    } else {
        (GCSTEPSIZE as i64 / 100) * stepmul
    };
    self.gc.gc_state.gc_debt +=
        self.gc.gc_state.total_bytes as i64 - self.gc.gc_state.gc_threshold as i64;
    self.gc_step(budget)?;
    Ok(())
}
/// Seeds the gray list with all GC roots at the start of a mark phase.
fn mark_roots(&mut self) {
    // The main thread is not arena-managed: push a sentinel so the
    // propagation loop asks `LuaState` to traverse it.
    self.gc.gc_state.gray.push(GrayItem::MainThread);
    let globals_table = self.global;
    let registry_table = self.registry;
    self.gc.mark_table(globals_table);
    self.gc.mark_table(registry_table);
    self.gc.mark_gc_roots();
}
/// Marks everything reachable from the main thread — live stack slots,
/// open upvalues, the pending error object, the hook function, and the
/// function slot of every active call frame — plus the same for each
/// saved (suspended) coroutine snapshot. Re-queues the main thread on
/// `grayagain` so the atomic phase catches later stack writes.
/// Returns an approximate traversal cost.
fn traverse_main_thread(&mut self) -> usize {
    let top = self.top;
    for i in 0..top {
        let val = self.stack[i];
        self.gc.mark_value(val);
    }
    // Clone so `self.gc` can be borrowed mutably inside the loop.
    let open_upvals: Vec<GcRef<Upvalue>> = self.open_upvalues.clone();
    for uv_ref in &open_upvals {
        self.gc.mark_upvalue(*uv_ref);
    }
    if let Some(err_val) = self.error_object {
        self.gc.mark_value(err_val);
    }
    let hook_func = self.hook.hook_func;
    self.gc.mark_value(hook_func);
    // Mark the function object of every active call frame.
    for ci_idx in 0..=self.ci {
        if ci_idx < self.call_stack.len() {
            let func_idx = self.call_stack[ci_idx].func;
            if func_idx < self.stack.len() {
                let func_val = self.stack[func_idx];
                self.gc.mark_value(func_val);
            }
        }
    }
    // Saved coroutine snapshots hold their own stacks and upvalues.
    for saved in &self.saved_threads {
        for i in 0..saved.top {
            if i < saved.stack.len() {
                self.gc.mark_value(saved.stack[i]);
            }
        }
        for uv_ref in &saved.open_upvalues {
            self.gc.mark_upvalue(*uv_ref);
        }
        for &(uv_ref, _) in &saved.suspended_upvals {
            self.gc.mark_upvalue(uv_ref);
        }
        if let Some(err_val) = saved.error_object {
            self.gc.mark_value(err_val);
        }
        self.gc.mark_value(saved.hook.hook_func);
        for ci_idx in 0..=saved.ci {
            if ci_idx < saved.call_stack.len() {
                let func_idx = saved.call_stack[ci_idx].func;
                if func_idx < saved.stack.len() {
                    self.gc.mark_value(saved.stack[func_idx]);
                }
            }
        }
    }
    // Re-scan at the atomic point to catch mutations after this step.
    self.gc.gc_state.grayagain.push(GrayItem::MainThread);
    EST_THREAD_SIZE + self.stack.len() * 16 + self.call_stack.len() * 40
}
/// Final re-scan of the main thread immediately before the atomic phase.
/// Identical to [`traverse_main_thread`] except that it neither re-queues
/// the thread on `grayagain` nor reports a cost.
/// NOTE(review): the two functions share ~50 duplicated lines; consider
/// extracting a common helper.
fn traverse_main_thread_for_atomic(&mut self) {
    let top = self.top;
    for i in 0..top {
        let val = self.stack[i];
        self.gc.mark_value(val);
    }
    // Clone so `self.gc` can be borrowed mutably inside the loop.
    let open_upvals: Vec<GcRef<Upvalue>> = self.open_upvalues.clone();
    for uv_ref in &open_upvals {
        self.gc.mark_upvalue(*uv_ref);
    }
    if let Some(err_val) = self.error_object {
        self.gc.mark_value(err_val);
    }
    let hook_func = self.hook.hook_func;
    self.gc.mark_value(hook_func);
    for ci_idx in 0..=self.ci {
        if ci_idx < self.call_stack.len() {
            let func_idx = self.call_stack[ci_idx].func;
            if func_idx < self.stack.len() {
                let func_val = self.stack[func_idx];
                self.gc.mark_value(func_val);
            }
        }
    }
    for saved in &self.saved_threads {
        for i in 0..saved.top {
            if i < saved.stack.len() {
                self.gc.mark_value(saved.stack[i]);
            }
        }
        for uv_ref in &saved.open_upvalues {
            self.gc.mark_upvalue(*uv_ref);
        }
        for &(uv_ref, _) in &saved.suspended_upvals {
            self.gc.mark_upvalue(uv_ref);
        }
        if let Some(err_val) = saved.error_object {
            self.gc.mark_value(err_val);
        }
        self.gc.mark_value(saved.hook.hook_func);
        for ci_idx in 0..=saved.ci {
            if ci_idx < saved.call_stack.len() {
                let func_idx = saved.call_stack[ci_idx].func;
                if func_idx < saved.stack.len() {
                    self.gc.mark_value(saved.stack[func_idx]);
                }
            }
        }
    }
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::vm::state::LuaState;

    // Smoke test: a full cycle on a fresh state must not panic or error.
    #[test]
    fn full_gc_does_not_crash() {
        let mut state = LuaState::new();
        let _ = state.full_gc();
    }

    // An allocated table with no references should be reclaimed.
    #[test]
    fn full_gc_collects_unreachable_table() {
        let mut state = LuaState::new();
        let _orphan = state.gc.alloc_table(Table::new());
        let initial_count = state.gc.tables.len();
        let _ = state.full_gc();
        let after_count = state.gc.tables.len();
        assert!(
            after_count < initial_count,
            "expected orphan table to be collected: before={initial_count}, after={after_count}"
        );
    }

    // A table stored in the globals table is a root and must survive.
    #[test]
    fn full_gc_preserves_reachable_table() {
        let mut state = LuaState::new();
        let t = state.gc.alloc_table(Table::new());
        let key = state.gc.intern_string(b"mytable");
        if let Some(global) = state.gc.tables.get_mut(state.global) {
            let _ = global.raw_set(Val::Str(key), Val::Table(t), &state.gc.string_arena);
        }
        let _ = state.full_gc();
        assert!(
            state.gc.tables.is_valid(t),
            "reachable table should survive GC"
        );
    }

    // Values on the live portion of the main stack are roots.
    #[test]
    fn full_gc_preserves_stack_values() {
        let mut state = LuaState::new();
        let s = state.gc.intern_string(b"hello gc");
        state.stack[1] = Val::Str(s);
        state.top = 2;
        let _ = state.full_gc();
        assert!(
            state.gc.string_arena.is_valid(s),
            "stack string should survive GC"
        );
    }

    // `update_threshold` enforces the 4096-byte floor after a cycle.
    #[test]
    fn gc_threshold_updates_after_collection() {
        let mut state = LuaState::new();
        state.gc.gc_state.gc_threshold = 0;
        state.gc.gc_state.total_bytes = 0;
        let _ = state.full_gc();
        assert!(
            state.gc.gc_state.gc_threshold >= 4096,
            "threshold should be at least 4096 after collection"
        );
    }

    // An interned string with no remaining references is swept.
    #[test]
    fn gc_collects_unreachable_string() {
        let mut state = LuaState::new();
        let _s = state.gc.intern_string(b"unique_orphan_string_12345");
        let initial = state.gc.string_arena.len();
        let _ = state.full_gc();
        let after = state.gc.string_arena.len();
        assert!(
            after < initial,
            "orphan string should be collected: before={initial}, after={after}"
        );
    }

    // A closure with no references is swept.
    #[test]
    fn gc_collects_unreachable_closure() {
        use crate::vm::closure::{Closure, RustClosure};
        let mut state = LuaState::new();
        let rc = RustClosure::new(|_| Ok(0), "orphan");
        let _orphan = state.gc.alloc_closure(Closure::Rust(rc));
        let initial = state.gc.closures.len();
        let _ = state.full_gc();
        let after = state.gc.closures.len();
        assert!(
            after < initial,
            "orphan closure should be collected: before={initial}, after={after}"
        );
    }

    // A closure referenced from the live stack survives.
    #[test]
    fn gc_preserves_closure_on_stack() {
        use crate::vm::closure::{Closure, RustClosure};
        let mut state = LuaState::new();
        let rc = RustClosure::new(|_| Ok(0), "on_stack");
        let cl = state.gc.alloc_closure(Closure::Rust(rc));
        state.stack[1] = Val::Function(cl);
        state.top = 2;
        let _ = state.full_gc();
        assert!(
            state.gc.closures.is_valid(cl),
            "stack closure should survive GC"
        );
    }
}