// aver/vm/types.rs
use crate::nan_value::NanValue;

/// A compiled function chunk — bytecode + metadata.
#[derive(Debug, Clone)]
pub struct FnChunk {
    /// Function name; also the key under which `CodeStore::add_function`
    /// registers this chunk in the name index.
    pub name: String,
    /// Number of parameters the function takes.
    pub arity: u8,
    /// Local slots to reserve per frame (params + local bindings).
    pub local_count: u16,
    /// Raw bytecode stream.
    pub code: Vec<u8>,
    /// Constant pool referenced by the bytecode.
    pub constants: Vec<NanValue>,
    /// Declared effects (e.g. `! [Console.print, Http]`). Empty for pure functions.
    pub effects: Vec<String>,
    /// Conservatively classified "thin" function: likely to return without
    /// creating any frame-local heap survivors or dirtying globals.
    pub thin: bool,
    /// Narrow wrapper-like helper that borrows the caller young region and
    /// skips ordinary-return handoff as long as it stays out of yard/handoff.
    pub parent_thin: bool,
}

/// Minimal call frame: a small record of plain-old-data metadata with no
/// closure/upvalue fields, so frames are cheap to push, pop, and duplicate.
///
/// Every field is a fixed-size primitive, so the frame is `Copy`: reused
/// TCO frames and frame snapshots never need a heap-touching clone.
/// (NOTE(review): an earlier comment claimed "16 bytes"; the fields sum to
/// more than that, so the claim was dropped rather than restated.)
#[derive(Debug, Clone, Copy)]
pub struct CallFrame {
    /// Index into `CodeStore::functions`.
    pub fn_id: u32,
    /// Current instruction pointer (byte offset into `FnChunk::code`).
    pub ip: u32,
    /// Base pointer: index into VM stack where this frame's locals start.
    pub bp: u32,
    /// Number of local slots (params + local bindings).
    pub local_count: u16,
    /// Arena length at function entry; allocations above this mark are local
    /// to the frame unless promoted on return/tail-call.
    pub arena_mark: u32,
    /// Yard length at function entry; reused TCO frames compact this suffix
    /// so loop-carried survivors do not accumulate across iterations.
    pub yard_base: u32,
    /// Current yard suffix owned by this frame iteration. Reused TCO frames
    /// may advance this mark so older carried survivors become the shared
    /// prefix for the next iteration.
    pub yard_mark: u32,
    /// Handoff length at function entry; ordinary returns compact this suffix
    /// so helper results can survive into the caller without polluting stable.
    pub handoff_mark: u32,
    /// Whether this frame stored a young-region value into globals.
    pub globals_dirty: bool,
    /// Whether ordinary returns introduced caller-yard survivors that should
    /// be pruned on the next tail-call boundary.
    pub yard_dirty: bool,
    /// Whether helper returns introduced handoff survivors that should be
    /// pruned on the next boundary of this frame.
    pub handoff_dirty: bool,
    /// Conservatively classified as cheap enough for a fast return path.
    pub thin: bool,
    /// Uses the caller young region as its allocation lane and skips
    /// ordinary-return handoff while it remains a pure wrapper frame.
    pub parent_thin: bool,
}

/// All compiled bytecode for a program.
#[derive(Debug, Clone)]
pub struct CodeStore {
    /// Chunks in registration order; a chunk's index here is the id
    /// returned by `add_function` (and stored in `CallFrame::fn_id`).
    pub functions: Vec<FnChunk>,
    /// Map from function name to index in `functions`.
    pub fn_index: std::collections::HashMap<String, u32>,
}

68impl Default for CodeStore {
69    fn default() -> Self {
70        Self::new()
71    }
72}
73
74impl CodeStore {
75    pub fn new() -> Self {
76        CodeStore {
77            functions: Vec::new(),
78            fn_index: std::collections::HashMap::new(),
79        }
80    }
81
82    pub fn add_function(&mut self, chunk: FnChunk) -> u32 {
83        let id = self.functions.len() as u32;
84        self.fn_index.insert(chunk.name.clone(), id);
85        self.functions.push(chunk);
86        id
87    }
88
89    pub fn get(&self, id: u32) -> &FnChunk {
90        &self.functions[id as usize]
91    }
92
93    pub fn find(&self, name: &str) -> Option<u32> {
94        self.fn_index.get(name).copied()
95    }
96}
97
/// VM runtime error.
#[derive(Debug)]
pub enum VmError {
    /// Generic runtime error with a human-readable message.
    Runtime(String),
    /// Type error (e.g. adding int + string).
    Type(String),
    /// Non-exhaustive match; the payload is the source line number.
    MatchFail(u16),
    /// Stack underflow (bug in compiler).
    StackUnderflow,
}

111impl std::fmt::Display for VmError {
112    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
113        match self {
114            VmError::Runtime(msg) => write!(f, "Runtime error: {}", msg),
115            VmError::Type(msg) => write!(f, "Type error: {}", msg),
116            VmError::MatchFail(line) => write!(f, "Non-exhaustive match at line {}", line),
117            VmError::StackUnderflow => write!(f, "Internal error: stack underflow"),
118        }
119    }
120}
121
// VmError carries no underlying source error, so the default `Error` methods suffice.
impl std::error::Error for VmError {}