//! aver/vm/types.rs — core VM data types: compiled function chunks,
//! call frames, the code store, and runtime errors.

1use crate::nan_value::NanValue;
2
3use super::symbol::VmSymbolTable;
4
/// A compiled function chunk — bytecode + metadata.
#[derive(Debug, Clone)]
pub struct FnChunk {
    /// Function name; used as the key in `CodeStore::fn_index`.
    pub name: String,
    /// Number of declared parameters.
    pub arity: u8,
    /// Total local slots (parameters + local bindings).
    pub local_count: u16,
    /// Raw bytecode; `CallFrame::ip` is a byte offset into this buffer.
    pub code: Vec<u8>,
    /// Constant pool referenced by the bytecode.
    pub constants: Vec<NanValue>,
    /// Declared effects (e.g. `! [Console.print, Http]`). Empty for pure functions.
    pub effects: Vec<u32>,
    /// Conservatively classified "thin" function: likely to return without
    /// creating any frame-local heap survivors or dirtying globals.
    pub thin: bool,
    /// Narrow wrapper-like helper that borrows the caller young region and
    /// skips ordinary-return handoff as long as it stays out of yard/handoff.
    pub parent_thin: bool,
    /// Leaf function: no CALL_KNOWN or CALL_VALUE in bytecode (only builtins
    /// and opcodes). When also thin and args-only (local_count == arity),
    /// can be called without pushing a CallFrame.
    pub leaf: bool,
    /// Source file path for this function (empty for synthetic/unknown).
    pub source_file: String,
    /// Run-length encoded line table: `(bytecode_offset, source_line)`.
    /// Sorted by offset. Lookup: find last entry where offset <= target ip.
    pub line_table: Vec<(u16, u16)>,
}
31
/// Minimal call frame: compact plain-data metadata, no closure/upvalue fields.
/// NOTE(review): previously documented as "16 bytes", which does not match the
/// field set below (the seven `u32`s alone are 28 bytes) — size claim removed.
#[derive(Debug, Clone)]
pub struct CallFrame {
    /// Index into `CodeStore::functions`.
    pub fn_id: u32,
    /// Current instruction pointer (byte offset into `FnChunk::code`).
    pub ip: u32,
    /// Base pointer: index into VM stack where this frame's locals start.
    pub bp: u32,
    /// Number of local slots (params + local bindings).
    pub local_count: u16,
    /// Arena length at function entry; allocations above this mark are local
    /// to the frame unless promoted on return/tail-call.
    pub arena_mark: u32,
    /// Yard length at function entry; reused TCO frames compact this suffix
    /// so loop-carried survivors do not accumulate across iterations.
    pub yard_base: u32,
    /// Current yard suffix owned by this frame iteration. Reused TCO frames
    /// may advance this mark so older carried survivors become the shared
    /// prefix for the next iteration.
    pub yard_mark: u32,
    /// Handoff length at function entry; ordinary returns compact this suffix
    /// so helper results can survive into the caller without polluting stable.
    pub handoff_mark: u32,
    /// Whether this frame stored a young-region value into globals.
    pub globals_dirty: bool,
    /// Whether ordinary returns introduced caller-yard survivors that should
    /// be pruned on the next tail-call boundary.
    pub yard_dirty: bool,
    /// Whether helper returns introduced handoff survivors that should be
    /// pruned on the next boundary of this frame.
    pub handoff_dirty: bool,
    /// Conservatively classified as cheap enough for a fast return path.
    pub thin: bool,
    /// Uses the caller young region as its allocation lane and skips
    /// ordinary-return handoff while it remains a pure wrapper frame.
    pub parent_thin: bool,
}
70
/// All compiled bytecode for a program.
#[derive(Debug, Clone)]
pub struct CodeStore {
    /// Compiled function chunks, indexed by function id (`u32` cast to `usize`).
    pub functions: Vec<FnChunk>,
    /// Map from function name to index in `functions`.
    pub fn_index: std::collections::HashMap<String, u32>,
    /// Compile-time-known symbol table for functions, builtins, effects, and other names.
    pub(crate) symbols: VmSymbolTable,
    /// Per-record-type field slot lookup: (type_id, field_symbol_id) -> field_idx.
    pub(crate) record_field_slots: std::collections::HashMap<(u32, u32), u8>,
}
82
83impl Default for CodeStore {
84    fn default() -> Self {
85        Self::new()
86    }
87}
88
89impl CodeStore {
90    pub fn new() -> Self {
91        CodeStore {
92            functions: Vec::new(),
93            fn_index: std::collections::HashMap::new(),
94            symbols: VmSymbolTable::default(),
95            record_field_slots: std::collections::HashMap::new(),
96        }
97    }
98
99    pub fn add_function(&mut self, chunk: FnChunk) -> u32 {
100        let id = self.functions.len() as u32;
101        self.fn_index.insert(chunk.name.clone(), id);
102        self.functions.push(chunk);
103        id
104    }
105
106    pub fn get(&self, id: u32) -> &FnChunk {
107        &self.functions[id as usize]
108    }
109
110    pub fn find(&self, name: &str) -> Option<u32> {
111        self.fn_index.get(name).copied()
112    }
113
114    pub fn register_record_fields(&mut self, type_id: u32, field_symbol_ids: &[u32]) {
115        for (field_idx, symbol_id) in field_symbol_ids.iter().copied().enumerate() {
116            self.record_field_slots
117                .insert((type_id, symbol_id), field_idx as u8);
118        }
119    }
120
121    /// Resolve a bytecode position to (source_file, source_line).
122    /// Returns None if line table is empty or fn_id is invalid.
123    pub fn resolve_source_location(&self, fn_id: u32, ip: u32) -> Option<(&str, u16)> {
124        let chunk = self.functions.get(fn_id as usize)?;
125        if chunk.line_table.is_empty() {
126            return None;
127        }
128        // Binary search: find last entry where offset <= ip
129        let ip16 = ip as u16;
130        let idx = match chunk
131            .line_table
132            .binary_search_by_key(&ip16, |&(off, _)| off)
133        {
134            Ok(i) => i,
135            Err(0) => return None,
136            Err(i) => i - 1,
137        };
138        let (_, line) = chunk.line_table[idx];
139        let file = if chunk.source_file.is_empty() {
140            None
141        } else {
142            Some(chunk.source_file.as_str())
143        };
144        Some((file.unwrap_or(""), line))
145    }
146}
147
/// Source location resolved from line table (cold-path only).
#[derive(Debug, Default, Clone)]
pub struct VmSourceLoc {
    /// Source file path (empty when unknown/synthetic).
    pub file: String,
    /// 1-based source line; 0 means "no line known".
    pub line: u16,
    /// Name of the function containing the location.
    pub fn_name: String,
}
155
/// VM runtime error.
#[derive(Debug)]
pub enum VmError {
    /// Runtime error with message and optional source line (0 = unknown).
    Runtime { msg: String, line: u16 },
    /// Type error (e.g. adding int + string); line 0 = unknown.
    Type { msg: String, line: u16 },
    /// Non-exhaustive match at source line.
    MatchFail(u16),
    /// Stack underflow (bug in compiler).
    StackUnderflow,
}
168
169impl VmError {
170    pub fn runtime(msg: impl Into<String>) -> Self {
171        VmError::Runtime {
172            msg: msg.into(),
173            line: 0,
174        }
175    }
176
177    pub fn type_err(msg: impl Into<String>) -> Self {
178        VmError::Type {
179            msg: msg.into(),
180            line: 0,
181        }
182    }
183
184    /// Attach resolved source location (cold path).
185    pub fn with_location(self, loc: Option<VmSourceLoc>) -> Self {
186        let Some(loc) = loc else { return self };
187        if loc.line == 0 {
188            return self;
189        }
190        match self {
191            VmError::Runtime { msg, line: 0 } => VmError::Runtime {
192                msg,
193                line: loc.line,
194            },
195            VmError::Type { msg, line: 0 } => VmError::Type {
196                msg,
197                line: loc.line,
198            },
199            other => other,
200        }
201    }
202}
203
204impl std::fmt::Display for VmError {
205    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
206        match self {
207            VmError::Runtime { msg, line } if *line > 0 => {
208                write!(f, "Runtime error [line {}]: {}", line, msg)
209            }
210            VmError::Runtime { msg, .. } => write!(f, "Runtime error: {}", msg),
211            VmError::Type { msg, line } if *line > 0 => {
212                write!(f, "Type error [line {}]: {}", line, msg)
213            }
214            VmError::Type { msg, .. } => write!(f, "Type error: {}", msg),
215            VmError::MatchFail(line) => write!(f, "Non-exhaustive match at line {}", line),
216            VmError::StackUnderflow => write!(f, "Internal error: stack underflow"),
217        }
218    }
219}
220
// Marker impl: `VmError` already provides `Debug` + `Display`, so the
// default `std::error::Error` methods suffice.
impl std::error::Error for VmError {}