// aver/vm/types.rs
use crate::nan_value::NanValue;

use super::symbol::VmSymbolTable;

/// A compiled function chunk — bytecode + metadata.
#[derive(Debug, Clone)]
pub struct FnChunk {
    /// Function name; also the key under which it is registered in
    /// `CodeStore::fn_index`.
    pub name: String,
    /// Number of parameters the function expects.
    pub arity: u8,
    /// Total local slots (params + local bindings); equals `arity` for
    /// args-only functions.
    pub local_count: u16,
    /// Bytecode for the function body.
    pub code: Vec<u8>,
    /// Constant pool referenced by the bytecode.
    pub constants: Vec<NanValue>,
    /// Declared effects (e.g. `! [Console.print, Http]`). Empty for pure functions.
    pub effects: Vec<u32>,
    /// Conservatively classified "thin" function: likely to return without
    /// creating any frame-local heap survivors or dirtying globals.
    pub thin: bool,
    /// Narrow wrapper-like helper that borrows the caller young region and
    /// skips ordinary-return handoff as long as it stays out of yard/handoff.
    pub parent_thin: bool,
    /// Leaf function: no CALL_KNOWN or CALL_VALUE in bytecode (only builtins
    /// and opcodes). When also thin and args-only (local_count == arity),
    /// can be called without pushing a CallFrame.
    pub leaf: bool,
    /// Pure no-alloc function (per shared `ir::compute_alloc_info` under
    /// `VmAllocPolicy`): the body never produces a heap object. Disjoint from
    /// `thin` because mutual-TCO peers can be no-alloc but not bytecode-thin.
    /// `TAIL_CALL_KNOWN` skips `finalize_frame_locals_for_tail_call` when
    /// the target chunk has this flag set — the runtime guard is guaranteed
    /// to be a no-op for pure no-alloc bodies.
    pub no_alloc: bool,
    /// Source file path for this function (empty for synthetic/unknown).
    pub source_file: String,
    /// Run-length encoded line table: `(bytecode_offset, source_line)`.
    /// Sorted by offset. Lookup: find last entry where offset <= target ip.
    pub line_table: Vec<(u16, u16)>,
}

/// Compact call frame: per-call metadata only, no closure/upvalue fields.
/// (NOTE(review): an older comment claimed "16 bytes"; the fields below sum
/// to more than that — treat the size claim as stale.)
#[derive(Debug, Clone)]
pub struct CallFrame {
    /// Index into `CodeStore::functions`.
    pub fn_id: u32,
    /// Current instruction pointer (byte offset into `FnChunk::code`).
    pub ip: u32,
    /// Base pointer: index into VM stack where this frame's locals start.
    pub bp: u32,
    /// Number of local slots (params + local bindings).
    pub local_count: u16,
    /// Arena length at function entry; allocations above this mark are local
    /// to the frame unless promoted on return/tail-call.
    pub arena_mark: u32,
    /// Yard length at function entry; reused TCO frames compact this suffix
    /// so loop-carried survivors do not accumulate across iterations.
    pub yard_base: u32,
    /// Current yard suffix owned by this frame iteration. Reused TCO frames
    /// may advance this mark so older carried survivors become the shared
    /// prefix for the next iteration.
    pub yard_mark: u32,
    /// Handoff length at function entry; ordinary returns compact this suffix
    /// so helper results can survive into the caller without polluting stable.
    pub handoff_mark: u32,
    /// Whether this frame stored a young-region value into globals.
    pub globals_dirty: bool,
    /// Whether ordinary returns introduced caller-yard survivors that should
    /// be pruned on the next tail-call boundary.
    pub yard_dirty: bool,
    /// Whether helper returns introduced handoff survivors that should be
    /// pruned on the next boundary of this frame.
    pub handoff_dirty: bool,
    /// Conservatively classified as cheap enough for a fast return path.
    pub thin: bool,
    /// Uses the caller young region as its allocation lane and skips
    /// ordinary-return handoff while it remains a pure wrapper frame.
    pub parent_thin: bool,
}

/// All compiled bytecode for a program.
#[derive(Debug, Clone)]
pub struct CodeStore {
    /// Compiled chunks, indexed by function id (`u32`).
    pub functions: Vec<FnChunk>,
    /// Map from function name to index in `functions`.
    pub fn_index: std::collections::HashMap<String, u32>,
    /// Compile-time-known symbol table for functions, builtins, effects, and other names.
    pub(crate) symbols: VmSymbolTable,
    /// Per-record-type field slot lookup: (type_id, field_symbol_id) -> field_idx.
    pub(crate) record_field_slots: std::collections::HashMap<(u32, u32), u8>,
}

90impl Default for CodeStore {
91 fn default() -> Self {
92 Self::new()
93 }
94}
95
96impl CodeStore {
97 pub fn new() -> Self {
98 CodeStore {
99 functions: Vec::new(),
100 fn_index: std::collections::HashMap::new(),
101 symbols: VmSymbolTable::default(),
102 record_field_slots: std::collections::HashMap::new(),
103 }
104 }
105
106 pub fn add_function(&mut self, chunk: FnChunk) -> u32 {
107 let id = self.functions.len() as u32;
108 self.fn_index.insert(chunk.name.clone(), id);
109 self.functions.push(chunk);
110 id
111 }
112
113 pub fn get(&self, id: u32) -> &FnChunk {
114 &self.functions[id as usize]
115 }
116
117 pub fn find(&self, name: &str) -> Option<u32> {
118 self.fn_index.get(name).copied()
119 }
120
121 pub fn register_record_fields(&mut self, type_id: u32, field_symbol_ids: &[u32]) {
122 for (field_idx, symbol_id) in field_symbol_ids.iter().copied().enumerate() {
123 self.record_field_slots
124 .insert((type_id, symbol_id), field_idx as u8);
125 }
126 }
127
128 /// Resolve a bytecode position to (source_file, source_line).
129 /// Returns None if line table is empty or fn_id is invalid.
130 pub fn resolve_source_location(&self, fn_id: u32, ip: u32) -> Option<(&str, u16)> {
131 let chunk = self.functions.get(fn_id as usize)?;
132 if chunk.line_table.is_empty() {
133 return None;
134 }
135 // Binary search: find last entry where offset <= ip
136 let ip16 = ip as u16;
137 let idx = match chunk
138 .line_table
139 .binary_search_by_key(&ip16, |&(off, _)| off)
140 {
141 Ok(i) => i,
142 Err(0) => return None,
143 Err(i) => i - 1,
144 };
145 let (_, line) = chunk.line_table[idx];
146 let file = if chunk.source_file.is_empty() {
147 None
148 } else {
149 Some(chunk.source_file.as_str())
150 };
151 Some((file.unwrap_or(""), line))
152 }
153}
154
/// Source location resolved from line table (cold-path only).
#[derive(Debug, Default, Clone)]
pub struct VmSourceLoc {
    /// Source file path (empty when unknown).
    pub file: String,
    /// 1-based source line; 0 means "unknown" (see `VmError::with_location`).
    pub line: u16,
    /// Name of the function the location falls in.
    pub fn_name: String,
}

/// VM runtime error.
///
/// For `Runtime` and `Type`, `line == 0` means "no source line known yet";
/// `with_location` fills it in later and `Display` omits it when zero.
#[derive(Debug)]
pub enum VmError {
    /// Runtime error with message and optional source line.
    Runtime { msg: String, line: u16 },
    /// Type error (e.g. adding int + string).
    Type { msg: String, line: u16 },
    /// Non-exhaustive match at source line.
    MatchFail(u16),
    /// Stack underflow (bug in compiler).
    StackUnderflow,
}

176impl VmError {
177 pub fn runtime(msg: impl Into<String>) -> Self {
178 VmError::Runtime {
179 msg: msg.into(),
180 line: 0,
181 }
182 }
183
184 pub fn type_err(msg: impl Into<String>) -> Self {
185 VmError::Type {
186 msg: msg.into(),
187 line: 0,
188 }
189 }
190
191 /// Attach resolved source location (cold path).
192 pub fn with_location(self, loc: Option<VmSourceLoc>) -> Self {
193 let Some(loc) = loc else { return self };
194 if loc.line == 0 {
195 return self;
196 }
197 match self {
198 VmError::Runtime { msg, line: 0 } => VmError::Runtime {
199 msg,
200 line: loc.line,
201 },
202 VmError::Type { msg, line: 0 } => VmError::Type {
203 msg,
204 line: loc.line,
205 },
206 other => other,
207 }
208 }
209}
210
211impl std::fmt::Display for VmError {
212 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
213 match self {
214 VmError::Runtime { msg, line } if *line > 0 => {
215 write!(f, "Runtime error [line {}]: {}", line, msg)
216 }
217 VmError::Runtime { msg, .. } => write!(f, "Runtime error: {}", msg),
218 VmError::Type { msg, line } if *line > 0 => {
219 write!(f, "Type error [line {}]: {}", line, msg)
220 }
221 VmError::Type { msg, .. } => write!(f, "Type error: {}", msg),
222 VmError::MatchFail(line) => write!(f, "Non-exhaustive match at line {}", line),
223 VmError::StackUnderflow => write!(f, "Internal error: stack underflow"),
224 }
225 }
226}
227
// Marker impl: the `Debug` derive and `Display` impl above satisfy the
// `std::error::Error` supertraits; all default methods suffice.
impl std::error::Error for VmError {}