// aver/vm/types.rs
1use crate::nan_value::NanValue;
2
3use super::symbol::VmSymbolTable;
4
/// A compiled function chunk — bytecode + metadata.
#[derive(Debug, Clone)]
pub struct FnChunk {
    /// Function name; `CodeStore::add_function` registers the chunk in
    /// `fn_index` under this name.
    pub name: String,
    /// Number of declared parameters.
    pub arity: u8,
    /// Total local slots (parameters plus local bindings).
    pub local_count: u16,
    /// Raw bytecode for this function.
    pub code: Vec<u8>,
    /// Constant pool; entries are referenced by index from `code`.
    pub constants: Vec<NanValue>,
    /// Declared effects (e.g. `! [Console.print, Http]`). Empty for pure functions.
    pub effects: Vec<u32>,
    /// Conservatively classified "thin" function: likely to return without
    /// creating any frame-local heap survivors or dirtying globals.
    pub thin: bool,
    /// Narrow wrapper-like helper that borrows the caller young region and
    /// skips ordinary-return handoff as long as it stays out of yard/handoff.
    pub parent_thin: bool,
    /// Leaf function: no CALL_KNOWN or CALL_VALUE in bytecode (only builtins
    /// and opcodes). When also thin and args-only (local_count == arity),
    /// can be called without pushing a CallFrame.
    pub leaf: bool,
}
26
/// Compact call frame: fixed-size bookkeeping only, with no closure/upvalue
/// fields. (NOTE(review): the previous "16 bytes" claim was stale — the
/// visible fields total roughly 36 bytes after padding.)
#[derive(Debug, Clone)]
pub struct CallFrame {
    /// Index into `CodeStore::functions`.
    pub fn_id: u32,
    /// Current instruction pointer (byte offset into `FnChunk::code`).
    pub ip: u32,
    /// Base pointer: index into VM stack where this frame's locals start.
    pub bp: u32,
    /// Number of local slots (params + local bindings).
    pub local_count: u16,
    /// Arena length at function entry; allocations above this mark are local
    /// to the frame unless promoted on return/tail-call.
    pub arena_mark: u32,
    /// Yard length at function entry; reused TCO frames compact this suffix
    /// so loop-carried survivors do not accumulate across iterations.
    pub yard_base: u32,
    /// Current yard suffix owned by this frame iteration. Reused TCO frames
    /// may advance this mark so older carried survivors become the shared
    /// prefix for the next iteration.
    pub yard_mark: u32,
    /// Handoff length at function entry; ordinary returns compact this suffix
    /// so helper results can survive into the caller without polluting stable.
    pub handoff_mark: u32,
    /// Whether this frame stored a young-region value into globals.
    pub globals_dirty: bool,
    /// Whether ordinary returns introduced caller-yard survivors that should
    /// be pruned on the next tail-call boundary.
    pub yard_dirty: bool,
    /// Whether helper returns introduced handoff survivors that should be
    /// pruned on the next boundary of this frame.
    pub handoff_dirty: bool,
    /// Conservatively classified as cheap enough for a fast return path.
    pub thin: bool,
    /// Uses the caller young region as its allocation lane and skips
    /// ordinary-return handoff while it remains a pure wrapper frame.
    pub parent_thin: bool,
}
65
/// All compiled bytecode for a program.
#[derive(Debug, Clone)]
pub struct CodeStore {
    /// Every compiled chunk; a function id is an index into this vector.
    pub functions: Vec<FnChunk>,
    /// Map from function name to index in `functions`.
    pub fn_index: std::collections::HashMap<String, u32>,
    /// Compile-time-known symbol table for functions, builtins, effects, and other names.
    pub(crate) symbols: VmSymbolTable,
    /// Per-record-type field slot lookup: (type_id, field_symbol_id) -> field_idx.
    pub(crate) record_field_slots: std::collections::HashMap<(u32, u32), u8>,
}
77
78impl Default for CodeStore {
79 fn default() -> Self {
80 Self::new()
81 }
82}
83
84impl CodeStore {
85 pub fn new() -> Self {
86 CodeStore {
87 functions: Vec::new(),
88 fn_index: std::collections::HashMap::new(),
89 symbols: VmSymbolTable::default(),
90 record_field_slots: std::collections::HashMap::new(),
91 }
92 }
93
94 pub fn add_function(&mut self, chunk: FnChunk) -> u32 {
95 let id = self.functions.len() as u32;
96 self.fn_index.insert(chunk.name.clone(), id);
97 self.functions.push(chunk);
98 id
99 }
100
101 pub fn get(&self, id: u32) -> &FnChunk {
102 &self.functions[id as usize]
103 }
104
105 pub fn find(&self, name: &str) -> Option<u32> {
106 self.fn_index.get(name).copied()
107 }
108
109 pub fn register_record_fields(&mut self, type_id: u32, field_symbol_ids: &[u32]) {
110 for (field_idx, symbol_id) in field_symbol_ids.iter().copied().enumerate() {
111 self.record_field_slots
112 .insert((type_id, symbol_id), field_idx as u8);
113 }
114 }
115}
116
/// Errors surfaced by the VM at runtime.
#[derive(Debug)]
pub enum VmError {
    /// General runtime failure carrying a human-readable message.
    Runtime(String),
    /// Type mismatch (e.g. adding int + string).
    Type(String),
    /// A `match` fell through every arm; payload is the source line.
    MatchFail(u16),
    /// Value stack popped while empty — indicates a compiler bug.
    StackUnderflow,
}

impl std::fmt::Display for VmError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Runtime(msg) => write!(f, "Runtime error: {}", msg),
            Self::Type(msg) => write!(f, "Type error: {}", msg),
            Self::MatchFail(line) => write!(f, "Non-exhaustive match at line {}", line),
            Self::StackUnderflow => f.write_str("Internal error: stack underflow"),
        }
    }
}

impl std::error::Error for VmError {}