//! ternlang_core/codegen/betbc.rs — BET bytecode emitter.
1use crate::ast::*;
2use crate::vm::bet::pack_trits;
3use crate::trit::Trit;
4
5pub struct BytecodeEmitter {
6    code: Vec<u8>,
7    symbols: std::collections::HashMap<String, u8>,
8    func_addrs: std::collections::HashMap<String, u16>,
9    function_patches: std::collections::HashMap<String, Vec<usize>>,
10    break_patches: Vec<usize>,
11    continue_patches: Vec<usize>,
12    next_reg: usize,
13    pub struct_layouts: std::collections::HashMap<String, Vec<String>>,
14    agent_type_ids: std::collections::HashMap<String, u16>,
15    agent_handlers: Vec<(u16, u16)>,
16    /// Snapshots of the local symbol table for each function, keyed by function name.
17    /// Captured just before scope is restored so callers can map reg→varname after execution.
18    function_symbols: std::collections::HashMap<String, std::collections::HashMap<String, u8>>,
19}
20
21impl BytecodeEmitter {
22    pub fn new() -> Self {
23        Self {
24            code: Vec::new(),
25            symbols: std::collections::HashMap::new(),
26            func_addrs: std::collections::HashMap::new(),
27            function_patches: std::collections::HashMap::new(),
28            break_patches: Vec::new(),
29            continue_patches: Vec::new(),
30            next_reg: 0,
31            struct_layouts: std::collections::HashMap::new(),
32            agent_type_ids: std::collections::HashMap::new(),
33            agent_handlers: Vec::new(),
34            function_symbols: std::collections::HashMap::new(),
35        }
36    }
37
38    /// Returns the top-level variable-name → register-number map.
39    pub fn get_symbols(&self) -> &std::collections::HashMap<String, u8> {
40        &self.symbols
41    }
42
43    /// Returns the local symbol snapshot for a specific function (e.g. "main").
44    /// Used by `ternlang-cli --emit-symbols` to correlate VM register dumps with source variable names.
45    pub fn get_function_symbols(&self, name: &str) -> Option<&std::collections::HashMap<String, u8>> {
46        self.function_symbols.get(name)
47    }
48
49    pub fn register_agents(&self, vm: &mut crate::vm::BetVm) {
50        for &(type_id, addr) in &self.agent_handlers {
51            vm.register_agent_type(type_id, addr as usize);
52        }
53    }
54
55    /// Emit a single agent definition incrementally (used by the WASM fallback loop
56    /// when the full `parse_program` path is unavailable).
57    pub fn emit_agent_def(&mut self, agent: &crate::AgentDef) {
58        let type_id = self.agent_type_ids.len() as u16;
59        self.agent_type_ids.insert(agent.name.clone(), type_id);
60        let mut handler_addr: Option<u16> = None;
61        for method in &agent.methods {
62            let addr = self.code.len() as u16;
63            if handler_addr.is_none() {
64                handler_addr = Some(addr);
65            }
66            self.emit_function(method);
67            self.func_addrs.insert(format!("{}::{}", agent.name, method.name), addr);
68        }
69        if let Some(addr) = handler_addr {
70            self.agent_handlers.push((type_id, addr));
71        }
72    }
73
74    pub fn emit_header_jump(&mut self) -> usize {
75        let patch_pos = self.code.len() + 1;
76        self.code.push(0x0b); // TJMP
77        self.code.extend_from_slice(&[0u8, 0u8]);
78        patch_pos
79    }
80
81    pub fn patch_header_jump(&mut self, patch_pos: usize) {
82        let addr = self.code.len() as u16;
83        self.patch_u16(patch_pos, addr);
84    }
85
86    pub fn emit_program(&mut self, program: &Program) {
87        let parent_next_reg = self.next_reg;
88        for s in &program.structs {
89            let names: Vec<String> = s.fields.iter().map(|(n, _)| n.clone()).collect();
90            self.struct_layouts.insert(s.name.clone(), names);
91        }
92        for (idx, agent) in program.agents.iter().enumerate() {
93            self.agent_type_ids.insert(agent.name.clone(), idx as u16);
94        }
95
96        // PASS 1: Addresses
97        let real_code = std::mem::take(&mut self.code);
98        let real_func_addrs = std::mem::take(&mut self.func_addrs);
99        let real_agent_handlers = std::mem::take(&mut self.agent_handlers);
100        let base_addr = real_code.len() as u16;
101
102        for agent in &program.agents {
103            let type_id = self.agent_type_ids[&agent.name];
104            let mut handler_addr = None;
105            for method in &agent.methods {
106                let addr = base_addr + self.code.len() as u16;
107                if handler_addr.is_none() { handler_addr = Some(addr); }
108                self.emit_function(method);
109                // Restore correct absolute address overwritten by emit_function (TCALL-BUG fix):
110                self.func_addrs.insert(format!("{}::{}", agent.name, method.name), addr);
111            }
112            if let Some(addr) = handler_addr { self.agent_handlers.push((type_id, addr)); }
113        }
114        for func in &program.functions {
115            let addr = base_addr + self.code.len() as u16;
116            self.func_addrs.insert(func.name.clone(), addr);
117            // Ensure any global symbols or previous definitions are visible
118            self.emit_function(func);
119            // emit_function overwrites func_addrs[name] with a temp-buffer offset that
120            // omits base_addr. Restore the correct absolute address so that forward
121            // references resolved later in PASS 1 get the right TCALL target.
122            self.func_addrs.insert(func.name.clone(), addr);
123        }
124
125        let final_func_addrs = std::mem::replace(&mut self.func_addrs, real_func_addrs);
126        let final_agent_handlers = std::mem::replace(&mut self.agent_handlers, real_agent_handlers);
127        self.code = real_code;
128        self.func_addrs = final_func_addrs;
129        self.agent_handlers = final_agent_handlers;
130        self.next_reg = parent_next_reg;
131
132        // PASS 2: Real
133        for agent in &program.agents {
134            for method in &agent.methods { self.emit_function(method); }
135        }
136        for func in &program.functions { self.emit_function(func); }
137    }
138
139    pub fn emit_function(&mut self, func: &Function) {
140        let func_addr = self.code.len() as u16;
141        self.func_addrs.insert(func.name.clone(), func_addr);
142        if let Some(patches) = self.function_patches.remove(&func.name) {
143            for p in patches {
144                self.code[p..p + 2].copy_from_slice(&func_addr.to_le_bytes());
145            }
146        }
147        let parent_symbols = self.symbols.clone();
148        let parent_next_reg = self.next_reg;
149        self.next_reg = 0;
150
151        // If function has @sparseskip, we could emit a special header here.
152        // For now, it's just a marker in the AST.
153
154        for (name, ty) in func.params.iter().rev() {
155            if let Type::Named(s_name) = ty {
156                if let Some(fields) = self.struct_layouts.get(s_name).cloned() {
157                    // Structs are passed as a bundle: [field1, field2, ..., root_dummy]
158                    // We must pop root dummy first, then fields.
159                    
160                    // Pop root dummy
161                    let root_reg = self.alloc_reg();
162                    self.symbols.insert(name.clone(), root_reg);
163                    self.code.push(0x08); self.code.push(root_reg);
164
165                    // Pop fields in reverse order of how they were pushed
166                    for f_name in fields.iter().rev() {
167                        let f_reg = self.alloc_reg();
168                        let key = format!("{}.{}", name, f_name);
169                        self.symbols.insert(key, f_reg);
170                        self.code.push(0x08); self.code.push(f_reg);
171                    }
172                    continue;
173                }
174            }
175            let reg = self.alloc_reg();
176            self.symbols.insert(name.clone(), reg);
177            self.code.push(0x08); self.code.push(reg);
178        }
179        for stmt in &func.body { self.emit_stmt(stmt); }
180        // Snapshot local symbols before scope is restored — used by --emit-symbols
181        self.function_symbols.insert(func.name.clone(), self.symbols.clone());
182        self.symbols = parent_symbols;
183        self.next_reg = parent_next_reg;
184        self.code.push(0x11); // TRET
185    }
186
187    pub fn emit_stmt(&mut self, stmt: &Stmt) {
188        match stmt {
189            Stmt::Let { name, ty, value } => {
190                let mut handled = false;
191                match ty {
192                    Type::TritTensor { dims } => {
193                        // Auto-alloc for any zero-initializer (TritLiteral(0) or IntLiteral(0))
194                        let is_zero_init = matches!(value, Expr::TritLiteral(0) | Expr::IntLiteral(0));
195                        if !dims.is_empty() && !dims.contains(&0) && is_zero_init {
196                            let rows = dims[0];
197                            let cols = if dims.len() > 1 { dims[1] } else { 1 };
198                            self.code.push(0x0f); // TALLOC (trit)
199                            self.code.extend_from_slice(&(rows as u32).to_le_bytes());
200                            self.code.extend_from_slice(&(cols as u32).to_le_bytes());
201                            handled = true;
202                        }
203                    }
204                    Type::PackedTritTensor { dims } => {
205                        let is_zero_init = matches!(value, Expr::TritLiteral(0) | Expr::IntLiteral(0));
206                        if !dims.is_empty() && !dims.contains(&0) && is_zero_init {
207                            let rows = dims[0];
208                            let cols = if dims.len() > 1 { dims[1] } else { 1 };
209                            self.code.push(0x56); // TALLOC_PACKED
210                            self.code.extend_from_slice(&(rows as u32).to_le_bytes());
211                            self.code.extend_from_slice(&(cols as u32).to_le_bytes());
212                            handled = true;
213                        }
214                    }
215                    Type::IntTensor { dims } => {
216                        let is_zero_init = matches!(value, Expr::TritLiteral(0) | Expr::IntLiteral(0));
217                        if !dims.is_empty() && !dims.contains(&0) && is_zero_init {
218                            let rows = dims[0];
219                            let cols = if dims.len() > 1 { dims[1] } else { 1 };
220                            self.code.push(0x3c); // TALLOC_Int
221                            self.code.extend_from_slice(&(rows as u32).to_le_bytes());
222                            self.code.extend_from_slice(&(cols as u32).to_le_bytes());
223                            handled = true;
224                        }
225                    }
226                    Type::FloatTensor { dims } => {
227                        let is_zero_init = matches!(value, Expr::TritLiteral(0) | Expr::IntLiteral(0));
228                        if !dims.is_empty() && !dims.contains(&0) && is_zero_init {
229                            let rows = dims[0];
230                            let cols = if dims.len() > 1 { dims[1] } else { 1 };
231                            self.code.push(0x3d); // TALLOC_Float
232                            self.code.extend_from_slice(&(rows as u32).to_le_bytes());
233                            self.code.extend_from_slice(&(cols as u32).to_le_bytes());
234                            handled = true;
235                        }
236                    }
237                    Type::Named(_) => {
238                        if let Expr::StructLiteral { fields, .. } = value {
239                            // Flatten struct fields into mangled registers
240                            for (f_name, f_val) in fields {
241                                self.emit_expr(f_val);
242                                let reg = self.alloc_reg();
243                                let key = format!("{}.{}", name, f_name);
244                                self.symbols.insert(key, reg);
245                                self.code.push(0x08); self.code.push(reg);
246                            }
247                            // Now we let the normal path emit the root variable's dummy value
248                        }
249                    }
250                    _ => {}
251                }
252                if !handled {
253                    self.emit_expr(value);
254                }
255                let reg = self.alloc_reg();
256                self.symbols.insert(name.clone(), reg);
257                self.code.push(0x08); self.code.push(reg); // TSTORE
258            }
259            Stmt::Set { name, value } => {
260                self.emit_expr(value);
261                if let Some(&reg) = self.symbols.get(name) {
262                    self.code.push(0x08); self.code.push(reg);
263                }
264            }
265            Stmt::FieldSet { object, field, value } => {
266                let key = format!("{}.{}", object, field);
267                self.emit_expr(value);
268                if let Some(&reg) = self.symbols.get(&key) {
269                    self.code.push(0x08); self.code.push(reg);
270                }
271            }
272            Stmt::IndexSet { object, row, col, value } => {
273                if let Some(&reg) = self.symbols.get(object) {
274                    self.code.push(0x09); self.code.push(reg);
275                    self.emit_expr(row);
276                    self.emit_expr(col);
277                    self.emit_expr(value);
278                    self.code.push(0x23);
279                }
280            }
281            Stmt::IfTernary { condition, on_pos, on_zero, on_neg } => {
282                let pre_reg = self.next_reg;
283                self.emit_expr(condition);
284                let cond_reg = self.alloc_reg();
285                self.code.push(0x08); self.code.push(cond_reg); // Tstore
286                
287                // Load condition for checks
288                self.code.push(0x09); self.code.push(cond_reg); // Tload
289                
290                // Check POS
291                let pos_patch = self.code.len() + 1;
292                self.code.push(0x05); self.code.extend_from_slice(&[0, 0]); // TJMP_POS
293                
294                // Check ZERO
295                let zero_patch = self.code.len() + 1;
296                self.code.push(0x06); self.code.extend_from_slice(&[0, 0]); // TJMP_ZERO
297                
298                // NEG arm: pop the condition and execute
299                self.code.push(0x0c); // TPOP
300                self.emit_stmt(on_neg);
301                let exit_patch = self.code.len() + 1;
302                self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]); // TJMP to end
303                
304                // POS arm
305                let pos_addr = self.code.len() as u16;
306                self.patch_u16(pos_patch, pos_addr);
307                self.code.push(0x0c); // TPOP
308                self.emit_stmt(on_pos);
309                let exit_pos = self.code.len() + 1;
310                self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]);
311                
312                // ZERO arm
313                let zero_addr = self.code.len() as u16;
314                self.patch_u16(zero_patch, zero_addr);
315                self.code.push(0x0c); // TPOP
316                self.emit_stmt(on_zero);
317                
318                let end = self.code.len() as u16;
319                self.patch_u16(exit_patch, end);
320                self.patch_u16(exit_pos, end);
321                self.next_reg = pre_reg;
322            }
323            Stmt::Match { condition, arms } => {
324                let pre_reg = self.next_reg;
325                self.emit_expr(condition);
326                let cond_reg = self.alloc_reg();
327                self.code.push(0x08); self.code.push(cond_reg); // Tstore
328
329                let mut end_patches = Vec::new();
330                let mut next_arm_patch = None;
331
332                for (pattern, stmt) in arms {
333                    if let Some(p) = next_arm_patch {
334                        let addr = self.code.len() as u16;
335                        self.patch_u16(p, addr);
336                    }
337
338                    // Load condition for this arm
339                    self.code.push(0x09); self.code.push(cond_reg); // Tload
340
341                    let match_patch;
342                    match pattern {
343                        Pattern::Trit(1) | Pattern::Int(1) => {
344                            self.code.push(0x05); // TjmpPos (peeks)
345                            match_patch = self.code.len();
346                            self.code.extend_from_slice(&[0, 0]);
347                        }
348                        Pattern::Trit(0) | Pattern::Int(0) => {
349                            self.code.push(0x06); // TjmpZero (peeks)
350                            match_patch = self.code.len();
351                            self.code.extend_from_slice(&[0, 0]);
352                        }
353                        Pattern::Trit(-1) | Pattern::Int(-1) => {
354                            self.code.push(0x07); // TjmpNeg (peeks)
355                            match_patch = self.code.len();
356                            self.code.extend_from_slice(&[0, 0]);
357                        }
358                        Pattern::Int(v) => {
359                            self.code.push(0x25); // TjmpEqInt (peeks)
360                            self.code.extend_from_slice(&v.to_le_bytes());
361                            match_patch = self.code.len();
362                            self.code.extend_from_slice(&[0, 0]);
363                        }
364                        Pattern::Trit(v) => {
365                            self.code.push(0x25); // TjmpEqInt (peeks)
366                            self.code.extend_from_slice(&(*v as i64).to_le_bytes());
367                            match_patch = self.code.len();
368                            self.code.extend_from_slice(&[0, 0]);
369                        }
370                        Pattern::Float(v) => {
371                            self.code.push(0x2a); // TjmpEqFloat (peeks)
372                            self.code.extend_from_slice(&v.to_le_bytes());
373                            match_patch = self.code.len();
374                            self.code.extend_from_slice(&[0, 0]);
375                        }
376                        Pattern::Wildcard => {
377                            // Wildcard always matches — unconditional jump to body.
378                            // Do NOT pop here: the body's shared TPOP below will clean
379                            // the TLOAD value from the stack, keeping it balanced.
380                            match_patch = self.code.len() + 1;
381                            self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]); // TJMP placeholder
382                        }
383                    }
384
385                    // Mismatch: the conditional test above PEEKS (doesn't pop), so if it
386                    // didn't jump the TLOAD result is still on the stack. Pop it before
387                    // jumping to the next arm to keep the stack balanced.
388                    // (Wildcard never reaches here — it always jumps above.)
389                    self.code.push(0x0c); // TPOP — discard unmatched arm's cond value
390                    let skip_patch = self.code.len() + 1;
391                    self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]);
392                    next_arm_patch = Some(skip_patch);
393
394                    // Match found: execute body
395                    let body_addr = self.code.len() as u16;
396                    self.patch_u16(match_patch, body_addr);
397                    
398                    // Body: first pop the condition we were peeking at
399                    self.code.push(0x0c); // Tpop
400                    self.emit_stmt(stmt);
401                    
402                    // After body, jump to end of match
403                    let end_patch = self.code.len() + 1;
404                    self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]);
405                    end_patches.push(end_patch);
406                }
407
408                if let Some(p) = next_arm_patch {
409                    let addr = self.code.len() as u16;
410                    self.patch_u16(p, addr);
411                }
412                
413                if !arms.is_empty() {
414                    // Each arm's mismatch path now does its own TPOP (see per-arm fix above),
415                    // so the stack is already clean when we reach the fallback.
416                    // VM-MATCH-001: non-exhaustive match — no arm was taken.
417                    // Push a Tend (hold/undefined) placeholder so the stack is balanced
418                    // even if the caller expects a return value from this match expression.
419                    self.code.push(0x01); self.code.extend(pack_trits(&[Trit::Tend]));
420                }
421
422                let end_addr = self.code.len() as u16;
423                for p in end_patches { self.patch_u16(p, end_addr); }
424                self.next_reg = pre_reg;
425            }
426            Stmt::ForIn { var, iter, body } => {
427                // Save next_reg so loop-internal registers are freed after the loop ends.
428                // Without this, each for-in permanently consumes 4 registers, exhausting
429                // the register file after 6-7 loops in a single function.
430                let pre_loop_reg = self.next_reg;
431
432                self.emit_expr(iter);
433                let it_reg = self.alloc_reg();
434                self.code.push(0x08); self.code.push(it_reg);
435                self.code.push(0x09); self.code.push(it_reg);
436                self.code.push(0x24); // TSHAPE: pushes rows then cols (cols on top)
437                self.code.push(0x0c); // pop cols — iterate over rows, not cols
438                let r_reg = self.alloc_reg();
439                self.code.push(0x08); self.code.push(r_reg); // store rows as loop bound
440                let i_reg = self.alloc_reg();
441                self.code.push(0x17); self.code.extend_from_slice(&0i64.to_le_bytes());
442                self.code.push(0x08); self.code.push(i_reg);
443
444                // Use a register for the loop comparison so we avoid TDUP accumulation.
445                // Previously: TDUP + TjmpNeg/TjmpZero (peek) left 2 values on the stack per
446                // iteration, causing a stack leak that corrupted subsequent operations.
447                let cmp_reg = self.alloc_reg();
448
449                let top = self.code.len() as u16;
450                let pre_break = self.break_patches.len();
451                let pre_cont = self.continue_patches.len();
452
453                // Compute i < r → cmp_reg (stack neutral: push then immediately store)
454                self.code.push(0x09); self.code.push(i_reg);
455                self.code.push(0x09); self.code.push(r_reg);
456                self.code.push(0x14);                        // Tless → [cmp]
457                self.code.push(0x08); self.code.push(cmp_reg); // TSTORE cmp → []
458
459                // Load and test for NEG (i >= r → Reject → exit)
460                self.code.push(0x09); self.code.push(cmp_reg); // [cmp]
461                let neg = self.code.len() + 1;
462                self.code.push(0x07); self.code.extend_from_slice(&[0, 0]); // TjmpNeg → peeks
463                self.code.push(0x0c); // TPOP — clean up after failed neg check
464
465                // Load and test for ZERO (i == r → Tend → exit)
466                self.code.push(0x09); self.code.push(cmp_reg); // [cmp]
467                let zero = self.code.len() + 1;
468                self.code.push(0x06); self.code.extend_from_slice(&[0, 0]); // TjmpZero → peeks
469                self.code.push(0x0c); // TPOP — clean up after failed zero check, body runs clean
470
471                // Body: load element tensor[it, i, 0] → v_reg
472                self.code.push(0x09); self.code.push(it_reg);
473                self.code.push(0x09); self.code.push(i_reg);
474                self.code.push(0x17); self.code.extend_from_slice(&0i64.to_le_bytes());
475                self.code.push(0x22);
476                let v_reg = self.alloc_reg();
477                self.symbols.insert(var.clone(), v_reg);
478                self.code.push(0x08); self.code.push(v_reg);
479                self.emit_stmt(body);
480
481                let cont_addr = self.code.len() as u16;
482                let cs: Vec<usize> = self.continue_patches.drain(pre_cont..).collect();
483                for p in cs { self.patch_u16(p, cont_addr); }
484
485                self.code.push(0x09); self.code.push(i_reg);
486                self.code.push(0x17); self.code.extend_from_slice(&1i64.to_le_bytes());
487                self.code.push(0x18);
488                self.code.push(0x08); self.code.push(i_reg);
489                let back = self.code.len() + 1;
490                self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]);
491                self.patch_u16(back, top);
492
493                // Exit paths for neg/zero: the TjmpNeg/TjmpZero PEEK so the cmp value
494                // is still on the stack when they jump. Add a TPOP cleanup then TJMP end.
495                let neg_exit_addr = self.code.len() as u16;
496                self.patch_u16(neg, neg_exit_addr);
497                self.code.push(0x0c); // TPOP — clean peeked cmp
498                let neg_to_end = self.code.len() + 1;
499                self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]);
500
501                let zero_exit_addr = self.code.len() as u16;
502                self.patch_u16(zero, zero_exit_addr);
503                self.code.push(0x0c); // TPOP — clean peeked cmp
504                let zero_to_end = self.code.len() + 1;
505                self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]);
506
507                let end = self.code.len() as u16;
508                self.patch_u16(neg_to_end, end);
509                self.patch_u16(zero_to_end, end);
510                let bs: Vec<usize> = self.break_patches.drain(pre_break..).collect();
511                for p in bs { self.patch_u16(p, end); }
512
513                // Free loop registers: loop variable is out of scope, and the 4
514                // internal registers (it, r, i, v) are no longer needed.
515                self.symbols.remove(var);
516                self.next_reg = pre_loop_reg;
517            }
518            Stmt::WhileTernary { condition, on_pos, on_zero, on_neg } => {
519                let pre_reg = self.next_reg;
520                let cond_reg = self.alloc_reg();
521                let top = self.code.len() as u16;
522                let pre_break = self.break_patches.len();
523                let pre_cont = self.continue_patches.len();
524
525                self.emit_expr(condition);
526                self.code.push(0x08); self.code.push(cond_reg); // Tstore
527                
528                // Load condition for checks
529                self.code.push(0x09); self.code.push(cond_reg); // Tload
530                
531                // Check POS
532                let pos_patch = self.code.len() + 1;
533                self.code.push(0x05); self.code.extend_from_slice(&[0, 0]); // TJMP_POS
534                
535                // Check ZERO
536                let zero_patch = self.code.len() + 1;
537                self.code.push(0x06); self.code.extend_from_slice(&[0, 0]); // TJMP_ZERO
538                
539                // NEG ARM: pop and execute and EXIT (don't loop back)
540                self.code.push(0x0c); // TPOP
541                self.emit_stmt(on_neg);
542                let exit_neg = self.code.len() + 1;
543                self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]); // TJMP to end
544
545                // POS ARM: pop and execute and LOOP BACK
546                let pos_addr = self.code.len() as u16;
547                self.patch_u16(pos_patch, pos_addr);
548                self.code.push(0x0c); // TPOP
549                self.emit_stmt(on_pos);
550                let back_pos = self.code.len() + 1;
551                self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]);
552                self.patch_u16(back_pos, top);
553
554                // ZERO ARM: pop and execute and EXIT (don't loop back)
555                let zero_addr = self.code.len() as u16;
556                self.patch_u16(zero_patch, zero_addr);
557                self.code.push(0x0c); // TPOP
558                self.emit_stmt(on_zero);
559                
560                let end = self.code.len() as u16;
561                self.patch_u16(exit_neg, end);
562
563                let cs: Vec<usize> = self.continue_patches.drain(pre_cont..).collect();
564                for p in cs { self.patch_u16(p, top); }
565                let bs: Vec<usize> = self.break_patches.drain(pre_break..).collect();
566                for p in bs { self.patch_u16(p, end); }
567                self.next_reg = pre_reg;
568            }
569            Stmt::Loop { body } => {
570                let top = self.code.len() as u16;
571                let pre_break = self.break_patches.len();
572                let pre_cont = self.continue_patches.len();
573                self.emit_stmt(body);
574                let back = self.code.len() + 1;
575                self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]);
576                self.patch_u16(back, top);
577                let end = self.code.len() as u16;
578                let cs: Vec<usize> = self.continue_patches.drain(pre_cont..).collect();
579                for p in cs { self.patch_u16(p, top); }
580                let bs: Vec<usize> = self.break_patches.drain(pre_break..).collect();
581                for p in bs { self.patch_u16(p, end); }
582            }
583            Stmt::Break => {
584                let p = self.code.len() + 1;
585                self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]);
586                self.break_patches.push(p);
587            }
588            Stmt::Continue => {
589                let p = self.code.len() + 1;
590                self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]);
591                self.continue_patches.push(p);
592            }
593            Stmt::Send { target, message } => {
594                self.emit_expr(target);
595                self.emit_expr(message);
596                self.code.push(0x31); // TSEND
597            }
598            Stmt::Return(e) => { self.emit_expr(e); self.code.push(0x11); }
599            Stmt::Block(ss) => { for s in ss { self.emit_stmt(s); } }
600            Stmt::Expr(e) => { self.emit_expr(e); self.code.push(0x0c); }
601            Stmt::Decorated { directive: _, stmt } => { self.emit_stmt(stmt); }
602            _ => {}
603        }
604    }
605
606    fn emit_expr(&mut self, expr: &Expr) {
607        match expr {
608            Expr::TritLiteral(v) => {
609                self.code.push(0x01);
610                self.code.extend(pack_trits(&[Trit::from(*v)]));
611            }
612            Expr::IntLiteral(v) => {
613                self.code.push(0x17);
614                self.code.extend_from_slice(&v.to_le_bytes());
615            }
616            Expr::FloatLiteral(val) => {
617                self.code.push(0x19);
618                self.code.extend_from_slice(&val.to_le_bytes());
619            }
620            Expr::StringLiteral(val) => {
621                self.code.push(0x21); // TPUSH_STRING
622                let bytes = val.as_bytes();
623                self.code.extend_from_slice(&(bytes.len() as u16).to_le_bytes());
624                self.code.extend_from_slice(bytes);
625            }
626            Expr::Ident(name) => {
627                // COMP-BOOL-001: `true`/`false` are not keywords in the lexer — they arrive
628                // as Token::Ident. Handle them here so they produce a value instead of
629                // causing a stack underflow when no symbol matches.
630                match name.as_str() {
631                    "true" => {
632                        self.code.push(0x17); // TpushInt
633                        self.code.extend_from_slice(&1i64.to_le_bytes());
634                    }
635                    "false" => {
636                        self.code.push(0x17); // TpushInt
637                        self.code.extend_from_slice(&0i64.to_le_bytes());
638                    }
639                    // COMP-TRIT-001: trit aliases that arrive as Ident if lexer misses them
640                    "affirm" => { self.code.push(0x01); self.code.extend(pack_trits(&[Trit::Affirm])); }
641                    "hold" | "tend" => { self.code.push(0x01); self.code.extend(pack_trits(&[Trit::Tend])); }
642                    "reject" => { self.code.push(0x01); self.code.extend(pack_trits(&[Trit::Reject])); }
643                    _ => {
644                        if let Some(&r) = self.symbols.get(name) {
645                            self.code.push(0x09); self.code.push(r);
646                        }
647                    }
648                }
649            }
650            Expr::BinaryOp { op, lhs, rhs } => {
651                self.emit_expr(lhs); self.emit_expr(rhs);
652                match op {
653                    BinOp::Add => self.code.push(0x02),
654                    BinOp::Mul => self.code.push(0x03),
655                    BinOp::Div => self.code.push(0x1e),
656                    BinOp::Mod => self.code.push(0x1f),
657                    BinOp::Sub => { self.code.push(0x04); self.code.push(0x02); }
658                    BinOp::Equal => self.code.push(0x16),
659                    BinOp::NotEqual => { self.code.push(0x16); self.code.push(0x04); }
660                    BinOp::And => self.code.push(0x28), // TAND = min(a,b)
661                    BinOp::Or  => self.code.push(0x29), // TOR  = max(a,b)
662                    BinOp::Less => self.code.push(0x14),
663                    BinOp::Greater => self.code.push(0x15),
664                    BinOp::LessEqual => self.code.push(0x26),
665                    BinOp::GreaterEqual => self.code.push(0x27),
666                }
667            }
668            Expr::UnaryOp { op, expr } => {
669                self.emit_expr(expr);
670                match op { UnOp::Neg => self.code.push(0x04) }
671            }
672            Expr::Call { callee, args } => {
673                match callee.as_str() {
674                    // `print` is an alias for `println` — same TPRINT opcode (0x20)
675                    "println" | "print" => {
676                        if args.is_empty() {
677                            // print newline only (not implemented, but let's push dummy)
678                        } else {
679                            for a in args {
680                                self.emit_expr(a);
681                                self.code.push(0x20); // TPRINT
682                            }
683                        }
684                        self.code.push(0x01); self.code.extend(pack_trits(&[Trit::Tend])); // return hold()
685                    }
686                    "opent" => {
687                        if args.len() == 2 {
688                            for a in args { self.emit_expr(a); }
689                            self.code.push(0x33); // TOPENT (pushes Int handle)
690                        } else {
691                            // error but push dummy
692                            self.code.push(0x01); self.code.extend(pack_trits(&[Trit::Tend]));
693                        }
694                    }
695                    "readt" => {
696                        if args.len() == 1 {
697                            self.emit_expr(&args[0]);
698                            self.code.push(0x34); // TREADT (pushes Trit)
699                        } else {
700                            self.code.push(0x01); self.code.extend(pack_trits(&[Trit::Tend]));
701                        }
702                    }
703                    "writet" => {
704                        if args.len() == 2 {
705                            for a in args { self.emit_expr(a); }
706                            self.code.push(0x35); // TWRITET
707                        }
708                        self.code.push(0x01); self.code.extend(pack_trits(&[Trit::Tend])); // push void/hold result
709                    }
710                    "consensus" => {
711
712                        for a in args { self.emit_expr(a); }
713                        if args.len() == 2 { self.code.push(0x0e); }
714                    }
715                    "length" => {
716                        if args.len() == 1 {
717                            self.emit_expr(&args[0]);
718                            self.code.push(0x24); // TSHAPE
719                            self.code.push(0x0c); // TPOP (cols)
720                        }
721                    }
722                    // VM-BUILTIN-001: `invert(t)` = ternary negation (Tneg, opcode 0x04)
723                    "invert" => {
724                        if args.len() == 1 {
725                            self.emit_expr(&args[0]);
726                            self.code.push(0x04); // Tneg
727                        }
728                    }
729                    // VM-BUILTIN-002: `len(arr)` is an alias for `length(arr)`
730                    "len" => {
731                        if args.len() == 1 {
732                            self.emit_expr(&args[0]);
733                            self.code.push(0x24); // TSHAPE
734                            self.code.push(0x0c); // TPOP (cols — TSHAPE pushes rows then cols)
735                        }
736                    }
737                    // VM-BUILTIN-001: `abs(n)` — inline: dup, push 0, less-than, branch on negative
738                    "abs" => {
739                        if args.len() == 1 {
740                            self.emit_expr(&args[0]);          // stack: [x]
741                            self.code.push(0x0a);              // TDUP   → [x, x]
742                            self.code.push(0x17);              // TpushInt 0
743                            self.code.extend_from_slice(&0i64.to_le_bytes()); // → [x, x, 0]
744                            self.code.push(0x14);              // Tless: (x < 0) → Affirm; [x, cmp]
745                            // TjmpPos (peek) to negate branch
746                            let neg_patch = self.code.len() + 1;
747                            self.code.push(0x05); self.code.extend_from_slice(&[0, 0]);
748                            // not negative: pop cmp, jump to end
749                            self.code.push(0x0c);              // TPOP → [x]
750                            let end_patch = self.code.len() + 1;
751                            self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]);
752                            // negate branch: pop cmp, negate x
753                            let neg_addr = self.code.len() as u16;
754                            self.patch_u16(neg_patch, neg_addr);
755                            self.code.push(0x0c);              // TPOP → [x]
756                            self.code.push(0x04);              // Tneg → [-x] (positive when x<0)
757                            let end_addr = self.code.len() as u16;
758                            self.patch_u16(end_patch, end_addr);
759                        }
760                    }
761                    // VM-BUILTIN-001: `min(a, b)` — inline with temp registers
762                    "min" => {
763                        if args.len() == 2 {
764                            let a_reg = self.alloc_reg();
765                            let b_reg = self.alloc_reg();
766                            self.emit_expr(&args[0]);
767                            self.code.push(0x08); self.code.push(a_reg); // TSTORE a
768                            self.emit_expr(&args[1]);
769                            self.code.push(0x08); self.code.push(b_reg); // TSTORE b
770                            self.code.push(0x09); self.code.push(a_reg); // TLOAD a
771                            self.code.push(0x09); self.code.push(b_reg); // TLOAD b
772                            self.code.push(0x14);                         // Tless: a < b → Affirm
773                            // TjmpPos → a is smaller, return a
774                            let a_smaller_patch = self.code.len() + 1;
775                            self.code.push(0x05); self.code.extend_from_slice(&[0, 0]);
776                            // a >= b: return b
777                            self.code.push(0x0c);              // TPOP cmp
778                            self.code.push(0x09); self.code.push(b_reg); // TLOAD b
779                            let end_patch = self.code.len() + 1;
780                            self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]);
781                            // a < b: return a
782                            let a_smaller_addr = self.code.len() as u16;
783                            self.patch_u16(a_smaller_patch, a_smaller_addr);
784                            self.code.push(0x0c);              // TPOP cmp
785                            self.code.push(0x09); self.code.push(a_reg); // TLOAD a
786                            let end_addr = self.code.len() as u16;
787                            self.patch_u16(end_patch, end_addr);
788                        }
789                    }
790                    // VM-BUILTIN-001: `max(a, b)` — inline with temp registers
791                    "max" => {
792                        if args.len() == 2 {
793                            let a_reg = self.alloc_reg();
794                            let b_reg = self.alloc_reg();
795                            self.emit_expr(&args[0]);
796                            self.code.push(0x08); self.code.push(a_reg); // TSTORE a
797                            self.emit_expr(&args[1]);
798                            self.code.push(0x08); self.code.push(b_reg); // TSTORE b
799                            self.code.push(0x09); self.code.push(b_reg); // TLOAD b
800                            self.code.push(0x09); self.code.push(a_reg); // TLOAD a
801                            self.code.push(0x14);                         // Tless: b < a → Affirm
802                            // TjmpPos → a is larger, return a
803                            let a_larger_patch = self.code.len() + 1;
804                            self.code.push(0x05); self.code.extend_from_slice(&[0, 0]);
805                            // b >= a: return b
806                            self.code.push(0x0c);              // TPOP cmp
807                            self.code.push(0x09); self.code.push(b_reg); // TLOAD b
808                            let end_patch = self.code.len() + 1;
809                            self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]);
810                            // b < a: return a
811                            let a_larger_addr = self.code.len() as u16;
812                            self.patch_u16(a_larger_patch, a_larger_addr);
813                            self.code.push(0x0c);              // TPOP cmp
814                            self.code.push(0x09); self.code.push(a_reg); // TLOAD a
815                            let end_addr = self.code.len() as u16;
816                            self.patch_u16(end_patch, end_addr);
817                        }
818                    }
819                    // `pow(base, exp)` — integer power via loop: result = 1; while exp>0 { result*=base; exp-=1; }
820                    "pow" => {
821                        if args.len() == 2 {
822                            let b_reg = self.alloc_reg(); // base
823                            let e_reg = self.alloc_reg(); // exponent
824                            let r_reg = self.alloc_reg(); // result
825                            // store base
826                            self.emit_expr(&args[0]);
827                            self.code.push(0x08); self.code.push(b_reg);
828                            // store exp
829                            self.emit_expr(&args[1]);
830                            self.code.push(0x08); self.code.push(e_reg);
831                            // result = 1
832                            self.code.push(0x17); self.code.extend_from_slice(&1i64.to_le_bytes());
833                            self.code.push(0x08); self.code.push(r_reg);
834                            // loop_start: check e > 0
835                            let loop_start = self.code.len() as u16;
836                            self.code.push(0x09); self.code.push(e_reg);  // TLOAD e
837                            self.code.push(0x17); self.code.extend_from_slice(&0i64.to_le_bytes()); // push 0
838                            self.code.push(0x15);  // Tgreater: e > 0 → Affirm
839                            // TjmpPos → jump to loop body
840                            let body_patch = self.code.len() + 1;
841                            self.code.push(0x05); self.code.extend_from_slice(&[0, 0]);
842                            // e <= 0: pop cmp, jump to end
843                            self.code.push(0x0c);
844                            let end_patch = self.code.len() + 1;
845                            self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]);
846                            // loop body:
847                            let body_addr = self.code.len() as u16;
848                            self.patch_u16(body_patch, body_addr);
849                            self.code.push(0x0c);  // TPOP cmp
850                            // r = r * b
851                            self.code.push(0x09); self.code.push(r_reg);
852                            self.code.push(0x09); self.code.push(b_reg);
853                            self.code.push(0x03);  // Tmul (handles int*int)
854                            self.code.push(0x08); self.code.push(r_reg);
855                            // e = e - 1
856                            self.code.push(0x09); self.code.push(e_reg);
857                            self.code.push(0x17); self.code.extend_from_slice(&(-1i64).to_le_bytes());
858                            self.code.push(0x18);  // TaddInt
859                            self.code.push(0x08); self.code.push(e_reg);
860                            // jump back to loop_start
861                            self.code.push(0x0b); self.code.extend_from_slice(&loop_start.to_le_bytes());
862                            // end:
863                            let end_addr = self.code.len() as u16;
864                            self.patch_u16(end_patch, end_addr);
865                            // push result
866                            self.code.push(0x09); self.code.push(r_reg);
867                        }
868                    }
869                    // `push(arr, val)` / `pop(arr)` — tensor mutation not yet implemented.
870                    // Emit argument expressions for side-effects then push a Tend stub so
871                    // callers get a value without falling through to an unresolved TCALL 0x0000
872                    // (which causes infinite recursion via jump-to-program-start).
873                    "push" => {
874                        for a in args { self.emit_expr(a); self.code.push(0x0c); } // eval + discard
875                        self.code.push(0x01); self.code.extend(pack_trits(&[Trit::Tend])); // stub result
876                    }
877                    "pop" => {
878                        for a in args { self.emit_expr(a); self.code.push(0x0c); } // eval + discard
879                        self.code.push(0x01); self.code.extend(pack_trits(&[Trit::Tend])); // stub result
880                    }
881                    "pack" => {
882                        for a in args { self.emit_expr(a); }
883                        if args.len() == 5 { self.code.push(0x50); }
884                    }
885                    "unpack" => {
886                        if args.len() == 1 { self.emit_expr(&args[0]); self.code.push(0x51); }
887                    }
888                    "v_add" => {
889                        if args.len() == 2 { for a in args { self.emit_expr(a); } self.code.push(0x52); }
890                    }
891                    "v_neg" => {
892                        if args.len() == 1 { self.emit_expr(&args[0]); self.code.push(0x53); }
893                    }
894                    "v_con" => {
895                        if args.len() == 2 { for a in args { self.emit_expr(a); } self.code.push(0x54); }
896                    }
897                    "bind" => {
898                        if args.len() == 2 {
899                            if let Expr::Ident(name) = &args[0] {
900                                if let Some(&reg) = self.symbols.get(name) {
901                                    self.emit_expr(&args[1]);
902                                    self.code.push(0x42); // TBIND
903                                    self.code.push(reg);
904                                }
905                            }
906                        }
907                        self.code.push(0x01); self.code.extend(pack_trits(&[Trit::Tend]));
908                    }
909                    "mul" => {
910                        for a in args { self.emit_expr(a); }
911                        if args.len() == 2 { self.code.push(0x03); }
912                    }
913                    "truth" => { self.code.push(0x01); self.code.extend(pack_trits(&[Trit::Affirm])); }
914                    "hold" => { self.code.push(0x01); self.code.extend(pack_trits(&[Trit::Tend])); }
915                    "conflict" => { self.code.push(0x01); self.code.extend(pack_trits(&[Trit::Reject])); }
916                    _ => {
917                        for a in args {
918                            // If argument is a struct, we need to push all its flattened fields + root dummy
919                            let mut pushed_as_struct = false;
920                            if let Expr::Ident(name) = a {
921                                // We don't have the variable type here, but we can try to find if it's a struct
922                                // by looking for any mangled keys starting with "name.".
923                                // To get the correct field order, we'd need the struct name.
924                                // Let's try to find which struct layout matches the existing mangled keys.
925                                let mut fields_found = Vec::new();
926                                for (_s_name, s_fields) in &self.struct_layouts {
927                                    let mut all_present = true;
928                                    let mut current_regs = Vec::new();
929                                    for f in s_fields {
930                                        let key = format!("{}.{}", name, f);
931                                        if let Some(&r) = self.symbols.get(&key) {
932                                            current_regs.push(r);
933                                        } else {
934                                            all_present = false;
935                                            break;
936                                        }
937                                    }
938                                    if all_present && !s_fields.is_empty() {
939                                        fields_found = current_regs;
940                                        break;
941                                    }
942                                }
943
944                                if !fields_found.is_empty() {
945                                    for reg in fields_found {
946                                        self.code.push(0x09); self.code.push(reg); // TLOAD field
947                                    }
948                                    // Push root dummy
949                                    if let Some(&reg) = self.symbols.get(name) {
950                                        self.code.push(0x09); self.code.push(reg); // TLOAD root
951                                    }
952                                    pushed_as_struct = true;
953                                }
954                            }
955                            
956                            if !pushed_as_struct {
957                                self.emit_expr(a);
958                            }
959                        }
960                        self.code.push(0x10); // TCALL
961                        if let Some(&addr) = self.func_addrs.get(callee) {
962                            self.code.extend_from_slice(&addr.to_le_bytes());
963                        } else {
964                            let patch = self.code.len();
965                            self.code.extend_from_slice(&[0, 0]);
966                            self.function_patches.entry(callee.to_string()).or_default().push(patch);
967                        }
968                    }
969                }
970            }
971            Expr::Spawn { agent_name, .. } => {
972                if let Some(&type_id) = self.agent_type_ids.get(agent_name) {
973                    self.code.push(0x30); // TSPAWN
974                    self.code.extend_from_slice(&type_id.to_le_bytes());
975                } else {
976                    self.code.push(0x01); self.code.extend(pack_trits(&[Trit::Tend]));
977                }
978            }
979            Expr::Await { target } => {
980                self.emit_expr(target);
981                self.code.push(0x32); // TAWAIT
982            }
983            Expr::Slice { object, start, end, stride } => {
984                self.emit_expr(object);
985                self.emit_expr(start);
986                // Compute length = end - start
987                self.emit_expr(end);
988                self.emit_expr(start);
989                self.code.push(0x04); self.code.push(0x02); // TSUB (end - start)
990                self.emit_expr(stride);
991                self.code.push(0x55); // TVIEW
992            }
993            Expr::TritTensorLiteral(vs) => {
994                let rows = vs.len();
995                let cols = 1;
996                self.code.push(0x0f);
997                self.code.extend_from_slice(&(rows as u32).to_le_bytes());
998                self.code.extend_from_slice(&(cols as u32).to_le_bytes());
999                let tr = self.next_reg; self.next_reg += 1;
1000                self.code.push(0x08); self.code.push(tr.try_into().unwrap());
1001                for (idx, &v) in vs.iter().enumerate() {
1002                    self.code.push(0x09); self.code.push(tr.try_into().unwrap());
1003                    self.code.push(0x17); self.code.extend_from_slice(&(idx as i64).to_le_bytes());
1004                    self.code.push(0x17); self.code.extend_from_slice(&0i64.to_le_bytes());
1005                    self.code.push(0x01); self.code.extend(pack_trits(&[Trit::from(v)]));
1006                    self.code.push(0x23);
1007                }
1008                self.code.push(0x09); self.code.push(tr.try_into().unwrap());
1009            }
1010            Expr::StructLiteral { fields, .. } => {
1011                for (_, f_val) in fields {
1012                    self.emit_expr(f_val);
1013                }
1014                self.code.push(0x40); // TSTRUCT
1015                self.code.push(fields.len() as u8);
1016                // We pop in reverse order of pushing
1017                for (f_name, _) in fields.iter().rev() {
1018                    self.code.push(f_name.len() as u8);
1019                    self.code.extend_from_slice(f_name.as_bytes());
1020                }
1021            }
1022            Expr::Propagate { expr } => {
1023                self.emit_expr(expr);
1024                self.code.push(0x0a); // TDUP
1025                let patch = self.code.len() + 1;
1026                self.code.push(0x07); self.code.extend_from_slice(&[0, 0]); // TJMP_NEG
1027                let skip = self.code.len() + 1;
1028                self.code.push(0x0b); self.code.extend_from_slice(&[0, 0]); // TJMP
1029                let early_ret = self.code.len() as u16;
1030                self.patch_u16(patch, early_ret);
1031                self.code.push(0x11); // TRET
1032                let next = self.code.len() as u16;
1033                self.patch_u16(skip, next);
1034            }
1035            Expr::Index { object, row, col } => {
1036                self.emit_expr(object); self.emit_expr(row); self.emit_expr(col);
1037                self.code.push(0x22);
1038            }
1039            Expr::FieldAccess { object, field } => {
1040                if let Expr::Ident(obj_name) = object.as_ref() {
1041                    let key = format!("{}.{}", obj_name, field);
1042                    if let Some(&r) = self.symbols.get(&key) {
1043                        self.code.push(0x09); self.code.push(r); // TLOAD
1044                    }
1045                }
1046            }
1047            Expr::Cast { expr, .. } => {
1048                // cast() is a type annotation hint only — pass inner expression through
1049                self.emit_expr(expr);
1050            }
1051            Expr::NodeId => {
1052                // Emit TNODEID (0x36): defers binding to runtime so that
1053                // `--node-addr` / vm.set_node_id() is actually respected.
1054                // Previously this emitted a hardcoded "127.0.0.1:7373" string
1055                // literal at compile time, which meant distributed modules always
1056                // announced the wrong address when deployed with a custom node addr.
1057                self.code.push(0x36); // TNODEID — pushes Value::String(vm.node_id)
1058            }
1059        }
1060    }
1061
1062    pub fn emit_entry_call(&mut self, name: &str) {
1063        if let Some(&addr) = self.func_addrs.get(name) {
1064            self.code.push(0x10); self.code.extend_from_slice(&addr.to_le_bytes());
1065        }
1066    }
1067
1068    /// Allocate the next register, returning its index as `u8` (the bytecode register width).
1069    /// Emits a stderr diagnostic if the function requires more than 255 registers — programs
1070    /// that hit this have much bigger structural problems anyway.
1071    fn alloc_reg(&mut self) -> u8 {
1072        let r = self.next_reg;
1073        self.next_reg += 1;
1074        if r > 255 {
1075            eprintln!(
1076                "[CODEGEN] Warning: register #{r} exceeds u8 range — \
1077                 this function has too many local variables (max 255). \
1078                 Split the function or reduce scope depth."
1079            );
1080        }
1081        r as u8
1082    }
1083
1084    pub fn get_agent_handlers(&self) -> Vec<(u16, usize)> {
1085        self.agent_handlers.iter().map(|&(id, addr)| (id, addr as usize)).collect()
1086    }
1087
1088    pub fn finalize(&mut self) -> Vec<u8> { std::mem::take(&mut self.code) }
1089
1090    fn patch_u16(&mut self, pos: usize, val: u16) {
1091        let b = val.to_le_bytes();
1092        self.code[pos] = b[0]; self.code[pos + 1] = b[1];
1093    }
1094}