//! ternlang_core/vm/mod.rs — core BET virtual machine: the `VmError` type,
//! the runtime `Value` representation, tensor/agent bookkeeping structures,
//! and the `BetVm` bytecode interpreter.
1pub mod bet;
2
3use crate::trit::Trit;
4use crate::vm::bet::{unpack_trits, BetFault};
5
6use std::fmt;
7use std::sync::Arc;
8
9// ─── Remote transport trait ───────────────────────────────────────────────────
10
/// Abstraction over the wire protocol used for cross-node agent messaging.
/// Implementations must be `Send + Sync` because a shared `Arc<dyn RemoteTransport>`
/// is installed on the VM (see `BetVm::set_remote`).
pub trait RemoteTransport: Send + Sync {
    /// Deliver a single trit (encoded as `i8` — presumably -1/0/1, confirm with
    /// implementors) to `agent_id` on the node at `node_addr`.
    fn remote_send(&self, node_addr: &str, agent_id: usize, trit: i8) -> std::io::Result<()>;
    /// Block until a trit is available from `agent_id` on `node_addr` and return it.
    fn remote_await(&self, node_addr: &str, agent_id: usize) -> std::io::Result<i8>;
}
15
/// Maximum call depth before the VM returns a `CallStackOverflow` error.
/// Prevents OOM/freeze when programs contain unbounded recursion or
/// mutual recursion across imported modules.
const MAX_CALL_DEPTH: usize = 4096;
20
/// Every failure mode the VM can report. `Display` renders each variant with a
/// stable error code ([BET-xxx] / [IO-xxx] / [ASSERT-xxx]).
#[derive(Debug, PartialEq, Eq)]
pub enum VmError {
    /// A pop was attempted on an empty value stack.
    StackUnderflow,
    /// Invalid binary-encoded-trit data (see `vm::bet::BetFault`).
    BetFault(BetFault),
    /// Clean halt — a control-flow signal rather than a genuine failure.
    Halt,
    /// The dispatcher met an opcode byte it does not recognize.
    InvalidOpcode(u8),
    /// Register index outside the machine's valid range.
    InvalidRegister(u8),
    /// Program counter moved outside the loaded bytecode.
    PcOutOfBounds(usize),
    /// Operand types did not match what the instruction requires.
    TypeMismatch { expected: String, found: String },
    // ── Tensor errors ────────────────────────────────────────────────────────
    /// `index` exceeds the element count `size` of tensor `tensor_id`.
    TensorIndexOutOfBounds { tensor_id: usize, index: usize, size: usize },
    /// A TensorRef referenced a tensor id that was never allocated.
    TensorNotAllocated(usize),
    // ── Agent errors ─────────────────────────────────────────────────────────
    /// Spawn requested for an agent type id that was never registered.
    AgentTypeNotRegistered(u16),
    /// TSEND/TAWAIT targeted an agent id with no live agent.
    AgentIdInvalid(usize),
    /// Catch-all runtime failure carrying a free-form message.
    RuntimeError(String),
    /// Call depth exceeded `MAX_CALL_DEPTH`.
    CallStackOverflow,
    // ── File I/O errors ──────────────────────────────────────────────────────
    FileOpenError(String),
    FileReadError(String),
    FileWriteError(String),
    /// File handle id that is not open or was already closed.
    FileNotOpen(usize),
    /// An `assert()` condition evaluated to reject or tend.
    AssertionFailed,
}
45
/// Human-readable error rendering. Each message carries a stable error code and,
/// for the BET-series codes, a pointer to the matching entry under
/// `stdlib/errors/` plus the `ternlang errors <CODE>` CLI invocation.
/// NOTE(review): the message text is user-facing contract — treat it as frozen.
impl fmt::Display for VmError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            VmError::StackUnderflow =>
                write!(f, "[BET-001] Stack underflow — you tried to pop a truth that wasn't there.\n          → details: stdlib/errors/BET-001.tern  |  ternlang errors BET-001"),
            VmError::BetFault(fault) =>
                write!(f, "[BET-002] BET encoding fault: {fault:?}. The 0b00 state is forbidden — only 01/10/11 are valid trit bits.\n          → details: stdlib/errors/BET-002.tern  |  ternlang errors BET-002"),
            VmError::Halt =>
                write!(f, "[BET-003] VM halted cleanly. Execution reached the end. This is not an error — this is peace.\n          → details: stdlib/errors/BET-003.tern  |  ternlang errors BET-003"),
            VmError::InvalidOpcode(op) =>
                write!(f, "[BET-004] Unknown opcode 0x{op:02x} — the machine has never seen this instruction. Delete cached .ternbc files and recompile.\n          → details: stdlib/errors/BET-004.tern  |  ternlang errors BET-004"),
            VmError::InvalidRegister(reg) =>
                write!(f, "[BET-005] Register {reg} is out of range. The BET has exactly 27 registers (0–26). That's 3³. No more.\n          → details: stdlib/errors/BET-005.tern  |  ternlang errors BET-005"),
            VmError::PcOutOfBounds(pc) =>
                write!(f, "[BET-006] PC {pc} is out of bounds — you jumped outside the known universe. Recompile from source.\n          → details: stdlib/errors/BET-006.tern  |  ternlang errors BET-006"),
            VmError::TypeMismatch { expected, found } =>
                write!(f, "[BET-007] Runtime type mismatch — expected {expected} but found {found}. Square peg, round hole.\n          → details: stdlib/errors/BET-007.tern  |  ternlang errors BET-007"),
            VmError::TensorIndexOutOfBounds { tensor_id, index, size } =>
                write!(f, "[BET-008] Tensor[{tensor_id}]: index {index} is out of bounds — tensor only has {size} element(s). Trittensors don't grow on access.\n          → details: stdlib/errors/BET-008.tern  |  ternlang errors BET-008"),
            VmError::TensorNotAllocated(idx) =>
                write!(f, "[BET-009] TensorRef({idx}) doesn't exist — you never allocated it. TALLOC first, then TIDX.\n          → details: stdlib/errors/BET-009.tern  |  ternlang errors BET-009"),
            VmError::AgentTypeNotRegistered(type_id) =>
                write!(f, "[BET-010] Agent type_id 0x{type_id:04x} was never registered. You can't spawn what was never declared.\n          → details: stdlib/errors/BET-010.tern  |  ternlang errors BET-010"),
            VmError::AgentIdInvalid(id) =>
                write!(f, "[BET-011] Agent #{id} doesn't exist — no agent was spawned at this ID. TSEND and TAWAIT require a live agent.\n          → details: stdlib/errors/BET-011.tern  |  ternlang errors BET-011"),
            VmError::RuntimeError(msg) =>
                write!(f, "[BET-012] Runtime error: {msg}"),
            VmError::CallStackOverflow =>
                write!(f, "[BET-013] Call stack overflow — max depth ({MAX_CALL_DEPTH}) exceeded. Infinite recursion or unbounded cross-module mutual calls detected.\n          → details: stdlib/errors/BET-013.tern  |  ternlang errors BET-013"),
            VmError::FileOpenError(e) =>
                write!(f, "[IO-001] File open error: {e}"),
            VmError::FileReadError(e) =>
                write!(f, "[IO-002] File read error: {e}"),
            VmError::FileWriteError(e) =>
                write!(f, "[IO-003] File write error: {e}"),
            VmError::FileNotOpen(id) =>
                write!(f, "[IO-004] File handle {id} is not open or was closed."),
            VmError::AssertionFailed =>
                write!(f, "[ASSERT-001] Assertion failed: an assert() condition evaluated to reject or tend."),
        }
    }
}
88
/// A runtime value held on the VM stack, in a register, or inside a tensor/struct.
#[derive(Debug, Clone, PartialEq)]
pub enum Value {
    /// A balanced-ternary digit (Reject / Tend / Affirm).
    Trit(Trit),
    Int(i64),
    Float(f64),
    String(String),
    /// Index into the VM's tensor table (`BetVm::tensors`).
    TensorRef(usize),
    /// A strided window over an existing tensor — indices, not copied data.
    TensorView {
        tensor_id: usize,
        offset: usize,
        length: usize,
        stride: usize,
    },
    /// Agent id plus an optional string — presumably the remote node address
    /// for agents on other nodes; TODO confirm against the spawn/TSEND opcodes.
    AgentRef(usize, Option<String>),
    /// String-keyed record of values.
    Struct(std::collections::HashMap<String, Value>),
}
105
106impl Default for Value {
107    fn default() -> Self {
108        Value::Trit(Trit::Tend)
109    }
110}
111
/// Backing storage for a tensor; one variant per element type.
enum TensorData {
    /// One `Trit` per element, unpacked.
    Trit(Vec<Trit>),
    /// Packed trit bytes plus the logical element count (bytes alone cannot
    /// express an exact length — presumably multiple trits per byte, see `vm::bet`).
    PackedTrit(Vec<u8>, usize),
    Float(Vec<f64>),
    Int(Vec<i64>),
}
118
119impl TensorData {
120    fn len(&self) -> usize {
121        match self {
122            TensorData::Trit(v) => v.len(),
123            TensorData::PackedTrit(_, len) => *len,
124            TensorData::Float(v) => v.len(),
125            TensorData::Int(v) => v.len(),
126        }
127    }
128}
129
/// An allocated 2-D tensor: element storage plus its row/column shape
/// (`rows * cols` elements, see the Talloc handlers).
struct TensorInstance {
    data: TensorData,
    rows: usize,
    cols: usize,
}
135
/// A spawned agent: the bytecode address of its message handler and its
/// queue of pending messages.
struct AgentInstance {
    handler_addr: usize,
    mailbox: std::collections::VecDeque<Value>,
}
140
/// The BET bytecode interpreter: registers, operand stack, call frames,
/// tensor/agent tables, and the program being executed.
pub struct BetVm {
    /// Dynamic register file — grows on demand so programs with > 27 locals work correctly
    /// instead of silently dropping stores and returning zero on reads.
    registers: Vec<Value>,
    /// Saved register files, one frame per active call (pushed by Tcall, restored by Tret).
    register_stack: Vec<Vec<Value>>,
    /// Carry trit produced by trit addition (Tadd).
    carry_reg: Trit,
    /// Operand value stack.
    stack: Vec<Value>,
    /// Return addresses of active calls; depth capped at `MAX_CALL_DEPTH`.
    call_stack: Vec<usize>,
    /// Allocated tensors, indexed by `Value::TensorRef`.
    tensors: Vec<TensorInstance>,
    /// Live agents, indexed by agent id.
    agents: Vec<AgentInstance>,
    /// Registered agent types: type_id → handler entry address.
    agent_types: std::collections::HashMap<u16, usize>,
    /// Program counter — index of the next byte to fetch from `code`.
    pc: usize,
    /// The loaded bytecode.
    code: Vec<u8>,
    /// This node's address for distributed messaging (defaults to "127.0.0.1").
    node_id: String,
    // NOTE(review): set to true somewhere outside this chunk — presumably when
    // a sparse operation drops values; confirm before documenting further.
    pub sparse_dropped: bool,
    /// Optional transport for cross-node agent messaging.
    remote: Option<Arc<dyn RemoteTransport>>,
    /// Open file handles; `None` marks a closed slot (see `VmError::FileNotOpen`).
    open_files: Vec<Option<std::fs::File>>,
    /// Register-index overrides consulted by Tload before the register file
    /// and cleared by Tstore.
    bindings: std::collections::HashMap<usize, Value>,
    // Reserved instruction counter; not read in this chunk.
    _instructions_count: u64,
    /// Lines produced by print()/println(); drained via `take_output`.
    pub print_log: Vec<String>,
}
162
163impl BetVm {
164    pub fn new(code: Vec<u8>) -> Self {
165        Self {
166            registers: vec![Value::default(); 27],
167            register_stack: Vec::new(),
168            carry_reg: Trit::Tend,
169            stack: Vec::new(),
170            call_stack: Vec::new(),
171            tensors: Vec::new(),
172            agents: Vec::new(),
173            agent_types: std::collections::HashMap::new(),
174            pc: 0,
175            code,
176            node_id: "127.0.0.1".into(),
177            sparse_dropped: false,
178            remote: None,
179            open_files: Vec::new(),
180            bindings: std::collections::HashMap::new(),
181            _instructions_count: 0,
182            print_log: Vec::new(),
183        }
184    }
185
186
187    /// Drain all lines printed by `print()`/`println()` during execution.
188    pub fn take_output(&mut self) -> Vec<String> {
189        std::mem::take(&mut self.print_log)
190    }
191
    /// Set this VM's node address (see `node_id`); used in distributed
    /// agent messaging.
    pub fn set_node_id(&mut self, node_id: String) {
        self.node_id = node_id;
    }
195
    /// Install the transport used for cross-node agent messaging — presumably
    /// consulted by the remote TSEND/TAWAIT paths; confirm in the opcode handlers.
    pub fn set_remote(&mut self, transport: Arc<dyn RemoteTransport>) {
        self.remote = Some(transport);
    }
199
    /// Register the handler entry address for agent type `type_id`. Spawning an
    /// unregistered type yields `VmError::AgentTypeNotRegistered`.
    pub fn register_agent_type(&mut self, type_id: u16, handler_addr: usize) {
        self.agent_types.insert(type_id, handler_addr);
    }
203
    /// Clone of the top of the value stack, or `None` when the stack is empty.
    pub fn peek_stack(&self) -> Option<Value> {
        self.stack.last().cloned()
    }
207
    /// Snapshot (full clone) of the register file, for inspection/debugging.
    pub fn get_registers(&self) -> Vec<Value> {
        self.registers.clone()
    }
211
212    pub fn get_register(&self, reg: u8) -> Value {
213        self.registers.get(reg as usize).cloned().unwrap_or_default()
214    }
215
    /// This VM's node address (defaults to "127.0.0.1" unless `set_node_id` was called).
    pub fn node_id(&self) -> &str {
        &self.node_id
    }
219
220    pub fn run(&mut self) -> Result<(), VmError> {
221        loop {
222            if self.pc >= self.code.len() { break; }
223            let opcode = self.code[self.pc];
224            self.pc += 1;
225
226            match opcode {
227                0x01 => { // Tpush
228                    let packed = self.read_u8()?;
229                    let trits = unpack_trits(&[packed], 1).map_err(VmError::BetFault)?;
230                    self.stack.push(Value::Trit(trits[0]));
231                }
232                0x02 => { // Tadd
233                    let b = self.stack.pop().ok_or(VmError::StackUnderflow)?;
234                    let a = self.stack.pop().ok_or(VmError::StackUnderflow)?;
235                    match (a.clone(), b.clone()) {
236                        (Value::Trit(av), Value::Trit(bv)) => {
237                            let (sum, carry) = av + bv;
238                            self.stack.push(Value::Trit(sum));
239                            self.carry_reg = carry;
240                        }
241                        (Value::Int(av), Value::Int(bv)) => self.stack.push(Value::Int(av + bv)),
242                        (Value::Float(av), Value::Float(bv)) => self.stack.push(Value::Float(av + bv)),
243                        (Value::Int(av), Value::Trit(bv)) => self.stack.push(Value::Int(av + bv as i64)),
244                        (Value::Trit(av), Value::Int(bv)) => self.stack.push(Value::Int(av as i64 + bv)),
245                        (Value::Float(av), Value::Trit(bv)) => self.stack.push(Value::Float(av + (bv as i8 as f64))),
246                        (Value::Trit(av), Value::Float(bv)) => self.stack.push(Value::Float((av as i8 as f64) + bv)),
247                        (Value::Float(av), Value::Int(bv)) => self.stack.push(Value::Float(av + (bv as f64))),
248                        (Value::Int(av), Value::Float(bv)) => self.stack.push(Value::Float((av as f64) + bv)),
249                        // PARSER-STR-001: string concatenation via + operator
250                        (Value::String(av), Value::String(bv)) => self.stack.push(Value::String(av + &bv)),
251                        _ => return Err(VmError::TypeMismatch { expected: "Numeric".into(), found: format!("{:?}", (a, b)) }),
252                    }
253                }
254                0x03 => { // Tmul
255                    let b = self.stack.pop().ok_or(VmError::StackUnderflow)?;
256                    let a = self.stack.pop().ok_or(VmError::StackUnderflow)?;
257                    match (a.clone(), b.clone()) {
258                        (Value::Trit(av), Value::Trit(bv)) => self.stack.push(Value::Trit(av * bv)),
259                        (Value::Int(av), Value::Int(bv)) => self.stack.push(Value::Int(av * bv)),
260                        (Value::Float(av), Value::Float(bv)) => self.stack.push(Value::Float(av * bv)),
261                        (Value::Int(av), Value::Trit(bv)) => self.stack.push(Value::Int(av * bv as i64)),
262                        (Value::Trit(av), Value::Int(bv)) => self.stack.push(Value::Int(av as i64 * bv)),
263                        (Value::Float(av), Value::Trit(bv)) => self.stack.push(Value::Float(av * (bv as i8 as f64))),
264                        (Value::Trit(av), Value::Float(bv)) => self.stack.push(Value::Float((av as i8 as f64) * bv)),
265                        (Value::Float(av), Value::Int(bv)) => self.stack.push(Value::Float(av * (bv as f64))),
266                        (Value::Int(av), Value::Float(bv)) => self.stack.push(Value::Float((av as f64) * bv)),
267                        _ => return Err(VmError::TypeMismatch { expected: "Numeric".into(), found: format!("{:?}", (a, b)) }),
268                    }
269                }
270                0x04 => { // Tneg
271                    let a = self.stack.pop().ok_or(VmError::StackUnderflow)?;
272                    match a.clone() {
273                        Value::Trit(av) => self.stack.push(Value::Trit(-av)),
274                        Value::Int(av) => self.stack.push(Value::Int(-av)),
275                        Value::Float(av) => self.stack.push(Value::Float(-av)),
276                        _ => return Err(VmError::TypeMismatch { expected: "Numeric".into(), found: format!("{:?}", a) }),
277                    }
278                }
279                0x05 => { // TjmpPos — jumps if top is positive (Affirm / any int > 0)
280                    let addr = self.read_u16()?;
281                    let val = self.stack.last().ok_or(VmError::StackUnderflow)?;
282                    let is_pos = match val {
283                        Value::Trit(Trit::Affirm) => true,
284                        Value::Int(v) => *v > 0,
285                        Value::Float(f) => *f > 0.0,
286                        _ => false,
287                    };
288                    if is_pos { self.pc = addr as usize; }
289                }
290                0x06 => { // TjmpZero — jumps if top is zero (Tend / 0)
291                    let addr = self.read_u16()?;
292                    let val = self.stack.last().ok_or(VmError::StackUnderflow)?;
293                    let is_zero = match val {
294                        Value::Trit(Trit::Tend) => true,
295                        Value::Int(v) => *v == 0,
296                        Value::Float(f) => *f == 0.0,
297                        _ => false,
298                    };
299                    if is_zero { self.pc = addr as usize; }
300                }
301                0x07 => { // TjmpNeg — jumps if top is negative (Reject / any int < 0)
302                    let addr = self.read_u16()?;
303                    let val = self.stack.last().ok_or(VmError::StackUnderflow)?;
304                    let is_neg = match val {
305                        Value::Trit(Trit::Reject) => true,
306                        Value::Int(v) => *v < 0,
307                        Value::Float(f) => *f < 0.0,
308                        _ => false,
309                    };
310                    if is_neg { self.pc = addr as usize; }
311                }
312                0x08 => { // Tstore
313                    let reg = self.read_u8()? as usize;
314                    let val = self.stack.pop().ok_or(VmError::StackUnderflow)?;
315                    self.bindings.remove(&reg);
316                    if reg >= self.registers.len() { self.registers.resize(reg + 1, Value::default()); }
317                    self.registers[reg] = val;
318                }
319                0x09 => { // Tload
320                    let reg = self.read_u8()? as usize;
321                    let val = self.bindings.get(&reg).cloned().unwrap_or_else(|| {
322                        if reg >= self.registers.len() { self.registers.resize(reg + 1, Value::default()); }
323                        self.registers[reg].clone()
324                    });
325                    self.stack.push(val);
326                }
327                0x0a => { // Tdup
328                    let val = self.stack.last().ok_or(VmError::StackUnderflow)?;
329                    self.stack.push(val.clone());
330                }
331                0x0b => { // Tjmp
332                    let addr = self.read_u16()?;
333                    self.pc = addr as usize;
334                }
335                0x0c => { // Tpop
336                    self.stack.pop().ok_or(VmError::StackUnderflow)?;
337                }
338                0x0e => { // Tcons
339                    let b_val = self.stack.pop().ok_or(VmError::StackUnderflow)?;
340                    let a_val = self.stack.pop().ok_or(VmError::StackUnderflow)?;
341                    
342                    let a = match a_val {
343                        Value::Trit(t) => t,
344                        Value::Int(v) if v == 1 => Trit::Affirm,
345                        Value::Int(v) if v == 0 => Trit::Tend,
346                        Value::Int(v) if v == -1 => Trit::Reject,
347                        _ => return Err(VmError::TypeMismatch { expected: "Trit or Int(-1..1)".into(), found: format!("{:?}", a_val) }),
348                    };
349                    let b = match b_val {
350                        Value::Trit(t) => t,
351                        Value::Int(v) if v == 1 => Trit::Affirm,
352                        Value::Int(v) if v == 0 => Trit::Tend,
353                        Value::Int(v) if v == -1 => Trit::Reject,
354                        _ => return Err(VmError::TypeMismatch { expected: "Trit or Int(-1..1)".into(), found: format!("{:?}", b_val) }),
355                    };
356
357                    let result = match (a, b) {
358                        (Trit::Affirm, Trit::Affirm) => Trit::Affirm,
359                        (Trit::Reject, Trit::Reject) => Trit::Reject,
360                        (Trit::Tend, x) => x,
361                        (x, Trit::Tend) => x,
362                        _ => Trit::Tend,
363                    };
364                    self.stack.push(Value::Trit(result));
365                }
366                0x0f => { // Talloc (trit tensor)
367                    let rows = self.read_u32()? as usize;
368                    let cols = self.read_u32()? as usize;
369                    let size = rows * cols;
370                    let idx = self.tensors.len();
371                    self.tensors.push(TensorInstance {
372                        data: TensorData::Trit(vec![Trit::Tend; size]),
373                        rows,
374                        cols,
375                    });
376                    self.stack.push(Value::TensorRef(idx));
377                }
378                0x3c => { // Talloc_Int (int tensor)
379                    let rows = self.read_u32()? as usize;
380                    let cols = self.read_u32()? as usize;
381                    let size = rows * cols;
382                    let idx = self.tensors.len();
383                    self.tensors.push(TensorInstance {
384                        data: TensorData::Int(vec![0i64; size]),
385                        rows,
386                        cols,
387                    });
388                    self.stack.push(Value::TensorRef(idx));
389                }
390                0x3d => { // Talloc_Float (float tensor)
391                    let rows = self.read_u32()? as usize;
392                    let cols = self.read_u32()? as usize;
393                    let size = rows * cols;
394                    let idx = self.tensors.len();
395                    self.tensors.push(TensorInstance {
396                        data: TensorData::Float(vec![0.0f64; size]),
397                        rows,
398                        cols,
399                    });
400                    self.stack.push(Value::TensorRef(idx));
401                }
402                0x10 => { // Tcall
403                    if self.call_stack.len() >= MAX_CALL_DEPTH {
404                        return Err(VmError::CallStackOverflow);
405                    }
406                    let addr = self.read_u16()? as usize;
407                    self.register_stack.push(self.registers.clone());
408                    self.call_stack.push(self.pc);
409                    self.pc = addr;
410                }
411                0x11 => { // Tret
412                    if let Some(prev) = self.register_stack.pop() {
413                        self.registers = prev;
414                    }
415                    match self.call_stack.pop() {
416                        Some(ret) => self.pc = ret,
417                        None => return Ok(()),
418                    }
419                }
420                0x14 => { // Tless
421                    let b = self.stack.pop().ok_or(VmError::StackUnderflow)?;
422                    let a = self.stack.pop().ok_or(VmError::StackUnderflow)?;
423                    match (a.clone(), b.clone()) {
424                        (Value::Int(x), Value::Int(y)) => {
425                            let r = if x < y { Trit::Affirm } else if x == y { Trit::Tend } else { Trit::Reject };
426                            self.stack.push(Value::Trit(r));
427                        }
428                        (Value::Float(x), Value::Float(y)) => {
429                            let r = if x < y { Trit::Affirm } else if (x - y).abs() < f64::EPSILON { Trit::Tend } else { Trit::Reject };
430                            self.stack.push(Value::Trit(r));
431                        }
432                        (Value::Int(x), Value::Trit(y)) => {
433                            let bv = y as i64;
434                            let r = if x < bv { Trit::Affirm } else if x == bv { Trit::Tend } else { Trit::Reject };
435                            self.stack.push(Value::Trit(r));
436                        }
437                        (Value::Trit(x), Value::Int(y)) => {
438                            let av = x as i64;
439                            let r = if av < y { Trit::Affirm } else if av == y { Trit::Tend } else { Trit::Reject };
440                            self.stack.push(Value::Trit(r));
441                        }
442                        (Value::Int(av), Value::Float(bv)) => {
443                            let a_val = av as f64;
444                            let r = if a_val < bv { Trit::Affirm } else if (a_val - bv).abs() < f64::EPSILON { Trit::Tend } else { Trit::Reject };
445                            self.stack.push(Value::Trit(r));
446                        }
447                        (Value::Float(av), Value::Int(bv)) => {
448                            let b_val = bv as f64;
449                            let r = if av < b_val { Trit::Affirm } else if (av - b_val).abs() < f64::EPSILON { Trit::Tend } else { Trit::Reject };
450                            self.stack.push(Value::Trit(r));
451                        }
452                        (Value::Trit(av), Value::Float(bv)) => {
453                            let a_val = av as i8 as f64;
454                            let r = if a_val < bv { Trit::Affirm } else if (a_val - bv).abs() < f64::EPSILON { Trit::Tend } else { Trit::Reject };
455                            self.stack.push(Value::Trit(r));
456                        }
457                        (Value::Float(av), Value::Trit(bv)) => {
458                            let b_val = bv as i8 as f64;
459                            let r = if av < b_val { Trit::Affirm } else if (av - b_val).abs() < f64::EPSILON { Trit::Tend } else { Trit::Reject };
460                            self.stack.push(Value::Trit(r));
461                        }
462                        (Value::Trit(x), Value::Trit(y)) => {
463                            let av = x as i64;
464                            let bv = y as i64;
465                            let r = if av < bv { Trit::Affirm } else if av == bv { Trit::Tend } else { Trit::Reject };
466                            self.stack.push(Value::Trit(r));
467                        }
468                        _ => return Err(VmError::TypeMismatch { expected: "Numeric".into(), found: format!("{:?}", (a, b)) }),
469                    }
470                }
471                0x15 => { // Tgreater
472                    let b = self.stack.pop().ok_or(VmError::StackUnderflow)?;
473                    let a = self.stack.pop().ok_or(VmError::StackUnderflow)?;
474                    match (a.clone(), b.clone()) {
475                        (Value::Int(x), Value::Int(y)) => {
476                            let r = if x > y { Trit::Affirm } else if x == y { Trit::Tend } else { Trit::Reject };
477                            self.stack.push(Value::Trit(r));
478                        }
479                        (Value::Float(x), Value::Float(y)) => {
480                            let r = if x > y { Trit::Affirm } else if (x - y).abs() < f64::EPSILON { Trit::Tend } else { Trit::Reject };
481                            self.stack.push(Value::Trit(r));
482                        }
483                        (Value::Int(x), Value::Trit(y)) => {
484                            let bv = y as i64;
485                            let r = if x > bv { Trit::Affirm } else if x == bv { Trit::Tend } else { Trit::Reject };
486                            self.stack.push(Value::Trit(r));
487                        }
488                        (Value::Trit(x), Value::Int(y)) => {
489                            let av = x as i64;
490                            let r = if av > y { Trit::Affirm } else if av == y { Trit::Tend } else { Trit::Reject };
491                            self.stack.push(Value::Trit(r));
492                        }
493                        (Value::Int(av), Value::Float(bv)) => {
494                            let a_val = av as f64;
495                            let r = if a_val > bv { Trit::Affirm } else if (a_val - bv).abs() < f64::EPSILON { Trit::Tend } else { Trit::Reject };
496                            self.stack.push(Value::Trit(r));
497                        }
498                        (Value::Float(av), Value::Int(bv)) => {
499                            let b_val = bv as f64;
500                            let r = if av > b_val { Trit::Affirm } else if (av - b_val).abs() < f64::EPSILON { Trit::Tend } else { Trit::Reject };
501                            self.stack.push(Value::Trit(r));
502                        }
503                        (Value::Trit(av), Value::Float(bv)) => {
504                            let a_val = av as i8 as f64;
505                            let r = if a_val > bv { Trit::Affirm } else if (a_val - bv).abs() < f64::EPSILON { Trit::Tend } else { Trit::Reject };
506                            self.stack.push(Value::Trit(r));
507                        }
508                        (Value::Float(av), Value::Trit(bv)) => {
509                            let b_val = bv as i8 as f64;
510                            let r = if av > b_val { Trit::Affirm } else if (av - b_val).abs() < f64::EPSILON { Trit::Tend } else { Trit::Reject };
511                            self.stack.push(Value::Trit(r));
512                        }
513                        (Value::Trit(x), Value::Trit(y)) => {
514                            let av = x as i64;
515                            let bv = y as i64;
516                            let r = if av > bv { Trit::Affirm } else if av == bv { Trit::Tend } else { Trit::Reject };
517                            self.stack.push(Value::Trit(r));
518                        }
519                        _ => return Err(VmError::TypeMismatch { expected: "Numeric".into(), found: format!("{:?}", (a, b)) }),
520                    }
521                }
522                0x16 => { // Teq
523                    let b = self.stack.pop().ok_or(VmError::StackUnderflow)?;
524                    let a = self.stack.pop().ok_or(VmError::StackUnderflow)?;
525                    let is_eq = match (a.clone(), b.clone()) {
526                        (Value::Int(av), Value::Trit(bv)) => av == bv as i64,
527                        (Value::Trit(av), Value::Int(bv)) => av as i64 == bv,
528                        (Value::Float(av), Value::Float(bv)) => (av - bv).abs() < f64::EPSILON,
529                        (Value::Float(av), Value::Trit(bv)) => (av - (bv as i8 as f64)).abs() < f64::EPSILON,
530                        (Value::Trit(av), Value::Float(bv)) => ((av as i8 as f64) - bv).abs() < f64::EPSILON,
531                        (Value::Float(av), Value::Int(bv)) => (av - (bv as f64)).abs() < f64::EPSILON,
532                        (Value::Int(av), Value::Float(bv)) => ((av as f64) - bv).abs() < f64::EPSILON,
533                        _ => a == b,
534                    };
535                    let r = if is_eq { Trit::Affirm } else { Trit::Reject };
536                    self.stack.push(Value::Trit(r));
537                }
538                0x17 => { // TpushInt
539                    let mut b = [0u8; 8];
540                    for i in 0..8 { b[i] = self.read_u8()?; }
541                    self.stack.push(Value::Int(i64::from_le_bytes(b)));
542                }
543                0x18 => { // TaddInt
544                    let b = self.stack.pop().ok_or(VmError::StackUnderflow)?;
545                    let a = self.stack.pop().ok_or(VmError::StackUnderflow)?;
546                    match (a.clone(), b.clone()) {
547                        (Value::Int(x), Value::Int(y)) => self.stack.push(Value::Int(x + y)),
548                        _ => return Err(VmError::TypeMismatch { expected: "Int".into(), found: format!("{:?}", (a, b)) }),
549                    }
550                }
551                0x19 => { // TpushFloat
552                    let mut b = [0u8; 8];
553                    for i in 0..8 { b[i] = self.read_u8()?; }
554                    self.stack.push(Value::Float(f64::from_le_bytes(b)));
555                }
556                0x1e => { // Tdiv
557                    let b_val = self.stack.pop().ok_or(VmError::StackUnderflow)?;
558                    let a_val = self.stack.pop().ok_or(VmError::StackUnderflow)?;
559                    match (a_val.clone(), b_val.clone()) {
560                        (Value::Int(av), Value::Int(bv)) => {
561                            if bv == 0 { return Err(VmError::RuntimeError("Division by zero".into())); }
562                            self.stack.push(Value::Int(av / bv));
563                        }
564                        (Value::Float(av), Value::Float(bv)) => {
565                            if bv == 0.0 { return Err(VmError::RuntimeError("Division by zero".into())); }
566                            self.stack.push(Value::Float(av / bv));
567                        }
568                        (Value::Int(av), Value::Trit(bv)) => {
569                            let b = bv as i64;
570                            if b == 0 { return Err(VmError::RuntimeError("Division by zero".into())); }
571                            self.stack.push(Value::Int(av / b));
572                        }
573                        (Value::Trit(av), Value::Int(bv)) => {
574                            if bv == 0 { return Err(VmError::RuntimeError("Division by zero".into())); }
575                            self.stack.push(Value::Int(av as i64 / bv));
576                        }
577                        (Value::Float(av), Value::Trit(bv)) => {
578                            let b = bv as i8 as f64;
579                            if b == 0.0 { return Err(VmError::RuntimeError("Division by zero".into())); }
580                            self.stack.push(Value::Float(av / b));
581                        }
582                        (Value::Trit(av), Value::Float(bv)) => {
583                            if bv == 0.0 { return Err(VmError::RuntimeError("Division by zero".into())); }
584                            self.stack.push(Value::Float(av as i8 as f64 / bv));
585                        }
586                        (Value::Float(av), Value::Int(bv)) => {
587                            let b = bv as f64;
588                            if b == 0.0 { return Err(VmError::RuntimeError("Division by zero".into())); }
589                            self.stack.push(Value::Float(av / b));
590                        }
591                        (Value::Int(av), Value::Float(bv)) => {
592                            if bv == 0.0 { return Err(VmError::RuntimeError("Division by zero".into())); }
593                            self.stack.push(Value::Float(av as f64 / bv));
594                        }
595                        _ => return Err(VmError::TypeMismatch { expected: "Numeric".into(), found: format!("{:?}", (a_val, b_val)) }),
596                    }
597                }
598                0x1f => { // Tmod
599                    let b_val = self.stack.pop().ok_or(VmError::StackUnderflow)?;
600                    let a_val = self.stack.pop().ok_or(VmError::StackUnderflow)?;
601                    match (a_val.clone(), b_val.clone()) {
602                        (Value::Int(av), Value::Int(bv)) => {
603                            if bv == 0 { return Err(VmError::RuntimeError("Modulo by zero".into())); }
604                            self.stack.push(Value::Int(av % bv));
605                        }
606                        (Value::Int(av), Value::Trit(bv)) => {
607                            let b = bv as i64;
608                            if b == 0 { return Err(VmError::RuntimeError("Modulo by zero".into())); }
609                            self.stack.push(Value::Int(av % b));
610                        }
611                        (Value::Trit(av), Value::Int(bv)) => {
612                            if bv == 0 { return Err(VmError::RuntimeError("Modulo by zero".into())); }
613                            self.stack.push(Value::Int(av as i64 % bv));
614                        }
615                        (Value::Float(av), Value::Float(bv)) => {
616                             if bv == 0.0 { return Err(VmError::RuntimeError("Modulo by zero".into())); }
617                             self.stack.push(Value::Float(av % bv));
618                        }
619                        (Value::Float(av), Value::Trit(bv)) => {
620                             let b = bv as i8 as f64;
621                             if b == 0.0 { return Err(VmError::RuntimeError("Modulo by zero".into())); }
622                             self.stack.push(Value::Float(av % b));
623                        }
624                        (Value::Trit(av), Value::Float(bv)) => {
625                             if bv == 0.0 { return Err(VmError::RuntimeError("Modulo by zero".into())); }
626                             self.stack.push(Value::Float(av as i8 as f64 % bv));
627                        }
628                        (Value::Float(av), Value::Int(bv)) => {
629                             let b = bv as f64;
630                             if b == 0.0 { return Err(VmError::RuntimeError("Modulo by zero".into())); }
631                             self.stack.push(Value::Float(av % b));
632                        }
633                        (Value::Int(av), Value::Float(bv)) => {
634                             if bv == 0.0 { return Err(VmError::RuntimeError("Modulo by zero".into())); }
635                             self.stack.push(Value::Float(av as f64 % bv));
636                        }
637                        _ => return Err(VmError::TypeMismatch { expected: "Int or Trit".into(), found: format!("{:?}", (a_val, b_val)) }),
638                    }
639                }
                0x20 => { // Tprint — pops one value, renders it, writes it to stdout and the print log
                    let val = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    // One rendering per Value variant; Strings print raw (no quotes),
                    // refs/views/agents/structs use a compact debug-style form.
                    let line = match &val {
                        Value::Trit(t) => format!("{:?}", t),
                        Value::Int(i) => format!("{}", i),
                        Value::Float(f) => format!("{}", f),
                        Value::String(s) => s.clone(),
                        Value::TensorRef(idx) => format!("TensorRef({})", idx),
                        Value::TensorView { tensor_id, offset, length, .. } => format!("TensorView({}[{}..{}])", tensor_id, offset, offset + length),
                        Value::AgentRef(idx, addr) => format!("AgentRef({}, {:?})", idx, addr),
                        Value::Struct(fields) => format!("Struct({:?})", fields),
                    };
                    println!("{}", line);
                    // Every printed line is also mirrored into the VM's print log.
                    self.print_log.push(line);
                }
655                0x21 => { // TpushString
656                    let len = self.read_u16()? as usize;
657                    let mut bytes = vec![0u8; len];
658                    for i in 0..len { bytes[i] = self.read_u8()?; }
659                    let s = String::from_utf8(bytes).map_err(|_| VmError::RuntimeError("Invalid UTF-8 string".into()))?;
660                    self.stack.push(Value::String(s));
661                }
                0x22 => { // Tidx — pops (tensor_ref/view, row, col), pushes the element at [row][col]
                    let col = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    let row = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    let rf = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    // Indices accept Int or Trit (a trit coerces through its numeric value).
                    let r = match row { Value::Int(v) => v, Value::Trit(t) => t as i64, _ => return Err(VmError::TypeMismatch { expected: "Int or Trit".into(), found: format!("{:?}", row) }) };
                    let c = match col { Value::Int(v) => v, Value::Trit(t) => t as i64, _ => return Err(VmError::TypeMismatch { expected: "Int or Trit".into(), found: format!("{:?}", col) }) };

                    // get_pos maps (ref-or-view, r, c) to (tensor index, flat position) —
                    // presumably applying view offset/stride; see get_pos (not in view).
                    let (idx, pos) = self.get_pos(&rf, r, c)?;
                    let tensor = &self.tensors[idx];
                    let data_len = tensor.data.len();
                    if pos >= data_len {
                        return Err(VmError::TensorIndexOutOfBounds { tensor_id: idx, index: pos, size: data_len });
                    }
                    let pushed = match &tensor.data {
                        TensorData::Trit(v) => Value::Trit(v[pos]),
                        // Packed storage holds 5 trits per byte: locate the byte,
                        // unpack it, then select the trit slot within it.
                        TensorData::PackedTrit(v, _) => {
                            let byte_idx = pos / 5;
                            let trit_idx = pos % 5;
                            let trits = crate::trit::unpack_5_trits(v[byte_idx]);
                            Value::Trit(trits[trit_idx])
                        }
                        TensorData::Float(v) => Value::Float(v[pos]),
                        TensorData::Int(v) => Value::Int(v[pos]),
                    };
                    self.stack.push(pushed);
                }
                0x23 => { // Tset — pops (tensor_ref/view, row, col, value), stores value at [row][col]
                    let val = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    let col = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    let row = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    let rf = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    // Indices accept Int or Trit (a trit coerces through its numeric value).
                    let r = match row { Value::Int(v) => v, Value::Trit(t) => t as i64, _ => return Err(VmError::TypeMismatch { expected: "Int or Trit".into(), found: format!("{:?}", row) }) };
                    let c = match col { Value::Int(v) => v, Value::Trit(t) => t as i64, _ => return Err(VmError::TypeMismatch { expected: "Int or Trit".into(), found: format!("{:?}", col) }) };

                    // get_pos maps (ref-or-view, r, c) to (tensor index, flat position) —
                    // presumably applying view offset/stride; see get_pos (not in view).
                    let (idx, pos) = self.get_pos(&rf, r, c)?;
                    let tensor = &mut self.tensors[idx];
                    let data_len = tensor.data.len();
                    if pos >= data_len { return Err(VmError::TensorIndexOutOfBounds { tensor_id: idx, index: pos, size: data_len }); }
                    // Lenient cross-type stores: Int → Trit by sign, Int ↔ Float by
                    // cast, Trit → Int numerically. Anything else is a TypeMismatch.
                    match (&mut tensor.data, val.clone()) {
                        (TensorData::Trit(v), Value::Trit(t)) => v[pos] = t,
                        (TensorData::Trit(v), Value::Int(i)) => v[pos] = if i > 0 { Trit::Affirm } else if i < 0 { Trit::Reject } else { Trit::Tend },
                        // Packed storage: read-modify-write the 5-trit byte that
                        // contains position `pos`.
                        (TensorData::PackedTrit(v, _), val_v) => {
                            let byte_idx = pos / 5;
                            let trit_idx = pos % 5;
                            let mut trits = crate::trit::unpack_5_trits(v[byte_idx]);
                            trits[trit_idx] = match val_v {
                                Value::Trit(t) => t,
                                Value::Int(i) => if i > 0 { Trit::Affirm } else if i < 0 { Trit::Reject } else { Trit::Tend },
                                _ => return Err(VmError::TypeMismatch { expected: "Trit or Int".into(), found: format!("{:?}", val_v) }),
                            };
                            v[byte_idx] = crate::trit::pack_5_trits(trits);
                        }
                        (TensorData::Float(v), Value::Float(f)) => v[pos] = f,
                        (TensorData::Float(v), Value::Int(i)) => v[pos] = i as f64,
                        (TensorData::Int(v), Value::Int(i)) => v[pos] = i,
                        // NOTE(review): Float → Int truncates toward zero via `as` —
                        // confirm truncation (not rounding) is the intended semantics.
                        (TensorData::Int(v), Value::Float(f)) => v[pos] = f as i64,
                        (TensorData::Int(v), Value::Trit(t)) => v[pos] = t as i64,
                        _ => return Err(VmError::TypeMismatch { expected: "compatible value for tensor type".into(), found: format!("{:?}", val) }),
                    }
                }
                0x24 => { // Tshape — TensorRef/View → (rows, cols); String → (len, 1)
                    let rf = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    match rf {
                        Value::TensorRef(idx) => {
                            if idx >= self.tensors.len() { return Err(VmError::TensorNotAllocated(idx)); }
                            let tensor = &self.tensors[idx];
                            // rows pushed first, cols second — cols ends up on top.
                            self.stack.push(Value::Int(tensor.rows as i64));
                            self.stack.push(Value::Int(tensor.cols as i64));
                        }
                        Value::TensorView { length, .. } => {
                            // Views are reported as 1-D: (length, 1).
                            self.stack.push(Value::Int(length as i64));
                            self.stack.push(Value::Int(1));
                        }
                        Value::String(s) => {
                            // Unicode-aware length: counts chars (scalar values), not bytes.
                            let n = s.chars().count() as i64;
                            self.stack.push(Value::Int(n));
                            self.stack.push(Value::Int(1));
                        }
                        _ => return Err(VmError::TypeMismatch { expected: "TensorRef, TensorView, or String".into(), found: format!("{:?}", rf) }),
                    }
                }
743                0x30 => { // Tspawn — (type_id) → AgentRef
744                    let type_id = self.read_u16()?;
745                    if let Some(&handler_addr) = self.agent_types.get(&type_id) {
746                        let id = self.agents.len();
747                        self.agents.push(AgentInstance { handler_addr, mailbox: Default::default() });
748                        self.stack.push(Value::AgentRef(id, None));
749                    } else {
750                        return Err(VmError::AgentTypeNotRegistered(type_id));
751                    }
752                }
                0x31 => { // Tsend — msg, target → void
                    let msg = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    let target = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    // Only local agents (addr == None) are accepted here.
                    // NOTE(review): remote AgentRefs (Some(addr)) fall through to
                    // TypeMismatch — presumably handled elsewhere via RemoteTransport;
                    // confirm against the dispatcher.
                    if let Value::AgentRef(id, None) = target {
                        if id < self.agents.len() {
                            // Fire-and-forget: enqueue the message into the agent's mailbox.
                            self.agents[id].mailbox.push_back(msg);
                        } else {
                            return Err(VmError::AgentIdInvalid(id));
                        }
                    } else {
                        return Err(VmError::TypeMismatch { expected: "Local AgentRef".into(), found: format!("{:?}", target) });
                    }
                }
                0x32 => { // Tawait — target → result
                    let target = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    // Only local agents (addr == None) can be awaited here.
                    if let Value::AgentRef(id, None) = target {
                        if id < self.agents.len() {
                            // Guard against unbounded handler recursion (see MAX_CALL_DEPTH).
                            if self.call_stack.len() >= MAX_CALL_DEPTH {
                                return Err(VmError::CallStackOverflow);
                            }
                            let handler_addr = self.agents[id].handler_addr;
                            // An empty mailbox yields Value::default() as the message.
                            let msg = self.agents[id].mailbox.pop_front().unwrap_or(Value::default());
                            // Synchronous handler dispatch — identical to TCALL:
                            // save the register file and return pc, jump to the
                            // handler, and pass the message on the data stack.
                            self.register_stack.push(self.registers.clone());
                            self.call_stack.push(self.pc);
                            self.pc = handler_addr;
                            self.stack.push(msg);
                        } else {
                            return Err(VmError::AgentIdInvalid(id));
                        }
                    } else {
                        return Err(VmError::TypeMismatch { expected: "Local AgentRef".into(), found: format!("{:?}", target) });
                    }
                }
787                0x25 => { // TjmpEqInt — imm_int, imm_addr → peek, jumps if eq
788                    let mut b = [0u8; 8];
789                    for i in 0..8 { b[i] = self.read_u8()?; }
790                    let target_val = i64::from_le_bytes(b);
791                    let addr = self.read_u16()?;
792                    let val = self.stack.last().ok_or(VmError::StackUnderflow)?;
793                    let is_eq = match val {
794                        Value::Int(v) => *v == target_val,
795                        Value::Trit(t) => (*t as i8) as i64 == target_val,
796                        _ => false,
797                    };
798                    if is_eq { self.pc = addr as usize; }
799                }
800                0x26 => { // TlessEqual
801                    let b = self.stack.pop().ok_or(VmError::StackUnderflow)?;
802                    let a = self.stack.pop().ok_or(VmError::StackUnderflow)?;
803                    let is_le = match (a.clone(), b.clone()) {
804                        (Value::Int(x), Value::Int(y)) => x <= y,
805                        (Value::Float(x), Value::Float(y)) => x <= y || (x - y).abs() < f64::EPSILON,
806                        (Value::Int(x), Value::Trit(y)) => x <= y as i64,
807                        (Value::Trit(x), Value::Int(y)) => (x as i64) <= y,
808                        (Value::Trit(x), Value::Trit(y)) => (x as i64) <= (y as i64),
809                        _ => false,
810                    };
811                    self.stack.push(Value::Trit(if is_le { Trit::Affirm } else { Trit::Reject }));
812                }
813                0x27 => { // TgreaterEqual
814                    let b = self.stack.pop().ok_or(VmError::StackUnderflow)?;
815                    let a = self.stack.pop().ok_or(VmError::StackUnderflow)?;
816                    let is_ge = match (a.clone(), b.clone()) {
817                        (Value::Int(x), Value::Int(y)) => x >= y,
818                        (Value::Float(x), Value::Float(y)) => x >= y || (x - y).abs() < f64::EPSILON,
819                        (Value::Int(x), Value::Trit(y)) => x >= y as i64,
820                        (Value::Trit(x), Value::Int(y)) => (x as i64) >= y,
821                        (Value::Trit(x), Value::Trit(y)) => (x as i64) >= (y as i64),
822                        _ => false,
823                    };
824                    self.stack.push(Value::Trit(if is_ge { Trit::Affirm } else { Trit::Reject }));
825                }
826                0x28 => { // Tand — min(a, b) in balanced ternary (logical AND)
827                    let b = self.stack.pop().ok_or(VmError::StackUnderflow)?;
828                    let a = self.stack.pop().ok_or(VmError::StackUnderflow)?;
829                    let to_trit = |v: Value| -> Result<Trit, VmError> {
830                        match v {
831                            Value::Trit(t) => Ok(t),
832                            Value::Int(n) if n > 0 => Ok(Trit::Affirm),
833                            Value::Int(0) => Ok(Trit::Tend),
834                            Value::Int(_) => Ok(Trit::Reject),
835                            other => Err(VmError::TypeMismatch { expected: "Trit or Int".into(), found: format!("{:?}", other) }),
836                        }
837                    };
838                    let ta = to_trit(a)?;
839                    let tb = to_trit(b)?;
840                    let result = if (ta as i8) <= (tb as i8) { ta } else { tb };
841                    self.stack.push(Value::Trit(result));
842                }
843                0x29 => { // Tor — max(a, b) in balanced ternary (logical OR)
844                    let b = self.stack.pop().ok_or(VmError::StackUnderflow)?;
845                    let a = self.stack.pop().ok_or(VmError::StackUnderflow)?;
846                    let to_trit = |v: Value| -> Result<Trit, VmError> {
847                        match v {
848                            Value::Trit(t) => Ok(t),
849                            Value::Int(n) if n > 0 => Ok(Trit::Affirm),
850                            Value::Int(0) => Ok(Trit::Tend),
851                            Value::Int(_) => Ok(Trit::Reject),
852                            other => Err(VmError::TypeMismatch { expected: "Trit or Int".into(), found: format!("{:?}", other) }),
853                        }
854                    };
855                    let ta = to_trit(a)?;
856                    let tb = to_trit(b)?;
857                    let result = if (ta as i8) >= (tb as i8) { ta } else { tb };
858                    self.stack.push(Value::Trit(result));
859                }
860                0x2a => { // TjmpEqFloat — peek stack, jump if top Float equals embedded f64 literal
861                    // Emitted by betbc.rs for float match-arm patterns (PARSER-002 fix).
862                    // Layout: [opcode: u8] [target_f64: 8 bytes LE] [jump_addr: u16 LE]
863                    // Peeks the top of stack (does NOT consume it), jumps if value matches
864                    // within machine epsilon.
865                    let mut fb = [0u8; 8];
866                    for i in 0..8 { fb[i] = self.read_u8()?; }
867                    let target_f = f64::from_le_bytes(fb);
868                    let addr = self.read_u16()?;
869                    let val = self.stack.last().ok_or(VmError::StackUnderflow)?;
870                    if let Value::Float(f) = val {
871                        if (f - target_f).abs() < 1e-9 {
872                            self.pc = addr as usize;
873                        }
874                    }
875                }
                0x33 => { // Topent — path_str, mode_int → handle_int
                    let mode = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    let path = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    if let (Value::String(p), Value::Int(m)) = (path, mode) {
                        use std::fs::OpenOptions;
                        let mut options = OpenOptions::new();
                        // Mode encoding: 0 = read, 1 = write (create + truncate),
                        // 2 = append (create). Anything else is a runtime error.
                        match m {
                            0 => { options.read(true); } // Read
                            1 => { options.write(true).create(true).truncate(true); } // Write
                            2 => { options.append(true).create(true); } // Append
                            _ => return Err(VmError::RuntimeError(format!("Invalid file mode: {m}"))),
                        }
                        let file = options.open(&p).map_err(|e| VmError::FileOpenError(e.to_string()))?;
                        // Handles are indices into open_files; slots are appended and
                        // never reused, so handles grow monotonically.
                        let handle = self.open_files.len();
                        self.open_files.push(Some(file));
                        self.stack.push(Value::Int(handle as i64));
                    } else {
                        return Err(VmError::TypeMismatch { expected: "String, Int".into(), found: "Unknown".into() });
                    }
                }
                0x34 => { // Treadt — handle_int → trit; reads exactly one byte from the file
                    let handle_val = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    if let Value::Int(h) = handle_val {
                        let h = h as usize;
                        if h >= self.open_files.len() || self.open_files[h].is_none() {
                            return Err(VmError::FileNotOpen(h));
                        }
                        let file = self.open_files[h].as_mut().unwrap();
                        let mut buf = [0u8; 1];
                        use std::io::Read;
                        // Byte → trit mapping: '+'/'1' → Affirm, '-' → Reject,
                        // any other byte → Tend.
                        match file.read_exact(&mut buf) {
                            Ok(_) => {
                                let t = match buf[0] {
                                    b'+' | b'1' => Trit::Affirm,
                                    b'-' => Trit::Reject,
                                    _ => Trit::Tend,
                                };
                                self.stack.push(Value::Trit(t));
                            }
                            // NOTE(review): EOF is reported as Tend, which is
                            // indistinguishable from reading a literal '0' byte —
                            // confirm callers do not need to detect end-of-file.
                            Err(e) if e.kind() == std::io::ErrorKind::UnexpectedEof => {
                                self.stack.push(Value::Trit(Trit::Tend)); // EOF as Tend
                            }
                            Err(e) => return Err(VmError::FileReadError(e.to_string())),
                        }
                    } else {
                        return Err(VmError::TypeMismatch { expected: "Int".into(), found: format!("{:?}", handle_val) });
                    }
                }
                0x35 => { // Twritet — handle_int, trit → void; writes one ASCII byte per trit
                    let t_val = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    let h_val = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    if let (Value::Int(h), Value::Trit(t)) = (h_val, t_val) {
                        let h = h as usize;
                        if h >= self.open_files.len() || self.open_files[h].is_none() {
                            return Err(VmError::FileNotOpen(h));
                        }
                        let file = self.open_files[h].as_mut().unwrap();
                        // Inverse of Treadt's mapping: Affirm → '+', Reject → '-',
                        // Tend → '0'.
                        let out = match t {
                            Trit::Affirm => b'+',
                            Trit::Reject => b'-',
                            Trit::Tend   => b'0',
                        };
                        use std::io::Write;
                        file.write_all(&[out]).map_err(|e| VmError::FileWriteError(e.to_string()))?;
                    } else {
                        return Err(VmError::TypeMismatch { expected: "Int, Trit".into(), found: "Unknown".into() });
                    }
                }
                0x36 => { // Tnodeid — push this node's runtime address as a String
                    // Defers the binding to runtime so --node-addr is respected.
                    // Previously, Expr::NodeId emitted a hardcoded "127.0.0.1:7373"
                    // string at compile time, ignoring vm.set_node_id().
                    // node_id is cloned because stack Values own their contents.
                    self.stack.push(Value::String(self.node_id.clone()));
                }
950                0x37 => { // Tassert
951                    let val = self.stack.pop().ok_or(VmError::StackUnderflow)?;
952                    let is_affirm = match val {
953                        Value::Trit(Trit::Affirm) => true,
954                        Value::Int(1) => true,
955                        _ => false,
956                    };
957                    if !is_affirm {
958                        return Err(VmError::AssertionFailed);
959                    }
960                }
961                0x38 => { // TSparseMatmul (@sparseskip)
962                    // Layout: [opcode] [a_rows: u8] [a_cols: u8] [b_cols: u8]
963                    // Pops: A_tensor, B_tensor
964                    // Pushes: Result_tensor
965                    let a_rows = self.read_u8()? as usize;
966                    let a_cols = self.read_u8()? as usize;
967                    let b_cols = self.read_u8()? as usize;
968                    let b_ref = self.stack.pop().ok_or(VmError::StackUnderflow)?;
969                    let a_ref = self.stack.pop().ok_or(VmError::StackUnderflow)?;
970                    
971                    if let (Value::TensorRef(a_idx), Value::TensorRef(b_idx)) = (a_ref, b_ref) {
972                        let (a_data, b_data) = {
973                            let a = self.tensors.get(a_idx).ok_or(VmError::TensorNotAllocated(a_idx))?;
974                            let b = self.tensors.get(b_idx).ok_or(VmError::TensorNotAllocated(b_idx))?;
975                            
976                            let a_data = match &a.data {
977                                TensorData::Trit(v) => v,
978                                _ => return Err(VmError::TypeMismatch { expected: "TritTensor".into(), found: "Other".into() }),
979                            };
980                            let b_data = match &b.data {
981                                TensorData::Trit(v) => v,
982                                _ => return Err(VmError::TypeMismatch { expected: "TritTensor".into(), found: "Other".into() }),
983                            };
984                            (a_data.clone(), b_data.clone())
985                        };
986
987                        let mut result = vec![Trit::Tend; a_rows * b_cols];
988                        let mut skipped = false;
989
990                        for i in 0..a_rows {
991                            for k in 0..a_cols {
992                                let a_val = a_data[i * a_cols + k];
993                                if a_val == Trit::Tend {
994                                    skipped = true;
995                                    continue;
996                                }
997                                for j in 0..b_cols {
998                                    let b_val = b_data[k * b_cols + j];
999                                    if b_val == Trit::Tend { continue; }
1000                                    let prod = a_val * b_val;
1001                                    let (sum, _) = result[i * b_cols + j] + prod;
1002                                    result[i * b_cols + j] = sum;
1003                                }
1004                            }
1005                        }
1006
1007                        if skipped { self.sparse_dropped = true; }
1008                        let res_idx = self.tensors.len();
1009                        self.tensors.push(TensorInstance {
1010                            data: TensorData::Trit(result),
1011                            rows: a_rows,
1012                            cols: b_cols,
1013                        });
1014                        self.stack.push(Value::TensorRef(res_idx));
1015                    } else {
1016                        return Err(VmError::TypeMismatch { expected: "TensorRef, TensorRef".into(), found: "Unknown".into() });
1017                    }
1018                }
1019                0x40 => { // Tstruct
1020                    let num_fields = self.read_u8()? as usize;
1021                    let mut fields = std::collections::HashMap::new();
1022                    for _ in 0..num_fields {
1023                        let name_len = self.read_u8()? as usize;
1024                        let mut name_bytes = vec![0u8; name_len];
1025                        for i in 0..name_len { name_bytes[i] = self.read_u8()?; }
1026                        let name = String::from_utf8(name_bytes).unwrap();
1027                        let val = self.stack.pop().ok_or(VmError::StackUnderflow)?;
1028                        fields.insert(name, val);
1029                    }
1030                    self.stack.push(Value::Struct(fields));
1031                }
1032                0x41 => { // Tfield
1033                    let name_len = self.read_u8()? as usize;
1034                    let mut name_bytes = vec![0u8; name_len];
1035                    for i in 0..name_len { name_bytes[i] = self.read_u8()?; }
1036                    let name = String::from_utf8(name_bytes).unwrap();
1037                    let obj = self.stack.pop().ok_or(VmError::StackUnderflow)?;
1038                    if let Value::Struct(fields) = obj {
1039                        let val = fields.get(&name).cloned().unwrap_or_default();
1040                        self.stack.push(val);
1041                    } else {
1042                        return Err(VmError::TypeMismatch { expected: "Struct".into(), found: format!("{:?}", obj) });
1043                    }
1044                }
                0x42 => { // TBIND reg, view_reg — bind a popped TensorView to register `reg`
                    let reg = self.read_u8()? as usize;
                    let view_val = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    // Only TensorViews may be bound; the binding lives in a separate
                    // map (self.bindings), not in the general register file.
                    if let Value::TensorView { .. } = view_val {
                        self.bindings.insert(reg, view_val);
                    } else {
                        return Err(VmError::TypeMismatch { expected: "TensorView".into(), found: format!("{:?}", view_val) });
                    }
                }
                0x55 => { // TVIEW tensor_ref/view, offset, length, stride → TensorView
                    let stride = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    let length = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    let offset = self.stack.pop().ok_or(VmError::StackUnderflow)?;
                    let rf = self.stack.pop().ok_or(VmError::StackUnderflow)?;

                    // offset/length/stride must all be Ints.
                    if let (Value::Int(o), Value::Int(l), Value::Int(s)) = (&offset, &length, &stride) {
                        match rf {
                            Value::TensorRef(id) => {
                                // Fresh view over a base tensor — parameters used as-is.
                                // NOTE(review): offset/length are not validated against
                                // the tensor size here; bounds are presumably checked at
                                // access time (Tidx/Tset) — confirm.
                                self.stack.push(Value::TensorView {
                                    tensor_id: id,
                                    offset: *o as usize,
                                    length: *l as usize,
                                    stride: *s as usize,
                                });
                            }
                            Value::TensorView { tensor_id, offset: v_off, stride: v_stride, .. } => {
                                // Slicing a view: absolute offset = view_offset + (slice_offset * view_stride)
                                // New stride = view_stride * slice_stride
                                self.stack.push(Value::TensorView {
                                    tensor_id,
                                    offset: v_off + (*o as usize * v_stride),
                                    length: *l as usize,
                                    stride: v_stride * (*s as usize),
                                });
                            }
                            _ => return Err(VmError::TypeMismatch { expected: "TensorRef or TensorView".into(), found: format!("{:?}", rf) }),
                        }
                    } else {
                        return Err(VmError::TypeMismatch { expected: "Int, Int, Int".into(), found: format!("{:?}, {:?}, {:?}", offset, length, stride) });
                    }
                }
1086                0x50 => { // TPACK: pops 5 trits, pushes 1 packed byte (as Int)
1087                    let mut trits = [Trit::Tend; 5];
1088                    for i in (0..5).rev() {
1089                        let t = self.stack.pop().ok_or(VmError::StackUnderflow)?;
1090                        trits[i] = match t {
1091                            Value::Trit(tv) => tv,
1092                            _ => return Err(VmError::TypeMismatch { expected: "Trit".into(), found: format!("{:?}", t) }),
1093                        };
1094                    }
1095                    let packed = crate::trit::pack_5_trits(trits);
1096                    self.stack.push(Value::Int(packed as i64));
1097                }
1098                0x51 => { // TUNPACK: pops 1 packed byte, pushes 5 trits
1099                    let val = self.stack.pop().ok_or(VmError::StackUnderflow)?;
1100                    if let Value::Int(packed) = val {
1101                        let trits = crate::trit::unpack_5_trits(packed as u8);
1102                        for t in trits {
1103                            self.stack.push(Value::Trit(t));
1104                        }
1105                    } else {
1106                        return Err(VmError::TypeMismatch { expected: "Int (packed byte)".into(), found: format!("{:?}", val) });
1107                    }
1108                }
1109                0x56 => { // TALLOC_PACKED (packed trit tensor)
1110                    let rows = self.read_u32()? as usize;
1111                    let cols = self.read_u32()? as usize;
1112                    let size = rows * cols;
1113                    let num_bytes = (size + 4) / 5;
1114                    let idx = self.tensors.len();
1115                    self.tensors.push(TensorInstance {
1116                        data: TensorData::PackedTrit(vec![0x00; num_bytes], size), // 0x00 is Reject Reject Reject Reject Reject
1117                        rows,
1118                        cols,
1119                    });
1120                    self.stack.push(Value::TensorRef(idx));
1121                }
1122                0x52 => { // TV_ADD: Vectorized addition of two packed tensors
1123                    let b_ref = self.stack.pop().ok_or(VmError::StackUnderflow)?;
1124                    let a_ref = self.stack.pop().ok_or(VmError::StackUnderflow)?;
1125                    if let (Value::TensorRef(a_idx), Value::TensorRef(b_idx)) = (a_ref, b_ref) {
1126                        let res_idx = self.tensors.len();
1127                        let (rows, cols, data) = {
1128                            let a = self.tensors.get(a_idx).ok_or(VmError::TensorNotAllocated(a_idx))?;
1129                            let b = self.tensors.get(b_idx).ok_or(VmError::TensorNotAllocated(b_idx))?;
1130                            if a.rows != b.rows || a.cols != b.cols {
1131                                return Err(VmError::RuntimeError("Tensor dimension mismatch in TV_ADD".into()));
1132                            }
1133                            match (&a.data, &b.data) {
1134                                (TensorData::PackedTrit(av, alen), TensorData::PackedTrit(bv, _)) => {
1135                                    let mut res_v = vec![0u8; av.len()];
1136                                    for i in 0..av.len() {
1137                                        res_v[i] = crate::trit::packed_add(av[i], bv[i]);
1138                                    }
1139                                    (a.rows, a.cols, TensorData::PackedTrit(res_v, *alen))
1140                                }
1141                                _ => return Err(VmError::TypeMismatch { expected: "PackedTrit tensors".into(), found: "Other".into() }),
1142                            }
1143                        };
1144                        self.tensors.push(TensorInstance { data, rows, cols });
1145                        self.stack.push(Value::TensorRef(res_idx));
1146                    } else {
1147                        return Err(VmError::TypeMismatch { expected: "TensorRef, TensorRef".into(), found: "Unknown".into() });
1148                    }
1149                }
1150                0x53 => { // TV_NEG: Vectorized negation of a packed tensor
1151                    let a_ref = self.stack.pop().ok_or(VmError::StackUnderflow)?;
1152                    if let Value::TensorRef(idx) = a_ref {
1153                        let res_idx = self.tensors.len();
1154                        let (rows, cols, data) = {
1155                            let a = self.tensors.get(idx).ok_or(VmError::TensorNotAllocated(idx))?;
1156                            match &a.data {
1157                                TensorData::PackedTrit(v, len) => {
1158                                    let mut res_v = vec![0u8; v.len()];
1159                                    for i in 0..v.len() {
1160                                        res_v[i] = crate::trit::packed_neg(v[i]);
1161                                    }
1162                                    (a.rows, a.cols, TensorData::PackedTrit(res_v, *len))
1163                                }
1164                                _ => return Err(VmError::TypeMismatch { expected: "PackedTrit tensor".into(), found: "Other".into() }),
1165                            }
1166                        };
1167                        self.tensors.push(TensorInstance { data, rows, cols });
1168                        self.stack.push(Value::TensorRef(res_idx));
1169                    } else {
1170                        return Err(VmError::TypeMismatch { expected: "TensorRef".into(), found: format!("{:?}", a_ref) });
1171                    }
1172                }
1173                0x54 => { // TV_CON: Vectorized consensus of two packed tensors
1174                    let b_ref = self.stack.pop().ok_or(VmError::StackUnderflow)?;
1175                    let a_ref = self.stack.pop().ok_or(VmError::StackUnderflow)?;
1176                    if let (Value::TensorRef(a_idx), Value::TensorRef(b_idx)) = (a_ref, b_ref) {
1177                        let res_idx = self.tensors.len();
1178                        let (rows, cols, data) = {
1179                            let a = self.tensors.get(a_idx).ok_or(VmError::TensorNotAllocated(a_idx))?;
1180                            let b = self.tensors.get(b_idx).ok_or(VmError::TensorNotAllocated(b_idx))?;
1181                            if a.rows != b.rows || a.cols != b.cols {
1182                                return Err(VmError::RuntimeError("Tensor dimension mismatch in TV_CON".into()));
1183                            }
1184                            match (&a.data, &b.data) {
1185                                (TensorData::PackedTrit(av, alen), TensorData::PackedTrit(bv, _)) => {
1186                                    let mut res_v = vec![0u8; av.len()];
1187                                    for i in 0..av.len() {
1188                                        res_v[i] = crate::trit::packed_consensus(av[i], bv[i]);
1189                                    }
1190                                    (a.rows, a.cols, TensorData::PackedTrit(res_v, *alen))
1191                                }
1192                                _ => return Err(VmError::TypeMismatch { expected: "PackedTrit tensors".into(), found: "Other".into() }),
1193                            }
1194                        };
1195                        self.tensors.push(TensorInstance { data, rows, cols });
1196                        self.stack.push(Value::TensorRef(res_idx));
1197                    } else {
1198                        return Err(VmError::TypeMismatch { expected: "TensorRef, TensorRef".into(), found: "Unknown".into() });
1199                    }
1200                }
1201                0x00 => return Ok(()),
1202                _ => return Err(VmError::InvalidOpcode(opcode)),
1203            }
1204        }
1205        Ok(())
1206    }
1207
1208    fn read_u8(&mut self) -> Result<u8, VmError> {
1209        if self.pc >= self.code.len() { return Err(VmError::PcOutOfBounds(self.pc)); }
1210        let val = self.code[self.pc];
1211        self.pc += 1;
1212        Ok(val)
1213    }
1214
1215    fn read_u16(&mut self) -> Result<u16, VmError> {
1216        if self.pc + 1 >= self.code.len() { return Err(VmError::PcOutOfBounds(self.pc)); }
1217        let val = u16::from_le_bytes([self.code[self.pc], self.code[self.pc + 1]]);
1218        self.pc += 2;
1219        Ok(val)
1220    }
1221
1222    fn read_u32(&mut self) -> Result<u32, VmError> {
1223        if self.pc + 3 >= self.code.len() { return Err(VmError::PcOutOfBounds(self.pc)); }
1224        let val = u32::from_le_bytes([
1225            self.code[self.pc], self.code[self.pc + 1],
1226            self.code[self.pc + 2], self.code[self.pc + 3]
1227        ]);
1228        self.pc += 4;
1229        Ok(val)
1230    }
1231
1232    fn get_pos(&self, rf: &Value, row: i64, col: i64) -> Result<(usize, usize), VmError> {
1233        match rf {
1234            Value::TensorRef(idx) => {
1235                let tensor = self.tensors.get(*idx).ok_or(VmError::TensorNotAllocated(*idx))?;
1236                let pos = if tensor.cols > 1 && col >= 0 {
1237                    row as usize * tensor.cols + col as usize
1238                } else {
1239                    row as usize
1240                };
1241                Ok((*idx, pos))
1242            }
1243            Value::TensorView { tensor_id, offset, stride, .. } => {
1244                let pos = *offset + (row as usize * *stride);
1245                Ok((*tensor_id, pos))
1246            }
1247            _ => Err(VmError::TypeMismatch { expected: "TensorRef or TensorView".into(), found: format!("{:?}", rf) }),
1248        }
1249    }
1250}