// lex_bytecode/op.rs

//! Bytecode instruction set per spec §8.2.

use serde::{Deserialize, Serialize};

5/// Constant pool entry.
6#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
7pub enum Const {
8    Int(i64),
9    Float(f64),
10    Bool(bool),
11    Str(String),
12    Bytes(Vec<u8>),
13    Unit,
14    /// A field name, used by MAKE_RECORD/GET_FIELD.
15    FieldName(String),
16    /// A variant tag, used by MAKE_VARIANT/TEST_VARIANT/GET_VARIANT.
17    VariantName(String),
18    /// An AST NodeId, attached to Call / EffectCall for trace keying (§10.1).
19    NodeId(String),
20}
21
22#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq)]
23pub enum Op {
24    // stack manipulation
25    PushConst(u32),
26    Pop,
27    Dup,
28
29    // locals
30    LoadLocal(u16),
31    StoreLocal(u16),
32
33    // constructors / pattern matching
34    /// Builds a record by interning its field-name shape in
35    /// `Program.record_shapes` (#461). `shape_idx` indexes that
36    /// side-table; `field_count` is `shape.len()` cached inline so the
37    /// stack-effect verifier can compute its delta without needing a
38    /// `Program` reference. The VM pops `field_count` values off the
39    /// stack and pairs them with `Program.record_shapes[shape_idx]`.
40    ///
41    /// Externalizing the field-name vec is what lets `Op` be `Copy`,
42    /// which is the precondition for direct-threaded dispatch
43    /// (`code[pc]` becomes a register-sized read instead of an
44    /// every-step `Vec` clone).
45    MakeRecord { shape_idx: u32, field_count: u16 },
46    MakeTuple(u16),
47    MakeList(u32),
48    MakeVariant { name_idx: u32, arity: u16 },
49    GetField(u32),         // field name const idx
50    GetElem(u16),          // tuple element index
51    TestVariant(u32),      // pushes Bool: top-of-stack matches variant name?
52    GetVariant(u32),       // extracts payload (replaces variant on stack with its args list)
53    GetVariantArg(u16),    // pop variant, push its i'th arg
54    GetListLen,
55    GetListElem(u32),
56    /// Pop [list, value]; push list with `value` appended.
57    ListAppend,
58    /// Pop list; push it indexed by the integer on top.
59    /// Stack: [list, idx] → [list[idx]]. (Like GetListElem(u32) but
60    /// the index is dynamic.)
61    GetListElemDyn,
62
63    // control flow
64    Jump(i32),
65    JumpIf(i32),     // pops Bool
66    JumpIfNot(i32),
67    Call { fn_id: u32, arity: u16, node_id_idx: u32 },
68    TailCall { fn_id: u32, arity: u16, node_id_idx: u32 },
69    /// Build a Value::Closure: pop `capture_count` values (in order) and
70    /// pair them with `fn_id`.
71    MakeClosure { fn_id: u32, capture_count: u16 },
72    /// Call a closure: pop `arity` args + 1 closure (top of stack), invoke.
73    CallClosure { arity: u16, node_id_idx: u32 },
74    /// Stable sort-by-key (#338). Stack: `[xs, f]` (xs underneath).
75    /// Pops the key-fn `f` and the list `xs`, applies `f` to each
76    /// element to derive a sortable key, returns the list reordered
77    /// so keys ascend. Keys must be one of `Int` / `Float` / `Str`;
78    /// other key types pair-wise compare as equal (preserving
79    /// insertion order). `node_id_idx` is the originating NodeId.
80    SortByKey { node_id_idx: u32 },
81    /// Parallel map (#305 slice 1). Stack: `[xs, f]` (xs underneath).
82    /// Pops the closure `f` and the list `xs`, applies `f` to each
83    /// element in parallel via OS threads, pushes the result list
84    /// in input order. `node_id_idx` is the originating NodeId for
85    /// trace keying. The pool size is capped by
86    /// `LEX_PAR_MAX_CONCURRENCY` (default = available CPU cores).
87    ///
88    /// Slice 1 limitation: closures invoking effects fail at
89    /// runtime with `VmError::Effect`. The per-thread effect handler
90    /// split is queued as slice 2.
91    ParallelMap { node_id_idx: u32 },
92    /// EFFECT_CALL `<effect_kind_const_idx>` `<op_name_const_idx>` `<arity>`.
93    /// Pops `arity` args, dispatches to a host effect handler, pushes result.
94    /// `node_id_idx` points to a `Const::NodeId` for trace keying.
95    EffectCall { kind_idx: u32, op_idx: u32, arity: u16, node_id_idx: u32 },
96    Return,
97    Panic(u32),     // pushes constant message and aborts
98
99    // arithmetic — typed (per spec §8.2). `NumAdd`/etc. dispatch on operand
100    // type at runtime; emitted when the compiler doesn't have type info.
101    // The post-M5 plan is to lower NumAdd → IntAdd|FloatAdd in a typed pass.
102    IntAdd, IntSub, IntMul, IntDiv, IntMod, IntNeg,
103    IntEq, IntLt, IntLe,
104    FloatAdd, FloatSub, FloatMul, FloatDiv, FloatNeg,
105    FloatEq, FloatLt, FloatLe,
106    NumAdd, NumSub, NumMul, NumDiv, NumMod, NumNeg,
107    NumEq, NumLt, NumLe,
108    BoolAnd, BoolOr, BoolNot,
109
110    // strings
111    StrConcat, StrLen, StrEq,
112    BytesLen, BytesEq,
113
114    // superinstructions (#461)
115    //
116    // Fused opcodes emitted by the compiler's peephole pass to skip
117    // dispatch on common multi-op patterns. The pass leaves the
118    // original primitive ops in place at the trailing slots — the
119    // dispatch loop overrides its default `pc += 1` to step past
120    // them. Keeping `code.len()` invariant means existing
121    // Jump/JumpIf offsets remain valid without a renumbering pass.
122    /// Fused `LoadLocal(local_idx) + PushConst(imm_const_idx) +
123    /// IntAdd`. `imm_const_idx` must point to a `Const::Int`. The
124    /// dispatch arm reads the local, adds the constant, pushes the
125    /// result, and advances pc by 3 (past this op and the two
126    /// inert PushConst + IntAdd slots that follow). For
127    /// `body_hash` stability (#222) the canonical encoding decomposes
128    /// this op back to a standalone `LoadLocal(local_idx)` at hash
129    /// time; the unchanged PushConst / IntAdd at the next two
130    /// slots hash normally, so the total bytes match pre-fusion.
131    LoadLocalAddIntConst { local_idx: u16, imm_const_idx: u32 },
132}