aver/vm/opcode.rs
// Aver VM bytecode opcodes.
//
// Stack-based: operands are pushed/popped from the operand stack.
// Variable-width encoding: opcode (1 byte) followed by 0-10 fixed operand
// bytes; a few opcodes (MATCH_DISPATCH, MATCH_DISPATCH_CONST, RECORD_UPDATE,
// CALL_PAR) are variable-length, sized by an inline count operand.

// -- Stack / locals ----------------------------------------------------------
//
// NOTE: opcode byte values are bytecode ABI — they appear in compiled
// chunks. When adding or renumbering an opcode, keep `opcode_name` and
// `opcode_operand_width` (bottom of this file) in sync; both match on
// these constants.

/// No-op, used as padding after superinstruction fusion.
pub const NOP: u8 = 0x00;

/// Push `stack[bp + slot]` onto the operand stack.
pub const LOAD_LOCAL: u8 = 0x01; // slot:u8

/// Push `stack[bp + slot]` and clear the slot (move semantics).
/// Used for last-use variables: caller releases sole ownership so
/// callees and builtins see refcount=1 and can mutate in-place.
/// (Value 0x48 sits in the calls range; kept here with its LOAD/STORE
/// siblings for readability.)
pub const MOVE_LOCAL: u8 = 0x48; // slot:u8

/// Pop top and store into `stack[bp + slot]`.
pub const STORE_LOCAL: u8 = 0x02; // slot:u8

/// Push `constants[idx]` onto the operand stack.
pub const LOAD_CONST: u8 = 0x03; // idx:u16

/// Push `globals[idx]` onto the operand stack.
pub const LOAD_GLOBAL: u8 = 0x04; // idx:u16

/// Pop top and store into `globals[idx]`.
pub const STORE_GLOBAL: u8 = 0x0A; // idx:u16

/// Discard the top value.
pub const POP: u8 = 0x05;

/// Duplicate the top value.
pub const DUP: u8 = 0x06;

/// Push `NanValue::UNIT`.
pub const LOAD_UNIT: u8 = 0x07;

/// Push `NanValue::TRUE`.
pub const LOAD_TRUE: u8 = 0x08;

/// Push `NanValue::FALSE`.
pub const LOAD_FALSE: u8 = 0x09;

// -- Arithmetic --------------------------------------------------------------

/// Pop b, pop a, push a + b.
pub const ADD: u8 = 0x10;

/// Pop b, pop a, push a - b.
pub const SUB: u8 = 0x11;

/// Pop b, pop a, push a * b.
pub const MUL: u8 = 0x12;

/// Pop b, pop a, push a / b.
pub const DIV: u8 = 0x13;

/// Pop b, pop a, push a % b.
pub const MOD: u8 = 0x14;

/// Pop a, push -a.
pub const NEG: u8 = 0x15;

/// Pop a, push !a (boolean not).
pub const NOT: u8 = 0x16;

/// Typed `+` for two `Int` operands. `as_int` decode + `wrapping_add`,
/// boxing back through `NanValue::new_int`. Skips the tag-dispatch
/// chain in `arith_add`. Emitted when both operand types resolve to
/// `Type::Int`.
pub const ADD_INT: u8 = 0x17;
/// Typed `-` for two `Int` operands; same shape as `ADD_INT`.
pub const SUB_INT: u8 = 0x18;
/// Typed `*` for two `Int` operands; same shape as `ADD_INT`.
pub const MUL_INT: u8 = 0x19;

/// Typed `+` for two `Float` operands. `as_float` decode (raw
/// `f64::from_bits`), IEEE 754 add, push as `NanValue::new_float`.
/// Skips `arith_add` tag-dispatch + cross-type promotion. Hot in
/// numeric loops (Mandelbrot inner step is 3 muls + 1 add per iter).
pub const ADD_FLOAT: u8 = 0x1A;
/// Typed `-` for two `Float` operands; same shape as `ADD_FLOAT`.
pub const SUB_FLOAT: u8 = 0x1B;
/// Typed `*` for two `Float` operands; same shape as `ADD_FLOAT`.
pub const MUL_FLOAT: u8 = 0x1C;
/// Typed `/` for two `Float` operands; same shape as `ADD_FLOAT`.
/// IEEE 754 division — `b == 0.0` produces `inf`/`-inf`/`NaN` per
/// the spec, no runtime check.
pub const DIV_FLOAT: u8 = 0x1D;

// -- Comparison --------------------------------------------------------------

/// Pop b, pop a, push a == b.
pub const EQ: u8 = 0x20;

/// Pop b, pop a, push a < b.
pub const LT: u8 = 0x21;

/// Pop b, pop a, push a > b.
pub const GT: u8 = 0x22;

/// Typed `==` for two `Int` operands. Skips `NanValue::eq_in`'s tag
/// dispatch — `as_int_unboxed()` on both sides, raw i64 compare, push
/// bool. Emitted by the VM compiler when both `left.ty()` and
/// `right.ty()` resolve to `Type::Int`. Hot path in pattern-match-on-
/// Int (`match n { 0 -> …; _ -> … }`) and arithmetic guards; profile
/// shows `eq_in` at 10–12% self-time in newtype/match scenarios.
pub const EQ_INT: u8 = 0x23;

/// Typed `<` for two `Int` operands. `as_int` decode on both sides
/// (folds the inline path inline), raw i64 compare, push bool. Skips
/// `compare_lt`'s tag-dispatch (`is_int`/`is_float`/`is_string` chain
/// plus cross-type promotion). Emitted when both operands'
/// `Spanned::ty()` resolve to `Type::Int`.
pub const LT_INT: u8 = 0x24;

/// Typed `>` for two `Int` operands; same shape as `LT_INT`.
pub const GT_INT: u8 = 0x25;

/// Typed `<` for two `Float` operands. `as_float` (raw `f64::from_bits`)
/// on both sides, IEEE 754 compare, push bool. Hot in numeric loops
/// (Mandelbrot inner step `match curZ2 > 4.0 { … }` etc.); profile
/// shows `compare_lt` at 2.6% self-time on fractal_seahorse.
pub const LT_FLOAT: u8 = 0x26;

/// Typed `>` for two `Float` operands; same shape as `LT_FLOAT`.
pub const GT_FLOAT: u8 = 0x27;

/// Fused `match n { LIT -> ...; _ -> ... }` arm test: peek the top of
/// stack (subject left in place by the surrounding `compile_match`),
/// if its `Int` value equals the inline `imm` literal — fall through
/// to the arm body; otherwise skip via `fail_offset`. Replaces the
/// `DUP + LOAD_CONST + EQ + JUMP_IF_FALSE` sequence the generic
/// `compile_pattern` emits — one dispatch instead of four in the hot
/// path of every `match n { 0 -> ... }` shape.
///
/// Encoding: `MATCH_INT_LITERAL imm:i64 fail_offset:i16`.
/// (Value 0x7F sits in the pattern-matching range; kept here with the
/// comparison opcodes it accelerates.)
pub const MATCH_INT_LITERAL: u8 = 0x7F;

// -- String ------------------------------------------------------------------

/// Pop b, pop a, push str(a) ++ str(b).
pub const CONCAT: u8 = 0x28;

// -- Control flow ------------------------------------------------------------

/// Unconditional relative jump: ip += offset.
pub const JUMP: u8 = 0x30; // offset:i16

/// Pop top, if falsy: ip += offset.
pub const JUMP_IF_FALSE: u8 = 0x31; // offset:i16

// -- Calls -------------------------------------------------------------------

/// Call a known function by id. Args already on stack.
pub const CALL_KNOWN: u8 = 0x40; // fn_id:u16, argc:u8

/// Call a function value on the stack (under args).
pub const CALL_VALUE: u8 = 0x41; // argc:u8

/// Call a builtin service function.
pub const CALL_BUILTIN: u8 = 0x42; // symbol_id:u32, argc:u8

/// Like CALL_BUILTIN but with owned-argument bitmask from reuse analysis.
/// Builtins receiving owned args can mutate in-place instead of cloning.
pub const CALL_BUILTIN_OWNED: u8 = 0x46; // symbol_id:u32, argc:u8, owned:u8

/// Like CALL_KNOWN but with owned-argument bitmask from reuse analysis.
pub const CALL_KNOWN_OWNED: u8 = 0x47; // fn_id:u16, argc:u8, owned:u8

/// Self tail-call: reuse current frame with new args.
pub const TAIL_CALL_SELF: u8 = 0x43; // argc:u8

/// Mutual tail-call to a known function: reuse frame, switch target.
pub const TAIL_CALL_KNOWN: u8 = 0x44; // fn_id:u16, argc:u8

/// Return top of stack to caller.
pub const RETURN: u8 = 0x50;

// -- Structured values -------------------------------------------------------

/// Push Nil (empty cons list).
pub const LIST_NIL: u8 = 0x60;

/// Pop tail, pop head, push Cons(head, tail).
pub const LIST_CONS: u8 = 0x61;

/// Pop `count` items, build cons list from them (first item = head), push list.
pub const LIST_NEW: u8 = 0x62; // count:u8

/// Pop `count` field values, push a new record with `type_id`.
pub const RECORD_NEW: u8 = 0x63; // type_id:u16, count:u8

/// Pop record, push `fields[field_idx]` (compile-time resolved index).
pub const RECORD_GET: u8 = 0x64; // field_idx:u8

/// Pop record, lookup field by interned field symbol, push value.
pub const RECORD_GET_NAMED: u8 = 0x67; // field_symbol_id:u32

/// Pop `count` field values, push a new variant.
pub const VARIANT_NEW: u8 = 0x65; // type_id:u16, variant_id:u16, count:u8

/// Pop value, push wrapped value. kind: 0=Ok, 1=Err, 2=Some.
pub const WRAP: u8 = 0x66; // kind:u8

/// Pop `count` items, build a tuple from them, push tuple.
pub const TUPLE_NEW: u8 = 0x68; // count:u8

/// Parallel function calls for independent products (?! / !).
/// Pops N callable values plus their args from the stack, dispatches them via
/// the same callable resolution rules as CALL_VALUE, then builds the result tuple.
/// Enters/exits replay group around parallel dispatch.
///
/// Encoding: CALL_PAR count:u8 unwrap:u8 [argc:u8 × count]
/// unwrap=1 (?!): unwrap each Result, propagate first Err.
/// unwrap=0 (!): return raw tuple.
/// (Value 0x86 sits in the superinstruction range.)
pub const CALL_PAR: u8 = 0x86;

/// Update selected fields on a record, preserving the rest from the base value.
/// Stack: [..., base_record, update_0, ..., update_n-1] -> [..., updated_record]
pub const RECORD_UPDATE: u8 = 0x69; // type_id:u16, count:u8, field_idx[count]:u8

/// Propagate `Result.Err` to caller or unwrap `Result.Ok` in place.
pub const PROPAGATE_ERR: u8 = 0x6A;

/// Pop list, push its length as Int.
pub const LIST_LEN: u8 = 0x6B;

// 0x6C and 0x6D were LIST_GET and LIST_APPEND — removed. Do not reuse
// until no serialized bytecode from that era is in circulation.

/// Pop list, pop value, push prepended list.
pub const LIST_PREPEND: u8 = 0x6E;

// 0x6F was LIST_GET_MATCH — removed.

// -- Pattern matching --------------------------------------------------------

/// Peek top: if NaN tag != expected, ip += fail_offset.
pub const MATCH_TAG: u8 = 0x70; // expected_tag:u8, fail_offset:i16

/// Peek top (must be variant): if variant_id != expected, ip += fail_offset.
pub const MATCH_VARIANT: u8 = 0x71; // ctor_id:u16, fail_offset:i16

/// Peek top: if not wrapper of `kind`, ip += fail_offset.
/// If matches, replace top with inner value (unwrap in-place).
/// kind: 0=Ok, 1=Err, 2=Some.
pub const MATCH_UNWRAP: u8 = 0x72; // kind:u8, fail_offset:i16

/// Peek top: if not Nil, ip += fail_offset.
pub const MATCH_NIL: u8 = 0x73; // fail_offset:i16

/// Peek top: if Nil (not a cons), ip += fail_offset.
pub const MATCH_CONS: u8 = 0x74; // fail_offset:i16

/// Pop cons cell, push tail then push head.
pub const LIST_HEAD_TAIL: u8 = 0x75;

/// Peek top (record/variant), push `fields[field_idx]` (non-destructive).
pub const EXTRACT_FIELD: u8 = 0x76; // field_idx:u8

/// Peek top: if not a tuple of `count` items, ip += fail_offset.
pub const MATCH_TUPLE: u8 = 0x78; // count:u8, fail_offset:i16

/// Peek top tuple, push `items[item_idx]` (non-destructive).
pub const EXTRACT_TUPLE_ITEM: u8 = 0x79; // item_idx:u8

/// Non-exhaustive match error at source line.
pub const MATCH_FAIL: u8 = 0x77; // line:u16

/// Unified prefix/exact dispatch on NanValue bits.
///
/// Encoding:
/// MATCH_DISPATCH count:u8 default_offset:i16
/// [(kind:u8, expected:u64, offset:i16) × count]
///
/// kind=0: exact match — `val.bits() == expected`
/// kind=1: tag match — `(val.bits() & TAG_MASK_FULL) == expected`
/// where TAG_MASK_FULL = 0xFFFF_C000_0000_0000 (QNAN 14 bits + tag 4 bits)
///
/// Pops subject. Scans entries in order; first match wins → ip += offset.
/// No match → ip += default_offset.
/// All offsets are relative to the end of the full instruction.
pub const MATCH_DISPATCH: u8 = 0x7A;

/// Like MATCH_DISPATCH but every entry carries an inline result instead
/// of a jump offset. When an entry matches, the result is pushed directly
/// onto the stack and the match body is skipped entirely.
///
/// Encoding:
/// MATCH_DISPATCH_CONST count:u8 default_offset:i16
/// [(kind:u8, expected:u64, result:u64) × count]
///
/// Hit → pop subject, push result NanValue.
/// Miss → pop subject, ip += default_offset (execute default arm body).
///
/// Emitted when ALL dispatchable arms have constant bodies (literals).
pub const MATCH_DISPATCH_CONST: u8 = 0x7B;

/// Tail-call self for thin frames: no arena finalization needed.
/// The compiler emits this instead of TAIL_CALL_SELF when the function
/// is known to be "thin" (no heap allocations within the frame).
/// Skips finalize_frame_locals_for_tail_call entirely — just copies
/// args in-place and resets ip.
/// (Value 0x45 sits in the calls range.)
pub const TAIL_CALL_SELF_THIN: u8 = 0x45; // argc:u8

/// Inline Option.withDefault: pop default, pop option → push inner or default.
/// Stack: [option, default] → [result]
/// If option is Some → push unwrapped inner value.
/// If option is None → push default.
pub const UNWRAP_OR: u8 = 0x7C;

/// Inline Result.withDefault: pop default, pop result → push inner or default.
/// Stack: [result, default] → [value]
/// If result is Ok → push unwrapped inner value.
/// If result is Err → push default.
pub const UNWRAP_RESULT_OR: u8 = 0x7D;

/// Frameless call to a leaf+thin+args-only function.
/// No CallFrame is pushed — just saves (fn_id, ip) in the dispatch loop,
/// sets bp to the args already on stack, and jumps to the target.
/// On RETURN, restores the caller's state directly.
/// Format: fn_id:u16, argc:u8 (same as CALL_KNOWN).
pub const CALL_LEAF: u8 = 0x7E;

// ─── Superinstructions ──────────────────────────────────────

/// Push two locals in one dispatch. Format: slot_a:u8, slot_b:u8.
pub const LOAD_LOCAL_2: u8 = 0x80;

/// Push one local + one constant in one dispatch. Format: slot:u8, const_idx:u16.
pub const LOAD_LOCAL_CONST: u8 = 0x81;

/// Inline Vector.get: pop index, pop vector → push Option (Some/None).
/// Stack: [vector, index] → [option]
pub const VECTOR_GET: u8 = 0x82;

/// Fused Vector.get + Option.withDefault: pop default, pop index, pop vector → push value.
/// Stack: [vector, index, default] → [value]
/// Combines CALL_BUILTIN(Vector.get) + LOAD_CONST + UNWRAP_OR into one opcode.
pub const VECTOR_GET_OR: u8 = 0x83;

/// Inline Vector.set: pop value, pop index, pop vector → push Option<Vector>.
/// Stack: [vector, index, value] → [option_vector]
pub const VECTOR_SET: u8 = 0x84;

/// Fused Vector.set + Option.withDefault(vec): pop value, pop index, pop vector → push vector.
/// Stack: [vector, index, value] → [vector]
pub const VECTOR_SET_OR_KEEP: u8 = 0x85;

// -- Deforestation buffer (0.15 Traversal) -----------------------------------
//
// Mutable byte-buffer scratch backing the synthesizer's `__buf_*` intrinsics.
// Buffer values travel as opaque `Int(handle)` NanValues — handles are
// indices into `vm.buffer_pool: Vec<Option<String>>`. This keeps Buffer
// out of `ArenaEntry` (no exhaustiveness ripples) and out of GC marking
// (handle is an inline value, the underlying String lives on the host
// heap unaffected by frame compactions). Buffers are use-once: created
// by `BUFFER_NEW`, mutated through `BUFFER_APPEND_*`, finalized to an
// arena `String` (Rc<str>) by `BUFFER_FINALIZE` which also frees the slot.

/// Allocate a fresh String buffer. Pop cap_hint:i64 → push handle:Int(buffer_idx).
pub const BUFFER_NEW: u8 = 0x90;

/// Append the bytes of a string to a buffer. Pop str, pop buf →
/// push buf (same handle). The String pointed to by `buf.handle` is
/// mutated in place via `String::push_str`.
pub const BUFFER_APPEND_STR: u8 = 0x91;

/// Append separator only when buffer is non-empty. Pop sep, pop buf →
/// push buf. No-op for the first append, so the synthesized `__buffered`
/// loop body can place the separator before each element uniformly.
pub const BUFFER_APPEND_SEP_UNLESS_FIRST: u8 = 0x92;

/// Drain a buffer into an arena `OBJ_STRING`. Pop buf → push string.
/// Frees the underlying `vm.buffer_pool` slot; the handle becomes invalid.
pub const BUFFER_FINALIZE: u8 = 0x93;
379
/// Human-readable mnemonic for an opcode byte, for the disassembler and
/// debug traces. Bytes with no assigned opcode map to `"UNKNOWN"`.
///
/// Arms are listed in numeric opcode order, grouped to mirror the
/// constant sections above. Every constant in this file must have an
/// arm here; all constant patterns are distinct values, so arm order
/// does not affect the result.
pub fn opcode_name(op: u8) -> &'static str {
    match op {
        // Stack / locals (0x00-0x0A)
        NOP => "NOP",
        LOAD_LOCAL => "LOAD_LOCAL",
        STORE_LOCAL => "STORE_LOCAL",
        LOAD_CONST => "LOAD_CONST",
        LOAD_GLOBAL => "LOAD_GLOBAL",
        POP => "POP",
        DUP => "DUP",
        LOAD_UNIT => "LOAD_UNIT",
        LOAD_TRUE => "LOAD_TRUE",
        LOAD_FALSE => "LOAD_FALSE",
        STORE_GLOBAL => "STORE_GLOBAL",
        // Arithmetic (0x10-0x1D)
        ADD => "ADD",
        SUB => "SUB",
        MUL => "MUL",
        DIV => "DIV",
        MOD => "MOD",
        NEG => "NEG",
        NOT => "NOT",
        ADD_INT => "ADD_INT",
        SUB_INT => "SUB_INT",
        MUL_INT => "MUL_INT",
        ADD_FLOAT => "ADD_FLOAT",
        SUB_FLOAT => "SUB_FLOAT",
        MUL_FLOAT => "MUL_FLOAT",
        DIV_FLOAT => "DIV_FLOAT",
        // Comparison + string (0x20-0x28)
        EQ => "EQ",
        LT => "LT",
        GT => "GT",
        EQ_INT => "EQ_INT",
        LT_INT => "LT_INT",
        GT_INT => "GT_INT",
        LT_FLOAT => "LT_FLOAT",
        GT_FLOAT => "GT_FLOAT",
        CONCAT => "CONCAT",
        // Control flow (0x30-0x31)
        JUMP => "JUMP",
        JUMP_IF_FALSE => "JUMP_IF_FALSE",
        // Calls (0x40-0x48, 0x50)
        CALL_KNOWN => "CALL_KNOWN",
        CALL_VALUE => "CALL_VALUE",
        CALL_BUILTIN => "CALL_BUILTIN",
        TAIL_CALL_SELF => "TAIL_CALL_SELF",
        TAIL_CALL_KNOWN => "TAIL_CALL_KNOWN",
        TAIL_CALL_SELF_THIN => "TAIL_CALL_SELF_THIN",
        CALL_BUILTIN_OWNED => "CALL_BUILTIN_OWNED",
        CALL_KNOWN_OWNED => "CALL_KNOWN_OWNED",
        MOVE_LOCAL => "MOVE_LOCAL",
        RETURN => "RETURN",
        // Structured values (0x60-0x6E)
        LIST_NIL => "LIST_NIL",
        LIST_CONS => "LIST_CONS",
        LIST_NEW => "LIST_NEW",
        RECORD_NEW => "RECORD_NEW",
        RECORD_GET => "RECORD_GET",
        VARIANT_NEW => "VARIANT_NEW",
        WRAP => "WRAP",
        RECORD_GET_NAMED => "RECORD_GET_NAMED",
        TUPLE_NEW => "TUPLE_NEW",
        RECORD_UPDATE => "RECORD_UPDATE",
        PROPAGATE_ERR => "PROPAGATE_ERR",
        LIST_LEN => "LIST_LEN",
        LIST_PREPEND => "LIST_PREPEND",
        // Pattern matching (0x70-0x7F)
        MATCH_TAG => "MATCH_TAG",
        MATCH_VARIANT => "MATCH_VARIANT",
        MATCH_UNWRAP => "MATCH_UNWRAP",
        MATCH_NIL => "MATCH_NIL",
        MATCH_CONS => "MATCH_CONS",
        LIST_HEAD_TAIL => "LIST_HEAD_TAIL",
        EXTRACT_FIELD => "EXTRACT_FIELD",
        MATCH_FAIL => "MATCH_FAIL",
        MATCH_TUPLE => "MATCH_TUPLE",
        EXTRACT_TUPLE_ITEM => "EXTRACT_TUPLE_ITEM",
        MATCH_DISPATCH => "MATCH_DISPATCH",
        MATCH_DISPATCH_CONST => "MATCH_DISPATCH_CONST",
        UNWRAP_OR => "UNWRAP_OR",
        UNWRAP_RESULT_OR => "UNWRAP_RESULT_OR",
        CALL_LEAF => "CALL_LEAF",
        MATCH_INT_LITERAL => "MATCH_INT_LITERAL",
        // Superinstructions (0x80-0x86)
        LOAD_LOCAL_2 => "LOAD_LOCAL_2",
        LOAD_LOCAL_CONST => "LOAD_LOCAL_CONST",
        VECTOR_GET => "VECTOR_GET",
        VECTOR_GET_OR => "VECTOR_GET_OR",
        VECTOR_SET => "VECTOR_SET",
        VECTOR_SET_OR_KEEP => "VECTOR_SET_OR_KEEP",
        CALL_PAR => "CALL_PAR",
        // Deforestation buffer (0x90-0x93)
        BUFFER_NEW => "BUFFER_NEW",
        BUFFER_APPEND_STR => "BUFFER_APPEND_STR",
        BUFFER_APPEND_SEP_UNLESS_FIRST => "BUFFER_APPEND_SEP_UNLESS_FIRST",
        BUFFER_FINALIZE => "BUFFER_FINALIZE",
        _ => "UNKNOWN",
    }
}
472
473/// Operand byte width after the opcode byte. Single source of truth —
474/// all bytecode traversal functions must use this.
475pub fn opcode_operand_width(op: u8, code: &[u8], ip: usize) -> usize {
476 match op {
477 // 0-byte (stack-only)
478 POP
479 | DUP
480 | LOAD_UNIT
481 | LOAD_TRUE
482 | LOAD_FALSE
483 | ADD
484 | ADD_INT
485 | SUB_INT
486 | MUL_INT
487 | ADD_FLOAT
488 | SUB_FLOAT
489 | MUL_FLOAT
490 | DIV_FLOAT
491 | SUB
492 | MUL
493 | DIV
494 | MOD
495 | NEG
496 | NOT
497 | EQ
498 | EQ_INT
499 | LT
500 | LT_INT
501 | LT_FLOAT
502 | GT
503 | GT_INT
504 | GT_FLOAT
505 | RETURN
506 | PROPAGATE_ERR
507 | LIST_HEAD_TAIL
508 | LIST_NIL
509 | LIST_CONS
510 | LIST_LEN
511 | LIST_PREPEND
512 | UNWRAP_OR
513 | UNWRAP_RESULT_OR
514 | CONCAT
515 | VECTOR_GET
516 | VECTOR_SET
517 | BUFFER_NEW
518 | BUFFER_APPEND_STR
519 | BUFFER_APPEND_SEP_UNLESS_FIRST
520 | BUFFER_FINALIZE
521 | NOP => 0,
522
523 // 1-byte
524 LOAD_LOCAL | MOVE_LOCAL | STORE_LOCAL | CALL_VALUE | RECORD_GET | EXTRACT_FIELD
525 | EXTRACT_TUPLE_ITEM | LIST_NEW | WRAP | TUPLE_NEW | TAIL_CALL_SELF
526 | TAIL_CALL_SELF_THIN | VECTOR_SET_OR_KEEP => 1,
527
528 // 2-byte (u16 or u8+u8)
529 LOAD_CONST | LOAD_GLOBAL | STORE_GLOBAL | JUMP | JUMP_IF_FALSE | MATCH_FAIL | MATCH_NIL
530 | MATCH_CONS | LOAD_LOCAL_2 | VECTOR_GET_OR => 2,
531
532 // 3-byte
533 CALL_KNOWN | CALL_LEAF | MATCH_TAG | MATCH_UNWRAP | MATCH_TUPLE | RECORD_NEW
534 | LOAD_LOCAL_CONST => 3,
535
536 // 4-byte
537 CALL_KNOWN_OWNED => 4, // fn_id:u16 + argc:u8 + owned:u8
538
539 // 4-byte
540 MATCH_VARIANT | RECORD_GET_NAMED => 4,
541
542 // 5-byte
543 CALL_BUILTIN | VARIANT_NEW => 5,
544
545 // 6-byte
546 CALL_BUILTIN_OWNED => 6, // symbol_id:u32 + argc:u8 + owned:u8
547
548 // 10-byte
549 MATCH_INT_LITERAL => 10, // imm:i64 + fail_offset:i16
550
551 // Variable-length
552 MATCH_DISPATCH | MATCH_DISPATCH_CONST if ip < code.len() => {
553 let count = code[ip] as usize;
554 let entry_size = if op == MATCH_DISPATCH { 11 } else { 17 };
555 3 + count * entry_size
556 }
557 RECORD_UPDATE if ip + 2 < code.len() => 3 + code[ip + 2] as usize,
558 // CALL_PAR count:u8 unwrap:u8 [argc:u8 × count]
559 CALL_PAR if ip < code.len() => {
560 let count = code[ip] as usize;
561 2 + count
562 }
563 _ => 0,
564 }
565}