Skip to main content

stryke/
bytecode.rs

1use serde::{Deserialize, Serialize};
2
3use crate::ast::{
4    AdviceKind, Block, ClassDef, EnumDef, Expr, MatchArm, StructDef, SubSigParam, TraitDef,
5};
6use crate::value::StrykeValue;
7
/// Operand tuple for a compiled `splice EXPR, OFF, LEN, LIST` call, stored in
/// [`Chunk::splice_expr_entries`] and consumed by [`Op::SpliceExpr`]:
/// `(array_expr, offset, length, replacements)`. `offset` and `length` are
/// `None` when the Perl source omits them (Perl's `splice` defaults then apply
/// at run time).
pub(crate) type SpliceExprEntry = (Expr, Option<Expr>, Option<Expr>, Vec<Expr>);
10
/// `sub` body registered at run time (e.g. `BEGIN { sub f { ... } }`), mirrored from
/// [`crate::vm_helper::VMHelper::exec_statement`] `StmtKind::SubDecl`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RuntimeSubDecl {
    /// Subroutine name as written in the declaration.
    pub name: String,
    /// Declared signature parameters; presumably empty for an unsignatured
    /// `sub` — TODO(review): confirm against the compiler's `SubDecl` lowering.
    pub params: Vec<SubSigParam>,
    /// The sub body as an AST [`Block`].
    pub body: Block,
    /// Perl prototype string when one was declared, otherwise `None`.
    pub prototype: Option<String>,
}
20
/// AOP advice registered at runtime (`before|after|around "<glob>" { ... }`).
/// Installed via [`Op::RegisterAdvice`] into `Interpreter::intercepts`.
///
/// `body_block_idx` indexes [`Chunk::blocks`]. The body is lowered to bytecode
/// during the fourth-pass block lowering ([`Chunk::block_bytecode_ranges`]) so
/// `dispatch_with_advice` can run it through the VM (`run_block_region`) — the
/// same path used by `map { }` / `grep { }` blocks. This keeps advice on the
/// bytecode dispatch surface, away from the AST tree-walker, so compile-time
/// name resolution (`our`-qualified scalars, lexical slots) works inside the
/// advice exactly as it does outside. See `tests/tree_walker_absent_aop.rs`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RuntimeAdviceDecl {
    /// Join-point kind: `before`, `after`, or `around` the matched call.
    pub kind: AdviceKind,
    /// The `"<glob>"` pattern from the advice declaration, matched against
    /// callee names by the interceptor dispatch.
    pub pattern: String,
    /// Advice body as an AST [`Block`] (the bytecode form is reached through
    /// `body_block_idx`, not this field).
    pub body: Block,
    /// Index into [`Chunk::blocks`] for the lowered body run by
    /// `dispatch_with_advice` via `run_block_region`.
    pub body_block_idx: u16,
}
38
39/// Stack-based bytecode instruction set for the stryke VM.
40/// Operands use u16 for pool indices (64k names/constants) and i32 for jumps.
41#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
42pub enum Op {
43    Nop,
44    // ── Constants ──
45    LoadInt(i64),
46    LoadFloat(f64),
47    LoadConst(u16), // index into constant pool
48    LoadUndef,
49
50    // ── Stack ──
51    Pop,
52    Dup,
53    /// Duplicate the top two stack values: \[a, b\] (b on top) → \[a, b, a, b\].
54    Dup2,
55    /// Swap the top two stack values (StrykeValue).
56    Swap,
57    /// Rotate the top three values upward (FORTH `rot`): `[a, b, c]` (c on top) → `[b, c, a]`.
58    Rot,
59    /// Pop one value; push [`StrykeValue::scalar_context`] of that value (Perl aggregate rules).
60    ValueScalarContext,
61    /// Pop list/array; push first element (or undef if empty). For `my ($x) = @arr`.
62    ListFirst,
63
64    // ── Scalars (u16 = name pool index) ──
65    GetScalar(u16),
66    /// Like `GetScalar` but reads `scope.get_scalar` only (no Perl special-variable dispatch).
67    GetScalarPlain(u16),
68    SetScalar(u16),
69    /// Like `SetScalar` but calls `scope.set_scalar` only (no special-variable dispatch).
70    SetScalarPlain(u16),
71    DeclareScalar(u16),
72    /// Like `DeclareScalar` but the binding is immutable after initialization.
73    DeclareScalarFrozen(u16),
74    /// `typed my $x : Type` — u8 encodes [`crate::ast::PerlTypeName`] (0=Int,1=Str,2=Float).
75    DeclareScalarTyped(u16, u8),
76    /// `frozen typed my $x : Type` — immutable after initialization + type-checked.
77    DeclareScalarTypedFrozen(u16, u8),
78    /// `typed my $x : Foo` where `Foo` is a user-defined struct/class/enum type.
79    /// First u16 = scalar name index; second u16 = type-name pool index; the u8
80    /// flag encodes (frozen << 1) | is_enum so a single op covers all four
81    /// permutations (frozen × {struct/class, enum}).
82    DeclareScalarTypedUser(u16, u16, u8),
83
84    // ── State variables (persist across calls) ──
85    /// `state $x = EXPR` — pop TOS as initializer on first call only.
86    /// On subsequent calls the persisted value is used as the local binding.
87    /// Key: (sub entry IP, name_idx) in VM's state_vars table.
88    DeclareStateScalar(u16),
89    /// `state @arr = (...)` — array variant.
90    DeclareStateArray(u16),
91    /// `state %hash = (...)` — hash variant.
92    DeclareStateHash(u16),
93
94    // ── Arrays ──
95    GetArray(u16),
96    SetArray(u16),
97    DeclareArray(u16),
98    DeclareArrayFrozen(u16),
99    GetArrayElem(u16), // stack: [index] → value
100    SetArrayElem(u16), // stack: [value, index]
101    /// Like [`Op::SetArrayElem`] but leaves the assigned value on the stack (e.g. `$a[$i] //=`).
102    SetArrayElemKeep(u16),
103    PushArray(u16),  // stack: [value] → push to named array
104    PopArray(u16),   // → popped value
105    ShiftArray(u16), // → shifted value
106    ArrayLen(u16),   // → integer length
107    /// Pop index spec (scalar or array from [`Op::Range`]); push one `StrykeValue::array` of elements
108    /// read from the named array. Used for `@name[...]` slice rvalues.
109    ArraySlicePart(u16),
110    /// Push `array[start..]` as a `StrykeValue::array`. Used for slurpy-tail
111    /// destructure: `my ($a, $b, @rest) = LIST` reads `tmp[2..]` into
112    /// `@rest`. (BUG-090) — operands: `(name_idx, start)`.
113    GetArrayFromIndex(u16, u16),
114    /// Pop `b`, pop `a` (arrays); push concatenation `a` followed by `b` (Perl slice / list glue).
115    ArrayConcatTwo,
116    /// `exists $a[$i]` — stack: `[index]` → 0/1 (stash-qualified array name pool index).
117    ExistsArrayElem(u16),
118    /// `delete $a[$i]` — stack: `[index]` → deleted value (or undef).
119    DeleteArrayElem(u16),
120
121    // ── Hashes ──
122    GetHash(u16),
123    SetHash(u16),
124    DeclareHash(u16),
125    DeclareHashFrozen(u16),
126    /// Dynamic `local $x` — save previous binding, assign TOS (same stack shape as DeclareScalar).
127    LocalDeclareScalar(u16),
128    LocalDeclareArray(u16),
129    LocalDeclareHash(u16),
130    /// `local $h{key} = val` — stack: `[value, key]` (key on top), same as [`Op::SetHashElem`].
131    LocalDeclareHashElement(u16),
132    /// `local $a[i] = val` — stack: `[value, index]` (index on top), same as [`Op::SetArrayElem`].
133    LocalDeclareArrayElement(u16),
134    /// `local *name` or `local *name = *other` — second pool index is `Some(rhs)` when aliasing.
135    LocalDeclareTypeglob(u16, Option<u16>),
136    /// `local *{EXPR}` / `local *$x` — LHS glob name string on stack (TOS); optional static `*rhs` pool index.
137    LocalDeclareTypeglobDynamic(Option<u16>),
138    GetHashElem(u16), // stack: [key] → value
139    SetHashElem(u16), // stack: [value, key]
140    /// Like [`Op::SetHashElem`] but leaves the assigned value on the stack (e.g. `$h{k} //=`).
141    SetHashElemKeep(u16),
142    DeleteHashElem(u16), // stack: [key] → deleted value
143    ExistsHashElem(u16), // stack: [key] → 0/1
144    /// `delete $href->{key}` — stack: `[container, key]` (key on top) → deleted value.
145    DeleteArrowHashElem,
146    /// `exists $href->{key}` — stack: `[container, key]` → 0/1.
147    ExistsArrowHashElem,
148    /// `exists $aref->[$i]` — stack: `[container, index]` (index on top, int-coerced).
149    ExistsArrowArrayElem,
150    /// `delete $aref->[$i]` — stack: `[container, index]` → deleted value (or undef).
151    DeleteArrowArrayElem,
152    HashKeys(u16),   // → array of keys
153    HashValues(u16), // → array of values
154    /// Scalar `keys %h` — push integer key count.
155    HashKeysScalar(u16),
156    /// Scalar `values %h` — push integer value count.
157    HashValuesScalar(u16),
158    /// `keys EXPR` after operand evaluated in list context — stack: `[value]` → key list array.
159    KeysFromValue,
160    /// Scalar `keys EXPR` after operand — stack: `[value]` → key count.
161    KeysFromValueScalar,
162    /// `values EXPR` after operand evaluated in list context — stack: `[value]` → values array.
163    ValuesFromValue,
164    /// Scalar `values EXPR` after operand — stack: `[value]` → value count.
165    ValuesFromValueScalar,
166
167    /// `push @$aref, ITEM` — stack: `[aref, item]` (item on top); mutates; pushes `aref` back.
168    PushArrayDeref,
169    /// After `push @$aref, …` — stack: `[aref]` → `[len]` (consumes aref).
170    ArrayDerefLen,
171    /// `pop @$aref` — stack: `[aref]` → popped value.
172    PopArrayDeref,
173    /// `shift @$aref` — stack: `[aref]` → shifted value.
174    ShiftArrayDeref,
175    /// `unshift @$aref, LIST` — stack `[aref, v1, …, vn]` (vn on top); `n` extra values.
176    UnshiftArrayDeref(u8),
177    /// `splice @$aref, off, len, LIST` — stack top: replacements, then `len`, `off`, `aref` (`len` may be undef).
178    SpliceArrayDeref(u8),
179
180    // ── Arithmetic ──
181    Add,
182    Sub,
183    Mul,
184    Div,
185    Mod,
186    Pow,
187    Negate,
188    /// `inc EXPR` — pop value, push value + 1 (integer if input is integer, else float).
189    Inc,
190    /// `dec EXPR` — pop value, push value - 1.
191    Dec,
192
193    // ── String ──
194    Concat,
195    /// Pop array (or value coerced with [`StrykeValue::to_list`]), join element strings with
196    /// [`Interpreter::list_separator`] (`$"`), push one string. Used for `@a` in `"` / `qq`.
197    ArrayStringifyListSep,
198    StringRepeat,
199    /// Pop count (top), pop list (below — flattened to `Vec<StrykeValue>` via
200    /// [`StrykeValue::as_array_vec`] or wrapped as a 1-elt list), push the list
201    /// repeated `count` times. Backs `(LIST) x N` / `qw(...) x N`. See
202    /// `compiler.rs` `ExprKind::Repeat` for the parser-level discrimination.
203    ListRepeat,
204    /// Pop string, apply `\U` / `\L` / `\u` / `\l` / `\Q` / `\E` case escapes, push result.
205    ProcessCaseEscapes,
206
207    // ── Comparison (numeric) ──
208    NumEq,
209    NumNe,
210    NumLt,
211    NumGt,
212    NumLe,
213    NumGe,
214    Spaceship,
215
216    // ── Comparison (string) ──
217    StrEq,
218    StrNe,
219    StrLt,
220    StrGt,
221    StrLe,
222    StrGe,
223    StrCmp,
224
225    // ── Logical / Bitwise ──
226    LogNot,
227    BitAnd,
228    BitOr,
229    BitXor,
230    BitNot,
231    Shl,
232    Shr,
233
234    // ── Control flow (absolute target addresses) ──
235    Jump(usize),
236    JumpIfTrue(usize),
237    JumpIfFalse(usize),
238    /// Jump if TOS is falsy WITHOUT popping (for short-circuit &&)
239    JumpIfFalseKeep(usize),
240    /// Jump if TOS is truthy WITHOUT popping (for short-circuit ||)
241    JumpIfTrueKeep(usize),
242    /// Jump if TOS is defined WITHOUT popping (for //)
243    JumpIfDefinedKeep(usize),
244
245    // ── Increment / Decrement ──
246    PreInc(u16),
247    PreDec(u16),
248    PostInc(u16),
249    PostDec(u16),
250    /// Pre-increment on a frame slot entry (compiled `my $x` fast path).
251    PreIncSlot(u8),
252    PreDecSlot(u8),
253    PostIncSlot(u8),
254    PostDecSlot(u8),
255
256    // ── Functions ──
257    /// Call subroutine: name index, arg count, `WantarrayCtx` discriminant as `u8`
258    Call(u16, u8, u8),
259    /// Like [`Op::Call`] but with a compile-time-resolved entry: `sid` indexes [`Chunk::static_sub_calls`]
260    /// (entry IP + stack-args); `name_idx` duplicates the stash pool index for closure restore / JIT
261    /// (same as in the table; kept in the opcode so JIT does not need the side table).
262    CallStaticSubId(u16, u16, u8, u8),
263    Return,
264    ReturnValue,
265    /// End of a compiled `map` / `grep` / `sort` block body (empty block or last statement an expression).
266    /// Pops the synthetic call frame from [`crate::vm::VM::run_block_region`] and unwinds the
267    /// block-local scope (`scope_push_hook` per iteration, like [`crate::vm_helper::VMHelper::exec_block`]);
268    /// not subroutine `return` and not a closure capture.
269    BlockReturnValue,
270    /// At runtime statement position: capture current lexicals into [`crate::value::PerlSub::closure_env`]
271    /// for a sub already registered in [`Interpreter::subs`] (see `prepare_program_top_level`).
272    BindSubClosure(u16),
273
274    // ── Scope ──
275    PushFrame,
276    PopFrame,
277
278    // ── I/O ──
279    /// `print [HANDLE] LIST` — `None` uses [`crate::vm_helper::VMHelper::default_print_handle`].
280    Print(Option<u16>, u8),
281    Say(Option<u16>, u8),
282
283    // ── Built-in function calls ──
284    /// Calls a registered built-in: (builtin_id, arg_count)
285    CallBuiltin(u16, u8),
286    /// Save [`crate::vm_helper::VMHelper::wantarray_kind`] and set from `u8`
287    /// ([`crate::vm_helper::WantarrayCtx::as_byte`]). Used for `splice` / similar where the
288    /// dynamic context must match the expression's compile-time [`WantarrayCtx`] (e.g. `print splice…`).
289    WantarrayPush(u8),
290    /// Restore after [`Op::WantarrayPush`].
291    WantarrayPop,
292
293    // ── List / Range ──
294    MakeArray(u16), // pop N values, push as Array
295    /// `@$href{k1,k2}` — stack: `[container, key1, …, keyN]` (TOS = last key); pops `N+1` values; pushes array of slot values.
296    HashSliceDeref(u16),
297    /// `@$aref[i1,i2,...]` — stack: `[array_ref, spec1, …, specN]` (TOS = last spec); each spec is a
298    /// scalar index or array of indices (list-context `..` / `qw`/list). Pops `N+1`; pushes elements.
299    ArrowArraySlice(u16),
300    /// `@$href{k1,k2} = VALUE` — stack: `[value, container, key1, …, keyN]` (TOS = last key); pops `N+2` values.
301    SetHashSliceDeref(u16),
302    /// `%name{k1,k2} = VALUE` — stack: `[value, key1, …, keyN]` (TOS = last key); pops `N+1`. Pool: hash name, key count.
303    SetHashSlice(u16, u16),
304    /// `@h{k1,k2}` read — stack: `[key1, …, keyN]` (TOS = last key); pops `N` values; pushes array of slot values.
305    /// Each key value may be a scalar or array (from list-context range); arrays are flattened into individual keys.
306    /// Pool: hash name index, key-expression count.
307    GetHashSlice(u16, u16),
308    /// `@$href{k1,k2} OP= VALUE` — stack: `[rhs, container, key1, …, keyN]` (TOS = last key); pops `N+2`, pushes the new value.
309    /// `u8` = [`crate::compiler::scalar_compound_op_to_byte`] encoding of the binop.
310    /// Perl 5 applies the op only to the **last** key’s element.
311    HashSliceDerefCompound(u8, u16),
312    /// `++@$href{k1,k2}` / `--...` / `@$href{k1,k2}++` / `...--` — stack: `[container, key1, …, keyN]`;
313    /// pops `N+1`. Pre-forms push the new last-element value; post-forms push the **old** last value.
314    /// `u8` encodes kind: 0=PreInc, 1=PreDec, 2=PostInc, 3=PostDec. Only the last key is updated.
315    HashSliceDerefIncDec(u8, u16),
316    /// `@name{k1,k2} OP= rhs` — stack: `[rhs, key1, …, keyN]` (TOS = last key); pops `N+1`, pushes the new value.
317    /// Pool: compound-op byte ([`crate::compiler::scalar_compound_op_to_byte`]), stash hash name, key-slot count.
318    /// Only the **last** flattened key is updated (same as [`Op::HashSliceDerefCompound`]).
319    NamedHashSliceCompound(u8, u16, u16),
320    /// `++@name{k1,k2}` / `--…` / `@name{k1,k2}++` / `…--` — stack: `[key1, …, keyN]`; pops `N`.
321    /// `u8` kind matches [`Op::HashSliceDerefIncDec`]. Only the last key is updated.
322    NamedHashSliceIncDec(u8, u16, u16),
323    /// Multi-key `@h{k1,k2} //=` / `||=` / `&&=` — stack `[key1, …, keyN]` unchanged; pushes the **last**
324    /// flattened slot (Perl only tests that slot). Pool: hash name, key-slot count.
325    NamedHashSlicePeekLast(u16, u16),
326    /// Stack `[key1, …, keyN, cur]` — pop `N` key slots, keep `cur` (short-circuit path).
327    NamedHashSliceDropKeysKeepCur(u16),
328    /// Assign list RHS’s last element to the **last** flattened key; stack `[val, key1, …, keyN]` (TOS = last key). Pushes `val`.
329    SetNamedHashSliceLastKeep(u16, u16),
330    /// Multi-key `@$href{k1,k2} //=` — stack `[container, key1, …, keyN]`; pushes last slice element (see [`Op::ArrowArraySlicePeekLast`]).
331    HashSliceDerefPeekLast(u16),
332    /// `[container, key1, …, keyN, val]` → `[val, container, key1, …, keyN]` for [`Op::HashSliceDerefSetLastKeep`].
333    HashSliceDerefRollValUnderKeys(u16),
334    /// Assign to last flattened key only; stack `[val, container, key1, …, keyN]`. Pushes `val`.
335    HashSliceDerefSetLastKeep(u16),
336    /// Stack `[container, key1, …, keyN, cur]` — drop container and keys; keep `cur`.
337    HashSliceDerefDropKeysKeepCur(u16),
338    /// `@$aref[i1,i2,...] = LIST` — stack: `[value, aref, spec1, …, specN]` (TOS = last spec);
339    /// pops `N+2`. Delegates to [`crate::vm_helper::VMHelper::assign_arrow_array_slice`].
340    SetArrowArraySlice(u16),
341    /// `@$aref[i1,i2,...] OP= rhs` — stack: `[rhs, aref, spec1, …, specN]`; pops `N+2`, pushes new value.
342    /// `u8` = [`crate::compiler::scalar_compound_op_to_byte`] encoding of the binop.
343    /// Perl 5 applies the op only to the **last** index. Delegates to [`crate::vm_helper::VMHelper::compound_assign_arrow_array_slice`].
344    ArrowArraySliceCompound(u8, u16),
345    /// `++@$aref[i1,i2,...]` / `--...` / `...++` / `...--` — stack: `[aref, spec1, …, specN]`;
346    /// pops `N+1`. Pre-forms push the new last-element value; post-forms push the old last value.
347    /// `u8` kind matches [`Op::HashSliceDerefIncDec`]. Only the last index is updated. Delegates to
348    /// [`crate::vm_helper::VMHelper::arrow_array_slice_inc_dec`].
349    ArrowArraySliceIncDec(u8, u16),
350    /// Read the element at the **last** flattened index of `@$aref[spec1,…]` without popping `aref`
351    /// or specs. Stack: `[aref, spec1, …, specN]` (TOS = last spec) → same plus pushed scalar.
352    /// Used for `@$r[i,j] //=` / `||=` / `&&=` short-circuit tests (Perl only tests the last slot).
353    ArrowArraySlicePeekLast(u16),
354    /// Stack: `[aref, spec1, …, specN, cur]` — pop slice keys and container, keep `cur` (short-circuit
355    /// result). `u16` = number of spec slots (same as [`Op::ArrowArraySlice`]).
356    ArrowArraySliceDropKeysKeepCur(u16),
357    /// Reorder `[aref, spec1, …, specN, val]` → `[val, aref, spec1, …, specN]` for
358    /// [`Op::SetArrowArraySliceLastKeep`].
359    ArrowArraySliceRollValUnderSpecs(u16),
360    /// Assign `val` to the **last** flattened index only; stack `[val, aref, spec1, …, specN]`
361    /// (TOS = last spec). Pushes `val` (like [`Op::SetArrowArrayKeep`]).
362    SetArrowArraySliceLastKeep(u16),
363    /// Like [`Op::ArrowArraySliceIncDec`] but for a **named** stash array (`@a[i1,i2,...]`).
364    /// Stack: `[spec1, …, specN]` (TOS = last spec). `u16` = name pool index (stash-qualified).
365    /// Delegates to [`crate::vm_helper::VMHelper::named_array_slice_inc_dec`].
366    NamedArraySliceIncDec(u8, u16, u16),
367    /// `@name[spec1,…] OP= rhs` — stack `[rhs, spec1, …, specN]` (TOS = last spec); pops `N+1`.
368    /// Only the **last** flattened index is updated (same as [`Op::ArrowArraySliceCompound`]).
369    NamedArraySliceCompound(u8, u16, u16),
370    /// Read the **last** flattened slot of `@name[spec1,…]` without popping specs. Stack:
371    /// `[spec1, …, specN]` → same plus pushed scalar. `u16` pairs: name pool index, spec count.
372    NamedArraySlicePeekLast(u16, u16),
373    /// Stack: `[spec1, …, specN, cur]` — pop specs, keep `cur` (short-circuit). `u16` = spec count.
374    NamedArraySliceDropKeysKeepCur(u16),
375    /// `[spec1, …, specN, val]` → `[val, spec1, …, specN]` for [`Op::SetNamedArraySliceLastKeep`].
376    NamedArraySliceRollValUnderSpecs(u16),
377    /// Assign to the **last** index only; stack `[val, spec1, …, specN]`. Pushes `val`.
378    SetNamedArraySliceLastKeep(u16, u16),
379    /// `@name[spec1,…] = LIST` — stack `[value, spec1, …, specN]` (TOS = last spec); pops `N+1`.
380    /// Element-wise like [`Op::SetArrowArraySlice`]. Pool indices: stash-qualified array name, spec count.
381    SetNamedArraySlice(u16, u16),
382    /// `BAREWORD` as an rvalue — at run time, look up a subroutine with this name; if found,
383    /// call it with no args (nullary), otherwise push the name as a string (Perl's bareword-as-
384    /// stringifies behavior). `u16` is a name-pool index. Delegates to
385    /// [`crate::vm_helper::VMHelper::resolve_bareword_rvalue`].
386    BarewordRvalue(u16),
387    /// Throw `PerlError::runtime` with the message at constant pool index `u16`. Used by the compiler
388    /// to hard-reject constructs whose only valid response is a runtime error
389    /// (e.g. `++@$r`, `%{...}--`) without AST fallback.
390    RuntimeErrorConst(u16),
391    MakeHash(u16), // pop N key-value pairs, push as Hash
392    Range,         // stack: [from, to] → Array
393    RangeStep,     // stack: [from, to, step] → Array (stepped range)
394    /// Array slice via colon range — `@arr[FROM:TO:STEP]` / `@arr[::-1]`.
395    /// Stack: `[from, to, step]` — each may be `Undef` to mean "omitted" (uses array bounds).
396    /// `u16` is the array name pool index. Endpoints must coerce to integer cleanly; otherwise
397    /// runtime aborts (`die "slice: non-integer endpoint in array slice"`). Pushes the sliced array.
398    ArraySliceRange(u16),
399    /// Hash slice via colon range — `@h{FROM:TO:STEP}` (keys auto-quote like fat comma `=>`).
400    /// Stack: `[from, to, step]` — open ends die (no notion of "all keys" in unordered hash).
401    /// Endpoints stringify to hash keys; expansion uses numeric or magic-string-increment
402    /// depending on whether both ends parse as numbers. `u16` is the hash name pool index.
403    /// Pushes the array of slot values for the expanded keys.
404    HashSliceRange(u16),
405    /// Scalar `..` / `...` flip-flop (numeric bounds vs `$.` — [`Interpreter::scalar_flipflop_dot_line`]).
406    /// Stack: `[from, to]` (ints); pushes `1` or `0`. `u16` indexes flip-flop slots; `u8` is `1` for `...`
407    /// (exclusive: right bound only after `$.` is strictly past the line where the left bound matched).
408    ScalarFlipFlop(u16, u8),
409    /// Regex `..` / `...` flip-flop: both bounds are pattern literals; tests use `$_` and `$.` like Perl
410    /// (`Interpreter::regex_flip_flop_eval`). Operand order: `slot`, `exclusive`, left pattern, left flags,
411    /// right pattern, right flags (constant pool indices). No stack operands; pushes `0`/`1`.
412    RegexFlipFlop(u16, u8, u16, u16, u16, u16),
413    /// Regex `..` / `...` flip-flop with `eof` as the right operand (no arguments). Left bound matches `$_`;
414    /// right bound is [`Interpreter::eof_without_arg_is_true`] (Perl `eof` in `-n`/`-p`). Operand order:
415    /// `slot`, `exclusive`, left pattern, left flags.
416    RegexEofFlipFlop(u16, u8, u16, u16),
417    /// Regex `..` / `...` with a non-literal right operand (e.g. `m/a/ ... (m/b/ or m/c/)`). Left bound is
418    /// pattern + flags; right is evaluated in boolean context each line (pool index into
419    /// [`Chunk::regex_flip_flop_rhs_expr_entries`] / bytecode ranges). Operand order: `slot`, `exclusive`,
420    /// left pattern, left flags, rhs expr index.
421    RegexFlipFlopExprRhs(u16, u8, u16, u16, u16),
422    /// Regex `..` / `...` with a numeric right operand (Perl: right bound is [`Interpreter::scalar_flipflop_dot_line`]
423    /// vs literal line). Constant pool index holds the RHS line as [`StrykeValue::integer`]. Operand order:
424    /// `slot`, `exclusive`, left pattern, left flags, rhs line constant index.
425    RegexFlipFlopDotLineRhs(u16, u8, u16, u16, u16),
426
427    // ── Regex ──
428    /// Match: pattern_const_idx, flags_const_idx, scalar_g, pos_key_name_idx (`u16::MAX` = `$_`);
429    /// stack: string operand → result
430    RegexMatch(u16, u16, bool, u16),
431    /// Substitution `s///`: pattern, replacement, flags constant indices; lvalue index into chunk.
432    /// stack: string (subject from LHS expr) → replacement count
433    RegexSubst(u16, u16, u16, u16),
434    /// Transliterate `tr///`: from, to, flags constant indices; lvalue index into chunk.
435    /// stack: string → transliteration count
436    RegexTransliterate(u16, u16, u16, u16),
437    /// Dynamic `=~` / `!~`: pattern from RHS, subject from LHS; empty flags.
438    /// stack: `[subject, pattern]` (pattern on top) → 0/1; `true` = negate (`!~`).
439    RegexMatchDyn(bool),
440    /// Regex literal as a value (`qr/PAT/FLAGS`) — pattern and flags string pool indices.
441    LoadRegex(u16, u16),
442    /// After [`RegexMatchDyn`] for bare `m//` in `&&` / `||`: pop 0/1; push `""` or `1` (Perl scalar).
443    RegexBoolToScalar,
444    /// `pos $var = EXPR` / `pos = EXPR` (implicit `$_`). Stack: `[value, key]` (key string on top).
445    SetRegexPos,
446
447    // ── Assign helpers ──
448    /// SetScalar that also leaves the value on the stack (for chained assignment)
449    SetScalarKeep(u16),
450    /// `SetScalarKeep` for non-special scalars (see `SetScalarPlain`).
451    SetScalarKeepPlain(u16),
452
453    // ── Block-based operations (u16 = index into chunk.blocks) ──
454    /// map { BLOCK } @list — block_idx; stack: \[list\] → \[mapped\]
455    MapWithBlock(u16),
456    /// flat_map { BLOCK } @list — like [`Op::MapWithBlock`] but peels one ARRAY ref per iteration ([`StrykeValue::map_flatten_outputs`])
457    FlatMapWithBlock(u16),
458    /// grep { BLOCK } @list — block_idx; stack: \[list\] → \[filtered\]
459    GrepWithBlock(u16),
460    /// each { BLOCK } @list — block_idx; stack: \[list\] → \[count\]
461    ForEachWithBlock(u16),
462    /// map EXPR, LIST — index into [`Chunk::map_expr_entries`] / [`Chunk::map_expr_bytecode_ranges`];
463    /// stack: \[list\] → \[mapped\]
464    MapWithExpr(u16),
465    /// flat_map EXPR, LIST — same pools as [`Op::MapWithExpr`]; stack: \[list\] → \[mapped\]
466    FlatMapWithExpr(u16),
467    /// grep EXPR, LIST — index into [`Chunk::grep_expr_entries`] / [`Chunk::grep_expr_bytecode_ranges`];
468    /// stack: \[list\] → \[filtered\]
469    GrepWithExpr(u16),
470    /// `group_by { BLOCK } LIST` / `chunk_by { BLOCK } LIST` — consecutive runs where the block’s
471    /// return value stringifies the same as the previous (`str_eq`); stack: \[list\] → \[arrayrefs\]
472    ChunkByWithBlock(u16),
473    /// `group_by EXPR, LIST` / `chunk_by EXPR, LIST` — same as [`Op::ChunkByWithBlock`] but key from
474    /// `EXPR` with `$_` set each iteration; uses [`Chunk::map_expr_entries`].
475    ChunkByWithExpr(u16),
476    /// sort { BLOCK } @list — block_idx; stack: \[list\] → \[sorted\]
477    SortWithBlock(u16),
478    /// sort @list (no block) — stack: \[list\] → \[sorted\]
479    SortNoBlock,
480    /// sort $coderef LIST — stack: \[list, coderef\] (coderef on top); `u8` = wantarray for comparator calls.
481    SortWithCodeComparator(u8),
482    /// `{ $a <=> $b }` (0), `{ $a cmp $b }` (1), `{ $b <=> $a }` (2), `{ $b cmp $a }` (3)
483    SortWithBlockFast(u8),
484    /// `map { $_ * k }` with integer `k` — stack: \[list\] → \[mapped\]
485    MapIntMul(i64),
486    /// `grep { $_ % m == r }` with integer `m` (non-zero), `r` — stack: \[list\] → \[filtered\]
487    GrepIntModEq(i64, i64),
488    /// Parallel sort, same fast modes as [`Op::SortWithBlockFast`].
489    PSortWithBlockFast(u8),
490    /// `read(FH, $buf, LEN [, OFFSET])` — reads into a named variable.
491    /// Stack: [filehandle, length] (offset optional via `ReadIntoVarOffset`).
492    /// Writes result into `$name[u16]`, pushes bytes-read count (or undef on error).
493    ReadIntoVar(u16),
494    /// `chomp` on assignable expr: stack has value → chomped count; uses `chunk.lvalues[idx]`.
495    ChompInPlace(u16),
496    /// `chop` on assignable expr: stack has value → chopped char; uses `chunk.lvalues[idx]`.
497    ChopInPlace(u16),
498    /// Four-arg `substr LHS, OFF, LEN, REPL` — index into [`Chunk::substr_four_arg_entries`]; stack: \[\] → extracted slice string
499    SubstrFourArg(u16),
500    /// `keys EXPR` when `EXPR` is not a bare `%h` — [`Chunk::keys_expr_entries`] /
501    /// [`Chunk::keys_expr_bytecode_ranges`]
502    KeysExpr(u16),
503    /// `values EXPR` when not a bare `%h` — [`Chunk::values_expr_entries`] /
504    /// [`Chunk::values_expr_bytecode_ranges`]
505    ValuesExpr(u16),
506    /// Scalar `keys EXPR` (dynamic) — same pools as [`Op::KeysExpr`].
507    KeysExprScalar(u16),
508    /// Scalar `values EXPR` — same pools as [`Op::ValuesExpr`].
509    ValuesExprScalar(u16),
510    /// `delete EXPR` when not a fast `%h{...}` — index into [`Chunk::delete_expr_entries`]
511    DeleteExpr(u16),
512    /// `exists EXPR` when not a fast `%h{...}` — index into [`Chunk::exists_expr_entries`]
513    ExistsExpr(u16),
514    /// `push EXPR, ...` when not a bare `@name` — [`Chunk::push_expr_entries`]
515    PushExpr(u16),
516    /// `pop EXPR` when not a bare `@name` — [`Chunk::pop_expr_entries`]
517    PopExpr(u16),
518    /// `shift EXPR` when not a bare `@name` — [`Chunk::shift_expr_entries`]
519    ShiftExpr(u16),
520    /// `unshift EXPR, ...` when not a bare `@name` — [`Chunk::unshift_expr_entries`]
521    UnshiftExpr(u16),
522    /// `splice EXPR, ...` when not a bare `@name` — [`Chunk::splice_expr_entries`]
523    SpliceExpr(u16),
524    /// `$var .= expr` — append to scalar string in-place without cloning.
525    /// Stack: \[value_to_append\] → \[resulting_string\]. u16 = name pool index of target scalar.
526    ConcatAppend(u16),
527    /// Slot-indexed `$var .= expr` — avoids frame walking and string comparison.
528    /// Stack: \[value_to_append\] → \[resulting_string\]. u8 = slot index.
529    ConcatAppendSlot(u8),
530    /// Fused `$slot_a += $slot_b` — no stack traffic. Pushes result.
531    AddAssignSlotSlot(u8, u8),
532    /// Fused `$slot_a -= $slot_b` — no stack traffic. Pushes result.
533    SubAssignSlotSlot(u8, u8),
534    /// Fused `$slot_a *= $slot_b` — no stack traffic. Pushes result.
535    MulAssignSlotSlot(u8, u8),
536    /// Fused `if ($slot < INT) goto target` — replaces GetScalarSlot + LoadInt + NumLt + JumpIfFalse.
537    /// (slot, i32_limit, jump_target)
538    SlotLtIntJumpIfFalse(u8, i32, usize),
539    /// Void-context `$slot_a += $slot_b` — no stack push. Replaces AddAssignSlotSlot + Pop.
540    AddAssignSlotSlotVoid(u8, u8),
541    /// Void-context `++$slot` — no stack push. Replaces PreIncSlot + Pop.
542    PreIncSlotVoid(u8),
543    /// Void-context `$slot .= expr` — no stack push. Replaces ConcatAppendSlot + Pop.
544    ConcatAppendSlotVoid(u8),
545    /// Fused loop backedge: `$slot += 1; if $slot < limit jump body_target; else fall through`.
546    ///
547    /// Replaces the trailing `PreIncSlotVoid(s) + Jump(top)` of a C-style `for (my $i=0; $i<N; $i=$i+1)`
548    /// loop whose top op is a `SlotLtIntJumpIfFalse(s, limit, exit)`. The initial iteration still
549    /// goes through the top check; this op handles all subsequent iterations in a single dispatch,
550    /// halving the number of ops per loop trip for the `bench_loop`/`bench_string`/`bench_array` shape.
551    /// (slot, i32_limit, body_target)
552    SlotIncLtIntJumpBack(u8, i32, usize),
553    /// Fused accumulator loop: `while $i < limit { $sum += $i; $i += 1 }` — runs the entire
554    /// remaining counted-sum loop in native Rust, eliminating op dispatch per iteration.
555    ///
556    /// Fused when a `for (my $i = a; $i < N; $i = $i + 1) { $sum += $i }` body compiles down to
557    /// exactly `AddAssignSlotSlotVoid(sum, i) + SlotIncLtIntJumpBack(i, limit, body_target)` with
558    /// `body_target` pointing at the AddAssign — i.e. the body is 1 Perl statement. Both slots are
559    /// left as integers on exit (same coercion as `AddAssignSlotSlotVoid` + `PreIncSlotVoid`).
560    /// (sum_slot, i_slot, i32_limit)
561    AccumSumLoop(u8, u8, i32),
562    /// Fused string-append counted loop: `while $i < limit { $s .= CONST; $i += 1 }` — extends
563    /// the `String` buffer in place once and pushes the literal `(limit - i)` times in a tight
564    /// Rust loop, with `Arc::get_mut` → `reserve` → `push_str`. Falls back to the regular op
565    /// sequence if the slot is not a uniquely-owned heap `String`.
566    ///
567    /// Fused when the loop body is exactly `LoadConst(c) + ConcatAppendSlotVoid(s) +
568    /// SlotIncLtIntJumpBack(i, limit, body_target)` with `body_target` pointing at the `LoadConst`.
569    /// (const_idx, s_slot, i_slot, i32_limit)
570    ConcatConstSlotLoop(u16, u8, u8, i32),
571    /// Fused array-push counted loop: `while $i < limit { push @a, $i; $i += 1 }` — reserves the
572    /// target `Vec` once and pushes `StrykeValue::integer(i)` in a tight Rust loop. Emitted when
573    /// the loop body is exactly `GetScalarSlot(i) + PushArray(arr) + ArrayLen(arr) + Pop +
574    /// SlotIncLtIntJumpBack(i, limit, body_target)` with `body_target` pointing at the
575    /// `GetScalarSlot` (i.e. the body is one `push` statement whose return is discarded).
576    /// (arr_name_idx, i_slot, i32_limit)
577    PushIntRangeToArrayLoop(u16, u8, i32),
578    /// Fused hash-insert counted loop: `while $i < limit { $h{$i} = $i * k; $i += 1 }` — runs the
579    /// entire insert loop natively, reserving hash capacity once and writing `(stringified i, i*k)`
580    /// pairs in tight Rust. Emitted when the body is exactly
581    /// `GetScalarSlot(i) + LoadInt(k) + Mul + GetScalarSlot(i) + SetHashElem(h) + Pop +
582    /// SlotIncLtIntJumpBack(i, limit, body_target)` with `body_target` at the first `GetScalarSlot`.
583    /// (hash_name_idx, i_slot, i32_multiplier, i32_limit)
584    SetHashIntTimesLoop(u16, u8, i32, i32),
585    /// Fused `$sum += $h{$k}` body op for the inner loop of `for my $k (keys %h) { $sum += $h{$k} }`.
586    ///
587    /// Replaces the 6-op sequence `GetScalarSlot(sum) + GetScalarPlain(k) + GetHashElem(h) + Add +
588    /// SetScalarSlotKeep(sum) + Pop` with a single dispatch that reads the hash element directly
589    /// into the slot without going through the VM stack. (sum_slot, k_name_idx, h_name_idx)
590    AddHashElemPlainKeyToSlot(u8, u16, u16),
591    /// Like [`Op::AddHashElemPlainKeyToSlot`] but the key variable lives in a slot (`for my $k`
592    /// in slot-mode foreach). Pure slot read + hash lookup + slot write with zero VM stack traffic.
593    /// (sum_slot, k_slot, h_name_idx)
594    AddHashElemSlotKeyToSlot(u8, u8, u16),
595    /// Fused `for my $k (keys %h) { $sum += $h{$k} }` — walks `hash.values()` in a tight native
596    /// loop, accumulating integer or float sums directly into `sum_slot`. Emitted by the
597    /// bytecode-level peephole when the foreach shape + `AddHashElemSlotKeyToSlot` body + slot
598    /// counter/var declarations are detected. `h_name_idx` is the source hash's name pool index.
599    /// (sum_slot, h_name_idx)
600    SumHashValuesToSlot(u8, u16),
601
602    // ── Frame-local scalar slots (O(1) access, no string lookup) ──
603    /// Read scalar from current frame's slot array. u8 = slot index.
604    GetScalarSlot(u8),
605    /// Write scalar to current frame's slot array (pop, discard). u8 = slot index.
606    SetScalarSlot(u8),
607    /// Write scalar to current frame's slot array (pop, keep on stack). u8 = slot index.
608    SetScalarSlotKeep(u8),
609    /// Declare + initialize scalar in current frame's slot array. u8 = slot index; u16 = name pool
610    /// index (bare name) for closure capture.
611    DeclareScalarSlot(u8, u16),
612    /// Read argument from caller's stack region: push stack\[call_frame.stack_base + idx\].
613    /// Avoids @_ allocation + string-based shift for compiled sub argument passing.
614    GetArg(u8),
615    /// `reverse` in list context — stack: \[list\] → \[reversed list\]
616    ReverseListOp,
617    /// `scalar reverse` — stack: \[list\] → concatenated string with chars reversed (Perl).
618    ReverseScalarOp,
619    /// `rev` in list context — reverse list, preserve iterators lazily.
620    RevListOp,
621    /// `rev` in scalar context — char-reverse string.
622    RevScalarOp,
623    /// Pop TOS (array/list), push `to_list().len()` as integer (Perl `scalar` on map/grep result).
624    StackArrayLen,
625    /// Pop list-slice result array; push last element (Perl `scalar (LIST)[i,...]`).
626    ListSliceToScalar,
627    /// pmap { BLOCK } @list — block_idx; stack: \[progress_flag, list\] → \[mapped\] (`progress_flag` is 0/1)
628    PMapWithBlock(u16),
629    /// pflat_map { BLOCK } @list — flatten array results; output in **input order**; stack same as [`Op::PMapWithBlock`]
630    PFlatMapWithBlock(u16),
631    /// pmaps { BLOCK } LIST — streaming parallel map; stack: \[list\] → \[iterator\]
632    PMapsWithBlock(u16),
633    /// pflat_maps { BLOCK } LIST — streaming parallel flat map; stack: \[list\] → \[iterator\]
634    PFlatMapsWithBlock(u16),
635    /// `pmap_on` / `pflat_map_on` over SSH — stack: \[progress_flag, list, cluster\] → \[mapped\]; `flat` = 1 for flatten
636    PMapRemote {
637        block_idx: u16,
638        flat: u8,
639    },
640    /// puniq LIST — hash-partition parallel distinct (first occurrence order); stack: \[progress_flag, list\] → \[array\]
641    Puniq,
642    /// pfirst { BLOCK } LIST — short-circuit parallel; stack: \[progress_flag, list\] → value or undef
643    PFirstWithBlock(u16),
644    /// pany { BLOCK } LIST — short-circuit parallel; stack: \[progress_flag, list\] → 0/1
645    PAnyWithBlock(u16),
646    /// pmap_chunked N { BLOCK } @list — block_idx; stack: \[progress_flag, chunk_n, list\] → \[mapped\]
647    PMapChunkedWithBlock(u16),
648    /// pgrep { BLOCK } @list — block_idx; stack: \[progress_flag, list\] → \[filtered\]
649    PGrepWithBlock(u16),
650    /// pgreps { BLOCK } LIST — streaming parallel grep; stack: \[list\] → \[iterator\]
651    PGrepsWithBlock(u16),
652    /// pfor { BLOCK } @list — block_idx; stack: \[progress_flag, list\] → \[\]
653    PForWithBlock(u16),
654    /// psort { BLOCK } @list — block_idx; stack: \[progress_flag, list\] → \[sorted\]
655    PSortWithBlock(u16),
656    /// psort @list (no block) — stack: \[progress_flag, list\] → \[sorted\]
657    PSortNoBlockParallel,
658    /// `reduce { BLOCK } @list` — block_idx; stack: \[list\] → \[accumulator\]
659    ReduceWithBlock(u16),
660    /// `preduce { BLOCK } @list` — block_idx; stack: \[progress_flag, list\] → \[accumulator\]
661    PReduceWithBlock(u16),
662    /// `preduce_init EXPR, { BLOCK } @list` — block_idx; stack: \[progress_flag, list, init\] → \[accumulator\]
663    PReduceInitWithBlock(u16),
664    /// `pmap_reduce { MAP } { REDUCE } @list` — map and reduce block indices; stack: \[progress_flag, list\] → \[scalar\]
665    PMapReduceWithBlocks(u16, u16),
666    /// `pcache { BLOCK } @list` — block_idx; stack: \[progress_flag, list\] → \[array\]
667    PcacheWithBlock(u16),
668    /// `pselect($rx1, ... [, timeout => SECS])` — stack: \[rx0, …, rx_{n-1}\] with optional timeout on top
669    Pselect {
670        n_rx: u8,
671        has_timeout: bool,
672    },
673    /// `par_lines PATH, fn { } [, progress => EXPR]` — index into [`Chunk::par_lines_entries`]; stack: \[\] → `undef`
674    ParLines(u16),
675    /// `par_walk PATH, fn { } [, progress => EXPR]` — index into [`Chunk::par_walk_entries`]; stack: \[\] → `undef`
676    ParWalk(u16),
677    /// `pwatch GLOB, fn { }` — index into [`Chunk::pwatch_entries`]; stack: \[\] → result
678    Pwatch(u16),
679    /// fan N { BLOCK } — block_idx; stack: \[progress_flag, count\] (`progress_flag` is 0/1)
680    FanWithBlock(u16),
681    /// fan { BLOCK } — block_idx; stack: \[progress_flag\]; COUNT = rayon pool size (`stryke -j`)
682    FanWithBlockAuto(u16),
683    /// fan_cap N { BLOCK } — like fan; stack: \[progress_flag, count\] → array of block return values
684    FanCapWithBlock(u16),
685    /// fan_cap { BLOCK } — like fan; stack: \[progress_flag\] → array
686    FanCapWithBlockAuto(u16),
687    /// `do { BLOCK }` — block_idx + wantarray byte ([`crate::vm_helper::WantarrayCtx::as_byte`]);
688    /// stack: \[\] → result
689    EvalBlock(u16, u8),
690    /// `trace { BLOCK }` — block_idx; stack: \[\] → block value (stderr tracing for mysync mutations)
691    TraceBlock(u16),
692    /// `timer { BLOCK }` — block_idx; stack: \[\] → elapsed ms as float
693    TimerBlock(u16),
694    /// `bench { BLOCK } N` — block_idx; stack: \[iterations\] → benchmark summary string
695    BenchBlock(u16),
696    /// `given (EXPR) { when ... default ... }` — [`Chunk::given_entries`] /
697    /// [`Chunk::given_topic_bytecode_ranges`]; stack: \[\] → topic result
698    Given(u16),
699    /// `eval_timeout SECS { ... }` — index into [`Chunk::eval_timeout_entries`] /
700    /// [`Chunk::eval_timeout_expr_bytecode_ranges`]; stack: \[\] → block value
701    EvalTimeout(u16),
702    /// Algebraic `match (SUBJECT) { ... }` — [`Chunk::algebraic_match_entries`] /
703    /// [`Chunk::algebraic_match_subject_bytecode_ranges`]; stack: \[\] → arm value
704    AlgebraicMatch(u16),
705    /// `async { BLOCK }` / `spawn { BLOCK }` — block_idx; stack: \[\] → AsyncTask
706    AsyncBlock(u16),
707    /// `await EXPR` — stack: \[value\] → result
708    Await,
709    /// `__SUB__` — push reference to currently executing sub (for anonymous recursion).
710    LoadCurrentSub,
711    /// `defer { BLOCK }` — register a block to run when the current scope exits.
712    /// Stack: `[coderef]` → `[]`. The coderef is pushed to the frame's defer list.
713    DeferBlock,
714    /// Make a scalar reference from TOS (copies value into a new `RwLock`).
715    MakeScalarRef,
716    /// `\$name` when `name` is a plain scalar variable — ref aliases the live binding (same as tree `scalar_binding_ref`).
717    MakeScalarBindingRef(u16),
718    /// `\@name` — ref aliases the live array in scope (name pool index, stash-qualified like [`Op::GetArray`]).
719    MakeArrayBindingRef(u16),
720    /// `\%name` — ref aliases the live hash in scope.
721    MakeHashBindingRef(u16),
722    /// `\@{ EXPR }` after `EXPR` is on the stack — ARRAY ref aliasing the same storage as Perl (ref to existing ref or package array).
723    MakeArrayRefAlias,
724    /// `\%{ EXPR }` — HASH ref alias (same semantics as [`Op::MakeArrayRefAlias`] for hashes).
725    MakeHashRefAlias,
726    /// Make an array reference from TOS (which should be an Array)
727    MakeArrayRef,
728    /// Make a hash reference from TOS (which should be a Hash)
729    MakeHashRef,
730    /// Make an anonymous sub from a block — block_idx; stack: \[\] → CodeRef
731    /// Anonymous `sub` / coderef: block pool index + [`Chunk::code_ref_sigs`] index (may be empty vec).
732    MakeCodeRef(u16, u16),
733    /// Push a code reference to a named sub (`\&foo`) — name pool index; resolves at run time.
734    LoadNamedSubRef(u16),
735    /// `\&{ EXPR }` — stack: \[sub name string\] → code ref (resolves at run time).
736    LoadDynamicSubRef,
737    /// `*{ EXPR }` — stack: \[stash / glob name string\] → resolved handle string (IO alias map + identity).
738    LoadDynamicTypeglob,
739    /// `*lhs = *rhs` — copy stash slots (sub, scalar, array, hash, IO alias); name pool indices for both sides.
740    CopyTypeglobSlots(u16, u16),
741    /// `*name = $coderef` — stack: pop value, install subroutine in typeglob, push value back (assignment result).
742    TypeglobAssignFromValue(u16),
743    /// `*{LHS} = $coderef` — stack: pop value, pop LHS glob name string, install sub, push value back.
744    TypeglobAssignFromValueDynamic,
745    /// `*{LHS} = *rhs` — stack: pop LHS glob name string; RHS name is pool index; copies stash like [`Op::CopyTypeglobSlots`].
746    CopyTypeglobSlotsDynamicLhs(u16),
747    /// Symbolic deref (`$$r`, `@{...}`, `%{...}`, `*{...}`): stack: \[ref or name value\] → result.
748    /// Byte: `0` = [`crate::ast::Sigil::Scalar`], `1` = Array, `2` = Hash, `3` = Typeglob.
749    SymbolicDeref(u8),
750    /// Dereference arrow: ->\[\] — stack: \[ref, index\] → value
751    ArrowArray,
752    /// Dereference arrow: ->{} — stack: \[ref, key\] → value
753    ArrowHash,
754    /// Assign to `->{}`: stack: \[value, ref, key\] (key on top) — consumes three values.
755    SetArrowHash,
756    /// Assign to `->[]`: stack: \[value, ref, index\] (index on top) — consumes three values.
757    SetArrowArray,
758    /// Like [`Op::SetArrowArray`] but leaves the assigned value on the stack (for `++$aref->[$i]` value).
759    SetArrowArrayKeep,
760    /// Like [`Op::SetArrowHash`] but leaves the assigned value on the stack (for `++$href->{k}` value).
761    SetArrowHashKeep,
762    /// Postfix `++` / `--` on `->[]`: stack \[ref, index\] (index on top) → old value; mutates slot.
763    /// Byte: `0` = increment, `1` = decrement.
764    ArrowArrayPostfix(u8),
765    /// Postfix `++` / `--` on `->{}`: stack \[ref, key\] (key on top) → old value; mutates slot.
766    /// Byte: `0` = increment, `1` = decrement.
767    ArrowHashPostfix(u8),
768    /// `$$r = $val` — stack: \[value, ref\] (ref on top).
769    SetSymbolicScalarRef,
770    /// Like [`Op::SetSymbolicScalarRef`] but leaves the assigned value on the stack.
771    SetSymbolicScalarRefKeep,
772    /// `@{ EXPR } = LIST` — stack: \[list value, ref-or-name\] (top = ref / package name); delegates to
773    /// [`Interpreter::assign_symbolic_array_ref_deref`](crate::vm_helper::VMHelper::assign_symbolic_array_ref_deref).
774    SetSymbolicArrayRef,
775    /// `%{ EXPR } = LIST` — stack: \[list value, ref-or-name\]; pairs from list like `%h = (k => v, …)`.
776    SetSymbolicHashRef,
777    /// `*{ EXPR } = RHS` — stack: \[value, ref-or-name\] (top = symbolic glob name); coderef install or `*lhs = *rhs` copy.
778    SetSymbolicTypeglobRef,
779    /// Postfix `++` / `--` on symbolic scalar ref (`$$r`); stack \[ref\] → old value. Byte: `0` = increment, `1` = decrement.
780    SymbolicScalarRefPostfix(u8),
781    /// Dereference arrow: ->() — stack: \[ref, args_array\] → value
782    /// `$cr->(...)` — wantarray byte (see VM `WantarrayCtx` threading on `Call` / `MethodCall`).
783    ArrowCall(u8),
784    /// Indirect call `$coderef(ARG...)` / `&$coderef(ARG...)` — stack (bottom→top): `target`, then
785    /// `argc` argument values (first arg pushed first). Third byte: `1` = ignore stack args and use
786    /// caller `@_` (`argc` must be `0`).
787    IndirectCall(u8, u8, u8),
788    /// Method call: stack: \[object, args...\] → result; name_idx, argc, wantarray
789    MethodCall(u16, u8, u8),
790    /// Like [`Op::MethodCall`] but uses SUPER / C3 parent chain (see interpreter method resolution for `SUPER`).
791    MethodCallSuper(u16, u8, u8),
792    /// File test: -e, -f, -d, etc. — test char; stack: \[path\] → 0/1
793    FileTestOp(u8),
794
795    // ── try / catch / finally (VM exception handling; see [`VM::try_recover_from_exception`]) ──
796    /// Push a [`crate::vm::TryFrame`]; `catch_ip` / `after_ip` patched via [`Chunk::patch_try_push_catch`]
797    /// / [`Chunk::patch_try_push_after`]; `finally_ip` via [`Chunk::patch_try_push_finally`].
798    TryPush {
799        catch_ip: usize,
800        finally_ip: Option<usize>,
801        after_ip: usize,
802        catch_var_idx: u16,
803    },
804    /// Normal completion from try or catch body (jump to finally or merge).
805    TryContinueNormal,
806    /// End of `finally` block: pop try frame and jump to `after_ip`.
807    TryFinallyEnd,
808    /// Enter catch: consume [`crate::vm::VM::pending_catch_error`], pop try scope, push catch scope, bind `$var`.
809    CatchReceive(u16),
810
811    // ── `mysync` (thread-safe shared bindings; see [`StmtKind::MySync`]) ──
812    /// Stack: `[init]` → `[]`. Declares `${name}` as `StrykeValue::atomic` (or deque/heap unwrapped).
813    DeclareMySyncScalar(u16),
814    /// Stack: `[init_list]` → `[]`. Declares `@name` as atomic array.
815    DeclareMySyncArray(u16),
816    /// Stack: `[init_list]` → `[]`. Declares `%name` as atomic hash.
817    DeclareMySyncHash(u16),
818    // ── `oursync` (package-global thread-safe shared bindings; see [`StmtKind::OurSync`]) ──
819    /// Stack: `[init]` → `[]`. `name_idx` is the package-qualified key (`Pkg::name`).
820    /// Declares the binding in the **global frame** as `StrykeValue::atomic` (or deque/heap unwrapped),
821    /// so all packages and parallel workers share one cell.
822    DeclareOurSyncScalar(u16),
823    /// Stack: `[init_list]` → `[]`. `name_idx` is the package-qualified array key (`Pkg::name`).
824    /// Declares an atomic array in the global frame.
825    DeclareOurSyncArray(u16),
826    /// Stack: `[init_list]` → `[]`. `name_idx` is the package-qualified hash key (`Pkg::name`).
827    /// Declares an atomic hash in the global frame.
828    DeclareOurSyncHash(u16),
829    /// Register [`RuntimeSubDecl`] at index (nested `sub`, including inside `BEGIN`).
830    RuntimeSubDecl(u16),
831    /// Register [`RuntimeAdviceDecl`] at index — install AOP advice into VM `intercepts` registry.
832    RegisterAdvice(u16),
833    /// `tie $x | @arr | %h, 'Class', ...` — stack bottom = class expr, then user args; `argc` = `1 + args.len()`.
834    /// `target_kind`: 0 = scalar (`TIESCALAR`), 1 = array (`TIEARRAY`), 2 = hash (`TIEHASH`). `name_idx` = bare name.
835    Tie {
836        target_kind: u8,
837        name_idx: u16,
838        argc: u8,
839    },
840    /// `format NAME =` … — index into [`Chunk::format_decls`]; installs into current package at run time.
841    FormatDecl(u16),
842    /// `use overload 'op' => 'method', …` — index into [`Chunk::use_overload_entries`].
843    UseOverload(u16),
844    /// Scalar `$x OP= $rhs` — uses [`Scope::atomic_mutate`] so `mysync` scalars are RMW-safe.
845    /// Stack: `[rhs]` → `[result]`. `op` byte is from [`crate::compiler::scalar_compound_op_to_byte`].
846    ScalarCompoundAssign {
847        name_idx: u16,
848        op: u8,
849    },
850
851    // ── Special ──
852    /// Set `${^GLOBAL_PHASE}` on the interpreter. See [`GP_START`] … [`GP_END`].
853    SetGlobalPhase(u8),
854    Halt,
855    /// Delegate an AST expression to `Interpreter::eval_expr_ctx` at runtime.
856    /// Operand is an index into [`Chunk::ast_eval_exprs`].
857    EvalAstExpr(u16),
858
859    // ── Streaming map (appended — do not reorder earlier op tags) ─────────────
860    /// `maps { BLOCK } LIST` — stack: \[list\] → lazy iterator (pull-based; stryke extension).
861    MapsWithBlock(u16),
862    /// `flat_maps { BLOCK } LIST` — like [`Op::MapsWithBlock`] with `flat_map`-style flattening.
863    MapsFlatMapWithBlock(u16),
864    /// `maps EXPR, LIST` — index into [`Chunk::map_expr_entries`]; stack: \[list\] → iterator.
865    MapsWithExpr(u16),
866    /// `flat_maps EXPR, LIST` — same pools as [`Op::MapsWithExpr`].
867    MapsFlatMapWithExpr(u16),
868    /// `filter` / `fi` `{ BLOCK } LIST` — stack: \[list\] → lazy iterator (stryke; `grep` remains eager).
869    FilterWithBlock(u16),
870    /// `filter` / `fi` `EXPR, LIST` — index into [`Chunk::grep_expr_entries`]; stack: \[list\] → iterator.
871    FilterWithExpr(u16),
872}
873
/// `${^GLOBAL_PHASE}` values emitted with [`Op::SetGlobalPhase`] (matches Perl’s phase strings).
pub const GP_START: u8 = 0;
/// Reserved; stock Perl 5 keeps `${^GLOBAL_PHASE}` as **`START`** during `UNITCHECK` blocks.
pub const GP_UNITCHECK: u8 = 1;
/// `CHECK` phase — after compilation, before `INIT`.
pub const GP_CHECK: u8 = 2;
/// `INIT` phase — just before the main body begins executing.
pub const GP_INIT: u8 = 3;
/// `RUN` phase — the main program body.
pub const GP_RUN: u8 = 4;
/// `END` phase — `END { }` blocks / interpreter shutdown.
pub const GP_END: u8 = 5;
882
/// Built-in function IDs for CallBuiltin dispatch.
///
/// Discriminants are `u16`, start at `Length = 0`, and are **contiguous**
/// (every later variant takes the implicit `previous + 1`).
/// [`BuiltinId::from_u16`] converts a raw operand back into this enum via
/// `transmute`, so variants must only ever be **appended** — never reordered
/// or removed (see the "appended" notes below). Reordering would also break
/// any serialized bytecode caches carrying these IDs.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[repr(u16)]
pub enum BuiltinId {
    // String
    Length = 0,
    Chomp,
    Chop,
    Substr,
    Index,
    Rindex,
    Uc,
    Lc,
    Ucfirst,
    Lcfirst,
    Chr,
    Ord,
    Hex,
    Oct,
    Join,
    Split,
    Sprintf,

    // Numeric
    Abs,
    /// Perl `int` — truncate toward zero.
    Int,
    Sqrt,

    // Type
    Defined,
    Ref,
    /// `scalar EXPR` — force scalar context.
    Scalar,

    // Array
    Splice,
    Reverse,
    Sort,
    Unshift,

    // Hash
    // (no hash-specific builtin IDs yet — hash ops have dedicated opcodes)

    // I/O
    Open,
    Close,
    Eof,
    ReadLine,
    Printf,

    // System
    System,
    Exec,
    Exit,
    Die,
    Warn,
    Chdir,
    Mkdir,
    Unlink,

    // Control
    Eval,
    Do,
    Require,

    // OOP
    Bless,
    Caller,

    // Parallel
    PMap,
    PGrep,
    PFor,
    PSort,
    Fan,

    // Map/Grep (block-based — need special handling)
    MapBlock,
    GrepBlock,
    SortBlock,

    // Math (appended — do not reorder earlier IDs)
    Sin,
    Cos,
    Atan2,
    Exp,
    Log,
    Rand,
    Srand,

    // String (appended)
    Crypt,
    /// Perl `fc` — Unicode casefold.
    Fc,
    /// Perl `pos` — match position of the last `m//g`.
    Pos,
    /// Perl `study` (historically a no-op in modern perls — NOTE(review): confirm stryke semantics).
    Study,

    // Filesystem (appended)
    Stat,
    Lstat,
    Link,
    Symlink,
    Readlink,
    Glob,

    // Directory handles (appended)
    Opendir,
    Readdir,
    Closedir,
    Rewinddir,
    Telldir,
    Seekdir,
    /// Read entire file as UTF-8 (`slurp $path`).
    Slurp,
    /// Blocking HTTP GET (`fetch_url $url`).
    FetchUrl,
    /// `pchannel()` — `(tx, rx)` as a two-element list.
    Pchannel,
    /// Parallel recursive glob (`glob_par`).
    GlobPar,
    /// `deque()` — empty deque.
    DequeNew,
    /// `heap(fn { })` — empty heap with comparator.
    HeapNew,
    /// `pipeline(...)` — lazy iterator (filter/map/take/collect).
    Pipeline,
    /// `capture("cmd")` — structured stdout/stderr/exit (via `sh -c`).
    Capture,
    /// `ppool(N)` — persistent thread pool (`submit` / `collect`).
    Ppool,
    /// Scalar/list context query (`wantarray`).
    Wantarray,
    /// `rename OLD, NEW`
    Rename,
    /// `chmod MODE, ...`
    Chmod,
    /// `chown UID, GID, ...`
    Chown,
    /// `pselect($rx1, $rx2, ...)` — multiplexed recv; returns `(value, index)`.
    Pselect,
    /// `barrier(N)` — thread barrier (`->wait`).
    BarrierNew,
    /// `cluster(HOST_OR_LIST...)` — build a `RemoteCluster` value used as
    /// the dispatch target by `pmap_on` / `~d>`. Operand shapes match
    /// `RemoteCluster::from_list_args`: bare host (`"h1"`), host with
    /// slot count (`"h1:4"`), host with explicit `pe_path`
    /// (`"h1:3:/usr/local/bin/stryke"`), or a hash with tunables
    /// (`{ job_timeout_ms => N, max_attempts => M, ... }`).
    ClusterNew,
    /// `par_pipeline(...)` — list form: same as `pipeline` but parallel `filter`/`map` on `collect()`.
    ParPipeline,
    /// `glob_par(..., progress => EXPR)` — last stack arg is truthy progress flag.
    GlobParProgress,
    /// `par_pipeline_stream(...)` — streaming pipeline with bounded channels between stages.
    ParPipelineStream,
    /// `par_sed(PATTERN, REPLACEMENT, FILES...)` — parallel in-place regex substitution per file.
    ParSed,
    /// `par_sed(..., progress => EXPR)` — last stack arg is truthy progress flag.
    ParSedProgress,
    /// `each EXPR` — returns empty list.
    Each,
    /// `` `cmd` `` / `qx{...}` — stdout string via `sh -c` (Perl readpipe); sets `$?`.
    Readpipe,
    /// `readline` / `<HANDLE>` in **list** context — all remaining lines until EOF (Perl `readline` list semantics).
    ReadLineList,
    /// `readdir` in **list** context — all names not yet returned (Perl drains the rest of the stream).
    ReaddirList,
    /// `ssh HOST, CMD, …` / `ssh(HOST, …)` — `execvp` style `ssh` only (no shell).
    Ssh,
    /// `rmdir LIST` — remove empty directories; returns count removed (appended ID).
    Rmdir,
    /// `utime ATIME, MTIME, LIST` — set access/mod times (Unix).
    Utime,
    /// `umask EXPR` / `umask()` — process file mode creation mask (Unix).
    Umask,
    /// `getcwd` / `pwd` — bare-name builtin returning the absolute current working directory.
    Getcwd,
    /// `pipe READHANDLE, WRITEHANDLE` — OS pipe ends (Unix).
    Pipe,
    /// `files` / `files DIR` — list file names in a directory (default: `.`).
    Files,
    /// `filesf` / `filesf DIR` / `f` — list only regular file names in a directory (default: `.`).
    Filesf,
    /// `fr DIR` — list only regular file names recursively (default: `.`).
    FilesfRecursive,
    /// `dirs` / `dirs DIR` / `d` — list subdirectory names in a directory (default: `.`).
    Dirs,
    /// `dr DIR` — list subdirectory paths recursively (default: `.`).
    DirsRecursive,
    /// `sym_links` / `sym_links DIR` — list symlink names in a directory (default: `.`).
    SymLinks,
    /// `sockets` / `sockets DIR` — list Unix socket names in a directory (default: `.`).
    Sockets,
    /// `pipes` / `pipes DIR` — list named-pipe (FIFO) names in a directory (default: `.`).
    Pipes,
    /// `block_devices` / `block_devices DIR` — list block device names in a directory (default: `.`).
    BlockDevices,
    /// `char_devices` / `char_devices DIR` — list character device names in a directory (default: `.`).
    CharDevices,
    /// `exe` / `exe DIR` — list executable file names in a directory (default: `.`).
    /// **Last variant** — [`BuiltinId::from_u16`] range-checks against this; append new IDs after it.
    Executables,
}
1080
1081impl BuiltinId {
1082    pub fn from_u16(v: u16) -> Option<Self> {
1083        if v <= Self::Executables as u16 {
1084            Some(unsafe { std::mem::transmute::<u16, BuiltinId>(v) })
1085        } else {
1086            None
1087        }
1088    }
1089}
1090
/// A compiled chunk of bytecode with its constant pools.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Chunk {
    /// Linear instruction stream executed by the VM.
    pub ops: Vec<Op>,
    /// Constant pool: string literals, regex patterns, etc.
    #[serde(with = "crate::script_cache::constants_pool_codec")]
    pub constants: Vec<StrykeValue>,
    /// Name pool: variable names, sub names (interned/deduped).
    pub names: Vec<String>,
    /// Source line for each op (parallel array for error reporting).
    pub lines: Vec<usize>,
    /// Optional link from each op to the originating [`Expr`] (pool index into [`Self::ast_expr_pool`]).
    /// Filled for ops emitted from [`crate::compiler::Compiler::compile_expr_ctx`]; other paths leave `None`.
    pub op_ast_expr: Vec<Option<u32>>,
    /// Interned [`Expr`] nodes referenced by [`Self::op_ast_expr`] (for debugging / tooling).
    pub ast_expr_pool: Vec<Expr>,
    /// Compiled subroutine entry points: (name_index, op_index, uses_stack_args).
    /// When `uses_stack_args` is true, the Call op leaves arguments on the value
    /// stack and the sub reads them via `GetArg(idx)` instead of `shift @_`.
    pub sub_entries: Vec<(u16, usize, bool)>,
    /// AST blocks for map/grep/sort/parallel operations.
    /// Referenced by block-based opcodes via u16 index.
    pub blocks: Vec<Block>,
    /// When `Some((start, end))`, `blocks[i]` is also lowered to `ops[start..end]` (exclusive `end`)
    /// with trailing [`Op::BlockReturnValue`]. VM uses opcodes; otherwise the AST in `blocks[i]`.
    pub block_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// Resolved [`Op::CallStaticSubId`] targets: subroutine entry IP, stack-args calling convention,
    /// and stash name pool index (qualified key matching [`Interpreter::subs`]).
    pub static_sub_calls: Vec<(usize, bool, u16)>,
    /// Assign targets for `s///` / `tr///` bytecode (LHS expressions).
    pub lvalues: Vec<Expr>,
    /// AST expressions delegated to interpreter at runtime via [`Op::EvalAstExpr`].
    pub ast_eval_exprs: Vec<Expr>,
    /// Instruction pointer where the main program body starts (after BEGIN/CHECK/INIT phase blocks).
    /// Used by `-n`/`-p` line mode to re-execute only the body per input line.
    pub body_start_ip: usize,
    /// `struct Name { ... }` definitions in this chunk (registered on the interpreter at VM start).
    pub struct_defs: Vec<StructDef>,
    /// `enum Name { ... }` definitions in this chunk (registered on the interpreter at VM start).
    pub enum_defs: Vec<EnumDef>,
    /// `class Name extends ... impl ... { ... }` definitions.
    pub class_defs: Vec<ClassDef>,
    /// `trait Name { ... }` definitions.
    pub trait_defs: Vec<TraitDef>,
    /// `given (topic) { body }` — topic expression + body (when/default handled by interpreter).
    pub given_entries: Vec<(Expr, Block)>,
    /// When `Some((start, end))`, `given_entries[i].0` (topic) is lowered to `ops[start..end]` +
    /// [`Op::BlockReturnValue`].
    pub given_topic_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// `eval_timeout timeout_expr { body }` — evaluated at runtime.
    pub eval_timeout_entries: Vec<(Expr, Block)>,
    /// When `Some((start, end))`, `eval_timeout_entries[i].0` (timeout expr) is lowered to
    /// `ops[start..end]` with trailing [`Op::BlockReturnValue`].
    pub eval_timeout_expr_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// Algebraic `match (subject) { arms }`.
    pub algebraic_match_entries: Vec<(Expr, Vec<MatchArm>)>,
    /// When `Some((start, end))`, `algebraic_match_entries[i].0` (subject) is lowered to
    /// `ops[start..end]` + [`Op::BlockReturnValue`].
    pub algebraic_match_subject_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// Nested / runtime `sub` declarations (see [`Op::RuntimeSubDecl`]).
    pub runtime_sub_decls: Vec<RuntimeSubDecl>,
    /// AOP advice declarations (see [`Op::RegisterAdvice`]).
    pub runtime_advice_decls: Vec<RuntimeAdviceDecl>,
    /// Stryke `fn ($a, …)` / hash-destruct params for [`Op::MakeCodeRef`] (second operand is pool index).
    pub code_ref_sigs: Vec<Vec<SubSigParam>>,
    /// `par_lines PATH, fn { } [, progress => EXPR]` — evaluated by interpreter inside VM.
    pub par_lines_entries: Vec<(Expr, Expr, Option<Expr>)>,
    /// `par_walk PATH, fn { } [, progress => EXPR]` — evaluated by interpreter inside VM.
    pub par_walk_entries: Vec<(Expr, Expr, Option<Expr>)>,
    /// `pwatch GLOB, fn { }` — evaluated by interpreter inside VM.
    pub pwatch_entries: Vec<(Expr, Expr)>,
    /// `substr $var, OFF, LEN, REPL` — four-arg form (mutates `LHS`); evaluated by interpreter inside VM.
    pub substr_four_arg_entries: Vec<(Expr, Expr, Option<Expr>, Expr)>,
    /// `keys EXPR` when `EXPR` is not bare `%h`.
    pub keys_expr_entries: Vec<Expr>,
    /// When `Some((start, end))`, `keys_expr_entries[i]` is lowered to `ops[start..end]` +
    /// [`Op::BlockReturnValue`] (operand only; [`Op::KeysExpr`] still applies `keys` to the value).
    pub keys_expr_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// `values EXPR` when not bare `%h`.
    pub values_expr_entries: Vec<Expr>,
    /// Like [`Self::keys_expr_bytecode_ranges`] but for [`Self::values_expr_entries`].
    pub values_expr_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// `delete EXPR` when not the fast `%h{k}` lowering.
    pub delete_expr_entries: Vec<Expr>,
    /// `exists EXPR` when not the fast `%h{k}` lowering.
    pub exists_expr_entries: Vec<Expr>,
    /// `push` when the array operand is not a bare `@name` (e.g. `push $aref, ...`).
    pub push_expr_entries: Vec<(Expr, Vec<Expr>)>,
    /// `pop` when the operand is not a bare `@name` (cf. [`Self::push_expr_entries`]).
    pub pop_expr_entries: Vec<Expr>,
    /// `shift` when the operand is not a bare `@name` (cf. [`Self::push_expr_entries`]).
    pub shift_expr_entries: Vec<Expr>,
    /// `unshift` when the array operand is not a bare `@name` (array expr + value exprs).
    pub unshift_expr_entries: Vec<(Expr, Vec<Expr>)>,
    /// `splice` with a non-bare array operand; tuple layout documented on [`SpliceExprEntry`].
    pub splice_expr_entries: Vec<SpliceExprEntry>,
    /// `map EXPR, LIST` — map expression (list context) with `$_` set to each element.
    pub map_expr_entries: Vec<Expr>,
    /// When `Some((start, end))`, `map_expr_entries[i]` is lowered like [`Self::grep_expr_bytecode_ranges`].
    pub map_expr_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// `grep EXPR, LIST` — filter expression evaluated with `$_` set to each element.
    pub grep_expr_entries: Vec<Expr>,
    /// When `Some((start, end))`, `grep_expr_entries[i]` is also lowered to `ops[start..end]`
    /// (exclusive `end`) with trailing [`Op::BlockReturnValue`], like [`Self::block_bytecode_ranges`].
    pub grep_expr_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// Right-hand expression for [`Op::RegexFlipFlopExprRhs`] — boolean context (bare `m//` is `$_ =~ m//`).
    pub regex_flip_flop_rhs_expr_entries: Vec<Expr>,
    /// When `Some((start, end))`, `regex_flip_flop_rhs_expr_entries[i]` is lowered to `ops[start..end]` +
    /// [`Op::BlockReturnValue`].
    pub regex_flip_flop_rhs_expr_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// Number of flip-flop slots ([`Op::ScalarFlipFlop`], [`Op::RegexFlipFlop`], [`Op::RegexEofFlipFlop`],
    /// [`Op::RegexFlipFlopExprRhs`], [`Op::RegexFlipFlopDotLineRhs`]); VM resets flip-flop vectors.
    pub flip_flop_slots: u16,
    /// `format NAME =` bodies: basename + lines between `=` and `.` (see lexer).
    pub format_decls: Vec<(String, Vec<String>)>,
    /// `use overload` pair lists (installed into current package at run time).
    pub use_overload_entries: Vec<Vec<(String, String)>>,
}
1204
1205impl Chunk {
1206    /// Look up a compiled subroutine entry by stash name pool index.
1207    pub fn find_sub_entry(&self, name_idx: u16) -> Option<(usize, bool)> {
1208        self.sub_entries
1209            .iter()
1210            .find(|(n, _, _)| *n == name_idx)
1211            .map(|(_, ip, stack_args)| (*ip, *stack_args))
1212    }
1213
    /// Create an empty chunk: every pool starts empty, `body_start_ip` is 0,
    /// and `ops` is pre-sized (256 slots) to avoid early regrowth while emitting.
    pub fn new() -> Self {
        Self {
            // Instruction stream plus its per-op parallel metadata vectors
            // (`lines`, `op_ast_expr` — kept in lock-step by `emit_with_ast_idx`).
            ops: Vec::with_capacity(256),
            constants: Vec::new(),
            names: Vec::new(),
            lines: Vec::new(),
            op_ast_expr: Vec::new(),
            ast_expr_pool: Vec::new(),
            sub_entries: Vec::new(),
            blocks: Vec::new(),
            block_bytecode_ranges: Vec::new(),
            static_sub_calls: Vec::new(),
            lvalues: Vec::new(),
            ast_eval_exprs: Vec::new(),
            body_start_ip: 0,
            // Declaration pools (structs/enums/classes/traits).
            struct_defs: Vec::new(),
            enum_defs: Vec::new(),
            class_defs: Vec::new(),
            trait_defs: Vec::new(),
            given_entries: Vec::new(),
            given_topic_bytecode_ranges: Vec::new(),
            eval_timeout_entries: Vec::new(),
            eval_timeout_expr_bytecode_ranges: Vec::new(),
            algebraic_match_entries: Vec::new(),
            algebraic_match_subject_bytecode_ranges: Vec::new(),
            runtime_sub_decls: Vec::new(),
            runtime_advice_decls: Vec::new(),
            code_ref_sigs: Vec::new(),
            // Interpreter-evaluated expression pools (see the field docs on the
            // struct for which builtin each one backs).
            par_lines_entries: Vec::new(),
            par_walk_entries: Vec::new(),
            pwatch_entries: Vec::new(),
            substr_four_arg_entries: Vec::new(),
            keys_expr_entries: Vec::new(),
            keys_expr_bytecode_ranges: Vec::new(),
            values_expr_entries: Vec::new(),
            values_expr_bytecode_ranges: Vec::new(),
            delete_expr_entries: Vec::new(),
            exists_expr_entries: Vec::new(),
            push_expr_entries: Vec::new(),
            pop_expr_entries: Vec::new(),
            shift_expr_entries: Vec::new(),
            unshift_expr_entries: Vec::new(),
            splice_expr_entries: Vec::new(),
            map_expr_entries: Vec::new(),
            map_expr_bytecode_ranges: Vec::new(),
            grep_expr_entries: Vec::new(),
            grep_expr_bytecode_ranges: Vec::new(),
            regex_flip_flop_rhs_expr_entries: Vec::new(),
            regex_flip_flop_rhs_expr_bytecode_ranges: Vec::new(),
            flip_flop_slots: 0,
            format_decls: Vec::new(),
            use_overload_entries: Vec::new(),
        }
    }
1268
1269    /// Pool index for [`Op::FormatDecl`].
1270    pub fn add_format_decl(&mut self, name: String, lines: Vec<String>) -> u16 {
1271        let idx = self.format_decls.len() as u16;
1272        self.format_decls.push((name, lines));
1273        idx
1274    }
1275
1276    /// Pool index for [`Op::UseOverload`].
1277    pub fn add_use_overload(&mut self, pairs: Vec<(String, String)>) -> u16 {
1278        let idx = self.use_overload_entries.len() as u16;
1279        self.use_overload_entries.push(pairs);
1280        idx
1281    }
1282
1283    /// Allocate a slot index for [`Op::ScalarFlipFlop`] / [`Op::RegexFlipFlop`] / [`Op::RegexEofFlipFlop`] /
1284    /// [`Op::RegexFlipFlopExprRhs`] / [`Op::RegexFlipFlopDotLineRhs`] flip-flop state.
1285    pub fn alloc_flip_flop_slot(&mut self) -> u16 {
1286        let id = self.flip_flop_slots;
1287        self.flip_flop_slots = self.flip_flop_slots.saturating_add(1);
1288        id
1289    }
1290
1291    /// `map EXPR, LIST` — pool index for [`Op::MapWithExpr`].
1292    pub fn add_map_expr_entry(&mut self, expr: Expr) -> u16 {
1293        let idx = self.map_expr_entries.len() as u16;
1294        self.map_expr_entries.push(expr);
1295        idx
1296    }
1297
1298    /// `grep EXPR, LIST` — pool index for [`Op::GrepWithExpr`].
1299    pub fn add_grep_expr_entry(&mut self, expr: Expr) -> u16 {
1300        let idx = self.grep_expr_entries.len() as u16;
1301        self.grep_expr_entries.push(expr);
1302        idx
1303    }
1304
1305    /// Regex flip-flop with compound RHS — pool index for [`Op::RegexFlipFlopExprRhs`].
1306    pub fn add_regex_flip_flop_rhs_expr_entry(&mut self, expr: Expr) -> u16 {
1307        let idx = self.regex_flip_flop_rhs_expr_entries.len() as u16;
1308        self.regex_flip_flop_rhs_expr_entries.push(expr);
1309        idx
1310    }
1311
1312    /// `keys EXPR` (dynamic) — pool index for [`Op::KeysExpr`].
1313    pub fn add_keys_expr_entry(&mut self, expr: Expr) -> u16 {
1314        let idx = self.keys_expr_entries.len() as u16;
1315        self.keys_expr_entries.push(expr);
1316        idx
1317    }
1318
1319    /// `values EXPR` (dynamic) — pool index for [`Op::ValuesExpr`].
1320    pub fn add_values_expr_entry(&mut self, expr: Expr) -> u16 {
1321        let idx = self.values_expr_entries.len() as u16;
1322        self.values_expr_entries.push(expr);
1323        idx
1324    }
1325
1326    /// `delete EXPR` (dynamic operand) — pool index for [`Op::DeleteExpr`].
1327    pub fn add_delete_expr_entry(&mut self, expr: Expr) -> u16 {
1328        let idx = self.delete_expr_entries.len() as u16;
1329        self.delete_expr_entries.push(expr);
1330        idx
1331    }
1332
1333    /// `exists EXPR` (dynamic operand) — pool index for [`Op::ExistsExpr`].
1334    pub fn add_exists_expr_entry(&mut self, expr: Expr) -> u16 {
1335        let idx = self.exists_expr_entries.len() as u16;
1336        self.exists_expr_entries.push(expr);
1337        idx
1338    }
1339
1340    pub fn add_push_expr_entry(&mut self, array: Expr, values: Vec<Expr>) -> u16 {
1341        let idx = self.push_expr_entries.len() as u16;
1342        self.push_expr_entries.push((array, values));
1343        idx
1344    }
1345
1346    pub fn add_pop_expr_entry(&mut self, array: Expr) -> u16 {
1347        let idx = self.pop_expr_entries.len() as u16;
1348        self.pop_expr_entries.push(array);
1349        idx
1350    }
1351
1352    pub fn add_shift_expr_entry(&mut self, array: Expr) -> u16 {
1353        let idx = self.shift_expr_entries.len() as u16;
1354        self.shift_expr_entries.push(array);
1355        idx
1356    }
1357
1358    pub fn add_unshift_expr_entry(&mut self, array: Expr, values: Vec<Expr>) -> u16 {
1359        let idx = self.unshift_expr_entries.len() as u16;
1360        self.unshift_expr_entries.push((array, values));
1361        idx
1362    }
1363
1364    pub fn add_splice_expr_entry(
1365        &mut self,
1366        array: Expr,
1367        offset: Option<Expr>,
1368        length: Option<Expr>,
1369        replacement: Vec<Expr>,
1370    ) -> u16 {
1371        let idx = self.splice_expr_entries.len() as u16;
1372        self.splice_expr_entries
1373            .push((array, offset, length, replacement));
1374        idx
1375    }
1376
1377    /// Four-arg `substr` — returns pool index for [`Op::SubstrFourArg`].
1378    pub fn add_substr_four_arg_entry(
1379        &mut self,
1380        string: Expr,
1381        offset: Expr,
1382        length: Option<Expr>,
1383        replacement: Expr,
1384    ) -> u16 {
1385        let idx = self.substr_four_arg_entries.len() as u16;
1386        self.substr_four_arg_entries
1387            .push((string, offset, length, replacement));
1388        idx
1389    }
1390
1391    /// `par_lines PATH, fn { } [, progress => EXPR]` — returns pool index for [`Op::ParLines`].
1392    pub fn add_par_lines_entry(
1393        &mut self,
1394        path: Expr,
1395        callback: Expr,
1396        progress: Option<Expr>,
1397    ) -> u16 {
1398        let idx = self.par_lines_entries.len() as u16;
1399        self.par_lines_entries.push((path, callback, progress));
1400        idx
1401    }
1402
1403    /// `par_walk PATH, fn { } [, progress => EXPR]` — returns pool index for [`Op::ParWalk`].
1404    pub fn add_par_walk_entry(
1405        &mut self,
1406        path: Expr,
1407        callback: Expr,
1408        progress: Option<Expr>,
1409    ) -> u16 {
1410        let idx = self.par_walk_entries.len() as u16;
1411        self.par_walk_entries.push((path, callback, progress));
1412        idx
1413    }
1414
1415    /// `pwatch GLOB, fn { }` — returns pool index for [`Op::Pwatch`].
1416    pub fn add_pwatch_entry(&mut self, path: Expr, callback: Expr) -> u16 {
1417        let idx = self.pwatch_entries.len() as u16;
1418        self.pwatch_entries.push((path, callback));
1419        idx
1420    }
1421
1422    /// `given (EXPR) { ... }` — returns pool index for [`Op::Given`].
1423    pub fn add_given_entry(&mut self, topic: Expr, body: Block) -> u16 {
1424        let idx = self.given_entries.len() as u16;
1425        self.given_entries.push((topic, body));
1426        idx
1427    }
1428
1429    /// `eval_timeout SECS { ... }` — returns pool index for [`Op::EvalTimeout`].
1430    pub fn add_eval_timeout_entry(&mut self, timeout: Expr, body: Block) -> u16 {
1431        let idx = self.eval_timeout_entries.len() as u16;
1432        self.eval_timeout_entries.push((timeout, body));
1433        idx
1434    }
1435
1436    /// Algebraic `match` — returns pool index for [`Op::AlgebraicMatch`].
1437    pub fn add_algebraic_match_entry(&mut self, subject: Expr, arms: Vec<MatchArm>) -> u16 {
1438        let idx = self.algebraic_match_entries.len() as u16;
1439        self.algebraic_match_entries.push((subject, arms));
1440        idx
1441    }
1442
1443    /// Store an AST block and return its index.
1444    pub fn add_block(&mut self, block: Block) -> u16 {
1445        let idx = self.blocks.len() as u16;
1446        self.blocks.push(block);
1447        idx
1448    }
1449
1450    /// Pool index for [`Op::MakeCodeRef`] signature (`stryke` extension); use empty vec for legacy `fn { }`.
1451    pub fn add_code_ref_sig(&mut self, params: Vec<SubSigParam>) -> u16 {
1452        let idx = self.code_ref_sigs.len();
1453        if idx > u16::MAX as usize {
1454            panic!("too many anonymous sub signatures in one chunk");
1455        }
1456        self.code_ref_sigs.push(params);
1457        idx as u16
1458    }
1459
1460    /// Store an assignable expression (LHS of `s///` / `tr///`) and return its index.
1461    pub fn add_lvalue_expr(&mut self, e: Expr) -> u16 {
1462        let idx = self.lvalues.len() as u16;
1463        self.lvalues.push(e);
1464        idx
1465    }
1466
1467    /// Intern a name, returning its pool index.
1468    pub fn intern_name(&mut self, name: &str) -> u16 {
1469        if let Some(idx) = self.names.iter().position(|n| n == name) {
1470            return idx as u16;
1471        }
1472        let idx = self.names.len() as u16;
1473        self.names.push(name.to_string());
1474        idx
1475    }
1476
1477    /// Add a constant to the pool, returning its index.
1478    pub fn add_constant(&mut self, val: StrykeValue) -> u16 {
1479        // Dedup string constants
1480        if let Some(ref s) = val.as_str() {
1481            for (i, c) in self.constants.iter().enumerate() {
1482                if let Some(cs) = c.as_str() {
1483                    if cs == *s {
1484                        return i as u16;
1485                    }
1486                }
1487            }
1488        }
1489        let idx = self.constants.len() as u16;
1490        self.constants.push(val);
1491        idx
1492    }
1493
    /// Append an op with source line info.
    ///
    /// Thin wrapper over [`Self::emit_with_ast_idx`] with no AST expression
    /// attached; returns the index of the op just appended.
    #[inline]
    pub fn emit(&mut self, op: Op, line: usize) -> usize {
        self.emit_with_ast_idx(op, line, None)
    }
1499
    /// Like [`Self::emit`] but attach an optional interned AST [`Expr`] pool index (see [`Self::op_ast_expr`]).
    ///
    /// Pushes onto `ops`, `lines`, and `op_ast_expr` together — these are
    /// parallel vectors indexed by op position, so all three must stay the
    /// same length. Returns the index of the appended op.
    #[inline]
    pub fn emit_with_ast_idx(&mut self, op: Op, line: usize, ast: Option<u32>) -> usize {
        let idx = self.ops.len();
        self.ops.push(op);
        self.lines.push(line);
        self.op_ast_expr.push(ast);
        idx
    }
1509
1510    /// Resolve the originating expression for an instruction pointer, if recorded.
1511    #[inline]
1512    pub fn ast_expr_at(&self, ip: usize) -> Option<&Expr> {
1513        let id = (*self.op_ast_expr.get(ip)?)?;
1514        self.ast_expr_pool.get(id as usize)
1515    }
1516
1517    /// Patch a jump instruction at `idx` to target the current position.
1518    pub fn patch_jump_here(&mut self, idx: usize) {
1519        let target = self.ops.len();
1520        self.patch_jump_to(idx, target);
1521    }
1522
1523    /// Patch a jump instruction at `idx` to target an explicit op address.
1524    pub fn patch_jump_to(&mut self, idx: usize, target: usize) {
1525        match &mut self.ops[idx] {
1526            Op::Jump(ref mut t)
1527            | Op::JumpIfTrue(ref mut t)
1528            | Op::JumpIfFalse(ref mut t)
1529            | Op::JumpIfFalseKeep(ref mut t)
1530            | Op::JumpIfTrueKeep(ref mut t)
1531            | Op::JumpIfDefinedKeep(ref mut t) => *t = target,
1532            _ => panic!("patch_jump_to on non-jump op at {}", idx),
1533        }
1534    }
1535
1536    pub fn patch_try_push_catch(&mut self, idx: usize, catch_ip: usize) {
1537        match &mut self.ops[idx] {
1538            Op::TryPush { catch_ip: c, .. } => *c = catch_ip,
1539            _ => panic!("patch_try_push_catch on non-TryPush op at {}", idx),
1540        }
1541    }
1542
1543    pub fn patch_try_push_finally(&mut self, idx: usize, finally_ip: Option<usize>) {
1544        match &mut self.ops[idx] {
1545            Op::TryPush { finally_ip: f, .. } => *f = finally_ip,
1546            _ => panic!("patch_try_push_finally on non-TryPush op at {}", idx),
1547        }
1548    }
1549
1550    pub fn patch_try_push_after(&mut self, idx: usize, after_ip: usize) {
1551        match &mut self.ops[idx] {
1552            Op::TryPush { after_ip: a, .. } => *a = after_ip,
1553            _ => panic!("patch_try_push_after on non-TryPush op at {}", idx),
1554        }
1555    }
1556
    /// Current op count (next emit position).
    ///
    /// Useful as the target for forward jumps about to be emitted.
    #[inline]
    pub fn len(&self) -> usize {
        self.ops.len()
    }
1562
    /// `true` when no ops have been emitted yet (conventional companion to [`Self::len`]).
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.ops.is_empty()
    }
1567
1568    /// Human-readable listing: subroutine entry points and each op with its source line (javap / `dis`-style).
1569    pub fn disassemble(&self) -> String {
1570        use std::fmt::Write;
1571        let mut out = String::new();
1572        for (i, n) in self.names.iter().enumerate() {
1573            let _ = writeln!(out, "; name[{}] = {}", i, n);
1574        }
1575        let _ = writeln!(out, "; sub_entries:");
1576        for (ni, ip, stack_args) in &self.sub_entries {
1577            let name = self
1578                .names
1579                .get(*ni as usize)
1580                .map(|s| s.as_str())
1581                .unwrap_or("?");
1582            let _ = writeln!(out, ";   {} @ {} stack_args={}", name, ip, stack_args);
1583        }
1584        for (i, op) in self.ops.iter().enumerate() {
1585            let line = self.lines.get(i).copied().unwrap_or(0);
1586            let ast = self
1587                .op_ast_expr
1588                .get(i)
1589                .copied()
1590                .flatten()
1591                .map(|id| id.to_string())
1592                .unwrap_or_else(|| "-".into());
1593            let _ = writeln!(out, "{:04} {:>5} {:>6}  {:?}", i, line, ast, op);
1594        }
1595        out
1596    }
1597
1598    /// Peephole pass: fuse common multi-op sequences into single superinstructions,
1599    /// then compact by removing Nop slots and remapping all jump targets.
1600    pub fn peephole_fuse(&mut self) {
1601        let len = self.ops.len();
1602        if len < 2 {
1603            return;
1604        }
1605        // Pass 1: fuse OP + Pop → OPVoid
1606        let mut i = 0;
1607        while i + 1 < len {
1608            if matches!(self.ops[i + 1], Op::Pop) {
1609                let replacement = match &self.ops[i] {
1610                    Op::AddAssignSlotSlot(d, s) => Some(Op::AddAssignSlotSlotVoid(*d, *s)),
1611                    Op::PreIncSlot(s) => Some(Op::PreIncSlotVoid(*s)),
1612                    Op::ConcatAppendSlot(s) => Some(Op::ConcatAppendSlotVoid(*s)),
1613                    _ => None,
1614                };
1615                if let Some(op) = replacement {
1616                    self.ops[i] = op;
1617                    self.ops[i + 1] = Op::Nop;
1618                    i += 2;
1619                    continue;
1620                }
1621            }
1622            i += 1;
1623        }
1624        // Pass 2: fuse multi-op patterns
1625        // Helper: check if any jump targets position `pos`.
1626        let has_jump_to = |ops: &[Op], pos: usize| -> bool {
1627            for op in ops {
1628                let t = match op {
1629                    Op::Jump(t)
1630                    | Op::JumpIfFalse(t)
1631                    | Op::JumpIfTrue(t)
1632                    | Op::JumpIfFalseKeep(t)
1633                    | Op::JumpIfTrueKeep(t)
1634                    | Op::JumpIfDefinedKeep(t) => Some(*t),
1635                    _ => None,
1636                };
1637                if t == Some(pos) {
1638                    return true;
1639                }
1640            }
1641            false
1642        };
1643        let len = self.ops.len();
1644        if len >= 4 {
1645            i = 0;
1646            while i + 3 < len {
1647                if let (
1648                    Op::GetScalarSlot(slot),
1649                    Op::LoadInt(n),
1650                    Op::NumLt,
1651                    Op::JumpIfFalse(target),
1652                ) = (
1653                    &self.ops[i],
1654                    &self.ops[i + 1],
1655                    &self.ops[i + 2],
1656                    &self.ops[i + 3],
1657                ) {
1658                    if let Ok(n32) = i32::try_from(*n) {
1659                        // Don't fuse if any jump targets the ops that will become Nop.
1660                        // This prevents breaking short-circuit &&/|| that jump to the
1661                        // JumpIfFalse for the while condition exit check.
1662                        if has_jump_to(&self.ops, i + 1)
1663                            || has_jump_to(&self.ops, i + 2)
1664                            || has_jump_to(&self.ops, i + 3)
1665                        {
1666                            i += 1;
1667                            continue;
1668                        }
1669                        let slot = *slot;
1670                        let target = *target;
1671                        self.ops[i] = Op::SlotLtIntJumpIfFalse(slot, n32, target);
1672                        self.ops[i + 1] = Op::Nop;
1673                        self.ops[i + 2] = Op::Nop;
1674                        self.ops[i + 3] = Op::Nop;
1675                        i += 4;
1676                        continue;
1677                    }
1678                }
1679                i += 1;
1680            }
1681        }
1682        // Compact once so that pass 3 sees a Nop-free op stream and can match
1683        // adjacent `PreIncSlotVoid + Jump` backedges produced by passes 1/2.
1684        self.compact_nops();
1685        // Pass 3: fuse loop backedge
1686        //   PreIncSlotVoid(s)  + Jump(top)
1687        // where ops[top] is SlotLtIntJumpIfFalse(s, limit, exit)
1688        // becomes
1689        //   SlotIncLtIntJumpBack(s, limit, top + 1)   // body falls through
1690        //   Nop                                       // was Jump
1691        // The first-iteration check at `top` is still reached from before the loop
1692        // (the loop's initial entry goes through the top test), so leaving
1693        // SlotLtIntJumpIfFalse in place keeps the entry path correct. All
1694        // subsequent iterations now skip both the inc op and the jump.
1695        let len = self.ops.len();
1696        if len >= 2 {
1697            let mut i = 0;
1698            while i + 1 < len {
1699                if let (Op::PreIncSlotVoid(s), Op::Jump(top)) = (&self.ops[i], &self.ops[i + 1]) {
1700                    let slot = *s;
1701                    let top = *top;
1702                    // Only fuse backward branches — the C-style `for` shape where `top` is
1703                    // the loop's `SlotLtIntJumpIfFalse` test and the body falls through to
1704                    // this trailing increment. A forward `Jump` that happens to land on a
1705                    // similar test is not the same shape and must not be rewritten.
1706                    if top < i {
1707                        if let Op::SlotLtIntJumpIfFalse(tslot, limit, exit) = &self.ops[top] {
1708                            // Safety: the top test's exit target must equal the fused op's
1709                            // fall-through (i + 2). Otherwise exiting the loop via
1710                            // "condition false" would land somewhere the unfused shape never
1711                            // exited to.
1712                            if *tslot == slot && *exit == i + 2 {
1713                                let limit = *limit;
1714                                let body_target = top + 1;
1715                                self.ops[i] = Op::SlotIncLtIntJumpBack(slot, limit, body_target);
1716                                self.ops[i + 1] = Op::Nop;
1717                                i += 2;
1718                                continue;
1719                            }
1720                        }
1721                    }
1722                }
1723                i += 1;
1724            }
1725        }
1726        // Pass 4: compact again — remove the Nops introduced by pass 3.
1727        self.compact_nops();
1728        // Pass 5: fuse counted-loop bodies down to a single native superinstruction.
1729        //
1730        // After pass 3 + compact, a `for (my $i = ..; $i < N; $i = $i + 1) { $sum += $i }`
1731        // loop looks like:
1732        //
1733        //     [top]        SlotLtIntJumpIfFalse(i, N, exit)
1734        //     [body_start] AddAssignSlotSlotVoid(sum, i)       ← target of the backedge
1735        //                  SlotIncLtIntJumpBack(i, N, body_start)
1736        //     [exit]       ...
1737        //
1738        // When the body is exactly one op, we fuse the AddAssign + backedge into
1739        // `AccumSumLoop(sum, i, N)`, whose handler runs the whole remaining loop in a
1740        // tight Rust `while`. Same scheme for the counted `$s .= CONST` pattern, fused
1741        // into `ConcatConstSlotLoop`.
1742        //
1743        // Safety gate: only fire when no op jumps *into* the body (other than the backedge
1744        // itself and the top test's fall-through, which isn't a jump). That keeps loops with
1745        // interior labels / `last LABEL` / `next LABEL` from being silently skipped.
1746        let len = self.ops.len();
1747        if len >= 2 {
1748            let has_inbound_jump = |ops: &[Op], pos: usize, ignore: usize| -> bool {
1749                for (j, op) in ops.iter().enumerate() {
1750                    if j == ignore {
1751                        continue;
1752                    }
1753                    let t = match op {
1754                        Op::Jump(t)
1755                        | Op::JumpIfFalse(t)
1756                        | Op::JumpIfTrue(t)
1757                        | Op::JumpIfFalseKeep(t)
1758                        | Op::JumpIfTrueKeep(t)
1759                        | Op::JumpIfDefinedKeep(t) => Some(*t),
1760                        Op::SlotLtIntJumpIfFalse(_, _, t) => Some(*t),
1761                        Op::SlotIncLtIntJumpBack(_, _, t) => Some(*t),
1762                        _ => None,
1763                    };
1764                    if t == Some(pos) {
1765                        return true;
1766                    }
1767                }
1768                false
1769            };
1770            // 5a: AddAssignSlotSlotVoid + SlotIncLtIntJumpBack → AccumSumLoop
1771            let mut i = 0;
1772            while i + 1 < len {
1773                if let (
1774                    Op::AddAssignSlotSlotVoid(sum_slot, src_slot),
1775                    Op::SlotIncLtIntJumpBack(inc_slot, limit, body_target),
1776                ) = (&self.ops[i], &self.ops[i + 1])
1777                {
1778                    if *src_slot == *inc_slot
1779                        && *body_target == i
1780                        && !has_inbound_jump(&self.ops, i, i + 1)
1781                        && !has_inbound_jump(&self.ops, i + 1, i + 1)
1782                    {
1783                        let sum_slot = *sum_slot;
1784                        let src_slot = *src_slot;
1785                        let limit = *limit;
1786                        self.ops[i] = Op::AccumSumLoop(sum_slot, src_slot, limit);
1787                        self.ops[i + 1] = Op::Nop;
1788                        i += 2;
1789                        continue;
1790                    }
1791                }
1792                i += 1;
1793            }
1794            // 5b: LoadConst + ConcatAppendSlotVoid + SlotIncLtIntJumpBack → ConcatConstSlotLoop
1795            if len >= 3 {
1796                let mut i = 0;
1797                while i + 2 < len {
1798                    if let (
1799                        Op::LoadConst(const_idx),
1800                        Op::ConcatAppendSlotVoid(s_slot),
1801                        Op::SlotIncLtIntJumpBack(inc_slot, limit, body_target),
1802                    ) = (&self.ops[i], &self.ops[i + 1], &self.ops[i + 2])
1803                    {
1804                        if *body_target == i
1805                            && !has_inbound_jump(&self.ops, i, i + 2)
1806                            && !has_inbound_jump(&self.ops, i + 1, i + 2)
1807                            && !has_inbound_jump(&self.ops, i + 2, i + 2)
1808                        {
1809                            let const_idx = *const_idx;
1810                            let s_slot = *s_slot;
1811                            let inc_slot = *inc_slot;
1812                            let limit = *limit;
1813                            self.ops[i] =
1814                                Op::ConcatConstSlotLoop(const_idx, s_slot, inc_slot, limit);
1815                            self.ops[i + 1] = Op::Nop;
1816                            self.ops[i + 2] = Op::Nop;
1817                            i += 3;
1818                            continue;
1819                        }
1820                    }
1821                    i += 1;
1822                }
1823            }
1824            // 5e: `$sum += $h{$k}` body op inside `for my $k (keys %h) { ... }`
1825            //   GetScalarSlot(sum) + GetScalarPlain(k) + GetHashElem(h) + Add
1826            //     + SetScalarSlotKeep(sum) + Pop
1827            //   → AddHashElemPlainKeyToSlot(sum, k, h)
1828            // Safe because `SetScalarSlotKeep + Pop` leaves nothing on the stack net; the fused
1829            // op is a drop-in for that sequence. No inbound jumps permitted to interior ops.
1830            if len >= 6 {
1831                let mut i = 0;
1832                while i + 5 < len {
1833                    if let (
1834                        Op::GetScalarSlot(sum_slot),
1835                        Op::GetScalarPlain(k_idx),
1836                        Op::GetHashElem(h_idx),
1837                        Op::Add,
1838                        Op::SetScalarSlotKeep(sum_slot2),
1839                        Op::Pop,
1840                    ) = (
1841                        &self.ops[i],
1842                        &self.ops[i + 1],
1843                        &self.ops[i + 2],
1844                        &self.ops[i + 3],
1845                        &self.ops[i + 4],
1846                        &self.ops[i + 5],
1847                    ) {
1848                        if *sum_slot == *sum_slot2
1849                            && (0..6).all(|off| !has_inbound_jump(&self.ops, i + off, usize::MAX))
1850                        {
1851                            let sum_slot = *sum_slot;
1852                            let k_idx = *k_idx;
1853                            let h_idx = *h_idx;
1854                            self.ops[i] = Op::AddHashElemPlainKeyToSlot(sum_slot, k_idx, h_idx);
1855                            for off in 1..=5 {
1856                                self.ops[i + off] = Op::Nop;
1857                            }
1858                            i += 6;
1859                            continue;
1860                        }
1861                    }
1862                    i += 1;
1863                }
1864            }
1865            // 5e-slot: slot-key variant of 5e, emitted when the compiler lowers `$k` (the foreach
1866            // loop variable) into a slot rather than a frame scalar.
1867            //   GetScalarSlot(sum) + GetScalarSlot(k) + GetHashElem(h) + Add
1868            //     + SetScalarSlotKeep(sum) + Pop
1869            //   → AddHashElemSlotKeyToSlot(sum, k, h)
1870            if len >= 6 {
1871                let mut i = 0;
1872                while i + 5 < len {
1873                    if let (
1874                        Op::GetScalarSlot(sum_slot),
1875                        Op::GetScalarSlot(k_slot),
1876                        Op::GetHashElem(h_idx),
1877                        Op::Add,
1878                        Op::SetScalarSlotKeep(sum_slot2),
1879                        Op::Pop,
1880                    ) = (
1881                        &self.ops[i],
1882                        &self.ops[i + 1],
1883                        &self.ops[i + 2],
1884                        &self.ops[i + 3],
1885                        &self.ops[i + 4],
1886                        &self.ops[i + 5],
1887                    ) {
1888                        if *sum_slot == *sum_slot2
1889                            && *sum_slot != *k_slot
1890                            && (0..6).all(|off| !has_inbound_jump(&self.ops, i + off, usize::MAX))
1891                        {
1892                            let sum_slot = *sum_slot;
1893                            let k_slot = *k_slot;
1894                            let h_idx = *h_idx;
1895                            self.ops[i] = Op::AddHashElemSlotKeyToSlot(sum_slot, k_slot, h_idx);
1896                            for off in 1..=5 {
1897                                self.ops[i + off] = Op::Nop;
1898                            }
1899                            i += 6;
1900                            continue;
1901                        }
1902                    }
1903                    i += 1;
1904                }
1905            }
1906            // 5d: counted hash-insert loop `$h{$i} = $i * K`
1907            //   GetScalarSlot(i) + LoadInt(k) + Mul + GetScalarSlot(i) + SetHashElem(h) + Pop
1908            //     + SlotIncLtIntJumpBack(i, limit, body_target)
1909            //   → SetHashIntTimesLoop(h, i, k, limit)
1910            if len >= 7 {
1911                let mut i = 0;
1912                while i + 6 < len {
1913                    if let (
1914                        Op::GetScalarSlot(gs1),
1915                        Op::LoadInt(k),
1916                        Op::Mul,
1917                        Op::GetScalarSlot(gs2),
1918                        Op::SetHashElem(h_idx),
1919                        Op::Pop,
1920                        Op::SlotIncLtIntJumpBack(inc_slot, limit, body_target),
1921                    ) = (
1922                        &self.ops[i],
1923                        &self.ops[i + 1],
1924                        &self.ops[i + 2],
1925                        &self.ops[i + 3],
1926                        &self.ops[i + 4],
1927                        &self.ops[i + 5],
1928                        &self.ops[i + 6],
1929                    ) {
1930                        if *gs1 == *inc_slot
1931                            && *gs2 == *inc_slot
1932                            && *body_target == i
1933                            && i32::try_from(*k).is_ok()
1934                            && (0..6).all(|off| !has_inbound_jump(&self.ops, i + off, i + 6))
1935                            && !has_inbound_jump(&self.ops, i + 6, i + 6)
1936                        {
1937                            let h_idx = *h_idx;
1938                            let inc_slot = *inc_slot;
1939                            let k32 = *k as i32;
1940                            let limit = *limit;
1941                            self.ops[i] = Op::SetHashIntTimesLoop(h_idx, inc_slot, k32, limit);
1942                            for off in 1..=6 {
1943                                self.ops[i + off] = Op::Nop;
1944                            }
1945                            i += 7;
1946                            continue;
1947                        }
1948                    }
1949                    i += 1;
1950                }
1951            }
1952            // 5c: GetScalarSlot + PushArray + ArrayLen + Pop + SlotIncLtIntJumpBack
1953            //      → PushIntRangeToArrayLoop
1954            // This is the compiler's `push @a, $i; $i++` shape in void context, where
1955            // the `push` expression's length return is pushed by `ArrayLen` and then `Pop`ped.
1956            if len >= 5 {
1957                let mut i = 0;
1958                while i + 4 < len {
1959                    if let (
1960                        Op::GetScalarSlot(get_slot),
1961                        Op::PushArray(push_idx),
1962                        Op::ArrayLen(len_idx),
1963                        Op::Pop,
1964                        Op::SlotIncLtIntJumpBack(inc_slot, limit, body_target),
1965                    ) = (
1966                        &self.ops[i],
1967                        &self.ops[i + 1],
1968                        &self.ops[i + 2],
1969                        &self.ops[i + 3],
1970                        &self.ops[i + 4],
1971                    ) {
1972                        if *get_slot == *inc_slot
1973                            && *push_idx == *len_idx
1974                            && *body_target == i
1975                            && !has_inbound_jump(&self.ops, i, i + 4)
1976                            && !has_inbound_jump(&self.ops, i + 1, i + 4)
1977                            && !has_inbound_jump(&self.ops, i + 2, i + 4)
1978                            && !has_inbound_jump(&self.ops, i + 3, i + 4)
1979                            && !has_inbound_jump(&self.ops, i + 4, i + 4)
1980                        {
1981                            let push_idx = *push_idx;
1982                            let inc_slot = *inc_slot;
1983                            let limit = *limit;
1984                            self.ops[i] = Op::PushIntRangeToArrayLoop(push_idx, inc_slot, limit);
1985                            self.ops[i + 1] = Op::Nop;
1986                            self.ops[i + 2] = Op::Nop;
1987                            self.ops[i + 3] = Op::Nop;
1988                            self.ops[i + 4] = Op::Nop;
1989                            i += 5;
1990                            continue;
1991                        }
1992                    }
1993                    i += 1;
1994                }
1995            }
1996        }
1997        // Pass 6: compact — remove the Nops pass 5 introduced.
1998        self.compact_nops();
1999        // Pass 7: fuse the entire `for my $k (keys %h) { $sum += $h{$k} }` loop into a single
2000        // `SumHashValuesToSlot` op that walks the hash's values in a tight native loop.
2001        //
2002        // After prior passes and compaction the shape is a 15-op block:
2003        //
2004        //     HashKeys(h)
2005        //     DeclareArray(list)
2006        //     LoadInt(0)
2007        //     DeclareScalarSlot(c, cname)
2008        //     LoadUndef
2009        //     DeclareScalarSlot(v, vname)
2010        //     [top]  GetScalarSlot(c)
2011        //            ArrayLen(list)
2012        //            NumLt
2013        //            JumpIfFalse(end)
2014        //            GetScalarSlot(c)
2015        //            GetArrayElem(list)
2016        //            SetScalarSlot(v)
2017        //            AddHashElemSlotKeyToSlot(sum, v, h)     ← fused body (pass 5e-slot)
2018        //            PreIncSlotVoid(c)
2019        //            Jump(top)
2020        //     [end]
2021        //
2022        // The counter (`__foreach_i__`), list (`__foreach_list__`), and loop var (`$k`) live
2023        // inside a `PushFrame`-isolated scope and are invisible after the loop — it is safe to
2024        // elide all of them. The fused op accumulates directly into `sum` without creating the
2025        // keys array at all.
2026        //
2027        // Safety gates:
2028        //   - `h` in HashKeys must match `h` in AddHashElemSlotKeyToSlot.
2029        //   - `list` in DeclareArray must match the loop `ArrayLen` / `GetArrayElem`.
2030        //   - `c` / `v` slots must be consistent throughout.
2031        //   - No inbound jump lands inside the 15-op window from the outside.
2032        //   - JumpIfFalse target must be i+15 (just past the Jump back-edge).
2033        //   - Jump back-edge target must be i+6 (the GetScalarSlot(c) at loop top).
2034        let len = self.ops.len();
2035        if len >= 15 {
2036            let has_inbound_jump =
2037                |ops: &[Op], pos: usize, ignore_from: usize, ignore_to: usize| -> bool {
2038                    for (j, op) in ops.iter().enumerate() {
2039                        if j >= ignore_from && j <= ignore_to {
2040                            continue;
2041                        }
2042                        let t = match op {
2043                            Op::Jump(t)
2044                            | Op::JumpIfFalse(t)
2045                            | Op::JumpIfTrue(t)
2046                            | Op::JumpIfFalseKeep(t)
2047                            | Op::JumpIfTrueKeep(t)
2048                            | Op::JumpIfDefinedKeep(t) => *t,
2049                            Op::SlotLtIntJumpIfFalse(_, _, t) => *t,
2050                            Op::SlotIncLtIntJumpBack(_, _, t) => *t,
2051                            _ => continue,
2052                        };
2053                        if t == pos {
2054                            return true;
2055                        }
2056                    }
2057                    false
2058                };
2059            let mut i = 0;
2060            while i + 15 < len {
2061                if let (
2062                    Op::HashKeys(h_idx),
2063                    Op::DeclareArray(list_idx),
2064                    Op::LoadInt(0),
2065                    Op::DeclareScalarSlot(c_slot, _c_name),
2066                    Op::LoadUndef,
2067                    Op::DeclareScalarSlot(v_slot, _v_name),
2068                    Op::GetScalarSlot(c_get1),
2069                    Op::ArrayLen(len_idx),
2070                    Op::NumLt,
2071                    Op::JumpIfFalse(end_tgt),
2072                    Op::GetScalarSlot(c_get2),
2073                    Op::GetArrayElem(elem_idx),
2074                    Op::SetScalarSlot(v_set),
2075                    Op::AddHashElemSlotKeyToSlot(sum_slot, v_in_body, h_in_body),
2076                    Op::PreIncSlotVoid(c_inc),
2077                    Op::Jump(top_tgt),
2078                ) = (
2079                    &self.ops[i],
2080                    &self.ops[i + 1],
2081                    &self.ops[i + 2],
2082                    &self.ops[i + 3],
2083                    &self.ops[i + 4],
2084                    &self.ops[i + 5],
2085                    &self.ops[i + 6],
2086                    &self.ops[i + 7],
2087                    &self.ops[i + 8],
2088                    &self.ops[i + 9],
2089                    &self.ops[i + 10],
2090                    &self.ops[i + 11],
2091                    &self.ops[i + 12],
2092                    &self.ops[i + 13],
2093                    &self.ops[i + 14],
2094                    &self.ops[i + 15],
2095                ) {
2096                    let full_end = i + 15;
2097                    if *list_idx == *len_idx
2098                        && *list_idx == *elem_idx
2099                        && *c_slot == *c_get1
2100                        && *c_slot == *c_get2
2101                        && *c_slot == *c_inc
2102                        && *v_slot == *v_set
2103                        && *v_slot == *v_in_body
2104                        && *h_idx == *h_in_body
2105                        && *top_tgt == i + 6
2106                        && *end_tgt == i + 16
2107                        && *sum_slot != *c_slot
2108                        && *sum_slot != *v_slot
2109                        && !(i..=full_end).any(|k| has_inbound_jump(&self.ops, k, i, full_end))
2110                    {
2111                        let sum_slot = *sum_slot;
2112                        let h_idx = *h_idx;
2113                        self.ops[i] = Op::SumHashValuesToSlot(sum_slot, h_idx);
2114                        for off in 1..=15 {
2115                            self.ops[i + off] = Op::Nop;
2116                        }
2117                        i += 16;
2118                        continue;
2119                    }
2120                }
2121                i += 1;
2122            }
2123        }
2124        // Pass 8: compact pass 7's Nops.
2125        self.compact_nops();
2126    }
2127
2128    /// Remove all `Nop` instructions and remap jump targets + metadata indices.
2129    fn compact_nops(&mut self) {
2130        let old_len = self.ops.len();
2131        // Build old→new index mapping.
2132        let mut remap = vec![0usize; old_len + 1];
2133        let mut new_idx = 0usize;
2134        for (old, slot) in remap[..old_len].iter_mut().enumerate() {
2135            *slot = new_idx;
2136            if !matches!(self.ops[old], Op::Nop) {
2137                new_idx += 1;
2138            }
2139        }
2140        remap[old_len] = new_idx;
2141        if new_idx == old_len {
2142            return; // nothing to compact
2143        }
2144        // Remap jump targets in all ops.
2145        for op in &mut self.ops {
2146            match op {
2147                Op::Jump(t) | Op::JumpIfFalse(t) | Op::JumpIfTrue(t) => *t = remap[*t],
2148                Op::JumpIfFalseKeep(t) | Op::JumpIfTrueKeep(t) | Op::JumpIfDefinedKeep(t) => {
2149                    *t = remap[*t]
2150                }
2151                Op::SlotLtIntJumpIfFalse(_, _, t) => *t = remap[*t],
2152                Op::SlotIncLtIntJumpBack(_, _, t) => *t = remap[*t],
2153                _ => {}
2154            }
2155        }
2156        // Remap sub entry points.
2157        for e in &mut self.sub_entries {
2158            e.1 = remap[e.1];
2159        }
2160        // Remap `CallStaticSubId` resolved entry IPs — they were recorded by
2161        // `patch_static_sub_calls` before peephole fusion ran, so any Nop
2162        // removal in front of a sub body shifts its entry and must be
2163        // reflected here; otherwise `vm_dispatch_user_call` jumps one (or
2164        // more) ops past the real sub start and silently skips the first
2165        // instruction(s) of the body.
2166        for c in &mut self.static_sub_calls {
2167            c.0 = remap[c.0];
2168        }
2169        // Remap block/grep/sort/etc bytecode ranges.
2170        fn remap_ranges(ranges: &mut [Option<(usize, usize)>], remap: &[usize]) {
2171            for r in ranges.iter_mut().flatten() {
2172                r.0 = remap[r.0];
2173                r.1 = remap[r.1];
2174            }
2175        }
2176        remap_ranges(&mut self.block_bytecode_ranges, &remap);
2177        remap_ranges(&mut self.map_expr_bytecode_ranges, &remap);
2178        remap_ranges(&mut self.grep_expr_bytecode_ranges, &remap);
2179        remap_ranges(&mut self.keys_expr_bytecode_ranges, &remap);
2180        remap_ranges(&mut self.values_expr_bytecode_ranges, &remap);
2181        remap_ranges(&mut self.eval_timeout_expr_bytecode_ranges, &remap);
2182        remap_ranges(&mut self.given_topic_bytecode_ranges, &remap);
2183        remap_ranges(&mut self.algebraic_match_subject_bytecode_ranges, &remap);
2184        remap_ranges(&mut self.regex_flip_flop_rhs_expr_bytecode_ranges, &remap);
2185        // Compact ops, lines, op_ast_expr.
2186        let mut j = 0;
2187        for old in 0..old_len {
2188            if !matches!(self.ops[old], Op::Nop) {
2189                self.ops[j] = self.ops[old].clone();
2190                if old < self.lines.len() && j < self.lines.len() {
2191                    self.lines[j] = self.lines[old];
2192                }
2193                if old < self.op_ast_expr.len() && j < self.op_ast_expr.len() {
2194                    self.op_ast_expr[j] = self.op_ast_expr[old];
2195                }
2196                j += 1;
2197            }
2198        }
2199        self.ops.truncate(j);
2200        self.lines.truncate(j);
2201        self.op_ast_expr.truncate(j);
2202    }
2203}
2204
2205impl Default for Chunk {
2206    fn default() -> Self {
2207        Self::new()
2208    }
2209}
2210
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ast;

    /// `Chunk::default()` must be indistinguishable from `Chunk::new()`.
    ///
    /// Asserts the same field set on *both* chunks so the constructors are
    /// genuinely compared — previously each chunk was spot-checked on a
    /// different subset of fields, which never tested the "match" the test
    /// name promises.
    #[test]
    fn chunk_new_and_default_match() {
        let fresh = Chunk::new();
        let defaulted = Chunk::default();
        for chunk in [&fresh, &defaulted] {
            assert!(chunk.ops.is_empty());
            assert!(chunk.names.is_empty());
            assert!(chunk.constants.is_empty());
            assert!(chunk.lines.is_empty());
        }
    }

    /// Interning the same name twice yields one pooled entry and one index;
    /// a different name gets a fresh index.
    #[test]
    fn intern_name_deduplicates() {
        let mut chunk = Chunk::new();
        let foo_first = chunk.intern_name("foo");
        let foo_again = chunk.intern_name("foo");
        let bar = chunk.intern_name("bar");
        assert_eq!(foo_first, foo_again);
        assert_ne!(foo_first, bar);
        assert_eq!(chunk.names.len(), 2);
    }

    /// Identical string constants share a single pool slot.
    #[test]
    fn add_constant_dedups_identical_strings() {
        let mut chunk = Chunk::new();
        let first = chunk.add_constant(StrykeValue::string("x".into()));
        let second = chunk.add_constant(StrykeValue::string("x".into()));
        assert_eq!(first, second);
        assert_eq!(chunk.constants.len(), 1);
    }

    /// Distinct string constants occupy distinct pool slots.
    #[test]
    fn add_constant_distinct_strings_different_indices() {
        let mut chunk = Chunk::new();
        let a_idx = chunk.add_constant(StrykeValue::string("a".into()));
        let b_idx = chunk.add_constant(StrykeValue::string("b".into()));
        assert_ne!(a_idx, b_idx);
        assert_eq!(chunk.constants.len(), 2);
    }

    /// Non-string constants skip the dedup scan: equal integers still get
    /// separate pool entries.
    #[test]
    fn add_constant_non_string_no_dedup_scan() {
        let mut chunk = Chunk::new();
        let first = chunk.add_constant(StrykeValue::integer(1));
        let second = chunk.add_constant(StrykeValue::integer(1));
        assert_ne!(first, second);
        assert_eq!(chunk.constants.len(), 2);
    }

    /// `emit` keeps the `ops`, `lines`, and `op_ast_expr` arrays in lock-step.
    #[test]
    fn emit_records_parallel_ops_and_lines() {
        let mut chunk = Chunk::new();
        chunk.emit(Op::LoadInt(1), 10);
        chunk.emit(Op::Pop, 11);
        assert_eq!(chunk.len(), 2);
        assert_eq!(chunk.lines, vec![10, 11]);
        assert_eq!(chunk.op_ast_expr, vec![None, None]);
        assert!(!chunk.is_empty());
    }

    /// `len` / `is_empty` track the op vector as instructions are emitted.
    #[test]
    fn len_is_empty_track_ops() {
        let mut chunk = Chunk::new();
        assert!(chunk.is_empty());
        assert_eq!(chunk.len(), 0);
        chunk.emit(Op::Halt, 0);
        assert!(!chunk.is_empty());
        assert_eq!(chunk.len(), 1);
    }

    /// Forward-patching a pending `Jump` points it at the current chunk end.
    #[test]
    fn patch_jump_here_updates_jump_target() {
        let mut chunk = Chunk::new();
        let site = chunk.emit(Op::Jump(0), 1);
        chunk.emit(Op::LoadInt(99), 2);
        chunk.patch_jump_here(site);
        assert_eq!(chunk.ops.len(), 2);
        assert!(matches!(chunk.ops[site], Op::Jump(2)));
    }

    /// `patch_jump_here` also handles the `JumpIfTrue` variant.
    #[test]
    fn patch_jump_here_jump_if_true() {
        let mut chunk = Chunk::new();
        let site = chunk.emit(Op::JumpIfTrue(0), 1);
        chunk.emit(Op::Halt, 2);
        chunk.patch_jump_here(site);
        assert!(matches!(chunk.ops[site], Op::JumpIfTrue(2)));
    }

    /// `patch_jump_here` also handles the stack-keeping false branch.
    #[test]
    fn patch_jump_here_jump_if_false_keep() {
        let mut chunk = Chunk::new();
        let site = chunk.emit(Op::JumpIfFalseKeep(0), 1);
        chunk.emit(Op::Pop, 2);
        chunk.patch_jump_here(site);
        assert!(matches!(chunk.ops[site], Op::JumpIfFalseKeep(2)));
    }

    /// `patch_jump_here` also handles the stack-keeping true branch.
    #[test]
    fn patch_jump_here_jump_if_true_keep() {
        let mut chunk = Chunk::new();
        let site = chunk.emit(Op::JumpIfTrueKeep(0), 1);
        chunk.emit(Op::Pop, 2);
        chunk.patch_jump_here(site);
        assert!(matches!(chunk.ops[site], Op::JumpIfTrueKeep(2)));
    }

    /// `patch_jump_here` also handles the defined-test keep variant.
    #[test]
    fn patch_jump_here_jump_if_defined_keep() {
        let mut chunk = Chunk::new();
        let site = chunk.emit(Op::JumpIfDefinedKeep(0), 1);
        chunk.emit(Op::Halt, 2);
        chunk.patch_jump_here(site);
        assert!(matches!(chunk.ops[site], Op::JumpIfDefinedKeep(2)));
    }

    /// Patching a non-jump op is a compiler bug and must panic loudly.
    #[test]
    #[should_panic(expected = "patch_jump_to on non-jump op")]
    fn patch_jump_here_panics_on_non_jump() {
        let mut chunk = Chunk::new();
        let site = chunk.emit(Op::LoadInt(1), 1);
        chunk.patch_jump_here(site);
    }

    /// Blocks are appended and indexed sequentially from zero.
    #[test]
    fn add_block_returns_sequential_indices() {
        let mut chunk = Chunk::new();
        let first: ast::Block = vec![];
        let second: ast::Block = vec![];
        assert_eq!(chunk.add_block(first), 0);
        assert_eq!(chunk.add_block(second), 1);
        assert_eq!(chunk.blocks.len(), 2);
    }

    /// `BuiltinId::from_u16` round-trips discriminant 0 and a spread of
    /// later variants up to and including the final one (`Executables`).
    #[test]
    fn builtin_id_from_u16_first_and_last() {
        assert_eq!(BuiltinId::from_u16(0), Some(BuiltinId::Length));
        // Token repetition keeps the round-trip list declarative instead of
        // a wall of near-identical assert_eq! calls.
        macro_rules! assert_roundtrip {
            ($($variant:ident),* $(,)?) => {
                $(assert_eq!(
                    BuiltinId::from_u16(BuiltinId::$variant as u16),
                    Some(BuiltinId::$variant)
                );)*
            };
        }
        assert_roundtrip!(
            Pselect, BarrierNew, ParPipeline, GlobParProgress, Readpipe,
            ReadLineList, ReaddirList, Ssh, Pipe, Files, Filesf, Dirs,
            SymLinks, Sockets, Pipes, BlockDevices, CharDevices, Executables,
        );
    }

    /// Discriminants past the last variant decode to `None`.
    #[test]
    fn builtin_id_from_u16_out_of_range() {
        assert_eq!(BuiltinId::from_u16(BuiltinId::Executables as u16 + 1), None);
        assert_eq!(BuiltinId::from_u16(u16::MAX), None);
    }

    /// `Op` values clone field-for-field.
    #[test]
    fn op_enum_clone_roundtrip() {
        let call = Op::Call(42, 3, 0);
        assert!(matches!(call.clone(), Op::Call(42, 3, 0)));
    }

    /// Cloning a chunk yields an independent op vector — emitting into the
    /// clone must not affect the original.
    #[test]
    fn chunk_clone_independent_ops() {
        let mut original = Chunk::new();
        original.emit(Op::Negate, 1);
        let mut copy = original.clone();
        copy.emit(Op::Pop, 2);
        assert_eq!(original.len(), 1);
        assert_eq!(copy.len(), 2);
    }

    /// Disassembly lists the op index, the op itself, and a `-` placeholder
    /// in the AST-ref column when no AST expr is attached.
    #[test]
    fn chunk_disassemble_includes_ops() {
        let mut chunk = Chunk::new();
        chunk.emit(Op::LoadInt(7), 1);
        let listing = chunk.disassemble();
        assert!(listing.contains("0000"));
        assert!(listing.contains("LoadInt(7)"));
        assert!(listing.contains("     -")); // no ast ref column
    }

    /// An op emitted with an AST-pool index resolves back to the pooled expr.
    #[test]
    fn ast_expr_at_roundtrips_pooled_expr() {
        let mut chunk = Chunk::new();
        let pooled = ast::Expr {
            kind: ast::ExprKind::Integer(99),
            line: 3,
        };
        chunk.ast_expr_pool.push(pooled);
        chunk.emit_with_ast_idx(Op::LoadInt(99), 3, Some(0));
        let resolved = chunk.ast_expr_at(0).expect("ast ref");
        assert!(matches!(&resolved.kind, ast::ExprKind::Integer(99)));
        assert_eq!(resolved.line, 3);
    }
}