//! stryke — `bytecode.rs`: serializable bytecode definitions for the stryke VM.
1use serde::{Deserialize, Serialize};
2
3use crate::ast::{Block, ClassDef, EnumDef, Expr, MatchArm, StructDef, SubSigParam, TraitDef};
4use crate::value::PerlValue;
5
/// Operand tuple for a compiled `splice EXPR, …` call, stored in
/// [`Chunk::splice_expr_entries`].
///
/// Components, in order:
/// 1. the array expression being spliced,
/// 2. the optional offset expression (`None` when omitted in the source),
/// 3. the optional length expression (`None` when omitted),
/// 4. the replacement list (empty `Vec` for a pure deletion/extraction).
///
/// NOTE(review): the runtime defaults applied for the `None` cases (Perl's
/// offset-0 / rest-of-array behavior) live in the VM/interpreter, not here —
/// confirm against the `SpliceExpr` handler before relying on them.
pub(crate) type SpliceExprEntry = (Expr, Option<Expr>, Option<Expr>, Vec<Expr>);
8
/// `sub` body registered at run time (e.g. `BEGIN { sub f { ... } }`), mirrored from
/// [`crate::interpreter::Interpreter::exec_statement`] `StmtKind::SubDecl`.
///
/// Derives `Serialize`/`Deserialize` so the declaration can travel inside a
/// serialized [`Chunk`]; keep the field order stable — positional formats
/// (e.g. bincode) depend on it.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct RuntimeSubDecl {
    // Subroutine name as written in the declaration (presumably stash-qualified
    // by the compiler before storage — TODO confirm against the SubDecl path).
    pub name: String,
    // Signature parameters from the `sub` declaration's parameter list; empty
    // for a signature-less `sub`.
    pub params: Vec<SubSigParam>,
    // The subroutine body in AST form, executed by the tree-walking interpreter
    // when the sub is eventually called.
    pub body: Block,
    // Optional Perl prototype string (e.g. `"$$"`); `None` when no prototype
    // was declared.
    pub prototype: Option<String>,
}
18
19/// Stack-based bytecode instruction set for the stryke VM.
20/// Operands use u16 for pool indices (64k names/constants) and i32 for jumps.
21#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
22pub enum Op {
23    Nop,
24    // ── Constants ──
25    LoadInt(i64),
26    LoadFloat(f64),
27    LoadConst(u16), // index into constant pool
28    LoadUndef,
29
30    // ── Stack ──
31    Pop,
32    Dup,
33    /// Duplicate the top two stack values: \[a, b\] (b on top) → \[a, b, a, b\].
34    Dup2,
35    /// Swap the top two stack values (PerlValue).
36    Swap,
37    /// Rotate the top three values upward (FORTH `rot`): `[a, b, c]` (c on top) → `[b, c, a]`.
38    Rot,
39    /// Pop one value; push [`PerlValue::scalar_context`] of that value (Perl aggregate rules).
40    ValueScalarContext,
41
42    // ── Scalars (u16 = name pool index) ──
43    GetScalar(u16),
44    /// Like `GetScalar` but reads `scope.get_scalar` only (no Perl special-variable dispatch).
45    GetScalarPlain(u16),
46    SetScalar(u16),
47    /// Like `SetScalar` but calls `scope.set_scalar` only (no special-variable dispatch).
48    SetScalarPlain(u16),
49    DeclareScalar(u16),
50    /// Like `DeclareScalar` but the binding is immutable after initialization.
51    DeclareScalarFrozen(u16),
52    /// `typed my $x : Type` — u8 encodes [`crate::ast::PerlTypeName`] (0=Int,1=Str,2=Float).
53    DeclareScalarTyped(u16, u8),
54    /// `frozen typed my $x : Type` — immutable after initialization + type-checked.
55    DeclareScalarTypedFrozen(u16, u8),
56
57    // ── Arrays ──
58    GetArray(u16),
59    SetArray(u16),
60    DeclareArray(u16),
61    DeclareArrayFrozen(u16),
62    GetArrayElem(u16), // stack: [index] → value
63    SetArrayElem(u16), // stack: [value, index]
64    /// Like [`Op::SetArrayElem`] but leaves the assigned value on the stack (e.g. `$a[$i] //=`).
65    SetArrayElemKeep(u16),
66    PushArray(u16),  // stack: [value] → push to named array
67    PopArray(u16),   // → popped value
68    ShiftArray(u16), // → shifted value
69    ArrayLen(u16),   // → integer length
70    /// Pop index spec (scalar or array from [`Op::Range`]); push one `PerlValue::array` of elements
71    /// read from the named array. Used for `@name[...]` slice rvalues.
72    ArraySlicePart(u16),
73    /// Pop `b`, pop `a` (arrays); push concatenation `a` followed by `b` (Perl slice / list glue).
74    ArrayConcatTwo,
75    /// `exists $a[$i]` — stack: `[index]` → 0/1 (stash-qualified array name pool index).
76    ExistsArrayElem(u16),
77    /// `delete $a[$i]` — stack: `[index]` → deleted value (or undef).
78    DeleteArrayElem(u16),
79
80    // ── Hashes ──
81    GetHash(u16),
82    SetHash(u16),
83    DeclareHash(u16),
84    DeclareHashFrozen(u16),
85    /// Dynamic `local $x` — save previous binding, assign TOS (same stack shape as DeclareScalar).
86    LocalDeclareScalar(u16),
87    LocalDeclareArray(u16),
88    LocalDeclareHash(u16),
89    /// `local $h{key} = val` — stack: `[value, key]` (key on top), same as [`Op::SetHashElem`].
90    LocalDeclareHashElement(u16),
91    /// `local $a[i] = val` — stack: `[value, index]` (index on top), same as [`Op::SetArrayElem`].
92    LocalDeclareArrayElement(u16),
93    /// `local *name` or `local *name = *other` — second pool index is `Some(rhs)` when aliasing.
94    LocalDeclareTypeglob(u16, Option<u16>),
95    /// `local *{EXPR}` / `local *$x` — LHS glob name string on stack (TOS); optional static `*rhs` pool index.
96    LocalDeclareTypeglobDynamic(Option<u16>),
97    GetHashElem(u16), // stack: [key] → value
98    SetHashElem(u16), // stack: [value, key]
99    /// Like [`Op::SetHashElem`] but leaves the assigned value on the stack (e.g. `$h{k} //=`).
100    SetHashElemKeep(u16),
101    DeleteHashElem(u16), // stack: [key] → deleted value
102    ExistsHashElem(u16), // stack: [key] → 0/1
103    /// `delete $href->{key}` — stack: `[container, key]` (key on top) → deleted value.
104    DeleteArrowHashElem,
105    /// `exists $href->{key}` — stack: `[container, key]` → 0/1.
106    ExistsArrowHashElem,
107    /// `exists $aref->[$i]` — stack: `[container, index]` (index on top, int-coerced).
108    ExistsArrowArrayElem,
109    /// `delete $aref->[$i]` — stack: `[container, index]` → deleted value (or undef).
110    DeleteArrowArrayElem,
111    HashKeys(u16),   // → array of keys
112    HashValues(u16), // → array of values
113    /// Scalar `keys %h` — push integer key count.
114    HashKeysScalar(u16),
115    /// Scalar `values %h` — push integer value count.
116    HashValuesScalar(u16),
117    /// `keys EXPR` after operand evaluated in list context — stack: `[value]` → key list array.
118    KeysFromValue,
119    /// Scalar `keys EXPR` after operand — stack: `[value]` → key count.
120    KeysFromValueScalar,
121    /// `values EXPR` after operand evaluated in list context — stack: `[value]` → values array.
122    ValuesFromValue,
123    /// Scalar `values EXPR` after operand — stack: `[value]` → value count.
124    ValuesFromValueScalar,
125
126    /// `push @$aref, ITEM` — stack: `[aref, item]` (item on top); mutates; pushes `aref` back.
127    PushArrayDeref,
128    /// After `push @$aref, …` — stack: `[aref]` → `[len]` (consumes aref).
129    ArrayDerefLen,
130    /// `pop @$aref` — stack: `[aref]` → popped value.
131    PopArrayDeref,
132    /// `shift @$aref` — stack: `[aref]` → shifted value.
133    ShiftArrayDeref,
134    /// `unshift @$aref, LIST` — stack `[aref, v1, …, vn]` (vn on top); `n` extra values.
135    UnshiftArrayDeref(u8),
136    /// `splice @$aref, off, len, LIST` — stack top: replacements, then `len`, `off`, `aref` (`len` may be undef).
137    SpliceArrayDeref(u8),
138
139    // ── Arithmetic ──
140    Add,
141    Sub,
142    Mul,
143    Div,
144    Mod,
145    Pow,
146    Negate,
147    /// `inc EXPR` — pop value, push value + 1 (integer if input is integer, else float).
148    Inc,
149    /// `dec EXPR` — pop value, push value - 1.
150    Dec,
151
152    // ── String ──
153    Concat,
154    /// Pop array (or value coerced with [`PerlValue::to_list`]), join element strings with
155    /// [`Interpreter::list_separator`] (`$"`), push one string. Used for `@a` in `"` / `qq`.
156    ArrayStringifyListSep,
157    StringRepeat,
158    /// Pop string, apply `\U` / `\L` / `\u` / `\l` / `\Q` / `\E` case escapes, push result.
159    ProcessCaseEscapes,
160
161    // ── Comparison (numeric) ──
162    NumEq,
163    NumNe,
164    NumLt,
165    NumGt,
166    NumLe,
167    NumGe,
168    Spaceship,
169
170    // ── Comparison (string) ──
171    StrEq,
172    StrNe,
173    StrLt,
174    StrGt,
175    StrLe,
176    StrGe,
177    StrCmp,
178
179    // ── Logical / Bitwise ──
180    LogNot,
181    BitAnd,
182    BitOr,
183    BitXor,
184    BitNot,
185    Shl,
186    Shr,
187
188    // ── Control flow (absolute target addresses) ──
189    Jump(usize),
190    JumpIfTrue(usize),
191    JumpIfFalse(usize),
192    /// Jump if TOS is falsy WITHOUT popping (for short-circuit &&)
193    JumpIfFalseKeep(usize),
194    /// Jump if TOS is truthy WITHOUT popping (for short-circuit ||)
195    JumpIfTrueKeep(usize),
196    /// Jump if TOS is defined WITHOUT popping (for //)
197    JumpIfDefinedKeep(usize),
198
199    // ── Increment / Decrement ──
200    PreInc(u16),
201    PreDec(u16),
202    PostInc(u16),
203    PostDec(u16),
204    /// Pre-increment on a frame slot entry (compiled `my $x` fast path).
205    PreIncSlot(u8),
206    PreDecSlot(u8),
207    PostIncSlot(u8),
208    PostDecSlot(u8),
209
210    // ── Functions ──
211    /// Call subroutine: name index, arg count, `WantarrayCtx` discriminant as `u8`
212    Call(u16, u8, u8),
213    /// Like [`Op::Call`] but with a compile-time-resolved entry: `sid` indexes [`Chunk::static_sub_calls`]
214    /// (entry IP + stack-args); `name_idx` duplicates the stash pool index for closure restore / JIT
215    /// (same as in the table; kept in the opcode so JIT does not need the side table).
216    CallStaticSubId(u16, u16, u8, u8),
217    Return,
218    ReturnValue,
219    /// End of a compiled `map` / `grep` / `sort` block body (empty block or last statement an expression).
220    /// Pops the synthetic call frame from [`crate::vm::VM::run_block_region`] and unwinds the
221    /// block-local scope (`scope_push_hook` per iteration, like [`crate::interpreter::Interpreter::exec_block`]);
222    /// not subroutine `return` and not a closure capture.
223    BlockReturnValue,
224    /// At runtime statement position: capture current lexicals into [`crate::value::PerlSub::closure_env`]
225    /// for a sub already registered in [`Interpreter::subs`] (see `prepare_program_top_level`).
226    BindSubClosure(u16),
227
228    // ── Scope ──
229    PushFrame,
230    PopFrame,
231
232    // ── I/O ──
233    /// `print [HANDLE] LIST` — `None` uses [`crate::interpreter::Interpreter::default_print_handle`].
234    Print(Option<u16>, u8),
235    Say(Option<u16>, u8),
236
237    // ── Built-in function calls ──
238    /// Calls a registered built-in: (builtin_id, arg_count)
239    CallBuiltin(u16, u8),
240    /// Save [`crate::interpreter::Interpreter::wantarray_kind`] and set from `u8`
241    /// ([`crate::interpreter::WantarrayCtx::as_byte`]). Used for `splice` / similar where the
242    /// dynamic context must match the expression's compile-time [`WantarrayCtx`] (e.g. `print splice…`).
243    WantarrayPush(u8),
244    /// Restore after [`Op::WantarrayPush`].
245    WantarrayPop,
246
247    // ── List / Range ──
248    MakeArray(u16), // pop N values, push as Array
249    /// `@$href{k1,k2}` — stack: `[container, key1, …, keyN]` (TOS = last key); pops `N+1` values; pushes array of slot values.
250    HashSliceDeref(u16),
251    /// `@$aref[i1,i2,...]` — stack: `[array_ref, spec1, …, specN]` (TOS = last spec); each spec is a
252    /// scalar index or array of indices (list-context `..` / `qw`/list). Pops `N+1`; pushes elements.
253    ArrowArraySlice(u16),
254    /// `@$href{k1,k2} = VALUE` — stack: `[value, container, key1, …, keyN]` (TOS = last key); pops `N+2` values.
255    SetHashSliceDeref(u16),
256    /// `%name{k1,k2} = VALUE` — stack: `[value, key1, …, keyN]` (TOS = last key); pops `N+1`. Pool: hash name, key count.
257    SetHashSlice(u16, u16),
258    /// `@$href{k1,k2} OP= VALUE` — stack: `[rhs, container, key1, …, keyN]` (TOS = last key); pops `N+2`, pushes the new value.
259    /// `u8` = [`crate::compiler::scalar_compound_op_to_byte`] encoding of the binop.
260    /// Perl 5 applies the op only to the **last** key’s element.
261    HashSliceDerefCompound(u8, u16),
262    /// `++@$href{k1,k2}` / `--...` / `@$href{k1,k2}++` / `...--` — stack: `[container, key1, …, keyN]`;
263    /// pops `N+1`. Pre-forms push the new last-element value; post-forms push the **old** last value.
264    /// `u8` encodes kind: 0=PreInc, 1=PreDec, 2=PostInc, 3=PostDec. Only the last key is updated.
265    HashSliceDerefIncDec(u8, u16),
266    /// `@name{k1,k2} OP= rhs` — stack: `[rhs, key1, …, keyN]` (TOS = last key); pops `N+1`, pushes the new value.
267    /// Pool: compound-op byte ([`crate::compiler::scalar_compound_op_to_byte`]), stash hash name, key-slot count.
268    /// Only the **last** flattened key is updated (same as [`Op::HashSliceDerefCompound`]).
269    NamedHashSliceCompound(u8, u16, u16),
270    /// `++@name{k1,k2}` / `--…` / `@name{k1,k2}++` / `…--` — stack: `[key1, …, keyN]`; pops `N`.
271    /// `u8` kind matches [`Op::HashSliceDerefIncDec`]. Only the last key is updated.
272    NamedHashSliceIncDec(u8, u16, u16),
273    /// Multi-key `@h{k1,k2} //=` / `||=` / `&&=` — stack `[key1, …, keyN]` unchanged; pushes the **last**
274    /// flattened slot (Perl only tests that slot). Pool: hash name, key-slot count.
275    NamedHashSlicePeekLast(u16, u16),
276    /// Stack `[key1, …, keyN, cur]` — pop `N` key slots, keep `cur` (short-circuit path).
277    NamedHashSliceDropKeysKeepCur(u16),
278    /// Assign list RHS’s last element to the **last** flattened key; stack `[val, key1, …, keyN]` (TOS = last key). Pushes `val`.
279    SetNamedHashSliceLastKeep(u16, u16),
280    /// Multi-key `@$href{k1,k2} //=` — stack `[container, key1, …, keyN]`; pushes last slice element (see [`Op::ArrowArraySlicePeekLast`]).
281    HashSliceDerefPeekLast(u16),
282    /// `[container, key1, …, keyN, val]` → `[val, container, key1, …, keyN]` for [`Op::HashSliceDerefSetLastKeep`].
283    HashSliceDerefRollValUnderKeys(u16),
284    /// Assign to last flattened key only; stack `[val, container, key1, …, keyN]`. Pushes `val`.
285    HashSliceDerefSetLastKeep(u16),
286    /// Stack `[container, key1, …, keyN, cur]` — drop container and keys; keep `cur`.
287    HashSliceDerefDropKeysKeepCur(u16),
288    /// `@$aref[i1,i2,...] = LIST` — stack: `[value, aref, spec1, …, specN]` (TOS = last spec);
289    /// pops `N+2`. Delegates to [`crate::interpreter::Interpreter::assign_arrow_array_slice`].
290    SetArrowArraySlice(u16),
291    /// `@$aref[i1,i2,...] OP= rhs` — stack: `[rhs, aref, spec1, …, specN]`; pops `N+2`, pushes new value.
292    /// `u8` = [`crate::compiler::scalar_compound_op_to_byte`] encoding of the binop.
293    /// Perl 5 applies the op only to the **last** index. Delegates to [`crate::interpreter::Interpreter::compound_assign_arrow_array_slice`].
294    ArrowArraySliceCompound(u8, u16),
295    /// `++@$aref[i1,i2,...]` / `--...` / `...++` / `...--` — stack: `[aref, spec1, …, specN]`;
296    /// pops `N+1`. Pre-forms push the new last-element value; post-forms push the old last value.
297    /// `u8` kind matches [`Op::HashSliceDerefIncDec`]. Only the last index is updated. Delegates to
298    /// [`crate::interpreter::Interpreter::arrow_array_slice_inc_dec`].
299    ArrowArraySliceIncDec(u8, u16),
300    /// Read the element at the **last** flattened index of `@$aref[spec1,…]` without popping `aref`
301    /// or specs. Stack: `[aref, spec1, …, specN]` (TOS = last spec) → same plus pushed scalar.
302    /// Used for `@$r[i,j] //=` / `||=` / `&&=` short-circuit tests (Perl only tests the last slot).
303    ArrowArraySlicePeekLast(u16),
304    /// Stack: `[aref, spec1, …, specN, cur]` — pop slice keys and container, keep `cur` (short-circuit
305    /// result). `u16` = number of spec slots (same as [`Op::ArrowArraySlice`]).
306    ArrowArraySliceDropKeysKeepCur(u16),
307    /// Reorder `[aref, spec1, …, specN, val]` → `[val, aref, spec1, …, specN]` for
308    /// [`Op::SetArrowArraySliceLastKeep`].
309    ArrowArraySliceRollValUnderSpecs(u16),
310    /// Assign `val` to the **last** flattened index only; stack `[val, aref, spec1, …, specN]`
311    /// (TOS = last spec). Pushes `val` (like [`Op::SetArrowArrayKeep`]).
312    SetArrowArraySliceLastKeep(u16),
313    /// Like [`Op::ArrowArraySliceIncDec`] but for a **named** stash array (`@a[i1,i2,...]`).
314    /// Stack: `[spec1, …, specN]` (TOS = last spec). `u16` = name pool index (stash-qualified).
315    /// Delegates to [`crate::interpreter::Interpreter::named_array_slice_inc_dec`].
316    NamedArraySliceIncDec(u8, u16, u16),
317    /// `@name[spec1,…] OP= rhs` — stack `[rhs, spec1, …, specN]` (TOS = last spec); pops `N+1`.
318    /// Only the **last** flattened index is updated (same as [`Op::ArrowArraySliceCompound`]).
319    NamedArraySliceCompound(u8, u16, u16),
320    /// Read the **last** flattened slot of `@name[spec1,…]` without popping specs. Stack:
321    /// `[spec1, …, specN]` → same plus pushed scalar. `u16` pairs: name pool index, spec count.
322    NamedArraySlicePeekLast(u16, u16),
323    /// Stack: `[spec1, …, specN, cur]` — pop specs, keep `cur` (short-circuit). `u16` = spec count.
324    NamedArraySliceDropKeysKeepCur(u16),
325    /// `[spec1, …, specN, val]` → `[val, spec1, …, specN]` for [`Op::SetNamedArraySliceLastKeep`].
326    NamedArraySliceRollValUnderSpecs(u16),
327    /// Assign to the **last** index only; stack `[val, spec1, …, specN]`. Pushes `val`.
328    SetNamedArraySliceLastKeep(u16, u16),
329    /// `@name[spec1,…] = LIST` — stack `[value, spec1, …, specN]` (TOS = last spec); pops `N+1`.
330    /// Element-wise like [`Op::SetArrowArraySlice`]. Pool indices: stash-qualified array name, spec count.
331    SetNamedArraySlice(u16, u16),
332    /// `BAREWORD` as an rvalue — at run time, look up a subroutine with this name; if found,
333    /// call it with no args (nullary), otherwise push the name as a string (Perl's bareword-as-
334    /// stringifies behavior). `u16` is a name-pool index. Delegates to
335    /// [`crate::interpreter::Interpreter::resolve_bareword_rvalue`].
336    BarewordRvalue(u16),
337    /// Throw `PerlError::runtime` with the message at constant pool index `u16`. Used by the compiler
338    /// to hard-reject constructs whose only valid response is the same runtime error that the
339    /// tree-walker produces (e.g. `++@$r`, `%{...}--`) without falling back to the tree path.
340    RuntimeErrorConst(u16),
341    MakeHash(u16), // pop N key-value pairs, push as Hash
342    Range,         // stack: [from, to] → Array
343    RangeStep,     // stack: [from, to, step] → Array (stepped range)
344    /// Scalar `..` / `...` flip-flop (numeric bounds vs `$.` — [`Interpreter::scalar_flipflop_dot_line`]).
345    /// Stack: `[from, to]` (ints); pushes `1` or `0`. `u16` indexes flip-flop slots; `u8` is `1` for `...`
346    /// (exclusive: right bound only after `$.` is strictly past the line where the left bound matched).
347    ScalarFlipFlop(u16, u8),
348    /// Regex `..` / `...` flip-flop: both bounds are pattern literals; tests use `$_` and `$.` like Perl
349    /// (`Interpreter::regex_flip_flop_eval`). Operand order: `slot`, `exclusive`, left pattern, left flags,
350    /// right pattern, right flags (constant pool indices). No stack operands; pushes `0`/`1`.
351    RegexFlipFlop(u16, u8, u16, u16, u16, u16),
352    /// Regex `..` / `...` flip-flop with `eof` as the right operand (no arguments). Left bound matches `$_`;
353    /// right bound is [`Interpreter::eof_without_arg_is_true`] (Perl `eof` in `-n`/`-p`). Operand order:
354    /// `slot`, `exclusive`, left pattern, left flags.
355    RegexEofFlipFlop(u16, u8, u16, u16),
356    /// Regex `..` / `...` with a non-literal right operand (e.g. `m/a/ ... (m/b/ or m/c/)`). Left bound is
357    /// pattern + flags; right is evaluated in boolean context each line (pool index into
358    /// [`Chunk::regex_flip_flop_rhs_expr_entries`] / bytecode ranges). Operand order: `slot`, `exclusive`,
359    /// left pattern, left flags, rhs expr index.
360    RegexFlipFlopExprRhs(u16, u8, u16, u16, u16),
361    /// Regex `..` / `...` with a numeric right operand (Perl: right bound is [`Interpreter::scalar_flipflop_dot_line`]
362    /// vs literal line). Constant pool index holds the RHS line as [`PerlValue::integer`]. Operand order:
363    /// `slot`, `exclusive`, left pattern, left flags, rhs line constant index.
364    RegexFlipFlopDotLineRhs(u16, u8, u16, u16, u16),
365
366    // ── Regex ──
367    /// Match: pattern_const_idx, flags_const_idx, scalar_g, pos_key_name_idx (`u16::MAX` = `$_`);
368    /// stack: string operand → result
369    RegexMatch(u16, u16, bool, u16),
370    /// Substitution `s///`: pattern, replacement, flags constant indices; lvalue index into chunk.
371    /// stack: string (subject from LHS expr) → replacement count
372    RegexSubst(u16, u16, u16, u16),
373    /// Transliterate `tr///`: from, to, flags constant indices; lvalue index into chunk.
374    /// stack: string → transliteration count
375    RegexTransliterate(u16, u16, u16, u16),
376    /// Dynamic `=~` / `!~`: pattern from RHS, subject from LHS; empty flags.
377    /// stack: `[subject, pattern]` (pattern on top) → 0/1; `true` = negate (`!~`).
378    RegexMatchDyn(bool),
379    /// Regex literal as a value (`qr/PAT/FLAGS`) — pattern and flags string pool indices.
380    LoadRegex(u16, u16),
381    /// After [`RegexMatchDyn`] for bare `m//` in `&&` / `||`: pop 0/1; push `""` or `1` (Perl scalar).
382    RegexBoolToScalar,
383    /// `pos $var = EXPR` / `pos = EXPR` (implicit `$_`). Stack: `[value, key]` (key string on top).
384    SetRegexPos,
385
386    // ── Assign helpers ──
387    /// SetScalar that also leaves the value on the stack (for chained assignment)
388    SetScalarKeep(u16),
389    /// `SetScalarKeep` for non-special scalars (see `SetScalarPlain`).
390    SetScalarKeepPlain(u16),
391
392    // ── Block-based operations (u16 = index into chunk.blocks) ──
393    /// map { BLOCK } @list — block_idx; stack: \[list\] → \[mapped\]
394    MapWithBlock(u16),
395    /// flat_map { BLOCK } @list — like [`Op::MapWithBlock`] but peels one ARRAY ref per iteration ([`PerlValue::map_flatten_outputs`])
396    FlatMapWithBlock(u16),
397    /// grep { BLOCK } @list — block_idx; stack: \[list\] → \[filtered\]
398    GrepWithBlock(u16),
399    /// each { BLOCK } @list — block_idx; stack: \[list\] → \[count\]
400    ForEachWithBlock(u16),
401    /// map EXPR, LIST — index into [`Chunk::map_expr_entries`] / [`Chunk::map_expr_bytecode_ranges`];
402    /// stack: \[list\] → \[mapped\]
403    MapWithExpr(u16),
404    /// flat_map EXPR, LIST — same pools as [`Op::MapWithExpr`]; stack: \[list\] → \[mapped\]
405    FlatMapWithExpr(u16),
406    /// grep EXPR, LIST — index into [`Chunk::grep_expr_entries`] / [`Chunk::grep_expr_bytecode_ranges`];
407    /// stack: \[list\] → \[filtered\]
408    GrepWithExpr(u16),
409    /// `group_by { BLOCK } LIST` / `chunk_by { BLOCK } LIST` — consecutive runs where the block’s
410    /// return value stringifies the same as the previous (`str_eq`); stack: \[list\] → \[arrayrefs\]
411    ChunkByWithBlock(u16),
412    /// `group_by EXPR, LIST` / `chunk_by EXPR, LIST` — same as [`Op::ChunkByWithBlock`] but key from
413    /// `EXPR` with `$_` set each iteration; uses [`Chunk::map_expr_entries`].
414    ChunkByWithExpr(u16),
415    /// sort { BLOCK } @list — block_idx; stack: \[list\] → \[sorted\]
416    SortWithBlock(u16),
417    /// sort @list (no block) — stack: \[list\] → \[sorted\]
418    SortNoBlock,
419    /// sort $coderef LIST — stack: \[list, coderef\] (coderef on top); `u8` = wantarray for comparator calls.
420    SortWithCodeComparator(u8),
421    /// `{ $a <=> $b }` (0), `{ $a cmp $b }` (1), `{ $b <=> $a }` (2), `{ $b cmp $a }` (3)
422    SortWithBlockFast(u8),
423    /// `map { $_ * k }` with integer `k` — stack: \[list\] → \[mapped\]
424    MapIntMul(i64),
425    /// `grep { $_ % m == r }` with integer `m` (non-zero), `r` — stack: \[list\] → \[filtered\]
426    GrepIntModEq(i64, i64),
427    /// Parallel sort, same fast modes as [`Op::SortWithBlockFast`].
428    PSortWithBlockFast(u8),
429    /// `chomp` on assignable expr: stack has value → chomped count; uses `chunk.lvalues[idx]`.
430    ChompInPlace(u16),
431    /// `chop` on assignable expr: stack has value → chopped char; uses `chunk.lvalues[idx]`.
432    ChopInPlace(u16),
433    /// Four-arg `substr LHS, OFF, LEN, REPL` — index into [`Chunk::substr_four_arg_entries`]; stack: \[\] → extracted slice string
434    SubstrFourArg(u16),
435    /// `keys EXPR` when `EXPR` is not a bare `%h` — [`Chunk::keys_expr_entries`] /
436    /// [`Chunk::keys_expr_bytecode_ranges`]
437    KeysExpr(u16),
438    /// `values EXPR` when not a bare `%h` — [`Chunk::values_expr_entries`] /
439    /// [`Chunk::values_expr_bytecode_ranges`]
440    ValuesExpr(u16),
441    /// Scalar `keys EXPR` (dynamic) — same pools as [`Op::KeysExpr`].
442    KeysExprScalar(u16),
443    /// Scalar `values EXPR` — same pools as [`Op::ValuesExpr`].
444    ValuesExprScalar(u16),
445    /// `delete EXPR` when not a fast `%h{...}` — index into [`Chunk::delete_expr_entries`]
446    DeleteExpr(u16),
447    /// `exists EXPR` when not a fast `%h{...}` — index into [`Chunk::exists_expr_entries`]
448    ExistsExpr(u16),
449    /// `push EXPR, ...` when not a bare `@name` — [`Chunk::push_expr_entries`]
450    PushExpr(u16),
451    /// `pop EXPR` when not a bare `@name` — [`Chunk::pop_expr_entries`]
452    PopExpr(u16),
453    /// `shift EXPR` when not a bare `@name` — [`Chunk::shift_expr_entries`]
454    ShiftExpr(u16),
455    /// `unshift EXPR, ...` when not a bare `@name` — [`Chunk::unshift_expr_entries`]
456    UnshiftExpr(u16),
457    /// `splice EXPR, ...` when not a bare `@name` — [`Chunk::splice_expr_entries`]
458    SpliceExpr(u16),
459    /// `$var .= expr` — append to scalar string in-place without cloning.
460    /// Stack: \[value_to_append\] → \[resulting_string\]. u16 = name pool index of target scalar.
461    ConcatAppend(u16),
462    /// Slot-indexed `$var .= expr` — avoids frame walking and string comparison.
463    /// Stack: \[value_to_append\] → \[resulting_string\]. u8 = slot index.
464    ConcatAppendSlot(u8),
465    /// Fused `$slot_a += $slot_b` — no stack traffic. Pushes result.
466    AddAssignSlotSlot(u8, u8),
467    /// Fused `$slot_a -= $slot_b` — no stack traffic. Pushes result.
468    SubAssignSlotSlot(u8, u8),
469    /// Fused `$slot_a *= $slot_b` — no stack traffic. Pushes result.
470    MulAssignSlotSlot(u8, u8),
471    /// Fused `if ($slot < INT) goto target` — replaces GetScalarSlot + LoadInt + NumLt + JumpIfFalse.
472    /// (slot, i32_limit, jump_target)
473    SlotLtIntJumpIfFalse(u8, i32, usize),
474    /// Void-context `$slot_a += $slot_b` — no stack push. Replaces AddAssignSlotSlot + Pop.
475    AddAssignSlotSlotVoid(u8, u8),
476    /// Void-context `++$slot` — no stack push. Replaces PreIncSlot + Pop.
477    PreIncSlotVoid(u8),
478    /// Void-context `$slot .= expr` — no stack push. Replaces ConcatAppendSlot + Pop.
479    ConcatAppendSlotVoid(u8),
480    /// Fused loop backedge: `$slot += 1; if $slot < limit jump body_target; else fall through`.
481    ///
482    /// Replaces the trailing `PreIncSlotVoid(s) + Jump(top)` of a C-style `for (my $i=0; $i<N; $i=$i+1)`
483    /// loop whose top op is a `SlotLtIntJumpIfFalse(s, limit, exit)`. The initial iteration still
484    /// goes through the top check; this op handles all subsequent iterations in a single dispatch,
485    /// halving the number of ops per loop trip for the `bench_loop`/`bench_string`/`bench_array` shape.
486    /// (slot, i32_limit, body_target)
487    SlotIncLtIntJumpBack(u8, i32, usize),
488    /// Fused accumulator loop: `while $i < limit { $sum += $i; $i += 1 }` — runs the entire
489    /// remaining counted-sum loop in native Rust, eliminating op dispatch per iteration.
490    ///
491    /// Fused when a `for (my $i = a; $i < N; $i = $i + 1) { $sum += $i }` body compiles down to
492    /// exactly `AddAssignSlotSlotVoid(sum, i) + SlotIncLtIntJumpBack(i, limit, body_target)` with
493    /// `body_target` pointing at the AddAssign — i.e. the body is 1 Perl statement. Both slots are
494    /// left as integers on exit (same coercion as `AddAssignSlotSlotVoid` + `PreIncSlotVoid`).
495    /// (sum_slot, i_slot, i32_limit)
496    AccumSumLoop(u8, u8, i32),
497    /// Fused string-append counted loop: `while $i < limit { $s .= CONST; $i += 1 }` — extends
498    /// the `String` buffer in place once and pushes the literal `(limit - i)` times in a tight
499    /// Rust loop, with `Arc::get_mut` → `reserve` → `push_str`. Falls back to the regular op
500    /// sequence if the slot is not a uniquely-owned heap `String`.
501    ///
502    /// Fused when the loop body is exactly `LoadConst(c) + ConcatAppendSlotVoid(s) +
503    /// SlotIncLtIntJumpBack(i, limit, body_target)` with `body_target` pointing at the `LoadConst`.
504    /// (const_idx, s_slot, i_slot, i32_limit)
505    ConcatConstSlotLoop(u16, u8, u8, i32),
506    /// Fused array-push counted loop: `while $i < limit { push @a, $i; $i += 1 }` — reserves the
507    /// target `Vec` once and pushes `PerlValue::integer(i)` in a tight Rust loop. Emitted when
508    /// the loop body is exactly `GetScalarSlot(i) + PushArray(arr) + ArrayLen(arr) + Pop +
509    /// SlotIncLtIntJumpBack(i, limit, body_target)` with `body_target` pointing at the
510    /// `GetScalarSlot` (i.e. the body is one `push` statement whose return is discarded).
511    /// (arr_name_idx, i_slot, i32_limit)
512    PushIntRangeToArrayLoop(u16, u8, i32),
513    /// Fused hash-insert counted loop: `while $i < limit { $h{$i} = $i * k; $i += 1 }` — runs the
514    /// entire insert loop natively, reserving hash capacity once and writing `(stringified i, i*k)`
515    /// pairs in tight Rust. Emitted when the body is exactly
516    /// `GetScalarSlot(i) + LoadInt(k) + Mul + GetScalarSlot(i) + SetHashElem(h) + Pop +
517    /// SlotIncLtIntJumpBack(i, limit, body_target)` with `body_target` at the first `GetScalarSlot`.
518    /// (hash_name_idx, i_slot, i32_multiplier, i32_limit)
519    SetHashIntTimesLoop(u16, u8, i32, i32),
520    /// Fused `$sum += $h{$k}` body op for the inner loop of `for my $k (keys %h) { $sum += $h{$k} }`.
521    ///
522    /// Replaces the 6-op sequence `GetScalarSlot(sum) + GetScalarPlain(k) + GetHashElem(h) + Add +
523    /// SetScalarSlotKeep(sum) + Pop` with a single dispatch that reads the hash element directly
524    /// into the slot without going through the VM stack. (sum_slot, k_name_idx, h_name_idx)
525    AddHashElemPlainKeyToSlot(u8, u16, u16),
526    /// Like [`Op::AddHashElemPlainKeyToSlot`] but the key variable lives in a slot (`for my $k`
527    /// in slot-mode foreach). Pure slot read + hash lookup + slot write with zero VM stack traffic.
528    /// (sum_slot, k_slot, h_name_idx)
529    AddHashElemSlotKeyToSlot(u8, u8, u16),
530    /// Fused `for my $k (keys %h) { $sum += $h{$k} }` — walks `hash.values()` in a tight native
531    /// loop, accumulating integer or float sums directly into `sum_slot`. Emitted by the
532    /// bytecode-level peephole when the foreach shape + `AddHashElemSlotKeyToSlot` body + slot
533    /// counter/var declarations are detected. `h_name_idx` is the source hash's name pool index.
534    /// (sum_slot, h_name_idx)
535    SumHashValuesToSlot(u8, u16),
536
537    // ── Frame-local scalar slots (O(1) access, no string lookup) ──
538    /// Read scalar from current frame's slot array. u8 = slot index.
539    GetScalarSlot(u8),
540    /// Write scalar to current frame's slot array (pop, discard). u8 = slot index.
541    SetScalarSlot(u8),
542    /// Write scalar to current frame's slot array (pop, keep on stack). u8 = slot index.
543    SetScalarSlotKeep(u8),
544    /// Declare + initialize scalar in current frame's slot array. u8 = slot index; u16 = name pool
545    /// index (bare name) for closure capture.
546    DeclareScalarSlot(u8, u16),
547    /// Read argument from caller's stack region: push stack\[call_frame.stack_base + idx\].
548    /// Avoids @_ allocation + string-based shift for compiled sub argument passing.
549    GetArg(u8),
550    /// `reverse` in list context — stack: \[list\] → \[reversed list\]
551    ReverseListOp,
552    /// `scalar reverse` — stack: \[list\] → concatenated string with chars reversed (Perl).
553    ReverseScalarOp,
554    /// `rev` in list context — reverse list, preserve iterators lazily.
555    RevListOp,
556    /// `rev` in scalar context — char-reverse string.
557    RevScalarOp,
558    /// Pop TOS (array/list), push `to_list().len()` as integer (Perl `scalar` on map/grep result).
559    StackArrayLen,
560    /// Pop list-slice result array; push last element (Perl `scalar (LIST)[i,...]`).
561    ListSliceToScalar,
562    /// pmap { BLOCK } @list — block_idx; stack: \[progress_flag, list\] → \[mapped\] (`progress_flag` is 0/1)
563    PMapWithBlock(u16),
564    /// pflat_map { BLOCK } @list — flatten array results; output in **input order**; stack same as [`Op::PMapWithBlock`]
565    PFlatMapWithBlock(u16),
566    /// pmaps { BLOCK } LIST — streaming parallel map; stack: \[list\] → \[iterator\]
567    PMapsWithBlock(u16),
568    /// pflat_maps { BLOCK } LIST — streaming parallel flat map; stack: \[list\] → \[iterator\]
569    PFlatMapsWithBlock(u16),
570    /// `pmap_on` / `pflat_map_on` over SSH — stack: \[progress_flag, list, cluster\] → \[mapped\]; `flat` = 1 for flatten
571    PMapRemote {
572        block_idx: u16,
573        flat: u8,
574    },
575    /// puniq LIST — hash-partition parallel distinct (first occurrence order); stack: \[progress_flag, list\] → \[array\]
576    Puniq,
577    /// pfirst { BLOCK } LIST — short-circuit parallel; stack: \[progress_flag, list\] → value or undef
578    PFirstWithBlock(u16),
579    /// pany { BLOCK } LIST — short-circuit parallel; stack: \[progress_flag, list\] → 0/1
580    PAnyWithBlock(u16),
581    /// pmap_chunked N { BLOCK } @list — block_idx; stack: \[progress_flag, chunk_n, list\] → \[mapped\]
582    PMapChunkedWithBlock(u16),
583    /// pgrep { BLOCK } @list — block_idx; stack: \[progress_flag, list\] → \[filtered\]
584    PGrepWithBlock(u16),
585    /// pgreps { BLOCK } LIST — streaming parallel grep; stack: \[list\] → \[iterator\]
586    PGrepsWithBlock(u16),
587    /// pfor { BLOCK } @list — block_idx; stack: \[progress_flag, list\] → \[\]
588    PForWithBlock(u16),
589    /// psort { BLOCK } @list — block_idx; stack: \[progress_flag, list\] → \[sorted\]
590    PSortWithBlock(u16),
591    /// psort @list (no block) — stack: \[progress_flag, list\] → \[sorted\]
592    PSortNoBlockParallel,
593    /// `reduce { BLOCK } @list` — block_idx; stack: \[list\] → \[accumulator\]
594    ReduceWithBlock(u16),
595    /// `preduce { BLOCK } @list` — block_idx; stack: \[progress_flag, list\] → \[accumulator\]
596    PReduceWithBlock(u16),
597    /// `preduce_init EXPR, { BLOCK } @list` — block_idx; stack: \[progress_flag, list, init\] → \[accumulator\]
598    PReduceInitWithBlock(u16),
599    /// `pmap_reduce { MAP } { REDUCE } @list` — map and reduce block indices; stack: \[progress_flag, list\] → \[scalar\]
600    PMapReduceWithBlocks(u16, u16),
601    /// `pcache { BLOCK } @list` — block_idx; stack: \[progress_flag, list\] → \[array\]
602    PcacheWithBlock(u16),
603    /// `pselect($rx1, ... [, timeout => SECS])` — stack: \[rx0, …, rx_{n-1}\] with optional timeout on top
604    Pselect {
605        n_rx: u8,
606        has_timeout: bool,
607    },
608    /// `par_lines PATH, fn { } [, progress => EXPR]` — index into [`Chunk::par_lines_entries`]; stack: \[\] → `undef`
609    ParLines(u16),
610    /// `par_walk PATH, fn { } [, progress => EXPR]` — index into [`Chunk::par_walk_entries`]; stack: \[\] → `undef`
611    ParWalk(u16),
612    /// `pwatch GLOB, fn { }` — index into [`Chunk::pwatch_entries`]; stack: \[\] → result
613    Pwatch(u16),
614    /// fan N { BLOCK } — block_idx; stack: \[progress_flag, count\] (`progress_flag` is 0/1)
615    FanWithBlock(u16),
616    /// fan { BLOCK } — block_idx; stack: \[progress_flag\]; COUNT = rayon pool size (`stryke -j`)
617    FanWithBlockAuto(u16),
618    /// fan_cap N { BLOCK } — like fan; stack: \[progress_flag, count\] → array of block return values
619    FanCapWithBlock(u16),
620    /// fan_cap { BLOCK } — like fan; stack: \[progress_flag\] → array
621    FanCapWithBlockAuto(u16),
622    /// `do { BLOCK }` — block_idx + wantarray byte ([`crate::interpreter::WantarrayCtx::as_byte`]);
623    /// stack: \[\] → result
624    EvalBlock(u16, u8),
625    /// `trace { BLOCK }` — block_idx; stack: \[\] → block value (stderr tracing for mysync mutations)
626    TraceBlock(u16),
627    /// `timer { BLOCK }` — block_idx; stack: \[\] → elapsed ms as float
628    TimerBlock(u16),
629    /// `bench { BLOCK } N` — block_idx; stack: \[iterations\] → benchmark summary string
630    BenchBlock(u16),
631    /// `given (EXPR) { when ... default ... }` — [`Chunk::given_entries`] /
632    /// [`Chunk::given_topic_bytecode_ranges`]; stack: \[\] → topic result
633    Given(u16),
634    /// `eval_timeout SECS { ... }` — index into [`Chunk::eval_timeout_entries`] /
635    /// [`Chunk::eval_timeout_expr_bytecode_ranges`]; stack: \[\] → block value
636    EvalTimeout(u16),
637    /// Algebraic `match (SUBJECT) { ... }` — [`Chunk::algebraic_match_entries`] /
638    /// [`Chunk::algebraic_match_subject_bytecode_ranges`]; stack: \[\] → arm value
639    AlgebraicMatch(u16),
640    /// `async { BLOCK }` / `spawn { BLOCK }` — block_idx; stack: \[\] → AsyncTask
641    AsyncBlock(u16),
642    /// `await EXPR` — stack: \[value\] → result
643    Await,
644    /// `__SUB__` — push reference to currently executing sub (for anonymous recursion).
645    LoadCurrentSub,
646    /// `defer { BLOCK }` — register a block to run when the current scope exits.
647    /// Stack: `[coderef]` → `[]`. The coderef is pushed to the frame's defer list.
648    DeferBlock,
649    /// Make a scalar reference from TOS (copies value into a new `RwLock`).
650    MakeScalarRef,
651    /// `\$name` when `name` is a plain scalar variable — ref aliases the live binding (same as tree `scalar_binding_ref`).
652    MakeScalarBindingRef(u16),
653    /// `\@name` — ref aliases the live array in scope (name pool index, stash-qualified like [`Op::GetArray`]).
654    MakeArrayBindingRef(u16),
655    /// `\%name` — ref aliases the live hash in scope.
656    MakeHashBindingRef(u16),
657    /// `\@{ EXPR }` after `EXPR` is on the stack — ARRAY ref aliasing the same storage as Perl (ref to existing ref or package array).
658    MakeArrayRefAlias,
659    /// `\%{ EXPR }` — HASH ref alias (same semantics as [`Op::MakeArrayRefAlias`] for hashes).
660    MakeHashRefAlias,
661    /// Make an array reference from TOS (which should be an Array)
662    MakeArrayRef,
663    /// Make a hash reference from TOS (which should be a Hash)
664    MakeHashRef,
665    /// Make an anonymous sub from a block — block_idx; stack: \[\] → CodeRef
666    /// Anonymous `sub` / coderef: block pool index + [`Chunk::code_ref_sigs`] index (may be empty vec).
667    MakeCodeRef(u16, u16),
668    /// Push a code reference to a named sub (`\&foo`) — name pool index; resolves at run time.
669    LoadNamedSubRef(u16),
670    /// `\&{ EXPR }` — stack: \[sub name string\] → code ref (resolves at run time).
671    LoadDynamicSubRef,
672    /// `*{ EXPR }` — stack: \[stash / glob name string\] → resolved handle string (IO alias map + identity).
673    LoadDynamicTypeglob,
674    /// `*lhs = *rhs` — copy stash slots (sub, scalar, array, hash, IO alias); name pool indices for both sides.
675    CopyTypeglobSlots(u16, u16),
676    /// `*name = $coderef` — stack: pop value, install subroutine in typeglob, push value back (assignment result).
677    TypeglobAssignFromValue(u16),
678    /// `*{LHS} = $coderef` — stack: pop value, pop LHS glob name string, install sub, push value back.
679    TypeglobAssignFromValueDynamic,
680    /// `*{LHS} = *rhs` — stack: pop LHS glob name string; RHS name is pool index; copies stash like [`Op::CopyTypeglobSlots`].
681    CopyTypeglobSlotsDynamicLhs(u16),
682    /// Symbolic deref (`$$r`, `@{...}`, `%{...}`, `*{...}`): stack: \[ref or name value\] → result.
683    /// Byte: `0` = [`crate::ast::Sigil::Scalar`], `1` = Array, `2` = Hash, `3` = Typeglob.
684    SymbolicDeref(u8),
685    /// Dereference arrow: ->\[\] — stack: \[ref, index\] → value
686    ArrowArray,
687    /// Dereference arrow: ->{} — stack: \[ref, key\] → value
688    ArrowHash,
689    /// Assign to `->{}`: stack: \[value, ref, key\] (key on top) — consumes three values.
690    SetArrowHash,
691    /// Assign to `->[]`: stack: \[value, ref, index\] (index on top) — consumes three values.
692    SetArrowArray,
693    /// Like [`Op::SetArrowArray`] but leaves the assigned value on the stack (for `++$aref->[$i]` value).
694    SetArrowArrayKeep,
695    /// Like [`Op::SetArrowHash`] but leaves the assigned value on the stack (for `++$href->{k}` value).
696    SetArrowHashKeep,
697    /// Postfix `++` / `--` on `->[]`: stack \[ref, index\] (index on top) → old value; mutates slot.
698    /// Byte: `0` = increment, `1` = decrement.
699    ArrowArrayPostfix(u8),
700    /// Postfix `++` / `--` on `->{}`: stack \[ref, key\] (key on top) → old value; mutates slot.
701    /// Byte: `0` = increment, `1` = decrement.
702    ArrowHashPostfix(u8),
703    /// `$$r = $val` — stack: \[value, ref\] (ref on top).
704    SetSymbolicScalarRef,
705    /// Like [`Op::SetSymbolicScalarRef`] but leaves the assigned value on the stack.
706    SetSymbolicScalarRefKeep,
707    /// `@{ EXPR } = LIST` — stack: \[list value, ref-or-name\] (top = ref / package name); delegates to
708    /// [`Interpreter::assign_symbolic_array_ref_deref`](crate::interpreter::Interpreter::assign_symbolic_array_ref_deref).
709    SetSymbolicArrayRef,
710    /// `%{ EXPR } = LIST` — stack: \[list value, ref-or-name\]; pairs from list like `%h = (k => v, …)`.
711    SetSymbolicHashRef,
712    /// `*{ EXPR } = RHS` — stack: \[value, ref-or-name\] (top = symbolic glob name); coderef install or `*lhs = *rhs` copy.
713    SetSymbolicTypeglobRef,
714    /// Postfix `++` / `--` on symbolic scalar ref (`$$r`); stack \[ref\] → old value. Byte: `0` = increment, `1` = decrement.
715    SymbolicScalarRefPostfix(u8),
716    /// Dereference arrow: ->() — stack: \[ref, args_array\] → value
717    /// `$cr->(...)` — wantarray byte (see VM `WantarrayCtx` threading on `Call` / `MethodCall`).
718    ArrowCall(u8),
719    /// Indirect call `$coderef(ARG...)` / `&$coderef(ARG...)` — stack (bottom→top): `target`, then
720    /// `argc` argument values (first arg pushed first). Third byte: `1` = ignore stack args and use
721    /// caller `@_` (`argc` must be `0`).
722    IndirectCall(u8, u8, u8),
723    /// Method call: stack: \[object, args...\] → result; name_idx, argc, wantarray
724    MethodCall(u16, u8, u8),
725    /// Like [`Op::MethodCall`] but uses SUPER / C3 parent chain (see interpreter method resolution for `SUPER`).
726    MethodCallSuper(u16, u8, u8),
727    /// File test: -e, -f, -d, etc. — test char; stack: \[path\] → 0/1
728    FileTestOp(u8),
729
730    // ── try / catch / finally (VM exception handling; see [`VM::try_recover_from_exception`]) ──
731    /// Push a [`crate::vm::TryFrame`]; `catch_ip` / `after_ip` patched via [`Chunk::patch_try_push_catch`]
732    /// / [`Chunk::patch_try_push_after`]; `finally_ip` via [`Chunk::patch_try_push_finally`].
733    TryPush {
734        catch_ip: usize,
735        finally_ip: Option<usize>,
736        after_ip: usize,
737        catch_var_idx: u16,
738    },
739    /// Normal completion from try or catch body (jump to finally or merge).
740    TryContinueNormal,
741    /// End of `finally` block: pop try frame and jump to `after_ip`.
742    TryFinallyEnd,
743    /// Enter catch: consume [`crate::vm::VM::pending_catch_error`], pop try scope, push catch scope, bind `$var`.
744    CatchReceive(u16),
745
746    // ── `mysync` (thread-safe shared bindings; see [`StmtKind::MySync`]) ──
747    /// Stack: `[init]` → `[]`. Declares `${name}` as `PerlValue::atomic` (or deque/heap unwrapped).
748    DeclareMySyncScalar(u16),
749    /// Stack: `[init_list]` → `[]`. Declares `@name` as atomic array.
750    DeclareMySyncArray(u16),
751    /// Stack: `[init_list]` → `[]`. Declares `%name` as atomic hash.
752    DeclareMySyncHash(u16),
753    /// Register [`RuntimeSubDecl`] at index (nested `sub`, including inside `BEGIN`).
754    RuntimeSubDecl(u16),
755    /// `tie $x | @arr | %h, 'Class', ...` — stack bottom = class expr, then user args; `argc` = `1 + args.len()`.
756    /// `target_kind`: 0 = scalar (`TIESCALAR`), 1 = array (`TIEARRAY`), 2 = hash (`TIEHASH`). `name_idx` = bare name.
757    Tie {
758        target_kind: u8,
759        name_idx: u16,
760        argc: u8,
761    },
762    /// `format NAME =` … — index into [`Chunk::format_decls`]; installs into current package at run time.
763    FormatDecl(u16),
764    /// `use overload 'op' => 'method', …` — index into [`Chunk::use_overload_entries`].
765    UseOverload(u16),
766    /// Scalar `$x OP= $rhs` — uses [`Scope::atomic_mutate`] so `mysync` scalars are RMW-safe.
767    /// Stack: `[rhs]` → `[result]`. `op` byte is from [`crate::compiler::scalar_compound_op_to_byte`].
768    ScalarCompoundAssign {
769        name_idx: u16,
770        op: u8,
771    },
772
773    // ── Special ──
774    /// Set `${^GLOBAL_PHASE}` on the interpreter. See [`GP_START`] … [`GP_END`].
775    SetGlobalPhase(u8),
776    Halt,
777
778    // ── Streaming map (appended — do not reorder earlier op tags) ─────────────
779    /// `maps { BLOCK } LIST` — stack: \[list\] → lazy iterator (pull-based; stryke extension).
780    MapsWithBlock(u16),
781    /// `flat_maps { BLOCK } LIST` — like [`Op::MapsWithBlock`] with `flat_map`-style flattening.
782    MapsFlatMapWithBlock(u16),
783    /// `maps EXPR, LIST` — index into [`Chunk::map_expr_entries`]; stack: \[list\] → iterator.
784    MapsWithExpr(u16),
785    /// `flat_maps EXPR, LIST` — same pools as [`Op::MapsWithExpr`].
786    MapsFlatMapWithExpr(u16),
787    /// `filter` / `fi` `{ BLOCK } LIST` — stack: \[list\] → lazy iterator (stryke; `grep` remains eager).
788    FilterWithBlock(u16),
789    /// `filter` / `fi` `EXPR, LIST` — index into [`Chunk::grep_expr_entries`]; stack: \[list\] → iterator.
790    FilterWithExpr(u16),
791}
792
/// `${^GLOBAL_PHASE}` values emitted with [`Op::SetGlobalPhase`] (matches Perl’s phase strings).
pub const GP_START: u8 = 0;
/// Reserved; stock Perl 5 keeps `${^GLOBAL_PHASE}` as **`START`** during `UNITCHECK` blocks.
pub const GP_UNITCHECK: u8 = 1;
/// `CHECK` phase.
pub const GP_CHECK: u8 = 2;
/// `INIT` phase.
pub const GP_INIT: u8 = 3;
/// `RUN` phase (main program body).
pub const GP_RUN: u8 = 4;
/// `END` phase.
pub const GP_END: u8 = 5;
801
/// Built-in function IDs for CallBuiltin dispatch.
///
/// NOTE: discriminants are implicit and contiguous from `Length = 0`;
/// [`BuiltinId::from_u16`] transmutes a raw `u16` on that basis. Append new
/// variants only at the end (sections below marked "appended"); never reorder
/// or remove existing ones — serialized bytecode stores these as raw numbers.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[repr(u16)]
pub enum BuiltinId {
    // String
    Length = 0,
    Chomp,
    Chop,
    Substr,
    Index,
    Rindex,
    Uc,
    Lc,
    Ucfirst,
    Lcfirst,
    Chr,
    Ord,
    Hex,
    Oct,
    Join,
    Split,
    Sprintf,

    // Numeric
    Abs,
    Int,
    Sqrt,

    // Type
    Defined,
    Ref,
    Scalar,

    // Array
    Splice,
    Reverse,
    Sort,
    Unshift,

    // Hash
    // (no hash-specific IDs yet)

    // I/O
    Open,
    Close,
    Eof,
    ReadLine,
    Printf,

    // System
    System,
    Exec,
    Exit,
    Die,
    Warn,
    Chdir,
    Mkdir,
    Unlink,

    // Control
    Eval,
    Do,
    Require,

    // OOP
    Bless,
    Caller,

    // Parallel
    PMap,
    PGrep,
    PFor,
    PSort,
    Fan,

    // Map/Grep (block-based — need special handling)
    MapBlock,
    GrepBlock,
    SortBlock,

    // Math (appended — do not reorder earlier IDs)
    Sin,
    Cos,
    Atan2,
    Exp,
    Log,
    Rand,
    Srand,

    // String (appended)
    Crypt,
    Fc,
    Pos,
    Study,

    Stat,
    Lstat,
    Link,
    Symlink,
    Readlink,
    Glob,

    Opendir,
    Readdir,
    Closedir,
    Rewinddir,
    Telldir,
    Seekdir,
    /// Read entire file as UTF-8 (`slurp $path`).
    Slurp,
    /// Blocking HTTP GET (`fetch_url $url`).
    FetchUrl,
    /// `pchannel()` — `(tx, rx)` as a two-element list.
    Pchannel,
    /// Parallel recursive glob (`glob_par`).
    GlobPar,
    /// `deque()` — empty deque.
    DequeNew,
    /// `heap(fn { })` — empty heap with comparator.
    HeapNew,
    /// `pipeline(...)` — lazy iterator (filter/map/take/collect).
    Pipeline,
    /// `capture("cmd")` — structured stdout/stderr/exit (via `sh -c`).
    Capture,
    /// `ppool(N)` — persistent thread pool (`submit` / `collect`).
    Ppool,
    /// Scalar/list context query (`wantarray`).
    Wantarray,
    /// `rename OLD, NEW`
    Rename,
    /// `chmod MODE, ...`
    Chmod,
    /// `chown UID, GID, ...`
    Chown,
    /// `pselect($rx1, $rx2, ...)` — multiplexed recv; returns `(value, index)`.
    Pselect,
    /// `barrier(N)` — thread barrier (`->wait`).
    BarrierNew,
    /// `par_pipeline(...)` — list form: same as `pipeline` but parallel `filter`/`map` on `collect()`.
    ParPipeline,
    /// `glob_par(..., progress => EXPR)` — last stack arg is truthy progress flag.
    GlobParProgress,
    /// `par_pipeline_stream(...)` — streaming pipeline with bounded channels between stages.
    ParPipelineStream,
    /// `par_sed(PATTERN, REPLACEMENT, FILES...)` — parallel in-place regex substitution per file.
    ParSed,
    /// `par_sed(..., progress => EXPR)` — last stack arg is truthy progress flag.
    ParSedProgress,
    /// `each EXPR` — matches tree interpreter (returns empty list).
    Each,
    /// `` `cmd` `` / `qx{...}` — stdout string via `sh -c` (Perl readpipe); sets `$?`.
    Readpipe,
    /// `readline` / `<HANDLE>` in **list** context — all remaining lines until EOF (Perl `readline` list semantics).
    ReadLineList,
    /// `readdir` in **list** context — all names not yet returned (Perl drains the rest of the stream).
    ReaddirList,
    /// `ssh HOST, CMD, …` / `ssh(HOST, …)` — `execvp` style `ssh` only (no shell).
    Ssh,
    /// `rmdir LIST` — remove empty directories; returns count removed (appended ID).
    Rmdir,
    /// `utime ATIME, MTIME, LIST` — set access/mod times (Unix).
    Utime,
    /// `umask EXPR` / `umask()` — process file mode creation mask (Unix).
    Umask,
    /// `getcwd` / `Cwd::getcwd` / `CORE::getcwd`.
    Getcwd,
    /// `pipe READHANDLE, WRITEHANDLE` — OS pipe ends (Unix).
    Pipe,
    /// `files` / `files DIR` — list file names in a directory (default: `.`).
    Files,
    /// `filesf` / `filesf DIR` / `f` — list only regular file names in a directory (default: `.`).
    Filesf,
    /// `fr DIR` — list only regular file names recursively (default: `.`).
    FilesfRecursive,
    /// `dirs` / `dirs DIR` / `d` — list subdirectory names in a directory (default: `.`).
    Dirs,
    /// `dr DIR` — list subdirectory paths recursively (default: `.`).
    DirsRecursive,
    /// `sym_links` / `sym_links DIR` — list symlink names in a directory (default: `.`).
    SymLinks,
    /// `sockets` / `sockets DIR` — list Unix socket names in a directory (default: `.`).
    Sockets,
    /// `pipes` / `pipes DIR` — list named-pipe (FIFO) names in a directory (default: `.`).
    Pipes,
    /// `block_devices` / `block_devices DIR` — list block device names in a directory (default: `.`).
    BlockDevices,
    /// `char_devices` / `char_devices DIR` — list character device names in a directory (default: `.`).
    CharDevices,
    /// `exe` / `exe DIR` — list executable file names in a directory (default: `.`).
    /// NOTE: must remain the **last** variant — [`BuiltinId::from_u16`] bounds-checks against it.
    Executables,
}
992
993impl BuiltinId {
994    pub fn from_u16(v: u16) -> Option<Self> {
995        if v <= Self::Executables as u16 {
996            Some(unsafe { std::mem::transmute::<u16, BuiltinId>(v) })
997        } else {
998            None
999        }
1000    }
1001}
1002
/// A compiled chunk of bytecode with its constant pools.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Chunk {
    /// Compiled instruction stream ([`Op`] sequence).
    pub ops: Vec<Op>,
    /// Constant pool: string literals, regex patterns, etc.
    #[serde(with = "crate::pec::constants_pool_codec")]
    pub constants: Vec<PerlValue>,
    /// Name pool: variable names, sub names (interned/deduped).
    pub names: Vec<String>,
    /// Source line for each op (parallel array for error reporting).
    pub lines: Vec<usize>,
    /// Optional link from each op to the originating [`Expr`] (pool index into [`Self::ast_expr_pool`]).
    /// Filled for ops emitted from [`crate::compiler::Compiler::compile_expr_ctx`]; other paths leave `None`.
    pub op_ast_expr: Vec<Option<u32>>,
    /// Interned [`Expr`] nodes referenced by [`Self::op_ast_expr`] (for debugging / tooling).
    pub ast_expr_pool: Vec<Expr>,
    /// Compiled subroutine entry points: (name_index, op_index, uses_stack_args).
    /// When `uses_stack_args` is true, the Call op leaves arguments on the value
    /// stack and the sub reads them via `GetArg(idx)` instead of `shift @_`.
    pub sub_entries: Vec<(u16, usize, bool)>,
    /// AST blocks for map/grep/sort/parallel operations.
    /// Referenced by block-based opcodes via u16 index.
    pub blocks: Vec<Block>,
    /// When `Some((start, end))`, `blocks[i]` is also lowered to `ops[start..end]` (exclusive `end`)
    /// with trailing [`Op::BlockReturnValue`]. VM uses opcodes; otherwise the AST in `blocks[i]`.
    pub block_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// Resolved [`Op::CallStaticSubId`] targets: subroutine entry IP, stack-args calling convention,
    /// and stash name pool index (qualified key matching [`Interpreter::subs`]).
    pub static_sub_calls: Vec<(usize, bool, u16)>,
    /// Assign targets for `s///` / `tr///` bytecode (LHS expressions).
    pub lvalues: Vec<Expr>,
    /// `struct Name { ... }` definitions in this chunk (registered on the interpreter at VM start).
    pub struct_defs: Vec<StructDef>,
    /// `enum Name { ... }` definitions in this chunk (registered on the interpreter at VM start).
    pub enum_defs: Vec<EnumDef>,
    /// `class Name extends ... impl ... { ... }` definitions.
    pub class_defs: Vec<ClassDef>,
    /// `trait Name { ... }` definitions.
    pub trait_defs: Vec<TraitDef>,
    /// `given (topic) { body }` — topic expression + body (when/default handled by interpreter).
    pub given_entries: Vec<(Expr, Block)>,
    /// When `Some((start, end))`, `given_entries[i].0` (topic) is lowered to `ops[start..end]` +
    /// [`Op::BlockReturnValue`].
    pub given_topic_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// `eval_timeout timeout_expr { body }` — evaluated at runtime.
    pub eval_timeout_entries: Vec<(Expr, Block)>,
    /// When `Some((start, end))`, `eval_timeout_entries[i].0` (timeout expr) is lowered to
    /// `ops[start..end]` with trailing [`Op::BlockReturnValue`].
    pub eval_timeout_expr_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// Algebraic `match (subject) { arms }`.
    pub algebraic_match_entries: Vec<(Expr, Vec<MatchArm>)>,
    /// When `Some((start, end))`, `algebraic_match_entries[i].0` (subject) is lowered to
    /// `ops[start..end]` + [`Op::BlockReturnValue`].
    pub algebraic_match_subject_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// Nested / runtime `sub` declarations (see [`Op::RuntimeSubDecl`]).
    pub runtime_sub_decls: Vec<RuntimeSubDecl>,
    /// Stryke `fn ($a, …)` / hash-destruct params for [`Op::MakeCodeRef`] (second operand is pool index).
    pub code_ref_sigs: Vec<Vec<SubSigParam>>,
    /// `par_lines PATH, fn { } [, progress => EXPR]` — evaluated by interpreter inside VM.
    pub par_lines_entries: Vec<(Expr, Expr, Option<Expr>)>,
    /// `par_walk PATH, fn { } [, progress => EXPR]` — evaluated by interpreter inside VM.
    pub par_walk_entries: Vec<(Expr, Expr, Option<Expr>)>,
    /// `pwatch GLOB, fn { }` — evaluated by interpreter inside VM.
    pub pwatch_entries: Vec<(Expr, Expr)>,
    /// `substr $var, OFF, LEN, REPL` — four-arg form (mutates `LHS`); evaluated by interpreter inside VM.
    pub substr_four_arg_entries: Vec<(Expr, Expr, Option<Expr>, Expr)>,
    /// `keys EXPR` when `EXPR` is not bare `%h`.
    pub keys_expr_entries: Vec<Expr>,
    /// When `Some((start, end))`, `keys_expr_entries[i]` is lowered to `ops[start..end]` +
    /// [`Op::BlockReturnValue`] (operand only; [`Op::KeysExpr`] still applies `keys` to the value).
    pub keys_expr_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// `values EXPR` when not bare `%h`.
    pub values_expr_entries: Vec<Expr>,
    /// Like [`Self::keys_expr_bytecode_ranges`], for [`Self::values_expr_entries`].
    pub values_expr_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// `delete EXPR` when not the fast `%h{k}` lowering.
    pub delete_expr_entries: Vec<Expr>,
    /// `exists EXPR` when not the fast `%h{k}` lowering.
    pub exists_expr_entries: Vec<Expr>,
    /// `push` when the array operand is not a bare `@name` (e.g. `push $aref, ...`).
    pub push_expr_entries: Vec<(Expr, Vec<Expr>)>,
    /// `pop` operand when not a bare `@name` (cf. [`Self::push_expr_entries`]).
    pub pop_expr_entries: Vec<Expr>,
    /// `shift` operand when not a bare `@name` (cf. [`Self::push_expr_entries`]).
    pub shift_expr_entries: Vec<Expr>,
    /// `unshift` with a non-bare array operand: (array expr, value exprs).
    pub unshift_expr_entries: Vec<(Expr, Vec<Expr>)>,
    /// `splice` with a dynamic array operand — see [`SpliceExprEntry`] for the tuple layout.
    pub splice_expr_entries: Vec<SpliceExprEntry>,
    /// `map EXPR, LIST` — map expression (list context) with `$_` set to each element.
    pub map_expr_entries: Vec<Expr>,
    /// When `Some((start, end))`, `map_expr_entries[i]` is lowered like [`Self::grep_expr_bytecode_ranges`].
    pub map_expr_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// `grep EXPR, LIST` — filter expression evaluated with `$_` set to each element.
    pub grep_expr_entries: Vec<Expr>,
    /// When `Some((start, end))`, `grep_expr_entries[i]` is also lowered to `ops[start..end]`
    /// (exclusive `end`) with trailing [`Op::BlockReturnValue`], like [`Self::block_bytecode_ranges`].
    pub grep_expr_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// Right-hand expression for [`Op::RegexFlipFlopExprRhs`] — boolean context (bare `m//` is `$_ =~ m//`).
    pub regex_flip_flop_rhs_expr_entries: Vec<Expr>,
    /// When `Some((start, end))`, `regex_flip_flop_rhs_expr_entries[i]` is lowered to `ops[start..end]` +
    /// [`Op::BlockReturnValue`].
    pub regex_flip_flop_rhs_expr_bytecode_ranges: Vec<Option<(usize, usize)>>,
    /// Number of flip-flop slots ([`Op::ScalarFlipFlop`], [`Op::RegexFlipFlop`], [`Op::RegexEofFlipFlop`],
    /// [`Op::RegexFlipFlopExprRhs`], [`Op::RegexFlipFlopDotLineRhs`]); VM resets flip-flop vectors.
    pub flip_flop_slots: u16,
    /// `format NAME =` bodies: basename + lines between `=` and `.` (see lexer).
    pub format_decls: Vec<(String, Vec<String>)>,
    /// `use overload` pair lists (installed into current package at run time).
    pub use_overload_entries: Vec<Vec<(String, String)>>,
}
1109
1110impl Chunk {
1111    /// Look up a compiled subroutine entry by stash name pool index.
1112    pub fn find_sub_entry(&self, name_idx: u16) -> Option<(usize, bool)> {
1113        self.sub_entries
1114            .iter()
1115            .find(|(n, _, _)| *n == name_idx)
1116            .map(|(_, ip, stack_args)| (*ip, *stack_args))
1117    }
1118
1119    pub fn new() -> Self {
1120        Self {
1121            ops: Vec::with_capacity(256),
1122            constants: Vec::new(),
1123            names: Vec::new(),
1124            lines: Vec::new(),
1125            op_ast_expr: Vec::new(),
1126            ast_expr_pool: Vec::new(),
1127            sub_entries: Vec::new(),
1128            blocks: Vec::new(),
1129            block_bytecode_ranges: Vec::new(),
1130            static_sub_calls: Vec::new(),
1131            lvalues: Vec::new(),
1132            struct_defs: Vec::new(),
1133            enum_defs: Vec::new(),
1134            class_defs: Vec::new(),
1135            trait_defs: Vec::new(),
1136            given_entries: Vec::new(),
1137            given_topic_bytecode_ranges: Vec::new(),
1138            eval_timeout_entries: Vec::new(),
1139            eval_timeout_expr_bytecode_ranges: Vec::new(),
1140            algebraic_match_entries: Vec::new(),
1141            algebraic_match_subject_bytecode_ranges: Vec::new(),
1142            runtime_sub_decls: Vec::new(),
1143            code_ref_sigs: Vec::new(),
1144            par_lines_entries: Vec::new(),
1145            par_walk_entries: Vec::new(),
1146            pwatch_entries: Vec::new(),
1147            substr_four_arg_entries: Vec::new(),
1148            keys_expr_entries: Vec::new(),
1149            keys_expr_bytecode_ranges: Vec::new(),
1150            values_expr_entries: Vec::new(),
1151            values_expr_bytecode_ranges: Vec::new(),
1152            delete_expr_entries: Vec::new(),
1153            exists_expr_entries: Vec::new(),
1154            push_expr_entries: Vec::new(),
1155            pop_expr_entries: Vec::new(),
1156            shift_expr_entries: Vec::new(),
1157            unshift_expr_entries: Vec::new(),
1158            splice_expr_entries: Vec::new(),
1159            map_expr_entries: Vec::new(),
1160            map_expr_bytecode_ranges: Vec::new(),
1161            grep_expr_entries: Vec::new(),
1162            grep_expr_bytecode_ranges: Vec::new(),
1163            regex_flip_flop_rhs_expr_entries: Vec::new(),
1164            regex_flip_flop_rhs_expr_bytecode_ranges: Vec::new(),
1165            flip_flop_slots: 0,
1166            format_decls: Vec::new(),
1167            use_overload_entries: Vec::new(),
1168        }
1169    }
1170
1171    /// Pool index for [`Op::FormatDecl`].
1172    pub fn add_format_decl(&mut self, name: String, lines: Vec<String>) -> u16 {
1173        let idx = self.format_decls.len() as u16;
1174        self.format_decls.push((name, lines));
1175        idx
1176    }
1177
1178    /// Pool index for [`Op::UseOverload`].
1179    pub fn add_use_overload(&mut self, pairs: Vec<(String, String)>) -> u16 {
1180        let idx = self.use_overload_entries.len() as u16;
1181        self.use_overload_entries.push(pairs);
1182        idx
1183    }
1184
    /// Allocate a slot index for [`Op::ScalarFlipFlop`] / [`Op::RegexFlipFlop`] / [`Op::RegexEofFlipFlop`] /
    /// [`Op::RegexFlipFlopExprRhs`] / [`Op::RegexFlipFlopDotLineRhs`] flip-flop state.
    ///
    /// NOTE(review): `saturating_add` means that after 65 535 allocations every
    /// further call returns `u16::MAX`, so those flip-flops would share one
    /// state slot — presumably acceptable in practice; confirm if programs
    /// with that many flip-flops are expected.
    pub fn alloc_flip_flop_slot(&mut self) -> u16 {
        let id = self.flip_flop_slots;
        self.flip_flop_slots = self.flip_flop_slots.saturating_add(1);
        id
    }
1192
1193    /// `map EXPR, LIST` — pool index for [`Op::MapWithExpr`].
1194    pub fn add_map_expr_entry(&mut self, expr: Expr) -> u16 {
1195        let idx = self.map_expr_entries.len() as u16;
1196        self.map_expr_entries.push(expr);
1197        idx
1198    }
1199
1200    /// `grep EXPR, LIST` — pool index for [`Op::GrepWithExpr`].
1201    pub fn add_grep_expr_entry(&mut self, expr: Expr) -> u16 {
1202        let idx = self.grep_expr_entries.len() as u16;
1203        self.grep_expr_entries.push(expr);
1204        idx
1205    }
1206
1207    /// Regex flip-flop with compound RHS — pool index for [`Op::RegexFlipFlopExprRhs`].
1208    pub fn add_regex_flip_flop_rhs_expr_entry(&mut self, expr: Expr) -> u16 {
1209        let idx = self.regex_flip_flop_rhs_expr_entries.len() as u16;
1210        self.regex_flip_flop_rhs_expr_entries.push(expr);
1211        idx
1212    }
1213
1214    /// `keys EXPR` (dynamic) — pool index for [`Op::KeysExpr`].
1215    pub fn add_keys_expr_entry(&mut self, expr: Expr) -> u16 {
1216        let idx = self.keys_expr_entries.len() as u16;
1217        self.keys_expr_entries.push(expr);
1218        idx
1219    }
1220
1221    /// `values EXPR` (dynamic) — pool index for [`Op::ValuesExpr`].
1222    pub fn add_values_expr_entry(&mut self, expr: Expr) -> u16 {
1223        let idx = self.values_expr_entries.len() as u16;
1224        self.values_expr_entries.push(expr);
1225        idx
1226    }
1227
1228    /// `delete EXPR` (dynamic operand) — pool index for [`Op::DeleteExpr`].
1229    pub fn add_delete_expr_entry(&mut self, expr: Expr) -> u16 {
1230        let idx = self.delete_expr_entries.len() as u16;
1231        self.delete_expr_entries.push(expr);
1232        idx
1233    }
1234
1235    /// `exists EXPR` (dynamic operand) — pool index for [`Op::ExistsExpr`].
1236    pub fn add_exists_expr_entry(&mut self, expr: Expr) -> u16 {
1237        let idx = self.exists_expr_entries.len() as u16;
1238        self.exists_expr_entries.push(expr);
1239        idx
1240    }
1241
1242    pub fn add_push_expr_entry(&mut self, array: Expr, values: Vec<Expr>) -> u16 {
1243        let idx = self.push_expr_entries.len() as u16;
1244        self.push_expr_entries.push((array, values));
1245        idx
1246    }
1247
1248    pub fn add_pop_expr_entry(&mut self, array: Expr) -> u16 {
1249        let idx = self.pop_expr_entries.len() as u16;
1250        self.pop_expr_entries.push(array);
1251        idx
1252    }
1253
1254    pub fn add_shift_expr_entry(&mut self, array: Expr) -> u16 {
1255        let idx = self.shift_expr_entries.len() as u16;
1256        self.shift_expr_entries.push(array);
1257        idx
1258    }
1259
1260    pub fn add_unshift_expr_entry(&mut self, array: Expr, values: Vec<Expr>) -> u16 {
1261        let idx = self.unshift_expr_entries.len() as u16;
1262        self.unshift_expr_entries.push((array, values));
1263        idx
1264    }
1265
1266    pub fn add_splice_expr_entry(
1267        &mut self,
1268        array: Expr,
1269        offset: Option<Expr>,
1270        length: Option<Expr>,
1271        replacement: Vec<Expr>,
1272    ) -> u16 {
1273        let idx = self.splice_expr_entries.len() as u16;
1274        self.splice_expr_entries
1275            .push((array, offset, length, replacement));
1276        idx
1277    }
1278
1279    /// Four-arg `substr` — returns pool index for [`Op::SubstrFourArg`].
1280    pub fn add_substr_four_arg_entry(
1281        &mut self,
1282        string: Expr,
1283        offset: Expr,
1284        length: Option<Expr>,
1285        replacement: Expr,
1286    ) -> u16 {
1287        let idx = self.substr_four_arg_entries.len() as u16;
1288        self.substr_four_arg_entries
1289            .push((string, offset, length, replacement));
1290        idx
1291    }
1292
1293    /// `par_lines PATH, fn { } [, progress => EXPR]` — returns pool index for [`Op::ParLines`].
1294    pub fn add_par_lines_entry(
1295        &mut self,
1296        path: Expr,
1297        callback: Expr,
1298        progress: Option<Expr>,
1299    ) -> u16 {
1300        let idx = self.par_lines_entries.len() as u16;
1301        self.par_lines_entries.push((path, callback, progress));
1302        idx
1303    }
1304
1305    /// `par_walk PATH, fn { } [, progress => EXPR]` — returns pool index for [`Op::ParWalk`].
1306    pub fn add_par_walk_entry(
1307        &mut self,
1308        path: Expr,
1309        callback: Expr,
1310        progress: Option<Expr>,
1311    ) -> u16 {
1312        let idx = self.par_walk_entries.len() as u16;
1313        self.par_walk_entries.push((path, callback, progress));
1314        idx
1315    }
1316
1317    /// `pwatch GLOB, fn { }` — returns pool index for [`Op::Pwatch`].
1318    pub fn add_pwatch_entry(&mut self, path: Expr, callback: Expr) -> u16 {
1319        let idx = self.pwatch_entries.len() as u16;
1320        self.pwatch_entries.push((path, callback));
1321        idx
1322    }
1323
1324    /// `given (EXPR) { ... }` — returns pool index for [`Op::Given`].
1325    pub fn add_given_entry(&mut self, topic: Expr, body: Block) -> u16 {
1326        let idx = self.given_entries.len() as u16;
1327        self.given_entries.push((topic, body));
1328        idx
1329    }
1330
1331    /// `eval_timeout SECS { ... }` — returns pool index for [`Op::EvalTimeout`].
1332    pub fn add_eval_timeout_entry(&mut self, timeout: Expr, body: Block) -> u16 {
1333        let idx = self.eval_timeout_entries.len() as u16;
1334        self.eval_timeout_entries.push((timeout, body));
1335        idx
1336    }
1337
1338    /// Algebraic `match` — returns pool index for [`Op::AlgebraicMatch`].
1339    pub fn add_algebraic_match_entry(&mut self, subject: Expr, arms: Vec<MatchArm>) -> u16 {
1340        let idx = self.algebraic_match_entries.len() as u16;
1341        self.algebraic_match_entries.push((subject, arms));
1342        idx
1343    }
1344
1345    /// Store an AST block and return its index.
1346    pub fn add_block(&mut self, block: Block) -> u16 {
1347        let idx = self.blocks.len() as u16;
1348        self.blocks.push(block);
1349        idx
1350    }
1351
1352    /// Pool index for [`Op::MakeCodeRef`] signature (`stryke` extension); use empty vec for legacy `fn { }`.
1353    pub fn add_code_ref_sig(&mut self, params: Vec<SubSigParam>) -> u16 {
1354        let idx = self.code_ref_sigs.len();
1355        if idx > u16::MAX as usize {
1356            panic!("too many anonymous sub signatures in one chunk");
1357        }
1358        self.code_ref_sigs.push(params);
1359        idx as u16
1360    }
1361
1362    /// Store an assignable expression (LHS of `s///` / `tr///`) and return its index.
1363    pub fn add_lvalue_expr(&mut self, e: Expr) -> u16 {
1364        let idx = self.lvalues.len() as u16;
1365        self.lvalues.push(e);
1366        idx
1367    }
1368
1369    /// Intern a name, returning its pool index.
1370    pub fn intern_name(&mut self, name: &str) -> u16 {
1371        if let Some(idx) = self.names.iter().position(|n| n == name) {
1372            return idx as u16;
1373        }
1374        let idx = self.names.len() as u16;
1375        self.names.push(name.to_string());
1376        idx
1377    }
1378
1379    /// Add a constant to the pool, returning its index.
1380    pub fn add_constant(&mut self, val: PerlValue) -> u16 {
1381        // Dedup string constants
1382        if let Some(ref s) = val.as_str() {
1383            for (i, c) in self.constants.iter().enumerate() {
1384                if let Some(cs) = c.as_str() {
1385                    if cs == *s {
1386                        return i as u16;
1387                    }
1388                }
1389            }
1390        }
1391        let idx = self.constants.len() as u16;
1392        self.constants.push(val);
1393        idx
1394    }
1395
1396    /// Append an op with source line info.
1397    #[inline]
1398    pub fn emit(&mut self, op: Op, line: usize) -> usize {
1399        self.emit_with_ast_idx(op, line, None)
1400    }
1401
1402    /// Like [`Self::emit`] but attach an optional interned AST [`Expr`] pool index (see [`Self::op_ast_expr`]).
1403    #[inline]
1404    pub fn emit_with_ast_idx(&mut self, op: Op, line: usize, ast: Option<u32>) -> usize {
1405        let idx = self.ops.len();
1406        self.ops.push(op);
1407        self.lines.push(line);
1408        self.op_ast_expr.push(ast);
1409        idx
1410    }
1411
1412    /// Resolve the originating expression for an instruction pointer, if recorded.
1413    #[inline]
1414    pub fn ast_expr_at(&self, ip: usize) -> Option<&Expr> {
1415        let id = (*self.op_ast_expr.get(ip)?)?;
1416        self.ast_expr_pool.get(id as usize)
1417    }
1418
1419    /// Patch a jump instruction at `idx` to target the current position.
1420    pub fn patch_jump_here(&mut self, idx: usize) {
1421        let target = self.ops.len();
1422        self.patch_jump_to(idx, target);
1423    }
1424
1425    /// Patch a jump instruction at `idx` to target an explicit op address.
1426    pub fn patch_jump_to(&mut self, idx: usize, target: usize) {
1427        match &mut self.ops[idx] {
1428            Op::Jump(ref mut t)
1429            | Op::JumpIfTrue(ref mut t)
1430            | Op::JumpIfFalse(ref mut t)
1431            | Op::JumpIfFalseKeep(ref mut t)
1432            | Op::JumpIfTrueKeep(ref mut t)
1433            | Op::JumpIfDefinedKeep(ref mut t) => *t = target,
1434            _ => panic!("patch_jump_to on non-jump op at {}", idx),
1435        }
1436    }
1437
1438    pub fn patch_try_push_catch(&mut self, idx: usize, catch_ip: usize) {
1439        match &mut self.ops[idx] {
1440            Op::TryPush { catch_ip: c, .. } => *c = catch_ip,
1441            _ => panic!("patch_try_push_catch on non-TryPush op at {}", idx),
1442        }
1443    }
1444
1445    pub fn patch_try_push_finally(&mut self, idx: usize, finally_ip: Option<usize>) {
1446        match &mut self.ops[idx] {
1447            Op::TryPush { finally_ip: f, .. } => *f = finally_ip,
1448            _ => panic!("patch_try_push_finally on non-TryPush op at {}", idx),
1449        }
1450    }
1451
1452    pub fn patch_try_push_after(&mut self, idx: usize, after_ip: usize) {
1453        match &mut self.ops[idx] {
1454            Op::TryPush { after_ip: a, .. } => *a = after_ip,
1455            _ => panic!("patch_try_push_after on non-TryPush op at {}", idx),
1456        }
1457    }
1458
1459    /// Current op count (next emit position).
1460    #[inline]
1461    pub fn len(&self) -> usize {
1462        self.ops.len()
1463    }
1464
1465    #[inline]
1466    pub fn is_empty(&self) -> bool {
1467        self.ops.is_empty()
1468    }
1469
1470    /// Human-readable listing: subroutine entry points and each op with its source line (javap / `dis`-style).
1471    pub fn disassemble(&self) -> String {
1472        use std::fmt::Write;
1473        let mut out = String::new();
1474        for (i, n) in self.names.iter().enumerate() {
1475            let _ = writeln!(out, "; name[{}] = {}", i, n);
1476        }
1477        let _ = writeln!(out, "; sub_entries:");
1478        for (ni, ip, stack_args) in &self.sub_entries {
1479            let name = self
1480                .names
1481                .get(*ni as usize)
1482                .map(|s| s.as_str())
1483                .unwrap_or("?");
1484            let _ = writeln!(out, ";   {} @ {} stack_args={}", name, ip, stack_args);
1485        }
1486        for (i, op) in self.ops.iter().enumerate() {
1487            let line = self.lines.get(i).copied().unwrap_or(0);
1488            let ast = self
1489                .op_ast_expr
1490                .get(i)
1491                .copied()
1492                .flatten()
1493                .map(|id| id.to_string())
1494                .unwrap_or_else(|| "-".into());
1495            let _ = writeln!(out, "{:04} {:>5} {:>6}  {:?}", i, line, ast, op);
1496        }
1497        out
1498    }
1499
1500    /// Peephole pass: fuse common multi-op sequences into single superinstructions,
1501    /// then compact by removing Nop slots and remapping all jump targets.
1502    pub fn peephole_fuse(&mut self) {
1503        let len = self.ops.len();
1504        if len < 2 {
1505            return;
1506        }
1507        // Pass 1: fuse OP + Pop → OPVoid
1508        let mut i = 0;
1509        while i + 1 < len {
1510            if matches!(self.ops[i + 1], Op::Pop) {
1511                let replacement = match &self.ops[i] {
1512                    Op::AddAssignSlotSlot(d, s) => Some(Op::AddAssignSlotSlotVoid(*d, *s)),
1513                    Op::PreIncSlot(s) => Some(Op::PreIncSlotVoid(*s)),
1514                    Op::ConcatAppendSlot(s) => Some(Op::ConcatAppendSlotVoid(*s)),
1515                    _ => None,
1516                };
1517                if let Some(op) = replacement {
1518                    self.ops[i] = op;
1519                    self.ops[i + 1] = Op::Nop;
1520                    i += 2;
1521                    continue;
1522                }
1523            }
1524            i += 1;
1525        }
1526        // Pass 2: fuse multi-op patterns
1527        // Helper: check if any jump targets position `pos`.
1528        let has_jump_to = |ops: &[Op], pos: usize| -> bool {
1529            for op in ops {
1530                let t = match op {
1531                    Op::Jump(t)
1532                    | Op::JumpIfFalse(t)
1533                    | Op::JumpIfTrue(t)
1534                    | Op::JumpIfFalseKeep(t)
1535                    | Op::JumpIfTrueKeep(t)
1536                    | Op::JumpIfDefinedKeep(t) => Some(*t),
1537                    _ => None,
1538                };
1539                if t == Some(pos) {
1540                    return true;
1541                }
1542            }
1543            false
1544        };
1545        let len = self.ops.len();
1546        if len >= 4 {
1547            i = 0;
1548            while i + 3 < len {
1549                if let (
1550                    Op::GetScalarSlot(slot),
1551                    Op::LoadInt(n),
1552                    Op::NumLt,
1553                    Op::JumpIfFalse(target),
1554                ) = (
1555                    &self.ops[i],
1556                    &self.ops[i + 1],
1557                    &self.ops[i + 2],
1558                    &self.ops[i + 3],
1559                ) {
1560                    if let Ok(n32) = i32::try_from(*n) {
1561                        // Don't fuse if any jump targets the ops that will become Nop.
1562                        // This prevents breaking short-circuit &&/|| that jump to the
1563                        // JumpIfFalse for the while condition exit check.
1564                        if has_jump_to(&self.ops, i + 1)
1565                            || has_jump_to(&self.ops, i + 2)
1566                            || has_jump_to(&self.ops, i + 3)
1567                        {
1568                            i += 1;
1569                            continue;
1570                        }
1571                        let slot = *slot;
1572                        let target = *target;
1573                        self.ops[i] = Op::SlotLtIntJumpIfFalse(slot, n32, target);
1574                        self.ops[i + 1] = Op::Nop;
1575                        self.ops[i + 2] = Op::Nop;
1576                        self.ops[i + 3] = Op::Nop;
1577                        i += 4;
1578                        continue;
1579                    }
1580                }
1581                i += 1;
1582            }
1583        }
1584        // Compact once so that pass 3 sees a Nop-free op stream and can match
1585        // adjacent `PreIncSlotVoid + Jump` backedges produced by passes 1/2.
1586        self.compact_nops();
1587        // Pass 3: fuse loop backedge
1588        //   PreIncSlotVoid(s)  + Jump(top)
1589        // where ops[top] is SlotLtIntJumpIfFalse(s, limit, exit)
1590        // becomes
1591        //   SlotIncLtIntJumpBack(s, limit, top + 1)   // body falls through
1592        //   Nop                                       // was Jump
1593        // The first-iteration check at `top` is still reached from before the loop
1594        // (the loop's initial entry goes through the top test), so leaving
1595        // SlotLtIntJumpIfFalse in place keeps the entry path correct. All
1596        // subsequent iterations now skip both the inc op and the jump.
1597        let len = self.ops.len();
1598        if len >= 2 {
1599            let mut i = 0;
1600            while i + 1 < len {
1601                if let (Op::PreIncSlotVoid(s), Op::Jump(top)) = (&self.ops[i], &self.ops[i + 1]) {
1602                    let slot = *s;
1603                    let top = *top;
1604                    // Only fuse backward branches — the C-style `for` shape where `top` is
1605                    // the loop's `SlotLtIntJumpIfFalse` test and the body falls through to
1606                    // this trailing increment. A forward `Jump` that happens to land on a
1607                    // similar test is not the same shape and must not be rewritten.
1608                    if top < i {
1609                        if let Op::SlotLtIntJumpIfFalse(tslot, limit, exit) = &self.ops[top] {
1610                            // Safety: the top test's exit target must equal the fused op's
1611                            // fall-through (i + 2). Otherwise exiting the loop via
1612                            // "condition false" would land somewhere the unfused shape never
1613                            // exited to.
1614                            if *tslot == slot && *exit == i + 2 {
1615                                let limit = *limit;
1616                                let body_target = top + 1;
1617                                self.ops[i] = Op::SlotIncLtIntJumpBack(slot, limit, body_target);
1618                                self.ops[i + 1] = Op::Nop;
1619                                i += 2;
1620                                continue;
1621                            }
1622                        }
1623                    }
1624                }
1625                i += 1;
1626            }
1627        }
1628        // Pass 4: compact again — remove the Nops introduced by pass 3.
1629        self.compact_nops();
1630        // Pass 5: fuse counted-loop bodies down to a single native superinstruction.
1631        //
1632        // After pass 3 + compact, a `for (my $i = ..; $i < N; $i = $i + 1) { $sum += $i }`
1633        // loop looks like:
1634        //
1635        //     [top]        SlotLtIntJumpIfFalse(i, N, exit)
1636        //     [body_start] AddAssignSlotSlotVoid(sum, i)       ← target of the backedge
1637        //                  SlotIncLtIntJumpBack(i, N, body_start)
1638        //     [exit]       ...
1639        //
1640        // When the body is exactly one op, we fuse the AddAssign + backedge into
1641        // `AccumSumLoop(sum, i, N)`, whose handler runs the whole remaining loop in a
1642        // tight Rust `while`. Same scheme for the counted `$s .= CONST` pattern, fused
1643        // into `ConcatConstSlotLoop`.
1644        //
1645        // Safety gate: only fire when no op jumps *into* the body (other than the backedge
1646        // itself and the top test's fall-through, which isn't a jump). That keeps loops with
1647        // interior labels / `last LABEL` / `next LABEL` from being silently skipped.
1648        let len = self.ops.len();
1649        if len >= 2 {
1650            let has_inbound_jump = |ops: &[Op], pos: usize, ignore: usize| -> bool {
1651                for (j, op) in ops.iter().enumerate() {
1652                    if j == ignore {
1653                        continue;
1654                    }
1655                    let t = match op {
1656                        Op::Jump(t)
1657                        | Op::JumpIfFalse(t)
1658                        | Op::JumpIfTrue(t)
1659                        | Op::JumpIfFalseKeep(t)
1660                        | Op::JumpIfTrueKeep(t)
1661                        | Op::JumpIfDefinedKeep(t) => Some(*t),
1662                        Op::SlotLtIntJumpIfFalse(_, _, t) => Some(*t),
1663                        Op::SlotIncLtIntJumpBack(_, _, t) => Some(*t),
1664                        _ => None,
1665                    };
1666                    if t == Some(pos) {
1667                        return true;
1668                    }
1669                }
1670                false
1671            };
1672            // 5a: AddAssignSlotSlotVoid + SlotIncLtIntJumpBack → AccumSumLoop
1673            let mut i = 0;
1674            while i + 1 < len {
1675                if let (
1676                    Op::AddAssignSlotSlotVoid(sum_slot, src_slot),
1677                    Op::SlotIncLtIntJumpBack(inc_slot, limit, body_target),
1678                ) = (&self.ops[i], &self.ops[i + 1])
1679                {
1680                    if *src_slot == *inc_slot
1681                        && *body_target == i
1682                        && !has_inbound_jump(&self.ops, i, i + 1)
1683                        && !has_inbound_jump(&self.ops, i + 1, i + 1)
1684                    {
1685                        let sum_slot = *sum_slot;
1686                        let src_slot = *src_slot;
1687                        let limit = *limit;
1688                        self.ops[i] = Op::AccumSumLoop(sum_slot, src_slot, limit);
1689                        self.ops[i + 1] = Op::Nop;
1690                        i += 2;
1691                        continue;
1692                    }
1693                }
1694                i += 1;
1695            }
1696            // 5b: LoadConst + ConcatAppendSlotVoid + SlotIncLtIntJumpBack → ConcatConstSlotLoop
1697            if len >= 3 {
1698                let mut i = 0;
1699                while i + 2 < len {
1700                    if let (
1701                        Op::LoadConst(const_idx),
1702                        Op::ConcatAppendSlotVoid(s_slot),
1703                        Op::SlotIncLtIntJumpBack(inc_slot, limit, body_target),
1704                    ) = (&self.ops[i], &self.ops[i + 1], &self.ops[i + 2])
1705                    {
1706                        if *body_target == i
1707                            && !has_inbound_jump(&self.ops, i, i + 2)
1708                            && !has_inbound_jump(&self.ops, i + 1, i + 2)
1709                            && !has_inbound_jump(&self.ops, i + 2, i + 2)
1710                        {
1711                            let const_idx = *const_idx;
1712                            let s_slot = *s_slot;
1713                            let inc_slot = *inc_slot;
1714                            let limit = *limit;
1715                            self.ops[i] =
1716                                Op::ConcatConstSlotLoop(const_idx, s_slot, inc_slot, limit);
1717                            self.ops[i + 1] = Op::Nop;
1718                            self.ops[i + 2] = Op::Nop;
1719                            i += 3;
1720                            continue;
1721                        }
1722                    }
1723                    i += 1;
1724                }
1725            }
1726            // 5e: `$sum += $h{$k}` body op inside `for my $k (keys %h) { ... }`
1727            //   GetScalarSlot(sum) + GetScalarPlain(k) + GetHashElem(h) + Add
1728            //     + SetScalarSlotKeep(sum) + Pop
1729            //   → AddHashElemPlainKeyToSlot(sum, k, h)
1730            // Safe because `SetScalarSlotKeep + Pop` leaves nothing on the stack net; the fused
1731            // op is a drop-in for that sequence. No inbound jumps permitted to interior ops.
1732            if len >= 6 {
1733                let mut i = 0;
1734                while i + 5 < len {
1735                    if let (
1736                        Op::GetScalarSlot(sum_slot),
1737                        Op::GetScalarPlain(k_idx),
1738                        Op::GetHashElem(h_idx),
1739                        Op::Add,
1740                        Op::SetScalarSlotKeep(sum_slot2),
1741                        Op::Pop,
1742                    ) = (
1743                        &self.ops[i],
1744                        &self.ops[i + 1],
1745                        &self.ops[i + 2],
1746                        &self.ops[i + 3],
1747                        &self.ops[i + 4],
1748                        &self.ops[i + 5],
1749                    ) {
1750                        if *sum_slot == *sum_slot2
1751                            && (0..6).all(|off| !has_inbound_jump(&self.ops, i + off, usize::MAX))
1752                        {
1753                            let sum_slot = *sum_slot;
1754                            let k_idx = *k_idx;
1755                            let h_idx = *h_idx;
1756                            self.ops[i] = Op::AddHashElemPlainKeyToSlot(sum_slot, k_idx, h_idx);
1757                            for off in 1..=5 {
1758                                self.ops[i + off] = Op::Nop;
1759                            }
1760                            i += 6;
1761                            continue;
1762                        }
1763                    }
1764                    i += 1;
1765                }
1766            }
1767            // 5e-slot: slot-key variant of 5e, emitted when the compiler lowers `$k` (the foreach
1768            // loop variable) into a slot rather than a frame scalar.
1769            //   GetScalarSlot(sum) + GetScalarSlot(k) + GetHashElem(h) + Add
1770            //     + SetScalarSlotKeep(sum) + Pop
1771            //   → AddHashElemSlotKeyToSlot(sum, k, h)
1772            if len >= 6 {
1773                let mut i = 0;
1774                while i + 5 < len {
1775                    if let (
1776                        Op::GetScalarSlot(sum_slot),
1777                        Op::GetScalarSlot(k_slot),
1778                        Op::GetHashElem(h_idx),
1779                        Op::Add,
1780                        Op::SetScalarSlotKeep(sum_slot2),
1781                        Op::Pop,
1782                    ) = (
1783                        &self.ops[i],
1784                        &self.ops[i + 1],
1785                        &self.ops[i + 2],
1786                        &self.ops[i + 3],
1787                        &self.ops[i + 4],
1788                        &self.ops[i + 5],
1789                    ) {
1790                        if *sum_slot == *sum_slot2
1791                            && *sum_slot != *k_slot
1792                            && (0..6).all(|off| !has_inbound_jump(&self.ops, i + off, usize::MAX))
1793                        {
1794                            let sum_slot = *sum_slot;
1795                            let k_slot = *k_slot;
1796                            let h_idx = *h_idx;
1797                            self.ops[i] = Op::AddHashElemSlotKeyToSlot(sum_slot, k_slot, h_idx);
1798                            for off in 1..=5 {
1799                                self.ops[i + off] = Op::Nop;
1800                            }
1801                            i += 6;
1802                            continue;
1803                        }
1804                    }
1805                    i += 1;
1806                }
1807            }
1808            // 5d: counted hash-insert loop `$h{$i} = $i * K`
1809            //   GetScalarSlot(i) + LoadInt(k) + Mul + GetScalarSlot(i) + SetHashElem(h) + Pop
1810            //     + SlotIncLtIntJumpBack(i, limit, body_target)
1811            //   → SetHashIntTimesLoop(h, i, k, limit)
1812            if len >= 7 {
1813                let mut i = 0;
1814                while i + 6 < len {
1815                    if let (
1816                        Op::GetScalarSlot(gs1),
1817                        Op::LoadInt(k),
1818                        Op::Mul,
1819                        Op::GetScalarSlot(gs2),
1820                        Op::SetHashElem(h_idx),
1821                        Op::Pop,
1822                        Op::SlotIncLtIntJumpBack(inc_slot, limit, body_target),
1823                    ) = (
1824                        &self.ops[i],
1825                        &self.ops[i + 1],
1826                        &self.ops[i + 2],
1827                        &self.ops[i + 3],
1828                        &self.ops[i + 4],
1829                        &self.ops[i + 5],
1830                        &self.ops[i + 6],
1831                    ) {
1832                        if *gs1 == *inc_slot
1833                            && *gs2 == *inc_slot
1834                            && *body_target == i
1835                            && i32::try_from(*k).is_ok()
1836                            && (0..6).all(|off| !has_inbound_jump(&self.ops, i + off, i + 6))
1837                            && !has_inbound_jump(&self.ops, i + 6, i + 6)
1838                        {
1839                            let h_idx = *h_idx;
1840                            let inc_slot = *inc_slot;
1841                            let k32 = *k as i32;
1842                            let limit = *limit;
1843                            self.ops[i] = Op::SetHashIntTimesLoop(h_idx, inc_slot, k32, limit);
1844                            for off in 1..=6 {
1845                                self.ops[i + off] = Op::Nop;
1846                            }
1847                            i += 7;
1848                            continue;
1849                        }
1850                    }
1851                    i += 1;
1852                }
1853            }
1854            // 5c: GetScalarSlot + PushArray + ArrayLen + Pop + SlotIncLtIntJumpBack
1855            //      → PushIntRangeToArrayLoop
1856            // This is the compiler's `push @a, $i; $i++` shape in void context, where
1857            // the `push` expression's length return is pushed by `ArrayLen` and then `Pop`ped.
1858            if len >= 5 {
1859                let mut i = 0;
1860                while i + 4 < len {
1861                    if let (
1862                        Op::GetScalarSlot(get_slot),
1863                        Op::PushArray(push_idx),
1864                        Op::ArrayLen(len_idx),
1865                        Op::Pop,
1866                        Op::SlotIncLtIntJumpBack(inc_slot, limit, body_target),
1867                    ) = (
1868                        &self.ops[i],
1869                        &self.ops[i + 1],
1870                        &self.ops[i + 2],
1871                        &self.ops[i + 3],
1872                        &self.ops[i + 4],
1873                    ) {
1874                        if *get_slot == *inc_slot
1875                            && *push_idx == *len_idx
1876                            && *body_target == i
1877                            && !has_inbound_jump(&self.ops, i, i + 4)
1878                            && !has_inbound_jump(&self.ops, i + 1, i + 4)
1879                            && !has_inbound_jump(&self.ops, i + 2, i + 4)
1880                            && !has_inbound_jump(&self.ops, i + 3, i + 4)
1881                            && !has_inbound_jump(&self.ops, i + 4, i + 4)
1882                        {
1883                            let push_idx = *push_idx;
1884                            let inc_slot = *inc_slot;
1885                            let limit = *limit;
1886                            self.ops[i] = Op::PushIntRangeToArrayLoop(push_idx, inc_slot, limit);
1887                            self.ops[i + 1] = Op::Nop;
1888                            self.ops[i + 2] = Op::Nop;
1889                            self.ops[i + 3] = Op::Nop;
1890                            self.ops[i + 4] = Op::Nop;
1891                            i += 5;
1892                            continue;
1893                        }
1894                    }
1895                    i += 1;
1896                }
1897            }
1898        }
1899        // Pass 6: compact — remove the Nops pass 5 introduced.
1900        self.compact_nops();
1901        // Pass 7: fuse the entire `for my $k (keys %h) { $sum += $h{$k} }` loop into a single
1902        // `SumHashValuesToSlot` op that walks the hash's values in a tight native loop.
1903        //
1904        // After prior passes and compaction the shape is a 15-op block:
1905        //
1906        //     HashKeys(h)
1907        //     DeclareArray(list)
1908        //     LoadInt(0)
1909        //     DeclareScalarSlot(c, cname)
1910        //     LoadUndef
1911        //     DeclareScalarSlot(v, vname)
1912        //     [top]  GetScalarSlot(c)
1913        //            ArrayLen(list)
1914        //            NumLt
1915        //            JumpIfFalse(end)
1916        //            GetScalarSlot(c)
1917        //            GetArrayElem(list)
1918        //            SetScalarSlot(v)
1919        //            AddHashElemSlotKeyToSlot(sum, v, h)     ← fused body (pass 5e-slot)
1920        //            PreIncSlotVoid(c)
1921        //            Jump(top)
1922        //     [end]
1923        //
1924        // The counter (`__foreach_i__`), list (`__foreach_list__`), and loop var (`$k`) live
1925        // inside a `PushFrame`-isolated scope and are invisible after the loop — it is safe to
1926        // elide all of them. The fused op accumulates directly into `sum` without creating the
1927        // keys array at all.
1928        //
1929        // Safety gates:
1930        //   - `h` in HashKeys must match `h` in AddHashElemSlotKeyToSlot.
1931        //   - `list` in DeclareArray must match the loop `ArrayLen` / `GetArrayElem`.
1932        //   - `c` / `v` slots must be consistent throughout.
1933        //   - No inbound jump lands inside the 15-op window from the outside.
1934        //   - JumpIfFalse target must be i+15 (just past the Jump back-edge).
1935        //   - Jump back-edge target must be i+6 (the GetScalarSlot(c) at loop top).
1936        let len = self.ops.len();
1937        if len >= 15 {
1938            let has_inbound_jump =
1939                |ops: &[Op], pos: usize, ignore_from: usize, ignore_to: usize| -> bool {
1940                    for (j, op) in ops.iter().enumerate() {
1941                        if j >= ignore_from && j <= ignore_to {
1942                            continue;
1943                        }
1944                        let t = match op {
1945                            Op::Jump(t)
1946                            | Op::JumpIfFalse(t)
1947                            | Op::JumpIfTrue(t)
1948                            | Op::JumpIfFalseKeep(t)
1949                            | Op::JumpIfTrueKeep(t)
1950                            | Op::JumpIfDefinedKeep(t) => *t,
1951                            Op::SlotLtIntJumpIfFalse(_, _, t) => *t,
1952                            Op::SlotIncLtIntJumpBack(_, _, t) => *t,
1953                            _ => continue,
1954                        };
1955                        if t == pos {
1956                            return true;
1957                        }
1958                    }
1959                    false
1960                };
1961            let mut i = 0;
1962            while i + 15 < len {
1963                if let (
1964                    Op::HashKeys(h_idx),
1965                    Op::DeclareArray(list_idx),
1966                    Op::LoadInt(0),
1967                    Op::DeclareScalarSlot(c_slot, _c_name),
1968                    Op::LoadUndef,
1969                    Op::DeclareScalarSlot(v_slot, _v_name),
1970                    Op::GetScalarSlot(c_get1),
1971                    Op::ArrayLen(len_idx),
1972                    Op::NumLt,
1973                    Op::JumpIfFalse(end_tgt),
1974                    Op::GetScalarSlot(c_get2),
1975                    Op::GetArrayElem(elem_idx),
1976                    Op::SetScalarSlot(v_set),
1977                    Op::AddHashElemSlotKeyToSlot(sum_slot, v_in_body, h_in_body),
1978                    Op::PreIncSlotVoid(c_inc),
1979                    Op::Jump(top_tgt),
1980                ) = (
1981                    &self.ops[i],
1982                    &self.ops[i + 1],
1983                    &self.ops[i + 2],
1984                    &self.ops[i + 3],
1985                    &self.ops[i + 4],
1986                    &self.ops[i + 5],
1987                    &self.ops[i + 6],
1988                    &self.ops[i + 7],
1989                    &self.ops[i + 8],
1990                    &self.ops[i + 9],
1991                    &self.ops[i + 10],
1992                    &self.ops[i + 11],
1993                    &self.ops[i + 12],
1994                    &self.ops[i + 13],
1995                    &self.ops[i + 14],
1996                    &self.ops[i + 15],
1997                ) {
1998                    let full_end = i + 15;
1999                    if *list_idx == *len_idx
2000                        && *list_idx == *elem_idx
2001                        && *c_slot == *c_get1
2002                        && *c_slot == *c_get2
2003                        && *c_slot == *c_inc
2004                        && *v_slot == *v_set
2005                        && *v_slot == *v_in_body
2006                        && *h_idx == *h_in_body
2007                        && *top_tgt == i + 6
2008                        && *end_tgt == i + 16
2009                        && *sum_slot != *c_slot
2010                        && *sum_slot != *v_slot
2011                        && !(i..=full_end).any(|k| has_inbound_jump(&self.ops, k, i, full_end))
2012                    {
2013                        let sum_slot = *sum_slot;
2014                        let h_idx = *h_idx;
2015                        self.ops[i] = Op::SumHashValuesToSlot(sum_slot, h_idx);
2016                        for off in 1..=15 {
2017                            self.ops[i + off] = Op::Nop;
2018                        }
2019                        i += 16;
2020                        continue;
2021                    }
2022                }
2023                i += 1;
2024            }
2025        }
2026        // Pass 8: compact pass 7's Nops.
2027        self.compact_nops();
2028    }
2029
    /// Remove all `Nop` instructions and remap jump targets + metadata indices.
    ///
    /// The fusion passes overwrite consumed ops with `Nop` instead of deleting
    /// them in place, because deletion would invalidate every absolute jump
    /// target recorded so far. This pass removes the padding in one sweep:
    /// build an old→new index map, rewrite every absolute index stored in the
    /// chunk (jump operands, sub entries, resolved static-call IPs, bytecode
    /// ranges) through that map, then compact `ops`, `lines`, and
    /// `op_ast_expr` in lockstep.
    fn compact_nops(&mut self) {
        let old_len = self.ops.len();
        // Build old→new index mapping. `remap[old]` is the index the op at
        // `old` occupies after compaction; for a `Nop` it is the index of the
        // next surviving op, which is exactly where a jump that landed on the
        // Nop should be retargeted.
        let mut remap = vec![0usize; old_len + 1];
        let mut new_idx = 0usize;
        for (old, slot) in remap[..old_len].iter_mut().enumerate() {
            *slot = new_idx;
            if !matches!(self.ops[old], Op::Nop) {
                new_idx += 1;
            }
        }
        // Sentinel: a target of `old_len` (one past the end, e.g. a loop exit
        // after the final op) must also remap cleanly.
        remap[old_len] = new_idx;
        if new_idx == old_len {
            return; // nothing to compact
        }
        // Remap jump targets in all ops.
        // NOTE(review): this match must list every Op variant that carries an
        // absolute instruction index; keep it in sync with the inbound-jump
        // scanners in the fusion passes above — confirm when adding new
        // jump-carrying ops.
        for op in &mut self.ops {
            match op {
                Op::Jump(t) | Op::JumpIfFalse(t) | Op::JumpIfTrue(t) => *t = remap[*t],
                Op::JumpIfFalseKeep(t) | Op::JumpIfTrueKeep(t) | Op::JumpIfDefinedKeep(t) => {
                    *t = remap[*t]
                }
                Op::SlotLtIntJumpIfFalse(_, _, t) => *t = remap[*t],
                Op::SlotIncLtIntJumpBack(_, _, t) => *t = remap[*t],
                _ => {}
            }
        }
        // Remap sub entry points.
        for e in &mut self.sub_entries {
            e.1 = remap[e.1];
        }
        // Remap `CallStaticSubId` resolved entry IPs — they were recorded by
        // `patch_static_sub_calls` before peephole fusion ran, so any Nop
        // removal in front of a sub body shifts its entry and must be
        // reflected here; otherwise `vm_dispatch_user_call` jumps one (or
        // more) ops past the real sub start and silently skips the first
        // instruction(s) of the body.
        for c in &mut self.static_sub_calls {
            c.0 = remap[c.0];
        }
        // Remap block/grep/sort/etc bytecode ranges. Both endpoints of each
        // `(start, end)` pair are absolute op indices, so both go through the
        // same map.
        fn remap_ranges(ranges: &mut [Option<(usize, usize)>], remap: &[usize]) {
            for r in ranges.iter_mut().flatten() {
                r.0 = remap[r.0];
                r.1 = remap[r.1];
            }
        }
        remap_ranges(&mut self.block_bytecode_ranges, &remap);
        remap_ranges(&mut self.map_expr_bytecode_ranges, &remap);
        remap_ranges(&mut self.grep_expr_bytecode_ranges, &remap);
        remap_ranges(&mut self.keys_expr_bytecode_ranges, &remap);
        remap_ranges(&mut self.values_expr_bytecode_ranges, &remap);
        remap_ranges(&mut self.eval_timeout_expr_bytecode_ranges, &remap);
        remap_ranges(&mut self.given_topic_bytecode_ranges, &remap);
        remap_ranges(&mut self.algebraic_match_subject_bytecode_ranges, &remap);
        remap_ranges(&mut self.regex_flip_flop_rhs_expr_bytecode_ranges, &remap);
        // Compact ops, lines, op_ast_expr. Forward in-place shift is safe
        // because `j <= old` always holds, so a source slot is never read
        // after being overwritten.
        let mut j = 0;
        for old in 0..old_len {
            if !matches!(self.ops[old], Op::Nop) {
                self.ops[j] = self.ops[old].clone();
                // `lines` / `op_ast_expr` are parallel to `ops` but may be
                // shorter (presumably for chunks built without line/AST
                // tracking — TODO confirm); guard both indices.
                if old < self.lines.len() && j < self.lines.len() {
                    self.lines[j] = self.lines[old];
                }
                if old < self.op_ast_expr.len() && j < self.op_ast_expr.len() {
                    self.op_ast_expr[j] = self.op_ast_expr[old];
                }
                j += 1;
            }
        }
        self.ops.truncate(j);
        self.lines.truncate(j);
        self.op_ast_expr.truncate(j);
    }
2105}
2106
2107impl Default for Chunk {
2108    fn default() -> Self {
2109        Self::new()
2110    }
2111}
2112
#[cfg(test)]
mod tests {
    use super::*;
    use crate::ast;

    /// `Default` delegates to `new`; both start fully empty.
    #[test]
    fn chunk_new_and_default_match() {
        let a = Chunk::new();
        let b = Chunk::default();
        assert!(a.ops.is_empty() && a.names.is_empty() && a.constants.is_empty());
        assert!(b.ops.is_empty() && b.lines.is_empty());
    }

    /// Interning the same name twice yields one pool entry and equal indices.
    #[test]
    fn intern_name_deduplicates() {
        let mut c = Chunk::new();
        let i0 = c.intern_name("foo");
        let i1 = c.intern_name("foo");
        let i2 = c.intern_name("bar");
        assert_eq!(i0, i1);
        assert_ne!(i0, i2);
        assert_eq!(c.names.len(), 2);
    }

    /// Identical string constants share a pool slot.
    #[test]
    fn add_constant_dedups_identical_strings() {
        let mut c = Chunk::new();
        let a = c.add_constant(PerlValue::string("x".into()));
        let b = c.add_constant(PerlValue::string("x".into()));
        assert_eq!(a, b);
        assert_eq!(c.constants.len(), 1);
    }

    /// Distinct strings get distinct pool slots.
    #[test]
    fn add_constant_distinct_strings_different_indices() {
        let mut c = Chunk::new();
        let a = c.add_constant(PerlValue::string("a".into()));
        let b = c.add_constant(PerlValue::string("b".into()));
        assert_ne!(a, b);
        assert_eq!(c.constants.len(), 2);
    }

    /// Non-string constants skip the dedup scan even when equal.
    #[test]
    fn add_constant_non_string_no_dedup_scan() {
        let mut c = Chunk::new();
        let a = c.add_constant(PerlValue::integer(1));
        let b = c.add_constant(PerlValue::integer(1));
        assert_ne!(a, b);
        assert_eq!(c.constants.len(), 2);
    }

    /// `emit` appends to `ops`, `lines`, and `op_ast_expr` in lockstep.
    #[test]
    fn emit_records_parallel_ops_and_lines() {
        let mut c = Chunk::new();
        c.emit(Op::LoadInt(1), 10);
        c.emit(Op::Pop, 11);
        assert_eq!(c.len(), 2);
        assert_eq!(c.lines, vec![10, 11]);
        assert_eq!(c.op_ast_expr, vec![None, None]);
        assert!(!c.is_empty());
    }

    /// `len`/`is_empty` track the op count.
    #[test]
    fn len_is_empty_track_ops() {
        let mut c = Chunk::new();
        assert!(c.is_empty());
        assert_eq!(c.len(), 0);
        c.emit(Op::Halt, 0);
        assert!(!c.is_empty());
        assert_eq!(c.len(), 1);
    }

    /// `patch_jump_here` retargets a plain `Jump` to the current op index.
    #[test]
    fn patch_jump_here_updates_jump_target() {
        let mut c = Chunk::new();
        let j = c.emit(Op::Jump(0), 1);
        c.emit(Op::LoadInt(99), 2);
        c.patch_jump_here(j);
        assert_eq!(c.ops.len(), 2);
        assert!(matches!(c.ops[j], Op::Jump(2)));
    }

    /// `patch_jump_here` handles `JumpIfTrue`.
    #[test]
    fn patch_jump_here_jump_if_true() {
        let mut c = Chunk::new();
        let j = c.emit(Op::JumpIfTrue(0), 1);
        c.emit(Op::Halt, 2);
        c.patch_jump_here(j);
        assert!(matches!(c.ops[j], Op::JumpIfTrue(2)));
    }

    /// `patch_jump_here` handles `JumpIfFalseKeep`.
    #[test]
    fn patch_jump_here_jump_if_false_keep() {
        let mut c = Chunk::new();
        let j = c.emit(Op::JumpIfFalseKeep(0), 1);
        c.emit(Op::Pop, 2);
        c.patch_jump_here(j);
        assert!(matches!(c.ops[j], Op::JumpIfFalseKeep(2)));
    }

    /// `patch_jump_here` handles `JumpIfTrueKeep`.
    #[test]
    fn patch_jump_here_jump_if_true_keep() {
        let mut c = Chunk::new();
        let j = c.emit(Op::JumpIfTrueKeep(0), 1);
        c.emit(Op::Pop, 2);
        c.patch_jump_here(j);
        assert!(matches!(c.ops[j], Op::JumpIfTrueKeep(2)));
    }

    /// `patch_jump_here` handles `JumpIfDefinedKeep`.
    #[test]
    fn patch_jump_here_jump_if_defined_keep() {
        let mut c = Chunk::new();
        let j = c.emit(Op::JumpIfDefinedKeep(0), 1);
        c.emit(Op::Halt, 2);
        c.patch_jump_here(j);
        assert!(matches!(c.ops[j], Op::JumpIfDefinedKeep(2)));
    }

    /// Patching a non-jump op is a compiler bug and must panic.
    #[test]
    #[should_panic(expected = "patch_jump_to on non-jump op")]
    fn patch_jump_here_panics_on_non_jump() {
        let mut c = Chunk::new();
        let idx = c.emit(Op::LoadInt(1), 1);
        c.patch_jump_here(idx);
    }

    /// `add_block` hands out sequential indices into the block pool.
    #[test]
    fn add_block_returns_sequential_indices() {
        let mut c = Chunk::new();
        let b0: ast::Block = vec![];
        let b1: ast::Block = vec![];
        assert_eq!(c.add_block(b0), 0);
        assert_eq!(c.add_block(b1), 1);
        assert_eq!(c.blocks.len(), 2);
    }

    /// `from_u16` round-trips the first discriminant and a spread of
    /// variants near the end of the enum.
    #[test]
    fn builtin_id_from_u16_first_and_last() {
        assert_eq!(BuiltinId::from_u16(0), Some(BuiltinId::Length));
        // Data-driven round-trip `id as u16 → from_u16` — replaces 18
        // copy-pasted assert stanzas with one table.
        let cases = [
            (BuiltinId::Pselect as u16, Some(BuiltinId::Pselect)),
            (BuiltinId::BarrierNew as u16, Some(BuiltinId::BarrierNew)),
            (BuiltinId::ParPipeline as u16, Some(BuiltinId::ParPipeline)),
            (BuiltinId::GlobParProgress as u16, Some(BuiltinId::GlobParProgress)),
            (BuiltinId::Readpipe as u16, Some(BuiltinId::Readpipe)),
            (BuiltinId::ReadLineList as u16, Some(BuiltinId::ReadLineList)),
            (BuiltinId::ReaddirList as u16, Some(BuiltinId::ReaddirList)),
            (BuiltinId::Ssh as u16, Some(BuiltinId::Ssh)),
            (BuiltinId::Pipe as u16, Some(BuiltinId::Pipe)),
            (BuiltinId::Files as u16, Some(BuiltinId::Files)),
            (BuiltinId::Filesf as u16, Some(BuiltinId::Filesf)),
            (BuiltinId::Dirs as u16, Some(BuiltinId::Dirs)),
            (BuiltinId::SymLinks as u16, Some(BuiltinId::SymLinks)),
            (BuiltinId::Sockets as u16, Some(BuiltinId::Sockets)),
            (BuiltinId::Pipes as u16, Some(BuiltinId::Pipes)),
            (BuiltinId::BlockDevices as u16, Some(BuiltinId::BlockDevices)),
            (BuiltinId::CharDevices as u16, Some(BuiltinId::CharDevices)),
            (BuiltinId::Executables as u16, Some(BuiltinId::Executables)),
        ];
        for (raw, expected) in cases {
            assert_eq!(BuiltinId::from_u16(raw), expected);
        }
    }

    /// Values past the last discriminant map to `None`.
    #[test]
    fn builtin_id_from_u16_out_of_range() {
        assert_eq!(BuiltinId::from_u16(BuiltinId::Executables as u16 + 1), None);
        assert_eq!(BuiltinId::from_u16(u16::MAX), None);
    }

    /// Cloning an `Op` preserves its operands.
    #[test]
    fn op_enum_clone_roundtrip() {
        let o = Op::Call(42, 3, 0);
        assert!(matches!(o.clone(), Op::Call(42, 3, 0)));
    }

    /// A cloned chunk owns its own op vector.
    #[test]
    fn chunk_clone_independent_ops() {
        let mut c = Chunk::new();
        c.emit(Op::Negate, 1);
        let mut d = c.clone();
        d.emit(Op::Pop, 2);
        assert_eq!(c.len(), 1);
        assert_eq!(d.len(), 2);
    }

    /// Disassembly shows the op index, the op itself, and a dash for the
    /// missing AST-ref column.
    #[test]
    fn chunk_disassemble_includes_ops() {
        let mut c = Chunk::new();
        c.emit(Op::LoadInt(7), 1);
        let s = c.disassemble();
        assert!(s.contains("0000"));
        assert!(s.contains("LoadInt(7)"));
        assert!(s.contains("     -")); // no ast ref column
    }

    /// `emit_with_ast_idx` + `ast_expr_at` round-trip a pooled AST expr.
    #[test]
    fn ast_expr_at_roundtrips_pooled_expr() {
        let mut c = Chunk::new();
        let e = ast::Expr {
            kind: ast::ExprKind::Integer(99),
            line: 3,
        };
        c.ast_expr_pool.push(e);
        c.emit_with_ast_idx(Op::LoadInt(99), 3, Some(0));
        let got = c.ast_expr_at(0).expect("ast ref");
        assert!(matches!(&got.kind, ast::ExprKind::Integer(99)));
        assert_eq!(got.line, 3);
    }
}