//! scope.rs — lexical scope management for stryke: frame stack, O(1) scalar
//! slots, `local` dynamic scoping, `frozen`/`typed` lexicals, and `mysync`
//! thread-safe bindings.
1use std::collections::{HashMap, HashSet};
2use std::sync::Arc;
3
4use indexmap::IndexMap;
5use parking_lot::{Mutex, RwLock};
6
7use crate::ast::PerlTypeName;
8use crate::error::PerlError;
9use crate::value::PerlValue;
10
/// Thread-safe shared array for `mysync @a`.
///
/// `Clone` is a cheap `Arc` bump; every clone aliases the same `Mutex<Vec>`.
#[derive(Debug, Clone)]
pub struct AtomicArray(pub Arc<Mutex<Vec<PerlValue>>>);
14
/// Thread-safe shared hash for `mysync %h`.
///
/// `Clone` is a cheap `Arc` bump; every clone aliases the same `Mutex<IndexMap>`.
#[derive(Debug, Clone)]
pub struct AtomicHash(pub Arc<Mutex<IndexMap<String, PerlValue>>>);
18
/// Flattened snapshot of captured bindings: plain scalars plus `mysync` array
/// and hash handles, each as `(bare name, value)` pairs.
/// NOTE(review): presumably produced/consumed by `Scope::capture` — confirm at the use site.
type ScopeCaptureWithAtomics = (
    Vec<(String, PerlValue)>,
    Vec<(String, AtomicArray)>,
    Vec<(String, AtomicHash)>,
);
24
/// Arrays installed by [`crate::interpreter::Interpreter::new`] on the outer frame. They must not be
/// copied into [`Scope::capture`] / [`Scope::restore_capture`] for closures, or the restored copy
/// would shadow the live handles (stale `@INC`, `%ENV`, topic `@_`, etc.).
#[inline]
fn capture_skip_bootstrap_array(name: &str) -> bool {
    // Fixed whitelist of bootstrap-installed array names.
    const BOOTSTRAP_ARRAYS: [&str; 7] =
        ["INC", "ARGV", "_", "-", "+", "^CAPTURE", "^CAPTURE_ALL"];
    BOOTSTRAP_ARRAYS.contains(&name)
}
35
/// Hashes installed at interpreter bootstrap (same rationale as [`capture_skip_bootstrap_array`]).
#[inline]
fn capture_skip_bootstrap_hash(name: &str) -> bool {
    // Fixed whitelist of bootstrap-installed hash names.
    ["INC", "ENV", "SIG", "^HOOK"].contains(&name)
}
41
/// Saved bindings for `local $x` / `local @a` / `local %h` — restored on [`Scope::pop_frame`].
#[derive(Clone, Debug)]
enum LocalRestore {
    /// `local $x` — restore the previous scalar value.
    Scalar(String, PerlValue),
    /// `local @a` — restore the previous array contents.
    Array(String, Vec<PerlValue>),
    /// `local %h` — restore the previous hash contents.
    Hash(String, IndexMap<String, PerlValue>),
    /// `local $h{k}` — third is `None` if the key was absent before `local` (restore deletes the key).
    HashElement(String, String, Option<PerlValue>),
    /// `local $a[i]` — restore previous slot value (see [`Scope::local_set_array_element`]).
    ArrayElement(String, i64, PerlValue),
}
53
/// A single lexical scope frame.
/// Uses Vec instead of HashMap — for typical Perl code with < 10 variables per
/// scope, linear scan is faster than hashing due to cache locality and zero
/// hash overhead.
#[derive(Debug, Clone)]
struct Frame {
    /// Lexical scalars keyed by bare name (`x` for `$x`).
    scalars: Vec<(String, PerlValue)>,
    /// Lexical arrays keyed by bare name (`a` for `@a`).
    arrays: Vec<(String, Vec<PerlValue>)>,
    /// Subroutine (or bootstrap) `@_` — stored separately so call paths can move the arg
    /// [`Vec`] into the frame without an extra copy via [`Frame::arrays`].
    sub_underscore: Option<Vec<PerlValue>>,
    /// Lexical hashes keyed by bare name (`h` for `%h`); insertion-ordered.
    hashes: Vec<(String, IndexMap<String, PerlValue>)>,
    /// Slot-indexed scalars for O(1) access from compiled subroutines.
    /// Compiler assigns `my $x` declarations a u8 slot index; the VM accesses
    /// `scalar_slots[idx]` directly without name lookup or frame walking.
    scalar_slots: Vec<PerlValue>,
    /// Bare scalar name for each slot (same index as `scalar_slots`) — for [`Scope::capture`]
    /// / closures when the binding exists only in `scalar_slots`.
    scalar_slot_names: Vec<Option<String>>,
    /// Dynamic `local` saves — applied in reverse when this frame is popped.
    local_restores: Vec<LocalRestore>,
    /// Lexical names from `frozen my $x` / `@a` / `%h` (bare name, same as storage key).
    frozen_scalars: HashSet<String>,
    frozen_arrays: HashSet<String>,
    frozen_hashes: HashSet<String>,
    /// `typed my $x : Int` — runtime type checks on assignment.
    typed_scalars: HashMap<String, PerlTypeName>,
    /// Arrays promoted to shared Arc-backed storage by `\@arr`.
    /// When a ref is taken, both the scope and the ref share the same Arc,
    /// so mutations through either path are visible. Re-declaration removes the entry.
    shared_arrays: Vec<(String, Arc<parking_lot::RwLock<Vec<PerlValue>>>)>,
    /// Hashes promoted to shared Arc-backed storage by `\%hash`.
    shared_hashes: Vec<(
        String,
        Arc<parking_lot::RwLock<IndexMap<String, PerlValue>>>,
    )>,
    /// Thread-safe arrays from `mysync @a`
    atomic_arrays: Vec<(String, AtomicArray)>,
    /// Thread-safe hashes from `mysync %h`
    atomic_hashes: Vec<(String, AtomicHash)>,
    /// `defer { BLOCK }` closures to run when this frame is popped (LIFO order).
    defers: Vec<PerlValue>,
}
97
impl Frame {
    /// Drop all lexical bindings so blessed objects run `DESTROY` when frames are recycled
    /// ([`Scope::pop_frame`]) or reused ([`Scope::push_frame`]).
    #[inline]
    fn clear_all_bindings(&mut self) {
        self.scalars.clear();
        self.arrays.clear();
        self.sub_underscore = None;
        self.hashes.clear();
        self.scalar_slots.clear();
        self.scalar_slot_names.clear();
        self.local_restores.clear();
        self.frozen_scalars.clear();
        self.frozen_arrays.clear();
        self.frozen_hashes.clear();
        self.typed_scalars.clear();
        self.shared_arrays.clear();
        self.shared_hashes.clear();
        self.atomic_arrays.clear();
        self.defers.clear();
        self.atomic_hashes.clear();
    }

    /// True if this slot index is a real binding (not vec padding before a higher-index declare).
    /// Anonymous temps use [`Option::Some`] with an empty string so slot ops do not fall through
    /// to an outer frame's same slot index.
    #[inline]
    fn owns_scalar_slot_index(&self, idx: usize) -> bool {
        self.scalar_slot_names.get(idx).is_some_and(|n| n.is_some())
    }

    /// Fresh frame with no bindings of any kind.
    #[inline]
    fn new() -> Self {
        Self {
            scalars: Vec::new(),
            arrays: Vec::new(),
            sub_underscore: None,
            hashes: Vec::new(),
            scalar_slots: Vec::new(),
            scalar_slot_names: Vec::new(),
            frozen_scalars: HashSet::new(),
            frozen_arrays: HashSet::new(),
            frozen_hashes: HashSet::new(),
            shared_arrays: Vec::new(),
            shared_hashes: Vec::new(),
            typed_scalars: HashMap::new(),
            atomic_arrays: Vec::new(),
            atomic_hashes: Vec::new(),
            local_restores: Vec::new(),
            defers: Vec::new(),
        }
    }

    /// Name-based scalar lookup in this frame only; a named slot binding wins
    /// over an entry in the `scalars` list.
    #[inline]
    fn get_scalar(&self, name: &str) -> Option<&PerlValue> {
        if let Some(v) = self.get_scalar_from_slot(name) {
            return Some(v);
        }
        self.scalars.iter().find(|(k, _)| k == name).map(|(_, v)| v)
    }

    /// O(N) scan over slot names — only used by `get_scalar` fallback (name-based lookup);
    /// hot compiled paths use `get_scalar_slot(idx)` directly.
    #[inline]
    fn get_scalar_from_slot(&self, name: &str) -> Option<&PerlValue> {
        for (i, sn) in self.scalar_slot_names.iter().enumerate() {
            if let Some(ref n) = sn {
                if n == name {
                    return self.scalar_slots.get(i);
                }
            }
        }
        None
    }

    /// True if `name` is bound in this frame (as a named slot or a `scalars` entry).
    #[inline]
    fn has_scalar(&self, name: &str) -> bool {
        if self
            .scalar_slot_names
            .iter()
            .any(|sn| sn.as_deref() == Some(name))
        {
            return true;
        }
        self.scalars.iter().any(|(k, _)| k == name)
    }

    /// Assign `name` in this frame: named slots take precedence, then the
    /// `scalars` list; an unbound name gets a fresh `scalars` entry.
    #[inline]
    fn set_scalar(&mut self, name: &str, val: PerlValue) {
        for (i, sn) in self.scalar_slot_names.iter().enumerate() {
            if let Some(ref n) = sn {
                if n == name {
                    // Bounds guard: declare/set paths appear to keep the two vecs in step,
                    // so this should always hold (TODO confirm); out of range, the write
                    // is silently dropped.
                    if i < self.scalar_slots.len() {
                        // Write through CaptureCell so closures sharing this cell see the update
                        if let Some(r) = self.scalar_slots[i].as_capture_cell() {
                            *r.write() = val;
                        } else {
                            self.scalar_slots[i] = val;
                        }
                    }
                    return;
                }
            }
        }
        if let Some(entry) = self.scalars.iter_mut().find(|(k, _)| k == name) {
            // Write through CaptureCell so closures sharing this cell see the update
            if let Some(r) = entry.1.as_capture_cell() {
                *r.write() = val;
            } else {
                entry.1 = val;
            }
        } else {
            self.scalars.push((name.to_string(), val));
        }
    }

    /// Array lookup; `@_` is served from [`Frame::sub_underscore`] when present.
    /// NOTE: unlike `has_array`, this does not consult `shared_arrays`.
    #[inline]
    fn get_array(&self, name: &str) -> Option<&Vec<PerlValue>> {
        if name == "_" {
            if let Some(ref v) = self.sub_underscore {
                return Some(v);
            }
        }
        self.arrays.iter().find(|(k, _)| k == name).map(|(_, v)| v)
    }

    /// True if `name` is the frame's `@_`, a plain array, or a `\@arr`-promoted shared array.
    #[inline]
    fn has_array(&self, name: &str) -> bool {
        if name == "_" && self.sub_underscore.is_some() {
            return true;
        }
        self.arrays.iter().any(|(k, _)| k == name)
            || self.shared_arrays.iter().any(|(k, _)| k == name)
    }

    /// Mutable array lookup; `@_` resolves only to [`Frame::sub_underscore`], never `arrays`.
    #[inline]
    fn get_array_mut(&mut self, name: &str) -> Option<&mut Vec<PerlValue>> {
        if name == "_" {
            return self.sub_underscore.as_mut();
        }
        self.arrays
            .iter_mut()
            .find(|(k, _)| k == name)
            .map(|(_, v)| v)
    }

    /// Replace (or create) an array binding. Assigning `@_` routes to
    /// [`Frame::sub_underscore`] and evicts any stale `arrays` entry for `_`.
    #[inline]
    fn set_array(&mut self, name: &str, val: Vec<PerlValue>) {
        if name == "_" {
            if let Some(pos) = self.arrays.iter().position(|(k, _)| k == name) {
                self.arrays.swap_remove(pos);
            }
            self.sub_underscore = Some(val);
            return;
        }
        if let Some(entry) = self.arrays.iter_mut().find(|(k, _)| k == name) {
            entry.1 = val;
        } else {
            self.arrays.push((name.to_string(), val));
        }
    }

    /// Plain-hash lookup. NOTE: unlike `has_hash`, this does not consult `shared_hashes`.
    #[inline]
    fn get_hash(&self, name: &str) -> Option<&IndexMap<String, PerlValue>> {
        self.hashes.iter().find(|(k, _)| k == name).map(|(_, v)| v)
    }

    /// True if `name` is a plain hash or a `\%hash`-promoted shared hash.
    #[inline]
    fn has_hash(&self, name: &str) -> bool {
        self.hashes.iter().any(|(k, _)| k == name)
            || self.shared_hashes.iter().any(|(k, _)| k == name)
    }

    /// Mutable plain-hash lookup.
    #[inline]
    fn get_hash_mut(&mut self, name: &str) -> Option<&mut IndexMap<String, PerlValue>> {
        self.hashes
            .iter_mut()
            .find(|(k, _)| k == name)
            .map(|(_, v)| v)
    }

    /// Replace (or create) a plain-hash binding.
    #[inline]
    fn set_hash(&mut self, name: &str, val: IndexMap<String, PerlValue>) {
        if let Some(entry) = self.hashes.iter_mut().find(|(k, _)| k == name) {
            entry.1 = val;
        } else {
            self.hashes.push((name.to_string(), val));
        }
    }
}
288
/// Manages lexical scoping with a stack of frames.
/// Innermost frame is last in the vector.
#[derive(Debug, Clone)]
pub struct Scope {
    /// Frame stack; index 0 is the root frame (never popped), last is innermost.
    frames: Vec<Frame>,
    /// Recycled frames to avoid allocation on every push_frame/pop_frame cycle.
    frame_pool: Vec<Frame>,
    /// When true (rayon worker / parallel block), reject writes to outer captured lexicals unless
    /// the binding is `mysync` (atomic) or a loop topic (`$_`, `$a`, `$b`). Package names with `::`
    /// are exempt. Requires at least two frames (captured + block locals); use [`Self::push_frame`]
    /// before running a block body on a worker.
    parallel_guard: bool,
}
302
impl Default for Scope {
    /// Equivalent to [`Scope::new`]: one empty root frame, parallel guard off.
    fn default() -> Self {
        Self::new()
    }
}
308
309impl Scope {
310    pub fn new() -> Self {
311        let mut s = Self {
312            frames: Vec::with_capacity(32),
313            frame_pool: Vec::with_capacity(32),
314            parallel_guard: false,
315        };
316        s.frames.push(Frame::new());
317        s
318    }
319
    /// Enable [`Self::parallel_guard`] for parallel worker interpreters (pmap, fan, …).
    /// Pass `false` to turn the guard back off.
    #[inline]
    pub fn set_parallel_guard(&mut self, enabled: bool) {
        self.parallel_guard = enabled;
    }
325
    /// Whether the parallel write guard is currently active (see [`Self::set_parallel_guard`]).
    #[inline]
    pub fn parallel_guard(&self) -> bool {
        self.parallel_guard
    }
330
    /// Package-qualified names (`Foo::bar`) are globals, not captured lexicals —
    /// the parallel guard never applies to them.
    #[inline]
    fn parallel_skip_special_name(name: &str) -> bool {
        name.contains("::")
    }
335
336    /// Loop/sort topic scalars that parallel ops assign before each iteration.
337    #[inline]
338    fn parallel_allowed_topic_scalar(name: &str) -> bool {
339        matches!(name, "_" | "a" | "b")
340    }
341
342    /// Regex / runtime scratch arrays live on an outer frame; parallel match still mutates them.
343    #[inline]
344    fn parallel_allowed_internal_array(name: &str) -> bool {
345        matches!(name, "-" | "+" | "^CAPTURE" | "^CAPTURE_ALL")
346    }
347
348    /// `%ENV`, `%INC`, and regex named-capture hashes `"+"` / `"-"` — same outer-frame issue as internal arrays.
349    #[inline]
350    fn parallel_allowed_internal_hash(name: &str) -> bool {
351        matches!(name, "+" | "-" | "ENV" | "INC")
352    }
353
    /// Enforce the parallel-write rule for `$name`. Inside a parallel block, a scalar write
    /// is allowed only when one of these holds: the name is package-qualified (`::`), it is a
    /// loop topic (`$_`, `$a`, `$b`), it is a regex match scalar, the binding is `mysync`
    /// (atomic), or the binding lives on the innermost frame. A captured outer non-`mysync`
    /// lexical — or an entirely undeclared name — is a runtime error.
    fn check_parallel_scalar_write(&self, name: &str) -> Result<(), PerlError> {
        if !self.parallel_guard || Self::parallel_skip_special_name(name) {
            return Ok(());
        }
        if Self::parallel_allowed_topic_scalar(name) {
            return Ok(());
        }
        if crate::special_vars::is_regex_match_scalar_name(name) {
            return Ok(());
        }
        // Walk innermost-out: the first frame holding `name` decides the verdict.
        let inner = self.frames.len().saturating_sub(1);
        for (i, frame) in self.frames.iter().enumerate().rev() {
            if frame.has_scalar(name) {
                // `mysync` scalars are shared by design — always writable.
                if let Some(v) = frame.get_scalar(name) {
                    if v.as_atomic_arc().is_some() {
                        return Ok(());
                    }
                }
                if i != inner {
                    return Err(PerlError::runtime(
                        format!(
                            "cannot assign to captured non-mysync variable `${}` in a parallel block",
                            name
                        ),
                        0,
                    ));
                }
                return Ok(());
            }
        }
        Err(PerlError::runtime(
            format!(
                "cannot assign to undeclared variable `${}` in a parallel block",
                name
            ),
            0,
        ))
    }
392
    /// Current number of frames (always ≥ 1 — the root frame is never popped).
    #[inline]
    pub fn depth(&self) -> usize {
        self.frames.len()
    }
397
398    /// Pop frames until we're at `target_depth`. Used by VM ReturnValue
399    /// to cleanly unwind through if/while/for blocks on return.
400    #[inline]
401    pub fn pop_to_depth(&mut self, target_depth: usize) {
402        while self.frames.len() > target_depth && self.frames.len() > 1 {
403            self.pop_frame();
404        }
405    }
406
407    #[inline]
408    pub fn push_frame(&mut self) {
409        if let Some(mut frame) = self.frame_pool.pop() {
410            frame.clear_all_bindings();
411            self.frames.push(frame);
412        } else {
413            self.frames.push(Frame::new());
414        }
415    }
416
417    // ── Frame-local scalar slots (O(1) access for compiled subs) ──
418
419    /// Read scalar from slot — innermost binding for `slot` wins (same index can exist on nested
420    /// frames; padding entries without [`Frame::owns_scalar_slot_index`] do not shadow outers).
421    #[inline]
422    pub fn get_scalar_slot(&self, slot: u8) -> PerlValue {
423        let idx = slot as usize;
424        for frame in self.frames.iter().rev() {
425            if idx < frame.scalar_slots.len() && frame.owns_scalar_slot_index(idx) {
426                let val = &frame.scalar_slots[idx];
427                // Transparently unwrap CaptureCell (closure-captured variable) — read through
428                // the shared lock. User-created ScalarRef from `\expr` is NOT unwrapped.
429                if let Some(arc) = val.as_capture_cell() {
430                    return arc.read().clone();
431                }
432                return val.clone();
433            }
434        }
435        PerlValue::UNDEF
436    }
437
438    /// Write scalar to slot — innermost binding for `slot` wins (see [`Self::get_scalar_slot`]).
439    #[inline]
440    pub fn set_scalar_slot(&mut self, slot: u8, val: PerlValue) {
441        let idx = slot as usize;
442        let len = self.frames.len();
443        for i in (0..len).rev() {
444            if idx < self.frames[i].scalar_slots.len() && self.frames[i].owns_scalar_slot_index(idx)
445            {
446                // Write through CaptureCell so closures sharing this cell see the update
447                if let Some(r) = self.frames[i].scalar_slots[idx].as_capture_cell() {
448                    *r.write() = val;
449                } else {
450                    self.frames[i].scalar_slots[idx] = val;
451                }
452                return;
453            }
454        }
455        let top = self.frames.last_mut().unwrap();
456        top.scalar_slots.resize(idx + 1, PerlValue::UNDEF);
457        if idx >= top.scalar_slot_names.len() {
458            top.scalar_slot_names.resize(idx + 1, None);
459        }
460        top.scalar_slot_names[idx] = Some(String::new());
461        top.scalar_slots[idx] = val;
462    }
463
    /// Like [`set_scalar_slot`] but respects the parallel guard — returns `Err` when assigning
    /// to a slot that belongs to an outer frame inside a parallel block.  `slot_name` is resolved
    /// from the bytecode's name table by the caller when available.
    #[inline]
    pub fn set_scalar_slot_checked(
        &mut self,
        slot: u8,
        val: PerlValue,
        slot_name: Option<&str>,
    ) -> Result<(), PerlError> {
        if self.parallel_guard {
            let idx = slot as usize;
            let len = self.frames.len();
            // A slot owned by the innermost frame is a block-local write — always allowed.
            let top_has = idx < self.frames[len - 1].scalar_slots.len()
                && self.frames[len - 1].owns_scalar_slot_index(idx);
            if !top_has {
                // Resolve a display name: innermost frame naming this slot wins, then
                // fall back to the bytecode-supplied `slot_name`.
                let name_owned: String = {
                    let mut found = String::new();
                    for i in (0..len).rev() {
                        if let Some(Some(n)) = self.frames[i].scalar_slot_names.get(idx) {
                            found = n.clone();
                            break;
                        }
                    }
                    if found.is_empty() {
                        if let Some(sn) = slot_name {
                            found = sn.to_string();
                        }
                    }
                    found
                };
                let name = name_owned.as_str();
                // Anonymous slots (empty name) and loop topics are exempt from the guard.
                if !name.is_empty() && !Self::parallel_allowed_topic_scalar(name) {
                    let inner = len.saturating_sub(1);
                    // The innermost frame holding the binding decides: outer frame → error.
                    for (fi, frame) in self.frames.iter().enumerate().rev() {
                        if frame.has_scalar(name)
                            || (idx < frame.scalar_slots.len() && frame.owns_scalar_slot_index(idx))
                        {
                            if fi != inner {
                                return Err(PerlError::runtime(
                                    format!(
                                        "cannot assign to captured outer lexical `${}` inside a parallel block (use `mysync`)",
                                        name
                                    ),
                                    0,
                                ));
                            }
                            break;
                        }
                    }
                }
            }
        }
        self.set_scalar_slot(slot, val);
        Ok(())
    }
520
521    /// Declare + initialize scalar in the current frame's slot array.
522    /// `name` (bare identifier, e.g. `x` for `$x`) is stored for [`Scope::capture`] when the
523    /// binding is slot-only (no duplicate `frame.scalars` row).
524    #[inline]
525    pub fn declare_scalar_slot(&mut self, slot: u8, val: PerlValue, name: Option<&str>) {
526        let idx = slot as usize;
527        let frame = self.frames.last_mut().unwrap();
528        if idx >= frame.scalar_slots.len() {
529            frame.scalar_slots.resize(idx + 1, PerlValue::UNDEF);
530        }
531        frame.scalar_slots[idx] = val;
532        if idx >= frame.scalar_slot_names.len() {
533            frame.scalar_slot_names.resize(idx + 1, None);
534        }
535        match name {
536            Some(n) => frame.scalar_slot_names[idx] = Some(n.to_string()),
537            // Anonymous slot: mark occupied so padding holes don't shadow parent frame slots.
538            None => frame.scalar_slot_names[idx] = Some(String::new()),
539        }
540    }
541
    /// Slot-indexed repeated `.=` — avoids frame walking and string comparison on
    /// every iteration of a fused `$slot .= "const"` loop (`Op::ConcatConstSlotLoop`).
    ///
    /// Locates the slot's frame once, then tries `try_concat_repeat_inplace`
    /// (unique heap-`String` → single `reserve` + `push_str` burst). Returns `true`
    /// on success; `false` when the slot is not a uniquely-held `String`, so the
    /// caller can fall back to the per-iteration slow path
    /// ([`Self::scalar_slot_concat_repeat_slow`]).
    #[inline]
    pub fn scalar_slot_concat_repeat_inplace(&mut self, slot: u8, rhs: &str, n: usize) -> bool {
        let idx = slot as usize;
        let len = self.frames.len();
        // Resolve the owning frame once: innermost first, then outer frames.
        let fi = {
            let mut found = len - 1;
            if idx >= self.frames[found].scalar_slots.len()
                || !self.frames[found].owns_scalar_slot_index(idx)
            {
                for i in (0..len - 1).rev() {
                    if idx < self.frames[i].scalar_slots.len()
                        && self.frames[i].owns_scalar_slot_index(idx)
                    {
                        found = i;
                        break;
                    }
                }
            }
            found
        };
        let frame = &mut self.frames[fi];
        if idx >= frame.scalar_slots.len() {
            frame.scalar_slots.resize(idx + 1, PerlValue::UNDEF);
        }
        frame.scalar_slots[idx].try_concat_repeat_inplace(rhs, n)
    }
578
579    /// Slow fallback for the fused string-append loop: clones the RHS into a new
580    /// `PerlValue::string` once and runs the existing `scalar_slot_concat_inplace`
581    /// path `n` times. Used by `Op::ConcatConstSlotLoop` when the slot is aliased
582    /// and the in-place fast path rejected the mutation.
583    #[inline]
584    pub fn scalar_slot_concat_repeat_slow(&mut self, slot: u8, rhs: &str, n: usize) {
585        let pv = PerlValue::string(rhs.to_owned());
586        for _ in 0..n {
587            let _ = self.scalar_slot_concat_inplace(slot, &pv);
588        }
589    }
590
    /// Slot-indexed `.=` — append `rhs` to the slot's scalar and return a handle
    /// to the result for the VM stack.
    ///
    /// Returns a [`PerlValue::shallow_clone`] (Arc::clone) of the stored value
    /// rather than a full [`Clone`], which would deep-copy the entire `String`
    /// payload and turn a `$s .= "x"` loop into O(N²) memcpy.
    #[inline]
    pub fn scalar_slot_concat_inplace(&mut self, slot: u8, rhs: &PerlValue) -> PerlValue {
        let idx = slot as usize;
        let len = self.frames.len();
        // Resolve the owning frame once: innermost first, then outer frames.
        let fi = {
            let mut found = len - 1;
            if idx >= self.frames[found].scalar_slots.len()
                || !self.frames[found].owns_scalar_slot_index(idx)
            {
                for i in (0..len - 1).rev() {
                    if idx < self.frames[i].scalar_slots.len()
                        && self.frames[i].owns_scalar_slot_index(idx)
                    {
                        found = i;
                        break;
                    }
                }
            }
            found
        };
        let frame = &mut self.frames[fi];
        if idx >= frame.scalar_slots.len() {
            frame.scalar_slots.resize(idx + 1, PerlValue::UNDEF);
        }
        // Fast path: when the slot holds the only `Arc<HeapObject::String>` handle,
        // extend the underlying `String` buffer in place — no Arc alloc, no full
        // unwrap/rewrap. This turns a `$s .= "x"` loop into `String::push_str` only.
        // The shallow_clone handle that goes back onto the VM stack briefly bumps
        // the refcount to 2, so the NEXT iteration's fast path would fail — except
        // the VM immediately `Pop`s that handle (or `ConcatAppendSlotVoid` never
        // pushes it), restoring unique ownership before the next `.=`.
        if frame.scalar_slots[idx].try_concat_append_inplace(rhs) {
            return frame.scalar_slots[idx].shallow_clone();
        }
        // Aliased or non-string slot: take the value out, concat into an owned
        // result, store it back, and hand a shallow handle to the caller.
        let new_val = std::mem::replace(&mut frame.scalar_slots[idx], PerlValue::UNDEF)
            .concat_append_owned(rhs);
        let handle = new_val.shallow_clone();
        frame.scalar_slots[idx] = new_val;
        handle
    }
631
    /// True when a non-root frame exists — [`Self::pop_frame`] never removes the root frame.
    #[inline]
    pub(crate) fn can_pop_frame(&self) -> bool {
        self.frames.len() > 1
    }
636
    /// Pop the innermost frame (no-op on the root frame): apply its `local` restores
    /// in reverse (LIFO) order, drop all bindings, and recycle the frame into the
    /// pool (bounded at 64 entries).
    /// NOTE(review): `defers` are cleared here, not executed — presumably the
    /// interpreter runs them before popping; confirm against the call sites.
    #[inline]
    pub fn pop_frame(&mut self) {
        if self.frames.len() > 1 {
            let mut frame = self.frames.pop().expect("pop_frame");
            // Local restore must write outer bindings even when parallel_guard is on
            // (user code cannot mutate captured vars; unwind is not user mutation).
            let saved_guard = self.parallel_guard;
            self.parallel_guard = false;
            for entry in frame.local_restores.drain(..).rev() {
                match entry {
                    LocalRestore::Scalar(name, old) => {
                        let _ = self.set_scalar(&name, old);
                    }
                    LocalRestore::Array(name, old) => {
                        let _ = self.set_array(&name, old);
                    }
                    LocalRestore::Hash(name, old) => {
                        let _ = self.set_hash(&name, old);
                    }
                    LocalRestore::HashElement(name, key, old) => match old {
                        Some(v) => {
                            let _ = self.set_hash_element(&name, &key, v);
                        }
                        None => {
                            // Key was absent before `local` — deleting restores that state.
                            let _ = self.delete_hash_element(&name, &key);
                        }
                    },
                    LocalRestore::ArrayElement(name, index, old) => {
                        let _ = self.set_array_element(&name, index, old);
                    }
                }
            }
            self.parallel_guard = saved_guard;
            frame.clear_all_bindings();
            // Return frame to pool for reuse (avoids allocation on next push_frame).
            if self.frame_pool.len() < 64 {
                self.frame_pool.push(frame);
            }
        }
    }
677
678    /// `local $name` — save current value, assign `val`; restore on `pop_frame`.
679    pub fn local_set_scalar(&mut self, name: &str, val: PerlValue) -> Result<(), PerlError> {
680        let old = self.get_scalar(name);
681        if let Some(frame) = self.frames.last_mut() {
682            frame
683                .local_restores
684                .push(LocalRestore::Scalar(name.to_string(), old));
685        }
686        self.set_scalar(name, val)
687    }
688
689    /// `local @name` — not valid for `mysync` arrays.
690    pub fn local_set_array(&mut self, name: &str, val: Vec<PerlValue>) -> Result<(), PerlError> {
691        if self.find_atomic_array(name).is_some() {
692            return Err(PerlError::runtime(
693                "local cannot be used on mysync arrays",
694                0,
695            ));
696        }
697        let old = self.get_array(name);
698        if let Some(frame) = self.frames.last_mut() {
699            frame
700                .local_restores
701                .push(LocalRestore::Array(name.to_string(), old));
702        }
703        self.set_array(name, val)?;
704        Ok(())
705    }
706
707    /// `local %name`
708    pub fn local_set_hash(
709        &mut self,
710        name: &str,
711        val: IndexMap<String, PerlValue>,
712    ) -> Result<(), PerlError> {
713        if self.find_atomic_hash(name).is_some() {
714            return Err(PerlError::runtime(
715                "local cannot be used on mysync hashes",
716                0,
717            ));
718        }
719        let old = self.get_hash(name);
720        if let Some(frame) = self.frames.last_mut() {
721            frame
722                .local_restores
723                .push(LocalRestore::Hash(name.to_string(), old));
724        }
725        self.set_hash(name, val)?;
726        Ok(())
727    }
728
729    /// `local $h{key} = val` — save key state; restore one slot on `pop_frame`.
730    pub fn local_set_hash_element(
731        &mut self,
732        name: &str,
733        key: &str,
734        val: PerlValue,
735    ) -> Result<(), PerlError> {
736        if self.find_atomic_hash(name).is_some() {
737            return Err(PerlError::runtime(
738                "local cannot be used on mysync hash elements",
739                0,
740            ));
741        }
742        let old = if self.exists_hash_element(name, key) {
743            Some(self.get_hash_element(name, key))
744        } else {
745            None
746        };
747        if let Some(frame) = self.frames.last_mut() {
748            frame.local_restores.push(LocalRestore::HashElement(
749                name.to_string(),
750                key.to_string(),
751                old,
752            ));
753        }
754        self.set_hash_element(name, key, val)?;
755        Ok(())
756    }
757
758    /// `local $a[i] = val` — save element (as returned by [`Self::get_array_element`]), assign;
759    /// restore on [`Self::pop_frame`].
760    pub fn local_set_array_element(
761        &mut self,
762        name: &str,
763        index: i64,
764        val: PerlValue,
765    ) -> Result<(), PerlError> {
766        if self.find_atomic_array(name).is_some() {
767            return Err(PerlError::runtime(
768                "local cannot be used on mysync array elements",
769                0,
770            ));
771        }
772        let old = self.get_array_element(name, index);
773        if let Some(frame) = self.frames.last_mut() {
774            frame
775                .local_restores
776                .push(LocalRestore::ArrayElement(name.to_string(), index, old));
777        }
778        self.set_array_element(name, index, val)?;
779        Ok(())
780    }
781
782    // ── Scalars ──
783
    /// Declare a plain (non-frozen, untyped) lexical scalar in the current frame.
    /// Thin wrapper over [`Self::declare_scalar_frozen`]; type errors cannot occur
    /// here (no `ty`), so the `Result` is discarded.
    #[inline]
    pub fn declare_scalar(&mut self, name: &str, val: PerlValue) {
        let _ = self.declare_scalar_frozen(name, val, false, None);
    }
788
789    /// Declare a lexical scalar; `frozen` means no further assignment to this binding.
790    /// `ty` is from `typed my $x : Int` — enforced on every assignment.
791    pub fn declare_scalar_frozen(
792        &mut self,
793        name: &str,
794        val: PerlValue,
795        frozen: bool,
796        ty: Option<PerlTypeName>,
797    ) -> Result<(), PerlError> {
798        if let Some(ref t) = ty {
799            t.check_value(&val)
800                .map_err(|msg| PerlError::type_error(format!("`${}`: {}", name, msg), 0))?;
801        }
802        if let Some(frame) = self.frames.last_mut() {
803            frame.set_scalar(name, val);
804            if frozen {
805                frame.frozen_scalars.insert(name.to_string());
806            }
807            if let Some(t) = ty {
808                frame.typed_scalars.insert(name.to_string(), t);
809            }
810        }
811        Ok(())
812    }
813
814    /// True if the innermost lexical scalar binding for `name` is `frozen`.
815    pub fn is_scalar_frozen(&self, name: &str) -> bool {
816        for frame in self.frames.iter().rev() {
817            if frame.has_scalar(name) {
818                return frame.frozen_scalars.contains(name);
819            }
820        }
821        false
822    }
823
824    /// True if the innermost lexical array binding for `name` is `frozen`.
825    pub fn is_array_frozen(&self, name: &str) -> bool {
826        for frame in self.frames.iter().rev() {
827            if frame.has_array(name) {
828                return frame.frozen_arrays.contains(name);
829            }
830        }
831        false
832    }
833
834    /// True if the innermost lexical hash binding for `name` is `frozen`.
835    pub fn is_hash_frozen(&self, name: &str) -> bool {
836        for frame in self.frames.iter().rev() {
837            if frame.has_hash(name) {
838                return frame.frozen_hashes.contains(name);
839            }
840        }
841        false
842    }
843
844    /// Returns Some(sigil) if the named variable is frozen, None if mutable.
845    pub fn check_frozen(&self, sigil: &str, name: &str) -> Option<&'static str> {
846        match sigil {
847            "$" => {
848                if self.is_scalar_frozen(name) {
849                    Some("scalar")
850                } else {
851                    None
852                }
853            }
854            "@" => {
855                if self.is_array_frozen(name) {
856                    Some("array")
857                } else {
858                    None
859                }
860            }
861            "%" => {
862                if self.is_hash_frozen(name) {
863                    Some("hash")
864                } else {
865                    None
866                }
867            }
868            _ => None,
869        }
870    }
871
872    #[inline]
873    pub fn get_scalar(&self, name: &str) -> PerlValue {
874        for frame in self.frames.iter().rev() {
875            if let Some(val) = frame.get_scalar(name) {
876                // Transparently unwrap Atomic — read through the lock
877                if let Some(arc) = val.as_atomic_arc() {
878                    return arc.lock().clone();
879                }
880                // Transparently unwrap CaptureCell (closure-captured variable) — read through the lock.
881                // User-created ScalarRef from `\expr` is NOT unwrapped.
882                if let Some(arc) = val.as_capture_cell() {
883                    return arc.read().clone();
884                }
885                return val.clone();
886            }
887        }
888        PerlValue::UNDEF
889    }
890
891    /// True if any frame has a lexical scalar binding for `name` (`my` / `our` / assignment).
892    #[inline]
893    pub fn scalar_binding_exists(&self, name: &str) -> bool {
894        for frame in self.frames.iter().rev() {
895            if frame.has_scalar(name) {
896                return true;
897            }
898        }
899        false
900    }
901
902    /// Collect all scalar variable names across all frames (for debugger).
903    pub fn all_scalar_names(&self) -> Vec<String> {
904        let mut names = Vec::new();
905        for frame in &self.frames {
906            for (name, _) in &frame.scalars {
907                if !names.contains(name) {
908                    names.push(name.clone());
909                }
910            }
911            for name in frame.scalar_slot_names.iter().flatten() {
912                if !names.contains(name) {
913                    names.push(name.clone());
914                }
915            }
916        }
917        names
918    }
919
920    /// True if any frame or atomic slot holds an array named `name`.
921    #[inline]
922    pub fn array_binding_exists(&self, name: &str) -> bool {
923        if self.find_atomic_array(name).is_some() {
924            return true;
925        }
926        for frame in self.frames.iter().rev() {
927            if frame.has_array(name) {
928                return true;
929            }
930        }
931        false
932    }
933
934    /// True if any frame or atomic slot holds a hash named `name`.
935    #[inline]
936    pub fn hash_binding_exists(&self, name: &str) -> bool {
937        if self.find_atomic_hash(name).is_some() {
938            return true;
939        }
940        for frame in self.frames.iter().rev() {
941            if frame.has_hash(name) {
942                return true;
943            }
944        }
945        false
946    }
947
948    /// Get the raw scalar value WITHOUT unwrapping Atomic.
949    /// Used by scope.capture() to preserve the Arc for sharing across threads.
950    #[inline]
951    pub fn get_scalar_raw(&self, name: &str) -> PerlValue {
952        for frame in self.frames.iter().rev() {
953            if let Some(val) = frame.get_scalar(name) {
954                return val.clone();
955            }
956        }
957        PerlValue::UNDEF
958    }
959
    /// Atomically read-modify-write a scalar. Holds the Mutex lock for
    /// the entire cycle so `mysync` variables are race-free under `fan`/`pfor`.
    /// Returns the NEW value.
    ///
    /// Non-`mysync` scalars take an unlocked get → compute → set fallback.
    pub fn atomic_mutate(
        &mut self,
        name: &str,
        f: impl FnOnce(&PerlValue) -> PerlValue,
    ) -> PerlValue {
        // NOTE(review): a frame whose binding is NOT atomic does not stop this
        // scan — an outer mysync binding shadowed by an inner plain one would
        // still be picked up here. Confirm that shadowing a mysync scalar is
        // either impossible or intended to behave this way.
        for frame in self.frames.iter().rev() {
            if let Some(v) = frame.get_scalar(name) {
                if let Some(arc) = v.as_atomic_arc() {
                    // Lock once and hold it across clone → f → store so no other
                    // thread can interleave between the read and the write.
                    let mut guard = arc.lock();
                    let old = guard.clone();
                    let new_val = f(&guard);
                    *guard = new_val.clone();
                    // Trace hook consumed by the parallel-execution tracer.
                    crate::parallel_trace::emit_scalar_mutation(name, &old, &new_val);
                    return new_val;
                }
            }
        }
        // Non-atomic fallback
        let old = self.get_scalar(name);
        let new_val = f(&old);
        let _ = self.set_scalar(name, new_val.clone());
        new_val
    }
986
    /// Like atomic_mutate but returns the OLD value (for postfix `$x++`).
    pub fn atomic_mutate_post(
        &mut self,
        name: &str,
        f: impl FnOnce(&PerlValue) -> PerlValue,
    ) -> PerlValue {
        // Same frame-scan as atomic_mutate (see the review note there about
        // shadowed mysync bindings).
        for frame in self.frames.iter().rev() {
            if let Some(v) = frame.get_scalar(name) {
                if let Some(arc) = v.as_atomic_arc() {
                    // Lock held across read + compute + store; only the return
                    // value (old, not new) differs from atomic_mutate.
                    let mut guard = arc.lock();
                    let old = guard.clone();
                    let new_val = f(&old);
                    *guard = new_val.clone();
                    crate::parallel_trace::emit_scalar_mutation(name, &old, &new_val);
                    return old;
                }
            }
        }
        // Non-atomic fallback
        let old = self.get_scalar(name);
        let _ = self.set_scalar(name, f(&old));
        old
    }
1010
    /// Append `rhs` to a scalar string in-place (no clone of the existing string).
    /// If the scalar is not yet a String, it is converted first.
    ///
    /// The binding and the returned [`PerlValue`] share the same heap [`Arc`] via
    /// [`PerlValue::shallow_clone`] on the store — a full [`Clone`] would deep-copy the
    /// entire `String` each time and make repeated `.=` O(N²) in the total length.
    ///
    /// Returns the value after the append (shared with the binding as above).
    #[inline]
    pub fn scalar_concat_inplace(
        &mut self,
        name: &str,
        rhs: &PerlValue,
    ) -> Result<PerlValue, PerlError> {
        self.check_parallel_scalar_write(name)?;
        // NOTE(review): only `frame.scalars` (name/value pairs) is searched —
        // slot-allocated scalars and CaptureCell wrappers are not written
        // through here, unlike set_scalar. Confirm those forms never reach the
        // `.=` fast path.
        for frame in self.frames.iter_mut().rev() {
            if let Some(entry) = frame.scalars.iter_mut().find(|(k, _)| k == name) {
                // `mysync $x` stores `HeapObject::Atomic` — must mutate under the mutex, not
                // `into_string()` the wrapper (that would stringify the cell, not the payload).
                if let Some(atomic_arc) = entry.1.as_atomic_arc() {
                    let mut guard = atomic_arc.lock();
                    // Move the payload out (leaving UNDEF) so the append can consume it.
                    let inner = std::mem::replace(&mut *guard, PerlValue::UNDEF);
                    let new_val = inner.concat_append_owned(rhs);
                    *guard = new_val.shallow_clone();
                    return Ok(new_val);
                }
                // Fast path: same `Arc::get_mut` trick as the slot variant — mutate the
                // underlying `String` directly when the scalar is the lone handle.
                if entry.1.try_concat_append_inplace(rhs) {
                    return Ok(entry.1.shallow_clone());
                }
                // Use `into_string` + `append_to` so heap strings take the `Arc::try_unwrap`
                // fast path instead of `Display` / heap formatting on every `.=`.
                let new_val =
                    std::mem::replace(&mut entry.1, PerlValue::UNDEF).concat_append_owned(rhs);
                entry.1 = new_val.shallow_clone();
                return Ok(new_val);
            }
        }
        // Variable not found — create as new string in the outermost frame
        // (matches set_scalar's auto-vivification fallback).
        let val = PerlValue::UNDEF.concat_append_owned(rhs);
        self.frames[0].set_scalar(name, val.shallow_clone());
        Ok(val)
    }
1053
    /// Assign `$name`: writes through `mysync` (Atomic) and closure-capture
    /// wrappers, enforces any `typed my` constraint on the innermost binding,
    /// and auto-vivifies undeclared names in the outermost frame.
    #[inline]
    pub fn set_scalar(&mut self, name: &str, val: PerlValue) -> Result<(), PerlError> {
        self.check_parallel_scalar_write(name)?;
        for frame in self.frames.iter_mut().rev() {
            // If the existing value is Atomic, write through the lock
            if let Some(v) = frame.get_scalar(name) {
                if let Some(arc) = v.as_atomic_arc() {
                    let mut guard = arc.lock();
                    let old = guard.clone();
                    *guard = val.clone();
                    // Trace hook consumed by the parallel-execution tracer.
                    crate::parallel_trace::emit_scalar_mutation(name, &old, &val);
                    return Ok(());
                }
                // If the existing value is CaptureCell (closure-captured variable), write through it
                if let Some(arc) = v.as_capture_cell() {
                    *arc.write() = val;
                    return Ok(());
                }
            }
            // Plain binding in this frame: type-check (if `typed my`), then rebind.
            // The innermost frame with a binding wins; outer frames are not touched.
            if frame.has_scalar(name) {
                if let Some(ty) = frame.typed_scalars.get(name) {
                    ty.check_value(&val)
                        .map_err(|msg| PerlError::type_error(format!("`${}`: {}", name, msg), 0))?;
                }
                frame.set_scalar(name, val);
                return Ok(());
            }
        }
        // No binding anywhere: create one in the outermost frame.
        self.frames[0].set_scalar(name, val);
        Ok(())
    }
1085
1086    /// Set the topic variable `$_` and its numeric alias `$_0` together.
1087    /// Use this for single-arg closures (map, grep, etc.) so both `$_` and `$_0` work.
1088    /// This declares them in the current scope (not global), suitable for sub calls.
1089    ///
1090    /// Also sets outer topic aliases: `$_<` = previous `$_`, `$_<<` = previous `$_<`, etc.
1091    /// This allows nested blocks (e.g. `fan` inside `>{}`) to access enclosing topic values.
1092    #[inline]
1093    pub fn set_topic(&mut self, val: PerlValue) {
1094        // Shift existing outer topics down one level before setting new topic.
1095        // We support up to 4 levels: $_<, $_<<, $_<<<, $_<<<<
1096        // First, read current values (in reverse order to avoid overwriting what we read).
1097        let old_3lt = self.get_scalar("_<<<");
1098        let old_2lt = self.get_scalar("_<<");
1099        let old_1lt = self.get_scalar("_<");
1100        let old_topic = self.get_scalar("_");
1101
1102        // Now set the new values
1103        self.declare_scalar("_", val.clone());
1104        self.declare_scalar("_0", val);
1105        // Set outer topics only if there was a previous topic
1106        if !old_topic.is_undef() {
1107            self.declare_scalar("_<", old_topic);
1108        }
1109        if !old_1lt.is_undef() {
1110            self.declare_scalar("_<<", old_1lt);
1111        }
1112        if !old_2lt.is_undef() {
1113            self.declare_scalar("_<<<", old_2lt);
1114        }
1115        if !old_3lt.is_undef() {
1116            self.declare_scalar("_<<<<", old_3lt);
1117        }
1118    }
1119
1120    /// Set numeric closure argument aliases `$_0`, `$_1`, `$_2`, ... for all args.
1121    /// Also sets `$_` to the first argument (if any), shifting outer topics like [`set_topic`].
1122    #[inline]
1123    pub fn set_closure_args(&mut self, args: &[PerlValue]) {
1124        if let Some(first) = args.first() {
1125            // Use set_topic to properly shift the topic stack
1126            self.set_topic(first.clone());
1127        }
1128        for (i, val) in args.iter().enumerate() {
1129            self.declare_scalar(&format!("_{}", i), val.clone());
1130        }
1131    }
1132
1133    /// Register a `defer { BLOCK }` closure to run when this scope exits.
1134    #[inline]
1135    pub fn push_defer(&mut self, coderef: PerlValue) {
1136        if let Some(frame) = self.frames.last_mut() {
1137            frame.defers.push(coderef);
1138        }
1139    }
1140
1141    /// Take all deferred blocks from the current frame (for execution on scope exit).
1142    /// Returns them in reverse order (LIFO - last defer runs first).
1143    #[inline]
1144    pub fn take_defers(&mut self) -> Vec<PerlValue> {
1145        if let Some(frame) = self.frames.last_mut() {
1146            let mut defers = std::mem::take(&mut frame.defers);
1147            defers.reverse();
1148            defers
1149        } else {
1150            Vec::new()
1151        }
1152    }
1153
1154    // ── Atomic array/hash declarations ──
1155
1156    pub fn declare_atomic_array(&mut self, name: &str, val: Vec<PerlValue>) {
1157        if let Some(frame) = self.frames.last_mut() {
1158            frame
1159                .atomic_arrays
1160                .push((name.to_string(), AtomicArray(Arc::new(Mutex::new(val)))));
1161        }
1162    }
1163
1164    pub fn declare_atomic_hash(&mut self, name: &str, val: IndexMap<String, PerlValue>) {
1165        if let Some(frame) = self.frames.last_mut() {
1166            frame
1167                .atomic_hashes
1168                .push((name.to_string(), AtomicHash(Arc::new(Mutex::new(val)))));
1169        }
1170    }
1171
1172    /// Find an atomic array by name (returns the Arc for sharing).
1173    fn find_atomic_array(&self, name: &str) -> Option<&AtomicArray> {
1174        for frame in self.frames.iter().rev() {
1175            if let Some(aa) = frame.atomic_arrays.iter().find(|(k, _)| k == name) {
1176                return Some(&aa.1);
1177            }
1178        }
1179        None
1180    }
1181
1182    /// Find an atomic hash by name.
1183    fn find_atomic_hash(&self, name: &str) -> Option<&AtomicHash> {
1184        for frame in self.frames.iter().rev() {
1185            if let Some(ah) = frame.atomic_hashes.iter().find(|(k, _)| k == name) {
1186                return Some(&ah.1);
1187            }
1188        }
1189        None
1190    }
1191
1192    // ── Arrays ──
1193
1194    /// Remove `@_` from the innermost frame without cloning (move out of the frame `sub_underscore` field).
1195    /// Call sites restore with [`Self::declare_array`] before running a body that uses `shift` / `@_`.
1196    #[inline]
1197    pub fn take_sub_underscore(&mut self) -> Option<Vec<PerlValue>> {
1198        self.frames.last_mut()?.sub_underscore.take()
1199    }
1200
1201    pub fn declare_array(&mut self, name: &str, val: Vec<PerlValue>) {
1202        self.declare_array_frozen(name, val, false);
1203    }
1204
1205    pub fn declare_array_frozen(&mut self, name: &str, val: Vec<PerlValue>, frozen: bool) {
1206        // Package stash names (`Foo::BAR`) live in the outermost frame so nested blocks/subs
1207        // cannot shadow `@C::ISA` with an empty array (breaks inheritance / SUPER).
1208        let idx = if name.contains("::") {
1209            0
1210        } else {
1211            self.frames.len().saturating_sub(1)
1212        };
1213        if let Some(frame) = self.frames.get_mut(idx) {
1214            // Remove any existing shared Arc — re-declaration disconnects old refs.
1215            frame.shared_arrays.retain(|(k, _)| k != name);
1216            frame.set_array(name, val);
1217            if frozen {
1218                frame.frozen_arrays.insert(name.to_string());
1219            } else {
1220                // Redeclaring as non-frozen should unfreeze if previously frozen
1221                frame.frozen_arrays.remove(name);
1222            }
1223        }
1224    }
1225
1226    pub fn get_array(&self, name: &str) -> Vec<PerlValue> {
1227        // Check atomic arrays first
1228        if let Some(aa) = self.find_atomic_array(name) {
1229            return aa.0.lock().clone();
1230        }
1231        // Check shared (Arc-backed) arrays
1232        if let Some(arc) = self.find_shared_array(name) {
1233            return arc.read().clone();
1234        }
1235        if name.contains("::") {
1236            if let Some(f) = self.frames.first() {
1237                if let Some(val) = f.get_array(name) {
1238                    return val.clone();
1239                }
1240            }
1241            return Vec::new();
1242        }
1243        for frame in self.frames.iter().rev() {
1244            if let Some(val) = frame.get_array(name) {
1245                return val.clone();
1246            }
1247        }
1248        Vec::new()
1249    }
1250
1251    /// Borrow the innermost binding for `name` when it is a plain [`Vec`] (not `mysync`).
1252    /// Used to pass `@_` to [`crate::list_util::native_dispatch`] without cloning the vector.
1253    #[inline]
1254    pub fn get_array_borrow(&self, name: &str) -> Option<&[PerlValue]> {
1255        if self.find_atomic_array(name).is_some() {
1256            return None;
1257        }
1258        if name.contains("::") {
1259            return self
1260                .frames
1261                .first()
1262                .and_then(|f| f.get_array(name))
1263                .map(|v| v.as_slice());
1264        }
1265        for frame in self.frames.iter().rev() {
1266            if let Some(val) = frame.get_array(name) {
1267                return Some(val.as_slice());
1268            }
1269        }
1270        None
1271    }
1272
1273    fn resolve_array_frame_idx(&self, name: &str) -> Option<usize> {
1274        if name.contains("::") {
1275            return Some(0);
1276        }
1277        (0..self.frames.len())
1278            .rev()
1279            .find(|&i| self.frames[i].has_array(name))
1280    }
1281
1282    fn check_parallel_array_write(&self, name: &str) -> Result<(), PerlError> {
1283        if !self.parallel_guard
1284            || Self::parallel_skip_special_name(name)
1285            || Self::parallel_allowed_internal_array(name)
1286        {
1287            return Ok(());
1288        }
1289        let inner = self.frames.len().saturating_sub(1);
1290        match self.resolve_array_frame_idx(name) {
1291            None => Err(PerlError::runtime(
1292                format!(
1293                    "cannot modify undeclared array `@{}` in a parallel block",
1294                    name
1295                ),
1296                0,
1297            )),
1298            Some(idx) if idx != inner => Err(PerlError::runtime(
1299                format!(
1300                    "cannot modify captured non-mysync array `@{}` in a parallel block",
1301                    name
1302                ),
1303                0,
1304            )),
1305            Some(_) => Ok(()),
1306        }
1307    }
1308
1309    /// Resolve an [`ArrayBindingRef`] or [`HashBindingRef`] to an Arc-backed
1310    /// snapshot so the value survives scope pop. Called when a value is stored
1311    /// as an *element* inside a container (array/hash) — NOT for scalar assignment,
1312    /// where binding refs must stay live for aliasing.
1313    #[inline]
1314    pub fn resolve_container_binding_ref(&self, val: PerlValue) -> PerlValue {
1315        if let Some(name) = val.as_array_binding_name() {
1316            let data = self.get_array(&name);
1317            return PerlValue::array_ref(Arc::new(parking_lot::RwLock::new(data)));
1318        }
1319        if let Some(name) = val.as_hash_binding_name() {
1320            let data = self.get_hash(&name);
1321            return PerlValue::hash_ref(Arc::new(parking_lot::RwLock::new(data)));
1322        }
1323        val
1324    }
1325
    /// Promote `@name` to shared Arc-backed storage and return an [`ArrayRef`] that
    /// shares the same `Arc`. Both the scope binding and the returned ref point to
    /// the same data, so mutations through either path are visible.
    pub fn promote_array_to_shared(
        &mut self,
        name: &str,
    ) -> Arc<parking_lot::RwLock<Vec<PerlValue>>> {
        // Atomic (mysync) arrays: snapshot current data into a separate Arc.
        // Can't share the Mutex-backed storage directly.
        if let Some(aa) = self.find_atomic_array(name) {
            let data = aa.0.lock().clone();
            return Arc::new(parking_lot::RwLock::new(data));
        }
        // unwrap_or_default(): an unbound name promotes (empty) in the outermost frame.
        let idx = self.resolve_array_frame_idx(name).unwrap_or_default();
        let frame = &mut self.frames[idx];
        // Already promoted? Return the existing Arc.
        if let Some(entry) = frame.shared_arrays.iter().find(|(k, _)| k == name) {
            return Arc::clone(&entry.1);
        }
        // Take data from frame.arrays, create Arc, store in shared_arrays.
        let data = if let Some(pos) = frame.arrays.iter().position(|(k, _)| k == name) {
            // swap_remove is fine: the order of a frame's array slots doesn't matter.
            frame.arrays.swap_remove(pos).1
        } else if name == "_" {
            // `@_` lives in the dedicated sub_underscore slot — move it out.
            frame.sub_underscore.take().unwrap_or_default()
        } else {
            Vec::new()
        };
        let arc = Arc::new(parking_lot::RwLock::new(data));
        frame
            .shared_arrays
            .push((name.to_string(), Arc::clone(&arc)));
        arc
    }
1359
    /// Promote `%name` to shared Arc-backed storage and return a [`HashRef`] that
    /// shares the same `Arc`.
    ///
    /// NOTE(review): unlike [`Self::promote_array_to_shared`], there is no
    /// `mysync` snapshot branch here — confirm atomic hashes never reach this
    /// path, or that falling through to frame storage is intended for them.
    pub fn promote_hash_to_shared(
        &mut self,
        name: &str,
    ) -> Arc<parking_lot::RwLock<IndexMap<String, PerlValue>>> {
        // unwrap_or_default(): an unbound name promotes (empty) in the outermost frame.
        let idx = self.resolve_hash_frame_idx(name).unwrap_or_default();
        let frame = &mut self.frames[idx];
        // Already promoted? Return the existing Arc.
        if let Some(entry) = frame.shared_hashes.iter().find(|(k, _)| k == name) {
            return Arc::clone(&entry.1);
        }
        // Move the plain data into the Arc; swap_remove order is irrelevant here.
        let data = if let Some(pos) = frame.hashes.iter().position(|(k, _)| k == name) {
            frame.hashes.swap_remove(pos).1
        } else {
            IndexMap::new()
        };
        let arc = Arc::new(parking_lot::RwLock::new(data));
        frame
            .shared_hashes
            .push((name.to_string(), Arc::clone(&arc)));
        arc
    }
1382
1383    /// Find the shared Arc for `@name`, if any.
1384    fn find_shared_array(&self, name: &str) -> Option<Arc<parking_lot::RwLock<Vec<PerlValue>>>> {
1385        for frame in self.frames.iter().rev() {
1386            if let Some(entry) = frame.shared_arrays.iter().find(|(k, _)| k == name) {
1387                return Some(Arc::clone(&entry.1));
1388            }
1389            // If this frame has the plain array, stop — it shadows outer shared ones.
1390            if frame.arrays.iter().any(|(k, _)| k == name) {
1391                return None;
1392            }
1393        }
1394        None
1395    }
1396
1397    /// Find the shared Arc for `%name`, if any.
1398    fn find_shared_hash(
1399        &self,
1400        name: &str,
1401    ) -> Option<Arc<parking_lot::RwLock<IndexMap<String, PerlValue>>>> {
1402        for frame in self.frames.iter().rev() {
1403            if let Some(entry) = frame.shared_hashes.iter().find(|(k, _)| k == name) {
1404                return Some(Arc::clone(&entry.1));
1405            }
1406            if frame.hashes.iter().any(|(k, _)| k == name) {
1407                return None;
1408            }
1409        }
1410        None
1411    }
1412
    /// Mutable borrow of the innermost plain `@name` binding, auto-vivifying an
    /// empty array in the resolved frame when none exists.
    pub fn get_array_mut(&mut self, name: &str) -> Result<&mut Vec<PerlValue>, PerlError> {
        // Note: can't return &mut into a Mutex. Callers needing atomic array
        // mutation should use atomic_array_mutate instead. For non-atomic arrays:
        if self.find_atomic_array(name).is_some() {
            return Err(PerlError::runtime(
                "get_array_mut: use atomic path for mysync arrays",
                0,
            ));
        }
        self.check_parallel_array_write(name)?;
        // unwrap_or_default(): an unbound name vivifies in the outermost frame (idx 0).
        let idx = self.resolve_array_frame_idx(name).unwrap_or_default();
        let frame = &mut self.frames[idx];
        // Deliberate double lookup: checking `is_none()` then re-calling avoids
        // holding the first `&mut` borrow across the `push` (borrow-checker
        // limitation with early-returned mutable borrows).
        if frame.get_array_mut(name).is_none() {
            frame.arrays.push((name.to_string(), Vec::new()));
        }
        Ok(frame.get_array_mut(name).unwrap())
    }
1430
1431    /// Push to array — works for both regular and atomic arrays.
1432    pub fn push_to_array(&mut self, name: &str, val: PerlValue) -> Result<(), PerlError> {
1433        let val = self.resolve_container_binding_ref(val);
1434        if let Some(aa) = self.find_atomic_array(name) {
1435            aa.0.lock().push(val);
1436            return Ok(());
1437        }
1438        if let Some(arc) = self.find_shared_array(name) {
1439            arc.write().push(val);
1440            return Ok(());
1441        }
1442        self.get_array_mut(name)?.push(val);
1443        Ok(())
1444    }
1445
1446    /// Bulk `push @name, start..end-1` for the fused counted-loop superinstruction:
1447    /// reserves the `Vec` once, then pushes `PerlValue::integer(i)` for `i in start..end`
1448    /// in a tight Rust loop. Atomic arrays take a single `lock().push()` burst.
1449    pub fn push_int_range_to_array(
1450        &mut self,
1451        name: &str,
1452        start: i64,
1453        end: i64,
1454    ) -> Result<(), PerlError> {
1455        if end <= start {
1456            return Ok(());
1457        }
1458        let count = (end - start) as usize;
1459        if let Some(aa) = self.find_atomic_array(name) {
1460            let mut g = aa.0.lock();
1461            g.reserve(count);
1462            for i in start..end {
1463                g.push(PerlValue::integer(i));
1464            }
1465            return Ok(());
1466        }
1467        let arr = self.get_array_mut(name)?;
1468        arr.reserve(count);
1469        for i in start..end {
1470            arr.push(PerlValue::integer(i));
1471        }
1472        Ok(())
1473    }
1474
1475    /// Pop from array — works for regular, shared, and atomic arrays.
1476    pub fn pop_from_array(&mut self, name: &str) -> Result<PerlValue, PerlError> {
1477        if let Some(aa) = self.find_atomic_array(name) {
1478            return Ok(aa.0.lock().pop().unwrap_or(PerlValue::UNDEF));
1479        }
1480        if let Some(arc) = self.find_shared_array(name) {
1481            return Ok(arc.write().pop().unwrap_or(PerlValue::UNDEF));
1482        }
1483        Ok(self.get_array_mut(name)?.pop().unwrap_or(PerlValue::UNDEF))
1484    }
1485
1486    /// Shift from array — works for regular, shared, and atomic arrays.
1487    pub fn shift_from_array(&mut self, name: &str) -> Result<PerlValue, PerlError> {
1488        if let Some(aa) = self.find_atomic_array(name) {
1489            let mut guard = aa.0.lock();
1490            return Ok(if guard.is_empty() {
1491                PerlValue::UNDEF
1492            } else {
1493                guard.remove(0)
1494            });
1495        }
1496        if let Some(arc) = self.find_shared_array(name) {
1497            let mut arr = arc.write();
1498            return Ok(if arr.is_empty() {
1499                PerlValue::UNDEF
1500            } else {
1501                arr.remove(0)
1502            });
1503        }
1504        let arr = self.get_array_mut(name)?;
1505        Ok(if arr.is_empty() {
1506            PerlValue::UNDEF
1507        } else {
1508            arr.remove(0)
1509        })
1510    }
1511
1512    /// Get array length — works for both regular and atomic arrays.
1513    pub fn array_len(&self, name: &str) -> usize {
1514        if let Some(aa) = self.find_atomic_array(name) {
1515            return aa.0.lock().len();
1516        }
1517        if let Some(arc) = self.find_shared_array(name) {
1518            return arc.read().len();
1519        }
1520        if name.contains("::") {
1521            return self
1522                .frames
1523                .first()
1524                .and_then(|f| f.get_array(name))
1525                .map(|a| a.len())
1526                .unwrap_or(0);
1527        }
1528        for frame in self.frames.iter().rev() {
1529            if let Some(arr) = frame.get_array(name) {
1530                return arr.len();
1531            }
1532        }
1533        0
1534    }
1535
1536    pub fn set_array(&mut self, name: &str, val: Vec<PerlValue>) -> Result<(), PerlError> {
1537        if let Some(aa) = self.find_atomic_array(name) {
1538            *aa.0.lock() = val;
1539            return Ok(());
1540        }
1541        if let Some(arc) = self.find_shared_array(name) {
1542            *arc.write() = val;
1543            return Ok(());
1544        }
1545        self.check_parallel_array_write(name)?;
1546        for frame in self.frames.iter_mut().rev() {
1547            if frame.has_array(name) {
1548                frame.set_array(name, val);
1549                return Ok(());
1550            }
1551        }
1552        self.frames[0].set_array(name, val);
1553        Ok(())
1554    }
1555
1556    /// Direct element access — works for both regular and atomic arrays.
1557    #[inline]
1558    pub fn get_array_element(&self, name: &str, index: i64) -> PerlValue {
1559        if let Some(aa) = self.find_atomic_array(name) {
1560            let arr = aa.0.lock();
1561            let idx = if index < 0 {
1562                (arr.len() as i64 + index) as usize
1563            } else {
1564                index as usize
1565            };
1566            return arr.get(idx).cloned().unwrap_or(PerlValue::UNDEF);
1567        }
1568        if let Some(arc) = self.find_shared_array(name) {
1569            let arr = arc.read();
1570            let idx = if index < 0 {
1571                (arr.len() as i64 + index) as usize
1572            } else {
1573                index as usize
1574            };
1575            return arr.get(idx).cloned().unwrap_or(PerlValue::UNDEF);
1576        }
1577        for frame in self.frames.iter().rev() {
1578            if let Some(arr) = frame.get_array(name) {
1579                let idx = if index < 0 {
1580                    (arr.len() as i64 + index) as usize
1581                } else {
1582                    index as usize
1583                };
1584                return arr.get(idx).cloned().unwrap_or(PerlValue::UNDEF);
1585            }
1586        }
1587        PerlValue::UNDEF
1588    }
1589
1590    pub fn set_array_element(
1591        &mut self,
1592        name: &str,
1593        index: i64,
1594        val: PerlValue,
1595    ) -> Result<(), PerlError> {
1596        let val = self.resolve_container_binding_ref(val);
1597        if let Some(aa) = self.find_atomic_array(name) {
1598            let mut arr = aa.0.lock();
1599            let idx = if index < 0 {
1600                (arr.len() as i64 + index).max(0) as usize
1601            } else {
1602                index as usize
1603            };
1604            if idx >= arr.len() {
1605                arr.resize(idx + 1, PerlValue::UNDEF);
1606            }
1607            arr[idx] = val;
1608            return Ok(());
1609        }
1610        if let Some(arc) = self.find_shared_array(name) {
1611            let mut arr = arc.write();
1612            let idx = if index < 0 {
1613                (arr.len() as i64 + index).max(0) as usize
1614            } else {
1615                index as usize
1616            };
1617            if idx >= arr.len() {
1618                arr.resize(idx + 1, PerlValue::UNDEF);
1619            }
1620            arr[idx] = val;
1621            return Ok(());
1622        }
1623        let arr = self.get_array_mut(name)?;
1624        let idx = if index < 0 {
1625            let len = arr.len() as i64;
1626            (len + index).max(0) as usize
1627        } else {
1628            index as usize
1629        };
1630        if idx >= arr.len() {
1631            arr.resize(idx + 1, PerlValue::UNDEF);
1632        }
1633        arr[idx] = val;
1634        Ok(())
1635    }
1636
1637    /// Perl `exists $a[$i]` — true when the slot index is within the current array length.
1638    pub fn exists_array_element(&self, name: &str, index: i64) -> bool {
1639        if let Some(aa) = self.find_atomic_array(name) {
1640            let arr = aa.0.lock();
1641            let idx = if index < 0 {
1642                (arr.len() as i64 + index) as usize
1643            } else {
1644                index as usize
1645            };
1646            return idx < arr.len();
1647        }
1648        for frame in self.frames.iter().rev() {
1649            if let Some(arr) = frame.get_array(name) {
1650                let idx = if index < 0 {
1651                    (arr.len() as i64 + index) as usize
1652                } else {
1653                    index as usize
1654                };
1655                return idx < arr.len();
1656            }
1657        }
1658        false
1659    }
1660
1661    /// Perl `delete $a[$i]` — sets the element to `undef`, returns the previous value.
1662    pub fn delete_array_element(&mut self, name: &str, index: i64) -> Result<PerlValue, PerlError> {
1663        if let Some(aa) = self.find_atomic_array(name) {
1664            let mut arr = aa.0.lock();
1665            let idx = if index < 0 {
1666                (arr.len() as i64 + index) as usize
1667            } else {
1668                index as usize
1669            };
1670            if idx >= arr.len() {
1671                return Ok(PerlValue::UNDEF);
1672            }
1673            let old = arr.get(idx).cloned().unwrap_or(PerlValue::UNDEF);
1674            arr[idx] = PerlValue::UNDEF;
1675            return Ok(old);
1676        }
1677        let arr = self.get_array_mut(name)?;
1678        let idx = if index < 0 {
1679            (arr.len() as i64 + index) as usize
1680        } else {
1681            index as usize
1682        };
1683        if idx >= arr.len() {
1684            return Ok(PerlValue::UNDEF);
1685        }
1686        let old = arr.get(idx).cloned().unwrap_or(PerlValue::UNDEF);
1687        arr[idx] = PerlValue::UNDEF;
1688        Ok(old)
1689    }
1690
1691    // ── Hashes ──
1692
    /// Declare `%name` in the current (innermost) frame, unfrozen — thin
    /// wrapper over [`Self::declare_hash_frozen`] with `frozen = false`.
    #[inline]
    pub fn declare_hash(&mut self, name: &str, val: IndexMap<String, PerlValue>) {
        self.declare_hash_frozen(name, val, false);
    }
1697
1698    pub fn declare_hash_frozen(
1699        &mut self,
1700        name: &str,
1701        val: IndexMap<String, PerlValue>,
1702        frozen: bool,
1703    ) {
1704        if let Some(frame) = self.frames.last_mut() {
1705            // Remove any existing shared Arc — re-declaration disconnects old refs.
1706            frame.shared_hashes.retain(|(k, _)| k != name);
1707            frame.set_hash(name, val);
1708            if frozen {
1709                frame.frozen_hashes.insert(name.to_string());
1710            }
1711        }
1712    }
1713
1714    /// Declare a hash in the bottom (global) frame, not the current lexical frame.
1715    pub fn declare_hash_global(&mut self, name: &str, val: IndexMap<String, PerlValue>) {
1716        if let Some(frame) = self.frames.first_mut() {
1717            frame.set_hash(name, val);
1718        }
1719    }
1720
1721    /// Declare a frozen hash in the bottom (global) frame — prevents user reassignment.
1722    pub fn declare_hash_global_frozen(&mut self, name: &str, val: IndexMap<String, PerlValue>) {
1723        if let Some(frame) = self.frames.first_mut() {
1724            frame.set_hash(name, val);
1725            frame.frozen_hashes.insert(name.to_string());
1726        }
1727    }
1728
1729    /// Returns `true` if a lexical (non-bottom) frame declares `%name`.
1730    pub fn has_lexical_hash(&self, name: &str) -> bool {
1731        self.frames.iter().skip(1).any(|f| f.has_hash(name))
1732    }
1733
1734    /// Returns `true` if ANY frame (including global) declares `%name`.
1735    pub fn any_frame_has_hash(&self, name: &str) -> bool {
1736        self.frames.iter().any(|f| f.has_hash(name))
1737    }
1738
1739    pub fn get_hash(&self, name: &str) -> IndexMap<String, PerlValue> {
1740        if let Some(ah) = self.find_atomic_hash(name) {
1741            return ah.0.lock().clone();
1742        }
1743        if let Some(arc) = self.find_shared_hash(name) {
1744            return arc.read().clone();
1745        }
1746        for frame in self.frames.iter().rev() {
1747            if let Some(val) = frame.get_hash(name) {
1748                return val.clone();
1749            }
1750        }
1751        IndexMap::new()
1752    }
1753
1754    fn resolve_hash_frame_idx(&self, name: &str) -> Option<usize> {
1755        if name.contains("::") {
1756            return Some(0);
1757        }
1758        (0..self.frames.len())
1759            .rev()
1760            .find(|&i| self.frames[i].has_hash(name))
1761    }
1762
1763    fn check_parallel_hash_write(&self, name: &str) -> Result<(), PerlError> {
1764        if !self.parallel_guard
1765            || Self::parallel_skip_special_name(name)
1766            || Self::parallel_allowed_internal_hash(name)
1767        {
1768            return Ok(());
1769        }
1770        let inner = self.frames.len().saturating_sub(1);
1771        match self.resolve_hash_frame_idx(name) {
1772            None => Err(PerlError::runtime(
1773                format!(
1774                    "cannot modify undeclared hash `%{}` in a parallel block",
1775                    name
1776                ),
1777                0,
1778            )),
1779            Some(idx) if idx != inner => Err(PerlError::runtime(
1780                format!(
1781                    "cannot modify captured non-mysync hash `%{}` in a parallel block",
1782                    name
1783                ),
1784                0,
1785            )),
1786            Some(_) => Ok(()),
1787        }
1788    }
1789
1790    pub fn get_hash_mut(
1791        &mut self,
1792        name: &str,
1793    ) -> Result<&mut IndexMap<String, PerlValue>, PerlError> {
1794        if self.find_atomic_hash(name).is_some() {
1795            return Err(PerlError::runtime(
1796                "get_hash_mut: use atomic path for mysync hashes",
1797                0,
1798            ));
1799        }
1800        self.check_parallel_hash_write(name)?;
1801        let idx = self.resolve_hash_frame_idx(name).unwrap_or_default();
1802        let frame = &mut self.frames[idx];
1803        if frame.get_hash_mut(name).is_none() {
1804            frame.hashes.push((name.to_string(), IndexMap::new()));
1805        }
1806        Ok(frame.get_hash_mut(name).unwrap())
1807    }
1808
1809    pub fn set_hash(
1810        &mut self,
1811        name: &str,
1812        val: IndexMap<String, PerlValue>,
1813    ) -> Result<(), PerlError> {
1814        if let Some(ah) = self.find_atomic_hash(name) {
1815            *ah.0.lock() = val;
1816            return Ok(());
1817        }
1818        self.check_parallel_hash_write(name)?;
1819        for frame in self.frames.iter_mut().rev() {
1820            if frame.has_hash(name) {
1821                frame.set_hash(name, val);
1822                return Ok(());
1823            }
1824        }
1825        self.frames[0].set_hash(name, val);
1826        Ok(())
1827    }
1828
1829    #[inline]
1830    pub fn get_hash_element(&self, name: &str, key: &str) -> PerlValue {
1831        if let Some(ah) = self.find_atomic_hash(name) {
1832            return ah.0.lock().get(key).cloned().unwrap_or(PerlValue::UNDEF);
1833        }
1834        if let Some(arc) = self.find_shared_hash(name) {
1835            return arc.read().get(key).cloned().unwrap_or(PerlValue::UNDEF);
1836        }
1837        for frame in self.frames.iter().rev() {
1838            if let Some(hash) = frame.get_hash(name) {
1839                return hash.get(key).cloned().unwrap_or(PerlValue::UNDEF);
1840            }
1841        }
1842        PerlValue::UNDEF
1843    }
1844
1845    /// Atomically read-modify-write a hash element. For atomic hashes, holds
1846    /// the Mutex for the full cycle. Returns the new value.
1847    pub fn atomic_hash_mutate(
1848        &mut self,
1849        name: &str,
1850        key: &str,
1851        f: impl FnOnce(&PerlValue) -> PerlValue,
1852    ) -> Result<PerlValue, PerlError> {
1853        if let Some(ah) = self.find_atomic_hash(name) {
1854            let mut guard = ah.0.lock();
1855            let old = guard.get(key).cloned().unwrap_or(PerlValue::UNDEF);
1856            let new_val = f(&old);
1857            guard.insert(key.to_string(), new_val.clone());
1858            return Ok(new_val);
1859        }
1860        // Non-atomic fallback
1861        let old = self.get_hash_element(name, key);
1862        let new_val = f(&old);
1863        self.set_hash_element(name, key, new_val.clone())?;
1864        Ok(new_val)
1865    }
1866
1867    /// Atomically read-modify-write an array element. Returns the new value.
1868    pub fn atomic_array_mutate(
1869        &mut self,
1870        name: &str,
1871        index: i64,
1872        f: impl FnOnce(&PerlValue) -> PerlValue,
1873    ) -> Result<PerlValue, PerlError> {
1874        if let Some(aa) = self.find_atomic_array(name) {
1875            let mut guard = aa.0.lock();
1876            let idx = if index < 0 {
1877                (guard.len() as i64 + index).max(0) as usize
1878            } else {
1879                index as usize
1880            };
1881            if idx >= guard.len() {
1882                guard.resize(idx + 1, PerlValue::UNDEF);
1883            }
1884            let old = guard[idx].clone();
1885            let new_val = f(&old);
1886            guard[idx] = new_val.clone();
1887            return Ok(new_val);
1888        }
1889        // Non-atomic fallback
1890        let old = self.get_array_element(name, index);
1891        let new_val = f(&old);
1892        self.set_array_element(name, index, new_val.clone())?;
1893        Ok(new_val)
1894    }
1895
1896    pub fn set_hash_element(
1897        &mut self,
1898        name: &str,
1899        key: &str,
1900        val: PerlValue,
1901    ) -> Result<(), PerlError> {
1902        let val = self.resolve_container_binding_ref(val);
1903        // `$SIG{INT} = \&h` — lazily install the matching signal hook. Until Perl code touches
1904        // `%SIG`, the POSIX default stays in place so Ctrl-C terminates immediately.
1905        if name == "SIG" {
1906            crate::perl_signal::install(key);
1907        }
1908        if let Some(ah) = self.find_atomic_hash(name) {
1909            ah.0.lock().insert(key.to_string(), val);
1910            return Ok(());
1911        }
1912        if let Some(arc) = self.find_shared_hash(name) {
1913            arc.write().insert(key.to_string(), val);
1914            return Ok(());
1915        }
1916        let hash = self.get_hash_mut(name)?;
1917        hash.insert(key.to_string(), val);
1918        Ok(())
1919    }
1920
1921    /// Bulk `for i in start..end { $h{i} = i * k }` for the fused hash-insert loop.
1922    /// Reserves capacity once and runs the whole range in a tight Rust loop.
1923    /// `itoa` is used to stringify each key without a transient `format!` allocation.
1924    pub fn set_hash_int_times_range(
1925        &mut self,
1926        name: &str,
1927        start: i64,
1928        end: i64,
1929        k: i64,
1930    ) -> Result<(), PerlError> {
1931        if end <= start {
1932            return Ok(());
1933        }
1934        let count = (end - start) as usize;
1935        if let Some(ah) = self.find_atomic_hash(name) {
1936            let mut g = ah.0.lock();
1937            g.reserve(count);
1938            let mut buf = itoa::Buffer::new();
1939            for i in start..end {
1940                let key = buf.format(i).to_owned();
1941                g.insert(key, PerlValue::integer(i.wrapping_mul(k)));
1942            }
1943            return Ok(());
1944        }
1945        let hash = self.get_hash_mut(name)?;
1946        hash.reserve(count);
1947        let mut buf = itoa::Buffer::new();
1948        for i in start..end {
1949            let key = buf.format(i).to_owned();
1950            hash.insert(key, PerlValue::integer(i.wrapping_mul(k)));
1951        }
1952        Ok(())
1953    }
1954
1955    pub fn delete_hash_element(&mut self, name: &str, key: &str) -> Result<PerlValue, PerlError> {
1956        if let Some(ah) = self.find_atomic_hash(name) {
1957            return Ok(ah.0.lock().shift_remove(key).unwrap_or(PerlValue::UNDEF));
1958        }
1959        let hash = self.get_hash_mut(name)?;
1960        Ok(hash.shift_remove(key).unwrap_or(PerlValue::UNDEF))
1961    }
1962
1963    #[inline]
1964    pub fn exists_hash_element(&self, name: &str, key: &str) -> bool {
1965        if let Some(ah) = self.find_atomic_hash(name) {
1966            return ah.0.lock().contains_key(key);
1967        }
1968        for frame in self.frames.iter().rev() {
1969            if let Some(hash) = frame.get_hash(name) {
1970                return hash.contains_key(key);
1971            }
1972        }
1973        false
1974    }
1975
1976    /// Walk all values of the named hash with a visitor. Used by the fused
1977    /// `for my $k (keys %h) { $sum += $h{$k} }` op so the hot loop runs without
1978    /// cloning the entire map into a keys array (vs the un-fused shape, which
1979    /// allocates one `PerlValue::string` per key).
1980    #[inline]
1981    pub fn for_each_hash_value(&self, name: &str, mut visit: impl FnMut(&PerlValue)) {
1982        if let Some(ah) = self.find_atomic_hash(name) {
1983            let g = ah.0.lock();
1984            for v in g.values() {
1985                visit(v);
1986            }
1987            return;
1988        }
1989        for frame in self.frames.iter().rev() {
1990            if let Some(hash) = frame.get_hash(name) {
1991                for v in hash.values() {
1992                    visit(v);
1993                }
1994                return;
1995            }
1996        }
1997    }
1998
1999    /// Sigil-prefixed names (`$x`, `@a`, `%h`) from all frames, for REPL tab-completion.
2000    pub fn repl_binding_names(&self) -> Vec<String> {
2001        let mut seen = HashSet::new();
2002        let mut out = Vec::new();
2003        for frame in &self.frames {
2004            for (name, _) in &frame.scalars {
2005                let s = format!("${}", name);
2006                if seen.insert(s.clone()) {
2007                    out.push(s);
2008                }
2009            }
2010            for (name, _) in &frame.arrays {
2011                let s = format!("@{}", name);
2012                if seen.insert(s.clone()) {
2013                    out.push(s);
2014                }
2015            }
2016            for (name, _) in &frame.hashes {
2017                let s = format!("%{}", name);
2018                if seen.insert(s.clone()) {
2019                    out.push(s);
2020                }
2021            }
2022            for (name, _) in &frame.atomic_arrays {
2023                let s = format!("@{}", name);
2024                if seen.insert(s.clone()) {
2025                    out.push(s);
2026                }
2027            }
2028            for (name, _) in &frame.atomic_hashes {
2029                let s = format!("%{}", name);
2030                if seen.insert(s.clone()) {
2031                    out.push(s);
2032                }
2033            }
2034        }
2035        out.sort();
2036        out
2037    }
2038
    /// Snapshot every binding from every frame into a flat, sigil-keyed list
    /// for closure capture (decoded by [`Scope::restore_capture`]).
    ///
    /// Takes `&mut self` because simple scalars are rewritten in place into
    /// shared `CaptureCell`s, so the closure and the originating scope end up
    /// pointing at the same cell. Bootstrap containers (`@INC`, `%ENV`, …)
    /// are skipped — see [`capture_skip_bootstrap_array`] /
    /// [`capture_skip_bootstrap_hash`].
    pub fn capture(&mut self) -> Vec<(String, PerlValue)> {
        let mut captured = Vec::new();
        for frame in &mut self.frames {
            for (k, v) in &mut frame.scalars {
                // Wrap scalar in CaptureCell so the closure shares the same memory cell.
                // If it's already a CaptureCell or ScalarRef, clone as-is (shares the same Arc).
                // Only wrap simple scalars (integers, floats, strings, undef); complex values
                // like refs, blessed objects, atomics, etc. already share via Arc and wrapping
                // them in CaptureCell breaks type detection (as_ppool, as_blessed_ref, etc.).
                if v.as_capture_cell().is_some() || v.as_scalar_ref().is_some() {
                    captured.push((format!("${}", k), v.clone()));
                } else if v.is_simple_scalar() {
                    let wrapped = PerlValue::capture_cell(Arc::new(RwLock::new(v.clone())));
                    // Update the original scope variable to point to the same CaptureCell
                    // so that subsequent closures share the same reference.
                    *v = wrapped.clone();
                    captured.push((format!("${}", k), wrapped));
                } else {
                    // Complex value: its clone already shares the inner Arc.
                    captured.push((format!("${}", k), v.clone()));
                }
            }
            // Named scalar slots are keyed "$slot:INDEX:NAME" so restore can
            // put them back into the slot table, not the name table.
            for (i, v) in frame.scalar_slots.iter().enumerate() {
                if let Some(Some(name)) = frame.scalar_slot_names.get(i) {
                    // Wrap slot value in CaptureCell for closure sharing.
                    let wrapped = if v.as_capture_cell().is_some() || v.as_scalar_ref().is_some() {
                        v.clone()
                    } else {
                        PerlValue::capture_cell(Arc::new(RwLock::new(v.clone())))
                    };
                    captured.push((format!("$slot:{}:{}", i, name), wrapped));
                }
            }
            // Arrays/hashes are captured by value; frozen ones carry a
            // "frozen:" key marker so restore re-freezes them.
            for (k, v) in &frame.arrays {
                if capture_skip_bootstrap_array(k) {
                    continue;
                }
                if frame.frozen_arrays.contains(k) {
                    captured.push((format!("@frozen:{}", k), PerlValue::array(v.clone())));
                } else {
                    captured.push((format!("@{}", k), PerlValue::array(v.clone())));
                }
            }
            for (k, v) in &frame.hashes {
                if capture_skip_bootstrap_hash(k) {
                    continue;
                }
                if frame.frozen_hashes.contains(k) {
                    captured.push((format!("%frozen:{}", k), PerlValue::hash(v.clone())));
                } else {
                    captured.push((format!("%{}", k), PerlValue::hash(v.clone())));
                }
            }
            // mysync containers contribute only placeholder entries here
            // (restore_capture skips the "sync_" names); their live handles
            // travel via capture_with_atomics / restore_atomics.
            for (k, _aa) in &frame.atomic_arrays {
                captured.push((
                    format!("@sync_{}", k),
                    PerlValue::atomic(Arc::new(Mutex::new(PerlValue::string(String::new())))),
                ));
            }
            for (k, _ah) in &frame.atomic_hashes {
                captured.push((
                    format!("%sync_{}", k),
                    PerlValue::atomic(Arc::new(Mutex::new(PerlValue::string(String::new())))),
                ));
            }
        }
        captured
    }
2106
2107    /// Extended capture that returns atomic arrays/hashes separately.
2108    pub fn capture_with_atomics(&self) -> ScopeCaptureWithAtomics {
2109        let mut scalars = Vec::new();
2110        let mut arrays = Vec::new();
2111        let mut hashes = Vec::new();
2112        for frame in &self.frames {
2113            for (k, v) in &frame.scalars {
2114                scalars.push((format!("${}", k), v.clone()));
2115            }
2116            for (i, v) in frame.scalar_slots.iter().enumerate() {
2117                if let Some(Some(name)) = frame.scalar_slot_names.get(i) {
2118                    scalars.push((format!("$slot:{}:{}", i, name), v.clone()));
2119                }
2120            }
2121            for (k, v) in &frame.arrays {
2122                if capture_skip_bootstrap_array(k) {
2123                    continue;
2124                }
2125                if frame.frozen_arrays.contains(k) {
2126                    scalars.push((format!("@frozen:{}", k), PerlValue::array(v.clone())));
2127                } else {
2128                    scalars.push((format!("@{}", k), PerlValue::array(v.clone())));
2129                }
2130            }
2131            for (k, v) in &frame.hashes {
2132                if capture_skip_bootstrap_hash(k) {
2133                    continue;
2134                }
2135                if frame.frozen_hashes.contains(k) {
2136                    scalars.push((format!("%frozen:{}", k), PerlValue::hash(v.clone())));
2137                } else {
2138                    scalars.push((format!("%{}", k), PerlValue::hash(v.clone())));
2139                }
2140            }
2141            for (k, aa) in &frame.atomic_arrays {
2142                arrays.push((k.clone(), aa.clone()));
2143            }
2144            for (k, ah) in &frame.atomic_hashes {
2145                hashes.push((k.clone(), ah.clone()));
2146            }
2147        }
2148        (scalars, arrays, hashes)
2149    }
2150
    /// Re-install bindings produced by [`Scope::capture`] into this scope.
    ///
    /// Key prefixes are checked longest-first (`$slot:` before `$`,
    /// `@frozen:`/`%frozen:` before `@`/`%`) so the specific forms win.
    /// `@sync_*`/`%sync_*` placeholder entries are skipped here — atomic
    /// containers are restored separately (see [`Scope::restore_atomics`]).
    pub fn restore_capture(&mut self, captured: &[(String, PerlValue)]) {
        for (name, val) in captured {
            if let Some(rest) = name.strip_prefix("$slot:") {
                // "$slot:INDEX:NAME" — restore into scalar_slots only.
                // `get_scalar` finds slots via `get_scalar_from_slot`, so a separate
                // `declare_scalar` is unnecessary and would double-wrap: `set_scalar`
                // sees the slot's ScalarRef and writes *through* it, nesting
                // `ScalarRef(ScalarRef(inner))`.
                if let Some(colon) = rest.find(':') {
                    let idx: usize = rest[..colon].parse().unwrap_or(0);
                    let sname = &rest[colon + 1..];
                    self.declare_scalar_slot(idx as u8, val.clone(), Some(sname));
                }
            } else if let Some(stripped) = name.strip_prefix('$') {
                self.declare_scalar(stripped, val.clone());
            } else if let Some(rest) = name.strip_prefix("@frozen:") {
                // Fall back to `to_list` when the capture isn't an Array value.
                let arr = val.as_array_vec().unwrap_or_else(|| val.to_list());
                self.declare_array_frozen(rest, arr, true);
            } else if let Some(rest) = name.strip_prefix("%frozen:") {
                if let Some(h) = val.as_hash_map() {
                    self.declare_hash_frozen(rest, h.clone(), true);
                }
            } else if let Some(rest) = name.strip_prefix('@') {
                // "@sync_*" placeholders carry no data — skip them.
                if rest.starts_with("sync_") {
                    continue;
                }
                let arr = val.as_array_vec().unwrap_or_else(|| val.to_list());
                self.declare_array(rest, arr);
            } else if let Some(rest) = name.strip_prefix('%') {
                // "%sync_*" placeholders carry no data — skip them.
                if rest.starts_with("sync_") {
                    continue;
                }
                if let Some(h) = val.as_hash_map() {
                    self.declare_hash(rest, h.clone());
                }
            }
        }
    }
2189
2190    /// Restore atomic arrays/hashes from capture_with_atomics.
2191    pub fn restore_atomics(
2192        &mut self,
2193        arrays: &[(String, AtomicArray)],
2194        hashes: &[(String, AtomicHash)],
2195    ) {
2196        if let Some(frame) = self.frames.last_mut() {
2197            for (name, aa) in arrays {
2198                frame.atomic_arrays.push((name.clone(), aa.clone()));
2199            }
2200            for (name, ah) in hashes {
2201                frame.atomic_hashes.push((name.clone(), ah.clone()));
2202            }
2203        }
2204    }
2205}
2206
2207#[cfg(test)]
2208mod tests {
2209    use super::*;
2210    use crate::value::PerlValue;
2211
2212    #[test]
2213    fn missing_scalar_is_undef() {
2214        let s = Scope::new();
2215        assert!(s.get_scalar("not_declared").is_undef());
2216    }
2217
2218    #[test]
2219    fn inner_frame_shadows_outer_scalar() {
2220        let mut s = Scope::new();
2221        s.declare_scalar("a", PerlValue::integer(1));
2222        s.push_frame();
2223        s.declare_scalar("a", PerlValue::integer(2));
2224        assert_eq!(s.get_scalar("a").to_int(), 2);
2225        s.pop_frame();
2226        assert_eq!(s.get_scalar("a").to_int(), 1);
2227    }
2228
2229    #[test]
2230    fn set_scalar_updates_innermost_binding() {
2231        let mut s = Scope::new();
2232        s.declare_scalar("a", PerlValue::integer(1));
2233        s.push_frame();
2234        s.declare_scalar("a", PerlValue::integer(2));
2235        let _ = s.set_scalar("a", PerlValue::integer(99));
2236        assert_eq!(s.get_scalar("a").to_int(), 99);
2237        s.pop_frame();
2238        assert_eq!(s.get_scalar("a").to_int(), 1);
2239    }
2240
2241    #[test]
2242    fn array_negative_index_reads_from_end() {
2243        let mut s = Scope::new();
2244        s.declare_array(
2245            "a",
2246            vec![
2247                PerlValue::integer(10),
2248                PerlValue::integer(20),
2249                PerlValue::integer(30),
2250            ],
2251        );
2252        assert_eq!(s.get_array_element("a", -1).to_int(), 30);
2253    }
2254
2255    #[test]
2256    fn set_array_element_extends_array_with_undef_gaps() {
2257        let mut s = Scope::new();
2258        s.declare_array("a", vec![]);
2259        s.set_array_element("a", 2, PerlValue::integer(7)).unwrap();
2260        assert_eq!(s.get_array_element("a", 2).to_int(), 7);
2261        assert!(s.get_array_element("a", 0).is_undef());
2262    }
2263
2264    #[test]
2265    fn capture_restore_roundtrip_scalar() {
2266        let mut s = Scope::new();
2267        s.declare_scalar("n", PerlValue::integer(42));
2268        let cap = s.capture();
2269        let mut t = Scope::new();
2270        t.restore_capture(&cap);
2271        assert_eq!(t.get_scalar("n").to_int(), 42);
2272    }
2273
2274    #[test]
2275    fn capture_restore_roundtrip_lexical_array_and_hash() {
2276        let mut s = Scope::new();
2277        s.declare_array("a", vec![PerlValue::integer(1), PerlValue::integer(2)]);
2278        let mut m = IndexMap::new();
2279        m.insert("k".to_string(), PerlValue::integer(99));
2280        s.declare_hash("h", m);
2281        let cap = s.capture();
2282        let mut t = Scope::new();
2283        t.restore_capture(&cap);
2284        assert_eq!(t.get_array_element("a", 1).to_int(), 2);
2285        assert_eq!(t.get_hash_element("h", "k").to_int(), 99);
2286    }
2287
2288    #[test]
2289    fn hash_get_set_delete_exists() {
2290        let mut s = Scope::new();
2291        let mut m = IndexMap::new();
2292        m.insert("k".to_string(), PerlValue::integer(1));
2293        s.declare_hash("h", m);
2294        assert_eq!(s.get_hash_element("h", "k").to_int(), 1);
2295        assert!(s.exists_hash_element("h", "k"));
2296        s.set_hash_element("h", "k", PerlValue::integer(99))
2297            .unwrap();
2298        assert_eq!(s.get_hash_element("h", "k").to_int(), 99);
2299        let del = s.delete_hash_element("h", "k").unwrap();
2300        assert_eq!(del.to_int(), 99);
2301        assert!(!s.exists_hash_element("h", "k"));
2302    }
2303
2304    #[test]
2305    fn inner_frame_shadows_outer_hash_name() {
2306        let mut s = Scope::new();
2307        let mut outer = IndexMap::new();
2308        outer.insert("k".to_string(), PerlValue::integer(1));
2309        s.declare_hash("h", outer);
2310        s.push_frame();
2311        let mut inner = IndexMap::new();
2312        inner.insert("k".to_string(), PerlValue::integer(2));
2313        s.declare_hash("h", inner);
2314        assert_eq!(s.get_hash_element("h", "k").to_int(), 2);
2315        s.pop_frame();
2316        assert_eq!(s.get_hash_element("h", "k").to_int(), 1);
2317    }
2318
2319    #[test]
2320    fn inner_frame_shadows_outer_array_name() {
2321        let mut s = Scope::new();
2322        s.declare_array("a", vec![PerlValue::integer(1)]);
2323        s.push_frame();
2324        s.declare_array("a", vec![PerlValue::integer(2), PerlValue::integer(3)]);
2325        assert_eq!(s.get_array_element("a", 1).to_int(), 3);
2326        s.pop_frame();
2327        assert_eq!(s.get_array_element("a", 0).to_int(), 1);
2328    }
2329
2330    #[test]
2331    fn pop_frame_never_removes_global_frame() {
2332        let mut s = Scope::new();
2333        s.declare_scalar("x", PerlValue::integer(1));
2334        s.pop_frame();
2335        s.pop_frame();
2336        assert_eq!(s.get_scalar("x").to_int(), 1);
2337    }
2338
2339    #[test]
2340    fn empty_array_declared_has_zero_length() {
2341        let mut s = Scope::new();
2342        s.declare_array("a", vec![]);
2343        assert_eq!(s.get_array("a").len(), 0);
2344    }
2345
2346    #[test]
2347    fn depth_increments_with_push_frame() {
2348        let mut s = Scope::new();
2349        let d0 = s.depth();
2350        s.push_frame();
2351        assert_eq!(s.depth(), d0 + 1);
2352        s.pop_frame();
2353        assert_eq!(s.depth(), d0);
2354    }
2355
2356    #[test]
2357    fn pop_to_depth_unwinds_to_target() {
2358        let mut s = Scope::new();
2359        s.push_frame();
2360        s.push_frame();
2361        let target = s.depth() - 1;
2362        s.pop_to_depth(target);
2363        assert_eq!(s.depth(), target);
2364    }
2365
2366    #[test]
2367    fn array_len_and_push_pop_roundtrip() {
2368        let mut s = Scope::new();
2369        s.declare_array("a", vec![]);
2370        assert_eq!(s.array_len("a"), 0);
2371        s.push_to_array("a", PerlValue::integer(1)).unwrap();
2372        s.push_to_array("a", PerlValue::integer(2)).unwrap();
2373        assert_eq!(s.array_len("a"), 2);
2374        assert_eq!(s.pop_from_array("a").unwrap().to_int(), 2);
2375        assert_eq!(s.pop_from_array("a").unwrap().to_int(), 1);
2376        assert!(s.pop_from_array("a").unwrap().is_undef());
2377    }
2378
2379    #[test]
2380    fn shift_from_array_drops_front() {
2381        let mut s = Scope::new();
2382        s.declare_array("a", vec![PerlValue::integer(1), PerlValue::integer(2)]);
2383        assert_eq!(s.shift_from_array("a").unwrap().to_int(), 1);
2384        assert_eq!(s.array_len("a"), 1);
2385    }
2386
2387    #[test]
2388    fn atomic_mutate_increments_wrapped_scalar() {
2389        use parking_lot::Mutex;
2390        use std::sync::Arc;
2391        let mut s = Scope::new();
2392        s.declare_scalar(
2393            "n",
2394            PerlValue::atomic(Arc::new(Mutex::new(PerlValue::integer(10)))),
2395        );
2396        let v = s.atomic_mutate("n", |old| PerlValue::integer(old.to_int() + 5));
2397        assert_eq!(v.to_int(), 15);
2398        assert_eq!(s.get_scalar("n").to_int(), 15);
2399    }
2400
2401    #[test]
2402    fn atomic_mutate_post_returns_old_value() {
2403        use parking_lot::Mutex;
2404        use std::sync::Arc;
2405        let mut s = Scope::new();
2406        s.declare_scalar(
2407            "n",
2408            PerlValue::atomic(Arc::new(Mutex::new(PerlValue::integer(7)))),
2409        );
2410        let old = s.atomic_mutate_post("n", |v| PerlValue::integer(v.to_int() + 1));
2411        assert_eq!(old.to_int(), 7);
2412        assert_eq!(s.get_scalar("n").to_int(), 8);
2413    }
2414
2415    #[test]
2416    fn get_scalar_raw_keeps_atomic_wrapper() {
2417        use parking_lot::Mutex;
2418        use std::sync::Arc;
2419        let mut s = Scope::new();
2420        s.declare_scalar(
2421            "n",
2422            PerlValue::atomic(Arc::new(Mutex::new(PerlValue::integer(3)))),
2423        );
2424        assert!(s.get_scalar_raw("n").is_atomic());
2425        assert!(!s.get_scalar("n").is_atomic());
2426    }
2427
2428    #[test]
2429    fn missing_array_element_is_undef() {
2430        let mut s = Scope::new();
2431        s.declare_array("a", vec![PerlValue::integer(1)]);
2432        assert!(s.get_array_element("a", 99).is_undef());
2433    }
2434
2435    #[test]
2436    fn restore_atomics_puts_atomic_containers_in_frame() {
2437        use indexmap::IndexMap;
2438        use parking_lot::Mutex;
2439        use std::sync::Arc;
2440        let mut s = Scope::new();
2441        let aa = AtomicArray(Arc::new(Mutex::new(vec![PerlValue::integer(1)])));
2442        let ah = AtomicHash(Arc::new(Mutex::new(IndexMap::new())));
2443        s.restore_atomics(&[("ax".into(), aa.clone())], &[("hx".into(), ah.clone())]);
2444        assert_eq!(s.get_array_element("ax", 0).to_int(), 1);
2445        assert_eq!(s.array_len("ax"), 1);
2446        s.set_hash_element("hx", "k", PerlValue::integer(2))
2447            .unwrap();
2448        assert_eq!(s.get_hash_element("hx", "k").to_int(), 2);
2449    }
2450}