//! harn_vm/vm/debug.rs — debugger support (breakpoints, stepping,
//! evaluation, cancellation) for the Harn VM.
1use std::rc::Rc;
2
3use crate::chunk::{Chunk, Constant};
4use crate::value::{VmError, VmValue};
5
6use super::{CallFrame, Vm};
7
/// Debug action returned by the debug hook.
///
/// Returned by the hook installed via `Vm::set_debug_hook`: `Stop`
/// pauses the VM at the current source line, `Continue` resumes.
/// The enum is a pure two-variant flag, so `Copy` and `Eq` are
/// derived in addition to the original `Clone`/`PartialEq`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DebugAction {
    /// Continue execution normally.
    Continue,
    /// Stop (breakpoint hit, step complete).
    Stop,
}
16
/// Information about current execution state for the debugger.
#[derive(Debug, Clone)]
pub struct DebugState {
    /// Source line currently executing (0 when unknown — see `current_line`).
    pub line: usize,
    /// Name → value for every binding visible in the current scope;
    /// a `BTreeMap` so debugger variable lists render in stable order.
    pub variables: std::collections::BTreeMap<String, VmValue>,
    /// Display name of the active frame ("pipeline" for the root frame).
    pub frame_name: String,
    /// Number of live call frames at the time of capture.
    pub frame_depth: usize,
}
25
/// Boxed callback invoked on each source-line change during stepping;
/// returning [`DebugAction::Stop`] pauses the VM.
pub(super) type DebugHook = dyn FnMut(&DebugState) -> DebugAction;
27
28impl Vm {
29    /// Replace breakpoints for a single source file. Pass an empty string
30    /// (or call `set_breakpoints` for the wildcard equivalent) to install
31    /// breakpoints that match every file — useful for ad-hoc CLI runs
32    /// where the embedder doesn't track per-file source paths.
33    pub fn set_breakpoints_for_file(&mut self, file: &str, lines: Vec<usize>) {
34        if lines.is_empty() {
35            self.breakpoints.remove(file);
36            return;
37        }
38        self.breakpoints
39            .insert(file.to_string(), lines.into_iter().collect());
40    }
41
    /// Backwards-compatible wildcard form. Stores all lines under the
    /// empty-string key, which matches *any* source file at the check
    /// site (see `breakpoint_matches`). Existing embedders that don't
    /// track file scoping still work unchanged.
    pub fn set_breakpoints(&mut self, lines: Vec<usize>) {
        self.set_breakpoints_for_file("", lines);
    }
48
49    /// Replace the function-breakpoint set. Every subsequent closure
50    /// call whose name matches one of the provided strings will pause
51    /// on entry. Empty vec clears the set.
52    pub fn set_function_breakpoints(&mut self, names: Vec<String>) {
53        self.function_breakpoints = names.into_iter().collect();
54        // Clear any pending latch so a stale entry from the previous
55        // configuration doesn't fire once.
56        self.pending_function_bp = None;
57    }
58
59    /// Returns the current function-breakpoint name set. Used by the
60    /// DAP adapter to build the `setFunctionBreakpoints` response with
61    /// verified=true per registered name.
62    pub fn function_breakpoint_names(&self) -> Vec<String> {
63        self.function_breakpoints.iter().cloned().collect()
64    }
65
66    /// Drain any pending function-breakpoint name latched by the most
67    /// recent closure entry. Returns `Some(name)` exactly once per hit
68    /// so the caller can emit a single `stopped` event.
69    pub fn take_pending_function_bp(&mut self) -> Option<String> {
70        self.pending_function_bp.take()
71    }
72
73    /// Source file path of the currently executing frame, if known.
74    pub(crate) fn current_source_file(&self) -> Option<&str> {
75        self.frames
76            .last()
77            .and_then(|f| f.chunk.source_file.as_deref())
78    }
79
80    /// True when a breakpoint at `line` is set for the current frame's
81    /// source file (or the wildcard set covers it).
82    pub(crate) fn breakpoint_matches(&self, line: usize) -> bool {
83        if let Some(wild) = self.breakpoints.get("") {
84            if wild.contains(&line) {
85                return true;
86            }
87        }
88        if let Some(file) = self.current_source_file() {
89            if let Some(set) = self.breakpoints.get(file) {
90                if set.contains(&line) {
91                    return true;
92                }
93            }
94            // Some callers send a relative or differently-prefixed path
95            // than the chunk records; fall back to suffix comparison so
96            // foo.harn matches /abs/path/foo.harn and vice-versa.
97            for (key, set) in &self.breakpoints {
98                if key.is_empty() {
99                    continue;
100                }
101                if (file.ends_with(key.as_str()) || key.ends_with(file)) && set.contains(&line) {
102                    return true;
103                }
104            }
105        }
106        false
107    }
108
109    /// Enable step mode (stop at the next source line regardless of
110    /// frame depth — i.e. step-in semantics, descending into calls).
111    pub fn set_step_mode(&mut self, step: bool) {
112        self.step_mode = step;
113        self.step_frame_depth = usize::MAX;
114    }
115
116    /// Enable step-over mode (stop at the next source line in the current
117    /// frame or a shallower one, skipping past any nested calls).
118    pub fn set_step_over(&mut self) {
119        self.step_mode = true;
120        self.step_frame_depth = self.frames.len();
121    }
122
123    /// Register a debug hook invoked whenever execution advances to a new source line.
124    pub fn set_debug_hook<F>(&mut self, hook: F)
125    where
126        F: FnMut(&DebugState) -> DebugAction + 'static,
127    {
128        self.debug_hook = Some(Box::new(hook));
129    }
130
131    /// Clear the current debug hook.
132    pub fn clear_debug_hook(&mut self) {
133        self.debug_hook = None;
134    }
135
136    /// Enable step-out mode (stop at the next source line *after* the
137    /// current frame has returned — strictly shallower than where the
138    /// user requested the step-out).
139    pub fn set_step_out(&mut self) {
140        self.step_mode = true;
141        // Condition site compares `frames.len() <= step_frame_depth`, so
142        // storing N-1 makes the stop fire only after the current frame
143        // pops (frames.len() drops from N to N-1 or less). Clamp to 0 for
144        // the top frame — caller handles that via the usize::MAX sentinel
145        // if they wanted step-in semantics.
146        self.step_frame_depth = self.frames.len().saturating_sub(1);
147    }
148
    /// Check if the VM is stopped at a debug point (breakpoint hit,
    /// step completion, or a hook-requested stop).
    pub fn is_stopped(&self) -> bool {
        self.stopped
    }
153
154    /// Get the current debug state (variables, line, etc.).
155    pub fn debug_state(&self) -> DebugState {
156        let line = self.current_line();
157        let variables = self.visible_variables();
158        let frame_name = if self.frames.len() > 1 {
159            format!("frame_{}", self.frames.len() - 1)
160        } else {
161            "pipeline".to_string()
162        };
163        DebugState {
164            line,
165            variables,
166            frame_name,
167            frame_depth: self.frames.len(),
168        }
169    }
170
171    /// Call sites (name + ip) on `line` within the current frame's
172    /// chunk — drives DAP `stepInTargets` (#112). Walks the chunk's
173    /// parallel lines array, surfaces every Call / MethodCall /
174    /// CallSpread and pairs it with the name of the constant or
175    /// identifier preceding the call when we can derive it cheaply.
176    pub fn call_sites_on_line(&self, line: u32) -> Vec<(u32, String)> {
177        let Some(frame) = self.frames.last() else {
178            return Vec::new();
179        };
180        let chunk = &frame.chunk;
181        let mut out = Vec::new();
182        let code = &chunk.code;
183        let lines = &chunk.lines;
184        let mut ip: usize = 0;
185        while ip < code.len() {
186            let op = code[ip];
187            if ip < lines.len() && lines[ip] == line {
188                // 0x00 .. 0x99 covers the opcode space the compiler
189                // emits for calls. Rather than decode every op, we
190                // pattern-match on the Call-family opcodes via
191                // their numeric tag — stable because harn-vm locks
192                // opcodes with pin tests.
193                if matches!(op, 0x40..=0x44) {
194                    // Best-effort label: take the most recent
195                    // LoadConst / LoadGlobal constant value.
196                    let label = Self::label_preceding_call(chunk, ip);
197                    out.push((ip as u32, label));
198                }
199            }
200            ip += 1;
201        }
202        out
203    }
204
205    fn label_preceding_call(chunk: &Chunk, call_ip: usize) -> String {
206        // Walk backwards a few instructions to find a LoadConst that
207        // resolves to a string (the callee name). Good enough for
208        // the IDE menu; deep callee resolution can land later if
209        // needed.
210        let mut back = call_ip.saturating_sub(6);
211        while back < call_ip {
212            let op = chunk.code[back];
213            // LoadConst opcodes (range covers the two-byte tag) —
214            // fall back to "call" when none found.
215            if (op == 0x01 || op == 0x02) && back + 2 < chunk.code.len() {
216                let idx = (u16::from(chunk.code[back + 1]) << 8) | u16::from(chunk.code[back + 2]);
217                if let Some(Constant::String(s)) = chunk.constants.get(idx as usize) {
218                    return s.clone();
219                }
220            }
221            back += 1;
222        }
223        "call".to_string()
224    }
225
226    /// Install (or replace) the cooperative cancellation token on
227    /// this VM. Callers (DAP adapter, embedded host) flip the
228    /// wrapped AtomicBool to request graceful shutdown; the step
229    /// loop checks `is_cancel_requested()` at every instruction and
230    /// exits with `VmError::Cancelled` when set.
231    pub fn install_cancel_token(&mut self, token: std::sync::Arc<std::sync::atomic::AtomicBool>) {
232        self.cancel_token = Some(token);
233        self.cancel_grace_instructions_remaining = None;
234    }
235
236    /// Install a host signal token paired with the cancellation token. Hosts set
237    /// it to `SIGINT`, `SIGTERM`, or `SIGHUP` before flipping cancellation so
238    /// `std/signal` handlers can match the actual process signal.
239    pub fn install_interrupt_signal_token(
240        &mut self,
241        token: std::sync::Arc<std::sync::Mutex<Option<String>>>,
242    ) {
243        self.interrupt_signal_token = Some(token);
244    }
245
246    /// Signal cooperative cancellation on this VM — the step loop
247    /// unwinds on its next instruction check. Lazily allocates a
248    /// fresh token when none is installed so hosts don't need to
249    /// pre-plumb it on every launch. Returns the Arc so the caller
250    /// can hold onto it and re-signal later if needed.
251    pub fn signal_cancel(&mut self) -> std::sync::Arc<std::sync::atomic::AtomicBool> {
252        let token = self.cancel_token.clone().unwrap_or_else(|| {
253            let t = std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false));
254            self.cancel_token = Some(t.clone());
255            t
256        });
257        token.store(true, std::sync::atomic::Ordering::SeqCst);
258        token
259    }
260
261    /// True when cooperative cancellation has been requested.
262    pub fn is_cancel_requested(&self) -> bool {
263        self.cancel_token
264            .as_ref()
265            .map(|t| t.load(std::sync::atomic::Ordering::SeqCst))
266            .unwrap_or(false)
267    }
268
269    /// Identifiers visible at the given frame's scope — locals plus
270    /// every registered builtin + async builtin. Drives DAP
271    /// `completions` (#109) so the REPL autocomplete surfaces
272    /// everything the unified evaluator can reach.
273    pub fn identifiers_in_scope(&self, _frame_id: usize) -> Vec<String> {
274        let mut out: Vec<String> = self.visible_variables().keys().cloned().collect();
275        out.extend(self.builtins.keys().cloned());
276        out.extend(self.async_builtins.keys().cloned());
277        out.sort();
278        out.dedup();
279        out
280    }
281
282    /// Get all stack frames for the debugger.
283    pub fn debug_stack_frames(&self) -> Vec<(String, usize)> {
284        self.debug_stack_frames_with_sources()
285            .into_iter()
286            .map(|(name, line, _source)| (name, line))
287            .collect()
288    }
289
290    /// Get all stack frames plus their source keys for debugger clients that
291    /// can retrieve synthetic sources through DAP `source`.
292    pub fn debug_stack_frames_with_sources(&self) -> Vec<(String, usize, Option<String>)> {
293        let mut frames = Vec::new();
294        for (i, frame) in self.frames.iter().enumerate() {
295            let line = if frame.ip > 0 && frame.ip - 1 < frame.chunk.lines.len() {
296                frame.chunk.lines[frame.ip - 1] as usize
297            } else {
298                0
299            };
300            let name = if frame.fn_name.is_empty() {
301                if i == 0 {
302                    "pipeline".to_string()
303                } else {
304                    format!("fn_{}", i)
305                }
306            } else {
307                frame.fn_name.clone()
308            };
309            frames.push((name, line, frame.chunk.source_file.clone()));
310        }
311        frames
312    }
313
314    /// Return cached source text by debugger source key. This covers entry
315    /// programs, real imports that have already been read, and synthetic
316    /// sources such as stdlib modules or generated in-memory modules.
317    pub fn debug_source_for_path(&self, path: &str) -> Option<String> {
318        if self.source_file.as_deref() == Some(path) {
319            if let Some(source) = &self.source_text {
320                return Some(source.clone());
321            }
322        }
323
324        let key = std::path::PathBuf::from(path);
325        if let Some(source) = self.source_cache.get(&key) {
326            return Some(source.clone());
327        }
328
329        if let Some(module) = path
330            .strip_prefix("<stdlib>/")
331            .and_then(|s| s.strip_suffix(".harn"))
332        {
333            return crate::stdlib_modules::get_stdlib_source(module).map(str::to_string);
334        }
335
336        None
337    }
338
339    /// Get the current source line.
340    pub(crate) fn current_line(&self) -> usize {
341        if let Some(frame) = self.frames.last() {
342            let ip = if frame.ip > 0 { frame.ip - 1 } else { 0 };
343            if ip < frame.chunk.lines.len() {
344                return frame.chunk.lines[ip] as usize;
345            }
346        }
347        0
348    }
349
    /// Execute one instruction, returning whether to stop (breakpoint/step).
    /// Returns Ok(None) to continue, Ok(Some(val)) on program end, Err on error.
    ///
    /// Line-change detection reads the line of the instruction we're
    /// *about to execute* (`lines[ip]`) rather than the byte before
    /// `ip`. After a jump, `ip-1` still points into the skipped region,
    /// which previously reported phantom stops on the tail of a
    /// not-taken branch (e.g. `host_metadata_save()` highlighted even
    /// though `any_stale` was false). Using `lines[ip]` — combined with
    /// cleanup ops emitted at line 0 after branch/loop exits — keeps
    /// the debugger aligned with what's actually going to run.
    pub async fn step_execute(&mut self) -> Result<Option<(VmValue, bool)>, VmError> {
        // Cooperative cancellation and std/signal interrupts are both
        // observed before instruction work so debug stepping exits promptly.
        if let Some(err) = self.pending_scope_interrupt().await {
            return Err(err);
        }
        if self.is_cancel_requested() {
            self.cancel_spawned_tasks();
            return Err(Self::cancelled_error());
        }
        // Stop checks run only on a line *transition*; line 0 marks
        // synthetic cleanup ops and never counts as a change.
        let current_line = self.upcoming_line();
        let line_changed = current_line != self.last_line && current_line > 0;

        if line_changed {
            self.last_line = current_line;

            // The user hook gets first refusal on every new line.
            let state = self.debug_state();
            if let Some(hook) = self.debug_hook.as_mut() {
                if matches!(hook(&state), DebugAction::Stop) {
                    self.stopped = true;
                    return Ok(Some((VmValue::Nil, true)));
                }
            }

            if self.breakpoint_matches(current_line) {
                self.stopped = true;
                return Ok(Some((VmValue::Nil, true)));
            }

            // Function-breakpoint latch: set by push_closure_frame when
            // the callee's name is in `function_breakpoints`. Stop with
            // the same shape as a line BP so the DAP adapter's
            // classify_breakpoint_hit emits a standard stopped event.
            if self.pending_function_bp.is_some() {
                self.stopped = true;
                return Ok(Some((VmValue::Nil, true)));
            }

            // step_frame_depth is the deepest frame count at which a stop
            // is acceptable. set_step_mode uses usize::MAX (any depth,
            // step-in), set_step_over uses N (same frame or shallower),
            // set_step_out uses N-1 (strictly shallower than where the
            // step-out was requested).
            if self.step_mode && self.frames.len() <= self.step_frame_depth {
                self.step_mode = false;
                self.stopped = true;
                return Ok(Some((VmValue::Nil, true)));
            }
        }

        self.stopped = false;
        self.execute_one_cycle().await
    }
414
415    /// Line of the instruction *about to execute* — used by the
416    /// debugger for line-change detection so the first cycle after a
417    /// jump doesn't report a stale line from the skipped region.
418    pub(crate) fn upcoming_line(&self) -> usize {
419        if let Some(frame) = self.frames.last() {
420            if frame.ip < frame.chunk.lines.len() {
421                return frame.chunk.lines[frame.ip] as usize;
422            }
423        }
424        0
425    }
426
    /// Number of live call frames, including the root pipeline frame.
    /// Used by the DAP adapter to translate stackTrace ids (1-based,
    /// innermost first) back to the VM's 0-based outermost-first index
    /// when processing `restartFrame`.
    pub fn frame_count(&self) -> usize {
        self.frames.len()
    }
434
    /// Rewind the given frame to its entry state so stepping resumes
    /// from the first instruction of the function with the original
    /// arguments re-bound. Higher frames above `frame_id` are dropped.
    /// Returns an error if the frame has no captured `initial_env`
    /// (scratch / evaluator frames don't) or if the id is out of range.
    ///
    /// Side effects already performed by the restarted frame (tool
    /// calls, file writes, host_call round-trips) are *not* rolled
    /// back — DAP leaves that to the adapter's discretion. The IDE
    /// should warn on frames whose source text contains obvious
    /// side-effectful calls before invoking restartFrame.
    pub fn restart_frame(&mut self, frame_id: usize) -> Result<(), VmError> {
        if frame_id >= self.frames.len() {
            return Err(VmError::Runtime(format!(
                "restartFrame: frame id {frame_id} out of range (have {} frames)",
                self.frames.len()
            )));
        }
        // Only frames captured at entry (initial_env present) can be
        // replayed; scratch evaluator frames deliberately skip capture.
        let Some(initial_env) = self.frames[frame_id].initial_env.clone() else {
            return Err(VmError::Runtime(
                "restartFrame: target frame was not captured for restart (scratch / evaluator frame)"
                    .into(),
            ));
        };
        let initial_local_slots = self.frames[frame_id].initial_local_slots.clone();
        // Drop every frame above the target. Each pop restores its
        // saved_iterator_depth into `self.iterators` so iterator state
        // unwinds consistently.
        while self.frames.len() > frame_id + 1 {
            let popped = self.frames.pop().expect("bounds checked above");
            self.iterators.truncate(popped.saved_iterator_depth);
        }
        // Rewind the target frame.
        let frame = self
            .frames
            .last_mut()
            .expect("frame_id within bounds guarantees a frame");
        frame.ip = 0;
        // Discard any operands and iterators the frame accumulated
        // since entry.
        let stack_base = frame.stack_base;
        let saved_iter_depth = frame.saved_iterator_depth;
        self.stack.truncate(stack_base);
        self.iterators.truncate(saved_iter_depth);
        if let Some(initial_local_slots) = initial_local_slots {
            frame.local_slots = initial_local_slots;
            frame.local_scope_depth = 0;
        }
        self.env = initial_env;
        // Reset the line-change baseline so the first line of the
        // restarted frame reports as a fresh transition.
        self.last_line = 0;
        self.stopped = false;
        Ok(())
    }
486
487    /// Assign a new value to a named binding in the paused VM's env.
488    /// Returns the value that was actually stored (after coercion, if
489    /// the VM performed any) so the caller can echo it back to the
490    /// DAP client. Fails if the name does not resolve to a mutable
491    /// binding in any live scope.
492    ///
493    /// The provided `value_expr` goes through the unified evaluator so
494    /// callers can type expressions like `plan.tasks.len() + 1` in the
495    /// Locals inline-edit field, not just literals.
496    pub async fn set_variable_in_frame(
497        &mut self,
498        name: &str,
499        value_expr: &str,
500        frame_id: usize,
501    ) -> Result<VmValue, VmError> {
502        let value = self.evaluate_in_frame(value_expr, frame_id).await?;
503        // Debug-specific assign: bypasses the `let` immutability gate
504        // because the user is explicitly editing in the IDE, and
505        // almost every pipeline binding is `let`. The underlying
506        // binding's mutability flag is preserved so runtime behavior
507        // after the override is unchanged.
508        if !self.assign_active_local_slot(name, value.clone(), true)? {
509            self.env
510                .assign_debug(name, value.clone())
511                .map_err(|e| match e {
512                    VmError::UndefinedVariable(n) => {
513                        VmError::Runtime(format!("setVariable: '{n}' is not in the current scope"))
514                    }
515                    other => other,
516                })?;
517        }
518        Ok(value)
519    }
520
    /// Evaluate a Harn expression against the currently paused frame's
    /// scope and return its value. This is the single evaluation path
    /// used by hover tips, watch expressions, conditional breakpoints,
    /// logpoint interpolation, and `setVariable` / `setExpression` —
    /// before we had a unified evaluator there were four separate
    /// mini-parsers, each with its own rough edges (see burin-code #85).
    ///
    /// The expression is wrapped as `let __r = (<expr>)` so arbitrary
    /// infix chains, ternaries, and access paths parse uniformly. A
    /// scratch `CallFrame` runs the wrapped bytecode with `saved_env`
    /// pointing at the caller's env, so the compiled expression sees
    /// every local in scope. When the scratch frame pops, the caller's
    /// env is automatically restored.
    ///
    /// A fixed instruction budget guards against runaway expressions
    /// (infinite loops, accidental recursion) wedging the debugger.
    /// Side effects — including `llm_call`, `host_*`, and file mutators
    /// — are not blocked here; callers that invoke this for read-only
    /// surfaces (hover, watch) should reject obviously-side-effectful
    /// expressions before calling.
    pub async fn evaluate_in_frame(
        &mut self,
        expr: &str,
        _frame_id: usize,
    ) -> Result<VmValue, VmError> {
        let trimmed = expr.trim();
        if trimmed.is_empty() {
            return Err(VmError::Runtime("evaluate: empty expression".into()));
        }

        // Wrap as a pipeline whose body *returns* the expression. The
        // explicit `return` compiles to `push value + Op::Return`, and
        // Op::Return's frame-exit path pushes that value onto the
        // caller's stack — which is where we read it from below.
        // Avoids the script-mode compile path that trails a Pop+Nil
        // sequence after every expression statement, which would
        // clobber the result before we could capture it.
        let wrapped = format!("pipeline default() {{\n  return ({trimmed})\n}}\n");
        let program = harn_parser::check_source_strict(&wrapped)
            .map_err(|e| VmError::Runtime(format!("evaluate: parse error: {e}")))?;
        let mut chunk = crate::compiler::Compiler::new()
            .compile(&program)
            .map_err(|e| VmError::Runtime(format!("evaluate: compile error: {e}")))?;
        // Inherit the current frame's source file so any runtime error
        // enriched with `(line N)` attributes cleanly.
        if let Some(current) = self.frames.last() {
            chunk.source_file = current.chunk.source_file.clone();
        }

        // Snapshot every piece of VM state the scratch frame could
        // perturb. Evaluation MUST be transparent: step state, scope
        // depth, iterator depth, and the line-change baseline all
        // restore on exit so the paused session continues exactly as
        // before the user typed an expression into the REPL.
        self.sync_current_frame_locals_to_env();
        let saved_stack_len = self.stack.len();
        let saved_frame_count = self.frames.len();
        let saved_iter_depth = self.iterators.len();
        let saved_scope_depth = self.env.scope_depth();
        let saved_last_line = self.last_line;
        let saved_step_mode = self.step_mode;
        let saved_step_frame_depth = self.step_frame_depth;
        let saved_stopped = self.stopped;
        let saved_env = self.env.clone();

        // Disable stepping during evaluation; otherwise the debug hook
        // would fire on every synthetic line and block the pause UI.
        self.step_mode = false;
        self.stopped = false;

        let local_slots = Self::fresh_local_slots(&chunk);
        self.frames.push(CallFrame {
            chunk: Rc::new(chunk),
            ip: 0,
            stack_base: saved_stack_len,
            saved_env,
            // Scratch evaluator frames never accept restartFrame — the
            // REPL/watch user expects read-only inspection semantics,
            // not replay — so skip the clone.
            initial_env: None,
            initial_local_slots: None,
            saved_iterator_depth: saved_iter_depth,
            fn_name: "<eval>".to_string(),
            argc: 0,
            saved_source_dir: self.source_dir.clone(),
            module_functions: None,
            module_state: None,
            local_slots,
            local_scope_base: self.env.scope_depth().saturating_sub(1),
            local_scope_depth: 0,
        });

        // Drive one op at a time with a fixed budget. A pure expression
        // is typically < 20 instructions; 10k gives plenty of headroom
        // for e.g. a list comprehension without letting a bad loop
        // hang the debugger forever.
        const MAX_EVAL_STEPS: usize = 10_000;
        let mut err: Option<VmError> = None;
        for _ in 0..MAX_EVAL_STEPS {
            // Frame count back at (or below) the baseline means the
            // scratch frame has popped and the result is available.
            if self.frames.len() <= saved_frame_count {
                break;
            }
            match self.execute_one_cycle().await {
                Ok(_) => {
                    if self.frames.len() <= saved_frame_count {
                        break;
                    }
                }
                Err(e) => {
                    err = Some(e);
                    break;
                }
            }
        }

        // Read the result before restoring the stack — frame exit
        // pushes the last-computed value onto the caller's stack, so
        // it sits at `saved_stack_len` if execution completed cleanly.
        let result = if self.stack.len() > saved_stack_len {
            Some(self.stack[saved_stack_len].clone())
        } else {
            None
        };

        // Unconditional cleanup so a mid-execution error doesn't leak
        // scratch state into the live session.
        self.frames.truncate(saved_frame_count);
        self.stack.truncate(saved_stack_len);
        self.iterators.truncate(saved_iter_depth);
        self.env.truncate_scopes(saved_scope_depth);
        self.last_line = saved_last_line;
        self.step_mode = saved_step_mode;
        self.step_frame_depth = saved_step_frame_depth;
        self.stopped = saved_stopped;

        if let Some(e) = err {
            return Err(e);
        }
        result.ok_or_else(|| {
            VmError::Runtime(
                "evaluate: step budget exceeded before the expression produced a value".into(),
            )
        })
    }
665}