// File: shape_vm/executor/dispatch.rs
1//! Main execution loop and opcode dispatch.
2
3use std::sync::Arc;
4use std::sync::atomic::Ordering;
5
6use crate::bytecode::{Instruction, OpCode};
7use shape_value::{VMError, ValueWord};
8
9use super::debugger_integration::DebuggerIntegration;
10use super::{DebugVMState, ExecutionResult, VirtualMachine, async_ops};
11
12impl VirtualMachine {
13    /// Execute the loaded program
14    ///
15    /// # Arguments
16    /// * `ctx` - Optional ExecutionContext for trading operations (rows, indicators, etc.)
17    pub fn execute(
18        &mut self,
19        ctx: Option<&mut shape_runtime::context::ExecutionContext>,
20    ) -> Result<ValueWord, VMError> {
21        match self.execute_with_suspend(ctx)? {
22            ExecutionResult::Completed(value) => Ok(value),
23            ExecutionResult::Suspended { future_id, .. } => Err(VMError::Suspended {
24                future_id,
25                resume_ip: 0,
26            }),
27        }
28    }
29
    /// Execute the loaded program, returning either a completed value or suspension info.
    ///
    /// Unlike `execute()`, this method distinguishes between completion and suspension,
    /// allowing the host to resume execution after resolving a future.
    ///
    /// This is the debug-aware loop: it consults the debugger (breakpoints,
    /// tracing) and time-travel capture on every instruction. When neither is
    /// active it delegates to the streamlined fast path instead.
    ///
    /// # Arguments
    /// * `ctx` - Optional ExecutionContext for trading operations (rows, indicators, etc.)
    ///
    /// # Errors
    /// * `VMError::Interrupted` when the host raised the interrupt flag (Ctrl+C)
    /// * `VMError::RuntimeError` for resource-limit violations and for uncaught
    ///   exceptions (enriched with source location when debug info is available)
    pub fn execute_with_suspend(
        &mut self,
        mut ctx: Option<&mut shape_runtime::context::ExecutionContext>,
    ) -> Result<ExecutionResult, VMError> {
        self.clear_last_uncaught_exception();

        // Fast path: when no debugger is attached and tracing is off, use the
        // streamlined loop that skips per-instruction debug/trace checks.
        if self.debugger.is_none() && !self.config.trace_execution {
            return self.execute_fast_with_exceptions(ctx);
        }

        // Start debugger if enabled
        if let Some(ref mut debugger) = self.debugger {
            debugger.start();
        }

        while self.ip < self.program.instructions.len() {
            // Check for debug break
            let should_break = if let Some(ref mut debugger) = self.debugger {
                debugger.should_break(
                    &DebugVMState {
                        ip: self.ip,
                        call_stack_depth: self.call_stack.len(),
                    },
                    self.ip,
                )
            } else {
                false
            };

            // Re-borrow the debugger here: the mutable borrow taken for
            // `should_break` ended above, so a second `if let` is required.
            if should_break {
                if let Some(ref mut debugger) = self.debugger {
                    debugger.debug_break(
                        &DebugVMState {
                            ip: self.ip,
                            call_stack_depth: self.call_stack.len(),
                        },
                        &self.program,
                    );
                }
            }

            let instruction = self.program.instructions[self.ip];

            // Record instruction in metrics (opt-in, near-zero cost when None)
            if let Some(ref mut metrics) = self.metrics {
                metrics.record_instruction();
            }

            // Trace instruction if enabled
            if self.config.trace_execution {
                if let Some(ref debugger) = self.debugger {
                    debugger.trace_instruction(
                        &DebugVMState {
                            ip: self.ip,
                            call_stack_depth: self.call_stack.len(),
                        },
                        &self.program,
                        &instruction,
                    );
                } else {
                    self.trace_state();
                }
            }

            // Advance before dispatch: jump/call handlers overwrite `self.ip`,
            // so incrementing afterwards would clobber their target.
            self.ip += 1;
            self.instruction_count += 1;

            // Check for Ctrl+C interrupt every 1024 instructions
            if self.instruction_count & 0x3FF == 0 && self.interrupt.load(Ordering::Relaxed) > 0 {
                return Err(VMError::Interrupted);
            }

            // Resource limit check (sandboxed execution)
            if let Some(ref mut usage) = self.resource_usage {
                usage
                    .tick_instruction()
                    .map_err(|e| VMError::RuntimeError(e.to_string()))?;
            }

            // Poll for completed tier promotions every 1024 instructions.
            if self.instruction_count & 0x3FF == 0 {
                self.poll_tier_completions();
            }

            // GC safepoint poll (gc feature only)
            #[cfg(feature = "gc")]
            if self.instruction_count & 0x3FF == 0 {
                self.gc_safepoint_poll();

                // Incremental marking: make bounded progress on the gray worklist
                // when a marking cycle is active, without stopping the world.
                if self.gc_heap.as_ref().map_or(false, |h| h.is_marking()) {
                    self.gc_incremental_mark_step();
                }
            }

            // Time-travel capture check (debug path).
            if let Some(ref mut tt) = self.time_travel {
                // `self.ip` was already advanced; report the instruction that
                // is about to execute, not its successor.
                let current_ip = self.ip.saturating_sub(1);
                let is_call_or_return = matches!(
                    instruction.opcode,
                    OpCode::Call | OpCode::CallValue | OpCode::Return | OpCode::ReturnValue
                );
                if tt.should_capture(current_ip, self.instruction_count as u64, is_call_or_return) {
                    if let Ok(store) = tt.snapshot_store() {
                        // Raw pointer sidesteps holding a borrow of `tt` (a field
                        // of self) across the `&mut self` call to `snapshot`.
                        let store_ptr = store as *const shape_runtime::snapshot::SnapshotStore;
                        // SAFETY: `store_ptr` was derived from a live reference
                        // immediately above; this assumes `snapshot` does not
                        // move or drop the snapshot store — TODO confirm.
                        if let Ok(snap) = self.snapshot(unsafe { &*store_ptr }) {
                            let call_depth = self.call_stack.len();
                            self.time_travel.as_mut().unwrap().record(
                                snap,
                                current_ip,
                                self.instruction_count as u64,
                                call_depth,
                            );
                        }
                    }
                }
            }

            // Track instruction index before execution for error reporting
            let error_ip = self.ip.saturating_sub(1);

            if let Err(err) = self.execute_instruction(&instruction, ctx.as_deref_mut()) {
                // Check for suspension (not a real error)
                if let VMError::Suspended {
                    future_id,
                    resume_ip,
                } = err
                {
                    return Ok(ExecutionResult::Suspended {
                        future_id,
                        resume_ip,
                    });
                }

                // Check for state.resume() request
                if matches!(err, VMError::ResumeRequested) {
                    self.apply_pending_resume()?;
                    continue;
                }

                // A handler is installed: convert the error to a value and
                // unwind to the handler instead of aborting execution.
                if !self.exception_handlers.is_empty() {
                    let error_nb = ValueWord::from_string(Arc::new(err.to_string()));
                    self.handle_exception_nb(error_nb)?;
                } else {
                    // Enrich error with source location before returning
                    return Err(self.enrich_error_with_location(err, error_ip));
                }
            }

            // Check for pending frame resume (from state.resume_frame)
            if self.pending_frame_resume.is_some() {
                self.apply_pending_frame_resume()?;
            }

            // Check for halt
            if matches!(instruction.opcode, OpCode::Halt) {
                break;
            }
        }

        // Return top of stack or none (only if sp is above top-level locals region)
        let tl = self.program.top_level_locals_count as usize;
        Ok(ExecutionResult::Completed(if self.sp > tl {
            self.sp -= 1;
            std::mem::replace(&mut self.stack[self.sp], ValueWord::none())
        } else {
            ValueWord::none()
        }))
    }
206
207    /// Fast execution loop: no debugger/trace checks, but full exception handling
208    /// and halt/suspension support. This is the default hot path for production code.
209    fn execute_fast_with_exceptions(
210        &mut self,
211        mut ctx: Option<&mut shape_runtime::context::ExecutionContext>,
212    ) -> Result<ExecutionResult, VMError> {
213        while self.ip < self.program.instructions.len() {
214            let ip = self.ip;
215            self.ip += 1;
216            self.instruction_count += 1;
217
218            // Check for Ctrl+C interrupt every 1024 instructions
219            if self.instruction_count & 0x3FF == 0 && self.interrupt.load(Ordering::Relaxed) > 0 {
220                return Err(VMError::Interrupted);
221            }
222
223            // Poll for completed tier promotions every 1024 instructions.
224            if self.instruction_count & 0x3FF == 0 {
225                self.poll_tier_completions();
226            }
227
228            // GC safepoint poll (gc feature only)
229            #[cfg(feature = "gc")]
230            if self.instruction_count & 0x3FF == 0 {
231                self.gc_safepoint_poll();
232
233                // Incremental marking: make bounded progress on the gray worklist
234                // when a marking cycle is active, without stopping the world.
235                if self.gc_heap.as_ref().map_or(false, |h| h.is_marking()) {
236                    self.gc_incremental_mark_step();
237                }
238            }
239
240            let instruction = self.program.instructions[ip];
241
242            // Record instruction in metrics (opt-in, near-zero cost when None)
243            if let Some(ref mut metrics) = self.metrics {
244                metrics.record_instruction();
245            }
246
247            // Time-travel capture check (cheap: just a mode check + counter).
248            if let Some(ref mut tt) = self.time_travel {
249                let is_call_or_return = matches!(
250                    instruction.opcode,
251                    OpCode::Call | OpCode::CallValue | OpCode::Return | OpCode::ReturnValue
252                );
253                if tt.should_capture(ip, self.instruction_count as u64, is_call_or_return) {
254                    if let Ok(store) = tt.snapshot_store() {
255                        let store_ptr = store as *const shape_runtime::snapshot::SnapshotStore;
256                        if let Ok(snap) = self.snapshot(unsafe { &*store_ptr }) {
257                            let call_depth = self.call_stack.len();
258                            self.time_travel.as_mut().unwrap().record(
259                                snap,
260                                ip,
261                                self.instruction_count as u64,
262                                call_depth,
263                            );
264                        }
265                    }
266                }
267            }
268
269            if let Err(err) = self.execute_instruction(&instruction, ctx.as_deref_mut()) {
270                // Check for suspension (not a real error)
271                if let VMError::Suspended {
272                    future_id,
273                    resume_ip,
274                } = err
275                {
276                    return Ok(ExecutionResult::Suspended {
277                        future_id,
278                        resume_ip,
279                    });
280                }
281
282                // Check for state.resume() request
283                if matches!(err, VMError::ResumeRequested) {
284                    self.apply_pending_resume()?;
285                    continue;
286                }
287
288                if !self.exception_handlers.is_empty() {
289                    let error_nb = ValueWord::from_string(Arc::new(err.to_string()));
290                    self.handle_exception_nb(error_nb)?;
291                } else {
292                    return Err(self.enrich_error_with_location(err, ip));
293                }
294            }
295
296            // Check for pending frame resume (from state.resume_frame)
297            if self.pending_frame_resume.is_some() {
298                self.apply_pending_frame_resume()?;
299            }
300
301            if matches!(instruction.opcode, OpCode::Halt) {
302                break;
303            }
304        }
305
306        let tl = self.program.top_level_locals_count as usize;
307        Ok(ExecutionResult::Completed(if self.sp > tl {
308            self.sp -= 1;
309            std::mem::replace(&mut self.stack[self.sp], ValueWord::none())
310        } else {
311            ValueWord::none()
312        }))
313    }
314
315    /// Fast execution loop without debugging overhead or exception handling.
316    /// Used for hot inner loops (e.g., function calls) where we need maximum performance
317    /// and exceptions propagate via `?`.
318    #[inline]
319    pub(crate) fn execute_fast(
320        &mut self,
321        mut ctx: Option<&mut shape_runtime::context::ExecutionContext>,
322    ) -> Result<ValueWord, VMError> {
323        while self.ip < self.program.instructions.len() {
324            // Get index first, then increment
325            let ip = self.ip;
326            self.ip += 1;
327            self.instruction_count += 1;
328
329            // Check for Ctrl+C interrupt every 1024 instructions
330            if self.instruction_count & 0x3FF == 0 && self.interrupt.load(Ordering::Relaxed) > 0 {
331                return Err(VMError::Interrupted);
332            }
333
334            // GC safepoint poll (gc feature only)
335            #[cfg(feature = "gc")]
336            if self.instruction_count & 0x3FF == 0 {
337                self.gc_safepoint_poll();
338
339                // Incremental marking: make bounded progress on the gray worklist
340                // when a marking cycle is active, without stopping the world.
341                if self.gc_heap.as_ref().map_or(false, |h| h.is_marking()) {
342                    self.gc_incremental_mark_step();
343                }
344            }
345
346            let instruction = self.program.instructions[ip];
347
348            // Record instruction in metrics (opt-in, near-zero cost when None)
349            if let Some(ref mut metrics) = self.metrics {
350                metrics.record_instruction();
351            }
352
353            self.execute_instruction(&instruction, ctx.as_deref_mut())?;
354
355            if matches!(instruction.opcode, OpCode::Halt) {
356                break;
357            }
358        }
359
360        let tl = self.program.top_level_locals_count as usize;
361        Ok(if self.sp > tl {
362            self.sp -= 1;
363            std::mem::replace(&mut self.stack[self.sp], ValueWord::none())
364        } else {
365            ValueWord::none()
366        })
367    }
368
369    pub(crate) fn execute_until_call_depth(
370        &mut self,
371        target_depth: usize,
372        mut ctx: Option<&mut shape_runtime::context::ExecutionContext>,
373    ) -> Result<(), VMError> {
374        loop {
375            if self.ip >= self.program.instructions.len() {
376                break;
377            }
378
379            let instruction = self.program.instructions[self.ip];
380            self.ip += 1;
381            self.instruction_count += 1;
382
383            match self.execute_instruction(&instruction, ctx.as_deref_mut()) {
384                Ok(()) => {}
385                Err(VMError::ResumeRequested) => {
386                    self.apply_pending_resume()?;
387                    continue;
388                }
389                Err(err) => return Err(err),
390            }
391
392            if self.pending_frame_resume.is_some() {
393                self.apply_pending_frame_resume()?;
394            }
395
396            if matches!(instruction.opcode, OpCode::Halt) || self.call_stack.len() == target_depth {
397                break;
398            }
399        }
400        Ok(())
401    }
402
    /// Execute a single instruction
    ///
    /// Dispatches on the opcode to the matching `exec_*`/`op_*` handler family.
    /// `Nop` and `Halt` are no-ops here: the run loops check for `Halt` after
    /// this returns and stop themselves.
    ///
    /// # Arguments
    /// * `instruction` - The decoded instruction to execute
    /// * `ctx` - Optional ExecutionContext, forwarded only to handlers that may
    ///   need host data (objects, builtins, method calls, trait objects)
    ///
    /// # Errors
    /// Propagates whatever the handler returns. `VMError::Suspended` is used as
    /// a non-error control signal for async suspensions; opcodes with no arm
    /// map to `VMError::InvalidOperand`.
    pub(crate) fn execute_instruction(
        &mut self,
        instruction: &Instruction,
        ctx: Option<&mut shape_runtime::context::ExecutionContext>,
    ) -> Result<(), VMError> {
        use OpCode::*;

        match instruction.opcode {
            // Stack operations
            PushConst | PushNull | Pop | Dup | Swap => {
                return self.exec_stack_ops(instruction);
            }

            // Arithmetic (generic, with runtime type dispatch)
            Add | Sub | Mul | Div | Mod | Neg | Pow | BitAnd | BitOr | BitXor | BitShl | BitShr
            | BitNot => {
                return self.exec_arithmetic(instruction);
            }

            // Typed arithmetic (compiler-guaranteed types, zero dispatch)
            AddInt | AddNumber | AddDecimal | SubInt | SubNumber | SubDecimal | MulInt
            | MulNumber | MulDecimal | DivInt | DivNumber | DivDecimal | ModInt | ModNumber
            | ModDecimal | PowInt | PowNumber | PowDecimal | IntToNumber | NumberToInt => {
                return self.exec_typed_arithmetic(instruction);
            }

            // NOTE: Trusted arithmetic/comparison opcodes removed — the typed
            // variants (AddInt, GtInt, etc.) already provide zero-dispatch execution.

            // Compact typed arithmetic (width-parameterised, ABI-stable)
            AddTyped | SubTyped | MulTyped | DivTyped | ModTyped | CmpTyped => {
                return self.exec_compact_typed_arithmetic(instruction);
            }

            // CastWidth: integer width casting (bit truncation)
            CastWidth => {
                return self.op_cast_width(instruction);
            }

            // Comparison (generic, with runtime type dispatch)
            Gt | Lt | Gte | Lte | Eq | Neq => {
                return self.exec_comparison(instruction);
            }

            // Typed comparison (compiler-guaranteed types, zero dispatch)
            GtInt | GtNumber | GtDecimal | LtInt | LtNumber | LtDecimal | GteInt | GteNumber
            | GteDecimal | LteInt | LteNumber | LteDecimal | EqInt | EqNumber | NeqInt
            | NeqNumber => {
                return self.exec_typed_comparison(instruction);
            }

            // Logical
            And | Or | Not => {
                return self.exec_logical(instruction);
            }

            // Control flow
            Jump | JumpIfFalse | JumpIfTrue | JumpIfFalseTrusted | Call | CallValue
            | CallForeign | Return | ReturnValue => {
                return self.exec_control_flow(instruction);
            }

            // Variables (including reference operations)
            LoadLocal
            | LoadLocalTrusted
            | StoreLocal
            | StoreLocalTyped
            | LoadModuleBinding
            | StoreModuleBinding
            | StoreModuleBindingTyped
            | LoadClosure
            | StoreClosure
            | CloseUpvalue
            | MakeRef
            | MakeFieldRef
            | MakeIndexRef
            | DerefLoad
            | DerefStore
            | SetIndexRef
            | BoxLocal
            | BoxModuleBinding => {
                return self.exec_variables(instruction);
            }

            // Objects/Arrays (ctx forwarded: object ops may need host data)
            NewArray
            | NewMatrix
            | NewObject
            | GetProp
            | SetProp
            | SetLocalIndex
            | SetModuleBindingIndex
            | Length
            | ArrayPush
            | ArrayPushLocal
            | ArrayPop
            | MakeClosure
            | MergeObject
            | NewTypedObject
            | NewTypedArray
            | TypedMergeObject
            | WrapTypeAnnotation => {
                return self.exec_objects(instruction, ctx);
            }

            // Built-in functions
            BuiltinCall | TypeCheck | Convert => {
                return self.exec_builtins(instruction, ctx);
            }

            // Typed conversion opcodes (zero-dispatch, no operand)
            ConvertToInt => return self.op_convert_to_int(),
            ConvertToNumber => return self.op_convert_to_number(),
            ConvertToString => return self.op_convert_to_string(),
            ConvertToBool => return self.op_convert_to_bool(),
            ConvertToDecimal => return self.op_convert_to_decimal(),
            ConvertToChar => return self.op_convert_to_char(),
            TryConvertToInt => return self.op_try_convert_to_int(),
            TryConvertToNumber => return self.op_try_convert_to_number(),
            TryConvertToString => return self.op_try_convert_to_string(),
            TryConvertToBool => return self.op_try_convert_to_bool(),
            TryConvertToDecimal => return self.op_try_convert_to_decimal(),
            TryConvertToChar => return self.op_try_convert_to_char(),

            // Exception handling
            SetupTry | PopHandler | Throw | TryUnwrap | UnwrapOption | ErrorContext | IsOk
            | IsErr | UnwrapOk | UnwrapErr => {
                return self.exec_exceptions(instruction);
            }

            // Additional operations
            SliceAccess | NullCoalesce | MakeRange => {
                return self.exec_additional(instruction);
            }

            // Loop control
            LoopStart | LoopEnd | Break | Continue | IterNext | IterDone => {
                return self.exec_loops(instruction);
            }

            // Method calls on values
            CallMethod => {
                return self.op_call_method(instruction, ctx);
            }

            // Reserved opcodes: fail loudly rather than silently no-op.
            PushTimeframe => {
                return Err(VMError::NotImplemented(
                    "Opcode 'PushTimeframe' is reserved but not yet implemented".into(),
                ));
            }
            PopTimeframe => {
                return Err(VMError::NotImplemented(
                    "Opcode 'PopTimeframe' is reserved but not yet implemented".into(),
                ));
            }

            // Typed column access on RowView values
            LoadColF64 | LoadColI64 | LoadColBool | LoadColStr => {
                return self.exec_load_col(instruction);
            }

            // Bind DataTable to TypeSchema (runtime safety net)
            BindSchema => {
                return self.exec_bind_schema(instruction);
            }

            // Type-specialized operations (JIT optimization)
            GetFieldTyped | SetFieldTyped => {
                return self.exec_jit_ops(instruction);
            }

            // Async operations: translate the handler's result into either
            // normal continuation or a VMError::Suspended control signal.
            Yield | Suspend | Resume | Poll | AwaitBar | AwaitTick | EmitAlert | EmitEvent
            | Await | SpawnTask | JoinInit | JoinAwait | CancelTask | AsyncScopeEnter
            | AsyncScopeExit => {
                match self.exec_async_op(instruction) {
                    Ok(async_ops::AsyncExecutionResult::Continue) => return Ok(()),
                    Ok(async_ops::AsyncExecutionResult::Yielded) => {
                        return Ok(());
                    }
                    Ok(async_ops::AsyncExecutionResult::Suspended(info)) => {
                        // Propagate suspension as VMError::Suspended so execute() can catch it
                        match info.wait_type {
                            async_ops::WaitType::Future { id } => {
                                return Err(VMError::Suspended {
                                    future_id: id,
                                    resume_ip: info.resume_ip,
                                });
                            }
                            async_ops::WaitType::TaskGroup { kind, task_ids } => {
                                // TaskGroup suspension: propagate with first task_id as marker
                                // The host resolves the group based on kind + task_ids
                                let marker_id = task_ids.first().copied().unwrap_or(0);
                                let _ = (kind, task_ids); // Host retrieves from SuspensionInfo
                                return Err(VMError::Suspended {
                                    future_id: marker_id,
                                    resume_ip: info.resume_ip,
                                });
                            }
                            _ => {
                                // Non-future suspensions (NextBar, Timer, AnyEvent) cannot be
                                // resumed by the host via future_id. Drain any open async scopes
                                // to prevent leaked task tracking, then continue execution.
                                // Scopes are popped LIFO and each scope's tasks are
                                // cancelled in reverse spawn order.
                                while let Some(mut scope_tasks) = self.async_scope_stack.pop() {
                                    scope_tasks.reverse();
                                    for task_id in scope_tasks {
                                        self.task_scheduler.cancel(task_id);
                                    }
                                }
                                return Ok(());
                            }
                        }
                    }
                    Err(e) => return Err(e),
                }
            }

            // Trait object operations
            BoxTraitObject | DynMethodCall | DropCall | DropCallAsync => {
                return self.exec_trait_object_ops(instruction, ctx);
            }

            // Special
            Nop => {}
            // Halt is detected by the caller's run loop after this returns.
            Halt => {}
            Debug => {
                if let Some(ref mut debugger) = self.debugger {
                    debugger.debug_break(
                        &DebugVMState {
                            ip: self.ip,
                            call_stack_depth: self.call_stack.len(),
                        },
                        &self.program,
                    );
                } else if self.config.debug_mode {
                    self.debug_break();
                }
            }

            // Any opcode without an arm above is not executable.
            _ => return Err(VMError::InvalidOperand),
        }

        Ok(())
    }
648
649    /// Enrich an error with source location context
650    ///
651    /// Uses debug_info from the program to add line numbers and source context
652    /// to the error message for better debugging.
653    pub(crate) fn enrich_error_with_location(&mut self, error: VMError, ip: usize) -> VMError {
654        let debug_info = &self.program.debug_info;
655
656        // Try to get line number and file for this instruction
657        let location = debug_info.get_location_for_instruction(ip);
658
659        if let Some((file_id, line_num)) = location {
660            // Store the line number and file for LSP integration
661            self.last_error_line = Some(line_num);
662            self.last_error_file = debug_info
663                .source_map
664                .get_file(file_id)
665                .map(|s| s.to_string());
666
667            // Try to get the source line from the correct file
668            let source_context = debug_info
669                .get_source_line_from_file(file_id, line_num as usize)
670                .map(|s| s.trim())
671                .filter(|s| !s.is_empty());
672
673            let base_msg = match &error {
674                VMError::RuntimeError(msg) => msg.clone(),
675                VMError::TypeError { expected, got } => {
676                    format!("TypeError: expected {}, got {}", expected, got)
677                }
678                VMError::StackUnderflow => "Stack underflow".to_string(),
679                VMError::StackOverflow => "Stack overflow".to_string(),
680                VMError::DivisionByZero => "Division by zero".to_string(),
681                VMError::UndefinedVariable(name) => format!("Undefined variable: {}", name),
682                VMError::UndefinedProperty(name) => format!("Undefined property: {}", name),
683                VMError::InvalidCall => "Invalid function call".to_string(),
684                VMError::IndexOutOfBounds { index, length } => {
685                    format!("Index {} out of bounds (length {})", index, length)
686                }
687                VMError::InvalidOperand => "Invalid operand".to_string(),
688                VMError::ArityMismatch {
689                    function,
690                    expected,
691                    got,
692                } => {
693                    format!(
694                        "{}() expects {} argument(s), got {}",
695                        function, expected, got
696                    )
697                }
698                VMError::InvalidArgument { function, message } => {
699                    format!("{}(): {}", function, message)
700                }
701                VMError::NotImplemented(feature) => format!("Not implemented: {}", feature),
702                VMError::Suspended { .. } | VMError::Interrupted | VMError::ResumeRequested => {
703                    return error;
704                } // Don't enrich suspension/interrupt/resume signals
705            };
706
707            // Build enhanced error message with source context
708            let enhanced = if let Some(source) = source_context {
709                format!(
710                    "{}\n  --> line {}\n   |\n{:>3} | {}\n   |",
711                    base_msg, line_num, line_num, source
712                )
713            } else {
714                format!("{} (line {})", base_msg, line_num)
715            };
716
717            VMError::RuntimeError(enhanced)
718        } else {
719            self.last_error_line = None;
720            self.last_error_file = None;
721            error
722        }
723    }
724
725    // apply_pending_resume() and apply_pending_frame_resume() moved to resume.rs
726}