//! shape_vm/executor/dispatch.rs
//!
//! Main execution loop and opcode dispatch.

use std::sync::Arc;
use std::sync::atomic::Ordering;

use crate::bytecode::{Instruction, OpCode};
use shape_value::{VMError, ValueWord};

use super::debugger_integration::DebuggerIntegration;
use super::{DebugVMState, ExecutionResult, VirtualMachine, async_ops};

impl VirtualMachine {
13    /// Execute the loaded program
14    ///
15    /// # Arguments
16    /// * `ctx` - Optional ExecutionContext for trading operations (rows, indicators, etc.)
17    pub fn execute(
18        &mut self,
19        ctx: Option<&mut shape_runtime::context::ExecutionContext>,
20    ) -> Result<ValueWord, VMError> {
21        match self.execute_with_suspend(ctx)? {
22            ExecutionResult::Completed(value) => Ok(value),
23            ExecutionResult::Suspended { future_id, .. } => Err(VMError::Suspended {
24                future_id,
25                resume_ip: 0,
26            }),
27        }
28    }
29
    /// Execute the loaded program, returning either a completed value or suspension info.
    ///
    /// Unlike `execute()`, this method distinguishes between completion and suspension,
    /// allowing the host to resume execution after resolving a future.
    ///
    /// This is the fully-instrumented loop: it services an attached debugger,
    /// optional instruction tracing, metrics, resource limits, tier-promotion
    /// polling, GC safepoints (behind the `gc` feature), and time-travel capture.
    /// When no debugger is attached and tracing is off, it delegates to the
    /// streamlined `execute_fast_with_exceptions` loop instead.
    pub fn execute_with_suspend(
        &mut self,
        mut ctx: Option<&mut shape_runtime::context::ExecutionContext>,
    ) -> Result<ExecutionResult, VMError> {
        self.clear_last_uncaught_exception();

        // Fast path: when no debugger is attached and tracing is off, use the
        // streamlined loop that skips per-instruction debug/trace checks.
        if self.debugger.is_none() && !self.config.trace_execution {
            return self.execute_fast_with_exceptions(ctx);
        }

        // Start debugger if enabled
        if let Some(ref mut debugger) = self.debugger {
            debugger.start();
        }

        while self.ip < self.program.instructions.len() {
            // Check for debug break. Split into two `if let` borrows of
            // self.debugger so the DebugVMState snapshot reads of self.ip /
            // call_stack don't conflict with the debugger borrow.
            let should_break = if let Some(ref mut debugger) = self.debugger {
                debugger.should_break(
                    &DebugVMState {
                        ip: self.ip,
                        call_stack_depth: self.call_stack.len(),
                    },
                    self.ip,
                )
            } else {
                false
            };

            if should_break {
                if let Some(ref mut debugger) = self.debugger {
                    debugger.debug_break(
                        &DebugVMState {
                            ip: self.ip,
                            call_stack_depth: self.call_stack.len(),
                        },
                        &self.program,
                    );
                }
            }

            let instruction = self.program.instructions[self.ip];

            // Record instruction in metrics (opt-in, near-zero cost when None)
            if let Some(ref mut metrics) = self.metrics {
                metrics.record_instruction();
            }

            // Trace instruction if enabled
            if self.config.trace_execution {
                if let Some(ref debugger) = self.debugger {
                    debugger.trace_instruction(
                        &DebugVMState {
                            ip: self.ip,
                            call_stack_depth: self.call_stack.len(),
                        },
                        &self.program,
                        &instruction,
                    );
                } else {
                    self.trace_state();
                }
            }

            // Advance ip BEFORE executing so jumps/calls may overwrite it.
            self.ip += 1;
            self.instruction_count += 1;

            // Check for Ctrl+C interrupt every 1024 instructions
            if self.instruction_count & 0x3FF == 0 && self.interrupt.load(Ordering::Relaxed) > 0 {
                return Err(VMError::Interrupted);
            }

            // Resource limit check (sandboxed execution)
            if let Some(ref mut usage) = self.resource_usage {
                usage
                    .tick_instruction()
                    .map_err(|e| VMError::RuntimeError(e.to_string()))?;
            }

            // Poll for completed tier promotions every 1024 instructions.
            if self.instruction_count & 0x3FF == 0 {
                self.poll_tier_completions();
            }

            // GC safepoint poll (gc feature only)
            #[cfg(feature = "gc")]
            if self.instruction_count & 0x3FF == 0 {
                self.gc_safepoint_poll();

                // Incremental marking: make bounded progress on the gray worklist
                // when a marking cycle is active, without stopping the world.
                if self.gc_heap.as_ref().map_or(false, |h| h.is_marking()) {
                    self.gc_incremental_mark_step();
                }
            }

            // Time-travel capture check (debug path).
            if let Some(ref mut tt) = self.time_travel {
                // ip was already advanced above, so the instruction being
                // executed sits at ip - 1.
                let current_ip = self.ip.saturating_sub(1);
                let is_call_or_return = matches!(
                    instruction.opcode,
                    OpCode::Call | OpCode::CallValue | OpCode::Return | OpCode::ReturnValue
                );
                if tt.should_capture(current_ip, self.instruction_count as u64, is_call_or_return) {
                    if let Ok(store) = tt.snapshot_store() {
                        // SAFETY(review): the raw-pointer round trip erases the
                        // borrow of `tt` (i.e. self.time_travel) so that
                        // `self.snapshot(...)` can re-borrow `self`. This is
                        // sound only if `snapshot` never touches or invalidates
                        // `self.time_travel` / the store — TODO confirm.
                        let store_ptr = store as *const shape_runtime::snapshot::SnapshotStore;
                        if let Ok(snap) = self.snapshot(unsafe { &*store_ptr }) {
                            let call_depth = self.call_stack.len();
                            self.time_travel.as_mut().unwrap().record(
                                snap,
                                current_ip,
                                self.instruction_count as u64,
                                call_depth,
                            );
                        }
                    }
                }
            }

            // Track instruction index before execution for error reporting
            let error_ip = self.ip.saturating_sub(1);

            if let Err(err) = self.execute_instruction(&instruction, ctx.as_deref_mut()) {
                // Check for suspension (not a real error)
                if let VMError::Suspended {
                    future_id,
                    resume_ip,
                } = err
                {
                    return Ok(ExecutionResult::Suspended {
                        future_id,
                        resume_ip,
                    });
                }

                // Check for state.resume() request
                if matches!(err, VMError::ResumeRequested) {
                    self.apply_pending_resume()?;
                    continue;
                }

                if !self.exception_handlers.is_empty() {
                    // An active try-handler exists: convert the error into a
                    // string value and route it through the VM's exception
                    // machinery instead of unwinding out of the loop.
                    let error_nb = ValueWord::from_string(Arc::new(err.to_string()));
                    self.handle_exception_nb(error_nb)?;
                } else {
                    // Enrich error with source location before returning
                    return Err(self.enrich_error_with_location(err, error_ip));
                }
            }

            // Check for pending frame resume (from state.resume_frame)
            if self.pending_frame_resume.is_some() {
                self.apply_pending_frame_resume()?;
            }

            // Check for halt
            if matches!(instruction.opcode, OpCode::Halt) {
                break;
            }
        }

        // Return top of stack or none (only if sp is above top-level locals region)
        let tl = self.program.top_level_locals_count as usize;
        Ok(ExecutionResult::Completed(if self.sp > tl {
            self.sp -= 1;
            std::mem::replace(&mut self.stack[self.sp], ValueWord::none())
        } else {
            ValueWord::none()
        }))
    }
206
    /// Fast execution loop: no debugger/trace checks, but full exception handling
    /// and halt/suspension support. This is the default hot path for production code.
    ///
    /// NOTE(review): unlike `execute_with_suspend`, this path never calls
    /// `resource_usage.tick_instruction()` — confirm that sandboxed executions
    /// cannot reach this loop, otherwise resource limits are not enforced here.
    fn execute_fast_with_exceptions(
        &mut self,
        mut ctx: Option<&mut shape_runtime::context::ExecutionContext>,
    ) -> Result<ExecutionResult, VMError> {
        while self.ip < self.program.instructions.len() {
            // Snapshot the program counter, then advance it so the executed
            // instruction is free to overwrite self.ip (jumps, calls, returns).
            let ip = self.ip;
            self.ip += 1;
            self.instruction_count += 1;

            // Check for Ctrl+C interrupt every 1024 instructions
            if self.instruction_count & 0x3FF == 0 && self.interrupt.load(Ordering::Relaxed) > 0 {
                return Err(VMError::Interrupted);
            }

            // Poll for completed tier promotions every 1024 instructions.
            if self.instruction_count & 0x3FF == 0 {
                self.poll_tier_completions();
            }

            // GC safepoint poll (gc feature only)
            #[cfg(feature = "gc")]
            if self.instruction_count & 0x3FF == 0 {
                self.gc_safepoint_poll();

                // Incremental marking: make bounded progress on the gray worklist
                // when a marking cycle is active, without stopping the world.
                if self.gc_heap.as_ref().map_or(false, |h| h.is_marking()) {
                    self.gc_incremental_mark_step();
                }
            }

            let instruction = self.program.instructions[ip];

            // Record instruction in metrics (opt-in, near-zero cost when None)
            if let Some(ref mut metrics) = self.metrics {
                metrics.record_instruction();
            }

            // Time-travel capture check (cheap: just a mode check + counter).
            if let Some(ref mut tt) = self.time_travel {
                let is_call_or_return = matches!(
                    instruction.opcode,
                    OpCode::Call | OpCode::CallValue | OpCode::Return | OpCode::ReturnValue
                );
                if tt.should_capture(ip, self.instruction_count as u64, is_call_or_return) {
                    if let Ok(store) = tt.snapshot_store() {
                        // SAFETY(review): the raw-pointer cast erases the borrow
                        // of `tt` so `self.snapshot(...)` can re-borrow `self`.
                        // Sound only if `snapshot` never touches
                        // `self.time_travel` — TODO confirm.
                        let store_ptr = store as *const shape_runtime::snapshot::SnapshotStore;
                        if let Ok(snap) = self.snapshot(unsafe { &*store_ptr }) {
                            let call_depth = self.call_stack.len();
                            self.time_travel.as_mut().unwrap().record(
                                snap,
                                ip,
                                self.instruction_count as u64,
                                call_depth,
                            );
                        }
                    }
                }
            }

            if let Err(err) = self.execute_instruction(&instruction, ctx.as_deref_mut()) {
                // Check for suspension (not a real error)
                if let VMError::Suspended {
                    future_id,
                    resume_ip,
                } = err
                {
                    return Ok(ExecutionResult::Suspended {
                        future_id,
                        resume_ip,
                    });
                }

                // Check for state.resume() request
                if matches!(err, VMError::ResumeRequested) {
                    self.apply_pending_resume()?;
                    continue;
                }

                if !self.exception_handlers.is_empty() {
                    // Active try-handler: route the error through the VM's
                    // exception machinery as a string value.
                    let error_nb = ValueWord::from_string(Arc::new(err.to_string()));
                    self.handle_exception_nb(error_nb)?;
                } else {
                    return Err(self.enrich_error_with_location(err, ip));
                }
            }

            // Check for pending frame resume (from state.resume_frame)
            if self.pending_frame_resume.is_some() {
                self.apply_pending_frame_resume()?;
            }

            if matches!(instruction.opcode, OpCode::Halt) {
                break;
            }
        }

        // Result convention: pop the top of stack only when sp sits above the
        // top-level locals region; otherwise the program produced no value.
        let tl = self.program.top_level_locals_count as usize;
        Ok(ExecutionResult::Completed(if self.sp > tl {
            self.sp -= 1;
            std::mem::replace(&mut self.stack[self.sp], ValueWord::none())
        } else {
            ValueWord::none()
        }))
    }
314
315    /// Fast execution loop without debugging overhead or exception handling.
316    /// Used for hot inner loops (e.g., function calls) where we need maximum performance
317    /// and exceptions propagate via `?`.
318    #[inline]
319    pub(crate) fn execute_fast(
320        &mut self,
321        mut ctx: Option<&mut shape_runtime::context::ExecutionContext>,
322    ) -> Result<ValueWord, VMError> {
323        while self.ip < self.program.instructions.len() {
324            // Get index first, then increment
325            let ip = self.ip;
326            self.ip += 1;
327            self.instruction_count += 1;
328
329            // Check for Ctrl+C interrupt every 1024 instructions
330            if self.instruction_count & 0x3FF == 0 && self.interrupt.load(Ordering::Relaxed) > 0 {
331                return Err(VMError::Interrupted);
332            }
333
334            // GC safepoint poll (gc feature only)
335            #[cfg(feature = "gc")]
336            if self.instruction_count & 0x3FF == 0 {
337                self.gc_safepoint_poll();
338
339                // Incremental marking: make bounded progress on the gray worklist
340                // when a marking cycle is active, without stopping the world.
341                if self.gc_heap.as_ref().map_or(false, |h| h.is_marking()) {
342                    self.gc_incremental_mark_step();
343                }
344            }
345
346            let instruction = self.program.instructions[ip];
347
348            // Record instruction in metrics (opt-in, near-zero cost when None)
349            if let Some(ref mut metrics) = self.metrics {
350                metrics.record_instruction();
351            }
352
353            self.execute_instruction(&instruction, ctx.as_deref_mut())?;
354        }
355
356        let tl = self.program.top_level_locals_count as usize;
357        Ok(if self.sp > tl {
358            self.sp -= 1;
359            std::mem::replace(&mut self.stack[self.sp], ValueWord::none())
360        } else {
361            ValueWord::none()
362        })
363    }
364
365    pub(crate) fn execute_until_call_depth(
366        &mut self,
367        target_depth: usize,
368        mut ctx: Option<&mut shape_runtime::context::ExecutionContext>,
369    ) -> Result<(), VMError> {
370        loop {
371            if self.ip >= self.program.instructions.len() {
372                break;
373            }
374
375            let instruction = self.program.instructions[self.ip];
376            self.ip += 1;
377            self.instruction_count += 1;
378
379            match self.execute_instruction(&instruction, ctx.as_deref_mut()) {
380                Ok(()) => {}
381                Err(VMError::ResumeRequested) => {
382                    self.apply_pending_resume()?;
383                    continue;
384                }
385                Err(err) => return Err(err),
386            }
387
388            if self.pending_frame_resume.is_some() {
389                self.apply_pending_frame_resume()?;
390            }
391
392            if matches!(instruction.opcode, OpCode::Halt) || self.call_stack.len() == target_depth {
393                break;
394            }
395        }
396        Ok(())
397    }
398
    /// Execute a single instruction
    ///
    /// Dispatches on the opcode to the family-specific handler (`exec_*` /
    /// `op_*`). Most arms delegate and return early; `Nop` and `Halt` fall
    /// through to the trailing `Ok(())` — the *caller's* loop detects `Halt`
    /// and stops, this function treats it as a no-op. Opcodes not covered by
    /// any arm yield `VMError::InvalidOperand`.
    pub(crate) fn execute_instruction(
        &mut self,
        instruction: &Instruction,
        ctx: Option<&mut shape_runtime::context::ExecutionContext>,
    ) -> Result<(), VMError> {
        use OpCode::*;

        match instruction.opcode {
            // Stack operations
            PushConst | PushNull | Pop | Dup | Swap => {
                return self.exec_stack_ops(instruction);
            }

            // Arithmetic (generic, with runtime type dispatch)
            Add | Sub | Mul | Div | Mod | Neg | Pow | BitAnd | BitOr | BitXor | BitShl | BitShr
            | BitNot => {
                return self.exec_arithmetic(instruction);
            }

            // Typed arithmetic (compiler-guaranteed types, zero dispatch)
            AddInt | AddNumber | AddDecimal | SubInt | SubNumber | SubDecimal | MulInt
            | MulNumber | MulDecimal | DivInt | DivNumber | DivDecimal | ModInt | ModNumber
            | ModDecimal | PowInt | PowNumber | PowDecimal | IntToNumber | NumberToInt => {
                return self.exec_typed_arithmetic(instruction);
            }

            // Trusted arithmetic (compiler-proved types, no runtime guard)
            AddIntTrusted | SubIntTrusted | MulIntTrusted | DivIntTrusted | AddNumberTrusted
            | SubNumberTrusted | MulNumberTrusted | DivNumberTrusted => {
                return self.exec_trusted_arithmetic(instruction);
            }

            // Trusted comparison (compiler-proved types, no runtime guard).
            // Shares the typed-comparison handler with the guarded variants below.
            GtIntTrusted | LtIntTrusted | GteIntTrusted | LteIntTrusted | GtNumberTrusted
            | LtNumberTrusted | GteNumberTrusted | LteNumberTrusted => {
                return self.exec_typed_comparison(instruction);
            }

            // Compact typed arithmetic (width-parameterised, ABI-stable)
            AddTyped | SubTyped | MulTyped | DivTyped | ModTyped | CmpTyped => {
                return self.exec_compact_typed_arithmetic(instruction);
            }

            // CastWidth: integer width casting (bit truncation)
            CastWidth => {
                return self.op_cast_width(instruction);
            }

            // Comparison (generic, with runtime type dispatch)
            Gt | Lt | Gte | Lte | Eq | Neq => {
                return self.exec_comparison(instruction);
            }

            // Typed comparison (compiler-guaranteed types, zero dispatch)
            GtInt | GtNumber | GtDecimal | LtInt | LtNumber | LtDecimal | GteInt | GteNumber
            | GteDecimal | LteInt | LteNumber | LteDecimal | EqInt | EqNumber | NeqInt
            | NeqNumber => {
                return self.exec_typed_comparison(instruction);
            }

            // Logical
            And | Or | Not => {
                return self.exec_logical(instruction);
            }

            // Control flow
            Jump | JumpIfFalse | JumpIfTrue | JumpIfFalseTrusted | Call | CallValue
            | CallForeign | Return | ReturnValue => {
                return self.exec_control_flow(instruction);
            }

            // Variables (including reference operations)
            LoadLocal | LoadLocalTrusted | StoreLocal | StoreLocalTyped | LoadModuleBinding
            | StoreModuleBinding | LoadClosure | StoreClosure | CloseUpvalue | MakeRef
            | DerefLoad | DerefStore | SetIndexRef | BoxLocal | BoxModuleBinding => {
                return self.exec_variables(instruction);
            }

            // Objects/Arrays (needs ctx: object handlers may consult the
            // execution context, e.g. for typed-object construction)
            NewArray
            | NewMatrix
            | NewObject
            | GetProp
            | SetProp
            | SetLocalIndex
            | SetModuleBindingIndex
            | Length
            | ArrayPush
            | ArrayPushLocal
            | ArrayPop
            | MakeClosure
            | MergeObject
            | NewTypedObject
            | NewTypedArray
            | TypedMergeObject
            | WrapTypeAnnotation => {
                return self.exec_objects(instruction, ctx);
            }

            // Built-in functions
            BuiltinCall | TypeCheck | Convert => {
                return self.exec_builtins(instruction, ctx);
            }

            // Exception handling
            SetupTry | PopHandler | Throw | TryUnwrap | UnwrapOption | ErrorContext | IsOk
            | IsErr | UnwrapOk | UnwrapErr => {
                return self.exec_exceptions(instruction);
            }

            // Additional operations
            SliceAccess | NullCoalesce | MakeRange => {
                return self.exec_additional(instruction);
            }

            // Loop control
            LoopStart | LoopEnd | Break | Continue | IterNext | IterDone => {
                return self.exec_loops(instruction);
            }

            // Method calls on values
            CallMethod => {
                return self.op_call_method(instruction, ctx);
            }

            // Operations not yet implemented
            PushTimeframe | PopTimeframe | RunSimulation => {
                return Err(VMError::NotImplemented(format!(
                    "Operation {:?}",
                    instruction.opcode
                )));
            }

            // Typed column access on RowView values
            LoadColF64 | LoadColI64 | LoadColBool | LoadColStr => {
                return self.exec_load_col(instruction);
            }

            // Bind DataTable to TypeSchema (runtime safety net)
            BindSchema => {
                return self.exec_bind_schema(instruction);
            }

            // Type-specialized operations (JIT optimization)
            GetFieldTyped | SetFieldTyped => {
                return self.exec_jit_ops(instruction);
            }

            // Async operations: translate the async handler's tri-state result
            // back into this function's Ok/Err convention.
            Yield | Suspend | Resume | Poll | AwaitBar | AwaitTick | EmitAlert | EmitEvent
            | Await | SpawnTask | JoinInit | JoinAwait | CancelTask | AsyncScopeEnter
            | AsyncScopeExit => {
                match self.exec_async_op(instruction) {
                    Ok(async_ops::AsyncExecutionResult::Continue) => return Ok(()),
                    // Yielded also continues from the caller's perspective.
                    Ok(async_ops::AsyncExecutionResult::Yielded) => {
                        return Ok(());
                    }
                    Ok(async_ops::AsyncExecutionResult::Suspended(info)) => {
                        // Propagate suspension as VMError::Suspended so execute() can catch it
                        match info.wait_type {
                            async_ops::WaitType::Future { id } => {
                                return Err(VMError::Suspended {
                                    future_id: id,
                                    resume_ip: info.resume_ip,
                                });
                            }
                            async_ops::WaitType::TaskGroup { kind, task_ids } => {
                                // TaskGroup suspension: propagate with first task_id as marker
                                // The host resolves the group based on kind + task_ids
                                let marker_id = task_ids.first().copied().unwrap_or(0);
                                let _ = (kind, task_ids); // Host retrieves from SuspensionInfo
                                return Err(VMError::Suspended {
                                    future_id: marker_id,
                                    resume_ip: info.resume_ip,
                                });
                            }
                            _ => {
                                // Non-future suspensions continue for now
                                return Ok(());
                            }
                        }
                    }
                    Err(e) => return Err(e),
                }
            }

            // Trait object operations
            BoxTraitObject | DynMethodCall | DropCall | DropCallAsync => {
                return self.exec_trait_object_ops(instruction, ctx);
            }

            // Special
            Nop => {}
            // Halt is a no-op here; the execution loops detect it and stop.
            Halt => {}
            Debug => {
                if let Some(ref mut debugger) = self.debugger {
                    debugger.debug_break(
                        &DebugVMState {
                            ip: self.ip,
                            call_stack_depth: self.call_stack.len(),
                        },
                        &self.program,
                    );
                } else if self.config.debug_mode {
                    self.debug_break();
                }
            }

            // Any opcode without a handler above is rejected.
            _ => return Err(VMError::InvalidOperand),
        }

        Ok(())
    }
613
614    /// Enrich an error with source location context
615    ///
616    /// Uses debug_info from the program to add line numbers and source context
617    /// to the error message for better debugging.
618    pub(crate) fn enrich_error_with_location(&mut self, error: VMError, ip: usize) -> VMError {
619        let debug_info = &self.program.debug_info;
620
621        // Try to get line number and file for this instruction
622        let location = debug_info.get_location_for_instruction(ip);
623
624        if let Some((file_id, line_num)) = location {
625            // Store the line number and file for LSP integration
626            self.last_error_line = Some(line_num);
627            self.last_error_file = debug_info
628                .source_map
629                .get_file(file_id)
630                .map(|s| s.to_string());
631
632            // Try to get the source line from the correct file
633            let source_context = debug_info
634                .get_source_line_from_file(file_id, line_num as usize)
635                .map(|s| s.trim())
636                .filter(|s| !s.is_empty());
637
638            let base_msg = match &error {
639                VMError::RuntimeError(msg) => msg.clone(),
640                VMError::TypeError { expected, got } => {
641                    format!("TypeError: expected {}, got {}", expected, got)
642                }
643                VMError::StackUnderflow => "Stack underflow".to_string(),
644                VMError::StackOverflow => "Stack overflow".to_string(),
645                VMError::DivisionByZero => "Division by zero".to_string(),
646                VMError::UndefinedVariable(name) => format!("Undefined variable: {}", name),
647                VMError::UndefinedProperty(name) => format!("Undefined property: {}", name),
648                VMError::InvalidCall => "Invalid function call".to_string(),
649                VMError::IndexOutOfBounds { index, length } => {
650                    format!("Index {} out of bounds (length {})", index, length)
651                }
652                VMError::InvalidOperand => "Invalid operand".to_string(),
653                VMError::ArityMismatch {
654                    function,
655                    expected,
656                    got,
657                } => {
658                    format!(
659                        "{}() expects {} argument(s), got {}",
660                        function, expected, got
661                    )
662                }
663                VMError::InvalidArgument { function, message } => {
664                    format!("{}(): {}", function, message)
665                }
666                VMError::NotImplemented(feature) => format!("Not implemented: {}", feature),
667                VMError::Suspended { .. } | VMError::Interrupted | VMError::ResumeRequested => {
668                    return error;
669                } // Don't enrich suspension/interrupt/resume signals
670            };
671
672            // Build enhanced error message with source context
673            let enhanced = if let Some(source) = source_context {
674                format!(
675                    "{}\n  --> line {}\n   |\n{:>3} | {}\n   |",
676                    base_msg, line_num, line_num, source
677                )
678            } else {
679                format!("{} (line {})", base_msg, line_num)
680            };
681
682            VMError::RuntimeError(enhanced)
683        } else {
684            self.last_error_line = None;
685            self.last_error_file = None;
686            error
687        }
688    }
689
690    // apply_pending_resume() and apply_pending_frame_resume() moved to resume.rs
691}