mica_language/
vm.rs

1//! The virtual machine.
2
3use std::collections::HashSet;
4use std::pin::Pin;
5use std::ptr;
6use std::rc::Rc;
7
8use crate::bytecode::{
9   CaptureKind, Chunk, Control, DispatchTable, Environment, FunctionKind, FunctionSignature,
10   Opcode, Opr24,
11};
12use crate::common::{Error, ErrorKind, Location, RenderedSignature, StackTraceEntry};
13use crate::gc::{GcRaw, Memory};
14use crate::value::{
15   create_trait, Closure, Dict, List, RawValue, Struct, Trait, Upvalue, ValueKind,
16};
17
/// Storage for global variables.
///
/// Slots are addressed by `Opr24` operands assigned by the code generator; the
/// backing vector grows on demand in [`Globals::set`].
#[derive(Debug)]
pub struct Globals {
   // Slot-indexed storage. Slots past the end of the vector read as `Nil`.
   values: Vec<RawValue>,
}
23
24impl Globals {
25   /// Creates a new storage.
26   pub fn new() -> Self {
27      Self { values: Vec::new() }
28   }
29
30   /// Sets the global in the given slot.
31   pub fn set(&mut self, slot: Opr24, value: RawValue) {
32      let slot = u32::from(slot) as usize;
33      if slot >= self.values.len() {
34         self.values.resize(slot + 1, ().into());
35      }
36      // Safety: the vec has just been extended to have the correct size so `slot` is in bounds.
37      unsafe {
38         *self.values.get_unchecked_mut(slot) = value;
39      }
40   }
41
42   /// Returns the global in the given slot, or `Nil` if there's no global there.
43   pub fn get(&self, slot: Opr24) -> RawValue {
44      let slot = u32::from(slot) as usize;
45      self.values.get(slot).cloned().unwrap_or(().into())
46   }
47
48   /// Returns the global in the given slot without performing bounds checks.
49   ///
50   /// # Safety
51   ///
52   /// This is only safe to use by the VM. The code generator ensures that all globals are set
53   /// before use.
54   unsafe fn get_unchecked(&self, slot: Opr24) -> RawValue {
55      let slot = u32::from(slot) as usize;
56      *self.values.get_unchecked(slot)
57   }
58
59   /// Returns an iterator over all globals.
60   pub(crate) fn iter(&self) -> impl Iterator<Item = RawValue> + '_ {
61      self.values.iter().copied()
62   }
63}
64
65impl Default for Globals {
66   fn default() -> Self {
67      Self::new()
68   }
69}
70
/// The return point saved before entering a function, used to restore the
/// caller's execution state once the callee returns.
struct ReturnPoint {
   // `None` for foreign (FFI) frames, which are not backed by a bytecode chunk.
   chunk: Option<Rc<Chunk>>,
   // `None` for the `<main>` frame, which is not backed by a closure.
   closure: Option<GcRaw<Closure>>,
   // Program counter to resume at in the caller's chunk.
   pc: usize,
   // The caller's stack frame base.
   stack_bottom: usize,
}
78
/// The virtual machine state.
pub struct Fiber {
   // Current program counter into `chunk`.
   pc: usize,
   // The chunk currently being executed.
   chunk: Rc<Chunk>,
   // The closure currently being executed; `None` while in `<main>`.
   closure: Option<GcRaw<Closure>>,

   // Operand stack; local variables live above `stack_bottom`.
   stack: Vec<RawValue>,
   // Base index of the current call frame within `stack`.
   stack_bottom: usize,
   // Upvalues that still alias live stack slots, keyed by the slot index.
   // Pinned so the upvalue cell keeps a stable address when closed.
   open_upvalues: Vec<(u32, Pin<Rc<Upvalue>>)>,
   // Saved caller states, one per active call.
   call_stack: Vec<ReturnPoint>,
   // Stack heights recorded on `EnterBreakableBlock`, unwound on exit.
   breakable_block_stack: Vec<usize>,

   // Set once execution halts (normally via `Halt`, or due to an error).
   halted: bool,
}
93
94impl Fiber {
   /// Creates a new VM that will execute `chunk`, with `stack` as the initial
   /// contents of the operand stack.
   pub fn new(chunk: Rc<Chunk>, stack: Vec<RawValue>) -> Self {
      Self {
         pc: 0,
         chunk,
         closure: None,
         stack,
         stack_bottom: 0,
         open_upvalues: Vec::new(),
         call_stack: Vec::new(),
         breakable_block_stack: Vec::new(),
         halted: false,
      }
   }
109
   /// Returns whether the fiber has halted execution.
   ///
   /// A halted fiber has either finished running or raised a runtime error.
   pub fn halted(&self) -> bool {
      self.halted
   }
114
   /// Halts the VM and produces a runtime error of the given kind, with a
   /// stack trace built from the current call stack.
   fn error(&mut self, env: &Environment, kind: ErrorKind) -> Error {
      self.halted = true;
      Error::Runtime {
         kind,
         call_stack: self
            .call_stack
            .iter()
            .filter_map(|return_point| {
               Some(StackTraceEntry {
                  function_name: if let Some(closure) = &return_point.closure {
                     let closure = unsafe { closure.get() };
                     let function = unsafe { env.get_function_unchecked(closure.function_id) };
                     // Functions may opt out of appearing in stack traces;
                     // skip the whole frame in that case.
                     if function.hidden_in_stack_traces {
                        return None;
                     }
                     Rc::clone(&function.name)
                  } else {
                     // A frame without a closure is the program entry point.
                     Rc::from("<main>")
                  },
                  module_name: if let Some(chunk) = &return_point.chunk {
                     Rc::clone(&chunk.module_name)
                  } else {
                     // Foreign (FFI) frames carry no bytecode chunk.
                     Rc::from("<FFI>")
                  },
                  location: if let Some(chunk) = &return_point.chunk {
                     // The saved `pc` points past the call instruction; step
                     // back one instruction to report the call site itself.
                     chunk.location(return_point.pc - Opcode::INSTRUCTION_SIZE)
                  } else {
                     Location::UNINIT
                  },
               })
            })
            .collect(),
      }
   }
150
   /// Pushes a value onto the stack.
   fn push(&mut self, value: RawValue) {
      self.stack.push(value);
      // Optional stack-op tracing, compiled in only with the feature enabled.
      #[cfg(feature = "trace-vm-stack-ops")]
      {
         println!("push | {:?}", &self.stack);
      }
   }
159
   /// Pops a value off the stack.
   ///
   /// In debug builds an empty stack panics; in release builds popping from an
   /// empty stack is undefined behavior — the code generator is trusted to
   /// balance pushes and pops.
   fn pop(&mut self) -> RawValue {
      #[cfg(debug_assertions)]
      let value = { self.stack.pop().unwrap() };
      #[cfg(not(debug_assertions))]
      let value = unsafe { self.stack.pop().unwrap_unchecked() };
      #[cfg(feature = "trace-vm-stack-ops")]
      {
         println!("pop  | {:?} -> {:?}", &self.stack, value);
      }
      value
   }
172
   /// Returns the value at the top of the stack (by copy, since `RawValue` is
   /// `Copy`).
   ///
   /// Debug builds panic on an empty stack; release builds skip the bounds
   /// check entirely.
   fn stack_top(&self) -> RawValue {
      #[cfg(debug_assertions)]
      {
         self.stack.last().copied().unwrap()
      }
      #[cfg(not(debug_assertions))]
      unsafe {
         *self.stack.get_unchecked(self.stack.len() - 1)
      }
   }
184
   /// Returns a mutable reference to the value at the top of the stack.
   ///
   /// Debug builds panic on an empty stack; release builds skip the bounds
   /// check entirely.
   fn stack_top_mut(&mut self) -> &mut RawValue {
      #[cfg(debug_assertions)]
      {
         self.stack.last_mut().unwrap()
      }
      #[cfg(not(debug_assertions))]
      unsafe {
         let last = self.stack.len() - 1;
         self.stack.get_unchecked_mut(last)
      }
   }
197
   /// Returns a reference to the `n`th value counted from the top of the stack.
   ///
   /// Counting is 1-based: `n == 1` is the topmost value.
   fn nth_from_top(&self, n: usize) -> &RawValue {
      #[cfg(debug_assertions)]
      {
         &self.stack[self.stack.len() - n]
      }
      #[cfg(not(debug_assertions))]
      unsafe {
         let i = self.stack.len() - n;
         self.stack.get_unchecked(i)
      }
   }
210
211   /// Saves the current program counter and other execution state before entering a function.
212   fn save_return_point(&mut self) {
213      self.call_stack.push(ReturnPoint {
214         chunk: Some(Rc::clone(&self.chunk)),
215         closure: self.closure,
216         pc: self.pc,
217         stack_bottom: self.stack_bottom,
218      });
219   }
220
221   /// Restores previous VM state from the return point stack.
222   fn restore_return_point(&mut self) {
223      let return_point = self.call_stack.pop().unwrap();
224      // Remove unnecessary values from the stack (eg. arguments).
225      while self.stack.len() > self.stack_bottom {
226         self.stack.pop().unwrap();
227      }
228      // Restore state.
229      // SAFETY: Assume the matching chunk is a bytecode chunk.
230      self.chunk = unsafe { return_point.chunk.unwrap_unchecked() };
231      self.closure = return_point.closure;
232      self.pc = return_point.pc;
233      self.stack_bottom = return_point.stack_bottom;
234   }
235
   /// Returns an upvalue for the local at the given stack slot.
   ///
   /// If the slot already has an open upvalue it is reused (so all closures
   /// capturing the same variable share one cell); otherwise a new upvalue
   /// pointing into the stack is created and registered as open.
   fn get_upvalue_for_local(&mut self, stack_slot: u32) -> Pin<Rc<Upvalue>> {
      if let Some((_, upvalue)) =
         self.open_upvalues.iter().rev().find(|(slot, _)| *slot == stack_slot)
      {
         Pin::clone(upvalue)
      } else {
         let stack_ptr = &mut self.stack[stack_slot as usize] as *mut _;
         // SAFETY: Vec storage is never null.
         let stack_ptr = unsafe { ptr::NonNull::new_unchecked(stack_ptr) };
         let upvalue = Upvalue::new(stack_ptr);
         self.open_upvalues.push((stack_slot, Pin::clone(&upvalue)));
         upvalue
      }
   }
251
252   /// Allocates `n` storage slots for local variables.
253   fn allocate_chunk_storage_slots(&mut self, n: usize) {
254      self.stack.extend(std::iter::repeat_with(|| RawValue::from(())).take(n));
255   }
256
   /// Calls a function. For bytecode functions this saves the stack and begins executing the
   /// function's chunk. For foreign functions it simply calls the function.
   ///
   /// `argument_count` is the number of stack slots belonging to the call,
   /// including the callee/receiver slot itself (see the `Call` opcode).
   fn enter_function(
      &mut self,
      env: &mut Environment,
      globals: &mut Globals,
      gc: &mut Memory,
      closure: GcRaw<Closure>,
      argument_count: usize,
   ) -> Result<(), Error> {
      let function = unsafe { env.get_function_unchecked_mut(closure.get().function_id) };
      match &mut function.kind {
         FunctionKind::Bytecode { chunk, .. } => {
            // Switch execution over to the callee's chunk; the arguments
            // already on the stack become the bottom of its frame.
            self.save_return_point();
            self.chunk = Rc::clone(chunk);
            self.closure = Some(closure);
            self.pc = 0;
            self.stack_bottom = self.stack.len() - argument_count;
            self.allocate_chunk_storage_slots(chunk.preallocate_stack_slots as usize);
         }
         FunctionKind::Foreign(f) => {
            // Foreign functions receive their arguments as a slice borrowed
            // straight from the top of the stack.
            let arguments =
               unsafe { self.stack.get_unchecked(self.stack.len() - argument_count..) };
            let result = match f(gc, arguments) {
               Ok(value) => value,
               Err(kind) => {
                  return Err(self.error_outside_function_call(Some(closure), env, kind));
               }
            };
            // Pop the arguments and replace them with the single result.
            for _ in 0..argument_count {
               self.pop();
            }
            self.push(result);
         }
         &mut FunctionKind::Control(ctl) => {
            self.call_control(env, globals, gc, ctl, argument_count)?;
         }
      }
      Ok(())
   }
297
   /// Handles a call to a control function (a VM-internal built-in such as
   /// triggering a GC collection).
   fn call_control(
      &mut self,
      env: &Environment,
      globals: &mut Globals,
      gc: &mut Memory,
      ctl: Control,
      argument_count: usize,
   ) -> Result<(), Error> {
      match ctl {
         Control::GcCollect => {
            // `argument_count` includes the callee slot, so 1 means the
            // function was called with zero actual arguments; anything more
            // is too many.
            if argument_count != 1 {
               return Err(self.error_outside_function_call(
                  None,
                  env,
                  ErrorKind::TooManyArguments,
               ));
            }
            unsafe { gc.collect(self.roots(globals)) }
            // Replace the callee slot with the `Nil` result.
            self.pop();
            self.push(RawValue::from(()));
         }
      }
      Ok(())
   }
323
   /// Constructs an error that wasn't triggered by a function call.
   ///
   /// Temporarily pushes one (or, with `closure`, two) synthetic return
   /// points so the generated stack trace includes the current frame, then
   /// pops them again so the call stack is left unchanged.
   fn error_outside_function_call(
      &mut self,
      closure: Option<GcRaw<Closure>>,
      env: &Environment,
      kind: ErrorKind,
   ) -> Error {
      self.save_return_point();
      if let Some(closure) = closure {
         // Synthetic FFI-style frame attributing the error to `closure`.
         self.call_stack.push(ReturnPoint {
            chunk: None,
            closure: Some(closure),
            pc: 0,
            stack_bottom: 0,
         });
      }
      let error = self.error(env, kind);
      if closure.is_some() {
         self.call_stack.pop();
      }
      self.call_stack.pop();
      error
   }
347
   /// Constructs a closure from surrounding stack variables and upvalues.
   ///
   /// For bytecode functions, each declared capture is resolved either to an
   /// upvalue over a local in the current frame, or to an upvalue already
   /// captured by the currently executing closure. Foreign/control functions
   /// get an empty capture list.
   fn create_closure(
      &mut self,
      env: &mut Environment,
      gc: &mut Memory,
      function_id: Opr24,
   ) -> GcRaw<Closure> {
      let function = unsafe { env.get_function_unchecked_mut(function_id) };
      let mut captures = Vec::new();
      if let FunctionKind::Bytecode {
         captured_locals, ..
      } = &function.kind
      {
         for capture in captured_locals {
            captures.push(match capture {
               CaptureKind::Local(slot) => {
                  // Slot indices are frame-relative; offset by the frame base.
                  self.get_upvalue_for_local(self.stack_bottom as u32 + u32::from(*slot))
               }
               CaptureKind::Upvalue(index) => {
                  // SAFETY: captures referring to upvalues can only appear
                  // inside a function, so a current closure must exist.
                  let closure = unsafe { self.closure.unwrap_unchecked() };
                  let closure = unsafe { closure.get() };
                  Pin::clone(&closure.captures[u32::from(*index) as usize])
               }
            });
         }
      }
      gc.allocate(Closure {
         function_id,
         captures,
      })
   }
379
   /// Returns the dispatch table of the given value.
   ///
   /// Primitive kinds use the built-in dtables from the environment; structs,
   /// traits, and user data carry their own.
   fn get_dispatch_table<'v>(value: &'v RawValue, env: &'v Environment) -> &'v DispatchTable {
      // SAFETY: the `get_raw_*_unchecked` calls are guarded by the matching
      // `value.kind()` arm.
      unsafe {
         match value.kind() {
            ValueKind::Nil => &env.builtin_dtables.nil,
            ValueKind::Boolean => &env.builtin_dtables.boolean,
            ValueKind::Number => &env.builtin_dtables.number,
            ValueKind::String => &env.builtin_dtables.string,
            ValueKind::Function => &env.builtin_dtables.function,
            ValueKind::List => &env.builtin_dtables.list,
            ValueKind::Dict => &env.builtin_dtables.dict,
            ValueKind::Struct => value.get_raw_struct_unchecked().get().dtable(),
            ValueKind::Trait => value.get_raw_trait_unchecked().get().dtable(),
            ValueKind::UserData => value.get_raw_user_data_unchecked().get().dtable(),
         }
      }
   }
397
   /// Initializes a dispatch table with methods obtained from a method ID to function ID map.
   /// Each function's name is prepended with `type_name.`.
   fn initialize_dtable(
      &mut self,
      methods: impl Iterator<Item = (u16, Opr24)>,
      env: &mut Environment,
      gc: &mut Memory,
      dtable: &mut DispatchTable,
   ) {
      for (method_id, function_id) in methods {
         let function = unsafe { env.get_function_unchecked_mut(function_id) };
         // Qualify the function name with the type it now belongs to, for
         // nicer stack traces and diagnostics.
         function.name = Rc::from(format!("{}.{}", dtable.pretty_name, function.name));
         let closure = self.create_closure(env, gc, function_id);
         dtable.set_method(method_id, closure);
      }
   }
414
   /// Initializes a dispatch table with trait method implementations.
   ///
   /// `methods` yields `(name, arity, trait_index, function_id)` tuples, where
   /// `trait_index` indexes into `traits`. Each implemented method is removed
   /// from `unimplemented_methods`; implementing a method twice (or one the
   /// trait never required) yields `DoubleMethodImplementation`, and any
   /// methods still unimplemented afterwards yield `MethodsUnimplemented`.
   fn initialize_dtable_with_trait_methods(
      &mut self,
      methods: impl Iterator<Item = (Rc<str>, u16, u16, Opr24)>,
      traits: &[GcRaw<Trait>],
      env: &mut Environment,
      gc: &mut Memory,
      dtable: &mut DispatchTable,
      unimplemented_methods: &mut HashSet<u16>,
   ) -> Result<(), ErrorKind> {
      for (name, arity, trait_index, function_id) in methods {
         let trait_handle = unsafe { traits[trait_index as usize].get() };
         let trait_id = trait_handle.id;
         let method_signature = FunctionSignature {
            name,
            arity: Some(arity),
            trait_id: Some(trait_id),
         };
         // NOTE: This should never panic because trait method indices are created during the
         // trait's declaration. New method indices should not be made here.
         let method_id =
            env.get_method_index(&method_signature).expect("existing method index must be found");
         let closure = self.create_closure(env, gc, function_id);
         dtable.set_method(method_id, closure);

         // `remove` returning false means the method was not pending — it was
         // either already implemented or never required.
         if !unimplemented_methods.remove(&method_id) {
            return Err(ErrorKind::DoubleMethodImplementation {
               type_name: Rc::clone(&dtable.pretty_name),
               signature: method_signature.render(env),
            });
         }
      }

      if !unimplemented_methods.is_empty() {
         // Render the remaining signatures for the error message.
         let mut methods: Vec<_> = unimplemented_methods
            .iter()
            .map(|&method_id| {
               env.get_method_signature(method_id)
                  .map(|signature| signature.render(env))
                  .unwrap_or_else(RenderedSignature::invalid)
            })
            .collect();
         methods.sort_unstable_by_key(|signature| {
            // Maybe doing the Rc clones here isn't ideal, but this is the sad path which doesn't
            // have to be The Fastest.
            (
               signature.trait_name.clone(),
               Rc::clone(&signature.name),
               signature.arity,
            )
         });
         return Err(ErrorKind::MethodsUnimplemented {
            type_name: Rc::clone(&dtable.pretty_name),
            methods,
         });
      }

      Ok(())
   }
473
   /// Returns an iterator over all GC roots: every global, every stack value,
   /// and the currently executing closure (if any).
   // NOTE(review): `globals` only needs read access here; the `&mut` is
   // presumably to match `gc.collect`'s exclusivity requirements — confirm.
   fn roots<'a>(&'a self, globals: &'a mut Globals) -> impl Iterator<Item = RawValue> + 'a {
      globals.iter().chain(self.stack.iter().copied()).chain(self.closure.map(RawValue::from))
   }
478
   /// Interprets bytecode in the chunk, with the provided user state.
   ///
   /// Runs the dispatch loop until a `Halt` opcode is reached, then returns
   /// the value left on top of the stack. Runtime errors halt the fiber and
   /// are returned as `Err`.
   pub fn interpret(
      &mut self,
      env: &mut Environment,
      globals: &mut Globals,
      gc: &mut Memory,
   ) -> Result<RawValue, Error> {
      // Reserve the main chunk's local variable slots up front.
      self.allocate_chunk_storage_slots(self.chunk.preallocate_stack_slots as usize);

      loop {
         #[cfg(feature = "trace-vm-opcodes")]
         {
            print!("op   @ {:06x} ", self.pc);
         }
         let (opcode, operand) = unsafe { self.chunk.read_instruction(&mut self.pc) };
         #[cfg(feature = "trace-vm-opcodes")]
         {
            println!("{:?} ({})", opcode, operand);
         }

         // Unwraps a `Result<_, ErrorKind>`, converting an `Err` into a full
         // runtime error (with stack trace) and returning from `interpret`.
         macro_rules! wrap_error {
            ($exp:expr) => {{
               let result = $exp;
               match result {
                  Ok(value) => value,
                  Err(kind) => {
                     self.save_return_point();
                     return Err(self.error(env, kind));
                  }
               }
            }};
         }

         // Pops two numbers (right first, then left) and pushes `left $op right`.
         macro_rules! binary_operator {
            ($op:tt) => {{
               let right = wrap_error!(self.pop().ensure_number());
               let left = wrap_error!(self.pop().ensure_number());
               self.stack.push(RawValue::from(left $op right));
            }};
         }

         match opcode {
            Opcode::Nop => (),

            // -- Literals and constructors -----------------------------------
            Opcode::PushNil => self.push(RawValue::from(())),
            Opcode::PushTrue => self.push(RawValue::from(true)),
            Opcode::PushFalse => self.push(RawValue::from(false)),
            Opcode::PushNumber => {
               let number = unsafe { self.chunk.read_number(&mut self.pc) };
               self.push(RawValue::from(number));
            }
            Opcode::PushString => {
               let string = unsafe { self.chunk.read_string(&mut self.pc) }.to_owned();
               // Give the GC a chance to run before each allocation.
               unsafe { gc.auto_collect(self.roots(globals)) };
               let rc = gc.allocate(string);
               self.push(RawValue::from(rc));
            }
            Opcode::CreateClosure => {
               unsafe { gc.auto_collect(self.roots(globals)) };
               let closure = self.create_closure(env, gc, operand);
               self.push(RawValue::from(closure));
            }
            Opcode::CreateType => {
               let name = unsafe { self.chunk.read_string(&mut self.pc) };
               let mut dispatch_table = DispatchTable::new_for_type(name);
               // The placeholder name is replaced when `Implement` runs.
               dispatch_table.pretty_name = Rc::from(format!("unimplemented type {name}"));
               let dispatch_table = gc.allocate(dispatch_table);
               let struct_v = Struct::new_type(dispatch_table);
               self.stack.push(RawValue::from(gc.allocate(struct_v)));
            }
            Opcode::CreateStruct => {
               unsafe { gc.auto_collect(self.roots(globals)) };
               let type_struct = unsafe { self.pop().get_raw_struct_unchecked() };
               // The operand is the number of fields to allocate.
               let field_count = u32::from(operand) as usize;
               let instance = unsafe { type_struct.get().new_instance(field_count) };
               let instance = gc.allocate(instance);
               self.push(RawValue::from(instance));
            }
            Opcode::CreateTrait => {
               let instance = create_trait(env, gc, operand);
               self.push(RawValue::from(instance));
            }
            Opcode::CreateList => {
               unsafe { gc.auto_collect(self.roots(globals)) };
               // The top `len` stack values become the list's elements.
               let len = u32::from(operand) as usize;
               let elements = self.stack.drain(self.stack.len() - len..).collect();
               let list = gc.allocate(List::new(elements));
               self.push(RawValue::from(list));
            }
            Opcode::CreateDict => {
               unsafe { gc.auto_collect(self.roots(globals)) };
               // The top `npairs * 2` stack values are interleaved key/value
               // pairs.
               let npairs = u32::from(operand) as usize;
               let dict = Dict::new();
               {
                  let mut pairs = self.stack.drain(self.stack.len() - npairs * 2..);
                  while let Some(key) = pairs.next() {
                     let value = pairs.next().unwrap();
                     dict.insert(key, value);
                  }
               }
               let dict = gc.allocate(dict);
               self.push(RawValue::from(dict));
            }

            // -- Variable access ---------------------------------------------
            // `Assign*` keeps the value on the stack; `Sink*` pops it.
            Opcode::AssignGlobal => {
               let value = self.stack_top();
               globals.set(operand, value);
            }
            Opcode::SinkGlobal => {
               let value = self.pop();
               globals.set(operand, value);
            }
            Opcode::GetGlobal => {
               // SAFETY: the code generator guarantees globals are assigned
               // before they are read.
               let value = unsafe { globals.get_unchecked(operand) };
               self.push(value);
            }
            Opcode::AssignLocal => {
               let slot = u32::from(operand) as usize;
               let value = self.stack_top();
               self.stack[self.stack_bottom + slot] = value;
            }
            Opcode::SinkLocal => {
               let slot = u32::from(operand) as usize;
               let value = self.pop();
               self.stack[self.stack_bottom + slot] = value;
            }
            Opcode::GetLocal => {
               let slot = u32::from(operand) as usize;
               let value = self.stack[self.stack_bottom + slot];
               self.push(value);
            }
            Opcode::AssignUpvalue => {
               let index = u32::from(operand) as usize;
               let closure = unsafe { self.closure.as_ref().unwrap_unchecked().get() };
               let value = self.stack_top();
               unsafe { Upvalue::set(&closure.captures[index], value) }
            }
            Opcode::SinkUpvalue => {
               let index = u32::from(operand) as usize;
               let value = self.pop();
               let closure = unsafe { self.closure.as_ref().unwrap_unchecked().get() };
               unsafe { Upvalue::set(&closure.captures[index], value) }
            }
            Opcode::GetUpvalue => {
               let index = u32::from(operand) as usize;
               let closure = unsafe { self.closure.as_ref().unwrap_unchecked().get() };
               let value = unsafe { closure.captures[index].get() };
               self.push(value);
            }
            Opcode::CloseLocal => {
               // Detach the upvalue for this slot from the stack so the
               // captured value outlives the frame.
               let stack_slot = self.stack_bottom as u32 + u32::from(operand);
               // This is O(n) and I can't say I'm a fan of that, but I haven't benchmarked the
               // performance impact this makes yet.
               let index =
                  self.open_upvalues.iter().rposition(|(slot, _)| *slot == stack_slot).unwrap();
               let (_, upvalue) = self.open_upvalues.remove(index);
               unsafe { upvalue.close() };
            }
            Opcode::AssignField => {
               // Stack (top to bottom): struct, value. The value is pushed
               // back so the assignment remains an expression.
               let struct_v = self.pop();
               let value = self.pop();
               let struct_v = unsafe { struct_v.get_raw_struct_unchecked() };
               self.push(value);
               unsafe { struct_v.get().set_field(u32::from(operand) as usize, value) }
            }
            Opcode::SinkField => {
               let struct_v = self.pop();
               let value = self.pop();
               let struct_v = unsafe { struct_v.get_raw_struct_unchecked() };
               unsafe { struct_v.get().set_field(u32::from(operand) as usize, value) }
            }
            Opcode::GetField => {
               let struct_v = self.pop();
               let struct_v = unsafe { struct_v.get_raw_struct_unchecked() };
               let value = unsafe { struct_v.get().get_field(u32::from(operand) as usize) };
               self.push(value);
            }

            // -- Stack manipulation ------------------------------------------
            Opcode::Swap => {
               let len = self.stack.len();
               self.stack.swap(len - 2, len - 1);
            }
            Opcode::Discard => {
               self.pop();
            }

            // -- Jumps -------------------------------------------------------
            // Conditional jumps inspect, but do not pop, the top of the stack.
            Opcode::JumpForward => {
               let amount = u32::from(operand) as usize;
               self.pc += amount;
            }
            Opcode::JumpForwardIfFalsy => {
               let amount = u32::from(operand) as usize;
               if self.stack_top().is_falsy() {
                  self.pc += amount;
               }
            }
            Opcode::JumpForwardIfTruthy => {
               let amount = u32::from(operand) as usize;
               if self.stack_top().is_truthy() {
                  self.pc += amount;
               }
            }
            Opcode::JumpBackward => {
               let amount = u32::from(operand) as usize;
               self.pc -= amount;
            }

            // -- Breakable blocks --------------------------------------------
            Opcode::EnterBreakableBlock => {
               // Remember the stack height so `break` can unwind to it.
               self.breakable_block_stack.push(self.stack.len());
            }
            Opcode::ExitBreakableBlock => {
               // Exit `operand` nested blocks at once, preserving the block's
               // result value across the unwind.
               let result = self.pop();
               let n = u32::from(operand);
               for _ in 0..n {
                  unsafe {
                     let top = self.breakable_block_stack.pop().unwrap_unchecked();
                     while self.stack.len() > top {
                        self.stack.pop().unwrap();
                     }
                  }
               }
               self.push(result);
            }

            // -- Calls -------------------------------------------------------
            Opcode::Call => {
               // Add 1 to count in the called function itself, which is treated like an argument.
               let argument_count = u32::from(operand) as usize + 1;
               let function = self.nth_from_top(argument_count);
               let closure = wrap_error!(function.ensure_raw_function());
               self.enter_function(env, globals, gc, closure, argument_count)?;
            }
            Opcode::CallMethod => {
               // The operand packs the method index and the argument count
               // (which includes the receiver).
               let (method_index, argument_count) = operand.unpack();
               let receiver = self.nth_from_top(argument_count as usize);
               let dtable = Self::get_dispatch_table(receiver, env);
               #[cfg(feature = "trace-vm-calls")]
               {
                  println!(
                     "call # m. idx={}, argc={}; {:?}",
                     method_index,
                     argument_count,
                     env.get_method_signature(method_index)
                  );
               }
               if let Some(closure) = dtable.get_method(method_index) {
                  self.enter_function(env, globals, gc, closure, argument_count as usize)?;
               } else {
                  let signature =
                     env.get_method_signature(method_index).cloned().unwrap_or_else(|| {
                        FunctionSignature {
                           name: Rc::from("(invalid method index)"),
                           arity: None,
                           trait_id: None,
                        }
                     });
                  let error_kind = ErrorKind::MethodDoesNotExist {
                     type_name: Rc::clone(&dtable.pretty_name),
                     signature: signature.render(env),
                  };
                  return Err(self.error_outside_function_call(None, env, error_kind));
               }
            }
            Opcode::Return => {
               // Pop the result, unwind the frame, then re-push the result
               // for the caller.
               let result = self.pop();
               self.restore_return_point();
               self.push(result);
            }

            // -- Trait/impl blocks -------------------------------------------
            Opcode::Implement => {
               let proto = unsafe { env.take_prototype_unchecked(operand) };
               // The struct sits below the implemented trait values on the
               // stack.
               let struct_position = proto.implemented_trait_count as usize + 1;

               let traits: Vec<_> = {
                  // TODO: Maybe get rid of this allocation, or hoist it into the fiber?
                  let mut traits = vec![];
                  for trait_value in &self.stack[self.stack.len() - struct_position + 1..] {
                     traits.push(wrap_error!(trait_value.ensure_raw_trait()));
                  }
                  traits
               };
               // Collect every method the implemented traits require; these
               // are crossed off as implementations are found.
               let mut unimplemented_trait_methods: HashSet<_> = traits
                  .iter()
                  .enumerate()
                  .map(|(trait_index, trait_handle)| {
                     (trait_index, unsafe { trait_handle.get() }.id)
                  })
                  .flat_map(|(_, trait_id)| {
                     env.get_trait(trait_id).unwrap().required.iter().copied()
                  })
                  .collect();

               let impld_struct =
                  wrap_error!(self.nth_from_top(struct_position).ensure_raw_struct());
               let impld_struct = unsafe { impld_struct.get() };
               let type_name = Rc::clone(&unsafe { impld_struct.dtable() }.type_name);

               // Static (type-level) methods.
               let mut type_dtable = DispatchTable::new_for_type(Rc::clone(&type_name));
               self.initialize_dtable(
                  proto.statics.iter().map(|(&k, &v)| (k, v)),
                  env,
                  gc,
                  &mut type_dtable,
               );

               // Instance methods, including trait method implementations.
               let mut instance_dtable = DispatchTable::new_for_instance(type_name);
               self.initialize_dtable(
                  proto.instance.iter().map(|(&k, &v)| (k, v)),
                  env,
                  gc,
                  &mut instance_dtable,
               );
               wrap_error!(self.initialize_dtable_with_trait_methods(
                  proto.trait_instance.iter().map(
                     |((name, arity, trait_index), &function_index)| {
                        (Rc::clone(name), *arity, *trait_index, function_index)
                     },
                  ),
                  &traits,
                  env,
                  gc,
                  &mut instance_dtable,
                  &mut unimplemented_trait_methods,
               ));

               let instance_dtable = gc.allocate(instance_dtable);
               type_dtable.instance = Some(instance_dtable);
               let type_dtable = gc.allocate(type_dtable);

               // Pop the trait values; the struct stays on the stack.
               for _ in 0..proto.implemented_trait_count {
                  let _ = self.pop();
               }

               impld_struct
                  .implement(type_dtable)
                  .map_err(|kind| self.error_outside_function_call(None, env, kind))?;
            }

            // -- Arithmetic --------------------------------------------------
            Opcode::Negate => {
               let number = wrap_error!(self.pop().ensure_number());
               self.push(RawValue::from(-number));
            }
            Opcode::Add => binary_operator!(+),
            Opcode::Subtract => binary_operator!(-),
            Opcode::Multiply => binary_operator!(*),
            Opcode::Divide => binary_operator!(/),

            // -- Logic and comparison ----------------------------------------
            Opcode::Not => {
               let value = self.stack_top();
               *self.stack_top_mut() = RawValue::from(!value.is_truthy());
            }
            Opcode::Equal => {
               let right = self.pop();
               let left = self.stack_top();
               *self.stack_top_mut() = RawValue::from(left.eq(&right));
            }
            Opcode::Less => {
               let right = self.pop();
               let left = self.stack_top();
               // Incomparable values (a `None` ordering) compare as false.
               let is_less = if let Some(ordering) = wrap_error!(left.try_partial_cmp(&right)) {
                  ordering.is_lt()
               } else {
                  false
               };
               *self.stack_top_mut() = RawValue::from(is_less);
            }
            Opcode::LessEqual => {
               let right = self.pop();
               let left = self.stack_top();
               let is_less = if let Some(ordering) = wrap_error!(left.try_partial_cmp(&right)) {
                  ordering.is_le()
               } else {
                  false
               };
               *self.stack_top_mut() = RawValue::from(is_less);
            }

            Opcode::Halt => {
               self.halted = true;
               break;
            }
         }
      }

      let result = self.stack.pop().expect("no result found on the top of the stack");

      // Release the main chunk's preallocated local slots.
      self.stack.resize(
         self.stack.len() - self.chunk.preallocate_stack_slots as usize,
         RawValue::from(()),
      );

      Ok(result)
   }
871}