// wasmer_runtime_core_near/state.rs

//! The state module is used to track the state of running WebAssembly instances so that
//! state can be read or updated at runtime. Use cases include generating stack traces, switching
//! generated code from one tier to another, or serializing the state of a running instance.

5use crate::backend::RunnableModule;
6use borsh::{BorshDeserialize, BorshSerialize};
7use std::collections::BTreeMap;
8use std::ops::Bound::{Included, Unbounded};
9use std::sync::Arc;
10
11/// An index to a register
12#[derive(
13    Copy,
14    Clone,
15    Debug,
16    Eq,
17    PartialEq,
18    Hash,
19    Serialize,
20    Deserialize,
21    BorshSerialize,
22    BorshDeserialize,
23)]
24pub struct RegisterIndex(pub usize);
25
26/// A kind of wasm or constant value
27#[derive(
28    Copy,
29    Clone,
30    Debug,
31    Eq,
32    PartialEq,
33    Hash,
34    Serialize,
35    Deserialize,
36    BorshSerialize,
37    BorshDeserialize,
38)]
39pub enum WasmAbstractValue {
40    /// A wasm runtime value
41    Runtime,
42    /// A wasm constant value
43    Const(u64),
44}
45
/// A container for the state of a running wasm instance.
#[derive(Clone, Debug, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub struct MachineState {
    /// Values on the machine stack, bottom first (the top of the stack is the
    /// end of the vector; see `MachineStateDiff::build_state`).
    pub stack_values: Vec<MachineValue>,
    /// Values held in machine registers, indexed by `RegisterIndex`.
    pub register_values: Vec<MachineValue>,
    /// Values belonging to the previous (caller's) frame, keyed by offset.
    // NOTE(review): the key is presumably a stack-slot offset — confirm with
    // the backend that populates this map.
    pub prev_frame: BTreeMap<usize, MachineValue>,
    /// The abstract wasm value stack.
    pub wasm_stack: Vec<WasmAbstractValue>,
    /// Private depth of the wasm stack.
    pub wasm_stack_private_depth: usize,
    /// Offset of the current wasm instruction; `usize::MAX` is used as a
    /// "function header" sentinel (see `new_machine_state` in the x64 module).
    pub wasm_inst_offset: usize,
}
62
/// A diff of two `MachineState`s.
///
/// Diffs form chains via `last`; replaying a chain on top of a function's
/// initial state (see `MachineStateDiff::build_state`) reconstructs a full
/// `MachineState` without storing each state in full.
#[derive(Clone, Debug, Default, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub struct MachineStateDiff {
    /// Index (into `FunctionStateMap::diffs`) of the diff this one builds on;
    /// `None` means it applies directly to the initial state.
    pub last: Option<usize>,
    /// Values pushed onto the machine stack (applied after the pops below).
    pub stack_push: Vec<MachineValue>,
    /// Number of values popped from the machine stack.
    pub stack_pop: usize,

    /// Registers that changed, as (register, new value) pairs.
    pub reg_diff: Vec<(RegisterIndex, MachineValue)>,

    /// Changes to the previous frame's slots.
    pub prev_frame_diff: BTreeMap<usize, Option<MachineValue>>, // None for removal

    /// Values pushed onto the wasm stack (applied after the pops below).
    pub wasm_stack_push: Vec<WasmAbstractValue>,
    /// Number of values popped from the wasm stack.
    pub wasm_stack_pop: usize,
    /// Private depth of the wasm stack.
    pub wasm_stack_private_depth: usize, // absolute value; not a diff.
    /// Wasm instruction offset.
    pub wasm_inst_offset: usize, // absolute value; not a diff.
}
88
/// A kind of machine value: describes what a stack slot or register holds
/// abstractly, so that concrete values can be reconstructed when restoring
/// an instance image (see `invoke_call_return_on_stack`).
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub enum MachineValue {
    /// Undefined / untracked value.
    Undefined,
    /// The `vmctx` context pointer itself.
    Vmctx,
    /// A value obtained by chasing pointers from `vmctx` through the given
    /// chain of byte offsets (see `compute_vmctx_deref`).
    VmctxDeref(Vec<usize>),
    /// A register value preserved on the stack.
    PreserveRegister(RegisterIndex),
    /// A copy of another stack slot, addressed relative to the base pointer.
    CopyStackBPRelative(i32), // relative to Base Pointer, in byte offset
    /// Marks the shadow region of the frame.
    ExplicitShadow, // indicates that all values above this are above the shadow region
    /// A value of the abstract wasm stack at the given depth.
    WasmStack(usize),
    /// The wasm local with the given index.
    WasmLocal(usize),
    /// Two 32-bit halves packed into one slot.
    TwoHalves(Box<(MachineValue, MachineValue)>), // 32-bit values. TODO: optimize: add another type for inner "half" value to avoid boxing?
}
111
112impl BorshSerialize for MachineValue {
113    fn serialize<W: std::io::Write>(&self, writer: &mut W) -> std::io::Result<()> {
114        match self {
115            MachineValue::Undefined => writer.write_all(&[0u8])?,
116            MachineValue::Vmctx => writer.write_all(&[1u8])?,
117            MachineValue::VmctxDeref(v) => {
118                writer.write_all(&[2u8])?;
119                BorshSerialize::serialize(&v, writer)?;
120            }
121            MachineValue::PreserveRegister(r) => {
122                writer.write_all(&[3u8])?;
123                BorshSerialize::serialize(&r, writer)?;
124            }
125            MachineValue::CopyStackBPRelative(i) => {
126                writer.write_all(&[4u8])?;
127                BorshSerialize::serialize(&i, writer)?;
128            }
129            MachineValue::ExplicitShadow => writer.write_all(&[5u8])?,
130            MachineValue::WasmStack(u) => {
131                writer.write_all(&[6u8])?;
132                BorshSerialize::serialize(&(*u as u64), writer)?;
133            }
134            MachineValue::WasmLocal(u) => {
135                writer.write_all(&[7u8])?;
136                BorshSerialize::serialize(&(*u as u64), writer)?;
137            }
138            MachineValue::TwoHalves(b) => {
139                writer.write_all(&[8u8])?;
140                BorshSerialize::serialize(&b, writer)?;
141            }
142        }
143        Ok(())
144    }
145}
146
147impl BorshDeserialize for MachineValue {
148    fn deserialize_reader<R: std::io::prelude::Read>(reader: &mut R) -> std::io::Result<Self> {
149        let variant: u8 = BorshDeserialize::deserialize_reader(reader)?;
150        Ok(match variant {
151            0 => MachineValue::Undefined,
152            1 => MachineValue::Vmctx,
153            2 => {
154                let v: Vec<usize> = BorshDeserialize::deserialize_reader(reader)?;
155                MachineValue::VmctxDeref(v)
156            }
157            3 => {
158                let r: RegisterIndex = BorshDeserialize::deserialize_reader(reader)?;
159                MachineValue::PreserveRegister(r)
160            }
161            4 => {
162                let i: i32 = BorshDeserialize::deserialize_reader(reader)?;
163                MachineValue::CopyStackBPRelative(i)
164            }
165            5 => MachineValue::ExplicitShadow,
166            6 => {
167                let u: usize = BorshDeserialize::deserialize_reader(reader)?;
168                MachineValue::WasmStack(u)
169            }
170            7 => {
171                let u: usize = BorshDeserialize::deserialize_reader(reader)?;
172                MachineValue::WasmLocal(u)
173            }
174            8 => {
175                let b: Box<(MachineValue, MachineValue)> =
176                    BorshDeserialize::deserialize_reader(reader)?;
177                MachineValue::TwoHalves(b)
178            }
179            _ => {
180                return Err(std::io::Error::new(
181                    std::io::ErrorKind::InvalidInput,
182                    "Unexpected variant",
183                ))
184            }
185        })
186    }
187}
188
/// A map of function states: per-function tables that relate generated-code
/// offsets to reconstructible machine states.
#[derive(Clone, Debug, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub struct FunctionStateMap {
    /// The machine state at function entry.
    pub initial: MachineState,
    /// Local Function Id.
    pub local_function_id: usize,
    /// Abstract values of the function's locals.
    pub locals: Vec<WasmAbstractValue>,
    /// Size of the shadow region, in bytes (must be a multiple of 8).
    pub shadow_size: usize, // for single-pass backend, 32 bytes on x86-64
    /// State diffs; entries chain to earlier entries via `MachineStateDiff::last`.
    pub diffs: Vec<MachineStateDiff>,
    /// Suspend offset used when the instruction offset is the "function
    /// header" sentinel (`usize::MAX`).
    pub wasm_function_header_target_offset: Option<SuspendOffset>,
    /// Maps wasm instruction offsets to suspend (target) offsets.
    pub wasm_offset_to_target_offset: BTreeMap<usize, SuspendOffset>,
    /// Loop offsets.
    pub loop_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
    /// Call offsets.
    pub call_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
    /// Trappable offsets.
    pub trappable_offsets: BTreeMap<usize, OffsetInfo>, /* suspend_offset -> info */
}
213
/// A kind of suspend offset: a code offset tagged by which table of the
/// owning `FunctionStateMap` (`loop_offsets`, `call_offsets`, or
/// `trappable_offsets`) it should be looked up in.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub enum SuspendOffset {
    /// A loop suspend offset.
    Loop(usize),
    /// A call suspend offset.
    Call(usize),
    /// A trappable-instruction suspend offset.
    Trappable(usize),
}
224
/// Info for an offset: describes the code range an offset-table entry covers
/// and which state diff applies there.
#[derive(Clone, Debug, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub struct OffsetInfo {
    /// End of the covered code range (exclusive; see `ModuleStateMap::lookup_ip`).
    pub end_offset: usize, // excluded bound
    /// Index into `FunctionStateMap::diffs` for the state at this offset.
    pub diff_id: usize,
    /// Offset at which this state becomes active.
    pub activate_offset: usize,
}
235
/// A map of module state.
#[derive(Clone, Debug, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub struct ModuleStateMap {
    /// Per-function state maps, keyed by the function's start offset within
    /// the module's generated code (see `lookup_ip`).
    pub local_functions: BTreeMap<usize, FunctionStateMap>,
    /// Total size of the module's generated code; offsets at or beyond this
    /// are rejected by `lookup_ip`.
    pub total_size: usize,
}
244
/// State dump of a wasm function.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct WasmFunctionStateDump {
    /// Local function id.
    pub local_function_id: usize,
    /// Offset of the suspended wasm instruction; `usize::MAX` denotes the
    /// function header (see `invoke_call_return_on_stack`).
    pub wasm_inst_offset: usize,
    /// Concrete wasm stack values; `None` when a value is unknown.
    pub stack: Vec<Option<u64>>,
    /// Concrete local values; `None` when a value is unknown.
    pub locals: Vec<Option<u64>>,
}
257
/// An image of the execution state.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ExecutionStateImage {
    /// Wasm call frames; iterated in reverse ("bottom to top") when the image
    /// is restored by `invoke_call_return_on_stack`.
    pub frames: Vec<WasmFunctionStateDump>,
}
264
/// Represents an image of an `Instance` including its memory, globals, and execution state.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InstanceImage {
    /// Memory for this `InstanceImage`, as raw bytes (if the instance has one).
    pub memory: Option<Vec<u8>>,
    /// Stored globals for this `InstanceImage`, as raw 128-bit values.
    pub globals: Vec<u128>,
    /// `ExecutionStateImage` for this `InstanceImage`
    pub execution_state: ExecutionStateImage,
}
275
/// A `CodeVersion` is a container for a unit of generated code for a module.
#[derive(Clone)]
pub struct CodeVersion {
    /// Indicates if this code version is the baseline version.
    pub baseline: bool,

    /// `ModuleStateMap` for this code version.
    pub msm: ModuleStateMap,

    /// A pointer to the machine code for this module; offsets in `msm` are
    /// relative to this base address.
    pub base: usize,

    /// The backend used to compile this module.
    pub backend: &'static str,

    /// `RunnableModule` for this code version; kept alive so `base` stays valid.
    // NOTE(review): "kept alive" is inferred from the Arc ownership — confirm.
    pub runnable_module: Arc<Box<dyn RunnableModule>>,
}
294
295impl ModuleStateMap {
296    /// Looks up an ip from self using the given ip, base, and offset table provider.
297    pub fn lookup_ip<F: FnOnce(&FunctionStateMap) -> &BTreeMap<usize, OffsetInfo>>(
298        &self,
299        ip: usize,
300        base: usize,
301        offset_table_provider: F,
302    ) -> Option<(&FunctionStateMap, MachineState)> {
303        if ip < base || ip - base >= self.total_size {
304            None
305        } else {
306            let (_, fsm) = self
307                .local_functions
308                .range((Unbounded, Included(&(ip - base))))
309                .last()
310                .unwrap();
311
312            match offset_table_provider(fsm)
313                .range((Unbounded, Included(&(ip - base))))
314                .last()
315            {
316                Some((_, x)) => {
317                    if ip - base >= x.end_offset {
318                        None
319                    } else if x.diff_id < fsm.diffs.len() {
320                        Some((fsm, fsm.diffs[x.diff_id].build_state(fsm)))
321                    } else {
322                        None
323                    }
324                }
325                None => None,
326            }
327        }
328    }
329    /// Looks up a call ip from self using the given ip and base values.
330    pub fn lookup_call_ip(
331        &self,
332        ip: usize,
333        base: usize,
334    ) -> Option<(&FunctionStateMap, MachineState)> {
335        self.lookup_ip(ip, base, |fsm| &fsm.call_offsets)
336    }
337
338    /// Looks up a trappable ip from self using the given ip and base values.
339    pub fn lookup_trappable_ip(
340        &self,
341        ip: usize,
342        base: usize,
343    ) -> Option<(&FunctionStateMap, MachineState)> {
344        self.lookup_ip(ip, base, |fsm| &fsm.trappable_offsets)
345    }
346
347    /// Looks up a loop ip from self using the given ip and base values.
348    pub fn lookup_loop_ip(
349        &self,
350        ip: usize,
351        base: usize,
352    ) -> Option<(&FunctionStateMap, MachineState)> {
353        self.lookup_ip(ip, base, |fsm| &fsm.loop_offsets)
354    }
355}
356
357impl FunctionStateMap {
358    /// Creates a new `FunctionStateMap` with the given parameters.
359    pub fn new(
360        initial: MachineState,
361        local_function_id: usize,
362        shadow_size: usize,
363        locals: Vec<WasmAbstractValue>,
364    ) -> FunctionStateMap {
365        FunctionStateMap {
366            initial,
367            local_function_id,
368            shadow_size,
369            locals,
370            diffs: vec![],
371            wasm_function_header_target_offset: None,
372            wasm_offset_to_target_offset: BTreeMap::new(),
373            loop_offsets: BTreeMap::new(),
374            call_offsets: BTreeMap::new(),
375            trappable_offsets: BTreeMap::new(),
376        }
377    }
378}
379
impl MachineState {
    /// Creates a `MachineStateDiff` from self and the given `&MachineState`.
    ///
    /// The returned diff, applied on top of `old` (pops first, then pushes —
    /// see `MachineStateDiff::build_state`), reproduces `self`. `last` is left
    /// as `None`; callers fill it in when chaining diffs. Panics if the two
    /// states have different numbers of register values.
    pub fn diff(&self, old: &MachineState) -> MachineStateDiff {
        // Length of the common prefix of the two machine stacks: everything
        // past it in `old` is popped, everything past it in `self` is pushed.
        let first_diff_stack_depth: usize = self
            .stack_values
            .iter()
            .zip(old.stack_values.iter())
            .enumerate()
            .find(|&(_, (a, b))| a != b)
            .map(|x| x.0)
            .unwrap_or(old.stack_values.len().min(self.stack_values.len()));
        assert_eq!(self.register_values.len(), old.register_values.len());
        // Registers whose value changed, recorded with their new value.
        let reg_diff: Vec<_> = self
            .register_values
            .iter()
            .zip(old.register_values.iter())
            .enumerate()
            .filter(|&(_, (a, b))| a != b)
            .map(|(i, (a, _))| (RegisterIndex(i), a.clone()))
            .collect();
        // Slots added or changed in `self.prev_frame` (`Some(new value)`),
        // chained with slots present only in `old` (`None` = removal).
        let prev_frame_diff: BTreeMap<usize, Option<MachineValue>> = self
            .prev_frame
            .iter()
            .filter(|(k, v)| {
                if let Some(ref old_v) = old.prev_frame.get(k) {
                    v != old_v
                } else {
                    true
                }
            })
            .map(|(&k, v)| (k, Some(v.clone())))
            .chain(
                old.prev_frame
                    .iter()
                    .filter(|(k, _)| self.prev_frame.get(k).is_none())
                    .map(|(&k, _)| (k, None)),
            )
            .collect();
        // Same common-prefix computation for the abstract wasm value stack.
        let first_diff_wasm_stack_depth: usize = self
            .wasm_stack
            .iter()
            .zip(old.wasm_stack.iter())
            .enumerate()
            .find(|&(_, (a, b))| a != b)
            .map(|x| x.0)
            .unwrap_or(old.wasm_stack.len().min(self.wasm_stack.len()));
        MachineStateDiff {
            last: None,
            stack_push: self.stack_values[first_diff_stack_depth..].to_vec(),
            stack_pop: old.stack_values.len() - first_diff_stack_depth,
            reg_diff,

            prev_frame_diff,

            wasm_stack_push: self.wasm_stack[first_diff_wasm_stack_depth..].to_vec(),
            wasm_stack_pop: old.wasm_stack.len() - first_diff_wasm_stack_depth,
            // Absolute value, copied straight from `self` (not a diff).
            wasm_stack_private_depth: self.wasm_stack_private_depth,

            // Absolute value, copied straight from `self` (not a diff).
            wasm_inst_offset: self.wasm_inst_offset,
        }
    }
}
442
443impl MachineStateDiff {
444    /// Creates a `MachineState` from the given `&FunctionStateMap`.
445    pub fn build_state(&self, m: &FunctionStateMap) -> MachineState {
446        let mut chain: Vec<&MachineStateDiff> = vec![];
447        chain.push(self);
448        let mut current = self.last;
449        while let Some(x) = current {
450            let that = &m.diffs[x];
451            current = that.last;
452            chain.push(that);
453        }
454        chain.reverse();
455        let mut state = m.initial.clone();
456        for x in chain {
457            for _ in 0..x.stack_pop {
458                state.stack_values.pop().unwrap();
459            }
460            for v in &x.stack_push {
461                state.stack_values.push(v.clone());
462            }
463            for &(index, ref v) in &x.reg_diff {
464                state.register_values[index.0] = v.clone();
465            }
466            for (index, ref v) in &x.prev_frame_diff {
467                if let Some(ref x) = v {
468                    state.prev_frame.insert(*index, x.clone());
469                } else {
470                    state.prev_frame.remove(index).unwrap();
471                }
472            }
473            for _ in 0..x.wasm_stack_pop {
474                state.wasm_stack.pop().unwrap();
475            }
476            for v in &x.wasm_stack_push {
477                state.wasm_stack.push(*v);
478            }
479        }
480        state.wasm_stack_private_depth = self.wasm_stack_private_depth;
481        state.wasm_inst_offset = self.wasm_inst_offset;
482        state
483    }
484}
485
486impl ExecutionStateImage {
487    /// Prints a backtrace if the `WASMER_BACKTRACE` environment variable is 1.
488    pub fn print_backtrace_if_needed(&self) {
489        use std::env;
490
491        if let Ok(x) = env::var("WASMER_BACKTRACE") {
492            if x == "1" {
493                eprintln!("{}", self.output());
494                return;
495            }
496        }
497
498        eprintln!("Run with `WASMER_BACKTRACE=1` environment variable to display a backtrace.");
499    }
500
501    /// Converts self into a `String`, used for display purposes.
502    pub fn output(&self) -> String {
503        fn join_strings(x: impl Iterator<Item = String>, sep: &str) -> String {
504            let mut ret = String::new();
505            let mut first = true;
506
507            for s in x {
508                if first {
509                    first = false;
510                } else {
511                    ret += sep;
512                }
513                ret += &s;
514            }
515
516            ret
517        }
518
519        fn format_optional_u64_sequence(x: &[Option<u64>]) -> String {
520            if x.is_empty() {
521                "(empty)".into()
522            } else {
523                join_strings(
524                    x.iter().enumerate().map(|(i, x)| {
525                        format!(
526                            "[{}] = {}",
527                            i,
528                            x.map(|x| format!("{}", x))
529                                .unwrap_or_else(|| "?".to_string())
530                        )
531                    }),
532                    ", ",
533                )
534            }
535        }
536
537        let mut ret = String::new();
538
539        if self.frames.is_empty() {
540            ret += &"Unknown fault address, cannot read stack.";
541            ret += "\n";
542        } else {
543            ret += &"Backtrace:";
544            ret += "\n";
545            for (i, f) in self.frames.iter().enumerate() {
546                ret += &format!("* Frame {} @ Local function {}", i, f.local_function_id);
547                ret += "\n";
548                ret += &format!("  {} {}\n", "Offset:", format!("{}", f.wasm_inst_offset),);
549                ret += &format!(
550                    "  {} {}\n",
551                    "Locals:",
552                    format_optional_u64_sequence(&f.locals)
553                );
554                ret += &format!(
555                    "  {} {}\n\n",
556                    "Stack:",
557                    format_optional_u64_sequence(&f.stack)
558                );
559            }
560        }
561
562        ret
563    }
564}
565
566impl InstanceImage {
567    /// Converts a slice of bytes into an `Option<InstanceImage>`
568    pub fn from_bytes(input: &[u8]) -> Option<InstanceImage> {
569        use bincode::deserialize;
570        match deserialize(input) {
571            Ok(x) => Some(x),
572            Err(_) => None,
573        }
574    }
575
576    /// Converts self into a vector of bytes.
577    pub fn to_bytes(&self) -> Vec<u8> {
578        use bincode::serialize;
579        serialize(self).unwrap()
580    }
581}
582
/// X64-specific structures and methods that do not depend on an x64 machine to run.
#[cfg(unix)]
pub mod x64_decl {
    use super::*;
    use crate::types::Type;

    /// General-purpose registers.
    ///
    /// Variants are declared so that `as u8` yields 0 (RAX) through 15 (R15).
    #[repr(u8)]
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
    pub enum GPR {
        /// RAX register
        RAX,
        /// RCX register
        RCX,
        /// RDX register
        RDX,
        /// RBX register
        RBX,
        /// RSP register
        RSP,
        /// RBP register
        RBP,
        /// RSI register
        RSI,
        /// RDI register
        RDI,
        /// R8 register
        R8,
        /// R9 register
        R9,
        /// R10 register
        R10,
        /// R11 register
        R11,
        /// R12 register
        R12,
        /// R13 register
        R13,
        /// R14 register
        R14,
        /// R15 register
        R15,
    }

    /// XMM registers.
    ///
    /// Variants are declared so that `as u8` yields 0 (XMM0) through 15 (XMM15).
    #[repr(u8)]
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
    pub enum XMM {
        /// XMM register 0
        XMM0,
        /// XMM register 1
        XMM1,
        /// XMM register 2
        XMM2,
        /// XMM register 3
        XMM3,
        /// XMM register 4
        XMM4,
        /// XMM register 5
        XMM5,
        /// XMM register 6
        XMM6,
        /// XMM register 7
        XMM7,
        /// XMM register 8
        XMM8,
        /// XMM register 9
        XMM9,
        /// XMM register 10
        XMM10,
        /// XMM register 11
        XMM11,
        /// XMM register 12
        XMM12,
        /// XMM register 13
        XMM13,
        /// XMM register 14
        XMM14,
        /// XMM register 15
        XMM15,
    }

    /// A machine register under the x86-64 architecture.
    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    pub enum X64Register {
        /// General-purpose registers.
        GPR(GPR),
        /// XMM (floating point/SIMD) registers.
        XMM(XMM),
    }

    impl X64Register {
        /// Returns the index of the register.
        ///
        /// GPRs map to indices 0..16 and XMM registers to 16..32.
        pub fn to_index(&self) -> RegisterIndex {
            match *self {
                X64Register::GPR(x) => RegisterIndex(x as usize),
                X64Register::XMM(x) => RegisterIndex(x as usize + 16),
            }
        }

        /// Converts a DWARF regnum to X64Register.
        ///
        /// Returns `None` for register numbers with no mapping here; note that
        /// 16 is skipped and only XMM0-XMM7 (17..=24) are mapped.
        pub fn from_dwarf_regnum(x: u16) -> Option<X64Register> {
            Some(match x {
                0 => X64Register::GPR(GPR::RAX),
                1 => X64Register::GPR(GPR::RDX),
                2 => X64Register::GPR(GPR::RCX),
                3 => X64Register::GPR(GPR::RBX),
                4 => X64Register::GPR(GPR::RSI),
                5 => X64Register::GPR(GPR::RDI),
                6 => X64Register::GPR(GPR::RBP),
                7 => X64Register::GPR(GPR::RSP),
                8 => X64Register::GPR(GPR::R8),
                9 => X64Register::GPR(GPR::R9),
                10 => X64Register::GPR(GPR::R10),
                11 => X64Register::GPR(GPR::R11),
                12 => X64Register::GPR(GPR::R12),
                13 => X64Register::GPR(GPR::R13),
                14 => X64Register::GPR(GPR::R14),
                15 => X64Register::GPR(GPR::R15),

                17 => X64Register::XMM(XMM::XMM0),
                18 => X64Register::XMM(XMM::XMM1),
                19 => X64Register::XMM(XMM::XMM2),
                20 => X64Register::XMM(XMM::XMM3),
                21 => X64Register::XMM(XMM::XMM4),
                22 => X64Register::XMM(XMM::XMM5),
                23 => X64Register::XMM(XMM::XMM6),
                24 => X64Register::XMM(XMM::XMM7),
                _ => return None,
            })
        }

        /// Returns the instruction prefix for `movq %this_reg, ?(%rsp)`.
        ///
        /// To build an instruction, append the memory location as a 32-bit
        /// offset to the stack pointer to this prefix.
        ///
        /// Only the six argument GPRs and XMM0-XMM7 are supported; every
        /// other register returns `None`.
        pub fn prefix_mov_to_stack(&self) -> Option<&'static [u8]> {
            Some(match *self {
                X64Register::GPR(gpr) => match gpr {
                    GPR::RDI => &[0x48, 0x89, 0xbc, 0x24],
                    GPR::RSI => &[0x48, 0x89, 0xb4, 0x24],
                    GPR::RDX => &[0x48, 0x89, 0x94, 0x24],
                    GPR::RCX => &[0x48, 0x89, 0x8c, 0x24],
                    GPR::R8 => &[0x4c, 0x89, 0x84, 0x24],
                    GPR::R9 => &[0x4c, 0x89, 0x8c, 0x24],
                    _ => return None,
                },
                X64Register::XMM(xmm) => match xmm {
                    XMM::XMM0 => &[0x66, 0x0f, 0xd6, 0x84, 0x24],
                    XMM::XMM1 => &[0x66, 0x0f, 0xd6, 0x8c, 0x24],
                    XMM::XMM2 => &[0x66, 0x0f, 0xd6, 0x94, 0x24],
                    XMM::XMM3 => &[0x66, 0x0f, 0xd6, 0x9c, 0x24],
                    XMM::XMM4 => &[0x66, 0x0f, 0xd6, 0xa4, 0x24],
                    XMM::XMM5 => &[0x66, 0x0f, 0xd6, 0xac, 0x24],
                    XMM::XMM6 => &[0x66, 0x0f, 0xd6, 0xb4, 0x24],
                    XMM::XMM7 => &[0x66, 0x0f, 0xd6, 0xbc, 0x24],
                    _ => return None,
                },
            })
        }
    }

    /// An allocator that allocates registers for function arguments according to the System V ABI.
    #[derive(Default)]
    pub struct ArgumentRegisterAllocator {
        // Number of integer argument registers handed out so far.
        n_gprs: usize,
        // Number of floating-point argument registers handed out so far.
        n_xmms: usize,
    }

    impl ArgumentRegisterAllocator {
        /// Allocates a register for argument type `ty`. Returns `None` if no register is available for this type.
        ///
        /// Integer types draw from RDI, RSI, RDX, RCX, R8, R9; float types
        /// from XMM0-XMM7. Any other `Type` hits `todo!` and panics.
        pub fn next(&mut self, ty: Type) -> Option<X64Register> {
            static GPR_SEQ: &'static [GPR] =
                &[GPR::RDI, GPR::RSI, GPR::RDX, GPR::RCX, GPR::R8, GPR::R9];
            static XMM_SEQ: &'static [XMM] = &[
                XMM::XMM0,
                XMM::XMM1,
                XMM::XMM2,
                XMM::XMM3,
                XMM::XMM4,
                XMM::XMM5,
                XMM::XMM6,
                XMM::XMM7,
            ];
            match ty {
                Type::I32 | Type::I64 => {
                    if self.n_gprs < GPR_SEQ.len() {
                        let gpr = GPR_SEQ[self.n_gprs];
                        self.n_gprs += 1;
                        Some(X64Register::GPR(gpr))
                    } else {
                        None
                    }
                }
                Type::F32 | Type::F64 => {
                    if self.n_xmms < XMM_SEQ.len() {
                        let xmm = XMM_SEQ[self.n_xmms];
                        self.n_xmms += 1;
                        Some(X64Register::XMM(xmm))
                    } else {
                        None
                    }
                }
                _ => todo!(
                    "ArgumentRegisterAllocator::next: Unsupported type: {:?}",
                    ty
                ),
            }
        }
    }
}
794
795/// X64-specific structures and methods that only work on an x64 machine.
796#[cfg(unix)]
797pub mod x64 {
798    //! The x64 state module contains functions to generate state and code for x64 targets.
799    pub use super::x64_decl::*;
800    use super::*;
801    use crate::codegen::BreakpointMap;
802    use crate::error::RuntimeError;
803    use crate::fault::{
804        catch_unsafe_unwind, get_boundary_register_preservation, run_on_alternative_stack,
805    };
806    use crate::structures::TypedIndex;
807    use crate::types::LocalGlobalIndex;
808    use crate::vm::Ctx;
809
    /// Chases a chain of pointers starting at the *address of* the `vmctx`
    /// parameter itself: each step reads the current location as a pointer
    /// and advances it by the given byte offset. Returns the final address
    /// as a `u64`.
    ///
    /// # Safety
    /// Every intermediate location must hold a valid, aligned pointer;
    /// alignment is only checked via `debug_assert!` in debug builds.
    #[allow(clippy::cast_ptr_alignment)]
    unsafe fn compute_vmctx_deref(vmctx: *const Ctx, seq: &[usize]) -> u64 {
        // The walk begins at `&vmctx` (the local's address), so the first
        // iteration loads the `vmctx` value itself — presumably intentional,
        // with offset sequences accounting for that initial deref (confirm
        // against the code that builds `MachineValue::VmctxDeref`).
        let mut ptr = &vmctx as *const *const Ctx as *const u8;
        for x in seq {
            debug_assert!(ptr.align_offset(std::mem::align_of::<*const u8>()) == 0);
            ptr = (*(ptr as *const *const u8)).add(*x);
        }
        ptr as usize as u64
    }
819
820    /// Create a new `MachineState` with default values.
821    pub fn new_machine_state() -> MachineState {
822        MachineState {
823            stack_values: vec![],
824            register_values: vec![MachineValue::Undefined; 16 + 8],
825            prev_frame: BTreeMap::new(),
826            wasm_stack: vec![],
827            wasm_stack_private_depth: 0,
828            wasm_inst_offset: ::std::usize::MAX,
829        }
830    }
831
832    /// Invokes a call return on the stack for the given module state map, code base, instance
833    /// image and context.
834    #[warn(unused_variables)]
835    pub unsafe fn invoke_call_return_on_stack(
836        msm: &ModuleStateMap,
837        code_base: usize,
838        image: InstanceImage,
839        vmctx: &mut Ctx,
840        breakpoints: Option<BreakpointMap>,
841    ) -> Result<u64, RuntimeError> {
842        let mut stack: Vec<u64> = vec![0; 1048576 * 8 / 8]; // 8MB stack
843        let mut stack_offset: usize = stack.len();
844
845        stack_offset -= 3; // placeholder for call return
846
847        let mut last_stack_offset: u64 = 0; // rbp
848
849        let mut known_registers: [Option<u64>; 32] = [None; 32];
850
851        let local_functions_vec: Vec<&FunctionStateMap> =
852            msm.local_functions.iter().map(|(_, v)| v).collect();
853
854        // Bottom to top
855        for f in image.execution_state.frames.iter().rev() {
856            let fsm = local_functions_vec[f.local_function_id];
857            let suspend_offset = if f.wasm_inst_offset == ::std::usize::MAX {
858                fsm.wasm_function_header_target_offset
859            } else {
860                fsm.wasm_offset_to_target_offset
861                    .get(&f.wasm_inst_offset)
862                    .copied()
863            }
864            .expect("instruction is not a critical point");
865
866            let (activate_offset, diff_id) = match suspend_offset {
867                SuspendOffset::Loop(x) => fsm.loop_offsets.get(&x),
868                SuspendOffset::Call(x) => fsm.call_offsets.get(&x),
869                SuspendOffset::Trappable(x) => fsm.trappable_offsets.get(&x),
870            }
871            .map(|x| (x.activate_offset, x.diff_id))
872            .expect("offset cannot be found in table");
873
874            let diff = &fsm.diffs[diff_id];
875            let state = diff.build_state(fsm);
876
877            stack_offset -= 1;
878            stack[stack_offset] = stack.as_ptr().offset(last_stack_offset as isize) as usize as u64; // push rbp
879            last_stack_offset = stack_offset as _;
880
881            let mut got_explicit_shadow = false;
882
883            for v in state.stack_values.iter() {
884                match *v {
885                    MachineValue::Undefined => stack_offset -= 1,
886                    MachineValue::Vmctx => {
887                        stack_offset -= 1;
888                        stack[stack_offset] = vmctx as *mut Ctx as usize as u64;
889                    }
890                    MachineValue::VmctxDeref(ref seq) => {
891                        stack_offset -= 1;
892                        stack[stack_offset] = compute_vmctx_deref(vmctx as *const Ctx, seq);
893                    }
894                    MachineValue::PreserveRegister(index) => {
895                        stack_offset -= 1;
896                        stack[stack_offset] = known_registers[index.0].unwrap_or(0);
897                    }
898                    MachineValue::CopyStackBPRelative(byte_offset) => {
899                        assert!(byte_offset % 8 == 0);
900                        let target_offset = (byte_offset / 8) as isize;
901                        let v = stack[(last_stack_offset as isize + target_offset) as usize];
902                        stack_offset -= 1;
903                        stack[stack_offset] = v;
904                    }
905                    MachineValue::ExplicitShadow => {
906                        assert!(fsm.shadow_size % 8 == 0);
907                        stack_offset -= fsm.shadow_size / 8;
908                        got_explicit_shadow = true;
909                    }
910                    MachineValue::WasmStack(x) => {
911                        stack_offset -= 1;
912                        match state.wasm_stack[x] {
913                            WasmAbstractValue::Const(x) => {
914                                stack[stack_offset] = x;
915                            }
916                            WasmAbstractValue::Runtime => {
917                                stack[stack_offset] = f.stack[x].unwrap();
918                            }
919                        }
920                    }
921                    MachineValue::WasmLocal(x) => {
922                        stack_offset -= 1;
923                        match fsm.locals[x] {
924                            WasmAbstractValue::Const(x) => {
925                                stack[stack_offset] = x;
926                            }
927                            WasmAbstractValue::Runtime => {
928                                stack[stack_offset] = f.locals[x].unwrap();
929                            }
930                        }
931                    }
932                    MachineValue::TwoHalves(ref inner) => {
933                        stack_offset -= 1;
934                        // TODO: Cleanup
935                        match inner.0 {
936                            MachineValue::WasmStack(x) => match state.wasm_stack[x] {
937                                WasmAbstractValue::Const(x) => {
938                                    assert!(x <= std::u32::MAX as u64);
939                                    stack[stack_offset] |= x;
940                                }
941                                WasmAbstractValue::Runtime => {
942                                    let v = f.stack[x].unwrap();
943                                    assert!(v <= std::u32::MAX as u64);
944                                    stack[stack_offset] |= v;
945                                }
946                            },
947                            MachineValue::WasmLocal(x) => match fsm.locals[x] {
948                                WasmAbstractValue::Const(x) => {
949                                    assert!(x <= std::u32::MAX as u64);
950                                    stack[stack_offset] |= x;
951                                }
952                                WasmAbstractValue::Runtime => {
953                                    let v = f.locals[x].unwrap();
954                                    assert!(v <= std::u32::MAX as u64);
955                                    stack[stack_offset] |= v;
956                                }
957                            },
958                            MachineValue::VmctxDeref(ref seq) => {
959                                stack[stack_offset] |=
960                                    compute_vmctx_deref(vmctx as *const Ctx, seq)
961                                        & (std::u32::MAX as u64);
962                            }
963                            MachineValue::Undefined => {}
964                            _ => unimplemented!("TwoHalves.0"),
965                        }
966                        match inner.1 {
967                            MachineValue::WasmStack(x) => match state.wasm_stack[x] {
968                                WasmAbstractValue::Const(x) => {
969                                    assert!(x <= std::u32::MAX as u64);
970                                    stack[stack_offset] |= x << 32;
971                                }
972                                WasmAbstractValue::Runtime => {
973                                    let v = f.stack[x].unwrap();
974                                    assert!(v <= std::u32::MAX as u64);
975                                    stack[stack_offset] |= v << 32;
976                                }
977                            },
978                            MachineValue::WasmLocal(x) => match fsm.locals[x] {
979                                WasmAbstractValue::Const(x) => {
980                                    assert!(x <= std::u32::MAX as u64);
981                                    stack[stack_offset] |= x << 32;
982                                }
983                                WasmAbstractValue::Runtime => {
984                                    let v = f.locals[x].unwrap();
985                                    assert!(v <= std::u32::MAX as u64);
986                                    stack[stack_offset] |= v << 32;
987                                }
988                            },
989                            MachineValue::VmctxDeref(ref seq) => {
990                                stack[stack_offset] |=
991                                    (compute_vmctx_deref(vmctx as *const Ctx, seq)
992                                        & (std::u32::MAX as u64))
993                                        << 32;
994                            }
995                            MachineValue::Undefined => {}
996                            _ => unimplemented!("TwoHalves.1"),
997                        }
998                    }
999                }
1000            }
1001            if !got_explicit_shadow {
1002                assert!(fsm.shadow_size % 8 == 0);
1003                stack_offset -= fsm.shadow_size / 8;
1004            }
1005            for (i, v) in state.register_values.iter().enumerate() {
1006                match *v {
1007                    MachineValue::Undefined => {}
1008                    MachineValue::Vmctx => {
1009                        known_registers[i] = Some(vmctx as *mut Ctx as usize as u64);
1010                    }
1011                    MachineValue::VmctxDeref(ref seq) => {
1012                        known_registers[i] = Some(compute_vmctx_deref(vmctx as *const Ctx, seq));
1013                    }
1014                    MachineValue::WasmStack(x) => match state.wasm_stack[x] {
1015                        WasmAbstractValue::Const(x) => {
1016                            known_registers[i] = Some(x);
1017                        }
1018                        WasmAbstractValue::Runtime => {
1019                            known_registers[i] = Some(f.stack[x].unwrap());
1020                        }
1021                    },
1022                    MachineValue::WasmLocal(x) => match fsm.locals[x] {
1023                        WasmAbstractValue::Const(x) => {
1024                            known_registers[i] = Some(x);
1025                        }
1026                        WasmAbstractValue::Runtime => {
1027                            known_registers[i] = Some(f.locals[x].unwrap());
1028                        }
1029                    },
1030                    _ => unreachable!(),
1031                }
1032            }
1033
1034            // no need to check 16-byte alignment here because it's possible that we're not at a call entry.
1035
1036            stack_offset -= 1;
1037            stack[stack_offset] = (code_base + activate_offset) as u64; // return address
1038        }
1039
1040        stack_offset -= 1;
1041        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R15).to_index().0].unwrap_or(0);
1042
1043        stack_offset -= 1;
1044        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R14).to_index().0].unwrap_or(0);
1045
1046        stack_offset -= 1;
1047        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R13).to_index().0].unwrap_or(0);
1048
1049        stack_offset -= 1;
1050        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R12).to_index().0].unwrap_or(0);
1051
1052        stack_offset -= 1;
1053        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R11).to_index().0].unwrap_or(0);
1054
1055        stack_offset -= 1;
1056        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R10).to_index().0].unwrap_or(0);
1057
1058        stack_offset -= 1;
1059        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R9).to_index().0].unwrap_or(0);
1060
1061        stack_offset -= 1;
1062        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R8).to_index().0].unwrap_or(0);
1063
1064        stack_offset -= 1;
1065        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RSI).to_index().0].unwrap_or(0);
1066
1067        stack_offset -= 1;
1068        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RDI).to_index().0].unwrap_or(0);
1069
1070        stack_offset -= 1;
1071        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RDX).to_index().0].unwrap_or(0);
1072
1073        stack_offset -= 1;
1074        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RCX).to_index().0].unwrap_or(0);
1075
1076        stack_offset -= 1;
1077        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RBX).to_index().0].unwrap_or(0);
1078
1079        stack_offset -= 1;
1080        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RAX).to_index().0].unwrap_or(0);
1081
1082        stack_offset -= 1;
1083        stack[stack_offset] = stack.as_ptr().offset(last_stack_offset as isize) as usize as u64; // rbp
1084
1085        stack_offset -= 1;
1086        stack[stack_offset] =
1087            known_registers[X64Register::XMM(XMM::XMM15).to_index().0].unwrap_or(0);
1088
1089        stack_offset -= 1;
1090        stack[stack_offset] =
1091            known_registers[X64Register::XMM(XMM::XMM14).to_index().0].unwrap_or(0);
1092
1093        stack_offset -= 1;
1094        stack[stack_offset] =
1095            known_registers[X64Register::XMM(XMM::XMM13).to_index().0].unwrap_or(0);
1096
1097        stack_offset -= 1;
1098        stack[stack_offset] =
1099            known_registers[X64Register::XMM(XMM::XMM12).to_index().0].unwrap_or(0);
1100
1101        stack_offset -= 1;
1102        stack[stack_offset] =
1103            known_registers[X64Register::XMM(XMM::XMM11).to_index().0].unwrap_or(0);
1104
1105        stack_offset -= 1;
1106        stack[stack_offset] =
1107            known_registers[X64Register::XMM(XMM::XMM10).to_index().0].unwrap_or(0);
1108
1109        stack_offset -= 1;
1110        stack[stack_offset] =
1111            known_registers[X64Register::XMM(XMM::XMM9).to_index().0].unwrap_or(0);
1112
1113        stack_offset -= 1;
1114        stack[stack_offset] =
1115            known_registers[X64Register::XMM(XMM::XMM8).to_index().0].unwrap_or(0);
1116        stack_offset -= 1;
1117        stack[stack_offset] =
1118            known_registers[X64Register::XMM(XMM::XMM7).to_index().0].unwrap_or(0);
1119
1120        stack_offset -= 1;
1121        stack[stack_offset] =
1122            known_registers[X64Register::XMM(XMM::XMM6).to_index().0].unwrap_or(0);
1123
1124        stack_offset -= 1;
1125        stack[stack_offset] =
1126            known_registers[X64Register::XMM(XMM::XMM5).to_index().0].unwrap_or(0);
1127
1128        stack_offset -= 1;
1129        stack[stack_offset] =
1130            known_registers[X64Register::XMM(XMM::XMM4).to_index().0].unwrap_or(0);
1131
1132        stack_offset -= 1;
1133        stack[stack_offset] =
1134            known_registers[X64Register::XMM(XMM::XMM3).to_index().0].unwrap_or(0);
1135
1136        stack_offset -= 1;
1137        stack[stack_offset] =
1138            known_registers[X64Register::XMM(XMM::XMM2).to_index().0].unwrap_or(0);
1139
1140        stack_offset -= 1;
1141        stack[stack_offset] =
1142            known_registers[X64Register::XMM(XMM::XMM1).to_index().0].unwrap_or(0);
1143
1144        stack_offset -= 1;
1145        stack[stack_offset] =
1146            known_registers[X64Register::XMM(XMM::XMM0).to_index().0].unwrap_or(0);
1147
1148        if let Some(ref memory) = image.memory {
1149            assert!(vmctx.internal.memory_bound <= memory.len());
1150
1151            if vmctx.internal.memory_bound < memory.len() {
1152                let grow: unsafe extern "C" fn(ctx: &mut Ctx, memory_index: usize, delta: usize) =
1153                    ::std::mem::transmute((*vmctx.internal.intrinsics).memory_grow);
1154                grow(
1155                    vmctx,
1156                    0,
1157                    (memory.len() - vmctx.internal.memory_bound) / 65536,
1158                );
1159                assert_eq!(vmctx.internal.memory_bound, memory.len());
1160            }
1161
1162            std::slice::from_raw_parts_mut(vmctx.internal.memory_base, vmctx.internal.memory_bound)
1163                .copy_from_slice(memory);
1164        }
1165
1166        let globals_len = (*vmctx.module).info.globals.len();
1167        for i in 0..globals_len {
1168            (*(*vmctx.local_backing).globals[LocalGlobalIndex::new(i)].vm_local_global()).data =
1169                image.globals[i];
1170        }
1171
1172        drop(image); // free up host memory
1173
1174        catch_unsafe_unwind(
1175            || {
1176                run_on_alternative_stack(
1177                    stack.as_mut_ptr().add(stack.len()),
1178                    stack.as_mut_ptr().add(stack_offset),
1179                )
1180            },
1181            breakpoints,
1182        )
1183    }
1184
1185    /// Builds an `InstanceImage` for the given `Ctx` and `ExecutionStateImage`.
1186    pub fn build_instance_image(
1187        vmctx: &mut Ctx,
1188        execution_state: ExecutionStateImage,
1189    ) -> InstanceImage {
1190        unsafe {
1191            let memory = if vmctx.internal.memory_base.is_null() {
1192                None
1193            } else {
1194                Some(
1195                    std::slice::from_raw_parts(
1196                        vmctx.internal.memory_base,
1197                        vmctx.internal.memory_bound,
1198                    )
1199                    .to_vec(),
1200                )
1201            };
1202
1203            // FIXME: Imported globals
1204            let globals_len = (*vmctx.module).info.globals.len();
1205            let globals: Vec<u128> = (0..globals_len)
1206                .map(|i| {
1207                    (*vmctx.local_backing).globals[LocalGlobalIndex::new(i)]
1208                        .get()
1209                        .to_u128()
1210                })
1211                .collect();
1212
1213            InstanceImage {
1214                memory: memory,
1215                globals: globals,
1216                execution_state: execution_state,
1217            }
1218        }
1219    }
1220
    /// Returns a `ExecutionStateImage` for the given versions, stack, initial registers and
    /// initial address.
    ///
    /// Walks the native call stack starting at `stack` (towards older frames), using the
    /// per-`CodeVersion` module state maps supplied by `versions` to translate each native
    /// frame back into an abstract wasm frame (operand stack + locals). The walk stops when
    /// a return address is not found in any version's maps, or when `max_depth` frames
    /// have been collected.
    ///
    /// # Safety
    ///
    /// `stack` must point into a live native stack produced by code belonging to one of the
    /// iterated `CodeVersion`s, and `initial_address` / `initially_known_registers` must
    /// describe a valid suspend point for that stack; otherwise the raw pointer reads below
    /// are undefined behavior.
    #[warn(unused_variables)]
    pub unsafe fn read_stack<'a, I: Iterator<Item = &'a CodeVersion>, F: Fn() -> I + 'a>(
        versions: F,
        mut stack: *const u64,
        initially_known_registers: [Option<u64>; 32],
        mut initial_address: Option<u64>,
        max_depth: Option<usize>,
    ) -> ExecutionStateImage {
        let mut known_registers: [Option<u64>; 32] = initially_known_registers;
        let mut results: Vec<WasmFunctionStateDump> = vec![];
        let mut was_baseline = true;

        // Unwind one native frame per iteration; `depth` only exists to honor `max_depth`.
        for depth in 0.. {
            if let Some(max_depth) = max_depth {
                if depth >= max_depth {
                    return ExecutionStateImage { frames: results };
                }
            }

            // The first iteration may use the explicitly supplied suspend address; every
            // later iteration pops the return address off the native stack.
            let ret_addr = initial_address.take().unwrap_or_else(|| {
                let x = *stack;
                stack = stack.offset(1);
                x
            });

            let mut fsm_state: Option<(&FunctionStateMap, MachineState)> = None;
            let mut is_baseline: Option<bool> = None;

            // Find which code version (tier) owns this return address. A call site,
            // trappable instruction, or loop suspend point all count; the first version
            // that recognizes the address wins.
            for version in versions() {
                match version
                    .msm
                    .lookup_call_ip(ret_addr as usize, version.base)
                    .or_else(|| {
                        version
                            .msm
                            .lookup_trappable_ip(ret_addr as usize, version.base)
                    })
                    .or_else(|| version.msm.lookup_loop_ip(ret_addr as usize, version.base))
                {
                    Some(x) => {
                        fsm_state = Some(x);
                        is_baseline = Some(version.baseline);
                        break;
                    }
                    None => {}
                };
            }

            // An unrecognized return address means we have left wasm-generated code:
            // the walk is complete.
            let (fsm, state) = if let Some(x) = fsm_state {
                x
            } else {
                return ExecutionStateImage { frames: results };
            };

            {
                let is_baseline = is_baseline.unwrap();

                // Are we unwinding through an optimized/baseline boundary?
                if is_baseline && !was_baseline {
                    let callee_saved = &*get_boundary_register_preservation();
                    known_registers[X64Register::GPR(GPR::R15).to_index().0] =
                        Some(callee_saved.r15);
                    known_registers[X64Register::GPR(GPR::R14).to_index().0] =
                        Some(callee_saved.r14);
                    known_registers[X64Register::GPR(GPR::R13).to_index().0] =
                        Some(callee_saved.r13);
                    known_registers[X64Register::GPR(GPR::R12).to_index().0] =
                        Some(callee_saved.r12);
                    known_registers[X64Register::GPR(GPR::RBX).to_index().0] =
                        Some(callee_saved.rbx);
                }

                was_baseline = is_baseline;
            }

            // Seed the reconstructed operand stack and locals with statically-known
            // constants; `Runtime` slots start as None and are filled in from registers
            // and stack memory below.
            let mut wasm_stack: Vec<Option<u64>> = state
                .wasm_stack
                .iter()
                .map(|x| match *x {
                    WasmAbstractValue::Const(x) => Some(x),
                    WasmAbstractValue::Runtime => None,
                })
                .collect();
            let mut wasm_locals: Vec<Option<u64>> = fsm
                .locals
                .iter()
                .map(|x| match *x {
                    WasmAbstractValue::Const(x) => Some(x),
                    WasmAbstractValue::Runtime => None,
                })
                .collect();

            // This must be before the next loop because that modifies `known_registers`.
            for (i, v) in state.register_values.iter().enumerate() {
                match *v {
                    MachineValue::Undefined => {}
                    MachineValue::Vmctx => {}
                    MachineValue::VmctxDeref(_) => {}
                    MachineValue::WasmStack(idx) => {
                        if let Some(v) = known_registers[i] {
                            wasm_stack[idx] = Some(v);
                        } else {
                            eprintln!(
                                "BUG: Register {} for WebAssembly stack slot {} has unknown value.",
                                i, idx
                            );
                        }
                    }
                    MachineValue::WasmLocal(idx) => {
                        if let Some(v) = known_registers[i] {
                            wasm_locals[idx] = Some(v);
                        }
                    }
                    _ => unreachable!(),
                }
            }

            // If the frame did not record an explicit shadow area, skip over the implicit
            // `shadow_size` bytes (8 bytes per slot) before consuming stack values.
            let found_shadow = state
                .stack_values
                .iter()
                .any(|v| *v == MachineValue::ExplicitShadow);
            if !found_shadow {
                stack = stack.add(fsm.shadow_size / 8);
            }

            // Replay the frame's recorded stack layout in reverse push order, consuming one
            // native stack slot per entry (ExplicitShadow consumes shadow_size/8 slots).
            for v in state.stack_values.iter().rev() {
                match *v {
                    MachineValue::ExplicitShadow => {
                        stack = stack.add(fsm.shadow_size / 8);
                    }
                    MachineValue::Undefined => {
                        stack = stack.offset(1);
                    }
                    MachineValue::Vmctx => {
                        stack = stack.offset(1);
                    }
                    MachineValue::VmctxDeref(_) => {
                        stack = stack.offset(1);
                    }
                    MachineValue::PreserveRegister(idx) => {
                        // A callee-saved register was spilled here; recover its value.
                        known_registers[idx.0] = Some(*stack);
                        stack = stack.offset(1);
                    }
                    MachineValue::CopyStackBPRelative(_) => {
                        stack = stack.offset(1);
                    }
                    MachineValue::WasmStack(idx) => {
                        wasm_stack[idx] = Some(*stack);
                        stack = stack.offset(1);
                    }
                    MachineValue::WasmLocal(idx) => {
                        wasm_locals[idx] = Some(*stack);
                        stack = stack.offset(1);
                    }
                    MachineValue::TwoHalves(ref inner) => {
                        // Two 32-bit values packed into one 64-bit slot: inner.0 is the
                        // low half, inner.1 the high half.
                        let v = *stack;
                        stack = stack.offset(1);
                        match inner.0 {
                            MachineValue::WasmStack(idx) => {
                                wasm_stack[idx] = Some(v & 0xffffffffu64);
                            }
                            MachineValue::WasmLocal(idx) => {
                                wasm_locals[idx] = Some(v & 0xffffffffu64);
                            }
                            MachineValue::VmctxDeref(_) => {}
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.0 (read)"),
                        }
                        match inner.1 {
                            MachineValue::WasmStack(idx) => {
                                wasm_stack[idx] = Some(v >> 32);
                            }
                            MachineValue::WasmLocal(idx) => {
                                wasm_locals[idx] = Some(v >> 32);
                            }
                            MachineValue::VmctxDeref(_) => {}
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.1 (read)"),
                        }
                    }
                }
            }

            // Values the frame stored into its caller's frame, addressed relative to the
            // caller's stack area past saved_rbp and the return address.
            for (offset, v) in state.prev_frame.iter() {
                let offset = (*offset + 2) as isize; // (saved_rbp, return_address)
                match *v {
                    MachineValue::WasmStack(idx) => {
                        wasm_stack[idx] = Some(*stack.offset(offset));
                    }
                    MachineValue::WasmLocal(idx) => {
                        wasm_locals[idx] = Some(*stack.offset(offset));
                    }
                    _ => unreachable!("values in prev frame can only be stack/local"),
                }
            }
            stack = stack.offset(1); // saved_rbp

            // Drop entries that are private to the code generator and not part of the
            // wasm-visible operand stack.
            wasm_stack.truncate(
                wasm_stack
                    .len()
                    .checked_sub(state.wasm_stack_private_depth)
                    .unwrap(),
            );

            let wfs = WasmFunctionStateDump {
                local_function_id: fsm.local_function_id,
                wasm_inst_offset: state.wasm_inst_offset,
                stack: wasm_stack,
                locals: wasm_locals,
            };
            results.push(wfs);
        }

        // The `for depth in 0..` loop above only exits via its `return` statements.
        unreachable!();
    }
1438}