use crate::backend::RunnableModule;
use borsh::{BorshDeserialize, BorshSerialize};
// The `Serialize`/`Deserialize` derives below need serde's derive macros in
// scope (serde with the `derive` feature).
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use std::ops::Bound::{Included, Unbounded};
use std::sync::Arc;

/// An index into the machine's register file.
#[derive(
    Copy,
    Clone,
    Debug,
    Eq,
    PartialEq,
    Hash,
    Serialize,
    Deserialize,
    BorshSerialize,
    BorshDeserialize,
)]
pub struct RegisterIndex(pub usize);

/// An abstract WebAssembly value: either known only at runtime or a
/// compile-time constant.
#[derive(
    Copy,
    Clone,
    Debug,
    Eq,
    PartialEq,
    Hash,
    Serialize,
    Deserialize,
    BorshSerialize,
    BorshDeserialize,
)]
pub enum WasmAbstractValue {
    /// A value only known at runtime.
    Runtime,
    /// A constant value.
    Const(u64),
}

/// A container for the state of a running WebAssembly instance.
#[derive(Clone, Debug, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub struct MachineState {
    /// Values on the machine stack.
    pub stack_values: Vec<MachineValue>,
    /// Values in machine registers.
    pub register_values: Vec<MachineValue>,
    /// Values in the previous (caller's) frame, keyed by stack offset.
    pub prev_frame: BTreeMap<usize, MachineValue>,
    /// The WebAssembly value stack.
    pub wasm_stack: Vec<WasmAbstractValue>,
    /// Depth of the private portion of the WebAssembly stack.
    pub wasm_stack_private_depth: usize,
    /// Offset of the current WebAssembly instruction.
    pub wasm_inst_offset: usize,
}

/// A diff between two `MachineState`s, applied on top of an older state to
/// reproduce a newer one.
#[derive(Clone, Debug, Default, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub struct MachineStateDiff {
    /// Index (into `FunctionStateMap::diffs`) of the previous diff in the
    /// chain, if any.
    pub last: Option<usize>,
    /// Stack values pushed relative to the old state.
    pub stack_push: Vec<MachineValue>,
    /// Number of stack values popped relative to the old state.
    pub stack_pop: usize,

    /// Registers whose values changed, with their new values.
    pub reg_diff: Vec<(RegisterIndex, MachineValue)>,

    /// Changes to the previous frame; `None` means the entry was removed.
    pub prev_frame_diff: BTreeMap<usize, Option<MachineValue>>,

    /// WebAssembly stack values pushed relative to the old state.
    pub wasm_stack_push: Vec<WasmAbstractValue>,
    /// Number of WebAssembly stack values popped relative to the old state.
    pub wasm_stack_pop: usize,
    /// Depth of the private portion of the WebAssembly stack. This is an
    /// absolute value, not a diff.
    pub wasm_stack_private_depth: usize,
    /// Offset of the current WebAssembly instruction. This is an absolute
    /// value, not a diff.
    pub wasm_inst_offset: usize,
}

/// A machine-level value tracked by the state system.
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub enum MachineValue {
    /// An undefined value.
    Undefined,
    /// A pointer to the VM context.
    Vmctx,
    /// A value reached by dereferencing the VM context through a sequence of
    /// pointer offsets.
    VmctxDeref(Vec<usize>),
    /// A register whose value must be preserved.
    PreserveRegister(RegisterIndex),
    /// A copy of a stack slot relative to the frame base pointer, in bytes.
    CopyStackBPRelative(i32),
    /// Explicitly reserved shadow stack space.
    ExplicitShadow,
    /// A WebAssembly stack value, by index.
    WasmStack(usize),
    /// A WebAssembly local, by index.
    WasmLocal(usize),
    /// Two 32-bit halves packed into a single 64-bit slot.
    TwoHalves(Box<(MachineValue, MachineValue)>),
}

impl BorshSerialize for MachineValue {
    fn serialize<W: std::io::Write>(&self, writer: &mut W) -> std::io::Result<()> {
        match self {
            MachineValue::Undefined => writer.write_all(&[0u8])?,
            MachineValue::Vmctx => writer.write_all(&[1u8])?,
            MachineValue::VmctxDeref(v) => {
                writer.write_all(&[2u8])?;
                BorshSerialize::serialize(&v, writer)?;
            }
            MachineValue::PreserveRegister(r) => {
                writer.write_all(&[3u8])?;
                BorshSerialize::serialize(&r, writer)?;
            }
            MachineValue::CopyStackBPRelative(i) => {
                writer.write_all(&[4u8])?;
                BorshSerialize::serialize(&i, writer)?;
            }
            MachineValue::ExplicitShadow => writer.write_all(&[5u8])?,
            MachineValue::WasmStack(u) => {
                writer.write_all(&[6u8])?;
                BorshSerialize::serialize(&(*u as u64), writer)?;
            }
            MachineValue::WasmLocal(u) => {
                writer.write_all(&[7u8])?;
                BorshSerialize::serialize(&(*u as u64), writer)?;
            }
            MachineValue::TwoHalves(b) => {
                writer.write_all(&[8u8])?;
                BorshSerialize::serialize(&b, writer)?;
            }
        }
        Ok(())
    }
}

impl BorshDeserialize for MachineValue {
    fn deserialize(buf: &mut &[u8]) -> std::io::Result<Self> {
        let variant: u8 = BorshDeserialize::deserialize(buf)?;
        Ok(match variant {
            0 => MachineValue::Undefined,
            1 => MachineValue::Vmctx,
            2 => {
                let v: Vec<usize> = BorshDeserialize::deserialize(buf)?;
                MachineValue::VmctxDeref(v)
            }
            3 => {
                let r: RegisterIndex = BorshDeserialize::deserialize(buf)?;
                MachineValue::PreserveRegister(r)
            }
            4 => {
                let i: i32 = BorshDeserialize::deserialize(buf)?;
                MachineValue::CopyStackBPRelative(i)
            }
            5 => MachineValue::ExplicitShadow,
            6 => {
                let u: usize = BorshDeserialize::deserialize(buf)?;
                MachineValue::WasmStack(u)
            }
            7 => {
                let u: usize = BorshDeserialize::deserialize(buf)?;
                MachineValue::WasmLocal(u)
            }
            8 => {
                let b: Box<(MachineValue, MachineValue)> = BorshDeserialize::deserialize(buf)?;
                MachineValue::TwoHalves(b)
            }
            _ => {
                return Err(std::io::Error::new(
                    std::io::ErrorKind::InvalidInput,
                    "Unexpected variant",
                ))
            }
        })
    }
}

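// A minimal round-trip sketch for the manual Borsh impls above. It assumes a
// pre-1.0 `borsh` release, where `BorshSerialize::try_to_vec` and
// `BorshDeserialize::try_from_slice` are available and `usize` payloads travel
// on the wire as `u64` (matching the widening cast in `serialize`).
#[cfg(test)]
mod borsh_machine_value_tests {
    use super::*;

    #[test]
    fn machine_value_roundtrips_through_borsh() {
        let original = MachineValue::WasmStack(7);
        let bytes = original.try_to_vec().unwrap();
        // One tag byte (6 for `WasmStack`) followed by the index as a u64.
        assert_eq!(bytes[0], 6u8);
        assert_eq!(bytes.len(), 1 + 8);
        let decoded = MachineValue::try_from_slice(&bytes).unwrap();
        assert_eq!(decoded, original);
    }
}
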
/// The state map of a single function, including the diffs recorded at each
/// offset of interest.
#[derive(Clone, Debug, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub struct FunctionStateMap {
    /// The machine state at function entry.
    pub initial: MachineState,
    /// The index of the function in the module's local function list.
    pub local_function_id: usize,
    /// The abstract values of the function's locals.
    pub locals: Vec<WasmAbstractValue>,
    /// The size of the function's shadow (reserved stack) space, in bytes.
    pub shadow_size: usize,
    /// All recorded state diffs; `OffsetInfo::diff_id` indexes into this.
    pub diffs: Vec<MachineStateDiff>,
    /// The suspend offset targeted by the function header, if any.
    pub wasm_function_header_target_offset: Option<SuspendOffset>,
    /// Maps WebAssembly instruction offsets to suspend offsets.
    pub wasm_offset_to_target_offset: BTreeMap<usize, SuspendOffset>,
    /// `OffsetInfo` for loop points, keyed by code offset.
    pub loop_offsets: BTreeMap<usize, OffsetInfo>,
    /// `OffsetInfo` for call sites, keyed by code offset.
    pub call_offsets: BTreeMap<usize, OffsetInfo>,
    /// `OffsetInfo` for trappable instructions, keyed by code offset.
    pub trappable_offsets: BTreeMap<usize, OffsetInfo>,
}

/// A suspend point: the code offset of a loop, call, or trappable instruction.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub enum SuspendOffset {
    /// A loop.
    Loop(usize),
    /// A call.
    Call(usize),
    /// A trappable instruction.
    Trappable(usize),
}

/// Information about a code offset at which the machine state is known.
#[derive(Clone, Debug, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub struct OffsetInfo {
    /// The exclusive end offset past which this entry no longer applies.
    pub end_offset: usize,
    /// Index into `FunctionStateMap::diffs`.
    pub diff_id: usize,
    /// The code offset at which execution resumes.
    pub activate_offset: usize,
}

/// The state maps of an entire module.
#[derive(Clone, Debug, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub struct ModuleStateMap {
    /// The state maps of the module's local functions, keyed by each
    /// function's start offset within the code.
    pub local_functions: BTreeMap<usize, FunctionStateMap>,
    /// The total size of the module's code, in bytes.
    pub total_size: usize,
}

/// A dump of the state of a single WebAssembly function frame.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct WasmFunctionStateDump {
    /// The index of the function in the module's local function list.
    pub local_function_id: usize,
    /// Offset of the current WebAssembly instruction.
    pub wasm_inst_offset: usize,
    /// Values on the WebAssembly stack; `None` if unknown.
    pub stack: Vec<Option<u64>>,
    /// Values of the function's locals; `None` if unknown.
    pub locals: Vec<Option<u64>>,
}

/// An image of the execution state: the stack of WebAssembly function frames.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ExecutionStateImage {
    /// The frames, innermost first.
    pub frames: Vec<WasmFunctionStateDump>,
}

/// A serializable image of a whole instance: linear memory, globals, and
/// execution state.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InstanceImage {
    /// The contents of linear memory, if the instance has one.
    pub memory: Option<Vec<u8>>,
    /// The values of the instance's globals.
    pub globals: Vec<u128>,
    /// The execution state.
    pub execution_state: ExecutionStateImage,
}

/// A version of the compiled code for a module.
#[derive(Clone)]
pub struct CodeVersion {
    /// Whether this is the baseline version of the code.
    pub baseline: bool,

    /// The state map for this code version.
    pub msm: ModuleStateMap,

    /// The base address of the code.
    pub base: usize,

    /// The name of the backend that generated this code.
    pub backend: &'static str,

    /// The runnable module itself.
    pub runnable_module: Arc<Box<dyn RunnableModule>>,
}

impl ModuleStateMap {
    /// Looks up the machine state at the given instruction pointer, using the
    /// provided offset table of the enclosing function.
    pub fn lookup_ip<F: FnOnce(&FunctionStateMap) -> &BTreeMap<usize, OffsetInfo>>(
        &self,
        ip: usize,
        base: usize,
        offset_table_provider: F,
    ) -> Option<(&FunctionStateMap, MachineState)> {
        if ip < base || ip - base >= self.total_size {
            None
        } else {
            // Find the function whose start offset is nearest at or below `ip`.
            let (_, fsm) = self
                .local_functions
                .range((Unbounded, Included(&(ip - base))))
                .last()
                .unwrap();

            // Find the nearest recorded offset at or below `ip` and rebuild
            // the machine state from its diff.
            match offset_table_provider(fsm)
                .range((Unbounded, Included(&(ip - base))))
                .last()
            {
                Some((_, x)) => {
                    if ip - base >= x.end_offset {
                        None
                    } else if x.diff_id < fsm.diffs.len() {
                        Some((fsm, fsm.diffs[x.diff_id].build_state(fsm)))
                    } else {
                        None
                    }
                }
                None => None,
            }
        }
    }

    /// Looks up the machine state at a call site.
    pub fn lookup_call_ip(
        &self,
        ip: usize,
        base: usize,
    ) -> Option<(&FunctionStateMap, MachineState)> {
        self.lookup_ip(ip, base, |fsm| &fsm.call_offsets)
    }

    /// Looks up the machine state at a trappable instruction.
    pub fn lookup_trappable_ip(
        &self,
        ip: usize,
        base: usize,
    ) -> Option<(&FunctionStateMap, MachineState)> {
        self.lookup_ip(ip, base, |fsm| &fsm.trappable_offsets)
    }

    /// Looks up the machine state at a loop point.
    pub fn lookup_loop_ip(
        &self,
        ip: usize,
        base: usize,
    ) -> Option<(&FunctionStateMap, MachineState)> {
        self.lookup_ip(ip, base, |fsm| &fsm.loop_offsets)
    }
}

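// A small sketch (all values hypothetical) of the lookup contract: an
// instruction pointer resolves to the enclosing function's state map and a
// rebuilt `MachineState` while it falls inside `[offset, end_offset)` of a
// recorded entry, and to `None` once it is at or past `end_offset`.
#[cfg(test)]
mod lookup_ip_tests {
    use super::*;

    #[test]
    fn lookup_call_ip_respects_end_offset() {
        let initial = MachineState {
            stack_values: vec![],
            register_values: vec![],
            prev_frame: BTreeMap::new(),
            wasm_stack: vec![],
            wasm_stack_private_depth: 0,
            wasm_inst_offset: 0,
        };
        let mut fsm = FunctionStateMap::new(initial, 0, 0, vec![]);
        fsm.diffs.push(MachineStateDiff::default());
        fsm.call_offsets.insert(
            0x10,
            OffsetInfo {
                end_offset: 0x20,
                diff_id: 0,
                activate_offset: 0x10,
            },
        );

        let mut local_functions = BTreeMap::new();
        local_functions.insert(0, fsm);
        let msm = ModuleStateMap {
            local_functions,
            total_size: 0x100,
        };

        // With a code base of 0x1000, an ip inside [0x10, 0x20) resolves...
        assert!(msm.lookup_call_ip(0x1015, 0x1000).is_some());
        // ...but one at or past `end_offset` does not.
        assert!(msm.lookup_call_ip(0x1025, 0x1000).is_none());
    }
}
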
impl FunctionStateMap {
    /// Creates a new `FunctionStateMap` with the given initial state, local
    /// function index, shadow size, and locals.
    pub fn new(
        initial: MachineState,
        local_function_id: usize,
        shadow_size: usize,
        locals: Vec<WasmAbstractValue>,
    ) -> FunctionStateMap {
        FunctionStateMap {
            initial,
            local_function_id,
            shadow_size,
            locals,
            diffs: vec![],
            wasm_function_header_target_offset: None,
            wasm_offset_to_target_offset: BTreeMap::new(),
            loop_offsets: BTreeMap::new(),
            call_offsets: BTreeMap::new(),
            trappable_offsets: BTreeMap::new(),
        }
    }
}

impl MachineState {
    /// Computes the diff that transforms `old` into `self`.
    pub fn diff(&self, old: &MachineState) -> MachineStateDiff {
        // First index at which the two machine stacks diverge.
        let first_diff_stack_depth: usize = self
            .stack_values
            .iter()
            .zip(old.stack_values.iter())
            .enumerate()
            .find(|&(_, (a, b))| a != b)
            .map(|x| x.0)
            .unwrap_or(old.stack_values.len().min(self.stack_values.len()));
        assert_eq!(self.register_values.len(), old.register_values.len());
        let reg_diff: Vec<_> = self
            .register_values
            .iter()
            .zip(old.register_values.iter())
            .enumerate()
            .filter(|&(_, (a, b))| a != b)
            .map(|(i, (a, _))| (RegisterIndex(i), a.clone()))
            .collect();
        // Entries changed or added in `self`, plus `None` markers for entries
        // removed relative to `old`.
        let prev_frame_diff: BTreeMap<usize, Option<MachineValue>> = self
            .prev_frame
            .iter()
            .filter(|(k, v)| {
                if let Some(ref old_v) = old.prev_frame.get(k) {
                    v != old_v
                } else {
                    true
                }
            })
            .map(|(&k, v)| (k, Some(v.clone())))
            .chain(
                old.prev_frame
                    .iter()
                    .filter(|(k, _)| self.prev_frame.get(k).is_none())
                    .map(|(&k, _)| (k, None)),
            )
            .collect();
        // First index at which the two WebAssembly stacks diverge.
        let first_diff_wasm_stack_depth: usize = self
            .wasm_stack
            .iter()
            .zip(old.wasm_stack.iter())
            .enumerate()
            .find(|&(_, (a, b))| a != b)
            .map(|x| x.0)
            .unwrap_or(old.wasm_stack.len().min(self.wasm_stack.len()));
        MachineStateDiff {
            last: None,
            stack_push: self.stack_values[first_diff_stack_depth..].to_vec(),
            stack_pop: old.stack_values.len() - first_diff_stack_depth,
            reg_diff,

            prev_frame_diff,

            wasm_stack_push: self.wasm_stack[first_diff_wasm_stack_depth..].to_vec(),
            wasm_stack_pop: old.wasm_stack.len() - first_diff_wasm_stack_depth,
            wasm_stack_private_depth: self.wasm_stack_private_depth,

            wasm_inst_offset: self.wasm_inst_offset,
        }
    }
}

impl MachineStateDiff {
    /// Rebuilds a full `MachineState` by replaying, from the function's
    /// initial state, the chain of diffs ending at `self`.
    pub fn build_state(&self, m: &FunctionStateMap) -> MachineState {
        // Collect the chain of diffs from `self` back to the root, then
        // replay it oldest-first.
        let mut chain: Vec<&MachineStateDiff> = vec![];
        chain.push(self);
        let mut current = self.last;
        while let Some(x) = current {
            let that = &m.diffs[x];
            current = that.last;
            chain.push(that);
        }
        chain.reverse();
        let mut state = m.initial.clone();
        for x in chain {
            for _ in 0..x.stack_pop {
                state.stack_values.pop().unwrap();
            }
            for v in &x.stack_push {
                state.stack_values.push(v.clone());
            }
            for &(index, ref v) in &x.reg_diff {
                state.register_values[index.0] = v.clone();
            }
            for (index, ref v) in &x.prev_frame_diff {
                if let Some(ref x) = v {
                    state.prev_frame.insert(*index, x.clone());
                } else {
                    state.prev_frame.remove(index).unwrap();
                }
            }
            for _ in 0..x.wasm_stack_pop {
                state.wasm_stack.pop().unwrap();
            }
            for v in &x.wasm_stack_push {
                state.wasm_stack.push(*v);
            }
        }
        state.wasm_stack_private_depth = self.wasm_stack_private_depth;
        state.wasm_inst_offset = self.wasm_inst_offset;
        state
    }
}

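// A sketch of the intended relationship between `MachineState::diff` and
// `MachineStateDiff::build_state`: a diff taken against a function's initial
// state, replayed by `build_state`, reproduces the newer state. The empty
// initial state here is constructed purely for illustration.
#[cfg(test)]
mod diff_roundtrip_tests {
    use super::*;

    #[test]
    fn diff_then_build_state_roundtrips() {
        let initial = MachineState {
            stack_values: vec![],
            register_values: vec![],
            prev_frame: BTreeMap::new(),
            wasm_stack: vec![],
            wasm_stack_private_depth: 0,
            wasm_inst_offset: 0,
        };
        let mut newer = initial.clone();
        newer.stack_values.push(MachineValue::ExplicitShadow);
        newer.wasm_stack.push(WasmAbstractValue::Const(42));

        let diff = newer.diff(&initial);
        assert_eq!(diff.stack_pop, 0);
        assert_eq!(diff.stack_push, vec![MachineValue::ExplicitShadow]);

        // With `last: None`, the chain replayed by `build_state` is just this
        // one diff on top of `initial`.
        let fsm = FunctionStateMap::new(initial, 0, 0, vec![]);
        let rebuilt = diff.build_state(&fsm);
        assert_eq!(rebuilt.stack_values, newer.stack_values);
        assert_eq!(rebuilt.wasm_stack, newer.wasm_stack);
    }
}
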
impl ExecutionStateImage {
    /// Prints a backtrace to stderr if the `WASMER_BACKTRACE` environment
    /// variable is set to `1`; otherwise prints a hint about enabling it.
    pub fn print_backtrace_if_needed(&self) {
        use std::env;

        if let Ok(x) = env::var("WASMER_BACKTRACE") {
            if x == "1" {
                eprintln!("{}", self.output());
                return;
            }
        }

        eprintln!("Run with `WASMER_BACKTRACE=1` environment variable to display a backtrace.");
    }

    /// Renders the execution state as a human-readable backtrace.
    pub fn output(&self) -> String {
        fn join_strings(x: impl Iterator<Item = String>, sep: &str) -> String {
            let mut ret = String::new();
            let mut first = true;

            for s in x {
                if first {
                    first = false;
                } else {
                    ret += sep;
                }
                ret += &s;
            }

            ret
        }

        fn format_optional_u64_sequence(x: &[Option<u64>]) -> String {
            if x.is_empty() {
                "(empty)".into()
            } else {
                join_strings(
                    x.iter().enumerate().map(|(i, x)| {
                        format!(
                            "[{}] = {}",
                            i,
                            x.map(|x| format!("{}", x))
                                .unwrap_or_else(|| "?".to_string())
                        )
                    }),
                    ", ",
                )
            }
        }

        let mut ret = String::new();

        if self.frames.is_empty() {
            ret += "Unknown fault address, cannot read stack.\n";
        } else {
            ret += "Backtrace:\n";
            for (i, f) in self.frames.iter().enumerate() {
                ret += &format!("* Frame {} @ Local function {}\n", i, f.local_function_id);
                ret += &format!("  Offset: {}\n", f.wasm_inst_offset);
                ret += &format!("  Locals: {}\n", format_optional_u64_sequence(&f.locals));
                ret += &format!("  Stack: {}\n\n", format_optional_u64_sequence(&f.stack));
            }
        }

        ret
    }
}

impl InstanceImage {
    /// Deserializes an `InstanceImage` from bytes, returning `None` on
    /// malformed input.
    pub fn from_bytes(input: &[u8]) -> Option<InstanceImage> {
        use bincode::deserialize;
        deserialize(input).ok()
    }

    /// Serializes this image to bytes.
    pub fn to_bytes(&self) -> Vec<u8> {
        use bincode::serialize;
        serialize(self).unwrap()
    }
}

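// An illustrative round-trip for `InstanceImage`'s bincode encoding; the image
// contents below are placeholders.
#[cfg(test)]
mod instance_image_tests {
    use super::*;

    #[test]
    fn instance_image_roundtrips_through_bincode() {
        let image = InstanceImage {
            memory: None,
            globals: vec![1u128, 2u128],
            execution_state: ExecutionStateImage { frames: vec![] },
        };
        let bytes = image.to_bytes();
        let decoded = InstanceImage::from_bytes(&bytes).expect("image bytes should deserialize");
        assert_eq!(decoded.globals, image.globals);
        assert!(decoded.memory.is_none());
        // Truncated input is rejected rather than panicking.
        assert!(InstanceImage::from_bytes(&bytes[..bytes.len() / 2]).is_none());
    }
}
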
#[cfg(unix)]
pub mod x64_decl {
    use super::*;
    use crate::types::Type;

    /// A general-purpose x86-64 register.
    #[repr(u8)]
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
    pub enum GPR {
        RAX,
        RCX,
        RDX,
        RBX,
        RSP,
        RBP,
        RSI,
        RDI,
        R8,
        R9,
        R10,
        R11,
        R12,
        R13,
        R14,
        R15,
    }

    /// An x86-64 XMM register.
    #[repr(u8)]
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
    pub enum XMM {
        XMM0,
        XMM1,
        XMM2,
        XMM3,
        XMM4,
        XMM5,
        XMM6,
        XMM7,
        XMM8,
        XMM9,
        XMM10,
        XMM11,
        XMM12,
        XMM13,
        XMM14,
        XMM15,
    }

    /// An x86-64 register, either general-purpose or XMM.
    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    pub enum X64Register {
        GPR(GPR),
        XMM(XMM),
    }

    impl X64Register {
        /// Converts this register to a flat index: GPRs map to 0..=15 and XMM
        /// registers to 16..=31.
        pub fn to_index(&self) -> RegisterIndex {
            match *self {
                X64Register::GPR(x) => RegisterIndex(x as usize),
                X64Register::XMM(x) => RegisterIndex(x as usize + 16),
            }
        }

        /// Converts a DWARF register number to an `X64Register`, following the
        /// System V AMD64 numbering (note that RDX/RCX and RSI/RDI are not in
        /// machine-encoding order there).
        pub fn from_dwarf_regnum(x: u16) -> Option<X64Register> {
            Some(match x {
                0 => X64Register::GPR(GPR::RAX),
                1 => X64Register::GPR(GPR::RDX),
                2 => X64Register::GPR(GPR::RCX),
                3 => X64Register::GPR(GPR::RBX),
                4 => X64Register::GPR(GPR::RSI),
                5 => X64Register::GPR(GPR::RDI),
                6 => X64Register::GPR(GPR::RBP),
                7 => X64Register::GPR(GPR::RSP),
                8 => X64Register::GPR(GPR::R8),
                9 => X64Register::GPR(GPR::R9),
                10 => X64Register::GPR(GPR::R10),
                11 => X64Register::GPR(GPR::R11),
                12 => X64Register::GPR(GPR::R12),
                13 => X64Register::GPR(GPR::R13),
                14 => X64Register::GPR(GPR::R14),
                15 => X64Register::GPR(GPR::R15),

                17 => X64Register::XMM(XMM::XMM0),
                18 => X64Register::XMM(XMM::XMM1),
                19 => X64Register::XMM(XMM::XMM2),
                20 => X64Register::XMM(XMM::XMM3),
                21 => X64Register::XMM(XMM::XMM4),
                22 => X64Register::XMM(XMM::XMM5),
                23 => X64Register::XMM(XMM::XMM6),
                24 => X64Register::XMM(XMM::XMM7),
                _ => return None,
            })
        }

        /// Returns the instruction bytes that store this register to an
        /// rsp-relative slot, up to but not including the 32-bit displacement
        /// (`mov %gpr, disp32(%rsp)` for GPRs, `movq %xmm, disp32(%rsp)` for
        /// XMM registers), or `None` for registers without an entry.
        pub fn prefix_mov_to_stack(&self) -> Option<&'static [u8]> {
            Some(match *self {
                X64Register::GPR(gpr) => match gpr {
                    GPR::RDI => &[0x48, 0x89, 0xbc, 0x24],
                    GPR::RSI => &[0x48, 0x89, 0xb4, 0x24],
                    GPR::RDX => &[0x48, 0x89, 0x94, 0x24],
                    GPR::RCX => &[0x48, 0x89, 0x8c, 0x24],
                    GPR::R8 => &[0x4c, 0x89, 0x84, 0x24],
                    GPR::R9 => &[0x4c, 0x89, 0x8c, 0x24],
                    _ => return None,
                },
                X64Register::XMM(xmm) => match xmm {
                    XMM::XMM0 => &[0x66, 0x0f, 0xd6, 0x84, 0x24],
                    XMM::XMM1 => &[0x66, 0x0f, 0xd6, 0x8c, 0x24],
                    XMM::XMM2 => &[0x66, 0x0f, 0xd6, 0x94, 0x24],
                    XMM::XMM3 => &[0x66, 0x0f, 0xd6, 0x9c, 0x24],
                    XMM::XMM4 => &[0x66, 0x0f, 0xd6, 0xa4, 0x24],
                    XMM::XMM5 => &[0x66, 0x0f, 0xd6, 0xac, 0x24],
                    XMM::XMM6 => &[0x66, 0x0f, 0xd6, 0xb4, 0x24],
                    XMM::XMM7 => &[0x66, 0x0f, 0xd6, 0xbc, 0x24],
                    _ => return None,
                },
            })
        }
    }

    /// Allocates argument registers in System V AMD64 calling-convention
    /// order.
    #[derive(Default)]
    pub struct ArgumentRegisterAllocator {
        n_gprs: usize,
        n_xmms: usize,
    }

    impl ArgumentRegisterAllocator {
        /// Allocates a register for an argument of type `ty`, or returns
        /// `None` once the register class is exhausted (the argument then
        /// goes on the stack).
        pub fn next(&mut self, ty: Type) -> Option<X64Register> {
            static GPR_SEQ: &'static [GPR] =
                &[GPR::RDI, GPR::RSI, GPR::RDX, GPR::RCX, GPR::R8, GPR::R9];
            static XMM_SEQ: &'static [XMM] = &[
                XMM::XMM0,
                XMM::XMM1,
                XMM::XMM2,
                XMM::XMM3,
                XMM::XMM4,
                XMM::XMM5,
                XMM::XMM6,
                XMM::XMM7,
            ];
            match ty {
                Type::I32 | Type::I64 => {
                    if self.n_gprs < GPR_SEQ.len() {
                        let gpr = GPR_SEQ[self.n_gprs];
                        self.n_gprs += 1;
                        Some(X64Register::GPR(gpr))
                    } else {
                        None
                    }
                }
                Type::F32 | Type::F64 => {
                    if self.n_xmms < XMM_SEQ.len() {
                        let xmm = XMM_SEQ[self.n_xmms];
                        self.n_xmms += 1;
                        Some(X64Register::XMM(xmm))
                    } else {
                        None
                    }
                }
                _ => todo!(
                    "ArgumentRegisterAllocator::next: Unsupported type: {:?}",
                    ty
                ),
            }
        }
    }
}
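
// A sketch of the System V AMD64 argument-passing order implemented above:
// integer and floating-point arguments draw from independent register
// sequences (RDI, RSI, RDX, RCX, R8, R9 and XMM0..XMM7 respectively).
#[cfg(all(unix, test))]
mod arg_alloc_tests {
    use super::x64_decl::*;
    use crate::types::Type;

    #[test]
    fn gprs_and_xmms_are_allocated_independently() {
        let mut alloc = ArgumentRegisterAllocator::default();
        assert_eq!(alloc.next(Type::I64), Some(X64Register::GPR(GPR::RDI)));
        assert_eq!(alloc.next(Type::F64), Some(X64Register::XMM(XMM::XMM0)));
        assert_eq!(alloc.next(Type::I32), Some(X64Register::GPR(GPR::RSI)));
        assert_eq!(alloc.next(Type::F32), Some(X64Register::XMM(XMM::XMM1)));
    }
}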

#[cfg(unix)]
pub mod x64 {
    pub use super::x64_decl::*;
    use super::*;
    use crate::codegen::BreakpointMap;
    use crate::error::RuntimeError;
    use crate::fault::{
        catch_unsafe_unwind, get_boundary_register_preservation, run_on_alternative_stack,
    };
    use crate::structures::TypedIndex;
    use crate::types::LocalGlobalIndex;
    use crate::vm::Ctx;

    /// Resolves a `MachineValue::VmctxDeref` chain: starting from the address
    /// of the `vmctx` pointer, each step loads a pointer and offsets it by
    /// the next element of `seq`.
    #[allow(clippy::cast_ptr_alignment)]
    unsafe fn compute_vmctx_deref(vmctx: *const Ctx, seq: &[usize]) -> u64 {
        let mut ptr = &vmctx as *const *const Ctx as *const u8;
        for x in seq {
            debug_assert!(ptr.align_offset(std::mem::align_of::<*const u8>()) == 0);
            ptr = (*(ptr as *const *const u8)).add(*x);
        }
        ptr as usize as u64
    }

    /// Builds an empty `MachineState` for x86-64: 16 GPRs plus the 8 tracked
    /// XMM registers, all initially undefined.
    pub fn new_machine_state() -> MachineState {
        MachineState {
            stack_values: vec![],
            register_values: vec![MachineValue::Undefined; 16 + 8],
            prev_frame: BTreeMap::new(),
            wasm_stack: vec![],
            wasm_stack_private_depth: 0,
            wasm_inst_offset: ::std::usize::MAX,
        }
    }

    /// Rebuilds the program stack from an `InstanceImage` and resumes
    /// execution on it, restoring memory, globals, and registers along the
    /// way.
    #[warn(unused_variables)]
    pub unsafe fn invoke_call_return_on_stack(
        msm: &ModuleStateMap,
        code_base: usize,
        image: InstanceImage,
        vmctx: &mut Ctx,
        breakpoints: Option<BreakpointMap>,
    ) -> Result<u64, RuntimeError> {
        // An 8 MiB stack, written from the top down.
        let mut stack: Vec<u64> = vec![0; 1048576 * 8 / 8];
        let mut stack_offset: usize = stack.len();

        stack_offset -= 3; // Leave three zeroed slots at the top.

        // Offset of the most recently written frame record, used to chain
        // saved frame pointers.
        let mut last_stack_offset: u64 = 0;

        let mut known_registers: [Option<u64>; 32] = [None; 32];

        let local_functions_vec: Vec<&FunctionStateMap> =
            msm.local_functions.iter().map(|(_, v)| v).collect();

        // Rebuild frames, outermost first.
        for f in image.execution_state.frames.iter().rev() {
            let fsm = local_functions_vec[f.local_function_id];
            let suspend_offset = if f.wasm_inst_offset == ::std::usize::MAX {
                fsm.wasm_function_header_target_offset
            } else {
                fsm.wasm_offset_to_target_offset
                    .get(&f.wasm_inst_offset)
                    .copied()
            }
            .expect("instruction is not a critical point");

            let (activate_offset, diff_id) = match suspend_offset {
                SuspendOffset::Loop(x) => fsm.loop_offsets.get(&x),
                SuspendOffset::Call(x) => fsm.call_offsets.get(&x),
                SuspendOffset::Trappable(x) => fsm.trappable_offsets.get(&x),
            }
            .map(|x| (x.activate_offset, x.diff_id))
            .expect("offset cannot be found in table");

            let diff = &fsm.diffs[diff_id];
            let state = diff.build_state(fsm);

            // Push the saved frame pointer, chaining to the previous frame.
            stack_offset -= 1;
            stack[stack_offset] =
                stack.as_ptr().offset(last_stack_offset as isize) as usize as u64;
            last_stack_offset = stack_offset as _;

            let mut got_explicit_shadow = false;

            for v in state.stack_values.iter() {
                match *v {
                    MachineValue::Undefined => stack_offset -= 1,
                    MachineValue::Vmctx => {
                        stack_offset -= 1;
                        stack[stack_offset] = vmctx as *mut Ctx as usize as u64;
                    }
                    MachineValue::VmctxDeref(ref seq) => {
                        stack_offset -= 1;
                        stack[stack_offset] = compute_vmctx_deref(vmctx as *const Ctx, seq);
                    }
                    MachineValue::PreserveRegister(index) => {
                        stack_offset -= 1;
                        stack[stack_offset] = known_registers[index.0].unwrap_or(0);
                    }
                    MachineValue::CopyStackBPRelative(byte_offset) => {
                        assert!(byte_offset % 8 == 0);
                        let target_offset = (byte_offset / 8) as isize;
                        let v = stack[(last_stack_offset as isize + target_offset) as usize];
                        stack_offset -= 1;
                        stack[stack_offset] = v;
                    }
                    MachineValue::ExplicitShadow => {
                        assert!(fsm.shadow_size % 8 == 0);
                        stack_offset -= fsm.shadow_size / 8;
                        got_explicit_shadow = true;
                    }
                    MachineValue::WasmStack(x) => {
                        stack_offset -= 1;
                        match state.wasm_stack[x] {
                            WasmAbstractValue::Const(x) => {
                                stack[stack_offset] = x;
                            }
                            WasmAbstractValue::Runtime => {
                                stack[stack_offset] = f.stack[x].unwrap();
                            }
                        }
                    }
                    MachineValue::WasmLocal(x) => {
                        stack_offset -= 1;
                        match fsm.locals[x] {
                            WasmAbstractValue::Const(x) => {
                                stack[stack_offset] = x;
                            }
                            WasmAbstractValue::Runtime => {
                                stack[stack_offset] = f.locals[x].unwrap();
                            }
                        }
                    }
                    MachineValue::TwoHalves(ref inner) => {
                        stack_offset -= 1;
                        // Lower 32 bits.
                        match inner.0 {
                            MachineValue::WasmStack(x) => match state.wasm_stack[x] {
                                WasmAbstractValue::Const(x) => {
                                    assert!(x <= std::u32::MAX as u64);
                                    stack[stack_offset] |= x;
                                }
                                WasmAbstractValue::Runtime => {
                                    let v = f.stack[x].unwrap();
                                    assert!(v <= std::u32::MAX as u64);
                                    stack[stack_offset] |= v;
                                }
                            },
                            MachineValue::WasmLocal(x) => match fsm.locals[x] {
                                WasmAbstractValue::Const(x) => {
                                    assert!(x <= std::u32::MAX as u64);
                                    stack[stack_offset] |= x;
                                }
                                WasmAbstractValue::Runtime => {
                                    let v = f.locals[x].unwrap();
                                    assert!(v <= std::u32::MAX as u64);
                                    stack[stack_offset] |= v;
                                }
                            },
                            MachineValue::VmctxDeref(ref seq) => {
                                stack[stack_offset] |=
                                    compute_vmctx_deref(vmctx as *const Ctx, seq)
                                        & (std::u32::MAX as u64);
                            }
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.0"),
                        }
                        // Upper 32 bits.
                        match inner.1 {
                            MachineValue::WasmStack(x) => match state.wasm_stack[x] {
                                WasmAbstractValue::Const(x) => {
                                    assert!(x <= std::u32::MAX as u64);
                                    stack[stack_offset] |= x << 32;
                                }
                                WasmAbstractValue::Runtime => {
                                    let v = f.stack[x].unwrap();
                                    assert!(v <= std::u32::MAX as u64);
                                    stack[stack_offset] |= v << 32;
                                }
                            },
                            MachineValue::WasmLocal(x) => match fsm.locals[x] {
                                WasmAbstractValue::Const(x) => {
                                    assert!(x <= std::u32::MAX as u64);
                                    stack[stack_offset] |= x << 32;
                                }
                                WasmAbstractValue::Runtime => {
                                    let v = f.locals[x].unwrap();
                                    assert!(v <= std::u32::MAX as u64);
                                    stack[stack_offset] |= v << 32;
                                }
                            },
                            MachineValue::VmctxDeref(ref seq) => {
                                stack[stack_offset] |=
                                    (compute_vmctx_deref(vmctx as *const Ctx, seq)
                                        & (std::u32::MAX as u64))
                                        << 32;
                            }
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.1"),
                        }
                    }
                }
            }
            if !got_explicit_shadow {
                assert!(fsm.shadow_size % 8 == 0);
                stack_offset -= fsm.shadow_size / 8;
            }
            // Seed the known register values for the next (inner) frame.
            for (i, v) in state.register_values.iter().enumerate() {
                match *v {
                    MachineValue::Undefined => {}
                    MachineValue::Vmctx => {
                        known_registers[i] = Some(vmctx as *mut Ctx as usize as u64);
                    }
                    MachineValue::VmctxDeref(ref seq) => {
                        known_registers[i] = Some(compute_vmctx_deref(vmctx as *const Ctx, seq));
                    }
                    MachineValue::WasmStack(x) => match state.wasm_stack[x] {
                        WasmAbstractValue::Const(x) => {
                            known_registers[i] = Some(x);
                        }
                        WasmAbstractValue::Runtime => {
                            known_registers[i] = Some(f.stack[x].unwrap());
                        }
                    },
                    MachineValue::WasmLocal(x) => match fsm.locals[x] {
                        WasmAbstractValue::Const(x) => {
                            known_registers[i] = Some(x);
                        }
                        WasmAbstractValue::Runtime => {
                            known_registers[i] = Some(f.locals[x].unwrap());
                        }
                    },
                    _ => unreachable!(),
                }
            }

            // Push the address at which this frame resumes execution.
            stack_offset -= 1;
            stack[stack_offset] = (code_base + activate_offset) as u64;
        }

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R15).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R14).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R13).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R12).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R11).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R10).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R9).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R8).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RSI).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RDI).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RDX).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RCX).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RBX).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RAX).to_index().0].unwrap_or(0);

        // Link to the most recently written frame (saved frame pointer).
        stack_offset -= 1;
        stack[stack_offset] = stack.as_ptr().offset(last_stack_offset as isize) as usize as u64;

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM15).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM14).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM13).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM12).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM11).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM10).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM9).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM8).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM7).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM6).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM5).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM4).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM3).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM2).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM1).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM0).to_index().0].unwrap_or(0);

        // Restore linear memory, growing it first if the image is larger
        // than the current bound.
        if let Some(ref memory) = image.memory {
            assert!(vmctx.internal.memory_bound <= memory.len());

            if vmctx.internal.memory_bound < memory.len() {
                let grow: unsafe extern "C" fn(ctx: &mut Ctx, memory_index: usize, delta: usize) =
                    ::std::mem::transmute((*vmctx.internal.intrinsics).memory_grow);
                grow(
                    vmctx,
                    0,
                    (memory.len() - vmctx.internal.memory_bound) / 65536,
                );
                assert_eq!(vmctx.internal.memory_bound, memory.len());
            }

            std::slice::from_raw_parts_mut(vmctx.internal.memory_base, vmctx.internal.memory_bound)
                .copy_from_slice(memory);
        }

        // Restore globals from the image.
        let globals_len = (*vmctx.module).info.globals.len();
        for i in 0..globals_len {
            (*(*vmctx.local_backing).globals[LocalGlobalIndex::new(i)].vm_local_global()).data =
                image.globals[i];
        }

        drop(image);

        catch_unsafe_unwind(
            || {
                run_on_alternative_stack(
                    stack.as_mut_ptr().add(stack.len()),
                    stack.as_mut_ptr().add(stack_offset),
                )
            },
            breakpoints,
        )
    }

    /// Captures the current instance state (memory and globals) together
    /// with the given execution state.
    pub fn build_instance_image(
        vmctx: &mut Ctx,
        execution_state: ExecutionStateImage,
    ) -> InstanceImage {
        unsafe {
            let memory = if vmctx.internal.memory_base.is_null() {
                None
            } else {
                Some(
                    std::slice::from_raw_parts(
                        vmctx.internal.memory_base,
                        vmctx.internal.memory_bound,
                    )
                    .to_vec(),
                )
            };

            let globals_len = (*vmctx.module).info.globals.len();
            let globals: Vec<u128> = (0..globals_len)
                .map(|i| {
                    (*vmctx.local_backing).globals[LocalGlobalIndex::new(i)]
                        .get()
                        .to_u128()
                })
                .collect();

            InstanceImage {
                memory,
                globals,
                execution_state,
            }
        }
    }

    /// Reads a machine execution stack back into an `ExecutionStateImage`,
    /// walking frames until an unknown return address or `max_depth` is
    /// reached.
    #[warn(unused_variables)]
    pub unsafe fn read_stack<'a, I: Iterator<Item = &'a CodeVersion>, F: Fn() -> I + 'a>(
        versions: F,
        mut stack: *const u64,
        initially_known_registers: [Option<u64>; 32],
        mut initial_address: Option<u64>,
        max_depth: Option<usize>,
    ) -> ExecutionStateImage {
        let mut known_registers: [Option<u64>; 32] = initially_known_registers;
        let mut results: Vec<WasmFunctionStateDump> = vec![];
        let mut was_baseline = true;

        for depth in 0.. {
            if let Some(max_depth) = max_depth {
                if depth >= max_depth {
                    return ExecutionStateImage { frames: results };
                }
            }

            let ret_addr = initial_address.take().unwrap_or_else(|| {
                let x = *stack;
                stack = stack.offset(1);
                x
            });

            let mut fsm_state: Option<(&FunctionStateMap, MachineState)> = None;
            let mut is_baseline: Option<bool> = None;

            for version in versions() {
                match version
                    .msm
                    .lookup_call_ip(ret_addr as usize, version.base)
                    .or_else(|| {
                        version
                            .msm
                            .lookup_trappable_ip(ret_addr as usize, version.base)
                    })
                    .or_else(|| version.msm.lookup_loop_ip(ret_addr as usize, version.base))
                {
                    Some(x) => {
                        fsm_state = Some(x);
                        is_baseline = Some(version.baseline);
                        break;
                    }
                    None => {}
                };
            }

            let (fsm, state) = if let Some(x) = fsm_state {
                x
            } else {
                return ExecutionStateImage { frames: results };
            };

            {
                let is_baseline = is_baseline.unwrap();

                // When the walk crosses into baseline code, pick up the
                // callee-saved registers preserved at the boundary.
                if is_baseline && !was_baseline {
                    let callee_saved = &*get_boundary_register_preservation();
                    known_registers[X64Register::GPR(GPR::R15).to_index().0] =
                        Some(callee_saved.r15);
                    known_registers[X64Register::GPR(GPR::R14).to_index().0] =
                        Some(callee_saved.r14);
                    known_registers[X64Register::GPR(GPR::R13).to_index().0] =
                        Some(callee_saved.r13);
                    known_registers[X64Register::GPR(GPR::R12).to_index().0] =
                        Some(callee_saved.r12);
                    known_registers[X64Register::GPR(GPR::RBX).to_index().0] =
                        Some(callee_saved.rbx);
                }

                was_baseline = is_baseline;
            }

            // Start from the statically known constants; runtime values are
            // filled in from registers and stack slots below.
            let mut wasm_stack: Vec<Option<u64>> = state
                .wasm_stack
                .iter()
                .map(|x| match *x {
                    WasmAbstractValue::Const(x) => Some(x),
                    WasmAbstractValue::Runtime => None,
                })
                .collect();
            let mut wasm_locals: Vec<Option<u64>> = fsm
                .locals
                .iter()
                .map(|x| match *x {
                    WasmAbstractValue::Const(x) => Some(x),
                    WasmAbstractValue::Runtime => None,
                })
                .collect();

            for (i, v) in state.register_values.iter().enumerate() {
                match *v {
                    MachineValue::Undefined => {}
                    MachineValue::Vmctx => {}
                    MachineValue::VmctxDeref(_) => {}
                    MachineValue::WasmStack(idx) => {
                        if let Some(v) = known_registers[i] {
                            wasm_stack[idx] = Some(v);
                        } else {
                            eprintln!(
                                "BUG: Register {} for WebAssembly stack slot {} has unknown value.",
                                i, idx
                            );
                        }
                    }
                    MachineValue::WasmLocal(idx) => {
                        if let Some(v) = known_registers[i] {
                            wasm_locals[idx] = Some(v);
                        }
                    }
                    _ => unreachable!(),
                }
            }

            let found_shadow = state
                .stack_values
                .iter()
                .any(|v| *v == MachineValue::ExplicitShadow);
            if !found_shadow {
                stack = stack.add(fsm.shadow_size / 8);
            }

            for v in state.stack_values.iter().rev() {
                match *v {
                    MachineValue::ExplicitShadow => {
                        stack = stack.add(fsm.shadow_size / 8);
                    }
                    MachineValue::Undefined => {
                        stack = stack.offset(1);
                    }
                    MachineValue::Vmctx => {
                        stack = stack.offset(1);
                    }
                    MachineValue::VmctxDeref(_) => {
                        stack = stack.offset(1);
                    }
                    MachineValue::PreserveRegister(idx) => {
                        known_registers[idx.0] = Some(*stack);
                        stack = stack.offset(1);
                    }
                    MachineValue::CopyStackBPRelative(_) => {
                        stack = stack.offset(1);
                    }
                    MachineValue::WasmStack(idx) => {
                        wasm_stack[idx] = Some(*stack);
                        stack = stack.offset(1);
                    }
                    MachineValue::WasmLocal(idx) => {
                        wasm_locals[idx] = Some(*stack);
                        stack = stack.offset(1);
                    }
                    MachineValue::TwoHalves(ref inner) => {
                        let v = *stack;
                        stack = stack.offset(1);
                        match inner.0 {
                            MachineValue::WasmStack(idx) => {
                                wasm_stack[idx] = Some(v & 0xffffffffu64);
                            }
                            MachineValue::WasmLocal(idx) => {
                                wasm_locals[idx] = Some(v & 0xffffffffu64);
                            }
                            MachineValue::VmctxDeref(_) => {}
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.0 (read)"),
                        }
                        match inner.1 {
                            MachineValue::WasmStack(idx) => {
                                wasm_stack[idx] = Some(v >> 32);
                            }
                            MachineValue::WasmLocal(idx) => {
                                wasm_locals[idx] = Some(v >> 32);
                            }
                            MachineValue::VmctxDeref(_) => {}
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.1 (read)"),
                        }
                    }
                }
            }

            // Values spilled into the caller's frame.
            for (offset, v) in state.prev_frame.iter() {
                // +2 skips the saved frame pointer and the return address.
                let offset = (*offset + 2) as isize;
                match *v {
                    MachineValue::WasmStack(idx) => {
                        wasm_stack[idx] = Some(*stack.offset(offset));
                    }
                    MachineValue::WasmLocal(idx) => {
                        wasm_locals[idx] = Some(*stack.offset(offset));
                    }
                    _ => unreachable!("values in prev frame can only be stack/local"),
                }
            }
            stack = stack.offset(1); // Skip the saved frame pointer.

            // Hide the private portion of the WebAssembly stack.
            wasm_stack.truncate(
                wasm_stack
                    .len()
                    .checked_sub(state.wasm_stack_private_depth)
                    .unwrap(),
            );

            let wfs = WasmFunctionStateDump {
                local_function_id: fsm.local_function_id,
                wasm_inst_offset: state.wasm_inst_offset,
                stack: wasm_stack,
                locals: wasm_locals,
            };
            results.push(wfs);
        }

        unreachable!();
    }
}