1use crate::backend::RunnableModule;
6use borsh::{BorshDeserialize, BorshSerialize};
7use std::collections::BTreeMap;
8use std::ops::Bound::{Included, Unbounded};
9use std::sync::Arc;
10
/// Flat index of a machine register in the register file tracked by the
/// state maps (on x86-64, 0..16 are GPRs and 16..24 are XMM registers —
/// see `X64Register::to_index`).
#[derive(
    Copy,
    Clone,
    Debug,
    Eq,
    PartialEq,
    Hash,
    Serialize,
    Deserialize,
    BorshSerialize,
    BorshDeserialize,
)]
pub struct RegisterIndex(pub usize);
25
/// A WebAssembly operand-stack or local value as known at compile time.
#[derive(
    Copy,
    Clone,
    Debug,
    Eq,
    PartialEq,
    Hash,
    Serialize,
    Deserialize,
    BorshSerialize,
    BorshDeserialize,
)]
pub enum WasmAbstractValue {
    /// The value is only known at runtime.
    Runtime,
    /// The value is a compile-time constant.
    Const(u64),
}
45
/// A snapshot of the machine state at one point in a compiled function.
#[derive(Clone, Debug, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub struct MachineState {
    /// Values held by native stack slots in this frame.
    pub stack_values: Vec<MachineValue>,
    /// Value held by each register, indexed by `RegisterIndex`.
    pub register_values: Vec<MachineValue>,
    /// Values this function stored into its caller's frame, keyed by slot offset.
    pub prev_frame: BTreeMap<usize, MachineValue>,
    /// The abstract WebAssembly operand stack.
    pub wasm_stack: Vec<WasmAbstractValue>,
    /// Number of trailing `wasm_stack` entries that are private to the
    /// backend (truncated away when dumping frames — see `read_stack`).
    pub wasm_stack_private_depth: usize,
    /// Offset of the current wasm instruction within the function body.
    pub wasm_inst_offset: usize,
}
62
/// A delta between two `MachineState`s. Diffs are chained through `last`
/// so a full state can be rebuilt by replaying the chain on top of the
/// function's initial state (see `MachineStateDiff::build_state`).
#[derive(Clone, Debug, Default, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub struct MachineStateDiff {
    /// Index into `FunctionStateMap::diffs` of the previous diff in the
    /// chain; `None` means this diff applies directly to the initial state.
    pub last: Option<usize>,
    /// Values pushed onto the native stack (after the pops below).
    pub stack_push: Vec<MachineValue>,
    /// Number of native stack entries popped first.
    pub stack_pop: usize,

    /// Registers whose contents changed, with their new values.
    pub reg_diff: Vec<(RegisterIndex, MachineValue)>,

    /// Previous-frame slots that changed; `None` means the slot was removed.
    pub prev_frame_diff: BTreeMap<usize, Option<MachineValue>>,
    /// Values pushed onto the wasm stack (after the pops below).
    pub wasm_stack_push: Vec<WasmAbstractValue>,
    /// Number of wasm stack entries popped first.
    pub wasm_stack_pop: usize,
    /// New absolute value for `MachineState::wasm_stack_private_depth`.
    pub wasm_stack_private_depth: usize,
    /// New absolute value for `MachineState::wasm_inst_offset`.
    pub wasm_inst_offset: usize,
}
88
/// What a particular machine location (register or stack slot) holds.
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub enum MachineValue {
    /// Contents are unknown or irrelevant.
    Undefined,
    /// The `vmctx` pointer itself.
    Vmctx,
    /// A pointer reached by repeatedly dereferencing and offsetting from
    /// `vmctx` (see `compute_vmctx_deref` for the walk).
    VmctxDeref(Vec<usize>),
    /// A callee-saved register spilled to this location.
    PreserveRegister(RegisterIndex),
    /// Copy of another stack slot, addressed relative to the frame base
    /// (byte offset; asserted to be 8-aligned when materialized).
    CopyStackBPRelative(i32),
    /// Shadow space explicitly reserved in the frame.
    ExplicitShadow,
    /// The wasm operand-stack entry with this index.
    WasmStack(usize),
    /// The wasm local with this index.
    WasmLocal(usize),
    /// Two 32-bit values packed into one 64-bit slot (low half, high half).
    TwoHalves(Box<(MachineValue, MachineValue)>),
}
111
/// Manual Borsh encoding: a one-byte variant tag followed by the payload.
/// `WasmStack`/`WasmLocal` indices are widened to `u64` before encoding so
/// those payloads do not depend on the platform's pointer width.
impl BorshSerialize for MachineValue {
    fn serialize<W: std::io::Write>(&self, writer: &mut W) -> std::io::Result<()> {
        match self {
            // Unit variants: tag byte only.
            MachineValue::Undefined => writer.write_all(&[0u8])?,
            MachineValue::Vmctx => writer.write_all(&[1u8])?,
            MachineValue::VmctxDeref(v) => {
                writer.write_all(&[2u8])?;
                // NOTE(review): the Vec<usize> payload uses borsh's own
                // usize encoding, unlike the explicitly widened indices
                // below — confirm this round-trips on all target widths.
                BorshSerialize::serialize(&v, writer)?;
            }
            MachineValue::PreserveRegister(r) => {
                writer.write_all(&[3u8])?;
                BorshSerialize::serialize(&r, writer)?;
            }
            MachineValue::CopyStackBPRelative(i) => {
                writer.write_all(&[4u8])?;
                BorshSerialize::serialize(&i, writer)?;
            }
            MachineValue::ExplicitShadow => writer.write_all(&[5u8])?,
            MachineValue::WasmStack(u) => {
                writer.write_all(&[6u8])?;
                // Widen to u64 for a platform-independent layout.
                BorshSerialize::serialize(&(*u as u64), writer)?;
            }
            MachineValue::WasmLocal(u) => {
                writer.write_all(&[7u8])?;
                BorshSerialize::serialize(&(*u as u64), writer)?;
            }
            MachineValue::TwoHalves(b) => {
                writer.write_all(&[8u8])?;
                // Recursively encodes the (low, high) pair.
                BorshSerialize::serialize(&b, writer)?;
            }
        }
        Ok(())
    }
}
146
/// Manual Borsh decoding mirroring the tag + payload layout produced by the
/// `BorshSerialize` impl for `MachineValue`.
impl BorshDeserialize for MachineValue {
    fn deserialize_reader<R: std::io::prelude::Read>(reader: &mut R) -> std::io::Result<Self> {
        // One-byte variant tag, matching the serializer's numbering.
        let variant: u8 = BorshDeserialize::deserialize_reader(reader)?;
        Ok(match variant {
            0 => MachineValue::Undefined,
            1 => MachineValue::Vmctx,
            2 => {
                let v: Vec<usize> = BorshDeserialize::deserialize_reader(reader)?;
                MachineValue::VmctxDeref(v)
            }
            3 => {
                let r: RegisterIndex = BorshDeserialize::deserialize_reader(reader)?;
                MachineValue::PreserveRegister(r)
            }
            4 => {
                let i: i32 = BorshDeserialize::deserialize_reader(reader)?;
                MachineValue::CopyStackBPRelative(i)
            }
            5 => MachineValue::ExplicitShadow,
            6 => {
                // NOTE(review): the serializer wrote these indices as u64;
                // this relies on borsh decoding usize from that same wire
                // format — confirm on non-64-bit targets.
                let u: usize = BorshDeserialize::deserialize_reader(reader)?;
                MachineValue::WasmStack(u)
            }
            7 => {
                let u: usize = BorshDeserialize::deserialize_reader(reader)?;
                MachineValue::WasmLocal(u)
            }
            8 => {
                let b: Box<(MachineValue, MachineValue)> =
                    BorshDeserialize::deserialize_reader(reader)?;
                MachineValue::TwoHalves(b)
            }
            // Any other tag is corrupt or from an incompatible version.
            _ => {
                return Err(std::io::Error::new(
                    std::io::ErrorKind::InvalidInput,
                    "Unexpected variant",
                ))
            }
        })
    }
}
188
/// Per-function metadata mapping native code offsets back to machine state.
#[derive(Clone, Debug, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub struct FunctionStateMap {
    /// Machine state at function entry.
    pub initial: MachineState,
    /// Index of this function among the module's local functions.
    pub local_function_id: usize,
    /// Abstract value of each wasm local.
    pub locals: Vec<WasmAbstractValue>,
    /// Size in bytes of the frame's shadow space (asserted to be a
    /// multiple of 8 where it is consumed).
    pub shadow_size: usize,
    /// All state diffs recorded for this function; referenced by
    /// `OffsetInfo::diff_id` and chained via `MachineStateDiff::last`.
    pub diffs: Vec<MachineStateDiff>,
    /// Suspend target used when a frame is suspended at the function header.
    pub wasm_function_header_target_offset: Option<SuspendOffset>,
    /// Maps wasm instruction offsets to native suspend offsets.
    pub wasm_offset_to_target_offset: BTreeMap<usize, SuspendOffset>,
    /// Offset tables keyed by native code offset (see `ModuleStateMap::lookup_ip`).
    pub loop_offsets: BTreeMap<usize, OffsetInfo>,
    pub call_offsets: BTreeMap<usize, OffsetInfo>,
    pub trappable_offsets: BTreeMap<usize, OffsetInfo>,
}
213
/// A native code offset of a suspend point, tagged by which offset table
/// of the owning `FunctionStateMap` it lives in.
#[derive(Clone, Copy, Debug, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub enum SuspendOffset {
    /// Entry in `FunctionStateMap::loop_offsets`.
    Loop(usize),
    /// Entry in `FunctionStateMap::call_offsets`.
    Call(usize),
    /// Entry in `FunctionStateMap::trappable_offsets`.
    Trappable(usize),
}
224
/// One entry in a `FunctionStateMap` offset table.
#[derive(Clone, Debug, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub struct OffsetInfo {
    /// End (exclusive) of the native code range this entry covers; lookups
    /// at or past this offset do not match (see `ModuleStateMap::lookup_ip`).
    pub end_offset: usize,
    /// Index into `FunctionStateMap::diffs` describing the state here.
    pub diff_id: usize,
    /// Native code offset where execution resumes when this frame is rebuilt.
    pub activate_offset: usize,
}
235
/// Module-wide state maps, keyed by code offset relative to the code base.
#[derive(Clone, Debug, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
pub struct ModuleStateMap {
    /// Local functions keyed by their starting code offset.
    pub local_functions: BTreeMap<usize, FunctionStateMap>,
    /// Total size in bytes of the module's compiled code; lookups outside
    /// `[0, total_size)` fail.
    pub total_size: usize,
}
244
/// Wasm-level view of a single suspended stack frame.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct WasmFunctionStateDump {
    /// Which local function this frame belongs to.
    pub local_function_id: usize,
    /// Offset of the suspended wasm instruction; `usize::MAX` marks the
    /// function header (see `new_machine_state`).
    pub wasm_inst_offset: usize,
    /// Operand-stack values; `None` where the value could not be recovered.
    pub stack: Vec<Option<u64>>,
    /// Local values; `None` where the value could not be recovered.
    pub locals: Vec<Option<u64>>,
}
257
/// A captured wasm call stack, innermost frame first (as produced by
/// `read_stack`; consumed in reverse by `invoke_call_return_on_stack`).
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ExecutionStateImage {
    pub frames: Vec<WasmFunctionStateDump>,
}
264
/// A serializable snapshot of a running instance: linear memory, global
/// values, and the suspended call stack.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InstanceImage {
    /// Full copy of linear memory, if the instance has one.
    pub memory: Option<Vec<u8>>,
    /// Raw value of every local global.
    pub globals: Vec<u128>,
    /// The suspended call stack to resume from.
    pub execution_state: ExecutionStateImage,
}
275
/// One loaded version of a module's compiled code, with enough metadata to
/// resolve instruction pointers back to machine state.
#[derive(Clone)]
pub struct CodeVersion {
    /// Whether this version was produced by the baseline tier.
    pub baseline: bool,

    /// State maps for every local function in this version.
    pub msm: ModuleStateMap,

    /// Base address of this version's code in memory.
    pub base: usize,

    /// Name of the backend that produced this version.
    pub backend: &'static str,

    /// Keeps the compiled code alive for as long as this version exists.
    pub runnable_module: Arc<Box<dyn RunnableModule>>,
}
294
295impl ModuleStateMap {
296 pub fn lookup_ip<F: FnOnce(&FunctionStateMap) -> &BTreeMap<usize, OffsetInfo>>(
298 &self,
299 ip: usize,
300 base: usize,
301 offset_table_provider: F,
302 ) -> Option<(&FunctionStateMap, MachineState)> {
303 if ip < base || ip - base >= self.total_size {
304 None
305 } else {
306 let (_, fsm) = self
307 .local_functions
308 .range((Unbounded, Included(&(ip - base))))
309 .last()
310 .unwrap();
311
312 match offset_table_provider(fsm)
313 .range((Unbounded, Included(&(ip - base))))
314 .last()
315 {
316 Some((_, x)) => {
317 if ip - base >= x.end_offset {
318 None
319 } else if x.diff_id < fsm.diffs.len() {
320 Some((fsm, fsm.diffs[x.diff_id].build_state(fsm)))
321 } else {
322 None
323 }
324 }
325 None => None,
326 }
327 }
328 }
329 pub fn lookup_call_ip(
331 &self,
332 ip: usize,
333 base: usize,
334 ) -> Option<(&FunctionStateMap, MachineState)> {
335 self.lookup_ip(ip, base, |fsm| &fsm.call_offsets)
336 }
337
338 pub fn lookup_trappable_ip(
340 &self,
341 ip: usize,
342 base: usize,
343 ) -> Option<(&FunctionStateMap, MachineState)> {
344 self.lookup_ip(ip, base, |fsm| &fsm.trappable_offsets)
345 }
346
347 pub fn lookup_loop_ip(
349 &self,
350 ip: usize,
351 base: usize,
352 ) -> Option<(&FunctionStateMap, MachineState)> {
353 self.lookup_ip(ip, base, |fsm| &fsm.loop_offsets)
354 }
355}
356
357impl FunctionStateMap {
358 pub fn new(
360 initial: MachineState,
361 local_function_id: usize,
362 shadow_size: usize,
363 locals: Vec<WasmAbstractValue>,
364 ) -> FunctionStateMap {
365 FunctionStateMap {
366 initial,
367 local_function_id,
368 shadow_size,
369 locals,
370 diffs: vec![],
371 wasm_function_header_target_offset: None,
372 wasm_offset_to_target_offset: BTreeMap::new(),
373 loop_offsets: BTreeMap::new(),
374 call_offsets: BTreeMap::new(),
375 trappable_offsets: BTreeMap::new(),
376 }
377 }
378}
379
380impl MachineState {
381 pub fn diff(&self, old: &MachineState) -> MachineStateDiff {
383 let first_diff_stack_depth: usize = self
384 .stack_values
385 .iter()
386 .zip(old.stack_values.iter())
387 .enumerate()
388 .find(|&(_, (a, b))| a != b)
389 .map(|x| x.0)
390 .unwrap_or(old.stack_values.len().min(self.stack_values.len()));
391 assert_eq!(self.register_values.len(), old.register_values.len());
392 let reg_diff: Vec<_> = self
393 .register_values
394 .iter()
395 .zip(old.register_values.iter())
396 .enumerate()
397 .filter(|&(_, (a, b))| a != b)
398 .map(|(i, (a, _))| (RegisterIndex(i), a.clone()))
399 .collect();
400 let prev_frame_diff: BTreeMap<usize, Option<MachineValue>> = self
401 .prev_frame
402 .iter()
403 .filter(|(k, v)| {
404 if let Some(ref old_v) = old.prev_frame.get(k) {
405 v != old_v
406 } else {
407 true
408 }
409 })
410 .map(|(&k, v)| (k, Some(v.clone())))
411 .chain(
412 old.prev_frame
413 .iter()
414 .filter(|(k, _)| self.prev_frame.get(k).is_none())
415 .map(|(&k, _)| (k, None)),
416 )
417 .collect();
418 let first_diff_wasm_stack_depth: usize = self
419 .wasm_stack
420 .iter()
421 .zip(old.wasm_stack.iter())
422 .enumerate()
423 .find(|&(_, (a, b))| a != b)
424 .map(|x| x.0)
425 .unwrap_or(old.wasm_stack.len().min(self.wasm_stack.len()));
426 MachineStateDiff {
427 last: None,
428 stack_push: self.stack_values[first_diff_stack_depth..].to_vec(),
429 stack_pop: old.stack_values.len() - first_diff_stack_depth,
430 reg_diff,
431
432 prev_frame_diff,
433
434 wasm_stack_push: self.wasm_stack[first_diff_wasm_stack_depth..].to_vec(),
435 wasm_stack_pop: old.wasm_stack.len() - first_diff_wasm_stack_depth,
436 wasm_stack_private_depth: self.wasm_stack_private_depth,
437
438 wasm_inst_offset: self.wasm_inst_offset,
439 }
440 }
441}
442
443impl MachineStateDiff {
444 pub fn build_state(&self, m: &FunctionStateMap) -> MachineState {
446 let mut chain: Vec<&MachineStateDiff> = vec![];
447 chain.push(self);
448 let mut current = self.last;
449 while let Some(x) = current {
450 let that = &m.diffs[x];
451 current = that.last;
452 chain.push(that);
453 }
454 chain.reverse();
455 let mut state = m.initial.clone();
456 for x in chain {
457 for _ in 0..x.stack_pop {
458 state.stack_values.pop().unwrap();
459 }
460 for v in &x.stack_push {
461 state.stack_values.push(v.clone());
462 }
463 for &(index, ref v) in &x.reg_diff {
464 state.register_values[index.0] = v.clone();
465 }
466 for (index, ref v) in &x.prev_frame_diff {
467 if let Some(ref x) = v {
468 state.prev_frame.insert(*index, x.clone());
469 } else {
470 state.prev_frame.remove(index).unwrap();
471 }
472 }
473 for _ in 0..x.wasm_stack_pop {
474 state.wasm_stack.pop().unwrap();
475 }
476 for v in &x.wasm_stack_push {
477 state.wasm_stack.push(*v);
478 }
479 }
480 state.wasm_stack_private_depth = self.wasm_stack_private_depth;
481 state.wasm_inst_offset = self.wasm_inst_offset;
482 state
483 }
484}
485
486impl ExecutionStateImage {
487 pub fn print_backtrace_if_needed(&self) {
489 use std::env;
490
491 if let Ok(x) = env::var("WASMER_BACKTRACE") {
492 if x == "1" {
493 eprintln!("{}", self.output());
494 return;
495 }
496 }
497
498 eprintln!("Run with `WASMER_BACKTRACE=1` environment variable to display a backtrace.");
499 }
500
501 pub fn output(&self) -> String {
503 fn join_strings(x: impl Iterator<Item = String>, sep: &str) -> String {
504 let mut ret = String::new();
505 let mut first = true;
506
507 for s in x {
508 if first {
509 first = false;
510 } else {
511 ret += sep;
512 }
513 ret += &s;
514 }
515
516 ret
517 }
518
519 fn format_optional_u64_sequence(x: &[Option<u64>]) -> String {
520 if x.is_empty() {
521 "(empty)".into()
522 } else {
523 join_strings(
524 x.iter().enumerate().map(|(i, x)| {
525 format!(
526 "[{}] = {}",
527 i,
528 x.map(|x| format!("{}", x))
529 .unwrap_or_else(|| "?".to_string())
530 )
531 }),
532 ", ",
533 )
534 }
535 }
536
537 let mut ret = String::new();
538
539 if self.frames.is_empty() {
540 ret += &"Unknown fault address, cannot read stack.";
541 ret += "\n";
542 } else {
543 ret += &"Backtrace:";
544 ret += "\n";
545 for (i, f) in self.frames.iter().enumerate() {
546 ret += &format!("* Frame {} @ Local function {}", i, f.local_function_id);
547 ret += "\n";
548 ret += &format!(" {} {}\n", "Offset:", format!("{}", f.wasm_inst_offset),);
549 ret += &format!(
550 " {} {}\n",
551 "Locals:",
552 format_optional_u64_sequence(&f.locals)
553 );
554 ret += &format!(
555 " {} {}\n\n",
556 "Stack:",
557 format_optional_u64_sequence(&f.stack)
558 );
559 }
560 }
561
562 ret
563 }
564}
565
566impl InstanceImage {
567 pub fn from_bytes(input: &[u8]) -> Option<InstanceImage> {
569 use bincode::deserialize;
570 match deserialize(input) {
571 Ok(x) => Some(x),
572 Err(_) => None,
573 }
574 }
575
576 pub fn to_bytes(&self) -> Vec<u8> {
578 use bincode::serialize;
579 serialize(self).unwrap()
580 }
581}
582
/// x86-64 register definitions and calling-convention helpers used by the
/// state-mapping machinery.
#[cfg(unix)]
pub mod x64_decl {
    use super::*;
    use crate::types::Type;

    /// General-purpose registers, discriminants 0..=15 in machine encoding
    /// order (used directly by `X64Register::to_index`).
    #[repr(u8)]
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
    pub enum GPR {
        RAX,
        RCX,
        RDX,
        RBX,
        RSP,
        RBP,
        RSI,
        RDI,
        R8,
        R9,
        R10,
        R11,
        R12,
        R13,
        R14,
        R15,
    }

    /// SSE registers XMM0..=XMM15, discriminants 0..=15.
    #[repr(u8)]
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
    pub enum XMM {
        XMM0,
        XMM1,
        XMM2,
        XMM3,
        XMM4,
        XMM5,
        XMM6,
        XMM7,
        XMM8,
        XMM9,
        XMM10,
        XMM11,
        XMM12,
        XMM13,
        XMM14,
        XMM15,
    }

    /// Any x86-64 register trackable by the state maps.
    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    pub enum X64Register {
        GPR(GPR),
        XMM(XMM),
    }

    impl X64Register {
        /// Flat index into the register file: GPRs occupy 0..16 and XMM
        /// registers 16..24 (XMM discriminant + 16).
        pub fn to_index(&self) -> RegisterIndex {
            match *self {
                X64Register::GPR(x) => RegisterIndex(x as usize),
                X64Register::XMM(x) => RegisterIndex(x as usize + 16),
            }
        }

        /// Map a DWARF register number (System V AMD64 numbering) to a
        /// register. Note DWARF's GPR order differs from machine encoding
        /// (1 = RDX, 2 = RCX, 4 = RSI, 5 = RDI). Number 16 (RIP) has no
        /// mapping here, hence the gap before 17..=24 = XMM0..=XMM7.
        /// Returns `None` for any unmapped number.
        pub fn from_dwarf_regnum(x: u16) -> Option<X64Register> {
            Some(match x {
                0 => X64Register::GPR(GPR::RAX),
                1 => X64Register::GPR(GPR::RDX),
                2 => X64Register::GPR(GPR::RCX),
                3 => X64Register::GPR(GPR::RBX),
                4 => X64Register::GPR(GPR::RSI),
                5 => X64Register::GPR(GPR::RDI),
                6 => X64Register::GPR(GPR::RBP),
                7 => X64Register::GPR(GPR::RSP),
                8 => X64Register::GPR(GPR::R8),
                9 => X64Register::GPR(GPR::R9),
                10 => X64Register::GPR(GPR::R10),
                11 => X64Register::GPR(GPR::R11),
                12 => X64Register::GPR(GPR::R12),
                13 => X64Register::GPR(GPR::R13),
                14 => X64Register::GPR(GPR::R14),
                15 => X64Register::GPR(GPR::R15),

                17 => X64Register::XMM(XMM::XMM0),
                18 => X64Register::XMM(XMM::XMM1),
                19 => X64Register::XMM(XMM::XMM2),
                20 => X64Register::XMM(XMM::XMM3),
                21 => X64Register::XMM(XMM::XMM4),
                22 => X64Register::XMM(XMM::XMM5),
                23 => X64Register::XMM(XMM::XMM6),
                24 => X64Register::XMM(XMM::XMM7),
                _ => return None,
            })
        }

        /// Machine-code prefix for storing this register to `[rsp + disp32]`;
        /// the caller appends the 32-bit displacement. GPRs use
        /// `mov [rsp+disp32], reg` (REX.W 89 /r) and XMMs use
        /// `movq [rsp+disp32], xmm` (66 0F D6 /r). Only the System V
        /// argument-passing registers are covered; others return `None`.
        pub fn prefix_mov_to_stack(&self) -> Option<&'static [u8]> {
            Some(match *self {
                X64Register::GPR(gpr) => match gpr {
                    GPR::RDI => &[0x48, 0x89, 0xbc, 0x24],
                    GPR::RSI => &[0x48, 0x89, 0xb4, 0x24],
                    GPR::RDX => &[0x48, 0x89, 0x94, 0x24],
                    GPR::RCX => &[0x48, 0x89, 0x8c, 0x24],
                    GPR::R8 => &[0x4c, 0x89, 0x84, 0x24],
                    GPR::R9 => &[0x4c, 0x89, 0x8c, 0x24],
                    _ => return None,
                },
                X64Register::XMM(xmm) => match xmm {
                    XMM::XMM0 => &[0x66, 0x0f, 0xd6, 0x84, 0x24],
                    XMM::XMM1 => &[0x66, 0x0f, 0xd6, 0x8c, 0x24],
                    XMM::XMM2 => &[0x66, 0x0f, 0xd6, 0x94, 0x24],
                    XMM::XMM3 => &[0x66, 0x0f, 0xd6, 0x9c, 0x24],
                    XMM::XMM4 => &[0x66, 0x0f, 0xd6, 0xa4, 0x24],
                    XMM::XMM5 => &[0x66, 0x0f, 0xd6, 0xac, 0x24],
                    XMM::XMM6 => &[0x66, 0x0f, 0xd6, 0xb4, 0x24],
                    XMM::XMM7 => &[0x66, 0x0f, 0xd6, 0xbc, 0x24],
                    _ => return None,
                },
            })
        }
    }

    /// Hands out System V AMD64 argument registers in ABI order.
    #[derive(Default)]
    pub struct ArgumentRegisterAllocator {
        n_gprs: usize, // integer-class arguments consumed so far
        n_xmms: usize, // SSE-class arguments consumed so far
    }

    impl ArgumentRegisterAllocator {
        /// Return the register for the next argument of type `ty`, or `None`
        /// once the register sequence for that class is exhausted (the
        /// argument would then be passed on the stack).
        pub fn next(&mut self, ty: Type) -> Option<X64Register> {
            static GPR_SEQ: &'static [GPR] =
                &[GPR::RDI, GPR::RSI, GPR::RDX, GPR::RCX, GPR::R8, GPR::R9];
            static XMM_SEQ: &'static [XMM] = &[
                XMM::XMM0,
                XMM::XMM1,
                XMM::XMM2,
                XMM::XMM3,
                XMM::XMM4,
                XMM::XMM5,
                XMM::XMM6,
                XMM::XMM7,
            ];
            match ty {
                Type::I32 | Type::I64 => {
                    if self.n_gprs < GPR_SEQ.len() {
                        let gpr = GPR_SEQ[self.n_gprs];
                        self.n_gprs += 1;
                        Some(X64Register::GPR(gpr))
                    } else {
                        None
                    }
                }
                Type::F32 | Type::F64 => {
                    if self.n_xmms < XMM_SEQ.len() {
                        let xmm = XMM_SEQ[self.n_xmms];
                        self.n_xmms += 1;
                        Some(X64Register::XMM(xmm))
                    } else {
                        None
                    }
                }
                _ => todo!(
                    "ArgumentRegisterAllocator::next: Unsupported type: {:?}",
                    ty
                ),
            }
        }
    }
}
794
795#[cfg(unix)]
797pub mod x64 {
798 pub use super::x64_decl::*;
800 use super::*;
801 use crate::codegen::BreakpointMap;
802 use crate::error::RuntimeError;
803 use crate::fault::{
804 catch_unsafe_unwind, get_boundary_register_preservation, run_on_alternative_stack,
805 };
806 use crate::structures::TypedIndex;
807 use crate::types::LocalGlobalIndex;
808 use crate::vm::Ctx;
809
    /// Walk a `MachineValue::VmctxDeref` path: each step dereferences the
    /// current pointer and adds the next byte offset from `seq`. Returns the
    /// final pointer as a `u64`.
    ///
    /// `ptr` initially points at the local `vmctx` variable itself, so the
    /// first step's dereference yields the vmctx pointer value.
    ///
    /// # Safety
    /// Every intermediate pointer must be valid and aligned, i.e. `seq` must
    /// describe a chain of live allocations reachable from `vmctx`.
    #[allow(clippy::cast_ptr_alignment)]
    unsafe fn compute_vmctx_deref(vmctx: *const Ctx, seq: &[usize]) -> u64 {
        let mut ptr = &vmctx as *const *const Ctx as *const u8;
        for x in seq {
            // Alignment sanity check before each pointer-sized read.
            debug_assert!(ptr.align_offset(std::mem::align_of::<*const u8>()) == 0);
            ptr = (*(ptr as *const *const u8)).add(*x);
        }
        ptr as usize as u64
    }
819
820 pub fn new_machine_state() -> MachineState {
822 MachineState {
823 stack_values: vec![],
824 register_values: vec![MachineValue::Undefined; 16 + 8],
825 prev_frame: BTreeMap::new(),
826 wasm_stack: vec![],
827 wasm_stack_private_depth: 0,
828 wasm_inst_offset: ::std::usize::MAX,
829 }
830 }
831
    /// Rebuild native stack frames from `image` and resume execution on an
    /// alternative stack, returning the resumed computation's result.
    ///
    /// Frames in `image.execution_state` are replayed outermost-first: for
    /// each frame the recorded diff is materialized into a `MachineState`
    /// and its stack slots are written into a freshly allocated stack
    /// buffer, ending with a saved-register area that
    /// `run_on_alternative_stack` consumes. Linear memory and globals from
    /// `image` are restored into `vmctx` before jumping.
    ///
    /// # Safety
    /// `msm`/`code_base` must describe the currently loaded code, `vmctx`
    /// must be a valid context matching `image`, and the state maps must be
    /// accurate — any mismatch leads to undefined behavior when execution
    /// resumes on the rebuilt stack.
    #[warn(unused_variables)]
    pub unsafe fn invoke_call_return_on_stack(
        msm: &ModuleStateMap,
        code_base: usize,
        image: InstanceImage,
        vmctx: &mut Ctx,
        breakpoints: Option<BreakpointMap>,
    ) -> Result<u64, RuntimeError> {
        // 8 MiB of u64 slots; the rebuilt stack grows downward from the end.
        let mut stack: Vec<u64> = vec![0; 1048576 * 8 / 8];
        let mut stack_offset: usize = stack.len();

        // NOTE(review): three slots reserved at the very top — purpose not
        // visible here; confirm against run_on_alternative_stack's layout.
        stack_offset -= 3;

        // Offset of the previously written frame-link slot (0 = none yet).
        let mut last_stack_offset: u64 = 0;

        // Register values recovered so far, indexed like `RegisterIndex`.
        let mut known_registers: [Option<u64>; 32] = [None; 32];

        // Local functions in key order so frames can index by id.
        let local_functions_vec: Vec<&FunctionStateMap> =
            msm.local_functions.iter().map(|(_, v)| v).collect();

        // Replay frames bottom-up (outermost first).
        for f in image.execution_state.frames.iter().rev() {
            let fsm = local_functions_vec[f.local_function_id];
            // usize::MAX marks "suspended at the function header".
            let suspend_offset = if f.wasm_inst_offset == ::std::usize::MAX {
                fsm.wasm_function_header_target_offset
            } else {
                fsm.wasm_offset_to_target_offset
                    .get(&f.wasm_inst_offset)
                    .copied()
            }
            .expect("instruction is not a critical point");

            // Resolve the suspend point in the matching offset table.
            let (activate_offset, diff_id) = match suspend_offset {
                SuspendOffset::Loop(x) => fsm.loop_offsets.get(&x),
                SuspendOffset::Call(x) => fsm.call_offsets.get(&x),
                SuspendOffset::Trappable(x) => fsm.trappable_offsets.get(&x),
            }
            .map(|x| (x.activate_offset, x.diff_id))
            .expect("offset cannot be found in table");

            let diff = &fsm.diffs[diff_id];
            let state = diff.build_state(fsm);

            // Frame link: store a pointer to the previous frame's link slot.
            stack_offset -= 1;
            stack[stack_offset] = stack.as_ptr().offset(last_stack_offset as isize) as usize as u64;
            last_stack_offset = stack_offset as _;

            let mut got_explicit_shadow = false;

            // Materialize each recorded stack slot, top of frame first.
            for v in state.stack_values.iter() {
                match *v {
                    MachineValue::Undefined => stack_offset -= 1,
                    MachineValue::Vmctx => {
                        stack_offset -= 1;
                        stack[stack_offset] = vmctx as *mut Ctx as usize as u64;
                    }
                    MachineValue::VmctxDeref(ref seq) => {
                        stack_offset -= 1;
                        stack[stack_offset] = compute_vmctx_deref(vmctx as *const Ctx, seq);
                    }
                    MachineValue::PreserveRegister(index) => {
                        stack_offset -= 1;
                        // Unknown preserved registers are written as 0.
                        stack[stack_offset] = known_registers[index.0].unwrap_or(0);
                    }
                    MachineValue::CopyStackBPRelative(byte_offset) => {
                        // Copy from an already-written slot, addressed
                        // relative to this frame's link slot.
                        assert!(byte_offset % 8 == 0);
                        let target_offset = (byte_offset / 8) as isize;
                        let v = stack[(last_stack_offset as isize + target_offset) as usize];
                        stack_offset -= 1;
                        stack[stack_offset] = v;
                    }
                    MachineValue::ExplicitShadow => {
                        // Shadow space recorded as part of the frame layout.
                        assert!(fsm.shadow_size % 8 == 0);
                        stack_offset -= fsm.shadow_size / 8;
                        got_explicit_shadow = true;
                    }
                    MachineValue::WasmStack(x) => {
                        stack_offset -= 1;
                        match state.wasm_stack[x] {
                            WasmAbstractValue::Const(x) => {
                                stack[stack_offset] = x;
                            }
                            WasmAbstractValue::Runtime => {
                                // Runtime values must have been captured.
                                stack[stack_offset] = f.stack[x].unwrap();
                            }
                        }
                    }
                    MachineValue::WasmLocal(x) => {
                        stack_offset -= 1;
                        match fsm.locals[x] {
                            WasmAbstractValue::Const(x) => {
                                stack[stack_offset] = x;
                            }
                            WasmAbstractValue::Runtime => {
                                stack[stack_offset] = f.locals[x].unwrap();
                            }
                        }
                    }
                    MachineValue::TwoHalves(ref inner) => {
                        // One 64-bit slot packing two 32-bit values.
                        stack_offset -= 1;
                        // Low half (bits 0..32).
                        match inner.0 {
                            MachineValue::WasmStack(x) => match state.wasm_stack[x] {
                                WasmAbstractValue::Const(x) => {
                                    assert!(x <= std::u32::MAX as u64);
                                    stack[stack_offset] |= x;
                                }
                                WasmAbstractValue::Runtime => {
                                    let v = f.stack[x].unwrap();
                                    assert!(v <= std::u32::MAX as u64);
                                    stack[stack_offset] |= v;
                                }
                            },
                            MachineValue::WasmLocal(x) => match fsm.locals[x] {
                                WasmAbstractValue::Const(x) => {
                                    assert!(x <= std::u32::MAX as u64);
                                    stack[stack_offset] |= x;
                                }
                                WasmAbstractValue::Runtime => {
                                    let v = f.locals[x].unwrap();
                                    assert!(v <= std::u32::MAX as u64);
                                    stack[stack_offset] |= v;
                                }
                            },
                            MachineValue::VmctxDeref(ref seq) => {
                                stack[stack_offset] |=
                                    compute_vmctx_deref(vmctx as *const Ctx, seq)
                                        & (std::u32::MAX as u64);
                            }
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.0"),
                        }
                        // High half (bits 32..64).
                        match inner.1 {
                            MachineValue::WasmStack(x) => match state.wasm_stack[x] {
                                WasmAbstractValue::Const(x) => {
                                    assert!(x <= std::u32::MAX as u64);
                                    stack[stack_offset] |= x << 32;
                                }
                                WasmAbstractValue::Runtime => {
                                    let v = f.stack[x].unwrap();
                                    assert!(v <= std::u32::MAX as u64);
                                    stack[stack_offset] |= v << 32;
                                }
                            },
                            MachineValue::WasmLocal(x) => match fsm.locals[x] {
                                WasmAbstractValue::Const(x) => {
                                    assert!(x <= std::u32::MAX as u64);
                                    stack[stack_offset] |= x << 32;
                                }
                                WasmAbstractValue::Runtime => {
                                    let v = f.locals[x].unwrap();
                                    assert!(v <= std::u32::MAX as u64);
                                    stack[stack_offset] |= v << 32;
                                }
                            },
                            MachineValue::VmctxDeref(ref seq) => {
                                stack[stack_offset] |=
                                    (compute_vmctx_deref(vmctx as *const Ctx, seq)
                                        & (std::u32::MAX as u64))
                                        << 32;
                            }
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.1"),
                        }
                    }
                }
            }
            // Frames without a recorded ExplicitShadow still reserve the
            // shadow space implicitly.
            if !got_explicit_shadow {
                assert!(fsm.shadow_size % 8 == 0);
                stack_offset -= fsm.shadow_size / 8;
            }
            // Recover register contents at this suspend point; inner frames
            // processed later overwrite these as appropriate.
            for (i, v) in state.register_values.iter().enumerate() {
                match *v {
                    MachineValue::Undefined => {}
                    MachineValue::Vmctx => {
                        known_registers[i] = Some(vmctx as *mut Ctx as usize as u64);
                    }
                    MachineValue::VmctxDeref(ref seq) => {
                        known_registers[i] = Some(compute_vmctx_deref(vmctx as *const Ctx, seq));
                    }
                    MachineValue::WasmStack(x) => match state.wasm_stack[x] {
                        WasmAbstractValue::Const(x) => {
                            known_registers[i] = Some(x);
                        }
                        WasmAbstractValue::Runtime => {
                            known_registers[i] = Some(f.stack[x].unwrap());
                        }
                    },
                    MachineValue::WasmLocal(x) => match fsm.locals[x] {
                        WasmAbstractValue::Const(x) => {
                            known_registers[i] = Some(x);
                        }
                        WasmAbstractValue::Runtime => {
                            known_registers[i] = Some(f.locals[x].unwrap());
                        }
                    },
                    _ => unreachable!(),
                }
            }

            // Return address: the resume point inside this function.
            stack_offset -= 1;
            stack[stack_offset] = (code_base + activate_offset) as u64;
        }

        // Saved-register area for the innermost frame, consumed when
        // execution is kicked off: GPRs first (R15 down to RAX)...
        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R15).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R14).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R13).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R12).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R11).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R10).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R9).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R8).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RSI).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RDI).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RDX).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RCX).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RBX).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RAX).to_index().0].unwrap_or(0);

        // ...then the frame link for the innermost frame...
        stack_offset -= 1;
        stack[stack_offset] = stack.as_ptr().offset(last_stack_offset as isize) as usize as u64;

        // ...then the XMM registers (XMM15 down to XMM0).
        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM15).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM14).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM13).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM12).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM11).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM10).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM9).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM8).to_index().0].unwrap_or(0);
        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM7).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM6).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM5).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM4).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM3).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM2).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM1).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM0).to_index().0].unwrap_or(0);

        // Restore linear memory, growing it first if the image is larger
        // than the current bound. Shrinking is not supported (assert).
        if let Some(ref memory) = image.memory {
            assert!(vmctx.internal.memory_bound <= memory.len());

            if vmctx.internal.memory_bound < memory.len() {
                let grow: unsafe extern "C" fn(ctx: &mut Ctx, memory_index: usize, delta: usize) =
                    ::std::mem::transmute((*vmctx.internal.intrinsics).memory_grow);
                grow(
                    vmctx,
                    0,
                    // Delta in 64 KiB wasm pages.
                    (memory.len() - vmctx.internal.memory_bound) / 65536,
                );
                assert_eq!(vmctx.internal.memory_bound, memory.len());
            }

            std::slice::from_raw_parts_mut(vmctx.internal.memory_base, vmctx.internal.memory_bound)
                .copy_from_slice(memory);
        }

        // Restore all local globals from the image.
        let globals_len = (*vmctx.module).info.globals.len();
        for i in 0..globals_len {
            (*(*vmctx.local_backing).globals[LocalGlobalIndex::new(i)].vm_local_global()).data =
                image.globals[i];
        }

        // Free the image before jumping; the rebuilt stack owns everything.
        drop(image);

        catch_unsafe_unwind(
            || {
                run_on_alternative_stack(
                    stack.as_mut_ptr().add(stack.len()),
                    stack.as_mut_ptr().add(stack_offset),
                )
            },
            breakpoints,
        )
    }
1184
1185 pub fn build_instance_image(
1187 vmctx: &mut Ctx,
1188 execution_state: ExecutionStateImage,
1189 ) -> InstanceImage {
1190 unsafe {
1191 let memory = if vmctx.internal.memory_base.is_null() {
1192 None
1193 } else {
1194 Some(
1195 std::slice::from_raw_parts(
1196 vmctx.internal.memory_base,
1197 vmctx.internal.memory_bound,
1198 )
1199 .to_vec(),
1200 )
1201 };
1202
1203 let globals_len = (*vmctx.module).info.globals.len();
1205 let globals: Vec<u128> = (0..globals_len)
1206 .map(|i| {
1207 (*vmctx.local_backing).globals[LocalGlobalIndex::new(i)]
1208 .get()
1209 .to_u128()
1210 })
1211 .collect();
1212
1213 InstanceImage {
1214 memory: memory,
1215 globals: globals,
1216 execution_state: execution_state,
1217 }
1218 }
1219 }
1220
    /// Walk a native stack, recovering a wasm-level frame dump for each
    /// frame whose return address is covered by one of `versions`.
    ///
    /// `stack` must point at the innermost frame's saved return address,
    /// unless `initial_address` supplies that address directly for the first
    /// iteration. Walking stops after `max_depth` frames, or at the first
    /// address no known code version can resolve.
    ///
    /// # Safety
    /// `stack` must point into a live native stack whose layout matches the
    /// state maps in `versions`; otherwise the raw reads are undefined.
    #[warn(unused_variables)]
    pub unsafe fn read_stack<'a, I: Iterator<Item = &'a CodeVersion>, F: Fn() -> I + 'a>(
        versions: F,
        mut stack: *const u64,
        initially_known_registers: [Option<u64>; 32],
        mut initial_address: Option<u64>,
        max_depth: Option<usize>,
    ) -> ExecutionStateImage {
        let mut known_registers: [Option<u64>; 32] = initially_known_registers;
        let mut results: Vec<WasmFunctionStateDump> = vec![];
        let mut was_baseline = true;

        for depth in 0.. {
            if let Some(max_depth) = max_depth {
                if depth >= max_depth {
                    return ExecutionStateImage { frames: results };
                }
            }

            // Return address of the current frame: supplied by the caller on
            // the first iteration, otherwise read off the stack.
            let ret_addr = initial_address.take().unwrap_or_else(|| {
                let x = *stack;
                stack = stack.offset(1);
                x
            });

            // Find the first code version whose tables cover this address,
            // trying call, then trappable, then loop offsets.
            let mut fsm_state: Option<(&FunctionStateMap, MachineState)> = None;
            let mut is_baseline: Option<bool> = None;

            for version in versions() {
                match version
                    .msm
                    .lookup_call_ip(ret_addr as usize, version.base)
                    .or_else(|| {
                        version
                            .msm
                            .lookup_trappable_ip(ret_addr as usize, version.base)
                    })
                    .or_else(|| version.msm.lookup_loop_ip(ret_addr as usize, version.base))
                {
                    Some(x) => {
                        fsm_state = Some(x);
                        is_baseline = Some(version.baseline);
                        break;
                    }
                    None => {}
                };
            }

            let (fsm, state) = if let Some(x) = fsm_state {
                x
            } else {
                // Address not in any managed code: the walk is done.
                return ExecutionStateImage { frames: results };
            };

            {
                let is_baseline = is_baseline.unwrap();

                // Crossing from non-baseline code into baseline code: pick up
                // the callee-saved registers preserved at the boundary.
                if is_baseline && !was_baseline {
                    let callee_saved = &*get_boundary_register_preservation();
                    known_registers[X64Register::GPR(GPR::R15).to_index().0] =
                        Some(callee_saved.r15);
                    known_registers[X64Register::GPR(GPR::R14).to_index().0] =
                        Some(callee_saved.r14);
                    known_registers[X64Register::GPR(GPR::R13).to_index().0] =
                        Some(callee_saved.r13);
                    known_registers[X64Register::GPR(GPR::R12).to_index().0] =
                        Some(callee_saved.r12);
                    known_registers[X64Register::GPR(GPR::RBX).to_index().0] =
                        Some(callee_saved.rbx);
                }

                was_baseline = is_baseline;
            }

            // Seed with compile-time constants; runtime values are filled in
            // from registers and stack slots below.
            let mut wasm_stack: Vec<Option<u64>> = state
                .wasm_stack
                .iter()
                .map(|x| match *x {
                    WasmAbstractValue::Const(x) => Some(x),
                    WasmAbstractValue::Runtime => None,
                })
                .collect();
            let mut wasm_locals: Vec<Option<u64>> = fsm
                .locals
                .iter()
                .map(|x| match *x {
                    WasmAbstractValue::Const(x) => Some(x),
                    WasmAbstractValue::Runtime => None,
                })
                .collect();

            // Recover wasm values that lived in registers at this point.
            for (i, v) in state.register_values.iter().enumerate() {
                match *v {
                    MachineValue::Undefined => {}
                    MachineValue::Vmctx => {}
                    MachineValue::VmctxDeref(_) => {}
                    MachineValue::WasmStack(idx) => {
                        if let Some(v) = known_registers[i] {
                            wasm_stack[idx] = Some(v);
                        } else {
                            eprintln!(
                                "BUG: Register {} for WebAssembly stack slot {} has unknown value.",
                                i, idx
                            );
                        }
                    }
                    MachineValue::WasmLocal(idx) => {
                        if let Some(v) = known_registers[i] {
                            wasm_locals[idx] = Some(v);
                        }
                    }
                    _ => unreachable!(),
                }
            }

            // If the frame has no recorded ExplicitShadow slot, the shadow
            // space still exists on the stack — skip over it.
            let found_shadow = state
                .stack_values
                .iter()
                .any(|v| *v == MachineValue::ExplicitShadow);
            if !found_shadow {
                stack = stack.add(fsm.shadow_size / 8);
            }

            // Stack values were recorded top-of-frame first; reading upward
            // through memory therefore visits them in reverse.
            for v in state.stack_values.iter().rev() {
                match *v {
                    MachineValue::ExplicitShadow => {
                        stack = stack.add(fsm.shadow_size / 8);
                    }
                    MachineValue::Undefined => {
                        stack = stack.offset(1);
                    }
                    MachineValue::Vmctx => {
                        stack = stack.offset(1);
                    }
                    MachineValue::VmctxDeref(_) => {
                        stack = stack.offset(1);
                    }
                    MachineValue::PreserveRegister(idx) => {
                        // A spilled callee-saved register: record its value
                        // for use in outer frames.
                        known_registers[idx.0] = Some(*stack);
                        stack = stack.offset(1);
                    }
                    MachineValue::CopyStackBPRelative(_) => {
                        stack = stack.offset(1);
                    }
                    MachineValue::WasmStack(idx) => {
                        wasm_stack[idx] = Some(*stack);
                        stack = stack.offset(1);
                    }
                    MachineValue::WasmLocal(idx) => {
                        wasm_locals[idx] = Some(*stack);
                        stack = stack.offset(1);
                    }
                    MachineValue::TwoHalves(ref inner) => {
                        // One slot packing two 32-bit values (low, high).
                        let v = *stack;
                        stack = stack.offset(1);
                        match inner.0 {
                            MachineValue::WasmStack(idx) => {
                                wasm_stack[idx] = Some(v & 0xffffffffu64);
                            }
                            MachineValue::WasmLocal(idx) => {
                                wasm_locals[idx] = Some(v & 0xffffffffu64);
                            }
                            MachineValue::VmctxDeref(_) => {}
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.0 (read)"),
                        }
                        match inner.1 {
                            MachineValue::WasmStack(idx) => {
                                wasm_stack[idx] = Some(v >> 32);
                            }
                            MachineValue::WasmLocal(idx) => {
                                wasm_locals[idx] = Some(v >> 32);
                            }
                            MachineValue::VmctxDeref(_) => {}
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.1 (read)"),
                        }
                    }
                }
            }

            // Values this frame stored into its caller's frame.
            for (offset, v) in state.prev_frame.iter() {
                // NOTE(review): +2 presumably skips the frame-link and
                // return-address slots between the frames — confirm against
                // the layout built by invoke_call_return_on_stack.
                let offset = (*offset + 2) as isize;
                match *v {
                    MachineValue::WasmStack(idx) => {
                        wasm_stack[idx] = Some(*stack.offset(offset));
                    }
                    MachineValue::WasmLocal(idx) => {
                        wasm_locals[idx] = Some(*stack.offset(offset));
                    }
                    _ => unreachable!("values in prev frame can only be stack/local"),
                }
            }
            // Skip one slot (the frame link); `stack` then points at the
            // caller's return address for the next iteration.
            stack = stack.offset(1);

            // Drop backend-private operand stack entries from the dump.
            wasm_stack.truncate(
                wasm_stack
                    .len()
                    .checked_sub(state.wasm_stack_private_depth)
                    .unwrap(),
            );

            let wfs = WasmFunctionStateDump {
                local_function_id: fsm.local_function_id,
                wasm_inst_offset: state.wasm_inst_offset,
                stack: wasm_stack,
                locals: wasm_locals,
            };
            results.push(wfs);
        }

        // The `for depth in 0..` loop only exits via `return`.
        unreachable!();
    }
1438}