use crate::backend::RunnableModule;
use std::collections::BTreeMap;
use std::ops::Bound::{Included, Unbounded};
use std::sync::Arc;

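/// A backend-independent index identifying a machine register; see
/// `X64Register::to_index` for the x86-64 numbering.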
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct RegisterIndex(pub usize);

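/// An abstract WebAssembly value: either known only at runtime or a constant
/// known at compile time.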
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub enum WasmAbstractValue {
    Runtime,
    Const(u64),
}

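/// A snapshot of the machine state at one point in a function: the layout of
/// the native stack frame, the contents of registers, references into the
/// previous frame, and the abstract WebAssembly operand stack.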
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MachineState {
    pub stack_values: Vec<MachineValue>,
    pub register_values: Vec<MachineValue>,
    pub prev_frame: BTreeMap<usize, MachineValue>,
    pub wasm_stack: Vec<WasmAbstractValue>,
    pub wasm_stack_private_depth: usize,
    pub wasm_inst_offset: usize,
}

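/// A delta between two `MachineState`s. `last` is the index (into
/// `FunctionStateMap::diffs`) of the diff this one builds on; `None` means it
/// applies directly to the function's initial state. `wasm_stack_private_depth`
/// and `wasm_inst_offset` are absolute values rather than deltas (see
/// `build_state`).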
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct MachineStateDiff {
    pub last: Option<usize>,
    pub stack_push: Vec<MachineValue>,
    pub stack_pop: usize,

    pub reg_diff: Vec<(RegisterIndex, MachineValue)>,

    pub prev_frame_diff: BTreeMap<usize, Option<MachineValue>>,

    pub wasm_stack_push: Vec<WasmAbstractValue>,
    pub wasm_stack_pop: usize,

    pub wasm_stack_private_depth: usize,
    pub wasm_inst_offset: usize,
}

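/// Describes what a native stack slot or register holds at a given point:
/// e.g. the vmctx pointer (possibly dereferenced through a chain of offsets),
/// a preserved register, a copy of a BP-relative slot, shadow space, a
/// WebAssembly stack value or local, or two 32-bit halves packed into one
/// 64-bit slot.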
#[derive(Clone, Debug, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub enum MachineValue {
    Undefined,
    Vmctx,
    VmctxDeref(Vec<usize>),
    PreserveRegister(RegisterIndex),
    CopyStackBPRelative(i32),
    ExplicitShadow,
    WasmStack(usize),
    WasmLocal(usize),
    TwoHalves(Box<(MachineValue, MachineValue)>),
}

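/// Per-function state metadata: the initial machine state, accumulated state
/// diffs, local types, shadow-space size, and tables mapping native code
/// offsets at loops, calls, and trappable instructions back to those diffs.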
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct FunctionStateMap {
    pub initial: MachineState,
    pub local_function_id: usize,
    pub locals: Vec<WasmAbstractValue>,
    pub shadow_size: usize,
    pub diffs: Vec<MachineStateDiff>,
    pub wasm_function_header_target_offset: Option<SuspendOffset>,
    pub wasm_offset_to_target_offset: BTreeMap<usize, SuspendOffset>,
    pub loop_offsets: BTreeMap<usize, OffsetInfo>,
    pub call_offsets: BTreeMap<usize, OffsetInfo>,
    pub trappable_offsets: BTreeMap<usize, OffsetInfo>,
}

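/// A native code offset at which execution can be suspended, tagged by whether
/// it belongs to a loop checkpoint, a call site, or a trappable instruction.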
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub enum SuspendOffset {
    Loop(usize),
    Call(usize),
    Trappable(usize),
}

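/// Information recorded for a code offset: the exclusive end of the range it
/// covers, the id of the state diff that applies there, and the offset at
/// which execution resumes when a frame is reconstructed.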
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct OffsetInfo {
    pub end_offset: usize,
    pub diff_id: usize,
    pub activate_offset: usize,
}

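/// State maps for all local functions of a module, keyed by each function's
/// start offset relative to the module's code base, plus the total code size.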
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ModuleStateMap {
    pub local_functions: BTreeMap<usize, FunctionStateMap>,
    pub total_size: usize,
}

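/// A single reconstructed WebAssembly frame: the local function it belongs to,
/// the Wasm instruction offset, and the recovered stack and local values
/// (`None` where a value could not be determined).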
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct WasmFunctionStateDump {
    pub local_function_id: usize,
    pub wasm_inst_offset: usize,
    pub stack: Vec<Option<u64>>,
    pub locals: Vec<Option<u64>>,
}

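/// The WebAssembly frames recovered from a native call stack.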
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ExecutionStateImage {
    pub frames: Vec<WasmFunctionStateDump>,
}

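/// A serializable image of an instance: its linear memory contents, global
/// values, and the execution state of its call stack.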
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InstanceImage {
    pub memory: Option<Vec<u8>>,
    pub globals: Vec<u128>,
    pub execution_state: ExecutionStateImage,
}

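/// One version of a module's compiled code, with the state map, base address,
/// and backend name needed to interpret instruction pointers that fall inside
/// it.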
#[derive(Clone)]
pub struct CodeVersion {
    pub baseline: bool,
    pub msm: ModuleStateMap,
    pub base: usize,
    pub backend: &'static str,
    pub runnable_module: Arc<Box<dyn RunnableModule>>,
}

impl ModuleStateMap {
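    /// Looks up the function and reconstructed `MachineState` for an absolute
    /// instruction pointer `ip`, given the `base` address the module's code is
    /// loaded at. `offset_table_provider` selects which offset table of the
    /// function (call, trappable, or loop offsets) to search. Returns `None`
    /// when `ip` is outside this module or no matching entry exists.
    ///
    /// A hypothetical caller (variable names are illustrative) might look like:
    ///
    /// ```ignore
    /// if let Some((fsm, state)) = msm.lookup_call_ip(return_address, code_base) {
    ///     // `state` describes the machine state at that call site.
    /// }
    /// ```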
    pub fn lookup_ip<F: FnOnce(&FunctionStateMap) -> &BTreeMap<usize, OffsetInfo>>(
        &self,
        ip: usize,
        base: usize,
        offset_table_provider: F,
    ) -> Option<(&FunctionStateMap, MachineState)> {
        if ip < base || ip - base >= self.total_size {
            None
        } else {
            let (_, fsm) = self
                .local_functions
                .range((Unbounded, Included(&(ip - base))))
                .last()
                .unwrap();

            match offset_table_provider(fsm)
                .range((Unbounded, Included(&(ip - base))))
                .last()
            {
                Some((_, x)) => {
                    if ip - base >= x.end_offset {
                        None
                    } else if x.diff_id < fsm.diffs.len() {
                        Some((fsm, fsm.diffs[x.diff_id].build_state(fsm)))
                    } else {
                        None
                    }
                }
                None => None,
            }
        }
    }

    pub fn lookup_call_ip(
        &self,
        ip: usize,
        base: usize,
    ) -> Option<(&FunctionStateMap, MachineState)> {
        self.lookup_ip(ip, base, |fsm| &fsm.call_offsets)
    }

    pub fn lookup_trappable_ip(
        &self,
        ip: usize,
        base: usize,
    ) -> Option<(&FunctionStateMap, MachineState)> {
        self.lookup_ip(ip, base, |fsm| &fsm.trappable_offsets)
    }

    pub fn lookup_loop_ip(
        &self,
        ip: usize,
        base: usize,
    ) -> Option<(&FunctionStateMap, MachineState)> {
        self.lookup_ip(ip, base, |fsm| &fsm.loop_offsets)
    }
}

impl FunctionStateMap {
    pub fn new(
        initial: MachineState,
        local_function_id: usize,
        shadow_size: usize,
        locals: Vec<WasmAbstractValue>,
    ) -> FunctionStateMap {
        FunctionStateMap {
            initial,
            local_function_id,
            shadow_size,
            locals,
            diffs: vec![],
            wasm_function_header_target_offset: None,
            wasm_offset_to_target_offset: BTreeMap::new(),
            loop_offsets: BTreeMap::new(),
            call_offsets: BTreeMap::new(),
            trappable_offsets: BTreeMap::new(),
        }
    }
}

impl MachineState {
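    /// Computes the diff that turns `old` into `self`. Both states must have
    /// register value vectors of the same length.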
    pub fn diff(&self, old: &MachineState) -> MachineStateDiff {
        let first_diff_stack_depth: usize = self
            .stack_values
            .iter()
            .zip(old.stack_values.iter())
            .enumerate()
            .find(|&(_, (a, b))| a != b)
            .map(|x| x.0)
            .unwrap_or(old.stack_values.len().min(self.stack_values.len()));
        assert_eq!(self.register_values.len(), old.register_values.len());
        let reg_diff: Vec<_> = self
            .register_values
            .iter()
            .zip(old.register_values.iter())
            .enumerate()
            .filter(|&(_, (a, b))| a != b)
            .map(|(i, (a, _))| (RegisterIndex(i), a.clone()))
            .collect();
        let prev_frame_diff: BTreeMap<usize, Option<MachineValue>> = self
            .prev_frame
            .iter()
            .filter(|(k, v)| {
                if let Some(ref old_v) = old.prev_frame.get(k) {
                    v != old_v
                } else {
                    true
                }
            })
            .map(|(&k, v)| (k, Some(v.clone())))
            .chain(
                old.prev_frame
                    .iter()
                    .filter(|(k, _)| self.prev_frame.get(k).is_none())
                    .map(|(&k, _)| (k, None)),
            )
            .collect();
        let first_diff_wasm_stack_depth: usize = self
            .wasm_stack
            .iter()
            .zip(old.wasm_stack.iter())
            .enumerate()
            .find(|&(_, (a, b))| a != b)
            .map(|x| x.0)
            .unwrap_or(old.wasm_stack.len().min(self.wasm_stack.len()));
        MachineStateDiff {
            last: None,
            stack_push: self.stack_values[first_diff_stack_depth..].to_vec(),
            stack_pop: old.stack_values.len() - first_diff_stack_depth,
            reg_diff,

            prev_frame_diff,

            wasm_stack_push: self.wasm_stack[first_diff_wasm_stack_depth..].to_vec(),
            wasm_stack_pop: old.wasm_stack.len() - first_diff_wasm_stack_depth,
            wasm_stack_private_depth: self.wasm_stack_private_depth,

            wasm_inst_offset: self.wasm_inst_offset,
        }
    }
}

impl MachineStateDiff {
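    /// Reconstructs a full `MachineState` by starting from the function's
    /// initial state and replaying the chain of diffs that ends at `self`.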
    pub fn build_state(&self, m: &FunctionStateMap) -> MachineState {
        let mut chain: Vec<&MachineStateDiff> = vec![];
        chain.push(self);
        let mut current = self.last;
        while let Some(x) = current {
            let that = &m.diffs[x];
            current = that.last;
            chain.push(that);
        }
        chain.reverse();
        let mut state = m.initial.clone();
        for x in chain {
            for _ in 0..x.stack_pop {
                state.stack_values.pop().unwrap();
            }
            for v in &x.stack_push {
                state.stack_values.push(v.clone());
            }
            for &(index, ref v) in &x.reg_diff {
                state.register_values[index.0] = v.clone();
            }
            for (index, ref v) in &x.prev_frame_diff {
                if let Some(ref x) = v {
                    state.prev_frame.insert(*index, x.clone());
                } else {
                    state.prev_frame.remove(index).unwrap();
                }
            }
            for _ in 0..x.wasm_stack_pop {
                state.wasm_stack.pop().unwrap();
            }
            for v in &x.wasm_stack_push {
                state.wasm_stack.push(*v);
            }
        }
        state.wasm_stack_private_depth = self.wasm_stack_private_depth;
        state.wasm_inst_offset = self.wasm_inst_offset;
        state
    }
}

impl ExecutionStateImage {
    pub fn print_backtrace_if_needed(&self) {
        use std::env;

        if let Ok(x) = env::var("WASMER_BACKTRACE") {
            if x == "1" {
                eprintln!("{}", self.output());
                return;
            }
        }

        eprintln!("Run with `WASMER_BACKTRACE=1` environment variable to display a backtrace.");
    }

    pub fn output(&self) -> String {
        fn join_strings(x: impl Iterator<Item = String>, sep: &str) -> String {
            let mut ret = String::new();
            let mut first = true;

            for s in x {
                if first {
                    first = false;
                } else {
                    ret += sep;
                }
                ret += &s;
            }

            ret
        }

        fn format_optional_u64_sequence(x: &[Option<u64>]) -> String {
            if x.is_empty() {
                "(empty)".into()
            } else {
                join_strings(
                    x.iter().enumerate().map(|(i, x)| {
                        format!(
                            "[{}] = {}",
                            i,
                            x.map(|x| format!("{}", x))
                                .unwrap_or_else(|| "?".to_string())
                        )
                    }),
                    ", ",
                )
            }
        }

        let mut ret = String::new();

        if self.frames.is_empty() {
            ret += "Unknown fault address, cannot read stack.";
            ret += "\n";
        } else {
            ret += "Backtrace:";
            ret += "\n";
            for (i, f) in self.frames.iter().enumerate() {
                ret += &format!("* Frame {} @ Local function {}", i, f.local_function_id);
                ret += "\n";
                ret += &format!(" {} {}\n", "Offset:", f.wasm_inst_offset);
                ret += &format!(
                    " {} {}\n",
                    "Locals:",
                    format_optional_u64_sequence(&f.locals)
                );
                ret += &format!(
                    " {} {}\n\n",
                    "Stack:",
                    format_optional_u64_sequence(&f.stack)
                );
            }
        }

        ret
    }
}

impl InstanceImage {
    pub fn from_bytes(input: &[u8]) -> Option<InstanceImage> {
        use bincode::deserialize;
        match deserialize(input) {
            Ok(x) => Some(x),
            Err(_) => None,
        }
    }

    pub fn to_bytes(&self) -> Vec<u8> {
        use bincode::serialize;
        serialize(self).unwrap()
    }
}

#[cfg(unix)]
pub mod x64_decl {
    use super::*;
    use crate::types::Type;

    #[repr(u8)]
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
    pub enum GPR {
        RAX,
        RCX,
        RDX,
        RBX,
        RSP,
        RBP,
        RSI,
        RDI,
        R8,
        R9,
        R10,
        R11,
        R12,
        R13,
        R14,
        R15,
    }

    #[repr(u8)]
    #[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
    pub enum XMM {
        XMM0,
        XMM1,
        XMM2,
        XMM3,
        XMM4,
        XMM5,
        XMM6,
        XMM7,
        XMM8,
        XMM9,
        XMM10,
        XMM11,
        XMM12,
        XMM13,
        XMM14,
        XMM15,
    }

    #[derive(Copy, Clone, Debug, Eq, PartialEq)]
    pub enum X64Register {
        GPR(GPR),
        XMM(XMM),
    }

    impl X64Register {
        /// Converts this register into the `RegisterIndex` numbering used by the
        /// state maps: GPRs map to 0..=15 and XMM registers to 16..=31.
        pub fn to_index(&self) -> RegisterIndex {
            match *self {
                X64Register::GPR(x) => RegisterIndex(x as usize),
                X64Register::XMM(x) => RegisterIndex(x as usize + 16),
            }
        }

        /// Converts a DWARF register number (System V AMD64 numbering) into an
        /// `X64Register`, if it is one of the registers tracked here.
        pub fn from_dwarf_regnum(x: u16) -> Option<X64Register> {
            Some(match x {
                0 => X64Register::GPR(GPR::RAX),
                1 => X64Register::GPR(GPR::RDX),
                2 => X64Register::GPR(GPR::RCX),
                3 => X64Register::GPR(GPR::RBX),
                4 => X64Register::GPR(GPR::RSI),
                5 => X64Register::GPR(GPR::RDI),
                6 => X64Register::GPR(GPR::RBP),
                7 => X64Register::GPR(GPR::RSP),
                8 => X64Register::GPR(GPR::R8),
                9 => X64Register::GPR(GPR::R9),
                10 => X64Register::GPR(GPR::R10),
                11 => X64Register::GPR(GPR::R11),
                12 => X64Register::GPR(GPR::R12),
                13 => X64Register::GPR(GPR::R13),
                14 => X64Register::GPR(GPR::R14),
                15 => X64Register::GPR(GPR::R15),

                17 => X64Register::XMM(XMM::XMM0),
                18 => X64Register::XMM(XMM::XMM1),
                19 => X64Register::XMM(XMM::XMM2),
                20 => X64Register::XMM(XMM::XMM3),
                21 => X64Register::XMM(XMM::XMM4),
                22 => X64Register::XMM(XMM::XMM5),
                23 => X64Register::XMM(XMM::XMM6),
                24 => X64Register::XMM(XMM::XMM7),
                _ => return None,
            })
        }

        /// Returns the encoding of a `mov` from this register to an RSP-relative
        /// stack slot, up to but not including the 32-bit displacement. Only the
        /// argument-passing registers are supported; other registers yield `None`.
        pub fn prefix_mov_to_stack(&self) -> Option<&'static [u8]> {
            Some(match *self {
                X64Register::GPR(gpr) => match gpr {
                    GPR::RDI => &[0x48, 0x89, 0xbc, 0x24],
                    GPR::RSI => &[0x48, 0x89, 0xb4, 0x24],
                    GPR::RDX => &[0x48, 0x89, 0x94, 0x24],
                    GPR::RCX => &[0x48, 0x89, 0x8c, 0x24],
                    GPR::R8 => &[0x4c, 0x89, 0x84, 0x24],
                    GPR::R9 => &[0x4c, 0x89, 0x8c, 0x24],
                    _ => return None,
                },
                X64Register::XMM(xmm) => match xmm {
                    XMM::XMM0 => &[0x66, 0x0f, 0xd6, 0x84, 0x24],
                    XMM::XMM1 => &[0x66, 0x0f, 0xd6, 0x8c, 0x24],
                    XMM::XMM2 => &[0x66, 0x0f, 0xd6, 0x94, 0x24],
                    XMM::XMM3 => &[0x66, 0x0f, 0xd6, 0x9c, 0x24],
                    XMM::XMM4 => &[0x66, 0x0f, 0xd6, 0xa4, 0x24],
                    XMM::XMM5 => &[0x66, 0x0f, 0xd6, 0xac, 0x24],
                    XMM::XMM6 => &[0x66, 0x0f, 0xd6, 0xb4, 0x24],
                    XMM::XMM7 => &[0x66, 0x0f, 0xd6, 0xbc, 0x24],
                    _ => return None,
                },
            })
        }
    }

    /// Allocates argument registers in the System V AMD64 order: integers in
    /// RDI, RSI, RDX, RCX, R8, R9 and floats in XMM0..XMM7.
    #[derive(Default)]
    pub struct ArgumentRegisterAllocator {
        n_gprs: usize,
        n_xmms: usize,
    }

    impl ArgumentRegisterAllocator {
        pub fn next(&mut self, ty: Type) -> Option<X64Register> {
            static GPR_SEQ: &'static [GPR] =
                &[GPR::RDI, GPR::RSI, GPR::RDX, GPR::RCX, GPR::R8, GPR::R9];
            static XMM_SEQ: &'static [XMM] = &[
                XMM::XMM0,
                XMM::XMM1,
                XMM::XMM2,
                XMM::XMM3,
                XMM::XMM4,
                XMM::XMM5,
                XMM::XMM6,
                XMM::XMM7,
            ];
            match ty {
                Type::I32 | Type::I64 => {
                    if self.n_gprs < GPR_SEQ.len() {
                        let gpr = GPR_SEQ[self.n_gprs];
                        self.n_gprs += 1;
                        Some(X64Register::GPR(gpr))
                    } else {
                        None
                    }
                }
                Type::F32 | Type::F64 => {
                    if self.n_xmms < XMM_SEQ.len() {
                        let xmm = XMM_SEQ[self.n_xmms];
                        self.n_xmms += 1;
                        Some(X64Register::XMM(xmm))
                    } else {
                        None
                    }
                }
                _ => todo!(
                    "ArgumentRegisterAllocator::next: Unsupported type: {:?}",
                    ty
                ),
            }
        }
    }
}

#[cfg(unix)]
pub mod x64 {
    pub use super::x64_decl::*;
    use super::*;
    use crate::codegen::BreakpointMap;
    use crate::error::RuntimeError;
    use crate::fault::{
        catch_unsafe_unwind, get_boundary_register_preservation, run_on_alternative_stack,
    };
    use crate::structures::TypedIndex;
    use crate::types::LocalGlobalIndex;
    use crate::vm::Ctx;

    /// Resolves a `MachineValue::VmctxDeref` chain: starting from the address of
    /// the `vmctx` pointer, repeatedly load a pointer and apply the next byte
    /// offset, returning the final address.
    #[allow(clippy::cast_ptr_alignment)]
    unsafe fn compute_vmctx_deref(vmctx: *const Ctx, seq: &[usize]) -> u64 {
        let mut ptr = &vmctx as *const *const Ctx as *const u8;
        for x in seq {
            debug_assert!(ptr.align_offset(std::mem::align_of::<*const u8>()) == 0);
            ptr = (*(ptr as *const *const u8)).add(*x);
        }
        ptr as usize as u64
    }

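    /// Creates an empty machine state: no stack values, all 24 tracked
    /// registers undefined, and no known Wasm instruction offset.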
    pub fn new_machine_state() -> MachineState {
        MachineState {
            stack_values: vec![],
            register_values: vec![MachineValue::Undefined; 16 + 8],
            prev_frame: BTreeMap::new(),
            wasm_stack: vec![],
            wasm_stack_private_depth: 0,
            wasm_inst_offset: ::std::usize::MAX,
        }
    }

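    /// Rebuilds the native frames described by `image` onto a freshly allocated
    /// stack, restores linear memory and globals, and resumes execution there
    /// via `run_on_alternative_stack`.
    ///
    /// # Safety
    ///
    /// The rebuilt stack must match exactly what the generated code expects,
    /// and `vmctx` must belong to the module described by `msm`.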
    #[warn(unused_variables)]
    pub unsafe fn invoke_call_return_on_stack(
        msm: &ModuleStateMap,
        code_base: usize,
        image: InstanceImage,
        vmctx: &mut Ctx,
        breakpoints: Option<BreakpointMap>,
    ) -> Result<u64, RuntimeError> {
        let mut stack: Vec<u64> = vec![0; 1048576 * 8 / 8]; // 8MB stack
        let mut stack_offset: usize = stack.len();

        stack_offset -= 3;
        let mut last_stack_offset: u64 = 0;
        let mut known_registers: [Option<u64>; 32] = [None; 32];

        let local_functions_vec: Vec<&FunctionStateMap> =
            msm.local_functions.iter().map(|(_, v)| v).collect();

        // Rebuild each frame, outermost first, onto the new stack.
        for f in image.execution_state.frames.iter().rev() {
            let fsm = local_functions_vec[f.local_function_id];
            let suspend_offset = if f.wasm_inst_offset == ::std::usize::MAX {
                fsm.wasm_function_header_target_offset
            } else {
                fsm.wasm_offset_to_target_offset
                    .get(&f.wasm_inst_offset)
                    .copied()
            }
            .expect("instruction is not a critical point");

            let (activate_offset, diff_id) = match suspend_offset {
                SuspendOffset::Loop(x) => fsm.loop_offsets.get(&x),
                SuspendOffset::Call(x) => fsm.call_offsets.get(&x),
                SuspendOffset::Trappable(x) => fsm.trappable_offsets.get(&x),
            }
            .map(|x| (x.activate_offset, x.diff_id))
            .expect("offset cannot be found in table");

            let diff = &fsm.diffs[diff_id];
            let state = diff.build_state(fsm);

            stack_offset -= 1;
            stack[stack_offset] =
                stack.as_ptr().offset(last_stack_offset as isize) as usize as u64; // link to the previous frame's slot (saved frame pointer chain)
            last_stack_offset = stack_offset as _;

            let mut got_explicit_shadow = false;

            for v in state.stack_values.iter() {
                match *v {
                    MachineValue::Undefined => stack_offset -= 1,
                    MachineValue::Vmctx => {
                        stack_offset -= 1;
                        stack[stack_offset] = vmctx as *mut Ctx as usize as u64;
                    }
                    MachineValue::VmctxDeref(ref seq) => {
                        stack_offset -= 1;
                        stack[stack_offset] = compute_vmctx_deref(vmctx as *const Ctx, seq);
                    }
                    MachineValue::PreserveRegister(index) => {
                        stack_offset -= 1;
                        stack[stack_offset] = known_registers[index.0].unwrap_or(0);
                    }
                    MachineValue::CopyStackBPRelative(byte_offset) => {
                        assert!(byte_offset % 8 == 0);
                        let target_offset = (byte_offset / 8) as isize;
                        let v = stack[(last_stack_offset as isize + target_offset) as usize];
                        stack_offset -= 1;
                        stack[stack_offset] = v;
                    }
                    MachineValue::ExplicitShadow => {
                        assert!(fsm.shadow_size % 8 == 0);
                        stack_offset -= fsm.shadow_size / 8;
                        got_explicit_shadow = true;
                    }
                    MachineValue::WasmStack(x) => {
                        stack_offset -= 1;
                        match state.wasm_stack[x] {
                            WasmAbstractValue::Const(x) => {
                                stack[stack_offset] = x;
                            }
                            WasmAbstractValue::Runtime => {
                                stack[stack_offset] = f.stack[x].unwrap();
                            }
                        }
                    }
                    MachineValue::WasmLocal(x) => {
                        stack_offset -= 1;
                        match fsm.locals[x] {
                            WasmAbstractValue::Const(x) => {
                                stack[stack_offset] = x;
                            }
                            WasmAbstractValue::Runtime => {
                                stack[stack_offset] = f.locals[x].unwrap();
                            }
                        }
                    }
                    MachineValue::TwoHalves(ref inner) => {
                        stack_offset -= 1;
                        match inner.0 {
                            MachineValue::WasmStack(x) => match state.wasm_stack[x] {
                                WasmAbstractValue::Const(x) => {
                                    assert!(x <= std::u32::MAX as u64);
                                    stack[stack_offset] |= x;
                                }
                                WasmAbstractValue::Runtime => {
                                    let v = f.stack[x].unwrap();
                                    assert!(v <= std::u32::MAX as u64);
                                    stack[stack_offset] |= v;
                                }
                            },
                            MachineValue::WasmLocal(x) => match fsm.locals[x] {
                                WasmAbstractValue::Const(x) => {
                                    assert!(x <= std::u32::MAX as u64);
                                    stack[stack_offset] |= x;
                                }
                                WasmAbstractValue::Runtime => {
                                    let v = f.locals[x].unwrap();
                                    assert!(v <= std::u32::MAX as u64);
                                    stack[stack_offset] |= v;
                                }
                            },
                            MachineValue::VmctxDeref(ref seq) => {
                                stack[stack_offset] |=
                                    compute_vmctx_deref(vmctx as *const Ctx, seq)
                                        & (std::u32::MAX as u64);
                            }
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.0"),
                        }
                        match inner.1 {
                            MachineValue::WasmStack(x) => match state.wasm_stack[x] {
                                WasmAbstractValue::Const(x) => {
                                    assert!(x <= std::u32::MAX as u64);
                                    stack[stack_offset] |= x << 32;
                                }
                                WasmAbstractValue::Runtime => {
                                    let v = f.stack[x].unwrap();
                                    assert!(v <= std::u32::MAX as u64);
                                    stack[stack_offset] |= v << 32;
                                }
                            },
                            MachineValue::WasmLocal(x) => match fsm.locals[x] {
                                WasmAbstractValue::Const(x) => {
                                    assert!(x <= std::u32::MAX as u64);
                                    stack[stack_offset] |= x << 32;
                                }
                                WasmAbstractValue::Runtime => {
                                    let v = f.locals[x].unwrap();
                                    assert!(v <= std::u32::MAX as u64);
                                    stack[stack_offset] |= v << 32;
                                }
                            },
                            MachineValue::VmctxDeref(ref seq) => {
                                stack[stack_offset] |=
                                    (compute_vmctx_deref(vmctx as *const Ctx, seq)
                                        & (std::u32::MAX as u64))
                                        << 32;
                            }
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.1"),
                        }
                    }
                }
            }
            if !got_explicit_shadow {
                assert!(fsm.shadow_size % 8 == 0);
                stack_offset -= fsm.shadow_size / 8;
            }
            for (i, v) in state.register_values.iter().enumerate() {
                match *v {
                    MachineValue::Undefined => {}
                    MachineValue::Vmctx => {
                        known_registers[i] = Some(vmctx as *mut Ctx as usize as u64);
                    }
                    MachineValue::VmctxDeref(ref seq) => {
                        known_registers[i] = Some(compute_vmctx_deref(vmctx as *const Ctx, seq));
                    }
                    MachineValue::WasmStack(x) => match state.wasm_stack[x] {
                        WasmAbstractValue::Const(x) => {
                            known_registers[i] = Some(x);
                        }
                        WasmAbstractValue::Runtime => {
                            known_registers[i] = Some(f.stack[x].unwrap());
                        }
                    },
                    MachineValue::WasmLocal(x) => match fsm.locals[x] {
                        WasmAbstractValue::Const(x) => {
                            known_registers[i] = Some(x);
                        }
                        WasmAbstractValue::Runtime => {
                            known_registers[i] = Some(f.locals[x].unwrap());
                        }
                    },
                    _ => unreachable!(),
                }
            }

            // Push the address at which execution should resume for this frame.
            stack_offset -= 1;
            stack[stack_offset] = (code_base + activate_offset) as u64;
        }

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R15).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R14).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R13).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R12).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R11).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R10).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R9).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::R8).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RSI).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RDI).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RDX).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RCX).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RBX).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = known_registers[X64Register::GPR(GPR::RAX).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] = stack.as_ptr().offset(last_stack_offset as isize) as usize as u64;

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM15).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM14).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM13).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM12).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM11).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM10).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM9).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM8).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM7).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM6).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM5).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM4).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM3).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM2).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM1).to_index().0].unwrap_or(0);

        stack_offset -= 1;
        stack[stack_offset] =
            known_registers[X64Register::XMM(XMM::XMM0).to_index().0].unwrap_or(0);

        if let Some(ref memory) = image.memory {
            assert!(vmctx.internal.memory_bound <= memory.len());

            if vmctx.internal.memory_bound < memory.len() {
                let grow: unsafe extern "C" fn(ctx: &mut Ctx, memory_index: usize, delta: usize) =
                    ::std::mem::transmute((*vmctx.internal.intrinsics).memory_grow);
                grow(
                    vmctx,
                    0,
                    (memory.len() - vmctx.internal.memory_bound) / 65536,
                );
                assert_eq!(vmctx.internal.memory_bound, memory.len());
            }

            std::slice::from_raw_parts_mut(vmctx.internal.memory_base, vmctx.internal.memory_bound)
                .copy_from_slice(memory);
        }

        let globals_len = (*vmctx.module).info.globals.len();
        for i in 0..globals_len {
            (*(*vmctx.local_backing).globals[LocalGlobalIndex::new(i)].vm_local_global()).data =
                image.globals[i];
        }

        drop(image); // release the image (and its copy of memory) before resuming execution

        catch_unsafe_unwind(
            || {
                run_on_alternative_stack(
                    stack.as_mut_ptr().add(stack.len()),
                    stack.as_mut_ptr().add(stack_offset),
                )
            },
            breakpoints,
        )
    }

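    /// Captures the instance's current linear memory and globals, together with
    /// the given `execution_state`, into an `InstanceImage`.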
    pub fn build_instance_image(
        vmctx: &mut Ctx,
        execution_state: ExecutionStateImage,
    ) -> InstanceImage {
        unsafe {
            let memory = if vmctx.internal.memory_base.is_null() {
                None
            } else {
                Some(
                    std::slice::from_raw_parts(
                        vmctx.internal.memory_base,
                        vmctx.internal.memory_bound,
                    )
                    .to_vec(),
                )
            };

            let globals_len = (*vmctx.module).info.globals.len();
            let globals: Vec<u128> = (0..globals_len)
                .map(|i| {
                    (*vmctx.local_backing).globals[LocalGlobalIndex::new(i)]
                        .get()
                        .to_u128()
                })
                .collect();

            InstanceImage {
                memory,
                globals,
                execution_state,
            }
        }
    }

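    /// Walks a native stack starting at `stack`, using the state maps of the
    /// supplied code `versions` to recover one `WasmFunctionStateDump` per
    /// frame. Unwinding stops after `max_depth` frames or at the first return
    /// address not covered by any known code version.
    ///
    /// # Safety
    ///
    /// `stack` must point into a live native stack produced by code compiled
    /// with these state maps.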
    #[warn(unused_variables)]
    pub unsafe fn read_stack<'a, I: Iterator<Item = &'a CodeVersion>, F: Fn() -> I + 'a>(
        versions: F,
        mut stack: *const u64,
        initially_known_registers: [Option<u64>; 32],
        mut initial_address: Option<u64>,
        max_depth: Option<usize>,
    ) -> ExecutionStateImage {
        let mut known_registers: [Option<u64>; 32] = initially_known_registers;
        let mut results: Vec<WasmFunctionStateDump> = vec![];
        let mut was_baseline = true;

        for depth in 0.. {
            if let Some(max_depth) = max_depth {
                if depth >= max_depth {
                    return ExecutionStateImage { frames: results };
                }
            }

            let ret_addr = initial_address.take().unwrap_or_else(|| {
                let x = *stack;
                stack = stack.offset(1);
                x
            });

            let mut fsm_state: Option<(&FunctionStateMap, MachineState)> = None;
            let mut is_baseline: Option<bool> = None;

            for version in versions() {
                match version
                    .msm
                    .lookup_call_ip(ret_addr as usize, version.base)
                    .or_else(|| {
                        version
                            .msm
                            .lookup_trappable_ip(ret_addr as usize, version.base)
                    })
                    .or_else(|| version.msm.lookup_loop_ip(ret_addr as usize, version.base))
                {
                    Some(x) => {
                        fsm_state = Some(x);
                        is_baseline = Some(version.baseline);
                        break;
                    }
                    None => {}
                };
            }

            let (fsm, state) = if let Some(x) = fsm_state {
                x
            } else {
                return ExecutionStateImage { frames: results };
            };

            {
                let is_baseline = is_baseline.unwrap();

                if is_baseline && !was_baseline {
                    let callee_saved = &*get_boundary_register_preservation();
                    known_registers[X64Register::GPR(GPR::R15).to_index().0] =
                        Some(callee_saved.r15);
                    known_registers[X64Register::GPR(GPR::R14).to_index().0] =
                        Some(callee_saved.r14);
                    known_registers[X64Register::GPR(GPR::R13).to_index().0] =
                        Some(callee_saved.r13);
                    known_registers[X64Register::GPR(GPR::R12).to_index().0] =
                        Some(callee_saved.r12);
                    known_registers[X64Register::GPR(GPR::RBX).to_index().0] =
                        Some(callee_saved.rbx);
                }

                was_baseline = is_baseline;
            }

            let mut wasm_stack: Vec<Option<u64>> = state
                .wasm_stack
                .iter()
                .map(|x| match *x {
                    WasmAbstractValue::Const(x) => Some(x),
                    WasmAbstractValue::Runtime => None,
                })
                .collect();
            let mut wasm_locals: Vec<Option<u64>> = fsm
                .locals
                .iter()
                .map(|x| match *x {
                    WasmAbstractValue::Const(x) => Some(x),
                    WasmAbstractValue::Runtime => None,
                })
                .collect();

            for (i, v) in state.register_values.iter().enumerate() {
                match *v {
                    MachineValue::Undefined => {}
                    MachineValue::Vmctx => {}
                    MachineValue::VmctxDeref(_) => {}
                    MachineValue::WasmStack(idx) => {
                        if let Some(v) = known_registers[i] {
                            wasm_stack[idx] = Some(v);
                        } else {
                            eprintln!(
                                "BUG: Register {} for WebAssembly stack slot {} has unknown value.",
                                i, idx
                            );
                        }
                    }
                    MachineValue::WasmLocal(idx) => {
                        if let Some(v) = known_registers[i] {
                            wasm_locals[idx] = Some(v);
                        }
                    }
                    _ => unreachable!(),
                }
            }

            let found_shadow = state
                .stack_values
                .iter()
                .any(|v| *v == MachineValue::ExplicitShadow);
            if !found_shadow {
                stack = stack.add(fsm.shadow_size / 8);
            }

            for v in state.stack_values.iter().rev() {
                match *v {
                    MachineValue::ExplicitShadow => {
                        stack = stack.add(fsm.shadow_size / 8);
                    }
                    MachineValue::Undefined => {
                        stack = stack.offset(1);
                    }
                    MachineValue::Vmctx => {
                        stack = stack.offset(1);
                    }
                    MachineValue::VmctxDeref(_) => {
                        stack = stack.offset(1);
                    }
                    MachineValue::PreserveRegister(idx) => {
                        known_registers[idx.0] = Some(*stack);
                        stack = stack.offset(1);
                    }
                    MachineValue::CopyStackBPRelative(_) => {
                        stack = stack.offset(1);
                    }
                    MachineValue::WasmStack(idx) => {
                        wasm_stack[idx] = Some(*stack);
                        stack = stack.offset(1);
                    }
                    MachineValue::WasmLocal(idx) => {
                        wasm_locals[idx] = Some(*stack);
                        stack = stack.offset(1);
                    }
                    MachineValue::TwoHalves(ref inner) => {
                        let v = *stack;
                        stack = stack.offset(1);
                        match inner.0 {
                            MachineValue::WasmStack(idx) => {
                                wasm_stack[idx] = Some(v & 0xffffffffu64);
                            }
                            MachineValue::WasmLocal(idx) => {
                                wasm_locals[idx] = Some(v & 0xffffffffu64);
                            }
                            MachineValue::VmctxDeref(_) => {}
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.0 (read)"),
                        }
                        match inner.1 {
                            MachineValue::WasmStack(idx) => {
                                wasm_stack[idx] = Some(v >> 32);
                            }
                            MachineValue::WasmLocal(idx) => {
                                wasm_locals[idx] = Some(v >> 32);
                            }
                            MachineValue::VmctxDeref(_) => {}
                            MachineValue::Undefined => {}
                            _ => unimplemented!("TwoHalves.1 (read)"),
                        }
                    }
                }
            }

            for (offset, v) in state.prev_frame.iter() {
                let offset = (*offset + 2) as isize; // skip the saved frame pointer and return address
                match *v {
                    MachineValue::WasmStack(idx) => {
                        wasm_stack[idx] = Some(*stack.offset(offset));
                    }
                    MachineValue::WasmLocal(idx) => {
                        wasm_locals[idx] = Some(*stack.offset(offset));
                    }
                    _ => unreachable!("values in prev frame can only be stack/local"),
                }
            }
            stack = stack.offset(1); // skip the saved frame pointer

            wasm_stack.truncate(
                wasm_stack
                    .len()
                    .checked_sub(state.wasm_stack_private_depth)
                    .unwrap(),
            );

            let wfs = WasmFunctionStateDump {
                local_function_id: fsm.local_function_id,
                wasm_inst_offset: state.wasm_inst_offset,
                stack: wasm_stack,
                locals: wasm_locals,
            };
            results.push(wfs);
        }

        unreachable!();
    }
}