use crate::prelude::*;
use crate::runtime::store::StoreOpaque;
use crate::runtime::vm::stack_switching::VMStackChain;
use crate::runtime::vm::{
Unwind, VMStoreContext,
traphandlers::{CallThreadState, tls},
};
#[cfg(all(feature = "gc", feature = "stack-switching"))]
use crate::vm::stack_switching::{VMContRef, VMStackState};
use core::ops::ControlFlow;
use wasmtime_unwinder::Frame;
/// A captured Wasm stack trace: an ordered collection of [`Frame`]s gathered
/// by walking the activation stack (and, with stack switching, the chain of
/// suspended continuation stacks).
#[derive(Debug)]
pub struct Backtrace(Vec<Frame>);
impl Backtrace {
    /// Returns an empty backtrace containing no frames.
    pub fn empty() -> Backtrace {
        Backtrace(Vec::new())
    }

    /// Captures the current Wasm stack for `store`.
    ///
    /// Consults the thread-local [`CallThreadState`] via `tls::with`; if no
    /// state is registered on this thread (i.e. no Wasm call is in progress),
    /// an empty backtrace is returned instead of tracing.
    pub fn new(store: &StoreOpaque) -> Backtrace {
        let vm_store_context = store.vm_store_context();
        let unwind = store.unwinder();
        tls::with(|state| match state {
            Some(state) => unsafe {
                Self::new_with_trap_state(vm_store_context, unwind, state, None)
            },
            None => Backtrace(vec![]),
        })
    }

    /// Collects a full backtrace into a `Vec` by delegating to
    /// [`Self::trace_with_trap_state`] with a closure that never breaks.
    ///
    /// `trap_pc_and_fp` optionally supplies the faulting program counter and
    /// frame pointer of a trap to start unwinding from; when `None`, the
    /// last-exit pc/fp recorded in `vm_store_context` are used instead.
    ///
    /// # Safety
    ///
    /// Same contract as [`Self::trace_with_trap_state`]: `vm_store_context`
    /// must be a valid pointer whose recorded pc/fp/stack-chain fields
    /// describe live, walkable stack memory, and `trap_pc_and_fp` (if given)
    /// must point into that same activation.
    pub(crate) unsafe fn new_with_trap_state(
        vm_store_context: *const VMStoreContext,
        unwind: &dyn Unwind,
        state: &CallThreadState,
        trap_pc_and_fp: Option<(usize, usize)>,
    ) -> Backtrace {
        let mut frames = vec![];
        Self::trace_with_trap_state(vm_store_context, unwind, state, trap_pc_and_fp, |frame| {
            frames.push(frame);
            ControlFlow::Continue(())
        });
        Backtrace(frames)
    }

    /// Visits each frame of the current Wasm stack with `f`, without
    /// allocating a `Backtrace`.
    ///
    /// Like [`Self::new`], this is a no-op when no thread-local
    /// `CallThreadState` is present. `f` may return `ControlFlow::Break(())`
    /// to stop the walk early.
    #[cfg(feature = "gc")]
    pub fn trace(store: &StoreOpaque, f: impl FnMut(Frame) -> ControlFlow<()>) {
        let vm_store_context = store.vm_store_context();
        let unwind = store.unwinder();
        tls::with(|state| match state {
            Some(state) => unsafe {
                Self::trace_with_trap_state(vm_store_context, unwind, state, None, f)
            },
            None => {}
        });
    }

    /// Visits the frames of a *suspended* continuation (and its chain of
    /// parent continuations) with `f`.
    ///
    /// Panics (via `assert_eq!`) if the continuation is not in the
    /// `Suspended` state — only then are the pc/fp saved in its control
    /// context meaningful to unwind from.
    #[cfg(all(feature = "gc", feature = "stack-switching"))]
    pub fn trace_suspended_continuation(
        store: &StoreOpaque,
        continuation: &VMContRef,
        f: impl FnMut(Frame) -> ControlFlow<()>,
    ) {
        log::trace!("====== Capturing Backtrace (suspended continuation) ======");
        assert_eq!(
            continuation.common_stack_information.state,
            VMStackState::Suspended
        );
        let unwind = store.unwinder();
        // Resume from the pc/fp captured in the continuation's control
        // context at the point it was suspended; stop unwinding at the
        // trampoline frame recorded when Wasm was entered on this stack.
        let pc = continuation.stack.control_context_instruction_pointer();
        let fp = continuation.stack.control_context_frame_pointer();
        let trampoline_fp = continuation
            .common_stack_information
            .limits
            .last_wasm_entry_fp;
        unsafe {
            // Treat this continuation as the head of the stack chain so the
            // shared chain-walking logic also visits its parents.
            let stack_chain =
                VMStackChain::Continuation(continuation as *const VMContRef as *mut VMContRef);
            if let ControlFlow::Break(()) =
                Self::trace_through_continuations(unwind, stack_chain, pc, fp, trampoline_fp, f)
            {
                log::trace!("====== Done Capturing Backtrace (closure break) ======");
                return;
            }
        }
        log::trace!("====== Done Capturing Backtrace (reached end of stack chain) ======");
    }

    /// Core tracing routine: visits every Wasm frame of every activation
    /// belonging to the store identified by `vm_store_context`, newest first,
    /// calling `f` for each. Stops early if `f` returns `Break`.
    ///
    /// # Safety
    ///
    /// `vm_store_context` must be valid to dereference, and its recorded
    /// last-exit/last-entry pc/fp values and stack chain (as well as those
    /// saved in each `CallThreadState`) must describe live stack memory that
    /// is safe to walk.
    pub(crate) unsafe fn trace_with_trap_state(
        vm_store_context: *const VMStoreContext,
        unwind: &dyn Unwind,
        state: &CallThreadState,
        trap_pc_and_fp: Option<(usize, usize)>,
        mut f: impl FnMut(Frame) -> ControlFlow<()>,
    ) {
        log::trace!("====== Capturing Backtrace ======");
        let (last_wasm_exit_pc, last_wasm_exit_fp) = match trap_pc_and_fp {
            // A trap supplied its own pc/fp: sanity-check that the trap's
            // thread state belongs to the same store we were handed.
            Some((pc, fp)) => {
                assert!(core::ptr::eq(
                    vm_store_context,
                    state.vm_store_context.as_ptr()
                ));
                (pc, fp)
            }
            // No trap: start from the pc/fp recorded when Wasm last exited
            // to the host.
            None => {
                let pc = *(*vm_store_context).last_wasm_exit_pc.get();
                let fp = *(*vm_store_context).last_wasm_exit_fp.get();
                (pc, fp)
            }
        };
        let stack_chain = (*(*vm_store_context).stack_chain.get()).clone();
        // Activations to walk, newest first: the live one described directly
        // by the store context, chained with the older activations saved on
        // each `CallThreadState` in the thread's list — restricted to states
        // that belong to this same store.
        let activations = core::iter::once((
            stack_chain,
            last_wasm_exit_pc,
            last_wasm_exit_fp,
            *(*vm_store_context).last_wasm_entry_fp.get(),
        ))
        .chain(
            state
                .iter()
                .flat_map(|state| state.iter())
                .filter(|state| core::ptr::eq(vm_store_context, state.vm_store_context.as_ptr()))
                .map(|state| {
                    (
                        state.old_stack_chain(),
                        state.old_last_wasm_exit_pc(),
                        state.old_last_wasm_exit_fp(),
                        state.old_last_wasm_entry_fp(),
                    )
                }),
        )
        // A saved pc of 0 marks an activation with no Wasm frames to walk
        // (the debug asserts pin that fp/sp are then 0 as well, and that a
        // non-zero pc always comes with a present stack chain); stop there.
        .take_while(|(chain, pc, fp, sp)| {
            if *pc == 0 {
                debug_assert_eq!(*fp, 0);
                debug_assert_eq!(*sp, 0);
            } else {
                debug_assert_ne!(chain.clone(), VMStackChain::Absent)
            }
            *pc != 0
        });
        for (chain, pc, fp, sp) in activations {
            if let ControlFlow::Break(()) =
                Self::trace_through_continuations(unwind, chain, pc, fp, sp, &mut f)
            {
                log::trace!("====== Done Capturing Backtrace (closure break) ======");
                return;
            }
        }
        log::trace!("====== Done Capturing Backtrace (reached end of activations) ======");
    }

    /// Walks one activation's frames: first the stack identified by
    /// `pc`/`fp`/`trampoline_fp`, then each suspended stack in `chain`
    /// (innermost continuation outward), resuming at the pc/fp saved in each
    /// continuation's control context.
    ///
    /// # Safety
    ///
    /// The given pc/fp and every pointer reachable through `chain` must refer
    /// to live, walkable stack memory and valid
    /// `VMContRef`/`VMStackLimits` allocations.
    unsafe fn trace_through_continuations(
        unwind: &dyn Unwind,
        chain: VMStackChain,
        pc: usize,
        fp: usize,
        trampoline_fp: usize,
        mut f: impl FnMut(Frame) -> ControlFlow<()>,
    ) -> ControlFlow<()> {
        use crate::runtime::vm::stack_switching::{VMContRef, VMStackLimits};
        // Frames on the innermost (currently described) stack first; `?`
        // propagates an early `Break` from the visitor.
        wasmtime_unwinder::visit_frames(unwind, pc, fp, trampoline_fp, &mut f)?;
        assert_ne!(chain, VMStackChain::Absent);
        // Materialize both views of the chain. The limits iterator yields one
        // more entry than the continuation iterator — presumably the final
        // entry is the non-continuation (main/initial) stack's limits; the
        // assert pins that invariant, which the `i + 1` indexing below
        // relies on.
        let stack_limits_vec: Vec<*mut VMStackLimits> =
            chain.clone().into_stack_limits_iter().collect();
        let continuations_vec: Vec<*mut VMContRef> =
            chain.clone().into_continuation_iter().collect();
        assert_eq!(stack_limits_vec.len(), continuations_vec.len() + 1);
        for i in 0..continuations_vec.len() {
            let continuation = unsafe { &*continuations_vec[i] };
            let parent_limits = unsafe { &*stack_limits_vec[i + 1] };
            let parent_continuation = continuations_vec.get(i + 1).map(|&c| unsafe { &*c });
            let fiber_stack = continuation.fiber_stack();
            // Where the *parent* stack was when it resumed this continuation:
            // these frames live on the parent's stack.
            let resume_pc = fiber_stack.control_context_instruction_pointer();
            let resume_fp = fiber_stack.control_context_frame_pointer();
            // Debug-only consistency checks: the resume point and the
            // parent's recorded entry fp / stack limit must lie within the
            // parent continuation's stack range (only checkable when the
            // parent is itself a continuation with a known range).
            let parent_stack_range = parent_continuation.and_then(|p| p.fiber_stack().range());
            parent_stack_range.inspect(|parent_stack_range| {
                debug_assert!(parent_stack_range.contains(&resume_fp));
                debug_assert!(parent_stack_range.contains(&parent_limits.last_wasm_entry_fp));
                debug_assert!(parent_stack_range.contains(&parent_limits.stack_limit));
            });
            wasmtime_unwinder::visit_frames(
                unwind,
                resume_pc,
                resume_fp,
                parent_limits.last_wasm_entry_fp,
                &mut f,
            )?
        }
        ControlFlow::Continue(())
    }

    /// Returns an iterator over the captured frames, ordered as collected
    /// (youngest frame first).
    pub fn frames<'a>(
        &'a self,
    ) -> impl ExactSizeIterator<Item = &'a Frame> + DoubleEndedIterator + 'a {
        self.0.iter()
    }
}