mod backtrace;
#[cfg(feature = "coredump")]
#[path = "traphandlers/coredump_enabled.rs"]
mod coredump;
#[cfg(not(feature = "coredump"))]
#[path = "traphandlers/coredump_disabled.rs"]
mod coredump;
#[cfg(all(has_native_signals))]
mod signals;
#[cfg(all(has_native_signals))]
pub use self::signals::*;
#[cfg(feature = "gc")]
use crate::ThrownException;
use crate::runtime::module::lookup_code;
use crate::runtime::store::{ExecutorRef, StoreOpaque};
use crate::runtime::vm::sys::traphandlers;
use crate::runtime::vm::{InterpreterRef, VMContext, VMStore, VMStoreContext, f32x4, f64x2, i8x16};
#[cfg(all(feature = "debug", feature = "gc"))]
use crate::store::AsStoreOpaque;
use crate::{EntryStoreContext, prelude::*};
use crate::{StoreContextMut, WasmBacktrace};
use core::cell::Cell;
use core::num::NonZeroU32;
use core::ptr::{self, NonNull};
use wasmtime_unwinder::Handler;
pub use self::backtrace::Backtrace;
#[cfg(feature = "debug")]
pub(crate) use self::backtrace::{FrameOrHostCode, StoreBacktrace};
#[cfg(feature = "gc")]
pub use wasmtime_unwinder::Frame;
pub use self::coredump::CoreDumpStack;
pub use self::tls::tls_eager_initialize;
#[cfg(feature = "async")]
pub use self::tls::{AsyncWasmCallState, PreviousAsyncWasmCallState};
pub use traphandlers::SignalHandler;
/// Register state captured at the point a trap (signal/fault) was raised,
/// used to look up trap metadata and to seed backtrace capture.
pub(crate) struct TrapRegisters {
    /// Program counter of the faulting instruction.
    pub pc: usize,
    /// Frame pointer of the faulting frame.
    pub fp: usize,
}
/// Outcome of testing whether a fault belongs to wasm code
/// (see `CallThreadState::test_if_trap`).
pub(crate) enum TrapTest {
    /// The faulting pc is not inside any registered wasm code region.
    NotWasm,
    /// An embedder-installed signal handler claimed and handled the fault.
    #[cfg(has_host_compiler_backend)]
    #[cfg_attr(miri, expect(dead_code, reason = "using #[cfg] too unergonomic"))]
    HandledByEmbedder,
    /// This is a wasm trap; resume execution at the given handler.
    Trap(Handler),
}
/// Performs lazy per-thread initialization of the platform trap-handling
/// machinery by delegating to the system-specific implementation.
fn lazy_per_thread_init() {
    traphandlers::lazy_per_thread_init();
}
/// Raises a trap that was previously recorded on the current thread's
/// active `CallThreadState` (panics via `unwrap` if none is active).
///
/// # Safety
///
/// Must only be called while wasm frames are on the stack and an unwind
/// state has been recorded, since this resumes unwinding through them.
pub(super) unsafe fn raise_preexisting_trap(store: &mut dyn VMStore) {
    tls::with(|info| unsafe { info.unwrap().unwind(store) })
}
/// Invokes `f` and converts its result into the raw ABI representation,
/// stashing any trap/panic unwind reason into the current thread's
/// `CallThreadState` so it can be raised once wasm is exited.
pub fn catch_unwind_and_record_trap<R>(
    store: &mut dyn VMStore,
    f: impl FnOnce(&mut dyn VMStore) -> R,
) -> R::Abi
where
    R: HostResult,
{
    let (abi, maybe_unwind) = R::maybe_catch_unwind(store, f);
    match maybe_unwind {
        Some(reason) => tls::with(|state| state.unwrap().record_unwind(store, reason)),
        None => {}
    }
    abi
}
/// Trait describing how host-call return values cross the raw ABI boundary
/// back to wasm, including whether traps/panics must be caught and recorded
/// rather than unwound through wasm frames.
pub trait HostResult {
    /// The raw, `Copy` ABI value actually returned to wasm.
    type Abi: Copy;
    /// Runs `f`, yielding its ABI value plus an optional unwind reason
    /// (trap or panic) that must be recorded instead of propagated.
    fn maybe_catch_unwind(
        store: &mut dyn VMStore,
        f: impl FnOnce(&mut dyn VMStore) -> Self,
    ) -> (Self::Abi, Option<UnwindReason>);
}
/// Implements `HostResult` for plain value types that never produce an
/// unwind: the closure's result is returned as-is and nothing is caught.
macro_rules! host_result_no_catch {
    ($($t:ty,)*) => {
        $(
            impl HostResult for $t {
                type Abi = $t;
                #[allow(unreachable_code, reason = "some types uninhabited on some platforms")]
                fn maybe_catch_unwind(
                    store: &mut dyn VMStore,
                    f: impl FnOnce(&mut dyn VMStore) -> $t,
                ) -> ($t, Option<UnwindReason>) {
                    // No catching needed: `None` signals "no unwind pending".
                    (f(store), None)
                }
            }
        )*
    }
}
// Scalar and SIMD vector ABI types returned directly without any
// catch-unwind machinery.
host_result_no_catch! {
    (),
    bool,
    u32,
    *mut u8,
    u64,
    f32,
    f64,
    i8x16,
    f32x4,
    f64x2,
}
/// A `NonNull<u8>` is passed across the ABI as a raw `*mut u8`; no unwind
/// is ever recorded for this result type.
impl HostResult for NonNull<u8> {
    type Abi = *mut u8;
    fn maybe_catch_unwind(
        store: &mut dyn VMStore,
        f: impl FnOnce(&mut dyn VMStore) -> Self,
    ) -> (*mut u8, Option<UnwindReason>) {
        let ptr = f(store);
        (ptr.as_ptr(), None)
    }
}
/// Fallible host results: `Ok` converts to its ABI value while `Err`
/// becomes a recorded trap. When `std` is available and panics unwind,
/// panics are additionally caught and recorded as `UnwindReason::Panic`.
impl<T, E> HostResult for Result<T, E>
where
    T: HostResultHasUnwindSentinel,
    E: Into<TrapReason>,
{
    type Abi = T::Abi;
    fn maybe_catch_unwind(
        store: &mut dyn VMStore,
        f: impl FnOnce(&mut dyn VMStore) -> Result<T, E>,
    ) -> (T::Abi, Option<UnwindReason>) {
        // On error the sentinel ABI value is returned; the caller uses the
        // recorded unwind reason rather than the ABI value in that case.
        let f = move || match f(store) {
            Ok(ret) => (ret.into_abi(), None),
            Err(reason) => (T::SENTINEL, Some(UnwindReason::Trap(reason.into()))),
        };
        #[cfg(all(feature = "std", panic = "unwind"))]
        {
            // Catch panics here so they never unwind through wasm frames;
            // they are resumed later on the host side of the call.
            match std::panic::catch_unwind(std::panic::AssertUnwindSafe(f)) {
                Ok(result) => result,
                Err(err) => (T::SENTINEL, Some(UnwindReason::Panic(err))),
            }
        }
        #[cfg(not(all(feature = "std", panic = "unwind")))]
        {
            // With `panic = "abort"` or no_std there is nothing to catch.
            f()
        }
    }
}
/// Types whose ABI representation reserves a "sentinel" value to signal
/// that an unwind was recorded and no real result exists.
///
/// # Safety
///
/// `into_abi` must never produce `SENTINEL` for a legitimate value, since
/// success is distinguished from unwind solely by comparing against it.
pub unsafe trait HostResultHasUnwindSentinel {
    /// Raw ABI representation of this type.
    type Abi: Copy;
    /// Reserved value meaning "no result; an unwind was recorded".
    const SENTINEL: Self::Abi;
    /// Converts a successful value into its ABI form.
    fn into_abi(self) -> Self::Abi;
}
/// `()` is represented as a `bool`: `true` on success, `false` (the
/// sentinel) when an unwind was recorded.
unsafe impl HostResultHasUnwindSentinel for () {
    type Abi = bool;
    const SENTINEL: bool = false;
    fn into_abi(self) -> bool {
        true
    }
}
/// `NonZeroU32` is passed as a `u32` where zero — unrepresentable on
/// success — serves as the sentinel.
unsafe impl HostResultHasUnwindSentinel for NonZeroU32 {
    type Abi = u32;
    const SENTINEL: Self::Abi = 0;
    fn into_abi(self) -> Self::Abi {
        self.get()
    }
}
/// `u32` widens to `u64` so that `u64::MAX` — outside the `u32` range —
/// can act as the sentinel without stealing any valid `u32` value.
unsafe impl HostResultHasUnwindSentinel for u32 {
    type Abi = u64;
    const SENTINEL: u64 = u64::MAX;
    fn into_abi(self) -> u64 {
        self.into()
    }
}
/// `Infallible` can never be constructed, so a unit ABI value suffices;
/// `into_abi` is trivially unreachable (empty match).
unsafe impl HostResultHasUnwindSentinel for core::convert::Infallible {
    type Abi = ();
    const SENTINEL: () = ();
    fn into_abi(self) {
        match self {}
    }
}
/// `bool` widens to `u32` (0 or 1 on success) with `u32::MAX` reserved as
/// the sentinel.
unsafe impl HostResultHasUnwindSentinel for bool {
    type Abi = u32;
    const SENTINEL: Self::Abi = u32::MAX;
    fn into_abi(self) -> Self::Abi {
        u32::from(self)
    }
}
/// A runtime trap, pairing its reason with optionally captured diagnostics.
#[derive(Debug)]
pub struct Trap {
    /// Why this trap occurred.
    pub reason: TrapReason,
    /// Wasm backtrace, present only if backtrace capture was enabled.
    pub backtrace: Option<Backtrace>,
    /// Core dump stack, present only if coredump capture was enabled.
    pub coredumpstack: Option<CoreDumpStack>,
}
/// The source of a trap raised during wasm execution.
#[derive(Debug)]
pub enum TrapReason {
    /// An arbitrary error returned by a host call.
    User(Error),
    /// A fault raised by JIT-compiled code (e.g. via a signal).
    Jit {
        /// Program counter at which the fault occurred.
        pc: usize,
        /// Faulting memory address, when the fault was a memory access.
        faulting_addr: Option<usize>,
        /// The trap code associated with this pc.
        trap: wasmtime_environ::Trap,
    },
    /// An explicit wasm trap identified by its trap code.
    Wasm(wasmtime_environ::Trap),
    /// A wasm exception was thrown (the exception itself is pending on the
    /// store, not carried here).
    #[cfg(feature = "gc")]
    Exception,
}
/// Converts an arbitrary host error into a trap reason, special-casing a
/// thrown wasm exception when the `gc` feature is enabled.
impl From<Error> for TrapReason {
    fn from(error: Error) -> Self {
        // A `ThrownException` marker means a wasm exception is pending on
        // the store rather than a plain host error.
        #[cfg(feature = "gc")]
        if error.is::<ThrownException>() {
            return TrapReason::Exception;
        }
        TrapReason::User(error)
    }
}
/// A raw wasm trap code maps directly onto the `Wasm` variant.
impl From<wasmtime_environ::Trap> for TrapReason {
    fn from(trap: wasmtime_environ::Trap) -> Self {
        Self::Wasm(trap)
    }
}
/// Enters wasm by invoking `closure`, installing a fresh `CallThreadState`
/// for the duration of the call and translating any recorded unwind back
/// into a Rust-level `Result`.
///
/// `closure` returns `true` on success. On failure the recorded
/// `UnwindState` determines the error: a pending wasm exception, a boxed
/// `Trap`, or a resumed host panic.
pub fn catch_traps<T, F>(
    store: &mut StoreContextMut<'_, T>,
    old_state: &mut EntryStoreContext,
    mut closure: F,
) -> Result<()>
where
    F: FnMut(NonNull<VMContext>, Option<InterpreterRef<'_>>) -> bool,
{
    let caller = store.0.default_caller();
    // Dispatch on the configured executor: the interpreter receives an
    // explicit `InterpreterRef`, native compiled code receives `None`.
    let result = CallThreadState::new(store.0, old_state).with(|_cx| match store.0.executor() {
        ExecutorRef::Interpreter(r) => closure(caller, Some(r)),
        #[cfg(has_host_compiler_backend)]
        ExecutorRef::Native => closure(caller, None),
    });
    match result {
        Ok(x) => Ok(x),
        // An uncaught wasm exception surfaces as `ThrownException`; the
        // exception object itself stays pending on the store.
        #[cfg(feature = "gc")]
        Err(UnwindState::UnwindToHost {
            reason: UnwindReason::Trap(TrapReason::Exception),
            backtrace: _,
            coredump_stack: _,
        }) => Err(ThrownException.into()),
        Err(UnwindState::UnwindToHost {
            reason: UnwindReason::Trap(reason),
            backtrace,
            coredump_stack,
        }) => Err(crate::trap::from_runtime_box(
            store.0,
            Box::new(Trap {
                reason,
                backtrace,
                coredumpstack: coredump_stack,
            }),
        )),
        // Host panics caught at the wasm boundary resume here, safely on
        // the host side of all wasm frames.
        #[cfg(all(feature = "std", panic = "unwind"))]
        Err(UnwindState::UnwindToHost {
            reason: UnwindReason::Panic(panic),
            ..
        }) => std::panic::resume_unwind(panic),
        #[cfg(feature = "gc")]
        Err(UnwindState::UnwindToWasm { .. }) => {
            unreachable!("We should not have returned to the host with an UnwindToWasm state");
        }
        Err(UnwindState::None) => {
            unreachable!("We should not have gotten an error with no unwind state");
        }
    }
}
/// Private module encapsulating `CallThreadState` so its fields can only be
/// manipulated through the accessors below.
mod call_thread_state {
    use super::*;
    use crate::EntryStoreContext;
    use crate::runtime::vm::{Unwind, VMStackChain};
    /// A pending unwind operation, stored in `CallThreadState::unwind`
    /// while control transfers back across wasm frames.
    pub enum UnwindState {
        /// Unwind past all wasm frames back to the host caller.
        UnwindToHost {
            /// The trap or panic that triggered the unwind.
            reason: UnwindReason,
            /// Captured wasm backtrace, if enabled in the config.
            backtrace: Option<Backtrace>,
            /// Captured core dump stack, if enabled in the config.
            coredump_stack: Option<CoreDumpStack>,
        },
        /// Unwind to an exception handler within wasm itself.
        #[cfg(feature = "gc")]
        UnwindToWasm(Handler),
        /// No unwind is currently pending.
        None,
    }
    impl UnwindState {
        /// Returns whether no unwind is pending.
        pub(super) fn is_none(&self) -> bool {
            // Idiomatic form of the `match`-with-catch-all boolean test.
            matches!(self, Self::None)
        }
    }
    /// Per-activation state registered in the thread-local list for the
    /// duration of a call into wasm.
    pub struct CallThreadState {
        /// The unwind, if any, to perform when exiting wasm.
        pub(super) unwind: Cell<UnwindState>,
        /// Embedder-provided signal handler consulted before trap lookup.
        #[cfg(has_native_signals)]
        pub(super) signal_handler: Option<*const SignalHandler>,
        /// Whether to capture a wasm backtrace on trap.
        pub(super) capture_backtrace: bool,
        /// Whether to capture a core dump on trap.
        #[cfg(feature = "coredump")]
        pub(super) capture_coredump: bool,
        /// Raw pointer to the store's `VMStoreContext`.
        pub(crate) vm_store_context: NonNull<VMStoreContext>,
        /// Frame-walking implementation used for backtraces.
        pub(crate) unwinder: &'static dyn Unwind,
        /// Previous entry in the thread-local activation list.
        pub(super) prev: Cell<tls::Ptr>,
        /// Caller state saved at entry, restored/queried while active.
        old_state: *mut EntryStoreContext,
    }
    impl Drop for CallThreadState {
        fn drop(&mut self) {
            // Any pending unwind must have been consumed by now.
            debug_assert!(self.unwind.replace(UnwindState::None).is_none());
        }
    }
    impl CallThreadState {
        /// Creates a new activation record for `store`, snapshotting the
        /// trap-handling configuration flags.
        #[inline]
        pub(super) fn new(
            store: &mut StoreOpaque,
            old_state: *mut EntryStoreContext,
        ) -> CallThreadState {
            CallThreadState {
                unwind: Cell::new(UnwindState::None),
                unwinder: store.unwinder(),
                #[cfg(has_native_signals)]
                signal_handler: store.signal_handler(),
                capture_backtrace: store.engine().config().wasm_backtrace,
                #[cfg(feature = "coredump")]
                capture_coredump: store.engine().config().coredump_on_trap,
                vm_store_context: store.vm_store_context_ptr(),
                prev: Cell::new(ptr::null()),
                old_state,
            }
        }
        /// Derives the saved "last wasm exit fp" from the saved trampoline
        /// frame pointer.
        ///
        /// # Safety
        ///
        /// `old_state` must still point at a live `EntryStoreContext`.
        pub unsafe fn old_last_wasm_exit_fp(&self) -> usize {
            let trampoline_fp = unsafe { (&*self.old_state).last_wasm_exit_trampoline_fp };
            unsafe { VMStoreContext::wasm_exit_fp_from_trampoline_fp(trampoline_fp) }
        }
        /// Reads the saved "last wasm exit pc" from the caller state.
        ///
        /// # Safety
        ///
        /// `old_state` must still point at a live `EntryStoreContext`.
        pub unsafe fn old_last_wasm_exit_pc(&self) -> usize {
            unsafe { (&*self.old_state).last_wasm_exit_pc }
        }
        /// Reads the saved "last wasm entry fp" from the caller state.
        ///
        /// # Safety
        ///
        /// `old_state` must still point at a live `EntryStoreContext`.
        pub unsafe fn old_last_wasm_entry_fp(&self) -> usize {
            unsafe { (&*self.old_state).last_wasm_entry_fp }
        }
        /// Clones the saved stack chain from the caller state.
        ///
        /// # Safety
        ///
        /// `old_state` must still point at a live `EntryStoreContext`.
        pub unsafe fn old_stack_chain(&self) -> VMStackChain {
            unsafe { (&*self.old_state).stack_chain.clone() }
        }
        /// Returns the previous entry in the thread-local activation list.
        pub fn prev(&self) -> tls::Ptr {
            self.prev.get()
        }
        /// Pushes `self` as the new head of the thread-local list.
        ///
        /// # Safety
        ///
        /// `self` must stay valid, and be `pop`ped, before it moves or is
        /// dropped.
        #[inline]
        pub(crate) unsafe fn push(&self) {
            assert!(self.prev.get().is_null());
            self.prev.set(tls::raw::replace(self));
        }
        /// Pops `self` from the thread-local list, asserting that it was
        /// the current head.
        ///
        /// # Safety
        ///
        /// Must pair with a prior `push` of the same state.
        #[inline]
        pub(crate) unsafe fn pop(&self) {
            let prev = self.prev.replace(ptr::null());
            let head = tls::raw::replace(prev);
            assert!(core::ptr::eq(head, self));
        }
        /// Swaps the wasm entry/exit fields between the live
        /// `VMStoreContext` and the saved `EntryStoreContext`, used when
        /// suspending/resuming an async computation.
        ///
        /// # Safety
        ///
        /// Both `vm_store_context` and `old_state` must be valid and not
        /// concurrently accessed.
        #[cfg(feature = "async")]
        pub(super) unsafe fn swap(&self) {
            // Helper to swap an `UnsafeCell`'s contents with `*b`.
            unsafe fn swap<T>(a: &core::cell::UnsafeCell<T>, b: &mut T) {
                unsafe { core::mem::swap(&mut *a.get(), b) }
            }
            unsafe {
                let cx = self.vm_store_context.as_ref();
                swap(
                    &cx.last_wasm_exit_trampoline_fp,
                    &mut (*self.old_state).last_wasm_exit_trampoline_fp,
                );
                swap(
                    &cx.last_wasm_exit_pc,
                    &mut (*self.old_state).last_wasm_exit_pc,
                );
                swap(
                    &cx.last_wasm_entry_fp,
                    &mut (*self.old_state).last_wasm_entry_fp,
                );
                swap(
                    &cx.last_wasm_entry_sp,
                    &mut (*self.old_state).last_wasm_entry_sp,
                );
                swap(
                    &cx.last_wasm_entry_trap_handler,
                    &mut (*self.old_state).last_wasm_entry_trap_handler,
                );
                swap(&cx.stack_chain, &mut (*self.old_state).stack_chain);
            }
        }
    }
}
pub use call_thread_state::*;
#[cfg(feature = "gc")]
use super::compute_handler;
/// Why wasm execution needs to unwind: a caught host panic to be resumed
/// later, or a trap.
pub enum UnwindReason {
    /// Payload of a host panic caught at the wasm boundary.
    #[cfg(all(feature = "std", panic = "unwind"))]
    Panic(Box<dyn std::any::Any + Send>),
    /// A trap (host error, wasm trap code, JIT fault, or exception).
    Trap(TrapReason),
}
/// Anything convertible into a `TrapReason` also converts into an
/// `UnwindReason` via the `Trap` variant.
impl<E> From<E> for UnwindReason
where
    E: Into<TrapReason>,
{
    fn from(source: E) -> UnwindReason {
        let reason: TrapReason = source.into();
        UnwindReason::Trap(reason)
    }
}
impl CallThreadState {
    /// Pushes `self` onto the thread-local activation list, runs `closure`,
    /// and pops it again. A `false` return from `closure` means wasm
    /// trapped, in which case the recorded unwind state is returned.
    #[inline]
    fn with(mut self, closure: impl FnOnce(&CallThreadState) -> bool) -> Result<(), UnwindState> {
        let succeeded = tls::set(&mut self, |me| closure(me));
        if succeeded {
            Ok(())
        } else {
            Err(self.read_unwind())
        }
    }
    /// Consumes the pending unwind state, leaving `None` in its place.
    #[cold]
    fn read_unwind(&self) -> UnwindState {
        self.unwind.replace(UnwindState::None)
    }
    /// Records why wasm must unwind: captures a backtrace/coredump for
    /// traps, and (with `gc`) searches for an in-wasm exception handler
    /// for thrown exceptions. The state is stored in `self.unwind` to be
    /// acted upon later.
    fn record_unwind(&self, store: &mut dyn VMStore, reason: UnwindReason) {
        if cfg!(debug_assertions) {
            // Recording a second unwind before the first was consumed is a
            // bug.
            let prev = self.unwind.replace(UnwindState::None);
            assert!(prev.is_none());
        }
        let state = match reason {
            // Panics are resumed on the host side; no backtrace needed.
            #[cfg(all(feature = "std", panic = "unwind"))]
            UnwindReason::Panic(err) => {
                UnwindState::UnwindToHost {
                    reason: UnwindReason::Panic(err),
                    backtrace: None,
                    coredump_stack: None,
                }
            }
            // A thrown exception may be caught by a wasm handler; only if
            // none is found does it propagate out to the host.
            #[cfg(feature = "gc")]
            UnwindReason::Trap(TrapReason::Exception) => {
                let handler = unsafe { compute_handler(store) };
                match handler {
                    Some(handler) => UnwindState::UnwindToWasm(handler),
                    None => UnwindState::UnwindToHost {
                        reason: UnwindReason::Trap(TrapReason::Exception),
                        backtrace: None,
                        coredump_stack: None,
                    },
                }
            }
            // A user error already carrying a `WasmBacktrace` does not get
            // a second capture.
            UnwindReason::Trap(TrapReason::User(err))
                if err.downcast_ref::<WasmBacktrace>().is_some() =>
            {
                UnwindState::UnwindToHost {
                    reason: UnwindReason::Trap(TrapReason::User(err)),
                    backtrace: None,
                    coredump_stack: None,
                }
            }
            UnwindReason::Trap(trap) => {
                log::trace!("Capturing backtrace and coredump for {trap:?}");
                UnwindState::UnwindToHost {
                    reason: UnwindReason::Trap(trap),
                    backtrace: self.capture_backtrace(store.vm_store_context_mut(), None),
                    coredump_stack: self.capture_coredump(store.vm_store_context_mut(), None),
                }
            }
        };
        // `store` is unused in some feature configurations.
        let _ = store;
        self.unwind.set(state);
    }
    /// Acts on the recorded unwind state: notifies an attached debug
    /// handler (with `debug` enabled), then resumes execution either at an
    /// in-wasm exception handler or at the entry trampoline's trap handler.
    ///
    /// # Safety
    ///
    /// The wasm frames this state refers to must still be on the stack,
    /// since this transfers control back into them.
    unsafe fn unwind(&self, store: &mut dyn VMStore) {
        #[allow(unused_mut, reason = "only mutated in `debug` configuration")]
        let mut unwind = self.unwind.replace(UnwindState::None);
        #[cfg(feature = "debug")]
        {
            // Surface the unwind cause to the debug handler; an error from
            // the handler replaces the unwind with a host error.
            let result = match &unwind {
                #[cfg(feature = "gc")]
                UnwindState::UnwindToWasm(_) => {
                    use wasmtime_core::alloc::PanicOnOom;
                    assert!(store.as_store_opaque().has_pending_exception());
                    let exn = store
                        .as_store_opaque()
                        .pending_exception_owned_rooted()
                        .panic_on_oom()
                        .expect("exception should be set when we are throwing");
                    store.block_on_debug_handler(crate::DebugEvent::CaughtExceptionThrown(exn))
                }
                #[cfg(feature = "gc")]
                UnwindState::UnwindToHost {
                    reason: UnwindReason::Trap(TrapReason::Exception),
                    ..
                } => {
                    use wasmtime_core::alloc::PanicOnOom;
                    let exn = store
                        .as_store_opaque()
                        .pending_exception_owned_rooted()
                        .panic_on_oom()
                        .expect("exception should be set when we are throwing");
                    store.block_on_debug_handler(crate::DebugEvent::UncaughtExceptionThrown(
                        exn.clone(),
                    ))
                }
                UnwindState::UnwindToHost {
                    reason: UnwindReason::Trap(TrapReason::Wasm(trap)),
                    ..
                } => store.block_on_debug_handler(crate::DebugEvent::Trap(*trap)),
                UnwindState::UnwindToHost {
                    reason: UnwindReason::Trap(TrapReason::User(err)),
                    ..
                } => store.block_on_debug_handler(crate::DebugEvent::HostcallError(err)),
                UnwindState::UnwindToHost {
                    reason: UnwindReason::Trap(TrapReason::Jit { .. }),
                    ..
                } => {
                    // No debug event is emitted for JIT faults here.
                    Ok(())
                }
                #[cfg(all(feature = "std", panic = "unwind"))]
                UnwindState::UnwindToHost {
                    reason: UnwindReason::Panic(_),
                    ..
                } => {
                    // Panics are not debugger events.
                    Ok(())
                }
                UnwindState::None => unreachable!(),
            };
            if let Err(err) = result {
                unwind = UnwindState::UnwindToHost {
                    reason: UnwindReason::Trap(TrapReason::User(err)),
                    backtrace: None,
                    coredump_stack: None,
                };
            }
        }
        match unwind {
            UnwindState::UnwindToHost { .. } => {
                // Re-record the state so `catch_traps` can read it, then
                // jump to the entry trampoline's trap handler with dummy
                // payloads.
                self.unwind.set(unwind);
                let handler = self.entry_trap_handler();
                let payload1 = 0;
                let payload2 = 0;
                unsafe {
                    self.resume_to_exception_handler(
                        store.executor(),
                        &handler,
                        payload1,
                        payload2,
                    );
                }
            }
            #[cfg(feature = "gc")]
            UnwindState::UnwindToWasm(handler) => {
                // The raw GC ref of the pending exception is passed as the
                // first handler payload.
                let payload1 = usize::try_from(
                    store
                        .take_pending_exception()
                        .unwrap()
                        .as_gc_ref()
                        .as_raw_u32(),
                )
                .expect("GC ref does not fit in usize");
                let payload2 = 0;
                unsafe {
                    self.resume_to_exception_handler(
                        store.executor(),
                        &handler,
                        payload1,
                        payload2,
                    );
                }
            }
            UnwindState::None => {
                panic!("Attempting to unwind with no unwind state set.");
            }
        }
    }
    /// Builds the `Handler` for the trap-handling continuation registered
    /// by the most recent wasm entry trampoline in the store context.
    pub(crate) fn entry_trap_handler(&self) -> Handler {
        unsafe {
            let vm_store_context = self.vm_store_context.as_ref();
            let fp = *vm_store_context.last_wasm_entry_fp.get();
            let sp = *vm_store_context.last_wasm_entry_sp.get();
            let pc = *vm_store_context.last_wasm_entry_trap_handler.get();
            Handler { pc, sp, fp }
        }
    }
    /// Transfers control to `handler` with the given payloads, via the
    /// interpreter or (on native builds) a tail-call resume.
    ///
    /// # Safety
    ///
    /// `handler` must describe a live frame on the current stack.
    unsafe fn resume_to_exception_handler(
        &self,
        executor: ExecutorRef<'_>,
        handler: &Handler,
        payload1: usize,
        payload2: usize,
    ) {
        unsafe {
            match executor {
                ExecutorRef::Interpreter(mut r) => {
                    r.resume_to_exception_handler(handler, payload1, payload2)
                }
                #[cfg(has_host_compiler_backend)]
                ExecutorRef::Native => handler.resume_tailcc(payload1, payload2),
            }
        }
    }
    /// Captures a wasm backtrace if capture is enabled, optionally seeded
    /// with the trapping pc/fp.
    fn capture_backtrace(
        &self,
        limits: *const VMStoreContext,
        trap_pc_and_fp: Option<(usize, usize)>,
    ) -> Option<Backtrace> {
        if !self.capture_backtrace {
            return None;
        }
        Some(unsafe { Backtrace::new_with_trap_state(limits, self.unwinder, self, trap_pc_and_fp) })
    }
    /// Iterates over this activation and all previous ones in the
    /// thread-local list, following the `prev` links.
    pub(crate) fn iter<'a>(&'a self) -> impl Iterator<Item = &'a Self> + 'a {
        let mut state = Some(self);
        core::iter::from_fn(move || {
            let this = state?;
            state = unsafe { this.prev().as_ref() };
            Some(this)
        })
    }
    /// Classifies a fault at `regs.pc`: first offers it to an embedder
    /// signal handler, then checks whether the pc lies in registered wasm
    /// code with a known trap code; if so, records the trap and returns the
    /// handler to resume at.
    pub(crate) fn test_if_trap(
        &self,
        regs: TrapRegisters,
        faulting_addr: Option<usize>,
        call_handler: impl FnOnce(&SignalHandler) -> bool,
    ) -> TrapTest {
        // `call_handler` is unused in some feature configurations.
        let _ = &call_handler;
        #[cfg(all(has_native_signals, not(miri)))]
        if let Some(handler) = self.signal_handler {
            if unsafe { call_handler(&*handler) } {
                return TrapTest::HandledByEmbedder;
            }
        }
        let Some((code, text_offset)) = lookup_code(regs.pc) else {
            return TrapTest::NotWasm;
        };
        let Some(trap) = code.lookup_trap_code(text_offset) else {
            return TrapTest::NotWasm;
        };
        self.set_jit_trap(regs, faulting_addr, trap);
        let entry_handler = self.entry_trap_handler();
        TrapTest::Trap(entry_handler)
    }
    /// Records a JIT fault as the pending unwind, capturing a backtrace and
    /// coredump seeded with the faulting pc/fp.
    pub(crate) fn set_jit_trap(
        &self,
        TrapRegisters { pc, fp, .. }: TrapRegisters,
        faulting_addr: Option<usize>,
        trap: wasmtime_environ::Trap,
    ) {
        let backtrace = self.capture_backtrace(self.vm_store_context.as_ptr(), Some((pc, fp)));
        let coredump_stack = self.capture_coredump(self.vm_store_context.as_ptr(), Some((pc, fp)));
        self.unwind.set(UnwindState::UnwindToHost {
            reason: UnwindReason::Trap(TrapReason::Jit {
                pc,
                faulting_addr,
                trap,
            }),
            backtrace,
            coredump_stack,
        });
    }
}
/// Thread-local storage of the currently-active `CallThreadState` chain.
pub(crate) mod tls {
    use super::CallThreadState;
    pub use raw::Ptr;
    /// Low-level TLS accessors. The stored pointer's low bit doubles as a
    /// "this thread's trap handling is initialized" flag.
    pub(super) mod raw {
        use super::CallThreadState;
        pub type Ptr = *const CallThreadState;
        const _: () = {
            // Alignment > 1 guarantees the pointer's low bit is free to use
            // as the initialization flag.
            assert!(core::mem::align_of::<CallThreadState>() > 1);
        };
        /// Reads the raw TLS value, splitting it into (pointer, init flag).
        fn tls_get() -> (Ptr, bool) {
            let mut initialized = false;
            let p = crate::runtime::vm::sys::tls_get().map_addr(|a| {
                initialized = (a & 1) != 0;
                a & !1
            });
            (p.cast(), initialized)
        }
        /// Stores `ptr` in TLS with `initialized` encoded in the low bit.
        fn tls_set(ptr: Ptr, initialized: bool) {
            let encoded = ptr.map_addr(|a| a | usize::from(initialized));
            crate::runtime::vm::sys::tls_set(encoded.cast_mut().cast::<u8>());
        }
        /// Replaces the list head, lazily running per-thread init on first
        /// use, and returns the previous head.
        // NOTE(review): `inline(never)` under `async` presumably prevents
        // TLS access from being cached across fiber switches — confirm
        // before changing these attributes.
        #[cfg_attr(feature = "async", inline(never))]
        #[cfg_attr(not(feature = "async"), inline)]
        pub fn replace(val: Ptr) -> Ptr {
            let (prev, initialized) = tls_get();
            if !initialized {
                super::super::lazy_per_thread_init();
            }
            tls_set(val, true);
            prev
        }
        /// Eagerly runs per-thread initialization if not already done,
        /// preserving the current pointer value.
        #[cfg_attr(feature = "async", inline(never))]
        #[cfg_attr(not(feature = "async"), inline)]
        pub fn initialize() {
            let (state, initialized) = tls_get();
            if initialized {
                return;
            }
            super::super::lazy_per_thread_init();
            tls_set(state, true);
        }
        /// Returns the current head of the activation list.
        #[cfg_attr(feature = "async", inline(never))]
        #[cfg_attr(not(feature = "async"), inline)]
        pub fn get() -> Ptr {
            tls_get().0
        }
    }
    pub use raw::initialize as tls_eager_initialize;
    /// Saved activation-list state belonging to a suspended async wasm
    /// computation.
    #[cfg(feature = "async")]
    pub struct AsyncWasmCallState {
        // Head of a list (linked via `CallThreadState::prev`) of the
        // suspended computation's activations; null when empty.
        state: raw::Ptr,
    }
    // NOTE(review): sending is presumably sound because the referenced
    // states are owned by the suspended computation, which migrates between
    // threads as a unit — confirm against the async fiber design.
    #[cfg(feature = "async")]
    unsafe impl Send for AsyncWasmCallState {}
    #[cfg(feature = "async")]
    impl AsyncWasmCallState {
        /// Creates an empty saved state.
        pub fn new() -> AsyncWasmCallState {
            AsyncWasmCallState {
                state: core::ptr::null_mut(),
            }
        }
        /// Re-installs this saved state onto the current thread, returning
        /// a token capturing the previous head for later `restore`.
        ///
        /// # Safety
        ///
        /// The states in this list must remain valid while installed.
        pub unsafe fn push(self) -> PreviousAsyncWasmCallState {
            let ret = PreviousAsyncWasmCallState { state: raw::get() };
            unsafe {
                // Swap the saved store-context registers back into place
                // for the outermost saved activation.
                if let Some(state) = self.state.as_ref() {
                    state.swap();
                }
            }
            let mut ptr = self.state;
            unsafe {
                // Walk the saved list, pushing each entry back onto this
                // thread's activation list.
                while let Some(state) = ptr.as_ref() {
                    ptr = state.prev.replace(core::ptr::null_mut());
                    state.push();
                }
            }
            ret
        }
        /// Asserts that no state was captured.
        pub fn assert_null(&self) {
            assert!(self.state.is_null());
        }
        /// Asserts that the current TLS head does not point into `range`
        /// (i.e. into memory that is about to be invalidated).
        pub fn assert_current_state_not_in_range(range: core::ops::Range<usize>) {
            let p = raw::get() as usize;
            assert!(p < range.start || range.end < p);
        }
    }
    /// Token for the TLS head as it was before `AsyncWasmCallState::push`;
    /// must be consumed with `restore` (dropping it panics).
    #[cfg(feature = "async")]
    pub struct PreviousAsyncWasmCallState {
        // The activation-list head at the time of `push`.
        state: raw::Ptr,
    }
    #[cfg(feature = "async")]
    impl PreviousAsyncWasmCallState {
        /// Pops every activation pushed since this token was created,
        /// rebuilding the suspended computation's saved list.
        ///
        /// # Safety
        ///
        /// Must be called on the thread where the matching `push` ran, with
        /// the intervening activations still valid.
        pub unsafe fn restore(self) -> AsyncWasmCallState {
            let thread_head = self.state;
            // Skip the panicking `Drop`; this token is now consumed.
            core::mem::forget(self);
            let mut ret = AsyncWasmCallState::new();
            loop {
                let ptr = raw::get();
                if ptr == thread_head {
                    unsafe {
                        // Swap the store-context registers out for the
                        // outermost saved activation.
                        if let Some(state) = ret.state.as_ref() {
                            state.swap();
                        }
                    }
                    break ret;
                }
                unsafe {
                    // Move the current head into the saved list, re-linking
                    // its `prev` pointer to the previously-saved entry.
                    (*ptr).pop();
                    if let Some(state) = ret.state.as_ref() {
                        (*ptr).prev.set(state);
                    }
                }
                ret.state = ptr;
            }
        }
    }
    #[cfg(feature = "async")]
    impl Drop for PreviousAsyncWasmCallState {
        fn drop(&mut self) {
            panic!("must be consumed with `restore`");
        }
    }
    /// Installs `state` as the TLS head for the duration of `closure`,
    /// popping it afterwards even on unwind (via the `Reset` drop guard).
    #[inline]
    pub fn set<R>(state: &mut CallThreadState, closure: impl FnOnce(&CallThreadState) -> R) -> R {
        struct Reset<'a> {
            state: &'a CallThreadState,
        }
        impl Drop for Reset<'_> {
            #[inline]
            fn drop(&mut self) {
                unsafe {
                    self.state.pop();
                }
            }
        }
        unsafe {
            state.push();
            let reset = Reset { state };
            closure(reset.state)
        }
    }
    /// Invokes `closure` with the current activation-list head, or `None`
    /// if no wasm is active on this thread.
    pub fn with<R>(closure: impl FnOnce(Option<&CallThreadState>) -> R) -> R {
        let p = raw::get();
        unsafe { closure(if p.is_null() { None } else { Some(&*p) }) }
    }
}