use core::ptr::NonNull;
use std::io;
use std::ops::Range;
use std::ptr;
use crate::runtime::vm::stack_switching::VMHostArray;
use crate::runtime::vm::{VMContext, VMFuncRef, ValRaw};
/// Records how the memory backing a `VMContinuationStack` was obtained,
/// which determines whether `Drop` must release it.
#[derive(Debug, PartialEq, Eq)]
pub enum Allocator {
    /// Allocated by `VMContinuationStack::new` via `mmap`; unmapped on drop.
    Mmap,
    /// Memory provided externally (`unallocated` / `from_raw_parts`);
    /// never freed by this type — ownership stays with the provider.
    Custom,
}
/// A stack that a continuation runs on, plus the metadata needed to
/// release it when dropped.
///
/// `#[repr(C)]` keeps the field layout fixed — NOTE(review): presumably the
/// generated code / assembly reads these fields at fixed offsets; confirm
/// against the architecture-specific module before reordering fields.
#[derive(Debug)]
#[repr(C)]
pub struct VMContinuationStack {
    // One past the highest addressable byte of the stack; `new` sets this to
    // mapping base + length, and `initialize` writes slots downward from it.
    top: *mut u8,
    // Byte length of the region ending at `top` (so the base is `top - len`).
    // For `Mmap` stacks this includes the leading guard page.
    len: usize,
    // How the memory was obtained; decides whether `Drop` unmaps it.
    allocator: Allocator,
}
impl VMContinuationStack {
    /// Allocates a fresh stack of at least `size` usable bytes via `mmap`.
    ///
    /// `size` is rounded up to a whole number of pages (a zero request yields
    /// one page). One extra page is mapped at the low end and left with no
    /// access permissions, acting as a guard page that faults on overflow.
    ///
    /// # Errors
    /// Propagates the OS error if `mmap` or `mprotect` fails.
    pub fn new(size: usize) -> io::Result<Self> {
        let page_size = rustix::param::page_size();
        // Round the request up to whole pages; treat 0 as "one page".
        let size = if size == 0 {
            page_size
        } else {
            size.next_multiple_of(page_size)
        };
        unsafe {
            // Reserve the usable stack plus one guard page, initially with no
            // access permissions at all (ProtFlags::empty()).
            let mmap_len = size + page_size;
            let mmap = rustix::mm::mmap_anonymous(
                ptr::null_mut(),
                mmap_len,
                rustix::mm::ProtFlags::empty(),
                rustix::mm::MapFlags::PRIVATE,
            )?;
            // Make everything above the lowest (guard) page read/write; the
            // guard page itself stays PROT_NONE.
            rustix::mm::mprotect(
                mmap.cast::<u8>().add(page_size).cast(),
                size,
                rustix::mm::MprotectFlags::READ | rustix::mm::MprotectFlags::WRITE,
            )?;
            Ok(Self {
                // `top` is one past the end of the mapping; the stack is
                // filled downward from here.
                top: mmap.cast::<u8>().add(mmap_len),
                // `len` covers the whole mapping, guard page included,
                // matching the `munmap(top - len, len)` call in `Drop`.
                len: mmap_len,
                allocator: Allocator::Mmap,
            })
        }
    }
    /// Creates a placeholder with no backing memory (null `top`, zero `len`).
    /// Dropping it frees nothing (`Allocator::Custom`).
    pub fn unallocated() -> Self {
        Self {
            top: std::ptr::null_mut(),
            len: 0,
            allocator: Allocator::Custom,
        }
    }
    /// Returns `true` if this object currently has no backing memory.
    pub fn is_unallocated(&self) -> bool {
        // Invariant: zero length and a null top pointer go together.
        debug_assert_eq!(self.len == 0, self.top == std::ptr::null_mut());
        self.len == 0
    }
    /// Wraps caller-provided stack memory spanning `base .. base + len`.
    ///
    /// The result is `Allocator::Custom`, so `Drop` will NOT free the memory;
    /// ownership remains with the caller. `_guard_size` is currently unused.
    ///
    /// # Safety
    /// `base` must point to a readable/writable region of at least `len`
    /// bytes that outlives the returned value.
    pub unsafe fn from_raw_parts(
        base: *mut u8,
        _guard_size: usize,
        len: usize,
    ) -> io::Result<Self> {
        Ok(Self {
            // Store one past the end of the provided region, mirroring `new`.
            top: unsafe { base.add(len) },
            len,
            allocator: Allocator::Custom,
        })
    }
    /// Returns `true` if the memory was provided externally rather than
    /// mmap-allocated by `new`.
    pub fn is_from_raw_parts(&self) -> bool {
        self.allocator == Allocator::Custom
    }
    /// The top-of-stack pointer (one past the highest byte).
    ///
    /// Always `Some`; note the contained pointer is null for an unallocated
    /// stack.
    pub fn top(&self) -> Option<*mut u8> {
        Some(self.top)
    }
    /// The address range of the whole backing region (guard page included for
    /// mmap stacks).
    ///
    /// NOTE(review): for an unallocated stack this yields `Some(0..0)` rather
    /// than `None` — confirm callers tolerate that.
    pub fn range(&self) -> Option<Range<usize>> {
        let base = unsafe { self.top.sub(self.len).addr() };
        Some(base..base + self.len)
    }
    /// Reads the saved instruction pointer from the control context: the
    /// topmost stack slot at `top - 0x08` (written by `initialize`).
    pub fn control_context_instruction_pointer(&self) -> usize {
        unsafe {
            let ptr = self.top.sub(8).cast::<usize>();
            *ptr
        }
    }
    /// Reads the saved frame pointer from the slot at `top - 0x10`.
    pub fn control_context_frame_pointer(&self) -> usize {
        unsafe {
            let ptr = self.top.sub(16).cast::<usize>();
            *ptr
        }
    }
    /// Reads the saved stack pointer from the slot at `top - 0x18`.
    pub fn control_context_stack_pointer(&self) -> usize {
        unsafe {
            let ptr = self.top.sub(24).cast::<usize>();
            *ptr
        }
    }
    /// Lays out the initial contents of the stack so that switching to it
    /// starts executing `func_ref`.
    ///
    /// Slots written, as offsets below `top` (`s` = size of the ValRaw
    /// buffer in bytes):
    ///   -0x08          resume address (`wasmtime_continuation_start_address`)
    ///   -0x10          initial frame pointer (points at its own slot)
    ///   -0x18          initial stack pointer (= `top - 0x40 - s`, the lowest
    ///                  slot written here)
    ///   -0x20          argument capacity
    ///   -0x20-s..-0x20 in-stack buffer for `args_capacity` ValRaw values
    ///   -(0x28+s)      `func_ref`
    ///   -(0x30+s)      `caller_vmctx`
    ///   -(0x38+s)      `args`
    ///   -(0x40+s)      `return_value_count`
    ///
    /// The first three slots form the "control context" read back by the
    /// `control_context_*` accessors. The remaining values are presumably
    /// consumed by the architecture-specific start trampoline and forwarded
    /// to `fiber_start` — confirm against the x86_64 assembly.
    ///
    /// Also points `*args` at the in-stack buffer and sets its capacity to
    /// `max(parameter_count, return_value_count)`; the array must be empty
    /// (zero capacity and length) on entry.
    pub fn initialize(
        &self,
        func_ref: *const VMFuncRef,
        caller_vmctx: *mut VMContext,
        args: *mut VMHostArray<ValRaw>,
        parameter_count: u32,
        return_value_count: u32,
    ) {
        let tos = self.top;
        unsafe {
            // Writes `value` into the usize-sized slot `tos_neg_offset` bytes
            // below the top of stack.
            let store = |tos_neg_offset, value| {
                let target = tos.sub(tos_neg_offset).cast::<usize>();
                target.write(value)
            };
            let args_ref = &mut *args;
            // The same buffer is reused for parameters and return values, so
            // it must hold whichever count is larger.
            let args_capacity = std::cmp::max(parameter_count, return_value_count);
            debug_assert_eq!(args_ref.capacity, 0);
            debug_assert_eq!(args_ref.length, 0);
            let args_data_size =
                usize::try_from(args_capacity).unwrap() * std::mem::size_of::<ValRaw>();
            // The ValRaw buffer lives on the continuation stack itself,
            // directly below the control context and capacity slots.
            let args_data_ptr = if args_capacity == 0 {
                ptr::null_mut()
            } else {
                tos.sub(0x20 + args_data_size)
            };
            args_ref.capacity = args_capacity;
            args_ref.data = args_data_ptr.cast::<ValRaw>();
            let to_store = [
                // Control context: where execution resumes ...
                (0x08, wasmtime_continuation_start_address().addr()),
                // ... the initial frame pointer (self-referential slot) ...
                (0x10, tos.sub(0x10).addr()),
                // ... and the initial stack pointer (lowest slot written).
                (0x18, tos.sub(0x40 + args_data_size).addr()),
                (0x20, usize::try_from(args_capacity).unwrap()),
                // Values for the start trampoline, below the ValRaw buffer.
                (0x28 + args_data_size, func_ref.addr()),
                (0x30 + args_data_size, caller_vmctx.addr()),
                (0x38 + args_data_size, args.addr()),
                (
                    0x40 + args_data_size,
                    usize::try_from(return_value_count).unwrap(),
                ),
            ];
            for (offset, data) in to_store {
                store(offset, data);
            }
        }
    }
}
impl Drop for VMContinuationStack {
    /// Releases the stack memory, but only if this object owns it.
    fn drop(&mut self) {
        // Custom (externally provided) stacks are owned by whoever supplied
        // them; only mmap-allocated stacks are unmapped here.
        if self.allocator == Allocator::Mmap {
            // SAFETY: for `Mmap` stacks, `top` points one past the end of a
            // `len`-byte mapping created in `new`, so `top - len` is exactly
            // the address returned by `mmap_anonymous`.
            unsafe {
                let base = self.top.sub(self.len);
                let ret = rustix::mm::munmap(base as _, self.len);
                debug_assert!(ret.is_ok());
            }
        }
    }
}
/// Entry point that runs on a freshly initialized continuation stack.
///
/// Invokes `func_ref` as an array-call function with the buffer described by
/// `*args` as its parameter/return area, then records in `args.length` how
/// many return values that buffer now holds.
///
/// # Safety
/// `func_ref` must be non-null and all pointers must be valid; when
/// `args.capacity` is non-zero, `args.data` must point to at least that many
/// `ValRaw` slots.
unsafe extern "C" fn fiber_start(
    func_ref: *mut VMFuncRef,
    caller_vmctx: *mut VMContext,
    args: *mut VMHostArray<ValRaw>,
    return_value_count: u32,
) {
    unsafe {
        let callee = NonNull::new(func_ref).unwrap();
        let vmctx = NonNull::new_unchecked(caller_vmctx);
        let host_array = &mut *args;
        // This conversion is infallible on 32/64-bit targets (u32 -> usize).
        let capacity = usize::try_from(host_array.capacity).unwrap();
        // An empty capacity means there is no backing buffer at all, so hand
        // the callee a dangling-but-valid empty slice instead.
        let params_and_returns: NonNull<[ValRaw]> = match capacity {
            0 => NonNull::from(&[]),
            n => std::slice::from_raw_parts_mut(host_array.data, n).into(),
        };
        VMFuncRef::array_call(callee, None, vmctx, params_and_returns);
        // The callee wrote its results into the buffer; publish the count.
        host_array.length = return_value_count;
    }
}
// Pull in the architecture-specific stack-switching support (the module that
// provides `wasmtime_continuation_start_address`, among others). Only x86-64
// is implemented; building for any other CPU is a hard compile error so the
// feature cannot be silently miscompiled.
cfg_if::cfg_if! {
    if #[cfg(target_arch = "x86_64")] {
        mod x86_64;
        use x86_64::*;
    } else {
        compile_error!("the stack switching feature is not supported on this CPU architecture");
    }
}