use crate::{
abi::{scratch, vmctx, ABIOperand, ABISig, RetArea},
codegen::{BuiltinFunction, BuiltinType, Callee, CodeGenContext},
masm::{
CalleeKind, ContextArgs, MacroAssembler, MemMoveDirection, OperandSize, SPOffset,
VMContextLoc,
},
reg::Reg,
stack::Val,
FuncEnv,
};
use wasmtime_environ::{FuncIndex, PtrSize, VMOffsets};
/// Stateless helper that emits a complete function-call sequence:
/// lowering the callee into a concrete call target, spilling live values,
/// assigning arguments to their ABI locations, emitting the call itself,
/// and restoring the machine/value-stack state afterwards.
#[derive(Copy, Clone)]
pub(crate) struct FnCall {}

impl FnCall {
    /// Emit a call to `callee`.
    ///
    /// Steps, in order:
    /// 1. Lower the callee into a `CalleeKind` (direct/indirect/libcall) plus
    ///    the vmctx arguments (`ContextArgs`) it requires.
    /// 2. Spill all live values so the value stack is memory/const only.
    /// 3. Reserve a stack return area if the signature has stack results.
    /// 4. Reserve argument stack space, assign arguments, and emit the call.
    /// 5. Clean up: free registers and stack space, and push the results.
    pub fn emit<M: MacroAssembler>(
        env: &mut FuncEnv<M::Ptr>,
        masm: &mut M,
        context: &mut CodeGenContext,
        callee: Callee,
    ) {
        // Lowering may itself allocate registers and emit loads (e.g. for
        // imports and funcrefs), so it must happen before the spill below
        // frees/pins any registers it claimed.
        let (kind, callee_context) = Self::lower(env, context.vmoffsets, &callee, context, masm);
        let sig = env.callee_sig::<M::ABI>(&callee);
        // Spill so every value on the shadow stack lives in memory or is a
        // constant; argument assignment below reads them from there.
        context.spill(masm);
        let ret_area = Self::make_ret_area(&sig, masm);
        let arg_stack_space = sig.params_stack_size();
        // `masm.call` reserves the argument stack space, runs the closure to
        // place the arguments, and returns how much stack it reserved so
        // `cleanup` can free exactly that amount.
        let reserved_stack = masm.call(arg_stack_space, |masm| {
            Self::assign(sig, &callee_context, ret_area.as_ref(), context, masm);
            kind
        });
        Self::cleanup(
            sig,
            &callee_context,
            &kind,
            reserved_stack,
            ret_area,
            masm,
            context,
        );
    }

    /// If the callee returns results on the stack, reserve space for them and
    /// return the resulting return area; otherwise `None`.
    ///
    /// The returned `RetArea` records the SP offset at the *end* of the
    /// reserved region (i.e. after `reserve_stack`).
    fn make_ret_area<M: MacroAssembler>(callee_sig: &ABISig, masm: &mut M) -> Option<RetArea> {
        callee_sig.has_stack_results().then(|| {
            let base = masm.sp_offset().as_u32();
            let end = base + callee_sig.results_stack_size();
            // Only adjust SP when the results actually occupy space.
            if end > base {
                masm.reserve_stack(end - base);
            }
            RetArea::sp(SPOffset::from_u32(end))
        })
    }

    /// Dispatch on the callee flavor and produce the concrete call target
    /// plus the vmctx arguments that must be passed to it.
    fn lower<M: MacroAssembler>(
        env: &mut FuncEnv<M::Ptr>,
        vmoffsets: &VMOffsets<u8>,
        callee: &Callee,
        context: &mut CodeGenContext,
        masm: &mut M,
    ) -> (CalleeKind, ContextArgs) {
        let ptr = vmoffsets.ptr.size();
        match callee {
            Callee::Builtin(b) => Self::lower_builtin(env, b),
            Callee::FuncRef(_) => {
                Self::lower_funcref(env.callee_sig::<M::ABI>(callee), ptr, context, masm)
            }
            Callee::Local(i) => Self::lower_local(env, *i),
            Callee::Import(i) => {
                let sig = env.callee_sig::<M::ABI>(callee);
                Self::lower_import(*i, sig, context, masm, vmoffsets)
            }
        }
    }

    /// Lower a builtin: runtime builtins are direct calls that take the
    /// pinned vmctx; libcalls take no vmctx arguments at all.
    fn lower_builtin<P: PtrSize>(
        env: &mut FuncEnv<P>,
        builtin: &BuiltinFunction,
    ) -> (CalleeKind, ContextArgs) {
        match builtin.ty() {
            BuiltinType::Builtin(idx) => (
                CalleeKind::direct(env.name_builtin(idx)),
                ContextArgs::pinned_vmctx(),
            ),
            BuiltinType::LibCall(c) => (CalleeKind::libcall(c), ContextArgs::none()),
        }
    }

    /// Lower a locally-defined function: a direct call where both the callee
    /// and caller vmctx are the current (pinned) vmctx.
    fn lower_local<P: PtrSize>(
        env: &mut FuncEnv<P>,
        index: FuncIndex,
    ) -> (CalleeKind, ContextArgs) {
        (
            CalleeKind::direct(env.name_wasm(index)),
            ContextArgs::pinned_callee_and_caller_vmctx(),
        )
    }

    /// Lower an imported function: load the callee's vmctx and code pointer
    /// from the function-import entry in the current vmctx, then call
    /// indirectly through the loaded code pointer.
    fn lower_import<M: MacroAssembler, P: PtrSize>(
        index: FuncIndex,
        sig: &ABISig,
        context: &mut CodeGenContext,
        masm: &mut M,
        vmoffsets: &VMOffsets<P>,
    ) -> (CalleeKind, ContextArgs) {
        // Allocate two scratch GPRs, excluding the registers already claimed
        // by the signature so argument assignment can't clobber them.
        let (callee, callee_vmctx) =
            context.without::<(Reg, Reg), M, _>(&sig.regs, masm, |context, masm| {
                (context.any_gpr(masm), context.any_gpr(masm))
            });
        let callee_vmctx_offset = vmoffsets.vmctx_vmfunction_import_vmctx(index);
        let callee_vmctx_addr = masm.address_at_vmctx(callee_vmctx_offset);
        masm.load_ptr(callee_vmctx_addr, callee_vmctx);
        let callee_body_offset = vmoffsets.vmctx_vmfunction_import_wasm_call(index);
        let callee_addr = masm.address_at_vmctx(callee_body_offset);
        masm.load_ptr(callee_addr, callee);
        (
            CalleeKind::indirect(callee),
            // Callee vmctx comes from the import; caller vmctx is pinned.
            ContextArgs::with_callee_and_pinned_caller(callee_vmctx),
        )
    }

    /// Lower a `call_indirect`-style funcref: pop the funcref pointer from
    /// the value stack, then load the callee vmctx and code pointer out of
    /// the funcref's fields.
    fn lower_funcref<M: MacroAssembler>(
        sig: &ABISig,
        ptr: impl PtrSize,
        context: &mut CodeGenContext,
        masm: &mut M,
    ) -> (CalleeKind, ContextArgs) {
        // Pop the funcref pointer and allocate two GPRs for the loaded code
        // pointer and vmctx, avoiding the signature's registers.
        let (funcref_ptr, funcref, callee_vmctx) =
            context.without::<_, M, _>(&sig.regs, masm, |cx, masm| {
                (
                    cx.pop_to_reg(masm, None).into(),
                    cx.any_gpr(masm),
                    cx.any_gpr(masm),
                )
            });
        masm.load_ptr(
            masm.address_at_reg(funcref_ptr, ptr.vm_func_ref_vmctx().into()),
            callee_vmctx,
        );
        masm.load_ptr(
            masm.address_at_reg(funcref_ptr, ptr.vm_func_ref_wasm_call().into()),
            funcref,
        );
        // The funcref pointer is no longer needed once both fields are
        // loaded; release it back to the register allocator.
        context.free_reg(funcref_ptr);
        (
            CalleeKind::indirect(funcref),
            ContextArgs::with_callee_and_pinned_caller(callee_vmctx),
        )
    }

    /// Move the vmctx arguments into the first ABI parameter slots of the
    /// callee signature. Each context arg is either the pinned vmctx register
    /// or a register produced during lowering.
    fn assign_context_args<M: MacroAssembler>(sig: &ABISig, context: &ContextArgs, masm: &mut M) {
        debug_assert!(sig.params().len() >= context.len());
        for (context_arg, operand) in context
            .as_slice()
            .iter()
            .zip(sig.params_without_retptr().iter().take(context.len()))
        {
            match (context_arg, operand) {
                (VMContextLoc::Pinned, ABIOperand::Reg { ty, reg, .. }) => {
                    masm.mov(vmctx!(M).into(), *reg, (*ty).into());
                }
                (VMContextLoc::Pinned, ABIOperand::Stack { ty, offset, .. }) => {
                    let addr = masm.address_at_sp(SPOffset::from_u32(*offset));
                    masm.store(vmctx!(M).into(), addr, (*ty).into());
                }
                (VMContextLoc::Reg(src), ABIOperand::Reg { ty, reg, .. }) => {
                    masm.mov((*src).into(), *reg, (*ty).into());
                }
                (VMContextLoc::Reg(src), ABIOperand::Stack { ty, offset, .. }) => {
                    let addr = masm.address_at_sp(SPOffset::from_u32(*offset));
                    masm.store((*src).into(), addr, (*ty).into());
                }
            }
        }
    }

    /// Place all call arguments into their ABI locations: first the vmctx
    /// context args, then the wasm-level values from the shadow stack, and
    /// finally the return-area pointer if the signature has stack results.
    fn assign<M: MacroAssembler>(
        sig: &ABISig,
        callee_context: &ContextArgs,
        ret_area: Option<&RetArea>,
        context: &mut CodeGenContext,
        masm: &mut M,
    ) {
        let arg_count = sig.params.len_without_retptr();
        debug_assert!(arg_count >= callee_context.len());
        let stack = &context.stack;
        // The topmost N stack values are the wasm-level arguments; the
        // remaining leading ABI slots are filled by the context args.
        let stack_values = stack.peekn(arg_count - callee_context.len());
        if callee_context.len() > 0 {
            Self::assign_context_args(&sig, &callee_context, masm);
        }
        for (arg, val) in sig
            .params_without_retptr()
            .iter()
            .skip(callee_context.len())
            .zip(stack_values)
        {
            match arg {
                &ABIOperand::Reg { reg, .. } => {
                    context.move_val_to_reg(&val, reg, masm);
                }
                &ABIOperand::Stack { ty, offset, .. } => {
                    // Stack-bound argument: stage through a scratch register.
                    let addr = masm.address_at_sp(SPOffset::from_u32(offset));
                    let size: OperandSize = ty.into();
                    let scratch = scratch!(M, &ty);
                    context.move_val_to_reg(val, scratch, masm);
                    masm.store(scratch.into(), addr, size);
                }
            }
        }
        if sig.has_stack_results() {
            // Pass the address of the reserved return area, either in the
            // designated register or spilled to its stack slot.
            let operand = sig.params.unwrap_results_area_operand();
            let base = ret_area.unwrap().unwrap_sp();
            let addr = masm.address_from_sp(base);
            match operand {
                &ABIOperand::Reg { ty, reg, .. } => {
                    masm.load_addr(addr, reg, ty.into());
                }
                &ABIOperand::Stack { ty, offset, .. } => {
                    let slot = masm.address_at_sp(SPOffset::from_u32(offset));
                    let scratch = scratch!(M);
                    masm.load_addr(addr, scratch, ty.into());
                    masm.store(scratch.into(), slot, ty.into());
                }
            }
        }
    }

    /// Post-call bookkeeping: release registers claimed during lowering,
    /// free the argument stack space, pop the consumed arguments from the
    /// shadow stack, relocate stack results if needed, push the ABI results,
    /// and reload the pinned vmctx (the callee may have clobbered it).
    fn cleanup<M: MacroAssembler>(
        sig: &ABISig,
        callee_context: &ContextArgs,
        callee_kind: &CalleeKind,
        reserved_space: u32,
        ret_area: Option<RetArea>,
        masm: &mut M,
        context: &mut CodeGenContext,
    ) {
        // Free the register holding the indirect call target, if any.
        match callee_kind {
            CalleeKind::Indirect(r) => context.free_reg(*r),
            _ => {}
        }
        // Free any register-based vmctx args allocated during lowering.
        for loc in callee_context.as_slice() {
            match loc {
                VMContextLoc::Reg(r) => context.free_reg(*r),
                _ => {}
            }
        }
        // Release the argument stack space reserved by `masm.call`.
        masm.free_stack(reserved_space);
        debug_assert!(sig.params.len_without_retptr() >= callee_context.len());
        // Pop the wasm-level arguments from the shadow stack, tallying how
        // many bytes of spilled stack memory they occupied.
        let mut stack_consumed = 0;
        context.drop_last(
            sig.params.len_without_retptr() - callee_context.len(),
            |_regalloc, v| {
                // Post-spill, every argument value is either in memory or a
                // constant.
                debug_assert!(v.is_mem() || v.is_const());
                if let Val::Memory(mem) = v {
                    stack_consumed += mem.slot.size;
                }
            },
        );
        if let Some(ret_area) = ret_area {
            if stack_consumed > 0 {
                // The consumed argument slots sit between the current SP and
                // the return area; move the results over so freeing those
                // slots below doesn't strand them.
                let sp = ret_area.unwrap_sp();
                let result_bytes = sig.results_stack_size();
                debug_assert!(sp.as_u32() >= stack_consumed + result_bytes);
                let dst = SPOffset::from_u32(sp.as_u32() - stack_consumed);
                masm.memmove(sp, dst, result_bytes, MemMoveDirection::LowToHigh);
            }
        };
        // Free the stack memory previously held by the argument values.
        masm.free_stack(stack_consumed);
        // Recompute the return area: it moved iff results were shifted above.
        let ret_area = ret_area.map(|area| {
            if stack_consumed > 0 {
                RetArea::sp(masm.sp_offset())
            } else {
                // No relocation: the area must still end exactly at SP.
                debug_assert_eq!(area.unwrap_sp(), masm.sp_offset());
                area
            }
        });
        context.push_abi_results(&sig.results, masm, |_, _, _| ret_area);
        // The callee may run with a different vmctx; restore ours.
        context.load_vmctx(masm);
    }
}