use crate::{
FuncEnv, Result,
abi::{ABIOperand, ABISig, RetArea, vmctx},
codegen::{BuiltinFunction, BuiltinType, Callee, CodeGenContext, CodeGenError, Emission},
ensure,
masm::{
CalleeKind, ContextArgs, IntScratch, MacroAssembler, MemMoveDirection, OperandSize,
SPOffset, VMContextLoc,
},
reg::{Reg, writable},
stack::Val,
};
use wasmtime_environ::{DefinedFuncIndex, FuncIndex, PtrSize, VMOffsets};
/// Zero-sized orchestrator for emitting a function call: lowering the
/// callee, assigning ABI arguments, emitting the call instruction and
/// cleaning up the machine and value stacks afterwards. All functionality
/// lives in associated functions; no state is carried.
#[derive(Copy, Clone)]
pub(crate) struct FnCall {}
impl FnCall {
    /// Orchestrates the emission of a function call:
    ///
    /// 1. Lowers the [`Callee`] to a ([`CalleeKind`], [`ContextArgs`]) pair,
    ///    resolving how the target is invoked and which `vmctx` values must
    ///    be passed as the hidden leading arguments.
    /// 2. Spills the value stack so no live register values are lost across
    ///    the call.
    /// 3. Reserves stack space for the return area, if the callee returns
    ///    results on the stack.
    /// 4. Emits the call, assigning the arguments inside the `masm.call`
    ///    callback.
    /// 5. Cleans up the stack space and registers used by the call.
    pub fn emit<M: MacroAssembler>(
        env: &mut FuncEnv<M::Ptr>,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
        callee: Callee,
    ) -> Result<()> {
        // Lowering happens before the spill: it may allocate registers and
        // mutate the value stack (e.g. popping a funcref pointer), which must
        // be reflected before values are spilled.
        let (kind, callee_context) = Self::lower(env, context.vmoffsets, &callee, context, masm)?;

        let sig = env.callee_sig::<M::ABI>(&callee)?;
        context.spill(masm)?;
        let ret_area = Self::make_ret_area(&sig, masm)?;
        let arg_stack_space = sig.params_stack_size();
        let reserved_stack = masm.call(arg_stack_space, |masm| {
            Self::assign(sig, &callee_context, ret_area.as_ref(), context, masm)?;
            Ok((kind, sig.call_conv))
        })?;

        Self::cleanup(
            sig,
            &callee_context,
            &kind,
            reserved_stack,
            ret_area,
            masm,
            context,
        )
    }

    /// Reserves stack space for the callee's stack results, if any.
    ///
    /// Returns `Some(RetArea)` addressed by the SP offset at the *end* of the
    /// reserved region when the signature has stack results, `None` otherwise.
    fn make_ret_area<M: MacroAssembler>(
        callee_sig: &ABISig,
        masm: &mut M,
    ) -> Result<Option<RetArea>> {
        if callee_sig.has_stack_results() {
            let base = masm.sp_offset()?.as_u32();
            let end = base + callee_sig.results_stack_size();
            // Only reserve when the results occupy a non-zero number of
            // bytes; `end == base` means a zero-sized area.
            if end > base {
                masm.reserve_stack(end - base)?;
            }
            Ok(Some(RetArea::sp(SPOffset::from_u32(end))))
        } else {
            Ok(None)
        }
    }

    /// Lowers the high-level [`Callee`] into a [`CalleeKind`] (direct or
    /// indirect target of the call instruction) plus the [`ContextArgs`]
    /// describing which `vmctx` values are passed as leading arguments.
    fn lower<M: MacroAssembler>(
        env: &mut FuncEnv<M::Ptr>,
        vmoffsets: &VMOffsets<u8>,
        callee: &Callee,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
    ) -> Result<(CalleeKind, ContextArgs)> {
        let ptr = vmoffsets.ptr.size();
        match callee {
            Callee::Builtin(b) => Ok(Self::lower_builtin(env, b, None)),
            Callee::BuiltinWithDifferentVmctx(b, offset) => {
                Ok(Self::lower_builtin(env, b, Some(*offset)))
            }
            Callee::FuncRef(_) => {
                Self::lower_funcref(env.callee_sig::<M::ABI>(callee)?, ptr, context, masm)
            }
            Callee::Local(i) => {
                // A `Local` callee must resolve to a function defined in this
                // module; failure here indicates a compiler invariant
                // violation, hence the unwrap.
                let f = env.translation.module.defined_func_index(*i).unwrap();
                Ok(Self::lower_local(env, f))
            }
            Callee::Import(i) => {
                let sig = env.callee_sig::<M::ABI>(callee)?;
                Self::lower_import(*i, sig, context, masm, vmoffsets)
            }
        }
    }

    /// Lowers a builtin function: a direct call by name, receiving either the
    /// pinned `vmctx` or a `vmctx` located at an offset from the pinned one.
    fn lower_builtin<P: PtrSize>(
        env: &mut FuncEnv<P>,
        builtin: &BuiltinFunction,
        vmctx_offset: Option<u32>,
    ) -> (CalleeKind, ContextArgs) {
        match builtin.ty() {
            BuiltinType::Builtin(idx) => (
                CalleeKind::direct(env.name_builtin(idx)),
                match vmctx_offset {
                    Some(offset) => ContextArgs::offset_from_pinned_vmctx(offset),
                    None => ContextArgs::pinned_vmctx(),
                },
            ),
        }
    }

    /// Lowers a locally defined Wasm function: a direct call where the pinned
    /// `vmctx` serves as both the callee and caller context.
    fn lower_local<P: PtrSize>(
        env: &mut FuncEnv<P>,
        index: DefinedFuncIndex,
    ) -> (CalleeKind, ContextArgs) {
        (
            CalleeKind::direct(env.name_wasm(index)),
            ContextArgs::pinned_callee_and_caller_vmctx(),
        )
    }

    /// Lowers a function import: loads the callee's `vmctx` pointer and its
    /// body pointer out of the function-import entry in the current `vmctx`,
    /// producing an indirect call.
    fn lower_import<M: MacroAssembler, P: PtrSize>(
        index: FuncIndex,
        sig: &ABISig,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
        vmoffsets: &VMOffsets<P>,
    ) -> Result<(CalleeKind, ContextArgs)> {
        // Allocate two general-purpose destination registers while excluding
        // the ABI argument registers (`sig.regs`), so the loads below cannot
        // clobber argument values assigned later.
        let (callee, callee_vmctx) =
            context.without::<Result<(Reg, Reg)>, M, _>(&sig.regs, masm, |context, masm| {
                Ok((context.any_gpr(masm)?, context.any_gpr(masm)?))
            })??;
        // Load the callee's vmctx from the import entry.
        let callee_vmctx_offset = vmoffsets.vmctx_vmfunction_import_vmctx(index);
        let callee_vmctx_addr = masm.address_at_vmctx(callee_vmctx_offset)?;
        masm.load_ptr(callee_vmctx_addr, writable!(callee_vmctx))?;
        // Load the callee's wasm-call body pointer from the import entry.
        let callee_body_offset = vmoffsets.vmctx_vmfunction_import_wasm_call(index);
        let callee_addr = masm.address_at_vmctx(callee_body_offset)?;
        masm.load_ptr(callee_addr, writable!(callee))?;

        Ok((
            CalleeKind::indirect(callee),
            ContextArgs::with_callee_and_pinned_caller(callee_vmctx),
        ))
    }

    /// Lowers an indirect call through a `funcref`: pops the funcref pointer
    /// from the value stack and loads the callee `vmctx` and `wasm_call` body
    /// pointer out of the function reference it points to.
    fn lower_funcref<M: MacroAssembler>(
        sig: &ABISig,
        ptr: impl PtrSize,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
    ) -> Result<(CalleeKind, ContextArgs)> {
        // Pop the funcref pointer and allocate two destination registers,
        // excluding the ABI argument registers so argument values assigned
        // later are not clobbered.
        let (funcref_ptr, funcref, callee_vmctx) = context
            .without::<Result<(Reg, Reg, Reg)>, M, _>(&sig.regs, masm, |cx, masm| {
                Ok((
                    cx.pop_to_reg(masm, None)?.into(),
                    cx.any_gpr(masm)?,
                    cx.any_gpr(masm)?,
                ))
            })??;

        // Load the callee context (vmctx) out of the funcref.
        masm.load_ptr(
            masm.address_at_reg(funcref_ptr, ptr.vm_func_ref_vmctx().into())?,
            writable!(callee_vmctx),
        )?;
        // Load the function body pointer out of the funcref.
        masm.load_ptr(
            masm.address_at_reg(funcref_ptr, ptr.vm_func_ref_wasm_call().into())?,
            writable!(funcref),
        )?;
        // The funcref pointer register is no longer needed once both fields
        // have been loaded.
        context.free_reg(funcref_ptr);
        Ok((
            CalleeKind::indirect(funcref),
            ContextArgs::with_callee_and_pinned_caller(callee_vmctx),
        ))
    }

    /// Materializes the `vmctx` context arguments into the first
    /// `context.len()` ABI parameter locations of the callee's signature.
    fn assign_context_args<M: MacroAssembler>(
        sig: &ABISig,
        context: &ContextArgs,
        masm: &mut M,
    ) -> Result<()> {
        // The signature must have at least as many leading parameters as
        // there are context arguments.
        ensure!(
            sig.params().len() >= context.len(),
            CodeGenError::vmcontext_arg_expected(),
        );
        for (context_arg, operand) in context
            .as_slice()
            .iter()
            .zip(sig.params_without_retptr().iter().take(context.len()))
        {
            match (context_arg, operand) {
                // Pinned vmctx into a register parameter.
                (VMContextLoc::Pinned, ABIOperand::Reg { ty, reg, .. }) => {
                    masm.mov(writable!(*reg), vmctx!(M).into(), (*ty).try_into()?)?;
                }
                // Pinned vmctx into a stack parameter slot.
                (VMContextLoc::Pinned, ABIOperand::Stack { ty, offset, .. }) => {
                    let addr = masm.address_at_sp(SPOffset::from_u32(*offset))?;
                    masm.store(vmctx!(M).into(), addr, (*ty).try_into()?)?;
                }
                // vmctx loaded from an offset off the pinned vmctx into a
                // register parameter.
                (VMContextLoc::OffsetFromPinned(offset), ABIOperand::Reg { ty, reg, .. }) => {
                    let addr = masm.address_at_vmctx(*offset)?;
                    masm.load(addr, writable!(*reg), (*ty).try_into()?)?;
                }
                // Loading a vmctx from an offset directly into a stack slot
                // is not supported.
                (VMContextLoc::OffsetFromPinned(_), ABIOperand::Stack { .. }) => {
                    crate::bail!("unimplemented load from vmctx into stack");
                }
                // vmctx held in a register into a register parameter.
                (VMContextLoc::Reg(src), ABIOperand::Reg { ty, reg, .. }) => {
                    masm.mov(writable!(*reg), (*src).into(), (*ty).try_into()?)?;
                }
                // vmctx held in a register into a stack parameter slot.
                (VMContextLoc::Reg(src), ABIOperand::Stack { ty, offset, .. }) => {
                    let addr = masm.address_at_sp(SPOffset::from_u32(*offset))?;
                    masm.store((*src).into(), addr, (*ty).try_into()?)?;
                }
            }
        }
        Ok(())
    }

    /// Assigns the arguments of the call: first the context (`vmctx`)
    /// arguments, then the Wasm-level arguments taken from the value stack,
    /// and finally the return-area pointer when the callee returns results on
    /// the stack.
    fn assign<M: MacroAssembler>(
        sig: &ABISig,
        callee_context: &ContextArgs,
        ret_area: Option<&RetArea>,
        context: &mut CodeGenContext<Emission>,
        masm: &mut M,
    ) -> Result<()> {
        let arg_count = sig.params.len_without_retptr();
        debug_assert!(arg_count >= callee_context.len());
        let stack = &context.stack;
        // The top `arg_count - context.len()` entries of the value stack are
        // the Wasm-level arguments; the context arguments are synthesized and
        // never live on the value stack.
        let stack_values = stack.peekn(arg_count - callee_context.len());

        if callee_context.len() > 0 {
            Self::assign_context_args(&sig, &callee_context, masm)?;
        }

        // Skip the leading context parameters; pair the remaining ABI
        // operands with the peeked value-stack entries.
        for (arg, val) in sig
            .params_without_retptr()
            .iter()
            .skip(callee_context.len())
            .zip(stack_values)
        {
            match arg {
                &ABIOperand::Reg { reg, .. } => {
                    context.move_val_to_reg(&val, reg, masm)?;
                }
                &ABIOperand::Stack { ty, offset, .. } => {
                    let addr = masm.address_at_sp(SPOffset::from_u32(offset))?;
                    let size: OperandSize = ty.try_into()?;
                    // Stage the value through a type-appropriate scratch
                    // register, then store it into the argument slot.
                    masm.with_scratch_for(ty, |masm, scratch| {
                        context.move_val_to_reg(val, scratch.inner(), masm)?;
                        masm.store(scratch.inner().into(), addr, size)
                    })?;
                }
            }
        }

        // If the callee has stack results, pass the address of the reserved
        // return area through the results-area operand.
        if sig.has_stack_results() {
            let operand = sig.params.unwrap_results_area_operand();
            let base = ret_area.unwrap().unwrap_sp();
            let addr = masm.address_from_sp(base)?;

            match operand {
                &ABIOperand::Reg { ty, reg, .. } => {
                    masm.compute_addr(addr, writable!(reg), ty.try_into()?)?;
                }
                &ABIOperand::Stack { ty, offset, .. } => {
                    let slot = masm.address_at_sp(SPOffset::from_u32(offset))?;
                    // Compute the area's address into an integer scratch
                    // register, then store it into the stack slot.
                    masm.with_scratch::<IntScratch, _>(|masm, scratch| {
                        masm.compute_addr(addr, scratch.writable(), ty.try_into()?)?;
                        masm.store(scratch.inner().into(), slot, ty.try_into()?)
                    })?;
                }
            }
        }
        Ok(())
    }

    /// Cleans up after the emitted call: frees the registers used to lower
    /// the callee, releases the reserved argument stack space, drops the
    /// consumed argument values from the value stack, relocates the return
    /// area when argument memory below it is freed, and finally pushes the
    /// ABI results onto the value stack.
    fn cleanup<M: MacroAssembler>(
        sig: &ABISig,
        callee_context: &ContextArgs,
        callee_kind: &CalleeKind,
        reserved_space: u32,
        ret_area: Option<RetArea>,
        masm: &mut M,
        context: &mut CodeGenContext<Emission>,
    ) -> Result<()> {
        // Free the register holding the indirect call target, if any.
        match callee_kind {
            CalleeKind::Indirect(r) => context.free_reg(*r),
            _ => {}
        }

        // Free any registers used as part of the context arguments.
        for loc in callee_context.as_slice() {
            match loc {
                VMContextLoc::Reg(r) => context.free_reg(*r),
                _ => {}
            }
        }
        // Release the space reserved by `masm.call` for stack arguments (and
        // any alignment padding), which was allocated last.
        masm.free_stack(reserved_space)?;

        ensure!(
            sig.params.len_without_retptr() >= callee_context.len(),
            CodeGenError::vmcontext_arg_expected()
        );

        // Drop the Wasm argument values from the value stack. After the
        // earlier spill they must be either memory or constant entries;
        // accumulate the stack bytes occupied by the memory entries.
        let mut stack_consumed = 0;
        context.drop_last(
            sig.params.len_without_retptr() - callee_context.len(),
            |_regalloc, v| {
                ensure!(
                    v.is_mem() || v.is_const(),
                    CodeGenError::unexpected_value_in_value_stack()
                );
                if let Val::Memory(mem) = v {
                    stack_consumed += mem.slot.size;
                }
                Ok(())
            },
        )?;

        if let Some(ret_area) = ret_area {
            if stack_consumed > 0 {
                // The return area sits above the spilled argument memory that
                // is about to be freed; move the result bytes down by
                // `stack_consumed` so they remain at the top of the stack
                // after the free below.
                let sp = ret_area.unwrap_sp();
                let result_bytes = sig.results_stack_size();
                ensure!(
                    sp.as_u32() >= stack_consumed + result_bytes,
                    CodeGenError::invalid_sp_offset(),
                );
                let dst = SPOffset::from_u32(sp.as_u32() - stack_consumed);
                masm.memmove(sp, dst, result_bytes, MemMoveDirection::LowToHigh)?;
            }
        };

        // Free the stack bytes that were occupied by the dropped argument
        // values.
        masm.free_stack(stack_consumed)?;

        // Recompute where the return area now lives so the results pushed
        // below reference the correct location.
        let mut calculated_ret_area = None;

        if let Some(area) = ret_area {
            if stack_consumed > 0 {
                // The results were moved above; the area now ends at the
                // current stack pointer offset.
                calculated_ret_area = Some(RetArea::sp(masm.sp_offset()?));
            } else {
                // Nothing moved: the original area must already coincide with
                // the current stack pointer offset.
                ensure!(
                    area.unwrap_sp() == masm.sp_offset()?,
                    CodeGenError::invalid_sp_offset()
                );
                calculated_ret_area = Some(area);
            }
        }

        // Push the ABI results onto the value stack, using the recomputed
        // return area for stack results.
        context.push_abi_results(&sig.results, masm, |_, _, _| calculated_ret_area)?;
        // Reload the vmctx pointer after the call (NOTE(review): presumably
        // because the callee may have clobbered the pinned register — confirm
        // against `load_vmctx`'s definition).
        context.load_vmctx(masm)
    }
}