1use crate::TRAP_INTERNAL_ASSERT;
2use crate::debug::DwarfSectionRelocTarget;
3use crate::func_environ::FuncEnvironment;
4use crate::translate::FuncTranslator;
5use crate::{BuiltinFunctionSignatures, builder::LinkOptions, wasm_call_signature};
6use crate::{CompiledFunction, ModuleTextBuilder, array_call_signature};
7use anyhow::{Context as _, Result};
8use cranelift_codegen::binemit::CodeOffset;
9use cranelift_codegen::ir::condcodes::IntCC;
10use cranelift_codegen::ir::{self, InstBuilder, MemFlags, UserExternalName, UserFuncName, Value};
11use cranelift_codegen::isa::{
12 OwnedTargetIsa, TargetIsa,
13 unwind::{UnwindInfo, UnwindInfoKind},
14};
15use cranelift_codegen::print_errors::pretty_error;
16use cranelift_codegen::{CompiledCode, Context};
17use cranelift_entity::PrimaryMap;
18use cranelift_frontend::FunctionBuilder;
19use object::write::{Object, StandardSegment, SymbolId};
20use object::{RelocationEncoding, RelocationFlags, RelocationKind, SectionKind};
21use std::any::Any;
22use std::cmp;
23use std::collections::HashMap;
24use std::mem;
25use std::ops::Range;
26use std::path;
27use std::sync::{Arc, Mutex};
28use wasmparser::{FuncValidatorAllocations, FunctionBody};
29use wasmtime_environ::{
30 AddressMapSection, BuiltinFunctionIndex, CacheStore, CompileError, CompiledFunctionBody,
31 DefinedFuncIndex, FlagValue, FunctionBodyData, FunctionLoc, HostCall, ModuleTranslation,
32 ModuleTypesBuilder, PtrSize, RelocationTarget, StackMapSection, StaticModuleIndex,
33 TrapEncodingBuilder, TrapSentinel, TripleExt, Tunables, VMOffsets, WasmFuncType, WasmValType,
34};
35
36#[cfg(feature = "component-model")]
37mod component;
38
/// Bookkeeping for Cranelift's incremental-compilation cache, attached to a
/// pooled `CompilerContext` when a cache store is configured.
struct IncrementalCacheContext {
    #[cfg(feature = "incremental-cache")]
    cache_store: Arc<dyn CacheStore>,
    // Number of compilations served from the cache.
    num_hits: usize,
    // Number of compilations that missed the cache and were newly stored.
    num_cached: usize,
}
45
/// Reusable per-compilation state. Instances are pooled in
/// `Compiler::contexts` so that allocations (translator state, codegen
/// context, validator allocations) are reused across function compilations.
struct CompilerContext {
    func_translator: FuncTranslator,
    codegen_context: Context,
    // `Some` only when an incremental cache store is configured.
    incremental_cache_ctx: Option<IncrementalCacheContext>,
    validator_allocations: FuncValidatorAllocations,
}
52
53impl Default for CompilerContext {
54 fn default() -> Self {
55 Self {
56 func_translator: FuncTranslator::new(),
57 codegen_context: Context::new(),
58 incremental_cache_ctx: None,
59 validator_allocations: Default::default(),
60 }
61 }
62}
63
/// A Cranelift-based compiler for WebAssembly functions and trampolines.
pub struct Compiler {
    tunables: Tunables,
    // Pool of reusable compilation contexts; popped by `function_compiler`
    // and pushed back when a compilation finishes.
    contexts: Mutex<Vec<CompilerContext>>,
    isa: OwnedTargetIsa,
    linkopts: LinkOptions,
    // Optional backing store for the incremental-compilation cache.
    cache_store: Option<Arc<dyn CacheStore>>,
    // When set, the CLIF for each compiled function is dumped into this
    // directory.
    clif_dir: Option<path::PathBuf>,
    #[cfg(feature = "wmemcheck")]
    pub(crate) wmemcheck: bool,
}
76
77impl Drop for Compiler {
78 fn drop(&mut self) {
79 if self.cache_store.is_none() {
80 return;
81 }
82
83 let mut num_hits = 0;
84 let mut num_cached = 0;
85 for ctx in self.contexts.lock().unwrap().iter() {
86 if let Some(ref cache_ctx) = ctx.incremental_cache_ctx {
87 num_hits += cache_ctx.num_hits;
88 num_cached += cache_ctx.num_cached;
89 }
90 }
91
92 let total = num_hits + num_cached;
93 if num_hits + num_cached > 0 {
94 log::trace!(
95 "Incremental compilation cache stats: {}/{} = {}% (hits/lookup)\ncached: {}",
96 num_hits,
97 total,
98 (num_hits as f32) / (total as f32) * 100.0,
99 num_cached
100 );
101 }
102 }
103}
104
105impl Compiler {
106 pub fn new(
107 tunables: Tunables,
108 isa: OwnedTargetIsa,
109 cache_store: Option<Arc<dyn CacheStore>>,
110 linkopts: LinkOptions,
111 clif_dir: Option<path::PathBuf>,
112 wmemcheck: bool,
113 ) -> Compiler {
114 let _ = wmemcheck;
115 Compiler {
116 contexts: Default::default(),
117 tunables,
118 isa,
119 linkopts,
120 cache_store,
121 clif_dir,
122 #[cfg(feature = "wmemcheck")]
123 wmemcheck,
124 }
125 }
126
127 fn call_indirect_host(
136 &self,
137 builder: &mut FunctionBuilder<'_>,
138 hostcall: impl Into<HostCall>,
139 sig: ir::SigRef,
140 addr: Value,
141 args: &[Value],
142 ) -> ir::Inst {
143 let signature = &builder.func.dfg.signatures[sig];
144
145 assert_eq!(signature.call_conv, self.isa.default_call_conv());
149
150 if self.isa.triple().is_pulley() {
156 let mut new_signature = signature.clone();
157 new_signature
158 .params
159 .insert(0, ir::AbiParam::new(self.isa.pointer_type()));
160 let new_sig = builder.func.import_signature(new_signature);
161 let name = ir::ExternalName::User(builder.func.declare_imported_user_function(
162 ir::UserExternalName {
163 namespace: crate::NS_PULLEY_HOSTCALL,
164 index: hostcall.into().index(),
165 },
166 ));
167 let func = builder.func.import_function(ir::ExtFuncData {
168 name,
169 signature: new_sig,
170 colocated: false,
173 });
174 let mut raw_args = vec![addr];
175 raw_args.extend_from_slice(args);
176 return builder.ins().call(func, &raw_args);
177 }
178
179 builder.ins().call_indirect(sig, addr, args)
180 }
181}
182
183impl wasmtime_environ::Compiler for Compiler {
    /// Compiles the wasm-defined function `func_index` of `translation`
    /// into machine code, returning the compiled body.
    fn compile_function(
        &self,
        translation: &ModuleTranslation<'_>,
        func_index: DefinedFuncIndex,
        input: FunctionBodyData<'_>,
        types: &ModuleTypesBuilder,
    ) -> Result<CompiledFunctionBody, CompileError> {
        let isa = &*self.isa;
        let module = &translation.module;
        let func_index = module.func_index(func_index);
        let sig = translation.module.functions[func_index]
            .signature
            .unwrap_module_type_index();
        let wasm_func_ty = types[sig].unwrap_func();

        let mut compiler = self.function_compiler();

        let context = &mut compiler.cx.codegen_context;
        context.func.signature = wasm_call_signature(isa, wasm_func_ty, &self.tunables);
        context.func.name = UserFuncName::User(UserExternalName {
            namespace: crate::NS_WASM_FUNC,
            index: func_index.as_u32(),
        });

        if self.tunables.generate_native_debuginfo {
            context.func.collect_debug_info();
        }

        let mut func_env = FuncEnvironment::new(self, translation, types, wasm_func_ty);

        // On native targets, set up a stack-limit check against the store's
        // stack limit, loaded through the vmctx. This is skipped on Pulley,
        // which handles stack limits in the interpreter itself.
        if !isa.triple().is_pulley() {
            let vmctx = context
                .func
                .create_global_value(ir::GlobalValueData::VMContext);
            // vmctx -> *const VMStoreContext (readonly: never changes for
            // the duration of the function).
            let interrupts_ptr = context.func.create_global_value(ir::GlobalValueData::Load {
                base: vmctx,
                offset: i32::from(func_env.offsets.ptr.vmctx_store_context()).into(),
                global_type: isa.pointer_type(),
                flags: MemFlags::trusted().with_readonly(),
            });
            let stack_limit = context.func.create_global_value(ir::GlobalValueData::Load {
                base: interrupts_ptr,
                offset: i32::from(func_env.offsets.ptr.vmstore_context_stack_limit()).into(),
                global_type: isa.pointer_type(),
                flags: MemFlags::trusted(),
            });
            // With signals-based traps, Cranelift's built-in stack-limit
            // mechanism is used; otherwise explicit checks are emitted
            // during translation.
            if self.tunables.signals_based_traps {
                context.func.stack_limit = Some(stack_limit);
            } else {
                func_env.stack_limit_at_function_entry = Some(stack_limit);
            }
        }
        let FunctionBodyData { validator, body } = input;
        // Reuse pooled validator allocations for this function's validation.
        let mut validator =
            validator.into_validator(mem::take(&mut compiler.cx.validator_allocations));
        compiler.cx.func_translator.translate_body(
            &mut validator,
            body.clone(),
            &mut context.func,
            &mut func_env,
        )?;

        let func = compiler.finish_with_info(
            Some((&body, &self.tunables)),
            &format!("wasm_func_{}", func_index.as_u32()),
        )?;

        let timing = cranelift_codegen::timing::take_current();
        log::debug!("{:?} translated in {:?}", func_index, timing.total());
        log::trace!("{:?} timing info\n{}", func_index, timing);

        Ok(CompiledFunctionBody {
            code: Box::new(func),
            needs_gc_heap: func_env.needs_gc_heap(),
        })
    }
292
    /// Compiles a trampoline with the array-call ABI that invokes the
    /// wasm-defined function `def_func_index` via the wasm-call ABI,
    /// reloading arguments from and spilling results back into the caller's
    /// value array.
    fn compile_array_to_wasm_trampoline(
        &self,
        translation: &ModuleTranslation<'_>,
        types: &ModuleTypesBuilder,
        def_func_index: DefinedFuncIndex,
    ) -> Result<CompiledFunctionBody, CompileError> {
        let func_index = translation.module.func_index(def_func_index);
        let sig = translation.module.functions[func_index]
            .signature
            .unwrap_module_type_index();
        let wasm_func_ty = types[sig].unwrap_func();

        let isa = &*self.isa;
        let pointer_type = isa.pointer_type();
        let wasm_call_sig = wasm_call_signature(isa, wasm_func_ty, &self.tunables);
        let array_call_sig = array_call_signature(isa);

        let mut compiler = self.function_compiler();
        let func = ir::Function::with_name_signature(Default::default(), array_call_sig);
        let (mut builder, block0) = compiler.builder(func);

        // Array-call ABI parameters: callee vmctx, caller vmctx, pointer to
        // the value array, and its length.
        let (vmctx, caller_vmctx, values_vec_ptr, values_vec_len) = {
            let params = builder.func.dfg.block_params(block0);
            (params[0], params[1], params[2], params[3])
        };

        // Load the wasm arguments out of the value array and prepend the
        // two vmctx arguments required by the wasm-call ABI.
        let mut args = self.load_values_from_array(
            wasm_func_ty.params(),
            &mut builder,
            values_vec_ptr,
            values_vec_len,
        );
        args.insert(0, caller_vmctx);
        args.insert(0, vmctx);

        debug_assert_vmctx_kind(isa, &mut builder, vmctx, wasmtime_environ::VMCONTEXT_MAGIC);
        // Record this trampoline's frame pointer as the "last wasm entry
        // fp" so stack walking can delimit the wasm frames being entered.
        let offsets = VMOffsets::new(isa.pointer_bytes(), &translation.module);
        let vm_store_context_offset = offsets.ptr.vmctx_store_context();
        save_last_wasm_entry_fp(
            &mut builder,
            pointer_type,
            &offsets.ptr,
            vm_store_context_offset.into(),
            vmctx,
        );

        let call = declare_and_call(&mut builder, wasm_call_sig, func_index.as_u32(), &args);
        let results = builder.func.dfg.inst_results(call).to_vec();

        // Spill the wasm results back into the same value array.
        self.store_values_to_array(
            &mut builder,
            wasm_func_ty.returns(),
            &results,
            values_vec_ptr,
            values_vec_len,
        );

        // The array-call ABI returns a "succeeded" flag; a wasm callee that
        // returned normally did not trap, so return `1`.
        let true_return = builder.ins().iconst(ir::types::I8, 1);
        builder.ins().return_(&[true_return]);
        builder.finalize();

        Ok(CompiledFunctionBody {
            code: Box::new(compiler.finish(&format!("array_to_wasm_{}", func_index.as_u32(),))?),
            needs_gc_heap: false,
        })
    }
370
    /// Compiles a trampoline with the wasm-call ABI for `wasm_func_ty` that
    /// invokes a host function via the array-call ABI, spilling wasm
    /// arguments into a stack-allocated value array and reloading the
    /// results from it.
    fn compile_wasm_to_array_trampoline(
        &self,
        wasm_func_ty: &WasmFuncType,
    ) -> Result<CompiledFunctionBody, CompileError> {
        let isa = &*self.isa;
        let pointer_type = isa.pointer_type();
        let wasm_call_sig = wasm_call_signature(isa, wasm_func_ty, &self.tunables);
        let array_call_sig = array_call_signature(isa);

        let mut compiler = self.function_compiler();
        let func = ir::Function::with_name_signature(Default::default(), wasm_call_sig);
        let (mut builder, block0) = compiler.builder(func);

        let args = builder.func.dfg.block_params(block0).to_vec();
        let callee_vmctx = args[0];
        let caller_vmctx = args[1];

        debug_assert_vmctx_kind(
            isa,
            &mut builder,
            caller_vmctx,
            wasmtime_environ::VMCONTEXT_MAGIC,
        );
        // Record the exit fp/pc in the store context so the host can walk
        // the wasm stack being exited.
        let ptr = isa.pointer_bytes();
        let vm_store_context = builder.ins().load(
            pointer_type,
            MemFlags::trusted(),
            caller_vmctx,
            i32::from(ptr.vmcontext_store_context()),
        );
        save_last_wasm_exit_fp_and_pc(&mut builder, pointer_type, &ptr, vm_store_context);

        // Spill the wasm arguments (skipping the two leading vmctx params)
        // into a stack-allocated array per the array-call ABI.
        let (args_base, args_len) =
            self.allocate_stack_array_and_spill_args(wasm_func_ty, &mut builder, &args[2..]);
        let args_len = builder.ins().iconst(pointer_type, i64::from(args_len));

        // Load the host function pointer out of the callee's array-call
        // host-function context.
        let ptr_size = isa.pointer_bytes();
        let callee = builder.ins().load(
            pointer_type,
            MemFlags::trusted(),
            callee_vmctx,
            ptr_size.vmarray_call_host_func_context_func_ref() + ptr_size.vm_func_ref_array_call(),
        );

        let callee_signature = builder.func.import_signature(array_call_sig);
        let call = self.call_indirect_host(
            &mut builder,
            HostCall::ArrayCall,
            callee_signature,
            callee,
            &[callee_vmctx, caller_vmctx, args_base, args_len],
        );
        // The host returns a "succeeded" flag; raise a trap on failure.
        let succeeded = builder.func.dfg.inst_results(call)[0];
        self.raise_if_host_trapped(&mut builder, caller_vmctx, succeeded);
        let results =
            self.load_values_from_array(wasm_func_ty.returns(), &mut builder, args_base, args_len);
        builder.ins().return_(&results);
        builder.finalize();

        Ok(CompiledFunctionBody {
            code: Box::new(compiler.finish(&format!("wasm_to_array_trampoline_{wasm_func_ty}"))?),
            needs_gc_heap: false,
        })
    }
443
444 fn append_code(
445 &self,
446 obj: &mut Object<'static>,
447 funcs: &[(String, Box<dyn Any + Send>)],
448 resolve_reloc: &dyn Fn(usize, RelocationTarget) -> usize,
449 ) -> Result<Vec<(SymbolId, FunctionLoc)>> {
450 let mut builder =
451 ModuleTextBuilder::new(obj, self, self.isa.text_section_builder(funcs.len()));
452 if self.linkopts.force_jump_veneers {
453 builder.force_veneers();
454 }
455 let mut addrs = AddressMapSection::default();
456 let mut traps = TrapEncodingBuilder::default();
457 let mut stack_maps = StackMapSection::default();
458
459 let mut ret = Vec::with_capacity(funcs.len());
460 for (i, (sym, func)) in funcs.iter().enumerate() {
461 let func = func.downcast_ref::<CompiledFunction>().unwrap();
462 let (sym, range) = builder.append_func(&sym, func, |idx| resolve_reloc(i, idx));
463 if self.tunables.generate_address_map {
464 let addr = func.address_map();
465 addrs.push(range.clone(), &addr.instructions);
466 }
467 clif_to_env_stack_maps(
468 &mut stack_maps,
469 range.clone(),
470 func.buffer.user_stack_maps(),
471 );
472 traps.push(range.clone(), &func.traps().collect::<Vec<_>>());
473 builder.append_padding(self.linkopts.padding_between_functions);
474 let info = FunctionLoc {
475 start: u32::try_from(range.start).unwrap(),
476 length: u32::try_from(range.end - range.start).unwrap(),
477 };
478 ret.push((sym, info));
479 }
480
481 builder.finish();
482
483 if self.tunables.generate_address_map {
484 addrs.append_to(obj);
485 }
486 stack_maps.append_to(obj);
487 traps.append_to(obj);
488
489 Ok(ret)
490 }
491
    /// Returns the target triple this compiler emits code for.
    fn triple(&self) -> &target_lexicon::Triple {
        self.isa.triple()
    }
495
    /// Returns the shared Cranelift settings, converted to Wasmtime's
    /// serializable `FlagValue` representation.
    fn flags(&self) -> Vec<(&'static str, FlagValue<'static>)> {
        crate::clif_flags_to_wasmtime(self.isa.flags().iter())
    }
499
    /// Returns the ISA-specific settings, converted to Wasmtime's
    /// serializable `FlagValue` representation.
    fn isa_flags(&self) -> Vec<(&'static str, FlagValue<'static>)> {
        crate::clif_flags_to_wasmtime(self.isa.isa_flags())
    }
503
    /// Whether the underlying ISA has branch protection enabled.
    fn is_branch_protection_enabled(&self) -> bool {
        self.isa.is_branch_protection_enabled()
    }
507
    #[cfg(feature = "component-model")]
    // This compiler also implements component compilation itself.
    fn component_compiler(&self) -> &dyn wasmtime_environ::component::ComponentCompiler {
        self
    }
512
    /// Emits DWARF debug sections for all compiled modules into `obj`,
    /// adding relocations from those sections to function symbols and to
    /// other debug sections.
    fn append_dwarf<'a>(
        &self,
        obj: &mut Object<'_>,
        translations: &'a PrimaryMap<StaticModuleIndex, ModuleTranslation<'a>>,
        get_func: &'a dyn Fn(
            StaticModuleIndex,
            DefinedFuncIndex,
        ) -> (SymbolId, &'a (dyn Any + Send)),
        dwarf_package_bytes: Option<&'a [u8]>,
        tunables: &'a Tunables,
    ) -> Result<()> {
        // Adapt `get_func` to hand back the function metadata consumed by
        // the debug crate.
        let get_func = move |m, f| {
            let (sym, any) = get_func(m, f);
            (
                sym,
                any.downcast_ref::<CompiledFunction>().unwrap().metadata(),
            )
        };
        let mut compilation = crate::debug::Compilation::new(
            &*self.isa,
            translations,
            &get_func,
            dwarf_package_bytes,
            tunables,
        );
        let dwarf_sections = crate::debug::emit_dwarf(&*self.isa, &mut compilation)
            .with_context(|| "failed to emit DWARF debug information")?;

        let (debug_bodies, debug_relocs): (Vec<_>, Vec<_>) = dwarf_sections
            .iter()
            .map(|s| ((s.name, &s.body), (s.name, &s.relocs)))
            .unzip();
        // First add all section bodies so that relocations emitted below
        // can refer to any section by id.
        let mut dwarf_sections_ids = HashMap::new();
        for (name, body) in debug_bodies {
            let segment = obj.segment_name(StandardSegment::Debug).to_vec();
            let section_id = obj.add_section(segment, name.as_bytes().to_vec(), SectionKind::Debug);
            dwarf_sections_ids.insert(name, section_id);
            obj.append_section_data(section_id, &body, 1);
        }

        // Then emit relocations, which target either compiled-function
        // symbols or other debug sections.
        for (name, relocs) in debug_relocs {
            let section_id = *dwarf_sections_ids.get(name).unwrap();
            for reloc in relocs {
                let target_symbol = match reloc.target {
                    DwarfSectionRelocTarget::Func(id) => compilation.symbol_id(id),
                    DwarfSectionRelocTarget::Section(name) => {
                        obj.section_symbol(dwarf_sections_ids[name])
                    }
                };
                obj.add_relocation(
                    section_id,
                    object::write::Relocation {
                        offset: u64::from(reloc.offset),
                        symbol: target_symbol,
                        addend: i64::from(reloc.addend),
                        flags: RelocationFlags::Generic {
                            // `reloc.size` appears to be in bytes while
                            // `Generic` expects bits, hence the `<< 3`.
                            size: reloc.size << 3,
                            kind: RelocationKind::Absolute,
                            encoding: RelocationEncoding::Generic,
                        },
                    },
                )?;
            }
        }

        Ok(())
    }
581
    /// Returns the System V CIE for this ISA, if it supports one.
    fn create_systemv_cie(&self) -> Option<gimli::write::CommonInformationEntry> {
        self.isa.create_systemv_cie()
    }
585
    /// Compiles a wasm-call-ABI wrapper around builtin `index`: it saves
    /// the exit fp/pc, invokes the builtin through the vmctx's builtin
    /// function array, and translates the builtin's sentinel-encoded
    /// failure value (if any) into a raised trap.
    fn compile_wasm_to_builtin(
        &self,
        index: BuiltinFunctionIndex,
    ) -> Result<CompiledFunctionBody, CompileError> {
        let isa = &*self.isa;
        let ptr_size = isa.pointer_bytes();
        let pointer_type = isa.pointer_type();
        let sigs = BuiltinFunctionSignatures::new(self);
        let wasm_sig = sigs.wasm_signature(index);
        let host_sig = sigs.host_signature(index);

        let mut compiler = self.function_compiler();
        let func = ir::Function::with_name_signature(Default::default(), wasm_sig.clone());
        let (mut builder, block0) = compiler.builder(func);
        let vmctx = builder.block_params(block0)[0];

        // Record the exit fp/pc in the store context before transferring
        // control to the host builtin.
        debug_assert_vmctx_kind(isa, &mut builder, vmctx, wasmtime_environ::VMCONTEXT_MAGIC);
        let vm_store_context = builder.ins().load(
            pointer_type,
            MemFlags::trusted(),
            vmctx,
            ptr_size.vmcontext_store_context(),
        );
        save_last_wasm_exit_fp_and_pc(&mut builder, pointer_type, &ptr_size, vm_store_context);

        // Forward all wasm-level arguments to the host builtin unchanged.
        let args = builder.block_params(block0).to_vec();
        let call = self.call_builtin(&mut builder, vmctx, &args, index, host_sig);
        let results = builder.func.dfg.inst_results(call).to_vec();

        // Builtins signal traps in-band through a sentinel encoded in their
        // first result; decode it and raise the pending trap if indicated.
        match index.trap_sentinel() {
            // A zero/false result means the builtin trapped.
            Some(TrapSentinel::Falsy) => {
                self.raise_if_host_trapped(&mut builder, vmctx, results[0]);
            }
            // A result of -2 is the trap sentinel.
            Some(TrapSentinel::NegativeTwo) => {
                let ty = builder.func.dfg.value_type(results[0]);
                let trapped = builder.ins().iconst(ty, -2);
                let succeeded = builder.ins().icmp(IntCC::NotEqual, results[0], trapped);
                self.raise_if_host_trapped(&mut builder, vmctx, succeeded);
            }
            // Any negative result indicates a trap.
            Some(TrapSentinel::Negative) => {
                let ty = builder.func.dfg.value_type(results[0]);
                let zero = builder.ins().iconst(ty, 0);
                let succeeded =
                    builder
                        .ins()
                        .icmp(IntCC::SignedGreaterThanOrEqual, results[0], zero);
                self.raise_if_host_trapped(&mut builder, vmctx, succeeded);
            }
            // A result of -1 is the trap sentinel.
            Some(TrapSentinel::NegativeOne) => {
                let ty = builder.func.dfg.value_type(results[0]);
                let minus_one = builder.ins().iconst(ty, -1);
                let succeeded = builder.ins().icmp(IntCC::NotEqual, results[0], minus_one);
                self.raise_if_host_trapped(&mut builder, vmctx, succeeded);
            }
            // This builtin cannot trap.
            None => {}
        }

        builder.ins().return_(&results);
        builder.finalize();

        Ok(CompiledFunctionBody {
            code: Box::new(compiler.finish(&format!("wasm_to_builtin_{}", index.name()))?),
            needs_gc_heap: false,
        })
    }
663
664 fn compiled_function_relocation_targets<'a>(
665 &'a self,
666 func: &'a dyn Any,
667 ) -> Box<dyn Iterator<Item = RelocationTarget> + 'a> {
668 let func = func.downcast_ref::<CompiledFunction>().unwrap();
669 Box::new(func.relocations().map(|r| r.reloc_target))
670 }
671}
672
#[cfg(feature = "incremental-cache")]
mod incremental_cache {
    use super::*;

    /// Adapter exposing Wasmtime's `CacheStore` through Cranelift's
    /// key/value cache-store trait.
    struct CraneliftCacheStore(Arc<dyn CacheStore>);

    impl cranelift_codegen::incremental_cache::CacheKvStore for CraneliftCacheStore {
        fn get(&self, key: &[u8]) -> Option<std::borrow::Cow<[u8]>> {
            self.0.get(key)
        }

        fn insert(&mut self, key: &[u8], val: Vec<u8>) {
            self.0.insert(key, val);
        }
    }

    /// Compiles `context`, consulting the incremental cache when a cache
    /// context is provided and updating its hit/miss counters.
    pub(super) fn compile_maybe_cached<'a>(
        context: &'a mut Context,
        isa: &dyn TargetIsa,
        cache_ctx: Option<&mut IncrementalCacheContext>,
    ) -> Result<CompiledCode, CompileError> {
        // No cache configured: compile from scratch.
        let Some(cache_ctx) = cache_ctx else {
            return compile_uncached(context, isa);
        };

        let mut cache_store = CraneliftCacheStore(cache_ctx.cache_store.clone());
        let (_compiled_code, from_cache) = context
            .compile_with_cache(isa, &mut cache_store, &mut Default::default())
            .map_err(|error| CompileError::Codegen(pretty_error(&error.func, error.inner)))?;

        // Tally whether this compilation was served from the cache.
        if from_cache {
            cache_ctx.num_hits += 1;
        } else {
            cache_ctx.num_cached += 1;
        }

        Ok(context.take_compiled_code().unwrap())
    }
}
712
713#[cfg(feature = "incremental-cache")]
714use incremental_cache::*;
715
716#[cfg(not(feature = "incremental-cache"))]
717fn compile_maybe_cached<'a>(
718 context: &'a mut Context,
719 isa: &dyn TargetIsa,
720 _cache_ctx: Option<&mut IncrementalCacheContext>,
721) -> Result<CompiledCode, CompileError> {
722 compile_uncached(context, isa)
723}
724
725fn compile_uncached<'a>(
726 context: &'a mut Context,
727 isa: &dyn TargetIsa,
728) -> Result<CompiledCode, CompileError> {
729 context
730 .compile(isa, &mut Default::default())
731 .map_err(|error| CompileError::Codegen(pretty_error(&error.func, error.inner)))?;
732 Ok(context.take_compiled_code().unwrap())
733}
734
735impl Compiler {
    /// Allocates a stack slot large enough for `max(params, returns)`
    /// 16-byte value slots, spills `args` into it, and returns the slot's
    /// address plus its capacity in values (for use with the array-call
    /// ABI).
    fn allocate_stack_array_and_spill_args(
        &self,
        ty: &WasmFuncType,
        builder: &mut FunctionBuilder,
        args: &[ir::Value],
    ) -> (Value, u32) {
        let isa = &*self.isa;
        let pointer_type = isa.pointer_type();

        // Each value occupies a 16-byte slot regardless of its wasm type.
        let value_size = mem::size_of::<u128>();
        let values_vec_len = cmp::max(ty.params().len(), ty.returns().len());
        let values_vec_byte_size = u32::try_from(value_size * values_vec_len).unwrap();
        let values_vec_len = u32::try_from(values_vec_len).unwrap();

        let slot = builder.func.create_sized_stack_slot(ir::StackSlotData::new(
            ir::StackSlotKind::ExplicitSlot,
            values_vec_byte_size,
            // NOTE(review): presumably a log2 alignment (2^4 = 16 bytes,
            // matching the per-value slot size) — confirm against
            // `StackSlotData::new`.
            4,
        ));
        let values_vec_ptr = builder.ins().stack_addr(pointer_type, slot, 0);

        {
            let values_vec_len = builder
                .ins()
                .iconst(ir::types::I32, i64::from(values_vec_len));
            self.store_values_to_array(builder, ty.params(), args, values_vec_ptr, values_vec_len);
        }

        (values_vec_ptr, values_vec_len)
    }
776
777 fn store_values_to_array(
784 &self,
785 builder: &mut FunctionBuilder,
786 types: &[WasmValType],
787 values: &[Value],
788 values_vec_ptr: Value,
789 values_vec_capacity: Value,
790 ) {
791 debug_assert_eq!(types.len(), values.len());
792 debug_assert_enough_capacity_for_length(builder, types.len(), values_vec_capacity);
793
794 let flags = ir::MemFlags::new()
800 .with_notrap()
801 .with_endianness(ir::Endianness::Little);
802
803 let value_size = mem::size_of::<u128>();
804 for (i, val) in values.iter().copied().enumerate() {
805 crate::unbarriered_store_type_at_offset(
806 &mut builder.cursor(),
807 flags,
808 values_vec_ptr,
809 i32::try_from(i * value_size).unwrap(),
810 val,
811 );
812 }
813 }
814
815 fn load_values_from_array(
823 &self,
824 types: &[WasmValType],
825 builder: &mut FunctionBuilder,
826 values_vec_ptr: Value,
827 values_vec_capacity: Value,
828 ) -> Vec<ir::Value> {
829 let isa = &*self.isa;
830 let value_size = mem::size_of::<u128>();
831
832 debug_assert_enough_capacity_for_length(builder, types.len(), values_vec_capacity);
833
834 let flags = MemFlags::new()
837 .with_notrap()
838 .with_endianness(ir::Endianness::Little);
839
840 let mut results = Vec::new();
841 for (i, ty) in types.iter().enumerate() {
842 results.push(crate::unbarriered_load_type_at_offset(
843 isa,
844 &mut builder.cursor(),
845 *ty,
846 flags,
847 values_vec_ptr,
848 i32::try_from(i * value_size).unwrap(),
849 ));
850 }
851 results
852 }
853
    /// Pops a pooled `CompilerContext` (clearing its codegen state) or
    /// creates a fresh one, wiring up incremental-cache bookkeeping when a
    /// cache store is configured.
    fn function_compiler(&self) -> FunctionCompiler<'_> {
        let saved_context = self.contexts.lock().unwrap().pop();
        FunctionCompiler {
            compiler: self,
            cx: saved_context
                .map(|mut ctx| {
                    // Reused contexts keep their allocations but must start
                    // from an empty function.
                    ctx.codegen_context.clear();
                    ctx
                })
                .unwrap_or_else(|| CompilerContext {
                    #[cfg(feature = "incremental-cache")]
                    incremental_cache_ctx: self.cache_store.as_ref().map(|cache_store| {
                        IncrementalCacheContext {
                            cache_store: cache_store.clone(),
                            num_hits: 0,
                            num_cached: 0,
                        }
                    }),
                    ..Default::default()
                }),
        }
    }
876
    /// Emits a conditional trap-raise: if `succeeded` is zero, control
    /// jumps to a cold block that invokes the `raise` builtin; otherwise
    /// execution continues in the block the builder is left positioned in.
    ///
    /// The trailing `trap` after the `raise` call marks that path as
    /// unreachable — the builtin is not expected to return control here.
    pub fn raise_if_host_trapped(
        &self,
        builder: &mut FunctionBuilder<'_>,
        vmctx: ir::Value,
        succeeded: ir::Value,
    ) {
        let trapped_block = builder.create_block();
        let continuation_block = builder.create_block();
        // Keep the failure path out-of-line.
        builder.set_cold_block(trapped_block);
        builder
            .ins()
            .brif(succeeded, continuation_block, &[], trapped_block, &[]);

        // Both blocks have all their predecessors now.
        builder.seal_block(trapped_block);
        builder.seal_block(continuation_block);

        builder.switch_to_block(trapped_block);
        let sigs = BuiltinFunctionSignatures::new(self);
        let sig = sigs.host_signature(BuiltinFunctionIndex::raise());
        self.call_builtin(builder, vmctx, &[vmctx], BuiltinFunctionIndex::raise(), sig);
        builder.ins().trap(TRAP_INTERNAL_ASSERT);

        builder.switch_to_block(continuation_block);
    }
914
    /// Emits a call to builtin `builtin`: loads the builtin-function array
    /// pointer out of the vmctx, loads the array entry for `builtin`, and
    /// calls it as a host function with signature `sig`.
    fn call_builtin(
        &self,
        builder: &mut FunctionBuilder<'_>,
        vmctx: ir::Value,
        args: &[ir::Value],
        builtin: BuiltinFunctionIndex,
        sig: ir::Signature,
    ) -> ir::Inst {
        let isa = &*self.isa;
        let ptr_size = isa.pointer_bytes();
        let pointer_type = isa.pointer_type();

        // Both loads are marked trusted + readonly: the builtin array and
        // its entries are not modified during execution.
        let mem_flags = ir::MemFlags::trusted().with_readonly();
        let array_addr = builder.ins().load(
            pointer_type,
            mem_flags,
            vmctx,
            i32::from(ptr_size.vmcontext_builtin_functions()),
        );
        // Index into the array of function pointers.
        let body_offset = i32::try_from(builtin.index() * pointer_type.bytes()).unwrap();
        let func_addr = builder
            .ins()
            .load(pointer_type, mem_flags, array_addr, body_offset);

        let sig = builder.func.import_signature(sig);
        self.call_indirect_host(builder, builtin, sig, func_addr, args)
    }
947
    /// Returns the underlying target ISA for this compiler.
    pub fn isa(&self) -> &dyn TargetIsa {
        &*self.isa
    }
951
    /// Returns the tunables this compiler was configured with.
    pub fn tunables(&self) -> &Tunables {
        &self.tunables
    }
955}
956
/// In-flight compilation of a single function: borrows the parent
/// `Compiler` together with a pooled `CompilerContext`, which is returned
/// to the pool when compilation finishes.
struct FunctionCompiler<'a> {
    compiler: &'a Compiler,
    cx: CompilerContext,
}
961
962impl FunctionCompiler<'_> {
    /// Installs `func` into the codegen context and returns a builder
    /// positioned in a freshly created entry block whose parameters mirror
    /// the function's parameters.
    fn builder(&mut self, func: ir::Function) -> (FunctionBuilder<'_>, ir::Block) {
        self.cx.codegen_context.func = func;
        let mut builder = FunctionBuilder::new(
            &mut self.cx.codegen_context.func,
            self.cx.func_translator.context(),
        );

        // The entry block has no predecessors, so it can be sealed
        // immediately.
        let block0 = builder.create_block();
        builder.append_block_params_for_function_params(block0);
        builder.switch_to_block(block0);
        builder.seal_block(block0);
        (builder, block0)
    }
976
    /// Finishes compilation without wasm-source address-map information
    /// (used for trampolines, which have no wasm source).
    fn finish(self, clif_filename: &str) -> Result<CompiledFunction, CompileError> {
        self.finish_with_info(None, clif_filename)
    }
980
    /// Runs Cranelift compilation on the accumulated function, optionally
    /// dumps its CLIF, attaches metadata (address map, unwind info, debug
    /// info, stack slots), and returns the pooled context for reuse.
    ///
    /// `body_and_tunables` is `Some` for wasm-defined functions (enabling
    /// source address maps and native debug info) and `None` for
    /// trampolines.
    fn finish_with_info(
        mut self,
        body_and_tunables: Option<(&FunctionBody<'_>, &Tunables)>,
        clif_filename: &str,
    ) -> Result<CompiledFunction, CompileError> {
        let context = &mut self.cx.codegen_context;
        let isa = &*self.compiler.isa;

        let compilation_result =
            compile_maybe_cached(context, isa, self.cx.incremental_cache_ctx.as_mut());

        // Dump the CLIF before checking the compilation result so that
        // failed compilations still produce a dump for debugging.
        if let Some(path) = &self.compiler.clif_dir {
            use std::io::Write;

            let mut path = path.join(clif_filename);
            path.set_extension("clif");

            let mut output = std::fs::File::create(path).unwrap();
            write!(output, "{}", context.func.display()).unwrap();
        }

        let compiled_code = compilation_result?;

        // Wasm-defined functions get the ISA's preferred alignment;
        // trampolines (`None`) need no extra alignment.
        let preferred_alignment = if body_and_tunables.is_some() {
            self.compiler.isa.function_alignment().preferred
        } else {
            1
        };

        let alignment = compiled_code.buffer.alignment.max(preferred_alignment);
        let mut compiled_function = CompiledFunction::new(
            compiled_code.buffer.clone(),
            context.func.params.user_named_funcs().clone(),
            alignment,
        );

        // Record where this function's body lives in the original wasm
        // binary for address mapping.
        if let Some((body, tunables)) = body_and_tunables {
            let data = body.get_binary_reader();
            let offset = data.original_position();
            let len = data.bytes_remaining();
            compiled_function.set_address_map(
                offset.try_into().unwrap(),
                len.try_into().unwrap(),
                tunables.generate_address_map,
            );
        }

        if isa.flags().unwind_info() {
            let unwind = compiled_code
                .create_unwind_info(isa)
                .map_err(|error| CompileError::Codegen(pretty_error(&context.func, error)))?;

            if let Some(unwind_info) = unwind {
                compiled_function.set_unwind_info(unwind_info);
            }
        }

        if body_and_tunables
            .map(|(_, t)| t.generate_native_debuginfo)
            .unwrap_or(false)
        {
            compiled_function.set_value_labels_ranges(compiled_code.value_labels_ranges.clone());

            // Native debuginfo wants SystemV CFA-based unwind info; generate
            // it separately when the primary unwind info isn't already in
            // that format.
            if !matches!(
                compiled_function.metadata().unwind_info,
                Some(UnwindInfo::SystemV(_))
            ) {
                let cfa_unwind = compiled_code
                    .create_unwind_info_of_kind(isa, UnwindInfoKind::SystemV)
                    .map_err(|error| CompileError::Codegen(pretty_error(&context.func, error)))?;

                if let Some(UnwindInfo::SystemV(cfa_unwind_info)) = cfa_unwind {
                    compiled_function.set_cfa_unwind_info(cfa_unwind_info);
                }
            }
        }

        compiled_function
            .set_sized_stack_slots(std::mem::take(&mut context.func.sized_stack_slots));
        // Return the context to the pool for reuse by later compilations.
        self.compiler.contexts.lock().unwrap().push(self.cx);

        Ok(compiled_function)
    }
1073}
1074
1075fn clif_to_env_stack_maps(
1082 section: &mut StackMapSection,
1083 range: Range<u64>,
1084 clif_stack_maps: &[(CodeOffset, u32, ir::UserStackMap)],
1085) {
1086 for (offset, frame_size, stack_map) in clif_stack_maps {
1087 let mut frame_offsets = Vec::new();
1088 for (ty, frame_offset) in stack_map.entries() {
1089 assert_eq!(ty, ir::types::I32);
1090 frame_offsets.push(frame_offset);
1091 }
1092 let code_offset = range.start + u64::from(*offset);
1093 assert!(code_offset < range.end);
1094 section.push(code_offset, *frame_size, frame_offsets.into_iter());
1095 }
1096}
1097
1098fn declare_and_call(
1099 builder: &mut FunctionBuilder,
1100 signature: ir::Signature,
1101 func_index: u32,
1102 args: &[ir::Value],
1103) -> ir::Inst {
1104 let name = ir::ExternalName::User(builder.func.declare_imported_user_function(
1105 ir::UserExternalName {
1106 namespace: crate::NS_WASM_FUNC,
1107 index: func_index,
1108 },
1109 ));
1110 let signature = builder.func.import_signature(signature);
1111 let callee = builder.func.dfg.ext_funcs.push(ir::ExtFuncData {
1112 name,
1113 signature,
1114 colocated: true,
1115 });
1116 builder.ins().call(callee, &args)
1117}
1118
1119fn debug_assert_enough_capacity_for_length(
1120 builder: &mut FunctionBuilder,
1121 length: usize,
1122 capacity: ir::Value,
1123) {
1124 if cfg!(debug_assertions) {
1125 let enough_capacity = builder.ins().icmp_imm(
1126 ir::condcodes::IntCC::UnsignedGreaterThanOrEqual,
1127 capacity,
1128 ir::immediates::Imm64::new(length.try_into().unwrap()),
1129 );
1130 builder.ins().trapz(enough_capacity, TRAP_INTERNAL_ASSERT);
1131 }
1132}
1133
/// In debug builds, emits a runtime check that the 32-bit magic value at
/// offset 0 of `vmctx` equals `expected_vmctx_magic`, trapping with the
/// internal-assert code otherwise. Emits nothing in release builds.
fn debug_assert_vmctx_kind(
    isa: &dyn TargetIsa,
    builder: &mut FunctionBuilder,
    vmctx: ir::Value,
    expected_vmctx_magic: u32,
) {
    if cfg!(debug_assertions) {
        // The magic field lives at offset 0 and uses the target's native
        // endianness.
        let magic = builder.ins().load(
            ir::types::I32,
            MemFlags::trusted().with_endianness(isa.endianness()),
            vmctx,
            0,
        );
        let is_expected_vmctx = builder.ins().icmp_imm(
            ir::condcodes::IntCC::Equal,
            magic,
            i64::from(expected_vmctx_magic),
        );
        builder.ins().trapz(is_expected_vmctx, TRAP_INTERNAL_ASSERT);
    }
}
1155
/// Stores the current frame pointer into the store context's
/// "last wasm entry fp" slot, recording where wasm frames begin when the
/// host enters wasm.
fn save_last_wasm_entry_fp(
    builder: &mut FunctionBuilder,
    pointer_type: ir::Type,
    ptr_size: &impl PtrSize,
    vm_store_context_offset: u32,
    vmctx: Value,
) {
    // Load the store-context pointer out of the vmctx.
    let vm_store_context = builder.ins().load(
        pointer_type,
        MemFlags::trusted(),
        vmctx,
        i32::try_from(vm_store_context_offset).unwrap(),
    );

    let fp = builder.ins().get_frame_pointer(pointer_type);
    builder.ins().store(
        MemFlags::trusted(),
        fp,
        vm_store_context,
        ptr_size.vmstore_context_last_wasm_entry_fp(),
    );
}
1180
/// Records the exiting wasm frame's fp and pc into the store context
/// (`limits`) when transitioning from wasm to the host, so the host can
/// walk the wasm stack afterwards.
fn save_last_wasm_exit_fp_and_pc(
    builder: &mut FunctionBuilder,
    pointer_type: ir::Type,
    ptr: &impl PtrSize,
    limits: Value,
) {
    // The wasm caller's fp is the saved frame-pointer word at offset 0 of
    // this trampoline's frame (assumes frame pointers are maintained —
    // TODO(review): confirm for all supported targets).
    let trampoline_fp = builder.ins().get_frame_pointer(pointer_type);
    let wasm_fp = builder.ins().load(
        pointer_type,
        MemFlags::trusted(),
        trampoline_fp,
        0,
    );
    builder.ins().store(
        MemFlags::trusted(),
        wasm_fp,
        limits,
        ptr.vmstore_context_last_wasm_exit_fp(),
    );
    // The wasm pc is this trampoline's return address.
    let wasm_pc = builder.ins().get_return_address(pointer_type);
    builder.ins().store(
        MemFlags::trusted(),
        wasm_pc,
        limits,
        ptr.vmstore_context_last_wasm_exit_pc(),
    );
}
1214}