1use crate::cursor::{Cursor as _, FuncCursor};
23use crate::ir::{self, ExceptionTableData, ExceptionTableItem, InstBuilder as _};
24use crate::result::CodegenResult;
25use crate::trace;
26use crate::traversals::Dfs;
27use alloc::borrow::Cow;
28use alloc::vec::Vec;
29use cranelift_entity::{SecondaryMap, packed_option::PackedOption};
30use smallvec::SmallVec;
31
// Small inline-capacity vectors used throughout this module; eight elements
// covers the typical case without a heap allocation.
type SmallValueVec = SmallVec<[ir::Value; 8]>;
type SmallBlockArgVec = SmallVec<[ir::BlockArg; 8]>;
type SmallBlockCallVec = SmallVec<[ir::BlockCall; 8]>;
35
/// A directive returned by an [`Inline`] implementation for a particular
/// call site: either leave the call alone or inline the given callee body.
pub enum InlineCommand<'a> {
    /// Do not inline this call site; keep the call instruction as-is.
    KeepCall,

    /// Inline this call site, splicing in a copy of the callee's body.
    Inline {
        /// The body of the function to inline. May be borrowed or owned, at
        /// the implementor's discretion.
        callee: Cow<'a, ir::Function>,
        /// Whether the driver should re-visit the newly inlined instructions
        /// afterwards (enabling transitive inlining of the callee's own
        /// calls) or skip past the inlined body.
        visit_callee: bool,
    },
}
54
/// An inlining heuristic: decides, per call site, whether the callee should
/// be inlined into the caller.
pub trait Inline {
    /// Decide whether to inline the given call site.
    ///
    /// `call_inst` is a call instruction in `caller` with opcode
    /// `call_opcode`, targeting `callee` with arguments `call_args`.
    fn inline(
        &mut self,
        caller: &ir::Function,
        call_inst: ir::Inst,
        call_opcode: ir::Opcode,
        callee: ir::FuncRef,
        call_args: &[ir::Value],
    ) -> InlineCommand<'_>;
}
91
92impl<'a, T> Inline for &'a mut T
93where
94 T: Inline,
95{
96 fn inline(
97 &mut self,
98 caller: &ir::Function,
99 inst: ir::Inst,
100 opcode: ir::Opcode,
101 callee: ir::FuncRef,
102 args: &[ir::Value],
103 ) -> InlineCommand<'_> {
104 (*self).inline(caller, inst, opcode, callee, args)
105 }
106}
107
/// Drive inlining over every direct call site in `func`.
///
/// Walks each block's instructions, and for every `call`, `return_call`, or
/// `try_call` asks `inliner` whether to inline the callee; when it says yes,
/// the callee's body is spliced in via `inline_one`. Indirect calls
/// (`call_indirect`, `try_call_indirect`) are never inlined here since their
/// targets are not statically known.
///
/// Returns `Ok(true)` if at least one call site was inlined.
pub(crate) fn do_inlining(
    func: &mut ir::Function,
    mut inliner: impl Inline,
) -> CodegenResult<bool> {
    trace!("function {} before inlining: {}", func.name, func);

    let mut inlined_any = false;
    // Scratch allocations reused across all `inline_one` invocations.
    let mut allocs = InliningAllocs::default();

    let mut cursor = FuncCursor::new(func);
    'block_loop: while let Some(block) = cursor.next_block() {
        // The cursor position captured just before each `next_inst` call.
        // When a call is inlined with `visit_callee == true`, we rewind to
        // this position so that the newly inlined instructions are scanned
        // for further (transitive) inlining opportunities.
        let mut prev_pos;

        while let Some(inst) = {
            prev_pos = cursor.position();
            cursor.next_inst()
        } {
            debug_assert_eq!(Some(block), cursor.func.layout.inst_block(inst));

            match cursor.func.dfg.insts[inst] {
                // Direct calls and direct tail calls: inlining candidates.
                ir::InstructionData::Call {
                    opcode: opcode @ ir::Opcode::Call | opcode @ ir::Opcode::ReturnCall,
                    args: _,
                    func_ref,
                } => {
                    trace!(
                        "considering call site for inlining: {inst}: {}",
                        cursor.func.dfg.display_inst(inst),
                    );
                    let args = cursor.func.dfg.inst_args(inst);
                    match inliner.inline(&cursor.func, inst, opcode, func_ref, args) {
                        InlineCommand::KeepCall => {
                            trace!(" --> keeping call");
                        }
                        InlineCommand::Inline {
                            callee,
                            visit_callee,
                        } => {
                            // No exception table for plain/tail calls.
                            let last_inlined_block = inline_one(
                                &mut allocs,
                                cursor.func,
                                func_ref,
                                block,
                                inst,
                                opcode,
                                &callee,
                                None,
                            );
                            inlined_any = true;
                            if visit_callee {
                                // Rewind so the inlined body is re-scanned.
                                cursor.set_position(prev_pos);
                            } else {
                                // Skip over the inlined body and resume the
                                // outer traversal after it.
                                cursor.goto_bottom(last_inlined_block);
                                continue 'block_loop;
                            }
                        }
                    }
                }
                // Direct calls with exception handlers.
                ir::InstructionData::TryCall {
                    opcode: opcode @ ir::Opcode::TryCall,
                    args: _,
                    func_ref,
                    exception,
                } => {
                    trace!(
                        "considering call site for inlining: {inst}: {}",
                        cursor.func.dfg.display_inst(inst),
                    );
                    let args = cursor.func.dfg.inst_args(inst);
                    match inliner.inline(&cursor.func, inst, opcode, func_ref, args) {
                        InlineCommand::KeepCall => {
                            trace!(" --> keeping call");
                        }
                        InlineCommand::Inline {
                            callee,
                            visit_callee,
                        } => {
                            // Pass the exception table along so inlined
                            // calls can be re-wrapped in it.
                            let last_inlined_block = inline_one(
                                &mut allocs,
                                cursor.func,
                                func_ref,
                                block,
                                inst,
                                opcode,
                                &callee,
                                Some(exception),
                            );
                            inlined_any = true;
                            if visit_callee {
                                // Rewind so the inlined body is re-scanned.
                                cursor.set_position(prev_pos);
                            } else {
                                // Skip over the inlined body.
                                cursor.goto_bottom(last_inlined_block);
                                continue 'block_loop;
                            }
                        }
                    }
                }
                // Indirect calls: the callee is not statically known, so
                // this pass never inlines them.
                ir::InstructionData::CallIndirect { .. }
                | ir::InstructionData::TryCallIndirect { .. } => {
                }
                _ => {
                    debug_assert!(
                        !cursor.func.dfg.insts[inst].opcode().is_call(),
                        "should have matched all call instructions, but found: {inst}: {}",
                        cursor.func.dfg.display_inst(inst),
                    );
                }
            }
        }
    }

    if inlined_any {
        trace!("function {} after inlining: {}", func.name, func);
    } else {
        trace!("function {} did not have any callees inlined", func.name);
    }

    Ok(inlined_any)
}
249
/// Scratch state reused across `inline_one` calls so that repeated inlining
/// does not repeatedly heap-allocate.
#[derive(Default)]
struct InliningAllocs {
    // Map from a (alias-resolved) callee value to its equivalent value in
    // the caller; `None` until the value has been translated.
    values: SecondaryMap<ir::Value, PackedOption<ir::Value>>,

    // Map from a callee constant to its copy in the caller.
    constants: SecondaryMap<ir::Constant, PackedOption<ir::Constant>>,

    // Map from a callee user-external-name reference to the caller's
    // corresponding reference.
    user_external_name_refs:
        SecondaryMap<ir::UserExternalNameRef, PackedOption<ir::UserExternalNameRef>>,

    // Inlined, non-return call instructions that must be re-wrapped in the
    // original `try_call` site's exception table (see
    // `fixup_inlined_call_exception_tables`).
    calls_needing_exception_table_fixup: Vec<ir::Inst>,
}
285
impl InliningAllocs {
    /// Clear all scratch state and pre-size each map for `callee`'s
    /// entities, preparing for a fresh inlining operation.
    fn reset(&mut self, callee: &ir::Function) {
        // Exhaustive destructuring: adding a field to `InliningAllocs`
        // forces this method to be updated as well.
        let InliningAllocs {
            values,
            constants,
            user_external_name_refs,
            calls_needing_exception_table_fixup,
        } = self;

        values.clear();
        values.resize(callee.dfg.len_values());

        constants.clear();
        constants.resize(callee.dfg.constants.len());

        user_external_name_refs.clear();
        user_external_name_refs.resize(callee.params.user_named_funcs().len());

        calls_needing_exception_table_fixup.clear();
    }

    /// Record that the callee's `callee_val` corresponds to `inlined_val`
    /// in the caller.
    ///
    /// The mapping is keyed on the alias-resolved callee value; each value
    /// may be recorded at most once (checked in debug builds).
    fn set_inlined_value(
        &mut self,
        callee: &ir::Function,
        callee_val: ir::Value,
        inlined_val: ir::Value,
    ) {
        trace!(" --> callee {callee_val:?} = inlined {inlined_val:?}");
        debug_assert!(self.values[callee_val].is_none());
        let resolved_callee_val = callee.dfg.resolve_aliases(callee_val);
        debug_assert!(self.values[resolved_callee_val].is_none());
        self.values[resolved_callee_val] = Some(inlined_val).into();
    }

    /// Look up the caller value for the callee's `callee_val` (resolving
    /// aliases first); `None` if it has not been translated yet.
    fn get_inlined_value(&self, callee: &ir::Function, callee_val: ir::Value) -> Option<ir::Value> {
        let resolved_callee_val = callee.dfg.resolve_aliases(callee_val);
        self.values[resolved_callee_val].expand()
    }
}
328
329fn inline_one(
333 allocs: &mut InliningAllocs,
334 func: &mut ir::Function,
335 callee_func_ref: ir::FuncRef,
336 call_block: ir::Block,
337 call_inst: ir::Inst,
338 call_opcode: ir::Opcode,
339 callee: &ir::Function,
340 call_exception_table: Option<ir::ExceptionTable>,
341) -> ir::Block {
342 trace!(
343 "Inlining call {call_inst:?}: {}\n\
344 with callee = {callee:?}",
345 func.dfg.display_inst(call_inst)
346 );
347
348 let expected_callee_sig = func.dfg.ext_funcs[callee_func_ref].signature;
350 let expected_callee_sig = &func.dfg.signatures[expected_callee_sig];
351 assert_eq!(expected_callee_sig, &callee.signature);
352
353 allocs.reset(callee);
354
355 let entity_map = create_entities(allocs, func, callee);
358
359 let return_block = split_off_return_block(func, call_inst, call_opcode, callee);
362 let call_stack_map = replace_call_with_jump(allocs, func, call_inst, callee, &entity_map);
363
364 let mut last_inlined_block = inline_block_layout(func, call_block, callee, &entity_map);
368
369 for callee_block in Dfs::new().pre_order_iter(callee) {
375 let inlined_block = entity_map.inlined_block(callee_block);
376 trace!(
377 "Processing instructions in callee block {callee_block:?} (inlined block {inlined_block:?}"
378 );
379
380 let mut next_callee_inst = callee.layout.first_inst(callee_block);
381 while let Some(callee_inst) = next_callee_inst {
382 trace!(
383 "Processing callee instruction {callee_inst:?}: {}",
384 callee.dfg.display_inst(callee_inst)
385 );
386
387 assert_ne!(
388 callee.dfg.insts[callee_inst].opcode(),
389 ir::Opcode::GlobalValue,
390 "callee must already be legalized, we shouldn't see any `global_value` \
391 instructions when inlining; found {callee_inst:?}: {}",
392 callee.dfg.display_inst(callee_inst)
393 );
394
395 let inlined_inst_data = callee.dfg.insts[callee_inst].map(InliningInstRemapper {
398 allocs: &allocs,
399 func,
400 callee,
401 entity_map: &entity_map,
402 });
403 let inlined_inst = func.dfg.make_inst(inlined_inst_data);
404 func.layout.append_inst(inlined_inst, inlined_block);
405
406 let opcode = callee.dfg.insts[callee_inst].opcode();
407 if opcode.is_return() {
408 if let Some(return_block) = return_block {
413 fixup_inst_that_returns(
414 allocs,
415 func,
416 callee,
417 &entity_map,
418 call_opcode,
419 inlined_inst,
420 callee_inst,
421 return_block,
422 call_stack_map.as_ref().map(|es| &**es),
423 );
424 } else {
425 debug_assert_eq!(call_opcode, ir::Opcode::ReturnCall);
430 }
431 } else {
432 let ctrl_typevar = callee.dfg.ctrl_typevar(callee_inst);
434 func.dfg.make_inst_results(inlined_inst, ctrl_typevar);
435
436 let callee_results = callee.dfg.inst_results(callee_inst);
438 let inlined_results = func.dfg.inst_results(inlined_inst);
439 debug_assert_eq!(callee_results.len(), inlined_results.len());
440 for (callee_val, inlined_val) in callee_results.iter().zip(inlined_results) {
441 allocs.set_inlined_value(callee, *callee_val, *inlined_val);
442 }
443
444 if opcode.is_call() {
445 append_stack_map_entries(
446 func,
447 callee,
448 &entity_map,
449 call_stack_map.as_deref(),
450 inlined_inst,
451 callee_inst,
452 );
453
454 debug_assert_eq!(
467 call_opcode == ir::Opcode::TryCall,
468 call_exception_table.is_some()
469 );
470 if call_opcode == ir::Opcode::TryCall {
471 allocs
472 .calls_needing_exception_table_fixup
473 .push(inlined_inst);
474 }
475 }
476 }
477
478 trace!(
479 " --> inserted inlined instruction {inlined_inst:?}: {}",
480 func.dfg.display_inst(inlined_inst)
481 );
482
483 next_callee_inst = callee.layout.next_inst(callee_inst);
484 }
485 }
486
487 for block in entity_map.iter_inlined_blocks(func) {
494 if func.layout.is_block_inserted(block) && func.layout.first_inst(block).is_none() {
495 log::trace!("removing unreachable inlined block from layout: {block}");
496
497 if block == last_inlined_block {
501 last_inlined_block = func.layout.prev_block(last_inlined_block).expect(
502 "there will always at least be the block that contained the call we are \
503 inlining",
504 );
505 }
506
507 func.layout.remove_block(block);
508 }
509 }
510
511 debug_assert!(
522 allocs.calls_needing_exception_table_fixup.is_empty() || call_exception_table.is_some()
523 );
524 debug_assert_eq!(
525 call_opcode == ir::Opcode::TryCall,
526 call_exception_table.is_some()
527 );
528 if let Some(call_exception_table) = call_exception_table {
529 fixup_inlined_call_exception_tables(allocs, func, call_exception_table);
530 }
531
532 debug_assert!(
533 func.layout.is_block_inserted(last_inlined_block),
534 "last_inlined_block={last_inlined_block} should be inserted in the layout"
535 );
536 last_inlined_block
537}
538
539fn append_stack_map_entries(
542 func: &mut ir::Function,
543 callee: &ir::Function,
544 entity_map: &EntityMap,
545 call_stack_map: Option<&[ir::UserStackMapEntry]>,
546 inlined_inst: ir::Inst,
547 callee_inst: ir::Inst,
548) {
549 func.dfg.append_user_stack_map_entries(
553 inlined_inst,
554 call_stack_map
555 .iter()
556 .flat_map(|entries| entries.iter().cloned()),
557 );
558
559 func.dfg.append_user_stack_map_entries(
563 inlined_inst,
564 callee
565 .dfg
566 .user_stack_map_entries(callee_inst)
567 .iter()
568 .flat_map(|entries| entries.iter())
569 .map(|entry| ir::UserStackMapEntry {
570 ty: entry.ty,
571 slot: entity_map.inlined_stack_slot(entry.slot),
572 offset: entry.offset,
573 }),
574 );
575}
576
/// Re-wrap every inlined, non-return call recorded in
/// `allocs.calls_needing_exception_table_fixup` so that it participates in
/// the original `try_call` site's exception handling.
///
/// Plain `call`/`call_indirect` instructions become
/// `try_call`/`try_call_indirect` whose normal-return edge jumps to a
/// freshly split continuation block; instructions that are already
/// `try_call`s get the original site's handler items appended to their
/// exception tables.
fn fixup_inlined_call_exception_tables(
    allocs: &mut InliningAllocs,
    func: &mut ir::Function,
    call_exception_table: ir::ExceptionTable,
) {
    // Split the block just after `inst` so that `inst` can become a block
    // terminator (`try_call`). The instruction's old results are detached
    // and turned into aliases of the new block's parameters.
    let split_block_for_new_try_call = |func: &mut ir::Function, inst: ir::Inst| -> ir::Block {
        debug_assert!(func.dfg.insts[inst].opcode().is_call());
        debug_assert!(!func.dfg.insts[inst].opcode().is_terminator());

        let next_inst = func
            .layout
            .next_inst(inst)
            .expect("inst is not a terminator, should have a successor");
        let new_block = func.dfg.blocks.add();
        func.layout.split_block(new_block, next_inst);

        let old_results = SmallValueVec::from_iter(func.dfg.inst_results(inst).iter().copied());
        func.dfg.detach_inst_results(inst);
        for old_result in old_results {
            let ty = func.dfg.value_type(old_result);
            let new_block_param = func.dfg.append_block_param(new_block, ty);
            func.dfg.change_to_alias(old_result, new_block_param);
        }

        new_block
    };

    // Deep-clone the original site's exception table for one inlined call:
    // swap in that call's signature and point the normal-return edge at
    // `new_block`, forwarding all of the call's return values.
    let clone_exception_table_for_this_call = |func: &mut ir::Function,
                                               signature: ir::SigRef,
                                               new_block: ir::Block|
     -> ir::ExceptionTable {
        let mut exception = func.stencil.dfg.exception_tables[call_exception_table]
            .deep_clone(&mut func.stencil.dfg.value_lists);

        *exception.signature_mut() = signature;

        let returns_len = func.dfg.signatures[signature].returns.len();
        let returns_len = u32::try_from(returns_len).unwrap();

        *exception.normal_return_mut() = ir::BlockCall::new(
            new_block,
            (0..returns_len).map(|i| ir::BlockArg::TryCallRet(i)),
            &mut func.dfg.value_lists,
        );

        func.dfg.exception_tables.push(exception)
    };

    for inst in allocs.calls_needing_exception_table_fixup.drain(..) {
        debug_assert!(func.dfg.insts[inst].opcode().is_call());
        debug_assert!(!func.dfg.insts[inst].opcode().is_return());
        match func.dfg.insts[inst] {
            // `call` --> `try_call` with a clone of the site's table.
            ir::InstructionData::Call {
                opcode: ir::Opcode::Call,
                args,
                func_ref,
            } => {
                let new_block = split_block_for_new_try_call(func, inst);
                let signature = func.dfg.ext_funcs[func_ref].signature;
                let exception = clone_exception_table_for_this_call(func, signature, new_block);
                func.dfg.insts[inst] = ir::InstructionData::TryCall {
                    opcode: ir::Opcode::TryCall,
                    args,
                    func_ref,
                    exception,
                };
            }

            // `call_indirect` --> `try_call_indirect`, likewise.
            ir::InstructionData::CallIndirect {
                opcode: ir::Opcode::CallIndirect,
                args,
                sig_ref,
            } => {
                let new_block = split_block_for_new_try_call(func, inst);
                let exception = clone_exception_table_for_this_call(func, sig_ref, new_block);
                func.dfg.insts[inst] = ir::InstructionData::TryCallIndirect {
                    opcode: ir::Opcode::TryCallIndirect,
                    args,
                    exception,
                };
            }

            // Already a `try_call`: keep its own table but append the
            // original site's handler items after the existing ones.
            ir::InstructionData::TryCall {
                opcode: ir::Opcode::TryCall,
                exception,
                ..
            }
            | ir::InstructionData::TryCallIndirect {
                opcode: ir::Opcode::TryCallIndirect,
                exception,
                ..
            } => {
                let sig = func.dfg.exception_tables[exception].signature();
                let normal_return = *func.dfg.exception_tables[exception].normal_return();
                let exception_data = ExceptionTableData::new(
                    sig,
                    normal_return,
                    func.dfg.exception_tables[exception]
                        .items()
                        .chain(func.dfg.exception_tables[call_exception_table].items()),
                )
                .deep_clone(&mut func.dfg.value_lists);

                func.dfg.exception_tables[exception] = exception_data;
            }

            otherwise => unreachable!("unknown non-return call instruction: {otherwise:?}"),
        }
    }
}
731
/// Rewrite an inlined return-like instruction (`return`, `return_call`, or
/// `return_call_indirect`) so that control flows to `return_block` instead
/// of returning from the caller, forwarding the returned values as block
/// arguments.
///
/// Tail calls become plain (indirect) calls followed by a jump; when the
/// original call site was a `try_call`, the new call is queued for
/// exception-table fixup.
fn fixup_inst_that_returns(
    allocs: &mut InliningAllocs,
    func: &mut ir::Function,
    callee: &ir::Function,
    entity_map: &EntityMap,
    call_opcode: ir::Opcode,
    inlined_inst: ir::Inst,
    callee_inst: ir::Inst,
    return_block: ir::Block,
    call_stack_map: Option<&[ir::UserStackMapEntry]>,
) {
    debug_assert!(func.dfg.insts[inlined_inst].opcode().is_return());
    match func.dfg.insts[inlined_inst] {
        // `return v...` --> `jump return_block(v...)`
        ir::InstructionData::MultiAry {
            opcode: ir::Opcode::Return,
            args,
        } => {
            let rets = SmallBlockArgVec::from_iter(
                args.as_slice(&func.dfg.value_lists)
                    .iter()
                    .copied()
                    .map(|v| v.into()),
            );
            func.dfg.replace(inlined_inst).jump(return_block, &rets);
        }

        // `return_call f(...)` --> `call f(...); jump return_block(results)`
        ir::InstructionData::Call {
            opcode: ir::Opcode::ReturnCall,
            args,
            func_ref,
        } => {
            func.dfg.insts[inlined_inst] = ir::InstructionData::Call {
                opcode: ir::Opcode::Call,
                args,
                func_ref,
            };
            func.dfg.make_inst_results(inlined_inst, ir::types::INVALID);

            // The new call needs stack-map entries just like any other
            // inlined call.
            append_stack_map_entries(
                func,
                callee,
                &entity_map,
                call_stack_map,
                inlined_inst,
                callee_inst,
            );

            // Forward the call's results to the return block.
            let rets = SmallBlockArgVec::from_iter(
                func.dfg
                    .inst_results(inlined_inst)
                    .iter()
                    .copied()
                    .map(|v| v.into()),
            );
            let mut cursor = FuncCursor::new(func);
            cursor.goto_after_inst(inlined_inst);
            cursor.ins().jump(return_block, &rets);

            // If the original call site was a `try_call`, the new call must
            // be re-wrapped in its exception table.
            if call_opcode == ir::Opcode::TryCall {
                allocs
                    .calls_needing_exception_table_fixup
                    .push(inlined_inst);
            }
        }

        // `return_call_indirect sig, v(...)` -->
        // `call_indirect sig, v(...); jump return_block(results)`
        ir::InstructionData::CallIndirect {
            opcode: ir::Opcode::ReturnCallIndirect,
            args,
            sig_ref,
        } => {
            func.dfg.insts[inlined_inst] = ir::InstructionData::CallIndirect {
                opcode: ir::Opcode::CallIndirect,
                args,
                sig_ref,
            };
            func.dfg.make_inst_results(inlined_inst, ir::types::INVALID);

            append_stack_map_entries(
                func,
                callee,
                &entity_map,
                call_stack_map,
                inlined_inst,
                callee_inst,
            );

            let rets = SmallBlockArgVec::from_iter(
                func.dfg
                    .inst_results(inlined_inst)
                    .iter()
                    .copied()
                    .map(|v| v.into()),
            );
            let mut cursor = FuncCursor::new(func);
            cursor.goto_after_inst(inlined_inst);
            cursor.ins().jump(return_block, &rets);

            if call_opcode == ir::Opcode::TryCall {
                allocs
                    .calls_needing_exception_table_fixup
                    .push(inlined_inst);
            }
        }

        inst_data => unreachable!(
            "should have handled all `is_return() == true` instructions above; \
             got {inst_data:?}"
        ),
    }
}
863
/// An `InstructionMapper` that translates a callee instruction's entity
/// references (values, blocks, signatures, tables, slots, ...) into the
/// caller's index spaces while the instruction is copied into `func`.
struct InliningInstRemapper<'a> {
    // Value/constant/name mappings for the current inlining operation.
    allocs: &'a InliningAllocs,
    // The caller function being built.
    func: &'a mut ir::Function,
    // The callee function being copied from.
    callee: &'a ir::Function,
    // Offset-based entity mappings from callee to caller.
    entity_map: &'a EntityMap,
}
872
impl<'a> ir::instructions::InstructionMapper for InliningInstRemapper<'a> {
    /// Translate a callee value to its caller equivalent. Panics if the
    /// value has not been translated yet: the block traversal visits
    /// definitions before uses.
    fn map_value(&mut self, value: ir::Value) -> ir::Value {
        self.allocs.get_inlined_value(self.callee, value).expect(
            "defs come before uses; we should have already inlined all values \
             used by an instruction",
        )
    }

    /// Build a caller-side value list by translating each callee value.
    fn map_value_list(&mut self, value_list: ir::ValueList) -> ir::ValueList {
        let mut inlined_list = ir::ValueList::new();
        for callee_val in value_list.as_slice(&self.callee.dfg.value_lists) {
            let inlined_val = self.map_value(*callee_val);
            inlined_list.push(inlined_val, &mut self.func.dfg.value_lists);
        }
        inlined_list
    }

    /// Translate a global-value reference via its entity offset.
    fn map_global_value(&mut self, global_value: ir::GlobalValue) -> ir::GlobalValue {
        self.entity_map.inlined_global_value(global_value)
    }

    /// Copy a jump table into the caller, translating its default target
    /// and every table entry.
    fn map_jump_table(&mut self, jump_table: ir::JumpTable) -> ir::JumpTable {
        let inlined_default =
            self.map_block_call(self.callee.dfg.jump_tables[jump_table].default_block());
        let inlined_table = self.callee.dfg.jump_tables[jump_table]
            .as_slice()
            .iter()
            .map(|callee_block_call| self.map_block_call(*callee_block_call))
            .collect::<SmallBlockCallVec>();
        self.func
            .dfg
            .jump_tables
            .push(ir::JumpTableData::new(inlined_default, &inlined_table))
    }

    /// Copy an exception table into the caller, translating its signature,
    /// normal-return edge, and every handler item.
    fn map_exception_table(&mut self, exception_table: ir::ExceptionTable) -> ir::ExceptionTable {
        let exception_table = &self.callee.dfg.exception_tables[exception_table];
        let inlined_sig_ref = self.map_sig_ref(exception_table.signature());
        let inlined_normal_return = self.map_block_call(*exception_table.normal_return());
        let inlined_table = exception_table
            .items()
            .map(|item| match item {
                ExceptionTableItem::Tag(tag, block_call) => {
                    ExceptionTableItem::Tag(tag, self.map_block_call(block_call))
                }
                ExceptionTableItem::Default(block_call) => {
                    ExceptionTableItem::Default(self.map_block_call(block_call))
                }
                ExceptionTableItem::Context(value) => {
                    ExceptionTableItem::Context(self.map_value(value))
                }
            })
            .collect::<SmallVec<[_; 8]>>();
        self.func
            .dfg
            .exception_tables
            .push(ir::ExceptionTableData::new(
                inlined_sig_ref,
                inlined_normal_return,
                inlined_table,
            ))
    }

    /// Translate a block call: remap the target block and any value
    /// arguments; `TryCallRet`/`TryCallExn` placeholders pass through
    /// unchanged.
    fn map_block_call(&mut self, block_call: ir::BlockCall) -> ir::BlockCall {
        let callee_block = block_call.block(&self.callee.dfg.value_lists);
        let inlined_block = self.entity_map.inlined_block(callee_block);
        let args = block_call
            .args(&self.callee.dfg.value_lists)
            .map(|arg| match arg {
                ir::BlockArg::Value(value) => self.map_value(value).into(),
                ir::BlockArg::TryCallRet(_) | ir::BlockArg::TryCallExn(_) => arg,
            })
            .collect::<SmallBlockArgVec>();
        ir::BlockCall::new(inlined_block, args, &mut self.func.dfg.value_lists)
    }

    /// Translate a function reference via its entity offset.
    fn map_func_ref(&mut self, func_ref: ir::FuncRef) -> ir::FuncRef {
        self.entity_map.inlined_func_ref(func_ref)
    }

    /// Translate a signature reference via its entity offset.
    fn map_sig_ref(&mut self, sig_ref: ir::SigRef) -> ir::SigRef {
        self.entity_map.inlined_sig_ref(sig_ref)
    }

    /// Translate a stack slot via its entity offset.
    fn map_stack_slot(&mut self, stack_slot: ir::StackSlot) -> ir::StackSlot {
        self.entity_map.inlined_stack_slot(stack_slot)
    }

    /// Translate a dynamic stack slot via its entity offset.
    fn map_dynamic_stack_slot(
        &mut self,
        dynamic_stack_slot: ir::DynamicStackSlot,
    ) -> ir::DynamicStackSlot {
        self.entity_map
            .inlined_dynamic_stack_slot(dynamic_stack_slot)
    }

    /// Translate a constant via the per-inline constant map in `allocs`.
    fn map_constant(&mut self, constant: ir::Constant) -> ir::Constant {
        self.allocs
            .constants
            .get(constant)
            .and_then(|o| o.expand())
            .expect("should have inlined all callee constants")
    }

    /// Translate an immediate via its entity offset.
    fn map_immediate(&mut self, immediate: ir::Immediate) -> ir::Immediate {
        self.entity_map.inlined_immediate(immediate)
    }
}
981
982fn inline_block_layout(
986 func: &mut ir::Function,
987 call_block: ir::Block,
988 callee: &ir::Function,
989 entity_map: &EntityMap,
990) -> ir::Block {
991 debug_assert!(func.layout.is_block_inserted(call_block));
992
993 let mut prev_inlined_block = call_block;
996 let mut next_callee_block = callee.layout.entry_block();
997 while let Some(callee_block) = next_callee_block {
998 debug_assert!(func.layout.is_block_inserted(prev_inlined_block));
999
1000 let inlined_block = entity_map.inlined_block(callee_block);
1001 func.layout
1002 .insert_block_after(inlined_block, prev_inlined_block);
1003
1004 prev_inlined_block = inlined_block;
1005 next_callee_block = callee.layout.next_block(callee_block);
1006 }
1007
1008 debug_assert!(func.layout.is_block_inserted(prev_inlined_block));
1009 prev_inlined_block
1010}
1011
/// Find or create the block that inlined `return`s should jump to, whose
/// parameters carry the callee's return values.
///
/// * Plain `call`: split the caller's block right after the call; the
///   call's results become parameters of the new block (via aliases).
/// * `return_call`: there is no continuation in the caller, so `None`.
/// * `try_call`: reuse the normal-return edge's target directly when its
///   arguments are exactly the call's return values in order; otherwise
///   create a trampoline block that forwards the values along the original
///   normal-return edge.
fn split_off_return_block(
    func: &mut ir::Function,
    call_inst: ir::Inst,
    opcode: ir::Opcode,
    callee: &ir::Function,
) -> Option<ir::Block> {
    // Non-terminator calls have a following instruction: split there.
    let return_block = func.layout.next_inst(call_inst).map(|next_inst| {
        let return_block = func.dfg.blocks.add();
        func.layout.split_block(return_block, next_inst);

        // Detach the call's results and re-expose them as parameters of the
        // new block, aliasing the old values to the new parameters.
        let old_results =
            SmallValueVec::from_iter(func.dfg.inst_results(call_inst).iter().copied());
        debug_assert_eq!(old_results.len(), callee.signature.returns.len());
        func.dfg.detach_inst_results(call_inst);
        for (abi, old_val) in callee.signature.returns.iter().zip(old_results) {
            debug_assert_eq!(abi.value_type, func.dfg.value_type(old_val));
            let ret_param = func.dfg.append_block_param(return_block, abi.value_type);
            func.dfg.change_to_alias(old_val, ret_param);
        }

        return_block
    });

    // Terminator calls (`return_call`, `try_call`) have no next instruction,
    // so `return_block` is `None` exactly for them.
    debug_assert_eq!(
        return_block.is_none(),
        opcode == ir::Opcode::ReturnCall || opcode == ir::Opcode::TryCall,
    );
    return_block.or_else(|| match func.dfg.insts[call_inst] {
        ir::InstructionData::TryCall {
            opcode: ir::Opcode::TryCall,
            args: _,
            func_ref: _,
            exception,
        } => {
            let normal_return = func.dfg.exception_tables[exception].normal_return();
            let normal_return_block = normal_return.block(&func.dfg.value_lists);

            {
                // Fast path: if the normal-return edge forwards exactly the
                // `try_call`'s return values, in order, jump straight to its
                // target block.
                let normal_return_args = normal_return.args(&func.dfg.value_lists);
                if normal_return_args.len() == callee.signature.returns.len()
                    && normal_return_args.enumerate().all(|(i, arg)| {
                        let i = u32::try_from(i).unwrap();
                        arg == ir::BlockArg::TryCallRet(i)
                    })
                {
                    return Some(normal_return_block);
                }
            }

            // Slow path: build a trampoline block whose parameters are the
            // callee's return values and which re-plays the original
            // normal-return edge's arguments.
            let return_block = func.dfg.blocks.add();
            func.layout.insert_block(return_block, normal_return_block);

            let return_block_params = callee
                .signature
                .returns
                .iter()
                .map(|abi| func.dfg.append_block_param(return_block, abi.value_type))
                .collect::<SmallValueVec>();

            let normal_return_args = func.dfg.exception_tables[exception]
                .normal_return()
                .args(&func.dfg.value_lists)
                .collect::<SmallBlockArgVec>();
            let jump_args = normal_return_args
                .into_iter()
                .map(|arg| match arg {
                    ir::BlockArg::Value(value) => ir::BlockArg::Value(value),
                    // `retN` placeholders become the trampoline's params.
                    ir::BlockArg::TryCallRet(i) => {
                        let i = usize::try_from(i).unwrap();
                        ir::BlockArg::Value(return_block_params[i])
                    }
                    ir::BlockArg::TryCallExn(_) => {
                        unreachable!("normal-return edges cannot use exceptional results")
                    }
                })
                .collect::<SmallBlockArgVec>();

            let mut cursor = FuncCursor::new(func);
            cursor.goto_first_insertion_point(return_block);
            cursor.ins().jump(normal_return_block, &jump_args);

            Some(return_block)
        }
        _ => None,
    })
}
1141
1142fn replace_call_with_jump(
1150 allocs: &mut InliningAllocs,
1151 func: &mut ir::Function,
1152 call_inst: ir::Inst,
1153 callee: &ir::Function,
1154 entity_map: &EntityMap,
1155) -> Option<ir::UserStackMapEntryVec> {
1156 trace!("Replacing `call` with `jump`");
1157 trace!(
1158 " --> call instruction: {call_inst:?}: {}",
1159 func.dfg.display_inst(call_inst)
1160 );
1161
1162 let callee_entry_block = callee
1163 .layout
1164 .entry_block()
1165 .expect("callee function should have an entry block");
1166 let callee_param_values = callee.dfg.block_params(callee_entry_block);
1167 let caller_arg_values = SmallValueVec::from_iter(func.dfg.inst_args(call_inst).iter().copied());
1168 debug_assert_eq!(callee_param_values.len(), caller_arg_values.len());
1169 debug_assert_eq!(callee_param_values.len(), callee.signature.params.len());
1170 for (abi, (callee_param_value, caller_arg_value)) in callee
1171 .signature
1172 .params
1173 .iter()
1174 .zip(callee_param_values.into_iter().zip(caller_arg_values))
1175 {
1176 debug_assert_eq!(abi.value_type, callee.dfg.value_type(*callee_param_value));
1177 debug_assert_eq!(abi.value_type, func.dfg.value_type(caller_arg_value));
1178 allocs.set_inlined_value(callee, *callee_param_value, caller_arg_value);
1179 }
1180
1181 let inlined_entry_block = entity_map.inlined_block(callee_entry_block);
1188 func.dfg.replace(call_inst).jump(inlined_entry_block, &[]);
1189 trace!(
1190 " --> replaced with jump instruction: {call_inst:?}: {}",
1191 func.dfg.display_inst(call_inst)
1192 );
1193
1194 let stack_map_entries = func.dfg.take_user_stack_map_entries(call_inst);
1195 stack_map_entries
1196}
1197
/// Mappings from callee entity indices to caller entity indices.
///
/// Each kind of callee entity is appended to the caller's corresponding
/// table as one contiguous batch, so every mapping is just a constant
/// offset. An offset is `None` until that entity kind has been copied (see
/// `create_entities`); the accessor methods panic if consulted earlier.
#[derive(Default)]
struct EntityMap {
    block_offset: Option<u32>,
    global_value_offset: Option<u32>,
    sig_ref_offset: Option<u32>,
    func_ref_offset: Option<u32>,
    stack_slot_offset: Option<u32>,
    dynamic_type_offset: Option<u32>,
    dynamic_stack_slot_offset: Option<u32>,
    immediate_offset: Option<u32>,
}
1223
impl EntityMap {
    /// Translate a callee block index into the caller's block space.
    fn inlined_block(&self, callee_block: ir::Block) -> ir::Block {
        let offset = self
            .block_offset
            .expect("must create inlined `ir::Block`s before calling `EntityMap::inlined_block`");
        ir::Block::from_u32(offset + callee_block.as_u32())
    }

    /// Iterate over every caller block at or after the inlining offset,
    /// i.e. all blocks that were added for this inlining operation.
    fn iter_inlined_blocks(&self, func: &ir::Function) -> impl Iterator<Item = ir::Block> + use<> {
        let start = self.block_offset.expect(
            "must create inlined `ir::Block`s before calling `EntityMap::iter_inlined_blocks`",
        );

        let end = func.dfg.blocks.len();
        let end = u32::try_from(end).unwrap();

        (start..end).map(|i| ir::Block::from_u32(i))
    }

    /// Translate a callee global value into the caller's index space.
    fn inlined_global_value(&self, callee_global_value: ir::GlobalValue) -> ir::GlobalValue {
        let offset = self
            .global_value_offset
            .expect("must create inlined `ir::GlobalValue`s before calling `EntityMap::inlined_global_value`");
        ir::GlobalValue::from_u32(offset + callee_global_value.as_u32())
    }

    /// Translate a callee signature reference into the caller's index space.
    fn inlined_sig_ref(&self, callee_sig_ref: ir::SigRef) -> ir::SigRef {
        let offset = self.sig_ref_offset.expect(
            "must create inlined `ir::SigRef`s before calling `EntityMap::inlined_sig_ref`",
        );
        ir::SigRef::from_u32(offset + callee_sig_ref.as_u32())
    }

    /// Translate a callee function reference into the caller's index space.
    fn inlined_func_ref(&self, callee_func_ref: ir::FuncRef) -> ir::FuncRef {
        let offset = self.func_ref_offset.expect(
            "must create inlined `ir::FuncRef`s before calling `EntityMap::inlined_func_ref`",
        );
        ir::FuncRef::from_u32(offset + callee_func_ref.as_u32())
    }

    /// Translate a callee stack slot into the caller's index space.
    fn inlined_stack_slot(&self, callee_stack_slot: ir::StackSlot) -> ir::StackSlot {
        let offset = self.stack_slot_offset.expect(
            "must create inlined `ir::StackSlot`s before calling `EntityMap::inlined_stack_slot`",
        );
        ir::StackSlot::from_u32(offset + callee_stack_slot.as_u32())
    }

    /// Translate a callee dynamic type into the caller's index space.
    fn inlined_dynamic_type(&self, callee_dynamic_type: ir::DynamicType) -> ir::DynamicType {
        let offset = self.dynamic_type_offset.expect(
            "must create inlined `ir::DynamicType`s before calling `EntityMap::inlined_dynamic_type`",
        );
        ir::DynamicType::from_u32(offset + callee_dynamic_type.as_u32())
    }

    /// Translate a callee dynamic stack slot into the caller's index space.
    fn inlined_dynamic_stack_slot(
        &self,
        callee_dynamic_stack_slot: ir::DynamicStackSlot,
    ) -> ir::DynamicStackSlot {
        let offset = self.dynamic_stack_slot_offset.expect(
            "must create inlined `ir::DynamicStackSlot`s before calling `EntityMap::inlined_dynamic_stack_slot`",
        );
        ir::DynamicStackSlot::from_u32(offset + callee_dynamic_stack_slot.as_u32())
    }

    /// Translate a callee immediate into the caller's index space.
    fn inlined_immediate(&self, callee_immediate: ir::Immediate) -> ir::Immediate {
        let offset = self.immediate_offset.expect(
            "must create inlined `ir::Immediate`s before calling `EntityMap::inlined_immediate`",
        );
        ir::Immediate::from_u32(offset + callee_immediate.as_u32())
    }
}
1295
/// Copy all of the callee's entities into the caller's entity tables,
/// recording the resulting index offsets in an `EntityMap`.
///
/// Ordering matters: later creators consume mappings produced by earlier
/// ones (e.g. `create_func_refs` takes the partially filled `entity_map`
/// and `allocs`, which `create_user_external_name_refs` and
/// `create_sig_refs` populate first).
fn create_entities(
    allocs: &mut InliningAllocs,
    func: &mut ir::Function,
    callee: &ir::Function,
) -> EntityMap {
    let mut entity_map = EntityMap::default();

    entity_map.block_offset = Some(create_blocks(allocs, func, callee));
    entity_map.global_value_offset = Some(create_global_values(func, callee));
    entity_map.sig_ref_offset = Some(create_sig_refs(func, callee));
    create_user_external_name_refs(allocs, func, callee);
    entity_map.func_ref_offset = Some(create_func_refs(allocs, func, callee, &entity_map));
    entity_map.stack_slot_offset = Some(create_stack_slots(func, callee));
    entity_map.dynamic_type_offset = Some(create_dynamic_types(func, callee, &entity_map));
    entity_map.dynamic_stack_slot_offset =
        Some(create_dynamic_stack_slots(func, callee, &entity_map));
    entity_map.immediate_offset = Some(create_immediates(func, callee));

    // Constants are mapped through `allocs.constants` rather than a fixed
    // offset, so they contribute nothing to `entity_map`.
    create_constants(allocs, func, callee);

    entity_map
}
1324
/// Add one caller block for each callee block, returning the index offset
/// of the first added block.
///
/// Cold-block hints are carried over. Block parameters are copied for every
/// block except the callee's entry block: entry parameters are instead
/// mapped straight to the call's argument values (see
/// `replace_call_with_jump`), so the inlined entry block takes none.
fn create_blocks(
    allocs: &mut InliningAllocs,
    func: &mut ir::Function,
    callee: &ir::Function,
) -> u32 {
    let offset = func.dfg.blocks.len();
    let offset = u32::try_from(offset).unwrap();

    func.dfg.blocks.reserve(callee.dfg.blocks.len());
    for callee_block in callee.dfg.blocks.iter() {
        let caller_block = func.dfg.blocks.add();
        trace!("Callee {callee_block:?} = inlined {caller_block:?}");

        // Preserve the cold-code hint.
        if callee.layout.is_cold(callee_block) {
            func.layout.set_cold(caller_block);
        }

        // Copy parameters for non-entry blocks and record their value
        // mappings.
        if callee.layout.entry_block() != Some(callee_block) {
            for callee_param in callee.dfg.blocks[callee_block].params(&callee.dfg.value_lists) {
                let ty = callee.dfg.value_type(*callee_param);
                let caller_param = func.dfg.append_block_param(caller_block, ty);

                allocs.set_inlined_value(callee, *callee_param, caller_param);
            }
        }
    }

    offset
}
1358
/// Append copies of the callee's global values to the caller, returning the
/// index offset of the first copy.
///
/// Derived GVs (`Load`, `IAddImm`) reference a base global value; since all
/// of the callee's GVs are copied in order, the base's copy lives at
/// `base + gv_offset` and is remapped accordingly. Self-contained GVs are
/// cloned verbatim.
fn create_global_values(func: &mut ir::Function, callee: &ir::Function) -> u32 {
    let gv_offset = func.global_values.len();
    let gv_offset = u32::try_from(gv_offset).unwrap();

    func.global_values.reserve(callee.global_values.len());
    for gv in callee.global_values.values() {
        func.global_values.push(match gv {
            // Derived: remap the base reference into the caller's space.
            ir::GlobalValueData::Load {
                base,
                offset,
                global_type,
                flags,
            } => ir::GlobalValueData::Load {
                base: ir::GlobalValue::from_u32(base.as_u32() + gv_offset),
                offset: *offset,
                global_type: *global_type,
                flags: *flags,
            },
            ir::GlobalValueData::IAddImm {
                base,
                offset,
                global_type,
            } => ir::GlobalValueData::IAddImm {
                base: ir::GlobalValue::from_u32(base.as_u32() + gv_offset),
                offset: *offset,
                global_type: *global_type,
            },

            // Self-contained: copy as-is.
            ir::GlobalValueData::VMContext
            | ir::GlobalValueData::Symbol { .. }
            | ir::GlobalValueData::DynScaleTargetConst { .. } => gv.clone(),
        });
    }

    gv_offset
}
1400
1401fn create_sig_refs(func: &mut ir::Function, callee: &ir::Function) -> u32 {
1403 let offset = func.dfg.signatures.len();
1404 let offset = u32::try_from(offset).unwrap();
1405
1406 func.dfg.signatures.reserve(callee.dfg.signatures.len());
1407 for sig in callee.dfg.signatures.values() {
1408 func.dfg.signatures.push(sig.clone());
1409 }
1410
1411 offset
1412}
1413
1414fn create_user_external_name_refs(
1415 allocs: &mut InliningAllocs,
1416 func: &mut ir::Function,
1417 callee: &ir::Function,
1418) {
1419 for (callee_named_func_ref, name) in callee.params.user_named_funcs().iter() {
1420 let caller_named_func_ref = func.declare_imported_user_function(name.clone());
1421 allocs.user_external_name_refs[callee_named_func_ref] = Some(caller_named_func_ref).into();
1422 }
1423}
1424
1425fn create_func_refs(
1427 allocs: &InliningAllocs,
1428 func: &mut ir::Function,
1429 callee: &ir::Function,
1430 entity_map: &EntityMap,
1431) -> u32 {
1432 let offset = func.dfg.ext_funcs.len();
1433 let offset = u32::try_from(offset).unwrap();
1434
1435 func.dfg.ext_funcs.reserve(callee.dfg.ext_funcs.len());
1436 for ir::ExtFuncData {
1437 name,
1438 signature,
1439 colocated,
1440 } in callee.dfg.ext_funcs.values()
1441 {
1442 func.dfg.ext_funcs.push(ir::ExtFuncData {
1443 name: match name {
1444 ir::ExternalName::User(name_ref) => {
1445 ir::ExternalName::User(allocs.user_external_name_refs[*name_ref].expect(
1446 "should have translated all `ir::UserExternalNameRef`s before translating \
1447 `ir::FuncRef`s",
1448 ))
1449 }
1450 ir::ExternalName::TestCase(_)
1451 | ir::ExternalName::LibCall(_)
1452 | ir::ExternalName::KnownSymbol(_) => name.clone(),
1453 },
1454 signature: entity_map.inlined_sig_ref(*signature),
1455 colocated: *colocated,
1456 });
1457 }
1458
1459 offset
1460}
1461
1462fn create_stack_slots(func: &mut ir::Function, callee: &ir::Function) -> u32 {
1464 let offset = func.sized_stack_slots.len();
1465 let offset = u32::try_from(offset).unwrap();
1466
1467 func.sized_stack_slots
1468 .reserve(callee.sized_stack_slots.len());
1469 for slot in callee.sized_stack_slots.values() {
1470 func.sized_stack_slots.push(slot.clone());
1471 }
1472
1473 offset
1474}
1475
1476fn create_dynamic_types(
1478 func: &mut ir::Function,
1479 callee: &ir::Function,
1480 entity_map: &EntityMap,
1481) -> u32 {
1482 let offset = func.dynamic_stack_slots.len();
1483 let offset = u32::try_from(offset).unwrap();
1484
1485 func.dfg
1486 .dynamic_types
1487 .reserve(callee.dfg.dynamic_types.len());
1488 for ir::DynamicTypeData {
1489 base_vector_ty,
1490 dynamic_scale,
1491 } in callee.dfg.dynamic_types.values()
1492 {
1493 func.dfg.dynamic_types.push(ir::DynamicTypeData {
1494 base_vector_ty: *base_vector_ty,
1495 dynamic_scale: entity_map.inlined_global_value(*dynamic_scale),
1496 });
1497 }
1498
1499 offset
1500}
1501
1502fn create_dynamic_stack_slots(
1504 func: &mut ir::Function,
1505 callee: &ir::Function,
1506 entity_map: &EntityMap,
1507) -> u32 {
1508 let offset = func.dynamic_stack_slots.len();
1509 let offset = u32::try_from(offset).unwrap();
1510
1511 func.dynamic_stack_slots
1512 .reserve(callee.dynamic_stack_slots.len());
1513 for ir::DynamicStackSlotData { kind, dyn_ty } in callee.dynamic_stack_slots.values() {
1514 func.dynamic_stack_slots.push(ir::DynamicStackSlotData {
1515 kind: *kind,
1516 dyn_ty: entity_map.inlined_dynamic_type(*dyn_ty),
1517 });
1518 }
1519
1520 offset
1521}
1522
1523fn create_immediates(func: &mut ir::Function, callee: &ir::Function) -> u32 {
1525 let offset = func.dfg.immediates.len();
1526 let offset = u32::try_from(offset).unwrap();
1527
1528 func.dfg.immediates.reserve(callee.dfg.immediates.len());
1529 for imm in callee.dfg.immediates.values() {
1530 func.dfg.immediates.push(imm.clone());
1531 }
1532
1533 offset
1534}
1535
1536fn create_constants(allocs: &mut InliningAllocs, func: &mut ir::Function, callee: &ir::Function) {
1538 for (callee_constant, data) in callee.dfg.constants.iter() {
1539 let inlined_constant = func.dfg.constants.insert(data.clone());
1540 allocs.constants[*callee_constant] = Some(inlined_constant).into();
1541 }
1542}