revmc_cranelift/
lib.rs

1#![doc = include_str!("../README.md")]
2#![cfg_attr(not(test), warn(unused_extern_crates))]
3#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))]
4
5use codegen::ir::Function;
6use cranelift::{
7    codegen::ir::{FuncRef, StackSlot},
8    prelude::*,
9};
10use cranelift_jit::{JITBuilder, JITModule};
11use cranelift_module::{DataDescription, FuncId, FuncOrDataId, Linkage, Module, ModuleError};
12use cranelift_object::{ObjectBuilder, ObjectModule};
13use pretty_clif::CommentWriter;
14use revmc_backend::{
15    eyre::eyre, Backend, BackendTypes, Builder, OptimizationLevel, Result, TypeMethods, U256,
16};
17use std::{cell::RefCell, collections::HashMap, io::Write, path::Path, rc::Rc};
18
19mod pretty_clif;
20
21pub use cranelift;
22pub use cranelift_jit;
23pub use cranelift_module;
24pub use cranelift_native;
25
/// The Cranelift-based EVM bytecode compiler backend.
#[allow(missing_debug_implementations)]
#[must_use]
pub struct EvmCraneliftBackend {
    /// The function builder context, which is reused across multiple FunctionBuilder instances.
    builder_context: FunctionBuilderContext,

    /// The main Cranelift context, which holds the state for codegen. Cranelift
    /// separates this from `Module` to allow for parallel compilation, with a
    /// context per thread, though this isn't in the simple demo here.
    ctx: codegen::Context,

    /// The module, with the jit backend, which manages the JIT'd functions.
    module: ModuleWrapper,

    /// Symbol table shared with the JIT module's symbol lookup function.
    symbols: Symbols,

    /// Requested optimization level; only takes effect for functions compiled after a new
    /// module is created (see `set_opt_level`).
    opt_level: OptimizationLevel,
    /// Collects per-entity comments that are emitted when dumping CLIF IR.
    comments: CommentWriter,
    /// Functions declared but not yet defined; drained by `optimize_module`.
    functions: Vec<FuncId>,
}
47
#[allow(clippy::new_without_default)]
impl EvmCraneliftBackend {
    /// Returns `Ok(())` if the current architecture is supported, or `Err(())` if the host machine
    /// is not supported in the current configuration.
    pub fn is_supported() -> Result<(), &'static str> {
        cranelift_native::builder().map(drop)
    }

    /// Creates a new instance of the JIT compiler.
    ///
    /// # Panics
    ///
    /// Panics if the current architecture is not supported. See
    /// [`is_supported`](Self::is_supported).
    #[track_caller]
    pub fn new(aot: bool, opt_level: OptimizationLevel) -> Self {
        let symbols = Symbols::new();
        let module = ModuleWrapper::new(aot, opt_level, &symbols).unwrap();
        Self {
            builder_context: FunctionBuilderContext::new(),
            ctx: module.get().make_context(),
            module,
            symbols,
            opt_level,
            comments: CommentWriter::new(),
            functions: Vec::new(),
        }
    }

    /// Replaces the current module with a fresh one.
    ///
    /// In AOT mode, returns the old module so its object code can still be written out;
    /// in JIT mode, frees the old module's memory and returns `None`.
    fn finish_module(&mut self) -> Result<Option<ObjectModule>> {
        let aot = match self.module {
            ModuleWrapper::Jit(_) => {
                // TODO: Can `free_memory` take `&mut self` pls?
                // Build the replacement first so `self.module` is never left in a torn state.
                let new = ModuleWrapper::new_jit(self.opt_level, self.symbols.clone())?;
                let ModuleWrapper::Jit(old) = std::mem::replace(&mut self.module, new) else {
                    unreachable!()
                };
                // SAFETY: assumes no function pointers previously handed out from `old`
                // are called after this point — NOTE(review): confirm callers uphold this.
                unsafe { old.free_memory() };
                None
            }
            ModuleWrapper::Aot(_) => {
                let new = ModuleWrapper::new_aot(self.opt_level)?;
                let ModuleWrapper::Aot(old) = std::mem::replace(&mut self.module, new) else {
                    unreachable!()
                };
                Some(old)
            }
        };
        // Reset the codegen context against the freshly created module.
        self.module.get().clear_context(&mut self.ctx);
        Ok(aot)
    }
}
100
// Maps the backend-agnostic type vocabulary onto Cranelift's IR entities.
impl BackendTypes for EvmCraneliftBackend {
    type Type = Type;
    type Value = Value;
    type StackSlot = StackSlot;
    type BasicBlock = Block;
    type Function = FuncRef;
}
108
109impl TypeMethods for EvmCraneliftBackend {
110    fn type_ptr(&self) -> Self::Type {
111        self.module.get().target_config().pointer_type()
112    }
113
114    fn type_ptr_sized_int(&self) -> Self::Type {
115        self.type_ptr()
116    }
117
118    fn type_int(&self, bits: u32) -> Self::Type {
119        bits.try_into().ok().and_then(Type::int).unwrap_or_else(|| unimplemented!("type: i{bits}"))
120    }
121
122    fn type_array(&self, ty: Self::Type, size: u32) -> Self::Type {
123        unimplemented!("type: [{size} x {ty}]")
124    }
125
126    fn type_bit_width(&self, ty: Self::Type) -> u32 {
127        ty.bits()
128    }
129}
130
impl Backend for EvmCraneliftBackend {
    type Builder<'a> = EvmCraneliftBuilder<'a>;
    type FuncId = FuncId;

    /// File extension used when dumping this backend's IR.
    fn ir_extension(&self) -> &'static str {
        "clif"
    }

    // Cranelift modules are not named; this is a no-op.
    fn set_module_name(&mut self, name: &str) {
        let _ = name;
    }

    fn set_is_dumping(&mut self, yes: bool) {
        // Ask Cranelift to capture the disassembly alongside compiled code,
        // so `dump_disasm` has something to write.
        self.ctx.set_disasm(yes);
    }

    // Debug assertions are not implemented for this backend; ignored.
    fn set_debug_assertions(&mut self, yes: bool) {
        let _ = yes;
    }

    fn opt_level(&self) -> OptimizationLevel {
        self.opt_level
    }

    fn set_opt_level(&mut self, level: OptimizationLevel) {
        // Note that this will only affect new functions after a new module is created in
        // `free_all_functions`.
        self.opt_level = level;
    }

    fn is_aot(&self) -> bool {
        self.module.is_aot()
    }

    fn dump_ir(&mut self, path: &Path) -> Result<()> {
        crate::pretty_clif::write_clif_file(
            path,
            self.module.get().isa(),
            &self.ctx.func,
            &self.comments,
        );
        Ok(())
    }

    fn dump_disasm(&mut self, path: &Path) -> Result<()> {
        // NOTE(review): `compiled_code()` is `None` until the function has been
        // compiled, making this `unwrap` panic — confirm callers only invoke
        // this after compilation (and with `set_is_dumping(true)` for `vcode`).
        if let Some(disasm) = &self.ctx.compiled_code().unwrap().vcode {
            crate::pretty_clif::write_ir_file(path, |file| file.write_all(disasm.as_bytes()))
        }
        Ok(())
    }

    fn build_function(
        &mut self,
        name: &str,
        ret: Option<Self::Type>,
        params: &[Self::Type],
        param_names: &[&str],
        linkage: revmc_backend::Linkage,
    ) -> Result<(Self::Builder<'_>, FuncId)> {
        // The signature is accumulated directly in the persistent context's function;
        // it is reset by `clear_context` in `optimize_module`/`finish_module`.
        if let Some(ret) = ret {
            self.ctx.func.signature.returns.push(AbiParam::new(ret));
        }
        for param in params {
            self.ctx.func.signature.params.push(AbiParam::new(*param));
        }
        // Cranelift has no named parameters; names are ignored.
        let _ = param_names;
        let ptr_type = self.type_ptr();
        let id = self.module.get_mut().declare_function(
            name,
            convert_linkage(linkage),
            &self.ctx.func.signature,
        )?;
        // Remember the id so `optimize_module` can define it later.
        self.functions.push(id);
        let bcx = FunctionBuilder::new(&mut self.ctx.func, &mut self.builder_context);
        let builder = EvmCraneliftBuilder {
            module: &mut self.module,
            comments: &mut self.comments,
            bcx,
            ptr_type,
            symbols: self.symbols.clone(),
        };
        Ok((builder, id))
    }

    // No separate verification pass; nothing to do here.
    fn verify_module(&mut self) -> Result<()> {
        Ok(())
    }

    // Despite the name, this is where declared functions are actually defined
    // (compiled) and finalized.
    fn optimize_module(&mut self) -> Result<()> {
        // Define the function to jit. This finishes compilation, although
        // there may be outstanding relocations to perform. Currently, jit
        // cannot finish relocations until all functions to be called are
        // defined. For this toy demo for now, we'll just finalize the
        // function below.
        for &id in &self.functions {
            self.module.get_mut().define_function(id, &mut self.ctx)?;
        }
        self.functions.clear();

        // Now that compilation is finished, we can clear out the context state.
        self.module.get().clear_context(&mut self.ctx);

        // Finalize the functions which we just defined, which resolves any outstanding relocations
        // (patching in addresses, now that they're available).
        self.module.finalize_definitions()?;

        self.comments.clear();

        Ok(())
    }

    fn write_object<W: std::io::Write>(&mut self, w: W) -> Result<()> {
        // AOT mode only; consumes the current module (a fresh one replaces it).
        let module =
            self.finish_module()?.ok_or_else(|| eyre!("cannot write object in JIT mode"))?;
        let product = module.finish();
        product.object.write_stream(w).map_err(|e| eyre!("{e}"))?;
        Ok(())
    }

    // JIT mode only; returns the address of the finalized machine code.
    fn jit_function(&mut self, id: Self::FuncId) -> Result<usize> {
        self.module.get_finalized_function(id).map(|ptr| ptr as usize)
    }

    unsafe fn free_function(&mut self, id: Self::FuncId) -> Result<()> {
        // This doesn't exist yet.
        let _ = id;
        Ok(())
    }

    // Recreates the whole module, releasing all compiled code at once.
    unsafe fn free_all_functions(&mut self) -> Result<()> {
        self.finish_module().map(drop)
    }
}
264
/// The Cranelift-based EVM bytecode compiler function builder.
#[allow(missing_debug_implementations)]
pub struct EvmCraneliftBuilder<'a> {
    // Module the function is built into; used to declare data and functions.
    module: &'a mut ModuleWrapper,
    // Comment sink used when dumping CLIF IR.
    comments: &'a mut CommentWriter,
    // Underlying Cranelift function builder.
    bcx: FunctionBuilder<'a>,
    // Cached pointer type of the target ISA.
    ptr_type: Type,
    // Shared symbol table; `add_function` registers host addresses here.
    symbols: Symbols,
}
274
// The builder shares the backend's type associations verbatim.
impl<'a> BackendTypes for EvmCraneliftBuilder<'a> {
    type Type = <EvmCraneliftBackend as BackendTypes>::Type;
    type Value = <EvmCraneliftBackend as BackendTypes>::Value;
    type StackSlot = <EvmCraneliftBackend as BackendTypes>::StackSlot;
    type BasicBlock = <EvmCraneliftBackend as BackendTypes>::BasicBlock;
    type Function = <EvmCraneliftBackend as BackendTypes>::Function;
}
282
283impl<'a> TypeMethods for EvmCraneliftBuilder<'a> {
284    fn type_ptr(&self) -> Self::Type {
285        self.ptr_type
286    }
287
288    fn type_ptr_sized_int(&self) -> Self::Type {
289        self.ptr_type
290    }
291
292    fn type_int(&self, bits: u32) -> Self::Type {
293        bits.try_into().ok().and_then(Type::int).unwrap_or_else(|| unimplemented!("type: i{bits}"))
294    }
295
296    fn type_array(&self, ty: Self::Type, size: u32) -> Self::Type {
297        unimplemented!("type: [{size} x {ty}]")
298    }
299
300    fn type_bit_width(&self, ty: Self::Type) -> u32 {
301        ty.bits()
302    }
303}
304
305impl<'a> Builder for EvmCraneliftBuilder<'a> {
306    fn create_block(&mut self, name: &str) -> Self::BasicBlock {
307        let block = self.bcx.create_block();
308        if !name.is_empty() && self.comments.enabled() {
309            self.comments.add_comment(block, name);
310        }
311        block
312    }
313
314    fn create_block_after(&mut self, after: Self::BasicBlock, name: &str) -> Self::BasicBlock {
315        let block = self.create_block(name);
316        self.bcx.insert_block_after(block, after);
317        block
318    }
319
320    fn switch_to_block(&mut self, block: Self::BasicBlock) {
321        self.bcx.switch_to_block(block);
322    }
323
324    fn seal_block(&mut self, block: Self::BasicBlock) {
325        self.bcx.seal_block(block);
326    }
327
328    fn seal_all_blocks(&mut self) {
329        self.bcx.seal_all_blocks();
330    }
331
332    fn set_current_block_cold(&mut self) {
333        self.bcx.set_cold_block(self.bcx.current_block().unwrap());
334    }
335
336    fn current_block(&mut self) -> Option<Self::BasicBlock> {
337        self.bcx.current_block()
338    }
339
340    fn add_comment_to_current_inst(&mut self, comment: &str) {
341        let Some(block) = self.bcx.current_block() else { return };
342        let Some(inst) = self.bcx.func.layout.last_inst(block) else { return };
343        self.comments.add_comment(inst, comment);
344    }
345
346    fn fn_param(&mut self, index: usize) -> Self::Value {
347        let block = self.current_block().unwrap();
348        self.bcx.block_params(block)[index]
349    }
350
351    fn bool_const(&mut self, value: bool) -> Self::Value {
352        self.iconst(types::I8, value as i64)
353    }
354
355    fn iconst(&mut self, ty: Self::Type, value: i64) -> Self::Value {
356        self.bcx.ins().iconst(ty, value)
357    }
358
359    fn iconst_256(&mut self, value: U256) -> Self::Value {
360        let _ = value;
361        todo!("no i256 :(")
362    }
363
364    fn str_const(&mut self, value: &str) -> Self::Value {
365        // https://github.com/rust-lang/rustc_codegen_cranelift/blob/1122338eb88648ec36a2eb2b1c27031fa897964d/src/common.rs#L432
366
367        let mut data = DataDescription::new();
368        data.define(value.as_bytes().into());
369        let msg_id = self.module.get_mut().declare_anonymous_data(false, false).unwrap();
370
371        // Ignore DuplicateDefinition error, as the data will be the same
372        let _ = self.module.get_mut().define_data(msg_id, &data);
373
374        let local_msg_id = self.module.get().declare_data_in_func(msg_id, self.bcx.func);
375        if self.comments.enabled() {
376            self.comments.add_comment(local_msg_id, value);
377        }
378        self.bcx.ins().global_value(self.ptr_type, local_msg_id)
379    }
380
381    fn new_stack_slot_raw(&mut self, ty: Self::Type, name: &str) -> Self::StackSlot {
382        // https://github.com/rust-lang/rustc_codegen_cranelift/blob/1122338eb88648ec36a2eb2b1c27031fa897964d/src/common.rs#L388
383
384        /*
385        let _ = name;
386        let abi_align = 16;
387        if align <= abi_align {
388            self.bcx.create_sized_stack_slot(StackSlotData {
389                kind: StackSlotKind::ExplicitSlot,
390                // FIXME Don't force the size to a multiple of <abi_align> bytes once Cranelift gets
391                // a way to specify stack slot alignment.
392                size: (size + abi_align - 1) / abi_align * abi_align,
393            })
394        } else {
395            unimplemented!("{align} > {abi_align}")
396            /*
397            // Alignment is too big to handle using the above hack. Dynamically realign a stack slot
398            // instead. This wastes some space for the realignment.
399            let stack_slot = self.bcx.create_sized_stack_slot(StackSlotData {
400                kind: StackSlotKind::ExplicitSlot,
401                // FIXME Don't force the size to a multiple of <abi_align> bytes once Cranelift gets
402                // a way to specify stack slot alignment.
403                size: (size + align) / abi_align * abi_align,
404            });
405            let base_ptr = self.bcx.ins().stack_addr(self.pointer_type, stack_slot, 0);
406            let misalign_offset = self.bcx.ins().urem_imm(base_ptr, i64::from(align));
407            let realign_offset = self.bcx.ins().irsub_imm(misalign_offset, i64::from(align));
408            Pointer::new(self.bcx.ins().iadd(base_ptr, realign_offset))
409            */
410        }
411        */
412
413        let _ = name;
414        self.bcx.create_sized_stack_slot(StackSlotData {
415            kind: StackSlotKind::ExplicitSlot,
416            size: ty.bytes(),
417        })
418    }
419
420    fn stack_load(&mut self, ty: Self::Type, slot: Self::StackSlot, name: &str) -> Self::Value {
421        let _ = name;
422        self.bcx.ins().stack_load(ty, slot, 0)
423    }
424
425    fn stack_store(&mut self, value: Self::Value, slot: Self::StackSlot) {
426        self.bcx.ins().stack_store(value, slot, 0);
427    }
428
429    fn stack_addr(&mut self, ty: Self::Type, slot: Self::StackSlot) -> Self::Value {
430        self.bcx.ins().stack_addr(ty, slot, 0)
431    }
432
433    fn load(&mut self, ty: Self::Type, ptr: Self::Value, name: &str) -> Self::Value {
434        let _ = name;
435        self.bcx.ins().load(ty, MemFlags::trusted(), ptr, 0)
436    }
437
438    fn store(&mut self, value: Self::Value, ptr: Self::Value) {
439        self.bcx.ins().store(MemFlags::trusted(), value, ptr, 0);
440    }
441
442    fn nop(&mut self) {
443        self.bcx.ins().nop();
444    }
445
446    fn ret(&mut self, values: &[Self::Value]) {
447        self.bcx.ins().return_(values);
448    }
449
450    fn icmp(
451        &mut self,
452        cond: revmc_backend::IntCC,
453        lhs: Self::Value,
454        rhs: Self::Value,
455    ) -> Self::Value {
456        self.bcx.ins().icmp(convert_intcc(cond), lhs, rhs)
457    }
458
459    fn icmp_imm(&mut self, cond: revmc_backend::IntCC, lhs: Self::Value, rhs: i64) -> Self::Value {
460        self.bcx.ins().icmp_imm(convert_intcc(cond), lhs, rhs)
461    }
462
463    fn is_null(&mut self, ptr: Self::Value) -> Self::Value {
464        self.bcx.ins().icmp_imm(IntCC::Equal, ptr, 0)
465    }
466
467    fn is_not_null(&mut self, ptr: Self::Value) -> Self::Value {
468        self.bcx.ins().icmp_imm(IntCC::NotEqual, ptr, 0)
469    }
470
471    fn br(&mut self, dest: Self::BasicBlock) {
472        self.bcx.ins().jump(dest, &[]);
473    }
474
475    fn brif(
476        &mut self,
477        cond: Self::Value,
478        then_block: Self::BasicBlock,
479        else_block: Self::BasicBlock,
480    ) {
481        self.bcx.ins().brif(cond, then_block, &[], else_block, &[]);
482    }
483
484    fn switch(
485        &mut self,
486        index: Self::Value,
487        default: Self::BasicBlock,
488        targets: &[(u64, Self::BasicBlock)],
489        default_is_cold: bool,
490    ) {
491        let _ = default_is_cold;
492        let mut switch = cranelift::frontend::Switch::new();
493        for (value, block) in targets {
494            switch.set_entry(*value as u128, *block);
495        }
496        switch.emit(&mut self.bcx, index, default)
497    }
498
499    fn phi(&mut self, ty: Self::Type, incoming: &[(Self::Value, Self::BasicBlock)]) -> Self::Value {
500        let current = self.current_block().unwrap();
501        let param = self.bcx.append_block_param(current, ty);
502        for &(value, block) in incoming {
503            self.bcx.switch_to_block(block);
504            let last_inst = self.bcx.func.layout.last_inst(block).unwrap();
505            let src = self.bcx.ins().jump(current, &[value]);
506            self.bcx.func.transplant_inst(last_inst, src);
507        }
508        self.bcx.switch_to_block(current);
509        param
510    }
511
512    fn select(
513        &mut self,
514        cond: Self::Value,
515        then_value: Self::Value,
516        else_value: Self::Value,
517    ) -> Self::Value {
518        self.bcx.ins().select(cond, then_value, else_value)
519    }
520
521    fn lazy_select(
522        &mut self,
523        cond: Self::Value,
524        ty: Self::Type,
525        then_value: impl FnOnce(&mut Self) -> Self::Value,
526        else_value: impl FnOnce(&mut Self) -> Self::Value,
527    ) -> Self::Value {
528        let then_block = if let Some(current) = self.current_block() {
529            self.create_block_after(current, "then")
530        } else {
531            self.create_block("then")
532        };
533        let else_block = self.create_block_after(then_block, "else");
534        let done_block = self.create_block_after(else_block, "contd");
535        let done_value = self.bcx.append_block_param(done_block, ty);
536
537        self.brif(cond, then_block, else_block);
538
539        self.seal_block(then_block);
540        self.switch_to_block(then_block);
541        let then_value = then_value(self);
542        self.bcx.ins().jump(done_block, &[then_value]);
543
544        self.seal_block(else_block);
545        self.switch_to_block(else_block);
546        let else_value = else_value(self);
547        self.bcx.ins().jump(done_block, &[else_value]);
548
549        self.seal_block(done_block);
550        self.switch_to_block(done_block);
551        done_value
552    }
553
554    fn iadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
555        self.bcx.ins().iadd(lhs, rhs)
556    }
557
558    fn isub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
559        self.bcx.ins().isub(lhs, rhs)
560    }
561
562    fn imul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
563        self.bcx.ins().imul(lhs, rhs)
564    }
565
566    fn udiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
567        self.bcx.ins().udiv(lhs, rhs)
568    }
569
570    fn sdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
571        self.bcx.ins().sdiv(lhs, rhs)
572    }
573
574    fn urem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
575        self.bcx.ins().urem(lhs, rhs)
576    }
577
578    fn srem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
579        self.bcx.ins().srem(lhs, rhs)
580    }
581
582    fn iadd_imm(&mut self, lhs: Self::Value, rhs: i64) -> Self::Value {
583        self.bcx.ins().iadd_imm(lhs, rhs)
584    }
585
586    fn isub_imm(&mut self, lhs: Self::Value, rhs: i64) -> Self::Value {
587        self.iadd_imm(lhs, -rhs)
588    }
589
590    fn imul_imm(&mut self, lhs: Self::Value, rhs: i64) -> Self::Value {
591        self.bcx.ins().imul_imm(lhs, rhs)
592    }
593
594    fn uadd_overflow(&mut self, lhs: Self::Value, rhs: Self::Value) -> (Self::Value, Self::Value) {
595        self.bcx.ins().uadd_overflow(lhs, rhs)
596    }
597
598    fn usub_overflow(&mut self, lhs: Self::Value, rhs: Self::Value) -> (Self::Value, Self::Value) {
599        self.bcx.ins().usub_overflow(lhs, rhs)
600    }
601
602    fn umax(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
603        self.bcx.ins().umax(lhs, rhs)
604    }
605
606    fn umin(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
607        self.bcx.ins().umin(lhs, rhs)
608    }
609
610    fn bswap(&mut self, value: Self::Value) -> Self::Value {
611        self.bcx.ins().bswap(value)
612    }
613
614    fn bitor(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
615        self.bcx.ins().bor(lhs, rhs)
616    }
617
618    fn bitand(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
619        self.bcx.ins().band(lhs, rhs)
620    }
621
622    fn bitxor(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
623        self.bcx.ins().bxor(lhs, rhs)
624    }
625
626    fn bitnot(&mut self, value: Self::Value) -> Self::Value {
627        self.bcx.ins().bnot(value)
628    }
629
630    fn bitor_imm(&mut self, lhs: Self::Value, rhs: i64) -> Self::Value {
631        self.bcx.ins().bor_imm(lhs, rhs)
632    }
633
634    fn bitand_imm(&mut self, lhs: Self::Value, rhs: i64) -> Self::Value {
635        self.bcx.ins().band_imm(lhs, rhs)
636    }
637
638    fn bitxor_imm(&mut self, lhs: Self::Value, rhs: i64) -> Self::Value {
639        self.bcx.ins().bxor_imm(lhs, rhs)
640    }
641
642    fn ishl(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
643        self.bcx.ins().ishl(lhs, rhs)
644    }
645
646    fn ushr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
647        self.bcx.ins().ushr(lhs, rhs)
648    }
649
650    fn sshr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
651        self.bcx.ins().sshr(lhs, rhs)
652    }
653
654    fn zext(&mut self, ty: Self::Type, value: Self::Value) -> Self::Value {
655        self.bcx.ins().uextend(ty, value)
656    }
657
658    fn sext(&mut self, ty: Self::Type, value: Self::Value) -> Self::Value {
659        self.bcx.ins().sextend(ty, value)
660    }
661
662    fn ireduce(&mut self, to: Self::Type, value: Self::Value) -> Self::Value {
663        self.bcx.ins().ireduce(to, value)
664    }
665
666    fn gep(
667        &mut self,
668        ty: Self::Type,
669        ptr: Self::Value,
670        indexes: &[Self::Value],
671        name: &str,
672    ) -> Self::Value {
673        let _ = name;
674        let offset = self.bcx.ins().imul_imm(*indexes.first().unwrap(), ty.bytes() as i64);
675        self.bcx.ins().iadd(ptr, offset)
676    }
677
678    fn call(&mut self, function: Self::Function, args: &[Self::Value]) -> Option<Self::Value> {
679        let ins = self.bcx.ins().call(function, args);
680        self.bcx.inst_results(ins).first().copied()
681    }
682
683    fn memcpy(&mut self, dst: Self::Value, src: Self::Value, len: Self::Value) {
684        let config = self.module.get().target_config();
685        self.bcx.call_memcpy(config, dst, src, len)
686    }
687
688    fn unreachable(&mut self) {
689        self.bcx.ins().trap(TrapCode::UnreachableCodeReached);
690    }
691
692    fn get_or_build_function(
693        &mut self,
694        name: &str,
695        params: &[Self::Type],
696        ret: Option<Self::Type>,
697        linkage: revmc_backend::Linkage,
698        build: impl FnOnce(&mut Self),
699    ) -> Self::Function {
700        if let Some(f) = self.get_function(name) {
701            return f;
702        }
703
704        let mut sig = self.module.get().make_signature();
705        if let Some(ret) = ret {
706            sig.returns.push(AbiParam::new(ret));
707        }
708        for param in params {
709            sig.params.push(AbiParam::new(*param));
710        }
711
712        let id =
713            self.module.get_mut().declare_function(name, convert_linkage(linkage), &sig).unwrap();
714
715        let mut func = Function::new();
716        func.signature = sig;
717        let mut builder_ctx = FunctionBuilderContext::new();
718        let new_bcx = FunctionBuilder::new(&mut func, &mut builder_ctx);
719        // TODO: SAFETY: Not really safe, lifetime extension.
720        let new_bcx =
721            unsafe { std::mem::transmute::<FunctionBuilder<'_>, FunctionBuilder<'a>>(new_bcx) };
722        let old_bcx = std::mem::replace(&mut self.bcx, new_bcx);
723
724        let f = self.module.get_mut().declare_func_in_func(id, self.bcx.func);
725
726        let entry = self.bcx.create_block();
727        self.bcx.append_block_params_for_function_params(entry);
728        build(self);
729
730        self.bcx = old_bcx;
731
732        f
733    }
734
735    fn get_function(&mut self, name: &str) -> Option<Self::Function> {
736        self.module
737            .get()
738            .get_name(name)
739            .and_then(|id| match id {
740                FuncOrDataId::Func(f) => Some(f),
741                FuncOrDataId::Data(_) => None,
742            })
743            .map(|id| self.module.get_mut().declare_func_in_func(id, self.bcx.func))
744    }
745
746    fn add_function(
747        &mut self,
748        name: &str,
749        params: &[Self::Type],
750        ret: Option<Self::Type>,
751        address: Option<usize>,
752        linkage: revmc_backend::Linkage,
753    ) -> Self::Function {
754        let mut sig = self.module.get().make_signature();
755        if let Some(ret) = ret {
756            sig.returns.push(AbiParam::new(ret));
757        }
758        for param in params {
759            sig.params.push(AbiParam::new(*param));
760        }
761        if let Some(address) = address {
762            self.symbols.insert(name.to_string(), address as *const u8);
763        }
764        let id =
765            self.module.get_mut().declare_function(name, convert_linkage(linkage), &sig).unwrap();
766        self.module.get_mut().declare_func_in_func(id, self.bcx.func)
767    }
768
769    fn add_function_attribute(
770        &mut self,
771        function: Option<Self::Function>,
772        attribute: revmc_backend::Attribute,
773        loc: revmc_backend::FunctionAttributeLocation,
774    ) {
775        let _ = function;
776        let _ = attribute;
777        let _ = loc;
778        // TODO
779    }
780}
781
// Shared, reference-counted map from symbol name to host address; cloned into the
// JIT builder's symbol-lookup closure so late registrations are still visible.
#[derive(Clone, Debug, Default)]
struct Symbols(Rc<RefCell<HashMap<String, *const u8>>>);
784
785impl Symbols {
786    fn new() -> Self {
787        Self::default()
788    }
789
790    fn get(&self, name: &str) -> Option<*const u8> {
791        self.0.borrow().get(name).copied()
792    }
793
794    fn insert(&self, name: String, ptr: *const u8) -> Option<*const u8> {
795        self.0.borrow_mut().insert(name, ptr)
796    }
797}
798
// Either a JIT module (in-memory compilation) or an AOT module (object-file emission).
enum ModuleWrapper {
    Jit(JITModule),
    Aot(ObjectModule),
}
803
impl ModuleWrapper {
    /// Creates a JIT or AOT module depending on `aot`.
    fn new(aot: bool, opt_level: OptimizationLevel, symbols: &Symbols) -> Result<Self> {
        if aot {
            Self::new_aot(opt_level)
        } else {
            Self::new_jit(opt_level, symbols.clone())
        }
    }

    /// Creates a JIT module for the host machine, resolving unknown symbols
    /// through the shared `symbols` table.
    fn new_jit(opt_level: OptimizationLevel, symbols: Symbols) -> Result<Self> {
        let mut builder = JITBuilder::with_flags(
            &[("opt_level", opt_level_flag(opt_level))],
            cranelift_module::default_libcall_names(),
        )?;
        builder.symbol_lookup_fn(Box::new(move |s| symbols.get(s)));
        Ok(Self::Jit(JITModule::new(builder)))
    }

    /// Creates an AOT (object-emitting) module targeting the host machine.
    fn new_aot(opt_level: OptimizationLevel) -> Result<Self> {
        let mut flag_builder = settings::builder();
        flag_builder.set("opt_level", opt_level_flag(opt_level))?;
        let isa_builder = cranelift_native::builder().map_err(|s| eyre!(s))?;
        let isa = isa_builder.finish(settings::Flags::new(flag_builder))?;

        let builder =
            ObjectBuilder::new(isa, "jit".to_string(), cranelift_module::default_libcall_names())?;
        Ok(Self::Aot(ObjectModule::new(builder)))
    }

    /// Returns `true` if this is an AOT module.
    fn is_aot(&self) -> bool {
        matches!(self, Self::Aot(_))
    }

    /// Returns the wrapped module as a shared `dyn Module`.
    #[inline]
    fn get(&self) -> &dyn Module {
        match self {
            Self::Jit(module) => module,
            Self::Aot(module) => module,
        }
    }

    /// Returns the wrapped module as a mutable `dyn Module`.
    #[inline]
    fn get_mut(&mut self) -> &mut dyn Module {
        match self {
            Self::Jit(module) => module,
            Self::Aot(module) => module,
        }
    }

    /// Resolves relocations for all defined functions; a no-op in AOT mode,
    /// where relocation happens when the object is produced.
    fn finalize_definitions(&mut self) -> Result<(), ModuleError> {
        match self {
            Self::Jit(module) => module.finalize_definitions(),
            Self::Aot(_) => Ok(()),
        }
    }

    /// Returns a pointer to the finalized machine code of `id`; JIT mode only.
    fn get_finalized_function(&self, id: FuncId) -> Result<*const u8> {
        match self {
            Self::Jit(module) => Ok(module.get_finalized_function(id)),
            Self::Aot(_) => Err(eyre!("cannot get finalized JIT function in AOT mode")),
        }
    }
}
867
868fn convert_intcc(cond: revmc_backend::IntCC) -> IntCC {
869    match cond {
870        revmc_backend::IntCC::Equal => IntCC::Equal,
871        revmc_backend::IntCC::NotEqual => IntCC::NotEqual,
872        revmc_backend::IntCC::SignedLessThan => IntCC::SignedLessThan,
873        revmc_backend::IntCC::SignedGreaterThanOrEqual => IntCC::SignedGreaterThanOrEqual,
874        revmc_backend::IntCC::SignedGreaterThan => IntCC::SignedGreaterThan,
875        revmc_backend::IntCC::SignedLessThanOrEqual => IntCC::SignedLessThanOrEqual,
876        revmc_backend::IntCC::UnsignedLessThan => IntCC::UnsignedLessThan,
877        revmc_backend::IntCC::UnsignedGreaterThanOrEqual => IntCC::UnsignedGreaterThanOrEqual,
878        revmc_backend::IntCC::UnsignedGreaterThan => IntCC::UnsignedGreaterThan,
879        revmc_backend::IntCC::UnsignedLessThanOrEqual => IntCC::UnsignedLessThanOrEqual,
880    }
881}
882
883fn convert_linkage(linkage: revmc_backend::Linkage) -> Linkage {
884    match linkage {
885        revmc_backend::Linkage::Import => Linkage::Import,
886        revmc_backend::Linkage::Public => Linkage::Export,
887        revmc_backend::Linkage::Private => Linkage::Local,
888    }
889}
890
891fn opt_level_flag(opt_level: OptimizationLevel) -> &'static str {
892    match opt_level {
893        OptimizationLevel::None => "none",
894        OptimizationLevel::Less | OptimizationLevel::Default | OptimizationLevel::Aggressive => {
895            "speed"
896        }
897    }
898}