1use super::functions::*;
6use crate::lcnf::*;
7use crate::native_backend::*;
8use std::collections::HashMap;
9
/// Emits native instructions for Lean external objects (foreign data owned
/// by the runtime and released through a finalizer function).
#[allow(dead_code)]
pub struct ExternalObjectCodegen {
    /// Monotonic counter backing `fresh_reg` (virtual registers 1300+).
    pub(super) temp_counter: u32,
}
15impl ExternalObjectCodegen {
16 #[allow(dead_code)]
18 pub fn new() -> Self {
19 ExternalObjectCodegen { temp_counter: 0 }
20 }
21 pub(super) fn fresh_reg(&mut self) -> Register {
22 let r = Register::virt(1300 + self.temp_counter);
23 self.temp_counter += 1;
24 r
25 }
26 #[allow(dead_code)]
28 pub fn emit_alloc_external(
29 &mut self,
30 data_reg: Register,
31 finalizer_fn: &str,
32 ) -> Vec<NativeInst> {
33 let dst = self.fresh_reg();
34 vec![
35 NativeInst::Comment(format!(
36 "Alloc external data={} finalizer={}",
37 data_reg, finalizer_fn
38 )),
39 NativeInst::Call {
40 dst: Some(dst),
41 func: NativeValue::FRef("lean_alloc_external".to_string()),
42 args: vec![
43 NativeValue::Reg(data_reg),
44 NativeValue::FRef(finalizer_fn.to_string()),
45 ],
46 ret_type: NativeType::Ptr,
47 },
48 ]
49 }
50 #[allow(dead_code)]
52 pub fn emit_get_external_data(&mut self, obj_reg: Register) -> Vec<NativeInst> {
53 let dst = self.fresh_reg();
54 vec![
55 NativeInst::Comment(format!("Get external data {}", obj_reg)),
56 NativeInst::Load {
57 dst,
58 ty: NativeType::Ptr,
59 addr: NativeValue::Reg(obj_reg),
60 },
61 ]
62 }
63}
/// Emits native instructions for closure creation and application.
pub struct ClosureCodegen {
    /// Monotonic counter backing `fresh_reg` (virtual registers 800+).
    pub(super) temp_counter: u32,
}
69impl ClosureCodegen {
70 pub fn new() -> Self {
71 ClosureCodegen { temp_counter: 0 }
72 }
73 pub(super) fn fresh_reg(&mut self) -> Register {
74 let r = Register::virt(800 + self.temp_counter);
75 self.temp_counter += 1;
76 r
77 }
78 pub fn emit_closure_create(
80 &mut self,
81 fn_name: &str,
82 arity: usize,
83 env_regs: &[Register],
84 ) -> Vec<NativeInst> {
85 let mut insts = Vec::new();
86 let closure_reg = self.fresh_reg();
87 insts.push(NativeInst::Comment(format!(
88 "Create closure @{} arity={} captured={}",
89 fn_name,
90 arity,
91 env_regs.len()
92 )));
93 insts.push(NativeInst::Call {
94 dst: Some(closure_reg),
95 func: NativeValue::FRef("lean_alloc_closure".to_string()),
96 args: vec![
97 NativeValue::FRef(fn_name.to_string()),
98 NativeValue::Imm(arity as i64),
99 NativeValue::Imm(env_regs.len() as i64),
100 ],
101 ret_type: NativeType::Ptr,
102 });
103 for (i, env_reg) in env_regs.iter().enumerate() {
104 insts.push(NativeInst::Call {
105 dst: None,
106 func: NativeValue::FRef("lean_closure_set".to_string()),
107 args: vec![
108 NativeValue::Reg(closure_reg),
109 NativeValue::Imm(i as i64),
110 NativeValue::Reg(*env_reg),
111 ],
112 ret_type: NativeType::Void,
113 });
114 }
115 insts
116 }
117 pub fn emit_closure_apply(
119 &mut self,
120 closure_reg: Register,
121 arg_regs: &[Register],
122 ) -> Vec<NativeInst> {
123 let mut insts = Vec::new();
124 let result_reg = self.fresh_reg();
125 insts.push(NativeInst::Comment(format!(
126 "Apply closure {} with {} args",
127 closure_reg,
128 arg_regs.len()
129 )));
130 let apply_fn = match arg_regs.len() {
131 1 => "lean_apply_1",
132 2 => "lean_apply_2",
133 3 => "lean_apply_3",
134 4 => "lean_apply_4",
135 _ => "lean_apply_n",
136 };
137 let mut args = vec![NativeValue::Reg(closure_reg)];
138 for r in arg_regs {
139 args.push(NativeValue::Reg(*r));
140 }
141 if arg_regs.len() > 4 {
142 args.push(NativeValue::Imm(arg_regs.len() as i64));
143 }
144 insts.push(NativeInst::Call {
145 dst: Some(result_reg),
146 func: NativeValue::FRef(apply_fn.to_string()),
147 args,
148 ret_type: NativeType::Ptr,
149 });
150 insts
151 }
152 pub fn emit_partial_apply(
158 &mut self,
159 closure_reg: Register,
160 arg_regs: &[Register],
161 ) -> Vec<NativeInst> {
162 let mut insts = Vec::new();
163 if arg_regs.is_empty() {
164 insts.push(NativeInst::Comment(
165 "Partial apply: no args, identity".to_string(),
166 ));
167 return insts;
168 }
169 let new_closure_reg = self.fresh_reg();
170 insts.push(NativeInst::Comment(format!(
171 "Partial apply {} with {} args",
172 closure_reg,
173 arg_regs.len()
174 )));
175 let mut args = vec![NativeValue::Reg(closure_reg)];
176 for r in arg_regs {
177 args.push(NativeValue::Reg(*r));
178 }
179 insts.push(NativeInst::Call {
180 dst: Some(new_closure_reg),
181 func: NativeValue::FRef("lean_apply_m".to_string()),
182 args,
183 ret_type: NativeType::Ptr,
184 });
185 insts
186 }
187}
/// Describes where a string object's length word and character data live,
/// with optional small-string-optimization (SSO) parameters.
#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct StringLayout {
    /// Byte offset of the length field from the object base.
    pub len_offset: usize,
    /// Byte offset of the first data byte from the object base.
    pub data_offset: usize,
    /// True when short strings are stored inline (SSO).
    pub is_sso: bool,
    /// Longest string (in bytes) that fits inline; 0 when `is_sso` is false.
    pub sso_max_len: usize,
    /// Fixed allocation size for inline strings; 0 when `is_sso` is false.
    pub sso_total_size: usize,
}
212impl StringLayout {
213 #[allow(dead_code)]
215 pub fn standard() -> Self {
216 StringLayout {
217 len_offset: ObjectLayout::HEADER_SIZE,
218 data_offset: ObjectLayout::HEADER_SIZE + 8,
219 is_sso: false,
220 sso_max_len: 0,
221 sso_total_size: 0,
222 }
223 }
224 #[allow(dead_code)]
226 pub fn sso(max_len: usize) -> Self {
227 let sso_total_size = align_up(
228 ObjectLayout::HEADER_SIZE + 4 + max_len,
229 ObjectLayout::DEFAULT_ALIGN,
230 );
231 StringLayout {
232 len_offset: ObjectLayout::HEADER_SIZE,
233 data_offset: ObjectLayout::HEADER_SIZE + 4,
234 is_sso: true,
235 sso_max_len: max_len,
236 sso_total_size,
237 }
238 }
239 #[allow(dead_code)]
241 pub fn alloc_size(&self, str_len: usize) -> usize {
242 if self.is_sso && str_len <= self.sso_max_len {
243 self.sso_total_size
244 } else {
245 align_up(self.data_offset + str_len, ObjectLayout::DEFAULT_ALIGN)
246 }
247 }
248}
/// Aggregates every specialized code generator plus the instruction stream
/// they append to; the façade used to emit a whole module's runtime calls.
#[allow(dead_code)]
pub struct RuntimeModuleBuilder {
    /// Configuration the sub-generators were initialized from.
    pub(super) config: RuntimeConfig,
    /// Reference-count inc/dec emitter.
    pub(super) rc: RcCodegen,
    /// Raw allocation / constructor / closure allocation emitter.
    pub(super) alloc: AllocatorCodegen,
    /// Closure create/apply emitter.
    pub(super) closure_codegen: ClosureCodegen,
    /// Array operation emitter.
    pub(super) array_codegen: ArrayCodegen,
    /// String operation emitter.
    pub(super) string_codegen: StringCodegen,
    /// Thunk allocation/forcing emitter.
    pub(super) thunk_codegen: ThunkCodegen,
    /// Arbitrary-precision natural-number arithmetic emitter.
    pub(super) bignat_codegen: BigNatCodegen,
    /// External-object emitter.
    pub(super) external_codegen: ExternalObjectCodegen,
    /// Memoizing object-layout calculator.
    pub(super) layout_computer: LayoutComputer,
    /// All instructions emitted so far, in emission order.
    pub(super) instructions: Vec<NativeInst>,
}
268impl RuntimeModuleBuilder {
269 #[allow(dead_code)]
271 pub fn new(config: RuntimeConfig) -> Self {
272 RuntimeModuleBuilder {
273 rc: RcCodegen::new(config.rc_strategy != RcStrategy::None),
274 alloc: AllocatorCodegen::new(config.alloc_strategy),
275 closure_codegen: ClosureCodegen::new(),
276 array_codegen: ArrayCodegen::new(config.alloc_strategy),
277 string_codegen: StringCodegen::default(),
278 thunk_codegen: ThunkCodegen::new(),
279 bignat_codegen: BigNatCodegen::new(),
280 external_codegen: ExternalObjectCodegen::new(),
281 layout_computer: LayoutComputer::new(),
282 instructions: Vec::new(),
283 config,
284 }
285 }
286 #[allow(dead_code)]
288 pub fn emit_ctor(&mut self, tag: u32, num_objs: usize, scalar_sz: usize) {
289 let insts = self.alloc.emit_alloc_ctor(tag, num_objs, scalar_sz);
290 self.instructions.extend(insts);
291 }
292 #[allow(dead_code)]
294 pub fn emit_closure(&mut self, fn_name: &str, arity: usize, env: &[Register]) {
295 let insts = self
296 .closure_codegen
297 .emit_closure_create(fn_name, arity, env);
298 self.instructions.extend(insts);
299 }
300 #[allow(dead_code)]
302 pub fn emit_inc(&mut self, reg: Register) {
303 let insts = self.rc.emit_rc_inc(reg);
304 self.instructions.extend(insts);
305 }
306 #[allow(dead_code)]
308 pub fn emit_dec(&mut self, reg: Register) {
309 let insts = self.rc.emit_rc_dec(reg);
310 self.instructions.extend(insts);
311 }
312 #[allow(dead_code)]
314 pub fn emit_nat_add(&mut self, lhs: Register, rhs: Register) {
315 let insts = self.bignat_codegen.emit_add(lhs, rhs);
316 self.instructions.extend(insts);
317 }
318 #[allow(dead_code)]
320 pub fn emit_str_append(&mut self, lhs: Register, rhs: Register) {
321 let insts = self.string_codegen.emit_string_append(lhs, rhs);
322 self.instructions.extend(insts);
323 }
324 #[allow(dead_code)]
326 pub fn instructions(&self) -> &[NativeInst] {
327 &self.instructions
328 }
329 #[allow(dead_code)]
331 pub fn instruction_count(&self) -> usize {
332 self.instructions.len()
333 }
334 #[allow(dead_code)]
336 pub fn call_count(&self) -> usize {
337 self.instructions
338 .iter()
339 .filter(|i| matches!(i, NativeInst::Call { .. }))
340 .count()
341 }
342 #[allow(dead_code)]
344 pub fn comment_count(&self) -> usize {
345 self.instructions
346 .iter()
347 .filter(|i| matches!(i, NativeInst::Comment(_)))
348 .count()
349 }
350}
/// Emits native instructions for lazy thunks (allocate, force, probe).
#[allow(dead_code)]
pub struct ThunkCodegen {
    /// Monotonic counter backing `fresh_reg` (virtual registers 1100+).
    pub(super) temp_counter: u32,
}
356impl ThunkCodegen {
357 #[allow(dead_code)]
359 pub fn new() -> Self {
360 ThunkCodegen { temp_counter: 0 }
361 }
362 pub(super) fn fresh_reg(&mut self) -> Register {
363 let r = Register::virt(1100 + self.temp_counter);
364 self.temp_counter += 1;
365 r
366 }
367 #[allow(dead_code)]
369 pub fn emit_alloc_thunk(&mut self, fn_name: &str) -> Vec<NativeInst> {
370 let dst = self.fresh_reg();
371 vec![
372 NativeInst::Comment(format!("Alloc thunk @{}", fn_name)),
373 NativeInst::Call {
374 dst: Some(dst),
375 func: NativeValue::FRef("lean_alloc_thunk".to_string()),
376 args: vec![NativeValue::FRef(fn_name.to_string())],
377 ret_type: NativeType::Ptr,
378 },
379 ]
380 }
381 #[allow(dead_code)]
383 pub fn emit_force_thunk(&mut self, thunk_reg: Register) -> Vec<NativeInst> {
384 let dst = self.fresh_reg();
385 vec![
386 NativeInst::Comment(format!("Force thunk {}", thunk_reg)),
387 NativeInst::Call {
388 dst: Some(dst),
389 func: NativeValue::FRef("lean_thunk_get".to_string()),
390 args: vec![NativeValue::Reg(thunk_reg)],
391 ret_type: NativeType::Ptr,
392 },
393 ]
394 }
395 #[allow(dead_code)]
397 pub fn emit_is_evaluated(&mut self, thunk_reg: Register) -> Vec<NativeInst> {
398 let dst = self.fresh_reg();
399 vec![
400 NativeInst::Comment(format!("Thunk is_evaluated {}", thunk_reg)),
401 NativeInst::Call {
402 dst: Some(dst),
403 func: NativeValue::FRef("lean_thunk_is_evaluated".to_string()),
404 args: vec![NativeValue::Reg(thunk_reg)],
405 ret_type: NativeType::I8,
406 },
407 ]
408 }
409}
/// Strategy for representing closures in generated code.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ClosureRepr {
    /// Heap object holding a function pointer plus captured environment —
    /// the representation the emitters in this file produce.
    Standard,
    /// Flattened environment representation — not exercised in this file;
    /// confirm semantics against the backend that consumes it.
    Flat,
    /// Closure calls rewritten to dispatch over known functions — not
    /// exercised in this file; confirm semantics at the use site.
    Defunctionalized,
}
/// Emits native instructions for Lean array operations.
#[allow(dead_code)]
pub struct ArrayCodegen {
    /// Monotonic counter backing `fresh_reg` (virtual registers 900+).
    pub(super) temp_counter: u32,
    /// Allocation strategy — stored but not consulted by the emitters in
    /// this file (all calls go through lean_* array functions).
    pub(super) strategy: AllocStrategy,
}
426impl ArrayCodegen {
427 #[allow(dead_code)]
429 pub fn new(strategy: AllocStrategy) -> Self {
430 ArrayCodegen {
431 temp_counter: 0,
432 strategy,
433 }
434 }
435 pub(super) fn fresh_reg(&mut self) -> Register {
436 let r = Register::virt(900 + self.temp_counter);
437 self.temp_counter += 1;
438 r
439 }
440 #[allow(dead_code)]
442 pub fn emit_alloc_array(&mut self, capacity: usize) -> Vec<NativeInst> {
443 let dst = self.fresh_reg();
444 vec![
445 NativeInst::Comment(format!("Alloc array capacity={}", capacity)),
446 NativeInst::Call {
447 dst: Some(dst),
448 func: NativeValue::FRef("lean_alloc_array".to_string()),
449 args: vec![NativeValue::Imm(0), NativeValue::Imm(capacity as i64)],
450 ret_type: NativeType::Ptr,
451 },
452 ]
453 }
454 #[allow(dead_code)]
456 pub fn emit_array_get(&mut self, arr_reg: Register, idx_reg: Register) -> Vec<NativeInst> {
457 let dst = self.fresh_reg();
458 vec![
459 NativeInst::Comment(format!("Array get {} [{}]", arr_reg, idx_reg)),
460 NativeInst::Call {
461 dst: Some(dst),
462 func: NativeValue::FRef("lean_array_get".to_string()),
463 args: vec![NativeValue::Reg(arr_reg), NativeValue::Reg(idx_reg)],
464 ret_type: NativeType::Ptr,
465 },
466 ]
467 }
468 #[allow(dead_code)]
470 pub fn emit_array_set(
471 &mut self,
472 arr_reg: Register,
473 idx_reg: Register,
474 val_reg: Register,
475 ) -> Vec<NativeInst> {
476 let dst = self.fresh_reg();
477 vec![
478 NativeInst::Comment(format!("Array set {} [{}] = {}", arr_reg, idx_reg, val_reg)),
479 NativeInst::Call {
480 dst: Some(dst),
481 func: NativeValue::FRef("lean_array_set".to_string()),
482 args: vec![
483 NativeValue::Reg(arr_reg),
484 NativeValue::Reg(idx_reg),
485 NativeValue::Reg(val_reg),
486 ],
487 ret_type: NativeType::Ptr,
488 },
489 ]
490 }
491 #[allow(dead_code)]
493 pub fn emit_array_push(&mut self, arr_reg: Register, val_reg: Register) -> Vec<NativeInst> {
494 let dst = self.fresh_reg();
495 vec![
496 NativeInst::Comment(format!("Array push {} val={}", arr_reg, val_reg)),
497 NativeInst::Call {
498 dst: Some(dst),
499 func: NativeValue::FRef("lean_array_push".to_string()),
500 args: vec![NativeValue::Reg(arr_reg), NativeValue::Reg(val_reg)],
501 ret_type: NativeType::Ptr,
502 },
503 ]
504 }
505 #[allow(dead_code)]
507 pub fn emit_array_size(&mut self, arr_reg: Register) -> Vec<NativeInst> {
508 let dst = self.fresh_reg();
509 vec![
510 NativeInst::Comment(format!("Array size {}", arr_reg)),
511 NativeInst::Call {
512 dst: Some(dst),
513 func: NativeValue::FRef("lean_array_size".to_string()),
514 args: vec![NativeValue::Reg(arr_reg)],
515 ret_type: NativeType::I64,
516 },
517 ]
518 }
519}
/// Memoizes computed constructor and closure layouts.
#[allow(dead_code)]
#[derive(Debug, Clone, Default)]
pub struct LayoutCache {
    /// Constructor layouts keyed by "name#tag#num_obj#scalar_sz".
    pub(super) ctor_layouts: HashMap<String, ObjectLayout>,
    /// Closure layouts keyed by (arity, captured-count).
    pub(super) closure_layouts: HashMap<(usize, usize), ClosureLayout>,
}
527impl LayoutCache {
528 #[allow(dead_code)]
530 pub fn new() -> Self {
531 Self::default()
532 }
533 #[allow(dead_code)]
535 pub fn get_ctor(
536 &mut self,
537 ctor_name: &str,
538 ctor_tag: u32,
539 num_obj: usize,
540 scalar_sz: usize,
541 ) -> &ObjectLayout {
542 let key = format!("{}#{}#{}#{}", ctor_name, ctor_tag, num_obj, scalar_sz);
543 self.ctor_layouts
544 .entry(key)
545 .or_insert_with(|| ObjectLayout::for_ctor(ctor_tag, num_obj, scalar_sz))
546 }
547 #[allow(dead_code)]
549 pub fn get_closure(&mut self, arity: usize, num_captured: usize) -> &ClosureLayout {
550 self.closure_layouts
551 .entry((arity, num_captured))
552 .or_insert_with(|| ClosureLayout::new(arity, num_captured))
553 }
554 #[allow(dead_code)]
556 pub fn clear(&mut self) {
557 self.ctor_layouts.clear();
558 self.closure_layouts.clear();
559 }
560 #[allow(dead_code)]
562 pub fn ctor_count(&self) -> usize {
563 self.ctor_layouts.len()
564 }
565 #[allow(dead_code)]
567 pub fn closure_count(&self) -> usize {
568 self.closure_layouts.len()
569 }
570}
/// Tunable knobs for runtime code generation.
#[derive(Debug, Clone)]
pub struct RuntimeConfig {
    /// Reference-counting scheme; `RcStrategy::None` disables RC emission.
    pub rc_strategy: RcStrategy,
    /// Which allocator family the emitted code calls into.
    pub alloc_strategy: AllocStrategy,
    /// Closure representation choice — not consulted by the emitters in this
    /// file; presumably read elsewhere, confirm at the use site.
    pub closure_repr: ClosureRepr,
    /// Extra runtime checks flag — unused in this file; confirm semantics at
    /// the use site.
    pub debug_checks: bool,
    /// Thread-safety request — unused in this file; confirm semantics at the
    /// use site.
    pub thread_safe: bool,
}
/// How reference-counting operations are emitted.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RcStrategy {
    /// Eager inc/dec emission — presumed default; exact semantics are
    /// decided by the consumers of this enum, confirm there.
    Standard,
    /// Deferred RC — not exercised in this file; confirm semantics at the
    /// use site.
    Deferred,
    /// No reference counting; `RuntimeModuleBuilder::new` disables the
    /// `RcCodegen` emitter for this variant.
    None,
}
/// Byte-level layout of a closure object: function pointer after the object
/// header, then a 16-byte closure header gap, then 8-byte environment slots.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ClosureLayout {
    /// Offset of the function pointer (right after the object header).
    pub fn_ptr_offset: usize,
    /// Offset where the captured environment begins.
    pub env_offset: usize,
    /// Total size of the environment area (8 bytes per captured value).
    pub env_size: usize,
    /// Number of arguments the wrapped function expects.
    pub arity: usize,
    /// Number of captured environment values.
    pub num_captured: usize,
    /// The generic object layout backing this closure.
    pub object_layout: ObjectLayout,
}
611impl ClosureLayout {
612 pub fn new(arity: usize, num_captured: usize) -> Self {
615 let fn_ptr_offset = ObjectLayout::HEADER_SIZE;
616 let env_offset = fn_ptr_offset + 16;
617 let env_size = num_captured * 8;
618 ClosureLayout {
619 fn_ptr_offset,
620 env_offset,
621 env_size,
622 arity,
623 num_captured,
624 object_layout: ObjectLayout::for_closure(arity, num_captured),
625 }
626 }
627 pub fn captured_var_offset(&self, idx: usize) -> usize {
629 assert!(idx < self.num_captured);
630 self.env_offset + idx * 8
631 }
632}
/// Discriminates the runtime object kinds; encoded as a header byte via
/// `to_u8` (Scalar=0 .. Thunk=7).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ObjectTag {
    /// Unboxed scalar value (tag 0).
    Scalar,
    /// Closure object (tag 1).
    Closure,
    /// Array object (tag 2).
    Array,
    /// Constructor / struct object (tag 3).
    Struct,
    /// External (foreign) object (tag 4).
    External,
    /// String object (tag 5).
    String,
    /// Arbitrary-precision natural number (tag 6).
    BigNat,
    /// Lazy thunk (tag 7).
    Thunk,
}
653impl ObjectTag {
654 pub fn to_u8(self) -> u8 {
656 match self {
657 ObjectTag::Scalar => 0,
658 ObjectTag::Closure => 1,
659 ObjectTag::Array => 2,
660 ObjectTag::Struct => 3,
661 ObjectTag::External => 4,
662 ObjectTag::String => 5,
663 ObjectTag::BigNat => 6,
664 ObjectTag::Thunk => 7,
665 }
666 }
667 pub fn from_u8(tag: u8) -> Option<Self> {
669 match tag {
670 0 => Some(ObjectTag::Scalar),
671 1 => Some(ObjectTag::Closure),
672 2 => Some(ObjectTag::Array),
673 3 => Some(ObjectTag::Struct),
674 4 => Some(ObjectTag::External),
675 5 => Some(ObjectTag::String),
676 6 => Some(ObjectTag::BigNat),
677 7 => Some(ObjectTag::Thunk),
678 _ => None,
679 }
680 }
681}
/// Which allocator family emitted code calls into (see `AllocatorCodegen`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AllocStrategy {
    /// libc `malloc` / `free`.
    System,
    /// Bump allocator (`bump_alloc`); freeing is a no-op.
    Bump,
    /// Pool allocator (`pool_alloc` / `pool_free`).
    Pool,
    /// Lean runtime small-object allocator (`lean_alloc_small` /
    /// `lean_free_small`).
    LeanRuntime,
}
/// Counts how many times each LCNF variable is used across a module; multi-use
/// variables are the ones that need extra RC increments.
#[allow(dead_code)]
pub struct RcUseAnalysis {
    /// Use count per variable, accumulated by the analyze_* walkers.
    pub(super) use_counts: HashMap<LcnfVarId, usize>,
}
700impl RcUseAnalysis {
701 #[allow(dead_code)]
703 pub fn new() -> Self {
704 RcUseAnalysis {
705 use_counts: HashMap::new(),
706 }
707 }
708 #[allow(dead_code)]
710 pub fn analyze_module(&mut self, module: &LcnfModule) {
711 for decl in &module.fun_decls {
712 self.analyze_expr(&decl.body);
713 }
714 }
715 #[allow(dead_code)]
717 pub fn analyze_expr(&mut self, expr: &LcnfExpr) {
718 match expr {
719 LcnfExpr::Let { value, body, .. } => {
720 self.analyze_let_value(value);
721 self.analyze_expr(body);
722 }
723 LcnfExpr::Case {
724 scrutinee,
725 alts,
726 default,
727 ..
728 } => {
729 self.inc_use(*scrutinee);
730 for alt in alts {
731 self.analyze_expr(&alt.body);
732 }
733 if let Some(def) = default {
734 self.analyze_expr(def);
735 }
736 }
737 LcnfExpr::Return(arg) => self.analyze_arg(arg),
738 LcnfExpr::TailCall(func, args) => {
739 self.analyze_arg(func);
740 for a in args {
741 self.analyze_arg(a);
742 }
743 }
744 LcnfExpr::Unreachable => {}
745 }
746 }
747 pub(super) fn analyze_let_value(&mut self, value: &LcnfLetValue) {
748 match value {
749 LcnfLetValue::App(func, args) => {
750 self.analyze_arg(func);
751 for a in args {
752 self.analyze_arg(a);
753 }
754 }
755 LcnfLetValue::Ctor(_, _, args) => {
756 for a in args {
757 self.analyze_arg(a);
758 }
759 }
760 LcnfLetValue::Proj(_, _, v) | LcnfLetValue::Reset(v) => {
761 self.inc_use(*v);
762 }
763 LcnfLetValue::FVar(v) => self.inc_use(*v),
764 LcnfLetValue::Reuse(slot, _, _, args) => {
765 self.inc_use(*slot);
766 for a in args {
767 self.analyze_arg(a);
768 }
769 }
770 LcnfLetValue::Lit(_) | LcnfLetValue::Erased => {}
771 }
772 }
773 pub(super) fn analyze_arg(&mut self, arg: &LcnfArg) {
774 if let LcnfArg::Var(v) = arg {
775 self.inc_use(*v);
776 }
777 }
778 pub(super) fn inc_use(&mut self, var: LcnfVarId) {
779 *self.use_counts.entry(var).or_insert(0) += 1;
780 }
781 #[allow(dead_code)]
783 pub fn use_count(&self, var: LcnfVarId) -> usize {
784 self.use_counts.get(&var).copied().unwrap_or(0)
785 }
786 #[allow(dead_code)]
788 pub fn multi_use_vars(&self) -> Vec<(LcnfVarId, usize)> {
789 self.use_counts
790 .iter()
791 .filter(|(_, &c)| c > 1)
792 .map(|(&v, &c)| (v, c))
793 .collect()
794 }
795}
/// Emits raw allocation/free calls and the constructor/closure allocators,
/// dispatching on the configured allocation strategy.
pub struct AllocatorCodegen {
    /// Monotonic counter backing `fresh_reg` (virtual registers 700+).
    pub(super) temp_counter: u32,
    /// Selects which allocator functions `emit_alloc`/`emit_free` call.
    pub(super) strategy: AllocStrategy,
}
803impl AllocatorCodegen {
804 pub fn new(strategy: AllocStrategy) -> Self {
806 AllocatorCodegen {
807 temp_counter: 0,
808 strategy,
809 }
810 }
811 pub(super) fn fresh_reg(&mut self) -> Register {
813 let r = Register::virt(700 + self.temp_counter);
814 self.temp_counter += 1;
815 r
816 }
817 pub fn emit_alloc(&mut self, size: usize, align: usize) -> Vec<NativeInst> {
819 let dst = self.fresh_reg();
820 let alloc_fn = match self.strategy {
821 AllocStrategy::System => "malloc",
822 AllocStrategy::Bump => "bump_alloc",
823 AllocStrategy::Pool => "pool_alloc",
824 AllocStrategy::LeanRuntime => "lean_alloc_small",
825 };
826 vec![
827 NativeInst::Comment(format!(
828 "Alloc {} bytes, align {} ({})",
829 size, align, alloc_fn
830 )),
831 NativeInst::Call {
832 dst: Some(dst),
833 func: NativeValue::FRef(alloc_fn.to_string()),
834 args: vec![NativeValue::Imm(size as i64)],
835 ret_type: NativeType::Ptr,
836 },
837 ]
838 }
839 pub fn emit_free(&mut self, ptr_reg: Register) -> Vec<NativeInst> {
841 let free_fn = match self.strategy {
842 AllocStrategy::System => "free",
843 AllocStrategy::Bump => {
844 return vec![NativeInst::Comment("Bump: no free".to_string())];
845 }
846 AllocStrategy::Pool => "pool_free",
847 AllocStrategy::LeanRuntime => "lean_free_small",
848 };
849 vec![
850 NativeInst::Comment(format!("Free {}", ptr_reg)),
851 NativeInst::Call {
852 dst: None,
853 func: NativeValue::FRef(free_fn.to_string()),
854 args: vec![NativeValue::Reg(ptr_reg)],
855 ret_type: NativeType::Void,
856 },
857 ]
858 }
859 pub fn emit_alloc_ctor(
861 &mut self,
862 tag: u32,
863 num_objs: usize,
864 scalar_sz: usize,
865 ) -> Vec<NativeInst> {
866 let dst = self.fresh_reg();
867 vec![
868 NativeInst::Comment(format!(
869 "Alloc ctor tag={}, objs={}, scalar={}",
870 tag, num_objs, scalar_sz
871 )),
872 NativeInst::Call {
873 dst: Some(dst),
874 func: NativeValue::FRef("lean_alloc_ctor".to_string()),
875 args: vec![
876 NativeValue::Imm(tag as i64),
877 NativeValue::Imm(num_objs as i64),
878 NativeValue::Imm(scalar_sz as i64),
879 ],
880 ret_type: NativeType::Ptr,
881 },
882 ]
883 }
884 pub fn emit_alloc_closure(
886 &mut self,
887 fn_name: &str,
888 arity: usize,
889 num_captured: usize,
890 ) -> Vec<NativeInst> {
891 let dst = self.fresh_reg();
892 vec![
893 NativeInst::Comment(format!(
894 "Alloc closure @{} arity={} captured={}",
895 fn_name, arity, num_captured
896 )),
897 NativeInst::Call {
898 dst: Some(dst),
899 func: NativeValue::FRef("lean_alloc_closure".to_string()),
900 args: vec![
901 NativeValue::FRef(fn_name.to_string()),
902 NativeValue::Imm(arity as i64),
903 NativeValue::Imm(num_captured as i64),
904 ],
905 ret_type: NativeType::Ptr,
906 },
907 ]
908 }
909}
/// Constructor metadata for an inductive type, as registered with
/// `LayoutComputer::register_type`.
#[derive(Debug, Clone)]
pub struct TypeInfo {
    /// Type name; used as the registry key.
    pub name: String,
    /// One `(ctor_name, ctor_tag, field_types)` triple per constructor.
    pub constructors: Vec<(String, u32, Vec<LcnfType>)>,
    /// Whether the type refers to itself — not consulted in this file;
    /// confirm intended use at the consumer.
    pub is_recursive: bool,
}
/// Byte-level layout of a heap object: a fixed 16-byte header followed by
/// 8-byte boxed fields and then a scalar area.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ObjectLayout {
    /// Width of the tag field in bits (always 8 in this file's constructors).
    pub tag_bits: u8,
    /// Byte offset of the reference count (0).
    pub rc_offset: usize,
    /// Byte offset where the payload begins (right after the header).
    pub payload_offset: usize,
    /// Total allocation size; aligned to `alignment` except for externals.
    pub total_size: usize,
    /// Required alignment (8 in this file's constructors).
    pub alignment: usize,
    /// Number of 8-byte boxed fields at the start of the payload.
    pub num_obj_fields: usize,
    /// Size in bytes of the trailing scalar area.
    pub scalar_size: usize,
    /// Runtime kind of the object.
    pub tag: ObjectTag,
}
950impl ObjectLayout {
951 pub const HEADER_SIZE: usize = 16;
953 pub const RC_OFFSET: usize = 0;
955 pub const TAG_OFFSET: usize = 8;
957 pub const DEFAULT_ALIGN: usize = 8;
959 pub fn for_ctor(_ctor_tag: u32, num_obj_fields: usize, scalar_size: usize) -> Self {
961 let payload_size = num_obj_fields * 8 + scalar_size;
962 let total_size = Self::HEADER_SIZE + payload_size;
963 let aligned_size = align_up(total_size, Self::DEFAULT_ALIGN);
964 ObjectLayout {
965 tag_bits: 8,
966 rc_offset: Self::RC_OFFSET,
967 payload_offset: Self::HEADER_SIZE,
968 total_size: aligned_size,
969 alignment: Self::DEFAULT_ALIGN,
970 num_obj_fields,
971 scalar_size,
972 tag: ObjectTag::Struct,
973 }
974 }
975 pub fn for_closure(_arity: usize, num_captured: usize) -> Self {
977 let closure_header = 16;
978 let payload_size = closure_header + num_captured * 8;
979 let total_size = Self::HEADER_SIZE + payload_size;
980 let aligned_size = align_up(total_size, Self::DEFAULT_ALIGN);
981 ObjectLayout {
982 tag_bits: 8,
983 rc_offset: Self::RC_OFFSET,
984 payload_offset: Self::HEADER_SIZE,
985 total_size: aligned_size,
986 alignment: Self::DEFAULT_ALIGN,
987 num_obj_fields: num_captured,
988 scalar_size: closure_header,
989 tag: ObjectTag::Closure,
990 }
991 }
992 pub fn for_array(capacity: usize) -> Self {
994 let array_header = 16;
995 let payload_size = array_header + capacity * 8;
996 let total_size = Self::HEADER_SIZE + payload_size;
997 let aligned_size = align_up(total_size, Self::DEFAULT_ALIGN);
998 ObjectLayout {
999 tag_bits: 8,
1000 rc_offset: Self::RC_OFFSET,
1001 payload_offset: Self::HEADER_SIZE,
1002 total_size: aligned_size,
1003 alignment: Self::DEFAULT_ALIGN,
1004 num_obj_fields: capacity,
1005 scalar_size: array_header,
1006 tag: ObjectTag::Array,
1007 }
1008 }
1009 pub fn for_external() -> Self {
1011 let payload_size = 24;
1012 let total_size = Self::HEADER_SIZE + payload_size;
1013 ObjectLayout {
1014 tag_bits: 8,
1015 rc_offset: Self::RC_OFFSET,
1016 payload_offset: Self::HEADER_SIZE,
1017 total_size,
1018 alignment: Self::DEFAULT_ALIGN,
1019 num_obj_fields: 0,
1020 scalar_size: payload_size,
1021 tag: ObjectTag::External,
1022 }
1023 }
1024 pub fn obj_field_offset(&self, idx: usize) -> usize {
1026 assert!(idx < self.num_obj_fields);
1027 self.payload_offset + idx * 8
1028 }
1029 pub fn scalar_offset(&self) -> usize {
1031 self.payload_offset + self.num_obj_fields * 8
1032 }
1033}
/// Emits reference-counting operations; every emitter degrades to a
/// comment-only no-op when disabled.
pub struct RcCodegen {
    /// When false, inc/dec/reset emitters produce comments instead of calls.
    pub(super) enabled: bool,
    /// Monotonic counter backing `fresh_reg` (virtual registers 500+).
    pub(super) temp_counter: u32,
}
1041impl RcCodegen {
1042 pub fn new(enabled: bool) -> Self {
1044 RcCodegen {
1045 enabled,
1046 temp_counter: 0,
1047 }
1048 }
1049 pub(super) fn fresh_reg(&mut self) -> Register {
1051 let r = Register::virt(500 + self.temp_counter);
1052 self.temp_counter += 1;
1053 r
1054 }
1055 pub fn emit_rc_inc(&mut self, obj_reg: Register) -> Vec<NativeInst> {
1063 if !self.enabled {
1064 return vec![NativeInst::Comment("RC inc (disabled)".to_string())];
1065 }
1066 vec![
1067 NativeInst::Comment(format!("RC inc {}", obj_reg)),
1068 NativeInst::Call {
1069 dst: None,
1070 func: NativeValue::FRef("lean_inc_ref".to_string()),
1071 args: vec![NativeValue::Reg(obj_reg)],
1072 ret_type: NativeType::Void,
1073 },
1074 ]
1075 }
1076 pub fn emit_rc_dec(&mut self, obj_reg: Register) -> Vec<NativeInst> {
1085 if !self.enabled {
1086 return vec![NativeInst::Comment("RC dec (disabled)".to_string())];
1087 }
1088 vec![
1089 NativeInst::Comment(format!("RC dec {}", obj_reg)),
1090 NativeInst::Call {
1091 dst: None,
1092 func: NativeValue::FRef("lean_dec_ref".to_string()),
1093 args: vec![NativeValue::Reg(obj_reg)],
1094 ret_type: NativeType::Void,
1095 },
1096 ]
1097 }
1098 pub fn emit_rc_is_unique(&mut self, obj_reg: Register) -> NativeInst {
1101 let dst = self.fresh_reg();
1102 NativeInst::Call {
1103 dst: Some(dst),
1104 func: NativeValue::FRef("lean_is_exclusive".to_string()),
1105 args: vec![NativeValue::Reg(obj_reg)],
1106 ret_type: NativeType::I8,
1107 }
1108 }
1109 pub fn emit_conditional_reset(
1112 &mut self,
1113 obj_reg: Register,
1114 _num_obj_fields: usize,
1115 _scalar_size: usize,
1116 ) -> Vec<NativeInst> {
1117 if !self.enabled {
1118 return vec![NativeInst::Comment(
1119 "Conditional reset (disabled)".to_string(),
1120 )];
1121 }
1122 let is_unique = self.fresh_reg();
1123 let result_reg = self.fresh_reg();
1124 vec![
1125 NativeInst::Comment(format!("Conditional reset {}", obj_reg)),
1126 NativeInst::Call {
1127 dst: Some(is_unique),
1128 func: NativeValue::FRef("lean_is_exclusive".to_string()),
1129 args: vec![NativeValue::Reg(obj_reg)],
1130 ret_type: NativeType::I8,
1131 },
1132 NativeInst::Select {
1133 dst: result_reg,
1134 ty: NativeType::Ptr,
1135 cond: NativeValue::Reg(is_unique),
1136 true_val: NativeValue::Reg(obj_reg),
1137 false_val: NativeValue::Imm(0),
1138 },
1139 ]
1140 }
1141 pub fn emit_rc_inc_n(&mut self, obj_reg: Register, count: usize) -> Vec<NativeInst> {
1143 if !self.enabled || count == 0 {
1144 return Vec::new();
1145 }
1146 let mut insts = Vec::new();
1147 if count == 1 {
1148 insts.extend(self.emit_rc_inc(obj_reg));
1149 } else {
1150 insts.push(NativeInst::Comment(format!(
1151 "RC inc {} (x{})",
1152 obj_reg, count
1153 )));
1154 insts.push(NativeInst::Call {
1155 dst: None,
1156 func: NativeValue::FRef("lean_inc_ref_n".to_string()),
1157 args: vec![NativeValue::Reg(obj_reg), NativeValue::Imm(count as i64)],
1158 ret_type: NativeType::Void,
1159 });
1160 }
1161 insts
1162 }
1163}
/// Emits native instructions for Lean string operations.
///
/// NOTE(review): `RuntimeModuleBuilder::new` constructs this through
/// `StringCodegen::default()`, but no `Default` impl is visible in this
/// chunk — presumably provided elsewhere; confirm.
#[allow(dead_code)]
pub struct StringCodegen {
    /// Monotonic counter backing `fresh_reg` (virtual registers 1000+).
    pub(super) temp_counter: u32,
    /// String object layout description.
    pub(super) layout: StringLayout,
}
1170impl StringCodegen {
1171 #[allow(dead_code)]
1173 pub fn new(layout: StringLayout) -> Self {
1174 StringCodegen {
1175 temp_counter: 0,
1176 layout,
1177 }
1178 }
1179 pub(super) fn fresh_reg(&mut self) -> Register {
1180 let r = Register::virt(1000 + self.temp_counter);
1181 self.temp_counter += 1;
1182 r
1183 }
1184 #[allow(dead_code)]
1186 pub fn emit_string_lit(&mut self, value: &str) -> Vec<NativeInst> {
1187 let dst = self.fresh_reg();
1188 vec![
1189 NativeInst::Comment(format!("String literal {:?}", value)),
1190 NativeInst::Call {
1191 dst: Some(dst),
1192 func: NativeValue::FRef("lean_mk_string".to_string()),
1193 args: vec![NativeValue::Imm(value.len() as i64)],
1194 ret_type: NativeType::Ptr,
1195 },
1196 ]
1197 }
1198 #[allow(dead_code)]
1200 pub fn emit_string_append(&mut self, lhs: Register, rhs: Register) -> Vec<NativeInst> {
1201 let dst = self.fresh_reg();
1202 vec![
1203 NativeInst::Comment(format!("String append {} ++ {}", lhs, rhs)),
1204 NativeInst::Call {
1205 dst: Some(dst),
1206 func: NativeValue::FRef("lean_string_append".to_string()),
1207 args: vec![NativeValue::Reg(lhs), NativeValue::Reg(rhs)],
1208 ret_type: NativeType::Ptr,
1209 },
1210 ]
1211 }
1212 #[allow(dead_code)]
1214 pub fn emit_string_length(&mut self, str_reg: Register) -> Vec<NativeInst> {
1215 let dst = self.fresh_reg();
1216 vec![
1217 NativeInst::Comment(format!("String length {}", str_reg)),
1218 NativeInst::Load {
1219 dst,
1220 ty: NativeType::I64,
1221 addr: NativeValue::Reg(str_reg),
1222 },
1223 ]
1224 }
1225 #[allow(dead_code)]
1227 pub fn emit_string_eq(&mut self, lhs: Register, rhs: Register) -> Vec<NativeInst> {
1228 let dst = self.fresh_reg();
1229 vec![
1230 NativeInst::Comment(format!("String eq {} == {}", lhs, rhs)),
1231 NativeInst::Call {
1232 dst: Some(dst),
1233 func: NativeValue::FRef("lean_string_eq".to_string()),
1234 args: vec![NativeValue::Reg(lhs), NativeValue::Reg(rhs)],
1235 ret_type: NativeType::I8,
1236 },
1237 ]
1238 }
1239}
/// Emits native instructions for arbitrary-precision natural-number
/// arithmetic via the `lean_nat_*` runtime functions.
#[allow(dead_code)]
pub struct BigNatCodegen {
    /// Monotonic counter backing `fresh_reg` (virtual registers 1200+).
    pub(super) temp_counter: u32,
}
1245impl BigNatCodegen {
1246 #[allow(dead_code)]
1248 pub fn new() -> Self {
1249 BigNatCodegen { temp_counter: 0 }
1250 }
1251 pub(super) fn fresh_reg(&mut self) -> Register {
1252 let r = Register::virt(1200 + self.temp_counter);
1253 self.temp_counter += 1;
1254 r
1255 }
1256 #[allow(dead_code)]
1258 pub fn emit_add(&mut self, lhs: Register, rhs: Register) -> Vec<NativeInst> {
1259 let dst = self.fresh_reg();
1260 vec![
1261 NativeInst::Comment(format!("BigNat add {} + {}", lhs, rhs)),
1262 NativeInst::Call {
1263 dst: Some(dst),
1264 func: NativeValue::FRef("lean_nat_add".to_string()),
1265 args: vec![NativeValue::Reg(lhs), NativeValue::Reg(rhs)],
1266 ret_type: NativeType::Ptr,
1267 },
1268 ]
1269 }
1270 #[allow(dead_code)]
1272 pub fn emit_mul(&mut self, lhs: Register, rhs: Register) -> Vec<NativeInst> {
1273 let dst = self.fresh_reg();
1274 vec![
1275 NativeInst::Comment(format!("BigNat mul {} * {}", lhs, rhs)),
1276 NativeInst::Call {
1277 dst: Some(dst),
1278 func: NativeValue::FRef("lean_nat_mul".to_string()),
1279 args: vec![NativeValue::Reg(lhs), NativeValue::Reg(rhs)],
1280 ret_type: NativeType::Ptr,
1281 },
1282 ]
1283 }
1284 #[allow(dead_code)]
1286 pub fn emit_sub(&mut self, lhs: Register, rhs: Register) -> Vec<NativeInst> {
1287 let dst = self.fresh_reg();
1288 vec![
1289 NativeInst::Comment(format!("BigNat sub {} - {}", lhs, rhs)),
1290 NativeInst::Call {
1291 dst: Some(dst),
1292 func: NativeValue::FRef("lean_nat_sub".to_string()),
1293 args: vec![NativeValue::Reg(lhs), NativeValue::Reg(rhs)],
1294 ret_type: NativeType::Ptr,
1295 },
1296 ]
1297 }
1298 #[allow(dead_code)]
1300 pub fn emit_div(&mut self, lhs: Register, rhs: Register) -> Vec<NativeInst> {
1301 let dst = self.fresh_reg();
1302 vec![
1303 NativeInst::Comment(format!("BigNat div {} / {}", lhs, rhs)),
1304 NativeInst::Call {
1305 dst: Some(dst),
1306 func: NativeValue::FRef("lean_nat_div".to_string()),
1307 args: vec![NativeValue::Reg(lhs), NativeValue::Reg(rhs)],
1308 ret_type: NativeType::Ptr,
1309 },
1310 ]
1311 }
1312 #[allow(dead_code)]
1314 pub fn emit_cmp(&mut self, lhs: Register, rhs: Register) -> Vec<NativeInst> {
1315 let dst = self.fresh_reg();
1316 vec![
1317 NativeInst::Comment(format!("BigNat cmp {} vs {}", lhs, rhs)),
1318 NativeInst::Call {
1319 dst: Some(dst),
1320 func: NativeValue::FRef("lean_nat_cmp".to_string()),
1321 args: vec![NativeValue::Reg(lhs), NativeValue::Reg(rhs)],
1322 ret_type: NativeType::I8,
1323 },
1324 ]
1325 }
1326 #[allow(dead_code)]
1328 pub fn emit_of_u64(&mut self, val: i64) -> Vec<NativeInst> {
1329 let dst = self.fresh_reg();
1330 vec![
1331 NativeInst::Comment(format!("BigNat from u64 {}", val)),
1332 NativeInst::Call {
1333 dst: Some(dst),
1334 func: NativeValue::FRef("lean_nat_mk_small".to_string()),
1335 args: vec![NativeValue::Imm(val)],
1336 ret_type: NativeType::Ptr,
1337 },
1338 ]
1339 }
1340}
/// Computes and memoizes object layouts for registered inductive types.
pub struct LayoutComputer {
    /// Memoized constructor layouts keyed by "name#tag".
    pub(super) cache: HashMap<String, ObjectLayout>,
    /// Registered type metadata keyed by type name.
    pub(super) type_info: HashMap<String, TypeInfo>,
}
1348impl LayoutComputer {
1349 pub fn new() -> Self {
1351 LayoutComputer {
1352 cache: HashMap::new(),
1353 type_info: HashMap::new(),
1354 }
1355 }
1356 pub fn register_type(&mut self, info: TypeInfo) {
1358 self.type_info.insert(info.name.clone(), info);
1359 }
1360 pub fn compute_ctor_layout(
1362 &mut self,
1363 ctor_name: &str,
1364 ctor_tag: u32,
1365 field_types: &[LcnfType],
1366 ) -> ObjectLayout {
1367 let cache_key = format!("{}#{}", ctor_name, ctor_tag);
1368 if let Some(layout) = self.cache.get(&cache_key) {
1369 return layout.clone();
1370 }
1371 let mut num_obj_fields = 0usize;
1372 let mut scalar_size = 0usize;
1373 for ty in field_types {
1374 if is_boxed_type(ty) {
1375 num_obj_fields += 1;
1376 } else {
1377 scalar_size += scalar_type_size(ty);
1378 }
1379 }
1380 let layout = ObjectLayout::for_ctor(ctor_tag, num_obj_fields, scalar_size);
1381 self.cache.insert(cache_key, layout.clone());
1382 layout
1383 }
1384 pub fn compute_layout(&mut self, type_name: &str) -> Vec<(String, ObjectLayout)> {
1386 if let Some(info) = self.type_info.get(type_name).cloned() {
1387 let mut layouts = Vec::new();
1388 for (ctor_name, ctor_tag, field_types) in &info.constructors {
1389 let layout = self.compute_ctor_layout(ctor_name, *ctor_tag, field_types);
1390 layouts.push((ctor_name.clone(), layout));
1391 }
1392 layouts
1393 } else {
1394 Vec::new()
1395 }
1396 }
1397 pub fn get_cached(&self, key: &str) -> Option<&ObjectLayout> {
1399 self.cache.get(key)
1400 }
1401}