1use crate::alloc::emit_alloc_fast_path;
2use crate::emit::*;
3use crate::pipeline::CodegenPipeline;
4use cranelift_codegen::ir::{
5 self, condcodes::IntCC, types, AbiParam, BlockArg, InstBuilder, MemFlags, Signature,
6 UserFuncName, Value,
7};
8use cranelift_codegen::Context;
9use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext};
10use cranelift_module::{DataDescription, FuncId, Linkage, Module};
11use recursion::{try_expand_and_collapse, MappableFrame};
12use tidepool_heap::layout;
13use tidepool_repr::*;
14
/// Uninhabited token type: never constructed, exists only to carry the
/// `MappableFrame` impl that names `EmitFrame<X>` as the frame family
/// for the `recursion` crate's expand/collapse driver.
enum EmitFrameToken {}
21
/// One Core node with its child positions abstracted over `A`.
///
/// During expansion `A = usize` (indices into the `CoreExpr` node arena);
/// during collapse `A = SsaVal` (children already lowered to IR values).
/// `expand_node` builds these; `collapse_frame` consumes them.
enum EmitFrame<A> {
    /// Reference to a (possibly unbound) variable.
    Var(VarId),
    /// Any literal except strings.
    Lit(Literal),
    /// String literal bytes, split out of `Lit` for dedicated emission.
    LitString(Vec<u8>),

    /// Constructor whose fields were all judged trivial and are emitted
    /// eagerly as children.
    Con {
        tag: DataConId,
        fields: Vec<A>,
    },
    /// Function application: `fun arg`.
    App {
        fun: A,
        arg: A,
    },
    PrimOp {
        op: PrimOpKind,
        args: Vec<A>,
    },
    /// Jump to a join point with arguments.
    Jump {
        label: JoinId,
        args: Vec<A>,
    },

    /// Case analysis; alternative bodies stay as tree indices
    /// (`Alt<usize>`) so `emit_case` can lower each branch itself.
    Case {
        scrutinee: A,
        binder: VarId,
        alts: Vec<Alt<usize>>,
    },

    /// Lambda; the body stays a tree index because it is compiled into a
    /// separate closure function by `emit_lam`.
    Lam {
        binder: VarId,
        body_idx: usize,
    },

    /// Join-point definition; rhs/body stay as tree indices for `emit_join`.
    Join {
        label: JoinId,
        params: Vec<VarId>,
        rhs_idx: usize,
        body_idx: usize,
    },

    /// Constructor with at least one non-trivial field; field expressions
    /// stay as tree indices so `collapse_frame` can wrap each non-trivial
    /// one in a thunk.
    ThunkCon {
        tag: DataConId,
        field_indices: Vec<usize>,
    },

    /// `LetNonRec`/`LetRec` node, kept as a tree index and handled
    /// out-of-band by `EmitContext::emit_node`.
    LetBoundary(usize),
}
81
82impl MappableFrame for EmitFrameToken {
83 type Frame<X> = EmitFrame<X>;
84
85 fn map_frame<A, B>(input: EmitFrame<A>, mut f: impl FnMut(A) -> B) -> EmitFrame<B> {
86 match input {
87 EmitFrame::Var(v) => EmitFrame::Var(v),
88 EmitFrame::Lit(l) => EmitFrame::Lit(l),
89 EmitFrame::LitString(b) => EmitFrame::LitString(b),
90 EmitFrame::Con { tag, fields } => EmitFrame::Con {
91 tag,
92 fields: fields.into_iter().map(&mut f).collect(),
93 },
94 EmitFrame::App { fun, arg } => EmitFrame::App {
95 fun: f(fun),
96 arg: f(arg),
97 },
98 EmitFrame::PrimOp { op, args } => EmitFrame::PrimOp {
99 op,
100 args: args.into_iter().map(&mut f).collect(),
101 },
102 EmitFrame::Jump { label, args } => EmitFrame::Jump {
103 label,
104 args: args.into_iter().map(&mut f).collect(),
105 },
106 EmitFrame::Case {
107 scrutinee,
108 binder,
109 alts,
110 } => EmitFrame::Case {
111 scrutinee: f(scrutinee),
112 binder,
113 alts,
114 },
115 EmitFrame::Lam { binder, body_idx } => EmitFrame::Lam { binder, body_idx },
116 EmitFrame::Join {
117 label,
118 params,
119 rhs_idx,
120 body_idx,
121 } => EmitFrame::Join {
122 label,
123 params,
124 rhs_idx,
125 body_idx,
126 },
127 EmitFrame::ThunkCon { tag, field_indices } => {
128 EmitFrame::ThunkCon { tag, field_indices }
129 }
130 EmitFrame::LetBoundary(idx) => EmitFrame::LetBoundary(idx),
131 }
132 }
133}
134
135fn expand_node(tree: &CoreExpr, idx: usize) -> Result<EmitFrame<usize>, EmitError> {
141 match &tree.nodes[idx] {
142 CoreFrame::Var(v) => Ok(EmitFrame::Var(*v)),
143 CoreFrame::Lit(Literal::LitString(bytes)) => Ok(EmitFrame::LitString(bytes.clone())),
144 CoreFrame::Lit(lit) => Ok(EmitFrame::Lit(lit.clone())),
145 CoreFrame::Con { tag, fields } => {
146 let has_non_trivial = fields.iter().any(|&f| !is_trivial_field(f, tree));
147 if has_non_trivial {
148 Ok(EmitFrame::ThunkCon {
149 tag: *tag,
150 field_indices: fields.clone(),
151 })
152 } else {
153 Ok(EmitFrame::Con {
154 tag: *tag,
155 fields: fields.clone(),
156 })
157 }
158 }
159 CoreFrame::App { fun, arg } => Ok(EmitFrame::App {
160 fun: *fun,
161 arg: *arg,
162 }),
163 CoreFrame::PrimOp { op, args } => Ok(EmitFrame::PrimOp {
164 op: *op,
165 args: args.clone(),
166 }),
167 CoreFrame::Jump { label, args } => Ok(EmitFrame::Jump {
168 label: *label,
169 args: args.clone(),
170 }),
171 CoreFrame::Case {
172 scrutinee,
173 binder,
174 alts,
175 } => Ok(EmitFrame::Case {
176 scrutinee: *scrutinee,
177 binder: *binder,
178 alts: alts.clone(),
179 }),
180 CoreFrame::Lam { binder, body } => Ok(EmitFrame::Lam {
181 binder: *binder,
182 body_idx: *body,
183 }),
184 CoreFrame::Join {
185 label,
186 params,
187 rhs,
188 body,
189 } => Ok(EmitFrame::Join {
190 label: *label,
191 params: params.clone(),
192 rhs_idx: *rhs,
193 body_idx: *body,
194 }),
195 CoreFrame::LetNonRec { .. } | CoreFrame::LetRec { .. } => Ok(EmitFrame::LetBoundary(idx)),
196 }
197}
198
/// Lower one frame to Cranelift IR. By the time this runs, the recursion
/// driver has already collapsed all `A`-children to `SsaVal`s; frames that
/// kept tree indices (case alts, join bodies, thunked fields, lets) are
/// lowered here via the dedicated helpers. `tail` is only consulted by the
/// `Case` arm (threaded through to `emit_case`).
#[allow(clippy::too_many_arguments)]
fn collapse_frame(
    ctx: &mut EmitContext,
    sess: &mut EmitSession,
    builder: &mut FunctionBuilder,
    frame: EmitFrame<SsaVal>,
    tail: TailCtx,
) -> Result<SsaVal, EmitError> {
    match frame {
        EmitFrame::LitString(ref bytes) => emit_lit_string(
            sess.pipeline,
            builder,
            sess.vmctx,
            sess.gc_sig,
            sess.oom_func,
            bytes,
            &mut ctx.lambda_counter,
        ),
        EmitFrame::Lit(ref lit) => emit_lit(builder, sess.vmctx, sess.gc_sig, sess.oom_func, lit),
        EmitFrame::Var(vid) => match ctx.env.get(&vid).copied() {
            Some(v) => Ok(v),
            None => {
                // Variable not in the emit environment. Error sentinels are
                // encoded in the VarId itself: tag in the top byte, error
                // kind in the low byte.
                let tag = (vid.0 >> 56) as u8;
                if tag == tidepool_repr::ERROR_SENTINEL_TAG {
                    let kind = vid.0 & 0xFF;
                    // Lower to a lazily-created poison pointer constant.
                    let poison_addr = crate::host_fns::error_poison_ptr_lazy(kind) as i64;
                    let poison_val = builder.ins().iconst(types::I64, poison_addr);
                    return Ok(SsaVal::HeapPtr(poison_val));
                }

                // Genuine scope miss: log it and emit a call to the imported
                // runtime trap so the failure surfaces at run time with the
                // offending VarId.
                ctx.trace_scope(&format!(
                    "MISS var {:?} (env has {} entries)",
                    vid,
                    ctx.env.len()
                ));
                let trap_fn = sess
                    .pipeline
                    .module
                    .declare_function("unresolved_var_trap", Linkage::Import, &{
                        // Signature: fn(var_id: i64) -> i64
                        let mut sig = Signature::new(sess.pipeline.isa.default_call_conv());
                        sig.params.push(AbiParam::new(types::I64));
                        sig.returns.push(AbiParam::new(types::I64));
                        sig
                    })
                    .map_err(|e| EmitError::CraneliftError(e.to_string()))?;
                let trap_ref = sess
                    .pipeline
                    .module
                    .declare_func_in_func(trap_fn, builder.func);
                let var_id_val = builder.ins().iconst(types::I64, vid.0 as i64);
                let inst = builder.ins().call(trap_ref, &[var_id_val]);
                let result = builder.inst_results(inst)[0];
                builder.declare_value_needs_stack_map(result);
                Ok(SsaVal::HeapPtr(result))
            }
        },
        EmitFrame::Con { tag, fields } => {
            // Eager constructor: all fields already lowered; box each one as
            // a heap pointer before allocating the object.
            let field_vals: Vec<Value> = fields
                .iter()
                .map(|v| ensure_heap_ptr(builder, sess.vmctx, sess.gc_sig, sess.oom_func, *v))
                .collect();

            // Object layout: 24-byte header + 8 bytes per field.
            let num_fields = field_vals.len();
            let size = 24 + 8 * num_fields as u64;
            let ptr = emit_alloc_fast_path(builder, sess.vmctx, size, sess.gc_sig, sess.oom_func);

            // Header: tag byte at offset 0, i16 object size at offset 1.
            let tag_val = builder.ins().iconst(types::I8, layout::TAG_CON as i64);
            builder.ins().store(MemFlags::trusted(), tag_val, ptr, 0);
            let size_val = builder.ins().iconst(types::I16, size as i64);
            builder.ins().store(MemFlags::trusted(), size_val, ptr, 1);

            // Constructor tag and field count.
            let con_tag_val = builder.ins().iconst(types::I64, tag.0 as i64);
            builder
                .ins()
                .store(MemFlags::trusted(), con_tag_val, ptr, CON_TAG_OFFSET);
            let num_fields_val = builder.ins().iconst(types::I16, num_fields as i64);
            builder.ins().store(
                MemFlags::trusted(),
                num_fields_val,
                ptr,
                CON_NUM_FIELDS_OFFSET,
            );

            // Field slots are 8 bytes apart starting at CON_FIELDS_OFFSET.
            for (i, field_val) in field_vals.into_iter().enumerate() {
                builder.ins().store(
                    MemFlags::trusted(),
                    field_val,
                    ptr,
                    CON_FIELDS_OFFSET + 8 * i as i32,
                );
            }

            builder.declare_value_needs_stack_map(ptr);
            Ok(SsaVal::HeapPtr(ptr))
        }
        EmitFrame::ThunkCon { tag, field_indices } => {
            // Constructor with lazy fields. Same object layout as `Con`,
            // but each field expression is lowered here: trivial fields are
            // emitted inline, non-trivial ones are wrapped in a thunk.
            let num_fields = field_indices.len();
            let size = 24 + 8 * num_fields as u64;
            let ptr = emit_alloc_fast_path(builder, sess.vmctx, size, sess.gc_sig, sess.oom_func);

            let tag_val = builder.ins().iconst(types::I8, layout::TAG_CON as i64);
            builder.ins().store(MemFlags::trusted(), tag_val, ptr, 0);
            let size_val = builder.ins().iconst(types::I16, size as i64);
            builder.ins().store(MemFlags::trusted(), size_val, ptr, 1);

            let con_tag_val = builder.ins().iconst(types::I64, tag.0 as i64);
            builder
                .ins()
                .store(MemFlags::trusted(), con_tag_val, ptr, CON_TAG_OFFSET);
            let num_fields_val = builder.ins().iconst(types::I16, num_fields as i64);
            builder.ins().store(
                MemFlags::trusted(),
                num_fields_val,
                ptr,
                CON_NUM_FIELDS_OFFSET,
            );

            for (i, &f_idx) in field_indices.iter().enumerate() {
                let field_val = if is_trivial_field(f_idx, sess.tree) {
                    // Cheap expression: evaluate it right here.
                    let val = ctx.emit_node(sess, builder, f_idx, TailCtx::NonTail)?;
                    ensure_heap_ptr(builder, sess.vmctx, sess.gc_sig, sess.oom_func, val)
                } else {
                    // Expensive expression: defer behind a freshly-built thunk.
                    let thunk_val = emit_thunk(ctx, sess, builder, f_idx)?;
                    thunk_val.value()
                };
                builder.ins().store(
                    MemFlags::trusted(),
                    field_val,
                    ptr,
                    CON_FIELDS_OFFSET + 8 * i as i32,
                );
            }

            builder.declare_value_needs_stack_map(ptr);
            Ok(SsaVal::HeapPtr(ptr))
        }
        EmitFrame::PrimOp { ref op, ref args } => {
            if matches!(op, tidepool_repr::PrimOpKind::Raise) {
                // `Raise` short-circuits to the imported runtime_error
                // host function instead of going through emit_primop.
                let err_fn = sess
                    .pipeline
                    .module
                    .declare_function("runtime_error", Linkage::Import, &{
                        // Signature: fn(kind: i64) -> i64
                        let mut sig = Signature::new(sess.pipeline.isa.default_call_conv());
                        sig.params.push(AbiParam::new(types::I64));
                        sig.returns.push(AbiParam::new(types::I64));
                        sig
                    })
                    .map_err(|e| EmitError::CraneliftError(e.to_string()))?;
                let err_ref = sess
                    .pipeline
                    .module
                    .declare_func_in_func(err_fn, builder.func);
                // Error kind 2 — presumably the generic "raise" kind (same
                // default as extract_error_kind); confirm against the
                // host-side kind table.
                let kind_val = builder.ins().iconst(types::I64, 2);
                let inst = builder.ins().call(err_ref, &[kind_val]);
                let result = builder.inst_results(inst)[0];
                builder.declare_value_needs_stack_map(result);
                return Ok(SsaVal::HeapPtr(result));
            }
            // Primops are strict: force every argument (thunks get
            // evaluated) before dispatching to the primop emitter.
            let forced_args: Vec<SsaVal> = args
                .iter()
                .map(|a| force_thunk_ssaval(sess.pipeline, builder, sess.vmctx, *a))
                .collect::<Result<Vec<_>, EmitError>>()?;
            primop::emit_primop(sess, builder, op, &forced_args)
        }
        EmitFrame::App { fun, arg } => {
            // Register env values with the builder before the calls below
            // (see EmitContext::declare_env).
            ctx.declare_env(builder);
            let raw_fun_ptr = fun.value();
            let arg_ptr = ensure_heap_ptr(builder, sess.vmctx, sess.gc_sig, sess.oom_func, arg);

            // Step 1: if the callee is still a thunk, force it first.
            // Branch on the object's tag byte.
            let fun_tag = builder
                .ins()
                .load(types::I8, MemFlags::trusted(), raw_fun_ptr, 0);
            let is_thunk = builder.ins().icmp_imm(
                IntCC::Equal,
                fun_tag,
                tidepool_heap::layout::TAG_THUNK as i64,
            );

            let force_fun_block = builder.create_block();
            let fun_ready_block = builder.create_block();
            // fun_ready_block receives the (possibly forced) function pointer.
            builder.append_block_param(fun_ready_block, types::I64);

            builder.ins().brif(
                is_thunk,
                force_fun_block,
                &[],
                fun_ready_block,
                &[BlockArg::Value(raw_fun_ptr)],
            );

            builder.switch_to_block(force_fun_block);
            builder.seal_block(force_fun_block);

            // heap_force(vmctx, thunk) -> evaluated value.
            let force_fn = sess
                .pipeline
                .module
                .declare_function("heap_force", Linkage::Import, &{
                    let mut sig = Signature::new(sess.pipeline.isa.default_call_conv());
                    sig.params.push(AbiParam::new(types::I64));
                    sig.params.push(AbiParam::new(types::I64));
                    sig.returns.push(AbiParam::new(types::I64));
                    sig
                })
                .map_err(|e| EmitError::CraneliftError(e.to_string()))?;
            let force_ref = sess
                .pipeline
                .module
                .declare_func_in_func(force_fn, builder.func);
            let force_call = builder.ins().call(force_ref, &[sess.vmctx, raw_fun_ptr]);
            let forced_fun = builder.inst_results(force_call)[0];
            builder.declare_value_needs_stack_map(forced_fun);
            builder
                .ins()
                .jump(fun_ready_block, &[BlockArg::Value(forced_fun)]);

            builder.switch_to_block(fun_ready_block);
            builder.seal_block(fun_ready_block);
            let fun_ptr = builder.block_params(fun_ready_block)[0];
            builder.declare_value_needs_stack_map(fun_ptr);

            // Step 2: sanity-check the callee via the imported
            // debug_app_check; a non-zero result bypasses the call and is
            // propagated directly to the merge block.
            let check_fn = sess
                .pipeline
                .module
                .declare_function("debug_app_check", Linkage::Import, &{
                    let mut sig = Signature::new(sess.pipeline.isa.default_call_conv());
                    sig.params.push(AbiParam::new(types::I64));
                    sig.returns.push(AbiParam::new(types::I64));
                    sig
                })
                .map_err(|e| EmitError::CraneliftError(e.to_string()))?;
            let check_ref = sess
                .pipeline
                .module
                .declare_func_in_func(check_fn, builder.func);
            let check_inst = builder.ins().call(check_ref, &[fun_ptr]);
            let check_result = builder.inst_results(check_inst)[0];

            let call_block = builder.create_block();
            let merge_block = builder.create_block();
            builder.append_block_param(merge_block, types::I64);

            let is_zero = builder.ins().icmp_imm(IntCC::Equal, check_result, 0);
            builder.ins().brif(
                is_zero,
                call_block,
                &[],
                merge_block,
                &[BlockArg::Value(check_result)],
            );

            builder.switch_to_block(call_block);
            builder.seal_block(call_block);

            // Step 3: indirect call through the closure's code pointer,
            // passing (vmctx, closure, arg).
            let code_ptr = builder.ins().load(
                types::I64,
                MemFlags::trusted(),
                fun_ptr,
                CLOSURE_CODE_PTR_OFFSET,
            );

            let mut sig = Signature::new(sess.pipeline.isa.default_call_conv());
            sig.params.push(AbiParam::new(types::I64));
            sig.params.push(AbiParam::new(types::I64));
            sig.params.push(AbiParam::new(types::I64));
            sig.returns.push(AbiParam::new(types::I64));
            let call_sig = builder.import_signature(sig);

            let inst =
                builder
                    .ins()
                    .call_indirect(call_sig, code_ptr, &[sess.vmctx, fun_ptr, arg_ptr]);
            let ret_val = builder.inst_results(inst)[0];

            // Step 4: a null return signals either a pending trampoline
            // tail call (vmctx.tail_callee set) or a genuine null to
            // propagate unchanged.
            let ret_is_null = builder.ins().icmp_imm(IntCC::Equal, ret_val, 0);
            let null_check_block = builder.create_block();
            let ret_ok_block = builder.create_block();

            builder
                .ins()
                .brif(ret_is_null, null_check_block, &[], ret_ok_block, &[]);

            builder.switch_to_block(ret_ok_block);
            builder.seal_block(ret_ok_block);
            builder.ins().jump(merge_block, &[BlockArg::Value(ret_val)]);

            builder.switch_to_block(null_check_block);
            builder.seal_block(null_check_block);

            // Inspect vmctx's tail-callee slot to distinguish the two
            // null cases.
            let tail_callee = builder.ins().load(
                types::I64,
                MemFlags::trusted(),
                sess.vmctx,
                VMCTX_TAIL_CALLEE_OFFSET,
            );
            let has_tail_call = builder.ins().icmp_imm(IntCC::NotEqual, tail_callee, 0);

            let resolve_block = builder.create_block();
            let null_propagate_block = builder.create_block();

            builder
                .ins()
                .brif(has_tail_call, resolve_block, &[], null_propagate_block, &[]);

            // Plain null: pass it through.
            builder.switch_to_block(null_propagate_block);
            builder.seal_block(null_propagate_block);
            let null_val = builder.ins().iconst(types::I64, 0);
            builder
                .ins()
                .jump(merge_block, &[BlockArg::Value(null_val)]);

            // Pending tail call: drive the trampoline to completion via
            // trampoline_resolve(vmctx).
            builder.switch_to_block(resolve_block);
            builder.seal_block(resolve_block);

            let resolve_fn = sess
                .pipeline
                .module
                .declare_function("trampoline_resolve", Linkage::Import, &{
                    let mut sig = Signature::new(sess.pipeline.isa.default_call_conv());
                    sig.params.push(AbiParam::new(types::I64));
                    sig.returns.push(AbiParam::new(types::I64));
                    sig
                })
                .map_err(|e: cranelift_module::ModuleError| {
                    EmitError::CraneliftError(e.to_string())
                })?;
            let resolve_ref = sess
                .pipeline
                .module
                .declare_func_in_func(resolve_fn, builder.func);
            let resolve_inst = builder.ins().call(resolve_ref, &[sess.vmctx]);
            let resolved_val = builder.inst_results(resolve_inst)[0];
            builder.declare_value_needs_stack_map(resolved_val);
            builder
                .ins()
                .jump(merge_block, &[BlockArg::Value(resolved_val)]);

            // All paths converge here with the final result.
            builder.switch_to_block(merge_block);
            builder.seal_block(merge_block);
            let merged_val = builder.block_params(merge_block)[0];
            builder.declare_value_needs_stack_map(merged_val);
            Ok(SsaVal::HeapPtr(merged_val))
        }
        EmitFrame::Lam { binder, body_idx } => emit_lam(ctx, sess, builder, binder, body_idx),
        EmitFrame::Case {
            scrutinee,
            binder,
            alts,
        } => crate::emit::case::emit_case(ctx, sess, builder, scrutinee, &binder, &alts, tail),
        EmitFrame::Join {
            label,
            params,
            rhs_idx,
            body_idx,
        } => crate::emit::join::emit_join(ctx, sess, builder, &label, &params, rhs_idx, body_idx),
        EmitFrame::Jump { label, args } => {
            // Jump to a previously-registered join block.
            let join_block = ctx.join_blocks.get(&label)?.block;

            let arg_values: Vec<BlockArg> = args
                .iter()
                .map(|v| {
                    BlockArg::Value(ensure_heap_ptr(
                        builder,
                        sess.vmctx,
                        sess.gc_sig,
                        sess.oom_func,
                        *v,
                    ))
                })
                .collect();

            builder.ins().jump(join_block, &arg_values);

            // Control never returns here; park subsequent instructions in a
            // fresh unreachable block and hand back a dummy value.
            let unreachable_block = builder.create_block();
            builder.switch_to_block(unreachable_block);
            builder.seal_block(unreachable_block);

            Ok(SsaVal::Raw(
                builder.ins().iconst(types::I64, 0),
                LIT_TAG_INT,
            ))
        }
        EmitFrame::LetBoundary(idx) => {
            // Lets are lowered by emit_node itself (binding handling lives
            // there), always in non-tail position.
            ctx.emit_node(sess, builder, idx, TailCtx::NonTail)
        }
    }
}
620
/// Emit the subtree rooted at `idx` in non-tail position.
/// Thin convenience wrapper around `emit_subtree_with_tail`.
#[allow(clippy::too_many_arguments)]
fn emit_subtree(
    ctx: &mut EmitContext,
    sess: &mut EmitSession,
    builder: &mut FunctionBuilder,
    idx: usize,
) -> Result<SsaVal, EmitError> {
    emit_subtree_with_tail(ctx, sess, builder, idx, TailCtx::NonTail)
}
631
632fn emit_subtree_with_tail(
634 ctx: &mut EmitContext,
635 sess: &mut EmitSession,
636 builder: &mut FunctionBuilder,
637 idx: usize,
638 tail: TailCtx,
639) -> Result<SsaVal, EmitError> {
640 try_expand_and_collapse::<EmitFrameToken, _, _, _>(
641 idx,
642 |idx| expand_node(sess.tree, idx),
643 |frame| collapse_frame(ctx, sess, builder, frame, tail),
644 )
645}
646
647fn is_trivial_field(idx: usize, expr: &CoreExpr) -> bool {
654 match &expr.nodes[idx] {
655 CoreFrame::Var(_) => true,
656 CoreFrame::Lit(_) => true,
657 CoreFrame::Lam { .. } => true, CoreFrame::Con { fields, .. } => fields.iter().all(|&f| is_trivial_field(f, expr)),
659 CoreFrame::PrimOp { args, .. } => args.iter().all(|&a| is_trivial_field(a, expr)),
660 _ => false, }
662}
663
664fn compute_captures(
671 ctx: &EmitContext,
672 tree: &CoreExpr,
673 body_idx: usize,
674 exclude: Option<VarId>,
675 label: &str,
676) -> (CoreExpr, Vec<VarId>) {
677 let body_tree = tree.extract_subtree(body_idx);
678 let mut fvs = tidepool_repr::free_vars::free_vars(&body_tree);
679 if let Some(binder) = exclude {
680 fvs.remove(&binder);
681 }
682 let dropped: Vec<VarId> = fvs
683 .iter()
684 .filter(|v| !ctx.env.contains_key(v))
685 .copied()
686 .collect();
687 if !dropped.is_empty() {
688 ctx.trace_scope(&format!(
689 "{} capture: dropped {} free vars not in scope: {:?}",
690 label,
691 dropped.len(),
692 dropped
693 ));
694 }
695 let mut sorted_fvs: Vec<VarId> = fvs
696 .into_iter()
697 .filter(|v| ctx.env.contains_key(v))
698 .collect();
699 sorted_fvs.sort_by_key(|v| v.0);
700 (body_tree, sorted_fvs)
701}
702
/// Lower a lambda: compile its body into a fresh Cranelift function
/// (signature `fn(vmctx, closure, arg) -> i64`), then allocate and
/// initialize a closure object in the current function that carries the
/// code pointer plus the captured environment.
#[allow(clippy::too_many_arguments)]
fn emit_lam(
    ctx: &mut EmitContext,
    sess: &mut EmitSession,
    builder: &mut FunctionBuilder,
    binder: VarId,
    body_idx: usize,
) -> Result<SsaVal, EmitError> {
    // Free variables of the body (minus the binder) that are in scope
    // become the closure's captures.
    let (body_tree, sorted_fvs) = compute_captures(ctx, sess.tree, body_idx, Some(binder), "lam");

    // Pair each capture with its current SSA value; a miss here is a
    // compiler bug (compute_captures already filtered on env membership).
    let captures: Vec<(VarId, SsaVal)> = sorted_fvs
        .iter()
        .map(|v| {
            let val = ctx.env.get(v).ok_or_else(|| {
                EmitError::MissingCaptureVar(
                    *v,
                    format!("Lam capture: not in env (env has {} vars)", ctx.env.len()),
                )
            })?;
            Ok::<_, EmitError>((*v, *val))
        })
        .collect::<Result<Vec<_>, EmitError>>()?;

    // Declare the closure function: fn(vmctx, closure_self, arg) -> ret.
    let lambda_name = ctx.next_lambda_name();
    let mut closure_sig = Signature::new(sess.pipeline.isa.default_call_conv());
    closure_sig.params.push(AbiParam::new(types::I64));
    closure_sig.params.push(AbiParam::new(types::I64));
    closure_sig.params.push(AbiParam::new(types::I64));
    closure_sig.returns.push(AbiParam::new(types::I64));

    let lambda_func_id = sess
        .pipeline
        .module
        .declare_function(&lambda_name, Linkage::Local, &closure_sig)
        .map_err(|e| EmitError::CraneliftError(e.to_string()))?;
    sess.pipeline
        .register_lambda(lambda_func_id, lambda_name.clone());

    // Build the lambda body in its own Context/FunctionBuilder.
    let mut inner_ctx = Context::new();
    inner_ctx.func.signature = closure_sig;
    inner_ctx.func.name = UserFuncName::default();

    let mut inner_fb_ctx = FunctionBuilderContext::new();
    let mut inner_builder = FunctionBuilder::new(&mut inner_ctx.func, &mut inner_fb_ctx);
    let inner_block = inner_builder.create_block();
    inner_builder.append_block_params_for_function_params(inner_block);
    inner_builder.switch_to_block(inner_block);
    inner_builder.seal_block(inner_block);

    let inner_vmctx = inner_builder.block_params(inner_block)[0];
    let closure_self = inner_builder.block_params(inner_block)[1];
    let arg_param = inner_builder.block_params(inner_block)[2];

    // Both heap references must be visible to the GC at safepoints.
    inner_builder.declare_value_needs_stack_map(closure_self);
    inner_builder.declare_value_needs_stack_map(arg_param);

    // GC signature and OOM handler for allocations inside the lambda body.
    let mut inner_gc_sig = Signature::new(sess.pipeline.isa.default_call_conv());
    inner_gc_sig.params.push(AbiParam::new(types::I64));
    let inner_gc_sig_ref = inner_builder.import_signature(inner_gc_sig);

    let inner_oom_func = {
        let mut sig = Signature::new(sess.pipeline.isa.default_call_conv());
        sig.returns.push(AbiParam::new(types::I64));
        let func_id = sess
            .pipeline
            .module
            .declare_function("runtime_oom", Linkage::Import, &sig)
            .map_err(|e| EmitError::CraneliftError(format!("declare runtime_oom: {e}")))?;
        sess.pipeline
            .module
            .declare_func_in_func(func_id, inner_builder.func)
    };

    // Fresh emit env for the body; lambda numbering continues from the
    // outer context (and is written back below).
    let mut inner_emit = EmitContext::new(ctx.prefix.clone());
    inner_emit.lambda_counter = ctx.lambda_counter;

    inner_emit.trace_scope(&format!("insert lam binder {:?}", binder));
    inner_emit.env.insert(binder, SsaVal::HeapPtr(arg_param));

    // Rebind each capture: load it from the closure object's capture slots
    // (same sorted order used when storing them below).
    for (i, (var_id, _)) in captures.iter().enumerate() {
        let offset = CLOSURE_CAPTURED_OFFSET + 8 * i as i32;
        let val = inner_builder
            .ins()
            .load(types::I64, MemFlags::trusted(), closure_self, offset);
        inner_builder.declare_value_needs_stack_map(val);
        inner_emit.trace_scope(&format!("insert lam capture {:?}", var_id));
        inner_emit.env.insert(*var_id, SsaVal::HeapPtr(val));
    }

    // Root of the extracted subtree is its last node. Lambda bodies are
    // emitted in tail position.
    let body_root = body_tree.nodes.len() - 1;
    let mut inner_sess = EmitSession {
        pipeline: sess.pipeline,
        vmctx: inner_vmctx,
        gc_sig: inner_gc_sig_ref,
        oom_func: inner_oom_func,
        tree: &body_tree,
    };
    let body_result = inner_emit.emit_node(
        &mut inner_sess,
        &mut inner_builder,
        body_root,
        TailCtx::Tail,
    )?;
    let ret_val = ensure_heap_ptr(
        &mut inner_builder,
        inner_vmctx,
        inner_gc_sig_ref,
        inner_oom_func,
        body_result,
    );

    inner_builder.ins().return_(&[ret_val]);
    inner_builder.finalize();

    // Propagate lambda numbering back so sibling lambdas get unique names.
    ctx.lambda_counter = inner_emit.lambda_counter;

    // Optional CLIF dump for debugging, gated on an env var.
    if std::env::var("TIDEPOOL_DUMP_CLIF").is_ok() {
        eprintln!("=== CLIF {} ({} captures) ===", lambda_name, captures.len());
        for (i, (var_id, ssaval)) in captures.iter().enumerate() {
            let kind = match ssaval {
                SsaVal::HeapPtr(_) => "HeapPtr",
                SsaVal::Raw(_, tag) => &format!("Raw(tag={})", tag),
            };
            eprintln!(" capture[{}]: VarId({:#x}) = {}", i, var_id.0, kind);
        }
        eprintln!("{}", inner_ctx.func.display());
        eprintln!("=== END CLIF {} ===", lambda_name);
    }

    sess.pipeline
        .define_function(lambda_func_id, &mut inner_ctx)?;

    // Back in the enclosing function: allocate the closure object.
    // Layout: 24-byte header + 8 bytes per capture.
    let func_ref = sess
        .pipeline
        .module
        .declare_func_in_func(lambda_func_id, builder.func);
    let code_ptr = builder.ins().func_addr(types::I64, func_ref);

    let num_captures = captures.len();
    let closure_size = 24 + 8 * num_captures as u64;
    let closure_ptr = emit_alloc_fast_path(
        builder,
        sess.vmctx,
        closure_size,
        sess.gc_sig,
        sess.oom_func,
    );

    // Header: tag at offset 0, i16 size at offset 1.
    let tag_val = builder.ins().iconst(types::I8, layout::TAG_CLOSURE as i64);
    builder
        .ins()
        .store(MemFlags::trusted(), tag_val, closure_ptr, 0);
    let size_val = builder.ins().iconst(types::I16, closure_size as i64);
    builder
        .ins()
        .store(MemFlags::trusted(), size_val, closure_ptr, 1);

    builder.ins().store(
        MemFlags::trusted(),
        code_ptr,
        closure_ptr,
        CLOSURE_CODE_PTR_OFFSET,
    );
    let num_cap_val = builder.ins().iconst(types::I16, num_captures as i64);
    builder.ins().store(
        MemFlags::trusted(),
        num_cap_val,
        closure_ptr,
        CLOSURE_NUM_CAPTURED_OFFSET,
    );

    // Store captures in the same sorted order the body loads them.
    for (i, (_, ssaval)) in captures.iter().enumerate() {
        let cap_val = ensure_heap_ptr(builder, sess.vmctx, sess.gc_sig, sess.oom_func, *ssaval);
        let offset = CLOSURE_CAPTURED_OFFSET + 8 * i as i32;
        builder
            .ins()
            .store(MemFlags::trusted(), cap_val, closure_ptr, offset);
    }

    builder.declare_value_needs_stack_map(closure_ptr);
    Ok(SsaVal::HeapPtr(closure_ptr))
}
886
/// Lower a deferred expression into a thunk: compile the body into a fresh
/// function (signature `fn(vmctx, thunk_self) -> i64`), then allocate and
/// initialize a thunk object (state byte + code pointer + captures) in the
/// current function. Mirrors `emit_lam` but with no argument binder.
#[allow(clippy::too_many_arguments)]
fn emit_thunk(
    ctx: &mut EmitContext,
    sess: &mut EmitSession,
    builder: &mut FunctionBuilder,
    body_idx: usize,
) -> Result<SsaVal, EmitError> {
    // No binder to exclude: every in-scope free var of the body is captured.
    let (body_tree, sorted_fvs) = compute_captures(ctx, sess.tree, body_idx, None, "thunk");

    let captures: Vec<(VarId, SsaVal)> = sorted_fvs
        .iter()
        .map(|v| {
            let val = ctx.env.get(v).ok_or_else(|| {
                EmitError::MissingCaptureVar(
                    *v,
                    format!("Thunk capture: not in env (env has {} vars)", ctx.env.len()),
                )
            })?;
            Ok::<_, EmitError>((*v, *val))
        })
        .collect::<Result<Vec<_>, EmitError>>()?;

    // Declare the thunk entry: fn(vmctx, thunk_self) -> ret.
    let thunk_name = ctx.next_thunk_name();
    let mut thunk_sig = Signature::new(sess.pipeline.isa.default_call_conv());
    thunk_sig.params.push(AbiParam::new(types::I64));
    thunk_sig.params.push(AbiParam::new(types::I64));
    thunk_sig.returns.push(AbiParam::new(types::I64));

    let thunk_func_id = sess
        .pipeline
        .module
        .declare_function(&thunk_name, Linkage::Local, &thunk_sig)
        .map_err(|e| EmitError::CraneliftError(e.to_string()))?;
    sess.pipeline
        .register_lambda(thunk_func_id, thunk_name.clone());

    // Build the thunk body in its own Context/FunctionBuilder.
    let mut inner_ctx = Context::new();
    inner_ctx.func.signature = thunk_sig;
    inner_ctx.func.name = UserFuncName::default();

    let mut inner_fb_ctx = FunctionBuilderContext::new();
    let mut inner_builder = FunctionBuilder::new(&mut inner_ctx.func, &mut inner_fb_ctx);
    let inner_block = inner_builder.create_block();
    inner_builder.append_block_params_for_function_params(inner_block);
    inner_builder.switch_to_block(inner_block);
    inner_builder.seal_block(inner_block);

    let inner_vmctx = inner_builder.block_params(inner_block)[0];
    let thunk_self = inner_builder.block_params(inner_block)[1];

    inner_builder.declare_value_needs_stack_map(thunk_self);

    // GC signature and OOM handler for allocations inside the thunk body.
    let mut inner_gc_sig = Signature::new(sess.pipeline.isa.default_call_conv());
    inner_gc_sig.params.push(AbiParam::new(types::I64));
    let inner_gc_sig_ref = inner_builder.import_signature(inner_gc_sig);

    let inner_oom_func = {
        let mut sig = Signature::new(sess.pipeline.isa.default_call_conv());
        sig.returns.push(AbiParam::new(types::I64));
        let func_id = sess
            .pipeline
            .module
            .declare_function("runtime_oom", Linkage::Import, &sig)
            .map_err(|e| EmitError::CraneliftError(format!("declare runtime_oom: {e}")))?;
        sess.pipeline
            .module
            .declare_func_in_func(func_id, inner_builder.func)
    };

    let mut inner_emit = EmitContext::new(ctx.prefix.clone());
    inner_emit.lambda_counter = ctx.lambda_counter;

    // Rebind each capture from the thunk object's capture slots (same
    // sorted order used when storing them below).
    for (i, (var_id, _)) in captures.iter().enumerate() {
        let offset = THUNK_CAPTURED_OFFSET + 8 * i as i32;
        let val = inner_builder
            .ins()
            .load(types::I64, MemFlags::trusted(), thunk_self, offset);
        inner_builder.declare_value_needs_stack_map(val);
        inner_emit.trace_scope(&format!("insert thunk capture {:?}", var_id));
        inner_emit.env.insert(*var_id, SsaVal::HeapPtr(val));
    }

    // Root of the extracted subtree is its last node. Note the thunk body
    // is emitted NonTail (unlike lambda bodies, which are Tail) —
    // presumably because the forcing protocol needs the real value back
    // here; confirm against heap_force.
    let body_root = body_tree.nodes.len() - 1;
    let mut inner_sess = EmitSession {
        pipeline: sess.pipeline,
        vmctx: inner_vmctx,
        gc_sig: inner_gc_sig_ref,
        oom_func: inner_oom_func,
        tree: &body_tree,
    };
    let body_result = inner_emit.emit_node(
        &mut inner_sess,
        &mut inner_builder,
        body_root,
        TailCtx::NonTail,
    )?;
    let ret_val = ensure_heap_ptr(
        &mut inner_builder,
        inner_vmctx,
        inner_gc_sig_ref,
        inner_oom_func,
        body_result,
    );

    inner_builder.ins().return_(&[ret_val]);
    inner_builder.finalize();

    // Propagate lambda/thunk numbering back to the outer context.
    ctx.lambda_counter = inner_emit.lambda_counter;

    // Optional CLIF dump for debugging, gated on an env var.
    if std::env::var("TIDEPOOL_DUMP_CLIF").is_ok() {
        eprintln!("=== CLIF {} ({} captures) ===", thunk_name, captures.len());
        for (i, (var_id, ssaval)) in captures.iter().enumerate() {
            let kind = match ssaval {
                SsaVal::HeapPtr(_) => "HeapPtr",
                SsaVal::Raw(_, tag) => &format!("Raw(tag={})", tag),
            };
            eprintln!(" capture[{}]: VarId({:#x}) = {}", i, var_id.0, kind);
        }
        eprintln!("{}", inner_ctx.func.display());
        eprintln!("=== END CLIF {} ===", thunk_name);
    }

    sess.pipeline
        .define_function(thunk_func_id, &mut inner_ctx)?;

    // Back in the enclosing function: allocate the thunk object.
    // Layout: 24-byte header + 8 bytes per capture.
    let func_ref = sess
        .pipeline
        .module
        .declare_func_in_func(thunk_func_id, builder.func);
    let code_ptr = builder.ins().func_addr(types::I64, func_ref);

    let num_captures = captures.len();
    let thunk_size = 24 + 8 * num_captures as u64;
    let thunk_ptr =
        emit_alloc_fast_path(builder, sess.vmctx, thunk_size, sess.gc_sig, sess.oom_func);

    // Header: tag at offset 0, i16 size at offset 1.
    let tag_val = builder.ins().iconst(types::I8, layout::TAG_THUNK as i64);
    builder
        .ins()
        .store(MemFlags::trusted(), tag_val, thunk_ptr, 0);
    let size_val = builder.ins().iconst(types::I16, thunk_size as i64);
    builder
        .ins()
        .store(MemFlags::trusted(), size_val, thunk_ptr, 1);

    // Thunks start unevaluated; heap_force flips this state on first force.
    let state_val = builder
        .ins()
        .iconst(types::I8, layout::THUNK_UNEVALUATED as i64);
    builder.ins().store(
        MemFlags::trusted(),
        state_val,
        thunk_ptr,
        THUNK_STATE_OFFSET,
    );

    builder.ins().store(
        MemFlags::trusted(),
        code_ptr,
        thunk_ptr,
        THUNK_CODE_PTR_OFFSET,
    );

    // Store captures in the same sorted order the body loads them.
    for (i, (_, ssaval)) in captures.iter().enumerate() {
        let cap_val = ensure_heap_ptr(builder, sess.vmctx, sess.gc_sig, sess.oom_func, *ssaval);
        let offset = THUNK_CAPTURED_OFFSET + 8 * i as i32;
        builder
            .ins()
            .store(MemFlags::trusted(), cap_val, thunk_ptr, offset);
    }

    builder.declare_value_needs_stack_map(thunk_ptr);
    Ok(SsaVal::HeapPtr(thunk_ptr))
}
1083
/// Compile a whole Core expression tree into a named top-level function in
/// the pipeline's module and return its `FuncId`. The generated function
/// takes vmctx as its first parameter and returns a heap pointer.
pub fn compile_expr(
    pipeline: &mut CodegenPipeline,
    tree: &CoreExpr,
    name: &str,
) -> Result<FuncId, EmitError> {
    // Optional input dump for debugging, gated on an env var. Free
    // variables at top level indicate an upstream scoping problem.
    if std::env::var("TIDEPOOL_DUMP_TREE").is_ok() {
        eprintln!(
            "[tree] {} nodes:\n{}",
            tree.nodes.len(),
            tidepool_repr::pretty::pretty_print(tree)
        );
        let fvs = tidepool_repr::free_vars::free_vars(tree);
        if !fvs.is_empty() {
            eprintln!(
                "[tree] WARNING: {} free vars in input: {:?}",
                fvs.len(),
                fvs
            );
        }
    }

    let sig = pipeline.make_func_signature();
    let func_id = pipeline.declare_function(name)?;

    let mut ctx = Context::new();
    ctx.func.signature = sig;
    ctx.func.name = UserFuncName::default();

    let mut fb_ctx = FunctionBuilderContext::new();
    let mut builder = FunctionBuilder::new(&mut ctx.func, &mut fb_ctx);

    let entry_block = builder.create_block();
    builder.append_block_params_for_function_params(entry_block);
    builder.switch_to_block(entry_block);
    builder.seal_block(entry_block);

    // By convention the first function parameter is the VM context.
    let vmctx = builder.block_params(entry_block)[0];

    // GC signature (one i64 param) used by allocation fast paths.
    let mut gc_sig = Signature::new(pipeline.isa.default_call_conv());
    gc_sig.params.push(AbiParam::new(types::I64));
    let gc_sig_ref = builder.import_signature(gc_sig);

    // Imported OOM handler, called when allocation cannot be satisfied.
    let oom_func = {
        let mut sig = Signature::new(pipeline.isa.default_call_conv());
        sig.returns.push(AbiParam::new(types::I64));
        let func_id = pipeline
            .module
            .declare_function("runtime_oom", Linkage::Import, &sig)
            .map_err(|e| EmitError::CraneliftError(format!("declare runtime_oom: {e}")))?;
        pipeline.module.declare_func_in_func(func_id, builder.func)
    };

    // `name` also serves as the prefix for generated lambda/thunk names.
    let mut emit_ctx = EmitContext::new(name.to_string());

    let mut sess = EmitSession {
        pipeline,
        vmctx,
        gc_sig: gc_sig_ref,
        oom_func,
        tree,
    };

    // The root of the tree is its last node; the top level is emitted in
    // non-tail position.
    let result = emit_ctx.emit_node(
        &mut sess,
        &mut builder,
        tree.nodes.len() - 1,
        TailCtx::NonTail,
    )?;
    let ret = ensure_heap_ptr(&mut builder, vmctx, gc_sig_ref, oom_func, result);

    builder.ins().return_(&[ret]);
    builder.finalize();

    pipeline.define_function(func_id, &mut ctx)?;

    Ok(func_id)
}
1168
1169impl EmitContext {
1170 fn rhs_is_error_call(tree: &CoreExpr, rhs_idx: usize) -> bool {
1180 let mut idx = rhs_idx;
1181 loop {
1182 match &tree.nodes[idx] {
1183 CoreFrame::Var(v) => return (v.0 >> 56) as u8 == tidepool_repr::ERROR_SENTINEL_TAG,
1184 CoreFrame::App { fun, .. } => idx = *fun,
1185 _ => return false,
1186 }
1187 }
1188 }
1189
1190 fn extract_error_kind(tree: &CoreExpr, rhs_idx: usize) -> u64 {
1192 let mut idx = rhs_idx;
1193 loop {
1194 match &tree.nodes[idx] {
1195 CoreFrame::Var(v) if (v.0 >> 56) as u8 == tidepool_repr::ERROR_SENTINEL_TAG => {
1196 return v.0 & 0xFF
1197 }
1198 CoreFrame::App { fun, .. } => idx = *fun,
1199 _ => return 2, }
1201 }
1202 }
1203
1204 fn extract_error_message(tree: &CoreExpr, rhs_idx: usize) -> Option<Vec<u8>> {
1206 let mut idx = rhs_idx;
1207 loop {
1208 match &tree.nodes[idx] {
1209 CoreFrame::App { fun, arg } => {
1210 if let CoreFrame::Lit(Literal::LitString(bytes)) = &tree.nodes[*arg] {
1212 return Some(bytes.clone());
1213 }
1214 idx = *fun; }
1216 _ => return None,
1217 }
1218 }
1219 }
1220
1221 fn emit_error_poison(&self, tree: &CoreExpr, rhs_idx: usize) -> i64 {
1222 let kind = Self::extract_error_kind(tree, rhs_idx);
1223 match Self::extract_error_message(tree, rhs_idx) {
1224 Some(msg) => crate::host_fns::error_poison_ptr_lazy_msg(kind, &msg) as i64,
1225 None => crate::host_fns::error_poison_ptr_lazy(kind) as i64,
1226 }
1227 }
1228
    /// Drive emission of the expression rooted at `root_idx` using an
    /// explicit work/value stack, so deeply nested `let` spines do not
    /// recurse on the Rust call stack.
    ///
    /// `Eval` items walk `LetNonRec`/`LetRec` spines iteratively; every
    /// other node kind is delegated to `emit_subtree_with_tail` (or
    /// `emit_tail_app` for an `App` in tail position). Returns the single
    /// value left on the value stack.
    #[allow(clippy::too_many_arguments)]
    pub fn emit_node(
        &mut self,
        sess: &mut EmitSession,
        builder: &mut FunctionBuilder,
        root_idx: usize,
        tail: TailCtx,
    ) -> Result<SsaVal, EmitError> {
        let mut work: Vec<EmitWork> = vec![EmitWork::Eval(root_idx, tail)];
        let mut vals: Vec<SsaVal> = Vec::new();

        while let Some(item) = work.pop() {
            match item {
                EmitWork::Eval(start_idx, tail_ctx) => {
                    let mut idx = start_idx;
                    loop {
                        match &sess.tree.nodes[idx] {
                            CoreFrame::LetNonRec { binder, rhs, body } => {
                                let binder = *binder;
                                let rhs = *rhs;
                                let body = *body;
                                let body_fvs = tidepool_repr::free_vars::free_vars(
                                    &sess.tree.extract_subtree(body),
                                );
                                if body_fvs.contains(&binder) {
                                    if Self::rhs_is_error_call(sess.tree, rhs) {
                                        // Error-call rhs: bind a poison
                                        // pointer immediately and keep
                                        // walking the body inline.
                                        let poison_addr = self.emit_error_poison(sess.tree, rhs);
                                        let poison_val =
                                            builder.ins().iconst(types::I64, poison_addr);
                                        self.trace_scope(&format!(
                                            "defer error LetNonRec {:?}",
                                            binder
                                        ));
                                        let old_val =
                                            self.env.insert(binder, SsaVal::HeapPtr(poison_val));
                                        work.push(EmitWork::LetCleanupMark(LetCleanup::Single(
                                            binder, old_val,
                                        )));
                                    } else {
                                        // Pushed in reverse pop order:
                                        // evaluate rhs, bind it, emit the
                                        // body, then restore the shadowed
                                        // env entry.
                                        let old_val = self.env.get(&binder).cloned();
                                        work.push(EmitWork::LetCleanupMark(LetCleanup::Single(
                                            binder, old_val,
                                        )));
                                        work.push(EmitWork::Eval(body, tail_ctx));
                                        work.push(EmitWork::Bind(binder));
                                        work.push(EmitWork::Eval(rhs, TailCtx::NonTail));
                                        break;
                                    }
                                } else {
                                    // Binder unused in the body: the rhs
                                    // is dead code and is skipped.
                                    self.trace_scope(&format!("DCE skip LetNonRec {:?}", binder));
                                }
                                idx = body;
                                continue;
                            }
                            CoreFrame::LetRec { bindings, body } => {
                                let bindings = bindings.clone();
                                let body = *body;
                                // Save every shadowed binding so the whole
                                // scope can be restored after the body.
                                let mut scope = EnvScope::new();
                                for (b, _) in &bindings {
                                    scope.saved.push((*b, self.env.get(b).copied()));
                                }
                                work.push(EmitWork::LetCleanupMark(LetCleanup::Rec(scope)));
                                self.emit_letrec_phases(
                                    sess, builder, &bindings, body, &mut work, tail_ctx,
                                )?;
                                break;
                            }
                            _ => {
                                // Leaf of the let spine: emit it as a
                                // whole subtree.
                                if tail_ctx.is_tail()
                                    && matches!(sess.tree.nodes[idx], CoreFrame::App { .. })
                                {
                                    let result = self.emit_tail_app(sess, builder, idx)?;
                                    vals.push(result);
                                } else {
                                    let result =
                                        emit_subtree_with_tail(self, sess, builder, idx, tail_ctx)?;
                                    vals.push(result);
                                }
                                break;
                            }
                        }
                    }
                }
                EmitWork::Bind(binder) => {
                    let val = vals.pop().ok_or_else(|| {
                        EmitError::InternalError("Bind: empty value stack".into())
                    })?;
                    self.trace_scope(&format!("insert LetNonRec {:?}", binder));
                    self.env.insert(binder, val);
                }
                EmitWork::LetRecPostSimple { binder, state_idx } => {
                    // Bind the freshly evaluated simple letrec rhs, then
                    // patch any capture/field slots waiting on it.
                    let val = vals.pop().ok_or_else(|| {
                        EmitError::InternalError("LetRecPostSimple: empty value stack".into())
                    })?;
                    self.trace_scope(&format!("insert LetRec(simple) {:?}", binder));
                    self.env.insert(binder, val);
                    self.letrec_post_simple_step(sess, builder, &binder, state_idx)?;
                }
                EmitWork::LetRecFinish {
                    body,
                    state_idx,
                    tail,
                } => {
                    // All letrec bindings are done: flush outstanding
                    // captures/fields, then emit the letrec body.
                    self.letrec_finish_phases(sess, builder, state_idx)?;
                    work.push(EmitWork::Eval(body, tail));
                }
                EmitWork::LetCleanupMark(cleanup) => match cleanup {
                    LetCleanup::Single(var, old_val) => {
                        self.trace_scope(&format!("restore LetCleanup {:?}", var));
                        self.env.restore(var, old_val);
                    }
                    LetCleanup::Rec(scope) => {
                        self.trace_scope("restore LetCleanup(rec)");
                        self.env.restore_scope(scope);
                    }
                },
            }
        }

        vals.pop()
            .ok_or_else(|| EmitError::InternalError("emit_node: empty value stack".into()))
    }
1368
    /// Emit an `App` node that sits in tail position.
    ///
    /// Instead of calling the callee inline, this: evaluates callee and
    /// argument, forces the callee through `heap_force` if its heap tag
    /// says thunk, validates it with `debug_app_check` (a nonzero check
    /// result is returned directly as the function's result), then writes
    /// callee and argument into the vmctx tail-call slots and returns
    /// null. NOTE(review): presumably the runtime's dispatch loop treats
    /// the null return as "pending tail call" and re-enters via the
    /// stored slots — confirm against the runtime.
    fn emit_tail_app(
        &mut self,
        sess: &mut EmitSession,
        builder: &mut FunctionBuilder,
        idx: usize,
    ) -> Result<SsaVal, EmitError> {
        let (fun_idx, arg_idx) = match &sess.tree.nodes[idx] {
            CoreFrame::App { fun, arg } => (*fun, *arg),
            _ => unreachable!(),
        };

        let fun_val = emit_subtree(self, sess, builder, fun_idx)?;
        let arg_val = emit_subtree(self, sess, builder, arg_idx)?;

        let raw_fun_ptr = fun_val.value();
        let arg_ptr = ensure_heap_ptr(builder, sess.vmctx, sess.gc_sig, sess.oom_func, arg_val);

        // Branch on the callee's heap tag byte: thunks must be forced.
        let fun_tag = builder
            .ins()
            .load(types::I8, MemFlags::trusted(), raw_fun_ptr, 0);
        let is_thunk = builder.ins().icmp_imm(
            IntCC::Equal,
            fun_tag,
            tidepool_heap::layout::TAG_THUNK as i64,
        );

        let force_fun_block = builder.create_block();
        let fun_ready_block = builder.create_block();
        // fun_ready_block receives the (possibly forced) callee pointer.
        builder.append_block_param(fun_ready_block, types::I64);

        builder.ins().brif(
            is_thunk,
            force_fun_block,
            &[],
            fun_ready_block,
            &[BlockArg::Value(raw_fun_ptr)],
        );

        builder.switch_to_block(force_fun_block);
        builder.seal_block(force_fun_block);

        // heap_force(vmctx, thunk) -> evaluated value.
        let force_fn = sess
            .pipeline
            .module
            .declare_function("heap_force", Linkage::Import, &{
                let mut sig = Signature::new(sess.pipeline.isa.default_call_conv());
                sig.params.push(AbiParam::new(types::I64));
                sig.params.push(AbiParam::new(types::I64));
                sig.returns.push(AbiParam::new(types::I64));
                sig
            })
            .map_err(|e| EmitError::CraneliftError(e.to_string()))?;
        let force_ref = sess
            .pipeline
            .module
            .declare_func_in_func(force_fn, builder.func);
        let force_call = builder.ins().call(force_ref, &[sess.vmctx, raw_fun_ptr]);
        let forced_fun = builder.inst_results(force_call)[0];
        builder.declare_value_needs_stack_map(forced_fun);
        builder
            .ins()
            .jump(fun_ready_block, &[BlockArg::Value(forced_fun)]);

        builder.switch_to_block(fun_ready_block);
        builder.seal_block(fun_ready_block);
        let fun_ptr = builder.block_params(fun_ready_block)[0];
        builder.declare_value_needs_stack_map(fun_ptr);

        // debug_app_check(fun): zero means the callee may be applied;
        // nonzero is propagated as this function's return value.
        let check_fn = sess
            .pipeline
            .module
            .declare_function("debug_app_check", Linkage::Import, &{
                let mut sig = Signature::new(sess.pipeline.isa.default_call_conv());
                sig.params.push(AbiParam::new(types::I64));
                sig.returns.push(AbiParam::new(types::I64));
                sig
            })
            .map_err(|e| EmitError::CraneliftError(e.to_string()))?;
        let check_ref = sess
            .pipeline
            .module
            .declare_func_in_func(check_fn, builder.func);
        let check_inst = builder.ins().call(check_ref, &[fun_ptr]);
        let check_result = builder.inst_results(check_inst)[0];

        let store_block = builder.create_block();
        let poison_block = builder.create_block();

        let is_zero = builder.ins().icmp_imm(IntCC::Equal, check_result, 0);
        builder
            .ins()
            .brif(is_zero, store_block, &[], poison_block, &[]);

        builder.switch_to_block(poison_block);
        builder.seal_block(poison_block);
        builder.ins().return_(&[check_result]);

        builder.switch_to_block(store_block);
        builder.seal_block(store_block);

        // Hand the callee and argument to the runtime via the vmctx
        // tail-call slots, then return null.
        builder.ins().store(
            MemFlags::trusted(),
            fun_ptr,
            sess.vmctx,
            VMCTX_TAIL_CALLEE_OFFSET,
        );
        builder.ins().store(
            MemFlags::trusted(),
            arg_ptr,
            sess.vmctx,
            VMCTX_TAIL_ARG_OFFSET,
        );

        let null_val = builder.ins().iconst(types::I64, 0);
        builder.ins().return_(&[null_val]);

        // The emitter still expects a value for this expression even
        // though control never continues past the return; park a dummy
        // constant in an unreachable block.
        let dead_block = builder.create_block();
        builder.switch_to_block(dead_block);
        builder.seal_block(dead_block);

        let dummy = builder.ins().iconst(types::I64, 0);
        Ok(SsaVal::HeapPtr(dummy))
    }
1502
    /// Emit a `LetRec` group in phases so mutually recursive bindings can
    /// reference each other before their right-hand sides exist.
    ///
    /// Phase names below match the error strings:
    /// 1. Pre-allocate closure cells (for `Lam` rhs) and constructor
    ///    cells (for `Con` rhs) with null-initialized slots, and bind
    ///    them in the env so recursive references resolve.
    /// 2. Bind trivial "simple" bindings (error sentinels, bare `Var`s)
    ///    immediately; defer the rest.
    /// 3a. Compile each lambda body into its own Cranelift function,
    ///    store the code pointer, and fill capture slots — slots whose
    ///    source binding is not yet available are recorded in
    ///    `pending_capture_updates`.
    /// Remaining simple bindings are dependency-ordered and queued on the
    /// work stack; `LetRecFinish` later patches whatever is still
    /// outstanding before emitting the letrec body.
    #[allow(clippy::too_many_arguments)]
    fn emit_letrec_phases(
        &mut self,
        sess: &mut EmitSession,
        builder: &mut FunctionBuilder,
        bindings: &[(VarId, usize)],
        body: usize,
        work: &mut Vec<EmitWork>,
        tail: TailCtx,
    ) -> Result<(), EmitError> {
        // Only Lam/Con right-hand sides need pre-allocation; everything
        // else is a "simple" binding.
        let (rec_bindings, simple_bindings): (Vec<_>, Vec<_>) =
            bindings.iter().partition(|(_, rhs_idx)| {
                matches!(
                    &sess.tree.nodes[*rhs_idx],
                    CoreFrame::Lam { .. } | CoreFrame::Con { .. }
                )
            });

        if rec_bindings.is_empty() {
            // Fast path: nothing to pre-allocate, just queue the simple
            // bindings and the finish/body step on the work stack.
            let state_idx = self.push_letrec_state(LetRecDeferredState {
                pending_capture_updates: std::collections::HashMap::new(),
                deferred_con_deps: Vec::new(),
            });

            work.push(EmitWork::LetRecFinish {
                body,
                state_idx,
                tail,
            });
            // Reversed so the stack pops bindings in source order.
            for (binder, rhs_idx) in simple_bindings.iter().rev() {
                if Self::rhs_is_error_call(sess.tree, *rhs_idx) {
                    let poison_addr = self.emit_error_poison(sess.tree, *rhs_idx);
                    let poison_val = builder.ins().iconst(types::I64, poison_addr);
                    self.trace_scope(&format!("defer error LetRec(simple) {:?}", binder));
                    self.env.insert(*binder, SsaVal::HeapPtr(poison_val));
                } else {
                    work.push(EmitWork::LetRecPostSimple {
                        binder: *binder,
                        state_idx,
                    });
                    work.push(EmitWork::Eval(*rhs_idx, TailCtx::NonTail));
                }
            }
            return Ok(());
        }

        // Result of Phase 1 pre-allocation for one rec binding.
        enum PreAlloc {
            Lam {
                binder: VarId,
                ptr: cranelift_codegen::ir::Value,
                fvs: Vec<VarId>,
                rhs_idx: usize,
            },
            Con {
                binder: VarId,
                ptr: cranelift_codegen::ir::Value,
                field_indices: Vec<usize>,
            },
        }
        let mut pre_allocs = Vec::with_capacity(rec_bindings.len());

        // Phase 1: allocate closure / constructor cells up front.
        for (binder, rhs_idx) in &rec_bindings {
            match &sess.tree.nodes[*rhs_idx] {
                CoreFrame::Lam {
                    binder: lam_binder,
                    body: lam_body,
                } => {
                    let lam_body_tree = sess.tree.extract_subtree(*lam_body);
                    let mut fvs = tidepool_repr::free_vars::free_vars(&lam_body_tree);
                    fvs.remove(lam_binder);
                    // FVs resolvable neither in the env nor among this
                    // letrec's own binders are dropped (traced only).
                    let dropped_fvs: Vec<VarId> = fvs
                        .iter()
                        .filter(|v| {
                            !self.env.contains_key(v)
                                && !rec_bindings.iter().any(|(b, _)| b == *v)
                                && !simple_bindings.iter().any(|(b, _)| b == *v)
                        })
                        .copied()
                        .collect();
                    if !dropped_fvs.is_empty() {
                        self.trace_scope(&format!(
                            "LetRec lam {:?}: dropped FVs {:?}",
                            binder, dropped_fvs
                        ));
                    }
                    let mut sorted_fvs: Vec<VarId> = fvs
                        .into_iter()
                        .filter(|v| {
                            self.env.contains_key(v)
                                || rec_bindings.iter().any(|(b, _)| b == v)
                                || simple_bindings.iter().any(|(b, _)| b == v)
                        })
                        .collect();
                    // Deterministic capture order, keyed by var id.
                    sorted_fvs.sort_by_key(|v| v.0);

                    let num_captures = sorted_fvs.len();
                    // 24-byte header plus one 8-byte slot per capture.
                    let closure_size = 24 + 8 * num_captures as u64;
                    let closure_ptr = emit_alloc_fast_path(
                        builder,
                        sess.vmctx,
                        closure_size,
                        sess.gc_sig,
                        sess.oom_func,
                    );

                    // Header: tag byte at 0, total size at 1, capture
                    // count at CLOSURE_NUM_CAPTURED_OFFSET.
                    let tag_val = builder.ins().iconst(types::I8, layout::TAG_CLOSURE as i64);
                    builder
                        .ins()
                        .store(MemFlags::trusted(), tag_val, closure_ptr, 0);
                    let size_val = builder.ins().iconst(types::I16, closure_size as i64);
                    builder
                        .ins()
                        .store(MemFlags::trusted(), size_val, closure_ptr, 1);
                    let num_cap_val = builder.ins().iconst(types::I16, num_captures as i64);
                    builder.ins().store(
                        MemFlags::trusted(),
                        num_cap_val,
                        closure_ptr,
                        CLOSURE_NUM_CAPTURED_OFFSET,
                    );

                    builder.declare_value_needs_stack_map(closure_ptr);
                    pre_allocs.push(PreAlloc::Lam {
                        binder: *binder,
                        ptr: closure_ptr,
                        fvs: sorted_fvs,
                        rhs_idx: *rhs_idx,
                    });
                }
                CoreFrame::Con { tag, fields } => {
                    let num_fields = fields.len();
                    let size = 24 + 8 * num_fields as u64;
                    let ptr =
                        emit_alloc_fast_path(builder, sess.vmctx, size, sess.gc_sig, sess.oom_func);

                    let tag_val = builder.ins().iconst(types::I8, layout::TAG_CON as i64);
                    builder.ins().store(MemFlags::trusted(), tag_val, ptr, 0);
                    let size_val = builder.ins().iconst(types::I16, size as i64);
                    builder.ins().store(MemFlags::trusted(), size_val, ptr, 1);
                    let con_tag_val = builder.ins().iconst(types::I64, tag.0 as i64);
                    builder
                        .ins()
                        .store(MemFlags::trusted(), con_tag_val, ptr, CON_TAG_OFFSET);
                    let num_fields_val = builder.ins().iconst(types::I16, num_fields as i64);
                    builder.ins().store(
                        MemFlags::trusted(),
                        num_fields_val,
                        ptr,
                        CON_NUM_FIELDS_OFFSET,
                    );

                    // Null-initialize all fields before anything else can
                    // allocate — presumably so a GC scan never sees
                    // uninitialized slots (TODO confirm against the
                    // collector's scanning rules).
                    let null_val = builder.ins().iconst(types::I64, 0);
                    for i in 0..num_fields {
                        let offset = CON_FIELDS_OFFSET + 8 * i as i32;
                        builder
                            .ins()
                            .store(MemFlags::trusted(), null_val, ptr, offset);
                    }

                    builder.declare_value_needs_stack_map(ptr);
                    pre_allocs.push(PreAlloc::Con {
                        binder: *binder,
                        ptr,
                        field_indices: fields.clone(),
                    });
                }
                other => {
                    return Err(EmitError::InternalError(format!(
                        "LetRec phase 1: expected Lam or Con, got {:?}",
                        other
                    )))
                }
            }
        }

        // Bind every pre-allocated cell so recursive references resolve
        // to a stable heap pointer from here on.
        for pa in &pre_allocs {
            let (binder, ptr) = match pa {
                PreAlloc::Lam { binder, ptr, .. } => (*binder, *ptr),
                PreAlloc::Con { binder, ptr, .. } => (*binder, *ptr),
            };
            self.trace_scope(&format!("insert LetRec(rec) {:?}", binder));
            self.env.insert(binder, SsaVal::HeapPtr(ptr));
        }

        // Phase 2: trivial simple bindings (error sentinels, bare Vars)
        // are bound right away; the rest are deferred.
        let mut deferred_simple = Vec::with_capacity(simple_bindings.len());
        for (binder, rhs_idx) in &simple_bindings {
            if Self::rhs_is_error_call(sess.tree, *rhs_idx) {
                let poison_addr = self.emit_error_poison(sess.tree, *rhs_idx);
                let poison_val = builder.ins().iconst(types::I64, poison_addr);
                self.trace_scope(&format!("defer error LetRec(trivial) {:?}", binder));
                self.env.insert(*binder, SsaVal::HeapPtr(poison_val));
            } else if matches!(&sess.tree.nodes[*rhs_idx], CoreFrame::Var(_)) {
                let rhs_val = emit_subtree(self, sess, builder, *rhs_idx)?;
                self.trace_scope(&format!("insert LetRec(trivial) {:?}", binder));
                self.env.insert(*binder, rhs_val);
            } else {
                deferred_simple.push((*binder, *rhs_idx));
            }
        }

        // Capture slots that cannot be filled yet, keyed by the missing
        // variable; patched later by letrec_post_simple_step or
        // letrec_finish_phases.
        let mut pending_capture_updates: std::collections::HashMap<VarId, Vec<ClosureCaptureSlot>> =
            std::collections::HashMap::with_capacity(rec_bindings.len());

        // Phase 3a: compile each lambda body into its own function, then
        // write the closure's code pointer and capture slots.
        for pa in &pre_allocs {
            let (closure_ptr, sorted_fvs, rhs_idx) = match pa {
                PreAlloc::Lam {
                    ptr, fvs, rhs_idx, ..
                } => (*ptr, fvs, *rhs_idx),
                PreAlloc::Con { .. } => continue,
            };
            let (lam_binder, lam_body) = match &sess.tree.nodes[rhs_idx] {
                CoreFrame::Lam { binder, body } => (*binder, *body),
                other => {
                    return Err(EmitError::InternalError(format!(
                        "LetRec phase 3a: expected Lam, got {:?}",
                        other
                    )))
                }
            };
            let lam_body_tree = sess.tree.extract_subtree(lam_body);

            let lambda_name = self.next_lambda_name();
            // Closure entry convention: (vmctx, self, arg) -> result.
            let mut closure_sig = Signature::new(sess.pipeline.isa.default_call_conv());
            closure_sig.params.push(AbiParam::new(types::I64));
            closure_sig.params.push(AbiParam::new(types::I64));
            closure_sig.params.push(AbiParam::new(types::I64));
            closure_sig.returns.push(AbiParam::new(types::I64));

            let lambda_func_id = sess
                .pipeline
                .module
                .declare_function(&lambda_name, Linkage::Local, &closure_sig)
                .map_err(|e| EmitError::CraneliftError(e.to_string()))?;
            sess.pipeline
                .register_lambda(lambda_func_id, lambda_name.clone());

            let mut inner_ctx = Context::new();
            inner_ctx.func.signature = closure_sig;
            inner_ctx.func.name = UserFuncName::default();

            let mut inner_fb_ctx = FunctionBuilderContext::new();
            let mut inner_builder = FunctionBuilder::new(&mut inner_ctx.func, &mut inner_fb_ctx);
            let inner_block = inner_builder.create_block();
            inner_builder.append_block_params_for_function_params(inner_block);
            inner_builder.switch_to_block(inner_block);
            inner_builder.seal_block(inner_block);

            let inner_vmctx = inner_builder.block_params(inner_block)[0];
            let inner_self = inner_builder.block_params(inner_block)[1];
            let inner_arg = inner_builder.block_params(inner_block)[2];

            inner_builder.declare_value_needs_stack_map(inner_self);
            inner_builder.declare_value_needs_stack_map(inner_arg);

            let mut inner_gc_sig = Signature::new(sess.pipeline.isa.default_call_conv());
            inner_gc_sig.params.push(AbiParam::new(types::I64));
            let inner_gc_sig_ref = inner_builder.import_signature(inner_gc_sig);

            let inner_oom_func = {
                let mut sig = Signature::new(sess.pipeline.isa.default_call_conv());
                sig.returns.push(AbiParam::new(types::I64));
                let func_id = sess
                    .pipeline
                    .module
                    .declare_function("runtime_oom", Linkage::Import, &sig)
                    .map_err(|e| EmitError::CraneliftError(format!("declare runtime_oom: {e}")))?;
                sess.pipeline
                    .module
                    .declare_func_in_func(func_id, inner_builder.func)
            };

            // Fresh emit context for the nested function; the lambda
            // counter is threaded through so generated names stay unique.
            let mut inner_emit = EmitContext::new(self.prefix.clone());
            inner_emit.lambda_counter = self.lambda_counter;
            inner_emit
                .env
                .insert(lam_binder, SsaVal::HeapPtr(inner_arg));

            // Captured variables are loaded out of the closure object in
            // the same sorted order they will be stored below.
            for (i, var_id) in sorted_fvs.iter().enumerate() {
                let offset = CLOSURE_CAPTURED_OFFSET + 8 * i as i32;
                let val =
                    inner_builder
                        .ins()
                        .load(types::I64, MemFlags::trusted(), inner_self, offset);
                inner_builder.declare_value_needs_stack_map(val);
                inner_emit.env.insert(*var_id, SsaVal::HeapPtr(val));
            }

            let body_root = lam_body_tree.nodes.len() - 1;
            let mut inner_sess = EmitSession {
                pipeline: sess.pipeline,
                vmctx: inner_vmctx,
                gc_sig: inner_gc_sig_ref,
                oom_func: inner_oom_func,
                tree: &lam_body_tree,
            };
            // The lambda body is in tail position within its function.
            let body_result = inner_emit.emit_node(
                &mut inner_sess,
                &mut inner_builder,
                body_root,
                TailCtx::Tail,
            )?;
            let ret_val = ensure_heap_ptr(
                &mut inner_builder,
                inner_vmctx,
                inner_gc_sig_ref,
                inner_oom_func,
                body_result,
            );

            inner_builder.ins().return_(&[ret_val]);
            inner_builder.finalize();

            self.lambda_counter = inner_emit.lambda_counter;

            sess.pipeline
                .define_function(lambda_func_id, &mut inner_ctx)?;

            // Back in the outer function: store the code pointer.
            let func_ref = sess
                .pipeline
                .module
                .declare_func_in_func(lambda_func_id, builder.func);
            let code_ptr = builder.ins().func_addr(types::I64, func_ref);
            builder.ins().store(
                MemFlags::trusted(),
                code_ptr,
                closure_ptr,
                CLOSURE_CODE_PTR_OFFSET,
            );

            // Null-fill all capture slots first (same GC-safety rationale
            // as the Con fields above).
            let null_val = builder.ins().iconst(types::I64, 0);
            for i in 0..sorted_fvs.len() {
                let offset = CLOSURE_CAPTURED_OFFSET + 8 * i as i32;
                builder
                    .ins()
                    .store(MemFlags::trusted(), null_val, closure_ptr, offset);
            }

            // Fill captures that are already bound; defer the rest.
            for (i, var_id) in sorted_fvs.iter().enumerate() {
                let offset = CLOSURE_CAPTURED_OFFSET + 8 * i as i32;
                if let Some(ssaval) = self.env.get(var_id) {
                    let cap_val =
                        ensure_heap_ptr(builder, sess.vmctx, sess.gc_sig, sess.oom_func, *ssaval);
                    builder
                        .ins()
                        .store(MemFlags::trusted(), cap_val, closure_ptr, offset);
                } else {
                    pending_capture_updates
                        .entry(*var_id)
                        .or_default()
                        .push(ClosureCaptureSlot {
                            closure_ptr,
                            offset,
                        });
                }
            }
        }

        // Constructors whose fields reference still-deferred simple
        // bindings cannot be written yet; everything else is filled now.
        let simple_binder_set: std::collections::HashSet<VarId> =
            deferred_simple.iter().map(|(b, _)| *b).collect();
        let mut deferred_cons: Vec<(VarId, cranelift_codegen::ir::Value, Vec<usize>)> =
            Vec::with_capacity(rec_bindings.len());
        for pa in &pre_allocs {
            if let PreAlloc::Con {
                binder,
                ptr,
                field_indices,
            } = pa
            {
                let needs_simple = field_indices.iter().any(|&f_idx| {
                    matches!(&sess.tree.nodes[f_idx], CoreFrame::Var(v) if simple_binder_set.contains(v))
                });
                if needs_simple {
                    deferred_cons.push((*binder, *ptr, field_indices.clone()));
                } else {
                    for (i, &f_idx) in field_indices.iter().enumerate() {
                        // Trivial fields are stored directly; anything
                        // else is wrapped in a thunk.
                        let field_val = if is_trivial_field(f_idx, sess.tree) {
                            let val = emit_subtree(self, sess, builder, f_idx)?;
                            ensure_heap_ptr(builder, sess.vmctx, sess.gc_sig, sess.oom_func, val)
                        } else {
                            let thunk_val = emit_thunk(self, sess, builder, f_idx)?;
                            thunk_val.value()
                        };
                        builder.ins().store(
                            MemFlags::trusted(),
                            field_val,
                            *ptr,
                            CON_FIELDS_OFFSET + 8 * i as i32,
                        );
                    }
                }
            }
        }

        // Order the deferred simple bindings so each one is emitted after
        // every other deferred binding it (transitively) depends on;
        // cycles fall back to the original order.
        let deferred_simple = {
            let deferred_set: std::collections::HashSet<VarId> =
                deferred_simple.iter().map(|(b, _)| *b).collect();

            // Direct free-variable dependencies of every binding.
            let mut direct_deps: std::collections::HashMap<VarId, Vec<VarId>> =
                std::collections::HashMap::with_capacity(bindings.len());
            for (binder, rhs_idx) in bindings {
                let fvs = tidepool_repr::free_vars::free_vars(&sess.tree.extract_subtree(*rhs_idx));
                direct_deps.insert(*binder, fvs.into_iter().collect());
            }

            // For each deferred binding, find which OTHER deferred
            // bindings are reachable through the dependency graph.
            let mut reachable_deferred: std::collections::HashMap<
                VarId,
                std::collections::HashSet<VarId>,
            > = std::collections::HashMap::with_capacity(deferred_simple.len());
            for &(start_node, _) in &deferred_simple {
                let mut visited = std::collections::HashSet::new();
                let mut stack = vec![start_node];
                let mut reached = std::collections::HashSet::new();

                while let Some(node) = stack.pop() {
                    if !visited.insert(node) {
                        continue;
                    }
                    if node != start_node && deferred_set.contains(&node) {
                        reached.insert(node);
                    }
                    if let Some(neighbors) = direct_deps.get(&node) {
                        for &next in neighbors {
                            stack.push(next);
                        }
                    }
                }
                reachable_deferred.insert(start_node, reached);
            }

            // Repeatedly take bindings whose deferred dependencies are
            // all already sorted; no progress means a cycle, and the
            // remainder is appended in original order.
            let mut sorted = Vec::with_capacity(deferred_simple.len());
            let mut remaining: Vec<(VarId, usize)> = deferred_simple;
            let mut progress = true;
            while !remaining.is_empty() && progress {
                progress = false;
                let mut next_remaining = Vec::with_capacity(remaining.len());
                for (binder, rhs_idx) in remaining {
                    let blocked = reachable_deferred[&binder]
                        .iter()
                        .any(|fv| !sorted.iter().any(|(b, _): &(VarId, usize)| *b == *fv));
                    if blocked {
                        next_remaining.push((binder, rhs_idx));
                    } else {
                        sorted.push((binder, rhs_idx));
                        progress = true;
                    }
                }
                remaining = next_remaining;
            }
            sorted.extend(remaining);
            sorted
        };

        // Record which deferred simple binders each deferred Con still
        // waits on; its fields are written once that set drains.
        let mut deferred_con_deps: Vec<DeferredConDep> = Vec::with_capacity(deferred_cons.len());
        for (_, ptr, field_indices) in &deferred_cons {
            let deps: std::collections::HashSet<VarId> = field_indices
                .iter()
                .filter_map(|&f_idx| {
                    if let CoreFrame::Var(v) = &sess.tree.nodes[f_idx] {
                        if simple_binder_set.contains(v) {
                            return Some(*v);
                        }
                    }
                    None
                })
                .collect();
            deferred_con_deps.push(DeferredConDep {
                ptr: *ptr,
                field_indices: field_indices.clone(),
                remaining_deps: deps,
            });
        }

        let state_idx = self.push_letrec_state(LetRecDeferredState {
            pending_capture_updates,
            deferred_con_deps,
        });

        work.push(EmitWork::LetRecFinish {
            body,
            state_idx,
            tail,
        });

        // Queue the deferred simple bindings (reversed for stack order).
        for (binder, rhs_idx) in deferred_simple.iter().rev() {
            if Self::rhs_is_error_call(sess.tree, *rhs_idx) {
                let poison_addr = self.emit_error_poison(sess.tree, *rhs_idx);
                let poison_val = builder.ins().iconst(types::I64, poison_addr);
                self.trace_scope(&format!("defer error LetRec(deferred) {:?}", binder));
                self.env.insert(*binder, SsaVal::HeapPtr(poison_val));
                self.letrec_post_simple_step(sess, builder, binder, state_idx)?;
            } else {
                let refs_deferred_con = !self.letrec_state(state_idx).deferred_con_deps.is_empty()
                    && self
                        .letrec_state(state_idx)
                        .deferred_con_deps
                        .iter()
                        .any(|d| d.remaining_deps.contains(binder));
                // A binding that a deferred Con waits on can be satisfied
                // eagerly with a thunk, but only when none of its own
                // free variables is itself still deferred.
                let can_thunkify = if refs_deferred_con {
                    let body_tree = sess.tree.extract_subtree(*rhs_idx);
                    let fvs = tidepool_repr::free_vars::free_vars(&body_tree);
                    !fvs.iter().any(|v| {
                        !self.env.contains_key(v) && deferred_simple.iter().any(|(b, _)| b == v)
                    })
                } else {
                    false
                };
                if can_thunkify {
                    let thunk_val = emit_thunk(self, sess, builder, *rhs_idx)?;
                    self.trace_scope(&format!("insert LetRec(simple) {:?}", binder));
                    self.env.insert(*binder, thunk_val);
                    self.letrec_post_simple_step(sess, builder, binder, state_idx)?;
                } else {
                    work.push(EmitWork::LetRecPostSimple {
                        binder: *binder,
                        state_idx,
                    });
                    work.push(EmitWork::Eval(*rhs_idx, TailCtx::NonTail));
                }
            }
        }

        Ok(())
    }
2063
    /// Run after a simple letrec binding `binder` becomes available:
    /// patch any closure capture slots that were waiting on it, then
    /// write the fields of every deferred constructor whose dependency
    /// set has now fully drained.
    #[allow(clippy::too_many_arguments)]
    fn letrec_post_simple_step(
        &mut self,
        sess: &mut EmitSession,
        builder: &mut FunctionBuilder,
        binder: &VarId,
        state_idx: LetRecStateId,
    ) -> Result<(), EmitError> {
        // Fill every capture slot recorded for this binder.
        let updates = self
            .letrec_state_mut(state_idx)
            .pending_capture_updates
            .remove(binder);
        if let Some(updates) = updates {
            if let Some(ssaval) = self.env.get(binder) {
                let cap_val =
                    ensure_heap_ptr(builder, sess.vmctx, sess.gc_sig, sess.oom_func, *ssaval);
                for slot in updates {
                    builder.ins().store(
                        MemFlags::trusted(),
                        cap_val,
                        slot.closure_ptr,
                        slot.offset,
                    );
                }
            }
        }

        // Take the deps out of the state so `self` can be re-borrowed
        // while emitting field values; put them back afterwards.
        let mut con_deps = std::mem::take(&mut self.letrec_state_mut(state_idx).deferred_con_deps);
        for dep in con_deps.iter_mut() {
            dep.remaining_deps.remove(binder);
            if dep.remaining_deps.is_empty() && !dep.field_indices.is_empty() {
                for (i, &f_idx) in dep.field_indices.iter().enumerate() {
                    // Trivial fields are stored directly; everything else
                    // is wrapped in a thunk.
                    let field_val = if is_trivial_field(f_idx, sess.tree) {
                        let val = emit_subtree(self, sess, builder, f_idx)?;
                        ensure_heap_ptr(builder, sess.vmctx, sess.gc_sig, sess.oom_func, val)
                    } else {
                        let thunk_val = emit_thunk(self, sess, builder, f_idx)?;
                        thunk_val.value()
                    };
                    builder.ins().store(
                        MemFlags::trusted(),
                        field_val,
                        dep.ptr,
                        CON_FIELDS_OFFSET + 8 * i as i32,
                    );
                }
                // Mark this Con as written so finish_phases skips it.
                dep.field_indices.clear();
            }
        }
        self.letrec_state_mut(state_idx).deferred_con_deps = con_deps;

        Ok(())
    }
2123
    /// Final letrec phase, run before the body is emitted: fill every
    /// capture slot still pending (it is an error if the variable is
    /// still unbound at this point) and write the fields of any
    /// remaining deferred constructors.
    #[allow(clippy::too_many_arguments)]
    fn letrec_finish_phases(
        &mut self,
        sess: &mut EmitSession,
        builder: &mut FunctionBuilder,
        state_idx: LetRecStateId,
    ) -> Result<(), EmitError> {
        let pending = std::mem::take(&mut self.letrec_state_mut(state_idx).pending_capture_updates);
        for (var_id, updates) in pending {
            let ssaval = self.env.get(&var_id).ok_or_else(|| {
                EmitError::MissingCaptureVar(
                    var_id,
                    "LetRec Phase 3a' capture fill: not in env after Phase 3c".into(),
                )
            })?;
            let cap_val = ensure_heap_ptr(builder, sess.vmctx, sess.gc_sig, sess.oom_func, *ssaval);
            for slot in updates {
                builder
                    .ins()
                    .store(MemFlags::trusted(), cap_val, slot.closure_ptr, slot.offset);
            }
        }

        // Cons whose dependency set never fully drained get their fields
        // written now; already-written ones have empty `field_indices`.
        let con_deps = std::mem::take(&mut self.letrec_state_mut(state_idx).deferred_con_deps);
        for dep in &con_deps {
            for (i, &f_idx) in dep.field_indices.iter().enumerate() {
                let field_val = if is_trivial_field(f_idx, sess.tree) {
                    let val = emit_subtree(self, sess, builder, f_idx)?;
                    ensure_heap_ptr(builder, sess.vmctx, sess.gc_sig, sess.oom_func, val)
                } else {
                    let thunk_val = emit_thunk(self, sess, builder, f_idx)?;
                    thunk_val.value()
                };
                builder.ins().store(
                    MemFlags::trusted(),
                    field_val,
                    dep.ptr,
                    CON_FIELDS_OFFSET + 8 * i as i32,
                );
            }
        }

        Ok(())
    }
2171
2172 fn push_letrec_state(&mut self, state: LetRecDeferredState) -> LetRecStateId {
2173 let idx = self.letrec_states.len();
2174 self.letrec_states.push(state);
2175 LetRecStateId(idx)
2176 }
2177
    /// Shared borrow of the letrec state registered under `id`.
    fn letrec_state(&self, id: LetRecStateId) -> &LetRecDeferredState {
        &self.letrec_states[id.0]
    }
2181
    /// Mutable borrow of the letrec state registered under `id`.
    fn letrec_state_mut(&mut self, id: LetRecStateId) -> &mut LetRecDeferredState {
        &mut self.letrec_states[id.0]
    }
2185}
2186
/// Handle into `EmitContext::letrec_states`, identifying the deferred
/// patch-work of one in-flight letrec group.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct LetRecStateId(usize);
2189
/// Work items driving the explicit emission stack in `EmitContext::emit_node`.
enum EmitWork {
    /// Emit the tree node at this index under the given tail context.
    Eval(usize, TailCtx),
    /// Pop the value stack and bind the result to this variable.
    Bind(VarId),
    /// Like `Bind`, but additionally patch any letrec capture/field
    /// slots waiting on this binder (`letrec_post_simple_step`).
    LetRecPostSimple {
        binder: VarId,
        state_idx: LetRecStateId,
    },
    /// All bindings of a letrec are done: flush remaining deferred
    /// captures/fields, then emit the letrec body.
    LetRecFinish {
        body: usize,
        state_idx: LetRecStateId,
        tail: TailCtx,
    },
    /// Restore env entries shadowed by a let/letrec once its body is done.
    LetCleanupMark(LetCleanup),
}
2211
/// Deferred patch-work accumulated while emitting one letrec group.
pub(crate) struct LetRecDeferredState {
    /// Closure capture slots still waiting for a value, keyed by the
    /// variable that is not yet bound.
    pending_capture_updates: std::collections::HashMap<VarId, Vec<ClosureCaptureSlot>>,
    /// Pre-allocated constructors whose field writes are postponed.
    deferred_con_deps: Vec<DeferredConDep>,
}
2218
/// One capture slot inside a pre-allocated closure heap object.
pub(crate) struct ClosureCaptureSlot {
    /// SSA value holding the closure object's base pointer.
    pub closure_ptr: cranelift_codegen::ir::Value,
    /// Byte offset of the slot within the closure object.
    pub offset: i32,
}
2223
/// A pre-allocated constructor whose field writes are postponed until
/// all the simple letrec bindings it references become available.
struct DeferredConDep {
    /// SSA value holding the constructor cell's base pointer.
    ptr: cranelift_codegen::ir::Value,
    /// Tree indices of the field expressions; cleared once written.
    field_indices: Vec<usize>,
    /// Deferred simple binders this constructor still waits on.
    remaining_deps: std::collections::HashSet<VarId>,
}
2233
/// Env restoration recorded on the work stack when entering a let scope.
enum LetCleanup {
    /// Restore one binder to the value it had before being shadowed.
    Single(VarId, Option<SsaVal>),
    /// Restore a whole saved scope (used for letrec groups).
    Rec(EnvScope),
}
2238
2239fn emit_lit(
2240 builder: &mut FunctionBuilder,
2241 vmctx: Value,
2242 gc_sig: ir::SigRef,
2243 oom_func: ir::FuncRef,
2244 lit: &Literal,
2245) -> Result<SsaVal, EmitError> {
2246 let ptr = emit_alloc_fast_path(builder, vmctx, LIT_TOTAL_SIZE, gc_sig, oom_func);
2247
2248 let tag = builder.ins().iconst(types::I8, layout::TAG_LIT as i64);
2249 builder.ins().store(MemFlags::trusted(), tag, ptr, 0);
2250 let size = builder.ins().iconst(types::I16, LIT_TOTAL_SIZE as i64);
2251 builder.ins().store(MemFlags::trusted(), size, ptr, 1);
2252
2253 match lit {
2254 Literal::LitInt(n) => {
2255 let lit_tag = builder.ins().iconst(types::I8, LIT_TAG_INT);
2256 builder
2257 .ins()
2258 .store(MemFlags::trusted(), lit_tag, ptr, LIT_TAG_OFFSET);
2259 let val = builder.ins().iconst(types::I64, *n);
2260 builder
2261 .ins()
2262 .store(MemFlags::trusted(), val, ptr, LIT_VALUE_OFFSET);
2263 builder.declare_value_needs_stack_map(ptr);
2264 Ok(SsaVal::HeapPtr(ptr))
2265 }
2266 Literal::LitWord(n) => {
2267 let lit_tag = builder.ins().iconst(types::I8, LIT_TAG_WORD);
2268 builder
2269 .ins()
2270 .store(MemFlags::trusted(), lit_tag, ptr, LIT_TAG_OFFSET);
2271 let val = builder.ins().iconst(types::I64, *n as i64);
2272 builder
2273 .ins()
2274 .store(MemFlags::trusted(), val, ptr, LIT_VALUE_OFFSET);
2275 builder.declare_value_needs_stack_map(ptr);
2276 Ok(SsaVal::HeapPtr(ptr))
2277 }
2278 Literal::LitChar(c) => {
2279 let lit_tag = builder.ins().iconst(types::I8, LIT_TAG_CHAR);
2280 builder
2281 .ins()
2282 .store(MemFlags::trusted(), lit_tag, ptr, LIT_TAG_OFFSET);
2283 let val = builder.ins().iconst(types::I64, *c as i64);
2284 builder
2285 .ins()
2286 .store(MemFlags::trusted(), val, ptr, LIT_VALUE_OFFSET);
2287 builder.declare_value_needs_stack_map(ptr);
2288 Ok(SsaVal::HeapPtr(ptr))
2289 }
2290 Literal::LitFloat(bits) => {
2291 let lit_tag = builder.ins().iconst(types::I8, LIT_TAG_FLOAT);
2292 builder
2293 .ins()
2294 .store(MemFlags::trusted(), lit_tag, ptr, LIT_TAG_OFFSET);
2295 let val = builder.ins().iconst(types::I64, *bits as i64);
2296 builder
2297 .ins()
2298 .store(MemFlags::trusted(), val, ptr, LIT_VALUE_OFFSET);
2299 builder.declare_value_needs_stack_map(ptr);
2300 Ok(SsaVal::HeapPtr(ptr))
2301 }
2302 Literal::LitDouble(bits) => {
2303 let lit_tag = builder.ins().iconst(types::I8, LIT_TAG_DOUBLE);
2304 builder
2305 .ins()
2306 .store(MemFlags::trusted(), lit_tag, ptr, LIT_TAG_OFFSET);
2307 let val = builder.ins().iconst(types::I64, *bits as i64);
2308 builder
2309 .ins()
2310 .store(MemFlags::trusted(), val, ptr, LIT_VALUE_OFFSET);
2311 builder.declare_value_needs_stack_map(ptr);
2312 Ok(SsaVal::HeapPtr(ptr))
2313 }
2314 Literal::LitString(_) => Err(EmitError::NotYetImplemented("LitString".into())),
2315 }
2316}
2317
2318fn emit_lit_string(
2323 pipeline: &mut CodegenPipeline,
2324 builder: &mut FunctionBuilder,
2325 vmctx: Value,
2326 gc_sig: ir::SigRef,
2327 oom_func: ir::FuncRef,
2328 bytes: &[u8],
2329 counter: &mut u32,
2330) -> Result<SsaVal, EmitError> {
2331 let data_name = format!("__litstr_{}", *counter);
2333 *counter += 1;
2334
2335 let data_id = pipeline
2336 .module
2337 .declare_data(&data_name, Linkage::Local, false, false)
2338 .map_err(|e| EmitError::CraneliftError(e.to_string()))?;
2339
2340 let mut data_desc = DataDescription::new();
2341 data_desc.set_align(8); let mut contents = Vec::with_capacity(8 + bytes.len() + 1);
2343 contents.extend_from_slice(&(bytes.len() as u64).to_le_bytes());
2344 contents.extend_from_slice(bytes);
2345 contents.push(0); data_desc.define(contents.into_boxed_slice());
2347
2348 pipeline
2349 .module
2350 .define_data(data_id, &data_desc)
2351 .map_err(|e| EmitError::CraneliftError(e.to_string()))?;
2352
2353 let local_data = pipeline.module.declare_data_in_func(data_id, builder.func);
2355 let data_ptr = builder.ins().symbol_value(types::I64, local_data);
2356
2357 let ptr = emit_alloc_fast_path(builder, vmctx, LIT_TOTAL_SIZE, gc_sig, oom_func);
2359
2360 let tag = builder.ins().iconst(types::I8, layout::TAG_LIT as i64);
2361 builder.ins().store(MemFlags::trusted(), tag, ptr, 0);
2362 let size = builder.ins().iconst(types::I16, LIT_TOTAL_SIZE as i64);
2363 builder.ins().store(MemFlags::trusted(), size, ptr, 1);
2364 let lit_tag = builder.ins().iconst(types::I8, LIT_TAG_STRING);
2365 builder
2366 .ins()
2367 .store(MemFlags::trusted(), lit_tag, ptr, LIT_TAG_OFFSET);
2368 builder
2369 .ins()
2370 .store(MemFlags::trusted(), data_ptr, ptr, LIT_VALUE_OFFSET);
2371
2372 builder.declare_value_needs_stack_map(ptr);
2373 Ok(SsaVal::HeapPtr(ptr))
2374}
2375
2376pub(crate) fn force_thunk_ssaval(
2380 pipeline: &mut CodegenPipeline,
2381 builder: &mut FunctionBuilder,
2382 vmctx: Value,
2383 val: SsaVal,
2384) -> Result<SsaVal, EmitError> {
2385 match val {
2386 SsaVal::Raw(_, _) => Ok(val),
2387 SsaVal::HeapPtr(ptr) => {
2388 let tag = builder.ins().load(types::I8, MemFlags::trusted(), ptr, 0);
2389 let is_thunk = builder
2390 .ins()
2391 .icmp_imm(IntCC::Equal, tag, layout::TAG_THUNK as i64);
2392
2393 let force_block = builder.create_block();
2394 let ready_block = builder.create_block();
2395 builder.append_block_param(ready_block, types::I64);
2396
2397 builder.ins().brif(
2398 is_thunk,
2399 force_block,
2400 &[],
2401 ready_block,
2402 &[BlockArg::Value(ptr)],
2403 );
2404
2405 builder.switch_to_block(force_block);
2406 builder.seal_block(force_block);
2407
2408 let force_fn = pipeline
2409 .module
2410 .declare_function("heap_force", Linkage::Import, &{
2411 let mut sig = Signature::new(pipeline.isa.default_call_conv());
2412 sig.params.push(AbiParam::new(types::I64)); sig.params.push(AbiParam::new(types::I64)); sig.returns.push(AbiParam::new(types::I64)); sig
2416 })
2417 .map_err(|e| EmitError::CraneliftError(e.to_string()))?;
2418 let force_ref = pipeline.module.declare_func_in_func(force_fn, builder.func);
2419 let call = builder.ins().call(force_ref, &[vmctx, ptr]);
2420 let forced = builder.inst_results(call)[0];
2421 builder.declare_value_needs_stack_map(forced);
2422 builder.ins().jump(ready_block, &[BlockArg::Value(forced)]);
2423
2424 builder.switch_to_block(ready_block);
2425 builder.seal_block(ready_block);
2426 let result = builder.block_params(ready_block)[0];
2427 builder.declare_value_needs_stack_map(result);
2428 Ok(SsaVal::HeapPtr(result))
2429 }
2430 }
2431}
2432
2433pub(crate) fn ensure_heap_ptr(
2434 builder: &mut FunctionBuilder,
2435 vmctx: Value,
2436 gc_sig: ir::SigRef,
2437 oom_func: ir::FuncRef,
2438 val: SsaVal,
2439) -> Value {
2440 match val {
2441 SsaVal::HeapPtr(v) => v,
2442 SsaVal::Raw(v, lit_tag) => {
2443 let ptr = emit_alloc_fast_path(builder, vmctx, LIT_TOTAL_SIZE, gc_sig, oom_func);
2444 let tag = builder.ins().iconst(types::I8, layout::TAG_LIT as i64);
2445 builder.ins().store(MemFlags::trusted(), tag, ptr, 0);
2446 let size = builder.ins().iconst(types::I16, LIT_TOTAL_SIZE as i64);
2447 builder.ins().store(MemFlags::trusted(), size, ptr, 1);
2448 let lit_tag_val = builder.ins().iconst(types::I8, lit_tag);
2449 builder
2450 .ins()
2451 .store(MemFlags::trusted(), lit_tag_val, ptr, LIT_TAG_OFFSET);
2452 builder
2453 .ins()
2454 .store(MemFlags::trusted(), v, ptr, LIT_VALUE_OFFSET);
2455 builder.declare_value_needs_stack_map(ptr);
2456 ptr
2457 }
2458 }
2459}