1use crate::pipeline::CodegenPipeline;
2use crate::alloc::emit_alloc_fast_path;
3use crate::emit::*;
4use tidepool_repr::*;
5use tidepool_heap::layout;
6use cranelift_codegen::ir::{self, types, AbiParam, InstBuilder, MemFlags, Value, Signature, UserFuncName};
7use cranelift_codegen::Context;
8use cranelift_frontend::{FunctionBuilder, FunctionBuilderContext};
9use cranelift_module::{Module, Linkage, FuncId, DataDescription};
10
11pub fn compile_expr(
15 pipeline: &mut CodegenPipeline,
16 tree: &CoreExpr,
17 name: &str,
18) -> Result<FuncId, EmitError> {
19 let sig = pipeline.make_func_signature();
20 let func_id = pipeline.module.declare_function(name, Linkage::Export, &sig)
21 .map_err(|e| EmitError::CraneliftError(e.to_string()))?;
22
23 let mut ctx = Context::new();
24 ctx.func.signature = sig;
25 ctx.func.name = UserFuncName::default();
26
27 let mut fb_ctx = FunctionBuilderContext::new();
28 let mut builder = FunctionBuilder::new(&mut ctx.func, &mut fb_ctx);
29
30 let entry_block = builder.create_block();
31 builder.append_block_params_for_function_params(entry_block);
32 builder.switch_to_block(entry_block);
33 builder.seal_block(entry_block);
34
35 let vmctx = builder.block_params(entry_block)[0];
36
37 let mut gc_sig = Signature::new(pipeline.isa.default_call_conv());
38 gc_sig.params.push(AbiParam::new(types::I64));
39 let gc_sig_ref = builder.import_signature(gc_sig);
40
41 let mut emit_ctx = EmitContext::new(name.to_string());
42
43 let result = emit_ctx.emit_node(pipeline, &mut builder, vmctx, gc_sig_ref, tree, tree.nodes.len() - 1)?;
44 let ret = ensure_heap_ptr(&mut builder, vmctx, gc_sig_ref, result);
45
46 builder.ins().return_(&[ret]);
47 builder.finalize();
48
49 pipeline.define_function(func_id, &mut ctx);
50
51 Ok(func_id)
52}
53
54
impl EmitContext {
    /// Emits Cranelift IR for the subtree of `tree` rooted at `idx`, returning
    /// the resulting SSA value.
    ///
    /// `let`/`letrec` frames are handled iteratively: instead of recursing into
    /// the body, the loop installs the bindings, reassigns `idx` to the body
    /// node and `continue`s, so deep let-chains do not grow the Rust call
    /// stack. Every binding installed this way is recorded in `let_cleanup`
    /// and removed from `self.env` after the final value is produced.
    /// All other frame kinds recurse normally via `self.emit_node(...)`.
    pub fn emit_node(
        &mut self,
        pipeline: &mut CodegenPipeline,
        builder: &mut FunctionBuilder,
        vmctx: Value,
        gc_sig: ir::SigRef,
        tree: &CoreExpr,
        mut idx: usize,
    ) -> Result<SsaVal, EmitError> {
        let mut let_cleanup: Vec<LetCleanup> = Vec::new();
        // `break` yields the frame's value; `continue` (after reassigning
        // `idx`) descends into a let-body.
        let result = loop {
            match &tree.nodes[idx] {
                CoreFrame::Lit(Literal::LitString(bytes)) => {
                    // Strings need module-level static data, hence a separate path.
                    break emit_lit_string(pipeline, builder, vmctx, gc_sig, bytes, &mut self.lambda_counter);
                }
                CoreFrame::Lit(lit) => break emit_lit(builder, vmctx, gc_sig, lit),
                CoreFrame::Var(vid) => {
                    break match self.env.get(vid).copied() {
                        Some(v) => Ok(v),
                        None => {
                            // Unresolved variable. A top byte of 0x45 appears to
                            // mark "deferred error" placeholder ids whose low
                            // byte encodes an error kind — TODO confirm against
                            // the VarId encoding scheme.
                            let tag = (vid.0 >> 56) as u8;
                            if tag == 0x45 {
                                let kind = vid.0 & 0xFF;
                                // Emit a call to the imported `runtime_error`
                                // helper: fn(kind: i64) -> i64.
                                let err_fn = pipeline.module.declare_function(
                                    "runtime_error",
                                    Linkage::Import,
                                    &{
                                        let mut sig = Signature::new(pipeline.isa.default_call_conv());
                                        sig.params.push(AbiParam::new(types::I64)); sig.returns.push(AbiParam::new(types::I64));
                                        sig
                                    },
                                ).map_err(|e| EmitError::CraneliftError(e.to_string()))?;
                                let err_ref = pipeline.module.declare_func_in_func(err_fn, builder.func);
                                let kind_val = builder.ins().iconst(types::I64, kind as i64);
                                let inst = builder.ins().call(err_ref, &[kind_val]);
                                let result = builder.inst_results(inst)[0];
                                // NOTE(review): this early `return` bypasses the
                                // `let_cleanup` pass at the bottom, so any env
                                // entries installed by enclosing lets are left
                                // behind in `self.env` — confirm intended.
                                return Ok(SsaVal::HeapPtr(result));
                            }

                            // Genuinely unknown variable: warn at compile time
                            // and emit a call to a trap routine carrying the
                            // raw id for runtime diagnostics.
                            eprintln!("[codegen] WARNING: unresolved var {:?} in {} (lambda_counter={})", vid, self.prefix, self.lambda_counter);
                            let trap_fn = pipeline.module.declare_function(
                                "unresolved_var_trap",
                                Linkage::Import,
                                &{
                                    let mut sig = Signature::new(pipeline.isa.default_call_conv());
                                    sig.params.push(AbiParam::new(types::I64)); sig.returns.push(AbiParam::new(types::I64));
                                    sig
                                },
                            ).map_err(|e| EmitError::CraneliftError(e.to_string()))?;
                            let trap_ref = pipeline.module.declare_func_in_func(trap_fn, builder.func);
                            let var_id_val = builder.ins().iconst(types::I64, vid.0 as i64);
                            let inst = builder.ins().call(trap_ref, &[var_id_val]);
                            let result = builder.inst_results(inst)[0];
                            Ok(SsaVal::HeapPtr(result))
                        }
                    };
                }
                CoreFrame::Con { tag, fields } => {
                    // Evaluate all fields first (each boxed to a heap pointer).
                    let mut field_vals = Vec::new();
                    for &f_idx in fields {
                        let val = self.emit_node(pipeline, builder, vmctx, gc_sig, tree, f_idx)?;
                        field_vals.push(ensure_heap_ptr(builder, vmctx, gc_sig, val));
                    }

                    // Object layout: 24-byte header area + 8 bytes per field.
                    let num_fields = field_vals.len();
                    let size = 24 + 8 * num_fields as u64;
                    let ptr = emit_alloc_fast_path(builder, vmctx, size, gc_sig);

                    // Header: object kind tag at offset 0, total size at offset 1.
                    let tag_val = builder.ins().iconst(types::I8, layout::TAG_CON as i64);
                    builder.ins().store(MemFlags::trusted(), tag_val, ptr, 0);
                    let size_val = builder.ins().iconst(types::I16, size as i64);
                    builder.ins().store(MemFlags::trusted(), size_val, ptr, 1);

                    // Constructor tag and arity, then the field slots.
                    let con_tag_val = builder.ins().iconst(types::I64, tag.0 as i64);
                    builder.ins().store(MemFlags::trusted(), con_tag_val, ptr, CON_TAG_OFFSET);
                    let num_fields_val = builder.ins().iconst(types::I16, num_fields as i64);
                    builder.ins().store(MemFlags::trusted(), num_fields_val, ptr, CON_NUM_FIELDS_OFFSET);

                    for (i, field_val) in field_vals.into_iter().enumerate() {
                        builder.ins().store(MemFlags::trusted(), field_val, ptr, CON_FIELDS_START + 8 * i as i32);
                    }

                    // Register the new object with GC stack maps.
                    builder.declare_value_needs_stack_map(ptr);
                    break Ok(SsaVal::HeapPtr(ptr));
                }
                CoreFrame::PrimOp { op, args } => {
                    // Evaluate operands left-to-right, then delegate to the
                    // primop emitter (which may produce raw or boxed values).
                    let mut arg_vals = Vec::new();
                    for &a_idx in args {
                        arg_vals.push(self.emit_node(pipeline, builder, vmctx, gc_sig, tree, a_idx)?);
                    }
                    break primop::emit_primop(builder, op, &arg_vals);
                }
                CoreFrame::App { fun, arg } => {
                    let fun_val = self.emit_node(pipeline, builder, vmctx, gc_sig, tree, *fun)?;
                    let arg_val = self.emit_node(pipeline, builder, vmctx, gc_sig, tree, *arg)?;
                    // NOTE(review): the callee uses `.value()` directly rather
                    // than `ensure_heap_ptr` — assumes `fun` always evaluates
                    // to a heap closure pointer, never a Raw literal. Confirm.
                    let fun_ptr = fun_val.value();
                    let arg_ptr = ensure_heap_ptr(builder, vmctx, gc_sig, arg_val);

                    // Load the code pointer out of the closure object.
                    let code_ptr = builder.ins().load(types::I64, MemFlags::trusted(), fun_ptr, CLOSURE_CODE_PTR_OFFSET);

                    // Closure calling convention:
                    // fn(vmctx: i64, closure: i64, arg: i64) -> i64.
                    let mut sig = Signature::new(pipeline.isa.default_call_conv());
                    sig.params.push(AbiParam::new(types::I64)); sig.params.push(AbiParam::new(types::I64)); sig.params.push(AbiParam::new(types::I64)); sig.returns.push(AbiParam::new(types::I64));
                    let call_sig = builder.import_signature(sig);

                    let inst = builder.ins().call_indirect(call_sig, code_ptr, &[vmctx, fun_ptr, arg_ptr]);
                    let ret_val = builder.inst_results(inst)[0];
                    builder.declare_value_needs_stack_map(ret_val);
                    break Ok(SsaVal::HeapPtr(ret_val));
                }
                CoreFrame::Lam { binder, body } => {
                    // Closure conversion: free variables of the body (minus the
                    // lambda's own binder) become captured slots.
                    let body_tree = tree.extract_subtree(*body);
                    let mut fvs = tidepool_repr::free_vars::free_vars(&body_tree);
                    fvs.remove(binder);

                    // Only variables we actually have values for are captured;
                    // sorted by id for a deterministic capture layout.
                    let mut sorted_fvs: Vec<VarId> = fvs.into_iter().filter(|v| self.env.contains_key(v)).collect();
                    sorted_fvs.sort_by_key(|v| v.0);

                    let captures: Vec<(VarId, SsaVal)> = sorted_fvs.iter().map(|v| (*v, self.env[v])).collect();

                    // Compile the lambda body into its own module-local
                    // function using the closure calling convention.
                    let lambda_name = self.next_lambda_name();
                    let mut closure_sig = Signature::new(pipeline.isa.default_call_conv());
                    closure_sig.params.push(AbiParam::new(types::I64)); closure_sig.params.push(AbiParam::new(types::I64)); closure_sig.params.push(AbiParam::new(types::I64)); closure_sig.returns.push(AbiParam::new(types::I64));

                    let lambda_func_id = pipeline.module.declare_function(&lambda_name, Linkage::Local, &closure_sig)
                        .map_err(|e| EmitError::CraneliftError(e.to_string()))?;
                    pipeline.register_lambda(lambda_func_id, lambda_name.clone());

                    let mut inner_ctx = Context::new();
                    inner_ctx.func.signature = closure_sig;
                    inner_ctx.func.name = UserFuncName::default();

                    let mut inner_fb_ctx = FunctionBuilderContext::new();
                    let mut inner_builder = FunctionBuilder::new(&mut inner_ctx.func, &mut inner_fb_ctx);
                    let inner_block = inner_builder.create_block();
                    inner_builder.append_block_params_for_function_params(inner_block);
                    inner_builder.switch_to_block(inner_block);
                    inner_builder.seal_block(inner_block);

                    // Params: (vmctx, closure self-pointer, argument).
                    let inner_vmctx = inner_builder.block_params(inner_block)[0];
                    let closure_self = inner_builder.block_params(inner_block)[1];
                    let arg_param = inner_builder.block_params(inner_block)[2];

                    inner_builder.declare_value_needs_stack_map(closure_self);
                    inner_builder.declare_value_needs_stack_map(arg_param);

                    let mut inner_gc_sig = Signature::new(pipeline.isa.default_call_conv());
                    inner_gc_sig.params.push(AbiParam::new(types::I64));
                    let inner_gc_sig_ref = inner_builder.import_signature(inner_gc_sig);

                    // Fresh emit context; the lambda counter is threaded through
                    // so generated names stay unique across nesting.
                    let mut inner_emit = EmitContext::new(self.prefix.clone());
                    inner_emit.lambda_counter = self.lambda_counter;

                    inner_emit.env.insert(*binder, SsaVal::HeapPtr(arg_param));

                    // Reload each captured variable from the closure object.
                    for (i, (var_id, _)) in captures.iter().enumerate() {
                        let offset = CLOSURE_CAPTURED_START + 8 * i as i32;
                        let val = inner_builder.ins().load(types::I64, MemFlags::trusted(), closure_self, offset);
                        inner_builder.declare_value_needs_stack_map(val);
                        inner_emit.env.insert(*var_id, SsaVal::HeapPtr(val));
                    }

                    let body_root = body_tree.nodes.len() - 1;
                    let body_result = inner_emit.emit_node(pipeline, &mut inner_builder, inner_vmctx, inner_gc_sig_ref, &body_tree, body_root)?;
                    let ret_val = ensure_heap_ptr(&mut inner_builder, inner_vmctx, inner_gc_sig_ref, body_result);

                    inner_builder.ins().return_(&[ret_val]);
                    inner_builder.finalize();

                    // Propagate counter advances made while emitting the body.
                    self.lambda_counter = inner_emit.lambda_counter;
                    pipeline.define_function(lambda_func_id, &mut inner_ctx);

                    // Back in the outer function: materialize the code pointer
                    // and allocate the closure object.
                    let func_ref = pipeline.module.declare_func_in_func(lambda_func_id, builder.func);
                    let code_ptr = builder.ins().func_addr(types::I64, func_ref);

                    // Closure layout: 24-byte header area + 8 bytes per capture.
                    let num_captures = captures.len();
                    let closure_size = 24 + 8 * num_captures as u64;
                    let closure_ptr = emit_alloc_fast_path(builder, vmctx, closure_size, gc_sig);

                    let tag_val = builder.ins().iconst(types::I8, layout::TAG_CLOSURE as i64);
                    builder.ins().store(MemFlags::trusted(), tag_val, closure_ptr, 0);
                    let size_val = builder.ins().iconst(types::I16, closure_size as i64);
                    builder.ins().store(MemFlags::trusted(), size_val, closure_ptr, 1);

                    builder.ins().store(MemFlags::trusted(), code_ptr, closure_ptr, CLOSURE_CODE_PTR_OFFSET);
                    let num_cap_val = builder.ins().iconst(types::I16, num_captures as i64);
                    builder.ins().store(MemFlags::trusted(), num_cap_val, closure_ptr, CLOSURE_NUM_CAPTURED_OFFSET);

                    // Fill capture slots (boxing any raw values first).
                    for (i, (_, ssaval)) in captures.iter().enumerate() {
                        let cap_val = ensure_heap_ptr(builder, vmctx, gc_sig, *ssaval);
                        let offset = CLOSURE_CAPTURED_START + 8 * i as i32;
                        builder.ins().store(MemFlags::trusted(), cap_val, closure_ptr, offset);
                    }

                    builder.declare_value_needs_stack_map(closure_ptr);
                    break Ok(SsaVal::HeapPtr(closure_ptr));
                }
                CoreFrame::LetNonRec { binder, rhs, body } => {
                    let body_fvs = tidepool_repr::free_vars::free_vars(&tree.extract_subtree(*body));
                    // NOTE(review): leftover debugging probe with hard-coded
                    // VarId values — consider removing before release.
                    let known_bad = [8214565720323787988u64, 8214565720323787989, 8214565720323787990, 8214565720323784990, 3458764513820540932];
                    if known_bad.contains(&binder.0) {
                        eprintln!("[dce] {:?} in_fvs={}", binder, body_fvs.contains(binder));
                    }
                    if body_fvs.contains(binder) {
                        let rhs_val = self.emit_node(pipeline, builder, vmctx, gc_sig, tree, *rhs)?;
                        self.env.insert(*binder, rhs_val);
                        let_cleanup.push(LetCleanup::Single(*binder));
                    } else {
                        // Binder unused in body: the rhs is skipped entirely
                        // (dead-code elimination). NOTE(review): this also
                        // skips any effects inside the rhs — confirm rhs is
                        // effect-free at this stage.
                    }
                    // Iterate into the body instead of recursing.
                    idx = *body;
                    continue;
                }
                CoreFrame::LetRec { bindings, body } => {
                    // Split the group: Lam/Con rhs can be pre-allocated and
                    // back-patched (true mutual recursion); everything else is
                    // treated as a simple eager binding.
                    let (rec_bindings, simple_bindings): (Vec<_>, Vec<_>) = bindings.iter().partition(|(_, rhs_idx)| {
                        matches!(&tree.nodes[*rhs_idx], CoreFrame::Lam { .. } | CoreFrame::Con { .. })
                    });

                    // Simple bindings are evaluated in order, before the
                    // recursive ones are even pre-allocated.
                    for (binder, rhs_idx) in &simple_bindings {
                        let rhs_val = self.emit_node(pipeline, builder, vmctx, gc_sig, tree, *rhs_idx)?;
                        self.env.insert(*binder, rhs_val);
                    }

                    if rec_bindings.is_empty() {
                        let_cleanup.push(LetCleanup::Rec(bindings.iter().map(|(b, _)| *b).collect()));
                        idx = *body;
                        continue;
                    }

                    // Phase 1 bookkeeping: what was allocated, and what still
                    // needs to be filled in during phase 2/3.
                    enum PreAlloc {
                        Lam { binder: VarId, ptr: cranelift_codegen::ir::Value, fvs: Vec<VarId>, rhs_idx: usize },
                        Con { binder: VarId, ptr: cranelift_codegen::ir::Value, field_indices: Vec<usize> },
                    }
                    let mut pre_allocs = Vec::new();

                    // Phase 1: allocate every recursive object with its header
                    // written but fields/code pointer left unset.
                    for (binder, rhs_idx) in &rec_bindings {
                        match &tree.nodes[*rhs_idx] {
                            CoreFrame::Lam { binder: lam_binder, body: lam_body } => {
                                let lam_body_tree = tree.extract_subtree(*lam_body);
                                let mut fvs = tidepool_repr::free_vars::free_vars(&lam_body_tree);
                                fvs.remove(lam_binder);
                                // Captures may include the letrec binders
                                // themselves — that's what makes recursion work.
                                let mut sorted_fvs: Vec<VarId> = fvs.into_iter().filter(|v| {
                                    self.env.contains_key(v) || rec_bindings.iter().any(|(b, _)| b == v)
                                }).collect();
                                sorted_fvs.sort_by_key(|v| v.0);

                                let num_captures = sorted_fvs.len();
                                let closure_size = 24 + 8 * num_captures as u64;
                                let closure_ptr = emit_alloc_fast_path(builder, vmctx, closure_size, gc_sig);

                                let tag_val = builder.ins().iconst(types::I8, layout::TAG_CLOSURE as i64);
                                builder.ins().store(MemFlags::trusted(), tag_val, closure_ptr, 0);
                                let size_val = builder.ins().iconst(types::I16, closure_size as i64);
                                builder.ins().store(MemFlags::trusted(), size_val, closure_ptr, 1);
                                let num_cap_val = builder.ins().iconst(types::I16, num_captures as i64);
                                builder.ins().store(MemFlags::trusted(), num_cap_val, closure_ptr, CLOSURE_NUM_CAPTURED_OFFSET);

                                builder.declare_value_needs_stack_map(closure_ptr);
                                pre_allocs.push(PreAlloc::Lam { binder: *binder, ptr: closure_ptr, fvs: sorted_fvs, rhs_idx: *rhs_idx });
                            }
                            CoreFrame::Con { tag, fields } => {
                                let num_fields = fields.len();
                                let size = 24 + 8 * num_fields as u64;
                                let ptr = emit_alloc_fast_path(builder, vmctx, size, gc_sig);

                                let tag_val = builder.ins().iconst(types::I8, layout::TAG_CON as i64);
                                builder.ins().store(MemFlags::trusted(), tag_val, ptr, 0);
                                let size_val = builder.ins().iconst(types::I16, size as i64);
                                builder.ins().store(MemFlags::trusted(), size_val, ptr, 1);
                                let con_tag_val = builder.ins().iconst(types::I64, tag.0 as i64);
                                builder.ins().store(MemFlags::trusted(), con_tag_val, ptr, CON_TAG_OFFSET);
                                let num_fields_val = builder.ins().iconst(types::I16, num_fields as i64);
                                builder.ins().store(MemFlags::trusted(), num_fields_val, ptr, CON_NUM_FIELDS_OFFSET);

                                builder.declare_value_needs_stack_map(ptr);
                                pre_allocs.push(PreAlloc::Con { binder: *binder, ptr, field_indices: fields.clone() });
                            }
                            // Partition above guarantees only Lam/Con here.
                            _ => unreachable!(),
                        }
                    }

                    // Phase 2: make every pre-allocated object visible in the
                    // environment so rhs bodies can reference each other.
                    for pa in &pre_allocs {
                        let (binder, ptr) = match pa {
                            PreAlloc::Lam { binder, ptr, .. } => (*binder, *ptr),
                            PreAlloc::Con { binder, ptr, .. } => (*binder, *ptr),
                        };
                        self.env.insert(binder, SsaVal::HeapPtr(ptr));
                    }

                    // Phase 3a: fill constructor fields (which may now refer
                    // to the recursive binders).
                    for pa in &pre_allocs {
                        if let PreAlloc::Con { ptr, field_indices, .. } = pa {
                            for (i, &f_idx) in field_indices.iter().enumerate() {
                                let val = self.emit_node(pipeline, builder, vmctx, gc_sig, tree, f_idx)?;
                                let field_val = ensure_heap_ptr(builder, vmctx, gc_sig, val);
                                builder.ins().store(MemFlags::trusted(), field_val, *ptr, CON_FIELDS_START + 8 * i as i32);
                            }
                        }
                    }

                    // Phase 3b: compile each lambda body and back-patch its
                    // code pointer and capture slots into the pre-allocation.
                    // (Mirrors the standalone Lam arm above.)
                    for pa in pre_allocs {
                        let (closure_ptr, sorted_fvs, rhs_idx) = match pa {
                            PreAlloc::Lam { ptr, fvs, rhs_idx, .. } => (ptr, fvs, rhs_idx),
                            PreAlloc::Con { .. } => continue,
                        };
                        let (lam_binder, lam_body) = match &tree.nodes[rhs_idx] {
                            CoreFrame::Lam { binder, body } => (*binder, *body),
                            _ => unreachable!(),
                        };
                        let lam_body_tree = tree.extract_subtree(lam_body);

                        let captures: Vec<(VarId, SsaVal)> = sorted_fvs.iter().map(|v| (*v, self.env[v])).collect();

                        let lambda_name = self.next_lambda_name();
                        let mut closure_sig = Signature::new(pipeline.isa.default_call_conv());
                        closure_sig.params.push(AbiParam::new(types::I64));
                        closure_sig.params.push(AbiParam::new(types::I64));
                        closure_sig.params.push(AbiParam::new(types::I64));
                        closure_sig.returns.push(AbiParam::new(types::I64));

                        let lambda_func_id = pipeline.module.declare_function(&lambda_name, Linkage::Local, &closure_sig)
                            .map_err(|e| EmitError::CraneliftError(e.to_string()))?;
                        pipeline.register_lambda(lambda_func_id, lambda_name.clone());

                        let mut inner_ctx = Context::new();
                        inner_ctx.func.signature = closure_sig;
                        inner_ctx.func.name = UserFuncName::default();

                        let mut inner_fb_ctx = FunctionBuilderContext::new();
                        let mut inner_builder = FunctionBuilder::new(&mut inner_ctx.func, &mut inner_fb_ctx);
                        let inner_block = inner_builder.create_block();
                        inner_builder.append_block_params_for_function_params(inner_block);
                        inner_builder.switch_to_block(inner_block);
                        inner_builder.seal_block(inner_block);

                        let inner_vmctx = inner_builder.block_params(inner_block)[0];
                        let inner_self = inner_builder.block_params(inner_block)[1];
                        let inner_arg = inner_builder.block_params(inner_block)[2];

                        inner_builder.declare_value_needs_stack_map(inner_self);
                        inner_builder.declare_value_needs_stack_map(inner_arg);

                        let mut inner_gc_sig = Signature::new(pipeline.isa.default_call_conv());
                        inner_gc_sig.params.push(AbiParam::new(types::I64));
                        let inner_gc_sig_ref = inner_builder.import_signature(inner_gc_sig);

                        let mut inner_emit = EmitContext::new(self.prefix.clone());
                        inner_emit.lambda_counter = self.lambda_counter;
                        inner_emit.env.insert(lam_binder, SsaVal::HeapPtr(inner_arg));

                        for (i, (var_id, _)) in captures.iter().enumerate() {
                            let offset = CLOSURE_CAPTURED_START + 8 * i as i32;
                            let val = inner_builder.ins().load(types::I64, MemFlags::trusted(), inner_self, offset);
                            inner_builder.declare_value_needs_stack_map(val);
                            inner_emit.env.insert(*var_id, SsaVal::HeapPtr(val));
                        }

                        let body_root = lam_body_tree.nodes.len() - 1;
                        let body_result = inner_emit.emit_node(pipeline, &mut inner_builder, inner_vmctx, inner_gc_sig_ref, &lam_body_tree, body_root)?;
                        let ret_val = ensure_heap_ptr(&mut inner_builder, inner_vmctx, inner_gc_sig_ref, body_result);

                        inner_builder.ins().return_(&[ret_val]);
                        inner_builder.finalize();
                        self.lambda_counter = inner_emit.lambda_counter;
                        pipeline.define_function(lambda_func_id, &mut inner_ctx);

                        // Back-patch the code pointer into the pre-allocated object.
                        let func_ref = pipeline.module.declare_func_in_func(lambda_func_id, builder.func);
                        let code_ptr = builder.ins().func_addr(types::I64, func_ref);
                        builder.ins().store(MemFlags::trusted(), code_ptr, closure_ptr, CLOSURE_CODE_PTR_OFFSET);

                        for (i, (_, ssaval)) in captures.into_iter().enumerate() {
                            let cap_val = ensure_heap_ptr(builder, vmctx, gc_sig, ssaval);
                            let offset = CLOSURE_CAPTURED_START + 8 * i as i32;
                            builder.ins().store(MemFlags::trusted(), cap_val, closure_ptr, offset);
                        }
                    }

                    let_cleanup.push(LetCleanup::Rec(bindings.iter().map(|(b, _)| *b).collect()));
                    idx = *body;
                    continue;
                }
                // Remaining frame kinds are handled by dedicated emitters.
                CoreFrame::Case { scrutinee, binder, alts } => {
                    break crate::emit::case::emit_case(self, pipeline, builder, vmctx, gc_sig, tree, *scrutinee, binder, alts);
                }
                CoreFrame::Join { label, params, rhs, body } => {
                    break crate::emit::join::emit_join(self, pipeline, builder, vmctx, gc_sig, tree, label, params, *rhs, *body);
                }
                CoreFrame::Jump { label, args } => {
                    break crate::emit::join::emit_jump(self, pipeline, builder, vmctx, gc_sig, tree, label, args);
                }
            }
        };
        // Restore the environment: drop bindings in reverse installation order
        // so inner lets are removed before outer ones.
        for cleanup in let_cleanup.into_iter().rev() {
            match cleanup {
                LetCleanup::Single(var) => { self.env.remove(&var); }
                LetCleanup::Rec(vars) => { for var in vars { self.env.remove(&var); } }
            }
        }
        result
    }
}
479
/// Records environment bindings installed while iterating through a chain of
/// `let` frames in `EmitContext::emit_node`, so they can be removed from the
/// environment once the let-body has been emitted.
enum LetCleanup {
    /// A single binder introduced by a non-recursive `let`.
    Single(VarId),
    /// All binders of one `letrec` group, removed together.
    Rec(Vec<VarId>),
}
484
485fn emit_lit(
486 builder: &mut FunctionBuilder,
487 vmctx: Value,
488 gc_sig: ir::SigRef,
489 lit: &Literal,
490) -> Result<SsaVal, EmitError> {
491 let ptr = emit_alloc_fast_path(builder, vmctx, LIT_TOTAL_SIZE, gc_sig);
492
493 let tag = builder.ins().iconst(types::I8, layout::TAG_LIT as i64);
494 builder.ins().store(MemFlags::trusted(), tag, ptr, 0);
495 let size = builder.ins().iconst(types::I16, LIT_TOTAL_SIZE as i64);
496 builder.ins().store(MemFlags::trusted(), size, ptr, 1);
497
498 match lit {
499 Literal::LitInt(n) => {
500 let lit_tag = builder.ins().iconst(types::I8, LIT_TAG_INT);
501 builder.ins().store(MemFlags::trusted(), lit_tag, ptr, LIT_TAG_OFFSET);
502 let val = builder.ins().iconst(types::I64, *n);
503 builder.ins().store(MemFlags::trusted(), val, ptr, LIT_VALUE_OFFSET);
504 builder.declare_value_needs_stack_map(ptr);
505 Ok(SsaVal::HeapPtr(ptr))
506 }
507 Literal::LitWord(n) => {
508 let lit_tag = builder.ins().iconst(types::I8, LIT_TAG_WORD);
509 builder.ins().store(MemFlags::trusted(), lit_tag, ptr, LIT_TAG_OFFSET);
510 let val = builder.ins().iconst(types::I64, *n as i64);
511 builder.ins().store(MemFlags::trusted(), val, ptr, LIT_VALUE_OFFSET);
512 builder.declare_value_needs_stack_map(ptr);
513 Ok(SsaVal::HeapPtr(ptr))
514 }
515 Literal::LitChar(c) => {
516 let lit_tag = builder.ins().iconst(types::I8, LIT_TAG_CHAR);
517 builder.ins().store(MemFlags::trusted(), lit_tag, ptr, LIT_TAG_OFFSET);
518 let val = builder.ins().iconst(types::I64, *c as i64);
519 builder.ins().store(MemFlags::trusted(), val, ptr, LIT_VALUE_OFFSET);
520 builder.declare_value_needs_stack_map(ptr);
521 Ok(SsaVal::HeapPtr(ptr))
522 }
523 Literal::LitFloat(bits) => {
524 let lit_tag = builder.ins().iconst(types::I8, LIT_TAG_FLOAT);
525 builder.ins().store(MemFlags::trusted(), lit_tag, ptr, LIT_TAG_OFFSET);
526 let val = builder.ins().iconst(types::I64, *bits as i64);
527 builder.ins().store(MemFlags::trusted(), val, ptr, LIT_VALUE_OFFSET);
528 builder.declare_value_needs_stack_map(ptr);
529 Ok(SsaVal::HeapPtr(ptr))
530 }
531 Literal::LitDouble(bits) => {
532 let lit_tag = builder.ins().iconst(types::I8, LIT_TAG_DOUBLE);
533 builder.ins().store(MemFlags::trusted(), lit_tag, ptr, LIT_TAG_OFFSET);
534 let val = builder.ins().iconst(types::I64, *bits as i64);
535 builder.ins().store(MemFlags::trusted(), val, ptr, LIT_VALUE_OFFSET);
536 builder.declare_value_needs_stack_map(ptr);
537 Ok(SsaVal::HeapPtr(ptr))
538 }
539 Literal::LitString(_) => Err(EmitError::NotYetImplemented("LitString".into())),
540 }
541}
542
543fn emit_lit_string(
548 pipeline: &mut CodegenPipeline,
549 builder: &mut FunctionBuilder,
550 vmctx: Value,
551 gc_sig: ir::SigRef,
552 bytes: &[u8],
553 counter: &mut u32,
554) -> Result<SsaVal, EmitError> {
555 let data_name = format!("__litstr_{}", *counter);
557 *counter += 1;
558
559 let data_id = pipeline.module
560 .declare_data(&data_name, Linkage::Local, false, false)
561 .map_err(|e| EmitError::CraneliftError(e.to_string()))?;
562
563 let mut data_desc = DataDescription::new();
564 data_desc.set_align(8); let mut contents = Vec::with_capacity(8 + bytes.len());
566 contents.extend_from_slice(&(bytes.len() as u64).to_le_bytes());
567 contents.extend_from_slice(bytes);
568 data_desc.define(contents.into_boxed_slice());
569
570 pipeline.module
571 .define_data(data_id, &data_desc)
572 .map_err(|e| EmitError::CraneliftError(e.to_string()))?;
573
574 let local_data = pipeline.module.declare_data_in_func(data_id, builder.func);
576 let data_ptr = builder.ins().symbol_value(types::I64, local_data);
577
578 let ptr = emit_alloc_fast_path(builder, vmctx, LIT_TOTAL_SIZE, gc_sig);
580
581 let tag = builder.ins().iconst(types::I8, layout::TAG_LIT as i64);
582 builder.ins().store(MemFlags::trusted(), tag, ptr, 0);
583 let size = builder.ins().iconst(types::I16, LIT_TOTAL_SIZE as i64);
584 builder.ins().store(MemFlags::trusted(), size, ptr, 1);
585 let lit_tag = builder.ins().iconst(types::I8, LIT_TAG_STRING);
586 builder.ins().store(MemFlags::trusted(), lit_tag, ptr, LIT_TAG_OFFSET);
587 builder.ins().store(MemFlags::trusted(), data_ptr, ptr, LIT_VALUE_OFFSET);
588
589 builder.declare_value_needs_stack_map(ptr);
590 Ok(SsaVal::HeapPtr(ptr))
591}
592
593pub(crate) fn ensure_heap_ptr(
594 builder: &mut FunctionBuilder,
595 vmctx: Value,
596 gc_sig: ir::SigRef,
597 val: SsaVal,
598) -> Value {
599 match val {
600 SsaVal::HeapPtr(v) => v,
601 SsaVal::Raw(v, lit_tag) => {
602 let ptr = emit_alloc_fast_path(builder, vmctx, LIT_TOTAL_SIZE, gc_sig);
603 let tag = builder.ins().iconst(types::I8, layout::TAG_LIT as i64);
604 builder.ins().store(MemFlags::trusted(), tag, ptr, 0);
605 let size = builder.ins().iconst(types::I16, LIT_TOTAL_SIZE as i64);
606 builder.ins().store(MemFlags::trusted(), size, ptr, 1);
607 let lit_tag_val = builder.ins().iconst(types::I8, lit_tag);
608 builder.ins().store(MemFlags::trusted(), lit_tag_val, ptr, LIT_TAG_OFFSET);
609 builder.ins().store(MemFlags::trusted(), v, ptr, LIT_VALUE_OFFSET);
610 builder.declare_value_needs_stack_map(ptr);
611 ptr
612 }
613 }
614}