use super::*;
/// Emit WASM for `&%{}` (record construction).
///
/// Allocates a `(2 + field_count) * 8`-byte cell in linear memory laid out as
/// `[field_count: f64][struct_tag_id: f64][field values...]`, then leaves the
/// cell address on the stack converted to f64.
pub(super) fn emit_record_new(ctx: &mut WasmGenCtx, args: &[Calcit]) -> Result<(), String> {
  if args.is_empty() {
    return Err("&%{} requires at least struct_ref argument".into());
  }
  let struct_def = resolve_struct_ref(&args[0])?;
  let n_fields = struct_def.fields.len();
  let pairs = &args[1..];
  if pairs.len() != n_fields * 2 {
    return Err(format!(
      "&%{{}}: expected {} tag-value pairs ({} args), got {}",
      n_fields,
      n_fields * 2,
      pairs.len()
    ));
  }
  let struct_tag_id = *ctx
    .tag_index
    .get(&struct_def.name.to_string())
    .ok_or_else(|| format!("unknown struct tag: {}", struct_def.name))?;
  let cell = ctx.alloc_local_typed(ValType::I32);
  emit_bump_alloc(ctx, ((2 + n_fields) * 8) as i32, cell, "record");
  // header slot 0: field count
  ctx.emit(Instruction::LocalGet(cell));
  ctx.emit(f64_const(n_fields as f64));
  ctx.emit(Instruction::F64Store(mem_arg_f64(0)));
  // header slot 1: struct tag id
  ctx.emit(Instruction::LocalGet(cell));
  ctx.emit(f64_const(struct_tag_id as f64));
  ctx.emit(Instruction::F64Store(mem_arg_f64(8)));
  // field values start at slot 2; NOTE(review): the tag half of each pair is
  // taken as positional and never evaluated or re-checked here — confirm
  // preprocessing guarantees sorted field order.
  for (slot, pair) in pairs.chunks_exact(2).enumerate() {
    ctx.emit(Instruction::LocalGet(cell));
    emit_expr(ctx, &pair[1])?;
    ctx.emit(Instruction::F64Store(mem_arg_f64(((2 + slot) * 8) as u64)));
  }
  ctx.emit(Instruction::LocalGet(cell));
  ctx.emit(Instruction::F64ConvertI32U);
  Ok(())
}
/// Resolve a struct reference node into a concrete `CalcitStruct`.
///
/// Priority order: an inline struct value; the runtime-ready table; the
/// compiled-def table (direct struct value in either form, then raw
/// `defrecord` parsing of either form); finally the raw source code of the
/// definition.
pub(super) fn resolve_struct_ref(node: &Calcit) -> Result<CalcitStruct, String> {
  match node {
    Calcit::Struct(s) => Ok(s.clone()),
    Calcit::Import(CalcitImport { ns, def, .. }) => {
      if let Some(Calcit::Struct(s)) = program::lookup_runtime_ready(ns, def) {
        return Ok(s);
      }
      if let Some(compiled) = program::lookup_compiled_def(ns, def) {
        // prefer direct struct values over re-parsing raw forms
        for form in [&compiled.codegen_form, &compiled.preprocessed_code] {
          if let Calcit::Struct(s) = form {
            return Ok(s.clone());
          }
        }
        for form in [&compiled.codegen_form, &compiled.preprocessed_code] {
          if let Some(parsed) = try_parse_defrecord_form(form) {
            return Ok(parsed);
          }
        }
        return Err(format!("&%{{}}: compiled def {ns}/{def} is not a struct"));
      }
      match program::lookup_def_code(ns, def).and_then(|source| try_parse_defrecord_form(&source)) {
        Some(parsed) => Ok(parsed),
        None => Err(format!("&%{{}}: cannot resolve struct reference {ns}/{def}")),
      }
    }
    other => Err(format!("&%{{}}: expected struct reference, got: {other}")),
  }
}
/// Best-effort parse of a raw `(defrecord Name :field ...)` form into a
/// `CalcitStruct`.
///
/// Returns `None` when the form does not look like a defrecord. Field tags
/// are collected (non-tag items are silently skipped) and sorted; types,
/// generics, and impls are left empty.
pub(super) fn try_parse_defrecord_form(code: &Calcit) -> Option<CalcitStruct> {
  let Calcit::List(xs) = code else { return None };
  if xs.len() < 2 {
    return None;
  }
  // head must be `defrecord`, possibly namespaced (e.g. `calcit.core/defrecord`)
  let head_ok = matches!(
    &xs[0],
    Calcit::Symbol { sym, .. } if sym.as_ref() == "defrecord" || sym.as_ref().ends_with("/defrecord")
  );
  if !head_ok {
    return None;
  }
  // second item names the record: accept a tag, a symbol (namespace stripped),
  // or an import reference
  let name = match &xs[1] {
    Calcit::Tag(t) => t.clone(),
    Calcit::Symbol { sym, .. } => {
      let short = sym.as_ref().rsplit('/').next().unwrap_or(sym.as_ref());
      cirru_edn::EdnTag::from(short)
    }
    Calcit::Import(CalcitImport { def, .. }) => cirru_edn::EdnTag::from(def.as_ref()),
    _ => return None,
  };
  // remaining tag items become the sorted field list
  let mut fields = xs
    .iter()
    .skip(2)
    .filter_map(|item| match item {
      Calcit::Tag(t) => Some(t.clone()),
      _ => None,
    })
    .collect::<Vec<cirru_edn::EdnTag>>();
  fields.sort();
  Some(CalcitStruct {
    name,
    fields: std::sync::Arc::new(fields),
    field_types: std::sync::Arc::new(vec![]),
    generics: std::sync::Arc::new(vec![]),
    impls: vec![],
  })
}
/// Emit WASM for `&record:nth` (record, index) → field value.
///
/// Record layout: slot 0 = field count, slot 1 = struct tag, fields from
/// slot 2. A literal numeric index is folded into the load's immediate
/// offset; otherwise the slot address is computed at runtime. The index is
/// not bounds-checked.
pub(super) fn emit_record_nth(ctx: &mut WasmGenCtx, args: &[Calcit]) -> Result<(), String> {
  if args.len() < 2 {
    return Err("&record:nth requires at least 2 args (record, index)".into());
  }
  if let Calcit::Number(n) = &args[1] {
    // fast path: constant index becomes a static load offset
    emit_expr(ctx, &args[0])?;
    ctx.emit(Instruction::I32TruncF64U);
    ctx.emit(Instruction::F64Load(mem_arg_f64(((2 + *n as usize) * 8) as u64)));
  } else {
    // general path: address = base + (index + 2) * 8, computed at runtime
    emit_expr(ctx, &args[0])?;
    ctx.emit(Instruction::I32TruncF64U);
    let base = ctx.alloc_local_typed(ValType::I32);
    ctx.emit(Instruction::LocalSet(base));
    emit_expr(ctx, &args[1])?;
    ctx.emit(Instruction::I32TruncF64U);
    ctx.emit(Instruction::I32Const(2));
    ctx.emit(Instruction::I32Add);
    ctx.emit(Instruction::I32Const(8));
    ctx.emit(Instruction::I32Mul);
    ctx.emit(Instruction::LocalGet(base));
    ctx.emit(Instruction::I32Add);
    ctx.emit(Instruction::F64Load(mem_arg_f64(0)));
  }
  Ok(())
}
/// Placeholder for `&record:get`: validates arity, then reports that
/// tag-based field lookup is not implemented for the WASM target
/// (preprocessing is expected to rewrite it into `&record:nth`).
pub(super) fn emit_record_get(_ctx: &mut WasmGenCtx, args: &[Calcit]) -> Result<(), String> {
  match args.len() {
    2 => Err("&record:get not yet supported in WASM (use &record:nth via preprocessing optimization)".into()),
    _ => Err("&record:get requires 2 args (record, tag)".into()),
  }
}
/// Emit WASM for `&record:count`: loads the field count stored in the
/// record's first header slot (offset 0). Extra args beyond the record are
/// ignored, matching the lenient `is_empty` arity check.
pub(super) fn emit_record_count(ctx: &mut WasmGenCtx, args: &[Calcit]) -> Result<(), String> {
  let Some(record) = args.first() else {
    return Err("&record:count requires 1 arg (record)".into());
  };
  emit_expr(ctx, record)?;
  ctx.emit(Instruction::I32TruncF64U);
  ctx.emit(Instruction::F64Load(mem_arg_f64(0)));
  Ok(())
}
/// Emit WASM for `&record:field-tag` (record, index): pushes the tag id (as
/// f64) of the index-th field of the given record, or 0.0 when the record's
/// struct tag or the field index is unknown.
///
/// There is no field table in linear memory, so this compiles a static
/// dispatch: an if/else chain over every struct registered in
/// `ctx.record_field_tags` (sorted by struct tag for deterministic output),
/// and inside each arm a second if/else chain over that struct's field
/// positions.
pub(super) fn emit_record_field_tag(ctx: &mut WasmGenCtx, args: &[Calcit]) -> Result<(), String> {
  if args.len() != 2 {
    return Err("&record:field-tag requires 2 args (record, index)".into());
  }
  // record argument → i32 pointer into linear memory
  emit_expr(ctx, &args[0])?;
  ctx.emit(Instruction::I32TruncF64U);
  let ptr_local = ctx.alloc_local_typed(ValType::I32);
  ctx.emit(Instruction::LocalSet(ptr_local));
  // index argument → i32
  emit_expr(ctx, &args[1])?;
  ctx.emit(Instruction::I32TruncF64U);
  let idx_local = ctx.alloc_local_typed(ValType::I32);
  ctx.emit(Instruction::LocalSet(idx_local));
  // runtime struct tag lives in the record's second header slot (offset 8)
  ctx.emit(Instruction::LocalGet(ptr_local));
  ctx.emit(Instruction::F64Load(mem_arg_f64(8)));
  let struct_tag_local = ctx.alloc_local();
  ctx.emit(Instruction::LocalSet(struct_tag_local));
  // snapshot and sort the struct table so generated code is deterministic
  let mut struct_entries = ctx
    .record_field_tags
    .iter()
    .map(|(tag, fields)| (*tag, fields.clone()))
    .collect::<Vec<_>>();
  struct_entries.sort_by_key(|(tag, _)| *tag);
  if struct_entries.is_empty() {
    // no structs known at all: constant 0.0 fallback
    ctx.emit(f64_const(0.0));
    return Ok(());
  }
  // NOTE(review): unlike emit_record_matches, block_depth is not adjusted
  // around these Ifs; looks harmless since no emit_expr runs inside the
  // branches — confirm against how block_depth is consumed elsewhere.
  for (struct_tag_id, field_tag_ids) in &struct_entries {
    // outer dispatch: does the runtime struct tag match this struct?
    ctx.emit(Instruction::LocalGet(struct_tag_local));
    ctx.emit(f64_const(*struct_tag_id as f64));
    ctx.emit(Instruction::F64Eq);
    ctx.emit(Instruction::If(wasm_encoder::BlockType::Result(ValType::F64)));
    if field_tag_ids.is_empty() {
      // struct with no fields: any index yields 0.0
      ctx.emit(f64_const(0.0));
    } else {
      // inner dispatch: compare the index against each field position;
      // each comparison opens an If whose Else continues the chain
      for (field_idx, field_tag_id) in field_tag_ids.iter().enumerate() {
        ctx.emit(Instruction::LocalGet(idx_local));
        ctx.emit(Instruction::I32Const(field_idx as i32));
        ctx.emit(Instruction::I32Eq);
        ctx.emit(Instruction::If(wasm_encoder::BlockType::Result(ValType::F64)));
        ctx.emit(f64_const(*field_tag_id as f64));
        ctx.emit(Instruction::Else);
      }
      // index out of range for this struct
      ctx.emit(f64_const(0.0));
      // close the inner chain (one End per opened If)
      for _ in 0..field_tag_ids.len() {
        ctx.emit(Instruction::End);
      }
    }
    ctx.emit(Instruction::Else);
  }
  // struct tag matched none of the known structs
  ctx.emit(f64_const(0.0));
  // close the outer chain (one End per opened If)
  for _ in 0..struct_entries.len() {
    ctx.emit(Instruction::End);
  }
  Ok(())
}
/// Emit WASM for `&record:matches?`: compares the struct-tag header slots
/// (offset 8) of two records and pushes 1.0 / 0.0 as an f64 boolean.
pub(super) fn emit_record_matches(ctx: &mut WasmGenCtx, args: &[Calcit]) -> Result<(), String> {
  if args.len() != 2 {
    return Err("&record:matches? expects 2 args".into());
  }
  // load the struct tag of each operand in order
  for operand in &args[..2] {
    emit_expr(ctx, operand)?;
    ctx.emit(Instruction::I32TruncF64U);
    ctx.emit(Instruction::F64Load(mem_arg_f64(8)));
  }
  ctx.emit(Instruction::F64Eq);
  ctx.emit(Instruction::If(wasm_encoder::BlockType::Result(ValType::F64)));
  ctx.block_depth += 1;
  ctx.emit(f64_const(1.0));
  ctx.emit(Instruction::Else);
  ctx.emit(f64_const(0.0));
  ctx.block_depth -= 1;
  ctx.emit(Instruction::End);
  Ok(())
}
/// Emit WASM for `::` (tuple construction).
///
/// Layout mirrors records: `[payload_count: f64][tag_id: f64][payload...]`;
/// the cell address is left on the stack converted to f64.
pub(super) fn emit_tuple_new(ctx: &mut WasmGenCtx, args: &[Calcit]) -> Result<(), String> {
  let Some(head) = args.first() else {
    return Err(":: requires at least a tag argument".into());
  };
  let tag_id = match head {
    Calcit::Tag(t) => {
      let tag_str = t.to_string();
      *ctx
        .tag_index
        .get(&tag_str)
        .ok_or_else(|| format!("unknown tag in tuple constructor: {tag_str}"))?
    }
    other => return Err(format!("::: expected tag as first arg, got: {other}")),
  };
  let payload = &args[1..];
  let cell = ctx.alloc_local_typed(ValType::I32);
  emit_bump_alloc(ctx, ((2 + payload.len()) * 8) as i32, cell, "tuple");
  // header slot 0: payload length
  ctx.emit(Instruction::LocalGet(cell));
  ctx.emit(f64_const(payload.len() as f64));
  ctx.emit(Instruction::F64Store(mem_arg_f64(0)));
  // header slot 1: tag id
  ctx.emit(Instruction::LocalGet(cell));
  ctx.emit(f64_const(tag_id as f64));
  ctx.emit(Instruction::F64Store(mem_arg_f64(8)));
  // payload values start at slot 2
  for (slot, val) in payload.iter().enumerate() {
    ctx.emit(Instruction::LocalGet(cell));
    emit_expr(ctx, val)?;
    ctx.emit(Instruction::F64Store(mem_arg_f64(((2 + slot) * 8) as u64)));
  }
  ctx.emit(Instruction::LocalGet(cell));
  ctx.emit(Instruction::F64ConvertI32U);
  Ok(())
}
/// Emit WASM for `&tuple:nth` (tuple, index).
///
/// Index 0 addresses the tag slot (byte offset 8) and index i > 0 addresses
/// payload slot i, hence the `(index + 1) * 8` addressing. The index is not
/// bounds-checked.
pub(super) fn emit_tuple_nth(ctx: &mut WasmGenCtx, args: &[Calcit]) -> Result<(), String> {
  if args.len() != 2 {
    return Err("&tuple:nth requires 2 args (tuple, index)".into());
  }
  // tuple argument → i32 pointer held in a local
  let base = emit_ptr_to_i32(ctx, &args[0])?;
  // index argument → i32 local
  let idx = ctx.alloc_local_typed(ValType::I32);
  emit_expr(ctx, &args[1])?;
  ctx.emit(Instruction::I32TruncF64U);
  ctx.emit(Instruction::LocalSet(idx));
  // address = base + (idx + 1) * 8
  ctx.emit(Instruction::LocalGet(idx));
  ctx.emit(Instruction::I32Const(1));
  ctx.emit(Instruction::I32Add);
  ctx.emit(Instruction::I32Const(8));
  ctx.emit(Instruction::I32Mul);
  ctx.emit(Instruction::LocalGet(base));
  ctx.emit(Instruction::I32Add);
  ctx.emit(Instruction::F64Load(mem_arg_f64(0)));
  Ok(())
}
/// Emit WASM for `&tuple:count`: loads the payload length from the tuple's
/// first header slot (offset 0).
pub(super) fn emit_tuple_count(ctx: &mut WasmGenCtx, args: &[Calcit]) -> Result<(), String> {
  match args {
    [tuple] => {
      emit_expr(ctx, tuple)?;
      ctx.emit(Instruction::I32TruncF64U);
      ctx.emit(Instruction::F64Load(mem_arg_f64(0)));
      Ok(())
    }
    _ => Err("&tuple:count expects 1 arg".into()),
  }
}
/// Emit WASM for `&tuple:assoc` (tuple, index, value): allocates a fresh
/// tuple cell, copies all `count + 2` f64 slots (header + payload) from the
/// source, then overwrites slot `index + 1` with the new value (index 0 =
/// tag slot, matching `&tuple:nth` addressing). Leaves the new cell address
/// on the stack as f64. The index is not bounds-checked.
pub(super) fn emit_tuple_assoc(ctx: &mut WasmGenCtx, args: &[Calcit]) -> Result<(), String> {
  if args.len() != 3 {
    return Err("&tuple:assoc expects 3 args".into());
  }
  // source tuple → i32 pointer held in a local
  let src = emit_ptr_to_i32(ctx, &args[0])?;
  // payload count read from the source header slot 0
  let count = emit_load_count_i32(ctx, src);
  // index argument → i32 local
  let idx = ctx.alloc_local_typed(ValType::I32);
  emit_expr(ctx, &args[1])?;
  ctx.emit(Instruction::I32TruncF64U);
  ctx.emit(Instruction::LocalSet(idx));
  // replacement value kept as an f64 local
  let val = ctx.alloc_local();
  emit_expr(ctx, &args[2])?;
  ctx.emit(Instruction::LocalSet(val));
  // total_slots = count + 2 (payload plus the two header slots)
  let total_slots = ctx.alloc_local_typed(ValType::I32);
  ctx.emit(Instruction::LocalGet(count));
  ctx.emit(Instruction::I32Const(2));
  ctx.emit(Instruction::I32Add);
  ctx.emit(Instruction::LocalSet(total_slots));
  // byte size = total_slots * 8
  let size = ctx.alloc_local_typed(ValType::I32);
  ctx.emit(Instruction::LocalGet(total_slots));
  ctx.emit(Instruction::I32Const(8));
  ctx.emit(Instruction::I32Mul);
  ctx.emit(Instruction::LocalSet(size));
  // allocate destination cell and bulk-copy every slot from source
  let dst = ctx.alloc_local_typed(ValType::I32);
  emit_bump_alloc_dynamic(ctx, size, dst, "tuple");
  let dst_base = emit_addr_offset(ctx, dst, 0);
  let src_base = emit_addr_offset(ctx, src, 0);
  emit_copy_f64_loop(ctx, dst_base, src_base, total_slots);
  // overwrite slot (idx + 1) in the copy with the new value
  ctx.emit(Instruction::LocalGet(dst));
  ctx.emit(Instruction::LocalGet(idx));
  ctx.emit(Instruction::I32Const(1));
  ctx.emit(Instruction::I32Add);
  ctx.emit(Instruction::I32Const(8));
  ctx.emit(Instruction::I32Mul);
  ctx.emit(Instruction::I32Add);
  ctx.emit(Instruction::LocalGet(val));
  ctx.emit(Instruction::F64Store(mem_arg_f64(0)));
  // result: destination pointer pushed back as f64
  ctx.emit(Instruction::LocalGet(dst));
  ctx.emit(Instruction::F64ConvertI32U);
  Ok(())
}