mod expr;
mod fuel;
mod toplevel;
use crate::ast::{FnDef, TopLevel, VerifyKind};
use crate::codegen::{CodegenContext, ProjectOutput};
/// Returns `true` when any statement in the fn body contains an
/// error-propagation (`?`) expression anywhere in its expression tree.
fn body_uses_error_prop(body: &std::sync::Arc<crate::ast::FnBody>) -> bool {
    // `FnBody` has a single `Block` variant, so the let is irrefutable.
    let crate::ast::FnBody::Block(stmts) = body.as_ref();
    stmts.iter().any(|stmt| {
        // Both statement forms carry exactly one expression to inspect.
        let expr = match stmt {
            crate::ast::Stmt::Binding(_, _, e) | crate::ast::Stmt::Expr(e) => e,
        };
        expr_uses_error_prop(expr)
    })
}
/// Recursively checks whether `expr` (or any sub-expression) contains an
/// error-propagation node (`Expr::ErrorProp`, the `?` operator).
///
/// Only the expression shapes listed below are descended into; every other
/// variant (literals, identifiers, …) cannot nest further expressions here
/// and falls through to the `false` catch-all.
fn expr_uses_error_prop(expr: &crate::ast::Spanned<crate::ast::Expr>) -> bool {
    use crate::ast::Expr;
    match &expr.node {
        // The `?` node itself — the thing being searched for.
        Expr::ErrorProp(_) => true,
        // Composite forms: recurse into every child expression.
        Expr::FnCall(f, args) => expr_uses_error_prop(f) || args.iter().any(expr_uses_error_prop),
        Expr::BinOp(_, l, r) => expr_uses_error_prop(l) || expr_uses_error_prop(r),
        Expr::Match { subject, arms, .. } => {
            expr_uses_error_prop(subject) || arms.iter().any(|a| expr_uses_error_prop(&a.body))
        }
        Expr::Constructor(_, Some(arg)) => expr_uses_error_prop(arg),
        Expr::List(elems) | Expr::Tuple(elems) => elems.iter().any(expr_uses_error_prop),
        Expr::RecordCreate { fields, .. } => fields.iter().any(|(_, e)| expr_uses_error_prop(e)),
        Expr::RecordUpdate { base, updates, .. } => {
            expr_uses_error_prop(base) || updates.iter().any(|(_, e)| expr_uses_error_prop(e))
        }
        // Only parsed interpolation segments hold expressions; literal
        // segments are skipped.
        Expr::InterpolatedStr(parts) => parts.iter().any(|p| match p {
            crate::ast::StrPart::Parsed(e) => expr_uses_error_prop(e),
            _ => false,
        }),
        Expr::Attr(obj, _) => expr_uses_error_prop(obj),
        // TCO-rewritten calls keep their arguments in `inner.args`.
        Expr::TailCall(inner) => inner.args.iter().any(expr_uses_error_prop),
        _ => false,
    }
}
/// Transpiles the whole checked project into a single Dafny source file.
///
/// Emission order (which Dafny relies on for names being in scope):
/// 1. generated-file header + proof-trust header + [`DAFNY_PRELUDE`],
/// 2. type definitions (module-local first, then top-level),
/// 3. pure functions — directly, via a fuel-based mutual-recursion group,
///    or as an opaque axiom when no faithful translation exists,
/// 4. the mutual-recursion fuel groups themselves,
/// 5. Oracle-lifted forms of classified effectful functions,
/// 6. `verify`/law blocks.
///
/// Returns a [`ProjectOutput`] containing exactly one `<project>.dfy` file.
pub fn transpile(ctx: &CodegenContext) -> ProjectOutput {
    let mut sections: Vec<String> = vec![
        "// Generated by the Aver → Dafny transpiler".to_string(),
        "// Pure core logic plus Oracle-lifted classified effects\n".to_string(),
        crate::types::checker::proof_trust_header::generate_commented("// ") + "\n",
        DAFNY_PRELUDE.to_string(),
    ];
    // Decide, per fn, how its recursion is encoded. Fns on a *mutual* plan
    // are emitted as a shared fuel group rather than individually.
    let (recursion_plans, _recursion_issues) = crate::codegen::recursion::analyze_plans(ctx);
    use crate::codegen::recursion::RecursionPlan;
    let mutual_planned: std::collections::HashSet<String> = recursion_plans
        .iter()
        .filter(|(_, plan)| {
            matches!(
                plan,
                RecursionPlan::MutualIntCountdown
                    | RecursionPlan::MutualStringPosAdvance { .. }
                    | RecursionPlan::MutualSizeOfRanked { .. }
            )
        })
        .map(|(name, _)| name.clone())
        .collect();
    // Every fn def (top-level and module-local) participating in a mutual
    // plan, grouped below into call-graph components.
    let mutual_fns_all: Vec<&FnDef> = ctx
        .items
        .iter()
        .filter_map(|it| match it {
            TopLevel::FnDef(fd) => Some(fd),
            _ => None,
        })
        .chain(ctx.modules.iter().flat_map(|m| m.fn_defs.iter()))
        .filter(|fd| mutual_planned.contains(&fd.name))
        .collect();
    let mutual_components =
        crate::call_graph::ordered_fn_components(&mutual_fns_all, &ctx.module_prefixes);
    // Per component: either one shared fuel section is emitted, or the
    // whole component degrades to opaque axioms.
    let mut mutual_fuel_sections: Vec<String> = Vec::new();
    let mut fuel_emitted: std::collections::HashSet<String> = std::collections::HashSet::new();
    let mut axiom_fn_names: std::collections::HashSet<String> = std::collections::HashSet::new();
    for component in &mutual_components {
        let scc_fns: Vec<&FnDef> = component.iter().map(|fd| &**fd).collect();
        match fuel::emit_mutual_fuel_group(&scc_fns, ctx, &recursion_plans) {
            Some(code) => {
                mutual_fuel_sections.push(code);
                for fd in &scc_fns {
                    fuel_emitted.insert(fd.name.clone());
                }
            }
            None => {
                for fd in &scc_fns {
                    axiom_fn_names.insert(fd.name.clone());
                }
            }
        }
    }
    // Fns whose bodies are opaque to law proofs (axiomatized or hidden
    // behind a fuel wrapper). Loop-invariant — computed once here instead
    // of being rebuilt for every law in the verify loop below.
    let opaque_fns: std::collections::HashSet<String> =
        axiom_fn_names.union(&fuel_emitted).cloned().collect();
    // Pure-fn dispatcher: fuel-grouped fns were already emitted (empty
    // section), axiom fns get an opaque signature, the rest a real body.
    let emit_pure_fn = |fd: &FnDef| -> String {
        if fuel_emitted.contains(&fd.name) {
            String::new()
        } else if axiom_fn_names.contains(&fd.name) {
            toplevel::emit_fn_def_axiom(fd)
        } else {
            toplevel::emit_fn_def(fd, ctx)
        }
    };
    // Type definitions first so fn signatures can reference them.
    for module in &ctx.modules {
        for td in &module.type_defs {
            if let Some(code) = toplevel::emit_type_def(td) {
                sections.push(code);
            }
        }
    }
    for td in &ctx.type_defs {
        if let Some(code) = toplevel::emit_type_def(td) {
            sections.push(code);
        }
    }
    // A pure fn whose body uses `?` but cannot be lowered to explicit
    // Result plumbing has no faithful Dafny body — axiomatize it instead.
    let needs_axiom_for_error_prop = |fd: &FnDef| -> bool {
        body_uses_error_prop(&fd.body)
            && crate::types::checker::effect_lifting::lower_pure_question_bang_fn(fd)
                .ok()
                .flatten()
                .is_none()
    };
    let emit_pure_or_axiom = |fd: &FnDef| -> String {
        if needs_axiom_for_error_prop(fd) {
            toplevel::emit_fn_def_axiom(fd)
        } else {
            emit_pure_fn(fd)
        }
    };
    // Pure fns: module-local first, then top-level (skipping `main`).
    for module in &ctx.modules {
        for fd in &module.fn_defs {
            if fd.effects.is_empty() {
                sections.push(emit_pure_or_axiom(fd));
            }
        }
    }
    for item in &ctx.items {
        if let TopLevel::FnDef(fd) = item
            && fd.effects.is_empty()
            && fd.name != "main"
        {
            sections.push(emit_pure_or_axiom(fd));
        }
    }
    sections.extend(mutual_fuel_sections);
    // Only effectful fns reachable from a verify block are lifted; the
    // rest are irrelevant to proofs and skipped entirely.
    let reachable = crate::codegen::common::verify_reachable_fn_names(&ctx.items);
    // Shared predicate for the two passes below: effectful, not `main`,
    // free of `?`-propagation, proof-reachable, and every effect in the
    // row is classified (i.e. liftable into an oracle parameter).
    let is_liftable_effectful = |fd: &FnDef| -> bool {
        !fd.effects.is_empty()
            && fd.name != "main"
            && !body_uses_error_prop(&fd.body)
            && reachable.contains(&fd.name)
            && fd
                .effects
                .iter()
                .all(|e| crate::types::checker::effect_classification::is_classified(&e.node))
    };
    // Pass 1: record each liftable fn's effect row so calls *between*
    // lifted fns can thread oracle parameters through.
    let mut helpers: std::collections::HashMap<String, Vec<String>> =
        std::collections::HashMap::new();
    for item in &ctx.items {
        if let TopLevel::FnDef(fd) = item
            && is_liftable_effectful(fd)
        {
            helpers.insert(
                fd.name.clone(),
                fd.effects.iter().map(|e| e.node.clone()).collect(),
            );
        }
    }
    // Pass 2: emit the lifted (oracle-parameterized) form of each fn.
    for item in &ctx.items {
        if let TopLevel::FnDef(fd) = item
            && is_liftable_effectful(fd)
            && let Ok(Some(lifted)) =
                crate::types::checker::effect_lifting::lift_fn_def_with_helpers(fd, &helpers)
        {
            sections.push(toplevel::emit_fn_def(&lifted, ctx));
        }
    }
    // Verify/law blocks. A second/third law on the same fn gets a `_2`,
    // `_3`, … suffix so the generated Dafny names stay unique.
    let mut law_counter: std::collections::HashMap<String, usize> =
        std::collections::HashMap::new();
    for item in &ctx.items {
        if let TopLevel::Verify(vb) = item
            && let VerifyKind::Law(law) = &vb.kind
        {
            let count = law_counter.entry(vb.fn_name.clone()).or_insert(0);
            *count += 1;
            let suffix = if *count > 1 {
                format!("_{}", count)
            } else {
                String::new()
            };
            // Sample cases (if any) become executable law instances.
            if !vb.cases.is_empty()
                && let Some(code) = toplevel::emit_law_samples(vb, law, ctx, &suffix)
            {
                sections.push(code);
            }
            sections.push(toplevel::emit_verify_law(vb, law, ctx, &opaque_fns));
        }
    }
    let content = sections.join("\n");
    let file_name = format!("{}.dfy", ctx.project_name);
    ProjectOutput {
        files: vec![(file_name, content)],
    }
}
/// Dafny prelude prepended to every generated file: `Result`/`Option`
/// datatypes, the `BranchPath` oracle-path type, HTTP record shapes, and
/// list/map/string helpers. Conversion functions without bodies
/// (`IntToString`, `CharToCode`, …) are deliberately opaque stubs — the
/// verifier only needs their signatures, not executable definitions.
const DAFNY_PRELUDE: &str = r#"// --- Prelude: standard types and helpers ---
datatype Result<T, E> = Ok(value: T) | Err(error: E)
datatype Option<T> = None | Some(value: T)
// Oracle v1: BranchPath is the proof-side representation of a position
// in the structural tree of `!`/`?!` groups. Dewey-decimal under the hood
// ("", "0", "2.0", …); constructors mirror the Aver-source BranchPath
// opaque builtin (`.root`, `.child`, `.parse`) so the lifted bodies can
// reference them directly without case-splitting at the call site.
datatype BranchPath = BranchPath(dewey: string)
// Oracle v1: HttpResponse / HttpRequest / Header are built-in Aver
// record types that surface in `Http.*` effect signatures. Lifted
// effectful fns using those effects reference them in oracle / result
// types, so the proof export must know the shape.
datatype Header = Header(name: string, value: string)
datatype HttpResponse = HttpResponse(status: int, body: string, headers: seq<Header>)
datatype HttpRequest = HttpRequest(method_: string, path: string, body: string, headers: seq<Header>)
const BranchPath_Root: BranchPath := BranchPath("")
function BranchPath_child(p: BranchPath, idx: int): BranchPath
requires idx >= 0
{
if |p.dewey| == 0 then BranchPath(IntToString(idx))
else BranchPath(p.dewey + "." + IntToString(idx))
}
function BranchPath_parse(s: string): BranchPath {
BranchPath(s)
}
function ResultWithDefault<T, E>(r: Result<T, E>, d: T): T {
match r
case Ok(v) => v
case Err(_) => d
}
function OptionWithDefault<T>(o: Option<T>, d: T): T {
match o
case Some(v) => v
case None => d
}
function OptionToResult<T, E>(o: Option<T>, err: E): Result<T, E> {
match o
case Some(v) => Result.Ok(v)
case None => Result.Err(err)
}
function ListReverse<T>(xs: seq<T>): seq<T>
decreases |xs|
{
if |xs| == 0 then []
else ListReverse(xs[1..]) + [xs[0]]
}
function ListHead<T>(xs: seq<T>): Option<T> {
if |xs| == 0 then None
else Some(xs[0])
}
function ListTail<T>(xs: seq<T>): seq<T> {
if |xs| == 0 then []
else xs[1..]
}
function ListTake<T>(xs: seq<T>, n: int): seq<T> {
if n <= 0 then []
else if n >= |xs| then xs
else xs[..n]
}
function ListDrop<T>(xs: seq<T>, n: int): seq<T> {
if n <= 0 then xs
else if n >= |xs| then []
else xs[n..]
}
function MapGet<K, V>(m: map<K, V>, k: K): Option<V> {
if k in m then Some(m[k])
else None
}
// --- String/Char helpers (opaque stubs for verification) ---
function IntToString(n: int): string
function IntFromString(s: string): Result<int, string>
function FloatToString(r: real): string
function FloatFromString(s: string): Result<real, string>
function StringCharAt(s: string, i: int): Option<string> {
if 0 <= i < |s| then Option.Some([s[i]]) else Option.None
}
function StringChars(s: string): seq<string> {
seq(|s|, (i: int) requires 0 <= i < |s| => [s[i]])
}
function StringJoin(sep: string, parts: seq<string>): string
decreases |parts|
{
if |parts| == 0 then ""
else if |parts| == 1 then parts[0]
else parts[0] + sep + StringJoin(sep, parts[1..])
}
function CharToCode(c: string): int
function CharFromCode(n: int): Option<string>
function MapEntries<K, V>(m: map<K, V>): seq<(K, V)>
function MapFromList<K, V>(entries: seq<(K, V)>): map<K, V>
decreases |entries|
{
if |entries| == 0 then map[]
else MapFromList(entries[..|entries|-1])[entries[|entries|-1].0 := entries[|entries|-1].1]
}
function ByteToHex(b: int): Result<string, string>
function ByteFromHex(s: string): Result<int, string>
function ToString<T>(v: T): string
"#;
#[cfg(test)]
mod tests {
    use super::*;
    use crate::codegen::build_context;
    use crate::source::parse_source;
    use crate::tco;
    use crate::types::checker::run_type_check_full;
    use std::collections::HashSet;
    /// Parses + typechecks `src` and builds a codegen context for it,
    /// panicking (test failure) on any parse or type error.
    fn ctx_from_source(src: &str, project_name: &str) -> CodegenContext {
        let mut items = parse_source(src).expect("parse");
        tco::transform_program(&mut items);
        let tc = run_type_check_full(&items, None);
        assert!(
            tc.errors.is_empty(),
            "source should typecheck: {:?}",
            tc.errors
        );
        build_context(items, &tc, HashSet::new(), project_name.to_string(), vec![])
    }
    /// Pulls the single `.dfy` file's content out of a transpile output.
    fn dafny_output(out: &ProjectOutput) -> &str {
        out.files
            .iter()
            .find_map(|(name, content)| name.ends_with(".dfy").then_some(content.as_str()))
            .expect("expected a .dfy file")
    }
    // Every output, even for a trivial pure fn, must carry the BranchPath
    // datatype and its constructor helpers from the prelude.
    #[test]
    fn prelude_carries_branch_path_datatype_and_helpers() {
        let src = "module M\n intent = \"t\"\n\nfn pure(x: Int) -> Int\n x\n";
        let ctx = ctx_from_source(src, "m");
        let out = transpile(&ctx);
        let dfy = dafny_output(&out);
        assert!(dfy.contains("datatype BranchPath"));
        assert!(dfy.contains("const BranchPath_Root"));
        assert!(dfy.contains("function BranchPath_child"));
        assert!(dfy.contains("function BranchPath_parse"));
    }
    // A verify-reachable fn with a classified generative effect
    // (Random.int) must be emitted in lifted form: a `path` parameter
    // plus an oracle parameter threaded into the effect call.
    #[test]
    fn effectful_generative_fn_emits_lifted_form() {
        let src = "module M\n\
        \x20 intent = \"t\"\n\
        \n\
        fn pickOne() -> Int\n\
        \x20 ! [Random.int]\n\
        \x20 Random.int(1, 6)\n\
        verify pickOne\n\
        \x20 pickOne() => 1\n";
        let ctx = ctx_from_source(src, "m");
        let out = transpile(&ctx);
        let dfy = dafny_output(&out);
        assert!(
            dfy.contains("function pickOne(path: BranchPath"),
            "missing path param:\n{}",
            dfy
        );
        assert!(
            dfy.contains("rnd_Random_int"),
            "missing oracle param:\n{}",
            dfy
        );
        assert!(
            dfy.contains("rnd_Random_int(path, 0, 1, 6)"),
            "missing oracle call:\n{}",
            dfy
        );
    }
    // Pure fns keep their plain (un-lifted) signature.
    #[test]
    fn pure_functions_still_emit_as_before() {
        let src = "module M\n intent = \"t\"\n\nfn double(x: Int) -> Int\n x + x\n";
        let ctx = ctx_from_source(src, "m");
        let out = transpile(&ctx);
        let dfy = dafny_output(&out);
        assert!(dfy.contains("function double(x: int): int"));
        assert!(!dfy.contains("function double(path: BranchPath"));
    }
    // An unclassified (stateful) effect like Env.set cannot be lifted, so
    // the fn must be absent from the proof export altogether.
    #[test]
    fn effectful_fn_with_unclassified_effect_is_still_skipped() {
        let src = "module M\n\
        \x20 intent = \"t\"\n\
        \n\
        fn configure(key: String, value: String) -> Unit\n\
        \x20 ! [Env.set]\n\
        \x20 Env.set(key, value)\n";
        let ctx = ctx_from_source(src, "m");
        let out = transpile(&ctx);
        let dfy = dafny_output(&out);
        assert!(
            !dfy.contains("function configure"),
            "stateful effectful fn should be skipped; got:\n{}",
            dfy
        );
    }
    // A `!`-grouped tuple of two effect calls must address each component
    // with a distinct child branch path (indices 0 and 1).
    #[test]
    fn bang_product_emits_lifted_tuple_with_child_paths() {
        let src = "module M\n\
        \x20 intent = \"t\"\n\
        \n\
        fn pair() -> (Int, Int)\n\
        \x20 ! [Random.int]\n\
        \x20 (Random.int(1, 6), Random.int(1, 6))!\n\
        verify pair\n\
        \x20 pair() => (1, 1)\n";
        let ctx = ctx_from_source(src, "m");
        let out = transpile(&ctx);
        let dfy = dafny_output(&out);
        assert!(
            dfy.contains("BranchPath_child(path, 0)"),
            "branch 0 path missing:\n{}",
            dfy
        );
        assert!(
            dfy.contains("BranchPath_child(path, 1)"),
            "branch 1 path missing:\n{}",
            dfy
        );
    }
    // Source-level `BranchPath.child` / `BranchPath.Root` must render as
    // the prelude's underscore-separated Dafny names.
    #[test]
    fn branch_path_call_renders_with_underscore_names() {
        let src = "module M\n\
        \x20 intent = \"t\"\n\
        \n\
        fn mkPath() -> BranchPath\n\
        \x20 BranchPath.child(BranchPath.Root, 2)\n";
        let ctx = ctx_from_source(src, "m");
        let out = transpile(&ctx);
        let dfy = dafny_output(&out);
        assert!(
            dfy.contains("BranchPath_child(BranchPath_Root, 2)"),
            "expected underscore-form call; got:\n{}",
            dfy
        );
    }
}