mod builtins;
mod expr;
mod law_auto;
mod pattern;
pub(crate) mod recurrence;
mod shared;
mod toplevel;
mod types;
use std::collections::{HashMap, HashSet};
use crate::ast::{Expr, FnDef, Spanned, TopLevel, TypeDef, VerifyKind};
use crate::codegen::{CodegenContext, ProjectOutput};
/// How verification obligations are discharged in the emitted Lean code.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum VerifyEmitMode {
/// Close each obligation with Lean's `native_decide` tactic.
NativeDecide,
/// Emit `sorry` placeholders instead of proofs.
Sorry,
/// Emit theorem statements as skeletons for manual proof work.
TheoremSkeleton,
}
pub use crate::codegen::recursion::{ProofModeIssue, RecursionPlan};
/// Banner emitted at the top of every generated Lean file: provenance
/// comment plus a linter option silencing unused-variable warnings.
const LEAN_PRELUDE_HEADER: &str = r#"-- Generated by the Aver → Lean 4 transpiler
-- Pure core logic plus Oracle-lifted classified effects
set_option linter.unusedVariables false
-- Prelude: helper definitions for Aver builtins"#;
/// Lean prelude fragment: `Int → Float` coercion plus the `AverFloat`
/// namespace (saturating Float→Int conversions matching the runtime's
/// `f64 as i64` semantics, with `native_decide` smoke checks).
const LEAN_PRELUDE_FLOAT_COE: &str = r#"instance : Coe Int Float := ⟨fun n => Float.ofInt n⟩
def Float.fromInt (n : Int) : Float := Float.ofInt n
-- Aver's Float-to-Int operations match the runtime semantics
-- (`f64::floor() as i64` in VM, Rust codegen, WASM — all three use the
-- same IEEE 754 floor/round/ceil followed by Rust's saturating
-- `f64 as i64` cast):
-- * finite values within [i64::MIN, i64::MAX]: truncate toward zero
-- * finite > i64::MAX: saturate to i64::MAX
-- * finite < i64::MIN: saturate to i64::MIN
-- * +Inf: saturate to i64::MAX
-- * -Inf: saturate to i64::MIN
-- * NaN: 0 (Rust 1.45+ defined behavior)
--
-- Lean's `Float.floor : Float → Float` doesn't directly satisfy Aver's
-- `Float.floor : Float → Int`, so we synthesize via the saturating
-- `Float.toUInt64` (returns 0 for NaN/negative) with sign handling and
-- explicit bounds. Per-case correctness is asserted by `native_decide`
-- examples below; total semantic agreement with `f64 as i64` would
-- need a formal IEEE spec in Lean, which is out of scope.
--
-- Asymmetry with the Dafny backend: Lean has IEEE 754 `Float` natively
-- (`double` at runtime), so we use it. Dafny only offers mathematical
-- `real` (Cauchy-style, no NaN/Inf/overflow), which is a fundamental
-- type mismatch with Aver's IEEE Float — Dafny operations stay opaque
-- (`function FloatPi(): real` etc.) rather than synthesizing IEEE on
-- top of `bv64`, which would mean implementing the entire IEEE
-- arithmetic in Dafny by hand.
namespace AverFloat
def toInt (x : Float) : Int :=
if x.isNaN then 0
-- 2^63 is exactly representable in f64; values ≥ that saturate up.
else if x ≥ 9223372036854775808.0 then 9223372036854775807
-- -2^63 is exactly representable; values strictly below saturate down.
else if x < -9223372036854775808.0 then -9223372036854775808
else if x ≥ 0.0 then Int.ofNat x.toUInt64.toNat
else -(Int.ofNat (-x).toUInt64.toNat)
def floor (x : Float) : Int := toInt x.floor
def ceil (x : Float) : Int := toInt x.ceil
def round (x : Float) : Int := toInt x.round
def pow (x y : Float) : Float := x ^ y
-- Edge-case smoke checks: each `example` is discharged by reduction,
-- so any drift from these documented values fails Lake build.
example : AverFloat.toInt 0.0 = 0 := by native_decide
example : AverFloat.toInt 3.7 = 3 := by native_decide
example : AverFloat.toInt (-3.7) = -3 := by native_decide
example : AverFloat.toInt (1.0 / 0.0) = 9223372036854775807 := by native_decide
example : AverFloat.toInt (-1.0 / 0.0) = -9223372036854775808 := by native_decide
example : AverFloat.toInt (0.0 / 0.0) = 0 := by native_decide
example : AverFloat.floor 3.7 = 3 := by native_decide
example : AverFloat.floor (-3.7) = -4 := by native_decide
example : AverFloat.ceil 3.2 = 4 := by native_decide
example : AverFloat.ceil (-3.2) = -3 := by native_decide
-- Rounding mode (half-away-from-zero, matching Rust's `f64::round`):
example : AverFloat.round 0.5 = 1 := by native_decide
example : AverFloat.round (-0.5) = -1 := by native_decide
example : AverFloat.round 2.5 = 3 := by native_decide
example : AverFloat.round (-2.5) = -3 := by native_decide
end AverFloat"#;
/// Lean prelude fragment: a `DecidableEq Float` instance implemented via
/// an unsafe `==` comparison behind `implemented_by` on an opaque decl.
const LEAN_PRELUDE_FLOAT_DEC_EQ: &str = r#"private unsafe def Float.unsafeDecEq (a b : Float) : Decidable (a = b) :=
if a == b then isTrue (unsafeCast ()) else isFalse (unsafeCast ())
@[implemented_by Float.unsafeDecEq]
private opaque Float.compDecEq (a b : Float) : Decidable (a = b)
instance : DecidableEq Float := Float.compDecEq"#;
/// Lean prelude fragment: structural `DecidableEq` instance for `Except`.
const LEAN_PRELUDE_EXCEPT_DEC_EQ: &str = r#"instance [DecidableEq ε] [DecidableEq α] : DecidableEq (Except ε α)
| .ok a, .ok b =>
if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
| .error a, .error b =>
if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
| .ok _, .error _ => isFalse (by intro h; cases h)
| .error _, .ok _ => isFalse (by intro h; cases h)"#;
/// Lean prelude fragment: `Except.withDefault`, unwrapping `.ok` or
/// falling back to a default value on `.error`.
const LEAN_PRELUDE_EXCEPT_NS: &str = r#"namespace Except
def withDefault (r : Except ε α) (d : α) : α :=
match r with
| .ok v => v
| .error _ => d
end Except"#;
/// Lean prelude fragment: `Option.toExcept`, converting `none` into a
/// caller-supplied error value.
const LEAN_PRELUDE_OPTION_TO_EXCEPT: &str = r#"def Option.toExcept (o : Option α) (e : ε) : Except ε α :=
match o with
| some v => .ok v
| none => .error e"#;
/// Lean prelude fragment: lets `+` concatenate strings via `HAdd`.
const LEAN_PRELUDE_STRING_HADD: &str = r#"instance : HAdd String String String := ⟨String.append⟩"#;
/// Lean prelude fragment: `BranchPath`, a Dewey-decimal-style string path
/// (`"1.2.3"`) with root, child-extension, and parse operations.
const LEAN_PRELUDE_BRANCH_PATH: &str = r#"structure BranchPath where
dewey : String
deriving Repr, BEq, DecidableEq
def BranchPath.Root : BranchPath := { dewey := "" }
def BranchPath.child (p : BranchPath) (idx : Int) : BranchPath :=
if p.dewey.isEmpty then { dewey := toString idx }
else { dewey := p.dewey ++ "." ++ toString idx }
def BranchPath.parse (s : String) : BranchPath := { dewey := s }"#;
/// Lean prelude fragment: fuel measure for string-position recursion,
/// proportional to remaining characters times a rank budget.
const LEAN_PRELUDE_PROOF_FUEL: &str = r#"def averStringPosFuel (s : String) (pos : Int) (rankBudget : Nat) : Nat :=
(((s.data.length) - pos.toNat) + 1) * rankBudget"#;
/// Lean prelude fragment: `AverMeasure` size functions for `List`,
/// `Option`, and `Except`, used as termination measures.
const LEAN_PRELUDE_AVER_MEASURE: &str = r#"namespace AverMeasure
def list (elemMeasure : α → Nat) : List α → Nat
| [] => 1
| x :: xs => elemMeasure x + list elemMeasure xs + 1
def option (elemMeasure : α → Nat) : Option α → Nat
| none => 1
| some x => elemMeasure x + 1
def except (errMeasure : ε → Nat) (okMeasure : α → Nat) : Except ε α → Nat
| .error e => errMeasure e + 1
| .ok v => okMeasure v + 1
end AverMeasure"#;
/// Lean prelude fragment: `AverMap`, an association-list map with
/// get/set/has/remove/keys/values/entries/len/fromList operations.
const AVER_MAP_PRELUDE_BASE: &str = r#"namespace AverMap
def empty : List (α × β) := []
def get [DecidableEq α] (m : List (α × β)) (k : α) : Option β :=
match m with
| [] => none
| (k', v) :: rest => if k = k' then some v else AverMap.get rest k
def set [DecidableEq α] (m : List (α × β)) (k : α) (v : β) : List (α × β) :=
let rec go : List (α × β) → List (α × β)
| [] => [(k, v)]
| (k', v') :: rest => if k = k' then (k, v) :: rest else (k', v') :: go rest
go m
def has [DecidableEq α] (m : List (α × β)) (k : α) : Bool :=
m.any (fun p => decide (k = p.1))
def remove [DecidableEq α] (m : List (α × β)) (k : α) : List (α × β) :=
m.filter (fun p => !(decide (k = p.1)))
def keys (m : List (α × β)) : List α := m.map Prod.fst
def values (m : List (α × β)) : List β := m.map Prod.snd
def entries (m : List (α × β)) : List (α × β) := m
def len (m : List (α × β)) : Nat := m.length
def fromList (entries : List (α × β)) : List (α × β) := entries"#;
/// Lean prelude fragment: theorem `has_set_self` — after `set m k v`,
/// `has … k` is `true` (proved by induction on the underlying list).
const AVER_MAP_PRELUDE_HAS_SET_SELF: &str = r#"private theorem any_set_go_self [DecidableEq α] (k : α) (v : β) :
∀ (m : List (α × β)), List.any (AverMap.set.go k v m) (fun p => decide (k = p.1)) = true := by
intro m
induction m with
| nil =>
simp [AverMap.set.go, List.any]
| cons p tl ih =>
cases p with
| mk k' v' =>
by_cases h : k = k'
· simp [AverMap.set.go, List.any, h]
· simp [AverMap.set.go, List.any, h, ih]
theorem has_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
AverMap.has (AverMap.set m k v) k = true := by
simpa [AverMap.has, AverMap.set] using any_set_go_self k v m"#;
/// Lean prelude fragment: theorem `get_set_self` — after `set m k v`,
/// `get … k` is `some v`.
const AVER_MAP_PRELUDE_GET_SET_SELF: &str = r#"private theorem get_set_go_self [DecidableEq α] (k : α) (v : β) :
∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) k = some v := by
intro m
induction m with
| nil =>
simp [AverMap.set.go, AverMap.get]
| cons p tl ih =>
cases p with
| mk k' v' =>
by_cases h : k = k'
· simp [AverMap.set.go, AverMap.get, h]
· simp [AverMap.set.go, AverMap.get, h, ih]
theorem get_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
AverMap.get (AverMap.set m k v) k = some v := by
simpa [AverMap.set] using get_set_go_self k v m"#;
/// Lean prelude fragment: theorem `get_set_other` — `set m k v` leaves
/// `get … key` unchanged for every `key ≠ k`.
const AVER_MAP_PRELUDE_GET_SET_OTHER: &str = r#"private theorem get_set_go_other [DecidableEq α] (k key : α) (v : β) (h : key ≠ k) :
∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) key = AverMap.get m key := by
intro m
induction m with
| nil =>
simp [AverMap.set.go, AverMap.get, h]
| cons p tl ih =>
cases p with
| mk k' v' =>
by_cases hk : k = k'
· have hkey : key ≠ k' := by simpa [hk] using h
simp [AverMap.set.go, AverMap.get, hk, hkey]
· by_cases hkey : key = k'
· simp [AverMap.set.go, AverMap.get, hk, hkey]
· simp [AverMap.set.go, AverMap.get, hk, hkey, ih]
theorem get_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
AverMap.get (AverMap.set m k v) key = AverMap.get m key := by
simpa [AverMap.set] using get_set_go_other k key v h m"#;
/// Lean prelude fragment: `has_eq_isSome_get` (relating `has` and `get`)
/// and `has_set_other` — `set m k v` preserves `has … key` for `key ≠ k`.
const AVER_MAP_PRELUDE_HAS_SET_OTHER: &str = r#"theorem has_eq_isSome_get [DecidableEq α] (m : List (α × β)) (k : α) :
AverMap.has m k = (AverMap.get m k).isSome := by
induction m with
| nil =>
simp [AverMap.has, AverMap.get]
| cons p tl ih =>
cases p with
| mk k' v' =>
by_cases h : k = k'
· simp [AverMap.has, AverMap.get, List.any, h]
· simpa [AverMap.has, AverMap.get, List.any, h] using ih
theorem has_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
AverMap.has (AverMap.set m k v) key = AverMap.has m key := by
rw [AverMap.has_eq_isSome_get, AverMap.has_eq_isSome_get]
simp [AverMap.get_set_other, h]"#;
/// Closes the `AverMap` namespace opened by the fragments above.
const AVER_MAP_PRELUDE_END: &str = r#"end AverMap"#;
/// Lean prelude fragment: `AverList` — checked indexing with `Int`
/// indices and an insertion sort over any `Ord` element type.
const LEAN_PRELUDE_AVER_LIST: &str = r#"namespace AverList
def get (xs : List α) (i : Int) : Option α :=
if i < 0 then none else xs.get? i.toNat
private def insertSorted [Ord α] (x : α) : List α → List α
| [] => [x]
| y :: ys =>
if compare x y == Ordering.lt || compare x y == Ordering.eq then
x :: y :: ys
else
y :: insertSorted x ys
def sort [Ord α] (xs : List α) : List α :=
xs.foldl (fun acc x => insertSorted x acc) []
end AverList"#;
/// Lean prelude fragment: string helpers (`charAt`, `slice`, `fromFloat`,
/// `chars`) plus the `AverString` namespace with a character-level `split`
/// and supporting lemmas (split/intercalate round-trip properties).
const LEAN_PRELUDE_STRING_HELPERS: &str = r#"def String.charAt (s : String) (i : Int) : Option String :=
if i < 0 then none
else (s.toList.get? i.toNat).map Char.toString
theorem String.charAt_length_none (s : String) : String.charAt s s.length = none := by
have hs : ¬ ((s.length : Int) < 0) := by omega
unfold String.charAt
simp [hs]
change s.data.length ≤ s.length
exact Nat.le_refl _
def String.slice (s : String) (start stop : Int) : String :=
let startN := if start < 0 then 0 else start.toNat
let stopN := if stop < 0 then 0 else stop.toNat
let chars := s.toList
String.mk ((chars.drop startN).take (stopN - startN))
private def trimFloatTrailingZerosChars (chars : List Char) : List Char :=
let noZeros := (chars.reverse.dropWhile (fun c => c == '0')).reverse
match noZeros.reverse with
| '.' :: rest => rest.reverse
| _ => noZeros
private def normalizeFloatString (s : String) : String :=
if s.toList.any (fun c => c == '.') then
String.mk (trimFloatTrailingZerosChars s.toList)
else s
def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)
def String.chars (s : String) : List String := s.toList.map Char.toString
private theorem char_to_string_append_mk (c : Char) (chars : List Char) :
Char.toString c ++ String.mk chars = String.mk (c :: chars) := by
rfl
private theorem string_intercalate_empty_char_strings_go (acc : String) (chars : List Char) :
String.intercalate.go acc "" (chars.map Char.toString) = acc ++ String.mk chars := by
induction chars generalizing acc with
| nil =>
simp [String.intercalate.go]
| cons c rest ih =>
calc
String.intercalate.go acc "" ((c :: rest).map Char.toString)
= String.intercalate.go (acc ++ Char.toString c) "" (rest.map Char.toString) := by
simp [String.intercalate.go]
_ = (acc ++ Char.toString c) ++ String.mk rest := by
simpa using ih (acc ++ Char.toString c)
_ = acc ++ String.mk (c :: rest) := by
simp [String.append_assoc, char_to_string_append_mk]
private theorem string_intercalate_empty_char_strings (chars : List Char) :
String.intercalate "" (chars.map Char.toString) = String.mk chars := by
cases chars with
| nil =>
simp [String.intercalate]
| cons c rest =>
simpa [String.intercalate] using string_intercalate_empty_char_strings_go c.toString rest
theorem String.intercalate_empty_chars (s : String) :
String.intercalate "" (String.chars s) = s := by
cases s with
| mk chars =>
simpa [String.chars] using string_intercalate_empty_char_strings chars
namespace AverString
def splitOnCharGo (currentRev : List Char) (sep : Char) : List Char → List String
| [] => [String.mk currentRev.reverse]
| c :: cs =>
if c == sep then
String.mk currentRev.reverse :: splitOnCharGo [] sep cs
else
splitOnCharGo (c :: currentRev) sep cs
def splitOnChar (s : String) (sep : Char) : List String :=
splitOnCharGo [] sep s.toList
def split (s delim : String) : List String :=
match delim.toList with
| [] => "" :: (s.toList.map Char.toString) ++ [""]
| [c] => splitOnChar s c
| _ => s.splitOn delim
@[simp] private theorem char_toString_data (c : Char) : c.toString.data = [c] := by
rfl
private theorem splitOnCharGo_until_sep
(prefixRev part tail : List Char) (sep : Char) :
part.all (fun c => c != sep) = true ->
splitOnCharGo prefixRev sep (part ++ sep :: tail) =
String.mk (prefixRev.reverse ++ part) :: splitOnCharGo [] sep tail := by
intro h_safe
induction part generalizing prefixRev with
| nil =>
simp [splitOnCharGo]
| cons c rest ih =>
simp at h_safe
have h_rest : (rest.all fun c => c != sep) = true := by
simpa using h_safe.2
simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
(ih (c :: prefixRev) h_rest)
private theorem splitOnCharGo_no_sep
(prefixRev chars : List Char) (sep : Char) :
chars.all (fun c => c != sep) = true ->
splitOnCharGo prefixRev sep chars =
[String.mk (prefixRev.reverse ++ chars)] := by
intro h_safe
induction chars generalizing prefixRev with
| nil =>
simp [splitOnCharGo]
| cons c rest ih =>
simp at h_safe
have h_rest : (rest.all fun c => c != sep) = true := by
simpa using h_safe.2
simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
(ih (c :: prefixRev) h_rest)
@[simp] theorem split_single_char_append
(head tail : String) (sep : Char) :
head.toList.all (fun c => c != sep) = true ->
split (head ++ Char.toString sep ++ tail) (Char.toString sep) =
head :: split tail (Char.toString sep) := by
intro h_safe
simpa [split, splitOnChar] using
(splitOnCharGo_until_sep [] head.data tail.data sep h_safe)
@[simp] theorem split_single_char_no_sep
(s : String) (sep : Char) :
s.toList.all (fun c => c != sep) = true ->
split s (Char.toString sep) = [s] := by
intro h_safe
simpa [split, splitOnChar] using
(splitOnCharGo_no_sep [] s.data sep h_safe)
private theorem intercalate_go_prefix
(pref acc sep : String) (rest : List String) :
String.intercalate.go (pref ++ sep ++ acc) sep rest =
pref ++ sep ++ String.intercalate.go acc sep rest := by
induction rest generalizing acc with
| nil =>
simp [String.intercalate.go, String.append_assoc]
| cons x xs ih =>
simpa [String.intercalate.go, String.append_assoc] using
(ih (acc ++ sep ++ x))
@[simp] theorem split_intercalate_trailing_single_char
(parts : List String) (sep : Char) :
parts.all (fun part => part.toList.all (fun c => c != sep)) = true ->
split (String.intercalate (Char.toString sep) parts ++ Char.toString sep) (Char.toString sep) =
match parts with
| [] => ["", ""]
| _ => parts ++ [""] := by
intro h_safe
induction parts with
| nil =>
simp [split, splitOnChar, splitOnCharGo]
| cons part rest ih =>
simp at h_safe
have h_part : (part.toList.all fun c => c != sep) = true := by
simpa using h_safe.1
cases rest with
| nil =>
have h_empty : ("".toList.all fun c => c != sep) = true := by simp
calc
split (String.intercalate.go part (Char.toString sep) [] ++ Char.toString sep) (Char.toString sep)
= split (part ++ Char.toString sep) (Char.toString sep) := by
simp [String.intercalate.go]
_ = part :: split "" (Char.toString sep) := by
simpa using split_single_char_append part "" sep h_part
_ = [part, ""] := by
simp [split_single_char_no_sep, h_empty]
| cons next rest' =>
have h_rest : ((next :: rest').all fun part => part.toList.all fun c => c != sep) = true := by
simpa using h_safe.2
calc
split
(String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep)
(Char.toString sep)
=
split
(part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep))
(Char.toString sep) := by
have h_join :
String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
= part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
calc
String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
= String.intercalate.go (part ++ Char.toString sep ++ next) (Char.toString sep) rest' ++ Char.toString sep := by
simp [String.intercalate.go]
_ = part ++ Char.toString sep ++ (String.intercalate.go next (Char.toString sep) rest' ++ Char.toString sep) := by
rw [intercalate_go_prefix part next (Char.toString sep) rest']
simp [String.append_assoc]
_ = part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
simp [String.intercalate, String.intercalate.go]
simpa using congrArg (fun s => split s (Char.toString sep)) h_join
_ = part :: split
(String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
(Char.toString sep) := by
simpa using split_single_char_append
part
(String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
sep
h_part
_ = part :: (next :: rest' ++ [""]) := by
simpa using ih h_rest
end AverString"#;
/// Lean prelude fragment: decimal digit machinery (`AverDigits`), verified
/// `String.fromInt`/`Int.fromString` with a round-trip theorem, and a
/// best-effort `Float.fromString` parser (sign, fraction, exponent).
const LEAN_PRELUDE_NUMERIC_PARSE: &str = r#"namespace AverDigits
def foldDigitsAcc (acc : Nat) : List Nat -> Nat
| [] => acc
| d :: ds => foldDigitsAcc (acc * 10 + d) ds
def foldDigits (digits : List Nat) : Nat :=
foldDigitsAcc 0 digits
private theorem foldDigitsAcc_append_singleton (acc : Nat) (xs : List Nat) (d : Nat) :
foldDigitsAcc acc (xs ++ [d]) = foldDigitsAcc acc xs * 10 + d := by
induction xs generalizing acc with
| nil =>
simp [foldDigitsAcc]
| cons x xs ih =>
simp [foldDigitsAcc, ih, Nat.left_distrib, Nat.add_assoc, Nat.add_left_comm]
private theorem foldDigits_append_singleton (xs : List Nat) (d : Nat) :
foldDigits (xs ++ [d]) = foldDigits xs * 10 + d := by
simpa [foldDigits] using foldDigitsAcc_append_singleton 0 xs d
def natDigits : Nat -> List Nat
| n =>
if n < 10 then
[n]
else
natDigits (n / 10) ++ [n % 10]
termination_by
n => n
theorem natDigits_nonempty (n : Nat) : natDigits n ≠ [] := by
by_cases h : n < 10
· rw [natDigits.eq_1]
simp [h]
· rw [natDigits.eq_1]
simp [h]
theorem natDigits_digits_lt_ten : ∀ n : Nat, ∀ d ∈ natDigits n, d < 10 := by
intro n d hd
by_cases h : n < 10
· rw [natDigits.eq_1] at hd
simp [h] at hd
rcases hd with rfl
exact h
· rw [natDigits.eq_1] at hd
simp [h] at hd
rcases hd with hd | hd
· exact natDigits_digits_lt_ten (n / 10) d hd
· subst hd
exact Nat.mod_lt n (by omega)
theorem foldDigits_natDigits : ∀ n : Nat, foldDigits (natDigits n) = n := by
intro n
by_cases h : n < 10
· rw [natDigits.eq_1]
simp [h, foldDigits, foldDigitsAcc]
· rw [natDigits.eq_1]
simp [h]
rw [foldDigits_append_singleton]
rw [foldDigits_natDigits (n / 10)]
omega
def digitChar : Nat -> Char
| 0 => '0' | 1 => '1' | 2 => '2' | 3 => '3' | 4 => '4'
| 5 => '5' | 6 => '6' | 7 => '7' | 8 => '8' | 9 => '9'
| _ => '0'
def charToDigit? : Char -> Option Nat
| '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3 | '4' => some 4
| '5' => some 5 | '6' => some 6 | '7' => some 7 | '8' => some 8 | '9' => some 9
| _ => none
theorem charToDigit_digitChar : ∀ d : Nat, d < 10 -> charToDigit? (digitChar d) = some d
| 0, _ => by simp [digitChar, charToDigit?]
| 1, _ => by simp [digitChar, charToDigit?]
| 2, _ => by simp [digitChar, charToDigit?]
| 3, _ => by simp [digitChar, charToDigit?]
| 4, _ => by simp [digitChar, charToDigit?]
| 5, _ => by simp [digitChar, charToDigit?]
| 6, _ => by simp [digitChar, charToDigit?]
| 7, _ => by simp [digitChar, charToDigit?]
| 8, _ => by simp [digitChar, charToDigit?]
| 9, _ => by simp [digitChar, charToDigit?]
| Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
omega
theorem digitChar_ne_minus : ∀ d : Nat, d < 10 -> digitChar d ≠ '-'
| 0, _ => by decide
| 1, _ => by decide
| 2, _ => by decide
| 3, _ => by decide
| 4, _ => by decide
| 5, _ => by decide
| 6, _ => by decide
| 7, _ => by decide
| 8, _ => by decide
| 9, _ => by decide
| Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
omega
theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->
Char.toString (digitChar d) ≠ " " ∧
Char.toString (digitChar d) ≠ "\t" ∧
Char.toString (digitChar d) ≠ "\n" ∧
Char.toString (digitChar d) ≠ "\r"
| 0, _ => by decide
| 1, _ => by decide
| 2, _ => by decide
| 3, _ => by decide
| 4, _ => by decide
| 5, _ => by decide
| 6, _ => by decide
| 7, _ => by decide
| 8, _ => by decide
| 9, _ => by decide
| Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
omega
theorem mapM_charToDigit_digits : ∀ ds : List Nat,
(∀ d ∈ ds, d < 10) -> List.mapM charToDigit? (ds.map digitChar) = some ds := by
intro ds hds
induction ds with
| nil =>
simp
| cons d ds ih =>
have hd : d < 10 := hds d (by simp)
have htail : ∀ x ∈ ds, x < 10 := by
intro x hx
exact hds x (by simp [hx])
simp [charToDigit_digitChar d hd, ih htail]
def natDigitsChars (n : Nat) : List Char :=
(natDigits n).map digitChar
def parseNatChars (chars : List Char) : Option Nat :=
match chars with
| [] => none
| _ => do
let digits <- List.mapM charToDigit? chars
pure (foldDigits digits)
theorem parseNatChars_nat (n : Nat) :
parseNatChars (natDigitsChars n) = some n := by
unfold parseNatChars natDigitsChars
cases h : (natDigits n).map digitChar with
| nil =>
exfalso
exact natDigits_nonempty n (List.map_eq_nil_iff.mp h)
| cons hd tl =>
have hdigits : List.mapM charToDigit? (List.map digitChar (natDigits n)) = some (natDigits n) :=
mapM_charToDigit_digits (natDigits n) (fun d hd => natDigits_digits_lt_ten n d hd)
rw [h] at hdigits
simp [h, hdigits, foldDigits_natDigits]
end AverDigits
def String.fromInt (n : Int) : String :=
match n with
| .ofNat m => String.mk (AverDigits.natDigitsChars m)
| .negSucc m => String.mk ('-' :: AverDigits.natDigitsChars (m + 1))
def Int.fromString (s : String) : Except String Int :=
match s.toList with
| [] => .error ("Cannot parse '" ++ s ++ "' as Int")
| '-' :: rest =>
match AverDigits.parseNatChars rest with
| some n => .ok (-Int.ofNat n)
| none => .error ("Cannot parse '" ++ s ++ "' as Int")
| chars =>
match AverDigits.parseNatChars chars with
| some n => .ok (Int.ofNat n)
| none => .error ("Cannot parse '" ++ s ++ "' as Int")
theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n
| .ofNat m => by
cases h : AverDigits.natDigits m with
| nil =>
exfalso
exact AverDigits.natDigits_nonempty m h
| cons d ds =>
have hd : d < 10 := AverDigits.natDigits_digits_lt_ten m d (by simp [h])
have hne : AverDigits.digitChar d ≠ '-' := AverDigits.digitChar_ne_minus d hd
have hparse : AverDigits.parseNatChars (AverDigits.digitChar d :: List.map AverDigits.digitChar ds) = some m := by
simpa [AverDigits.natDigitsChars, h] using AverDigits.parseNatChars_nat m
simp [String.fromInt, Int.fromString, AverDigits.natDigitsChars, h, hne, hparse]
| .negSucc m => by
simp [String.fromInt, Int.fromString, AverDigits.parseNatChars_nat]
rfl
private def charDigitsToNat (cs : List Char) : Nat :=
cs.foldl (fun acc c => acc * 10 + (c.toNat - '0'.toNat)) 0
private def parseExpPart : List Char → (Bool × List Char)
| '-' :: rest => (true, rest.takeWhile Char.isDigit)
| '+' :: rest => (false, rest.takeWhile Char.isDigit)
| rest => (false, rest.takeWhile Char.isDigit)
def Float.fromString (s : String) : Except String Float :=
let chars := s.toList
let (neg, chars) := match chars with
| '-' :: rest => (true, rest)
| _ => (false, chars)
let intPart := chars.takeWhile Char.isDigit
let rest := chars.dropWhile Char.isDigit
let (fracPart, rest) := match rest with
| '.' :: rest => (rest.takeWhile Char.isDigit, rest.dropWhile Char.isDigit)
| _ => ([], rest)
let (expNeg, expDigits) := match rest with
| 'e' :: rest => parseExpPart rest
| 'E' :: rest => parseExpPart rest
| _ => (false, [])
if intPart.isEmpty && fracPart.isEmpty then .error ("Invalid float: " ++ s)
else
let mantissa := charDigitsToNat (intPart ++ fracPart)
let fracLen : Int := fracPart.length
let expVal : Int := charDigitsToNat expDigits
let shift : Int := (if expNeg then -expVal else expVal) - fracLen
let f := if shift >= 0 then Float.ofScientific mantissa false shift.toNat
else Float.ofScientific mantissa true ((-shift).toNat)
.ok (if neg then -f else f)"#;
/// Lean prelude fragment: char code conversions (rejecting out-of-range
/// and surrogate code points) and byte↔hex helpers in `AverByte`.
const LEAN_PRELUDE_CHAR_BYTE: &str = r#"def Char.toCode (s : String) : Int :=
match s.toList.head? with
| some c => (c.toNat : Int)
| none => panic! "Char.toCode: string is empty"
def Char.fromCode (n : Int) : Option String :=
if n < 0 || n > 1114111 then none
else if n >= 55296 && n <= 57343 then none
else some (Char.toString (Char.ofNat n.toNat))
def hexDigit (n : Int) : String :=
match n with
| 0 => "0" | 1 => "1" | 2 => "2" | 3 => "3"
| 4 => "4" | 5 => "5" | 6 => "6" | 7 => "7"
| 8 => "8" | 9 => "9" | 10 => "a" | 11 => "b"
| 12 => "c" | 13 => "d" | 14 => "e" | 15 => "f"
| _ => "?"
def byteToHex (code : Int) : String :=
hexDigit (code / 16) ++ hexDigit (code % 16)
namespace AverByte
private def hexValue (c : Char) : Option Int :=
match c with
| '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3
| '4' => some 4 | '5' => some 5 | '6' => some 6 | '7' => some 7
| '8' => some 8 | '9' => some 9 | 'a' => some 10 | 'b' => some 11
| 'c' => some 12 | 'd' => some 13 | 'e' => some 14 | 'f' => some 15
| 'A' => some 10 | 'B' => some 11 | 'C' => some 12 | 'D' => some 13
| 'E' => some 14 | 'F' => some 15
| _ => none
def toHex (n : Int) : Except String String :=
if n < 0 || n > 255 then
.error ("Byte.toHex: " ++ toString n ++ " is out of range 0-255")
else
.ok (byteToHex n)
def fromHex (s : String) : Except String Int :=
match s.toList with
| [hi, lo] =>
match hexValue hi, hexValue lo with
| some h, some l => .ok (h * 16 + l)
| _, _ => .error ("Byte.fromHex: invalid hex '" ++ s ++ "'")
| _ => .error ("Byte.fromHex: expected exactly 2 hex chars, got '" ++ s ++ "'")
end AverByte"#;
/// Collects every pure function definition, scanning all modules first
/// and then the top-level context, preserving declaration order.
pub(crate) fn pure_fns(ctx: &CodegenContext) -> Vec<&FnDef> {
    let module_fns = ctx.modules.iter().flat_map(|module| module.fn_defs.iter());
    module_fns
        .chain(ctx.fn_defs.iter())
        .filter(|fn_def| toplevel::is_pure_fn(fn_def))
        .collect()
}
/// Returns the set of names of all recursive type definitions visible in
/// the context (module-level and top-level).
pub(crate) fn recursive_type_names(ctx: &CodegenContext) -> HashSet<String> {
    let all_type_defs = ctx
        .modules
        .iter()
        .flat_map(|module| module.type_defs.iter())
        .chain(ctx.type_defs.iter());
    all_type_defs
        .filter(|type_def| toplevel::is_recursive_type_def(type_def))
        .map(|type_def| toplevel::type_def_name(type_def).to_string())
        .collect()
}
/// Intersects the context's recursive-function names with the set of
/// pure functions, yielding the names that are both recursive and pure.
pub(crate) fn recursive_pure_fn_names(ctx: &CodegenContext) -> HashSet<String> {
    let pure_names: HashSet<String> = pure_fns(ctx)
        .into_iter()
        .map(|fn_def| fn_def.name.clone())
        .collect();
    let mut result = HashSet::new();
    for name in &ctx.recursive_fns {
        if pure_names.contains(name.as_str()) {
            result.insert(name.clone());
        }
    }
    result
}
/// Builds the stable counter key for a verify block: `fn:<name>` for
/// case-based blocks, `law:<name>::<law>` for law blocks.
fn verify_counter_key(vb: &crate::ast::VerifyBlock) -> String {
    let key = match &vb.kind {
        VerifyKind::Law(law) => format!("law:{}::{}", vb.fn_name, law.name),
        VerifyKind::Cases => format!("fn:{}", vb.fn_name),
    };
    key
}
/// Derives the Lean project name from the entry file's basename.
fn lean_project_name(ctx: &CodegenContext) -> String {
crate::codegen::common::entry_basename(ctx)
}
/// Looks up a type definition by name, searching module-level definitions
/// first and then the top-level context; returns the first match.
pub(crate) fn find_type_def<'a>(ctx: &'a CodegenContext, type_name: &str) -> Option<&'a TypeDef> {
    let module_types = ctx.modules.iter().flat_map(|module| module.type_defs.iter());
    module_types
        .chain(ctx.type_defs.iter())
        .find(|type_def| toplevel::type_def_name(type_def) == type_name)
}
/// Renders a bound expression (used in e.g. measure/bound positions) as
/// Lean source text. Integer literals, identifiers, attribute accesses,
/// and dotted-name function calls are translated; `List.len x` becomes
/// `x.length`. Anything unrecognized falls back to the literal "0".
pub(super) fn bound_expr_to_lean(expr: &Spanned<Expr>) -> String {
    match &expr.node {
        Expr::Literal(crate::ast::Literal::Int(n)) => n.to_string(),
        Expr::Ident(name) => expr::aver_name_to_lean(name),
        Expr::Attr(obj, field) => {
            format!(
                "{}.{}",
                bound_expr_to_lean(obj),
                expr::aver_name_to_lean(field)
            )
        }
        Expr::FnCall(callee, args) => {
            match crate::codegen::common::expr_to_dotted_name(&callee.node) {
                Some(dotted) => {
                    // `List.len x` has a dedicated Lean spelling: `x.length`.
                    if dotted == "List.len" && args.len() == 1 {
                        format!("{}.length", bound_expr_to_lean(&args[0]))
                    } else {
                        let rendered: Vec<String> = args.iter().map(bound_expr_to_lean).collect();
                        format!(
                            "({} {})",
                            expr::aver_name_to_lean(&dotted),
                            rendered.join(" ")
                        )
                    }
                }
                // Callee isn't a dotted name we can render; fall back.
                None => "0".to_string(),
            }
        }
        _ => "0".to_string(),
    }
}
/// Indices of the parameters whose type is not scalar-like; these are the
/// parameters that participate in a `sizeOf`-based termination measure.
pub(crate) fn sizeof_measure_param_indices(fd: &FnDef) -> Vec<usize> {
    let mut indices = Vec::new();
    for (idx, (_, type_name)) in fd.params.iter().enumerate() {
        if !crate::codegen::recursion::detect::is_scalar_like_type(type_name) {
            indices.push(idx);
        }
    }
    indices
}
/// Runs the recursion-plan analysis and returns only the proof-mode
/// issues it reports, discarding the plans themselves.
pub fn proof_mode_findings(ctx: &CodegenContext) -> Vec<ProofModeIssue> {
    let (_plans, found_issues) = crate::codegen::recursion::analyze_plans(ctx);
    found_issues
}
/// Flattens proof-mode findings down to their human-readable messages.
pub fn proof_mode_issues(ctx: &CodegenContext) -> Vec<String> {
    let mut messages = Vec::new();
    for issue in proof_mode_findings(ctx) {
        messages.push(issue.message);
    }
    messages
}
/// Transpiles the context to a Lean project using the default verify
/// mode (`NativeDecide`).
pub fn transpile(ctx: &mut CodegenContext) -> ProjectOutput {
transpile_with_verify_mode(ctx, VerifyEmitMode::NativeDecide)
}
/// Transpiles in proof emission mode: facts are refreshed first, then the
/// unified pipeline runs with `LeanEmitMode::Proof`.
pub fn transpile_for_proof_mode(
ctx: &mut CodegenContext,
verify_mode: VerifyEmitMode,
) -> ProjectOutput {
ctx.refresh_facts();
transpile_unified(ctx, verify_mode, LeanEmitMode::Proof)
}
/// Transpiles in standard emission mode with the given verify mode; facts
/// are refreshed before the unified pipeline runs.
pub fn transpile_with_verify_mode(
ctx: &mut CodegenContext,
verify_mode: VerifyEmitMode,
) -> ProjectOutput {
ctx.refresh_facts();
transpile_unified(ctx, verify_mode, LeanEmitMode::Standard)
}
fn emit_lifted_effectful_functions(
ctx: &CodegenContext,
recursive_fns: &HashSet<String>,
sections: &mut Vec<String>,
) {
use crate::types::checker::effect_classification::is_classified;
let reachable = crate::codegen::common::verify_reachable_fn_names(&ctx.items);
let mut helpers: std::collections::HashMap<String, Vec<String>> =
std::collections::HashMap::new();
for item in &ctx.items {
let TopLevel::FnDef(fd) = item else { continue };
if fd.effects.is_empty() || fd.name == "main" {
continue;
}
if !fd.effects.iter().all(|e| is_classified(&e.node)) {
continue;
}
if !reachable.contains(&fd.name) {
continue;
}
helpers.insert(
fd.name.clone(),
fd.effects.iter().map(|e| e.node.clone()).collect(),
);
}
let mut lifted_fns: Vec<(String, crate::ast::FnDef)> = Vec::new();
for item in &ctx.items {
let TopLevel::FnDef(fd) = item else { continue };
if fd.effects.is_empty() || fd.name == "main" {
continue;
}
if !fd.effects.iter().all(|e| is_classified(&e.node)) {
continue;
}
if !reachable.contains(&fd.name) {
continue;
}
let Ok(Some(lifted)) =
crate::types::checker::effect_lifting::lift_fn_def_with_helpers(fd, &helpers)
else {
continue;
};
lifted_fns.push((fd.name.clone(), lifted));
}
let eligible_names: std::collections::HashSet<String> =
lifted_fns.iter().map(|(n, _)| n.clone()).collect();
let mut emitted: std::collections::HashSet<String> = std::collections::HashSet::new();
let mut order: Vec<usize> = Vec::new();
let mut remaining: Vec<usize> = (0..lifted_fns.len()).collect();
while !remaining.is_empty() {
let before = remaining.len();
remaining.retain(|&idx| {
let body_calls = collect_called_idents_in_body(&lifted_fns[idx].1.body);
let ready = body_calls
.iter()
.all(|name| !eligible_names.contains(name) || emitted.contains(name));
if ready {
emitted.insert(lifted_fns[idx].0.clone());
order.push(idx);
false
} else {
true
}
});
if remaining.len() == before {
order.append(&mut remaining);
}
}
for idx in order {
let (_, lifted) = &lifted_fns[idx];
if let Some(code) = toplevel::emit_fn_def(lifted, recursive_fns, ctx) {
sections.push(code);
sections.push(String::new());
}
}
}
/// Collects the names of every identifier appearing in call position
/// anywhere inside `body`, by recursively walking all sub-expressions.
fn collect_called_idents_in_body(body: &crate::ast::FnBody) -> std::collections::HashSet<String> {
    use crate::ast::{Expr, Spanned, Stmt};
    // Recursive expression visitor; `acc` accumulates callee names.
    fn visit(expr: &Spanned<Expr>, acc: &mut std::collections::HashSet<String>) {
        match &expr.node {
            Expr::FnCall(callee, args) => {
                // Record the callee when it is a plain or resolved name.
                if let Expr::Ident(name) | Expr::Resolved { name, .. } = &callee.node {
                    acc.insert(name.clone());
                }
                visit(callee, acc);
                for arg in args {
                    visit(arg, acc);
                }
            }
            Expr::BinOp(_, lhs, rhs) => {
                visit(lhs, acc);
                visit(rhs, acc);
            }
            Expr::Match { subject, arms } => {
                visit(subject, acc);
                for arm in arms {
                    visit(&arm.body, acc);
                }
            }
            Expr::Attr(inner, _) | Expr::ErrorProp(inner) => visit(inner, acc),
            Expr::Constructor(_, Some(inner)) => visit(inner, acc),
            Expr::List(items) | Expr::Tuple(items) | Expr::IndependentProduct(items, _) => {
                items.iter().for_each(|item| visit(item, acc));
            }
            Expr::MapLiteral(pairs) => {
                for (key, value) in pairs {
                    visit(key, acc);
                    visit(value, acc);
                }
            }
            Expr::RecordCreate { fields, .. } => {
                fields.iter().for_each(|(_, value)| visit(value, acc));
            }
            Expr::RecordUpdate { base, updates, .. } => {
                visit(base, acc);
                updates.iter().for_each(|(_, value)| visit(value, acc));
            }
            Expr::InterpolatedStr(parts) => {
                // Only parsed interpolation segments contain expressions.
                for part in parts {
                    if let crate::ast::StrPart::Parsed(inner) = part {
                        visit(inner, acc);
                    }
                }
            }
            _ => {}
        }
    }
    let mut acc = std::collections::HashSet::new();
    for stmt in body.stmts() {
        // Both statement forms carry exactly one expression to scan.
        match stmt {
            Stmt::Expr(expr) | Stmt::Binding(_, _, expr) => visit(expr, &mut acc),
        }
    }
    acc
}
#[cfg(test)]
fn generate_prelude() -> String {
    // Test-only convenience: the full prelude with every helper included
    // regardless of what the generated body actually uses.
    generate_prelude_for_body("", true)
}
#[cfg(test)]
/// Builds the Lean prelude needed by `body`: the shared header, an optional
/// trust header, builtin record types, and one snippet per builtin helper
/// that `body` references (or every helper when `include_all_helpers` is set).
fn generate_prelude_for_body(body: &str, include_all_helpers: bool) -> String {
    let mut sections = vec![LEAN_PRELUDE_HEADER.to_string()];
    if include_all_helpers || crate::codegen::builtin_records::needs_trust_header(body) {
        // No effects are declared here; render the commented trust header
        // against an empty effect set.
        let no_effects = crate::codegen::common::DeclaredEffects {
            bare_namespaces: std::collections::HashSet::new(),
            methods: std::collections::HashSet::new(),
        };
        let has_ip = body.contains("BranchPath");
        sections.push(crate::types::checker::proof_trust_header::generate_commented(
            "-- ",
            &no_effects,
            has_ip,
        ));
    }
    for record in crate::codegen::builtin_records::needed_records(body, include_all_helpers) {
        sections.push(crate::codegen::builtin_records::render_lean(record));
    }
    for helper in crate::codegen::builtin_helpers::needed_helpers(body, include_all_helpers) {
        // Each helper key maps to zero or more prelude snippets.
        let snippets: Vec<String> = match helper.key {
            "BranchPath" => vec![LEAN_PRELUDE_BRANCH_PATH.to_string()],
            "AverList" => vec![LEAN_PRELUDE_AVER_LIST.to_string()],
            "StringHelpers" => vec![LEAN_PRELUDE_STRING_HELPERS.to_string()],
            "NumericParse" => vec![LEAN_PRELUDE_NUMERIC_PARSE.to_string()],
            "CharByte" => vec![LEAN_PRELUDE_CHAR_BYTE.to_string()],
            "AverMeasure" => vec![LEAN_PRELUDE_AVER_MEASURE.to_string()],
            // The map prelude is itself body-sensitive (lemma subsets).
            "AverMap" => vec![generate_map_prelude(body, include_all_helpers)],
            "ProofFuel" => vec![LEAN_PRELUDE_PROOF_FUEL.to_string()],
            "FloatInstances" => vec![
                LEAN_PRELUDE_FLOAT_COE.to_string(),
                LEAN_PRELUDE_FLOAT_DEC_EQ.to_string(),
            ],
            "ExceptInstances" => vec![
                LEAN_PRELUDE_EXCEPT_DEC_EQ.to_string(),
                LEAN_PRELUDE_EXCEPT_NS.to_string(),
                LEAN_PRELUDE_OPTION_TO_EXCEPT.to_string(),
            ],
            "StringHadd" => vec![LEAN_PRELUDE_STRING_HADD.to_string()],
            // These keys are satisfied elsewhere in the generated output.
            "ResultDatatype" | "OptionDatatype" | "OptionToResult" | "BranchPathDatatype" => {
                Vec::new()
            }
            other => panic!(
                "Lean backend has no implementation for builtin helper key '{}'. \
                 Add a match arm in generate_prelude_for_body or remove the key \
                 from BUILTIN_HELPERS.",
                other
            ),
        };
        sections.extend(snippets);
    }
    sections.join("\n\n")
}
/// Builds the AverMap namespace prelude: the base definitions plus only the
/// lemma snippets `body` actually mentions (all of them when
/// `include_all_helpers` is set).
fn generate_map_prelude(body: &str, include_all_helpers: bool) -> String {
    let wants = |needle: &str| include_all_helpers || body.contains(needle);
    let mut chunks = vec![AVER_MAP_PRELUDE_BASE.to_string()];
    // NOTE(review): mentioning has_set_other also pulls in get_set_other —
    // presumably a proof dependency between the two lemmas; confirm.
    let lemmas = [
        (wants("AverMap.has_set_self"), AVER_MAP_PRELUDE_HAS_SET_SELF),
        (wants("AverMap.get_set_self"), AVER_MAP_PRELUDE_GET_SET_SELF),
        (
            wants("AverMap.get_set_other") || wants("AverMap.has_set_other"),
            AVER_MAP_PRELUDE_GET_SET_OTHER,
        ),
        (wants("AverMap.has_set_other"), AVER_MAP_PRELUDE_HAS_SET_OTHER),
    ];
    for (needed, snippet) in lemmas {
        if needed {
            chunks.push(snippet.to_string());
        }
    }
    chunks.push(AVER_MAP_PRELUDE_END.to_string());
    chunks.join("\n\n")
}
/// Renders the generated project's `lakefile.lean`: a single package (name
/// lowercased) with one default `lean_lib` whose roots are the entry module
/// followed by `extra_roots` (e.g. `AverCommon` and each user module).
fn generate_lakefile_with_roots(project_name: &str, extra_roots: &[String]) -> String {
    // Lake root names are backtick-quoted Lean identifiers.
    let roots_str = std::iter::once(project_name)
        .chain(extra_roots.iter().map(String::as_str))
        .map(|root| format!("`{}", root))
        .collect::<Vec<_>>()
        .join(", ");
    format!(
        r#"import Lake
open Lake DSL
package «{}» where
version := v!"0.1.0"
@[default_target]
lean_lib «{}» where
srcDir := "."
roots := #[{}]
"#,
        project_name.to_lowercase(),
        project_name,
        roots_str
    )
}
/// Pins the Lean toolchain version (`lean-toolchain` file contents) used by
/// every generated project.
fn generate_toolchain() -> String {
    String::from("leanprover/lean4:v4.15.0\n")
}
/// Selects how function and type definitions are lowered to Lean.
#[derive(Clone, Copy)]
enum LeanEmitMode {
    /// Plain emission: no recursion plans are computed.
    Standard,
    /// Proof-oriented emission: recursion plans from
    /// `crate::codegen::recursion::analyze_plans` drive the proof emitters
    /// (`emit_fn_def_proof`, `emit_mutual_group_proof`) and recursive types
    /// additionally get a size measure.
    Proof,
}
/// Core transpilation routine shared by the standard and proof-mode entry
/// points: lowers the whole `CodegenContext` into a Lean 4 Lake project.
///
/// Output layout: one `<Module>.lean` file per user module, an entry file
/// named after the project (top-level types, pure fns, lifted effectful fns,
/// decisions, verify blocks), an `AverCommon.lean` prelude sized to the
/// union of everything emitted, plus `lakefile.lean` and `lean-toolchain`.
fn transpile_unified(
    ctx: &CodegenContext,
    verify_mode: VerifyEmitMode,
    emit_mode: LeanEmitMode,
) -> ProjectOutput {
    use crate::codegen::recursion::RecursionPlan;
    let recursive_fns: HashSet<String> = ctx.recursive_fns.clone();
    let recursive_names = recursive_pure_fn_names(ctx);
    let recursive_types = recursive_type_names(ctx);
    // Recursion plans are only computed (and used) in proof mode.
    let (plans, _proof_issues) = match emit_mode {
        LeanEmitMode::Proof => crate::codegen::recursion::analyze_plans(ctx),
        LeanEmitMode::Standard => (HashMap::<String, RecursionPlan>::new(), Vec::new()),
    };
    // Route each pure component to its owning scope; multi-member components
    // become Lean `mutual` groups, singletons are emitted individually.
    let pure_per_scope = crate::codegen::common::route_pure_components_per_scope(
        ctx,
        toplevel::is_pure_fn,
        |comp| {
            let mut out: Vec<String> = Vec::new();
            if comp.len() > 1 {
                let code = match emit_mode {
                    LeanEmitMode::Proof => {
                        // Use the proof emitter only when every member of the
                        // mutual group has a supported recursion plan.
                        let all_supported = comp.iter().all(|fd| plans.contains_key(&fd.name));
                        if all_supported {
                            toplevel::emit_mutual_group_proof(comp, ctx, &plans)
                        } else {
                            toplevel::emit_mutual_group(comp, ctx)
                        }
                    }
                    LeanEmitMode::Standard => toplevel::emit_mutual_group(comp, ctx),
                };
                out.push(code);
                out.push(String::new());
            } else if let Some(fd) = comp.first() {
                let emitted = match emit_mode {
                    LeanEmitMode::Proof => {
                        // A recursive function without a plan falls back to
                        // the standard emitter.
                        let is_recursive = recursive_names.contains(&fd.name);
                        if is_recursive && !plans.contains_key(&fd.name) {
                            toplevel::emit_fn_def(fd, &recursive_names, ctx)
                        } else {
                            toplevel::emit_fn_def_proof(fd, plans.get(&fd.name).cloned(), ctx)
                        }
                    }
                    LeanEmitMode::Standard => toplevel::emit_fn_def(fd, &recursive_fns, ctx),
                };
                if let Some(code) = emitted {
                    out.push(code);
                    out.push(String::new());
                }
            }
            out
        },
    );
    // Effectful functions with fully classified effects get Oracle-lifted
    // counterparts in the entry file.
    let mut entry_lifted_sections: Vec<String> = Vec::new();
    let lifted_recursive_names = match emit_mode {
        LeanEmitMode::Proof => &recursive_names,
        LeanEmitMode::Standard => &recursive_fns,
    };
    emit_lifted_effectful_functions(ctx, lifted_recursive_names, &mut entry_lifted_sections);
    let mut entry_decision_sections: Vec<String> = Vec::new();
    for item in &ctx.items {
        if let TopLevel::Decision(db) = item {
            entry_decision_sections.push(toplevel::emit_decision(db));
            entry_decision_sections.push(String::new());
        }
    }
    // Verify blocks: case numbering is continued per counter key, so several
    // blocks verifying the same function share one global numbering.
    let mut entry_verify_sections: Vec<String> = Vec::new();
    let mut verify_case_counters: HashMap<String, usize> = HashMap::new();
    for item in &ctx.items {
        if let TopLevel::Verify(vb) = item {
            let key = verify_counter_key(vb);
            let start_idx = *verify_case_counters.get(&key).unwrap_or(&0);
            let (emitted, next_idx) = toplevel::emit_verify_block(vb, ctx, verify_mode, start_idx);
            verify_case_counters.insert(key, next_idx);
            entry_verify_sections.push(emitted);
            entry_verify_sections.push(String::new());
        }
    }
    // Per-module files. `union_body` accumulates every emitted body so the
    // shared prelude (AverCommon) can be sized to what is actually used.
    let mut module_files: Vec<(String, String)> = Vec::new();
    let mut union_body = String::new();
    for module in &ctx.modules {
        let mut body_sections: Vec<String> = Vec::new();
        for td in &module.type_defs {
            body_sections.push(toplevel::emit_type_def(td));
            if toplevel::is_recursive_type_def(td) {
                // Recursive inductives need an explicit DecidableEq instance,
                // and (proof mode only) may get a size measure.
                body_sections.push(toplevel::emit_recursive_decidable_eq(
                    toplevel::type_def_name(td),
                ));
                if matches!(emit_mode, LeanEmitMode::Proof)
                    && let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types)
                {
                    body_sections.push(measure);
                }
            }
            body_sections.push(String::new());
        }
        if let Some(scope_sections) = pure_per_scope.by_scope.get(&module.prefix) {
            body_sections.extend(scope_sections.clone());
        }
        let body = body_sections.join("\n");
        union_body.push_str(&body);
        union_body.push('\n');
        // Every module imports the common prelude plus its declared deps,
        // and opens each dep's namespace.
        let mut imports = vec!["import AverCommon".to_string()];
        for d in &module.depends {
            imports.push(format!("import {}", d));
        }
        let opens: Vec<String> = module
            .depends
            .iter()
            .map(|d| format!("open {}", d))
            .collect();
        let opens_str = if opens.is_empty() {
            String::new()
        } else {
            format!("\n{}\n", opens.join("\n"))
        };
        let content = format!(
            "{}\n{}\nnamespace {}\n\n{}\nend {}\n",
            imports.join("\n"),
            opens_str,
            module.prefix,
            body,
            module.prefix
        );
        // Dotted module prefixes become nested directories on disk.
        let path = module.prefix.replace('.', "/");
        module_files.push((format!("{}.lean", path), content));
    }
    // Entry file body: top-level types, entry-scope pure fns, then lifted,
    // decision, and verify sections (in that order).
    let mut entry_body_sections: Vec<String> = Vec::new();
    for td in &ctx.type_defs {
        entry_body_sections.push(toplevel::emit_type_def(td));
        if toplevel::is_recursive_type_def(td) {
            entry_body_sections.push(toplevel::emit_recursive_decidable_eq(
                toplevel::type_def_name(td),
            ));
            if matches!(emit_mode, LeanEmitMode::Proof)
                && let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types)
            {
                entry_body_sections.push(measure);
            }
        }
        entry_body_sections.push(String::new());
    }
    if let Some(entry_pure) = pure_per_scope.by_scope.get("") {
        entry_body_sections.extend(entry_pure.clone());
    }
    entry_body_sections.extend(entry_lifted_sections);
    entry_body_sections.extend(entry_decision_sections);
    entry_body_sections.extend(entry_verify_sections);
    let entry_body = entry_body_sections.join("\n");
    union_body.push_str(&entry_body);
    union_body.push('\n');
    let project_name = lean_project_name(ctx);
    let mut entry_imports = vec!["import AverCommon".to_string()];
    for m in &ctx.modules {
        entry_imports.push(format!("import {}", m.prefix));
    }
    let entry_opens: Vec<String> = ctx
        .modules
        .iter()
        .map(|m| format!("open {}", m.prefix))
        .collect();
    let mut entry_parts = vec![entry_imports.join("\n")];
    if !entry_opens.is_empty() {
        entry_parts.push(entry_opens.join("\n"));
    }
    // The trust header is emitted only when the program uses BranchPath
    // (independent products) or a classified effect in the proof subset.
    let declared = crate::codegen::common::collect_declared_effects(ctx);
    let has_ip = union_body.contains("BranchPath");
    let has_classified =
        crate::types::checker::effect_classification::classifications_for_proof_subset()
            .iter()
            .any(|c| declared.includes(c.method));
    if has_ip || has_classified {
        entry_parts.push(
            crate::types::checker::proof_trust_header::generate_commented("-- ", &declared, has_ip),
        );
    }
    // Oracle subtype declarations go into the entry file AND into
    // `union_body` so they count toward prelude sizing.
    let subtype_block = crate::types::checker::oracle_subtypes::lean_subtypes(&declared);
    if !subtype_block.is_empty() {
        union_body.push_str(&subtype_block);
        union_body.push('\n');
        entry_parts.push(subtype_block);
    }
    entry_parts.push(entry_body);
    let entry_content = entry_parts.join("\n\n");
    let common_content = build_common_lean(&union_body);
    // Lake roots: the entry module, the common prelude, and every module.
    let mut extra_roots: Vec<String> = vec!["AverCommon".to_string()];
    for m in &ctx.modules {
        extra_roots.push(m.prefix.clone());
    }
    let lakefile = generate_lakefile_with_roots(&project_name, &extra_roots);
    let toolchain = generate_toolchain();
    let mut files = module_files;
    files.push((format!("{}.lean", project_name), entry_content));
    files.push(("AverCommon.lean".to_string(), common_content));
    files.push(("lakefile.lean".to_string(), lakefile));
    files.push(("lean-toolchain".to_string(), toolchain));
    ProjectOutput { files }
}
/// Builds `AverCommon.lean`: the shared prelude sized to the union of every
/// emitted body (no trust header here — that lives in the entry file).
fn build_common_lean(union_body: &str) -> String {
    let mut sections = vec![LEAN_PRELUDE_HEADER.to_string()];
    for record in crate::codegen::builtin_records::needed_records(union_body, false) {
        sections.push(crate::codegen::builtin_records::render_lean(record));
    }
    for helper in crate::codegen::builtin_helpers::needed_helpers(union_body, false) {
        // Each helper key maps to zero or more prelude snippets.
        let snippets: Vec<String> = match helper.key {
            "BranchPath" => vec![LEAN_PRELUDE_BRANCH_PATH.to_string()],
            "AverList" => vec![LEAN_PRELUDE_AVER_LIST.to_string()],
            "StringHelpers" => vec![LEAN_PRELUDE_STRING_HELPERS.to_string()],
            "NumericParse" => vec![LEAN_PRELUDE_NUMERIC_PARSE.to_string()],
            "CharByte" => vec![LEAN_PRELUDE_CHAR_BYTE.to_string()],
            "AverMeasure" => vec![LEAN_PRELUDE_AVER_MEASURE.to_string()],
            // The map prelude is itself body-sensitive (lemma subsets).
            "AverMap" => vec![generate_map_prelude(union_body, false)],
            "ProofFuel" => vec![LEAN_PRELUDE_PROOF_FUEL.to_string()],
            "FloatInstances" => vec![
                LEAN_PRELUDE_FLOAT_COE.to_string(),
                LEAN_PRELUDE_FLOAT_DEC_EQ.to_string(),
            ],
            "ExceptInstances" => vec![
                LEAN_PRELUDE_EXCEPT_DEC_EQ.to_string(),
                LEAN_PRELUDE_EXCEPT_NS.to_string(),
                LEAN_PRELUDE_OPTION_TO_EXCEPT.to_string(),
            ],
            "StringHadd" => vec![LEAN_PRELUDE_STRING_HADD.to_string()],
            // These keys are satisfied elsewhere in the generated output.
            "ResultDatatype" | "OptionDatatype" | "OptionToResult" | "BranchPathDatatype" => {
                Vec::new()
            }
            other => panic!(
                "Lean backend has no implementation for builtin helper key '{}'. \
                 Add a match arm in build_common_lean or remove the key from BUILTIN_HELPERS.",
                other
            ),
        };
        sections.extend(snippets);
    }
    sections.join("\n\n")
}
#[cfg(test)]
mod tests {
use super::{
VerifyEmitMode, generate_prelude, proof_mode_issues, recurrence, transpile,
transpile_for_proof_mode, transpile_with_verify_mode,
};
use crate::ast::{
BinOp, Expr, FnBody, FnDef, Literal, MatchArm, Pattern, Spanned, Stmt, TailCallData,
TopLevel, TypeDef, TypeVariant, VerifyBlock, VerifyGiven, VerifyGivenDomain, VerifyKind,
VerifyLaw,
};
// Shorthand: wrap an expression in a span-less `Spanned`.
fn sb(e: Expr) -> Spanned<Expr> {
    Spanned::bare(e)
}
// Boxed variant of `sb`, for AST slots that take `Box<Spanned<Expr>>`.
fn sbb(e: Expr) -> Box<Spanned<Expr>> {
    Box::new(Spanned::bare(e))
}
use crate::codegen::{CodegenContext, build_context};
use crate::source::parse_source;
use std::collections::{HashMap, HashSet};
use std::sync::Arc as Rc;
// A minimal `CodegenContext` with no items, used as the base for the
// fixture builders below. Project name "verify_mode" makes the generated
// entry file `Verify_mode.lean`, which the tests look up by name.
fn empty_ctx() -> CodegenContext {
    CodegenContext {
        items: vec![],
        fn_sigs: HashMap::new(),
        memo_fns: HashSet::new(),
        memo_safe_types: HashSet::new(),
        type_defs: vec![],
        fn_defs: vec![],
        project_name: "verify_mode".to_string(),
        modules: vec![],
        module_prefixes: HashSet::new(),
        policy: None,
        emit_replay_runtime: false,
        runtime_policy_from_env: false,
        guest_entry: None,
        emit_self_host_support: false,
        extra_fn_defs: Vec::new(),
        mutual_tco_members: HashSet::new(),
        recursive_fns: HashSet::new(),
        fn_analyses: HashMap::new(),
        buffer_build_sinks: HashMap::new(),
        buffer_fusion_sites: Vec::new(),
        synthesized_buffered_fns: Vec::new(),
    }
}
// Parses, TCO-transforms, and fully typechecks `source`, then builds a
// `CodegenContext` from the result. Panics if the source fails to parse
// or typecheck — fixtures are expected to be valid Aver programs.
fn ctx_from_source(source: &str, project_name: &str) -> CodegenContext {
    let mut items = parse_source(source).expect("source should parse");
    // TCO must run before typechecking, mirroring the real pipeline order.
    crate::ir::pipeline::tco(&mut items);
    let tc = crate::ir::pipeline::typecheck(
        &items,
        &crate::ir::TypecheckMode::Full { base_dir: None },
    );
    assert!(
        tc.errors.is_empty(),
        "source should typecheck without errors: {:?}",
        tc.errors
    );
    build_context(
        items,
        &tc,
        None,
        HashSet::new(),
        project_name.to_string(),
        vec![],
    )
}
// Concatenates every generated `.lean` source file (skipping the
// `lakefile.lean` build script) into one string for assertions.
fn generated_lean_file(out: &crate::codegen::ProjectOutput) -> String {
    let mut sources: Vec<&str> = Vec::new();
    for (name, content) in &out.files {
        if name.ends_with(".lean") && name != "lakefile.lean" {
            sources.push(content.as_str());
        }
    }
    sources.join("\n")
}
// Context with one cases-style verify block for `f`, single case `1 => 1`.
fn empty_ctx_with_verify_case() -> CodegenContext {
    let mut ctx = empty_ctx();
    ctx.items.push(TopLevel::Verify(VerifyBlock {
        fn_name: "f".to_string(),
        line: 1,
        cases: vec![(
            sb(Expr::Literal(Literal::Int(1))),
            sb(Expr::Literal(Literal::Int(1))),
        )],
        case_spans: vec![],
        case_givens: vec![],
        case_hostile_origins: vec![],
        case_hostile_profiles: vec![],
        kind: VerifyKind::Cases,
        trace: false,
        cases_givens: vec![],
    }));
    ctx
}
// Context with two separate cases-style verify blocks for the same function
// `f` (lines 1 and 2) — used to check that case numbering continues across
// blocks instead of restarting.
fn empty_ctx_with_two_verify_blocks_same_fn() -> CodegenContext {
    let mut ctx = empty_ctx();
    ctx.items.push(TopLevel::Verify(VerifyBlock {
        fn_name: "f".to_string(),
        line: 1,
        cases: vec![(
            sb(Expr::Literal(Literal::Int(1))),
            sb(Expr::Literal(Literal::Int(1))),
        )],
        case_spans: vec![],
        case_givens: vec![],
        case_hostile_origins: vec![],
        case_hostile_profiles: vec![],
        kind: VerifyKind::Cases,
        trace: false,
        cases_givens: vec![],
    }));
    ctx.items.push(TopLevel::Verify(VerifyBlock {
        fn_name: "f".to_string(),
        line: 2,
        cases: vec![(
            sb(Expr::Literal(Literal::Int(2))),
            sb(Expr::Literal(Literal::Int(2))),
        )],
        case_spans: vec![],
        case_givens: vec![],
        case_hostile_origins: vec![],
        case_hostile_profiles: vec![],
        kind: VerifyKind::Cases,
        trace: false,
        cases_givens: vec![],
    }));
    ctx
}
// Context containing an `add(a, b) = a + b` function plus a commutativity
// law verify block: two sampled cases and a universal law
// `add a b = add b a` over `a in 1..2` (range) and `b in [2, 3]` (explicit).
fn empty_ctx_with_verify_law() -> CodegenContext {
    let mut ctx = empty_ctx();
    let add = FnDef {
        name: "add".to_string(),
        line: 1,
        params: vec![
            ("a".to_string(), "Int".to_string()),
            ("b".to_string(), "Int".to_string()),
        ],
        return_type: "Int".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
            BinOp::Add,
            sbb(Expr::Ident("a".to_string())),
            sbb(Expr::Ident("b".to_string())),
        )))),
        resolution: None,
    };
    ctx.fn_defs.push(add.clone());
    ctx.items.push(TopLevel::FnDef(add));
    ctx.items.push(TopLevel::Verify(VerifyBlock {
        fn_name: "add".to_string(),
        line: 1,
        // Two sampled cases: add(1,2) = add(2,1) and add(2,3) = add(3,2).
        cases: vec![
            (
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(1))),
                        sb(Expr::Literal(Literal::Int(2))),
                    ],
                )),
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(2))),
                        sb(Expr::Literal(Literal::Int(1))),
                    ],
                )),
            ),
            (
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(2))),
                        sb(Expr::Literal(Literal::Int(3))),
                    ],
                )),
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(3))),
                        sb(Expr::Literal(Literal::Int(2))),
                    ],
                )),
            ),
        ],
        case_spans: vec![],
        case_givens: vec![],
        case_hostile_origins: vec![],
        case_hostile_profiles: vec![],
        kind: VerifyKind::Law(Box::new(VerifyLaw {
            name: "commutative".to_string(),
            givens: vec![
                VerifyGiven {
                    name: "a".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
                },
                VerifyGiven {
                    name: "b".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![
                        sb(Expr::Literal(Literal::Int(2))),
                        sb(Expr::Literal(Literal::Int(3))),
                    ]),
                },
            ],
            when: None,
            // Universal statement: add a b = add b a.
            lhs: sb(Expr::FnCall(
                sbb(Expr::Ident("add".to_string())),
                vec![
                    sb(Expr::Ident("a".to_string())),
                    sb(Expr::Ident("b".to_string())),
                ],
            )),
            rhs: sb(Expr::FnCall(
                sbb(Expr::Ident("add".to_string())),
                vec![
                    sb(Expr::Ident("b".to_string())),
                    sb(Expr::Ident("a".to_string())),
                ],
            )),
            sample_guards: vec![],
        })),
        trace: false,
        cases_givens: vec![],
    }));
    ctx
}
// The prelude must normalize Lean's float-to-string rendering so it matches
// the runtime's formatting.
#[test]
fn prelude_normalizes_float_string_format() {
    let prelude = generate_prelude();
    assert!(
        prelude.contains("private def normalizeFloatString (s : String) : String :="),
        "missing normalizeFloatString helper in prelude"
    );
    assert!(
        prelude.contains(
            "def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)"
        ),
        "String.fromFloat should normalize Lean float formatting"
    );
}
// Char.fromCode must reject out-of-range and surrogate code points.
#[test]
fn prelude_validates_char_from_code_unicode_bounds() {
    let prelude = generate_prelude();
    assert!(
        prelude.contains("if n < 0 || n > 1114111 then none"),
        "Char.fromCode should reject code points above Unicode max"
    );
    assert!(
        prelude.contains("else if n >= 55296 && n <= 57343 then none"),
        "Char.fromCode should reject surrogate code points"
    );
}
// The all-helpers prelude must include the AverMap set/get helper lemmas.
#[test]
fn prelude_includes_map_set_helper_lemmas() {
    let prelude = generate_prelude();
    assert!(
        prelude.contains("theorem has_set_self [DecidableEq α]"),
        "missing AverMap.has_set_self helper theorem"
    );
    assert!(
        prelude.contains("theorem get_set_self [DecidableEq α]"),
        "missing AverMap.get_set_self helper theorem"
    );
}
// Programs that never use maps must not drag in the AverMap prelude.
#[test]
fn lean_output_without_map_usage_omits_map_prelude() {
    let mut ctx = ctx_from_source(
        r#"
module NoMap
intent = "Simple pure program without maps."
fn addOne(n: Int) -> Int
n + 1
verify addOne
addOne(1) => 2
"#,
        "nomap",
    );
    let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
    let lean = generated_lean_file(&out);
    assert!(
        !lean.contains("namespace AverMap"),
        "did not expect AverMap prelude in program without map usage:\n{}",
        lean
    );
}
// Default verify emission uses `native_decide`.
#[test]
fn transpile_emits_native_decide_for_verify_by_default() {
    let mut ctx = empty_ctx_with_verify_case();
    let out = transpile(&mut ctx);
    let lean = out
        .files
        .iter()
        .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
        .expect("expected generated Lean file");
    assert!(lean.contains("example : 1 = 1 := by native_decide"));
}
// VerifyEmitMode::Sorry swaps the tactic for `sorry`.
#[test]
fn transpile_can_emit_sorry_for_verify_when_requested() {
    let mut ctx = empty_ctx_with_verify_case();
    let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::Sorry);
    let lean = out
        .files
        .iter()
        .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
        .expect("expected generated Lean file");
    assert!(lean.contains("example : 1 = 1 := by sorry"));
}
// VerifyEmitMode::TheoremSkeleton emits named theorems with `sorry` bodies.
#[test]
fn transpile_can_emit_theorem_skeletons_for_verify() {
    let mut ctx = empty_ctx_with_verify_case();
    let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
    let lean = out
        .files
        .iter()
        .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
        .expect("expected generated Lean file");
    assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
    assert!(lean.contains(" sorry"));
}
// Two verify blocks for the same function must continue a single global
// numbering (f_verify_1, f_verify_2), not restart at 1 per block.
#[test]
fn theorem_skeleton_numbering_is_global_per_function_across_verify_blocks() {
    let mut ctx = empty_ctx_with_two_verify_blocks_same_fn();
    let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
    let lean = out
        .files
        .iter()
        .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
        .expect("expected generated Lean file");
    assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
    assert!(lean.contains("theorem f_verify_2 : 2 = 2 := by"));
}
// A verify law produces a named universal theorem (auto-proved via simp +
// Int.add_comm here), per-sample `native_decide` theorems, and explanatory
// comments describing the law and its given domains.
#[test]
fn transpile_emits_named_theorems_for_verify_law() {
    let mut ctx = empty_ctx_with_verify_law();
    let out = transpile(&mut ctx);
    let lean = out
        .files
        .iter()
        .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
        .expect("expected generated Lean file");
    assert!(lean.contains("-- verify law add.commutative (2 cases)"));
    assert!(lean.contains("-- given a: Int = 1..2"));
    assert!(lean.contains("-- given b: Int = [2, 3]"));
    assert!(lean.contains(
        "theorem add_law_commutative : ∀ (a : Int) (b : Int), add a b = add b a := by"
    ));
    assert!(lean.contains(" intro a b"));
    assert!(lean.contains(" simp [add, Int.add_comm]"));
    assert!(lean.contains(
        "theorem add_law_commutative_sample_1 : add 1 2 = add 2 1 := by native_decide"
    ));
    assert!(lean.contains(
        "theorem add_law_commutative_sample_2 : add 2 3 = add 3 2 := by native_decide"
    ));
}
// Spot-checks several expected prelude theorems/definitions by name.
#[test]
fn generate_prelude_emits_int_roundtrip_theorem() {
    let lean = generate_prelude();
    assert!(lean.contains(
        "theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n"
    ));
    assert!(lean.contains("theorem String.intercalate_empty_chars (s : String) :"));
    assert!(lean.contains("def splitOnCharGo"));
    assert!(lean.contains("theorem split_single_char_append"));
    assert!(lean.contains("theorem split_intercalate_trailing_single_char"));
    assert!(lean.contains("namespace AverDigits"));
    assert!(lean.contains("theorem String.charAt_length_none (s : String)"));
    assert!(lean.contains("theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->"));
}
// A law with a `when` clause emits the guard as a hypothesis in both the
// universal theorem and every per-sample theorem.
#[test]
fn transpile_emits_guarded_theorems_for_verify_law_when_clause() {
    let mut ctx = ctx_from_source(
        r#"
module GuardedLaw
intent =
"verify law with precondition"
fn pickGreater(a: Int, b: Int) -> Int
match a > b
true -> a
false -> b
verify pickGreater law ordered
given a: Int = [1, 2]
given b: Int = [1, 2]
when a > b
pickGreater(a, b) => a
"#,
        "guarded_law",
    );
    let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
    let lean = generated_lean_file(&out);
    assert!(lean.contains("-- when (a > b)"));
    assert!(lean.contains(
        "theorem pickGreater_law_ordered : ∀ (a : Int) (b : Int), a = 1 ∨ a = 2 -> b = 1 ∨ b = 2 -> (a > b) = true -> pickGreater a b = a := by"
    ));
    assert!(lean.contains(
        "theorem pickGreater_law_ordered_sample_1 : (1 > 1) = true -> pickGreater 1 1 = 1 := by"
    ));
    assert!(lean.contains(
        "theorem pickGreater_law_ordered_sample_4 : (2 > 2) = true -> pickGreater 2 2 = 2 := by"
    ));
}
// A law whose name matches the canonical `<fn>Spec` shape gets
// `<fn>_eq_<fn>Spec` theorem names instead of the generic law naming.
#[test]
fn transpile_uses_spec_theorem_names_for_declared_spec_laws() {
    let mut ctx = ctx_from_source(
        r#"
module SpecDemo
intent =
"spec demo"
fn absVal(x: Int) -> Int
match x < 0
true -> 0 - x
false -> x
fn absValSpec(x: Int) -> Int
match x < 0
true -> 0 - x
false -> x
verify absVal law absValSpec
given x: Int = [-2, -1, 0, 1, 2]
absVal(x) => absValSpec(x)
"#,
        "spec_demo",
    );
    let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
    let lean = generated_lean_file(&out);
    assert!(lean.contains("-- verify law absVal.spec absValSpec (5 cases)"));
    assert!(
        lean.contains(
            "theorem absVal_eq_absValSpec : ∀ (x : Int), absVal x = absValSpec x := by"
        )
    );
    assert!(lean.contains("theorem absVal_eq_absValSpec_checked_domain :"));
    assert!(lean.contains("theorem absVal_eq_absValSpec_sample_1 :"));
    assert!(!lean.contains("theorem absVal_law_absValSpec :"));
}
// A spec-named law whose shape is NOT the canonical `f(x) => fSpec(x)` form
// (extra argument here) keeps the regular `<fn>_law_<name>` naming.
#[test]
fn transpile_keeps_noncanonical_spec_laws_as_regular_law_names() {
    let mut ctx = ctx_from_source(
        r#"
module SpecLawShape
intent =
"shape probe"
fn foo(x: Int) -> Int
x + 1
fn fooSpec(seed: Int, x: Int) -> Int
x + seed
verify foo law fooSpec
given x: Int = [1, 2]
foo(x) => fooSpec(1, x)
"#,
        "spec_law_shape",
    );
    let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
    let lean = generated_lean_file(&out);
    assert!(lean.contains("-- verify law foo.fooSpec (2 cases)"));
    assert!(lean.contains("theorem foo_law_fooSpec : ∀ (x : Int), foo x = fooSpec 1 x := by"));
    assert!(!lean.contains("theorem foo_eq_fooSpec :"));
}
// Auto mode proves a linear-Int canonical spec law via `change` + `omega`
// rather than leaving the universal theorem omitted.
#[test]
fn transpile_auto_proves_linear_int_canonical_spec_law_in_auto_mode() {
    let mut ctx = ctx_from_source(
        r#"
module SpecGap
intent =
"nontrivial canonical spec law"
fn inc(x: Int) -> Int
x + 1
fn incSpec(x: Int) -> Int
x + 2 - 1
verify inc law incSpec
given x: Int = [0, 1, 2]
inc(x) => incSpec(x)
"#,
        "spec_gap",
    );
    let out = transpile(&mut ctx);
    let lean = generated_lean_file(&out);
    assert!(lean.contains("-- verify law inc.spec incSpec (3 cases)"));
    assert!(lean.contains("theorem inc_eq_incSpec : ∀ (x : Int), inc x = incSpec x := by"));
    assert!(lean.contains("change (x + 1) = ((x + 2) - 1)"));
    assert!(lean.contains("omega"));
    assert!(!lean.contains(
        "-- universal theorem inc_eq_incSpec omitted: sampled law shape is not auto-proved yet"
    ));
    assert!(lean.contains("theorem inc_eq_incSpec_checked_domain :"));
}
// Auto mode proves a guarded canonical spec law with `simpa` directly,
// without case-splitting on the sampled domain hypothesis.
#[test]
fn transpile_auto_proves_guarded_canonical_spec_law_in_auto_mode() {
    let mut ctx = ctx_from_source(
        r#"
module GuardedSpecGap
intent =
"guarded canonical spec law"
fn clampNonNegative(x: Int) -> Int
match x < 0
true -> 0
false -> x
fn clampNonNegativeSpec(x: Int) -> Int
match x < 0
true -> 0
false -> x
verify clampNonNegative law clampNonNegativeSpec
given x: Int = [-2, -1, 0, 1, 2]
when x >= 0
clampNonNegative(x) => clampNonNegativeSpec(x)
"#,
        "guarded_spec_gap",
    );
    let out = transpile(&mut ctx);
    let lean = generated_lean_file(&out);
    assert!(lean.contains("-- when (x >= 0)"));
    assert!(lean.contains(
        "theorem clampNonNegative_eq_clampNonNegativeSpec : ∀ (x : Int), x = (-2) ∨ x = (-1) ∨ x = 0 ∨ x = 1 ∨ x = 2 -> (x >= 0) = true -> clampNonNegative x = clampNonNegativeSpec x := by"
    ));
    assert!(lean.contains("intro x h_x h_when"));
    assert!(lean.contains("simpa [clampNonNegative, clampNonNegativeSpec]"));
    assert!(!lean.contains(
        "-- universal theorem clampNonNegative_eq_clampNonNegativeSpec omitted: sampled law shape is not auto-proved yet"
    ));
    assert!(!lean.contains("cases h_x"));
}
// Auto mode falls back to `simp [f, fSpec]` for a nonlinear (but
// simp-normalizable) canonical spec law.
#[test]
fn transpile_auto_proves_simp_normalized_canonical_spec_law_in_auto_mode() {
    let mut ctx = ctx_from_source(
        r#"
module SpecGapNonlinear
intent =
"nonlinear canonical spec law"
fn square(x: Int) -> Int
x * x
fn squareSpec(x: Int) -> Int
x * x + 0
verify square law squareSpec
given x: Int = [0, 1, 2]
square(x) => squareSpec(x)
"#,
        "spec_gap_nonlinear",
    );
    let out = transpile(&mut ctx);
    let lean = generated_lean_file(&out);
    assert!(lean.contains("-- verify law square.spec squareSpec (3 cases)"));
    assert!(
        lean.contains(
            "theorem square_eq_squareSpec : ∀ (x : Int), square x = squareSpec x := by"
        )
    );
    assert!(lean.contains("simp [square, squareSpec]"));
    assert!(!lean.contains(
        "-- universal theorem square_eq_squareSpec omitted: sampled law shape is not auto-proved yet"
    ));
    assert!(lean.contains("theorem square_eq_squareSpec_checked_domain :"));
    assert!(lean.contains("theorem square_eq_squareSpec_sample_1 :"));
}
// A law whose lhs and rhs are syntactically identical (x = x) is proved
// with plain `rfl`.
#[test]
fn transpile_auto_proves_reflexive_law_with_rfl() {
    let mut ctx = empty_ctx();
    ctx.items.push(TopLevel::Verify(VerifyBlock {
        fn_name: "idLaw".to_string(),
        line: 1,
        cases: vec![(
            sb(Expr::Literal(Literal::Int(1))),
            sb(Expr::Literal(Literal::Int(1))),
        )],
        case_spans: vec![],
        case_givens: vec![],
        case_hostile_origins: vec![],
        case_hostile_profiles: vec![],
        kind: VerifyKind::Law(Box::new(VerifyLaw {
            name: "reflexive".to_string(),
            givens: vec![VerifyGiven {
                name: "x".to_string(),
                type_name: "Int".to_string(),
                domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
            }],
            when: None,
            lhs: sb(Expr::Ident("x".to_string())),
            rhs: sb(Expr::Ident("x".to_string())),
            sample_guards: vec![],
        })),
        trace: false,
        cases_givens: vec![],
    }));
    let out = transpile(&mut ctx);
    let lean = out
        .files
        .iter()
        .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
        .expect("expected generated Lean file");
    assert!(lean.contains("theorem idLaw_law_reflexive : ∀ (x : Int), x = x := by"));
    assert!(lean.contains(" intro x"));
    assert!(lean.contains(" rfl"));
}
// The right-identity law `add a 0 = a` on the Int.add wrapper is
// auto-proved via `simp [add]`.
#[test]
fn transpile_auto_proves_identity_law_for_int_add_wrapper() {
    let mut ctx = empty_ctx_with_verify_law();
    ctx.items.push(TopLevel::Verify(VerifyBlock {
        fn_name: "add".to_string(),
        line: 10,
        cases: vec![(
            sb(Expr::FnCall(
                sbb(Expr::Ident("add".to_string())),
                vec![
                    sb(Expr::Literal(Literal::Int(1))),
                    sb(Expr::Literal(Literal::Int(0))),
                ],
            )),
            sb(Expr::Literal(Literal::Int(1))),
        )],
        case_spans: vec![],
        case_givens: vec![],
        case_hostile_origins: vec![],
        case_hostile_profiles: vec![],
        kind: VerifyKind::Law(Box::new(VerifyLaw {
            name: "identityZero".to_string(),
            givens: vec![VerifyGiven {
                name: "a".to_string(),
                type_name: "Int".to_string(),
                domain: VerifyGivenDomain::Explicit(vec![
                    sb(Expr::Literal(Literal::Int(0))),
                    sb(Expr::Literal(Literal::Int(1))),
                ]),
            }],
            when: None,
            lhs: sb(Expr::FnCall(
                sbb(Expr::Ident("add".to_string())),
                vec![
                    sb(Expr::Ident("a".to_string())),
                    sb(Expr::Literal(Literal::Int(0))),
                ],
            )),
            rhs: sb(Expr::Ident("a".to_string())),
            sample_guards: vec![],
        })),
        trace: false,
        cases_givens: vec![],
    }));
    let out = transpile(&mut ctx);
    let lean = out
        .files
        .iter()
        .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
        .expect("expected generated Lean file");
    assert!(lean.contains("theorem add_law_identityZero : ∀ (a : Int), add a 0 = a := by"));
    assert!(lean.contains(" intro a"));
    assert!(lean.contains(" simp [add]"));
}
// The associativity law `add (add a b) c = add a (add b c)` on the Int.add
// wrapper is auto-proved via `simp [add, Int.add_assoc]`.
#[test]
fn transpile_auto_proves_associative_law_for_int_add_wrapper() {
    let mut ctx = empty_ctx_with_verify_law();
    ctx.items.push(TopLevel::Verify(VerifyBlock {
        fn_name: "add".to_string(),
        line: 20,
        // Single sampled case: add(add(1,2),3) = add(1,add(2,3)).
        cases: vec![(
            sb(Expr::FnCall(
                sbb(Expr::Ident("add".to_string())),
                vec![
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("add".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(1))),
                            sb(Expr::Literal(Literal::Int(2))),
                        ],
                    )),
                    sb(Expr::Literal(Literal::Int(3))),
                ],
            )),
            sb(Expr::FnCall(
                sbb(Expr::Ident("add".to_string())),
                vec![
                    sb(Expr::Literal(Literal::Int(1))),
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("add".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(2))),
                            sb(Expr::Literal(Literal::Int(3))),
                        ],
                    )),
                ],
            )),
        )],
        case_spans: vec![],
        case_givens: vec![],
        case_hostile_origins: vec![],
        case_hostile_profiles: vec![],
        kind: VerifyKind::Law(Box::new(VerifyLaw {
            name: "associative".to_string(),
            givens: vec![
                VerifyGiven {
                    name: "a".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                        1,
                    )))]),
                },
                VerifyGiven {
                    name: "b".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                        2,
                    )))]),
                },
                VerifyGiven {
                    name: "c".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                        3,
                    )))]),
                },
            ],
            when: None,
            lhs: sb(Expr::FnCall(
                sbb(Expr::Ident("add".to_string())),
                vec![
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("add".to_string())),
                        vec![
                            sb(Expr::Ident("a".to_string())),
                            sb(Expr::Ident("b".to_string())),
                        ],
                    )),
                    sb(Expr::Ident("c".to_string())),
                ],
            )),
            rhs: sb(Expr::FnCall(
                sbb(Expr::Ident("add".to_string())),
                vec![
                    sb(Expr::Ident("a".to_string())),
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("add".to_string())),
                        vec![
                            sb(Expr::Ident("b".to_string())),
                            sb(Expr::Ident("c".to_string())),
                        ],
                    )),
                ],
            )),
            sample_guards: vec![],
        })),
        trace: false,
        cases_givens: vec![],
    }));
    let out = transpile(&mut ctx);
    let lean = out
        .files
        .iter()
        .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
        .expect("expected generated Lean file");
    assert!(lean.contains(
        "theorem add_law_associative : ∀ (a : Int) (b : Int) (c : Int), add (add a b) c = add a (add b c) := by"
    ));
    assert!(lean.contains(" intro a b c"));
    assert!(lean.contains(" simp [add, Int.add_assoc]"));
}
// `sub` laws: right-identity (sub a 0 = a) closes with `simp [sub]`, and
// anti-commutativity (sub a b = -(sub b a)) closes via `Int.neg_sub`.
#[test]
fn transpile_auto_proves_sub_laws() {
    let mut ctx = empty_ctx();
    // fn sub(a: Int, b: Int) -> Int = a - b
    let sub = FnDef {
        name: "sub".to_string(),
        line: 1,
        params: vec![
            ("a".to_string(), "Int".to_string()),
            ("b".to_string(), "Int".to_string()),
        ],
        return_type: "Int".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
            BinOp::Sub,
            sbb(Expr::Ident("a".to_string())),
            sbb(Expr::Ident("b".to_string())),
        )))),
        resolution: None,
    };
    ctx.fn_defs.push(sub.clone());
    ctx.items.push(TopLevel::FnDef(sub));
    // Law 1: rightIdentity — sampled as sub 2 0 => 2.
    ctx.items.push(TopLevel::Verify(VerifyBlock {
        fn_name: "sub".to_string(),
        line: 10,
        cases: vec![(
            sb(Expr::FnCall(
                sbb(Expr::Ident("sub".to_string())),
                vec![
                    sb(Expr::Literal(Literal::Int(2))),
                    sb(Expr::Literal(Literal::Int(0))),
                ],
            )),
            sb(Expr::Literal(Literal::Int(2))),
        )],
        case_spans: vec![],
        case_givens: vec![],
        case_hostile_origins: vec![],
        case_hostile_profiles: vec![],
        kind: VerifyKind::Law(Box::new(VerifyLaw {
            name: "rightIdentity".to_string(),
            givens: vec![VerifyGiven {
                name: "a".to_string(),
                type_name: "Int".to_string(),
                domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
            }],
            when: None,
            lhs: sb(Expr::FnCall(
                sbb(Expr::Ident("sub".to_string())),
                vec![
                    sb(Expr::Ident("a".to_string())),
                    sb(Expr::Literal(Literal::Int(0))),
                ],
            )),
            rhs: sb(Expr::Ident("a".to_string())),
            sample_guards: vec![],
        })),
        trace: false,
        cases_givens: vec![],
    }));
    // Law 2: antiCommutative — sampled as sub 2 1 => 0 - sub 1 2.
    ctx.items.push(TopLevel::Verify(VerifyBlock {
        fn_name: "sub".to_string(),
        line: 20,
        cases: vec![(
            sb(Expr::FnCall(
                sbb(Expr::Ident("sub".to_string())),
                vec![
                    sb(Expr::Literal(Literal::Int(2))),
                    sb(Expr::Literal(Literal::Int(1))),
                ],
            )),
            sb(Expr::BinOp(
                BinOp::Sub,
                sbb(Expr::Literal(Literal::Int(0))),
                sbb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(1))),
                        sb(Expr::Literal(Literal::Int(2))),
                    ],
                )),
            )),
        )],
        case_spans: vec![],
        case_givens: vec![],
        case_hostile_origins: vec![],
        case_hostile_profiles: vec![],
        kind: VerifyKind::Law(Box::new(VerifyLaw {
            name: "antiCommutative".to_string(),
            givens: vec![
                VerifyGiven {
                    name: "a".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
                },
                VerifyGiven {
                    name: "b".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(1)))]),
                },
            ],
            when: None,
            lhs: sb(Expr::FnCall(
                sbb(Expr::Ident("sub".to_string())),
                vec![
                    sb(Expr::Ident("a".to_string())),
                    sb(Expr::Ident("b".to_string())),
                ],
            )),
            // rhs written as 0 - sub b a (negation spelled via Sub from 0).
            rhs: sb(Expr::BinOp(
                BinOp::Sub,
                sbb(Expr::Literal(Literal::Int(0))),
                sbb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Ident("b".to_string())),
                        sb(Expr::Ident("a".to_string())),
                    ],
                )),
            )),
            sample_guards: vec![],
        })),
        trace: false,
        cases_givens: vec![],
    }));
    let out = transpile(&mut ctx);
    // Use the shared helper instead of an inline find_map, consistent with
    // the other tests in this module.
    let lean = generated_lean_file(&out);
    assert!(lean.contains("theorem sub_law_rightIdentity : ∀ (a : Int), sub a 0 = a := by"));
    assert!(lean.contains(" simp [sub]"));
    assert!(lean.contains(
        "theorem sub_law_antiCommutative : ∀ (a : Int) (b : Int), sub a b = (-sub b a) := by"
    ));
    assert!(lean.contains(" simpa [sub] using (Int.neg_sub b a).symm"));
}
// A law equating a unary wrapper (addOne n) with a call to another pure
// function (add n 1) must be auto-proved by unfolding both definitions.
#[test]
fn transpile_auto_proves_unary_wrapper_equivalence_law() {
    let mut ctx = empty_ctx();
    // fn add(a: Int, b: Int) -> Int = a + b
    let add = FnDef {
        name: "add".to_string(),
        line: 1,
        params: vec![
            ("a".to_string(), "Int".to_string()),
            ("b".to_string(), "Int".to_string()),
        ],
        return_type: "Int".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
            BinOp::Add,
            sbb(Expr::Ident("a".to_string())),
            sbb(Expr::Ident("b".to_string())),
        )))),
        resolution: None,
    };
    // fn addOne(n: Int) -> Int = n + 1
    let add_one = FnDef {
        name: "addOne".to_string(),
        line: 2,
        params: vec![("n".to_string(), "Int".to_string())],
        return_type: "Int".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
            BinOp::Add,
            sbb(Expr::Ident("n".to_string())),
            sbb(Expr::Literal(Literal::Int(1))),
        )))),
        resolution: None,
    };
    ctx.fn_defs.push(add.clone());
    ctx.fn_defs.push(add_one.clone());
    ctx.items.push(TopLevel::FnDef(add));
    ctx.items.push(TopLevel::FnDef(add_one));
    // Law identityViaAdd: addOne n = add n 1, sampled at n = 2.
    ctx.items.push(TopLevel::Verify(VerifyBlock {
        fn_name: "addOne".to_string(),
        line: 3,
        cases: vec![(
            sb(Expr::FnCall(
                sbb(Expr::Ident("addOne".to_string())),
                vec![sb(Expr::Literal(Literal::Int(2)))],
            )),
            sb(Expr::FnCall(
                sbb(Expr::Ident("add".to_string())),
                vec![
                    sb(Expr::Literal(Literal::Int(2))),
                    sb(Expr::Literal(Literal::Int(1))),
                ],
            )),
        )],
        case_spans: vec![],
        case_givens: vec![],
        case_hostile_origins: vec![],
        case_hostile_profiles: vec![],
        kind: VerifyKind::Law(Box::new(VerifyLaw {
            name: "identityViaAdd".to_string(),
            givens: vec![VerifyGiven {
                name: "n".to_string(),
                type_name: "Int".to_string(),
                domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
            }],
            when: None,
            lhs: sb(Expr::FnCall(
                sbb(Expr::Ident("addOne".to_string())),
                vec![sb(Expr::Ident("n".to_string()))],
            )),
            rhs: sb(Expr::FnCall(
                sbb(Expr::Ident("add".to_string())),
                vec![
                    sb(Expr::Ident("n".to_string())),
                    sb(Expr::Literal(Literal::Int(1))),
                ],
            )),
            sample_guards: vec![],
        })),
        trace: false,
        cases_givens: vec![],
    }));
    let out = transpile(&mut ctx);
    // Use the shared helper instead of an inline find_map, consistent with
    // the other tests in this module.
    let lean = generated_lean_file(&out);
    assert!(
        lean.contains(
            "theorem addOne_law_identityViaAdd : ∀ (n : Int), addOne n = add n 1 := by"
        )
    );
    // Proof unfolds both definitions.
    assert!(lean.contains(" simp [addOne, add]"));
}
// Laws stated directly on Map builtins (has/get after set at the same key)
// must be auto-proved via the AverMap lemmas has_set_self / get_set_self.
#[test]
fn transpile_auto_proves_direct_map_set_laws() {
    let mut ctx = empty_ctx();
    // Small closures to keep the AST fixtures readable: Map.set / Map.has /
    // Map.get / Option.Some / Map.empty builtin calls.
    let map_set = |m: Spanned<Expr>, k: Spanned<Expr>, v: Spanned<Expr>| {
        sb(Expr::FnCall(
            sbb(Expr::Attr(
                sbb(Expr::Ident("Map".to_string())),
                "set".to_string(),
            )),
            vec![m, k, v],
        ))
    };
    let map_has = |m: Spanned<Expr>, k: Spanned<Expr>| {
        sb(Expr::FnCall(
            sbb(Expr::Attr(
                sbb(Expr::Ident("Map".to_string())),
                "has".to_string(),
            )),
            vec![m, k],
        ))
    };
    let map_get = |m: Spanned<Expr>, k: Spanned<Expr>| {
        sb(Expr::FnCall(
            sbb(Expr::Attr(
                sbb(Expr::Ident("Map".to_string())),
                "get".to_string(),
            )),
            vec![m, k],
        ))
    };
    let some = |v: Spanned<Expr>| {
        sb(Expr::FnCall(
            sbb(Expr::Attr(
                sbb(Expr::Ident("Option".to_string())),
                "Some".to_string(),
            )),
            vec![v],
        ))
    };
    let map_empty = || {
        sb(Expr::FnCall(
            sbb(Expr::Attr(
                sbb(Expr::Ident("Map".to_string())),
                "empty".to_string(),
            )),
            vec![],
        ))
    };
    // Law 1: setHasKey — Map.has(Map.set(m, k, v), k) => true.
    ctx.items.push(TopLevel::Verify(VerifyBlock {
        fn_name: "map".to_string(),
        line: 1,
        cases: vec![(
            map_has(
                map_set(
                    sb(Expr::Ident("m".to_string())),
                    sb(Expr::Ident("k".to_string())),
                    sb(Expr::Ident("v".to_string())),
                ),
                sb(Expr::Ident("k".to_string())),
            ),
            sb(Expr::Literal(Literal::Bool(true))),
        )],
        case_spans: vec![],
        case_givens: vec![],
        case_hostile_origins: vec![],
        case_hostile_profiles: vec![],
        kind: VerifyKind::Law(Box::new(VerifyLaw {
            name: "setHasKey".to_string(),
            givens: vec![
                VerifyGiven {
                    name: "m".to_string(),
                    type_name: "Map<String, Int>".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                },
                VerifyGiven {
                    name: "k".to_string(),
                    type_name: "String".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
                        "a".to_string(),
                    )))]),
                },
                VerifyGiven {
                    name: "v".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(1)))]),
                },
            ],
            when: None,
            lhs: map_has(
                map_set(
                    sb(Expr::Ident("m".to_string())),
                    sb(Expr::Ident("k".to_string())),
                    sb(Expr::Ident("v".to_string())),
                ),
                sb(Expr::Ident("k".to_string())),
            ),
            rhs: sb(Expr::Literal(Literal::Bool(true))),
            sample_guards: vec![],
        })),
        trace: false,
        cases_givens: vec![],
    }));
    // Law 2: setGetKey — Map.get(Map.set(m, k, v), k) => Option.Some(v).
    ctx.items.push(TopLevel::Verify(VerifyBlock {
        fn_name: "map".to_string(),
        line: 2,
        cases: vec![(
            map_get(
                map_set(
                    sb(Expr::Ident("m".to_string())),
                    sb(Expr::Ident("k".to_string())),
                    sb(Expr::Ident("v".to_string())),
                ),
                sb(Expr::Ident("k".to_string())),
            ),
            some(sb(Expr::Ident("v".to_string()))),
        )],
        case_spans: vec![],
        case_givens: vec![],
        case_hostile_origins: vec![],
        case_hostile_profiles: vec![],
        kind: VerifyKind::Law(Box::new(VerifyLaw {
            name: "setGetKey".to_string(),
            givens: vec![
                VerifyGiven {
                    name: "m".to_string(),
                    type_name: "Map<String, Int>".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                },
                VerifyGiven {
                    name: "k".to_string(),
                    type_name: "String".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
                        "a".to_string(),
                    )))]),
                },
                VerifyGiven {
                    name: "v".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(1)))]),
                },
            ],
            when: None,
            lhs: map_get(
                map_set(
                    sb(Expr::Ident("m".to_string())),
                    sb(Expr::Ident("k".to_string())),
                    sb(Expr::Ident("v".to_string())),
                ),
                sb(Expr::Ident("k".to_string())),
            ),
            rhs: some(sb(Expr::Ident("v".to_string()))),
            sample_guards: vec![],
        })),
        trace: false,
        cases_givens: vec![],
    }));
    let out = transpile(&mut ctx);
    // Use the shared helper instead of an inline find_map, consistent with
    // the other tests in this module.
    let lean = generated_lean_file(&out);
    assert!(lean.contains("simpa using AverMap.has_set_self m k v"));
    assert!(lean.contains("simpa using AverMap.get_set_self m k v"));
}
// A direct recursive law over a user ADT (mirror (mirror t) = t) must be
// auto-proved by structural induction on the Tree argument, not dropped
// with the "not auto-proved yet" omission comment.
#[test]
fn transpile_auto_proves_direct_recursive_sum_law_by_structural_induction() {
    // Build the context from Aver surface syntax (the raw-string program)
    // rather than hand-assembled AST nodes, focused on `mirror`.
    let mut ctx = ctx_from_source(
        r#"
module Mirror
intent =
"direct recursive sum induction probe"
type Tree
Leaf(Int)
Node(Tree, Tree)
fn mirror(t: Tree) -> Tree
match t
Tree.Leaf(v) -> Tree.Leaf(v)
Tree.Node(left, right) -> Tree.Node(mirror(right), mirror(left))
verify mirror law involutive
given t: Tree = [Tree.Leaf(1), Tree.Node(Tree.Leaf(1), Tree.Leaf(2))]
mirror(mirror(t)) => t
"#,
        "mirror",
    );
    let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
    let lean = generated_lean_file(&out);
    // Universal theorem is emitted …
    assert!(
        lean.contains(
            "theorem mirror_law_involutive : ∀ (t : Tree), mirror (mirror t) = t := by"
        )
    );
    // … and proved by induction with one arm per constructor; the Node arm
    // gets both induction hypotheses (ih0/ih1).
    assert!(lean.contains(" induction t with"));
    assert!(lean.contains(" | leaf f0 => simp [mirror]"));
    assert!(lean.contains(" | node f0 f1 ih0 ih1 => simp_all [mirror]"));
    // The fallback omission marker must NOT appear.
    assert!(!lean.contains(
        "-- universal theorem mirror_law_involutive omitted: sampled law shape is not auto-proved yet"
    ));
}
// Laws over a user function that updates a Map (incCount) must be
// auto-proved by case-splitting on the prior Map.get result and applying
// the AverMap set/get lemmas.
#[test]
fn transpile_auto_proves_map_update_laws() {
    let mut ctx = empty_ctx();
    // Builtin-call builder closures (Map.get/set/has, Option.Some,
    // Option.withDefault, Map.empty) to keep the fixtures readable.
    let map_get = |m: Spanned<Expr>, k: Spanned<Expr>| {
        sb(Expr::FnCall(
            sbb(Expr::Attr(
                sbb(Expr::Ident("Map".to_string())),
                "get".to_string(),
            )),
            vec![m, k],
        ))
    };
    let map_set = |m: Spanned<Expr>, k: Spanned<Expr>, v: Spanned<Expr>| {
        sb(Expr::FnCall(
            sbb(Expr::Attr(
                sbb(Expr::Ident("Map".to_string())),
                "set".to_string(),
            )),
            vec![m, k, v],
        ))
    };
    let map_has = |m: Spanned<Expr>, k: Spanned<Expr>| {
        sb(Expr::FnCall(
            sbb(Expr::Attr(
                sbb(Expr::Ident("Map".to_string())),
                "has".to_string(),
            )),
            vec![m, k],
        ))
    };
    let option_some = |v: Spanned<Expr>| {
        sb(Expr::FnCall(
            sbb(Expr::Attr(
                sbb(Expr::Ident("Option".to_string())),
                "Some".to_string(),
            )),
            vec![v],
        ))
    };
    let option_with_default = |opt: Spanned<Expr>, def: Spanned<Expr>| {
        sb(Expr::FnCall(
            sbb(Expr::Attr(
                sbb(Expr::Ident("Option".to_string())),
                "withDefault".to_string(),
            )),
            vec![opt, def],
        ))
    };
    let map_empty = || {
        sb(Expr::FnCall(
            sbb(Expr::Attr(
                sbb(Expr::Ident("Map".to_string())),
                "empty".to_string(),
            )),
            vec![],
        ))
    };
    // fn addOne(n: Int) -> Int = n + 1
    let add_one = FnDef {
        name: "addOne".to_string(),
        line: 1,
        params: vec![("n".to_string(), "Int".to_string())],
        return_type: "Int".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
            BinOp::Add,
            sbb(Expr::Ident("n".to_string())),
            sbb(Expr::Literal(Literal::Int(1))),
        )))),
        resolution: None,
    };
    ctx.fn_defs.push(add_one.clone());
    ctx.items.push(TopLevel::FnDef(add_one));
    // fn incCount(counts, word): binds current = Map.get(counts, word), then
    // matches Some(n) -> set word (n+1) | None -> set word 1.
    let inc_count = FnDef {
        name: "incCount".to_string(),
        line: 2,
        params: vec![
            ("counts".to_string(), "Map<String, Int>".to_string()),
            ("word".to_string(), "String".to_string()),
        ],
        return_type: "Map<String, Int>".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::Block(vec![
            Stmt::Binding(
                "current".to_string(),
                None,
                map_get(
                    sb(Expr::Ident("counts".to_string())),
                    sb(Expr::Ident("word".to_string())),
                ),
            ),
            Stmt::Expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("current".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Constructor(
                            "Option.Some".to_string(),
                            vec!["n".to_string()],
                        ),
                        body: Box::new(map_set(
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Ident("word".to_string())),
                            sb(Expr::BinOp(
                                BinOp::Add,
                                sbb(Expr::Ident("n".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            )),
                        )),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                    MatchArm {
                        pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
                        body: Box::new(map_set(
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Ident("word".to_string())),
                            sb(Expr::Literal(Literal::Int(1))),
                        )),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                ],
            })),
        ])),
        resolution: None,
    };
    ctx.fn_defs.push(inc_count.clone());
    ctx.items.push(TopLevel::FnDef(inc_count));
    // Law 1: keyPresent — Map.has(incCount(counts, word), word) => true.
    ctx.items.push(TopLevel::Verify(VerifyBlock {
        fn_name: "incCount".to_string(),
        line: 10,
        cases: vec![(
            map_has(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("incCount".to_string())),
                    vec![
                        sb(Expr::Ident("counts".to_string())),
                        sb(Expr::Ident("word".to_string())),
                    ],
                )),
                sb(Expr::Ident("word".to_string())),
            ),
            sb(Expr::Literal(Literal::Bool(true))),
        )],
        case_spans: vec![],
        case_givens: vec![],
        case_hostile_origins: vec![],
        case_hostile_profiles: vec![],
        kind: VerifyKind::Law(Box::new(VerifyLaw {
            name: "keyPresent".to_string(),
            givens: vec![
                VerifyGiven {
                    name: "counts".to_string(),
                    type_name: "Map<String, Int>".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                },
                VerifyGiven {
                    name: "word".to_string(),
                    type_name: "String".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
                        "a".to_string(),
                    )))]),
                },
            ],
            when: None,
            lhs: map_has(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("incCount".to_string())),
                    vec![
                        sb(Expr::Ident("counts".to_string())),
                        sb(Expr::Ident("word".to_string())),
                    ],
                )),
                sb(Expr::Ident("word".to_string())),
            ),
            rhs: sb(Expr::Literal(Literal::Bool(true))),
            sample_guards: vec![],
        })),
        trace: false,
        cases_givens: vec![],
    }));
    // Law 2: existingKeyIncrements — get after incCount at key "a" equals
    // Some(addOne(withDefault(get counts "a", 0))).
    ctx.items.push(TopLevel::Verify(VerifyBlock {
        fn_name: "incCount".to_string(),
        line: 20,
        cases: vec![(
            map_get(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("incCount".to_string())),
                    vec![
                        sb(Expr::Ident("counts".to_string())),
                        sb(Expr::Literal(Literal::Str("a".to_string()))),
                    ],
                )),
                sb(Expr::Literal(Literal::Str("a".to_string()))),
            ),
            option_some(sb(Expr::FnCall(
                sbb(Expr::Ident("addOne".to_string())),
                vec![option_with_default(
                    map_get(
                        sb(Expr::Ident("counts".to_string())),
                        sb(Expr::Literal(Literal::Str("a".to_string()))),
                    ),
                    sb(Expr::Literal(Literal::Int(0))),
                )],
            ))),
        )],
        case_spans: vec![],
        case_givens: vec![],
        case_hostile_origins: vec![],
        case_hostile_profiles: vec![],
        kind: VerifyKind::Law(Box::new(VerifyLaw {
            name: "existingKeyIncrements".to_string(),
            givens: vec![VerifyGiven {
                name: "counts".to_string(),
                type_name: "Map<String, Int>".to_string(),
                domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
            }],
            when: None,
            lhs: map_get(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("incCount".to_string())),
                    vec![
                        sb(Expr::Ident("counts".to_string())),
                        sb(Expr::Literal(Literal::Str("a".to_string()))),
                    ],
                )),
                sb(Expr::Literal(Literal::Str("a".to_string()))),
            ),
            rhs: option_some(sb(Expr::FnCall(
                sbb(Expr::Ident("addOne".to_string())),
                vec![option_with_default(
                    map_get(
                        sb(Expr::Ident("counts".to_string())),
                        sb(Expr::Literal(Literal::Str("a".to_string()))),
                    ),
                    sb(Expr::Literal(Literal::Int(0))),
                )],
            ))),
            sample_guards: vec![],
        })),
        trace: false,
        cases_givens: vec![],
    }));
    let out = transpile(&mut ctx);
    // Use the shared helper instead of an inline find_map, consistent with
    // the other tests in this module.
    let lean = generated_lean_file(&out);
    assert!(
        lean.contains("cases h : AverMap.get counts word <;> simp [AverMap.has_set_self]"),
        "expected keyPresent auto-proof with has_set_self"
    );
    assert!(
        lean.contains("cases h : AverMap.get counts \"a\" <;> simp [AverMap.get_set_self, addOne, incCount]"),
        "expected existingKeyIncrements auto-proof with get_set_self"
    );
}
// Negative Int literals used as call arguments in emitted sample theorems
// must be parenthesized — `add (-2) (-1)` — or the Lean output would not
// parse.
#[test]
fn transpile_parenthesizes_negative_int_call_args_in_law_samples() {
    let mut ctx = empty_ctx();
    // fn add(a: Int, b: Int) -> Int = a + b
    let add = FnDef {
        name: "add".to_string(),
        line: 1,
        params: vec![
            ("a".to_string(), "Int".to_string()),
            ("b".to_string(), "Int".to_string()),
        ],
        return_type: "Int".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
            BinOp::Add,
            sbb(Expr::Ident("a".to_string())),
            sbb(Expr::Ident("b".to_string())),
        )))),
        resolution: None,
    };
    ctx.fn_defs.push(add.clone());
    ctx.items.push(TopLevel::FnDef(add));
    // Commutative law sampled at a = -2, b = -1.
    ctx.items.push(TopLevel::Verify(VerifyBlock {
        fn_name: "add".to_string(),
        line: 1,
        cases: vec![(
            sb(Expr::FnCall(
                sbb(Expr::Ident("add".to_string())),
                vec![
                    sb(Expr::Literal(Literal::Int(-2))),
                    sb(Expr::Literal(Literal::Int(-1))),
                ],
            )),
            sb(Expr::FnCall(
                sbb(Expr::Ident("add".to_string())),
                vec![
                    sb(Expr::Literal(Literal::Int(-1))),
                    sb(Expr::Literal(Literal::Int(-2))),
                ],
            )),
        )],
        case_spans: vec![],
        case_givens: vec![],
        case_hostile_origins: vec![],
        case_hostile_profiles: vec![],
        kind: VerifyKind::Law(Box::new(VerifyLaw {
            name: "commutative".to_string(),
            givens: vec![
                VerifyGiven {
                    name: "a".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(-2)))]),
                },
                VerifyGiven {
                    name: "b".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(-1)))]),
                },
            ],
            when: None,
            lhs: sb(Expr::FnCall(
                sbb(Expr::Ident("add".to_string())),
                vec![
                    sb(Expr::Ident("a".to_string())),
                    sb(Expr::Ident("b".to_string())),
                ],
            )),
            rhs: sb(Expr::FnCall(
                sbb(Expr::Ident("add".to_string())),
                vec![
                    sb(Expr::Ident("b".to_string())),
                    sb(Expr::Ident("a".to_string())),
                ],
            )),
            sample_guards: vec![],
        })),
        trace: false,
        cases_givens: vec![],
    }));
    let out = transpile(&mut ctx);
    // Use the shared helper instead of an inline find_map, consistent with
    // the other tests in this module.
    let lean = generated_lean_file(&out);
    assert!(lean.contains(
        "theorem add_law_commutative_sample_1 : add (-2) (-1) = add (-1) (-2) := by native_decide"
    ));
}
// Sample-theorem numbering restarts per law name: a preceding plain Cases
// verify block must not advance the `_sample_N` counter of a later law.
#[test]
fn verify_law_numbering_is_scoped_per_law_name() {
    let mut ctx = empty_ctx();
    // fn f(x: Int) -> Int = x
    let f = FnDef {
        name: "f".to_string(),
        line: 1,
        params: vec![("x".to_string(), "Int".to_string())],
        return_type: "Int".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::Ident("x".to_string())))),
        resolution: None,
    };
    ctx.fn_defs.push(f.clone());
    ctx.items.push(TopLevel::FnDef(f));
    // Plain Cases verify block first (1 => 1) — must not consume a sample
    // number from the law below.
    ctx.items.push(TopLevel::Verify(VerifyBlock {
        fn_name: "f".to_string(),
        line: 1,
        cases: vec![(
            sb(Expr::Literal(Literal::Int(1))),
            sb(Expr::Literal(Literal::Int(1))),
        )],
        case_spans: vec![],
        case_givens: vec![],
        case_hostile_origins: vec![],
        case_hostile_profiles: vec![],
        kind: VerifyKind::Cases,
        trace: false,
        cases_givens: vec![],
    }));
    // Law "identity" with one sampled case (2 => 2).
    ctx.items.push(TopLevel::Verify(VerifyBlock {
        fn_name: "f".to_string(),
        line: 2,
        cases: vec![(
            sb(Expr::Literal(Literal::Int(2))),
            sb(Expr::Literal(Literal::Int(2))),
        )],
        case_spans: vec![],
        case_givens: vec![],
        case_hostile_origins: vec![],
        case_hostile_profiles: vec![],
        kind: VerifyKind::Law(Box::new(VerifyLaw {
            name: "identity".to_string(),
            givens: vec![VerifyGiven {
                name: "x".to_string(),
                type_name: "Int".to_string(),
                domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
            }],
            when: None,
            lhs: sb(Expr::Ident("x".to_string())),
            rhs: sb(Expr::Ident("x".to_string())),
            sample_guards: vec![],
        })),
        trace: false,
        cases_givens: vec![],
    }));
    let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
    // Use the shared helper instead of an inline find_map, consistent with
    // the other tests in this module.
    let lean = generated_lean_file(&out);
    assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
    assert!(lean.contains("theorem f_law_identity : ∀ (x : Int), x = x := by"));
    // The law's sample numbering starts at 1 despite the earlier Cases block.
    assert!(lean.contains("theorem f_law_identity_sample_1 : 2 = 2 := by"));
    assert!(!lean.contains("theorem f_law_identity_sample_2 : 2 = 2 := by"));
}
// A self-recursive function counting an Int down to 0 must be accepted by
// proof mode and lowered to a fuel-bounded Lean definition seeded with
// natAbs n + 1.
#[test]
fn proof_mode_accepts_single_int_countdown_recursion() {
    let mut ctx = empty_ctx();
    // fn down(n: Int) -> Int: match n { 0 -> 0, _ -> down(n - 1) }
    let down = FnDef {
        name: "down".to_string(),
        line: 1,
        params: vec![("n".to_string(), "Int".to_string())],
        return_type: "Int".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::Match {
            subject: sbb(Expr::Ident("n".to_string())),
            arms: vec![
                MatchArm {
                    pattern: Pattern::Literal(Literal::Int(0)),
                    body: sbb(Expr::Literal(Literal::Int(0))),
                    binding_slots: std::sync::OnceLock::new(),
                },
                MatchArm {
                    pattern: Pattern::Wildcard,
                    body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                        "down".to_string(),
                        vec![sb(Expr::BinOp(
                            BinOp::Sub,
                            sbb(Expr::Ident("n".to_string())),
                            sbb(Expr::Literal(Literal::Int(1))),
                        ))],
                    )))),
                    binding_slots: std::sync::OnceLock::new(),
                },
            ],
        }))),
        resolution: None,
    };
    ctx.items.push(TopLevel::FnDef(down.clone()));
    ctx.fn_defs.push(down);
    ctx.refresh_facts();
    let issues = proof_mode_issues(&ctx);
    assert!(
        issues.is_empty(),
        "expected Int countdown recursion to be accepted, got: {:?}",
        issues
    );
    let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
    // Use the shared helper instead of an inline find_map, consistent with
    // the other tests in this module.
    let lean = generated_lean_file(&out);
    // Fuel helper plus a total wrapper seeded with natAbs n + 1.
    assert!(lean.contains("def down__fuel"));
    assert!(lean.contains("def down (n : Int) : Int :="));
    assert!(lean.contains("down__fuel ((Int.natAbs n) + 1) n"));
}
// The countdown parameter does not have to be the first one: a function
// recursing on its second (Int) parameter must still be accepted and given
// a fuel wrapper.
#[test]
fn proof_mode_accepts_single_int_countdown_on_nonfirst_param() {
    let mut ctx = empty_ctx();
    // fn repeatLike(char: String, n: Int) -> List<String>:
    //   match n <= 0 { true -> [], false -> repeatLike(char, n - 1) }
    let repeat_like = FnDef {
        name: "repeatLike".to_string(),
        line: 1,
        params: vec![
            ("char".to_string(), "String".to_string()),
            ("n".to_string(), "Int".to_string()),
        ],
        return_type: "List<String>".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::Match {
            subject: sbb(Expr::BinOp(
                BinOp::Lte,
                sbb(Expr::Ident("n".to_string())),
                sbb(Expr::Literal(Literal::Int(0))),
            )),
            arms: vec![
                MatchArm {
                    pattern: Pattern::Literal(Literal::Bool(true)),
                    body: sbb(Expr::List(vec![])),
                    binding_slots: std::sync::OnceLock::new(),
                },
                MatchArm {
                    pattern: Pattern::Literal(Literal::Bool(false)),
                    body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                        "repeatLike".to_string(),
                        vec![
                            sb(Expr::Ident("char".to_string())),
                            sb(Expr::BinOp(
                                BinOp::Sub,
                                sbb(Expr::Ident("n".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            )),
                        ],
                    )))),
                    binding_slots: std::sync::OnceLock::new(),
                },
            ],
        }))),
        resolution: None,
    };
    ctx.items.push(TopLevel::FnDef(repeat_like.clone()));
    ctx.fn_defs.push(repeat_like);
    ctx.refresh_facts();
    let issues = proof_mode_issues(&ctx);
    assert!(
        issues.is_empty(),
        "expected non-first Int countdown recursion to be accepted, got: {:?}",
        issues
    );
    let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
    // Use the shared helper instead of an inline find_map, consistent with
    // the other tests in this module.
    let lean = generated_lean_file(&out);
    // Fuel is seeded from the recursing parameter `n`, not the first param.
    assert!(lean.contains("def repeatLike__fuel"));
    assert!(lean.contains("def repeatLike (char : String) (n : Int) : List String :="));
    assert!(lean.contains("repeatLike__fuel ((Int.natAbs n) + 1) char n"));
}
// An Int parameter that *increases* toward 0 under a `< 0` guard (angle
// normalization by +360) must also be accepted and fuel-lowered.
#[test]
fn proof_mode_accepts_negative_guarded_int_ascent() {
    let mut ctx = empty_ctx();
    // fn normalize(angle: Int) -> Int:
    //   match angle < 0 { true -> normalize(angle + 360), false -> angle }
    let normalize = FnDef {
        name: "normalize".to_string(),
        line: 1,
        params: vec![("angle".to_string(), "Int".to_string())],
        return_type: "Int".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::Match {
            subject: sbb(Expr::BinOp(
                BinOp::Lt,
                sbb(Expr::Ident("angle".to_string())),
                sbb(Expr::Literal(Literal::Int(0))),
            )),
            arms: vec![
                MatchArm {
                    pattern: Pattern::Literal(Literal::Bool(true)),
                    body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                        "normalize".to_string(),
                        vec![sb(Expr::BinOp(
                            BinOp::Add,
                            sbb(Expr::Ident("angle".to_string())),
                            sbb(Expr::Literal(Literal::Int(360))),
                        ))],
                    )))),
                    binding_slots: std::sync::OnceLock::new(),
                },
                MatchArm {
                    pattern: Pattern::Literal(Literal::Bool(false)),
                    body: sbb(Expr::Ident("angle".to_string())),
                    binding_slots: std::sync::OnceLock::new(),
                },
            ],
        }))),
        resolution: None,
    };
    ctx.items.push(TopLevel::FnDef(normalize.clone()));
    ctx.fn_defs.push(normalize);
    ctx.refresh_facts();
    let issues = proof_mode_issues(&ctx);
    assert!(
        issues.is_empty(),
        "expected negative-guarded Int ascent recursion to be accepted, got: {:?}",
        issues
    );
    let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
    // Use the shared helper instead of an inline find_map, consistent with
    // the other tests in this module.
    let lean = generated_lean_file(&out);
    assert!(lean.contains("def normalize__fuel"));
    assert!(lean.contains("normalize__fuel ((Int.natAbs angle) + 1) angle"));
}
// Classic structural recursion on a List (recursing on the cons tail) must
// produce no proof-mode issues; no Lean output is inspected here.
#[test]
fn proof_mode_accepts_single_list_structural_recursion() {
    let mut ctx = empty_ctx();
    // fn len(xs: List<Int>) -> Int: match xs { [] -> 0, h :: t -> len(t) }
    let len = FnDef {
        name: "len".to_string(),
        line: 1,
        params: vec![("xs".to_string(), "List<Int>".to_string())],
        return_type: "Int".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::Match {
            subject: sbb(Expr::Ident("xs".to_string())),
            arms: vec![
                MatchArm {
                    pattern: Pattern::EmptyList,
                    body: sbb(Expr::Literal(Literal::Int(0))),
                    binding_slots: std::sync::OnceLock::new(),
                },
                MatchArm {
                    // Cons binds head "h" and tail "t"; recursion is on "t".
                    pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                    body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                        "len".to_string(),
                        vec![sb(Expr::Ident("t".to_string()))],
                    )))),
                    binding_slots: std::sync::OnceLock::new(),
                },
            ],
        }))),
        resolution: None,
    };
    ctx.items.push(TopLevel::FnDef(len.clone()));
    ctx.fn_defs.push(len);
    ctx.refresh_facts();
    let issues = proof_mode_issues(&ctx);
    assert!(
        issues.is_empty(),
        "expected List structural recursion to be accepted, got: {:?}",
        issues
    );
}
// Structural List recursion on the *second* parameter (accumulator-style
// lenFrom) must be accepted and emitted as a total Lean definition with a
// `termination_by xs.length` clause — not a `partial def` escape hatch.
#[test]
fn proof_mode_accepts_single_list_structural_recursion_on_nonfirst_param() {
    let mut ctx = empty_ctx();
    // fn lenFrom(count: Int, xs: List<Int>) -> Int:
    //   match xs { [] -> count, h :: t -> lenFrom(count + 1, t) }
    let len_from = FnDef {
        name: "lenFrom".to_string(),
        line: 1,
        params: vec![
            ("count".to_string(), "Int".to_string()),
            ("xs".to_string(), "List<Int>".to_string()),
        ],
        return_type: "Int".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::Match {
            subject: sbb(Expr::Ident("xs".to_string())),
            arms: vec![
                MatchArm {
                    pattern: Pattern::EmptyList,
                    body: sbb(Expr::Ident("count".to_string())),
                    binding_slots: std::sync::OnceLock::new(),
                },
                MatchArm {
                    pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                    body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                        "lenFrom".to_string(),
                        vec![
                            sb(Expr::BinOp(
                                BinOp::Add,
                                sbb(Expr::Ident("count".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            )),
                            sb(Expr::Ident("t".to_string())),
                        ],
                    )))),
                    binding_slots: std::sync::OnceLock::new(),
                },
            ],
        }))),
        resolution: None,
    };
    ctx.items.push(TopLevel::FnDef(len_from.clone()));
    ctx.fn_defs.push(len_from);
    ctx.refresh_facts();
    let issues = proof_mode_issues(&ctx);
    assert!(
        issues.is_empty(),
        "expected non-first List structural recursion to be accepted, got: {:?}",
        issues
    );
    let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
    let lean = generated_lean_file(&out);
    // Total definition with an explicit termination measure on the list.
    assert!(lean.contains("termination_by xs.length"));
    assert!(!lean.contains("partial def lenFrom"));
}
// The String+position scanning pattern (advance `pos` until String.charAt
// returns None) must be accepted and lowered via a fuel helper rather than
// emitted as a `partial def`.
#[test]
fn proof_mode_accepts_single_string_pos_advance_recursion() {
    let mut ctx = empty_ctx();
    // fn skipWs(s: String, pos: Int) -> Int:
    //   match String.charAt(s, pos) { None -> pos, _ -> skipWs(s, pos + 1) }
    let skip_ws = FnDef {
        name: "skipWs".to_string(),
        line: 1,
        params: vec![
            ("s".to_string(), "String".to_string()),
            ("pos".to_string(), "Int".to_string()),
        ],
        return_type: "Int".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::Match {
            // Subject is the builtin call String.charAt(s, pos).
            subject: sbb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("String".to_string())),
                    "charAt".to_string(),
                )),
                vec![
                    sb(Expr::Ident("s".to_string())),
                    sb(Expr::Ident("pos".to_string())),
                ],
            )),
            arms: vec![
                MatchArm {
                    // End of string: return the current position.
                    pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
                    body: sbb(Expr::Ident("pos".to_string())),
                    binding_slots: std::sync::OnceLock::new(),
                },
                MatchArm {
                    // Any character: advance pos by one and recurse.
                    pattern: Pattern::Wildcard,
                    body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                        "skipWs".to_string(),
                        vec![
                            sb(Expr::Ident("s".to_string())),
                            sb(Expr::BinOp(
                                BinOp::Add,
                                sbb(Expr::Ident("pos".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            )),
                        ],
                    )))),
                    binding_slots: std::sync::OnceLock::new(),
                },
            ],
        }))),
        resolution: None,
    };
    ctx.items.push(TopLevel::FnDef(skip_ws.clone()));
    ctx.fn_defs.push(skip_ws);
    ctx.refresh_facts();
    let issues = proof_mode_issues(&ctx);
    assert!(
        issues.is_empty(),
        "expected String+pos recursion to be accepted, got: {:?}",
        issues
    );
    let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
    let lean = generated_lean_file(&out);
    assert!(lean.contains("def skipWs__fuel"));
    assert!(!lean.contains("partial def skipWs"));
}
// Mutually recursive Int countdown (even/odd calling each other with n - 1)
// must be accepted, with a fuel helper emitted for each function in the
// mutual group and total wrappers seeded from natAbs n + 1.
#[test]
fn proof_mode_accepts_mutual_int_countdown_recursion() {
    let mut ctx = empty_ctx();
    // fn even(n: Int) -> Bool: match n { 0 -> true, _ -> odd(n - 1) }
    let even = FnDef {
        name: "even".to_string(),
        line: 1,
        params: vec![("n".to_string(), "Int".to_string())],
        return_type: "Bool".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::Match {
            subject: sbb(Expr::Ident("n".to_string())),
            arms: vec![
                MatchArm {
                    pattern: Pattern::Literal(Literal::Int(0)),
                    body: sbb(Expr::Literal(Literal::Bool(true))),
                    binding_slots: std::sync::OnceLock::new(),
                },
                MatchArm {
                    pattern: Pattern::Wildcard,
                    body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                        "odd".to_string(),
                        vec![sb(Expr::BinOp(
                            BinOp::Sub,
                            sbb(Expr::Ident("n".to_string())),
                            sbb(Expr::Literal(Literal::Int(1))),
                        ))],
                    )))),
                    binding_slots: std::sync::OnceLock::new(),
                },
            ],
        }))),
        resolution: None,
    };
    // fn odd(n: Int) -> Bool: match n { 0 -> false, _ -> even(n - 1) }
    let odd = FnDef {
        name: "odd".to_string(),
        line: 2,
        params: vec![("n".to_string(), "Int".to_string())],
        return_type: "Bool".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::Match {
            subject: sbb(Expr::Ident("n".to_string())),
            arms: vec![
                MatchArm {
                    pattern: Pattern::Literal(Literal::Int(0)),
                    body: sbb(Expr::Literal(Literal::Bool(false))),
                    binding_slots: std::sync::OnceLock::new(),
                },
                MatchArm {
                    pattern: Pattern::Wildcard,
                    body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                        "even".to_string(),
                        vec![sb(Expr::BinOp(
                            BinOp::Sub,
                            sbb(Expr::Ident("n".to_string())),
                            sbb(Expr::Literal(Literal::Int(1))),
                        ))],
                    )))),
                    binding_slots: std::sync::OnceLock::new(),
                },
            ],
        }))),
        resolution: None,
    };
    ctx.items.push(TopLevel::FnDef(even.clone()));
    ctx.items.push(TopLevel::FnDef(odd.clone()));
    ctx.fn_defs.push(even);
    ctx.fn_defs.push(odd);
    ctx.refresh_facts();
    let issues = proof_mode_issues(&ctx);
    assert!(
        issues.is_empty(),
        "expected mutual Int countdown recursion to be accepted, got: {:?}",
        issues
    );
    let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
    let lean = generated_lean_file(&out);
    // Both members of the mutual group get fuel helpers; the wrapper for
    // `even` seeds fuel from its own argument.
    assert!(lean.contains("def even__fuel"));
    assert!(lean.contains("def odd__fuel"));
    assert!(lean.contains("def even (n : Int) : Bool :="));
    assert!(lean.contains("even__fuel ((Int.natAbs n) + 1) n"));
}
// Mutual String+pos recursion where one edge keeps `pos` the same (f -> g)
// and the other strictly advances it (g -> f with pos + 1) must be accepted
// — the ranking over the mutual group handles the "same" edge — and both
// functions get fuel helpers rather than `partial def`.
#[test]
fn proof_mode_accepts_mutual_string_pos_recursion_with_ranked_same_edges() {
    let mut ctx = empty_ctx();
    // fn f(s: String, pos: Int) -> Int:
    //   match pos >= 3 { true -> pos, _ -> g(s, pos) }   // pos unchanged
    let f = FnDef {
        name: "f".to_string(),
        line: 1,
        params: vec![
            ("s".to_string(), "String".to_string()),
            ("pos".to_string(), "Int".to_string()),
        ],
        return_type: "Int".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::Match {
            subject: sbb(Expr::BinOp(
                BinOp::Gte,
                sbb(Expr::Ident("pos".to_string())),
                sbb(Expr::Literal(Literal::Int(3))),
            )),
            arms: vec![
                MatchArm {
                    pattern: Pattern::Literal(Literal::Bool(true)),
                    body: sbb(Expr::Ident("pos".to_string())),
                    binding_slots: std::sync::OnceLock::new(),
                },
                MatchArm {
                    pattern: Pattern::Wildcard,
                    body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                        "g".to_string(),
                        vec![
                            sb(Expr::Ident("s".to_string())),
                            sb(Expr::Ident("pos".to_string())),
                        ],
                    )))),
                    binding_slots: std::sync::OnceLock::new(),
                },
            ],
        }))),
        resolution: None,
    };
    // fn g(s: String, pos: Int) -> Int:
    //   match pos >= 3 { true -> pos, _ -> f(s, pos + 1) } // pos advances
    let g = FnDef {
        name: "g".to_string(),
        line: 2,
        params: vec![
            ("s".to_string(), "String".to_string()),
            ("pos".to_string(), "Int".to_string()),
        ],
        return_type: "Int".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::Match {
            subject: sbb(Expr::BinOp(
                BinOp::Gte,
                sbb(Expr::Ident("pos".to_string())),
                sbb(Expr::Literal(Literal::Int(3))),
            )),
            arms: vec![
                MatchArm {
                    pattern: Pattern::Literal(Literal::Bool(true)),
                    body: sbb(Expr::Ident("pos".to_string())),
                    binding_slots: std::sync::OnceLock::new(),
                },
                MatchArm {
                    pattern: Pattern::Wildcard,
                    body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                        "f".to_string(),
                        vec![
                            sb(Expr::Ident("s".to_string())),
                            sb(Expr::BinOp(
                                BinOp::Add,
                                sbb(Expr::Ident("pos".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            )),
                        ],
                    )))),
                    binding_slots: std::sync::OnceLock::new(),
                },
            ],
        }))),
        resolution: None,
    };
    ctx.items.push(TopLevel::FnDef(f.clone()));
    ctx.items.push(TopLevel::FnDef(g.clone()));
    ctx.fn_defs.push(f);
    ctx.fn_defs.push(g);
    ctx.refresh_facts();
    let issues = proof_mode_issues(&ctx);
    assert!(
        issues.is_empty(),
        "expected mutual String+pos recursion to be accepted, got: {:?}",
        issues
    );
    let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
    let lean = generated_lean_file(&out);
    assert!(lean.contains("def f__fuel"));
    assert!(lean.contains("def g__fuel"));
    assert!(!lean.contains("partial def f"));
}
#[test]
fn proof_mode_accepts_mutual_ranked_sizeof_recursion() {
    let mut ctx = empty_ctx();
    // `f(xs: List<Int>)`: immediately tail-calls `g("acc", xs)` without
    // shrinking `xs` — acceptance relies on the ranked sizeOf measure, not
    // on `f` itself making progress.
    let f = FnDef {
        name: "f".to_string(),
        line: 1,
        params: vec![("xs".to_string(), "List<Int>".to_string())],
        return_type: "Int".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::TailCall(Box::new(
            TailCallData::new(
                "g".to_string(),
                vec![
                    sb(Expr::Literal(Literal::Str("acc".to_string()))),
                    sb(Expr::Ident("xs".to_string())),
                ],
            ),
        ))))),
        resolution: None,
    };
    // `g(acc: String, xs: List<Int>)`: structural recursion on the list —
    // the empty list returns 0, the cons arm tail-calls `f` on the strictly
    // smaller tail `t`.
    let g = FnDef {
        name: "g".to_string(),
        line: 2,
        params: vec![
            ("acc".to_string(), "String".to_string()),
            ("xs".to_string(), "List<Int>".to_string()),
        ],
        return_type: "Int".to_string(),
        effects: vec![],
        desc: None,
        body: Rc::new(FnBody::from_expr(sb(Expr::Match {
            subject: sbb(Expr::Ident("xs".to_string())),
            arms: vec![
                MatchArm {
                    // `[] -> 0` — base case.
                    pattern: Pattern::EmptyList,
                    body: sbb(Expr::Literal(Literal::Int(0))),
                    binding_slots: std::sync::OnceLock::new(),
                },
                MatchArm {
                    // `h :: t -> f(t)` — recursion on the tail only.
                    pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                    body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                        "f".to_string(),
                        vec![sb(Expr::Ident("t".to_string()))],
                    )))),
                    binding_slots: std::sync::OnceLock::new(),
                },
            ],
        }))),
        resolution: None,
    };
    // Register both functions and refresh derived facts before analysis.
    ctx.items.push(TopLevel::FnDef(f.clone()));
    ctx.items.push(TopLevel::FnDef(g.clone()));
    ctx.fn_defs.push(f);
    ctx.fn_defs.push(g);
    ctx.refresh_facts();
    // The ranked-sizeOf mutual pair must pass the proof-subset check ...
    let issues = proof_mode_issues(&ctx);
    assert!(
        issues.is_empty(),
        "expected mutual ranked-sizeOf recursion to be accepted, got: {:?}",
        issues
    );
    // ... and export as a fuelized `mutual` block with no `partial def`s.
    let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
    let lean = generated_lean_file(&out);
    assert!(lean.contains("mutual"));
    assert!(lean.contains("def f__fuel"));
    assert!(lean.contains("def g__fuel"));
    assert!(!lean.contains("partial def f"));
    assert!(!lean.contains("partial def g"));
}
#[test]
fn proof_mode_rejects_recursive_pure_functions() {
    // A pure function that unconditionally calls itself (no base case, no
    // shrinking argument) must be flagged as outside the proof subset.
    let mut ctx = empty_ctx();
    // Body: `loop(n)` — a direct self-call.
    let body = Rc::new(FnBody::from_expr(sb(Expr::FnCall(
        sbb(Expr::Ident("loop".to_string())),
        vec![sb(Expr::Ident("n".to_string()))],
    ))));
    let self_call = FnDef {
        name: "loop".to_string(),
        line: 1,
        params: vec![("n".to_string(), "Int".to_string())],
        return_type: "Int".to_string(),
        effects: vec![],
        desc: None,
        body,
        resolution: None,
    };
    ctx.items.push(TopLevel::FnDef(self_call.clone()));
    ctx.fn_defs.push(self_call);
    ctx.refresh_facts();
    let issues = proof_mode_issues(&ctx);
    let has_blocker = issues
        .iter()
        .any(|issue| issue.contains("outside proof subset"));
    assert!(
        has_blocker,
        "expected recursive function blocker, got: {:?}",
        issues
    );
}
#[test]
fn proof_mode_allows_recursive_types() {
    // A self-referential sum type (`Node = Cons(Node)`) must not trigger the
    // DecidableEq-shim blocker in the proof-subset analysis.
    let mut ctx = empty_ctx();
    let node_type = TypeDef::Sum {
        name: "Node".to_string(),
        variants: vec![TypeVariant {
            name: "Cons".to_string(),
            fields: vec!["Node".to_string()],
        }],
        line: 1,
    };
    ctx.items.push(TopLevel::TypeDef(node_type.clone()));
    ctx.type_defs.push(node_type);
    ctx.refresh_facts();
    let issues = proof_mode_issues(&ctx);
    let has_type_blocker = issues
        .iter()
        .any(|issue| issue.contains("recursive types require unsafe DecidableEq shim"));
    assert!(
        !has_type_blocker,
        "did not expect recursive type blocker, got: {:?}",
        issues
    );
}
#[test]
fn law_auto_example_exports_real_proof_artifacts() {
    // Transpiling the law_auto formal example in proof mode must emit real
    // theorem artifacts: universal laws, the AverMap lemma they lean on, a
    // sampled instance, and at least one native_decide discharge.
    let mut project = ctx_from_source(
        include_str!("../../../examples/formal/law_auto.av"),
        "law_auto",
    );
    let output = transpile_for_proof_mode(&mut project, VerifyEmitMode::NativeDecide);
    let lean_src = generated_lean_file(&output);
    for needle in [
        "theorem add_law_commutative :",
        "theorem id'_law_reflexive : ∀ (x : Int), x = x := by",
        "theorem incCount_law_keyPresent :",
        "AverMap.has_set_self",
        "theorem add_law_commutative_sample_1 :",
        ":= by native_decide",
    ] {
        assert!(lean_src.contains(needle));
    }
}
#[test]
fn json_example_stays_inside_proof_subset() {
    // The shipped JSON example must produce zero proof-subset diagnostics.
    let mut project = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
    project.refresh_facts();
    let diagnostics = proof_mode_issues(&project);
    assert!(
        diagnostics.is_empty(),
        "expected json example to stay inside proof subset, got: {:?}",
        diagnostics
    );
}
#[test]
fn json_example_uses_total_defs_and_domain_guarded_laws_in_proof_mode() {
    let mut ctx = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
    let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
    let lean = generated_lean_file(&out);
    // All recursion is exported total: fuelized parsers, no `partial def`.
    assert!(!lean.contains("partial def"));
    assert!(lean.contains("def skipWs__fuel"));
    assert!(lean.contains("def parseValue__fuel"));
    assert!(lean.contains("def toString' (j : Json) : String :="));
    // A synthesized measure over object entries, wired into the jsonObject
    // case of the overall measure.
    assert!(
        lean.contains(
            "def averMeasureJsonEntries_String (items : List (String × Json)) : Nat :="
        )
    );
    assert!(lean.contains(
        "| .jsonObject x0 => (averMeasureJsonEntries_String (AverMap.entries x0)) + 1"
    ));
    // Roundtrip laws are domain-guarded by `jsonRoundtripSafe`; no hint
    // comments and no leftover private aux theorem may remain.
    assert!(lean.contains("-- when jsonRoundtripSafe j"));
    assert!(!lean.contains("-- hint: verify law '"));
    assert!(!lean.contains("private theorem toString'_law_parseRoundtrip_aux"));
    assert!(
        lean.contains(
            "theorem toString'_law_parseRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
        )
    );
    assert!(lean.contains(
        "jsonRoundtripSafe j = true -> fromString (toString' j) = Except.ok j := by"
    ));
    assert!(
        lean.contains("theorem finishFloat_law_fromCanonicalFloat : ∀ (f : Float), f = 3.5 ∨")
    );
    assert!(lean.contains("theorem finishInt_law_fromCanonicalInt_checked_domain :"));
    assert!(lean.contains(
        "theorem toString'_law_parseValueRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
    ));
    // Sampled instances are still emitted and closed via `native_decide`.
    assert!(lean.contains("theorem toString'_law_parseRoundtrip_sample_1 :"));
    assert!(lean.contains(
        "example : fromString \"null\" = Except.ok Json.jsonNull := by native_decide"
    ));
}
#[test]
fn transpile_injects_builtin_network_types_and_vector_get_support() {
    // Referencing HttpResponse/HttpRequest/Tcp.Connection and Vector.get in
    // Aver source must make the transpiler inject the matching builtin Lean
    // structures; the dotted `Tcp.Connection` name is flattened to
    // `Tcp_Connection` in the generated Lean.
    let mut ctx = ctx_from_source(
        r#"
fn firstOrMissing(xs: Vector<String>) -> Result<String, String>
Option.toResult(Vector.get(xs, 0), "missing")
fn defaultHeaders() -> Map<String, List<String>>
{"content-type" => ["application/json"]}
fn mkResponse(body: String) -> HttpResponse
HttpResponse(status = 200, body = body, headers = defaultHeaders())
fn requestPath(req: HttpRequest) -> String
req.path
fn connPort(conn: Tcp.Connection) -> Int
conn.port
"#,
        "network_helpers",
    );
    let out = transpile(&mut ctx);
    let lean = generated_lean_file(&out);
    assert!(lean.contains("structure HttpResponse where"));
    assert!(lean.contains("structure HttpRequest where"));
    assert!(lean.contains("structure Tcp_Connection where"));
    assert!(lean.contains("port : Int"));
    // `Map<String, List<String>>` lowers to `List (String × List String)`.
    assert!(lean.contains("List (String × List String)"));
}
#[test]
fn law_auto_example_has_no_sorry_in_proof_mode() {
    // Every law in law_auto is auto-provable, so the Lean export must never
    // fall back to `sorry`.
    let mut project = ctx_from_source(
        include_str!("../../../examples/formal/law_auto.av"),
        "law_auto",
    );
    let output = transpile_for_proof_mode(&mut project, VerifyEmitMode::NativeDecide);
    let lean_src = generated_lean_file(&output);
    assert!(
        !lean_src.contains("sorry"),
        "expected law_auto proof export to avoid sorry, got:\n{}",
        lean_src
    );
}
#[test]
// NOTE: renamed from `map_example_has_no_sorry_in_proof_mode` — the body
// deliberately asserts that the generated Lean DOES contain `sorry` (at
// least one universal law is left unproved), so the old name directly
// contradicted the behavior this test pins down.
fn map_example_keeps_sorry_for_unproved_laws_in_proof_mode() {
    let mut ctx = ctx_from_source(include_str!("../../../examples/data/map.av"), "map");
    ctx.refresh_facts();
    // The example must still sit inside the proof subset ...
    let issues = proof_mode_issues(&ctx);
    assert!(
        issues.is_empty(),
        "expected map example to stay inside proof subset, got: {:?}",
        issues
    );
    let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
    let lean = generated_lean_file(&out);
    // ... exporting the universal theorem (whose proof still contains
    // `sorry` somewhere in the file), plus concrete sampled instances and
    // the AverMap lemmas the proofs rely on.
    assert!(lean.contains("theorem incCount_law_trackedCountStepsByOne :"));
    assert!(lean.contains("sorry"));
    assert!(lean.contains("theorem countWords_law_presenceMatchesContains_sample_1 :"));
    assert!(lean.contains("theorem countWords_law_trackedWordCount_sample_1 :"));
    assert!(lean.contains("AverMap.has_set_self"));
    assert!(lean.contains("AverMap.get_set_self"));
}
#[test]
fn spec_laws_example_has_no_sorry_in_proof_mode() {
    // spec_laws must stay inside the proof subset, export without `sorry`,
    // and produce both function-vs-spec equivalence theorems.
    let mut project = ctx_from_source(
        include_str!("../../../examples/formal/spec_laws.av"),
        "spec_laws",
    );
    project.refresh_facts();
    let diagnostics = proof_mode_issues(&project);
    assert!(
        diagnostics.is_empty(),
        "expected spec_laws example to stay inside proof subset, got: {:?}",
        diagnostics
    );
    let output = transpile_for_proof_mode(&mut project, VerifyEmitMode::NativeDecide);
    let lean_src = generated_lean_file(&output);
    assert!(
        !lean_src.contains("sorry"),
        "expected spec_laws proof export to avoid sorry, got:\n{}",
        lean_src
    );
    for theorem in [
        "theorem absVal_eq_absValSpec :",
        "theorem clampNonNegative_eq_clampNonNegativeSpec :",
    ] {
        assert!(lean_src.contains(theorem));
    }
}
#[test]
fn rle_example_exports_sampled_roundtrip_laws_without_sorry() {
    // The rle example's *universal* roundtrip theorems are not auto-proved,
    // so the export is expected to contain `sorry` for those; the *sampled*
    // instances checked below are closed concretely via `native_decide`
    // (hence "without sorry" in the test name refers to the samples only).
    let mut ctx = ctx_from_source(include_str!("../../../examples/data/rle.av"), "rle");
    let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
    let lean = generated_lean_file(&out);
    assert!(
        lean.contains("sorry"),
        "expected rle proof export to contain sorry for unproved universal theorems"
    );
    // Sampled roundtrip laws are exported as closed native_decide theorems.
    assert!(lean.contains(
        "theorem encode_law_roundtrip_sample_1 : decode (encode []) = [] := by native_decide"
    ));
    assert!(lean.contains(
        "theorem encodeString_law_string_roundtrip_sample_1 : decodeString (encodeString \"\") = \"\" := by native_decide"
    ));
}
#[test]
fn fibonacci_example_uses_fuelized_int_countdown_in_proof_mode() {
    // The tail-recursive fibTR worker must export as a total fuelized
    // definition seeded with `(Int.natAbs n) + 1`, never as `partial def`.
    let mut project = ctx_from_source(
        include_str!("../../../examples/data/fibonacci.av"),
        "fibonacci",
    );
    let output = transpile_for_proof_mode(&mut project, VerifyEmitMode::NativeDecide);
    let lean_src = generated_lean_file(&output);
    for needle in [
        "def fibTR__fuel",
        "def fibTR (n : Int) (a : Int) (b : Int) : Int :=",
        "fibTR__fuel ((Int.natAbs n) + 1) n a b",
    ] {
        assert!(lean_src.contains(needle));
    }
    assert!(!lean_src.contains("partial def fibTR"));
}
#[test]
fn fibonacci_example_stays_inside_proof_subset() {
    // The fibonacci example must produce zero proof-subset diagnostics.
    let mut project = ctx_from_source(
        include_str!("../../../examples/data/fibonacci.av"),
        "fibonacci",
    );
    project.refresh_facts();
    let diagnostics = proof_mode_issues(&project);
    assert!(
        diagnostics.is_empty(),
        "expected fibonacci example to stay inside proof subset, got: {:?}",
        diagnostics
    );
}
#[test]
fn fibonacci_example_matches_general_linear_recurrence_shapes() {
    // The three fibonacci definitions must each match the recurrence shape
    // detector that the auto-proof pipeline expects for them.
    let ctx = ctx_from_source(
        include_str!("../../../examples/data/fibonacci.av"),
        "fibonacci",
    );
    // Fetch a parsed function definition by name; panics if it is missing.
    let lookup = |name: &str| ctx.fn_defs.iter().find(|fd| fd.name == name).unwrap();
    assert!(recurrence::detect_tailrec_int_linear_pair_wrapper(lookup("fib")).is_some());
    assert!(recurrence::detect_tailrec_int_linear_pair_worker(lookup("fibTR")).is_some());
    assert!(recurrence::detect_second_order_int_linear_recurrence(lookup("fibSpec")).is_some());
}
#[test]
fn fibonacci_example_auto_proves_general_linear_recurrence_spec_law() {
    // fib vs fibSpec should be proved through the general linear-recurrence
    // pipeline: a Nat-indexed spec, private shift/helper lemmas, and a closed
    // universal theorem — with no omission marker and no partial fibSpec.
    let mut project = ctx_from_source(
        include_str!("../../../examples/data/fibonacci.av"),
        "fibonacci",
    );
    let output = transpile_for_proof_mode(&mut project, VerifyEmitMode::NativeDecide);
    let lean_src = generated_lean_file(&output);
    for needle in [
        "private def fibSpec__nat : Nat -> Int",
        "private theorem fib_eq_fibSpec__worker_nat_shift",
        "private theorem fib_eq_fibSpec__helper_nat",
        "private theorem fib_eq_fibSpec__helper_seed",
        "theorem fib_eq_fibSpec : ∀ (n : Int), fib n = fibSpec n := by",
    ] {
        assert!(lean_src.contains(needle));
    }
    assert!(!lean_src.contains("partial def fibSpec"));
    assert!(!lean_src.contains(
        "-- universal theorem fib_eq_fibSpec omitted: sampled law shape is not auto-proved yet"
    ));
}
#[test]
fn pell_like_example_auto_proves_same_general_shape() {
    // A Pell-style recurrence has the same pair-state worker/wrapper shape
    // as fibonacci but different coefficients (`a + 2 * b`); it must flow
    // through the same general linear-recurrence auto-proof pipeline.
    let mut ctx = ctx_from_source(
        r#"
module Pell
intent =
"linear recurrence probe"
fn pellTR(n: Int, a: Int, b: Int) -> Int
match n
0 -> a
_ -> pellTR(n - 1, b, a + 2 * b)
fn pell(n: Int) -> Int
match n < 0
true -> 0
false -> pellTR(n, 0, 1)
fn pellSpec(n: Int) -> Int
match n < 0
true -> 0
false -> match n
0 -> 0
1 -> 1
_ -> pellSpec(n - 2) + 2 * pellSpec(n - 1)
verify pell law pellSpec
given n: Int = [0, 1, 2, 3]
pell(n) => pellSpec(n)
"#,
        "pell",
    );
    ctx.refresh_facts();
    // The module must be accepted by the proof-subset analysis ...
    let issues = proof_mode_issues(&ctx);
    assert!(
        issues.is_empty(),
        "expected pell example to stay inside proof subset, got: {:?}",
        issues
    );
    // ... and produce the Nat-indexed spec, the shift lemma, and a closed
    // universal theorem with no omission marker.
    let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
    let lean = generated_lean_file(&out);
    assert!(lean.contains("private def pellSpec__nat : Nat -> Int"));
    assert!(lean.contains("private theorem pell_eq_pellSpec__worker_nat_shift"));
    assert!(lean.contains("theorem pell_eq_pellSpec : ∀ (n : Int), pell n = pellSpec n := by"));
    assert!(!lean.contains(
        "-- universal theorem pell_eq_pellSpec omitted: sampled law shape is not auto-proved yet"
    ));
}
#[test]
fn nonlinear_pair_state_recurrence_is_not_auto_proved_as_linear_shape() {
    // `weirdTR` multiplies its pair state (`a * b`), so it is not an affine
    // linear recurrence — the linear-recurrence prover must not claim it.
    let mut ctx = ctx_from_source(
        r#"
module WeirdRec
intent =
"reject nonlinear pair-state recurrence from linear recurrence prover"
fn weirdTR(n: Int, a: Int, b: Int) -> Int
match n
0 -> a
_ -> weirdTR(n - 1, b, a * b)
fn weird(n: Int) -> Int
match n < 0
true -> 0
false -> weirdTR(n, 0, 1)
fn weirdSpec(n: Int) -> Int
match n < 0
true -> 0
false -> match n
0 -> 0
1 -> 1
_ -> weirdSpec(n - 1) * weirdSpec(n - 2)
verify weird law weirdSpec
given n: Int = [0, 1, 2, 3]
weird(n) => weirdSpec(n)
"#,
        "weirdrec",
    );
    let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
    let lean = generated_lean_file(&out);
    // The universal law remains unproved (`sorry` present), no shift lemma
    // is generated, but the sampled instance is still exported.
    assert!(lean.contains("sorry"));
    assert!(!lean.contains("private theorem weird_eq_weirdSpec__worker_nat_shift"));
    assert!(lean.contains("theorem weird_eq_weirdSpec_sample_1 :"));
}
#[test]
fn date_example_stays_inside_proof_subset() {
    // The date example must pass the proof-subset check and export every
    // definition total, keeping parseIntSlice's exact signature.
    let mut project = ctx_from_source(include_str!("../../../examples/data/date.av"), "date");
    project.refresh_facts();
    let diagnostics = proof_mode_issues(&project);
    assert!(
        diagnostics.is_empty(),
        "expected date example to stay inside proof subset, got: {:?}",
        diagnostics
    );
    let output = transpile_for_proof_mode(&mut project, VerifyEmitMode::NativeDecide);
    let lean_src = generated_lean_file(&output);
    assert!(!lean_src.contains("partial def"));
    assert!(
        lean_src.contains("def parseIntSlice (s : String) (from' : Int) (to : Int) : Int :=")
    );
}
#[test]
fn temperature_example_stays_inside_proof_subset() {
    // temperature must pass the proof-subset check, export fully total, and
    // keep its `verify` examples as Lean `example` declarations.
    let mut project = ctx_from_source(
        include_str!("../../../examples/core/temperature.av"),
        "temperature",
    );
    project.refresh_facts();
    let diagnostics = proof_mode_issues(&project);
    assert!(
        diagnostics.is_empty(),
        "expected temperature example to stay inside proof subset, got: {:?}",
        diagnostics
    );
    let output = transpile_for_proof_mode(&mut project, VerifyEmitMode::NativeDecide);
    let lean_src = generated_lean_file(&output);
    assert!(!lean_src.contains("partial def"));
    assert!(
        lean_src.contains("example : celsiusToFahr 0.0 = 32.0 := by native_decide"),
        "expected verify examples to survive proof export, got:\n{}",
        lean_src
    );
}
#[test]
fn quicksort_example_stays_inside_proof_subset() {
    // quicksort must pass the proof-subset check; isOrderedFrom exports as a
    // total def carrying an explicit termination measure, not `partial def`.
    let mut project = ctx_from_source(
        include_str!("../../../examples/data/quicksort.av"),
        "quicksort",
    );
    project.refresh_facts();
    let diagnostics = proof_mode_issues(&project);
    assert!(
        diagnostics.is_empty(),
        "expected quicksort example to stay inside proof subset, got: {:?}",
        diagnostics
    );
    let output = transpile_for_proof_mode(&mut project, VerifyEmitMode::NativeDecide);
    let lean_src = generated_lean_file(&output);
    assert!(lean_src.contains("def isOrderedFrom"));
    assert!(!lean_src.contains("partial def isOrderedFrom"));
    assert!(lean_src.contains("termination_by xs.length"));
}
#[test]
fn grok_s_language_example_uses_total_ranked_sizeof_mutual_recursion() {
    let mut ctx = ctx_from_source(
        include_str!("../../../examples/core/grok_s_language.av"),
        "grok_s_language",
    );
    ctx.refresh_facts();
    // The example must be accepted by the proof-subset analysis.
    let issues = proof_mode_issues(&ctx);
    assert!(
        issues.is_empty(),
        "expected grok_s_language example to stay inside proof subset, got: {:?}",
        issues
    );
    let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
    let lean = generated_lean_file(&out);
    // The mutual recursion exports as a fuelized `mutual` block: no
    // `partial def eval`, and no sizeOf-tuple termination_by clause.
    assert!(lean.contains("mutual"));
    assert!(lean.contains("def eval__fuel"));
    assert!(lean.contains("def parseListItems__fuel"));
    assert!(!lean.contains("partial def eval"));
    assert!(!lean.contains("termination_by (sizeOf e,"));
    // Roundtrip laws are domain-guarded by `validSymbolNames`, the private
    // aux theorem is gone, and sampled instances are still emitted.
    assert!(lean.contains("-- when validSymbolNames e"));
    assert!(!lean.contains("private theorem toString'_law_parseRoundtrip_aux"));
    assert!(lean.contains(
        "theorem toString'_law_parseRoundtrip : ∀ (e : Sexpr), e = Sexpr.atomNum 42 ∨"
    ));
    assert!(
        lean.contains("validSymbolNames e = true -> parse (toString' e) = Except.ok e := by")
    );
    assert!(lean.contains("theorem toString'_law_parseSexprRoundtrip :"));
    assert!(lean.contains("theorem toString'_law_parseRoundtrip_sample_1 :"));
}
#[test]
fn lambda_example_keeps_only_eval_outside_proof_subset() {
    let mut project = ctx_from_source(include_str!("../../../examples/core/lambda.av"), "lambda");
    project.refresh_facts();
    // Exactly one blocker is expected — `eval` itself, with the full list of
    // currently supported recursion shapes spelled out in the message.
    let expected_blocker = "recursive function 'eval' is outside proof subset (currently supported: Int countdown, second-order affine Int recurrences with pair-state worker, structural recursion on List/recursive ADTs, String+position, mutual Int countdown, mutual String+position, and ranked sizeOf recursion)".to_string();
    assert_eq!(proof_mode_issues(&project), vec![expected_blocker]);
    // Every other recursive helper is fuelized; only `eval` stays partial.
    let output = transpile_for_proof_mode(&mut project, VerifyEmitMode::NativeDecide);
    let lean_src = generated_lean_file(&output);
    for fueled in ["def termToString__fuel", "def subst__fuel", "def countS__fuel"] {
        assert!(lean_src.contains(fueled));
    }
    for name in ["termToString", "subst", "countS"] {
        assert!(!lean_src.contains(&format!("partial def {name}")));
    }
    assert!(lean_src.contains("partial def eval"));
}
#[test]
fn mission_control_example_stays_inside_proof_subset() {
    // mission_control must pass the proof-subset check; normalizeAngle is
    // exported fuelized rather than as `partial def`.
    let mut project = ctx_from_source(
        include_str!("../../../examples/apps/mission_control.av"),
        "mission_control",
    );
    project.refresh_facts();
    let diagnostics = proof_mode_issues(&project);
    assert!(
        diagnostics.is_empty(),
        "expected mission_control example to stay inside proof subset, got: {:?}",
        diagnostics
    );
    let output = transpile_for_proof_mode(&mut project, VerifyEmitMode::NativeDecide);
    let lean_src = generated_lean_file(&output);
    assert!(!lean_src.contains("partial def normalizeAngle"));
    assert!(lean_src.contains("def normalizeAngle__fuel"));
}
#[test]
fn notepad_store_example_stays_inside_proof_subset() {
    let mut ctx = ctx_from_source(
        include_str!("../../../examples/apps/notepad/store.av"),
        "notepad_store",
    );
    ctx.refresh_facts();
    // The store module must pass the proof-subset check.
    let issues = proof_mode_issues(&ctx);
    assert!(
        issues.is_empty(),
        "expected notepad/store example to stay inside proof subset, got: {:?}",
        issues
    );
    let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
    let lean = generated_lean_file(&out);
    // Line (de)serialization exports total, returning Except for failures.
    assert!(lean.contains("def deserializeLine (line : String) : Except String Note :="));
    assert!(lean.contains("Except String (List Note)"));
    assert!(!lean.contains("partial def deserializeLine"));
    // Roundtrip laws are guarded by the *RoundtripSafe domain predicates.
    assert!(lean.contains("-- when noteRoundtripSafe note"));
    assert!(lean.contains("-- when notesRoundtripSafe notes"));
    assert!(lean.contains(
        "theorem serializeLine_law_lineRoundtrip : ∀ (note : Note), note = { id' := 1, title := \"Hello\", body := \"World\" : Note } ∨"
    ));
    assert!(lean.contains(
        "theorem serializeLines_law_notesRoundtrip : ∀ (notes : List Note), notes = [] ∨"
    ));
    assert!(lean.contains("notesRoundtripSafe notes = true ->"));
    assert!(lean.contains("parseNotes (s!\"{String.intercalate \"\\n\" (serializeLines notes)}\\n\") = Except.ok notes"));
    // Sampled instances of both roundtrip laws are emitted as well.
    assert!(lean.contains("theorem serializeLine_law_lineRoundtrip_sample_1 :"));
    assert!(lean.contains("theorem serializeLines_law_notesRoundtrip_sample_1 :"));
}
}