1mod builtins;
7mod expr;
8mod law_auto;
9mod pattern;
10pub(crate) mod recurrence;
11mod shared;
12mod toplevel;
13mod types;
14
15use std::collections::{HashMap, HashSet};
16
17use crate::ast::{Expr, FnDef, Spanned, TopLevel, TypeDef, VerifyKind};
18use crate::call_graph;
19use crate::codegen::{CodegenContext, ProjectOutput};
20
/// Strategy for discharging `verify` obligations in the emitted Lean code.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum VerifyEmitMode {
    /// Close each obligation with Lean's `native_decide` tactic (the same
    /// tactic the prelude's Float smoke checks use); the default mode used
    /// by `transpile`.
    NativeDecide,
    /// Close obligations with `sorry` placeholders — presumably for fast
    /// builds where proofs are intentionally skipped; TODO confirm against
    /// the emit sites.
    Sorry,
    /// Emit theorem statements as unproven skeletons for manual completion —
    /// assumption based on the variant name; verify against the emit sites.
    TheoremSkeleton,
}
33
34pub use crate::codegen::recursion::{ProofModeIssue, RecursionPlan};
39
/// Banner placed at the top of every generated Lean file; silences the
/// unused-variables linter, which machine-generated code trips constantly.
const LEAN_PRELUDE_HEADER: &str = r#"-- Generated by the Aver → Lean 4 transpiler
-- Pure core logic plus Oracle-lifted classified effects

set_option linter.unusedVariables false

-- Prelude: helper definitions for Aver builtins"#;
46
/// Float prelude: `Int → Float` coercion plus the `AverFloat` namespace,
/// whose `toInt`/`floor`/`ceil`/`round` replicate Rust's saturating
/// `f64 as i64` semantics. Edge cases are pinned by `native_decide`
/// examples so any drift fails the Lake build.
const LEAN_PRELUDE_FLOAT_COE: &str = r#"instance : Coe Int Float := ⟨fun n => Float.ofInt n⟩

def Float.fromInt (n : Int) : Float := Float.ofInt n

-- Aver's Float-to-Int operations match the runtime semantics
-- (`f64::floor() as i64` in VM, Rust codegen, WASM — all three use the
-- same IEEE 754 floor/round/ceil followed by Rust's saturating
-- `f64 as i64` cast):
-- * finite values within [i64::MIN, i64::MAX]: truncate toward zero
-- * finite > i64::MAX: saturate to i64::MAX
-- * finite < i64::MIN: saturate to i64::MIN
-- * +Inf: saturate to i64::MAX
-- * -Inf: saturate to i64::MIN
-- * NaN: 0 (Rust 1.45+ defined behavior)
--
-- Lean's `Float.floor : Float → Float` doesn't directly satisfy Aver's
-- `Float.floor : Float → Int`, so we synthesize via the saturating
-- `Float.toUInt64` (returns 0 for NaN/negative) with sign handling and
-- explicit bounds. Per-case correctness is asserted by `native_decide`
-- examples below; total semantic agreement with `f64 as i64` would
-- need a formal IEEE spec in Lean, which is out of scope.
--
-- Asymmetry with the Dafny backend: Lean has IEEE 754 `Float` natively
-- (`double` at runtime), so we use it. Dafny only offers mathematical
-- `real` (Cauchy-style, no NaN/Inf/overflow), which is a fundamental
-- type mismatch with Aver's IEEE Float — Dafny operations stay opaque
-- (`function FloatPi(): real` etc.) rather than synthesizing IEEE on
-- top of `bv64`, which would mean implementing the entire IEEE
-- arithmetic in Dafny by hand.
namespace AverFloat
def toInt (x : Float) : Int :=
  if x.isNaN then 0
  -- 2^63 is exactly representable in f64; values ≥ that saturate up.
  else if x ≥ 9223372036854775808.0 then 9223372036854775807
  -- -2^63 is exactly representable; values strictly below saturate down.
  else if x < -9223372036854775808.0 then -9223372036854775808
  else if x ≥ 0.0 then Int.ofNat x.toUInt64.toNat
  else -(Int.ofNat (-x).toUInt64.toNat)

def floor (x : Float) : Int := toInt x.floor
def ceil (x : Float) : Int := toInt x.ceil
def round (x : Float) : Int := toInt x.round

def pow (x y : Float) : Float := x ^ y

-- Edge-case smoke checks: each `example` is discharged by reduction,
-- so any drift from these documented values fails Lake build.
example : AverFloat.toInt 0.0 = 0 := by native_decide
example : AverFloat.toInt 3.7 = 3 := by native_decide
example : AverFloat.toInt (-3.7) = -3 := by native_decide
example : AverFloat.toInt (1.0 / 0.0) = 9223372036854775807 := by native_decide
example : AverFloat.toInt (-1.0 / 0.0) = -9223372036854775808 := by native_decide
example : AverFloat.toInt (0.0 / 0.0) = 0 := by native_decide
example : AverFloat.floor 3.7 = 3 := by native_decide
example : AverFloat.floor (-3.7) = -4 := by native_decide
example : AverFloat.ceil 3.2 = 4 := by native_decide
example : AverFloat.ceil (-3.2) = -3 := by native_decide
-- Rounding mode (half-away-from-zero, matching Rust's `f64::round`):
example : AverFloat.round 0.5 = 1 := by native_decide
example : AverFloat.round (-0.5) = -1 := by native_decide
example : AverFloat.round 2.5 = 3 := by native_decide
example : AverFloat.round (-2.5) = -3 := by native_decide
end AverFloat"#;
110
/// `DecidableEq Float` instance backed by an unsafe `==` comparison via
/// `implemented_by` — needed because IEEE floats have no sound decidable
/// propositional equality in Lean's kernel.
const LEAN_PRELUDE_FLOAT_DEC_EQ: &str = r#"private unsafe def Float.unsafeDecEq (a b : Float) : Decidable (a = b) :=
  if a == b then isTrue (unsafeCast ()) else isFalse (unsafeCast ())
@[implemented_by Float.unsafeDecEq]
private opaque Float.compDecEq (a b : Float) : Decidable (a = b)
instance : DecidableEq Float := Float.compDecEq"#;
116
/// Structural `DecidableEq` for `Except ε α`, derived from decidable
/// equality on both payload types.
const LEAN_PRELUDE_EXCEPT_DEC_EQ: &str = r#"instance [DecidableEq ε] [DecidableEq α] : DecidableEq (Except ε α)
  | .ok a, .ok b =>
    if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
  | .error a, .error b =>
    if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
  | .ok _, .error _ => isFalse (by intro h; cases h)
  | .error _, .ok _ => isFalse (by intro h; cases h)"#;
124
/// `Except.withDefault`: unwrap an `Except`, substituting a default on error.
const LEAN_PRELUDE_EXCEPT_NS: &str = r#"namespace Except
def withDefault (r : Except ε α) (d : α) : α :=
  match r with
  | .ok v => v
  | .error _ => d
end Except"#;
131
/// `Option.toExcept`: lift an `Option` into `Except`, using `e` as the
/// error payload for `none`.
const LEAN_PRELUDE_OPTION_TO_EXCEPT: &str = r#"def Option.toExcept (o : Option α) (e : ε) : Except ε α :=
  match o with
  | some v => .ok v
  | none => .error e"#;
136
/// Lets generated code use `+` for string concatenation (Aver's `+` on Strings).
const LEAN_PRELUDE_STRING_HADD: &str = r#"instance : HAdd String String String := ⟨String.append⟩"#;
138
/// `BranchPath`: a Dewey-decimal-style path ("1.2.3") identifying a branch;
/// `Root` is the empty path and `child` appends an index segment.
const LEAN_PRELUDE_BRANCH_PATH: &str = r#"structure BranchPath where
  dewey : String
  deriving Repr, BEq, DecidableEq

def BranchPath.Root : BranchPath := { dewey := "" }

def BranchPath.child (p : BranchPath) (idx : Int) : BranchPath :=
  if p.dewey.isEmpty then { dewey := toString idx }
  else { dewey := p.dewey ++ "." ++ toString idx }

def BranchPath.parse (s : String) : BranchPath := { dewey := s }"#;
153
/// Fuel measure for string-position recursion: (remaining chars + 1) scaled
/// by a rank budget — used as a termination measure in generated proofs.
const LEAN_PRELUDE_PROOF_FUEL: &str = r#"def averStringPosFuel (s : String) (pos : Int) (rankBudget : Nat) : Nat :=
  (((s.data.length) - pos.toNat) + 1) * rankBudget"#;
156
/// `AverMeasure`: strictly-positive structural size measures for `List`,
/// `Option`, and `Except` (each constructor contributes at least 1),
/// composed from per-element measures — used for termination arguments.
const LEAN_PRELUDE_AVER_MEASURE: &str = r#"namespace AverMeasure
def list (elemMeasure : α → Nat) : List α → Nat
  | [] => 1
  | x :: xs => elemMeasure x + list elemMeasure xs + 1
def option (elemMeasure : α → Nat) : Option α → Nat
  | none => 1
  | some x => elemMeasure x + 1
def except (errMeasure : ε → Nat) (okMeasure : α → Nat) : Except ε α → Nat
  | .error e => errMeasure e + 1
  | .ok v => okMeasure v + 1
end AverMeasure"#;
168
/// `AverMap` core: an association-list map over `List (α × β)` with
/// get/set/has/remove/keys/values/entries/len/fromList. `set` replaces
/// in place on key match (preserving position) or appends at the end.
const AVER_MAP_PRELUDE_BASE: &str = r#"namespace AverMap
def empty : List (α × β) := []
def get [DecidableEq α] (m : List (α × β)) (k : α) : Option β :=
  match m with
  | [] => none
  | (k', v) :: rest => if k = k' then some v else AverMap.get rest k
def set [DecidableEq α] (m : List (α × β)) (k : α) (v : β) : List (α × β) :=
  let rec go : List (α × β) → List (α × β)
    | [] => [(k, v)]
    | (k', v') :: rest => if k = k' then (k, v) :: rest else (k', v') :: go rest
  go m
def has [DecidableEq α] (m : List (α × β)) (k : α) : Bool :=
  m.any (fun p => decide (k = p.1))
def remove [DecidableEq α] (m : List (α × β)) (k : α) : List (α × β) :=
  m.filter (fun p => !(decide (k = p.1)))
def keys (m : List (α × β)) : List α := m.map Prod.fst
def values (m : List (α × β)) : List β := m.map Prod.snd
def entries (m : List (α × β)) : List (α × β) := m
def len (m : List (α × β)) : Nat := m.length
def fromList (entries : List (α × β)) : List (α × β) := entries"#;
189
/// Lemma `AverMap.has_set_self`: after `set m k v`, `has _ k` is true.
/// Proven by induction over the map through the `set.go` helper.
const AVER_MAP_PRELUDE_HAS_SET_SELF: &str = r#"private theorem any_set_go_self [DecidableEq α] (k : α) (v : β) :
    ∀ (m : List (α × β)), List.any (AverMap.set.go k v m) (fun p => decide (k = p.1)) = true := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, List.any]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.set.go, List.any, h]
      · simp [AverMap.set.go, List.any, h, ih]

theorem has_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
    AverMap.has (AverMap.set m k v) k = true := by
  simpa [AverMap.has, AverMap.set] using any_set_go_self k v m"#;
206
/// Lemma `AverMap.get_set_self`: after `set m k v`, `get _ k = some v`.
/// Same induction shape as `has_set_self`, through `set.go`.
const AVER_MAP_PRELUDE_GET_SET_SELF: &str = r#"private theorem get_set_go_self [DecidableEq α] (k : α) (v : β) :
    ∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) k = some v := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, AverMap.get]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.set.go, AverMap.get, h]
      · simp [AverMap.set.go, AverMap.get, h, ih]

theorem get_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
    AverMap.get (AverMap.set m k v) k = some v := by
  simpa [AverMap.set] using get_set_go_self k v m"#;
223
/// Lemma `AverMap.get_set_other`: `set m k v` leaves lookups of any other
/// key unchanged. Case-splits on both the written key and the queried key.
const AVER_MAP_PRELUDE_GET_SET_OTHER: &str = r#"private theorem get_set_go_other [DecidableEq α] (k key : α) (v : β) (h : key ≠ k) :
    ∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) key = AverMap.get m key := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, AverMap.get, h]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases hk : k = k'
      · have hkey : key ≠ k' := by simpa [hk] using h
        simp [AverMap.set.go, AverMap.get, hk, hkey]
      · by_cases hkey : key = k'
        · simp [AverMap.set.go, AverMap.get, hk, hkey]
        · simp [AverMap.set.go, AverMap.get, hk, hkey, ih]

theorem get_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
    AverMap.get (AverMap.set m k v) key = AverMap.get m key := by
  simpa [AverMap.set] using get_set_go_other k key v h m"#;
243
/// Lemma `AverMap.has_set_other`: membership of other keys survives `set`.
/// Derived from `get_set_other` via the bridge `has = (get _).isSome`.
const AVER_MAP_PRELUDE_HAS_SET_OTHER: &str = r#"theorem has_eq_isSome_get [DecidableEq α] (m : List (α × β)) (k : α) :
    AverMap.has m k = (AverMap.get m k).isSome := by
  induction m with
  | nil =>
    simp [AverMap.has, AverMap.get]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.has, AverMap.get, List.any, h]
      · simpa [AverMap.has, AverMap.get, List.any, h] using ih

theorem has_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
    AverMap.has (AverMap.set m k v) key = AverMap.has m key := by
  rw [AverMap.has_eq_isSome_get, AverMap.has_eq_isSome_get]
  simp [AverMap.get_set_other, h]"#;
260
/// Closes the `AverMap` namespace opened by `AVER_MAP_PRELUDE_BASE` —
/// emitted after whichever subset of the AverMap lemma constants is used.
const AVER_MAP_PRELUDE_END: &str = r#"end AverMap"#;
262
/// `AverList`: checked indexing with `Int` indices (negative → `none`) and a
/// fold-based insertion `sort` over any `Ord` instance.
const LEAN_PRELUDE_AVER_LIST: &str = r#"namespace AverList
def get (xs : List α) (i : Int) : Option α :=
  if i < 0 then none else xs.get? i.toNat
private def insertSorted [Ord α] (x : α) : List α → List α
  | [] => [x]
  | y :: ys =>
    if compare x y == Ordering.lt || compare x y == Ordering.eq then
      x :: y :: ys
    else
      y :: insertSorted x ys
def sort [Ord α] (xs : List α) : List α :=
  xs.foldl (fun acc x => insertSorted x acc) []
end AverList"#;
276
/// String prelude: `charAt`/`slice`/`fromFloat`/`chars`, the `AverString`
/// splitter, and the simp lemmas relating `split` and `String.intercalate`
/// that generated proofs about string round-trips rely on.
const LEAN_PRELUDE_STRING_HELPERS: &str = r#"def String.charAt (s : String) (i : Int) : Option String :=
  if i < 0 then none
  else (s.toList.get? i.toNat).map Char.toString
theorem String.charAt_length_none (s : String) : String.charAt s s.length = none := by
  have hs : ¬ ((s.length : Int) < 0) := by omega
  unfold String.charAt
  simp [hs]
  change s.data.length ≤ s.length
  exact Nat.le_refl _
def String.slice (s : String) (start stop : Int) : String :=
  let startN := if start < 0 then 0 else start.toNat
  let stopN := if stop < 0 then 0 else stop.toNat
  let chars := s.toList
  String.mk ((chars.drop startN).take (stopN - startN))
private def trimFloatTrailingZerosChars (chars : List Char) : List Char :=
  let noZeros := (chars.reverse.dropWhile (fun c => c == '0')).reverse
  match noZeros.reverse with
  | '.' :: rest => rest.reverse
  | _ => noZeros
private def normalizeFloatString (s : String) : String :=
  if s.toList.any (fun c => c == '.') then
    String.mk (trimFloatTrailingZerosChars s.toList)
  else s
def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)
def String.chars (s : String) : List String := s.toList.map Char.toString
private theorem char_to_string_append_mk (c : Char) (chars : List Char) :
    Char.toString c ++ String.mk chars = String.mk (c :: chars) := by
  rfl
private theorem string_intercalate_empty_char_strings_go (acc : String) (chars : List Char) :
    String.intercalate.go acc "" (chars.map Char.toString) = acc ++ String.mk chars := by
  induction chars generalizing acc with
  | nil =>
    simp [String.intercalate.go]
  | cons c rest ih =>
    calc
      String.intercalate.go acc "" ((c :: rest).map Char.toString)
          = String.intercalate.go (acc ++ Char.toString c) "" (rest.map Char.toString) := by
        simp [String.intercalate.go]
      _ = (acc ++ Char.toString c) ++ String.mk rest := by
        simpa using ih (acc ++ Char.toString c)
      _ = acc ++ String.mk (c :: rest) := by
        simp [String.append_assoc, char_to_string_append_mk]
private theorem string_intercalate_empty_char_strings (chars : List Char) :
    String.intercalate "" (chars.map Char.toString) = String.mk chars := by
  cases chars with
  | nil =>
    simp [String.intercalate]
  | cons c rest =>
    simpa [String.intercalate] using string_intercalate_empty_char_strings_go c.toString rest
theorem String.intercalate_empty_chars (s : String) :
    String.intercalate "" (String.chars s) = s := by
  cases s with
  | mk chars =>
    simpa [String.chars] using string_intercalate_empty_char_strings chars
namespace AverString
def splitOnCharGo (currentRev : List Char) (sep : Char) : List Char → List String
  | [] => [String.mk currentRev.reverse]
  | c :: cs =>
    if c == sep then
      String.mk currentRev.reverse :: splitOnCharGo [] sep cs
    else
      splitOnCharGo (c :: currentRev) sep cs
def splitOnChar (s : String) (sep : Char) : List String :=
  splitOnCharGo [] sep s.toList
def split (s delim : String) : List String :=
  match delim.toList with
  | [] => "" :: (s.toList.map Char.toString) ++ [""]
  | [c] => splitOnChar s c
  | _ => s.splitOn delim
@[simp] private theorem char_toString_data (c : Char) : c.toString.data = [c] := by
  rfl
private theorem splitOnCharGo_until_sep
    (prefixRev part tail : List Char) (sep : Char) :
    part.all (fun c => c != sep) = true ->
    splitOnCharGo prefixRev sep (part ++ sep :: tail) =
      String.mk (prefixRev.reverse ++ part) :: splitOnCharGo [] sep tail := by
  intro h_safe
  induction part generalizing prefixRev with
  | nil =>
    simp [splitOnCharGo]
  | cons c rest ih =>
    simp at h_safe
    have h_rest : (rest.all fun c => c != sep) = true := by
      simpa using h_safe.2
    simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
      (ih (c :: prefixRev) h_rest)
private theorem splitOnCharGo_no_sep
    (prefixRev chars : List Char) (sep : Char) :
    chars.all (fun c => c != sep) = true ->
    splitOnCharGo prefixRev sep chars =
      [String.mk (prefixRev.reverse ++ chars)] := by
  intro h_safe
  induction chars generalizing prefixRev with
  | nil =>
    simp [splitOnCharGo]
  | cons c rest ih =>
    simp at h_safe
    have h_rest : (rest.all fun c => c != sep) = true := by
      simpa using h_safe.2
    simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
      (ih (c :: prefixRev) h_rest)
@[simp] theorem split_single_char_append
    (head tail : String) (sep : Char) :
    head.toList.all (fun c => c != sep) = true ->
    split (head ++ Char.toString sep ++ tail) (Char.toString sep) =
      head :: split tail (Char.toString sep) := by
  intro h_safe
  simpa [split, splitOnChar] using
    (splitOnCharGo_until_sep [] head.data tail.data sep h_safe)
@[simp] theorem split_single_char_no_sep
    (s : String) (sep : Char) :
    s.toList.all (fun c => c != sep) = true ->
    split s (Char.toString sep) = [s] := by
  intro h_safe
  simpa [split, splitOnChar] using
    (splitOnCharGo_no_sep [] s.data sep h_safe)
private theorem intercalate_go_prefix
    (pref acc sep : String) (rest : List String) :
    String.intercalate.go (pref ++ sep ++ acc) sep rest =
      pref ++ sep ++ String.intercalate.go acc sep rest := by
  induction rest generalizing acc with
  | nil =>
    simp [String.intercalate.go, String.append_assoc]
  | cons x xs ih =>
    simpa [String.intercalate.go, String.append_assoc] using
      (ih (acc ++ sep ++ x))
@[simp] theorem split_intercalate_trailing_single_char
    (parts : List String) (sep : Char) :
    parts.all (fun part => part.toList.all (fun c => c != sep)) = true ->
    split (String.intercalate (Char.toString sep) parts ++ Char.toString sep) (Char.toString sep) =
      match parts with
      | [] => ["", ""]
      | _ => parts ++ [""] := by
  intro h_safe
  induction parts with
  | nil =>
    simp [split, splitOnChar, splitOnCharGo]
  | cons part rest ih =>
    simp at h_safe
    have h_part : (part.toList.all fun c => c != sep) = true := by
      simpa using h_safe.1
    cases rest with
    | nil =>
      have h_empty : ("".toList.all fun c => c != sep) = true := by simp
      calc
        split (String.intercalate.go part (Char.toString sep) [] ++ Char.toString sep) (Char.toString sep)
            = split (part ++ Char.toString sep) (Char.toString sep) := by
          simp [String.intercalate.go]
        _ = part :: split "" (Char.toString sep) := by
          simpa using split_single_char_append part "" sep h_part
        _ = [part, ""] := by
          simp [split_single_char_no_sep, h_empty]
    | cons next rest' =>
      have h_rest : ((next :: rest').all fun part => part.toList.all fun c => c != sep) = true := by
        simpa using h_safe.2
      calc
        split
            (String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep)
            (Char.toString sep)
            =
            split
              (part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep))
              (Char.toString sep) := by
          have h_join :
              String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
              = part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
            calc
              String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
                  = String.intercalate.go (part ++ Char.toString sep ++ next) (Char.toString sep) rest' ++ Char.toString sep := by
                simp [String.intercalate.go]
              _ = part ++ Char.toString sep ++ (String.intercalate.go next (Char.toString sep) rest' ++ Char.toString sep) := by
                rw [intercalate_go_prefix part next (Char.toString sep) rest']
                simp [String.append_assoc]
              _ = part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
                simp [String.intercalate, String.intercalate.go]
          simpa using congrArg (fun s => split s (Char.toString sep)) h_join
        _ = part :: split
              (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
              (Char.toString sep) := by
          simpa using split_single_char_append
            part
            (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
            sep
            h_part
        _ = part :: (next :: rest' ++ [""]) := by
          simpa using ih h_rest
end AverString"#;
471
/// Numeric parse/print prelude: `AverDigits` (decimal digit lists with a
/// verified `parseNatChars ∘ natDigitsChars = some` round-trip),
/// `String.fromInt` / `Int.fromString` with the proved round-trip theorem,
/// and a `Float.fromString` built on `Float.ofScientific`.
const LEAN_PRELUDE_NUMERIC_PARSE: &str = r#"namespace AverDigits
def foldDigitsAcc (acc : Nat) : List Nat -> Nat
  | [] => acc
  | d :: ds => foldDigitsAcc (acc * 10 + d) ds

def foldDigits (digits : List Nat) : Nat :=
  foldDigitsAcc 0 digits

private theorem foldDigitsAcc_append_singleton (acc : Nat) (xs : List Nat) (d : Nat) :
    foldDigitsAcc acc (xs ++ [d]) = foldDigitsAcc acc xs * 10 + d := by
  induction xs generalizing acc with
  | nil =>
    simp [foldDigitsAcc]
  | cons x xs ih =>
    simp [foldDigitsAcc, ih, Nat.left_distrib, Nat.add_assoc, Nat.add_left_comm]

private theorem foldDigits_append_singleton (xs : List Nat) (d : Nat) :
    foldDigits (xs ++ [d]) = foldDigits xs * 10 + d := by
  simpa [foldDigits] using foldDigitsAcc_append_singleton 0 xs d

def natDigits : Nat -> List Nat
  | n =>
    if n < 10 then
      [n]
    else
      natDigits (n / 10) ++ [n % 10]
termination_by
  n => n

theorem natDigits_nonempty (n : Nat) : natDigits n ≠ [] := by
  by_cases h : n < 10
  · rw [natDigits.eq_1]
    simp [h]
  · rw [natDigits.eq_1]
    simp [h]

theorem natDigits_digits_lt_ten : ∀ n : Nat, ∀ d ∈ natDigits n, d < 10 := by
  intro n d hd
  by_cases h : n < 10
  · rw [natDigits.eq_1] at hd
    simp [h] at hd
    rcases hd with rfl
    exact h
  · rw [natDigits.eq_1] at hd
    simp [h] at hd
    rcases hd with hd | hd
    · exact natDigits_digits_lt_ten (n / 10) d hd
    · subst hd
      exact Nat.mod_lt n (by omega)

theorem foldDigits_natDigits : ∀ n : Nat, foldDigits (natDigits n) = n := by
  intro n
  by_cases h : n < 10
  · rw [natDigits.eq_1]
    simp [h, foldDigits, foldDigitsAcc]
  · rw [natDigits.eq_1]
    simp [h]
    rw [foldDigits_append_singleton]
    rw [foldDigits_natDigits (n / 10)]
    omega

def digitChar : Nat -> Char
  | 0 => '0' | 1 => '1' | 2 => '2' | 3 => '3' | 4 => '4'
  | 5 => '5' | 6 => '6' | 7 => '7' | 8 => '8' | 9 => '9'
  | _ => '0'

def charToDigit? : Char -> Option Nat
  | '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3 | '4' => some 4
  | '5' => some 5 | '6' => some 6 | '7' => some 7 | '8' => some 8 | '9' => some 9
  | _ => none

theorem charToDigit_digitChar : ∀ d : Nat, d < 10 -> charToDigit? (digitChar d) = some d
  | 0, _ => by simp [digitChar, charToDigit?]
  | 1, _ => by simp [digitChar, charToDigit?]
  | 2, _ => by simp [digitChar, charToDigit?]
  | 3, _ => by simp [digitChar, charToDigit?]
  | 4, _ => by simp [digitChar, charToDigit?]
  | 5, _ => by simp [digitChar, charToDigit?]
  | 6, _ => by simp [digitChar, charToDigit?]
  | 7, _ => by simp [digitChar, charToDigit?]
  | 8, _ => by simp [digitChar, charToDigit?]
  | 9, _ => by simp [digitChar, charToDigit?]
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem digitChar_ne_minus : ∀ d : Nat, d < 10 -> digitChar d ≠ '-'
  | 0, _ => by decide
  | 1, _ => by decide
  | 2, _ => by decide
  | 3, _ => by decide
  | 4, _ => by decide
  | 5, _ => by decide
  | 6, _ => by decide
  | 7, _ => by decide
  | 8, _ => by decide
  | 9, _ => by decide
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->
    Char.toString (digitChar d) ≠ " " ∧
    Char.toString (digitChar d) ≠ "\t" ∧
    Char.toString (digitChar d) ≠ "\n" ∧
    Char.toString (digitChar d) ≠ "\r"
  | 0, _ => by decide
  | 1, _ => by decide
  | 2, _ => by decide
  | 3, _ => by decide
  | 4, _ => by decide
  | 5, _ => by decide
  | 6, _ => by decide
  | 7, _ => by decide
  | 8, _ => by decide
  | 9, _ => by decide
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem mapM_charToDigit_digits : ∀ ds : List Nat,
    (∀ d ∈ ds, d < 10) -> List.mapM charToDigit? (ds.map digitChar) = some ds := by
  intro ds hds
  induction ds with
  | nil =>
    simp
  | cons d ds ih =>
    have hd : d < 10 := hds d (by simp)
    have htail : ∀ x ∈ ds, x < 10 := by
      intro x hx
      exact hds x (by simp [hx])
    simp [charToDigit_digitChar d hd, ih htail]

def natDigitsChars (n : Nat) : List Char :=
  (natDigits n).map digitChar

def parseNatChars (chars : List Char) : Option Nat :=
  match chars with
  | [] => none
  | _ => do
    let digits <- List.mapM charToDigit? chars
    pure (foldDigits digits)

theorem parseNatChars_nat (n : Nat) :
    parseNatChars (natDigitsChars n) = some n := by
  unfold parseNatChars natDigitsChars
  cases h : (natDigits n).map digitChar with
  | nil =>
    exfalso
    exact natDigits_nonempty n (List.map_eq_nil_iff.mp h)
  | cons hd tl =>
    have hdigits : List.mapM charToDigit? (List.map digitChar (natDigits n)) = some (natDigits n) :=
      mapM_charToDigit_digits (natDigits n) (fun d hd => natDigits_digits_lt_ten n d hd)
    rw [h] at hdigits
    simp [h, hdigits, foldDigits_natDigits]
end AverDigits

def String.fromInt (n : Int) : String :=
  match n with
  | .ofNat m => String.mk (AverDigits.natDigitsChars m)
  | .negSucc m => String.mk ('-' :: AverDigits.natDigitsChars (m + 1))

def Int.fromString (s : String) : Except String Int :=
  match s.toList with
  | [] => .error ("Cannot parse '" ++ s ++ "' as Int")
  | '-' :: rest =>
    match AverDigits.parseNatChars rest with
    | some n => .ok (-Int.ofNat n)
    | none => .error ("Cannot parse '" ++ s ++ "' as Int")
  | chars =>
    match AverDigits.parseNatChars chars with
    | some n => .ok (Int.ofNat n)
    | none => .error ("Cannot parse '" ++ s ++ "' as Int")

theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n
  | .ofNat m => by
    cases h : AverDigits.natDigits m with
    | nil =>
      exfalso
      exact AverDigits.natDigits_nonempty m h
    | cons d ds =>
      have hd : d < 10 := AverDigits.natDigits_digits_lt_ten m d (by simp [h])
      have hne : AverDigits.digitChar d ≠ '-' := AverDigits.digitChar_ne_minus d hd
      have hparse : AverDigits.parseNatChars (AverDigits.digitChar d :: List.map AverDigits.digitChar ds) = some m := by
        simpa [AverDigits.natDigitsChars, h] using AverDigits.parseNatChars_nat m
      simp [String.fromInt, Int.fromString, AverDigits.natDigitsChars, h, hne, hparse]
  | .negSucc m => by
    simp [String.fromInt, Int.fromString, AverDigits.parseNatChars_nat]
    rfl

private def charDigitsToNat (cs : List Char) : Nat :=
  cs.foldl (fun acc c => acc * 10 + (c.toNat - '0'.toNat)) 0

private def parseExpPart : List Char → (Bool × List Char)
  | '-' :: rest => (true, rest.takeWhile Char.isDigit)
  | '+' :: rest => (false, rest.takeWhile Char.isDigit)
  | rest => (false, rest.takeWhile Char.isDigit)

def Float.fromString (s : String) : Except String Float :=
  let chars := s.toList
  let (neg, chars) := match chars with
    | '-' :: rest => (true, rest)
    | _ => (false, chars)
  let intPart := chars.takeWhile Char.isDigit
  let rest := chars.dropWhile Char.isDigit
  let (fracPart, rest) := match rest with
    | '.' :: rest => (rest.takeWhile Char.isDigit, rest.dropWhile Char.isDigit)
    | _ => ([], rest)
  let (expNeg, expDigits) := match rest with
    | 'e' :: rest => parseExpPart rest
    | 'E' :: rest => parseExpPart rest
    | _ => (false, [])
  if intPart.isEmpty && fracPart.isEmpty then .error ("Invalid float: " ++ s)
  else
    let mantissa := charDigitsToNat (intPart ++ fracPart)
    let fracLen : Int := fracPart.length
    let expVal : Int := charDigitsToNat expDigits
    let shift : Int := (if expNeg then -expVal else expVal) - fracLen
    let f := if shift >= 0 then Float.ofScientific mantissa false shift.toNat
      else Float.ofScientific mantissa true ((-shift).toNat)
    .ok (if neg then -f else f)"#;
690
/// Char/byte prelude: code-point conversions (`Char.fromCode` rejects
/// out-of-range values and the UTF-16 surrogate range 0xD800–0xDFFF) and
/// `AverByte` two-digit hex encoding/decoding for 0–255.
const LEAN_PRELUDE_CHAR_BYTE: &str = r#"def Char.toCode (s : String) : Int :=
  match s.toList.head? with
  | some c => (c.toNat : Int)
  | none => panic! "Char.toCode: string is empty"
def Char.fromCode (n : Int) : Option String :=
  if n < 0 || n > 1114111 then none
  else if n >= 55296 && n <= 57343 then none
  else some (Char.toString (Char.ofNat n.toNat))

def hexDigit (n : Int) : String :=
  match n with
  | 0 => "0" | 1 => "1" | 2 => "2" | 3 => "3"
  | 4 => "4" | 5 => "5" | 6 => "6" | 7 => "7"
  | 8 => "8" | 9 => "9" | 10 => "a" | 11 => "b"
  | 12 => "c" | 13 => "d" | 14 => "e" | 15 => "f"
  | _ => "?"

def byteToHex (code : Int) : String :=
  hexDigit (code / 16) ++ hexDigit (code % 16)

namespace AverByte
private def hexValue (c : Char) : Option Int :=
  match c with
  | '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3
  | '4' => some 4 | '5' => some 5 | '6' => some 6 | '7' => some 7
  | '8' => some 8 | '9' => some 9 | 'a' => some 10 | 'b' => some 11
  | 'c' => some 12 | 'd' => some 13 | 'e' => some 14 | 'f' => some 15
  | 'A' => some 10 | 'B' => some 11 | 'C' => some 12 | 'D' => some 13
  | 'E' => some 14 | 'F' => some 15
  | _ => none
def toHex (n : Int) : Except String String :=
  if n < 0 || n > 255 then
    .error ("Byte.toHex: " ++ toString n ++ " is out of range 0-255")
  else
    .ok (byteToHex n)
def fromHex (s : String) : Except String Int :=
  match s.toList with
  | [hi, lo] =>
    match hexValue hi, hexValue lo with
    | some h, some l => .ok (h * 16 + l)
    | _, _ => .error ("Byte.fromHex: invalid hex '" ++ s ++ "'")
  | _ => .error ("Byte.fromHex: expected exactly 2 hex chars, got '" ++ s ++ "'")
end AverByte"#;
734
735pub(crate) fn pure_fns(ctx: &CodegenContext) -> Vec<&FnDef> {
736 ctx.modules
737 .iter()
738 .flat_map(|m| m.fn_defs.iter())
739 .chain(ctx.fn_defs.iter())
740 .filter(|fd| toplevel::is_pure_fn(fd))
741 .collect()
742}
743
744pub(crate) fn recursive_type_names(ctx: &CodegenContext) -> HashSet<String> {
745 ctx.modules
746 .iter()
747 .flat_map(|m| m.type_defs.iter())
748 .chain(ctx.type_defs.iter())
749 .filter(|td| toplevel::is_recursive_type_def(td))
750 .map(|td| toplevel::type_def_name(td).to_string())
751 .collect()
752}
753
754pub(crate) fn recursive_pure_fn_names(ctx: &CodegenContext) -> HashSet<String> {
755 let pure_names: HashSet<String> = pure_fns(ctx)
756 .into_iter()
757 .map(|fd| fd.name.clone())
758 .collect();
759 let mut callgraph_items = ctx.items.clone();
760 for module in &ctx.modules {
761 for fd in &module.fn_defs {
762 callgraph_items.push(TopLevel::FnDef(fd.clone()));
763 }
764 }
765 call_graph::find_recursive_fns(&callgraph_items)
766 .into_iter()
767 .filter(|name| pure_names.contains(name))
768 .collect()
769}
770
771fn verify_counter_key(vb: &crate::ast::VerifyBlock) -> String {
772 match &vb.kind {
773 VerifyKind::Cases => format!("fn:{}", vb.fn_name),
774 VerifyKind::Law(law) => format!("law:{}::{}", vb.fn_name, law.name),
775 }
776}
777
/// Name for the generated Lean project, taken from the entry file's
/// basename via the backend-shared `common::entry_basename` helper.
fn lean_project_name(ctx: &CodegenContext) -> String {
    crate::codegen::common::entry_basename(ctx)
}
781
782pub(crate) fn find_type_def<'a>(ctx: &'a CodegenContext, type_name: &str) -> Option<&'a TypeDef> {
783 ctx.modules
784 .iter()
785 .flat_map(|m| m.type_defs.iter())
786 .chain(ctx.type_defs.iter())
787 .find(|td| toplevel::type_def_name(td) == type_name)
788}
789
790pub(super) fn bound_expr_to_lean(expr: &Spanned<Expr>) -> String {
791 match &expr.node {
792 Expr::Literal(crate::ast::Literal::Int(n)) => format!("{}", n),
793 Expr::Ident(name) => expr::aver_name_to_lean(name),
794 Expr::FnCall(f, args) => {
795 if let Some(dotted) = crate::codegen::common::expr_to_dotted_name(&f.node) {
796 if dotted == "List.len" && args.len() == 1 {
798 return format!("{}.length", bound_expr_to_lean(&args[0]));
799 }
800 let lean_args: Vec<String> = args.iter().map(bound_expr_to_lean).collect();
801 format!(
802 "({} {})",
803 expr::aver_name_to_lean(&dotted),
804 lean_args.join(" ")
805 )
806 } else {
807 "0".to_string()
808 }
809 }
810 Expr::Attr(obj, field) => format!(
811 "{}.{}",
812 bound_expr_to_lean(obj),
813 expr::aver_name_to_lean(field)
814 ),
815 _ => "0".to_string(),
816 }
817}
818
819pub(crate) fn sizeof_measure_param_indices(fd: &FnDef) -> Vec<usize> {
820 fd.params
821 .iter()
822 .enumerate()
823 .filter_map(|(idx, (_, type_name))| {
824 (!crate::codegen::recursion::detect::is_scalar_like_type(type_name)).then_some(idx)
825 })
826 .collect()
827}
828
829pub fn proof_mode_findings(ctx: &CodegenContext) -> Vec<ProofModeIssue> {
834 let (_plans, issues) = crate::codegen::recursion::analyze_plans(ctx);
835 issues
836}
837
838pub fn proof_mode_issues(ctx: &CodegenContext) -> Vec<String> {
839 proof_mode_findings(ctx)
840 .into_iter()
841 .map(|issue| issue.message)
842 .collect()
843}
844
/// Transpile with the default verify emission (`native_decide` examples).
pub fn transpile(ctx: &CodegenContext) -> ProjectOutput {
    transpile_with_verify_mode(ctx, VerifyEmitMode::NativeDecide)
}
849
/// Transpile in proof mode: recursion plans from `analyze_plans` are fed into
/// emission (see `LeanEmitMode::Proof` in `transpile_unified`).
pub fn transpile_for_proof_mode(
    ctx: &CodegenContext,
    verify_mode: VerifyEmitMode,
) -> ProjectOutput {
    transpile_unified(ctx, verify_mode, LeanEmitMode::Proof)
}
860
/// Transpile in standard mode with an explicit verify-emission strategy
/// (`native_decide`, `sorry`, or theorem skeletons).
pub fn transpile_with_verify_mode(
    ctx: &CodegenContext,
    verify_mode: VerifyEmitMode,
) -> ProjectOutput {
    transpile_unified(ctx, verify_mode, LeanEmitMode::Standard)
}
872
873fn emit_lifted_effectful_functions(
882 ctx: &CodegenContext,
883 recursive_fns: &HashSet<String>,
884 sections: &mut Vec<String>,
885) {
886 use crate::types::checker::effect_classification::is_classified;
887
888 let reachable = crate::codegen::common::verify_reachable_fn_names(&ctx.items);
894
895 let mut helpers: std::collections::HashMap<String, Vec<String>> =
900 std::collections::HashMap::new();
901 for item in &ctx.items {
902 let TopLevel::FnDef(fd) = item else { continue };
903 if fd.effects.is_empty() || fd.name == "main" {
904 continue;
905 }
906 if !fd.effects.iter().all(|e| is_classified(&e.node)) {
907 continue;
908 }
909 if !reachable.contains(&fd.name) {
910 continue;
911 }
912 helpers.insert(
913 fd.name.clone(),
914 fd.effects.iter().map(|e| e.node.clone()).collect(),
915 );
916 }
917
918 let mut lifted_fns: Vec<(String, crate::ast::FnDef)> = Vec::new();
919 for item in &ctx.items {
920 let TopLevel::FnDef(fd) = item else { continue };
921 if fd.effects.is_empty() || fd.name == "main" {
922 continue;
923 }
924 if !fd.effects.iter().all(|e| is_classified(&e.node)) {
925 continue;
926 }
927 if !reachable.contains(&fd.name) {
928 continue;
929 }
930 let Ok(Some(lifted)) =
931 crate::types::checker::effect_lifting::lift_fn_def_with_helpers(fd, &helpers)
932 else {
933 continue;
934 };
935 lifted_fns.push((fd.name.clone(), lifted));
936 }
937
938 let eligible_names: std::collections::HashSet<String> =
946 lifted_fns.iter().map(|(n, _)| n.clone()).collect();
947 let mut emitted: std::collections::HashSet<String> = std::collections::HashSet::new();
948 let mut order: Vec<usize> = Vec::new();
949 let mut remaining: Vec<usize> = (0..lifted_fns.len()).collect();
950 while !remaining.is_empty() {
951 let before = remaining.len();
952 remaining.retain(|&idx| {
953 let body_calls = collect_called_idents_in_body(&lifted_fns[idx].1.body);
954 let ready = body_calls
955 .iter()
956 .all(|name| !eligible_names.contains(name) || emitted.contains(name));
957 if ready {
958 emitted.insert(lifted_fns[idx].0.clone());
959 order.push(idx);
960 false
961 } else {
962 true
963 }
964 });
965 if remaining.len() == before {
966 order.append(&mut remaining);
970 }
971 }
972
973 for idx in order {
974 let (_, lifted) = &lifted_fns[idx];
975 if let Some(code) = toplevel::emit_fn_def(lifted, recursive_fns, ctx) {
976 sections.push(code);
977 sections.push(String::new());
978 }
979 }
980}
981
982fn collect_called_idents_in_body(body: &crate::ast::FnBody) -> std::collections::HashSet<String> {
983 use crate::ast::{Expr, Spanned, Stmt};
984 let mut out = std::collections::HashSet::new();
985 fn walk(expr: &Spanned<Expr>, out: &mut std::collections::HashSet<String>) {
986 match &expr.node {
987 Expr::FnCall(callee, args) => {
988 if let Expr::Ident(name) | Expr::Resolved { name, .. } = &callee.node {
989 out.insert(name.clone());
990 }
991 walk(callee, out);
992 for a in args {
993 walk(a, out);
994 }
995 }
996 Expr::BinOp(_, l, r) => {
997 walk(l, out);
998 walk(r, out);
999 }
1000 Expr::Match { subject, arms } => {
1001 walk(subject, out);
1002 for arm in arms {
1003 walk(&arm.body, out);
1004 }
1005 }
1006 Expr::Attr(inner, _) | Expr::ErrorProp(inner) => walk(inner, out),
1007 Expr::Constructor(_, Some(inner)) => walk(inner, out),
1008 Expr::List(items) | Expr::Tuple(items) | Expr::IndependentProduct(items, _) => {
1009 for i in items {
1010 walk(i, out);
1011 }
1012 }
1013 Expr::MapLiteral(pairs) => {
1014 for (k, v) in pairs {
1015 walk(k, out);
1016 walk(v, out);
1017 }
1018 }
1019 Expr::RecordCreate { fields, .. } => {
1020 for (_, v) in fields {
1021 walk(v, out);
1022 }
1023 }
1024 Expr::RecordUpdate { base, updates, .. } => {
1025 walk(base, out);
1026 for (_, v) in updates {
1027 walk(v, out);
1028 }
1029 }
1030 Expr::InterpolatedStr(parts) => {
1031 for part in parts {
1032 if let crate::ast::StrPart::Parsed(inner) = part {
1033 walk(inner, out);
1034 }
1035 }
1036 }
1037 _ => {}
1038 }
1039 }
1040 for stmt in body.stmts() {
1041 match stmt {
1042 Stmt::Expr(e) => walk(e, &mut out),
1043 Stmt::Binding(_, _, e) => walk(e, &mut out),
1044 }
1045 }
1046 out
1047}
1048
/// Test-only: the full prelude with every helper forced on (empty body scan).
#[cfg(test)]
fn generate_prelude() -> String {
    generate_prelude_for_body("", true)
}
1053
/// Test-only: render the prelude for `body`, including only the builtin
/// records and helper snippets that `body` references (or everything when
/// `include_all_helpers` is set).
#[cfg(test)]
fn generate_prelude_for_body(body: &str, include_all_helpers: bool) -> String {
    let mut parts = vec![LEAN_PRELUDE_HEADER.to_string()];

    // Trust header only when the body touches trusted builtins.
    if include_all_helpers || crate::codegen::builtin_records::needs_trust_header(body) {
        let empty = crate::codegen::common::DeclaredEffects {
            bare_namespaces: HashSet::new(),
            methods: HashSet::new(),
        };
        let has_ip = body.contains("BranchPath");
        parts.push(crate::types::checker::proof_trust_header::generate_commented(
            "-- ", &empty, has_ip,
        ));
    }

    for record in crate::codegen::builtin_records::needed_records(body, include_all_helpers) {
        parts.push(crate::codegen::builtin_records::render_lean(record));
    }

    for helper in crate::codegen::builtin_helpers::needed_helpers(body, include_all_helpers) {
        match helper.key {
            "BranchPath" => parts.push(LEAN_PRELUDE_BRANCH_PATH.to_string()),
            "AverList" => parts.push(LEAN_PRELUDE_AVER_LIST.to_string()),
            "StringHelpers" => parts.push(LEAN_PRELUDE_STRING_HELPERS.to_string()),
            "NumericParse" => parts.push(LEAN_PRELUDE_NUMERIC_PARSE.to_string()),
            "CharByte" => parts.push(LEAN_PRELUDE_CHAR_BYTE.to_string()),
            "AverMeasure" => parts.push(LEAN_PRELUDE_AVER_MEASURE.to_string()),
            "AverMap" => parts.push(generate_map_prelude(body, include_all_helpers)),
            "ProofFuel" => parts.push(LEAN_PRELUDE_PROOF_FUEL.to_string()),
            "StringHadd" => parts.push(LEAN_PRELUDE_STRING_HADD.to_string()),
            "FloatInstances" => {
                parts.push(LEAN_PRELUDE_FLOAT_COE.to_string());
                parts.push(LEAN_PRELUDE_FLOAT_DEC_EQ.to_string());
            }
            "ExceptInstances" => {
                parts.push(LEAN_PRELUDE_EXCEPT_DEC_EQ.to_string());
                parts.push(LEAN_PRELUDE_EXCEPT_NS.to_string());
                parts.push(LEAN_PRELUDE_OPTION_TO_EXCEPT.to_string());
            }
            // Datatype-only keys are already rendered via `needed_records`.
            "ResultDatatype" | "OptionDatatype" | "OptionToResult" | "BranchPathDatatype" => {}
            other => panic!(
                "Lean backend has no implementation for builtin helper key '{}'. \
                 Add a match arm in generate_prelude_for_body or remove the key \
                 from BUILTIN_HELPERS.",
                other
            ),
        }
    }

    parts.join("\n\n")
}
1119
1120fn generate_map_prelude(body: &str, include_all_helpers: bool) -> String {
1121 let mut parts = vec![AVER_MAP_PRELUDE_BASE.to_string()];
1122
1123 let needs_has_set_self = include_all_helpers || body.contains("AverMap.has_set_self");
1124 let needs_get_set_self = include_all_helpers || body.contains("AverMap.get_set_self");
1125 let needs_get_set_other = include_all_helpers
1126 || body.contains("AverMap.get_set_other")
1127 || body.contains("AverMap.has_set_other");
1128 let needs_has_set_other = include_all_helpers || body.contains("AverMap.has_set_other");
1129
1130 if needs_has_set_self {
1131 parts.push(AVER_MAP_PRELUDE_HAS_SET_SELF.to_string());
1132 }
1133 if needs_get_set_self {
1134 parts.push(AVER_MAP_PRELUDE_GET_SET_SELF.to_string());
1135 }
1136 if needs_get_set_other {
1137 parts.push(AVER_MAP_PRELUDE_GET_SET_OTHER.to_string());
1138 }
1139 if needs_has_set_other {
1140 parts.push(AVER_MAP_PRELUDE_HAS_SET_OTHER.to_string());
1141 }
1142
1143 parts.push(AVER_MAP_PRELUDE_END.to_string());
1144 parts.join("\n\n")
1145}
1146
/// Render `lakefile.lean` for the generated project: the package is the
/// lowercased project name, the library carries the entry root plus any
/// extra roots (AverCommon and one per module).
fn generate_lakefile_with_roots(project_name: &str, extra_roots: &[String]) -> String {
    // Lake root names are backtick-prefixed identifiers: `Name
    let roots_str = std::iter::once(project_name.to_string())
        .chain(extra_roots.iter().cloned())
        .map(|root| format!("`{}", root))
        .collect::<Vec<_>>()
        .join(", ");
    format!(
        r#"import Lake
open Lake DSL

package «{}» where
  version := v!"0.1.0"

@[default_target]
lean_lib «{}» where
  srcDir := "."
  roots := #[{}]
"#,
        project_name.to_lowercase(),
        project_name,
        roots_str
    )
}
1170
/// Contents of the `lean-toolchain` pin file for generated projects.
fn generate_toolchain() -> String {
    String::from("leanprover/lean4:v4.15.0\n")
}
1174
/// Which flavour of Lean output `transpile_unified` produces.
#[derive(Clone, Copy)]
enum LeanEmitMode {
    /// Default emission (used by `transpile_with_verify_mode`); no recursion
    /// plans are computed.
    Standard,
    /// Proof-oriented emission (used by `transpile_for_proof_mode`); recursion
    /// plans from `analyze_plans` drive function/mutual-group emission.
    Proof,
}
1180
/// Core of the Lean backend: lowers the whole project to a `ProjectOutput`
/// (one `.lean` file per module, an entry file, the shared `AverCommon`
/// prelude, a lakefile, and the toolchain pin).
///
/// `verify_mode` controls how verify blocks are proved; `emit_mode` selects
/// standard vs proof-oriented function emission.
fn transpile_unified(
    ctx: &CodegenContext,
    verify_mode: VerifyEmitMode,
    emit_mode: LeanEmitMode,
) -> ProjectOutput {
    use crate::codegen::recursion::RecursionPlan;

    // Recursion inventories: all recursive fns, the pure subset, and the
    // recursive type names (the latter feed measure emission below).
    let recursive_fns = call_graph::find_recursive_fns(&ctx.items);
    let recursive_names = recursive_pure_fn_names(ctx);
    let recursive_types = recursive_type_names(ctx);
    // Termination plans are only computed in proof mode; the issues are
    // surfaced separately through `proof_mode_findings`.
    let (plans, _proof_issues) = match emit_mode {
        LeanEmitMode::Proof => crate::codegen::recursion::analyze_plans(ctx),
        LeanEmitMode::Standard => (HashMap::<String, RecursionPlan>::new(), Vec::new()),
    };

    // Emit pure functions grouped by mutual-recursion component, routed to
    // their owning scope (module prefix, or "" for the entry file).
    let pure_per_scope = crate::codegen::common::route_pure_components_per_scope(
        ctx,
        toplevel::is_pure_fn,
        |comp| {
            let mut out: Vec<String> = Vec::new();
            if comp.len() > 1 {
                // Mutual group: the proof emitter is only used when every
                // member has a recursion plan; otherwise fall back.
                let code = match emit_mode {
                    LeanEmitMode::Proof => {
                        let all_supported = comp.iter().all(|fd| plans.contains_key(&fd.name));
                        if all_supported {
                            toplevel::emit_mutual_group_proof(comp, ctx, &plans)
                        } else {
                            toplevel::emit_mutual_group(comp, ctx)
                        }
                    }
                    LeanEmitMode::Standard => toplevel::emit_mutual_group(comp, ctx),
                };
                out.push(code);
                out.push(String::new());
            } else if let Some(fd) = comp.first() {
                let emitted = match emit_mode {
                    LeanEmitMode::Proof => {
                        // A recursive fn without a plan uses the standard
                        // emitter even in proof mode.
                        let is_recursive = recursive_names.contains(&fd.name);
                        if is_recursive && !plans.contains_key(&fd.name) {
                            toplevel::emit_fn_def(fd, &recursive_names, ctx)
                        } else {
                            toplevel::emit_fn_def_proof(fd, plans.get(&fd.name).cloned(), ctx)
                        }
                    }
                    LeanEmitMode::Standard => toplevel::emit_fn_def(fd, &recursive_fns, ctx),
                };
                if let Some(code) = emitted {
                    out.push(code);
                    out.push(String::new());
                }
            }
            out
        },
    );

    // Oracle-lifted effectful functions live in the entry file.
    let mut entry_lifted_sections: Vec<String> = Vec::new();
    let lifted_recursive_names = match emit_mode {
        LeanEmitMode::Proof => &recursive_names,
        LeanEmitMode::Standard => &recursive_fns,
    };
    emit_lifted_effectful_functions(ctx, lifted_recursive_names, &mut entry_lifted_sections);

    let mut entry_decision_sections: Vec<String> = Vec::new();
    for item in &ctx.items {
        if let TopLevel::Decision(db) = item {
            entry_decision_sections.push(toplevel::emit_decision(db));
            entry_decision_sections.push(String::new());
        }
    }

    // Verify blocks share a per-key counter so theorem numbering stays
    // global across multiple blocks for the same function/law.
    let mut entry_verify_sections: Vec<String> = Vec::new();
    let mut verify_case_counters: HashMap<String, usize> = HashMap::new();
    for item in &ctx.items {
        if let TopLevel::Verify(vb) = item {
            let key = verify_counter_key(vb);
            let start_idx = *verify_case_counters.get(&key).unwrap_or(&0);
            let (emitted, next_idx) = toplevel::emit_verify_block(vb, ctx, verify_mode, start_idx);
            verify_case_counters.insert(key, next_idx);
            entry_verify_sections.push(emitted);
            entry_verify_sections.push(String::new());
        }
    }

    // `union_body` accumulates all emitted Lean code; prelude helpers are
    // later selected by substring scan over it (see `build_common_lean`).
    let mut module_files: Vec<(String, String)> = Vec::new();
    let mut union_body = String::new();

    for module in &ctx.modules {
        let mut body_sections: Vec<String> = Vec::new();
        for td in &module.type_defs {
            body_sections.push(toplevel::emit_type_def(td));
            if toplevel::is_recursive_type_def(td) {
                // Recursive datatypes need an explicit DecidableEq instance,
                // plus a size measure in proof mode.
                body_sections.push(toplevel::emit_recursive_decidable_eq(
                    toplevel::type_def_name(td),
                ));
                if matches!(emit_mode, LeanEmitMode::Proof)
                    && let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types)
                {
                    body_sections.push(measure);
                }
            }
            body_sections.push(String::new());
        }
        if let Some(scope_sections) = pure_per_scope.by_scope.get(&module.prefix) {
            body_sections.extend(scope_sections.clone());
        }
        let body = body_sections.join("\n");
        union_body.push_str(&body);
        union_body.push('\n');

        // Every module imports the shared prelude plus its declared deps,
        // and opens each dep's namespace.
        let mut imports = vec!["import AverCommon".to_string()];
        for d in &module.depends {
            imports.push(format!("import {}", d));
        }
        let opens: Vec<String> = module
            .depends
            .iter()
            .map(|d| format!("open {}", d))
            .collect();

        let opens_str = if opens.is_empty() {
            String::new()
        } else {
            format!("\n{}\n", opens.join("\n"))
        };
        let content = format!(
            "{}\n{}\nnamespace {}\n\n{}\nend {}\n",
            imports.join("\n"),
            opens_str,
            module.prefix,
            body,
            module.prefix
        );
        // Dotted module prefixes become nested directories.
        let path = module.prefix.replace('.', "/");
        module_files.push((format!("{}.lean", path), content));
    }

    // Entry file body: types, pure fns, lifted effectful fns, decisions,
    // then verify sections — in that order.
    let mut entry_body_sections: Vec<String> = Vec::new();
    for td in &ctx.type_defs {
        entry_body_sections.push(toplevel::emit_type_def(td));
        if toplevel::is_recursive_type_def(td) {
            entry_body_sections.push(toplevel::emit_recursive_decidable_eq(
                toplevel::type_def_name(td),
            ));
            if matches!(emit_mode, LeanEmitMode::Proof)
                && let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types)
            {
                entry_body_sections.push(measure);
            }
        }
        entry_body_sections.push(String::new());
    }
    if let Some(entry_pure) = pure_per_scope.by_scope.get("") {
        entry_body_sections.extend(entry_pure.clone());
    }
    entry_body_sections.extend(entry_lifted_sections);
    entry_body_sections.extend(entry_decision_sections);
    entry_body_sections.extend(entry_verify_sections);

    let entry_body = entry_body_sections.join("\n");
    union_body.push_str(&entry_body);
    union_body.push('\n');

    let project_name = lean_project_name(ctx);
    let mut entry_imports = vec!["import AverCommon".to_string()];
    for m in &ctx.modules {
        entry_imports.push(format!("import {}", m.prefix));
    }
    let entry_opens: Vec<String> = ctx
        .modules
        .iter()
        .map(|m| format!("open {}", m.prefix))
        .collect();
    let mut entry_parts = vec![entry_imports.join("\n")];
    if !entry_opens.is_empty() {
        entry_parts.push(entry_opens.join("\n"));
    }
    // Trust header only when the program uses BranchPath (independent
    // products) or any effect in the classified proof subset.
    let declared = crate::codegen::common::collect_declared_effects(ctx);
    let has_ip = union_body.contains("BranchPath");
    let has_classified =
        crate::types::checker::effect_classification::classifications_for_proof_subset()
            .iter()
            .any(|c| declared.includes(c.method));
    if has_ip || has_classified {
        entry_parts.push(
            crate::types::checker::proof_trust_header::generate_commented("-- ", &declared, has_ip),
        );
    }
    let subtype_block = crate::types::checker::oracle_subtypes::lean_subtypes(&declared);
    if !subtype_block.is_empty() {
        entry_parts.push(subtype_block);
    }
    entry_parts.push(entry_body);
    let entry_content = entry_parts.join("\n\n");

    // Shared prelude sized by scanning everything that was emitted.
    let common_content = build_common_lean(&union_body);

    let mut extra_roots: Vec<String> = vec!["AverCommon".to_string()];
    for m in &ctx.modules {
        extra_roots.push(m.prefix.clone());
    }
    let lakefile = generate_lakefile_with_roots(&project_name, &extra_roots);
    let toolchain = generate_toolchain();

    let mut files = module_files;
    files.push((format!("{}.lean", project_name), entry_content));
    files.push(("AverCommon.lean".to_string(), common_content));
    files.push(("lakefile.lean".to_string(), lakefile));
    files.push(("lean-toolchain".to_string(), toolchain));
    ProjectOutput { files }
}
1414
1415fn build_common_lean(union_body: &str) -> String {
1416 let mut parts = vec![LEAN_PRELUDE_HEADER.to_string()];
1417 for record in crate::codegen::builtin_records::needed_records(union_body, false) {
1418 parts.push(crate::codegen::builtin_records::render_lean(record));
1419 }
1420 for helper in crate::codegen::builtin_helpers::needed_helpers(union_body, false) {
1421 match helper.key {
1422 "BranchPath" => parts.push(LEAN_PRELUDE_BRANCH_PATH.to_string()),
1423 "AverList" => parts.push(LEAN_PRELUDE_AVER_LIST.to_string()),
1424 "StringHelpers" => parts.push(LEAN_PRELUDE_STRING_HELPERS.to_string()),
1425 "NumericParse" => parts.push(LEAN_PRELUDE_NUMERIC_PARSE.to_string()),
1426 "CharByte" => parts.push(LEAN_PRELUDE_CHAR_BYTE.to_string()),
1427 "AverMeasure" => parts.push(LEAN_PRELUDE_AVER_MEASURE.to_string()),
1428 "AverMap" => parts.push(generate_map_prelude(union_body, false)),
1429 "ProofFuel" => parts.push(LEAN_PRELUDE_PROOF_FUEL.to_string()),
1430 "FloatInstances" => parts.extend([
1431 LEAN_PRELUDE_FLOAT_COE.to_string(),
1432 LEAN_PRELUDE_FLOAT_DEC_EQ.to_string(),
1433 ]),
1434 "ExceptInstances" => parts.extend([
1435 LEAN_PRELUDE_EXCEPT_DEC_EQ.to_string(),
1436 LEAN_PRELUDE_EXCEPT_NS.to_string(),
1437 LEAN_PRELUDE_OPTION_TO_EXCEPT.to_string(),
1438 ]),
1439 "StringHadd" => parts.push(LEAN_PRELUDE_STRING_HADD.to_string()),
1440 "ResultDatatype" | "OptionDatatype" | "OptionToResult" | "BranchPathDatatype" => {}
1441 other => panic!(
1442 "Lean backend has no implementation for builtin helper key '{}'. \
1443 Add a match arm in build_common_lean or remove the key from BUILTIN_HELPERS.",
1444 other
1445 ),
1446 }
1447 }
1448 parts.join("\n\n")
1449}
1450
1451#[cfg(test)]
1452mod tests {
1453 use super::{
1454 VerifyEmitMode, generate_prelude, proof_mode_issues, recurrence, transpile,
1455 transpile_for_proof_mode, transpile_with_verify_mode,
1456 };
1457 use crate::ast::{
1458 BinOp, Expr, FnBody, FnDef, Literal, MatchArm, Pattern, Spanned, Stmt, TailCallData,
1459 TopLevel, TypeDef, TypeVariant, VerifyBlock, VerifyGiven, VerifyGivenDomain, VerifyKind,
1460 VerifyLaw,
1461 };
1462
    /// Shorthand: wrap an expression in a span-free `Spanned`.
    fn sb(e: Expr) -> Spanned<Expr> {
        Spanned::bare(e)
    }
    /// Shorthand: boxed variant of `sb` for AST fields that own boxes.
    fn sbb(e: Expr) -> Box<Spanned<Expr>> {
        Box::new(Spanned::bare(e))
    }
1471 use crate::codegen::{CodegenContext, build_context};
1472 use crate::source::parse_source;
1473 use crate::tco;
1474 use crate::types::checker::run_type_check_full;
1475 use std::collections::{HashMap, HashSet};
1476 use std::sync::Arc as Rc;
1477
    /// A minimal `CodegenContext` with no items; tests push whatever they need.
    fn empty_ctx() -> CodegenContext {
        CodegenContext {
            items: vec![],
            fn_sigs: HashMap::new(),
            memo_fns: HashSet::new(),
            memo_safe_types: HashSet::new(),
            type_defs: vec![],
            fn_defs: vec![],
            // Drives the generated entry file name (`Verify_mode.lean`).
            project_name: "verify_mode".to_string(),
            modules: vec![],
            module_prefixes: HashSet::new(),
            policy: None,
            emit_replay_runtime: false,
            runtime_policy_from_env: false,
            guest_entry: None,
            emit_self_host_support: false,
            extra_fn_defs: Vec::new(),
            mutual_tco_members: HashSet::new(),
        }
    }
1498
1499 fn ctx_from_source(source: &str, project_name: &str) -> CodegenContext {
1500 let mut items = parse_source(source).expect("source should parse");
1501 tco::transform_program(&mut items);
1502 let tc = run_type_check_full(&items, None);
1503 assert!(
1504 tc.errors.is_empty(),
1505 "source should typecheck without errors: {:?}",
1506 tc.errors
1507 );
1508 build_context(items, &tc, HashSet::new(), project_name.to_string(), vec![])
1509 }
1510
1511 fn generated_lean_file(out: &crate::codegen::ProjectOutput) -> String {
1519 out.files
1520 .iter()
1521 .filter_map(|(name, content)| {
1522 (name.ends_with(".lean") && name != "lakefile.lean").then_some(content.as_str())
1523 })
1524 .collect::<Vec<&str>>()
1525 .join("\n")
1526 }
1527
    /// Context holding a single `verify` cases block for a function `f`,
    /// with one trivially-true case (`1 => 1`).
    fn empty_ctx_with_verify_case() -> CodegenContext {
        let mut ctx = empty_ctx();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 1,
            cases: vec![(
                sb(Expr::Literal(Literal::Int(1))),
                sb(Expr::Literal(Literal::Int(1))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Cases,
            trace: false,
            cases_givens: vec![],
        }));
        ctx
    }
1547
1548 fn empty_ctx_with_two_verify_blocks_same_fn() -> CodegenContext {
1549 let mut ctx = empty_ctx();
1550 ctx.items.push(TopLevel::Verify(VerifyBlock {
1551 fn_name: "f".to_string(),
1552 line: 1,
1553 cases: vec![(
1554 sb(Expr::Literal(Literal::Int(1))),
1555 sb(Expr::Literal(Literal::Int(1))),
1556 )],
1557 case_spans: vec![],
1558 case_givens: vec![],
1559 case_hostile_origins: vec![],
1560 case_hostile_profiles: vec![],
1561 kind: VerifyKind::Cases,
1562 trace: false,
1563 cases_givens: vec![],
1564 }));
1565 ctx.items.push(TopLevel::Verify(VerifyBlock {
1566 fn_name: "f".to_string(),
1567 line: 2,
1568 cases: vec![(
1569 sb(Expr::Literal(Literal::Int(2))),
1570 sb(Expr::Literal(Literal::Int(2))),
1571 )],
1572 case_spans: vec![],
1573 case_givens: vec![],
1574 case_hostile_origins: vec![],
1575 case_hostile_profiles: vec![],
1576 kind: VerifyKind::Cases,
1577 trace: false,
1578 cases_givens: vec![],
1579 }));
1580 ctx
1581 }
1582
1583 fn empty_ctx_with_verify_law() -> CodegenContext {
1584 let mut ctx = empty_ctx();
1585 let add = FnDef {
1586 name: "add".to_string(),
1587 line: 1,
1588 params: vec![
1589 ("a".to_string(), "Int".to_string()),
1590 ("b".to_string(), "Int".to_string()),
1591 ],
1592 return_type: "Int".to_string(),
1593 effects: vec![],
1594 desc: None,
1595 body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
1596 BinOp::Add,
1597 sbb(Expr::Ident("a".to_string())),
1598 sbb(Expr::Ident("b".to_string())),
1599 )))),
1600 resolution: None,
1601 };
1602 ctx.fn_defs.push(add.clone());
1603 ctx.items.push(TopLevel::FnDef(add));
1604 ctx.items.push(TopLevel::Verify(VerifyBlock {
1605 fn_name: "add".to_string(),
1606 line: 1,
1607 cases: vec![
1608 (
1609 sb(Expr::FnCall(
1610 sbb(Expr::Ident("add".to_string())),
1611 vec![
1612 sb(Expr::Literal(Literal::Int(1))),
1613 sb(Expr::Literal(Literal::Int(2))),
1614 ],
1615 )),
1616 sb(Expr::FnCall(
1617 sbb(Expr::Ident("add".to_string())),
1618 vec![
1619 sb(Expr::Literal(Literal::Int(2))),
1620 sb(Expr::Literal(Literal::Int(1))),
1621 ],
1622 )),
1623 ),
1624 (
1625 sb(Expr::FnCall(
1626 sbb(Expr::Ident("add".to_string())),
1627 vec![
1628 sb(Expr::Literal(Literal::Int(2))),
1629 sb(Expr::Literal(Literal::Int(3))),
1630 ],
1631 )),
1632 sb(Expr::FnCall(
1633 sbb(Expr::Ident("add".to_string())),
1634 vec![
1635 sb(Expr::Literal(Literal::Int(3))),
1636 sb(Expr::Literal(Literal::Int(2))),
1637 ],
1638 )),
1639 ),
1640 ],
1641 case_spans: vec![],
1642 case_givens: vec![],
1643 case_hostile_origins: vec![],
1644 case_hostile_profiles: vec![],
1645 kind: VerifyKind::Law(Box::new(VerifyLaw {
1646 name: "commutative".to_string(),
1647 givens: vec![
1648 VerifyGiven {
1649 name: "a".to_string(),
1650 type_name: "Int".to_string(),
1651 domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
1652 },
1653 VerifyGiven {
1654 name: "b".to_string(),
1655 type_name: "Int".to_string(),
1656 domain: VerifyGivenDomain::Explicit(vec![
1657 sb(Expr::Literal(Literal::Int(2))),
1658 sb(Expr::Literal(Literal::Int(3))),
1659 ]),
1660 },
1661 ],
1662 when: None,
1663 lhs: sb(Expr::FnCall(
1664 sbb(Expr::Ident("add".to_string())),
1665 vec![
1666 sb(Expr::Ident("a".to_string())),
1667 sb(Expr::Ident("b".to_string())),
1668 ],
1669 )),
1670 rhs: sb(Expr::FnCall(
1671 sbb(Expr::Ident("add".to_string())),
1672 vec![
1673 sb(Expr::Ident("b".to_string())),
1674 sb(Expr::Ident("a".to_string())),
1675 ],
1676 )),
1677 sample_guards: vec![],
1678 })),
1679 trace: false,
1680 cases_givens: vec![],
1681 }));
1682 ctx
1683 }
1684
1685 #[test]
1686 fn prelude_normalizes_float_string_format() {
1687 let prelude = generate_prelude();
1688 assert!(
1689 prelude.contains("private def normalizeFloatString (s : String) : String :="),
1690 "missing normalizeFloatString helper in prelude"
1691 );
1692 assert!(
1693 prelude.contains(
1694 "def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)"
1695 ),
1696 "String.fromFloat should normalize Lean float formatting"
1697 );
1698 }
1699
1700 #[test]
1701 fn prelude_validates_char_from_code_unicode_bounds() {
1702 let prelude = generate_prelude();
1703 assert!(
1704 prelude.contains("if n < 0 || n > 1114111 then none"),
1705 "Char.fromCode should reject code points above Unicode max"
1706 );
1707 assert!(
1708 prelude.contains("else if n >= 55296 && n <= 57343 then none"),
1709 "Char.fromCode should reject surrogate code points"
1710 );
1711 }
1712
1713 #[test]
1714 fn prelude_includes_map_set_helper_lemmas() {
1715 let prelude = generate_prelude();
1716 assert!(
1717 prelude.contains("theorem has_set_self [DecidableEq α]"),
1718 "missing AverMap.has_set_self helper theorem"
1719 );
1720 assert!(
1721 prelude.contains("theorem get_set_self [DecidableEq α]"),
1722 "missing AverMap.get_set_self helper theorem"
1723 );
1724 }
1725
    #[test]
    fn lean_output_without_map_usage_omits_map_prelude() {
        // A program that never touches maps must not pull in the AverMap
        // prelude (prelude sections are selected by scanning the output).
        let ctx = ctx_from_source(
            r#"
module NoMap
    intent = "Simple pure program without maps."

fn addOne(n: Int) -> Int
    n + 1

verify addOne
    addOne(1) => 2
"#,
            "nomap",
        );
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        assert!(
            !lean.contains("namespace AverMap"),
            "did not expect AverMap prelude in program without map usage:\n{}",
            lean
        );
    }
1750
1751 #[test]
1752 fn transpile_emits_native_decide_for_verify_by_default() {
1753 let out = transpile(&empty_ctx_with_verify_case());
1754 let lean = out
1755 .files
1756 .iter()
1757 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
1758 .expect("expected generated Lean file");
1759 assert!(lean.contains("example : 1 = 1 := by native_decide"));
1760 }
1761
1762 #[test]
1763 fn transpile_can_emit_sorry_for_verify_when_requested() {
1764 let out = transpile_with_verify_mode(&empty_ctx_with_verify_case(), VerifyEmitMode::Sorry);
1765 let lean = out
1766 .files
1767 .iter()
1768 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
1769 .expect("expected generated Lean file");
1770 assert!(lean.contains("example : 1 = 1 := by sorry"));
1771 }
1772
1773 #[test]
1774 fn transpile_can_emit_theorem_skeletons_for_verify() {
1775 let out = transpile_with_verify_mode(
1776 &empty_ctx_with_verify_case(),
1777 VerifyEmitMode::TheoremSkeleton,
1778 );
1779 let lean = out
1780 .files
1781 .iter()
1782 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
1783 .expect("expected generated Lean file");
1784 assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
1785 assert!(lean.contains(" sorry"));
1786 }
1787
1788 #[test]
1789 fn theorem_skeleton_numbering_is_global_per_function_across_verify_blocks() {
1790 let out = transpile_with_verify_mode(
1791 &empty_ctx_with_two_verify_blocks_same_fn(),
1792 VerifyEmitMode::TheoremSkeleton,
1793 );
1794 let lean = out
1795 .files
1796 .iter()
1797 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
1798 .expect("expected generated Lean file");
1799 assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
1800 assert!(lean.contains("theorem f_verify_2 : 2 = 2 := by"));
1801 }
1802
    #[test]
    fn transpile_emits_named_theorems_for_verify_law() {
        // A law verify emits: a commented header, one comment per `given`
        // domain, a universal theorem (closed here by simp + Int.add_comm),
        // and one `native_decide` sample theorem per sampled case.
        let out = transpile(&empty_ctx_with_verify_law());
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("-- verify law add.commutative (2 cases)"));
        assert!(lean.contains("-- given a: Int = 1..2"));
        assert!(lean.contains("-- given b: Int = [2, 3]"));
        assert!(lean.contains(
            "theorem add_law_commutative : ∀ (a : Int) (b : Int), add a b = add b a := by"
        ));
        assert!(lean.contains(" intro a b"));
        assert!(lean.contains(" simp [add, Int.add_comm]"));
        assert!(lean.contains(
            "theorem add_law_commutative_sample_1 : add 1 2 = add 2 1 := by native_decide"
        ));
        assert!(lean.contains(
            "theorem add_law_commutative_sample_2 : add 2 3 = add 3 2 := by native_decide"
        ));
    }
1826
1827 #[test]
1828 fn generate_prelude_emits_int_roundtrip_theorem() {
1829 let lean = generate_prelude();
1830 assert!(lean.contains(
1831 "theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n"
1832 ));
1833 assert!(lean.contains("theorem String.intercalate_empty_chars (s : String) :"));
1834 assert!(lean.contains("def splitOnCharGo"));
1835 assert!(lean.contains("theorem split_single_char_append"));
1836 assert!(lean.contains("theorem split_intercalate_trailing_single_char"));
1837 assert!(lean.contains("namespace AverDigits"));
1838 assert!(lean.contains("theorem String.charAt_length_none (s : String)"));
1839 assert!(lean.contains("theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->"));
1840 }
1841
    #[test]
    fn transpile_emits_guarded_theorems_for_verify_law_when_clause() {
        // A `when` clause becomes a `(guard) = true ->` hypothesis on both
        // the universal theorem and every concrete sample theorem.
        let ctx = ctx_from_source(
            r#"
module GuardedLaw
    intent =
        "verify law with precondition"

fn pickGreater(a: Int, b: Int) -> Int
    match a > b
        true -> a
        false -> b

verify pickGreater law ordered
    given a: Int = [1, 2]
    given b: Int = [1, 2]
    when a > b
    pickGreater(a, b) => a
"#,
            "guarded_law",
        );
        let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- when (a > b)"));
        assert!(lean.contains(
            "theorem pickGreater_law_ordered : ∀ (a : Int) (b : Int), a = 1 ∨ a = 2 -> b = 1 ∨ b = 2 -> (a > b) = true -> pickGreater a b = a := by"
        ));
        // Samples enumerate the full given cross-product (4 combinations).
        assert!(lean.contains(
            "theorem pickGreater_law_ordered_sample_1 : (1 > 1) = true -> pickGreater 1 1 = 1 := by"
        ));
        assert!(lean.contains(
            "theorem pickGreater_law_ordered_sample_4 : (2 > 2) = true -> pickGreater 2 2 = 2 := by"
        ));
    }
1877
    #[test]
    fn transpile_uses_spec_theorem_names_for_declared_spec_laws() {
        // A law against `<fn>Spec` in the canonical shape gets the dedicated
        // `<fn>_eq_<fn>Spec` theorem family instead of generic law naming.
        let ctx = ctx_from_source(
            r#"
module SpecDemo
    intent =
        "spec demo"

fn absVal(x: Int) -> Int
    match x < 0
        true -> 0 - x
        false -> x

fn absValSpec(x: Int) -> Int
    match x < 0
        true -> 0 - x
        false -> x

verify absVal law absValSpec
    given x: Int = [-2, -1, 0, 1, 2]
    absVal(x) => absValSpec(x)
"#,
            "spec_demo",
        );
        let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- verify law absVal.spec absValSpec (5 cases)"));
        assert!(
            lean.contains(
                "theorem absVal_eq_absValSpec : ∀ (x : Int), absVal x = absValSpec x := by"
            )
        );
        assert!(lean.contains("theorem absVal_eq_absValSpec_checked_domain :"));
        assert!(lean.contains("theorem absVal_eq_absValSpec_sample_1 :"));
        // And the generic naming must NOT appear for canonical spec laws.
        assert!(!lean.contains("theorem absVal_law_absValSpec :"));
    }
1915
    #[test]
    fn transpile_keeps_noncanonical_spec_laws_as_regular_law_names() {
        // `fooSpec` takes an extra `seed` argument, so the law is not the
        // canonical `<fn>(x) => <fn>Spec(x)` shape and keeps plain law naming.
        let ctx = ctx_from_source(
            r#"
module SpecLawShape
    intent =
        "shape probe"

fn foo(x: Int) -> Int
    x + 1

fn fooSpec(seed: Int, x: Int) -> Int
    x + seed

verify foo law fooSpec
    given x: Int = [1, 2]
    foo(x) => fooSpec(1, x)
"#,
            "spec_law_shape",
        );
        let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- verify law foo.fooSpec (2 cases)"));
        assert!(lean.contains("theorem foo_law_fooSpec : ∀ (x : Int), foo x = fooSpec 1 x := by"));
        assert!(!lean.contains("theorem foo_eq_fooSpec :"));
    }
1943
1944 #[test]
1945 fn transpile_auto_proves_linear_int_canonical_spec_law_in_auto_mode() {
1946 let ctx = ctx_from_source(
1947 r#"
1948module SpecGap
1949 intent =
1950 "nontrivial canonical spec law"
1951
1952fn inc(x: Int) -> Int
1953 x + 1
1954
1955fn incSpec(x: Int) -> Int
1956 x + 2 - 1
1957
1958verify inc law incSpec
1959 given x: Int = [0, 1, 2]
1960 inc(x) => incSpec(x)
1961"#,
1962 "spec_gap",
1963 );
1964 let out = transpile(&ctx);
1965 let lean = generated_lean_file(&out);
1966
1967 assert!(lean.contains("-- verify law inc.spec incSpec (3 cases)"));
1968 assert!(lean.contains("theorem inc_eq_incSpec : ∀ (x : Int), inc x = incSpec x := by"));
1969 assert!(lean.contains("change (x + 1) = ((x + 2) - 1)"));
1970 assert!(lean.contains("omega"));
1971 assert!(!lean.contains(
1972 "-- universal theorem inc_eq_incSpec omitted: sampled law shape is not auto-proved yet"
1973 ));
1974 assert!(lean.contains("theorem inc_eq_incSpec_checked_domain :"));
1975 }
1976
1977 #[test]
1978 fn transpile_auto_proves_guarded_canonical_spec_law_in_auto_mode() {
1979 let ctx = ctx_from_source(
1980 r#"
1981module GuardedSpecGap
1982 intent =
1983 "guarded canonical spec law"
1984
1985fn clampNonNegative(x: Int) -> Int
1986 match x < 0
1987 true -> 0
1988 false -> x
1989
1990fn clampNonNegativeSpec(x: Int) -> Int
1991 match x < 0
1992 true -> 0
1993 false -> x
1994
1995verify clampNonNegative law clampNonNegativeSpec
1996 given x: Int = [-2, -1, 0, 1, 2]
1997 when x >= 0
1998 clampNonNegative(x) => clampNonNegativeSpec(x)
1999"#,
2000 "guarded_spec_gap",
2001 );
2002 let out = transpile(&ctx);
2003 let lean = generated_lean_file(&out);
2004
2005 assert!(lean.contains("-- when (x >= 0)"));
2006 assert!(lean.contains(
2007 "theorem clampNonNegative_eq_clampNonNegativeSpec : ∀ (x : Int), x = (-2) ∨ x = (-1) ∨ x = 0 ∨ x = 1 ∨ x = 2 -> (x >= 0) = true -> clampNonNegative x = clampNonNegativeSpec x := by"
2008 ));
2009 assert!(lean.contains("intro x h_x h_when"));
2010 assert!(lean.contains("simpa [clampNonNegative, clampNonNegativeSpec]"));
2011 assert!(!lean.contains(
2012 "-- universal theorem clampNonNegative_eq_clampNonNegativeSpec omitted: sampled law shape is not auto-proved yet"
2013 ));
2014 assert!(!lean.contains("cases h_x"));
2015 }
2016
2017 #[test]
2018 fn transpile_auto_proves_simp_normalized_canonical_spec_law_in_auto_mode() {
2019 let ctx = ctx_from_source(
2020 r#"
2021module SpecGapNonlinear
2022 intent =
2023 "nonlinear canonical spec law"
2024
2025fn square(x: Int) -> Int
2026 x * x
2027
2028fn squareSpec(x: Int) -> Int
2029 x * x + 0
2030
2031verify square law squareSpec
2032 given x: Int = [0, 1, 2]
2033 square(x) => squareSpec(x)
2034"#,
2035 "spec_gap_nonlinear",
2036 );
2037 let out = transpile(&ctx);
2038 let lean = generated_lean_file(&out);
2039
2040 assert!(lean.contains("-- verify law square.spec squareSpec (3 cases)"));
2041 assert!(
2042 lean.contains(
2043 "theorem square_eq_squareSpec : ∀ (x : Int), square x = squareSpec x := by"
2044 )
2045 );
2046 assert!(lean.contains("simp [square, squareSpec]"));
2047 assert!(!lean.contains(
2048 "-- universal theorem square_eq_squareSpec omitted: sampled law shape is not auto-proved yet"
2049 ));
2050 assert!(lean.contains("theorem square_eq_squareSpec_checked_domain :"));
2051 assert!(lean.contains("theorem square_eq_squareSpec_sample_1 :"));
2052 }
2053
    #[test]
    fn transpile_auto_proves_reflexive_law_with_rfl() {
        // A law whose LHS and RHS are the same variable (x => x) should be
        // closed by plain `rfl` after introducing the quantified variable.
        let mut ctx = empty_ctx();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "idLaw".to_string(),
            line: 1,
            // One concrete sampled case: 1 => 1.
            cases: vec![(
                sb(Expr::Literal(Literal::Int(1))),
                sb(Expr::Literal(Literal::Int(1))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "reflexive".to_string(),
                givens: vec![VerifyGiven {
                    name: "x".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
                }],
                when: None,
                // Syntactically reflexive claim: x = x.
                lhs: sb(Expr::Ident("x".to_string())),
                rhs: sb(Expr::Ident("x".to_string())),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem idLaw_law_reflexive : ∀ (x : Int), x = x := by"));
        assert!(lean.contains(" intro x"));
        assert!(lean.contains(" rfl"));
    }
2093
    #[test]
    fn transpile_auto_proves_identity_law_for_int_add_wrapper() {
        // `add a 0 = a` over an Int wrapper for `+` (provided by
        // empty_ctx_with_verify_law) should be auto-proved by unfolding
        // `add` with simp.
        let mut ctx = empty_ctx_with_verify_law();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 10,
            // Sampled case: add 1 0 => 1.
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(1))),
                        sb(Expr::Literal(Literal::Int(0))),
                    ],
                )),
                sb(Expr::Literal(Literal::Int(1))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "identityZero".to_string(),
                givens: vec![VerifyGiven {
                    name: "a".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![
                        sb(Expr::Literal(Literal::Int(0))),
                        sb(Expr::Literal(Literal::Int(1))),
                    ]),
                }],
                when: None,
                // Universal claim: add a 0 = a.
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Literal(Literal::Int(0))),
                    ],
                )),
                rhs: sb(Expr::Ident("a".to_string())),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem add_law_identityZero : ∀ (a : Int), add a 0 = a := by"));
        assert!(lean.contains(" intro a"));
        assert!(lean.contains(" simp [add]"));
    }
2148
    #[test]
    fn transpile_auto_proves_associative_law_for_int_add_wrapper() {
        // `add (add a b) c = add a (add b c)` should unfold `add` and close
        // with `Int.add_assoc` via simp.
        let mut ctx = empty_ctx_with_verify_law();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 20,
            // Sampled case: add (add 1 2) 3 => add 1 (add 2 3).
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::FnCall(
                            sbb(Expr::Ident("add".to_string())),
                            vec![
                                sb(Expr::Literal(Literal::Int(1))),
                                sb(Expr::Literal(Literal::Int(2))),
                            ],
                        )),
                        sb(Expr::Literal(Literal::Int(3))),
                    ],
                )),
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(1))),
                        sb(Expr::FnCall(
                            sbb(Expr::Ident("add".to_string())),
                            vec![
                                sb(Expr::Literal(Literal::Int(2))),
                                sb(Expr::Literal(Literal::Int(3))),
                            ],
                        )),
                    ],
                )),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "associative".to_string(),
                // Single-point domains: the samples only seed the checker.
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            1,
                        )))]),
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            2,
                        )))]),
                    },
                    VerifyGiven {
                        name: "c".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            3,
                        )))]),
                    },
                ],
                when: None,
                // Universal claim: add (add a b) c = add a (add b c).
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::FnCall(
                            sbb(Expr::Ident("add".to_string())),
                            vec![
                                sb(Expr::Ident("a".to_string())),
                                sb(Expr::Ident("b".to_string())),
                            ],
                        )),
                        sb(Expr::Ident("c".to_string())),
                    ],
                )),
                rhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::FnCall(
                            sbb(Expr::Ident("add".to_string())),
                            vec![
                                sb(Expr::Ident("b".to_string())),
                                sb(Expr::Ident("c".to_string())),
                            ],
                        )),
                    ],
                )),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains(
            "theorem add_law_associative : ∀ (a : Int) (b : Int) (c : Int), add (add a b) c = add a (add b c) := by"
        ));
        assert!(lean.contains(" intro a b c"));
        assert!(lean.contains(" simp [add, Int.add_assoc]"));
    }
2256
    #[test]
    fn transpile_auto_proves_sub_laws() {
        // Two laws over a subtraction wrapper: right identity `sub a 0 = a`
        // and anti-commutativity `sub a b = -(sub b a)`.
        let mut ctx = empty_ctx();
        // fn sub(a: Int, b: Int) -> Int = a - b
        let sub = FnDef {
            name: "sub".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Sub,
                sbb(Expr::Ident("a".to_string())),
                sbb(Expr::Ident("b".to_string())),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(sub.clone());
        ctx.items.push(TopLevel::FnDef(sub));

        // Law 1: rightIdentity — sub a 0 = a (sample: sub 2 0 => 2).
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "sub".to_string(),
            line: 10,
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(2))),
                        sb(Expr::Literal(Literal::Int(0))),
                    ],
                )),
                sb(Expr::Literal(Literal::Int(2))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "rightIdentity".to_string(),
                givens: vec![VerifyGiven {
                    name: "a".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
                }],
                when: None,
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Literal(Literal::Int(0))),
                    ],
                )),
                rhs: sb(Expr::Ident("a".to_string())),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        // Law 2: antiCommutative — sub a b = 0 - sub b a (negation is
        // spelled as `0 - _` in the Aver AST).
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "sub".to_string(),
            line: 20,
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(2))),
                        sb(Expr::Literal(Literal::Int(1))),
                    ],
                )),
                sb(Expr::BinOp(
                    BinOp::Sub,
                    sbb(Expr::Literal(Literal::Int(0))),
                    sbb(Expr::FnCall(
                        sbb(Expr::Ident("sub".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(1))),
                            sb(Expr::Literal(Literal::Int(2))),
                        ],
                    )),
                )),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "antiCommutative".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            2,
                        )))]),
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            1,
                        )))]),
                    },
                ],
                when: None,
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Ident("b".to_string())),
                    ],
                )),
                rhs: sb(Expr::BinOp(
                    BinOp::Sub,
                    sbb(Expr::Literal(Literal::Int(0))),
                    sbb(Expr::FnCall(
                        sbb(Expr::Ident("sub".to_string())),
                        vec![
                            sb(Expr::Ident("b".to_string())),
                            sb(Expr::Ident("a".to_string())),
                        ],
                    )),
                )),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem sub_law_rightIdentity : ∀ (a : Int), sub a 0 = a := by"));
        assert!(lean.contains(" simp [sub]"));
        // `0 - _` is rendered as Lean negation; the proof goes through
        // Int.neg_sub.
        assert!(lean.contains(
            "theorem sub_law_antiCommutative : ∀ (a : Int) (b : Int), sub a b = (-sub b a) := by"
        ));
        assert!(lean.contains(" simpa [sub] using (Int.neg_sub b a).symm"));
    }
2401
    #[test]
    fn transpile_auto_proves_unary_wrapper_equivalence_law() {
        // `addOne n = add n 1`: a unary wrapper expressed through another
        // user function should be discharged by unfolding both definitions.
        let mut ctx = empty_ctx();
        // fn add(a: Int, b: Int) -> Int = a + b
        let add = FnDef {
            name: "add".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("a".to_string())),
                sbb(Expr::Ident("b".to_string())),
            )))),
            resolution: None,
        };
        // fn addOne(n: Int) -> Int = n + 1
        let add_one = FnDef {
            name: "addOne".to_string(),
            line: 2,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("n".to_string())),
                sbb(Expr::Literal(Literal::Int(1))),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(add.clone());
        ctx.fn_defs.push(add_one.clone());
        ctx.items.push(TopLevel::FnDef(add));
        ctx.items.push(TopLevel::FnDef(add_one));
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "addOne".to_string(),
            line: 3,
            // Sampled case: addOne 2 => add 2 1.
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![sb(Expr::Literal(Literal::Int(2)))],
                )),
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(2))),
                        sb(Expr::Literal(Literal::Int(1))),
                    ],
                )),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "identityViaAdd".to_string(),
                givens: vec![VerifyGiven {
                    name: "n".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
                }],
                when: None,
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![sb(Expr::Ident("n".to_string()))],
                )),
                rhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("n".to_string())),
                        sb(Expr::Literal(Literal::Int(1))),
                    ],
                )),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean
            .contains("theorem addOne_law_identityViaAdd : ∀ (n : Int), addOne n = add n 1 := by"));
        assert!(lean.contains(" simp [addOne, add]"));
    }
2497
    #[test]
    fn transpile_auto_proves_direct_map_set_laws() {
        // Laws phrased directly over Map builtins (not through a user fn)
        // should be matched to the AverMap lemmas has_set_self/get_set_self.
        let mut ctx = empty_ctx();

        // AST helpers for the builtin call shapes used below.
        let map_set = |m: Spanned<Expr>, k: Spanned<Expr>, v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "set".to_string(),
                )),
                vec![m, k, v],
            ))
        };
        let map_has = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "has".to_string(),
                )),
                vec![m, k],
            ))
        };
        let map_get = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "get".to_string(),
                )),
                vec![m, k],
            ))
        };
        let some = |v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Option".to_string())),
                    "Some".to_string(),
                )),
                vec![v],
            ))
        };
        let map_empty = || {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "empty".to_string(),
                )),
                vec![],
            ))
        };

        // Law 1: setHasKey — has (set m k v) k = true.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "map".to_string(),
            line: 1,
            cases: vec![(
                map_has(
                    map_set(
                        sb(Expr::Ident("m".to_string())),
                        sb(Expr::Ident("k".to_string())),
                        sb(Expr::Ident("v".to_string())),
                    ),
                    sb(Expr::Ident("k".to_string())),
                ),
                sb(Expr::Literal(Literal::Bool(true))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "setHasKey".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "m".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                    },
                    VerifyGiven {
                        name: "k".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
                            "a".to_string(),
                        )))]),
                    },
                    VerifyGiven {
                        name: "v".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            1,
                        )))]),
                    },
                ],
                when: None,
                lhs: map_has(
                    map_set(
                        sb(Expr::Ident("m".to_string())),
                        sb(Expr::Ident("k".to_string())),
                        sb(Expr::Ident("v".to_string())),
                    ),
                    sb(Expr::Ident("k".to_string())),
                ),
                rhs: sb(Expr::Literal(Literal::Bool(true))),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        // Law 2: setGetKey — get (set m k v) k = Some v.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "map".to_string(),
            line: 2,
            cases: vec![(
                map_get(
                    map_set(
                        sb(Expr::Ident("m".to_string())),
                        sb(Expr::Ident("k".to_string())),
                        sb(Expr::Ident("v".to_string())),
                    ),
                    sb(Expr::Ident("k".to_string())),
                ),
                some(sb(Expr::Ident("v".to_string()))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "setGetKey".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "m".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                    },
                    VerifyGiven {
                        name: "k".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
                            "a".to_string(),
                        )))]),
                    },
                    VerifyGiven {
                        name: "v".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            1,
                        )))]),
                    },
                ],
                when: None,
                lhs: map_get(
                    map_set(
                        sb(Expr::Ident("m".to_string())),
                        sb(Expr::Ident("k".to_string())),
                        sb(Expr::Ident("v".to_string())),
                    ),
                    sb(Expr::Ident("k".to_string())),
                ),
                rhs: some(sb(Expr::Ident("v".to_string()))),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("simpa using AverMap.has_set_self m k v"));
        assert!(lean.contains("simpa using AverMap.get_set_self m k v"));
    }
2671
2672 #[test]
2673 fn transpile_auto_proves_direct_recursive_sum_law_by_structural_induction() {
2674 let ctx = ctx_from_source(
2675 r#"
2676module Mirror
2677 intent =
2678 "direct recursive sum induction probe"
2679
2680type Tree
2681 Leaf(Int)
2682 Node(Tree, Tree)
2683
2684fn mirror(t: Tree) -> Tree
2685 match t
2686 Tree.Leaf(v) -> Tree.Leaf(v)
2687 Tree.Node(left, right) -> Tree.Node(mirror(right), mirror(left))
2688
2689verify mirror law involutive
2690 given t: Tree = [Tree.Leaf(1), Tree.Node(Tree.Leaf(1), Tree.Leaf(2))]
2691 mirror(mirror(t)) => t
2692"#,
2693 "mirror",
2694 );
2695 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
2696 let lean = generated_lean_file(&out);
2697
2698 assert!(
2699 lean.contains(
2700 "theorem mirror_law_involutive : ∀ (t : Tree), mirror (mirror t) = t := by"
2701 )
2702 );
2703 assert!(lean.contains(" induction t with"));
2704 assert!(lean.contains(" | leaf f0 => simp [mirror]"));
2705 assert!(lean.contains(" | node f0 f1 ih0 ih1 => simp_all [mirror]"));
2706 assert!(!lean.contains(
2707 "-- universal theorem mirror_law_involutive omitted: sampled law shape is not auto-proved yet"
2708 ));
2709 }
2710
    #[test]
    fn transpile_auto_proves_map_update_laws() {
        // A word-count style Map update (`incCount`) built from a binding
        // plus an Option match. Two laws: the touched key is present, and an
        // existing key's value is incremented via addOne.
        let mut ctx = empty_ctx();

        // AST helpers for the Map/Option builtin call shapes used below.
        let map_get = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "get".to_string(),
                )),
                vec![m, k],
            ))
        };
        let map_set = |m: Spanned<Expr>, k: Spanned<Expr>, v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "set".to_string(),
                )),
                vec![m, k, v],
            ))
        };
        let map_has = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "has".to_string(),
                )),
                vec![m, k],
            ))
        };
        let option_some = |v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Option".to_string())),
                    "Some".to_string(),
                )),
                vec![v],
            ))
        };
        let option_with_default = |opt: Spanned<Expr>, def: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Option".to_string())),
                    "withDefault".to_string(),
                )),
                vec![opt, def],
            ))
        };
        let map_empty = || {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "empty".to_string(),
                )),
                vec![],
            ))
        };

        // fn addOne(n: Int) -> Int = n + 1
        let add_one = FnDef {
            name: "addOne".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("n".to_string())),
                sbb(Expr::Literal(Literal::Int(1))),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(add_one.clone());
        ctx.items.push(TopLevel::FnDef(add_one));

        // fn incCount(counts, word): bind `current = get counts word`, then
        // match Some(n) -> set counts word (n + 1) | None -> set counts word 1.
        let inc_count = FnDef {
            name: "incCount".to_string(),
            line: 2,
            params: vec![
                ("counts".to_string(), "Map<String, Int>".to_string()),
                ("word".to_string(), "String".to_string()),
            ],
            return_type: "Map<String, Int>".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::Block(vec![
                Stmt::Binding(
                    "current".to_string(),
                    None,
                    map_get(
                        sb(Expr::Ident("counts".to_string())),
                        sb(Expr::Ident("word".to_string())),
                    ),
                ),
                Stmt::Expr(sb(Expr::Match {
                    subject: sbb(Expr::Ident("current".to_string())),
                    arms: vec![
                        MatchArm {
                            pattern: Pattern::Constructor(
                                "Option.Some".to_string(),
                                vec!["n".to_string()],
                            ),
                            body: Box::new(map_set(
                                sb(Expr::Ident("counts".to_string())),
                                sb(Expr::Ident("word".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("n".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            )),
                        },
                        MatchArm {
                            pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
                            body: Box::new(map_set(
                                sb(Expr::Ident("counts".to_string())),
                                sb(Expr::Ident("word".to_string())),
                                sb(Expr::Literal(Literal::Int(1))),
                            )),
                        },
                    ],
                })),
            ])),
            resolution: None,
        };
        ctx.fn_defs.push(inc_count.clone());
        ctx.items.push(TopLevel::FnDef(inc_count));

        // Law 1: keyPresent — has (incCount counts word) word = true.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "incCount".to_string(),
            line: 10,
            cases: vec![(
                map_has(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Ident("word".to_string())),
                        ],
                    )),
                    sb(Expr::Ident("word".to_string())),
                ),
                sb(Expr::Literal(Literal::Bool(true))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "keyPresent".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "counts".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                    },
                    VerifyGiven {
                        name: "word".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
                            "a".to_string(),
                        )))]),
                    },
                ],
                when: None,
                lhs: map_has(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Ident("word".to_string())),
                        ],
                    )),
                    sb(Expr::Ident("word".to_string())),
                ),
                rhs: sb(Expr::Literal(Literal::Bool(true))),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        // Law 2: existingKeyIncrements — get (incCount counts "a") "a"
        // = Some (addOne (withDefault (get counts "a") 0)).
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "incCount".to_string(),
            line: 20,
            cases: vec![(
                map_get(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ],
                    )),
                    sb(Expr::Literal(Literal::Str("a".to_string()))),
                ),
                option_some(sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![option_with_default(
                        map_get(
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ),
                        sb(Expr::Literal(Literal::Int(0))),
                    )],
                ))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "existingKeyIncrements".to_string(),
                givens: vec![VerifyGiven {
                    name: "counts".to_string(),
                    type_name: "Map<String, Int>".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                }],
                when: None,
                lhs: map_get(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ],
                    )),
                    sb(Expr::Literal(Literal::Str("a".to_string()))),
                ),
                rhs: option_some(sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![option_with_default(
                        map_get(
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ),
                        sb(Expr::Literal(Literal::Int(0))),
                    )],
                ))),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        // Both proofs case-split on the map lookup, then use the AverMap
        // set/get lemmas.
        assert!(
            lean.contains("cases h : AverMap.get counts word <;> simp [AverMap.has_set_self]"),
            "expected keyPresent auto-proof with has_set_self"
        );
        assert!(
            lean.contains("cases h : AverMap.get counts \"a\" <;> simp [AverMap.get_set_self, addOne, incCount]"),
            "expected existingKeyIncrements auto-proof with get_set_self"
        );
    }
2972
    #[test]
    fn transpile_parenthesizes_negative_int_call_args_in_law_samples() {
        // Negative integer literals used as call arguments in sample
        // theorems must be parenthesized — `add (-2) (-1)`, not `add -2 -1`,
        // which Lean would parse as subtraction.
        let mut ctx = empty_ctx();
        // fn add(a: Int, b: Int) -> Int = a + b
        let add = FnDef {
            name: "add".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("a".to_string())),
                sbb(Expr::Ident("b".to_string())),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(add.clone());
        ctx.items.push(TopLevel::FnDef(add));
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 1,
            // Sampled case with negative literals on both sides.
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(-2))),
                        sb(Expr::Literal(Literal::Int(-1))),
                    ],
                )),
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(-1))),
                        sb(Expr::Literal(Literal::Int(-2))),
                    ],
                )),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "commutative".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            -2,
                        )))]),
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            -1,
                        )))]),
                    },
                ],
                when: None,
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Ident("b".to_string())),
                    ],
                )),
                rhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("b".to_string())),
                        sb(Expr::Ident("a".to_string())),
                    ],
                )),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains(
            "theorem add_law_commutative_sample_1 : add (-2) (-1) = add (-1) (-2) := by native_decide"
        ));
    }
3067
    // Law sample theorems must be numbered per law name, not per verify block:
    // a plain `cases` verify block and a `law identity` block on the same
    // function must not share one counter.
    #[test]
    fn verify_law_numbering_is_scoped_per_law_name() {
        let mut ctx = empty_ctx();
        // Identity function `f(x: Int) -> Int = x`, target of both verify blocks.
        let f = FnDef {
            name: "f".to_string(),
            line: 1,
            params: vec![("x".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Ident("x".to_string())))),
            resolution: None,
        };
        ctx.fn_defs.push(f.clone());
        ctx.items.push(TopLevel::FnDef(f));
        // First verify block: a plain case check (1 => 1).
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 1,
            cases: vec![(
                sb(Expr::Literal(Literal::Int(1))),
                sb(Expr::Literal(Literal::Int(1))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Cases,
            trace: false,
            cases_givens: vec![],
        }));
        // Second verify block: law `identity` with a single explicit sample x = 2.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 2,
            cases: vec![(
                sb(Expr::Literal(Literal::Int(2))),
                sb(Expr::Literal(Literal::Int(2))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "identity".to_string(),
                givens: vec![VerifyGiven {
                    name: "x".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
                }],
                when: None,
                lhs: sb(Expr::Ident("x".to_string())),
                rhs: sb(Expr::Ident("x".to_string())),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        // The cases block uses the `_verify_` counter; the law block gets its own
        // `_law_identity_sample_` counter starting at 1 (and emits no sample 2).
        assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
        assert!(lean.contains("theorem f_law_identity : ∀ (x : Int), x = x := by"));
        assert!(lean.contains("theorem f_law_identity_sample_1 : 2 = 2 := by"));
        assert!(!lean.contains("theorem f_law_identity_sample_2 : 2 = 2 := by"));
    }
3135
    // A single self-recursive function counting an Int down to a literal 0
    // base case is inside the proof subset, and is compiled via a fuelized
    // worker instead of `partial def`.
    #[test]
    fn proof_mode_accepts_single_int_countdown_recursion() {
        let mut ctx = empty_ctx();
        // down(n) = match n { 0 -> 0, _ -> down(n - 1) }
        let down = FnDef {
            name: "down".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: sbb(Expr::Literal(Literal::Int(0))),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "down".to_string(),
                            vec![sb(Expr::BinOp(
                                BinOp::Sub,
                                sbb(Expr::Ident("n".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            ))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(down.clone()));
        ctx.fn_defs.push(down);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected Int countdown recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        // The fuel seed is |n| + 1, so the worker always has enough steps.
        assert!(lean.contains("def down__fuel"));
        assert!(lean.contains("def down (n : Int) : Int :="));
        assert!(lean.contains("down__fuel ((Int.natAbs n) + 1) n"));
    }
3188
    // The decreasing Int does not have to be the first parameter: `repeatLike`
    // recurses on its second (Int) parameter, guarded by `n <= 0`.
    #[test]
    fn proof_mode_accepts_single_int_countdown_on_nonfirst_param() {
        let mut ctx = empty_ctx();
        // repeatLike(char, n) = match n <= 0 { true -> [], false -> repeatLike(char, n - 1) }
        let repeat_like = FnDef {
            name: "repeatLike".to_string(),
            line: 1,
            params: vec![
                ("char".to_string(), "String".to_string()),
                ("n".to_string(), "Int".to_string()),
            ],
            return_type: "List<String>".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::BinOp(
                    BinOp::Lte,
                    sbb(Expr::Ident("n".to_string())),
                    sbb(Expr::Literal(Literal::Int(0))),
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        body: sbb(Expr::List(vec![])),
                    },
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(false)),
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "repeatLike".to_string(),
                            vec![
                                sb(Expr::Ident("char".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Sub,
                                    sbb(Expr::Ident("n".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(repeat_like.clone()));
        ctx.fn_defs.push(repeat_like);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected non-first Int countdown recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        // Fuel is seeded from the decreasing param `n`, while the wrapper keeps
        // the original parameter order.
        assert!(lean.contains("def repeatLike__fuel"));
        assert!(lean.contains("def repeatLike (char : String) (n : Int) : List String :="));
        assert!(lean.contains("repeatLike__fuel ((Int.natAbs n) + 1) char n"));
    }
3251
    // Recursion that moves a negative Int *up* toward its guard (angle + 360
    // while angle < 0) also terminates, and is fuelized from |angle| + 1.
    #[test]
    fn proof_mode_accepts_negative_guarded_int_ascent() {
        let mut ctx = empty_ctx();
        // normalize(angle) = match angle < 0 { true -> normalize(angle + 360), false -> angle }
        let normalize = FnDef {
            name: "normalize".to_string(),
            line: 1,
            params: vec![("angle".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::BinOp(
                    BinOp::Lt,
                    sbb(Expr::Ident("angle".to_string())),
                    sbb(Expr::Literal(Literal::Int(0))),
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "normalize".to_string(),
                            vec![sb(Expr::BinOp(
                                BinOp::Add,
                                sbb(Expr::Ident("angle".to_string())),
                                sbb(Expr::Literal(Literal::Int(360))),
                            ))],
                        )))),
                    },
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(false)),
                        body: sbb(Expr::Ident("angle".to_string())),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(normalize.clone()));
        ctx.fn_defs.push(normalize);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected negative-guarded Int ascent recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("def normalize__fuel"));
        assert!(lean.contains("normalize__fuel ((Int.natAbs angle) + 1) angle"));
    }
3307
    // Classic structural recursion over a list (empty / cons tail) must pass
    // the proof-subset check with no issues reported.
    #[test]
    fn proof_mode_accepts_single_list_structural_recursion() {
        let mut ctx = empty_ctx();
        // len(xs) = match xs { [] -> 0, h :: t -> len(t) }
        let len = FnDef {
            name: "len".to_string(),
            line: 1,
            params: vec![("xs".to_string(), "List<Int>".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("xs".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::EmptyList,
                        body: sbb(Expr::Literal(Literal::Int(0))),
                    },
                    MatchArm {
                        pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "len".to_string(),
                            vec![sb(Expr::Ident("t".to_string()))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(len.clone()));
        ctx.fn_defs.push(len);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected List structural recursion to be accepted, got: {:?}",
            issues
        );
    }
3346
    // Structural list recursion on a non-first parameter (an accumulator comes
    // first) compiles to a total def with a `termination_by` on the list.
    #[test]
    fn proof_mode_accepts_single_list_structural_recursion_on_nonfirst_param() {
        let mut ctx = empty_ctx();
        // lenFrom(count, xs) = match xs { [] -> count, h :: t -> lenFrom(count + 1, t) }
        let len_from = FnDef {
            name: "lenFrom".to_string(),
            line: 1,
            params: vec![
                ("count".to_string(), "Int".to_string()),
                ("xs".to_string(), "List<Int>".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("xs".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::EmptyList,
                        body: sbb(Expr::Ident("count".to_string())),
                    },
                    MatchArm {
                        pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "lenFrom".to_string(),
                            vec![
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("count".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                                sb(Expr::Ident("t".to_string())),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(len_from.clone()));
        ctx.fn_defs.push(len_from);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected non-first List structural recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // A structural measure, not a `partial def`, must be emitted.
        assert!(lean.contains("termination_by xs.length"));
        assert!(!lean.contains("partial def lenFrom"));
    }
3400
    // String-scanning recursion that advances an Int position until
    // String.charAt falls off the end (Option.None) is accepted and fuelized.
    #[test]
    fn proof_mode_accepts_single_string_pos_advance_recursion() {
        let mut ctx = empty_ctx();
        // skipWs(s, pos) = match String.charAt(s, pos) { None -> pos, _ -> skipWs(s, pos + 1) }
        let skip_ws = FnDef {
            name: "skipWs".to_string(),
            line: 1,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::FnCall(
                    sbb(Expr::Attr(
                        sbb(Expr::Ident("String".to_string())),
                        "charAt".to_string(),
                    )),
                    vec![
                        sb(Expr::Ident("s".to_string())),
                        sb(Expr::Ident("pos".to_string())),
                    ],
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
                        body: sbb(Expr::Ident("pos".to_string())),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "skipWs".to_string(),
                            vec![
                                sb(Expr::Ident("s".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("pos".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(skip_ws.clone()));
        ctx.fn_defs.push(skip_ws);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected String+pos recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("def skipWs__fuel"));
        assert!(!lean.contains("partial def skipWs"));
    }
3463
    // Mutually recursive even/odd where every cross-call strictly decreases
    // the Int is accepted; both functions get fuelized workers.
    #[test]
    fn proof_mode_accepts_mutual_int_countdown_recursion() {
        let mut ctx = empty_ctx();
        // even(n) = match n { 0 -> true, _ -> odd(n - 1) }
        let even = FnDef {
            name: "even".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Bool".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: sbb(Expr::Literal(Literal::Bool(true))),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "odd".to_string(),
                            vec![sb(Expr::BinOp(
                                BinOp::Sub,
                                sbb(Expr::Ident("n".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            ))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        // odd(n) = match n { 0 -> false, _ -> even(n - 1) }
        let odd = FnDef {
            name: "odd".to_string(),
            line: 2,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Bool".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: sbb(Expr::Literal(Literal::Bool(false))),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "even".to_string(),
                            vec![sb(Expr::BinOp(
                                BinOp::Sub,
                                sbb(Expr::Ident("n".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            ))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(even.clone()));
        ctx.items.push(TopLevel::FnDef(odd.clone()));
        ctx.fn_defs.push(even);
        ctx.fn_defs.push(odd);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual Int countdown recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("def even__fuel"));
        assert!(lean.contains("def odd__fuel"));
        assert!(lean.contains("def even (n : Int) : Bool :="));
        assert!(lean.contains("even__fuel ((Int.natAbs n) + 1) n"));
    }
3544
    // Mutual recursion where one edge keeps `pos` unchanged (f -> g) while the
    // other increases it toward the `pos >= 3` guard (g -> f). The
    // non-decreasing "same" edge must be handled (ranked) rather than rejected.
    #[test]
    fn proof_mode_accepts_mutual_string_pos_recursion_with_ranked_same_edges() {
        let mut ctx = empty_ctx();
        // f(s, pos) = match pos >= 3 { true -> pos, _ -> g(s, pos) }
        let f = FnDef {
            name: "f".to_string(),
            line: 1,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::BinOp(
                    BinOp::Gte,
                    sbb(Expr::Ident("pos".to_string())),
                    sbb(Expr::Literal(Literal::Int(3))),
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        body: sbb(Expr::Ident("pos".to_string())),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "g".to_string(),
                            vec![
                                sb(Expr::Ident("s".to_string())),
                                sb(Expr::Ident("pos".to_string())),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        // g(s, pos) = match pos >= 3 { true -> pos, _ -> f(s, pos + 1) }
        let g = FnDef {
            name: "g".to_string(),
            line: 2,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::BinOp(
                    BinOp::Gte,
                    sbb(Expr::Ident("pos".to_string())),
                    sbb(Expr::Literal(Literal::Int(3))),
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        body: sbb(Expr::Ident("pos".to_string())),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "f".to_string(),
                            vec![
                                sb(Expr::Ident("s".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("pos".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(f.clone()));
        ctx.items.push(TopLevel::FnDef(g.clone()));
        ctx.fn_defs.push(f);
        ctx.fn_defs.push(g);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual String+pos recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("def f__fuel"));
        assert!(lean.contains("def g__fuel"));
        assert!(!lean.contains("partial def f"));
    }
3640
    // Mutual recursion where f forwards the list to g unchanged (a "same"
    // edge) and g only calls back to f on the strictly smaller tail. The pair
    // must be accepted and emitted totally inside a `mutual` block.
    #[test]
    fn proof_mode_accepts_mutual_ranked_sizeof_recursion() {
        let mut ctx = empty_ctx();
        // f(xs) = g("acc", xs)
        let f = FnDef {
            name: "f".to_string(),
            line: 1,
            params: vec![("xs".to_string(), "List<Int>".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::TailCall(Box::new(
                TailCallData::new(
                    "g".to_string(),
                    vec![
                        sb(Expr::Literal(Literal::Str("acc".to_string()))),
                        sb(Expr::Ident("xs".to_string())),
                    ],
                ),
            ))))),
            resolution: None,
        };
        // g(acc, xs) = match xs { [] -> 0, h :: t -> f(t) }
        let g = FnDef {
            name: "g".to_string(),
            line: 2,
            params: vec![
                ("acc".to_string(), "String".to_string()),
                ("xs".to_string(), "List<Int>".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("xs".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::EmptyList,
                        body: sbb(Expr::Literal(Literal::Int(0))),
                    },
                    MatchArm {
                        pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "f".to_string(),
                            vec![sb(Expr::Ident("t".to_string()))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(f.clone()));
        ctx.items.push(TopLevel::FnDef(g.clone()));
        ctx.fn_defs.push(f);
        ctx.fn_defs.push(g);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual ranked-sizeOf recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("mutual"));
        assert!(lean.contains("def f__fuel"));
        assert!(lean.contains("def g__fuel"));
        assert!(!lean.contains("partial def f"));
        assert!(!lean.contains("partial def g"));
    }
3710
    // A self-call with no decreasing argument (`loop(n) = loop(n)`, written as
    // a plain FnCall rather than a recognized TailCall shape) must be reported
    // as outside the proof subset.
    #[test]
    fn proof_mode_rejects_recursive_pure_functions() {
        let mut ctx = empty_ctx();
        let recursive_fn = FnDef {
            name: "loop".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::FnCall(
                sbb(Expr::Ident("loop".to_string())),
                vec![sb(Expr::Ident("n".to_string()))],
            )))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(recursive_fn.clone()));
        ctx.fn_defs.push(recursive_fn);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.iter().any(|i| i.contains("outside proof subset")),
            "expected recursive function blocker, got: {:?}",
            issues
        );
    }
3737
    // Recursive sum types are allowed in proof mode: no "unsafe DecidableEq
    // shim" blocker may be raised for them.
    #[test]
    fn proof_mode_allows_recursive_types() {
        let mut ctx = empty_ctx();
        // type Node = Cons(Node) — a directly self-referential variant field.
        let recursive_type = TypeDef::Sum {
            name: "Node".to_string(),
            variants: vec![TypeVariant {
                name: "Cons".to_string(),
                fields: vec!["Node".to_string()],
            }],
            line: 1,
        };
        ctx.items.push(TopLevel::TypeDef(recursive_type.clone()));
        ctx.type_defs.push(recursive_type);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues
                .iter()
                .all(|i| !i.contains("recursive types require unsafe DecidableEq shim")),
            "did not expect recursive type blocker, got: {:?}",
            issues
        );
    }
3761
3762 #[test]
3763 fn law_auto_example_exports_real_proof_artifacts() {
3764 let ctx = ctx_from_source(
3765 include_str!("../../../examples/formal/law_auto.av"),
3766 "law_auto",
3767 );
3768 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3769 let lean = generated_lean_file(&out);
3770
3771 assert!(lean.contains("theorem add_law_commutative :"));
3772 assert!(lean.contains("theorem id'_law_reflexive : ∀ (x : Int), x = x := by"));
3773 assert!(lean.contains("theorem incCount_law_keyPresent :"));
3774 assert!(lean.contains("AverMap.has_set_self"));
3775 assert!(lean.contains("theorem add_law_commutative_sample_1 :"));
3776 assert!(lean.contains(":= by native_decide"));
3777 }
3778
3779 #[test]
3780 fn json_example_stays_inside_proof_subset() {
3781 let ctx = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
3782 let issues = proof_mode_issues(&ctx);
3783 assert!(
3784 issues.is_empty(),
3785 "expected json example to stay inside proof subset, got: {:?}",
3786 issues
3787 );
3788 }
3789
3790 #[test]
3791 fn json_example_uses_total_defs_and_domain_guarded_laws_in_proof_mode() {
3792 let ctx = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
3793 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3794 let lean = generated_lean_file(&out);
3795
3796 assert!(!lean.contains("partial def"));
3797 assert!(lean.contains("def skipWs__fuel"));
3798 assert!(lean.contains("def parseValue__fuel"));
3799 assert!(lean.contains("def toString' (j : Json) : String :="));
3800 assert!(
3801 lean.contains(
3802 "def averMeasureJsonEntries_String (items : List (String × Json)) : Nat :="
3803 )
3804 );
3805 assert!(lean.contains(
3806 "| .jsonObject x0 => (averMeasureJsonEntries_String (AverMap.entries x0)) + 1"
3807 ));
3808 assert!(lean.contains("-- when jsonRoundtripSafe j"));
3809 assert!(!lean.contains("-- hint: verify law '"));
3810 assert!(!lean.contains("private theorem toString'_law_parseRoundtrip_aux"));
3811 assert!(
3812 lean.contains(
3813 "theorem toString'_law_parseRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
3814 )
3815 );
3816 assert!(lean.contains(
3817 "jsonRoundtripSafe j = true -> fromString (toString' j) = Except.ok j := by"
3818 ));
3819 assert!(
3820 lean.contains("theorem finishFloat_law_fromCanonicalFloat : ∀ (f : Float), f = 3.5 ∨")
3821 );
3822 assert!(lean.contains("theorem finishInt_law_fromCanonicalInt_checked_domain :"));
3823 assert!(lean.contains(
3824 "theorem toString'_law_parseValueRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
3825 ));
3826 assert!(lean.contains("theorem toString'_law_parseRoundtrip_sample_1 :"));
3827 assert!(lean.contains(
3828 "example : fromString \"null\" = Except.ok Json.jsonNull := by native_decide"
3829 ));
3830 }
3831
    // Using HTTP/TCP builtins and Vector.get in Aver source must make the
    // transpiler inject the corresponding Lean structures (HttpResponse,
    // HttpRequest, Tcp_Connection) and the header-map association-list type.
    #[test]
    fn transpile_injects_builtin_network_types_and_vector_get_support() {
        let ctx = ctx_from_source(
            r#"
fn firstOrMissing(xs: Vector<String>) -> Result<String, String>
  Option.toResult(Vector.get(xs, 0), "missing")

fn defaultHeaders() -> Map<String, List<String>>
  {"content-type" => ["application/json"]}

fn mkResponse(body: String) -> HttpResponse
  HttpResponse(status = 200, body = body, headers = defaultHeaders())

fn requestPath(req: HttpRequest) -> String
  req.path

fn connPort(conn: Tcp.Connection) -> Int
  conn.port
"#,
            "network_helpers",
        );
        let out = transpile(&ctx);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("structure HttpResponse where"));
        assert!(lean.contains("structure HttpRequest where"));
        assert!(lean.contains("structure Tcp_Connection where"));
        assert!(lean.contains("port : Int"));
        // Headers are represented as an association list, not a Lean HashMap.
        assert!(lean.contains("List (String × List String)"));
    }
3863
3864 #[test]
3865 fn law_auto_example_has_no_sorry_in_proof_mode() {
3866 let ctx = ctx_from_source(
3867 include_str!("../../../examples/formal/law_auto.av"),
3868 "law_auto",
3869 );
3870 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3871 let lean = generated_lean_file(&out);
3872 assert!(
3873 !lean.contains("sorry"),
3874 "expected law_auto proof export to avoid sorry, got:\n{}",
3875 lean
3876 );
3877 }
3878
    // NOTE(review): the name claims "no sorry", but the body asserts that the
    // export DOES contain `sorry` — presumably for universal theorems that are
    // not auto-proved yet (compare the rle test's message). The name looks
    // stale; confirm the intent and rename accordingly.
    #[test]
    fn map_example_has_no_sorry_in_proof_mode() {
        let ctx = ctx_from_source(include_str!("../../../examples/data/map.av"), "map");
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected map example to stay inside proof subset, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("theorem incCount_law_trackedCountStepsByOne :"));
        // `sorry` is expected to remain in the export (see NOTE above).
        assert!(lean.contains("sorry"));
        assert!(lean.contains("theorem countWords_law_presenceMatchesContains_sample_1 :"));
        assert!(lean.contains("theorem countWords_law_trackedWordCount_sample_1 :"));
        // The AverMap prelude lemmas must be referenced by the proved laws.
        assert!(lean.contains("AverMap.has_set_self"));
        assert!(lean.contains("AverMap.get_set_self"));
    }
3900
3901 #[test]
3902 fn spec_laws_example_has_no_sorry_in_proof_mode() {
3903 let ctx = ctx_from_source(
3904 include_str!("../../../examples/formal/spec_laws.av"),
3905 "spec_laws",
3906 );
3907 let issues = proof_mode_issues(&ctx);
3908 assert!(
3909 issues.is_empty(),
3910 "expected spec_laws example to stay inside proof subset, got: {:?}",
3911 issues
3912 );
3913
3914 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3915 let lean = generated_lean_file(&out);
3916 assert!(
3917 !lean.contains("sorry"),
3918 "expected spec_laws proof export to avoid sorry, got:\n{}",
3919 lean
3920 );
3921 assert!(lean.contains("theorem absVal_eq_absValSpec :"));
3922 assert!(lean.contains("theorem clampNonNegative_eq_clampNonNegativeSpec :"));
3923 }
3924
    // NOTE(review): despite the "without_sorry" suffix in the name, this test
    // expects the universal roundtrip theorems to remain unproved (`sorry`
    // present — see the assertion message) while the sampled instances are
    // discharged by native_decide. The name looks stale; confirm and rename.
    #[test]
    fn rle_example_exports_sampled_roundtrip_laws_without_sorry() {
        let ctx = ctx_from_source(include_str!("../../../examples/data/rle.av"), "rle");
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        assert!(
            lean.contains("sorry"),
            "expected rle proof export to contain sorry for unproved universal theorems"
        );
        assert!(lean.contains(
            "theorem encode_law_roundtrip_sample_1 : decode (encode []) = [] := by native_decide"
        ));
        assert!(lean.contains(
            "theorem encodeString_law_string_roundtrip_sample_1 : decodeString (encodeString \"\") = \"\" := by native_decide"
        ));
    }
3942
3943 #[test]
3944 fn fibonacci_example_uses_fuelized_int_countdown_in_proof_mode() {
3945 let ctx = ctx_from_source(
3946 include_str!("../../../examples/data/fibonacci.av"),
3947 "fibonacci",
3948 );
3949 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3950 let lean = generated_lean_file(&out);
3951
3952 assert!(lean.contains("def fibTR__fuel"));
3953 assert!(lean.contains("def fibTR (n : Int) (a : Int) (b : Int) : Int :="));
3954 assert!(lean.contains("fibTR__fuel ((Int.natAbs n) + 1) n a b"));
3955 assert!(!lean.contains("partial def fibTR"));
3956 }
3957
3958 #[test]
3959 fn fibonacci_example_stays_inside_proof_subset() {
3960 let ctx = ctx_from_source(
3961 include_str!("../../../examples/data/fibonacci.av"),
3962 "fibonacci",
3963 );
3964 let issues = proof_mode_issues(&ctx);
3965 assert!(
3966 issues.is_empty(),
3967 "expected fibonacci example to stay inside proof subset, got: {:?}",
3968 issues
3969 );
3970 }
3971
3972 #[test]
3973 fn fibonacci_example_matches_general_linear_recurrence_shapes() {
3974 let ctx = ctx_from_source(
3975 include_str!("../../../examples/data/fibonacci.av"),
3976 "fibonacci",
3977 );
3978 let fib = ctx.fn_defs.iter().find(|fd| fd.name == "fib").unwrap();
3979 let fib_tr = ctx.fn_defs.iter().find(|fd| fd.name == "fibTR").unwrap();
3980 let fib_spec = ctx.fn_defs.iter().find(|fd| fd.name == "fibSpec").unwrap();
3981
3982 assert!(recurrence::detect_tailrec_int_linear_pair_wrapper(fib).is_some());
3983 assert!(recurrence::detect_tailrec_int_linear_pair_worker(fib_tr).is_some());
3984 assert!(recurrence::detect_second_order_int_linear_recurrence(fib_spec).is_some());
3985 }
3986
3987 #[test]
3988 fn fibonacci_example_auto_proves_general_linear_recurrence_spec_law() {
3989 let ctx = ctx_from_source(
3990 include_str!("../../../examples/data/fibonacci.av"),
3991 "fibonacci",
3992 );
3993 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3994 let lean = generated_lean_file(&out);
3995
3996 assert!(lean.contains("private def fibSpec__nat : Nat -> Int"));
3997 assert!(!lean.contains("partial def fibSpec"));
3998 assert!(lean.contains("private theorem fib_eq_fibSpec__worker_nat_shift"));
3999 assert!(lean.contains("private theorem fib_eq_fibSpec__helper_nat"));
4000 assert!(lean.contains("private theorem fib_eq_fibSpec__helper_seed"));
4001 assert!(lean.contains("theorem fib_eq_fibSpec : ∀ (n : Int), fib n = fibSpec n := by"));
4002 assert!(!lean.contains(
4003 "-- universal theorem fib_eq_fibSpec omitted: sampled law shape is not auto-proved yet"
4004 ));
4005 }
4006
    // The linear-recurrence auto-prover is shape-driven, not fibonacci-specific:
    // a Pell-style recurrence (step coefficients 1 and 2) written in the same
    // wrapper/worker/spec pattern must be proved the same way.
    #[test]
    fn pell_like_example_auto_proves_same_general_shape() {
        let ctx = ctx_from_source(
            r#"
module Pell
  intent =
    "linear recurrence probe"

fn pellTR(n: Int, a: Int, b: Int) -> Int
  match n
    0 -> a
    _ -> pellTR(n - 1, b, a + 2 * b)

fn pell(n: Int) -> Int
  match n < 0
    true -> 0
    false -> pellTR(n, 0, 1)

fn pellSpec(n: Int) -> Int
  match n < 0
    true -> 0
    false -> match n
      0 -> 0
      1 -> 1
      _ -> pellSpec(n - 2) + 2 * pellSpec(n - 1)

verify pell law pellSpec
  given n: Int = [0, 1, 2, 3]
  pell(n) => pellSpec(n)
"#,
            "pell",
        );
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected pell example to stay inside proof subset, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("private def pellSpec__nat : Nat -> Int"));
        assert!(lean.contains("private theorem pell_eq_pellSpec__worker_nat_shift"));
        assert!(lean.contains("theorem pell_eq_pellSpec : ∀ (n : Int), pell n = pellSpec n := by"));
        assert!(!lean.contains(
            "-- universal theorem pell_eq_pellSpec omitted: sampled law shape is not auto-proved yet"
        ));
    }
4055
    // Negative control for the linear-recurrence prover: a pair-state
    // recurrence with a *multiplicative* step (a * b) must not be matched as
    // linear, so only the sampled instances get theorems and `sorry` remains
    // for the universal law.
    #[test]
    fn nonlinear_pair_state_recurrence_is_not_auto_proved_as_linear_shape() {
        let ctx = ctx_from_source(
            r#"
module WeirdRec
  intent =
    "reject nonlinear pair-state recurrence from linear recurrence prover"

fn weirdTR(n: Int, a: Int, b: Int) -> Int
  match n
    0 -> a
    _ -> weirdTR(n - 1, b, a * b)

fn weird(n: Int) -> Int
  match n < 0
    true -> 0
    false -> weirdTR(n, 0, 1)

fn weirdSpec(n: Int) -> Int
  match n < 0
    true -> 0
    false -> match n
      0 -> 0
      1 -> 1
      _ -> weirdSpec(n - 1) * weirdSpec(n - 2)

verify weird law weirdSpec
  given n: Int = [0, 1, 2, 3]
  weird(n) => weirdSpec(n)
"#,
            "weirdrec",
        );
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        // The universal theorem stays unproved; no shift lemma is synthesized.
        assert!(lean.contains("sorry"));
        assert!(!lean.contains("private theorem weird_eq_weirdSpec__worker_nat_shift"));
        assert!(lean.contains("theorem weird_eq_weirdSpec_sample_1 :"));
    }
4096
4097 #[test]
4098 fn date_example_stays_inside_proof_subset() {
4099 let ctx = ctx_from_source(include_str!("../../../examples/data/date.av"), "date");
4100 let issues = proof_mode_issues(&ctx);
4101 assert!(
4102 issues.is_empty(),
4103 "expected date example to stay inside proof subset, got: {:?}",
4104 issues
4105 );
4106
4107 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4108 let lean = generated_lean_file(&out);
4109 assert!(!lean.contains("partial def"));
4110 assert!(lean.contains("def parseIntSlice (s : String) (from' : Int) (to : Int) : Int :="));
4111 }
4112
4113 #[test]
4114 fn temperature_example_stays_inside_proof_subset() {
4115 let ctx = ctx_from_source(
4116 include_str!("../../../examples/core/temperature.av"),
4117 "temperature",
4118 );
4119 let issues = proof_mode_issues(&ctx);
4120 assert!(
4121 issues.is_empty(),
4122 "expected temperature example to stay inside proof subset, got: {:?}",
4123 issues
4124 );
4125
4126 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4127 let lean = generated_lean_file(&out);
4128 assert!(!lean.contains("partial def"));
4129 assert!(
4130 lean.contains("example : celsiusToFahr 0.0 = 32.0 := by native_decide"),
4131 "expected verify examples to survive proof export, got:\n{}",
4132 lean
4133 );
4134 }
4135
4136 #[test]
4137 fn quicksort_example_stays_inside_proof_subset() {
4138 let ctx = ctx_from_source(
4139 include_str!("../../../examples/data/quicksort.av"),
4140 "quicksort",
4141 );
4142 let issues = proof_mode_issues(&ctx);
4143 assert!(
4144 issues.is_empty(),
4145 "expected quicksort example to stay inside proof subset, got: {:?}",
4146 issues
4147 );
4148
4149 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4150 let lean = generated_lean_file(&out);
4151 assert!(lean.contains("def isOrderedFrom"));
4152 assert!(!lean.contains("partial def isOrderedFrom"));
4153 assert!(lean.contains("termination_by xs.length"));
4154 }
4155
4156 #[test]
4157 fn grok_s_language_example_uses_total_ranked_sizeof_mutual_recursion() {
4158 let ctx = ctx_from_source(
4159 include_str!("../../../examples/core/grok_s_language.av"),
4160 "grok_s_language",
4161 );
4162 let issues = proof_mode_issues(&ctx);
4163 assert!(
4164 issues.is_empty(),
4165 "expected grok_s_language example to stay inside proof subset, got: {:?}",
4166 issues
4167 );
4168
4169 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4170 let lean = generated_lean_file(&out);
4171 assert!(lean.contains("mutual"));
4172 assert!(lean.contains("def eval__fuel"));
4173 assert!(lean.contains("def parseListItems__fuel"));
4174 assert!(!lean.contains("partial def eval"));
4175 assert!(!lean.contains("termination_by (sizeOf e,"));
4176 assert!(lean.contains("-- when validSymbolNames e"));
4177 assert!(!lean.contains("private theorem toString'_law_parseRoundtrip_aux"));
4178 assert!(lean.contains(
4179 "theorem toString'_law_parseRoundtrip : ∀ (e : Sexpr), e = Sexpr.atomNum 42 ∨"
4180 ));
4181 assert!(
4182 lean.contains("validSymbolNames e = true -> parse (toString' e) = Except.ok e := by")
4183 );
4184 assert!(lean.contains("theorem toString'_law_parseSexprRoundtrip :"));
4185 assert!(lean.contains("theorem toString'_law_parseRoundtrip_sample_1 :"));
4186 }
4187
4188 #[test]
4189 fn lambda_example_keeps_only_eval_outside_proof_subset() {
4190 let ctx = ctx_from_source(include_str!("../../../examples/core/lambda.av"), "lambda");
4191 let issues = proof_mode_issues(&ctx);
4192 assert_eq!(
4193 issues,
4194 vec!["recursive function 'eval' is outside proof subset (currently supported: Int countdown, second-order affine Int recurrences with pair-state worker, structural recursion on List/recursive ADTs, String+position, mutual Int countdown, mutual String+position, and ranked sizeOf recursion)".to_string()]
4195 );
4196
4197 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4198 let lean = generated_lean_file(&out);
4199 assert!(lean.contains("def termToString__fuel"));
4200 assert!(lean.contains("def subst__fuel"));
4201 assert!(lean.contains("def countS__fuel"));
4202 assert!(!lean.contains("partial def termToString"));
4203 assert!(!lean.contains("partial def subst"));
4204 assert!(!lean.contains("partial def countS"));
4205 assert!(lean.contains("partial def eval"));
4206 }
4207
4208 #[test]
4209 fn mission_control_example_stays_inside_proof_subset() {
4210 let ctx = ctx_from_source(
4211 include_str!("../../../examples/apps/mission_control.av"),
4212 "mission_control",
4213 );
4214 let issues = proof_mode_issues(&ctx);
4215 assert!(
4216 issues.is_empty(),
4217 "expected mission_control example to stay inside proof subset, got: {:?}",
4218 issues
4219 );
4220
4221 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4222 let lean = generated_lean_file(&out);
4223 assert!(!lean.contains("partial def normalizeAngle"));
4224 assert!(lean.contains("def normalizeAngle__fuel"));
4225 }
4226
4227 #[test]
4228 fn notepad_store_example_stays_inside_proof_subset() {
4229 let ctx = ctx_from_source(
4230 include_str!("../../../examples/apps/notepad/store.av"),
4231 "notepad_store",
4232 );
4233 let issues = proof_mode_issues(&ctx);
4234 assert!(
4235 issues.is_empty(),
4236 "expected notepad/store example to stay inside proof subset, got: {:?}",
4237 issues
4238 );
4239
4240 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4241 let lean = generated_lean_file(&out);
4242 assert!(lean.contains("def deserializeLine (line : String) : Except String Note :="));
4243 assert!(lean.contains("Except String (List Note)"));
4244 assert!(!lean.contains("partial def deserializeLine"));
4245 assert!(lean.contains("-- when noteRoundtripSafe note"));
4246 assert!(lean.contains("-- when notesRoundtripSafe notes"));
4247 assert!(lean.contains(
4248 "theorem serializeLine_law_lineRoundtrip : ∀ (note : Note), note = { id' := 1, title := \"Hello\", body := \"World\" : Note } ∨"
4249 ));
4250 assert!(lean.contains(
4251 "theorem serializeLines_law_notesRoundtrip : ∀ (notes : List Note), notes = [] ∨"
4252 ));
4253 assert!(lean.contains("notesRoundtripSafe notes = true ->"));
4254 assert!(lean.contains("parseNotes (s!\"{String.intercalate \"\\n\" (serializeLines notes)}\\n\") = Except.ok notes"));
4255 assert!(lean.contains("theorem serializeLine_law_lineRoundtrip_sample_1 :"));
4256 assert!(lean.contains("theorem serializeLines_law_notesRoundtrip_sample_1 :"));
4257 }
4258}