1mod builtins;
7mod expr;
8mod law_auto;
9mod pattern;
10pub(crate) mod recurrence;
11mod shared;
12mod toplevel;
13mod types;
14
15use std::collections::{HashMap, HashSet};
16
17use crate::ast::{Expr, FnDef, Spanned, TopLevel, TypeDef, VerifyKind};
18use crate::call_graph;
19use crate::codegen::{CodegenContext, ProjectOutput};
20
/// How verification obligations are discharged in the emitted Lean code.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum VerifyEmitMode {
    /// Discharge each goal with the `native_decide` tactic (the default used
    /// by [`transpile`]).
    NativeDecide,
    /// Emit `sorry` placeholders so the generated project elaborates without
    /// real proofs.
    Sorry,
    /// Emit theorem statements as skeletons for manual proof work.
    TheoremSkeleton,
}
33
34pub use crate::codegen::recursion::{ProofModeIssue, RecursionPlan};
39
/// Banner comment and lint options emitted at the very top of every
/// generated Lean file, ahead of all other prelude sections.
const LEAN_PRELUDE_HEADER: &str = r#"-- Generated by the Aver → Lean 4 transpiler
-- Pure core logic plus Oracle-lifted classified effects

set_option linter.unusedVariables false

-- Prelude: helper definitions for Aver builtins"#;
46
/// Float prelude: `Int → Float` coercion plus the `AverFloat` namespace with
/// saturating Float→Int conversions (`toInt`/`floor`/`ceil`/`round`) and
/// `pow`. The Lean `example`s at the end are `native_decide` smoke checks so
/// any drift from the documented edge-case values fails the Lake build.
const LEAN_PRELUDE_FLOAT_COE: &str = r#"instance : Coe Int Float := ⟨fun n => Float.ofInt n⟩

def Float.fromInt (n : Int) : Float := Float.ofInt n

-- Aver's Float-to-Int operations match the runtime semantics
-- (`f64::floor() as i64` in VM, Rust codegen, WASM — all three use the
-- same IEEE 754 floor/round/ceil followed by Rust's saturating
-- `f64 as i64` cast):
-- * finite values within [i64::MIN, i64::MAX]: truncate toward zero
-- * finite > i64::MAX: saturate to i64::MAX
-- * finite < i64::MIN: saturate to i64::MIN
-- * +Inf: saturate to i64::MAX
-- * -Inf: saturate to i64::MIN
-- * NaN: 0 (Rust 1.45+ defined behavior)
--
-- Lean's `Float.floor : Float → Float` doesn't directly satisfy Aver's
-- `Float.floor : Float → Int`, so we synthesize via the saturating
-- `Float.toUInt64` (returns 0 for NaN/negative) with sign handling and
-- explicit bounds. Per-case correctness is asserted by `native_decide`
-- examples below; total semantic agreement with `f64 as i64` would
-- need a formal IEEE spec in Lean, which is out of scope.
--
-- Asymmetry with the Dafny backend: Lean has IEEE 754 `Float` natively
-- (`double` at runtime), so we use it. Dafny only offers mathematical
-- `real` (Cauchy-style, no NaN/Inf/overflow), which is a fundamental
-- type mismatch with Aver's IEEE Float — Dafny operations stay opaque
-- (`function FloatPi(): real` etc.) rather than synthesizing IEEE on
-- top of `bv64`, which would mean implementing the entire IEEE
-- arithmetic in Dafny by hand.
namespace AverFloat
def toInt (x : Float) : Int :=
  if x.isNaN then 0
  -- 2^63 is exactly representable in f64; values ≥ that saturate up.
  else if x ≥ 9223372036854775808.0 then 9223372036854775807
  -- -2^63 is exactly representable; values strictly below saturate down.
  else if x < -9223372036854775808.0 then -9223372036854775808
  else if x ≥ 0.0 then Int.ofNat x.toUInt64.toNat
  else -(Int.ofNat (-x).toUInt64.toNat)

def floor (x : Float) : Int := toInt x.floor
def ceil (x : Float) : Int := toInt x.ceil
def round (x : Float) : Int := toInt x.round

def pow (x y : Float) : Float := x ^ y

-- Edge-case smoke checks: each `example` is discharged by reduction,
-- so any drift from these documented values fails Lake build.
example : AverFloat.toInt 0.0 = 0 := by native_decide
example : AverFloat.toInt 3.7 = 3 := by native_decide
example : AverFloat.toInt (-3.7) = -3 := by native_decide
example : AverFloat.toInt (1.0 / 0.0) = 9223372036854775807 := by native_decide
example : AverFloat.toInt (-1.0 / 0.0) = -9223372036854775808 := by native_decide
example : AverFloat.toInt (0.0 / 0.0) = 0 := by native_decide
example : AverFloat.floor 3.7 = 3 := by native_decide
example : AverFloat.floor (-3.7) = -4 := by native_decide
example : AverFloat.ceil 3.2 = 4 := by native_decide
example : AverFloat.ceil (-3.2) = -3 := by native_decide
-- Rounding mode (half-away-from-zero, matching Rust's `f64::round`):
example : AverFloat.round 0.5 = 1 := by native_decide
example : AverFloat.round (-0.5) = -1 := by native_decide
example : AverFloat.round 2.5 = 3 := by native_decide
example : AverFloat.round (-2.5) = -3 := by native_decide
end AverFloat"#;
110
/// `DecidableEq Float` instance: an `opaque` declaration backed at runtime
/// (via `@[implemented_by]`) by an `unsafe` comparison using `==`, since
/// Float equality is not decidable by ordinary elaboration.
const LEAN_PRELUDE_FLOAT_DEC_EQ: &str = r#"private unsafe def Float.unsafeDecEq (a b : Float) : Decidable (a = b) :=
  if a == b then isTrue (unsafeCast ()) else isFalse (unsafeCast ())
@[implemented_by Float.unsafeDecEq]
private opaque Float.compDecEq (a b : Float) : Decidable (a = b)
instance : DecidableEq Float := Float.compDecEq"#;
116
/// `DecidableEq` instance for `Except ε α`, decided component-wise with
/// mismatched constructors immediately refuted.
const LEAN_PRELUDE_EXCEPT_DEC_EQ: &str = r#"instance [DecidableEq ε] [DecidableEq α] : DecidableEq (Except ε α)
  | .ok a, .ok b =>
    if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
  | .error a, .error b =>
    if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
  | .ok _, .error _ => isFalse (by intro h; cases h)
  | .error _, .ok _ => isFalse (by intro h; cases h)"#;
124
/// `Except.withDefault`: unwrap an `Except`, substituting a default value on
/// the error branch.
const LEAN_PRELUDE_EXCEPT_NS: &str = r#"namespace Except
def withDefault (r : Except ε α) (d : α) : α :=
  match r with
  | .ok v => v
  | .error _ => d
end Except"#;
131
/// `Option.toExcept`: lift an `Option` into `Except`, mapping `none` to the
/// supplied error value.
const LEAN_PRELUDE_OPTION_TO_EXCEPT: &str = r#"def Option.toExcept (o : Option α) (e : ε) : Except ε α :=
  match o with
  | some v => .ok v
  | none => .error e"#;
136
/// Enables `+` for string concatenation in generated code via `HAdd`.
const LEAN_PRELUDE_STRING_HADD: &str = r#"instance : HAdd String String String := ⟨String.append⟩"#;
138
/// `BranchPath`: a Dewey-decimal-style path encoded as a dot-separated
/// string, with a `Root` constant, `child` extension by index, and `parse`.
const LEAN_PRELUDE_BRANCH_PATH: &str = r#"structure BranchPath where
  dewey : String
  deriving Repr, BEq, DecidableEq

def BranchPath.Root : BranchPath := { dewey := "" }

def BranchPath.child (p : BranchPath) (idx : Int) : BranchPath :=
  if p.dewey.isEmpty then { dewey := toString idx }
  else { dewey := p.dewey ++ "." ++ toString idx }

def BranchPath.parse (s : String) : BranchPath := { dewey := s }"#;
153
/// Fuel bound for recursion over a string position: remaining characters
/// (plus one) scaled by a rank budget.
const LEAN_PRELUDE_PROOF_FUEL: &str = r#"def averStringPosFuel (s : String) (pos : Int) (rankBudget : Nat) : Nat :=
  (((s.data.length) - pos.toNat) + 1) * rankBudget"#;
156
/// `AverMeasure`: structural size measures for `List`, `Option`, and
/// `Except` — each constructor contributes 1 plus the measure of its
/// payload(s), parameterized by per-element measures.
const LEAN_PRELUDE_AVER_MEASURE: &str = r#"namespace AverMeasure
def list (elemMeasure : α → Nat) : List α → Nat
  | [] => 1
  | x :: xs => elemMeasure x + list elemMeasure xs + 1
def option (elemMeasure : α → Nat) : Option α → Nat
  | none => 1
  | some x => elemMeasure x + 1
def except (errMeasure : ε → Nat) (okMeasure : α → Nat) : Except ε α → Nat
  | .error e => errMeasure e + 1
  | .ok v => okMeasure v + 1
end AverMeasure"#;
168
/// `AverMap`: map-as-association-list operations (empty/get/set/has/remove/
/// keys/values/entries/len/fromList). `set` replaces the first matching key
/// in place or appends at the end. Opens the `AverMap` namespace, which is
/// closed by `AVER_MAP_PRELUDE_END`.
const AVER_MAP_PRELUDE_BASE: &str = r#"namespace AverMap
def empty : List (α × β) := []
def get [DecidableEq α] (m : List (α × β)) (k : α) : Option β :=
  match m with
  | [] => none
  | (k', v) :: rest => if k = k' then some v else AverMap.get rest k
def set [DecidableEq α] (m : List (α × β)) (k : α) (v : β) : List (α × β) :=
  let rec go : List (α × β) → List (α × β)
    | [] => [(k, v)]
    | (k', v') :: rest => if k = k' then (k, v) :: rest else (k', v') :: go rest
  go m
def has [DecidableEq α] (m : List (α × β)) (k : α) : Bool :=
  m.any (fun p => decide (k = p.1))
def remove [DecidableEq α] (m : List (α × β)) (k : α) : List (α × β) :=
  m.filter (fun p => !(decide (k = p.1)))
def keys (m : List (α × β)) : List α := m.map Prod.fst
def values (m : List (α × β)) : List β := m.map Prod.snd
def entries (m : List (α × β)) : List (α × β) := m
def len (m : List (α × β)) : Nat := m.length
def fromList (entries : List (α × β)) : List (α × β) := entries"#;
189
/// Lemma `has_set_self`: after `set m k v`, `has … k` is `true`; proved by
/// induction over the list via a helper about `set.go`.
const AVER_MAP_PRELUDE_HAS_SET_SELF: &str = r#"private theorem any_set_go_self [DecidableEq α] (k : α) (v : β) :
    ∀ (m : List (α × β)), List.any (AverMap.set.go k v m) (fun p => decide (k = p.1)) = true := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, List.any]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.set.go, List.any, h]
      · simp [AverMap.set.go, List.any, h, ih]

theorem has_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
    AverMap.has (AverMap.set m k v) k = true := by
  simpa [AverMap.has, AverMap.set] using any_set_go_self k v m"#;
206
/// Lemma `get_set_self`: after `set m k v`, `get … k` returns `some v`;
/// proved by induction via a helper about `set.go`.
const AVER_MAP_PRELUDE_GET_SET_SELF: &str = r#"private theorem get_set_go_self [DecidableEq α] (k : α) (v : β) :
    ∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) k = some v := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, AverMap.get]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.set.go, AverMap.get, h]
      · simp [AverMap.set.go, AverMap.get, h, ih]

theorem get_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
    AverMap.get (AverMap.set m k v) k = some v := by
  simpa [AverMap.set] using get_set_go_self k v m"#;
223
/// Lemma `get_set_other`: `set m k v` leaves `get` at any other key
/// unchanged; case-splits on whether the traversed key equals `k` or `key`.
const AVER_MAP_PRELUDE_GET_SET_OTHER: &str = r#"private theorem get_set_go_other [DecidableEq α] (k key : α) (v : β) (h : key ≠ k) :
    ∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) key = AverMap.get m key := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, AverMap.get, h]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases hk : k = k'
      · have hkey : key ≠ k' := by simpa [hk] using h
        simp [AverMap.set.go, AverMap.get, hk, hkey]
      · by_cases hkey : key = k'
        · simp [AverMap.set.go, AverMap.get, hk, hkey]
        · simp [AverMap.set.go, AverMap.get, hk, hkey, ih]

theorem get_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
    AverMap.get (AverMap.set m k v) key = AverMap.get m key := by
  simpa [AverMap.set] using get_set_go_other k key v h m"#;
243
/// Lemma `has_set_other`: `set m k v` leaves `has` at any other key
/// unchanged. Derived from `get_set_other` via the bridge lemma
/// `has_eq_isSome_get` (`has` agrees with `(get …).isSome`).
const AVER_MAP_PRELUDE_HAS_SET_OTHER: &str = r#"theorem has_eq_isSome_get [DecidableEq α] (m : List (α × β)) (k : α) :
    AverMap.has m k = (AverMap.get m k).isSome := by
  induction m with
  | nil =>
    simp [AverMap.has, AverMap.get]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.has, AverMap.get, List.any, h]
      · simpa [AverMap.has, AverMap.get, List.any, h] using ih

theorem has_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
    AverMap.has (AverMap.set m k v) key = AverMap.has m key := by
  rw [AverMap.has_eq_isSome_get, AverMap.has_eq_isSome_get]
  simp [AverMap.get_set_other, h]"#;
260
/// Closes the `AverMap` namespace opened by `AVER_MAP_PRELUDE_BASE`.
const AVER_MAP_PRELUDE_END: &str = r#"end AverMap"#;
262
/// `AverList`: checked list indexing (`get` returns `none` for negative or
/// out-of-range indices) and an insertion sort driven by an `Ord` instance.
const LEAN_PRELUDE_AVER_LIST: &str = r#"namespace AverList
def get (xs : List α) (i : Int) : Option α :=
  if i < 0 then none else xs.get? i.toNat
private def insertSorted [Ord α] (x : α) : List α → List α
  | [] => [x]
  | y :: ys =>
    if compare x y == Ordering.lt || compare x y == Ordering.eq then
      x :: y :: ys
    else
      y :: insertSorted x ys
def sort [Ord α] (xs : List α) : List α :=
  xs.foldl (fun acc x => insertSorted x acc) []
end AverList"#;
276
/// String builtins (`charAt`, `slice`, `chars`, `fromFloat` with
/// trailing-zero normalization) and the `AverString` namespace (`split` /
/// `splitOnChar`), plus `@[simp]` lemmas relating `split` and
/// `String.intercalate` used by downstream verification.
/// NOTE(review): the Lean text here is indentation-sensitive; layout below
/// reconstructs standard Lean formatting — confirm against a Lake build.
const LEAN_PRELUDE_STRING_HELPERS: &str = r#"def String.charAt (s : String) (i : Int) : Option String :=
  if i < 0 then none
  else (s.toList.get? i.toNat).map Char.toString
theorem String.charAt_length_none (s : String) : String.charAt s s.length = none := by
  have hs : ¬ ((s.length : Int) < 0) := by omega
  unfold String.charAt
  simp [hs]
  change s.data.length ≤ s.length
  exact Nat.le_refl _
def String.slice (s : String) (start stop : Int) : String :=
  let startN := if start < 0 then 0 else start.toNat
  let stopN := if stop < 0 then 0 else stop.toNat
  let chars := s.toList
  String.mk ((chars.drop startN).take (stopN - startN))
private def trimFloatTrailingZerosChars (chars : List Char) : List Char :=
  let noZeros := (chars.reverse.dropWhile (fun c => c == '0')).reverse
  match noZeros.reverse with
  | '.' :: rest => rest.reverse
  | _ => noZeros
private def normalizeFloatString (s : String) : String :=
  if s.toList.any (fun c => c == '.') then
    String.mk (trimFloatTrailingZerosChars s.toList)
  else s
def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)
def String.chars (s : String) : List String := s.toList.map Char.toString
private theorem char_to_string_append_mk (c : Char) (chars : List Char) :
    Char.toString c ++ String.mk chars = String.mk (c :: chars) := by
  rfl
private theorem string_intercalate_empty_char_strings_go (acc : String) (chars : List Char) :
    String.intercalate.go acc "" (chars.map Char.toString) = acc ++ String.mk chars := by
  induction chars generalizing acc with
  | nil =>
    simp [String.intercalate.go]
  | cons c rest ih =>
    calc
      String.intercalate.go acc "" ((c :: rest).map Char.toString)
          = String.intercalate.go (acc ++ Char.toString c) "" (rest.map Char.toString) := by
        simp [String.intercalate.go]
      _ = (acc ++ Char.toString c) ++ String.mk rest := by
        simpa using ih (acc ++ Char.toString c)
      _ = acc ++ String.mk (c :: rest) := by
        simp [String.append_assoc, char_to_string_append_mk]
private theorem string_intercalate_empty_char_strings (chars : List Char) :
    String.intercalate "" (chars.map Char.toString) = String.mk chars := by
  cases chars with
  | nil =>
    simp [String.intercalate]
  | cons c rest =>
    simpa [String.intercalate] using string_intercalate_empty_char_strings_go c.toString rest
theorem String.intercalate_empty_chars (s : String) :
    String.intercalate "" (String.chars s) = s := by
  cases s with
  | mk chars =>
    simpa [String.chars] using string_intercalate_empty_char_strings chars
namespace AverString
def splitOnCharGo (currentRev : List Char) (sep : Char) : List Char → List String
  | [] => [String.mk currentRev.reverse]
  | c :: cs =>
    if c == sep then
      String.mk currentRev.reverse :: splitOnCharGo [] sep cs
    else
      splitOnCharGo (c :: currentRev) sep cs
def splitOnChar (s : String) (sep : Char) : List String :=
  splitOnCharGo [] sep s.toList
def split (s delim : String) : List String :=
  match delim.toList with
  | [] => "" :: (s.toList.map Char.toString) ++ [""]
  | [c] => splitOnChar s c
  | _ => s.splitOn delim
@[simp] private theorem char_toString_data (c : Char) : c.toString.data = [c] := by
  rfl
private theorem splitOnCharGo_until_sep
    (prefixRev part tail : List Char) (sep : Char) :
    part.all (fun c => c != sep) = true ->
    splitOnCharGo prefixRev sep (part ++ sep :: tail) =
    String.mk (prefixRev.reverse ++ part) :: splitOnCharGo [] sep tail := by
  intro h_safe
  induction part generalizing prefixRev with
  | nil =>
    simp [splitOnCharGo]
  | cons c rest ih =>
    simp at h_safe
    have h_rest : (rest.all fun c => c != sep) = true := by
      simpa using h_safe.2
    simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
      (ih (c :: prefixRev) h_rest)
private theorem splitOnCharGo_no_sep
    (prefixRev chars : List Char) (sep : Char) :
    chars.all (fun c => c != sep) = true ->
    splitOnCharGo prefixRev sep chars =
    [String.mk (prefixRev.reverse ++ chars)] := by
  intro h_safe
  induction chars generalizing prefixRev with
  | nil =>
    simp [splitOnCharGo]
  | cons c rest ih =>
    simp at h_safe
    have h_rest : (rest.all fun c => c != sep) = true := by
      simpa using h_safe.2
    simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
      (ih (c :: prefixRev) h_rest)
@[simp] theorem split_single_char_append
    (head tail : String) (sep : Char) :
    head.toList.all (fun c => c != sep) = true ->
    split (head ++ Char.toString sep ++ tail) (Char.toString sep) =
    head :: split tail (Char.toString sep) := by
  intro h_safe
  simpa [split, splitOnChar] using
    (splitOnCharGo_until_sep [] head.data tail.data sep h_safe)
@[simp] theorem split_single_char_no_sep
    (s : String) (sep : Char) :
    s.toList.all (fun c => c != sep) = true ->
    split s (Char.toString sep) = [s] := by
  intro h_safe
  simpa [split, splitOnChar] using
    (splitOnCharGo_no_sep [] s.data sep h_safe)
private theorem intercalate_go_prefix
    (pref acc sep : String) (rest : List String) :
    String.intercalate.go (pref ++ sep ++ acc) sep rest =
    pref ++ sep ++ String.intercalate.go acc sep rest := by
  induction rest generalizing acc with
  | nil =>
    simp [String.intercalate.go, String.append_assoc]
  | cons x xs ih =>
    simpa [String.intercalate.go, String.append_assoc] using
      (ih (acc ++ sep ++ x))
@[simp] theorem split_intercalate_trailing_single_char
    (parts : List String) (sep : Char) :
    parts.all (fun part => part.toList.all (fun c => c != sep)) = true ->
    split (String.intercalate (Char.toString sep) parts ++ Char.toString sep) (Char.toString sep) =
    match parts with
    | [] => ["", ""]
    | _ => parts ++ [""] := by
  intro h_safe
  induction parts with
  | nil =>
    simp [split, splitOnChar, splitOnCharGo]
  | cons part rest ih =>
    simp at h_safe
    have h_part : (part.toList.all fun c => c != sep) = true := by
      simpa using h_safe.1
    cases rest with
    | nil =>
      have h_empty : ("".toList.all fun c => c != sep) = true := by simp
      calc
        split (String.intercalate.go part (Char.toString sep) [] ++ Char.toString sep) (Char.toString sep)
            = split (part ++ Char.toString sep) (Char.toString sep) := by
          simp [String.intercalate.go]
        _ = part :: split "" (Char.toString sep) := by
          simpa using split_single_char_append part "" sep h_part
        _ = [part, ""] := by
          simp [split_single_char_no_sep, h_empty]
    | cons next rest' =>
      have h_rest : ((next :: rest').all fun part => part.toList.all fun c => c != sep) = true := by
        simpa using h_safe.2
      calc
        split
            (String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep)
            (Char.toString sep)
            =
            split
              (part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep))
              (Char.toString sep) := by
          have h_join :
              String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
              = part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
            calc
              String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
                  = String.intercalate.go (part ++ Char.toString sep ++ next) (Char.toString sep) rest' ++ Char.toString sep := by
                simp [String.intercalate.go]
              _ = part ++ Char.toString sep ++ (String.intercalate.go next (Char.toString sep) rest' ++ Char.toString sep) := by
                rw [intercalate_go_prefix part next (Char.toString sep) rest']
                simp [String.append_assoc]
              _ = part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
                simp [String.intercalate, String.intercalate.go]
          simpa using congrArg (fun s => split s (Char.toString sep)) h_join
        _ = part :: split
            (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
            (Char.toString sep) := by
          simpa using split_single_char_append
            part
            (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
            sep
            h_part
        _ = part :: (next :: rest' ++ [""]) := by
          simpa using ih h_rest
end AverString"#;
471
/// Decimal digit machinery (`AverDigits`): digit lists for a `Nat`, folding
/// digits back to a `Nat`, and lemmas culminating in the round-trip theorem
/// `Int.fromString_fromInt`. Also defines `String.fromInt`,
/// `Int.fromString`, and a hand-rolled `Float.fromString` (sign, integer,
/// fraction, exponent parts via `Float.ofScientific`).
/// NOTE(review): indentation below reconstructs standard Lean layout from a
/// garbled extraction — confirm against a Lake build.
const LEAN_PRELUDE_NUMERIC_PARSE: &str = r#"namespace AverDigits
def foldDigitsAcc (acc : Nat) : List Nat -> Nat
  | [] => acc
  | d :: ds => foldDigitsAcc (acc * 10 + d) ds

def foldDigits (digits : List Nat) : Nat :=
  foldDigitsAcc 0 digits

private theorem foldDigitsAcc_append_singleton (acc : Nat) (xs : List Nat) (d : Nat) :
    foldDigitsAcc acc (xs ++ [d]) = foldDigitsAcc acc xs * 10 + d := by
  induction xs generalizing acc with
  | nil =>
    simp [foldDigitsAcc]
  | cons x xs ih =>
    simp [foldDigitsAcc, ih, Nat.left_distrib, Nat.add_assoc, Nat.add_left_comm]

private theorem foldDigits_append_singleton (xs : List Nat) (d : Nat) :
    foldDigits (xs ++ [d]) = foldDigits xs * 10 + d := by
  simpa [foldDigits] using foldDigitsAcc_append_singleton 0 xs d

def natDigits : Nat -> List Nat
  | n =>
    if n < 10 then
      [n]
    else
      natDigits (n / 10) ++ [n % 10]
termination_by
  n => n

theorem natDigits_nonempty (n : Nat) : natDigits n ≠ [] := by
  by_cases h : n < 10
  · rw [natDigits.eq_1]
    simp [h]
  · rw [natDigits.eq_1]
    simp [h]

theorem natDigits_digits_lt_ten : ∀ n : Nat, ∀ d ∈ natDigits n, d < 10 := by
  intro n d hd
  by_cases h : n < 10
  · rw [natDigits.eq_1] at hd
    simp [h] at hd
    rcases hd with rfl
    exact h
  · rw [natDigits.eq_1] at hd
    simp [h] at hd
    rcases hd with hd | hd
    · exact natDigits_digits_lt_ten (n / 10) d hd
    · subst hd
      exact Nat.mod_lt n (by omega)

theorem foldDigits_natDigits : ∀ n : Nat, foldDigits (natDigits n) = n := by
  intro n
  by_cases h : n < 10
  · rw [natDigits.eq_1]
    simp [h, foldDigits, foldDigitsAcc]
  · rw [natDigits.eq_1]
    simp [h]
    rw [foldDigits_append_singleton]
    rw [foldDigits_natDigits (n / 10)]
    omega

def digitChar : Nat -> Char
  | 0 => '0' | 1 => '1' | 2 => '2' | 3 => '3' | 4 => '4'
  | 5 => '5' | 6 => '6' | 7 => '7' | 8 => '8' | 9 => '9'
  | _ => '0'

def charToDigit? : Char -> Option Nat
  | '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3 | '4' => some 4
  | '5' => some 5 | '6' => some 6 | '7' => some 7 | '8' => some 8 | '9' => some 9
  | _ => none

theorem charToDigit_digitChar : ∀ d : Nat, d < 10 -> charToDigit? (digitChar d) = some d
  | 0, _ => by simp [digitChar, charToDigit?]
  | 1, _ => by simp [digitChar, charToDigit?]
  | 2, _ => by simp [digitChar, charToDigit?]
  | 3, _ => by simp [digitChar, charToDigit?]
  | 4, _ => by simp [digitChar, charToDigit?]
  | 5, _ => by simp [digitChar, charToDigit?]
  | 6, _ => by simp [digitChar, charToDigit?]
  | 7, _ => by simp [digitChar, charToDigit?]
  | 8, _ => by simp [digitChar, charToDigit?]
  | 9, _ => by simp [digitChar, charToDigit?]
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem digitChar_ne_minus : ∀ d : Nat, d < 10 -> digitChar d ≠ '-'
  | 0, _ => by decide
  | 1, _ => by decide
  | 2, _ => by decide
  | 3, _ => by decide
  | 4, _ => by decide
  | 5, _ => by decide
  | 6, _ => by decide
  | 7, _ => by decide
  | 8, _ => by decide
  | 9, _ => by decide
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->
    Char.toString (digitChar d) ≠ " " ∧
    Char.toString (digitChar d) ≠ "\t" ∧
    Char.toString (digitChar d) ≠ "\n" ∧
    Char.toString (digitChar d) ≠ "\r"
  | 0, _ => by decide
  | 1, _ => by decide
  | 2, _ => by decide
  | 3, _ => by decide
  | 4, _ => by decide
  | 5, _ => by decide
  | 6, _ => by decide
  | 7, _ => by decide
  | 8, _ => by decide
  | 9, _ => by decide
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem mapM_charToDigit_digits : ∀ ds : List Nat,
    (∀ d ∈ ds, d < 10) -> List.mapM charToDigit? (ds.map digitChar) = some ds := by
  intro ds hds
  induction ds with
  | nil =>
    simp
  | cons d ds ih =>
    have hd : d < 10 := hds d (by simp)
    have htail : ∀ x ∈ ds, x < 10 := by
      intro x hx
      exact hds x (by simp [hx])
    simp [charToDigit_digitChar d hd, ih htail]

def natDigitsChars (n : Nat) : List Char :=
  (natDigits n).map digitChar

def parseNatChars (chars : List Char) : Option Nat :=
  match chars with
  | [] => none
  | _ => do
    let digits <- List.mapM charToDigit? chars
    pure (foldDigits digits)

theorem parseNatChars_nat (n : Nat) :
    parseNatChars (natDigitsChars n) = some n := by
  unfold parseNatChars natDigitsChars
  cases h : (natDigits n).map digitChar with
  | nil =>
    exfalso
    exact natDigits_nonempty n (List.map_eq_nil_iff.mp h)
  | cons hd tl =>
    have hdigits : List.mapM charToDigit? (List.map digitChar (natDigits n)) = some (natDigits n) :=
      mapM_charToDigit_digits (natDigits n) (fun d hd => natDigits_digits_lt_ten n d hd)
    rw [h] at hdigits
    simp [h, hdigits, foldDigits_natDigits]
end AverDigits

def String.fromInt (n : Int) : String :=
  match n with
  | .ofNat m => String.mk (AverDigits.natDigitsChars m)
  | .negSucc m => String.mk ('-' :: AverDigits.natDigitsChars (m + 1))

def Int.fromString (s : String) : Except String Int :=
  match s.toList with
  | [] => .error ("Cannot parse '" ++ s ++ "' as Int")
  | '-' :: rest =>
    match AverDigits.parseNatChars rest with
    | some n => .ok (-Int.ofNat n)
    | none => .error ("Cannot parse '" ++ s ++ "' as Int")
  | chars =>
    match AverDigits.parseNatChars chars with
    | some n => .ok (Int.ofNat n)
    | none => .error ("Cannot parse '" ++ s ++ "' as Int")

theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n
  | .ofNat m => by
    cases h : AverDigits.natDigits m with
    | nil =>
      exfalso
      exact AverDigits.natDigits_nonempty m h
    | cons d ds =>
      have hd : d < 10 := AverDigits.natDigits_digits_lt_ten m d (by simp [h])
      have hne : AverDigits.digitChar d ≠ '-' := AverDigits.digitChar_ne_minus d hd
      have hparse : AverDigits.parseNatChars (AverDigits.digitChar d :: List.map AverDigits.digitChar ds) = some m := by
        simpa [AverDigits.natDigitsChars, h] using AverDigits.parseNatChars_nat m
      simp [String.fromInt, Int.fromString, AverDigits.natDigitsChars, h, hne, hparse]
  | .negSucc m => by
    simp [String.fromInt, Int.fromString, AverDigits.parseNatChars_nat]
    rfl

private def charDigitsToNat (cs : List Char) : Nat :=
  cs.foldl (fun acc c => acc * 10 + (c.toNat - '0'.toNat)) 0

private def parseExpPart : List Char → (Bool × List Char)
  | '-' :: rest => (true, rest.takeWhile Char.isDigit)
  | '+' :: rest => (false, rest.takeWhile Char.isDigit)
  | rest => (false, rest.takeWhile Char.isDigit)

def Float.fromString (s : String) : Except String Float :=
  let chars := s.toList
  let (neg, chars) := match chars with
    | '-' :: rest => (true, rest)
    | _ => (false, chars)
  let intPart := chars.takeWhile Char.isDigit
  let rest := chars.dropWhile Char.isDigit
  let (fracPart, rest) := match rest with
    | '.' :: rest => (rest.takeWhile Char.isDigit, rest.dropWhile Char.isDigit)
    | _ => ([], rest)
  let (expNeg, expDigits) := match rest with
    | 'e' :: rest => parseExpPart rest
    | 'E' :: rest => parseExpPart rest
    | _ => (false, [])
  if intPart.isEmpty && fracPart.isEmpty then .error ("Invalid float: " ++ s)
  else
    let mantissa := charDigitsToNat (intPart ++ fracPart)
    let fracLen : Int := fracPart.length
    let expVal : Int := charDigitsToNat expDigits
    let shift : Int := (if expNeg then -expVal else expVal) - fracLen
    let f := if shift >= 0 then Float.ofScientific mantissa false shift.toNat
      else Float.ofScientific mantissa true ((-shift).toNat)
    .ok (if neg then -f else f)"#;
690
/// Char/byte helpers: `Char.toCode` / `Char.fromCode` (rejects negatives,
/// values above U+10FFFF, and the surrogate range 0xD800–0xDFFF), plus
/// `AverByte` two-digit lowercase-hex encoding and decoding of byte values.
const LEAN_PRELUDE_CHAR_BYTE: &str = r#"def Char.toCode (s : String) : Int :=
  match s.toList.head? with
  | some c => (c.toNat : Int)
  | none => panic! "Char.toCode: string is empty"
def Char.fromCode (n : Int) : Option String :=
  if n < 0 || n > 1114111 then none
  else if n >= 55296 && n <= 57343 then none
  else some (Char.toString (Char.ofNat n.toNat))

def hexDigit (n : Int) : String :=
  match n with
  | 0 => "0" | 1 => "1" | 2 => "2" | 3 => "3"
  | 4 => "4" | 5 => "5" | 6 => "6" | 7 => "7"
  | 8 => "8" | 9 => "9" | 10 => "a" | 11 => "b"
  | 12 => "c" | 13 => "d" | 14 => "e" | 15 => "f"
  | _ => "?"

def byteToHex (code : Int) : String :=
  hexDigit (code / 16) ++ hexDigit (code % 16)

namespace AverByte
private def hexValue (c : Char) : Option Int :=
  match c with
  | '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3
  | '4' => some 4 | '5' => some 5 | '6' => some 6 | '7' => some 7
  | '8' => some 8 | '9' => some 9 | 'a' => some 10 | 'b' => some 11
  | 'c' => some 12 | 'd' => some 13 | 'e' => some 14 | 'f' => some 15
  | 'A' => some 10 | 'B' => some 11 | 'C' => some 12 | 'D' => some 13
  | 'E' => some 14 | 'F' => some 15
  | _ => none
def toHex (n : Int) : Except String String :=
  if n < 0 || n > 255 then
    .error ("Byte.toHex: " ++ toString n ++ " is out of range 0-255")
  else
    .ok (byteToHex n)
def fromHex (s : String) : Except String Int :=
  match s.toList with
  | [hi, lo] =>
    match hexValue hi, hexValue lo with
    | some h, some l => .ok (h * 16 + l)
    | _, _ => .error ("Byte.fromHex: invalid hex '" ++ s ++ "'")
  | _ => .error ("Byte.fromHex: expected exactly 2 hex chars, got '" ++ s ++ "'")
end AverByte"#;
734
735pub(crate) fn pure_fns(ctx: &CodegenContext) -> Vec<&FnDef> {
736 ctx.modules
737 .iter()
738 .flat_map(|m| m.fn_defs.iter())
739 .chain(ctx.fn_defs.iter())
740 .filter(|fd| toplevel::is_pure_fn(fd))
741 .collect()
742}
743
744pub(crate) fn recursive_type_names(ctx: &CodegenContext) -> HashSet<String> {
745 ctx.modules
746 .iter()
747 .flat_map(|m| m.type_defs.iter())
748 .chain(ctx.type_defs.iter())
749 .filter(|td| toplevel::is_recursive_type_def(td))
750 .map(|td| toplevel::type_def_name(td).to_string())
751 .collect()
752}
753
754pub(crate) fn recursive_pure_fn_names(ctx: &CodegenContext) -> HashSet<String> {
755 let pure_names: HashSet<String> = pure_fns(ctx)
756 .into_iter()
757 .map(|fd| fd.name.clone())
758 .collect();
759 let mut callgraph_items = ctx.items.clone();
760 for module in &ctx.modules {
761 for fd in &module.fn_defs {
762 callgraph_items.push(TopLevel::FnDef(fd.clone()));
763 }
764 }
765 call_graph::find_recursive_fns(&callgraph_items)
766 .into_iter()
767 .filter(|name| pure_names.contains(name))
768 .collect()
769}
770
771fn verify_counter_key(vb: &crate::ast::VerifyBlock) -> String {
772 match &vb.kind {
773 VerifyKind::Cases => format!("fn:{}", vb.fn_name),
774 VerifyKind::Law(law) => format!("law:{}::{}", vb.fn_name, law.name),
775 }
776}
777
/// Lean project name, derived from the entry file's basename.
fn lean_project_name(ctx: &CodegenContext) -> String {
    crate::codegen::common::entry_basename(ctx)
}
781
782pub(crate) fn find_type_def<'a>(ctx: &'a CodegenContext, type_name: &str) -> Option<&'a TypeDef> {
783 ctx.modules
784 .iter()
785 .flat_map(|m| m.type_defs.iter())
786 .chain(ctx.type_defs.iter())
787 .find(|td| toplevel::type_def_name(td) == type_name)
788}
789
790pub(super) fn bound_expr_to_lean(expr: &Spanned<Expr>) -> String {
791 match &expr.node {
792 Expr::Literal(crate::ast::Literal::Int(n)) => format!("{}", n),
793 Expr::Ident(name) => expr::aver_name_to_lean(name),
794 Expr::FnCall(f, args) => {
795 if let Some(dotted) = crate::codegen::common::expr_to_dotted_name(&f.node) {
796 if dotted == "List.len" && args.len() == 1 {
798 return format!("{}.length", bound_expr_to_lean(&args[0]));
799 }
800 let lean_args: Vec<String> = args.iter().map(bound_expr_to_lean).collect();
801 format!(
802 "({} {})",
803 expr::aver_name_to_lean(&dotted),
804 lean_args.join(" ")
805 )
806 } else {
807 "0".to_string()
808 }
809 }
810 Expr::Attr(obj, field) => format!(
811 "{}.{}",
812 bound_expr_to_lean(obj),
813 expr::aver_name_to_lean(field)
814 ),
815 _ => "0".to_string(),
816 }
817}
818
819pub(crate) fn sizeof_measure_param_indices(fd: &FnDef) -> Vec<usize> {
820 fd.params
821 .iter()
822 .enumerate()
823 .filter_map(|(idx, (_, type_name))| {
824 (!crate::codegen::recursion::detect::is_scalar_like_type(type_name)).then_some(idx)
825 })
826 .collect()
827}
828
/// Structured proof-mode diagnostics: the issues reported while analysing
/// recursion plans for proof emission (the plans themselves are discarded).
pub fn proof_mode_findings(ctx: &CodegenContext) -> Vec<ProofModeIssue> {
    let (_plans, issues) = crate::codegen::recursion::analyze_plans(ctx);
    issues
}
837
838pub fn proof_mode_issues(ctx: &CodegenContext) -> Vec<String> {
839 proof_mode_findings(ctx)
840 .into_iter()
841 .map(|issue| issue.message)
842 .collect()
843}
844
/// Transpile with the default verify emission mode (`native_decide`).
pub fn transpile(ctx: &CodegenContext) -> ProjectOutput {
    transpile_with_verify_mode(ctx, VerifyEmitMode::NativeDecide)
}
849
/// Transpile in proof mode: recursive functions are emitted with their
/// `RecursionPlan`-based termination arguments where available.
pub fn transpile_for_proof_mode(
    ctx: &CodegenContext,
    verify_mode: VerifyEmitMode,
) -> ProjectOutput {
    transpile_unified(ctx, verify_mode, LeanEmitMode::Proof)
}
860
/// Transpile in standard mode with an explicit verify emission mode
/// (`native_decide`, `sorry`, or theorem skeletons).
pub fn transpile_with_verify_mode(
    ctx: &CodegenContext,
    verify_mode: VerifyEmitMode,
) -> ProjectOutput {
    transpile_unified(ctx, verify_mode, LeanEmitMode::Standard)
}
872
873fn emit_lifted_effectful_functions(
882 ctx: &CodegenContext,
883 recursive_fns: &HashSet<String>,
884 sections: &mut Vec<String>,
885) {
886 use crate::types::checker::effect_classification::is_classified;
887
888 let reachable = crate::codegen::common::verify_reachable_fn_names(&ctx.items);
894
895 let mut helpers: std::collections::HashMap<String, Vec<String>> =
900 std::collections::HashMap::new();
901 for item in &ctx.items {
902 let TopLevel::FnDef(fd) = item else { continue };
903 if fd.effects.is_empty() || fd.name == "main" {
904 continue;
905 }
906 if !fd.effects.iter().all(|e| is_classified(&e.node)) {
907 continue;
908 }
909 if !reachable.contains(&fd.name) {
910 continue;
911 }
912 helpers.insert(
913 fd.name.clone(),
914 fd.effects.iter().map(|e| e.node.clone()).collect(),
915 );
916 }
917
918 let mut lifted_fns: Vec<(String, crate::ast::FnDef)> = Vec::new();
919 for item in &ctx.items {
920 let TopLevel::FnDef(fd) = item else { continue };
921 if fd.effects.is_empty() || fd.name == "main" {
922 continue;
923 }
924 if !fd.effects.iter().all(|e| is_classified(&e.node)) {
925 continue;
926 }
927 if !reachable.contains(&fd.name) {
928 continue;
929 }
930 let Ok(Some(lifted)) =
931 crate::types::checker::effect_lifting::lift_fn_def_with_helpers(fd, &helpers)
932 else {
933 continue;
934 };
935 lifted_fns.push((fd.name.clone(), lifted));
936 }
937
938 let eligible_names: std::collections::HashSet<String> =
946 lifted_fns.iter().map(|(n, _)| n.clone()).collect();
947 let mut emitted: std::collections::HashSet<String> = std::collections::HashSet::new();
948 let mut order: Vec<usize> = Vec::new();
949 let mut remaining: Vec<usize> = (0..lifted_fns.len()).collect();
950 while !remaining.is_empty() {
951 let before = remaining.len();
952 remaining.retain(|&idx| {
953 let body_calls = collect_called_idents_in_body(&lifted_fns[idx].1.body);
954 let ready = body_calls
955 .iter()
956 .all(|name| !eligible_names.contains(name) || emitted.contains(name));
957 if ready {
958 emitted.insert(lifted_fns[idx].0.clone());
959 order.push(idx);
960 false
961 } else {
962 true
963 }
964 });
965 if remaining.len() == before {
966 order.append(&mut remaining);
970 }
971 }
972
973 for idx in order {
974 let (_, lifted) = &lifted_fns[idx];
975 if let Some(code) = toplevel::emit_fn_def(lifted, recursive_fns, ctx) {
976 sections.push(code);
977 sections.push(String::new());
978 }
979 }
980}
981
982fn collect_called_idents_in_body(body: &crate::ast::FnBody) -> std::collections::HashSet<String> {
983 use crate::ast::{Expr, Spanned, Stmt};
984 let mut out = std::collections::HashSet::new();
985 fn walk(expr: &Spanned<Expr>, out: &mut std::collections::HashSet<String>) {
986 match &expr.node {
987 Expr::FnCall(callee, args) => {
988 if let Expr::Ident(name) | Expr::Resolved { name, .. } = &callee.node {
989 out.insert(name.clone());
990 }
991 walk(callee, out);
992 for a in args {
993 walk(a, out);
994 }
995 }
996 Expr::BinOp(_, l, r) => {
997 walk(l, out);
998 walk(r, out);
999 }
1000 Expr::Match { subject, arms } => {
1001 walk(subject, out);
1002 for arm in arms {
1003 walk(&arm.body, out);
1004 }
1005 }
1006 Expr::Attr(inner, _) | Expr::ErrorProp(inner) => walk(inner, out),
1007 Expr::Constructor(_, Some(inner)) => walk(inner, out),
1008 Expr::List(items) | Expr::Tuple(items) | Expr::IndependentProduct(items, _) => {
1009 for i in items {
1010 walk(i, out);
1011 }
1012 }
1013 Expr::MapLiteral(pairs) => {
1014 for (k, v) in pairs {
1015 walk(k, out);
1016 walk(v, out);
1017 }
1018 }
1019 Expr::RecordCreate { fields, .. } => {
1020 for (_, v) in fields {
1021 walk(v, out);
1022 }
1023 }
1024 Expr::RecordUpdate { base, updates, .. } => {
1025 walk(base, out);
1026 for (_, v) in updates {
1027 walk(v, out);
1028 }
1029 }
1030 Expr::InterpolatedStr(parts) => {
1031 for part in parts {
1032 if let crate::ast::StrPart::Parsed(inner) = part {
1033 walk(inner, out);
1034 }
1035 }
1036 }
1037 _ => {}
1038 }
1039 }
1040 for stmt in body.stmts() {
1041 match stmt {
1042 Stmt::Expr(e) => walk(e, &mut out),
1043 Stmt::Binding(_, _, e) => walk(e, &mut out),
1044 }
1045 }
1046 out
1047}
1048
#[cfg(test)]
/// Test-only: the full prelude with every helper included, regardless of
/// what an actual program body would require.
fn generate_prelude() -> String {
    generate_prelude_for_body("", true)
}
1053
#[cfg(test)]
/// Test-only: build the Lean prelude containing exactly the helper blocks
/// the given generated body needs (or all of them when
/// `include_all_helpers` is set).
fn generate_prelude_for_body(body: &str, include_all_helpers: bool) -> String {
    let mut sections = vec![LEAN_PRELUDE_HEADER.to_string()];
    // Trust header goes in whenever any builtin record is in play.
    if include_all_helpers || crate::codegen::builtin_records::needs_trust_header(body) {
        let no_effects = crate::codegen::common::DeclaredEffects {
            bare_namespaces: std::collections::HashSet::new(),
            methods: std::collections::HashSet::new(),
        };
        let uses_branch_path = body.contains("BranchPath");
        let header = crate::types::checker::proof_trust_header::generate_commented(
            "-- ",
            &no_effects,
            uses_branch_path,
        );
        sections.push(header);
    }
    for record in crate::codegen::builtin_records::needed_records(body, include_all_helpers) {
        sections.push(crate::codegen::builtin_records::render_lean(record));
    }

    for helper in crate::codegen::builtin_helpers::needed_helpers(body, include_all_helpers) {
        match helper.key {
            "BranchPath" => sections.push(LEAN_PRELUDE_BRANCH_PATH.to_string()),
            "AverList" => sections.push(LEAN_PRELUDE_AVER_LIST.to_string()),
            "StringHelpers" => sections.push(LEAN_PRELUDE_STRING_HELPERS.to_string()),
            "NumericParse" => sections.push(LEAN_PRELUDE_NUMERIC_PARSE.to_string()),
            "CharByte" => sections.push(LEAN_PRELUDE_CHAR_BYTE.to_string()),
            "AverMeasure" => sections.push(LEAN_PRELUDE_AVER_MEASURE.to_string()),
            "AverMap" => sections.push(generate_map_prelude(body, include_all_helpers)),
            "ProofFuel" => sections.push(LEAN_PRELUDE_PROOF_FUEL.to_string()),
            "FloatInstances" => {
                sections.push(LEAN_PRELUDE_FLOAT_COE.to_string());
                sections.push(LEAN_PRELUDE_FLOAT_DEC_EQ.to_string());
            }
            "ExceptInstances" => {
                sections.push(LEAN_PRELUDE_EXCEPT_DEC_EQ.to_string());
                sections.push(LEAN_PRELUDE_EXCEPT_NS.to_string());
                sections.push(LEAN_PRELUDE_OPTION_TO_EXCEPT.to_string());
            }
            "StringHadd" => sections.push(LEAN_PRELUDE_STRING_HADD.to_string()),
            // Covered by builtin records; no dedicated prelude snippet.
            "ResultDatatype" | "OptionDatatype" | "OptionToResult" | "BranchPathDatatype" => {}
            other => panic!(
                "Lean backend has no implementation for builtin helper key '{}'. \
                 Add a match arm in generate_prelude_for_body or remove the key \
                 from BUILTIN_HELPERS.",
                other
            ),
        }
    }

    sections.join("\n\n")
}
1119
1120fn generate_map_prelude(body: &str, include_all_helpers: bool) -> String {
1121 let mut parts = vec![AVER_MAP_PRELUDE_BASE.to_string()];
1122
1123 let needs_has_set_self = include_all_helpers || body.contains("AverMap.has_set_self");
1124 let needs_get_set_self = include_all_helpers || body.contains("AverMap.get_set_self");
1125 let needs_get_set_other = include_all_helpers
1126 || body.contains("AverMap.get_set_other")
1127 || body.contains("AverMap.has_set_other");
1128 let needs_has_set_other = include_all_helpers || body.contains("AverMap.has_set_other");
1129
1130 if needs_has_set_self {
1131 parts.push(AVER_MAP_PRELUDE_HAS_SET_SELF.to_string());
1132 }
1133 if needs_get_set_self {
1134 parts.push(AVER_MAP_PRELUDE_GET_SET_SELF.to_string());
1135 }
1136 if needs_get_set_other {
1137 parts.push(AVER_MAP_PRELUDE_GET_SET_OTHER.to_string());
1138 }
1139 if needs_has_set_other {
1140 parts.push(AVER_MAP_PRELUDE_HAS_SET_OTHER.to_string());
1141 }
1142
1143 parts.push(AVER_MAP_PRELUDE_END.to_string());
1144 parts.join("\n\n")
1145}
1146
/// Render `lakefile.lean` for the generated package. The library's roots
/// are the entry module plus `extra_roots`, each quoted as a Lean name
/// literal (backtick syntax).
fn generate_lakefile_with_roots(project_name: &str, extra_roots: &[String]) -> String {
    let roots_str = std::iter::once(project_name)
        .chain(extra_roots.iter().map(String::as_str))
        .map(|root| format!("`{}", root))
        .collect::<Vec<String>>()
        .join(", ");
    // Package names are conventionally lowercase; the lean_lib keeps the
    // original casing so it matches the generated file names.
    format!(
        r#"import Lake
open Lake DSL

package «{}» where
  version := v!"0.1.0"

@[default_target]
lean_lib «{}» where
  srcDir := "."
  roots := #[{}]
"#,
        project_name.to_lowercase(),
        project_name,
        roots_str
    )
}
1170
/// Contents of the `lean-toolchain` file pinning the Lean version the
/// generated project builds against.
fn generate_toolchain() -> String {
    String::from("leanprover/lean4:v4.15.0\n")
}
1174
/// How function definitions are emitted by `transpile_unified`.
#[derive(Clone, Copy)]
enum LeanEmitMode {
    // Plain definitions; recursion handled by the default strategy.
    Standard,
    // Proof-oriented emission: recursive functions use `RecursionPlan`s
    // (termination measures) when analysis produced one.
    Proof,
}
1180
/// Core of the Lean backend: turn the whole project into a set of Lean
/// files (one per module, an entry file, the shared `AverCommon` prelude,
/// plus `lakefile.lean` and `lean-toolchain`).
///
/// `verify_mode` controls how verify blocks are discharged; `emit_mode`
/// selects standard vs. proof-oriented emission of recursive functions.
fn transpile_unified(
    ctx: &CodegenContext,
    verify_mode: VerifyEmitMode,
    emit_mode: LeanEmitMode,
) -> ProjectOutput {
    use crate::codegen::recursion::RecursionPlan;

    // Recursion analysis: plans are only computed (and only used) in Proof
    // mode; Standard mode emits with the plain recursive-name sets.
    let recursive_fns = call_graph::find_recursive_fns(&ctx.items);
    let recursive_names = recursive_pure_fn_names(ctx);
    let recursive_types = recursive_type_names(ctx);
    let (plans, _proof_issues) = match emit_mode {
        LeanEmitMode::Proof => crate::codegen::recursion::analyze_plans(ctx),
        LeanEmitMode::Standard => (HashMap::<String, RecursionPlan>::new(), Vec::new()),
    };

    // Emit pure functions per scope. Mutual-recursion components (len > 1)
    // become a Lean `mutual` group; singletons are emitted directly. Proof
    // mode only uses the proof emitters when every member has a plan.
    let pure_per_scope = crate::codegen::common::route_pure_components_per_scope(
        ctx,
        toplevel::is_pure_fn,
        |comp| {
            let mut out: Vec<String> = Vec::new();
            if comp.len() > 1 {
                let code = match emit_mode {
                    LeanEmitMode::Proof => {
                        let all_supported = comp.iter().all(|fd| plans.contains_key(&fd.name));
                        if all_supported {
                            toplevel::emit_mutual_group_proof(comp, ctx, &plans)
                        } else {
                            toplevel::emit_mutual_group(comp, ctx)
                        }
                    }
                    LeanEmitMode::Standard => toplevel::emit_mutual_group(comp, ctx),
                };
                out.push(code);
                out.push(String::new());
            } else if let Some(fd) = comp.first() {
                let emitted = match emit_mode {
                    LeanEmitMode::Proof => {
                        // Recursive but unplanned functions fall back to the
                        // standard emitter even in Proof mode.
                        let is_recursive = recursive_names.contains(&fd.name);
                        if is_recursive && !plans.contains_key(&fd.name) {
                            toplevel::emit_fn_def(fd, &recursive_names, ctx)
                        } else {
                            toplevel::emit_fn_def_proof(fd, plans.get(&fd.name).cloned(), ctx)
                        }
                    }
                    LeanEmitMode::Standard => toplevel::emit_fn_def(fd, &recursive_fns, ctx),
                };
                if let Some(code) = emitted {
                    out.push(code);
                    out.push(String::new());
                }
            }
            out
        },
    );

    // Oracle-lifted effectful functions (entry scope only).
    let mut entry_lifted_sections: Vec<String> = Vec::new();
    let lifted_recursive_names = match emit_mode {
        LeanEmitMode::Proof => &recursive_names,
        LeanEmitMode::Standard => &recursive_fns,
    };
    emit_lifted_effectful_functions(ctx, lifted_recursive_names, &mut entry_lifted_sections);

    // Decision blocks.
    let mut entry_decision_sections: Vec<String> = Vec::new();
    for item in &ctx.items {
        if let TopLevel::Decision(db) = item {
            entry_decision_sections.push(toplevel::emit_decision(db));
            entry_decision_sections.push(String::new());
        }
    }

    // Verify blocks. Theorem numbering is continued per counter key across
    // all blocks that target the same function/law.
    let mut entry_verify_sections: Vec<String> = Vec::new();
    let mut verify_case_counters: HashMap<String, usize> = HashMap::new();
    for item in &ctx.items {
        if let TopLevel::Verify(vb) = item {
            let key = verify_counter_key(vb);
            let start_idx = *verify_case_counters.get(&key).unwrap_or(&0);
            let (emitted, next_idx) = toplevel::emit_verify_block(vb, ctx, verify_mode, start_idx);
            verify_case_counters.insert(key, next_idx);
            entry_verify_sections.push(emitted);
            entry_verify_sections.push(String::new());
        }
    }

    // One Lean file per module. `union_body` accumulates all generated text
    // so the shared prelude can be trimmed to what is actually used.
    let mut module_files: Vec<(String, String)> = Vec::new();
    let mut union_body = String::new();

    for module in &ctx.modules {
        let mut body_sections: Vec<String> = Vec::new();
        for td in &module.type_defs {
            body_sections.push(toplevel::emit_type_def(td));
            if toplevel::is_recursive_type_def(td) {
                // Recursive types need a hand-rolled DecidableEq instance,
                // and (in Proof mode) a size measure for termination proofs.
                body_sections.push(toplevel::emit_recursive_decidable_eq(
                    toplevel::type_def_name(td),
                ));
                if matches!(emit_mode, LeanEmitMode::Proof)
                    && let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types)
                {
                    body_sections.push(measure);
                }
            }
            body_sections.push(String::new());
        }
        if let Some(scope_sections) = pure_per_scope.by_scope.get(&module.prefix) {
            body_sections.extend(scope_sections.clone());
        }
        let body = body_sections.join("\n");
        union_body.push_str(&body);
        union_body.push('\n');

        // Every module imports the shared prelude plus its dependencies,
        // and opens each dependency's namespace.
        let mut imports = vec!["import AverCommon".to_string()];
        for d in &module.depends {
            imports.push(format!("import {}", d));
        }
        let opens: Vec<String> = module
            .depends
            .iter()
            .map(|d| format!("open {}", d))
            .collect();

        let opens_str = if opens.is_empty() {
            String::new()
        } else {
            format!("\n{}\n", opens.join("\n"))
        };
        let content = format!(
            "{}\n{}\nnamespace {}\n\n{}\nend {}\n",
            imports.join("\n"),
            opens_str,
            module.prefix,
            body,
            module.prefix
        );
        let path = module.prefix.replace('.', "/");
        module_files.push((format!("{}.lean", path), content));
    }

    // Entry file: types, then pure fns, lifted fns, decisions, verifies.
    let mut entry_body_sections: Vec<String> = Vec::new();
    for td in &ctx.type_defs {
        entry_body_sections.push(toplevel::emit_type_def(td));
        if toplevel::is_recursive_type_def(td) {
            entry_body_sections.push(toplevel::emit_recursive_decidable_eq(
                toplevel::type_def_name(td),
            ));
            if matches!(emit_mode, LeanEmitMode::Proof)
                && let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types)
            {
                entry_body_sections.push(measure);
            }
        }
        entry_body_sections.push(String::new());
    }
    // Entry-scope pure functions live under the empty scope key.
    if let Some(entry_pure) = pure_per_scope.by_scope.get("") {
        entry_body_sections.extend(entry_pure.clone());
    }
    entry_body_sections.extend(entry_lifted_sections);
    entry_body_sections.extend(entry_decision_sections);
    entry_body_sections.extend(entry_verify_sections);

    let entry_body = entry_body_sections.join("\n");
    union_body.push_str(&entry_body);
    union_body.push('\n');

    let project_name = lean_project_name(ctx);
    let mut entry_imports = vec!["import AverCommon".to_string()];
    for m in &ctx.modules {
        entry_imports.push(format!("import {}", m.prefix));
    }
    let entry_opens: Vec<String> = ctx
        .modules
        .iter()
        .map(|m| format!("open {}", m.prefix))
        .collect();
    let mut entry_parts = vec![entry_imports.join("\n")];
    if !entry_opens.is_empty() {
        entry_parts.push(entry_opens.join("\n"));
    }
    // Trust header: emitted only when the program uses independent products
    // (BranchPath) or classified effects covered by the proof subset.
    let declared = crate::codegen::common::collect_declared_effects(ctx);
    let has_ip = union_body.contains("BranchPath");
    let has_classified =
        crate::types::checker::effect_classification::classifications_for_proof_subset()
            .iter()
            .any(|c| declared.includes(c.method));
    if has_ip || has_classified {
        entry_parts.push(
            crate::types::checker::proof_trust_header::generate_commented("-- ", &declared, has_ip),
        );
    }
    let subtype_block = crate::types::checker::oracle_subtypes::lean_subtypes(&declared);
    if !subtype_block.is_empty() {
        entry_parts.push(subtype_block);
    }
    entry_parts.push(entry_body);
    let entry_content = entry_parts.join("\n\n");

    // Shared prelude trimmed to what the union of all generated bodies uses.
    let common_content = build_common_lean(&union_body);

    // Lake roots: the shared prelude plus every module prefix.
    let mut extra_roots: Vec<String> = vec!["AverCommon".to_string()];
    for m in &ctx.modules {
        extra_roots.push(m.prefix.clone());
    }
    let lakefile = generate_lakefile_with_roots(&project_name, &extra_roots);
    let toolchain = generate_toolchain();

    let mut files = module_files;
    files.push((format!("{}.lean", project_name), entry_content));
    files.push(("AverCommon.lean".to_string(), common_content));
    files.push(("lakefile.lean".to_string(), lakefile));
    files.push(("lean-toolchain".to_string(), toolchain));
    ProjectOutput { files }
}
1414
1415fn build_common_lean(union_body: &str) -> String {
1416 let mut parts = vec![LEAN_PRELUDE_HEADER.to_string()];
1417 for record in crate::codegen::builtin_records::needed_records(union_body, false) {
1418 parts.push(crate::codegen::builtin_records::render_lean(record));
1419 }
1420 for helper in crate::codegen::builtin_helpers::needed_helpers(union_body, false) {
1421 match helper.key {
1422 "BranchPath" => parts.push(LEAN_PRELUDE_BRANCH_PATH.to_string()),
1423 "AverList" => parts.push(LEAN_PRELUDE_AVER_LIST.to_string()),
1424 "StringHelpers" => parts.push(LEAN_PRELUDE_STRING_HELPERS.to_string()),
1425 "NumericParse" => parts.push(LEAN_PRELUDE_NUMERIC_PARSE.to_string()),
1426 "CharByte" => parts.push(LEAN_PRELUDE_CHAR_BYTE.to_string()),
1427 "AverMeasure" => parts.push(LEAN_PRELUDE_AVER_MEASURE.to_string()),
1428 "AverMap" => parts.push(generate_map_prelude(union_body, false)),
1429 "ProofFuel" => parts.push(LEAN_PRELUDE_PROOF_FUEL.to_string()),
1430 "FloatInstances" => parts.extend([
1431 LEAN_PRELUDE_FLOAT_COE.to_string(),
1432 LEAN_PRELUDE_FLOAT_DEC_EQ.to_string(),
1433 ]),
1434 "ExceptInstances" => parts.extend([
1435 LEAN_PRELUDE_EXCEPT_DEC_EQ.to_string(),
1436 LEAN_PRELUDE_EXCEPT_NS.to_string(),
1437 LEAN_PRELUDE_OPTION_TO_EXCEPT.to_string(),
1438 ]),
1439 "StringHadd" => parts.push(LEAN_PRELUDE_STRING_HADD.to_string()),
1440 "ResultDatatype" | "OptionDatatype" | "OptionToResult" | "BranchPathDatatype" => {}
1441 other => panic!(
1442 "Lean backend has no implementation for builtin helper key '{}'. \
1443 Add a match arm in build_common_lean or remove the key from BUILTIN_HELPERS.",
1444 other
1445 ),
1446 }
1447 }
1448 parts.join("\n\n")
1449}
1450
1451#[cfg(test)]
1452mod tests {
1453 use super::{
1454 VerifyEmitMode, generate_prelude, proof_mode_issues, recurrence, transpile,
1455 transpile_for_proof_mode, transpile_with_verify_mode,
1456 };
1457 use crate::ast::{
1458 BinOp, Expr, FnBody, FnDef, Literal, MatchArm, Pattern, Spanned, Stmt, TailCallData,
1459 TopLevel, TypeDef, TypeVariant, VerifyBlock, VerifyGiven, VerifyGivenDomain, VerifyKind,
1460 VerifyLaw,
1461 };
1462
    // Shorthand: wrap an expression in a span-less `Spanned`.
    fn sb(e: Expr) -> Spanned<Expr> {
        Spanned::bare(e)
    }
    // Shorthand: boxed variant of `sb` for AST fields that take Box<Spanned<Expr>>.
    fn sbb(e: Expr) -> Box<Spanned<Expr>> {
        Box::new(Spanned::bare(e))
    }
1471 use crate::codegen::{CodegenContext, build_context};
1472 use crate::source::parse_source;
1473 use crate::tco;
1474 use crate::types::checker::run_type_check_full;
1475 use std::collections::{HashMap, HashSet};
1476 use std::sync::Arc as Rc;
1477
    // A minimal CodegenContext with no items, types, or modules; project
    // name "verify_mode" so the generated entry file is "Verify_mode.lean".
    fn empty_ctx() -> CodegenContext {
        CodegenContext {
            items: vec![],
            fn_sigs: HashMap::new(),
            memo_fns: HashSet::new(),
            memo_safe_types: HashSet::new(),
            type_defs: vec![],
            fn_defs: vec![],
            project_name: "verify_mode".to_string(),
            modules: vec![],
            module_prefixes: HashSet::new(),
            policy: None,
            emit_replay_runtime: false,
            runtime_policy_from_env: false,
            guest_entry: None,
            emit_self_host_support: false,
            extra_fn_defs: Vec::new(),
            mutual_tco_members: HashSet::new(),
            buffer_build_sinks: HashMap::new(),
            buffer_fusion_sites: Vec::new(),
            synthesized_buffered_fns: Vec::new(),
        }
    }
1501
    // Parse, TCO-transform, and type-check an Aver source string, then build
    // a CodegenContext from it. Panics if parsing or type-checking fails.
    fn ctx_from_source(source: &str, project_name: &str) -> CodegenContext {
        let mut items = parse_source(source).expect("source should parse");
        tco::transform_program(&mut items);
        let tc = run_type_check_full(&items, None);
        assert!(
            tc.errors.is_empty(),
            "source should typecheck without errors: {:?}",
            tc.errors
        );
        build_context(items, &tc, HashSet::new(), project_name.to_string(), vec![])
    }
1513
1514 fn generated_lean_file(out: &crate::codegen::ProjectOutput) -> String {
1522 out.files
1523 .iter()
1524 .filter_map(|(name, content)| {
1525 (name.ends_with(".lean") && name != "lakefile.lean").then_some(content.as_str())
1526 })
1527 .collect::<Vec<&str>>()
1528 .join("\n")
1529 }
1530
    // Empty context plus one verify block for `f` with a single trivial
    // case `1 => 1`.
    fn empty_ctx_with_verify_case() -> CodegenContext {
        let mut ctx = empty_ctx();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 1,
            cases: vec![(
                sb(Expr::Literal(Literal::Int(1))),
                sb(Expr::Literal(Literal::Int(1))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Cases,
            trace: false,
            cases_givens: vec![],
        }));
        ctx
    }
1550
1551 fn empty_ctx_with_two_verify_blocks_same_fn() -> CodegenContext {
1552 let mut ctx = empty_ctx();
1553 ctx.items.push(TopLevel::Verify(VerifyBlock {
1554 fn_name: "f".to_string(),
1555 line: 1,
1556 cases: vec![(
1557 sb(Expr::Literal(Literal::Int(1))),
1558 sb(Expr::Literal(Literal::Int(1))),
1559 )],
1560 case_spans: vec![],
1561 case_givens: vec![],
1562 case_hostile_origins: vec![],
1563 case_hostile_profiles: vec![],
1564 kind: VerifyKind::Cases,
1565 trace: false,
1566 cases_givens: vec![],
1567 }));
1568 ctx.items.push(TopLevel::Verify(VerifyBlock {
1569 fn_name: "f".to_string(),
1570 line: 2,
1571 cases: vec![(
1572 sb(Expr::Literal(Literal::Int(2))),
1573 sb(Expr::Literal(Literal::Int(2))),
1574 )],
1575 case_spans: vec![],
1576 case_givens: vec![],
1577 case_hostile_origins: vec![],
1578 case_hostile_profiles: vec![],
1579 kind: VerifyKind::Cases,
1580 trace: false,
1581 cases_givens: vec![],
1582 }));
1583 ctx
1584 }
1585
    // Empty context plus an `add(a, b) = a + b` function and a verify-law
    // block asserting commutativity, with two sampled cases and two givens
    // (an int range for `a`, an explicit list for `b`).
    fn empty_ctx_with_verify_law() -> CodegenContext {
        let add = FnDef {
            name: "add".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("a".to_string())),
                sbb(Expr::Ident("b".to_string())),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(add.clone());
        ctx.items.push(TopLevel::FnDef(add));
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 1,
            // Sampled cases: add(1,2) = add(2,1) and add(2,3) = add(3,2).
            cases: vec![
                (
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("add".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(1))),
                            sb(Expr::Literal(Literal::Int(2))),
                        ],
                    )),
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("add".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(2))),
                            sb(Expr::Literal(Literal::Int(1))),
                        ],
                    )),
                ),
                (
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("add".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(2))),
                            sb(Expr::Literal(Literal::Int(3))),
                        ],
                    )),
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("add".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(3))),
                            sb(Expr::Literal(Literal::Int(2))),
                        ],
                    )),
                ),
            ],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            // Law: forall a in 1..2, b in [2, 3]: add(a, b) = add(b, a).
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "commutative".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![
                            sb(Expr::Literal(Literal::Int(2))),
                            sb(Expr::Literal(Literal::Int(3))),
                        ]),
                    },
                ],
                when: None,
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Ident("b".to_string())),
                    ],
                )),
                rhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("b".to_string())),
                        sb(Expr::Ident("a".to_string())),
                    ],
                )),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        ctx
    }
1687
    // The prelude must route String.fromFloat through normalizeFloatString
    // so Lean's float formatting matches the runtime's.
    #[test]
    fn prelude_normalizes_float_string_format() {
        let prelude = generate_prelude();
        assert!(
            prelude.contains("private def normalizeFloatString (s : String) : String :="),
            "missing normalizeFloatString helper in prelude"
        );
        assert!(
            prelude.contains(
                "def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)"
            ),
            "String.fromFloat should normalize Lean float formatting"
        );
    }
1702
    // Char.fromCode must reject out-of-range and surrogate code points.
    #[test]
    fn prelude_validates_char_from_code_unicode_bounds() {
        let prelude = generate_prelude();
        assert!(
            prelude.contains("if n < 0 || n > 1114111 then none"),
            "Char.fromCode should reject code points above Unicode max"
        );
        assert!(
            prelude.contains("else if n >= 55296 && n <= 57343 then none"),
            "Char.fromCode should reject surrogate code points"
        );
    }
1715
    // The full prelude carries the AverMap set/get helper theorems.
    #[test]
    fn prelude_includes_map_set_helper_lemmas() {
        let prelude = generate_prelude();
        assert!(
            prelude.contains("theorem has_set_self [DecidableEq α]"),
            "missing AverMap.has_set_self helper theorem"
        );
        assert!(
            prelude.contains("theorem get_set_self [DecidableEq α]"),
            "missing AverMap.get_set_self helper theorem"
        );
    }
1728
    // A program that never uses maps must not pull the AverMap prelude
    // into the generated output (prelude trimming works).
    #[test]
    fn lean_output_without_map_usage_omits_map_prelude() {
        let ctx = ctx_from_source(
            r#"
module NoMap
  intent = "Simple pure program without maps."

fn addOne(n: Int) -> Int
  n + 1

verify addOne
  addOne(1) => 2
"#,
            "nomap",
        );
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        assert!(
            !lean.contains("namespace AverMap"),
            "did not expect AverMap prelude in program without map usage:\n{}",
            lean
        );
    }
1753
    // Default transpile discharges verify cases with `native_decide`.
    #[test]
    fn transpile_emits_native_decide_for_verify_by_default() {
        let out = transpile(&empty_ctx_with_verify_case());
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("example : 1 = 1 := by native_decide"));
    }
1764
    // VerifyEmitMode::Sorry replaces the proof tactic with `sorry`.
    #[test]
    fn transpile_can_emit_sorry_for_verify_when_requested() {
        let out = transpile_with_verify_mode(&empty_ctx_with_verify_case(), VerifyEmitMode::Sorry);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("example : 1 = 1 := by sorry"));
    }
1775
    // TheoremSkeleton mode emits named theorems with `sorry` bodies.
    #[test]
    fn transpile_can_emit_theorem_skeletons_for_verify() {
        let out = transpile_with_verify_mode(
            &empty_ctx_with_verify_case(),
            VerifyEmitMode::TheoremSkeleton,
        );
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
        assert!(lean.contains(" sorry"));
    }
1790
    // Two verify blocks for the same fn share one theorem counter, so the
    // second block's case is numbered _2, not _1 again.
    #[test]
    fn theorem_skeleton_numbering_is_global_per_function_across_verify_blocks() {
        let out = transpile_with_verify_mode(
            &empty_ctx_with_two_verify_blocks_same_fn(),
            VerifyEmitMode::TheoremSkeleton,
        );
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
        assert!(lean.contains("theorem f_verify_2 : 2 = 2 := by"));
    }
1805
    // A verify-law block yields a named universal theorem plus one
    // native_decide sample theorem per sampled case, with given comments.
    #[test]
    fn transpile_emits_named_theorems_for_verify_law() {
        let out = transpile(&empty_ctx_with_verify_law());
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("-- verify law add.commutative (2 cases)"));
        assert!(lean.contains("-- given a: Int = 1..2"));
        assert!(lean.contains("-- given b: Int = [2, 3]"));
        assert!(lean.contains(
            "theorem add_law_commutative : ∀ (a : Int) (b : Int), add a b = add b a := by"
        ));
        assert!(lean.contains(" intro a b"));
        assert!(lean.contains(" simp [add, Int.add_comm]"));
        assert!(lean.contains(
            "theorem add_law_commutative_sample_1 : add 1 2 = add 2 1 := by native_decide"
        ));
        assert!(lean.contains(
            "theorem add_law_commutative_sample_2 : add 2 3 = add 3 2 := by native_decide"
        ));
    }
1829
    // Spot-check the prelude's proved helper theorems (int/string
    // round-trips, split/intercalate lemmas, digit helpers).
    #[test]
    fn generate_prelude_emits_int_roundtrip_theorem() {
        let lean = generate_prelude();
        assert!(lean.contains(
            "theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n"
        ));
        assert!(lean.contains("theorem String.intercalate_empty_chars (s : String) :"));
        assert!(lean.contains("def splitOnCharGo"));
        assert!(lean.contains("theorem split_single_char_append"));
        assert!(lean.contains("theorem split_intercalate_trailing_single_char"));
        assert!(lean.contains("namespace AverDigits"));
        assert!(lean.contains("theorem String.charAt_length_none (s : String)"));
        assert!(lean.contains("theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->"));
    }
1844
    // A law with a `when` clause becomes a guarded theorem (the guard as a
    // hypothesis) and guarded sample theorems.
    #[test]
    fn transpile_emits_guarded_theorems_for_verify_law_when_clause() {
        let ctx = ctx_from_source(
            r#"
module GuardedLaw
  intent =
    "verify law with precondition"

fn pickGreater(a: Int, b: Int) -> Int
  match a > b
    true -> a
    false -> b

verify pickGreater law ordered
  given a: Int = [1, 2]
  given b: Int = [1, 2]
  when a > b
  pickGreater(a, b) => a
"#,
            "guarded_law",
        );
        let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- when (a > b)"));
        assert!(lean.contains(
            "theorem pickGreater_law_ordered : ∀ (a : Int) (b : Int), a = 1 ∨ a = 2 -> b = 1 ∨ b = 2 -> (a > b) = true -> pickGreater a b = a := by"
        ));
        assert!(lean.contains(
            "theorem pickGreater_law_ordered_sample_1 : (1 > 1) = true -> pickGreater 1 1 = 1 := by"
        ));
        assert!(lean.contains(
            "theorem pickGreater_law_ordered_sample_4 : (2 > 2) = true -> pickGreater 2 2 = 2 := by"
        ));
    }
1880
    // A law whose name is `<fn>Spec` with the canonical unary shape gets the
    // `<fn>_eq_<fn>Spec` theorem naming instead of the generic law naming.
    #[test]
    fn transpile_uses_spec_theorem_names_for_declared_spec_laws() {
        let ctx = ctx_from_source(
            r#"
module SpecDemo
  intent =
    "spec demo"

fn absVal(x: Int) -> Int
  match x < 0
    true -> 0 - x
    false -> x

fn absValSpec(x: Int) -> Int
  match x < 0
    true -> 0 - x
    false -> x

verify absVal law absValSpec
  given x: Int = [-2, -1, 0, 1, 2]
  absVal(x) => absValSpec(x)
"#,
            "spec_demo",
        );
        let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- verify law absVal.spec absValSpec (5 cases)"));
        assert!(
            lean.contains(
                "theorem absVal_eq_absValSpec : ∀ (x : Int), absVal x = absValSpec x := by"
            )
        );
        assert!(lean.contains("theorem absVal_eq_absValSpec_checked_domain :"));
        assert!(lean.contains("theorem absVal_eq_absValSpec_sample_1 :"));
        assert!(!lean.contains("theorem absVal_law_absValSpec :"));
    }
1918
1919 #[test]
1920 fn transpile_keeps_noncanonical_spec_laws_as_regular_law_names() {
1921 let ctx = ctx_from_source(
1922 r#"
1923module SpecLawShape
1924 intent =
1925 "shape probe"
1926
1927fn foo(x: Int) -> Int
1928 x + 1
1929
1930fn fooSpec(seed: Int, x: Int) -> Int
1931 x + seed
1932
1933verify foo law fooSpec
1934 given x: Int = [1, 2]
1935 foo(x) => fooSpec(1, x)
1936"#,
1937 "spec_law_shape",
1938 );
1939 let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
1940 let lean = generated_lean_file(&out);
1941
1942 assert!(lean.contains("-- verify law foo.fooSpec (2 cases)"));
1943 assert!(lean.contains("theorem foo_law_fooSpec : ∀ (x : Int), foo x = fooSpec 1 x := by"));
1944 assert!(!lean.contains("theorem foo_eq_fooSpec :"));
1945 }
1946
1947 #[test]
1948 fn transpile_auto_proves_linear_int_canonical_spec_law_in_auto_mode() {
1949 let ctx = ctx_from_source(
1950 r#"
1951module SpecGap
1952 intent =
1953 "nontrivial canonical spec law"
1954
1955fn inc(x: Int) -> Int
1956 x + 1
1957
1958fn incSpec(x: Int) -> Int
1959 x + 2 - 1
1960
1961verify inc law incSpec
1962 given x: Int = [0, 1, 2]
1963 inc(x) => incSpec(x)
1964"#,
1965 "spec_gap",
1966 );
1967 let out = transpile(&ctx);
1968 let lean = generated_lean_file(&out);
1969
1970 assert!(lean.contains("-- verify law inc.spec incSpec (3 cases)"));
1971 assert!(lean.contains("theorem inc_eq_incSpec : ∀ (x : Int), inc x = incSpec x := by"));
1972 assert!(lean.contains("change (x + 1) = ((x + 2) - 1)"));
1973 assert!(lean.contains("omega"));
1974 assert!(!lean.contains(
1975 "-- universal theorem inc_eq_incSpec omitted: sampled law shape is not auto-proved yet"
1976 ));
1977 assert!(lean.contains("theorem inc_eq_incSpec_checked_domain :"));
1978 }
1979
1980 #[test]
1981 fn transpile_auto_proves_guarded_canonical_spec_law_in_auto_mode() {
1982 let ctx = ctx_from_source(
1983 r#"
1984module GuardedSpecGap
1985 intent =
1986 "guarded canonical spec law"
1987
1988fn clampNonNegative(x: Int) -> Int
1989 match x < 0
1990 true -> 0
1991 false -> x
1992
1993fn clampNonNegativeSpec(x: Int) -> Int
1994 match x < 0
1995 true -> 0
1996 false -> x
1997
1998verify clampNonNegative law clampNonNegativeSpec
1999 given x: Int = [-2, -1, 0, 1, 2]
2000 when x >= 0
2001 clampNonNegative(x) => clampNonNegativeSpec(x)
2002"#,
2003 "guarded_spec_gap",
2004 );
2005 let out = transpile(&ctx);
2006 let lean = generated_lean_file(&out);
2007
2008 assert!(lean.contains("-- when (x >= 0)"));
2009 assert!(lean.contains(
2010 "theorem clampNonNegative_eq_clampNonNegativeSpec : ∀ (x : Int), x = (-2) ∨ x = (-1) ∨ x = 0 ∨ x = 1 ∨ x = 2 -> (x >= 0) = true -> clampNonNegative x = clampNonNegativeSpec x := by"
2011 ));
2012 assert!(lean.contains("intro x h_x h_when"));
2013 assert!(lean.contains("simpa [clampNonNegative, clampNonNegativeSpec]"));
2014 assert!(!lean.contains(
2015 "-- universal theorem clampNonNegative_eq_clampNonNegativeSpec omitted: sampled law shape is not auto-proved yet"
2016 ));
2017 assert!(!lean.contains("cases h_x"));
2018 }
2019
2020 #[test]
2021 fn transpile_auto_proves_simp_normalized_canonical_spec_law_in_auto_mode() {
2022 let ctx = ctx_from_source(
2023 r#"
2024module SpecGapNonlinear
2025 intent =
2026 "nonlinear canonical spec law"
2027
2028fn square(x: Int) -> Int
2029 x * x
2030
2031fn squareSpec(x: Int) -> Int
2032 x * x + 0
2033
2034verify square law squareSpec
2035 given x: Int = [0, 1, 2]
2036 square(x) => squareSpec(x)
2037"#,
2038 "spec_gap_nonlinear",
2039 );
2040 let out = transpile(&ctx);
2041 let lean = generated_lean_file(&out);
2042
2043 assert!(lean.contains("-- verify law square.spec squareSpec (3 cases)"));
2044 assert!(
2045 lean.contains(
2046 "theorem square_eq_squareSpec : ∀ (x : Int), square x = squareSpec x := by"
2047 )
2048 );
2049 assert!(lean.contains("simp [square, squareSpec]"));
2050 assert!(!lean.contains(
2051 "-- universal theorem square_eq_squareSpec omitted: sampled law shape is not auto-proved yet"
2052 ));
2053 assert!(lean.contains("theorem square_eq_squareSpec_checked_domain :"));
2054 assert!(lean.contains("theorem square_eq_squareSpec_sample_1 :"));
2055 }
2056
2057 #[test]
2058 fn transpile_auto_proves_reflexive_law_with_rfl() {
2059 let mut ctx = empty_ctx();
2060 ctx.items.push(TopLevel::Verify(VerifyBlock {
2061 fn_name: "idLaw".to_string(),
2062 line: 1,
2063 cases: vec![(
2064 sb(Expr::Literal(Literal::Int(1))),
2065 sb(Expr::Literal(Literal::Int(1))),
2066 )],
2067 case_spans: vec![],
2068 case_givens: vec![],
2069 case_hostile_origins: vec![],
2070 case_hostile_profiles: vec![],
2071 kind: VerifyKind::Law(Box::new(VerifyLaw {
2072 name: "reflexive".to_string(),
2073 givens: vec![VerifyGiven {
2074 name: "x".to_string(),
2075 type_name: "Int".to_string(),
2076 domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
2077 }],
2078 when: None,
2079 lhs: sb(Expr::Ident("x".to_string())),
2080 rhs: sb(Expr::Ident("x".to_string())),
2081 sample_guards: vec![],
2082 })),
2083 trace: false,
2084 cases_givens: vec![],
2085 }));
2086 let out = transpile(&ctx);
2087 let lean = out
2088 .files
2089 .iter()
2090 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2091 .expect("expected generated Lean file");
2092 assert!(lean.contains("theorem idLaw_law_reflexive : ∀ (x : Int), x = x := by"));
2093 assert!(lean.contains(" intro x"));
2094 assert!(lean.contains(" rfl"));
2095 }
2096
    #[test]
    fn transpile_auto_proves_identity_law_for_int_add_wrapper() {
        // Law `add a 0 = a` over an `add` wrapper supplied by
        // `empty_ctx_with_verify_law` (presumably an Int addition wrapper —
        // confirm against that helper). Expect an auto proof via `simp [add]`.
        let mut ctx = empty_ctx_with_verify_law();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 10,
            // Single sampled case: add 1 0 => 1.
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(1))),
                        sb(Expr::Literal(Literal::Int(0))),
                    ],
                )),
                sb(Expr::Literal(Literal::Int(1))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "identityZero".to_string(),
                // Quantified variable `a`, sampled at {0, 1}.
                givens: vec![VerifyGiven {
                    name: "a".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![
                        sb(Expr::Literal(Literal::Int(0))),
                        sb(Expr::Literal(Literal::Int(1))),
                    ]),
                }],
                when: None,
                // Law shape: add a 0 => a.
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Literal(Literal::Int(0))),
                    ],
                )),
                rhs: sb(Expr::Ident("a".to_string())),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem add_law_identityZero : ∀ (a : Int), add a 0 = a := by"));
        assert!(lean.contains(" intro a"));
        assert!(lean.contains(" simp [add]"));
    }
2151
    #[test]
    fn transpile_auto_proves_associative_law_for_int_add_wrapper() {
        // Law `add (add a b) c = add a (add b c)` over the `add` wrapper from
        // `empty_ctx_with_verify_law`. Expect an auto proof that unfolds `add`
        // and applies `Int.add_assoc`.
        let mut ctx = empty_ctx_with_verify_law();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 20,
            // Single sampled case: add (add 1 2) 3 => add 1 (add 2 3).
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::FnCall(
                            sbb(Expr::Ident("add".to_string())),
                            vec![
                                sb(Expr::Literal(Literal::Int(1))),
                                sb(Expr::Literal(Literal::Int(2))),
                            ],
                        )),
                        sb(Expr::Literal(Literal::Int(3))),
                    ],
                )),
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(1))),
                        sb(Expr::FnCall(
                            sbb(Expr::Ident("add".to_string())),
                            vec![
                                sb(Expr::Literal(Literal::Int(2))),
                                sb(Expr::Literal(Literal::Int(3))),
                            ],
                        )),
                    ],
                )),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "associative".to_string(),
                // Three quantified variables, each sampled at a single point.
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            1,
                        )))]),
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            2,
                        )))]),
                    },
                    VerifyGiven {
                        name: "c".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            3,
                        )))]),
                    },
                ],
                when: None,
                // Law shape: add (add a b) c => add a (add b c).
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::FnCall(
                            sbb(Expr::Ident("add".to_string())),
                            vec![
                                sb(Expr::Ident("a".to_string())),
                                sb(Expr::Ident("b".to_string())),
                            ],
                        )),
                        sb(Expr::Ident("c".to_string())),
                    ],
                )),
                rhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::FnCall(
                            sbb(Expr::Ident("add".to_string())),
                            vec![
                                sb(Expr::Ident("b".to_string())),
                                sb(Expr::Ident("c".to_string())),
                            ],
                        )),
                    ],
                )),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains(
            "theorem add_law_associative : ∀ (a : Int) (b : Int) (c : Int), add (add a b) c = add a (add b c) := by"
        ));
        assert!(lean.contains(" intro a b c"));
        assert!(lean.contains(" simp [add, Int.add_assoc]"));
    }
2259
    #[test]
    fn transpile_auto_proves_sub_laws() {
        // Two laws over a hand-built `sub(a, b) = a - b` wrapper:
        //   rightIdentity:   sub a 0 = a            (proved via `simp [sub]`)
        //   antiCommutative: sub a b = 0 - sub b a  (proved via `Int.neg_sub`)
        let mut ctx = empty_ctx();
        let sub = FnDef {
            name: "sub".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            // Body: a - b.
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Sub,
                sbb(Expr::Ident("a".to_string())),
                sbb(Expr::Ident("b".to_string())),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(sub.clone());
        ctx.items.push(TopLevel::FnDef(sub));

        // Law 1: rightIdentity, sampled at sub 2 0 => 2.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "sub".to_string(),
            line: 10,
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(2))),
                        sb(Expr::Literal(Literal::Int(0))),
                    ],
                )),
                sb(Expr::Literal(Literal::Int(2))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "rightIdentity".to_string(),
                givens: vec![VerifyGiven {
                    name: "a".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
                }],
                when: None,
                // Law shape: sub a 0 => a.
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Literal(Literal::Int(0))),
                    ],
                )),
                rhs: sb(Expr::Ident("a".to_string())),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        // Law 2: antiCommutative, sampled at sub 2 1 => 0 - sub 1 2.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "sub".to_string(),
            line: 20,
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(2))),
                        sb(Expr::Literal(Literal::Int(1))),
                    ],
                )),
                sb(Expr::BinOp(
                    BinOp::Sub,
                    sbb(Expr::Literal(Literal::Int(0))),
                    sbb(Expr::FnCall(
                        sbb(Expr::Ident("sub".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(1))),
                            sb(Expr::Literal(Literal::Int(2))),
                        ],
                    )),
                )),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "antiCommutative".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            2,
                        )))]),
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            1,
                        )))]),
                    },
                ],
                when: None,
                // Law shape: sub a b => 0 - sub b a.
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Ident("b".to_string())),
                    ],
                )),
                rhs: sb(Expr::BinOp(
                    BinOp::Sub,
                    sbb(Expr::Literal(Literal::Int(0))),
                    sbb(Expr::FnCall(
                        sbb(Expr::Ident("sub".to_string())),
                        vec![
                            sb(Expr::Ident("b".to_string())),
                            sb(Expr::Ident("a".to_string())),
                        ],
                    )),
                )),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem sub_law_rightIdentity : ∀ (a : Int), sub a 0 = a := by"));
        assert!(lean.contains(" simp [sub]"));
        // The 0 - x shape is rendered as Lean negation on the RHS.
        assert!(lean.contains(
            "theorem sub_law_antiCommutative : ∀ (a : Int) (b : Int), sub a b = (-sub b a) := by"
        ));
        assert!(lean.contains(" simpa [sub] using (Int.neg_sub b a).symm"));
    }
2404
    #[test]
    fn transpile_auto_proves_unary_wrapper_equivalence_law() {
        // Law `addOne n = add n 1`, where both functions are thin wrappers
        // over Int addition. Expect the universal theorem to be closed by
        // unfolding both definitions with `simp [addOne, add]`.
        let mut ctx = empty_ctx();
        let add = FnDef {
            name: "add".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            // Body: a + b.
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("a".to_string())),
                sbb(Expr::Ident("b".to_string())),
            )))),
            resolution: None,
        };
        let add_one = FnDef {
            name: "addOne".to_string(),
            line: 2,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            // Body: n + 1.
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("n".to_string())),
                sbb(Expr::Literal(Literal::Int(1))),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(add.clone());
        ctx.fn_defs.push(add_one.clone());
        ctx.items.push(TopLevel::FnDef(add));
        ctx.items.push(TopLevel::FnDef(add_one));
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "addOne".to_string(),
            line: 3,
            // Single sampled case: addOne 2 => add 2 1.
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![sb(Expr::Literal(Literal::Int(2)))],
                )),
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(2))),
                        sb(Expr::Literal(Literal::Int(1))),
                    ],
                )),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "identityViaAdd".to_string(),
                givens: vec![VerifyGiven {
                    name: "n".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
                }],
                when: None,
                // Law shape: addOne n => add n 1.
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![sb(Expr::Ident("n".to_string()))],
                )),
                rhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("n".to_string())),
                        sb(Expr::Literal(Literal::Int(1))),
                    ],
                )),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(
            lean.contains(
                "theorem addOne_law_identityViaAdd : ∀ (n : Int), addOne n = add n 1 := by"
            )
        );
        assert!(lean.contains(" simp [addOne, add]"));
    }
2500
    #[test]
    fn transpile_auto_proves_direct_map_set_laws() {
        // Laws stated directly on Map builtins (no user-defined function):
        //   setHasKey: Map.has(Map.set(m, k, v), k) => true
        //   setGetKey: Map.get(Map.set(m, k, v), k) => Option.Some(v)
        // Each should be discharged with the matching AverMap lemma.
        let mut ctx = empty_ctx();

        // AST builders for `Map.<method>(...)` / `Option.Some(...)` calls;
        // sb/sbb are span-wrapping helpers defined elsewhere in this module.
        let map_set = |m: Spanned<Expr>, k: Spanned<Expr>, v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "set".to_string(),
                )),
                vec![m, k, v],
            ))
        };
        let map_has = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "has".to_string(),
                )),
                vec![m, k],
            ))
        };
        let map_get = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "get".to_string(),
                )),
                vec![m, k],
            ))
        };
        let some = |v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Option".to_string())),
                    "Some".to_string(),
                )),
                vec![v],
            ))
        };
        let map_empty = || {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "empty".to_string(),
                )),
                vec![],
            ))
        };

        // Law 1: setHasKey.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "map".to_string(),
            line: 1,
            cases: vec![(
                map_has(
                    map_set(
                        sb(Expr::Ident("m".to_string())),
                        sb(Expr::Ident("k".to_string())),
                        sb(Expr::Ident("v".to_string())),
                    ),
                    sb(Expr::Ident("k".to_string())),
                ),
                sb(Expr::Literal(Literal::Bool(true))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "setHasKey".to_string(),
                // m = Map.empty(), k = "a", v = 1.
                givens: vec![
                    VerifyGiven {
                        name: "m".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                    },
                    VerifyGiven {
                        name: "k".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
                            "a".to_string(),
                        )))]),
                    },
                    VerifyGiven {
                        name: "v".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            1,
                        )))]),
                    },
                ],
                when: None,
                lhs: map_has(
                    map_set(
                        sb(Expr::Ident("m".to_string())),
                        sb(Expr::Ident("k".to_string())),
                        sb(Expr::Ident("v".to_string())),
                    ),
                    sb(Expr::Ident("k".to_string())),
                ),
                rhs: sb(Expr::Literal(Literal::Bool(true))),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        // Law 2: setGetKey.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "map".to_string(),
            line: 2,
            cases: vec![(
                map_get(
                    map_set(
                        sb(Expr::Ident("m".to_string())),
                        sb(Expr::Ident("k".to_string())),
                        sb(Expr::Ident("v".to_string())),
                    ),
                    sb(Expr::Ident("k".to_string())),
                ),
                some(sb(Expr::Ident("v".to_string()))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "setGetKey".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "m".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                    },
                    VerifyGiven {
                        name: "k".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
                            "a".to_string(),
                        )))]),
                    },
                    VerifyGiven {
                        name: "v".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            1,
                        )))]),
                    },
                ],
                when: None,
                lhs: map_get(
                    map_set(
                        sb(Expr::Ident("m".to_string())),
                        sb(Expr::Ident("k".to_string())),
                        sb(Expr::Ident("v".to_string())),
                    ),
                    sb(Expr::Ident("k".to_string())),
                ),
                rhs: some(sb(Expr::Ident("v".to_string()))),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("simpa using AverMap.has_set_self m k v"));
        assert!(lean.contains("simpa using AverMap.get_set_self m k v"));
    }
2674
2675 #[test]
2676 fn transpile_auto_proves_direct_recursive_sum_law_by_structural_induction() {
2677 let ctx = ctx_from_source(
2678 r#"
2679module Mirror
2680 intent =
2681 "direct recursive sum induction probe"
2682
2683type Tree
2684 Leaf(Int)
2685 Node(Tree, Tree)
2686
2687fn mirror(t: Tree) -> Tree
2688 match t
2689 Tree.Leaf(v) -> Tree.Leaf(v)
2690 Tree.Node(left, right) -> Tree.Node(mirror(right), mirror(left))
2691
2692verify mirror law involutive
2693 given t: Tree = [Tree.Leaf(1), Tree.Node(Tree.Leaf(1), Tree.Leaf(2))]
2694 mirror(mirror(t)) => t
2695"#,
2696 "mirror",
2697 );
2698 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
2699 let lean = generated_lean_file(&out);
2700
2701 assert!(
2702 lean.contains(
2703 "theorem mirror_law_involutive : ∀ (t : Tree), mirror (mirror t) = t := by"
2704 )
2705 );
2706 assert!(lean.contains(" induction t with"));
2707 assert!(lean.contains(" | leaf f0 => simp [mirror]"));
2708 assert!(lean.contains(" | node f0 f1 ih0 ih1 => simp_all [mirror]"));
2709 assert!(!lean.contains(
2710 "-- universal theorem mirror_law_involutive omitted: sampled law shape is not auto-proved yet"
2711 ));
2712 }
2713
    #[test]
    fn transpile_auto_proves_map_update_laws() {
        // Laws over `incCount`, a function that reads a counter map and
        // writes back an incremented (or fresh) entry:
        //   keyPresent:             Map.has(incCount(counts, word), word) => true
        //   existingKeyIncrements:  Map.get(incCount(counts, "a"), "a")
        //                             => Option.Some(addOne(Option.withDefault(Map.get(counts, "a"), 0)))
        // Both should be auto-proved by case-splitting on the original lookup
        // and applying the AverMap set/get lemmas.
        let mut ctx = empty_ctx();

        // AST builders for builtin Map/Option calls; sb/sbb are span-wrapping
        // helpers defined elsewhere in this module.
        let map_get = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "get".to_string(),
                )),
                vec![m, k],
            ))
        };
        let map_set = |m: Spanned<Expr>, k: Spanned<Expr>, v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "set".to_string(),
                )),
                vec![m, k, v],
            ))
        };
        let map_has = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "has".to_string(),
                )),
                vec![m, k],
            ))
        };
        let option_some = |v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Option".to_string())),
                    "Some".to_string(),
                )),
                vec![v],
            ))
        };
        let option_with_default = |opt: Spanned<Expr>, def: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Option".to_string())),
                    "withDefault".to_string(),
                )),
                vec![opt, def],
            ))
        };
        let map_empty = || {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "empty".to_string(),
                )),
                vec![],
            ))
        };

        // Helper fn: addOne(n) = n + 1 (referenced by the second law's RHS).
        let add_one = FnDef {
            name: "addOne".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("n".to_string())),
                sbb(Expr::Literal(Literal::Int(1))),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(add_one.clone());
        ctx.items.push(TopLevel::FnDef(add_one));

        // incCount(counts, word):
        //   let current = Map.get(counts, word)
        //   match current
        //     Option.Some(n) -> Map.set(counts, word, n + 1)
        //     Option.None    -> Map.set(counts, word, 1)
        let inc_count = FnDef {
            name: "incCount".to_string(),
            line: 2,
            params: vec![
                ("counts".to_string(), "Map<String, Int>".to_string()),
                ("word".to_string(), "String".to_string()),
            ],
            return_type: "Map<String, Int>".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::Block(vec![
                Stmt::Binding(
                    "current".to_string(),
                    None,
                    map_get(
                        sb(Expr::Ident("counts".to_string())),
                        sb(Expr::Ident("word".to_string())),
                    ),
                ),
                Stmt::Expr(sb(Expr::Match {
                    subject: sbb(Expr::Ident("current".to_string())),
                    arms: vec![
                        MatchArm {
                            pattern: Pattern::Constructor(
                                "Option.Some".to_string(),
                                vec!["n".to_string()],
                            ),
                            body: Box::new(map_set(
                                sb(Expr::Ident("counts".to_string())),
                                sb(Expr::Ident("word".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("n".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            )),
                        },
                        MatchArm {
                            pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
                            body: Box::new(map_set(
                                sb(Expr::Ident("counts".to_string())),
                                sb(Expr::Ident("word".to_string())),
                                sb(Expr::Literal(Literal::Int(1))),
                            )),
                        },
                    ],
                })),
            ])),
            resolution: None,
        };
        ctx.fn_defs.push(inc_count.clone());
        ctx.items.push(TopLevel::FnDef(inc_count));

        // Law 1: keyPresent.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "incCount".to_string(),
            line: 10,
            cases: vec![(
                map_has(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Ident("word".to_string())),
                        ],
                    )),
                    sb(Expr::Ident("word".to_string())),
                ),
                sb(Expr::Literal(Literal::Bool(true))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "keyPresent".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "counts".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                    },
                    VerifyGiven {
                        name: "word".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
                            "a".to_string(),
                        )))]),
                    },
                ],
                when: None,
                lhs: map_has(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Ident("word".to_string())),
                        ],
                    )),
                    sb(Expr::Ident("word".to_string())),
                ),
                rhs: sb(Expr::Literal(Literal::Bool(true))),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        // Law 2: existingKeyIncrements (key fixed to the literal "a").
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "incCount".to_string(),
            line: 20,
            cases: vec![(
                map_get(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ],
                    )),
                    sb(Expr::Literal(Literal::Str("a".to_string()))),
                ),
                option_some(sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![option_with_default(
                        map_get(
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ),
                        sb(Expr::Literal(Literal::Int(0))),
                    )],
                ))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "existingKeyIncrements".to_string(),
                givens: vec![VerifyGiven {
                    name: "counts".to_string(),
                    type_name: "Map<String, Int>".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                }],
                when: None,
                lhs: map_get(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ],
                    )),
                    sb(Expr::Literal(Literal::Str("a".to_string()))),
                ),
                rhs: option_some(sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![option_with_default(
                        map_get(
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ),
                        sb(Expr::Literal(Literal::Int(0))),
                    )],
                ))),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        // Both proofs split on the original lookup, then rewrite with the
        // AverMap set/get lemmas.
        assert!(
            lean.contains("cases h : AverMap.get counts word <;> simp [AverMap.has_set_self]"),
            "expected keyPresent auto-proof with has_set_self"
        );
        assert!(
            lean.contains("cases h : AverMap.get counts \"a\" <;> simp [AverMap.get_set_self, addOne, incCount]"),
            "expected existingKeyIncrements auto-proof with get_set_self"
        );
    }
2975
    #[test]
    fn transpile_parenthesizes_negative_int_call_args_in_law_samples() {
        // Negative integer literals used as call arguments in law sample
        // theorems must be parenthesized — `add (-2) (-1)`, not `add -2 -1`,
        // which Lean would parse as subtraction.
        let mut ctx = empty_ctx();
        let add = FnDef {
            name: "add".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            // Body: a + b.
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("a".to_string())),
                sbb(Expr::Ident("b".to_string())),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(add.clone());
        ctx.items.push(TopLevel::FnDef(add));
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 1,
            // Sampled case with negative arguments: add (-2) (-1) => add (-1) (-2).
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(-2))),
                        sb(Expr::Literal(Literal::Int(-1))),
                    ],
                )),
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(-1))),
                        sb(Expr::Literal(Literal::Int(-2))),
                    ],
                )),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "commutative".to_string(),
                // a = -2, b = -1.
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            -2,
                        )))]),
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            -1,
                        )))]),
                    },
                ],
                when: None,
                // Law shape: add a b => add b a.
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Ident("b".to_string())),
                    ],
                )),
                rhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("b".to_string())),
                        sb(Expr::Ident("a".to_string())),
                    ],
                )),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains(
            "theorem add_law_commutative_sample_1 : add (-2) (-1) = add (-1) (-2) := by native_decide"
        ));
    }
3070
    // Law theorem numbering must be scoped per law name: a preceding plain
    // `verify ... cases` block on the same function must not bump the sample
    // counter of a later law block (i.e. we must see `_sample_1`, never
    // `_sample_2`, for the law's only sample).
    #[test]
    fn verify_law_numbering_is_scoped_per_law_name() {
        let mut ctx = empty_ctx();
        // `f` is the identity function on Int: fn f(x: Int) -> Int = x.
        let f = FnDef {
            name: "f".to_string(),
            line: 1,
            params: vec![("x".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Ident("x".to_string())))),
            resolution: None,
        };
        ctx.fn_defs.push(f.clone());
        ctx.items.push(TopLevel::FnDef(f));
        // First verify block: plain cases (f(1) => 1) — emits `f_verify_1`.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 1,
            cases: vec![(
                sb(Expr::Literal(Literal::Int(1))),
                sb(Expr::Literal(Literal::Int(1))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Cases,
            trace: false,
            cases_givens: vec![],
        }));
        // Second verify block: law `identity` with one explicit sample (x = 2).
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 2,
            cases: vec![(
                sb(Expr::Literal(Literal::Int(2))),
                sb(Expr::Literal(Literal::Int(2))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "identity".to_string(),
                givens: vec![VerifyGiven {
                    name: "x".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
                }],
                when: None,
                lhs: sb(Expr::Ident("x".to_string())),
                rhs: sb(Expr::Ident("x".to_string())),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        // The cases block and the law block number independently.
        assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
        assert!(lean.contains("theorem f_law_identity : ∀ (x : Int), x = x := by"));
        assert!(lean.contains("theorem f_law_identity_sample_1 : 2 = 2 := by"));
        assert!(!lean.contains("theorem f_law_identity_sample_2 : 2 = 2 := by"));
    }
3138
3139 #[test]
3140 fn proof_mode_accepts_single_int_countdown_recursion() {
3141 let mut ctx = empty_ctx();
3142 let down = FnDef {
3143 name: "down".to_string(),
3144 line: 1,
3145 params: vec![("n".to_string(), "Int".to_string())],
3146 return_type: "Int".to_string(),
3147 effects: vec![],
3148 desc: None,
3149 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
3150 subject: sbb(Expr::Ident("n".to_string())),
3151 arms: vec![
3152 MatchArm {
3153 pattern: Pattern::Literal(Literal::Int(0)),
3154 body: sbb(Expr::Literal(Literal::Int(0))),
3155 },
3156 MatchArm {
3157 pattern: Pattern::Wildcard,
3158 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
3159 "down".to_string(),
3160 vec![sb(Expr::BinOp(
3161 BinOp::Sub,
3162 sbb(Expr::Ident("n".to_string())),
3163 sbb(Expr::Literal(Literal::Int(1))),
3164 ))],
3165 )))),
3166 },
3167 ],
3168 }))),
3169 resolution: None,
3170 };
3171 ctx.items.push(TopLevel::FnDef(down.clone()));
3172 ctx.fn_defs.push(down);
3173
3174 let issues = proof_mode_issues(&ctx);
3175 assert!(
3176 issues.is_empty(),
3177 "expected Int countdown recursion to be accepted, got: {:?}",
3178 issues
3179 );
3180
3181 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3182 let lean = out
3183 .files
3184 .iter()
3185 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3186 .expect("expected generated Lean file");
3187 assert!(lean.contains("def down__fuel"));
3188 assert!(lean.contains("def down (n : Int) : Int :="));
3189 assert!(lean.contains("down__fuel ((Int.natAbs n) + 1) n"));
3190 }
3191
3192 #[test]
3193 fn proof_mode_accepts_single_int_countdown_on_nonfirst_param() {
3194 let mut ctx = empty_ctx();
3195 let repeat_like = FnDef {
3196 name: "repeatLike".to_string(),
3197 line: 1,
3198 params: vec![
3199 ("char".to_string(), "String".to_string()),
3200 ("n".to_string(), "Int".to_string()),
3201 ],
3202 return_type: "List<String>".to_string(),
3203 effects: vec![],
3204 desc: None,
3205 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
3206 subject: sbb(Expr::BinOp(
3207 BinOp::Lte,
3208 sbb(Expr::Ident("n".to_string())),
3209 sbb(Expr::Literal(Literal::Int(0))),
3210 )),
3211 arms: vec![
3212 MatchArm {
3213 pattern: Pattern::Literal(Literal::Bool(true)),
3214 body: sbb(Expr::List(vec![])),
3215 },
3216 MatchArm {
3217 pattern: Pattern::Literal(Literal::Bool(false)),
3218 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
3219 "repeatLike".to_string(),
3220 vec![
3221 sb(Expr::Ident("char".to_string())),
3222 sb(Expr::BinOp(
3223 BinOp::Sub,
3224 sbb(Expr::Ident("n".to_string())),
3225 sbb(Expr::Literal(Literal::Int(1))),
3226 )),
3227 ],
3228 )))),
3229 },
3230 ],
3231 }))),
3232 resolution: None,
3233 };
3234 ctx.items.push(TopLevel::FnDef(repeat_like.clone()));
3235 ctx.fn_defs.push(repeat_like);
3236
3237 let issues = proof_mode_issues(&ctx);
3238 assert!(
3239 issues.is_empty(),
3240 "expected non-first Int countdown recursion to be accepted, got: {:?}",
3241 issues
3242 );
3243
3244 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3245 let lean = out
3246 .files
3247 .iter()
3248 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3249 .expect("expected generated Lean file");
3250 assert!(lean.contains("def repeatLike__fuel"));
3251 assert!(lean.contains("def repeatLike (char : String) (n : Int) : List String :="));
3252 assert!(lean.contains("repeatLike__fuel ((Int.natAbs n) + 1) char n"));
3253 }
3254
3255 #[test]
3256 fn proof_mode_accepts_negative_guarded_int_ascent() {
3257 let mut ctx = empty_ctx();
3258 let normalize = FnDef {
3259 name: "normalize".to_string(),
3260 line: 1,
3261 params: vec![("angle".to_string(), "Int".to_string())],
3262 return_type: "Int".to_string(),
3263 effects: vec![],
3264 desc: None,
3265 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
3266 subject: sbb(Expr::BinOp(
3267 BinOp::Lt,
3268 sbb(Expr::Ident("angle".to_string())),
3269 sbb(Expr::Literal(Literal::Int(0))),
3270 )),
3271 arms: vec![
3272 MatchArm {
3273 pattern: Pattern::Literal(Literal::Bool(true)),
3274 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
3275 "normalize".to_string(),
3276 vec![sb(Expr::BinOp(
3277 BinOp::Add,
3278 sbb(Expr::Ident("angle".to_string())),
3279 sbb(Expr::Literal(Literal::Int(360))),
3280 ))],
3281 )))),
3282 },
3283 MatchArm {
3284 pattern: Pattern::Literal(Literal::Bool(false)),
3285 body: sbb(Expr::Ident("angle".to_string())),
3286 },
3287 ],
3288 }))),
3289 resolution: None,
3290 };
3291 ctx.items.push(TopLevel::FnDef(normalize.clone()));
3292 ctx.fn_defs.push(normalize);
3293
3294 let issues = proof_mode_issues(&ctx);
3295 assert!(
3296 issues.is_empty(),
3297 "expected negative-guarded Int ascent recursion to be accepted, got: {:?}",
3298 issues
3299 );
3300
3301 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3302 let lean = out
3303 .files
3304 .iter()
3305 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3306 .expect("expected generated Lean file");
3307 assert!(lean.contains("def normalize__fuel"));
3308 assert!(lean.contains("normalize__fuel ((Int.natAbs angle) + 1) angle"));
3309 }
3310
3311 #[test]
3312 fn proof_mode_accepts_single_list_structural_recursion() {
3313 let mut ctx = empty_ctx();
3314 let len = FnDef {
3315 name: "len".to_string(),
3316 line: 1,
3317 params: vec![("xs".to_string(), "List<Int>".to_string())],
3318 return_type: "Int".to_string(),
3319 effects: vec![],
3320 desc: None,
3321 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
3322 subject: sbb(Expr::Ident("xs".to_string())),
3323 arms: vec![
3324 MatchArm {
3325 pattern: Pattern::EmptyList,
3326 body: sbb(Expr::Literal(Literal::Int(0))),
3327 },
3328 MatchArm {
3329 pattern: Pattern::Cons("h".to_string(), "t".to_string()),
3330 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
3331 "len".to_string(),
3332 vec![sb(Expr::Ident("t".to_string()))],
3333 )))),
3334 },
3335 ],
3336 }))),
3337 resolution: None,
3338 };
3339 ctx.items.push(TopLevel::FnDef(len.clone()));
3340 ctx.fn_defs.push(len);
3341
3342 let issues = proof_mode_issues(&ctx);
3343 assert!(
3344 issues.is_empty(),
3345 "expected List structural recursion to be accepted, got: {:?}",
3346 issues
3347 );
3348 }
3349
    // Structural recursion on a List parameter that is not the first param
    // (the decreasing `xs` is second, behind an accumulator) must still be
    // accepted, with a length-based termination measure and no `partial def`.
    #[test]
    fn proof_mode_accepts_single_list_structural_recursion_on_nonfirst_param() {
        let mut ctx = empty_ctx();
        // fn lenFrom(count: Int, xs: List<Int>) -> Int =
        //   match xs { [] -> count, h :: t -> lenFrom(count + 1, t) }
        let len_from = FnDef {
            name: "lenFrom".to_string(),
            line: 1,
            params: vec![
                ("count".to_string(), "Int".to_string()),
                ("xs".to_string(), "List<Int>".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("xs".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::EmptyList,
                        body: sbb(Expr::Ident("count".to_string())),
                    },
                    MatchArm {
                        pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "lenFrom".to_string(),
                            vec![
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("count".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                                sb(Expr::Ident("t".to_string())),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(len_from.clone()));
        ctx.fn_defs.push(len_from);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected non-first List structural recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // Structural descent on `xs` ⇒ explicit termination measure, not fuel.
        assert!(lean.contains("termination_by xs.length"));
        assert!(!lean.contains("partial def lenFrom"));
    }
3403
    // String-scanning recursion: `pos` advances, with the base case hit when
    // String.charAt yields Option.None. Proof mode must accept this shape and
    // fuelize it rather than mark it `partial`.
    #[test]
    fn proof_mode_accepts_single_string_pos_advance_recursion() {
        let mut ctx = empty_ctx();
        // fn skipWs(s: String, pos: Int) -> Int =
        //   match String.charAt(s, pos) { Option.None -> pos, _ -> skipWs(s, pos + 1) }
        let skip_ws = FnDef {
            name: "skipWs".to_string(),
            line: 1,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::FnCall(
                    sbb(Expr::Attr(
                        sbb(Expr::Ident("String".to_string())),
                        "charAt".to_string(),
                    )),
                    vec![
                        sb(Expr::Ident("s".to_string())),
                        sb(Expr::Ident("pos".to_string())),
                    ],
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
                        body: sbb(Expr::Ident("pos".to_string())),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "skipWs".to_string(),
                            vec![
                                sb(Expr::Ident("s".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("pos".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(skip_ws.clone()));
        ctx.fn_defs.push(skip_ws);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected String+pos recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("def skipWs__fuel"));
        assert!(!lean.contains("partial def skipWs"));
    }
3466
    // Mutual recursion where both functions count the same Int parameter down
    // (even ↔ odd) must be accepted and fuelized as a group.
    #[test]
    fn proof_mode_accepts_mutual_int_countdown_recursion() {
        let mut ctx = empty_ctx();
        // fn even(n: Int) -> Bool = match n { 0 -> true, _ -> odd(n - 1) }
        let even = FnDef {
            name: "even".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Bool".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: sbb(Expr::Literal(Literal::Bool(true))),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "odd".to_string(),
                            vec![sb(Expr::BinOp(
                                BinOp::Sub,
                                sbb(Expr::Ident("n".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            ))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        // fn odd(n: Int) -> Bool = match n { 0 -> false, _ -> even(n - 1) }
        let odd = FnDef {
            name: "odd".to_string(),
            line: 2,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Bool".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: sbb(Expr::Literal(Literal::Bool(false))),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "even".to_string(),
                            vec![sb(Expr::BinOp(
                                BinOp::Sub,
                                sbb(Expr::Ident("n".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            ))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(even.clone()));
        ctx.items.push(TopLevel::FnDef(odd.clone()));
        ctx.fn_defs.push(even);
        ctx.fn_defs.push(odd);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual Int countdown recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // Both members of the cycle get fuel variants plus total wrappers.
        assert!(lean.contains("def even__fuel"));
        assert!(lean.contains("def odd__fuel"));
        assert!(lean.contains("def even (n : Int) : Bool :="));
        assert!(lean.contains("even__fuel ((Int.natAbs n) + 1) n"));
    }
3547
    // Mutual recursion over (String, pos): f forwards to g with `pos`
    // unchanged (a "same" edge) while g calls back into f with `pos + 1`.
    // The ranked same-edge analysis must accept this cycle and fuelize both.
    #[test]
    fn proof_mode_accepts_mutual_string_pos_recursion_with_ranked_same_edges() {
        let mut ctx = empty_ctx();
        // fn f(s: String, pos: Int) -> Int =
        //   match pos >= 3 { true -> pos, _ -> g(s, pos) }
        let f = FnDef {
            name: "f".to_string(),
            line: 1,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::BinOp(
                    BinOp::Gte,
                    sbb(Expr::Ident("pos".to_string())),
                    sbb(Expr::Literal(Literal::Int(3))),
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        body: sbb(Expr::Ident("pos".to_string())),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "g".to_string(),
                            vec![
                                sb(Expr::Ident("s".to_string())),
                                sb(Expr::Ident("pos".to_string())),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        // fn g(s: String, pos: Int) -> Int =
        //   match pos >= 3 { true -> pos, _ -> f(s, pos + 1) }
        let g = FnDef {
            name: "g".to_string(),
            line: 2,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::BinOp(
                    BinOp::Gte,
                    sbb(Expr::Ident("pos".to_string())),
                    sbb(Expr::Literal(Literal::Int(3))),
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        body: sbb(Expr::Ident("pos".to_string())),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "f".to_string(),
                            vec![
                                sb(Expr::Ident("s".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("pos".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(f.clone()));
        ctx.items.push(TopLevel::FnDef(g.clone()));
        ctx.fn_defs.push(f);
        ctx.fn_defs.push(g);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual String+pos recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("def f__fuel"));
        assert!(lean.contains("def g__fuel"));
        assert!(!lean.contains("partial def f"));
    }
3643
    // Mutual recursion where f immediately forwards its list to g (same size)
    // and g recurses into f on the tail must be accepted via the ranked
    // sizeOf measure, emitting a `mutual` group of total fuel definitions.
    #[test]
    fn proof_mode_accepts_mutual_ranked_sizeof_recursion() {
        let mut ctx = empty_ctx();
        // fn f(xs: List<Int>) -> Int = g("acc", xs)
        let f = FnDef {
            name: "f".to_string(),
            line: 1,
            params: vec![("xs".to_string(), "List<Int>".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::TailCall(Box::new(
                TailCallData::new(
                    "g".to_string(),
                    vec![
                        sb(Expr::Literal(Literal::Str("acc".to_string()))),
                        sb(Expr::Ident("xs".to_string())),
                    ],
                ),
            ))))),
            resolution: None,
        };
        // fn g(acc: String, xs: List<Int>) -> Int =
        //   match xs { [] -> 0, h :: t -> f(t) }
        let g = FnDef {
            name: "g".to_string(),
            line: 2,
            params: vec![
                ("acc".to_string(), "String".to_string()),
                ("xs".to_string(), "List<Int>".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("xs".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::EmptyList,
                        body: sbb(Expr::Literal(Literal::Int(0))),
                    },
                    MatchArm {
                        pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "f".to_string(),
                            vec![sb(Expr::Ident("t".to_string()))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(f.clone()));
        ctx.items.push(TopLevel::FnDef(g.clone()));
        ctx.fn_defs.push(f);
        ctx.fn_defs.push(g);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual ranked-sizeOf recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("mutual"));
        assert!(lean.contains("def f__fuel"));
        assert!(lean.contains("def g__fuel"));
        assert!(!lean.contains("partial def f"));
        assert!(!lean.contains("partial def g"));
    }
3713
3714 #[test]
3715 fn proof_mode_rejects_recursive_pure_functions() {
3716 let mut ctx = empty_ctx();
3717 let recursive_fn = FnDef {
3718 name: "loop".to_string(),
3719 line: 1,
3720 params: vec![("n".to_string(), "Int".to_string())],
3721 return_type: "Int".to_string(),
3722 effects: vec![],
3723 desc: None,
3724 body: Rc::new(FnBody::from_expr(sb(Expr::FnCall(
3725 sbb(Expr::Ident("loop".to_string())),
3726 vec![sb(Expr::Ident("n".to_string()))],
3727 )))),
3728 resolution: None,
3729 };
3730 ctx.items.push(TopLevel::FnDef(recursive_fn.clone()));
3731 ctx.fn_defs.push(recursive_fn);
3732
3733 let issues = proof_mode_issues(&ctx);
3734 assert!(
3735 issues.iter().any(|i| i.contains("outside proof subset")),
3736 "expected recursive function blocker, got: {:?}",
3737 issues
3738 );
3739 }
3740
3741 #[test]
3742 fn proof_mode_allows_recursive_types() {
3743 let mut ctx = empty_ctx();
3744 let recursive_type = TypeDef::Sum {
3745 name: "Node".to_string(),
3746 variants: vec![TypeVariant {
3747 name: "Cons".to_string(),
3748 fields: vec!["Node".to_string()],
3749 }],
3750 line: 1,
3751 };
3752 ctx.items.push(TopLevel::TypeDef(recursive_type.clone()));
3753 ctx.type_defs.push(recursive_type);
3754
3755 let issues = proof_mode_issues(&ctx);
3756 assert!(
3757 issues
3758 .iter()
3759 .all(|i| !i.contains("recursive types require unsafe DecidableEq shim")),
3760 "did not expect recursive type blocker, got: {:?}",
3761 issues
3762 );
3763 }
3764
3765 #[test]
3766 fn law_auto_example_exports_real_proof_artifacts() {
3767 let ctx = ctx_from_source(
3768 include_str!("../../../examples/formal/law_auto.av"),
3769 "law_auto",
3770 );
3771 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3772 let lean = generated_lean_file(&out);
3773
3774 assert!(lean.contains("theorem add_law_commutative :"));
3775 assert!(lean.contains("theorem id'_law_reflexive : ∀ (x : Int), x = x := by"));
3776 assert!(lean.contains("theorem incCount_law_keyPresent :"));
3777 assert!(lean.contains("AverMap.has_set_self"));
3778 assert!(lean.contains("theorem add_law_commutative_sample_1 :"));
3779 assert!(lean.contains(":= by native_decide"));
3780 }
3781
3782 #[test]
3783 fn json_example_stays_inside_proof_subset() {
3784 let ctx = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
3785 let issues = proof_mode_issues(&ctx);
3786 assert!(
3787 issues.is_empty(),
3788 "expected json example to stay inside proof subset, got: {:?}",
3789 issues
3790 );
3791 }
3792
    // In proof mode the json example must be fully total (fuelized parsers,
    // a synthesized sizeOf-style measure for nested objects) and its laws
    // domain-guarded: each universal roundtrip theorem carries the
    // jsonRoundtripSafe hypothesis plus the jsonNull disjunct, with proved
    // samples alongside.
    #[test]
    fn json_example_uses_total_defs_and_domain_guarded_laws_in_proof_mode() {
        let ctx = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        // Totality: no partial defs; recursive parsers go through fuel.
        assert!(!lean.contains("partial def"));
        assert!(lean.contains("def skipWs__fuel"));
        assert!(lean.contains("def parseValue__fuel"));
        assert!(lean.contains("def toString' (j : Json) : String :="));
        // Nested-object recursion uses the generated entry-count measure.
        assert!(
            lean.contains(
                "def averMeasureJsonEntries_String (items : List (String × Json)) : Nat :="
            )
        );
        assert!(lean.contains(
            "| .jsonObject x0 => (averMeasureJsonEntries_String (AverMap.entries x0)) + 1"
        ));
        // The domain guard survives as a comment; no hint/aux leftovers.
        assert!(lean.contains("-- when jsonRoundtripSafe j"));
        assert!(!lean.contains("-- hint: verify law '"));
        assert!(!lean.contains("private theorem toString'_law_parseRoundtrip_aux"));
        assert!(
            lean.contains(
                "theorem toString'_law_parseRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
            )
        );
        assert!(lean.contains(
            "jsonRoundtripSafe j = true -> fromString (toString' j) = Except.ok j := by"
        ));
        assert!(
            lean.contains("theorem finishFloat_law_fromCanonicalFloat : ∀ (f : Float), f = 3.5 ∨")
        );
        assert!(lean.contains("theorem finishInt_law_fromCanonicalInt_checked_domain :"));
        assert!(lean.contains(
            "theorem toString'_law_parseValueRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
        ));
        assert!(lean.contains("theorem toString'_law_parseRoundtrip_sample_1 :"));
        assert!(lean.contains(
            "example : fromString \"null\" = Except.ok Json.jsonNull := by native_decide"
        ));
    }
3834
    // Using network builtins (HttpResponse/HttpRequest/Tcp.Connection) and
    // Vector.get in Aver source must make the transpiler inject the matching
    // Lean structure definitions and support types into the generated file.
    #[test]
    fn transpile_injects_builtin_network_types_and_vector_get_support() {
        let ctx = ctx_from_source(
            r#"
fn firstOrMissing(xs: Vector<String>) -> Result<String, String>
  Option.toResult(Vector.get(xs, 0), "missing")

fn defaultHeaders() -> Map<String, List<String>>
  {"content-type" => ["application/json"]}

fn mkResponse(body: String) -> HttpResponse
  HttpResponse(status = 200, body = body, headers = defaultHeaders())

fn requestPath(req: HttpRequest) -> String
  req.path

fn connPort(conn: Tcp.Connection) -> Int
  conn.port
"#,
            "network_helpers",
        );
        let out = transpile(&ctx);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("structure HttpResponse where"));
        assert!(lean.contains("structure HttpRequest where"));
        // The dotted Aver name Tcp.Connection flattens to Tcp_Connection.
        assert!(lean.contains("structure Tcp_Connection where"));
        assert!(lean.contains("port : Int"));
        // The headers Map is represented as (String × List String) pairs.
        assert!(lean.contains("List (String × List String)"));
    }
3866
3867 #[test]
3868 fn law_auto_example_has_no_sorry_in_proof_mode() {
3869 let ctx = ctx_from_source(
3870 include_str!("../../../examples/formal/law_auto.av"),
3871 "law_auto",
3872 );
3873 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3874 let lean = generated_lean_file(&out);
3875 assert!(
3876 !lean.contains("sorry"),
3877 "expected law_auto proof export to avoid sorry, got:\n{}",
3878 lean
3879 );
3880 }
3881
3882 #[test]
3883 fn map_example_has_no_sorry_in_proof_mode() {
3884 let ctx = ctx_from_source(include_str!("../../../examples/data/map.av"), "map");
3885 let issues = proof_mode_issues(&ctx);
3886 assert!(
3887 issues.is_empty(),
3888 "expected map example to stay inside proof subset, got: {:?}",
3889 issues
3890 );
3891
3892 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3893 let lean = generated_lean_file(&out);
3894 assert!(lean.contains("theorem incCount_law_trackedCountStepsByOne :"));
3896 assert!(lean.contains("sorry"));
3897 assert!(lean.contains("theorem countWords_law_presenceMatchesContains_sample_1 :"));
3899 assert!(lean.contains("theorem countWords_law_trackedWordCount_sample_1 :"));
3900 assert!(lean.contains("AverMap.has_set_self"));
3901 assert!(lean.contains("AverMap.get_set_self"));
3902 }
3903
3904 #[test]
3905 fn spec_laws_example_has_no_sorry_in_proof_mode() {
3906 let ctx = ctx_from_source(
3907 include_str!("../../../examples/formal/spec_laws.av"),
3908 "spec_laws",
3909 );
3910 let issues = proof_mode_issues(&ctx);
3911 assert!(
3912 issues.is_empty(),
3913 "expected spec_laws example to stay inside proof subset, got: {:?}",
3914 issues
3915 );
3916
3917 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3918 let lean = generated_lean_file(&out);
3919 assert!(
3920 !lean.contains("sorry"),
3921 "expected spec_laws proof export to avoid sorry, got:\n{}",
3922 lean
3923 );
3924 assert!(lean.contains("theorem absVal_eq_absValSpec :"));
3925 assert!(lean.contains("theorem clampNonNegative_eq_clampNonNegativeSpec :"));
3926 }
3927
3928 #[test]
3929 fn rle_example_exports_sampled_roundtrip_laws_without_sorry() {
3930 let ctx = ctx_from_source(include_str!("../../../examples/data/rle.av"), "rle");
3931 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3932 let lean = generated_lean_file(&out);
3933
3934 assert!(
3935 lean.contains("sorry"),
3936 "expected rle proof export to contain sorry for unproved universal theorems"
3937 );
3938 assert!(lean.contains(
3939 "theorem encode_law_roundtrip_sample_1 : decode (encode []) = [] := by native_decide"
3940 ));
3941 assert!(lean.contains(
3942 "theorem encodeString_law_string_roundtrip_sample_1 : decodeString (encodeString \"\") = \"\" := by native_decide"
3943 ));
3944 }
3945
3946 #[test]
3947 fn fibonacci_example_uses_fuelized_int_countdown_in_proof_mode() {
3948 let ctx = ctx_from_source(
3949 include_str!("../../../examples/data/fibonacci.av"),
3950 "fibonacci",
3951 );
3952 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3953 let lean = generated_lean_file(&out);
3954
3955 assert!(lean.contains("def fibTR__fuel"));
3956 assert!(lean.contains("def fibTR (n : Int) (a : Int) (b : Int) : Int :="));
3957 assert!(lean.contains("fibTR__fuel ((Int.natAbs n) + 1) n a b"));
3958 assert!(!lean.contains("partial def fibTR"));
3959 }
3960
3961 #[test]
3962 fn fibonacci_example_stays_inside_proof_subset() {
3963 let ctx = ctx_from_source(
3964 include_str!("../../../examples/data/fibonacci.av"),
3965 "fibonacci",
3966 );
3967 let issues = proof_mode_issues(&ctx);
3968 assert!(
3969 issues.is_empty(),
3970 "expected fibonacci example to stay inside proof subset, got: {:?}",
3971 issues
3972 );
3973 }
3974
3975 #[test]
3976 fn fibonacci_example_matches_general_linear_recurrence_shapes() {
3977 let ctx = ctx_from_source(
3978 include_str!("../../../examples/data/fibonacci.av"),
3979 "fibonacci",
3980 );
3981 let fib = ctx.fn_defs.iter().find(|fd| fd.name == "fib").unwrap();
3982 let fib_tr = ctx.fn_defs.iter().find(|fd| fd.name == "fibTR").unwrap();
3983 let fib_spec = ctx.fn_defs.iter().find(|fd| fd.name == "fibSpec").unwrap();
3984
3985 assert!(recurrence::detect_tailrec_int_linear_pair_wrapper(fib).is_some());
3986 assert!(recurrence::detect_tailrec_int_linear_pair_worker(fib_tr).is_some());
3987 assert!(recurrence::detect_second_order_int_linear_recurrence(fib_spec).is_some());
3988 }
3989
    // The fib/fibTR/fibSpec trio must be auto-proved as a linear recurrence:
    // the spec is re-expressed over Nat, private helper lemmas bridge the Int
    // worker to the Nat spec, and the universal equivalence theorem is
    // emitted — never the "omitted" fallback comment.
    #[test]
    fn fibonacci_example_auto_proves_general_linear_recurrence_spec_law() {
        let ctx = ctx_from_source(
            include_str!("../../../examples/data/fibonacci.av"),
            "fibonacci",
        );
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("private def fibSpec__nat : Nat -> Int"));
        assert!(!lean.contains("partial def fibSpec"));
        assert!(lean.contains("private theorem fib_eq_fibSpec__worker_nat_shift"));
        assert!(lean.contains("private theorem fib_eq_fibSpec__helper_nat"));
        assert!(lean.contains("private theorem fib_eq_fibSpec__helper_seed"));
        assert!(lean.contains("theorem fib_eq_fibSpec : ∀ (n : Int), fib n = fibSpec n := by"));
        assert!(!lean.contains(
            "-- universal theorem fib_eq_fibSpec omitted: sampled law shape is not auto-proved yet"
        ));
    }
4009
    // A different coefficient set (Pell numbers: P(n) = P(n-2) + 2·P(n-1))
    // must be auto-proved by the same general linear-recurrence machinery as
    // fibonacci: Nat re-expression, shift lemma, universal theorem, and no
    // "omitted" fallback comment.
    #[test]
    fn pell_like_example_auto_proves_same_general_shape() {
        let ctx = ctx_from_source(
            r#"
module Pell
  intent =
    "linear recurrence probe"

fn pellTR(n: Int, a: Int, b: Int) -> Int
  match n
    0 -> a
    _ -> pellTR(n - 1, b, a + 2 * b)

fn pell(n: Int) -> Int
  match n < 0
    true -> 0
    false -> pellTR(n, 0, 1)

fn pellSpec(n: Int) -> Int
  match n < 0
    true -> 0
    false -> match n
      0 -> 0
      1 -> 1
      _ -> pellSpec(n - 2) + 2 * pellSpec(n - 1)

verify pell law pellSpec
  given n: Int = [0, 1, 2, 3]
  pell(n) => pellSpec(n)
"#,
            "pell",
        );
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected pell example to stay inside proof subset, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("private def pellSpec__nat : Nat -> Int"));
        assert!(lean.contains("private theorem pell_eq_pellSpec__worker_nat_shift"));
        assert!(lean.contains("theorem pell_eq_pellSpec : ∀ (n : Int), pell n = pellSpec n := by"));
        assert!(!lean.contains(
            "-- universal theorem pell_eq_pellSpec omitted: sampled law shape is not auto-proved yet"
        ));
    }
4058
    // Negative control for the linear-recurrence prover: the pair-state
    // update `a * b` is nonlinear, so no shift lemma or universal proof may
    // be synthesized — only the sampled theorem, with `sorry` remaining for
    // the universal law.
    #[test]
    fn nonlinear_pair_state_recurrence_is_not_auto_proved_as_linear_shape() {
        let ctx = ctx_from_source(
            r#"
module WeirdRec
  intent =
    "reject nonlinear pair-state recurrence from linear recurrence prover"

fn weirdTR(n: Int, a: Int, b: Int) -> Int
  match n
    0 -> a
    _ -> weirdTR(n - 1, b, a * b)

fn weird(n: Int) -> Int
  match n < 0
    true -> 0
    false -> weirdTR(n, 0, 1)

fn weirdSpec(n: Int) -> Int
  match n < 0
    true -> 0
    false -> match n
      0 -> 0
      1 -> 1
      _ -> weirdSpec(n - 1) * weirdSpec(n - 2)

verify weird law weirdSpec
  given n: Int = [0, 1, 2, 3]
  weird(n) => weirdSpec(n)
"#,
            "weirdrec",
        );
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        // The universal theorem stays unproved…
        assert!(lean.contains("sorry"));
        // …no linear-recurrence helper lemma is generated…
        assert!(!lean.contains("private theorem weird_eq_weirdSpec__worker_nat_shift"));
        // …but the sampled instance is still emitted.
        assert!(lean.contains("theorem weird_eq_weirdSpec_sample_1 :"));
    }
4099
4100 #[test]
4101 fn date_example_stays_inside_proof_subset() {
4102 let ctx = ctx_from_source(include_str!("../../../examples/data/date.av"), "date");
4103 let issues = proof_mode_issues(&ctx);
4104 assert!(
4105 issues.is_empty(),
4106 "expected date example to stay inside proof subset, got: {:?}",
4107 issues
4108 );
4109
4110 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4111 let lean = generated_lean_file(&out);
4112 assert!(!lean.contains("partial def"));
4113 assert!(lean.contains("def parseIntSlice (s : String) (from' : Int) (to : Int) : Int :="));
4114 }
4115
4116 #[test]
4117 fn temperature_example_stays_inside_proof_subset() {
4118 let ctx = ctx_from_source(
4119 include_str!("../../../examples/core/temperature.av"),
4120 "temperature",
4121 );
4122 let issues = proof_mode_issues(&ctx);
4123 assert!(
4124 issues.is_empty(),
4125 "expected temperature example to stay inside proof subset, got: {:?}",
4126 issues
4127 );
4128
4129 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4130 let lean = generated_lean_file(&out);
4131 assert!(!lean.contains("partial def"));
4132 assert!(
4133 lean.contains("example : celsiusToFahr 0.0 = 32.0 := by native_decide"),
4134 "expected verify examples to survive proof export, got:\n{}",
4135 lean
4136 );
4137 }
4138
4139 #[test]
4140 fn quicksort_example_stays_inside_proof_subset() {
4141 let ctx = ctx_from_source(
4142 include_str!("../../../examples/data/quicksort.av"),
4143 "quicksort",
4144 );
4145 let issues = proof_mode_issues(&ctx);
4146 assert!(
4147 issues.is_empty(),
4148 "expected quicksort example to stay inside proof subset, got: {:?}",
4149 issues
4150 );
4151
4152 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4153 let lean = generated_lean_file(&out);
4154 assert!(lean.contains("def isOrderedFrom"));
4155 assert!(!lean.contains("partial def isOrderedFrom"));
4156 assert!(lean.contains("termination_by xs.length"));
4157 }
4158
4159 #[test]
4160 fn grok_s_language_example_uses_total_ranked_sizeof_mutual_recursion() {
4161 let ctx = ctx_from_source(
4162 include_str!("../../../examples/core/grok_s_language.av"),
4163 "grok_s_language",
4164 );
4165 let issues = proof_mode_issues(&ctx);
4166 assert!(
4167 issues.is_empty(),
4168 "expected grok_s_language example to stay inside proof subset, got: {:?}",
4169 issues
4170 );
4171
4172 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4173 let lean = generated_lean_file(&out);
4174 assert!(lean.contains("mutual"));
4175 assert!(lean.contains("def eval__fuel"));
4176 assert!(lean.contains("def parseListItems__fuel"));
4177 assert!(!lean.contains("partial def eval"));
4178 assert!(!lean.contains("termination_by (sizeOf e,"));
4179 assert!(lean.contains("-- when validSymbolNames e"));
4180 assert!(!lean.contains("private theorem toString'_law_parseRoundtrip_aux"));
4181 assert!(lean.contains(
4182 "theorem toString'_law_parseRoundtrip : ∀ (e : Sexpr), e = Sexpr.atomNum 42 ∨"
4183 ));
4184 assert!(
4185 lean.contains("validSymbolNames e = true -> parse (toString' e) = Except.ok e := by")
4186 );
4187 assert!(lean.contains("theorem toString'_law_parseSexprRoundtrip :"));
4188 assert!(lean.contains("theorem toString'_law_parseRoundtrip_sample_1 :"));
4189 }
4190
4191 #[test]
4192 fn lambda_example_keeps_only_eval_outside_proof_subset() {
4193 let ctx = ctx_from_source(include_str!("../../../examples/core/lambda.av"), "lambda");
4194 let issues = proof_mode_issues(&ctx);
4195 assert_eq!(
4196 issues,
4197 vec!["recursive function 'eval' is outside proof subset (currently supported: Int countdown, second-order affine Int recurrences with pair-state worker, structural recursion on List/recursive ADTs, String+position, mutual Int countdown, mutual String+position, and ranked sizeOf recursion)".to_string()]
4198 );
4199
4200 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4201 let lean = generated_lean_file(&out);
4202 assert!(lean.contains("def termToString__fuel"));
4203 assert!(lean.contains("def subst__fuel"));
4204 assert!(lean.contains("def countS__fuel"));
4205 assert!(!lean.contains("partial def termToString"));
4206 assert!(!lean.contains("partial def subst"));
4207 assert!(!lean.contains("partial def countS"));
4208 assert!(lean.contains("partial def eval"));
4209 }
4210
4211 #[test]
4212 fn mission_control_example_stays_inside_proof_subset() {
4213 let ctx = ctx_from_source(
4214 include_str!("../../../examples/apps/mission_control.av"),
4215 "mission_control",
4216 );
4217 let issues = proof_mode_issues(&ctx);
4218 assert!(
4219 issues.is_empty(),
4220 "expected mission_control example to stay inside proof subset, got: {:?}",
4221 issues
4222 );
4223
4224 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4225 let lean = generated_lean_file(&out);
4226 assert!(!lean.contains("partial def normalizeAngle"));
4227 assert!(lean.contains("def normalizeAngle__fuel"));
4228 }
4229
4230 #[test]
4231 fn notepad_store_example_stays_inside_proof_subset() {
4232 let ctx = ctx_from_source(
4233 include_str!("../../../examples/apps/notepad/store.av"),
4234 "notepad_store",
4235 );
4236 let issues = proof_mode_issues(&ctx);
4237 assert!(
4238 issues.is_empty(),
4239 "expected notepad/store example to stay inside proof subset, got: {:?}",
4240 issues
4241 );
4242
4243 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4244 let lean = generated_lean_file(&out);
4245 assert!(lean.contains("def deserializeLine (line : String) : Except String Note :="));
4246 assert!(lean.contains("Except String (List Note)"));
4247 assert!(!lean.contains("partial def deserializeLine"));
4248 assert!(lean.contains("-- when noteRoundtripSafe note"));
4249 assert!(lean.contains("-- when notesRoundtripSafe notes"));
4250 assert!(lean.contains(
4251 "theorem serializeLine_law_lineRoundtrip : ∀ (note : Note), note = { id' := 1, title := \"Hello\", body := \"World\" : Note } ∨"
4252 ));
4253 assert!(lean.contains(
4254 "theorem serializeLines_law_notesRoundtrip : ∀ (notes : List Note), notes = [] ∨"
4255 ));
4256 assert!(lean.contains("notesRoundtripSafe notes = true ->"));
4257 assert!(lean.contains("parseNotes (s!\"{String.intercalate \"\\n\" (serializeLines notes)}\\n\") = Except.ok notes"));
4258 assert!(lean.contains("theorem serializeLine_law_lineRoundtrip_sample_1 :"));
4259 assert!(lean.contains("theorem serializeLines_law_notesRoundtrip_sample_1 :"));
4260 }
4261}