1mod builtins;
7mod expr;
8mod law_auto;
9mod pattern;
10pub(crate) mod recurrence;
11mod shared;
12mod toplevel;
13mod types;
14
15use std::collections::{HashMap, HashSet};
16
17use crate::ast::{Expr, FnDef, Spanned, TopLevel, TypeDef, VerifyKind};
18use crate::codegen::{CodegenContext, ProjectOutput};
19
/// How `verify` blocks are emitted into the generated Lean output.
///
/// NOTE(review): variant semantics inferred from names and the default in
/// `transpile` (which picks `NativeDecide`) — confirm against the emitter.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum VerifyEmitMode {
    // Presumably discharges verify goals with `by native_decide` — TODO confirm.
    NativeDecide,
    // Presumably emits `sorry` placeholders instead of proofs — TODO confirm.
    Sorry,
    // Presumably emits theorem statements as skeletons for manual proof — TODO confirm.
    TheoremSkeleton,
}
32
33pub use crate::codegen::recursion::{ProofModeIssue, RecursionPlan};
38
/// Banner emitted at the top of every generated Lean file; disables the
/// unused-variables linter because generated code may bind names it never uses.
const LEAN_PRELUDE_HEADER: &str = r#"-- Generated by the Aver → Lean 4 transpiler
-- Pure core logic plus Oracle-lifted classified effects

set_option linter.unusedVariables false

-- Prelude: helper definitions for Aver builtins"#;
45
/// Int→Float coercion plus the `AverFloat` namespace: saturating Float→Int
/// conversions (`toInt`/`floor`/`ceil`/`round`) mirroring Rust's `f64 as i64`
/// semantics, with `native_decide` smoke checks guarding against drift.
const LEAN_PRELUDE_FLOAT_COE: &str = r#"instance : Coe Int Float := ⟨fun n => Float.ofInt n⟩

def Float.fromInt (n : Int) : Float := Float.ofInt n

-- Aver's Float-to-Int operations match the runtime semantics
-- (`f64::floor() as i64` in VM, Rust codegen, WASM — all three use the
-- same IEEE 754 floor/round/ceil followed by Rust's saturating
-- `f64 as i64` cast):
-- * finite values within [i64::MIN, i64::MAX]: truncate toward zero
-- * finite > i64::MAX: saturate to i64::MAX
-- * finite < i64::MIN: saturate to i64::MIN
-- * +Inf: saturate to i64::MAX
-- * -Inf: saturate to i64::MIN
-- * NaN: 0 (Rust 1.45+ defined behavior)
--
-- Lean's `Float.floor : Float → Float` doesn't directly satisfy Aver's
-- `Float.floor : Float → Int`, so we synthesize via the saturating
-- `Float.toUInt64` (returns 0 for NaN/negative) with sign handling and
-- explicit bounds. Per-case correctness is asserted by `native_decide`
-- examples below; total semantic agreement with `f64 as i64` would
-- need a formal IEEE spec in Lean, which is out of scope.
--
-- Asymmetry with the Dafny backend: Lean has IEEE 754 `Float` natively
-- (`double` at runtime), so we use it. Dafny only offers mathematical
-- `real` (Cauchy-style, no NaN/Inf/overflow), which is a fundamental
-- type mismatch with Aver's IEEE Float — Dafny operations stay opaque
-- (`function FloatPi(): real` etc.) rather than synthesizing IEEE on
-- top of `bv64`, which would mean implementing the entire IEEE
-- arithmetic in Dafny by hand.
namespace AverFloat
def toInt (x : Float) : Int :=
  if x.isNaN then 0
  -- 2^63 is exactly representable in f64; values ≥ that saturate up.
  else if x ≥ 9223372036854775808.0 then 9223372036854775807
  -- -2^63 is exactly representable; values strictly below saturate down.
  else if x < -9223372036854775808.0 then -9223372036854775808
  else if x ≥ 0.0 then Int.ofNat x.toUInt64.toNat
  else -(Int.ofNat (-x).toUInt64.toNat)

def floor (x : Float) : Int := toInt x.floor
def ceil (x : Float) : Int := toInt x.ceil
def round (x : Float) : Int := toInt x.round

def pow (x y : Float) : Float := x ^ y

-- Edge-case smoke checks: each `example` is discharged by reduction,
-- so any drift from these documented values fails Lake build.
example : AverFloat.toInt 0.0 = 0 := by native_decide
example : AverFloat.toInt 3.7 = 3 := by native_decide
example : AverFloat.toInt (-3.7) = -3 := by native_decide
example : AverFloat.toInt (1.0 / 0.0) = 9223372036854775807 := by native_decide
example : AverFloat.toInt (-1.0 / 0.0) = -9223372036854775808 := by native_decide
example : AverFloat.toInt (0.0 / 0.0) = 0 := by native_decide
example : AverFloat.floor 3.7 = 3 := by native_decide
example : AverFloat.floor (-3.7) = -4 := by native_decide
example : AverFloat.ceil 3.2 = 4 := by native_decide
example : AverFloat.ceil (-3.2) = -3 := by native_decide
-- Rounding mode (half-away-from-zero, matching Rust's `f64::round`):
example : AverFloat.round 0.5 = 1 := by native_decide
example : AverFloat.round (-0.5) = -1 := by native_decide
example : AverFloat.round 2.5 = 3 := by native_decide
example : AverFloat.round (-2.5) = -3 := by native_decide
end AverFloat"#;
109
/// `DecidableEq Float` backed by the runtime `==` via `unsafeCast`; the
/// opaque wrapper keeps the kernel sound while `implemented_by` supplies
/// the executable comparison.
const LEAN_PRELUDE_FLOAT_DEC_EQ: &str = r#"private unsafe def Float.unsafeDecEq (a b : Float) : Decidable (a = b) :=
  if a == b then isTrue (unsafeCast ()) else isFalse (unsafeCast ())
@[implemented_by Float.unsafeDecEq]
private opaque Float.compDecEq (a b : Float) : Decidable (a = b)
instance : DecidableEq Float := Float.compDecEq"#;
115
/// Structural `DecidableEq` instance for `Except ε α`, case-split over the
/// four constructor combinations.
const LEAN_PRELUDE_EXCEPT_DEC_EQ: &str = r#"instance [DecidableEq ε] [DecidableEq α] : DecidableEq (Except ε α)
  | .ok a, .ok b =>
    if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
  | .error a, .error b =>
    if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
  | .ok _, .error _ => isFalse (by intro h; cases h)
  | .error _, .ok _ => isFalse (by intro h; cases h)"#;
123
/// `Except.withDefault`: unwrap an `ok` value or fall back to a default.
const LEAN_PRELUDE_EXCEPT_NS: &str = r#"namespace Except
def withDefault (r : Except ε α) (d : α) : α :=
  match r with
  | .ok v => v
  | .error _ => d
end Except"#;
130
/// `Option.toExcept`: lift an `Option` into `Except`, using `e` as the error
/// for the `none` case.
const LEAN_PRELUDE_OPTION_TO_EXCEPT: &str = r#"def Option.toExcept (o : Option α) (e : ε) : Except ε α :=
  match o with
  | some v => .ok v
  | none => .error e"#;
135
136const LEAN_PRELUDE_STRING_HADD: &str = r#"instance : HAdd String String String := ⟨String.append⟩"#;
137
/// `BranchPath`: a Dewey-decimal-style path ("1.2.3") identifying a branch;
/// `Root` is the empty path and `child` appends one index segment.
const LEAN_PRELUDE_BRANCH_PATH: &str = r#"structure BranchPath where
  dewey : String
  deriving Repr, BEq, DecidableEq

def BranchPath.Root : BranchPath := { dewey := "" }

def BranchPath.child (p : BranchPath) (idx : Int) : BranchPath :=
  if p.dewey.isEmpty then { dewey := toString idx }
  else { dewey := p.dewey ++ "." ++ toString idx }

def BranchPath.parse (s : String) : BranchPath := { dewey := s }"#;
152
/// Fuel bound for string-position recursions: (remaining chars + 1) scaled by
/// a rank budget. Note `pos.toNat` clamps negative positions to 0.
const LEAN_PRELUDE_PROOF_FUEL: &str = r#"def averStringPosFuel (s : String) (pos : Int) (rankBudget : Nat) : Nat :=
  (((s.data.length) - pos.toNat) + 1) * rankBudget"#;
155
/// Structural size measures (always ≥ 1) for lists, options, and `Except`,
/// used as termination measures for generated recursive functions.
const LEAN_PRELUDE_AVER_MEASURE: &str = r#"namespace AverMeasure
def list (elemMeasure : α → Nat) : List α → Nat
  | [] => 1
  | x :: xs => elemMeasure x + list elemMeasure xs + 1
def option (elemMeasure : α → Nat) : Option α → Nat
  | none => 1
  | some x => elemMeasure x + 1
def except (errMeasure : ε → Nat) (okMeasure : α → Nat) : Except ε α → Nat
  | .error e => errMeasure e + 1
  | .ok v => okMeasure v + 1
end AverMeasure"#;
167
/// Association-list map implementation backing Aver's Map type: `get`/`set`/
/// `has`/`remove` compare keys with `DecidableEq`; `set` replaces in place or
/// appends at the end.
const AVER_MAP_PRELUDE_BASE: &str = r#"namespace AverMap
def empty : List (α × β) := []
def get [DecidableEq α] (m : List (α × β)) (k : α) : Option β :=
  match m with
  | [] => none
  | (k', v) :: rest => if k = k' then some v else AverMap.get rest k
def set [DecidableEq α] (m : List (α × β)) (k : α) (v : β) : List (α × β) :=
  let rec go : List (α × β) → List (α × β)
    | [] => [(k, v)]
    | (k', v') :: rest => if k = k' then (k, v) :: rest else (k', v') :: go rest
  go m
def has [DecidableEq α] (m : List (α × β)) (k : α) : Bool :=
  m.any (fun p => decide (k = p.1))
def remove [DecidableEq α] (m : List (α × β)) (k : α) : List (α × β) :=
  m.filter (fun p => !(decide (k = p.1)))
def keys (m : List (α × β)) : List α := m.map Prod.fst
def values (m : List (α × β)) : List β := m.map Prod.snd
def entries (m : List (α × β)) : List (α × β) := m
def len (m : List (α × β)) : Nat := m.length
def fromList (entries : List (α × β)) : List (α × β) := entries"#;
188
/// Lemma: after `set m k v`, `has _ k` is `true` (induction on the list,
/// via the inner `set.go` helper).
const AVER_MAP_PRELUDE_HAS_SET_SELF: &str = r#"private theorem any_set_go_self [DecidableEq α] (k : α) (v : β) :
  ∀ (m : List (α × β)), List.any (AverMap.set.go k v m) (fun p => decide (k = p.1)) = true := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, List.any]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.set.go, List.any, h]
      · simp [AverMap.set.go, List.any, h, ih]

theorem has_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
  AverMap.has (AverMap.set m k v) k = true := by
  simpa [AverMap.has, AverMap.set] using any_set_go_self k v m"#;
205
/// Lemma: after `set m k v`, `get _ k` returns `some v`.
const AVER_MAP_PRELUDE_GET_SET_SELF: &str = r#"private theorem get_set_go_self [DecidableEq α] (k : α) (v : β) :
  ∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) k = some v := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, AverMap.get]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.set.go, AverMap.get, h]
      · simp [AverMap.set.go, AverMap.get, h, ih]

theorem get_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
  AverMap.get (AverMap.set m k v) k = some v := by
  simpa [AverMap.set] using get_set_go_self k v m"#;
222
/// Lemma: `set m k v` leaves lookups of every other key unchanged.
const AVER_MAP_PRELUDE_GET_SET_OTHER: &str = r#"private theorem get_set_go_other [DecidableEq α] (k key : α) (v : β) (h : key ≠ k) :
  ∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) key = AverMap.get m key := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, AverMap.get, h]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases hk : k = k'
      · have hkey : key ≠ k' := by simpa [hk] using h
        simp [AverMap.set.go, AverMap.get, hk, hkey]
      · by_cases hkey : key = k'
        · simp [AverMap.set.go, AverMap.get, hk, hkey]
        · simp [AverMap.set.go, AverMap.get, hk, hkey, ih]

theorem get_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
  AverMap.get (AverMap.set m k v) key = AverMap.get m key := by
  simpa [AverMap.set] using get_set_go_other k key v h m"#;
242
/// Lemma: `has` agrees with `(get _).isSome`, and therefore `set` on one key
/// does not change `has` for any other key (derived from `get_set_other`).
const AVER_MAP_PRELUDE_HAS_SET_OTHER: &str = r#"theorem has_eq_isSome_get [DecidableEq α] (m : List (α × β)) (k : α) :
  AverMap.has m k = (AverMap.get m k).isSome := by
  induction m with
  | nil =>
    simp [AverMap.has, AverMap.get]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.has, AverMap.get, List.any, h]
      · simpa [AverMap.has, AverMap.get, List.any, h] using ih

theorem has_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
  AverMap.has (AverMap.set m k v) key = AverMap.has m key := by
  rw [AverMap.has_eq_isSome_get, AverMap.has_eq_isSome_get]
  simp [AverMap.get_set_other, h]"#;
259
260const AVER_MAP_PRELUDE_END: &str = r#"end AverMap"#;
261
/// `AverList`: bounds-checked indexing (negative indices yield `none`) and a
/// fold-based insertion sort over any `Ord` element type.
const LEAN_PRELUDE_AVER_LIST: &str = r#"namespace AverList
def get (xs : List α) (i : Int) : Option α :=
  if i < 0 then none else xs.get? i.toNat
private def insertSorted [Ord α] (x : α) : List α → List α
  | [] => [x]
  | y :: ys =>
    if compare x y == Ordering.lt || compare x y == Ordering.eq then
      x :: y :: ys
    else
      y :: insertSorted x ys
def sort [Ord α] (xs : List α) : List α :=
  xs.foldl (fun acc x => insertSorted x acc) []
end AverList"#;
275
/// String builtins (`charAt`/`slice`/`chars`/`fromFloat`) plus the
/// `AverString.split` family, with proof lemmas about splitting on a single
/// separator character and round-tripping through `String.intercalate`.
const LEAN_PRELUDE_STRING_HELPERS: &str = r#"def String.charAt (s : String) (i : Int) : Option String :=
  if i < 0 then none
  else (s.toList.get? i.toNat).map Char.toString
theorem String.charAt_length_none (s : String) : String.charAt s s.length = none := by
  have hs : ¬ ((s.length : Int) < 0) := by omega
  unfold String.charAt
  simp [hs]
  change s.data.length ≤ s.length
  exact Nat.le_refl _
def String.slice (s : String) (start stop : Int) : String :=
  let startN := if start < 0 then 0 else start.toNat
  let stopN := if stop < 0 then 0 else stop.toNat
  let chars := s.toList
  String.mk ((chars.drop startN).take (stopN - startN))
private def trimFloatTrailingZerosChars (chars : List Char) : List Char :=
  let noZeros := (chars.reverse.dropWhile (fun c => c == '0')).reverse
  match noZeros.reverse with
  | '.' :: rest => rest.reverse
  | _ => noZeros
private def normalizeFloatString (s : String) : String :=
  if s.toList.any (fun c => c == '.') then
    String.mk (trimFloatTrailingZerosChars s.toList)
  else s
def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)
def String.chars (s : String) : List String := s.toList.map Char.toString
private theorem char_to_string_append_mk (c : Char) (chars : List Char) :
  Char.toString c ++ String.mk chars = String.mk (c :: chars) := by
  rfl
private theorem string_intercalate_empty_char_strings_go (acc : String) (chars : List Char) :
  String.intercalate.go acc "" (chars.map Char.toString) = acc ++ String.mk chars := by
  induction chars generalizing acc with
  | nil =>
    simp [String.intercalate.go]
  | cons c rest ih =>
    calc
      String.intercalate.go acc "" ((c :: rest).map Char.toString)
          = String.intercalate.go (acc ++ Char.toString c) "" (rest.map Char.toString) := by
        simp [String.intercalate.go]
      _ = (acc ++ Char.toString c) ++ String.mk rest := by
        simpa using ih (acc ++ Char.toString c)
      _ = acc ++ String.mk (c :: rest) := by
        simp [String.append_assoc, char_to_string_append_mk]
private theorem string_intercalate_empty_char_strings (chars : List Char) :
  String.intercalate "" (chars.map Char.toString) = String.mk chars := by
  cases chars with
  | nil =>
    simp [String.intercalate]
  | cons c rest =>
    simpa [String.intercalate] using string_intercalate_empty_char_strings_go c.toString rest
theorem String.intercalate_empty_chars (s : String) :
  String.intercalate "" (String.chars s) = s := by
  cases s with
  | mk chars =>
    simpa [String.chars] using string_intercalate_empty_char_strings chars
namespace AverString
def splitOnCharGo (currentRev : List Char) (sep : Char) : List Char → List String
  | [] => [String.mk currentRev.reverse]
  | c :: cs =>
    if c == sep then
      String.mk currentRev.reverse :: splitOnCharGo [] sep cs
    else
      splitOnCharGo (c :: currentRev) sep cs
def splitOnChar (s : String) (sep : Char) : List String :=
  splitOnCharGo [] sep s.toList
def split (s delim : String) : List String :=
  match delim.toList with
  | [] => "" :: (s.toList.map Char.toString) ++ [""]
  | [c] => splitOnChar s c
  | _ => s.splitOn delim
@[simp] private theorem char_toString_data (c : Char) : c.toString.data = [c] := by
  rfl
private theorem splitOnCharGo_until_sep
  (prefixRev part tail : List Char) (sep : Char) :
  part.all (fun c => c != sep) = true ->
  splitOnCharGo prefixRev sep (part ++ sep :: tail) =
  String.mk (prefixRev.reverse ++ part) :: splitOnCharGo [] sep tail := by
  intro h_safe
  induction part generalizing prefixRev with
  | nil =>
    simp [splitOnCharGo]
  | cons c rest ih =>
    simp at h_safe
    have h_rest : (rest.all fun c => c != sep) = true := by
      simpa using h_safe.2
    simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
      (ih (c :: prefixRev) h_rest)
private theorem splitOnCharGo_no_sep
  (prefixRev chars : List Char) (sep : Char) :
  chars.all (fun c => c != sep) = true ->
  splitOnCharGo prefixRev sep chars =
  [String.mk (prefixRev.reverse ++ chars)] := by
  intro h_safe
  induction chars generalizing prefixRev with
  | nil =>
    simp [splitOnCharGo]
  | cons c rest ih =>
    simp at h_safe
    have h_rest : (rest.all fun c => c != sep) = true := by
      simpa using h_safe.2
    simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
      (ih (c :: prefixRev) h_rest)
@[simp] theorem split_single_char_append
  (head tail : String) (sep : Char) :
  head.toList.all (fun c => c != sep) = true ->
  split (head ++ Char.toString sep ++ tail) (Char.toString sep) =
  head :: split tail (Char.toString sep) := by
  intro h_safe
  simpa [split, splitOnChar] using
    (splitOnCharGo_until_sep [] head.data tail.data sep h_safe)
@[simp] theorem split_single_char_no_sep
  (s : String) (sep : Char) :
  s.toList.all (fun c => c != sep) = true ->
  split s (Char.toString sep) = [s] := by
  intro h_safe
  simpa [split, splitOnChar] using
    (splitOnCharGo_no_sep [] s.data sep h_safe)
private theorem intercalate_go_prefix
  (pref acc sep : String) (rest : List String) :
  String.intercalate.go (pref ++ sep ++ acc) sep rest =
  pref ++ sep ++ String.intercalate.go acc sep rest := by
  induction rest generalizing acc with
  | nil =>
    simp [String.intercalate.go, String.append_assoc]
  | cons x xs ih =>
    simpa [String.intercalate.go, String.append_assoc] using
      (ih (acc ++ sep ++ x))
@[simp] theorem split_intercalate_trailing_single_char
  (parts : List String) (sep : Char) :
  parts.all (fun part => part.toList.all (fun c => c != sep)) = true ->
  split (String.intercalate (Char.toString sep) parts ++ Char.toString sep) (Char.toString sep) =
  match parts with
  | [] => ["", ""]
  | _ => parts ++ [""] := by
  intro h_safe
  induction parts with
  | nil =>
    simp [split, splitOnChar, splitOnCharGo]
  | cons part rest ih =>
    simp at h_safe
    have h_part : (part.toList.all fun c => c != sep) = true := by
      simpa using h_safe.1
    cases rest with
    | nil =>
      have h_empty : ("".toList.all fun c => c != sep) = true := by simp
      calc
        split (String.intercalate.go part (Char.toString sep) [] ++ Char.toString sep) (Char.toString sep)
            = split (part ++ Char.toString sep) (Char.toString sep) := by
          simp [String.intercalate.go]
        _ = part :: split "" (Char.toString sep) := by
          simpa using split_single_char_append part "" sep h_part
        _ = [part, ""] := by
          simp [split_single_char_no_sep, h_empty]
    | cons next rest' =>
      have h_rest : ((next :: rest').all fun part => part.toList.all fun c => c != sep) = true := by
        simpa using h_safe.2
      calc
        split
            (String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep)
            (Char.toString sep)
          =
          split
            (part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep))
            (Char.toString sep) := by
          have h_join :
            String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
            = part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
            calc
              String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
                  = String.intercalate.go (part ++ Char.toString sep ++ next) (Char.toString sep) rest' ++ Char.toString sep := by
                simp [String.intercalate.go]
              _ = part ++ Char.toString sep ++ (String.intercalate.go next (Char.toString sep) rest' ++ Char.toString sep) := by
                rw [intercalate_go_prefix part next (Char.toString sep) rest']
                simp [String.append_assoc]
              _ = part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
                simp [String.intercalate, String.intercalate.go]
          simpa using congrArg (fun s => split s (Char.toString sep)) h_join
        _ = part :: split
            (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
            (Char.toString sep) := by
          simpa using split_single_char_append
            part
            (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
            sep
            h_part
        _ = part :: (next :: rest' ++ [""]) := by
          simpa using ih h_rest
end AverString"#;
470
/// Decimal digit machinery (`AverDigits`), `String.fromInt`/`Int.fromString`
/// with a proved round-trip theorem, and a hand-rolled `Float.fromString`
/// (sign, integer/fraction parts, optional e/E exponent → `Float.ofScientific`).
const LEAN_PRELUDE_NUMERIC_PARSE: &str = r#"namespace AverDigits
def foldDigitsAcc (acc : Nat) : List Nat -> Nat
  | [] => acc
  | d :: ds => foldDigitsAcc (acc * 10 + d) ds

def foldDigits (digits : List Nat) : Nat :=
  foldDigitsAcc 0 digits

private theorem foldDigitsAcc_append_singleton (acc : Nat) (xs : List Nat) (d : Nat) :
  foldDigitsAcc acc (xs ++ [d]) = foldDigitsAcc acc xs * 10 + d := by
  induction xs generalizing acc with
  | nil =>
    simp [foldDigitsAcc]
  | cons x xs ih =>
    simp [foldDigitsAcc, ih, Nat.left_distrib, Nat.add_assoc, Nat.add_left_comm]

private theorem foldDigits_append_singleton (xs : List Nat) (d : Nat) :
  foldDigits (xs ++ [d]) = foldDigits xs * 10 + d := by
  simpa [foldDigits] using foldDigitsAcc_append_singleton 0 xs d

def natDigits : Nat -> List Nat
  | n =>
    if n < 10 then
      [n]
    else
      natDigits (n / 10) ++ [n % 10]
termination_by
  n => n

theorem natDigits_nonempty (n : Nat) : natDigits n ≠ [] := by
  by_cases h : n < 10
  · rw [natDigits.eq_1]
    simp [h]
  · rw [natDigits.eq_1]
    simp [h]

theorem natDigits_digits_lt_ten : ∀ n : Nat, ∀ d ∈ natDigits n, d < 10 := by
  intro n d hd
  by_cases h : n < 10
  · rw [natDigits.eq_1] at hd
    simp [h] at hd
    rcases hd with rfl
    exact h
  · rw [natDigits.eq_1] at hd
    simp [h] at hd
    rcases hd with hd | hd
    · exact natDigits_digits_lt_ten (n / 10) d hd
    · subst hd
      exact Nat.mod_lt n (by omega)

theorem foldDigits_natDigits : ∀ n : Nat, foldDigits (natDigits n) = n := by
  intro n
  by_cases h : n < 10
  · rw [natDigits.eq_1]
    simp [h, foldDigits, foldDigitsAcc]
  · rw [natDigits.eq_1]
    simp [h]
    rw [foldDigits_append_singleton]
    rw [foldDigits_natDigits (n / 10)]
    omega

def digitChar : Nat -> Char
  | 0 => '0' | 1 => '1' | 2 => '2' | 3 => '3' | 4 => '4'
  | 5 => '5' | 6 => '6' | 7 => '7' | 8 => '8' | 9 => '9'
  | _ => '0'

def charToDigit? : Char -> Option Nat
  | '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3 | '4' => some 4
  | '5' => some 5 | '6' => some 6 | '7' => some 7 | '8' => some 8 | '9' => some 9
  | _ => none

theorem charToDigit_digitChar : ∀ d : Nat, d < 10 -> charToDigit? (digitChar d) = some d
  | 0, _ => by simp [digitChar, charToDigit?]
  | 1, _ => by simp [digitChar, charToDigit?]
  | 2, _ => by simp [digitChar, charToDigit?]
  | 3, _ => by simp [digitChar, charToDigit?]
  | 4, _ => by simp [digitChar, charToDigit?]
  | 5, _ => by simp [digitChar, charToDigit?]
  | 6, _ => by simp [digitChar, charToDigit?]
  | 7, _ => by simp [digitChar, charToDigit?]
  | 8, _ => by simp [digitChar, charToDigit?]
  | 9, _ => by simp [digitChar, charToDigit?]
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem digitChar_ne_minus : ∀ d : Nat, d < 10 -> digitChar d ≠ '-'
  | 0, _ => by decide
  | 1, _ => by decide
  | 2, _ => by decide
  | 3, _ => by decide
  | 4, _ => by decide
  | 5, _ => by decide
  | 6, _ => by decide
  | 7, _ => by decide
  | 8, _ => by decide
  | 9, _ => by decide
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->
  Char.toString (digitChar d) ≠ " " ∧
  Char.toString (digitChar d) ≠ "\t" ∧
  Char.toString (digitChar d) ≠ "\n" ∧
  Char.toString (digitChar d) ≠ "\r"
  | 0, _ => by decide
  | 1, _ => by decide
  | 2, _ => by decide
  | 3, _ => by decide
  | 4, _ => by decide
  | 5, _ => by decide
  | 6, _ => by decide
  | 7, _ => by decide
  | 8, _ => by decide
  | 9, _ => by decide
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem mapM_charToDigit_digits : ∀ ds : List Nat,
  (∀ d ∈ ds, d < 10) -> List.mapM charToDigit? (ds.map digitChar) = some ds := by
  intro ds hds
  induction ds with
  | nil =>
    simp
  | cons d ds ih =>
    have hd : d < 10 := hds d (by simp)
    have htail : ∀ x ∈ ds, x < 10 := by
      intro x hx
      exact hds x (by simp [hx])
    simp [charToDigit_digitChar d hd, ih htail]

def natDigitsChars (n : Nat) : List Char :=
  (natDigits n).map digitChar

def parseNatChars (chars : List Char) : Option Nat :=
  match chars with
  | [] => none
  | _ => do
    let digits <- List.mapM charToDigit? chars
    pure (foldDigits digits)

theorem parseNatChars_nat (n : Nat) :
  parseNatChars (natDigitsChars n) = some n := by
  unfold parseNatChars natDigitsChars
  cases h : (natDigits n).map digitChar with
  | nil =>
    exfalso
    exact natDigits_nonempty n (List.map_eq_nil_iff.mp h)
  | cons hd tl =>
    have hdigits : List.mapM charToDigit? (List.map digitChar (natDigits n)) = some (natDigits n) :=
      mapM_charToDigit_digits (natDigits n) (fun d hd => natDigits_digits_lt_ten n d hd)
    rw [h] at hdigits
    simp [h, hdigits, foldDigits_natDigits]
end AverDigits

def String.fromInt (n : Int) : String :=
  match n with
  | .ofNat m => String.mk (AverDigits.natDigitsChars m)
  | .negSucc m => String.mk ('-' :: AverDigits.natDigitsChars (m + 1))

def Int.fromString (s : String) : Except String Int :=
  match s.toList with
  | [] => .error ("Cannot parse '" ++ s ++ "' as Int")
  | '-' :: rest =>
    match AverDigits.parseNatChars rest with
    | some n => .ok (-Int.ofNat n)
    | none => .error ("Cannot parse '" ++ s ++ "' as Int")
  | chars =>
    match AverDigits.parseNatChars chars with
    | some n => .ok (Int.ofNat n)
    | none => .error ("Cannot parse '" ++ s ++ "' as Int")

theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n
  | .ofNat m => by
    cases h : AverDigits.natDigits m with
    | nil =>
      exfalso
      exact AverDigits.natDigits_nonempty m h
    | cons d ds =>
      have hd : d < 10 := AverDigits.natDigits_digits_lt_ten m d (by simp [h])
      have hne : AverDigits.digitChar d ≠ '-' := AverDigits.digitChar_ne_minus d hd
      have hparse : AverDigits.parseNatChars (AverDigits.digitChar d :: List.map AverDigits.digitChar ds) = some m := by
        simpa [AverDigits.natDigitsChars, h] using AverDigits.parseNatChars_nat m
      simp [String.fromInt, Int.fromString, AverDigits.natDigitsChars, h, hne, hparse]
  | .negSucc m => by
    simp [String.fromInt, Int.fromString, AverDigits.parseNatChars_nat]
    rfl

private def charDigitsToNat (cs : List Char) : Nat :=
  cs.foldl (fun acc c => acc * 10 + (c.toNat - '0'.toNat)) 0

private def parseExpPart : List Char → (Bool × List Char)
  | '-' :: rest => (true, rest.takeWhile Char.isDigit)
  | '+' :: rest => (false, rest.takeWhile Char.isDigit)
  | rest => (false, rest.takeWhile Char.isDigit)

def Float.fromString (s : String) : Except String Float :=
  let chars := s.toList
  let (neg, chars) := match chars with
    | '-' :: rest => (true, rest)
    | _ => (false, chars)
  let intPart := chars.takeWhile Char.isDigit
  let rest := chars.dropWhile Char.isDigit
  let (fracPart, rest) := match rest with
    | '.' :: rest => (rest.takeWhile Char.isDigit, rest.dropWhile Char.isDigit)
    | _ => ([], rest)
  let (expNeg, expDigits) := match rest with
    | 'e' :: rest => parseExpPart rest
    | 'E' :: rest => parseExpPart rest
    | _ => (false, [])
  if intPart.isEmpty && fracPart.isEmpty then .error ("Invalid float: " ++ s)
  else
    let mantissa := charDigitsToNat (intPart ++ fracPart)
    let fracLen : Int := fracPart.length
    let expVal : Int := charDigitsToNat expDigits
    let shift : Int := (if expNeg then -expVal else expVal) - fracLen
    let f := if shift >= 0 then Float.ofScientific mantissa false shift.toNat
      else Float.ofScientific mantissa true ((-shift).toNat)
    .ok (if neg then -f else f)"#;
689
/// Char code-point conversions (rejecting out-of-range and surrogate code
/// points in `fromCode`) and `AverByte` hex encoding/decoding for 0–255.
const LEAN_PRELUDE_CHAR_BYTE: &str = r#"def Char.toCode (s : String) : Int :=
  match s.toList.head? with
  | some c => (c.toNat : Int)
  | none => panic! "Char.toCode: string is empty"
def Char.fromCode (n : Int) : Option String :=
  if n < 0 || n > 1114111 then none
  else if n >= 55296 && n <= 57343 then none
  else some (Char.toString (Char.ofNat n.toNat))

def hexDigit (n : Int) : String :=
  match n with
  | 0 => "0" | 1 => "1" | 2 => "2" | 3 => "3"
  | 4 => "4" | 5 => "5" | 6 => "6" | 7 => "7"
  | 8 => "8" | 9 => "9" | 10 => "a" | 11 => "b"
  | 12 => "c" | 13 => "d" | 14 => "e" | 15 => "f"
  | _ => "?"

def byteToHex (code : Int) : String :=
  hexDigit (code / 16) ++ hexDigit (code % 16)

namespace AverByte
private def hexValue (c : Char) : Option Int :=
  match c with
  | '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3
  | '4' => some 4 | '5' => some 5 | '6' => some 6 | '7' => some 7
  | '8' => some 8 | '9' => some 9 | 'a' => some 10 | 'b' => some 11
  | 'c' => some 12 | 'd' => some 13 | 'e' => some 14 | 'f' => some 15
  | 'A' => some 10 | 'B' => some 11 | 'C' => some 12 | 'D' => some 13
  | 'E' => some 14 | 'F' => some 15
  | _ => none
def toHex (n : Int) : Except String String :=
  if n < 0 || n > 255 then
    .error ("Byte.toHex: " ++ toString n ++ " is out of range 0-255")
  else
    .ok (byteToHex n)
def fromHex (s : String) : Except String Int :=
  match s.toList with
  | [hi, lo] =>
    match hexValue hi, hexValue lo with
    | some h, some l => .ok (h * 16 + l)
    | _, _ => .error ("Byte.fromHex: invalid hex '" ++ s ++ "'")
  | _ => .error ("Byte.fromHex: expected exactly 2 hex chars, got '" ++ s ++ "'")
end AverByte"#;
733
734pub(crate) fn pure_fns(ctx: &CodegenContext) -> Vec<&FnDef> {
735 ctx.modules
736 .iter()
737 .flat_map(|m| m.fn_defs.iter())
738 .chain(ctx.fn_defs.iter())
739 .filter(|fd| toplevel::is_pure_fn(fd))
740 .collect()
741}
742
743pub(crate) fn recursive_type_names(ctx: &CodegenContext) -> HashSet<String> {
744 ctx.modules
745 .iter()
746 .flat_map(|m| m.type_defs.iter())
747 .chain(ctx.type_defs.iter())
748 .filter(|td| toplevel::is_recursive_type_def(td))
749 .map(|td| toplevel::type_def_name(td).to_string())
750 .collect()
751}
752
753pub(crate) fn recursive_pure_fn_names(ctx: &CodegenContext) -> HashSet<String> {
754 let pure_names: HashSet<String> = pure_fns(ctx)
755 .into_iter()
756 .map(|fd| fd.name.clone())
757 .collect();
758 ctx.recursive_fns
764 .iter()
765 .filter(|name| pure_names.contains(name.as_str()))
766 .cloned()
767 .collect()
768}
769
770fn verify_counter_key(vb: &crate::ast::VerifyBlock) -> String {
771 match &vb.kind {
772 VerifyKind::Cases => format!("fn:{}", vb.fn_name),
773 VerifyKind::Law(law) => format!("law:{}::{}", vb.fn_name, law.name),
774 }
775}
776
/// Name for the generated Lean project: the entry file's basename.
fn lean_project_name(ctx: &CodegenContext) -> String {
    crate::codegen::common::entry_basename(ctx)
}
780
781pub(crate) fn find_type_def<'a>(ctx: &'a CodegenContext, type_name: &str) -> Option<&'a TypeDef> {
782 ctx.modules
783 .iter()
784 .flat_map(|m| m.type_defs.iter())
785 .chain(ctx.type_defs.iter())
786 .find(|td| toplevel::type_def_name(td) == type_name)
787}
788
789pub(super) fn bound_expr_to_lean(expr: &Spanned<Expr>) -> String {
790 match &expr.node {
791 Expr::Literal(crate::ast::Literal::Int(n)) => format!("{}", n),
792 Expr::Ident(name) => expr::aver_name_to_lean(name),
793 Expr::FnCall(f, args) => {
794 if let Some(dotted) = crate::codegen::common::expr_to_dotted_name(&f.node) {
795 if dotted == "List.len" && args.len() == 1 {
797 return format!("{}.length", bound_expr_to_lean(&args[0]));
798 }
799 let lean_args: Vec<String> = args.iter().map(bound_expr_to_lean).collect();
800 format!(
801 "({} {})",
802 expr::aver_name_to_lean(&dotted),
803 lean_args.join(" ")
804 )
805 } else {
806 "0".to_string()
807 }
808 }
809 Expr::Attr(obj, field) => format!(
810 "{}.{}",
811 bound_expr_to_lean(obj),
812 expr::aver_name_to_lean(field)
813 ),
814 _ => "0".to_string(),
815 }
816}
817
818pub(crate) fn sizeof_measure_param_indices(fd: &FnDef) -> Vec<usize> {
819 fd.params
820 .iter()
821 .enumerate()
822 .filter_map(|(idx, (_, type_name))| {
823 (!crate::codegen::recursion::detect::is_scalar_like_type(type_name)).then_some(idx)
824 })
825 .collect()
826}
827
828pub fn proof_mode_findings(ctx: &CodegenContext) -> Vec<ProofModeIssue> {
833 let (_plans, issues) = crate::codegen::recursion::analyze_plans(ctx);
834 issues
835}
836
837pub fn proof_mode_issues(ctx: &CodegenContext) -> Vec<String> {
838 proof_mode_findings(ctx)
839 .into_iter()
840 .map(|issue| issue.message)
841 .collect()
842}
843
/// Transpile with the default verify emission mode (`native_decide`).
pub fn transpile(ctx: &mut CodegenContext) -> ProjectOutput {
    transpile_with_verify_mode(ctx, VerifyEmitMode::NativeDecide)
}
855
/// Transpile in proof mode: recursion plans are analyzed and pure functions
/// are emitted through the proof-oriented emitters where supported.
pub fn transpile_for_proof_mode(
    ctx: &mut CodegenContext,
    verify_mode: VerifyEmitMode,
) -> ProjectOutput {
    // Refresh derived facts before emission so generated code reflects the
    // current context state.
    ctx.refresh_facts();
    transpile_unified(ctx, verify_mode, LeanEmitMode::Proof)
}
867
/// Transpile in standard (non-proof) mode with an explicit verify emission
/// mode for `verify` blocks (`native_decide`, `sorry`, or theorem skeletons).
pub fn transpile_with_verify_mode(
    ctx: &mut CodegenContext,
    verify_mode: VerifyEmitMode,
) -> ProjectOutput {
    // Refresh derived facts before emission so generated code reflects the
    // current context state.
    ctx.refresh_facts();
    transpile_unified(ctx, verify_mode, LeanEmitMode::Standard)
}
880
881fn emit_lifted_effectful_functions(
890 ctx: &CodegenContext,
891 recursive_fns: &HashSet<String>,
892 sections: &mut Vec<String>,
893) {
894 use crate::types::checker::effect_classification::is_classified;
895
896 let reachable = crate::codegen::common::verify_reachable_fn_names(&ctx.items);
902
903 let mut helpers: std::collections::HashMap<String, Vec<String>> =
908 std::collections::HashMap::new();
909 for item in &ctx.items {
910 let TopLevel::FnDef(fd) = item else { continue };
911 if fd.effects.is_empty() || fd.name == "main" {
912 continue;
913 }
914 if !fd.effects.iter().all(|e| is_classified(&e.node)) {
915 continue;
916 }
917 if !reachable.contains(&fd.name) {
918 continue;
919 }
920 helpers.insert(
921 fd.name.clone(),
922 fd.effects.iter().map(|e| e.node.clone()).collect(),
923 );
924 }
925
926 let mut lifted_fns: Vec<(String, crate::ast::FnDef)> = Vec::new();
927 for item in &ctx.items {
928 let TopLevel::FnDef(fd) = item else { continue };
929 if fd.effects.is_empty() || fd.name == "main" {
930 continue;
931 }
932 if !fd.effects.iter().all(|e| is_classified(&e.node)) {
933 continue;
934 }
935 if !reachable.contains(&fd.name) {
936 continue;
937 }
938 let Ok(Some(lifted)) =
939 crate::types::checker::effect_lifting::lift_fn_def_with_helpers(fd, &helpers)
940 else {
941 continue;
942 };
943 lifted_fns.push((fd.name.clone(), lifted));
944 }
945
946 let eligible_names: std::collections::HashSet<String> =
954 lifted_fns.iter().map(|(n, _)| n.clone()).collect();
955 let mut emitted: std::collections::HashSet<String> = std::collections::HashSet::new();
956 let mut order: Vec<usize> = Vec::new();
957 let mut remaining: Vec<usize> = (0..lifted_fns.len()).collect();
958 while !remaining.is_empty() {
959 let before = remaining.len();
960 remaining.retain(|&idx| {
961 let body_calls = collect_called_idents_in_body(&lifted_fns[idx].1.body);
962 let ready = body_calls
963 .iter()
964 .all(|name| !eligible_names.contains(name) || emitted.contains(name));
965 if ready {
966 emitted.insert(lifted_fns[idx].0.clone());
967 order.push(idx);
968 false
969 } else {
970 true
971 }
972 });
973 if remaining.len() == before {
974 order.append(&mut remaining);
978 }
979 }
980
981 for idx in order {
982 let (_, lifted) = &lifted_fns[idx];
983 if let Some(code) = toplevel::emit_fn_def(lifted, recursive_fns, ctx) {
984 sections.push(code);
985 sections.push(String::new());
986 }
987 }
988}
989
990fn collect_called_idents_in_body(body: &crate::ast::FnBody) -> std::collections::HashSet<String> {
991 use crate::ast::{Expr, Spanned, Stmt};
992 let mut out = std::collections::HashSet::new();
993 fn walk(expr: &Spanned<Expr>, out: &mut std::collections::HashSet<String>) {
994 match &expr.node {
995 Expr::FnCall(callee, args) => {
996 if let Expr::Ident(name) | Expr::Resolved { name, .. } = &callee.node {
997 out.insert(name.clone());
998 }
999 walk(callee, out);
1000 for a in args {
1001 walk(a, out);
1002 }
1003 }
1004 Expr::BinOp(_, l, r) => {
1005 walk(l, out);
1006 walk(r, out);
1007 }
1008 Expr::Match { subject, arms } => {
1009 walk(subject, out);
1010 for arm in arms {
1011 walk(&arm.body, out);
1012 }
1013 }
1014 Expr::Attr(inner, _) | Expr::ErrorProp(inner) => walk(inner, out),
1015 Expr::Constructor(_, Some(inner)) => walk(inner, out),
1016 Expr::List(items) | Expr::Tuple(items) | Expr::IndependentProduct(items, _) => {
1017 for i in items {
1018 walk(i, out);
1019 }
1020 }
1021 Expr::MapLiteral(pairs) => {
1022 for (k, v) in pairs {
1023 walk(k, out);
1024 walk(v, out);
1025 }
1026 }
1027 Expr::RecordCreate { fields, .. } => {
1028 for (_, v) in fields {
1029 walk(v, out);
1030 }
1031 }
1032 Expr::RecordUpdate { base, updates, .. } => {
1033 walk(base, out);
1034 for (_, v) in updates {
1035 walk(v, out);
1036 }
1037 }
1038 Expr::InterpolatedStr(parts) => {
1039 for part in parts {
1040 if let crate::ast::StrPart::Parsed(inner) = part {
1041 walk(inner, out);
1042 }
1043 }
1044 }
1045 _ => {}
1046 }
1047 }
1048 for stmt in body.stmts() {
1049 match stmt {
1050 Stmt::Expr(e) => walk(e, &mut out),
1051 Stmt::Binding(_, _, e) => walk(e, &mut out),
1052 }
1053 }
1054 out
1055}
1056
#[cfg(test)]
fn generate_prelude() -> String {
    // Test-only convenience: the full prelude with every helper included.
    generate_prelude_for_body("", true)
}
1061
#[cfg(test)]
fn generate_prelude_for_body(body: &str, include_all_helpers: bool) -> String {
    let mut parts = vec![LEAN_PRELUDE_HEADER.to_string()];

    // Trust header: always present when all helpers are requested, otherwise
    // only when the body needs it.
    if include_all_helpers || crate::codegen::builtin_records::needs_trust_header(body) {
        let empty = crate::codegen::common::DeclaredEffects {
            bare_namespaces: std::collections::HashSet::new(),
            methods: std::collections::HashSet::new(),
        };
        let has_ip = body.contains("BranchPath");
        parts.push(
            crate::types::checker::proof_trust_header::generate_commented("-- ", &empty, has_ip),
        );
    }

    for record in crate::codegen::builtin_records::needed_records(body, include_all_helpers) {
        parts.push(crate::codegen::builtin_records::render_lean(record));
    }

    for helper in crate::codegen::builtin_helpers::needed_helpers(body, include_all_helpers) {
        let rendered: Vec<String> = match helper.key {
            "BranchPath" => vec![LEAN_PRELUDE_BRANCH_PATH.to_string()],
            "AverList" => vec![LEAN_PRELUDE_AVER_LIST.to_string()],
            "StringHelpers" => vec![LEAN_PRELUDE_STRING_HELPERS.to_string()],
            "NumericParse" => vec![LEAN_PRELUDE_NUMERIC_PARSE.to_string()],
            "CharByte" => vec![LEAN_PRELUDE_CHAR_BYTE.to_string()],
            "AverMeasure" => vec![LEAN_PRELUDE_AVER_MEASURE.to_string()],
            "AverMap" => vec![generate_map_prelude(body, include_all_helpers)],
            "ProofFuel" => vec![LEAN_PRELUDE_PROOF_FUEL.to_string()],
            "FloatInstances" => vec![
                LEAN_PRELUDE_FLOAT_COE.to_string(),
                LEAN_PRELUDE_FLOAT_DEC_EQ.to_string(),
            ],
            "ExceptInstances" => vec![
                LEAN_PRELUDE_EXCEPT_DEC_EQ.to_string(),
                LEAN_PRELUDE_EXCEPT_NS.to_string(),
                LEAN_PRELUDE_OPTION_TO_EXCEPT.to_string(),
            ],
            "StringHadd" => vec![LEAN_PRELUDE_STRING_HADD.to_string()],
            // These keys need no dedicated prelude snippet in this backend.
            "ResultDatatype" | "OptionDatatype" | "OptionToResult" | "BranchPathDatatype" => {
                Vec::new()
            }
            other => panic!(
                "Lean backend has no implementation for builtin helper key '{}'. \
                 Add a match arm in generate_prelude_for_body or remove the key \
                 from BUILTIN_HELPERS.",
                other
            ),
        };
        parts.extend(rendered);
    }

    parts.join("\n\n")
}
1127
1128fn generate_map_prelude(body: &str, include_all_helpers: bool) -> String {
1129 let mut parts = vec![AVER_MAP_PRELUDE_BASE.to_string()];
1130
1131 let needs_has_set_self = include_all_helpers || body.contains("AverMap.has_set_self");
1132 let needs_get_set_self = include_all_helpers || body.contains("AverMap.get_set_self");
1133 let needs_get_set_other = include_all_helpers
1134 || body.contains("AverMap.get_set_other")
1135 || body.contains("AverMap.has_set_other");
1136 let needs_has_set_other = include_all_helpers || body.contains("AverMap.has_set_other");
1137
1138 if needs_has_set_self {
1139 parts.push(AVER_MAP_PRELUDE_HAS_SET_SELF.to_string());
1140 }
1141 if needs_get_set_self {
1142 parts.push(AVER_MAP_PRELUDE_GET_SET_SELF.to_string());
1143 }
1144 if needs_get_set_other {
1145 parts.push(AVER_MAP_PRELUDE_GET_SET_OTHER.to_string());
1146 }
1147 if needs_has_set_other {
1148 parts.push(AVER_MAP_PRELUDE_HAS_SET_OTHER.to_string());
1149 }
1150
1151 parts.push(AVER_MAP_PRELUDE_END.to_string());
1152 parts.join("\n\n")
1153}
1154
/// Render the project's `lakefile.lean`. The package name is lowercased while
/// the default-target library keeps the original casing; `extra_roots` are
/// listed after the entry root.
fn generate_lakefile_with_roots(project_name: &str, extra_roots: &[String]) -> String {
    let roots_str = std::iter::once(project_name)
        .chain(extra_roots.iter().map(String::as_str))
        .map(|root| format!("`{}", root))
        .collect::<Vec<_>>()
        .join(", ");
    format!(
        r#"import Lake
open Lake DSL

package «{}» where
  version := v!"0.1.0"

@[default_target]
lean_lib «{}» where
  srcDir := "."
  roots := #[{}]
"#,
        project_name.to_lowercase(),
        project_name,
        roots_str
    )
}
1178
/// Pinned Lean toolchain for generated projects (written to `lean-toolchain`).
fn generate_toolchain() -> String {
    String::from("leanprover/lean4:v4.15.0\n")
}
1182
/// Which flavor of Lean output `transpile_unified` produces.
#[derive(Clone, Copy)]
enum LeanEmitMode {
    /// Default emission: no recursion-plan analysis is run.
    Standard,
    /// Proof-oriented emission: recursion plans and type measures are
    /// computed and used where supported.
    Proof,
}
1188
/// Shared implementation behind the public `transpile*` entry points: emits
/// one Lean file per module, the entry file, the shared `AverCommon` prelude,
/// the lakefile, and the toolchain pin, returning them as a `ProjectOutput`.
fn transpile_unified(
    ctx: &CodegenContext,
    verify_mode: VerifyEmitMode,
    emit_mode: LeanEmitMode,
) -> ProjectOutput {
    use crate::codegen::recursion::RecursionPlan;

    let recursive_fns: HashSet<String> = ctx.recursive_fns.clone();
    let recursive_names = recursive_pure_fn_names(ctx);
    let recursive_types = recursive_type_names(ctx);
    // Recursion plans drive proof-mode emission; standard mode skips the
    // analysis entirely and works from an empty plan map.
    let (plans, _proof_issues) = match emit_mode {
        LeanEmitMode::Proof => crate::codegen::recursion::analyze_plans(ctx),
        LeanEmitMode::Standard => (HashMap::<String, RecursionPlan>::new(), Vec::new()),
    };

    // Emit pure functions grouped by mutual-recursion component, routed to
    // their owning scope (module prefix, or "" for the entry file).
    let pure_per_scope = crate::codegen::common::route_pure_components_per_scope(
        ctx,
        toplevel::is_pure_fn,
        |comp| {
            let mut out: Vec<String> = Vec::new();
            if comp.len() > 1 {
                // Mutual group: the proof emitter requires a plan for every
                // member; otherwise fall back to the standard group emitter.
                let code = match emit_mode {
                    LeanEmitMode::Proof => {
                        let all_supported = comp.iter().all(|fd| plans.contains_key(&fd.name));
                        if all_supported {
                            toplevel::emit_mutual_group_proof(comp, ctx, &plans)
                        } else {
                            toplevel::emit_mutual_group(comp, ctx)
                        }
                    }
                    LeanEmitMode::Standard => toplevel::emit_mutual_group(comp, ctx),
                };
                out.push(code);
                out.push(String::new());
            } else if let Some(fd) = comp.first() {
                let emitted = match emit_mode {
                    LeanEmitMode::Proof => {
                        // A recursive function without a plan can't go
                        // through the proof emitter; use the standard one.
                        let is_recursive = recursive_names.contains(&fd.name);
                        if is_recursive && !plans.contains_key(&fd.name) {
                            toplevel::emit_fn_def(fd, &recursive_names, ctx)
                        } else {
                            toplevel::emit_fn_def_proof(fd, plans.get(&fd.name).cloned(), ctx)
                        }
                    }
                    LeanEmitMode::Standard => toplevel::emit_fn_def(fd, &recursive_fns, ctx),
                };
                if let Some(code) = emitted {
                    out.push(code);
                    out.push(String::new());
                }
            }
            out
        },
    );

    // Classified-effect functions are lifted to pure form and emitted in the
    // entry file; the recursive-name set used depends on the emit mode.
    let mut entry_lifted_sections: Vec<String> = Vec::new();
    let lifted_recursive_names = match emit_mode {
        LeanEmitMode::Proof => &recursive_names,
        LeanEmitMode::Standard => &recursive_fns,
    };
    emit_lifted_effectful_functions(ctx, lifted_recursive_names, &mut entry_lifted_sections);

    let mut entry_decision_sections: Vec<String> = Vec::new();
    for item in &ctx.items {
        if let TopLevel::Decision(db) = item {
            entry_decision_sections.push(toplevel::emit_decision(db));
            entry_decision_sections.push(String::new());
        }
    }

    // Verify blocks targeting the same function/law share a counter so case
    // numbering stays global across multiple blocks.
    let mut entry_verify_sections: Vec<String> = Vec::new();
    let mut verify_case_counters: HashMap<String, usize> = HashMap::new();
    for item in &ctx.items {
        if let TopLevel::Verify(vb) = item {
            let key = verify_counter_key(vb);
            let start_idx = *verify_case_counters.get(&key).unwrap_or(&0);
            let (emitted, next_idx) = toplevel::emit_verify_block(vb, ctx, verify_mode, start_idx);
            verify_case_counters.insert(key, next_idx);
            entry_verify_sections.push(emitted);
            entry_verify_sections.push(String::new());
        }
    }

    // `union_body` accumulates the text of every emitted body so the common
    // prelude (and AverMap section) can be trimmed to what is actually used.
    let mut module_files: Vec<(String, String)> = Vec::new();
    let mut union_body = String::new();

    for module in &ctx.modules {
        let mut body_sections: Vec<String> = Vec::new();
        for td in &module.type_defs {
            body_sections.push(toplevel::emit_type_def(td));
            if toplevel::is_recursive_type_def(td) {
                // Recursive datatypes get a generated DecidableEq instance
                // and, in proof mode, a measure when one can be derived.
                body_sections.push(toplevel::emit_recursive_decidable_eq(
                    toplevel::type_def_name(td),
                ));
                if matches!(emit_mode, LeanEmitMode::Proof)
                    && let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types)
                {
                    body_sections.push(measure);
                }
            }
            body_sections.push(String::new());
        }
        if let Some(scope_sections) = pure_per_scope.by_scope.get(&module.prefix) {
            body_sections.extend(scope_sections.clone());
        }
        let body = body_sections.join("\n");
        union_body.push_str(&body);
        union_body.push('\n');

        // Each module file imports the common prelude plus its declared
        // dependencies, opening the dependency namespaces.
        let mut imports = vec!["import AverCommon".to_string()];
        for d in &module.depends {
            imports.push(format!("import {}", d));
        }
        let opens: Vec<String> = module
            .depends
            .iter()
            .map(|d| format!("open {}", d))
            .collect();

        let opens_str = if opens.is_empty() {
            String::new()
        } else {
            format!("\n{}\n", opens.join("\n"))
        };
        let content = format!(
            "{}\n{}\nnamespace {}\n\n{}\nend {}\n",
            imports.join("\n"),
            opens_str,
            module.prefix,
            body,
            module.prefix
        );
        // Dotted module prefixes become nested directories on disk.
        let path = module.prefix.replace('.', "/");
        module_files.push((format!("{}.lean", path), content));
    }

    // Entry file body: entry-scope type defs, pure functions, lifted
    // effectful functions, decisions, then verify blocks — in that order.
    let mut entry_body_sections: Vec<String> = Vec::new();
    for td in &ctx.type_defs {
        entry_body_sections.push(toplevel::emit_type_def(td));
        if toplevel::is_recursive_type_def(td) {
            entry_body_sections.push(toplevel::emit_recursive_decidable_eq(
                toplevel::type_def_name(td),
            ));
            if matches!(emit_mode, LeanEmitMode::Proof)
                && let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types)
            {
                entry_body_sections.push(measure);
            }
        }
        entry_body_sections.push(String::new());
    }
    if let Some(entry_pure) = pure_per_scope.by_scope.get("") {
        entry_body_sections.extend(entry_pure.clone());
    }
    entry_body_sections.extend(entry_lifted_sections);
    entry_body_sections.extend(entry_decision_sections);
    entry_body_sections.extend(entry_verify_sections);

    let entry_body = entry_body_sections.join("\n");
    union_body.push_str(&entry_body);
    union_body.push('\n');

    // Entry file header: import/open every module, then (conditionally) the
    // trust header and oracle subtype block.
    let project_name = lean_project_name(ctx);
    let mut entry_imports = vec!["import AverCommon".to_string()];
    for m in &ctx.modules {
        entry_imports.push(format!("import {}", m.prefix));
    }
    let entry_opens: Vec<String> = ctx
        .modules
        .iter()
        .map(|m| format!("open {}", m.prefix))
        .collect();
    let mut entry_parts = vec![entry_imports.join("\n")];
    if !entry_opens.is_empty() {
        entry_parts.push(entry_opens.join("\n"));
    }
    let declared = crate::codegen::common::collect_declared_effects(ctx);
    let has_ip = union_body.contains("BranchPath");
    let has_classified =
        crate::types::checker::effect_classification::classifications_for_proof_subset()
            .iter()
            .any(|c| declared.includes(c.method));
    if has_ip || has_classified {
        entry_parts.push(
            crate::types::checker::proof_trust_header::generate_commented("-- ", &declared, has_ip),
        );
    }
    // Oracle subtypes are appended to union_body too so the common prelude
    // accounts for any helpers they reference.
    let subtype_block = crate::types::checker::oracle_subtypes::lean_subtypes(&declared);
    if !subtype_block.is_empty() {
        union_body.push_str(&subtype_block);
        union_body.push('\n');
        entry_parts.push(subtype_block);
    }
    entry_parts.push(entry_body);
    let entry_content = entry_parts.join("\n\n");

    // The common prelude is built last, from the union of all emitted text.
    let common_content = build_common_lean(&union_body);

    let mut extra_roots: Vec<String> = vec!["AverCommon".to_string()];
    for m in &ctx.modules {
        extra_roots.push(m.prefix.clone());
    }
    let lakefile = generate_lakefile_with_roots(&project_name, &extra_roots);
    let toolchain = generate_toolchain();

    let mut files = module_files;
    files.push((format!("{}.lean", project_name), entry_content));
    files.push(("AverCommon.lean".to_string(), common_content));
    files.push(("lakefile.lean".to_string(), lakefile));
    files.push(("lean-toolchain".to_string(), toolchain));
    ProjectOutput { files }
}
1434
1435fn build_common_lean(union_body: &str) -> String {
1436 let mut parts = vec![LEAN_PRELUDE_HEADER.to_string()];
1437 for record in crate::codegen::builtin_records::needed_records(union_body, false) {
1438 parts.push(crate::codegen::builtin_records::render_lean(record));
1439 }
1440 for helper in crate::codegen::builtin_helpers::needed_helpers(union_body, false) {
1441 match helper.key {
1442 "BranchPath" => parts.push(LEAN_PRELUDE_BRANCH_PATH.to_string()),
1443 "AverList" => parts.push(LEAN_PRELUDE_AVER_LIST.to_string()),
1444 "StringHelpers" => parts.push(LEAN_PRELUDE_STRING_HELPERS.to_string()),
1445 "NumericParse" => parts.push(LEAN_PRELUDE_NUMERIC_PARSE.to_string()),
1446 "CharByte" => parts.push(LEAN_PRELUDE_CHAR_BYTE.to_string()),
1447 "AverMeasure" => parts.push(LEAN_PRELUDE_AVER_MEASURE.to_string()),
1448 "AverMap" => parts.push(generate_map_prelude(union_body, false)),
1449 "ProofFuel" => parts.push(LEAN_PRELUDE_PROOF_FUEL.to_string()),
1450 "FloatInstances" => parts.extend([
1451 LEAN_PRELUDE_FLOAT_COE.to_string(),
1452 LEAN_PRELUDE_FLOAT_DEC_EQ.to_string(),
1453 ]),
1454 "ExceptInstances" => parts.extend([
1455 LEAN_PRELUDE_EXCEPT_DEC_EQ.to_string(),
1456 LEAN_PRELUDE_EXCEPT_NS.to_string(),
1457 LEAN_PRELUDE_OPTION_TO_EXCEPT.to_string(),
1458 ]),
1459 "StringHadd" => parts.push(LEAN_PRELUDE_STRING_HADD.to_string()),
1460 "ResultDatatype" | "OptionDatatype" | "OptionToResult" | "BranchPathDatatype" => {}
1461 other => panic!(
1462 "Lean backend has no implementation for builtin helper key '{}'. \
1463 Add a match arm in build_common_lean or remove the key from BUILTIN_HELPERS.",
1464 other
1465 ),
1466 }
1467 }
1468 parts.join("\n\n")
1469}
1470
1471#[cfg(test)]
1472mod tests {
1473 use super::{
1474 VerifyEmitMode, generate_prelude, proof_mode_issues, recurrence, transpile,
1475 transpile_for_proof_mode, transpile_with_verify_mode,
1476 };
1477 use crate::ast::{
1478 BinOp, Expr, FnBody, FnDef, Literal, MatchArm, Pattern, Spanned, Stmt, TailCallData,
1479 TopLevel, TypeDef, TypeVariant, VerifyBlock, VerifyGiven, VerifyGivenDomain, VerifyKind,
1480 VerifyLaw,
1481 };
1482
1483 fn sb(e: Expr) -> Spanned<Expr> {
1485 Spanned::bare(e)
1486 }
1487 fn sbb(e: Expr) -> Box<Spanned<Expr>> {
1489 Box::new(Spanned::bare(e))
1490 }
1491 use crate::codegen::{CodegenContext, build_context};
1492 use crate::source::parse_source;
1493
1494 use std::collections::{HashMap, HashSet};
1495 use std::sync::Arc as Rc;
1496
1497 fn empty_ctx() -> CodegenContext {
1498 CodegenContext {
1499 items: vec![],
1500 fn_sigs: HashMap::new(),
1501 memo_fns: HashSet::new(),
1502 memo_safe_types: HashSet::new(),
1503 type_defs: vec![],
1504 fn_defs: vec![],
1505 project_name: "verify_mode".to_string(),
1506 modules: vec![],
1507 module_prefixes: HashSet::new(),
1508 policy: None,
1509 emit_replay_runtime: false,
1510 runtime_policy_from_env: false,
1511 guest_entry: None,
1512 emit_self_host_support: false,
1513 extra_fn_defs: Vec::new(),
1514 mutual_tco_members: HashSet::new(),
1515 recursive_fns: HashSet::new(),
1516 fn_analyses: HashMap::new(),
1517 buffer_build_sinks: HashMap::new(),
1518 buffer_fusion_sites: Vec::new(),
1519 synthesized_buffered_fns: Vec::new(),
1520 }
1521 }
1522
1523 fn ctx_from_source(source: &str, project_name: &str) -> CodegenContext {
1524 let mut items = parse_source(source).expect("source should parse");
1525 crate::ir::pipeline::tco(&mut items);
1526 let tc = crate::ir::pipeline::typecheck(
1527 &items,
1528 &crate::ir::TypecheckMode::Full { base_dir: None },
1529 );
1530 assert!(
1531 tc.errors.is_empty(),
1532 "source should typecheck without errors: {:?}",
1533 tc.errors
1534 );
1535 build_context(
1536 items,
1537 &tc,
1538 None,
1539 HashSet::new(),
1540 project_name.to_string(),
1541 vec![],
1542 )
1543 }
1544
1545 fn generated_lean_file(out: &crate::codegen::ProjectOutput) -> String {
1553 out.files
1554 .iter()
1555 .filter_map(|(name, content)| {
1556 (name.ends_with(".lean") && name != "lakefile.lean").then_some(content.as_str())
1557 })
1558 .collect::<Vec<&str>>()
1559 .join("\n")
1560 }
1561
1562 fn empty_ctx_with_verify_case() -> CodegenContext {
1563 let mut ctx = empty_ctx();
1564 ctx.items.push(TopLevel::Verify(VerifyBlock {
1565 fn_name: "f".to_string(),
1566 line: 1,
1567 cases: vec![(
1568 sb(Expr::Literal(Literal::Int(1))),
1569 sb(Expr::Literal(Literal::Int(1))),
1570 )],
1571 case_spans: vec![],
1572 case_givens: vec![],
1573 case_hostile_origins: vec![],
1574 case_hostile_profiles: vec![],
1575 kind: VerifyKind::Cases,
1576 trace: false,
1577 cases_givens: vec![],
1578 }));
1579 ctx
1580 }
1581
1582 fn empty_ctx_with_two_verify_blocks_same_fn() -> CodegenContext {
1583 let mut ctx = empty_ctx();
1584 ctx.items.push(TopLevel::Verify(VerifyBlock {
1585 fn_name: "f".to_string(),
1586 line: 1,
1587 cases: vec![(
1588 sb(Expr::Literal(Literal::Int(1))),
1589 sb(Expr::Literal(Literal::Int(1))),
1590 )],
1591 case_spans: vec![],
1592 case_givens: vec![],
1593 case_hostile_origins: vec![],
1594 case_hostile_profiles: vec![],
1595 kind: VerifyKind::Cases,
1596 trace: false,
1597 cases_givens: vec![],
1598 }));
1599 ctx.items.push(TopLevel::Verify(VerifyBlock {
1600 fn_name: "f".to_string(),
1601 line: 2,
1602 cases: vec![(
1603 sb(Expr::Literal(Literal::Int(2))),
1604 sb(Expr::Literal(Literal::Int(2))),
1605 )],
1606 case_spans: vec![],
1607 case_givens: vec![],
1608 case_hostile_origins: vec![],
1609 case_hostile_profiles: vec![],
1610 kind: VerifyKind::Cases,
1611 trace: false,
1612 cases_givens: vec![],
1613 }));
1614 ctx
1615 }
1616
1617 fn empty_ctx_with_verify_law() -> CodegenContext {
1618 let mut ctx = empty_ctx();
1619 let add = FnDef {
1620 name: "add".to_string(),
1621 line: 1,
1622 params: vec![
1623 ("a".to_string(), "Int".to_string()),
1624 ("b".to_string(), "Int".to_string()),
1625 ],
1626 return_type: "Int".to_string(),
1627 effects: vec![],
1628 desc: None,
1629 body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
1630 BinOp::Add,
1631 sbb(Expr::Ident("a".to_string())),
1632 sbb(Expr::Ident("b".to_string())),
1633 )))),
1634 resolution: None,
1635 };
1636 ctx.fn_defs.push(add.clone());
1637 ctx.items.push(TopLevel::FnDef(add));
1638 ctx.items.push(TopLevel::Verify(VerifyBlock {
1639 fn_name: "add".to_string(),
1640 line: 1,
1641 cases: vec![
1642 (
1643 sb(Expr::FnCall(
1644 sbb(Expr::Ident("add".to_string())),
1645 vec![
1646 sb(Expr::Literal(Literal::Int(1))),
1647 sb(Expr::Literal(Literal::Int(2))),
1648 ],
1649 )),
1650 sb(Expr::FnCall(
1651 sbb(Expr::Ident("add".to_string())),
1652 vec![
1653 sb(Expr::Literal(Literal::Int(2))),
1654 sb(Expr::Literal(Literal::Int(1))),
1655 ],
1656 )),
1657 ),
1658 (
1659 sb(Expr::FnCall(
1660 sbb(Expr::Ident("add".to_string())),
1661 vec![
1662 sb(Expr::Literal(Literal::Int(2))),
1663 sb(Expr::Literal(Literal::Int(3))),
1664 ],
1665 )),
1666 sb(Expr::FnCall(
1667 sbb(Expr::Ident("add".to_string())),
1668 vec![
1669 sb(Expr::Literal(Literal::Int(3))),
1670 sb(Expr::Literal(Literal::Int(2))),
1671 ],
1672 )),
1673 ),
1674 ],
1675 case_spans: vec![],
1676 case_givens: vec![],
1677 case_hostile_origins: vec![],
1678 case_hostile_profiles: vec![],
1679 kind: VerifyKind::Law(Box::new(VerifyLaw {
1680 name: "commutative".to_string(),
1681 givens: vec![
1682 VerifyGiven {
1683 name: "a".to_string(),
1684 type_name: "Int".to_string(),
1685 domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
1686 },
1687 VerifyGiven {
1688 name: "b".to_string(),
1689 type_name: "Int".to_string(),
1690 domain: VerifyGivenDomain::Explicit(vec![
1691 sb(Expr::Literal(Literal::Int(2))),
1692 sb(Expr::Literal(Literal::Int(3))),
1693 ]),
1694 },
1695 ],
1696 when: None,
1697 lhs: sb(Expr::FnCall(
1698 sbb(Expr::Ident("add".to_string())),
1699 vec![
1700 sb(Expr::Ident("a".to_string())),
1701 sb(Expr::Ident("b".to_string())),
1702 ],
1703 )),
1704 rhs: sb(Expr::FnCall(
1705 sbb(Expr::Ident("add".to_string())),
1706 vec![
1707 sb(Expr::Ident("b".to_string())),
1708 sb(Expr::Ident("a".to_string())),
1709 ],
1710 )),
1711 sample_guards: vec![],
1712 })),
1713 trace: false,
1714 cases_givens: vec![],
1715 }));
1716 ctx
1717 }
1718
1719 #[test]
1720 fn prelude_normalizes_float_string_format() {
1721 let prelude = generate_prelude();
1722 assert!(
1723 prelude.contains("private def normalizeFloatString (s : String) : String :="),
1724 "missing normalizeFloatString helper in prelude"
1725 );
1726 assert!(
1727 prelude.contains(
1728 "def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)"
1729 ),
1730 "String.fromFloat should normalize Lean float formatting"
1731 );
1732 }
1733
1734 #[test]
1735 fn prelude_validates_char_from_code_unicode_bounds() {
1736 let prelude = generate_prelude();
1737 assert!(
1738 prelude.contains("if n < 0 || n > 1114111 then none"),
1739 "Char.fromCode should reject code points above Unicode max"
1740 );
1741 assert!(
1742 prelude.contains("else if n >= 55296 && n <= 57343 then none"),
1743 "Char.fromCode should reject surrogate code points"
1744 );
1745 }
1746
1747 #[test]
1748 fn prelude_includes_map_set_helper_lemmas() {
1749 let prelude = generate_prelude();
1750 assert!(
1751 prelude.contains("theorem has_set_self [DecidableEq α]"),
1752 "missing AverMap.has_set_self helper theorem"
1753 );
1754 assert!(
1755 prelude.contains("theorem get_set_self [DecidableEq α]"),
1756 "missing AverMap.get_set_self helper theorem"
1757 );
1758 }
1759
1760 #[test]
1761 fn lean_output_without_map_usage_omits_map_prelude() {
1762 let mut ctx = ctx_from_source(
1763 r#"
1764module NoMap
1765 intent = "Simple pure program without maps."
1766
1767fn addOne(n: Int) -> Int
1768 n + 1
1769
1770verify addOne
1771 addOne(1) => 2
1772"#,
1773 "nomap",
1774 );
1775 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
1776 let lean = generated_lean_file(&out);
1777
1778 assert!(
1779 !lean.contains("namespace AverMap"),
1780 "did not expect AverMap prelude in program without map usage:\n{}",
1781 lean
1782 );
1783 }
1784
1785 #[test]
1786 fn transpile_emits_native_decide_for_verify_by_default() {
1787 let mut ctx = empty_ctx_with_verify_case();
1788 let out = transpile(&mut ctx);
1789 let lean = out
1790 .files
1791 .iter()
1792 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
1793 .expect("expected generated Lean file");
1794 assert!(lean.contains("example : 1 = 1 := by native_decide"));
1795 }
1796
1797 #[test]
1798 fn transpile_can_emit_sorry_for_verify_when_requested() {
1799 let mut ctx = empty_ctx_with_verify_case();
1800 let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::Sorry);
1801 let lean = out
1802 .files
1803 .iter()
1804 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
1805 .expect("expected generated Lean file");
1806 assert!(lean.contains("example : 1 = 1 := by sorry"));
1807 }
1808
1809 #[test]
1810 fn transpile_can_emit_theorem_skeletons_for_verify() {
1811 let mut ctx = empty_ctx_with_verify_case();
1812 let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
1813 let lean = out
1814 .files
1815 .iter()
1816 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
1817 .expect("expected generated Lean file");
1818 assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
1819 assert!(lean.contains(" sorry"));
1820 }
1821
1822 #[test]
1823 fn theorem_skeleton_numbering_is_global_per_function_across_verify_blocks() {
1824 let mut ctx = empty_ctx_with_two_verify_blocks_same_fn();
1825 let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
1826 let lean = out
1827 .files
1828 .iter()
1829 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
1830 .expect("expected generated Lean file");
1831 assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
1832 assert!(lean.contains("theorem f_verify_2 : 2 = 2 := by"));
1833 }
1834
1835 #[test]
1836 fn transpile_emits_named_theorems_for_verify_law() {
1837 let mut ctx = empty_ctx_with_verify_law();
1838 let out = transpile(&mut ctx);
1839 let lean = out
1840 .files
1841 .iter()
1842 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
1843 .expect("expected generated Lean file");
1844 assert!(lean.contains("-- verify law add.commutative (2 cases)"));
1845 assert!(lean.contains("-- given a: Int = 1..2"));
1846 assert!(lean.contains("-- given b: Int = [2, 3]"));
1847 assert!(lean.contains(
1848 "theorem add_law_commutative : ∀ (a : Int) (b : Int), add a b = add b a := by"
1849 ));
1850 assert!(lean.contains(" intro a b"));
1851 assert!(lean.contains(" simp [add, Int.add_comm]"));
1852 assert!(lean.contains(
1853 "theorem add_law_commutative_sample_1 : add 1 2 = add 2 1 := by native_decide"
1854 ));
1855 assert!(lean.contains(
1856 "theorem add_law_commutative_sample_2 : add 2 3 = add 3 2 := by native_decide"
1857 ));
1858 }
1859
1860 #[test]
1861 fn generate_prelude_emits_int_roundtrip_theorem() {
1862 let lean = generate_prelude();
1863 assert!(lean.contains(
1864 "theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n"
1865 ));
1866 assert!(lean.contains("theorem String.intercalate_empty_chars (s : String) :"));
1867 assert!(lean.contains("def splitOnCharGo"));
1868 assert!(lean.contains("theorem split_single_char_append"));
1869 assert!(lean.contains("theorem split_intercalate_trailing_single_char"));
1870 assert!(lean.contains("namespace AverDigits"));
1871 assert!(lean.contains("theorem String.charAt_length_none (s : String)"));
1872 assert!(lean.contains("theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->"));
1873 }
1874
1875 #[test]
1876 fn transpile_emits_guarded_theorems_for_verify_law_when_clause() {
1877 let mut ctx = ctx_from_source(
1878 r#"
1879module GuardedLaw
1880 intent =
1881 "verify law with precondition"
1882
1883fn pickGreater(a: Int, b: Int) -> Int
1884 match a > b
1885 true -> a
1886 false -> b
1887
1888verify pickGreater law ordered
1889 given a: Int = [1, 2]
1890 given b: Int = [1, 2]
1891 when a > b
1892 pickGreater(a, b) => a
1893"#,
1894 "guarded_law",
1895 );
1896 let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
1897 let lean = generated_lean_file(&out);
1898
1899 assert!(lean.contains("-- when (a > b)"));
1900 assert!(lean.contains(
1901 "theorem pickGreater_law_ordered : ∀ (a : Int) (b : Int), a = 1 ∨ a = 2 -> b = 1 ∨ b = 2 -> (a > b) = true -> pickGreater a b = a := by"
1902 ));
1903 assert!(lean.contains(
1904 "theorem pickGreater_law_ordered_sample_1 : (1 > 1) = true -> pickGreater 1 1 = 1 := by"
1905 ));
1906 assert!(lean.contains(
1907 "theorem pickGreater_law_ordered_sample_4 : (2 > 2) = true -> pickGreater 2 2 = 2 := by"
1908 ));
1909 }
1910
1911 #[test]
1912 fn transpile_uses_spec_theorem_names_for_declared_spec_laws() {
1913 let mut ctx = ctx_from_source(
1914 r#"
1915module SpecDemo
1916 intent =
1917 "spec demo"
1918
1919fn absVal(x: Int) -> Int
1920 match x < 0
1921 true -> 0 - x
1922 false -> x
1923
1924fn absValSpec(x: Int) -> Int
1925 match x < 0
1926 true -> 0 - x
1927 false -> x
1928
1929verify absVal law absValSpec
1930 given x: Int = [-2, -1, 0, 1, 2]
1931 absVal(x) => absValSpec(x)
1932"#,
1933 "spec_demo",
1934 );
1935 let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
1936 let lean = generated_lean_file(&out);
1937
1938 assert!(lean.contains("-- verify law absVal.spec absValSpec (5 cases)"));
1939 assert!(
1940 lean.contains(
1941 "theorem absVal_eq_absValSpec : ∀ (x : Int), absVal x = absValSpec x := by"
1942 )
1943 );
1944 assert!(lean.contains("theorem absVal_eq_absValSpec_checked_domain :"));
1945 assert!(lean.contains("theorem absVal_eq_absValSpec_sample_1 :"));
1946 assert!(!lean.contains("theorem absVal_law_absValSpec :"));
1947 }
1948
1949 #[test]
1950 fn transpile_keeps_noncanonical_spec_laws_as_regular_law_names() {
1951 let mut ctx = ctx_from_source(
1952 r#"
1953module SpecLawShape
1954 intent =
1955 "shape probe"
1956
1957fn foo(x: Int) -> Int
1958 x + 1
1959
1960fn fooSpec(seed: Int, x: Int) -> Int
1961 x + seed
1962
1963verify foo law fooSpec
1964 given x: Int = [1, 2]
1965 foo(x) => fooSpec(1, x)
1966"#,
1967 "spec_law_shape",
1968 );
1969 let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
1970 let lean = generated_lean_file(&out);
1971
1972 assert!(lean.contains("-- verify law foo.fooSpec (2 cases)"));
1973 assert!(lean.contains("theorem foo_law_fooSpec : ∀ (x : Int), foo x = fooSpec 1 x := by"));
1974 assert!(!lean.contains("theorem foo_eq_fooSpec :"));
1975 }
1976
1977 #[test]
1978 fn transpile_auto_proves_linear_int_canonical_spec_law_in_auto_mode() {
1979 let mut ctx = ctx_from_source(
1980 r#"
1981module SpecGap
1982 intent =
1983 "nontrivial canonical spec law"
1984
1985fn inc(x: Int) -> Int
1986 x + 1
1987
1988fn incSpec(x: Int) -> Int
1989 x + 2 - 1
1990
1991verify inc law incSpec
1992 given x: Int = [0, 1, 2]
1993 inc(x) => incSpec(x)
1994"#,
1995 "spec_gap",
1996 );
1997 let out = transpile(&mut ctx);
1998 let lean = generated_lean_file(&out);
1999
2000 assert!(lean.contains("-- verify law inc.spec incSpec (3 cases)"));
2001 assert!(lean.contains("theorem inc_eq_incSpec : ∀ (x : Int), inc x = incSpec x := by"));
2002 assert!(lean.contains("change (x + 1) = ((x + 2) - 1)"));
2003 assert!(lean.contains("omega"));
2004 assert!(!lean.contains(
2005 "-- universal theorem inc_eq_incSpec omitted: sampled law shape is not auto-proved yet"
2006 ));
2007 assert!(lean.contains("theorem inc_eq_incSpec_checked_domain :"));
2008 }
2009
2010 #[test]
2011 fn transpile_auto_proves_guarded_canonical_spec_law_in_auto_mode() {
2012 let mut ctx = ctx_from_source(
2013 r#"
2014module GuardedSpecGap
2015 intent =
2016 "guarded canonical spec law"
2017
2018fn clampNonNegative(x: Int) -> Int
2019 match x < 0
2020 true -> 0
2021 false -> x
2022
2023fn clampNonNegativeSpec(x: Int) -> Int
2024 match x < 0
2025 true -> 0
2026 false -> x
2027
2028verify clampNonNegative law clampNonNegativeSpec
2029 given x: Int = [-2, -1, 0, 1, 2]
2030 when x >= 0
2031 clampNonNegative(x) => clampNonNegativeSpec(x)
2032"#,
2033 "guarded_spec_gap",
2034 );
2035 let out = transpile(&mut ctx);
2036 let lean = generated_lean_file(&out);
2037
2038 assert!(lean.contains("-- when (x >= 0)"));
2039 assert!(lean.contains(
2040 "theorem clampNonNegative_eq_clampNonNegativeSpec : ∀ (x : Int), x = (-2) ∨ x = (-1) ∨ x = 0 ∨ x = 1 ∨ x = 2 -> (x >= 0) = true -> clampNonNegative x = clampNonNegativeSpec x := by"
2041 ));
2042 assert!(lean.contains("intro x h_x h_when"));
2043 assert!(lean.contains("simpa [clampNonNegative, clampNonNegativeSpec]"));
2044 assert!(!lean.contains(
2045 "-- universal theorem clampNonNegative_eq_clampNonNegativeSpec omitted: sampled law shape is not auto-proved yet"
2046 ));
2047 assert!(!lean.contains("cases h_x"));
2048 }
2049
2050 #[test]
2051 fn transpile_auto_proves_simp_normalized_canonical_spec_law_in_auto_mode() {
2052 let mut ctx = ctx_from_source(
2053 r#"
2054module SpecGapNonlinear
2055 intent =
2056 "nonlinear canonical spec law"
2057
2058fn square(x: Int) -> Int
2059 x * x
2060
2061fn squareSpec(x: Int) -> Int
2062 x * x + 0
2063
2064verify square law squareSpec
2065 given x: Int = [0, 1, 2]
2066 square(x) => squareSpec(x)
2067"#,
2068 "spec_gap_nonlinear",
2069 );
2070 let out = transpile(&mut ctx);
2071 let lean = generated_lean_file(&out);
2072
2073 assert!(lean.contains("-- verify law square.spec squareSpec (3 cases)"));
2074 assert!(
2075 lean.contains(
2076 "theorem square_eq_squareSpec : ∀ (x : Int), square x = squareSpec x := by"
2077 )
2078 );
2079 assert!(lean.contains("simp [square, squareSpec]"));
2080 assert!(!lean.contains(
2081 "-- universal theorem square_eq_squareSpec omitted: sampled law shape is not auto-proved yet"
2082 ));
2083 assert!(lean.contains("theorem square_eq_squareSpec_checked_domain :"));
2084 assert!(lean.contains("theorem square_eq_squareSpec_sample_1 :"));
2085 }
2086
2087 #[test]
2088 fn transpile_auto_proves_reflexive_law_with_rfl() {
2089 let mut ctx = empty_ctx();
2090 ctx.items.push(TopLevel::Verify(VerifyBlock {
2091 fn_name: "idLaw".to_string(),
2092 line: 1,
2093 cases: vec![(
2094 sb(Expr::Literal(Literal::Int(1))),
2095 sb(Expr::Literal(Literal::Int(1))),
2096 )],
2097 case_spans: vec![],
2098 case_givens: vec![],
2099 case_hostile_origins: vec![],
2100 case_hostile_profiles: vec![],
2101 kind: VerifyKind::Law(Box::new(VerifyLaw {
2102 name: "reflexive".to_string(),
2103 givens: vec![VerifyGiven {
2104 name: "x".to_string(),
2105 type_name: "Int".to_string(),
2106 domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
2107 }],
2108 when: None,
2109 lhs: sb(Expr::Ident("x".to_string())),
2110 rhs: sb(Expr::Ident("x".to_string())),
2111 sample_guards: vec![],
2112 })),
2113 trace: false,
2114 cases_givens: vec![],
2115 }));
2116 let out = transpile(&mut ctx);
2117 let lean = out
2118 .files
2119 .iter()
2120 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2121 .expect("expected generated Lean file");
2122 assert!(lean.contains("theorem idLaw_law_reflexive : ∀ (x : Int), x = x := by"));
2123 assert!(lean.contains(" intro x"));
2124 assert!(lean.contains(" rfl"));
2125 }
2126
2127 #[test]
2128 fn transpile_auto_proves_identity_law_for_int_add_wrapper() {
2129 let mut ctx = empty_ctx_with_verify_law();
2130 ctx.items.push(TopLevel::Verify(VerifyBlock {
2131 fn_name: "add".to_string(),
2132 line: 10,
2133 cases: vec![(
2134 sb(Expr::FnCall(
2135 sbb(Expr::Ident("add".to_string())),
2136 vec![
2137 sb(Expr::Literal(Literal::Int(1))),
2138 sb(Expr::Literal(Literal::Int(0))),
2139 ],
2140 )),
2141 sb(Expr::Literal(Literal::Int(1))),
2142 )],
2143 case_spans: vec![],
2144 case_givens: vec![],
2145 case_hostile_origins: vec![],
2146 case_hostile_profiles: vec![],
2147 kind: VerifyKind::Law(Box::new(VerifyLaw {
2148 name: "identityZero".to_string(),
2149 givens: vec![VerifyGiven {
2150 name: "a".to_string(),
2151 type_name: "Int".to_string(),
2152 domain: VerifyGivenDomain::Explicit(vec![
2153 sb(Expr::Literal(Literal::Int(0))),
2154 sb(Expr::Literal(Literal::Int(1))),
2155 ]),
2156 }],
2157 when: None,
2158 lhs: sb(Expr::FnCall(
2159 sbb(Expr::Ident("add".to_string())),
2160 vec![
2161 sb(Expr::Ident("a".to_string())),
2162 sb(Expr::Literal(Literal::Int(0))),
2163 ],
2164 )),
2165 rhs: sb(Expr::Ident("a".to_string())),
2166 sample_guards: vec![],
2167 })),
2168 trace: false,
2169 cases_givens: vec![],
2170 }));
2171 let out = transpile(&mut ctx);
2172 let lean = out
2173 .files
2174 .iter()
2175 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2176 .expect("expected generated Lean file");
2177 assert!(lean.contains("theorem add_law_identityZero : ∀ (a : Int), add a 0 = a := by"));
2178 assert!(lean.contains(" intro a"));
2179 assert!(lean.contains(" simp [add]"));
2180 }
2181
2182 #[test]
2183 fn transpile_auto_proves_associative_law_for_int_add_wrapper() {
2184 let mut ctx = empty_ctx_with_verify_law();
2185 ctx.items.push(TopLevel::Verify(VerifyBlock {
2186 fn_name: "add".to_string(),
2187 line: 20,
2188 cases: vec![(
2189 sb(Expr::FnCall(
2190 sbb(Expr::Ident("add".to_string())),
2191 vec![
2192 sb(Expr::FnCall(
2193 sbb(Expr::Ident("add".to_string())),
2194 vec![
2195 sb(Expr::Literal(Literal::Int(1))),
2196 sb(Expr::Literal(Literal::Int(2))),
2197 ],
2198 )),
2199 sb(Expr::Literal(Literal::Int(3))),
2200 ],
2201 )),
2202 sb(Expr::FnCall(
2203 sbb(Expr::Ident("add".to_string())),
2204 vec![
2205 sb(Expr::Literal(Literal::Int(1))),
2206 sb(Expr::FnCall(
2207 sbb(Expr::Ident("add".to_string())),
2208 vec![
2209 sb(Expr::Literal(Literal::Int(2))),
2210 sb(Expr::Literal(Literal::Int(3))),
2211 ],
2212 )),
2213 ],
2214 )),
2215 )],
2216 case_spans: vec![],
2217 case_givens: vec![],
2218 case_hostile_origins: vec![],
2219 case_hostile_profiles: vec![],
2220 kind: VerifyKind::Law(Box::new(VerifyLaw {
2221 name: "associative".to_string(),
2222 givens: vec![
2223 VerifyGiven {
2224 name: "a".to_string(),
2225 type_name: "Int".to_string(),
2226 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
2227 1,
2228 )))]),
2229 },
2230 VerifyGiven {
2231 name: "b".to_string(),
2232 type_name: "Int".to_string(),
2233 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
2234 2,
2235 )))]),
2236 },
2237 VerifyGiven {
2238 name: "c".to_string(),
2239 type_name: "Int".to_string(),
2240 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
2241 3,
2242 )))]),
2243 },
2244 ],
2245 when: None,
2246 lhs: sb(Expr::FnCall(
2247 sbb(Expr::Ident("add".to_string())),
2248 vec![
2249 sb(Expr::FnCall(
2250 sbb(Expr::Ident("add".to_string())),
2251 vec![
2252 sb(Expr::Ident("a".to_string())),
2253 sb(Expr::Ident("b".to_string())),
2254 ],
2255 )),
2256 sb(Expr::Ident("c".to_string())),
2257 ],
2258 )),
2259 rhs: sb(Expr::FnCall(
2260 sbb(Expr::Ident("add".to_string())),
2261 vec![
2262 sb(Expr::Ident("a".to_string())),
2263 sb(Expr::FnCall(
2264 sbb(Expr::Ident("add".to_string())),
2265 vec![
2266 sb(Expr::Ident("b".to_string())),
2267 sb(Expr::Ident("c".to_string())),
2268 ],
2269 )),
2270 ],
2271 )),
2272 sample_guards: vec![],
2273 })),
2274 trace: false,
2275 cases_givens: vec![],
2276 }));
2277 let out = transpile(&mut ctx);
2278 let lean = out
2279 .files
2280 .iter()
2281 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2282 .expect("expected generated Lean file");
2283 assert!(lean.contains(
2284 "theorem add_law_associative : ∀ (a : Int) (b : Int) (c : Int), add (add a b) c = add a (add b c) := by"
2285 ));
2286 assert!(lean.contains(" intro a b c"));
2287 assert!(lean.contains(" simp [add, Int.add_assoc]"));
2288 }
2289
2290 #[test]
2291 fn transpile_auto_proves_sub_laws() {
2292 let mut ctx = empty_ctx();
2293 let sub = FnDef {
2294 name: "sub".to_string(),
2295 line: 1,
2296 params: vec![
2297 ("a".to_string(), "Int".to_string()),
2298 ("b".to_string(), "Int".to_string()),
2299 ],
2300 return_type: "Int".to_string(),
2301 effects: vec![],
2302 desc: None,
2303 body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
2304 BinOp::Sub,
2305 sbb(Expr::Ident("a".to_string())),
2306 sbb(Expr::Ident("b".to_string())),
2307 )))),
2308 resolution: None,
2309 };
2310 ctx.fn_defs.push(sub.clone());
2311 ctx.items.push(TopLevel::FnDef(sub));
2312
2313 ctx.items.push(TopLevel::Verify(VerifyBlock {
2314 fn_name: "sub".to_string(),
2315 line: 10,
2316 cases: vec![(
2317 sb(Expr::FnCall(
2318 sbb(Expr::Ident("sub".to_string())),
2319 vec![
2320 sb(Expr::Literal(Literal::Int(2))),
2321 sb(Expr::Literal(Literal::Int(0))),
2322 ],
2323 )),
2324 sb(Expr::Literal(Literal::Int(2))),
2325 )],
2326 case_spans: vec![],
2327 case_givens: vec![],
2328 case_hostile_origins: vec![],
2329 case_hostile_profiles: vec![],
2330 kind: VerifyKind::Law(Box::new(VerifyLaw {
2331 name: "rightIdentity".to_string(),
2332 givens: vec![VerifyGiven {
2333 name: "a".to_string(),
2334 type_name: "Int".to_string(),
2335 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
2336 }],
2337 when: None,
2338 lhs: sb(Expr::FnCall(
2339 sbb(Expr::Ident("sub".to_string())),
2340 vec![
2341 sb(Expr::Ident("a".to_string())),
2342 sb(Expr::Literal(Literal::Int(0))),
2343 ],
2344 )),
2345 rhs: sb(Expr::Ident("a".to_string())),
2346 sample_guards: vec![],
2347 })),
2348 trace: false,
2349 cases_givens: vec![],
2350 }));
2351 ctx.items.push(TopLevel::Verify(VerifyBlock {
2352 fn_name: "sub".to_string(),
2353 line: 20,
2354 cases: vec![(
2355 sb(Expr::FnCall(
2356 sbb(Expr::Ident("sub".to_string())),
2357 vec![
2358 sb(Expr::Literal(Literal::Int(2))),
2359 sb(Expr::Literal(Literal::Int(1))),
2360 ],
2361 )),
2362 sb(Expr::BinOp(
2363 BinOp::Sub,
2364 sbb(Expr::Literal(Literal::Int(0))),
2365 sbb(Expr::FnCall(
2366 sbb(Expr::Ident("sub".to_string())),
2367 vec![
2368 sb(Expr::Literal(Literal::Int(1))),
2369 sb(Expr::Literal(Literal::Int(2))),
2370 ],
2371 )),
2372 )),
2373 )],
2374 case_spans: vec![],
2375 case_givens: vec![],
2376 case_hostile_origins: vec![],
2377 case_hostile_profiles: vec![],
2378 kind: VerifyKind::Law(Box::new(VerifyLaw {
2379 name: "antiCommutative".to_string(),
2380 givens: vec![
2381 VerifyGiven {
2382 name: "a".to_string(),
2383 type_name: "Int".to_string(),
2384 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
2385 2,
2386 )))]),
2387 },
2388 VerifyGiven {
2389 name: "b".to_string(),
2390 type_name: "Int".to_string(),
2391 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
2392 1,
2393 )))]),
2394 },
2395 ],
2396 when: None,
2397 lhs: sb(Expr::FnCall(
2398 sbb(Expr::Ident("sub".to_string())),
2399 vec![
2400 sb(Expr::Ident("a".to_string())),
2401 sb(Expr::Ident("b".to_string())),
2402 ],
2403 )),
2404 rhs: sb(Expr::BinOp(
2405 BinOp::Sub,
2406 sbb(Expr::Literal(Literal::Int(0))),
2407 sbb(Expr::FnCall(
2408 sbb(Expr::Ident("sub".to_string())),
2409 vec![
2410 sb(Expr::Ident("b".to_string())),
2411 sb(Expr::Ident("a".to_string())),
2412 ],
2413 )),
2414 )),
2415 sample_guards: vec![],
2416 })),
2417 trace: false,
2418 cases_givens: vec![],
2419 }));
2420
2421 let out = transpile(&mut ctx);
2422 let lean = out
2423 .files
2424 .iter()
2425 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2426 .expect("expected generated Lean file");
2427 assert!(lean.contains("theorem sub_law_rightIdentity : ∀ (a : Int), sub a 0 = a := by"));
2428 assert!(lean.contains(" simp [sub]"));
2429 assert!(lean.contains(
2430 "theorem sub_law_antiCommutative : ∀ (a : Int) (b : Int), sub a b = (-sub b a) := by"
2431 ));
2432 assert!(lean.contains(" simpa [sub] using (Int.neg_sub b a).symm"));
2433 }
2434
2435 #[test]
2436 fn transpile_auto_proves_unary_wrapper_equivalence_law() {
2437 let mut ctx = empty_ctx();
2438 let add = FnDef {
2439 name: "add".to_string(),
2440 line: 1,
2441 params: vec![
2442 ("a".to_string(), "Int".to_string()),
2443 ("b".to_string(), "Int".to_string()),
2444 ],
2445 return_type: "Int".to_string(),
2446 effects: vec![],
2447 desc: None,
2448 body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
2449 BinOp::Add,
2450 sbb(Expr::Ident("a".to_string())),
2451 sbb(Expr::Ident("b".to_string())),
2452 )))),
2453 resolution: None,
2454 };
2455 let add_one = FnDef {
2456 name: "addOne".to_string(),
2457 line: 2,
2458 params: vec![("n".to_string(), "Int".to_string())],
2459 return_type: "Int".to_string(),
2460 effects: vec![],
2461 desc: None,
2462 body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
2463 BinOp::Add,
2464 sbb(Expr::Ident("n".to_string())),
2465 sbb(Expr::Literal(Literal::Int(1))),
2466 )))),
2467 resolution: None,
2468 };
2469 ctx.fn_defs.push(add.clone());
2470 ctx.fn_defs.push(add_one.clone());
2471 ctx.items.push(TopLevel::FnDef(add));
2472 ctx.items.push(TopLevel::FnDef(add_one));
2473 ctx.items.push(TopLevel::Verify(VerifyBlock {
2474 fn_name: "addOne".to_string(),
2475 line: 3,
2476 cases: vec![(
2477 sb(Expr::FnCall(
2478 sbb(Expr::Ident("addOne".to_string())),
2479 vec![sb(Expr::Literal(Literal::Int(2)))],
2480 )),
2481 sb(Expr::FnCall(
2482 sbb(Expr::Ident("add".to_string())),
2483 vec![
2484 sb(Expr::Literal(Literal::Int(2))),
2485 sb(Expr::Literal(Literal::Int(1))),
2486 ],
2487 )),
2488 )],
2489 case_spans: vec![],
2490 case_givens: vec![],
2491 case_hostile_origins: vec![],
2492 case_hostile_profiles: vec![],
2493 kind: VerifyKind::Law(Box::new(VerifyLaw {
2494 name: "identityViaAdd".to_string(),
2495 givens: vec![VerifyGiven {
2496 name: "n".to_string(),
2497 type_name: "Int".to_string(),
2498 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
2499 }],
2500 when: None,
2501 lhs: sb(Expr::FnCall(
2502 sbb(Expr::Ident("addOne".to_string())),
2503 vec![sb(Expr::Ident("n".to_string()))],
2504 )),
2505 rhs: sb(Expr::FnCall(
2506 sbb(Expr::Ident("add".to_string())),
2507 vec![
2508 sb(Expr::Ident("n".to_string())),
2509 sb(Expr::Literal(Literal::Int(1))),
2510 ],
2511 )),
2512 sample_guards: vec![],
2513 })),
2514 trace: false,
2515 cases_givens: vec![],
2516 }));
2517 let out = transpile(&mut ctx);
2518 let lean = out
2519 .files
2520 .iter()
2521 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2522 .expect("expected generated Lean file");
2523 assert!(
2524 lean.contains(
2525 "theorem addOne_law_identityViaAdd : ∀ (n : Int), addOne n = add n 1 := by"
2526 )
2527 );
2528 assert!(lean.contains(" simp [addOne, add]"));
2529 }
2530
2531 #[test]
2532 fn transpile_auto_proves_direct_map_set_laws() {
2533 let mut ctx = empty_ctx();
2534
2535 let map_set = |m: Spanned<Expr>, k: Spanned<Expr>, v: Spanned<Expr>| {
2536 sb(Expr::FnCall(
2537 sbb(Expr::Attr(
2538 sbb(Expr::Ident("Map".to_string())),
2539 "set".to_string(),
2540 )),
2541 vec![m, k, v],
2542 ))
2543 };
2544 let map_has = |m: Spanned<Expr>, k: Spanned<Expr>| {
2545 sb(Expr::FnCall(
2546 sbb(Expr::Attr(
2547 sbb(Expr::Ident("Map".to_string())),
2548 "has".to_string(),
2549 )),
2550 vec![m, k],
2551 ))
2552 };
2553 let map_get = |m: Spanned<Expr>, k: Spanned<Expr>| {
2554 sb(Expr::FnCall(
2555 sbb(Expr::Attr(
2556 sbb(Expr::Ident("Map".to_string())),
2557 "get".to_string(),
2558 )),
2559 vec![m, k],
2560 ))
2561 };
2562 let some = |v: Spanned<Expr>| {
2563 sb(Expr::FnCall(
2564 sbb(Expr::Attr(
2565 sbb(Expr::Ident("Option".to_string())),
2566 "Some".to_string(),
2567 )),
2568 vec![v],
2569 ))
2570 };
2571 let map_empty = || {
2572 sb(Expr::FnCall(
2573 sbb(Expr::Attr(
2574 sbb(Expr::Ident("Map".to_string())),
2575 "empty".to_string(),
2576 )),
2577 vec![],
2578 ))
2579 };
2580
2581 ctx.items.push(TopLevel::Verify(VerifyBlock {
2582 fn_name: "map".to_string(),
2583 line: 1,
2584 cases: vec![(
2585 map_has(
2586 map_set(
2587 sb(Expr::Ident("m".to_string())),
2588 sb(Expr::Ident("k".to_string())),
2589 sb(Expr::Ident("v".to_string())),
2590 ),
2591 sb(Expr::Ident("k".to_string())),
2592 ),
2593 sb(Expr::Literal(Literal::Bool(true))),
2594 )],
2595 case_spans: vec![],
2596 case_givens: vec![],
2597 case_hostile_origins: vec![],
2598 case_hostile_profiles: vec![],
2599 kind: VerifyKind::Law(Box::new(VerifyLaw {
2600 name: "setHasKey".to_string(),
2601 givens: vec![
2602 VerifyGiven {
2603 name: "m".to_string(),
2604 type_name: "Map<String, Int>".to_string(),
2605 domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
2606 },
2607 VerifyGiven {
2608 name: "k".to_string(),
2609 type_name: "String".to_string(),
2610 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
2611 "a".to_string(),
2612 )))]),
2613 },
2614 VerifyGiven {
2615 name: "v".to_string(),
2616 type_name: "Int".to_string(),
2617 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
2618 1,
2619 )))]),
2620 },
2621 ],
2622 when: None,
2623 lhs: map_has(
2624 map_set(
2625 sb(Expr::Ident("m".to_string())),
2626 sb(Expr::Ident("k".to_string())),
2627 sb(Expr::Ident("v".to_string())),
2628 ),
2629 sb(Expr::Ident("k".to_string())),
2630 ),
2631 rhs: sb(Expr::Literal(Literal::Bool(true))),
2632 sample_guards: vec![],
2633 })),
2634 trace: false,
2635 cases_givens: vec![],
2636 }));
2637
2638 ctx.items.push(TopLevel::Verify(VerifyBlock {
2639 fn_name: "map".to_string(),
2640 line: 2,
2641 cases: vec![(
2642 map_get(
2643 map_set(
2644 sb(Expr::Ident("m".to_string())),
2645 sb(Expr::Ident("k".to_string())),
2646 sb(Expr::Ident("v".to_string())),
2647 ),
2648 sb(Expr::Ident("k".to_string())),
2649 ),
2650 some(sb(Expr::Ident("v".to_string()))),
2651 )],
2652 case_spans: vec![],
2653 case_givens: vec![],
2654 case_hostile_origins: vec![],
2655 case_hostile_profiles: vec![],
2656 kind: VerifyKind::Law(Box::new(VerifyLaw {
2657 name: "setGetKey".to_string(),
2658 givens: vec![
2659 VerifyGiven {
2660 name: "m".to_string(),
2661 type_name: "Map<String, Int>".to_string(),
2662 domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
2663 },
2664 VerifyGiven {
2665 name: "k".to_string(),
2666 type_name: "String".to_string(),
2667 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
2668 "a".to_string(),
2669 )))]),
2670 },
2671 VerifyGiven {
2672 name: "v".to_string(),
2673 type_name: "Int".to_string(),
2674 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
2675 1,
2676 )))]),
2677 },
2678 ],
2679 when: None,
2680 lhs: map_get(
2681 map_set(
2682 sb(Expr::Ident("m".to_string())),
2683 sb(Expr::Ident("k".to_string())),
2684 sb(Expr::Ident("v".to_string())),
2685 ),
2686 sb(Expr::Ident("k".to_string())),
2687 ),
2688 rhs: some(sb(Expr::Ident("v".to_string()))),
2689 sample_guards: vec![],
2690 })),
2691 trace: false,
2692 cases_givens: vec![],
2693 }));
2694
2695 let out = transpile(&mut ctx);
2696 let lean = out
2697 .files
2698 .iter()
2699 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2700 .expect("expected generated Lean file");
2701 assert!(lean.contains("simpa using AverMap.has_set_self m k v"));
2702 assert!(lean.contains("simpa using AverMap.get_set_self m k v"));
2703 }
2704
2705 #[test]
2706 fn transpile_auto_proves_direct_recursive_sum_law_by_structural_induction() {
2707 let mut ctx = ctx_from_source(
2708 r#"
2709module Mirror
2710 intent =
2711 "direct recursive sum induction probe"
2712
2713type Tree
2714 Leaf(Int)
2715 Node(Tree, Tree)
2716
2717fn mirror(t: Tree) -> Tree
2718 match t
2719 Tree.Leaf(v) -> Tree.Leaf(v)
2720 Tree.Node(left, right) -> Tree.Node(mirror(right), mirror(left))
2721
2722verify mirror law involutive
2723 given t: Tree = [Tree.Leaf(1), Tree.Node(Tree.Leaf(1), Tree.Leaf(2))]
2724 mirror(mirror(t)) => t
2725"#,
2726 "mirror",
2727 );
2728 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
2729 let lean = generated_lean_file(&out);
2730
2731 assert!(
2732 lean.contains(
2733 "theorem mirror_law_involutive : ∀ (t : Tree), mirror (mirror t) = t := by"
2734 )
2735 );
2736 assert!(lean.contains(" induction t with"));
2737 assert!(lean.contains(" | leaf f0 => simp [mirror]"));
2738 assert!(lean.contains(" | node f0 f1 ih0 ih1 => simp_all [mirror]"));
2739 assert!(!lean.contains(
2740 "-- universal theorem mirror_law_involutive omitted: sampled law shape is not auto-proved yet"
2741 ));
2742 }
2743
    #[test]
    fn transpile_auto_proves_map_update_laws() {
        // Laws over a function (`incCount`) that *internally* updates a map
        // via match-on-get + set, rather than calling Map.set directly.
        // Expects the auto-prover to case-split on `AverMap.get` and close
        // each branch with the AverMap set/get lemmas.
        let mut ctx = empty_ctx();

        // Builders for Aver builtin calls: `Map.get(m, k)`, `Map.set(m, k, v)`,
        // `Map.has(m, k)`, `Option.Some(v)`, `Option.withDefault(opt, def)`,
        // and `Map.empty()`.
        let map_get = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "get".to_string(),
                )),
                vec![m, k],
            ))
        };
        let map_set = |m: Spanned<Expr>, k: Spanned<Expr>, v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "set".to_string(),
                )),
                vec![m, k, v],
            ))
        };
        let map_has = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "has".to_string(),
                )),
                vec![m, k],
            ))
        };
        let option_some = |v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Option".to_string())),
                    "Some".to_string(),
                )),
                vec![v],
            ))
        };
        let option_with_default = |opt: Spanned<Expr>, def: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Option".to_string())),
                    "withDefault".to_string(),
                )),
                vec![opt, def],
            ))
        };
        let map_empty = || {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "empty".to_string(),
                )),
                vec![],
            ))
        };

        // fn addOne(n: Int) -> Int = n + 1, used in the expected RHS below.
        let add_one = FnDef {
            name: "addOne".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("n".to_string())),
                sbb(Expr::Literal(Literal::Int(1))),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(add_one.clone());
        ctx.items.push(TopLevel::FnDef(add_one));

        // fn incCount(counts, word): bind `current = Map.get(counts, word)`,
        // then match — Some(n) sets word to n + 1, None sets it to 1.
        let inc_count = FnDef {
            name: "incCount".to_string(),
            line: 2,
            params: vec![
                ("counts".to_string(), "Map<String, Int>".to_string()),
                ("word".to_string(), "String".to_string()),
            ],
            return_type: "Map<String, Int>".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::Block(vec![
                Stmt::Binding(
                    "current".to_string(),
                    None,
                    map_get(
                        sb(Expr::Ident("counts".to_string())),
                        sb(Expr::Ident("word".to_string())),
                    ),
                ),
                Stmt::Expr(sb(Expr::Match {
                    subject: sbb(Expr::Ident("current".to_string())),
                    arms: vec![
                        MatchArm {
                            pattern: Pattern::Constructor(
                                "Option.Some".to_string(),
                                vec!["n".to_string()],
                            ),
                            body: Box::new(map_set(
                                sb(Expr::Ident("counts".to_string())),
                                sb(Expr::Ident("word".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("n".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            )),
                            binding_slots: std::sync::OnceLock::new(),
                        },
                        MatchArm {
                            pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
                            body: Box::new(map_set(
                                sb(Expr::Ident("counts".to_string())),
                                sb(Expr::Ident("word".to_string())),
                                sb(Expr::Literal(Literal::Int(1))),
                            )),
                            binding_slots: std::sync::OnceLock::new(),
                        },
                    ],
                })),
            ])),
            resolution: None,
        };
        ctx.fn_defs.push(inc_count.clone());
        ctx.items.push(TopLevel::FnDef(inc_count));

        // Law 1 (keyPresent): Map.has(incCount(counts, word), word) => true.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "incCount".to_string(),
            line: 10,
            cases: vec![(
                map_has(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Ident("word".to_string())),
                        ],
                    )),
                    sb(Expr::Ident("word".to_string())),
                ),
                sb(Expr::Literal(Literal::Bool(true))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "keyPresent".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "counts".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                    },
                    VerifyGiven {
                        name: "word".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
                            "a".to_string(),
                        )))]),
                    },
                ],
                when: None,
                lhs: map_has(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Ident("word".to_string())),
                        ],
                    )),
                    sb(Expr::Ident("word".to_string())),
                ),
                rhs: sb(Expr::Literal(Literal::Bool(true))),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        // Law 2 (existingKeyIncrements):
        // Map.get(incCount(counts, "a"), "a")
        //   => Option.Some(addOne(Option.withDefault(Map.get(counts, "a"), 0))).
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "incCount".to_string(),
            line: 20,
            cases: vec![(
                map_get(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ],
                    )),
                    sb(Expr::Literal(Literal::Str("a".to_string()))),
                ),
                option_some(sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![option_with_default(
                        map_get(
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ),
                        sb(Expr::Literal(Literal::Int(0))),
                    )],
                ))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "existingKeyIncrements".to_string(),
                givens: vec![VerifyGiven {
                    name: "counts".to_string(),
                    type_name: "Map<String, Int>".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                }],
                when: None,
                lhs: map_get(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ],
                    )),
                    sb(Expr::Literal(Literal::Str("a".to_string()))),
                ),
                rhs: option_some(sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![option_with_default(
                        map_get(
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ),
                        sb(Expr::Literal(Literal::Int(0))),
                    )],
                ))),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        let out = transpile(&mut ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        // Both proofs should case-split on the map lookup before simp.
        assert!(
            lean.contains("cases h : AverMap.get counts word <;> simp [AverMap.has_set_self]"),
            "expected keyPresent auto-proof with has_set_self"
        );
        assert!(
            lean.contains("cases h : AverMap.get counts \"a\" <;> simp [AverMap.get_set_self, addOne, incCount]"),
            "expected existingKeyIncrements auto-proof with get_set_self"
        );
    }
3007
3008 #[test]
3009 fn transpile_parenthesizes_negative_int_call_args_in_law_samples() {
3010 let mut ctx = empty_ctx();
3011 let add = FnDef {
3012 name: "add".to_string(),
3013 line: 1,
3014 params: vec![
3015 ("a".to_string(), "Int".to_string()),
3016 ("b".to_string(), "Int".to_string()),
3017 ],
3018 return_type: "Int".to_string(),
3019 effects: vec![],
3020 desc: None,
3021 body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
3022 BinOp::Add,
3023 sbb(Expr::Ident("a".to_string())),
3024 sbb(Expr::Ident("b".to_string())),
3025 )))),
3026 resolution: None,
3027 };
3028 ctx.fn_defs.push(add.clone());
3029 ctx.items.push(TopLevel::FnDef(add));
3030 ctx.items.push(TopLevel::Verify(VerifyBlock {
3031 fn_name: "add".to_string(),
3032 line: 1,
3033 cases: vec![(
3034 sb(Expr::FnCall(
3035 sbb(Expr::Ident("add".to_string())),
3036 vec![
3037 sb(Expr::Literal(Literal::Int(-2))),
3038 sb(Expr::Literal(Literal::Int(-1))),
3039 ],
3040 )),
3041 sb(Expr::FnCall(
3042 sbb(Expr::Ident("add".to_string())),
3043 vec![
3044 sb(Expr::Literal(Literal::Int(-1))),
3045 sb(Expr::Literal(Literal::Int(-2))),
3046 ],
3047 )),
3048 )],
3049 case_spans: vec![],
3050 case_givens: vec![],
3051 case_hostile_origins: vec![],
3052 case_hostile_profiles: vec![],
3053 kind: VerifyKind::Law(Box::new(VerifyLaw {
3054 name: "commutative".to_string(),
3055 givens: vec![
3056 VerifyGiven {
3057 name: "a".to_string(),
3058 type_name: "Int".to_string(),
3059 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
3060 -2,
3061 )))]),
3062 },
3063 VerifyGiven {
3064 name: "b".to_string(),
3065 type_name: "Int".to_string(),
3066 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
3067 -1,
3068 )))]),
3069 },
3070 ],
3071 when: None,
3072 lhs: sb(Expr::FnCall(
3073 sbb(Expr::Ident("add".to_string())),
3074 vec![
3075 sb(Expr::Ident("a".to_string())),
3076 sb(Expr::Ident("b".to_string())),
3077 ],
3078 )),
3079 rhs: sb(Expr::FnCall(
3080 sbb(Expr::Ident("add".to_string())),
3081 vec![
3082 sb(Expr::Ident("b".to_string())),
3083 sb(Expr::Ident("a".to_string())),
3084 ],
3085 )),
3086 sample_guards: vec![],
3087 })),
3088 trace: false,
3089 cases_givens: vec![],
3090 }));
3091
3092 let out = transpile(&mut ctx);
3093 let lean = out
3094 .files
3095 .iter()
3096 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3097 .expect("expected generated Lean file");
3098 assert!(lean.contains(
3099 "theorem add_law_commutative_sample_1 : add (-2) (-1) = add (-1) (-2) := by native_decide"
3100 ));
3101 }
3102
3103 #[test]
3104 fn verify_law_numbering_is_scoped_per_law_name() {
3105 let mut ctx = empty_ctx();
3106 let f = FnDef {
3107 name: "f".to_string(),
3108 line: 1,
3109 params: vec![("x".to_string(), "Int".to_string())],
3110 return_type: "Int".to_string(),
3111 effects: vec![],
3112 desc: None,
3113 body: Rc::new(FnBody::from_expr(sb(Expr::Ident("x".to_string())))),
3114 resolution: None,
3115 };
3116 ctx.fn_defs.push(f.clone());
3117 ctx.items.push(TopLevel::FnDef(f));
3118 ctx.items.push(TopLevel::Verify(VerifyBlock {
3119 fn_name: "f".to_string(),
3120 line: 1,
3121 cases: vec![(
3122 sb(Expr::Literal(Literal::Int(1))),
3123 sb(Expr::Literal(Literal::Int(1))),
3124 )],
3125 case_spans: vec![],
3126 case_givens: vec![],
3127 case_hostile_origins: vec![],
3128 case_hostile_profiles: vec![],
3129 kind: VerifyKind::Cases,
3130 trace: false,
3131 cases_givens: vec![],
3132 }));
3133 ctx.items.push(TopLevel::Verify(VerifyBlock {
3134 fn_name: "f".to_string(),
3135 line: 2,
3136 cases: vec![(
3137 sb(Expr::Literal(Literal::Int(2))),
3138 sb(Expr::Literal(Literal::Int(2))),
3139 )],
3140 case_spans: vec![],
3141 case_givens: vec![],
3142 case_hostile_origins: vec![],
3143 case_hostile_profiles: vec![],
3144 kind: VerifyKind::Law(Box::new(VerifyLaw {
3145 name: "identity".to_string(),
3146 givens: vec![VerifyGiven {
3147 name: "x".to_string(),
3148 type_name: "Int".to_string(),
3149 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
3150 }],
3151 when: None,
3152 lhs: sb(Expr::Ident("x".to_string())),
3153 rhs: sb(Expr::Ident("x".to_string())),
3154 sample_guards: vec![],
3155 })),
3156 trace: false,
3157 cases_givens: vec![],
3158 }));
3159 let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
3160 let lean = out
3161 .files
3162 .iter()
3163 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3164 .expect("expected generated Lean file");
3165 assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
3166 assert!(lean.contains("theorem f_law_identity : ∀ (x : Int), x = x := by"));
3167 assert!(lean.contains("theorem f_law_identity_sample_1 : 2 = 2 := by"));
3168 assert!(!lean.contains("theorem f_law_identity_sample_2 : 2 = 2 := by"));
3169 }
3170
3171 #[test]
3172 fn proof_mode_accepts_single_int_countdown_recursion() {
3173 let mut ctx = empty_ctx();
3174 let down = FnDef {
3175 name: "down".to_string(),
3176 line: 1,
3177 params: vec![("n".to_string(), "Int".to_string())],
3178 return_type: "Int".to_string(),
3179 effects: vec![],
3180 desc: None,
3181 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
3182 subject: sbb(Expr::Ident("n".to_string())),
3183 arms: vec![
3184 MatchArm {
3185 pattern: Pattern::Literal(Literal::Int(0)),
3186 body: sbb(Expr::Literal(Literal::Int(0))),
3187 binding_slots: std::sync::OnceLock::new(),
3188 },
3189 MatchArm {
3190 pattern: Pattern::Wildcard,
3191 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
3192 "down".to_string(),
3193 vec![sb(Expr::BinOp(
3194 BinOp::Sub,
3195 sbb(Expr::Ident("n".to_string())),
3196 sbb(Expr::Literal(Literal::Int(1))),
3197 ))],
3198 )))),
3199 binding_slots: std::sync::OnceLock::new(),
3200 },
3201 ],
3202 }))),
3203 resolution: None,
3204 };
3205 ctx.items.push(TopLevel::FnDef(down.clone()));
3206 ctx.fn_defs.push(down);
3207
3208 ctx.refresh_facts();
3209 let issues = proof_mode_issues(&ctx);
3210 assert!(
3211 issues.is_empty(),
3212 "expected Int countdown recursion to be accepted, got: {:?}",
3213 issues
3214 );
3215
3216 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
3217 let lean = out
3218 .files
3219 .iter()
3220 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3221 .expect("expected generated Lean file");
3222 assert!(lean.contains("def down__fuel"));
3223 assert!(lean.contains("def down (n : Int) : Int :="));
3224 assert!(lean.contains("down__fuel ((Int.natAbs n) + 1) n"));
3225 }
3226
3227 #[test]
3228 fn proof_mode_accepts_single_int_countdown_on_nonfirst_param() {
3229 let mut ctx = empty_ctx();
3230 let repeat_like = FnDef {
3231 name: "repeatLike".to_string(),
3232 line: 1,
3233 params: vec![
3234 ("char".to_string(), "String".to_string()),
3235 ("n".to_string(), "Int".to_string()),
3236 ],
3237 return_type: "List<String>".to_string(),
3238 effects: vec![],
3239 desc: None,
3240 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
3241 subject: sbb(Expr::BinOp(
3242 BinOp::Lte,
3243 sbb(Expr::Ident("n".to_string())),
3244 sbb(Expr::Literal(Literal::Int(0))),
3245 )),
3246 arms: vec![
3247 MatchArm {
3248 pattern: Pattern::Literal(Literal::Bool(true)),
3249 body: sbb(Expr::List(vec![])),
3250 binding_slots: std::sync::OnceLock::new(),
3251 },
3252 MatchArm {
3253 pattern: Pattern::Literal(Literal::Bool(false)),
3254 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
3255 "repeatLike".to_string(),
3256 vec![
3257 sb(Expr::Ident("char".to_string())),
3258 sb(Expr::BinOp(
3259 BinOp::Sub,
3260 sbb(Expr::Ident("n".to_string())),
3261 sbb(Expr::Literal(Literal::Int(1))),
3262 )),
3263 ],
3264 )))),
3265 binding_slots: std::sync::OnceLock::new(),
3266 },
3267 ],
3268 }))),
3269 resolution: None,
3270 };
3271 ctx.items.push(TopLevel::FnDef(repeat_like.clone()));
3272 ctx.fn_defs.push(repeat_like);
3273
3274 ctx.refresh_facts();
3275 let issues = proof_mode_issues(&ctx);
3276 assert!(
3277 issues.is_empty(),
3278 "expected non-first Int countdown recursion to be accepted, got: {:?}",
3279 issues
3280 );
3281
3282 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
3283 let lean = out
3284 .files
3285 .iter()
3286 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3287 .expect("expected generated Lean file");
3288 assert!(lean.contains("def repeatLike__fuel"));
3289 assert!(lean.contains("def repeatLike (char : String) (n : Int) : List String :="));
3290 assert!(lean.contains("repeatLike__fuel ((Int.natAbs n) + 1) char n"));
3291 }
3292
3293 #[test]
3294 fn proof_mode_accepts_negative_guarded_int_ascent() {
3295 let mut ctx = empty_ctx();
3296 let normalize = FnDef {
3297 name: "normalize".to_string(),
3298 line: 1,
3299 params: vec![("angle".to_string(), "Int".to_string())],
3300 return_type: "Int".to_string(),
3301 effects: vec![],
3302 desc: None,
3303 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
3304 subject: sbb(Expr::BinOp(
3305 BinOp::Lt,
3306 sbb(Expr::Ident("angle".to_string())),
3307 sbb(Expr::Literal(Literal::Int(0))),
3308 )),
3309 arms: vec![
3310 MatchArm {
3311 pattern: Pattern::Literal(Literal::Bool(true)),
3312 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
3313 "normalize".to_string(),
3314 vec![sb(Expr::BinOp(
3315 BinOp::Add,
3316 sbb(Expr::Ident("angle".to_string())),
3317 sbb(Expr::Literal(Literal::Int(360))),
3318 ))],
3319 )))),
3320 binding_slots: std::sync::OnceLock::new(),
3321 },
3322 MatchArm {
3323 pattern: Pattern::Literal(Literal::Bool(false)),
3324 body: sbb(Expr::Ident("angle".to_string())),
3325 binding_slots: std::sync::OnceLock::new(),
3326 },
3327 ],
3328 }))),
3329 resolution: None,
3330 };
3331 ctx.items.push(TopLevel::FnDef(normalize.clone()));
3332 ctx.fn_defs.push(normalize);
3333
3334 ctx.refresh_facts();
3335 let issues = proof_mode_issues(&ctx);
3336 assert!(
3337 issues.is_empty(),
3338 "expected negative-guarded Int ascent recursion to be accepted, got: {:?}",
3339 issues
3340 );
3341
3342 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
3343 let lean = out
3344 .files
3345 .iter()
3346 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3347 .expect("expected generated Lean file");
3348 assert!(lean.contains("def normalize__fuel"));
3349 assert!(lean.contains("normalize__fuel ((Int.natAbs angle) + 1) angle"));
3350 }
3351
    #[test]
    fn proof_mode_accepts_single_list_structural_recursion() {
        // `len` recurses on the tail of its single List parameter:
        //   len [] = 0 ; len (h :: t) = len t
        // Structural descent on a List argument is inside the proof subset,
        // so the analysis must report no blockers. (No Lean output is
        // inspected here — only the subset check.)
        let mut ctx = empty_ctx();
        let len = FnDef {
            name: "len".to_string(),
            line: 1,
            params: vec![("xs".to_string(), "List<Int>".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("xs".to_string())),
                arms: vec![
                    MatchArm {
                        // Base case: empty list -> 0.
                        pattern: Pattern::EmptyList,
                        body: sbb(Expr::Literal(Literal::Int(0))),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                    MatchArm {
                        // Recursive case: recurse on the strictly smaller tail `t`.
                        pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "len".to_string(),
                            vec![sb(Expr::Ident("t".to_string()))],
                        )))),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(len.clone()));
        ctx.fn_defs.push(len);

        // Recompute derived facts before querying the proof-mode analysis.
        ctx.refresh_facts();
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected List structural recursion to be accepted, got: {:?}",
            issues
        );
    }
3393
    #[test]
    fn proof_mode_accepts_single_list_structural_recursion_on_nonfirst_param() {
        // Accumulator-style length: the structurally decreasing List is the
        // SECOND parameter while the first (`count`) grows. Detection must
        // find the decreasing List regardless of parameter position, and the
        // emitted Lean must use a `termination_by` on the list length rather
        // than falling back to `partial def`.
        let mut ctx = empty_ctx();
        let len_from = FnDef {
            name: "lenFrom".to_string(),
            line: 1,
            params: vec![
                ("count".to_string(), "Int".to_string()),
                ("xs".to_string(), "List<Int>".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("xs".to_string())),
                arms: vec![
                    MatchArm {
                        // Base case: empty list -> accumulated count.
                        pattern: Pattern::EmptyList,
                        body: sbb(Expr::Ident("count".to_string())),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                    MatchArm {
                        // Recursive case: `lenFrom(count + 1, t)` — the
                        // accumulator changes, only `xs` decreases.
                        pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "lenFrom".to_string(),
                            vec![
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("count".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                                sb(Expr::Ident("t".to_string())),
                            ],
                        )))),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(len_from.clone()));
        ctx.fn_defs.push(len_from);

        // Recompute derived facts before querying the proof-mode analysis.
        ctx.refresh_facts();
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected non-first List structural recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // Structural List recursion gets a termination measure, not fuel.
        assert!(lean.contains("termination_by xs.length"));
        assert!(!lean.contains("partial def lenFrom"));
    }
3450
    #[test]
    fn proof_mode_accepts_single_string_pos_advance_recursion() {
        // Scanner shape: recurse with `pos + 1` over a fixed string `s`,
        // stopping when `String.charAt(s, pos)` yields None (end of input).
        // `pos` grows but is bounded by the string length, so proof mode
        // must accept it and emit a fuelized (total) definition.
        let mut ctx = empty_ctx();
        let skip_ws = FnDef {
            name: "skipWs".to_string(),
            line: 1,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                // Match subject: `String.charAt(s, pos)` (builtin attr call).
                subject: sbb(Expr::FnCall(
                    sbb(Expr::Attr(
                        sbb(Expr::Ident("String".to_string())),
                        "charAt".to_string(),
                    )),
                    vec![
                        sb(Expr::Ident("s".to_string())),
                        sb(Expr::Ident("pos".to_string())),
                    ],
                )),
                arms: vec![
                    MatchArm {
                        // Base case: past the end of the string -> return pos.
                        pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
                        body: sbb(Expr::Ident("pos".to_string())),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                    MatchArm {
                        // Recursive case: any character -> `skipWs(s, pos + 1)`.
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "skipWs".to_string(),
                            vec![
                                sb(Expr::Ident("s".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("pos".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            ],
                        )))),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(skip_ws.clone()));
        ctx.fn_defs.push(skip_ws);

        // Recompute derived facts before querying the proof-mode analysis.
        ctx.refresh_facts();
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected String+pos recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // The scanner must be fuelized rather than emitted as `partial`.
        assert!(lean.contains("def skipWs__fuel"));
        assert!(!lean.contains("partial def skipWs"));
    }
3516
    #[test]
    fn proof_mode_accepts_mutual_int_countdown_recursion() {
        // Classic mutual recursion: `even(n)` calls `odd(n - 1)` and vice
        // versa, both with a literal base case at 0. The shared Int measure
        // decreases across every cross-call, so the pair must be accepted
        // and both functions fuelized.
        let mut ctx = empty_ctx();
        let even = FnDef {
            name: "even".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Bool".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        // Base case: `even 0 = true`.
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: sbb(Expr::Literal(Literal::Bool(true))),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                    MatchArm {
                        // Cross-call: `even n = odd(n - 1)`.
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "odd".to_string(),
                            vec![sb(Expr::BinOp(
                                BinOp::Sub,
                                sbb(Expr::Ident("n".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            ))],
                        )))),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                ],
            }))),
            resolution: None,
        };
        let odd = FnDef {
            name: "odd".to_string(),
            line: 2,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Bool".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        // Base case: `odd 0 = false`.
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: sbb(Expr::Literal(Literal::Bool(false))),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                    MatchArm {
                        // Cross-call: `odd n = even(n - 1)`.
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "even".to_string(),
                            vec![sb(Expr::BinOp(
                                BinOp::Sub,
                                sbb(Expr::Ident("n".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            ))],
                        )))),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(even.clone()));
        ctx.items.push(TopLevel::FnDef(odd.clone()));
        ctx.fn_defs.push(even);
        ctx.fn_defs.push(odd);

        // Recompute derived facts before querying the proof-mode analysis.
        ctx.refresh_facts();
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual Int countdown recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // Both members of the cycle get fuelized workers, and the public
        // wrapper seeds fuel from |n| + 1.
        assert!(lean.contains("def even__fuel"));
        assert!(lean.contains("def odd__fuel"));
        assert!(lean.contains("def even (n : Int) : Bool :="));
        assert!(lean.contains("even__fuel ((Int.natAbs n) + 1) n"));
    }
3602
    #[test]
    fn proof_mode_accepts_mutual_string_pos_recursion_with_ranked_same_edges() {
        // Mutual scanner pair where one edge does NOT advance: `f` calls
        // `g(s, pos)` with pos unchanged, and `g` calls `f(s, pos + 1)`.
        // Termination still holds because every cycle through the pair
        // advances `pos` at least once — the analysis must rank the
        // "same" edge rather than reject the cycle.
        let mut ctx = empty_ctx();
        let f = FnDef {
            name: "f".to_string(),
            line: 1,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                // Guard: `pos >= 3` terminates the scan.
                subject: sbb(Expr::BinOp(
                    BinOp::Gte,
                    sbb(Expr::Ident("pos".to_string())),
                    sbb(Expr::Literal(Literal::Int(3))),
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        body: sbb(Expr::Ident("pos".to_string())),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                    MatchArm {
                        // "Same" edge: `g(s, pos)` — pos is NOT advanced here.
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "g".to_string(),
                            vec![
                                sb(Expr::Ident("s".to_string())),
                                sb(Expr::Ident("pos".to_string())),
                            ],
                        )))),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                ],
            }))),
            resolution: None,
        };
        let g = FnDef {
            name: "g".to_string(),
            line: 2,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                // Same guard shape as `f`.
                subject: sbb(Expr::BinOp(
                    BinOp::Gte,
                    sbb(Expr::Ident("pos".to_string())),
                    sbb(Expr::Literal(Literal::Int(3))),
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        body: sbb(Expr::Ident("pos".to_string())),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                    MatchArm {
                        // Advancing edge: `f(s, pos + 1)` closes the cycle
                        // with strict progress.
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "f".to_string(),
                            vec![
                                sb(Expr::Ident("s".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("pos".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            ],
                        )))),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(f.clone()));
        ctx.items.push(TopLevel::FnDef(g.clone()));
        ctx.fn_defs.push(f);
        ctx.fn_defs.push(g);

        // Recompute derived facts before querying the proof-mode analysis.
        ctx.refresh_facts();
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual String+pos recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // Both members fuelized; neither falls back to `partial`.
        assert!(lean.contains("def f__fuel"));
        assert!(lean.contains("def g__fuel"));
        assert!(!lean.contains("partial def f"));
    }
3703
    #[test]
    fn proof_mode_accepts_mutual_ranked_sizeof_recursion() {
        // Mutual pair with different arities where the decreasing List is
        // threaded through both: `f(xs)` forwards to `g("acc", xs)` without
        // shrinking it, and `g` recurses back into `f(t)` on the tail. The
        // sizeOf-based ranking must accept the cycle, and the output must be
        // a fuelized `mutual` block (no `partial` fallback).
        let mut ctx = empty_ctx();
        let f = FnDef {
            name: "f".to_string(),
            line: 1,
            params: vec![("xs".to_string(), "List<Int>".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            // "Same" edge: `f xs = g "acc" xs` — list passed through unchanged.
            body: Rc::new(FnBody::from_expr(sb(Expr::TailCall(Box::new(
                TailCallData::new(
                    "g".to_string(),
                    vec![
                        sb(Expr::Literal(Literal::Str("acc".to_string()))),
                        sb(Expr::Ident("xs".to_string())),
                    ],
                ),
            ))))),
            resolution: None,
        };
        let g = FnDef {
            name: "g".to_string(),
            line: 2,
            params: vec![
                ("acc".to_string(), "String".to_string()),
                ("xs".to_string(), "List<Int>".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("xs".to_string())),
                arms: vec![
                    MatchArm {
                        // Base case: empty list -> 0.
                        pattern: Pattern::EmptyList,
                        body: sbb(Expr::Literal(Literal::Int(0))),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                    MatchArm {
                        // Decreasing edge: `g acc (h :: t) = f t`.
                        pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "f".to_string(),
                            vec![sb(Expr::Ident("t".to_string()))],
                        )))),
                        binding_slots: std::sync::OnceLock::new(),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(f.clone()));
        ctx.items.push(TopLevel::FnDef(g.clone()));
        ctx.fn_defs.push(f);
        ctx.fn_defs.push(g);

        // Recompute derived facts before querying the proof-mode analysis.
        ctx.refresh_facts();
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual ranked-sizeOf recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("mutual"));
        assert!(lean.contains("def f__fuel"));
        assert!(lean.contains("def g__fuel"));
        assert!(!lean.contains("partial def f"));
        assert!(!lean.contains("partial def g"));
    }
3776
3777 #[test]
3778 fn proof_mode_rejects_recursive_pure_functions() {
3779 let mut ctx = empty_ctx();
3780 let recursive_fn = FnDef {
3781 name: "loop".to_string(),
3782 line: 1,
3783 params: vec![("n".to_string(), "Int".to_string())],
3784 return_type: "Int".to_string(),
3785 effects: vec![],
3786 desc: None,
3787 body: Rc::new(FnBody::from_expr(sb(Expr::FnCall(
3788 sbb(Expr::Ident("loop".to_string())),
3789 vec![sb(Expr::Ident("n".to_string()))],
3790 )))),
3791 resolution: None,
3792 };
3793 ctx.items.push(TopLevel::FnDef(recursive_fn.clone()));
3794 ctx.fn_defs.push(recursive_fn);
3795
3796 ctx.refresh_facts();
3797 let issues = proof_mode_issues(&ctx);
3798 assert!(
3799 issues.iter().any(|i| i.contains("outside proof subset")),
3800 "expected recursive function blocker, got: {:?}",
3801 issues
3802 );
3803 }
3804
3805 #[test]
3806 fn proof_mode_allows_recursive_types() {
3807 let mut ctx = empty_ctx();
3808 let recursive_type = TypeDef::Sum {
3809 name: "Node".to_string(),
3810 variants: vec![TypeVariant {
3811 name: "Cons".to_string(),
3812 fields: vec!["Node".to_string()],
3813 }],
3814 line: 1,
3815 };
3816 ctx.items.push(TopLevel::TypeDef(recursive_type.clone()));
3817 ctx.type_defs.push(recursive_type);
3818
3819 ctx.refresh_facts();
3820 let issues = proof_mode_issues(&ctx);
3821 assert!(
3822 issues
3823 .iter()
3824 .all(|i| !i.contains("recursive types require unsafe DecidableEq shim")),
3825 "did not expect recursive type blocker, got: {:?}",
3826 issues
3827 );
3828 }
3829
3830 #[test]
3831 fn law_auto_example_exports_real_proof_artifacts() {
3832 let mut ctx = ctx_from_source(
3833 include_str!("../../../examples/formal/law_auto.av"),
3834 "law_auto",
3835 );
3836 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
3837 let lean = generated_lean_file(&out);
3838
3839 assert!(lean.contains("theorem add_law_commutative :"));
3840 assert!(lean.contains("theorem id'_law_reflexive : ∀ (x : Int), x = x := by"));
3841 assert!(lean.contains("theorem incCount_law_keyPresent :"));
3842 assert!(lean.contains("AverMap.has_set_self"));
3843 assert!(lean.contains("theorem add_law_commutative_sample_1 :"));
3844 assert!(lean.contains(":= by native_decide"));
3845 }
3846
3847 #[test]
3848 fn json_example_stays_inside_proof_subset() {
3849 let mut ctx = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
3850 ctx.refresh_facts();
3851 let issues = proof_mode_issues(&ctx);
3852 assert!(
3853 issues.is_empty(),
3854 "expected json example to stay inside proof subset, got: {:?}",
3855 issues
3856 );
3857 }
3858
3859 #[test]
3860 fn json_example_uses_total_defs_and_domain_guarded_laws_in_proof_mode() {
3861 let mut ctx = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
3862 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
3863 let lean = generated_lean_file(&out);
3864
3865 assert!(!lean.contains("partial def"));
3866 assert!(lean.contains("def skipWs__fuel"));
3867 assert!(lean.contains("def parseValue__fuel"));
3868 assert!(lean.contains("def toString' (j : Json) : String :="));
3869 assert!(
3870 lean.contains(
3871 "def averMeasureJsonEntries_String (items : List (String × Json)) : Nat :="
3872 )
3873 );
3874 assert!(lean.contains(
3875 "| .jsonObject x0 => (averMeasureJsonEntries_String (AverMap.entries x0)) + 1"
3876 ));
3877 assert!(lean.contains("-- when jsonRoundtripSafe j"));
3878 assert!(!lean.contains("-- hint: verify law '"));
3879 assert!(!lean.contains("private theorem toString'_law_parseRoundtrip_aux"));
3880 assert!(
3881 lean.contains(
3882 "theorem toString'_law_parseRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
3883 )
3884 );
3885 assert!(lean.contains(
3886 "jsonRoundtripSafe j = true -> fromString (toString' j) = Except.ok j := by"
3887 ));
3888 assert!(
3889 lean.contains("theorem finishFloat_law_fromCanonicalFloat : ∀ (f : Float), f = 3.5 ∨")
3890 );
3891 assert!(lean.contains("theorem finishInt_law_fromCanonicalInt_checked_domain :"));
3892 assert!(lean.contains(
3893 "theorem toString'_law_parseValueRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
3894 ));
3895 assert!(lean.contains("theorem toString'_law_parseRoundtrip_sample_1 :"));
3896 assert!(lean.contains(
3897 "example : fromString \"null\" = Except.ok Json.jsonNull := by native_decide"
3898 ));
3899 }
3900
3901 #[test]
3902 fn transpile_injects_builtin_network_types_and_vector_get_support() {
3903 let mut ctx = ctx_from_source(
3904 r#"
3905fn firstOrMissing(xs: Vector<String>) -> Result<String, String>
3906 Option.toResult(Vector.get(xs, 0), "missing")
3907
3908fn defaultHeaders() -> Map<String, List<String>>
3909 {"content-type" => ["application/json"]}
3910
3911fn mkResponse(body: String) -> HttpResponse
3912 HttpResponse(status = 200, body = body, headers = defaultHeaders())
3913
3914fn requestPath(req: HttpRequest) -> String
3915 req.path
3916
3917fn connPort(conn: Tcp.Connection) -> Int
3918 conn.port
3919"#,
3920 "network_helpers",
3921 );
3922 let out = transpile(&mut ctx);
3923 let lean = generated_lean_file(&out);
3924
3925 assert!(lean.contains("structure HttpResponse where"));
3926 assert!(lean.contains("structure HttpRequest where"));
3927 assert!(lean.contains("structure Tcp_Connection where"));
3928 assert!(lean.contains("port : Int"));
3929 assert!(lean.contains("List (String × List String)"));
3931 }
3932
3933 #[test]
3934 fn law_auto_example_has_no_sorry_in_proof_mode() {
3935 let mut ctx = ctx_from_source(
3936 include_str!("../../../examples/formal/law_auto.av"),
3937 "law_auto",
3938 );
3939 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
3940 let lean = generated_lean_file(&out);
3941 assert!(
3942 !lean.contains("sorry"),
3943 "expected law_auto proof export to avoid sorry, got:\n{}",
3944 lean
3945 );
3946 }
3947
3948 #[test]
3949 fn map_example_has_no_sorry_in_proof_mode() {
3950 let mut ctx = ctx_from_source(include_str!("../../../examples/data/map.av"), "map");
3951 ctx.refresh_facts();
3952 let issues = proof_mode_issues(&ctx);
3953 assert!(
3954 issues.is_empty(),
3955 "expected map example to stay inside proof subset, got: {:?}",
3956 issues
3957 );
3958
3959 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
3960 let lean = generated_lean_file(&out);
3961 assert!(lean.contains("theorem incCount_law_trackedCountStepsByOne :"));
3963 assert!(lean.contains("sorry"));
3964 assert!(lean.contains("theorem countWords_law_presenceMatchesContains_sample_1 :"));
3966 assert!(lean.contains("theorem countWords_law_trackedWordCount_sample_1 :"));
3967 assert!(lean.contains("AverMap.has_set_self"));
3968 assert!(lean.contains("AverMap.get_set_self"));
3969 }
3970
3971 #[test]
3972 fn spec_laws_example_has_no_sorry_in_proof_mode() {
3973 let mut ctx = ctx_from_source(
3974 include_str!("../../../examples/formal/spec_laws.av"),
3975 "spec_laws",
3976 );
3977 ctx.refresh_facts();
3978 let issues = proof_mode_issues(&ctx);
3979 assert!(
3980 issues.is_empty(),
3981 "expected spec_laws example to stay inside proof subset, got: {:?}",
3982 issues
3983 );
3984
3985 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
3986 let lean = generated_lean_file(&out);
3987 assert!(
3988 !lean.contains("sorry"),
3989 "expected spec_laws proof export to avoid sorry, got:\n{}",
3990 lean
3991 );
3992 assert!(lean.contains("theorem absVal_eq_absValSpec :"));
3993 assert!(lean.contains("theorem clampNonNegative_eq_clampNonNegativeSpec :"));
3994 }
3995
3996 #[test]
3997 fn rle_example_exports_sampled_roundtrip_laws_without_sorry() {
3998 let mut ctx = ctx_from_source(include_str!("../../../examples/data/rle.av"), "rle");
3999 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4000 let lean = generated_lean_file(&out);
4001
4002 assert!(
4003 lean.contains("sorry"),
4004 "expected rle proof export to contain sorry for unproved universal theorems"
4005 );
4006 assert!(lean.contains(
4007 "theorem encode_law_roundtrip_sample_1 : decode (encode []) = [] := by native_decide"
4008 ));
4009 assert!(lean.contains(
4010 "theorem encodeString_law_string_roundtrip_sample_1 : decodeString (encodeString \"\") = \"\" := by native_decide"
4011 ));
4012 }
4013
4014 #[test]
4015 fn fibonacci_example_uses_fuelized_int_countdown_in_proof_mode() {
4016 let mut ctx = ctx_from_source(
4017 include_str!("../../../examples/data/fibonacci.av"),
4018 "fibonacci",
4019 );
4020 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4021 let lean = generated_lean_file(&out);
4022
4023 assert!(lean.contains("def fibTR__fuel"));
4024 assert!(lean.contains("def fibTR (n : Int) (a : Int) (b : Int) : Int :="));
4025 assert!(lean.contains("fibTR__fuel ((Int.natAbs n) + 1) n a b"));
4026 assert!(!lean.contains("partial def fibTR"));
4027 }
4028
4029 #[test]
4030 fn fibonacci_example_stays_inside_proof_subset() {
4031 let mut ctx = ctx_from_source(
4032 include_str!("../../../examples/data/fibonacci.av"),
4033 "fibonacci",
4034 );
4035 ctx.refresh_facts();
4036 let issues = proof_mode_issues(&ctx);
4037 assert!(
4038 issues.is_empty(),
4039 "expected fibonacci example to stay inside proof subset, got: {:?}",
4040 issues
4041 );
4042 }
4043
4044 #[test]
4045 fn fibonacci_example_matches_general_linear_recurrence_shapes() {
4046 let ctx = ctx_from_source(
4047 include_str!("../../../examples/data/fibonacci.av"),
4048 "fibonacci",
4049 );
4050 let fib = ctx.fn_defs.iter().find(|fd| fd.name == "fib").unwrap();
4051 let fib_tr = ctx.fn_defs.iter().find(|fd| fd.name == "fibTR").unwrap();
4052 let fib_spec = ctx.fn_defs.iter().find(|fd| fd.name == "fibSpec").unwrap();
4053
4054 assert!(recurrence::detect_tailrec_int_linear_pair_wrapper(fib).is_some());
4055 assert!(recurrence::detect_tailrec_int_linear_pair_worker(fib_tr).is_some());
4056 assert!(recurrence::detect_second_order_int_linear_recurrence(fib_spec).is_some());
4057 }
4058
4059 #[test]
4060 fn fibonacci_example_auto_proves_general_linear_recurrence_spec_law() {
4061 let mut ctx = ctx_from_source(
4062 include_str!("../../../examples/data/fibonacci.av"),
4063 "fibonacci",
4064 );
4065 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4066 let lean = generated_lean_file(&out);
4067
4068 assert!(lean.contains("private def fibSpec__nat : Nat -> Int"));
4069 assert!(!lean.contains("partial def fibSpec"));
4070 assert!(lean.contains("private theorem fib_eq_fibSpec__worker_nat_shift"));
4071 assert!(lean.contains("private theorem fib_eq_fibSpec__helper_nat"));
4072 assert!(lean.contains("private theorem fib_eq_fibSpec__helper_seed"));
4073 assert!(lean.contains("theorem fib_eq_fibSpec : ∀ (n : Int), fib n = fibSpec n := by"));
4074 assert!(!lean.contains(
4075 "-- universal theorem fib_eq_fibSpec omitted: sampled law shape is not auto-proved yet"
4076 ));
4077 }
4078
    #[test]
    fn pell_like_example_auto_proves_same_general_shape() {
        // The linear-recurrence prover must generalize beyond Fibonacci:
        // the Pell recurrence (p(n) = p(n-2) + 2*p(n-1)) has the same
        // pair-state worker / wrapper / second-order-spec shape and should
        // be auto-proved the same way.
        let mut ctx = ctx_from_source(
            r#"
module Pell
  intent =
    "linear recurrence probe"

fn pellTR(n: Int, a: Int, b: Int) -> Int
  match n
    0 -> a
    _ -> pellTR(n - 1, b, a + 2 * b)

fn pell(n: Int) -> Int
  match n < 0
    true -> 0
    false -> pellTR(n, 0, 1)

fn pellSpec(n: Int) -> Int
  match n < 0
    true -> 0
    false -> match n
      0 -> 0
      1 -> 1
      _ -> pellSpec(n - 2) + 2 * pellSpec(n - 1)

verify pell law pellSpec
  given n: Int = [0, 1, 2, 3]
  pell(n) => pellSpec(n)
"#,
            "pell",
        );
        ctx.refresh_facts();
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected pell example to stay inside proof subset, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // Same artifact set as the fibonacci auto-proof: Nat shadow def,
        // shift lemma, and the universal theorem — no omission marker.
        assert!(lean.contains("private def pellSpec__nat : Nat -> Int"));
        assert!(lean.contains("private theorem pell_eq_pellSpec__worker_nat_shift"));
        assert!(lean.contains("theorem pell_eq_pellSpec : ∀ (n : Int), pell n = pellSpec n := by"));
        assert!(!lean.contains(
            "-- universal theorem pell_eq_pellSpec omitted: sampled law shape is not auto-proved yet"
        ));
    }
4128
4129 #[test]
4130 fn nonlinear_pair_state_recurrence_is_not_auto_proved_as_linear_shape() {
4131 let mut ctx = ctx_from_source(
4132 r#"
4133module WeirdRec
4134 intent =
4135 "reject nonlinear pair-state recurrence from linear recurrence prover"
4136
4137fn weirdTR(n: Int, a: Int, b: Int) -> Int
4138 match n
4139 0 -> a
4140 _ -> weirdTR(n - 1, b, a * b)
4141
4142fn weird(n: Int) -> Int
4143 match n < 0
4144 true -> 0
4145 false -> weirdTR(n, 0, 1)
4146
4147fn weirdSpec(n: Int) -> Int
4148 match n < 0
4149 true -> 0
4150 false -> match n
4151 0 -> 0
4152 1 -> 1
4153 _ -> weirdSpec(n - 1) * weirdSpec(n - 2)
4154
4155verify weird law weirdSpec
4156 given n: Int = [0, 1, 2, 3]
4157 weird(n) => weirdSpec(n)
4158"#,
4159 "weirdrec",
4160 );
4161 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4162 let lean = generated_lean_file(&out);
4163
4164 assert!(lean.contains("sorry"));
4166 assert!(!lean.contains("private theorem weird_eq_weirdSpec__worker_nat_shift"));
4167 assert!(lean.contains("theorem weird_eq_weirdSpec_sample_1 :"));
4168 }
4169
4170 #[test]
4171 fn date_example_stays_inside_proof_subset() {
4172 let mut ctx = ctx_from_source(include_str!("../../../examples/data/date.av"), "date");
4173 ctx.refresh_facts();
4174 let issues = proof_mode_issues(&ctx);
4175 assert!(
4176 issues.is_empty(),
4177 "expected date example to stay inside proof subset, got: {:?}",
4178 issues
4179 );
4180
4181 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4182 let lean = generated_lean_file(&out);
4183 assert!(!lean.contains("partial def"));
4184 assert!(lean.contains("def parseIntSlice (s : String) (from' : Int) (to : Int) : Int :="));
4185 }
4186
4187 #[test]
4188 fn temperature_example_stays_inside_proof_subset() {
4189 let mut ctx = ctx_from_source(
4190 include_str!("../../../examples/core/temperature.av"),
4191 "temperature",
4192 );
4193 ctx.refresh_facts();
4194 let issues = proof_mode_issues(&ctx);
4195 assert!(
4196 issues.is_empty(),
4197 "expected temperature example to stay inside proof subset, got: {:?}",
4198 issues
4199 );
4200
4201 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4202 let lean = generated_lean_file(&out);
4203 assert!(!lean.contains("partial def"));
4204 assert!(
4205 lean.contains("example : celsiusToFahr 0.0 = 32.0 := by native_decide"),
4206 "expected verify examples to survive proof export, got:\n{}",
4207 lean
4208 );
4209 }
4210
4211 #[test]
4212 fn quicksort_example_stays_inside_proof_subset() {
4213 let mut ctx = ctx_from_source(
4214 include_str!("../../../examples/data/quicksort.av"),
4215 "quicksort",
4216 );
4217 ctx.refresh_facts();
4218 let issues = proof_mode_issues(&ctx);
4219 assert!(
4220 issues.is_empty(),
4221 "expected quicksort example to stay inside proof subset, got: {:?}",
4222 issues
4223 );
4224
4225 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4226 let lean = generated_lean_file(&out);
4227 assert!(lean.contains("def isOrderedFrom"));
4228 assert!(!lean.contains("partial def isOrderedFrom"));
4229 assert!(lean.contains("termination_by xs.length"));
4230 }
4231
4232 #[test]
4233 fn grok_s_language_example_uses_total_ranked_sizeof_mutual_recursion() {
4234 let mut ctx = ctx_from_source(
4235 include_str!("../../../examples/core/grok_s_language.av"),
4236 "grok_s_language",
4237 );
4238 ctx.refresh_facts();
4239 let issues = proof_mode_issues(&ctx);
4240 assert!(
4241 issues.is_empty(),
4242 "expected grok_s_language example to stay inside proof subset, got: {:?}",
4243 issues
4244 );
4245
4246 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4247 let lean = generated_lean_file(&out);
4248 assert!(lean.contains("mutual"));
4249 assert!(lean.contains("def eval__fuel"));
4250 assert!(lean.contains("def parseListItems__fuel"));
4251 assert!(!lean.contains("partial def eval"));
4252 assert!(!lean.contains("termination_by (sizeOf e,"));
4253 assert!(lean.contains("-- when validSymbolNames e"));
4254 assert!(!lean.contains("private theorem toString'_law_parseRoundtrip_aux"));
4255 assert!(lean.contains(
4256 "theorem toString'_law_parseRoundtrip : ∀ (e : Sexpr), e = Sexpr.atomNum 42 ∨"
4257 ));
4258 assert!(
4259 lean.contains("validSymbolNames e = true -> parse (toString' e) = Except.ok e := by")
4260 );
4261 assert!(lean.contains("theorem toString'_law_parseSexprRoundtrip :"));
4262 assert!(lean.contains("theorem toString'_law_parseRoundtrip_sample_1 :"));
4263 }
4264
4265 #[test]
4266 fn lambda_example_keeps_only_eval_outside_proof_subset() {
4267 let mut ctx = ctx_from_source(include_str!("../../../examples/core/lambda.av"), "lambda");
4268 ctx.refresh_facts();
4269 let issues = proof_mode_issues(&ctx);
4270 assert_eq!(
4271 issues,
4272 vec!["recursive function 'eval' is outside proof subset (currently supported: Int countdown, second-order affine Int recurrences with pair-state worker, structural recursion on List/recursive ADTs, String+position, mutual Int countdown, mutual String+position, and ranked sizeOf recursion)".to_string()]
4273 );
4274
4275 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4276 let lean = generated_lean_file(&out);
4277 assert!(lean.contains("def termToString__fuel"));
4278 assert!(lean.contains("def subst__fuel"));
4279 assert!(lean.contains("def countS__fuel"));
4280 assert!(!lean.contains("partial def termToString"));
4281 assert!(!lean.contains("partial def subst"));
4282 assert!(!lean.contains("partial def countS"));
4283 assert!(lean.contains("partial def eval"));
4284 }
4285
4286 #[test]
4287 fn mission_control_example_stays_inside_proof_subset() {
4288 let mut ctx = ctx_from_source(
4289 include_str!("../../../examples/apps/mission_control.av"),
4290 "mission_control",
4291 );
4292 ctx.refresh_facts();
4293 let issues = proof_mode_issues(&ctx);
4294 assert!(
4295 issues.is_empty(),
4296 "expected mission_control example to stay inside proof subset, got: {:?}",
4297 issues
4298 );
4299
4300 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4301 let lean = generated_lean_file(&out);
4302 assert!(!lean.contains("partial def normalizeAngle"));
4303 assert!(lean.contains("def normalizeAngle__fuel"));
4304 }
4305
    // notepad/store.av: the whole store stays inside the proof subset, the
    // serialization round-trip laws export as guarded universal theorems plus
    // sampled instance theorems, and `deserializeLine` is total.
    #[test]
    fn notepad_store_example_stays_inside_proof_subset() {
        let mut ctx = ctx_from_source(
            include_str!("../../../examples/apps/notepad/store.av"),
            "notepad_store",
        );
        ctx.refresh_facts();
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected notepad/store example to stay inside proof subset, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // `deserializeLine` must be emitted as a total def with this exact signature.
        assert!(lean.contains("def deserializeLine (line : String) : Except String Note :="));
        assert!(lean.contains("Except String (List Note)"));
        assert!(!lean.contains("partial def deserializeLine"));
        // Law guards surface as `-- when …` comments in the generated Lean.
        assert!(lean.contains("-- when noteRoundtripSafe note"));
        assert!(lean.contains("-- when notesRoundtripSafe notes"));
        // Universal round-trip theorems, with the sampled witnesses disjoined in.
        assert!(lean.contains(
            "theorem serializeLine_law_lineRoundtrip : ∀ (note : Note), note = { id' := 1, title := \"Hello\", body := \"World\" : Note } ∨"
        ));
        assert!(lean.contains(
            "theorem serializeLines_law_notesRoundtrip : ∀ (notes : List Note), notes = [] ∨"
        ));
        assert!(lean.contains("notesRoundtripSafe notes = true ->"));
        assert!(lean.contains("parseNotes (s!\"{String.intercalate \"\\n\" (serializeLines notes)}\\n\") = Except.ok notes"));
        // Per-sample instance theorems are emitted alongside the universal ones.
        assert!(lean.contains("theorem serializeLine_law_lineRoundtrip_sample_1 :"));
        assert!(lean.contains("theorem serializeLines_law_notesRoundtrip_sample_1 :"));
4337 }
4338}