1mod builtins;
7mod expr;
8mod law_auto;
9mod pattern;
10pub(crate) mod recurrence;
11mod shared;
12mod toplevel;
13mod types;
14
15use std::collections::{HashMap, HashSet};
16
17use crate::ast::{Expr, FnDef, Spanned, TopLevel, TypeDef, VerifyKind};
18use crate::codegen::{CodegenContext, ProjectOutput};
19
/// Strategy used when emitting `verify` obligations into generated Lean code.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum VerifyEmitMode {
    /// Discharge obligations automatically (the prelude's examples use the
    /// `native_decide` tactic; presumably verify blocks do the same — the
    /// emission site is elsewhere in this module tree).
    NativeDecide,
    /// Emit `sorry` placeholders so the project builds without proofs.
    Sorry,
    /// Emit theorem statements as skeletons intended for manual proof.
    TheoremSkeleton,
}
32
33pub use crate::codegen::recursion::{ProofModeIssue, RecursionPlan};
38
/// Banner and global options prepended to every generated Lean file.
const LEAN_PRELUDE_HEADER: &str = r#"-- Generated by the Aver → Lean 4 transpiler
-- Pure core logic plus Oracle-lifted classified effects

set_option linter.unusedVariables false

-- Prelude: helper definitions for Aver builtins"#;

/// Float prelude: Int→Float coercion plus saturating Float→Int
/// conversions (`toInt`/`floor`/`ceil`/`round`) intended to match the
/// runtime's `f64 as i64` cast semantics; per-case behavior is pinned by
/// the `native_decide` examples embedded in the string.
const LEAN_PRELUDE_FLOAT_COE: &str = r#"instance : Coe Int Float := ⟨fun n => Float.ofInt n⟩

def Float.fromInt (n : Int) : Float := Float.ofInt n

-- Aver's Float-to-Int operations match the runtime semantics
-- (`f64::floor() as i64` in VM, Rust codegen, WASM — all three use the
-- same IEEE 754 floor/round/ceil followed by Rust's saturating
-- `f64 as i64` cast):
-- * finite values within [i64::MIN, i64::MAX]: truncate toward zero
-- * finite > i64::MAX: saturate to i64::MAX
-- * finite < i64::MIN: saturate to i64::MIN
-- * +Inf: saturate to i64::MAX
-- * -Inf: saturate to i64::MIN
-- * NaN: 0 (Rust 1.45+ defined behavior)
--
-- Lean's `Float.floor : Float → Float` doesn't directly satisfy Aver's
-- `Float.floor : Float → Int`, so we synthesize via the saturating
-- `Float.toUInt64` (returns 0 for NaN/negative) with sign handling and
-- explicit bounds. Per-case correctness is asserted by `native_decide`
-- examples below; total semantic agreement with `f64 as i64` would
-- need a formal IEEE spec in Lean, which is out of scope.
--
-- Asymmetry with the Dafny backend: Lean has IEEE 754 `Float` natively
-- (`double` at runtime), so we use it. Dafny only offers mathematical
-- `real` (Cauchy-style, no NaN/Inf/overflow), which is a fundamental
-- type mismatch with Aver's IEEE Float — Dafny operations stay opaque
-- (`function FloatPi(): real` etc.) rather than synthesizing IEEE on
-- top of `bv64`, which would mean implementing the entire IEEE
-- arithmetic in Dafny by hand.
namespace AverFloat
def toInt (x : Float) : Int :=
 if x.isNaN then 0
 -- 2^63 is exactly representable in f64; values ≥ that saturate up.
 else if x ≥ 9223372036854775808.0 then 9223372036854775807
 -- -2^63 is exactly representable; values strictly below saturate down.
 else if x < -9223372036854775808.0 then -9223372036854775808
 else if x ≥ 0.0 then Int.ofNat x.toUInt64.toNat
 else -(Int.ofNat (-x).toUInt64.toNat)

def floor (x : Float) : Int := toInt x.floor
def ceil (x : Float) : Int := toInt x.ceil
def round (x : Float) : Int := toInt x.round

def pow (x y : Float) : Float := x ^ y

-- Edge-case smoke checks: each `example` is discharged by reduction,
-- so any drift from these documented values fails Lake build.
example : AverFloat.toInt 0.0 = 0 := by native_decide
example : AverFloat.toInt 3.7 = 3 := by native_decide
example : AverFloat.toInt (-3.7) = -3 := by native_decide
example : AverFloat.toInt (1.0 / 0.0) = 9223372036854775807 := by native_decide
example : AverFloat.toInt (-1.0 / 0.0) = -9223372036854775808 := by native_decide
example : AverFloat.toInt (0.0 / 0.0) = 0 := by native_decide
example : AverFloat.floor 3.7 = 3 := by native_decide
example : AverFloat.floor (-3.7) = -4 := by native_decide
example : AverFloat.ceil 3.2 = 4 := by native_decide
example : AverFloat.ceil (-3.2) = -3 := by native_decide
-- Rounding mode (half-away-from-zero, matching Rust's `f64::round`):
example : AverFloat.round 0.5 = 1 := by native_decide
example : AverFloat.round (-0.5) = -1 := by native_decide
example : AverFloat.round 2.5 = 3 := by native_decide
example : AverFloat.round (-2.5) = -3 := by native_decide
end AverFloat"#;

/// `DecidableEq Float` built on the runtime `==`, routed through an
/// `opaque` definition with `@[implemented_by]` so the kernel never
/// unfolds the unsafe body.
const LEAN_PRELUDE_FLOAT_DEC_EQ: &str = r#"private unsafe def Float.unsafeDecEq (a b : Float) : Decidable (a = b) :=
 if a == b then isTrue (unsafeCast ()) else isFalse (unsafeCast ())
@[implemented_by Float.unsafeDecEq]
private opaque Float.compDecEq (a b : Float) : Decidable (a = b)
instance : DecidableEq Float := Float.compDecEq"#;

/// Structural `DecidableEq` instance for `Except ε α`.
const LEAN_PRELUDE_EXCEPT_DEC_EQ: &str = r#"instance [DecidableEq ε] [DecidableEq α] : DecidableEq (Except ε α)
 | .ok a, .ok b =>
 if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
 | .error a, .error b =>
 if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
 | .ok _, .error _ => isFalse (by intro h; cases h)
 | .error _, .ok _ => isFalse (by intro h; cases h)"#;

/// `Except.withDefault`: unwrap an `ok` value or fall back to a default.
const LEAN_PRELUDE_EXCEPT_NS: &str = r#"namespace Except
def withDefault (r : Except ε α) (d : α) : α :=
 match r with
 | .ok v => v
 | .error _ => d
end Except"#;

/// `Option.toExcept`: convert `none` into the supplied error payload.
const LEAN_PRELUDE_OPTION_TO_EXCEPT: &str = r#"def Option.toExcept (o : Option α) (e : ε) : Except ε α :=
 match o with
 | some v => .ok v
 | none => .error e"#;

/// `HAdd` instance so `+` concatenates strings (Aver's `+` on String).
const LEAN_PRELUDE_STRING_HADD: &str = r#"instance : HAdd String String String := ⟨String.append⟩"#;
137
/// Dewey-decimal branch identifiers (e.g. "1.0.2") used to address
/// case paths; `Root` is the empty path and `child` appends an index.
const LEAN_PRELUDE_BRANCH_PATH: &str = r#"structure BranchPath where
 dewey : String
 deriving Repr, BEq, DecidableEq

def BranchPath.Root : BranchPath := { dewey := "" }

def BranchPath.child (p : BranchPath) (idx : Int) : BranchPath :=
 if p.dewey.isEmpty then { dewey := toString idx }
 else { dewey := p.dewey ++ "." ++ toString idx }

def BranchPath.parse (s : String) : BranchPath := { dewey := s }"#;

/// Fuel bound for string-position recursions: remaining characters
/// (plus one) scaled by a per-step rank budget.
const LEAN_PRELUDE_PROOF_FUEL: &str = r#"def averStringPosFuel (s : String) (pos : Int) (rankBudget : Nat) : Nat :=
 (((s.data.length) - pos.toNat) + 1) * rankBudget"#;

/// Strictly-positive structural size measures for List/Option/Except,
/// used as termination measures for generated recursive functions.
const LEAN_PRELUDE_AVER_MEASURE: &str = r#"namespace AverMeasure
def list (elemMeasure : α → Nat) : List α → Nat
 | [] => 1
 | x :: xs => elemMeasure x + list elemMeasure xs + 1
def option (elemMeasure : α → Nat) : Option α → Nat
 | none => 1
 | some x => elemMeasure x + 1
def except (errMeasure : ε → Nat) (okMeasure : α → Nat) : Except ε α → Nat
 | .error e => errMeasure e + 1
 | .ok v => okMeasure v + 1
end AverMeasure"#;
167
/// Association-list map (`List (α × β)`) with get/set/has/remove etc.
/// The namespace is opened here and closed by `AVER_MAP_PRELUDE_END`;
/// the theorem constants below are emitted in between as needed.
const AVER_MAP_PRELUDE_BASE: &str = r#"namespace AverMap
def empty : List (α × β) := []
def get [DecidableEq α] (m : List (α × β)) (k : α) : Option β :=
 match m with
 | [] => none
 | (k', v) :: rest => if k = k' then some v else AverMap.get rest k
def set [DecidableEq α] (m : List (α × β)) (k : α) (v : β) : List (α × β) :=
 let rec go : List (α × β) → List (α × β)
 | [] => [(k, v)]
 | (k', v') :: rest => if k = k' then (k, v) :: rest else (k', v') :: go rest
 go m
def has [DecidableEq α] (m : List (α × β)) (k : α) : Bool :=
 m.any (fun p => decide (k = p.1))
def remove [DecidableEq α] (m : List (α × β)) (k : α) : List (α × β) :=
 m.filter (fun p => !(decide (k = p.1)))
def keys (m : List (α × β)) : List α := m.map Prod.fst
def values (m : List (α × β)) : List β := m.map Prod.snd
def entries (m : List (α × β)) : List (α × β) := m
def len (m : List (α × β)) : Nat := m.length
def fromList (entries : List (α × β)) : List (α × β) := entries"#;

/// Lemma: a key just `set` is reported present by `has`.
const AVER_MAP_PRELUDE_HAS_SET_SELF: &str = r#"private theorem any_set_go_self [DecidableEq α] (k : α) (v : β) :
 ∀ (m : List (α × β)), List.any (AverMap.set.go k v m) (fun p => decide (k = p.1)) = true := by
 intro m
 induction m with
 | nil =>
 simp [AverMap.set.go, List.any]
 | cons p tl ih =>
 cases p with
 | mk k' v' =>
 by_cases h : k = k'
 · simp [AverMap.set.go, List.any, h]
 · simp [AverMap.set.go, List.any, h, ih]

theorem has_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
 AverMap.has (AverMap.set m k v) k = true := by
 simpa [AverMap.has, AverMap.set] using any_set_go_self k v m"#;

/// Lemma: `get` after `set` on the same key returns the new value.
const AVER_MAP_PRELUDE_GET_SET_SELF: &str = r#"private theorem get_set_go_self [DecidableEq α] (k : α) (v : β) :
 ∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) k = some v := by
 intro m
 induction m with
 | nil =>
 simp [AverMap.set.go, AverMap.get]
 | cons p tl ih =>
 cases p with
 | mk k' v' =>
 by_cases h : k = k'
 · simp [AverMap.set.go, AverMap.get, h]
 · simp [AverMap.set.go, AverMap.get, h, ih]

theorem get_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
 AverMap.get (AverMap.set m k v) k = some v := by
 simpa [AverMap.set] using get_set_go_self k v m"#;

/// Lemma: `set` on key `k` leaves `get` of any other key unchanged.
const AVER_MAP_PRELUDE_GET_SET_OTHER: &str = r#"private theorem get_set_go_other [DecidableEq α] (k key : α) (v : β) (h : key ≠ k) :
 ∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) key = AverMap.get m key := by
 intro m
 induction m with
 | nil =>
 simp [AverMap.set.go, AverMap.get, h]
 | cons p tl ih =>
 cases p with
 | mk k' v' =>
 by_cases hk : k = k'
 · have hkey : key ≠ k' := by simpa [hk] using h
 simp [AverMap.set.go, AverMap.get, hk, hkey]
 · by_cases hkey : key = k'
 · simp [AverMap.set.go, AverMap.get, hk, hkey]
 · simp [AverMap.set.go, AverMap.get, hk, hkey, ih]

theorem get_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
 AverMap.get (AverMap.set m k v) key = AverMap.get m key := by
 simpa [AverMap.set] using get_set_go_other k key v h m"#;

/// Lemmas: `has` agrees with `(get _).isSome`, hence `set` on another
/// key does not change membership. NOTE: `has_set_other` relies on
/// `get_set_other`, so it is presumably only emitted alongside
/// `AVER_MAP_PRELUDE_GET_SET_OTHER` — confirm at the emission site.
const AVER_MAP_PRELUDE_HAS_SET_OTHER: &str = r#"theorem has_eq_isSome_get [DecidableEq α] (m : List (α × β)) (k : α) :
 AverMap.has m k = (AverMap.get m k).isSome := by
 induction m with
 | nil =>
 simp [AverMap.has, AverMap.get]
 | cons p tl ih =>
 cases p with
 | mk k' v' =>
 by_cases h : k = k'
 · simp [AverMap.has, AverMap.get, List.any, h]
 · simpa [AverMap.has, AverMap.get, List.any, h] using ih

theorem has_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
 AverMap.has (AverMap.set m k v) key = AverMap.has m key := by
 rw [AverMap.has_eq_isSome_get, AverMap.has_eq_isSome_get]
 simp [AverMap.get_set_other, h]"#;

/// Closes the `AverMap` namespace opened by `AVER_MAP_PRELUDE_BASE`.
const AVER_MAP_PRELUDE_END: &str = r#"end AverMap"#;
261
/// List helpers: negative-index-safe `get` and an insertion sort over
/// any `Ord` (built by folding `insertSorted` over the input).
const LEAN_PRELUDE_AVER_LIST: &str = r#"namespace AverList
def get (xs : List α) (i : Int) : Option α :=
 if i < 0 then none else xs.get? i.toNat
private def insertSorted [Ord α] (x : α) : List α → List α
 | [] => [x]
 | y :: ys =>
 if compare x y == Ordering.lt || compare x y == Ordering.eq then
 x :: y :: ys
 else
 y :: insertSorted x ys
def sort [Ord α] (xs : List α) : List α :=
 xs.foldl (fun acc x => insertSorted x acc) []
end AverList"#;
275
/// String helpers (charAt/slice/fromFloat/chars) plus the `AverString`
/// namespace with a char-based `split` and the lemmas relating `split`
/// to `String.intercalate` that generated proofs lean on.
const LEAN_PRELUDE_STRING_HELPERS: &str = r#"def String.charAt (s : String) (i : Int) : Option String :=
 if i < 0 then none
 else (s.toList.get? i.toNat).map Char.toString
theorem String.charAt_length_none (s : String) : String.charAt s s.length = none := by
 have hs : ¬ ((s.length : Int) < 0) := by omega
 unfold String.charAt
 simp [hs]
 change s.data.length ≤ s.length
 exact Nat.le_refl _
def String.slice (s : String) (start stop : Int) : String :=
 let startN := if start < 0 then 0 else start.toNat
 let stopN := if stop < 0 then 0 else stop.toNat
 let chars := s.toList
 String.mk ((chars.drop startN).take (stopN - startN))
private def trimFloatTrailingZerosChars (chars : List Char) : List Char :=
 let noZeros := (chars.reverse.dropWhile (fun c => c == '0')).reverse
 match noZeros.reverse with
 | '.' :: rest => rest.reverse
 | _ => noZeros
private def normalizeFloatString (s : String) : String :=
 if s.toList.any (fun c => c == '.') then
 String.mk (trimFloatTrailingZerosChars s.toList)
 else s
def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)
def String.chars (s : String) : List String := s.toList.map Char.toString
private theorem char_to_string_append_mk (c : Char) (chars : List Char) :
 Char.toString c ++ String.mk chars = String.mk (c :: chars) := by
 rfl
private theorem string_intercalate_empty_char_strings_go (acc : String) (chars : List Char) :
 String.intercalate.go acc "" (chars.map Char.toString) = acc ++ String.mk chars := by
 induction chars generalizing acc with
 | nil =>
 simp [String.intercalate.go]
 | cons c rest ih =>
 calc
 String.intercalate.go acc "" ((c :: rest).map Char.toString)
 = String.intercalate.go (acc ++ Char.toString c) "" (rest.map Char.toString) := by
 simp [String.intercalate.go]
 _ = (acc ++ Char.toString c) ++ String.mk rest := by
 simpa using ih (acc ++ Char.toString c)
 _ = acc ++ String.mk (c :: rest) := by
 simp [String.append_assoc, char_to_string_append_mk]
private theorem string_intercalate_empty_char_strings (chars : List Char) :
 String.intercalate "" (chars.map Char.toString) = String.mk chars := by
 cases chars with
 | nil =>
 simp [String.intercalate]
 | cons c rest =>
 simpa [String.intercalate] using string_intercalate_empty_char_strings_go c.toString rest
theorem String.intercalate_empty_chars (s : String) :
 String.intercalate "" (String.chars s) = s := by
 cases s with
 | mk chars =>
 simpa [String.chars] using string_intercalate_empty_char_strings chars
namespace AverString
def splitOnCharGo (currentRev : List Char) (sep : Char) : List Char → List String
 | [] => [String.mk currentRev.reverse]
 | c :: cs =>
 if c == sep then
 String.mk currentRev.reverse :: splitOnCharGo [] sep cs
 else
 splitOnCharGo (c :: currentRev) sep cs
def splitOnChar (s : String) (sep : Char) : List String :=
 splitOnCharGo [] sep s.toList
def split (s delim : String) : List String :=
 match delim.toList with
 | [] => "" :: (s.toList.map Char.toString) ++ [""]
 | [c] => splitOnChar s c
 | _ => s.splitOn delim
@[simp] private theorem char_toString_data (c : Char) : c.toString.data = [c] := by
 rfl
private theorem splitOnCharGo_until_sep
 (prefixRev part tail : List Char) (sep : Char) :
 part.all (fun c => c != sep) = true ->
 splitOnCharGo prefixRev sep (part ++ sep :: tail) =
 String.mk (prefixRev.reverse ++ part) :: splitOnCharGo [] sep tail := by
 intro h_safe
 induction part generalizing prefixRev with
 | nil =>
 simp [splitOnCharGo]
 | cons c rest ih =>
 simp at h_safe
 have h_rest : (rest.all fun c => c != sep) = true := by
 simpa using h_safe.2
 simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
 (ih (c :: prefixRev) h_rest)
private theorem splitOnCharGo_no_sep
 (prefixRev chars : List Char) (sep : Char) :
 chars.all (fun c => c != sep) = true ->
 splitOnCharGo prefixRev sep chars =
 [String.mk (prefixRev.reverse ++ chars)] := by
 intro h_safe
 induction chars generalizing prefixRev with
 | nil =>
 simp [splitOnCharGo]
 | cons c rest ih =>
 simp at h_safe
 have h_rest : (rest.all fun c => c != sep) = true := by
 simpa using h_safe.2
 simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
 (ih (c :: prefixRev) h_rest)
@[simp] theorem split_single_char_append
 (head tail : String) (sep : Char) :
 head.toList.all (fun c => c != sep) = true ->
 split (head ++ Char.toString sep ++ tail) (Char.toString sep) =
 head :: split tail (Char.toString sep) := by
 intro h_safe
 simpa [split, splitOnChar] using
 (splitOnCharGo_until_sep [] head.data tail.data sep h_safe)
@[simp] theorem split_single_char_no_sep
 (s : String) (sep : Char) :
 s.toList.all (fun c => c != sep) = true ->
 split s (Char.toString sep) = [s] := by
 intro h_safe
 simpa [split, splitOnChar] using
 (splitOnCharGo_no_sep [] s.data sep h_safe)
private theorem intercalate_go_prefix
 (pref acc sep : String) (rest : List String) :
 String.intercalate.go (pref ++ sep ++ acc) sep rest =
 pref ++ sep ++ String.intercalate.go acc sep rest := by
 induction rest generalizing acc with
 | nil =>
 simp [String.intercalate.go, String.append_assoc]
 | cons x xs ih =>
 simpa [String.intercalate.go, String.append_assoc] using
 (ih (acc ++ sep ++ x))
@[simp] theorem split_intercalate_trailing_single_char
 (parts : List String) (sep : Char) :
 parts.all (fun part => part.toList.all (fun c => c != sep)) = true ->
 split (String.intercalate (Char.toString sep) parts ++ Char.toString sep) (Char.toString sep) =
 match parts with
 | [] => ["", ""]
 | _ => parts ++ [""] := by
 intro h_safe
 induction parts with
 | nil =>
 simp [split, splitOnChar, splitOnCharGo]
 | cons part rest ih =>
 simp at h_safe
 have h_part : (part.toList.all fun c => c != sep) = true := by
 simpa using h_safe.1
 cases rest with
 | nil =>
 have h_empty : ("".toList.all fun c => c != sep) = true := by simp
 calc
 split (String.intercalate.go part (Char.toString sep) [] ++ Char.toString sep) (Char.toString sep)
 = split (part ++ Char.toString sep) (Char.toString sep) := by
 simp [String.intercalate.go]
 _ = part :: split "" (Char.toString sep) := by
 simpa using split_single_char_append part "" sep h_part
 _ = [part, ""] := by
 simp [split_single_char_no_sep, h_empty]
 | cons next rest' =>
 have h_rest : ((next :: rest').all fun part => part.toList.all fun c => c != sep) = true := by
 simpa using h_safe.2
 calc
 split
 (String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep)
 (Char.toString sep)
 =
 split
 (part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep))
 (Char.toString sep) := by
 have h_join :
 String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
 = part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
 calc
 String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
 = String.intercalate.go (part ++ Char.toString sep ++ next) (Char.toString sep) rest' ++ Char.toString sep := by
 simp [String.intercalate.go]
 _ = part ++ Char.toString sep ++ (String.intercalate.go next (Char.toString sep) rest' ++ Char.toString sep) := by
 rw [intercalate_go_prefix part next (Char.toString sep) rest']
 simp [String.append_assoc]
 _ = part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
 simp [String.intercalate, String.intercalate.go]
 simpa using congrArg (fun s => split s (Char.toString sep)) h_join
 _ = part :: split
 (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
 (Char.toString sep) := by
 simpa using split_single_char_append
 part
 (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
 sep
 h_part
 _ = part :: (next :: rest' ++ [""]) := by
 simpa using ih h_rest
end AverString"#;
470
/// Numeric parsing/printing: decimal digit machinery (`AverDigits`)
/// with a round-trip proof `Int.fromString_fromInt`, plus
/// `String.fromInt`, `Int.fromString`, and a `Float.fromString` parser
/// handling sign, fraction, and exponent parts.
const LEAN_PRELUDE_NUMERIC_PARSE: &str = r#"namespace AverDigits
def foldDigitsAcc (acc : Nat) : List Nat -> Nat
 | [] => acc
 | d :: ds => foldDigitsAcc (acc * 10 + d) ds

def foldDigits (digits : List Nat) : Nat :=
 foldDigitsAcc 0 digits

private theorem foldDigitsAcc_append_singleton (acc : Nat) (xs : List Nat) (d : Nat) :
 foldDigitsAcc acc (xs ++ [d]) = foldDigitsAcc acc xs * 10 + d := by
 induction xs generalizing acc with
 | nil =>
 simp [foldDigitsAcc]
 | cons x xs ih =>
 simp [foldDigitsAcc, ih, Nat.left_distrib, Nat.add_assoc, Nat.add_left_comm]

private theorem foldDigits_append_singleton (xs : List Nat) (d : Nat) :
 foldDigits (xs ++ [d]) = foldDigits xs * 10 + d := by
 simpa [foldDigits] using foldDigitsAcc_append_singleton 0 xs d

def natDigits : Nat -> List Nat
 | n =>
 if n < 10 then
 [n]
 else
 natDigits (n / 10) ++ [n % 10]
termination_by
 n => n

theorem natDigits_nonempty (n : Nat) : natDigits n ≠ [] := by
 by_cases h : n < 10
 · rw [natDigits.eq_1]
 simp [h]
 · rw [natDigits.eq_1]
 simp [h]

theorem natDigits_digits_lt_ten : ∀ n : Nat, ∀ d ∈ natDigits n, d < 10 := by
 intro n d hd
 by_cases h : n < 10
 · rw [natDigits.eq_1] at hd
 simp [h] at hd
 rcases hd with rfl
 exact h
 · rw [natDigits.eq_1] at hd
 simp [h] at hd
 rcases hd with hd | hd
 · exact natDigits_digits_lt_ten (n / 10) d hd
 · subst hd
 exact Nat.mod_lt n (by omega)

theorem foldDigits_natDigits : ∀ n : Nat, foldDigits (natDigits n) = n := by
 intro n
 by_cases h : n < 10
 · rw [natDigits.eq_1]
 simp [h, foldDigits, foldDigitsAcc]
 · rw [natDigits.eq_1]
 simp [h]
 rw [foldDigits_append_singleton]
 rw [foldDigits_natDigits (n / 10)]
 omega

def digitChar : Nat -> Char
 | 0 => '0' | 1 => '1' | 2 => '2' | 3 => '3' | 4 => '4'
 | 5 => '5' | 6 => '6' | 7 => '7' | 8 => '8' | 9 => '9'
 | _ => '0'

def charToDigit? : Char -> Option Nat
 | '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3 | '4' => some 4
 | '5' => some 5 | '6' => some 6 | '7' => some 7 | '8' => some 8 | '9' => some 9
 | _ => none

theorem charToDigit_digitChar : ∀ d : Nat, d < 10 -> charToDigit? (digitChar d) = some d
 | 0, _ => by simp [digitChar, charToDigit?]
 | 1, _ => by simp [digitChar, charToDigit?]
 | 2, _ => by simp [digitChar, charToDigit?]
 | 3, _ => by simp [digitChar, charToDigit?]
 | 4, _ => by simp [digitChar, charToDigit?]
 | 5, _ => by simp [digitChar, charToDigit?]
 | 6, _ => by simp [digitChar, charToDigit?]
 | 7, _ => by simp [digitChar, charToDigit?]
 | 8, _ => by simp [digitChar, charToDigit?]
 | 9, _ => by simp [digitChar, charToDigit?]
 | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
 omega

theorem digitChar_ne_minus : ∀ d : Nat, d < 10 -> digitChar d ≠ '-'
 | 0, _ => by decide
 | 1, _ => by decide
 | 2, _ => by decide
 | 3, _ => by decide
 | 4, _ => by decide
 | 5, _ => by decide
 | 6, _ => by decide
 | 7, _ => by decide
 | 8, _ => by decide
 | 9, _ => by decide
 | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
 omega

theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->
 Char.toString (digitChar d) ≠ " " ∧
 Char.toString (digitChar d) ≠ "\t" ∧
 Char.toString (digitChar d) ≠ "\n" ∧
 Char.toString (digitChar d) ≠ "\r"
 | 0, _ => by decide
 | 1, _ => by decide
 | 2, _ => by decide
 | 3, _ => by decide
 | 4, _ => by decide
 | 5, _ => by decide
 | 6, _ => by decide
 | 7, _ => by decide
 | 8, _ => by decide
 | 9, _ => by decide
 | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
 omega

theorem mapM_charToDigit_digits : ∀ ds : List Nat,
 (∀ d ∈ ds, d < 10) -> List.mapM charToDigit? (ds.map digitChar) = some ds := by
 intro ds hds
 induction ds with
 | nil =>
 simp
 | cons d ds ih =>
 have hd : d < 10 := hds d (by simp)
 have htail : ∀ x ∈ ds, x < 10 := by
 intro x hx
 exact hds x (by simp [hx])
 simp [charToDigit_digitChar d hd, ih htail]

def natDigitsChars (n : Nat) : List Char :=
 (natDigits n).map digitChar

def parseNatChars (chars : List Char) : Option Nat :=
 match chars with
 | [] => none
 | _ => do
 let digits <- List.mapM charToDigit? chars
 pure (foldDigits digits)

theorem parseNatChars_nat (n : Nat) :
 parseNatChars (natDigitsChars n) = some n := by
 unfold parseNatChars natDigitsChars
 cases h : (natDigits n).map digitChar with
 | nil =>
 exfalso
 exact natDigits_nonempty n (List.map_eq_nil_iff.mp h)
 | cons hd tl =>
 have hdigits : List.mapM charToDigit? (List.map digitChar (natDigits n)) = some (natDigits n) :=
 mapM_charToDigit_digits (natDigits n) (fun d hd => natDigits_digits_lt_ten n d hd)
 rw [h] at hdigits
 simp [h, hdigits, foldDigits_natDigits]
end AverDigits

def String.fromInt (n : Int) : String :=
 match n with
 | .ofNat m => String.mk (AverDigits.natDigitsChars m)
 | .negSucc m => String.mk ('-' :: AverDigits.natDigitsChars (m + 1))

def Int.fromString (s : String) : Except String Int :=
 match s.toList with
 | [] => .error ("Cannot parse '" ++ s ++ "' as Int")
 | '-' :: rest =>
 match AverDigits.parseNatChars rest with
 | some n => .ok (-Int.ofNat n)
 | none => .error ("Cannot parse '" ++ s ++ "' as Int")
 | chars =>
 match AverDigits.parseNatChars chars with
 | some n => .ok (Int.ofNat n)
 | none => .error ("Cannot parse '" ++ s ++ "' as Int")

theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n
 | .ofNat m => by
 cases h : AverDigits.natDigits m with
 | nil =>
 exfalso
 exact AverDigits.natDigits_nonempty m h
 | cons d ds =>
 have hd : d < 10 := AverDigits.natDigits_digits_lt_ten m d (by simp [h])
 have hne : AverDigits.digitChar d ≠ '-' := AverDigits.digitChar_ne_minus d hd
 have hparse : AverDigits.parseNatChars (AverDigits.digitChar d :: List.map AverDigits.digitChar ds) = some m := by
 simpa [AverDigits.natDigitsChars, h] using AverDigits.parseNatChars_nat m
 simp [String.fromInt, Int.fromString, AverDigits.natDigitsChars, h, hne, hparse]
 | .negSucc m => by
 simp [String.fromInt, Int.fromString, AverDigits.parseNatChars_nat]
 rfl

private def charDigitsToNat (cs : List Char) : Nat :=
 cs.foldl (fun acc c => acc * 10 + (c.toNat - '0'.toNat)) 0

private def parseExpPart : List Char → (Bool × List Char)
 | '-' :: rest => (true, rest.takeWhile Char.isDigit)
 | '+' :: rest => (false, rest.takeWhile Char.isDigit)
 | rest => (false, rest.takeWhile Char.isDigit)

def Float.fromString (s : String) : Except String Float :=
 let chars := s.toList
 let (neg, chars) := match chars with
 | '-' :: rest => (true, rest)
 | _ => (false, chars)
 let intPart := chars.takeWhile Char.isDigit
 let rest := chars.dropWhile Char.isDigit
 let (fracPart, rest) := match rest with
 | '.' :: rest => (rest.takeWhile Char.isDigit, rest.dropWhile Char.isDigit)
 | _ => ([], rest)
 let (expNeg, expDigits) := match rest with
 | 'e' :: rest => parseExpPart rest
 | 'E' :: rest => parseExpPart rest
 | _ => (false, [])
 if intPart.isEmpty && fracPart.isEmpty then .error ("Invalid float: " ++ s)
 else
 let mantissa := charDigitsToNat (intPart ++ fracPart)
 let fracLen : Int := fracPart.length
 let expVal : Int := charDigitsToNat expDigits
 let shift : Int := (if expNeg then -expVal else expVal) - fracLen
 let f := if shift >= 0 then Float.ofScientific mantissa false shift.toNat
 else Float.ofScientific mantissa true ((-shift).toNat)
 .ok (if neg then -f else f)"#;
689
/// Char/byte helpers: code-point conversions (rejecting out-of-range
/// values and surrogates in `fromCode`) and two-digit hex encode/decode
/// for bytes in the `AverByte` namespace.
const LEAN_PRELUDE_CHAR_BYTE: &str = r#"def Char.toCode (s : String) : Int :=
 match s.toList.head? with
 | some c => (c.toNat : Int)
 | none => panic! "Char.toCode: string is empty"
def Char.fromCode (n : Int) : Option String :=
 if n < 0 || n > 1114111 then none
 else if n >= 55296 && n <= 57343 then none
 else some (Char.toString (Char.ofNat n.toNat))

def hexDigit (n : Int) : String :=
 match n with
 | 0 => "0" | 1 => "1" | 2 => "2" | 3 => "3"
 | 4 => "4" | 5 => "5" | 6 => "6" | 7 => "7"
 | 8 => "8" | 9 => "9" | 10 => "a" | 11 => "b"
 | 12 => "c" | 13 => "d" | 14 => "e" | 15 => "f"
 | _ => "?"

def byteToHex (code : Int) : String :=
 hexDigit (code / 16) ++ hexDigit (code % 16)

namespace AverByte
private def hexValue (c : Char) : Option Int :=
 match c with
 | '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3
 | '4' => some 4 | '5' => some 5 | '6' => some 6 | '7' => some 7
 | '8' => some 8 | '9' => some 9 | 'a' => some 10 | 'b' => some 11
 | 'c' => some 12 | 'd' => some 13 | 'e' => some 14 | 'f' => some 15
 | 'A' => some 10 | 'B' => some 11 | 'C' => some 12 | 'D' => some 13
 | 'E' => some 14 | 'F' => some 15
 | _ => none
def toHex (n : Int) : Except String String :=
 if n < 0 || n > 255 then
 .error ("Byte.toHex: " ++ toString n ++ " is out of range 0-255")
 else
 .ok (byteToHex n)
def fromHex (s : String) : Except String Int :=
 match s.toList with
 | [hi, lo] =>
 match hexValue hi, hexValue lo with
 | some h, some l => .ok (h * 16 + l)
 | _, _ => .error ("Byte.fromHex: invalid hex '" ++ s ++ "'")
 | _ => .error ("Byte.fromHex: expected exactly 2 hex chars, got '" ++ s ++ "'")
end AverByte"#;
733
734pub(crate) fn pure_fns(ctx: &CodegenContext) -> Vec<&FnDef> {
735 ctx.modules
736 .iter()
737 .flat_map(|m| m.fn_defs.iter())
738 .chain(ctx.fn_defs.iter())
739 .filter(|fd| toplevel::is_pure_fn(fd))
740 .collect()
741}
742
743pub(crate) fn recursive_type_names(ctx: &CodegenContext) -> HashSet<String> {
744 ctx.modules
745 .iter()
746 .flat_map(|m| m.type_defs.iter())
747 .chain(ctx.type_defs.iter())
748 .filter(|td| toplevel::is_recursive_type_def(td))
749 .map(|td| toplevel::type_def_name(td).to_string())
750 .collect()
751}
752
753pub(crate) fn recursive_pure_fn_names(ctx: &CodegenContext) -> HashSet<String> {
754 let pure_names: HashSet<String> = pure_fns(ctx)
755 .into_iter()
756 .map(|fd| fd.name.clone())
757 .collect();
758 ctx.recursive_fns
764 .iter()
765 .filter(|name| pure_names.contains(name.as_str()))
766 .cloned()
767 .collect()
768}
769
770fn verify_counter_key(vb: &crate::ast::VerifyBlock) -> String {
771 match &vb.kind {
772 VerifyKind::Cases => format!("fn:{}", vb.fn_name),
773 VerifyKind::Law(law) => format!("law:{}::{}", vb.fn_name, law.name),
774 }
775}
776
/// Lean package name for the generated project: delegates to the shared
/// entry-file basename helper so all backends agree on naming.
fn lean_project_name(ctx: &CodegenContext) -> String {
    crate::codegen::common::entry_basename(ctx)
}
780
781pub(crate) fn find_type_def<'a>(ctx: &'a CodegenContext, type_name: &str) -> Option<&'a TypeDef> {
782 ctx.modules
783 .iter()
784 .flat_map(|m| m.type_defs.iter())
785 .chain(ctx.type_defs.iter())
786 .find(|td| toplevel::type_def_name(td) == type_name)
787}
788
789pub(super) fn bound_expr_to_lean(expr: &Spanned<Expr>) -> String {
790 match &expr.node {
791 Expr::Literal(crate::ast::Literal::Int(n)) => format!("{}", n),
792 Expr::Ident(name) => expr::aver_name_to_lean(name),
793 Expr::FnCall(f, args) => {
794 if let Some(dotted) = crate::codegen::common::expr_to_dotted_name(&f.node) {
795 if dotted == "List.len" && args.len() == 1 {
797 return format!("{}.length", bound_expr_to_lean(&args[0]));
798 }
799 let lean_args: Vec<String> = args.iter().map(bound_expr_to_lean).collect();
800 format!(
801 "({} {})",
802 expr::aver_name_to_lean(&dotted),
803 lean_args.join(" ")
804 )
805 } else {
806 "0".to_string()
807 }
808 }
809 Expr::Attr(obj, field) => format!(
810 "{}.{}",
811 bound_expr_to_lean(obj),
812 expr::aver_name_to_lean(field)
813 ),
814 _ => "0".to_string(),
815 }
816}
817
818pub(crate) fn sizeof_measure_param_indices(fd: &FnDef) -> Vec<usize> {
819 fd.params
820 .iter()
821 .enumerate()
822 .filter_map(|(idx, (_, type_name))| {
823 (!crate::codegen::recursion::detect::is_scalar_like_type(type_name)).then_some(idx)
824 })
825 .collect()
826}
827
828pub fn proof_mode_findings(ctx: &CodegenContext) -> Vec<ProofModeIssue> {
833 let (_plans, issues) = crate::codegen::recursion::analyze_plans(ctx);
834 issues
835}
836
837pub fn proof_mode_issues(ctx: &CodegenContext) -> Vec<String> {
838 proof_mode_findings(ctx)
839 .into_iter()
840 .map(|issue| issue.message)
841 .collect()
842}
843
/// Transpile with the default verify emission mode (`native_decide`).
pub fn transpile(ctx: &mut CodegenContext) -> ProjectOutput {
    transpile_with_verify_mode(ctx, VerifyEmitMode::NativeDecide)
}
855
/// Transpile in proof mode: facts are refreshed, then the unified pipeline
/// runs with `LeanEmitMode::Proof` (recursion plans, measures).
pub fn transpile_for_proof_mode(
    ctx: &mut CodegenContext,
    verify_mode: VerifyEmitMode,
) -> ProjectOutput {
    ctx.refresh_facts();
    transpile_unified(ctx, verify_mode, LeanEmitMode::Proof)
}
867
/// Transpile in standard mode with an explicit verify emission mode; facts
/// are refreshed before the unified pipeline runs.
pub fn transpile_with_verify_mode(
    ctx: &mut CodegenContext,
    verify_mode: VerifyEmitMode,
) -> ProjectOutput {
    ctx.refresh_facts();
    transpile_unified(ctx, verify_mode, LeanEmitMode::Standard)
}
880
881fn emit_lifted_effectful_functions(
890 ctx: &CodegenContext,
891 recursive_fns: &HashSet<String>,
892 sections: &mut Vec<String>,
893) {
894 use crate::types::checker::effect_classification::is_classified;
895
896 let reachable = crate::codegen::common::verify_reachable_fn_names(&ctx.items);
902
903 let mut helpers: std::collections::HashMap<String, Vec<String>> =
908 std::collections::HashMap::new();
909 for item in &ctx.items {
910 let TopLevel::FnDef(fd) = item else { continue };
911 if fd.effects.is_empty() || fd.name == "main" {
912 continue;
913 }
914 if !fd.effects.iter().all(|e| is_classified(&e.node)) {
915 continue;
916 }
917 if !reachable.contains(&fd.name) {
918 continue;
919 }
920 helpers.insert(
921 fd.name.clone(),
922 fd.effects.iter().map(|e| e.node.clone()).collect(),
923 );
924 }
925
926 let mut lifted_fns: Vec<(String, crate::ast::FnDef)> = Vec::new();
927 for item in &ctx.items {
928 let TopLevel::FnDef(fd) = item else { continue };
929 if fd.effects.is_empty() || fd.name == "main" {
930 continue;
931 }
932 if !fd.effects.iter().all(|e| is_classified(&e.node)) {
933 continue;
934 }
935 if !reachable.contains(&fd.name) {
936 continue;
937 }
938 let Ok(Some(lifted)) =
939 crate::types::checker::effect_lifting::lift_fn_def_with_helpers(fd, &helpers)
940 else {
941 continue;
942 };
943 lifted_fns.push((fd.name.clone(), lifted));
944 }
945
946 let eligible_names: std::collections::HashSet<String> =
954 lifted_fns.iter().map(|(n, _)| n.clone()).collect();
955 let mut emitted: std::collections::HashSet<String> = std::collections::HashSet::new();
956 let mut order: Vec<usize> = Vec::new();
957 let mut remaining: Vec<usize> = (0..lifted_fns.len()).collect();
958 while !remaining.is_empty() {
959 let before = remaining.len();
960 remaining.retain(|&idx| {
961 let body_calls = collect_called_idents_in_body(&lifted_fns[idx].1.body);
962 let ready = body_calls
963 .iter()
964 .all(|name| !eligible_names.contains(name) || emitted.contains(name));
965 if ready {
966 emitted.insert(lifted_fns[idx].0.clone());
967 order.push(idx);
968 false
969 } else {
970 true
971 }
972 });
973 if remaining.len() == before {
974 order.append(&mut remaining);
978 }
979 }
980
981 for idx in order {
982 let (_, lifted) = &lifted_fns[idx];
983 if let Some(code) = toplevel::emit_fn_def(lifted, recursive_fns, ctx) {
984 sections.push(code);
985 sections.push(String::new());
986 }
987 }
988}
989
990fn collect_called_idents_in_body(body: &crate::ast::FnBody) -> std::collections::HashSet<String> {
991 use crate::ast::{Expr, Spanned, Stmt};
992 let mut out = std::collections::HashSet::new();
993 fn walk(expr: &Spanned<Expr>, out: &mut std::collections::HashSet<String>) {
994 match &expr.node {
995 Expr::FnCall(callee, args) => {
996 if let Expr::Ident(name) | Expr::Resolved { name, .. } = &callee.node {
997 out.insert(name.clone());
998 }
999 walk(callee, out);
1000 for a in args {
1001 walk(a, out);
1002 }
1003 }
1004 Expr::BinOp(_, l, r) => {
1005 walk(l, out);
1006 walk(r, out);
1007 }
1008 Expr::Match { subject, arms } => {
1009 walk(subject, out);
1010 for arm in arms {
1011 walk(&arm.body, out);
1012 }
1013 }
1014 Expr::Attr(inner, _) | Expr::ErrorProp(inner) => walk(inner, out),
1015 Expr::Constructor(_, Some(inner)) => walk(inner, out),
1016 Expr::List(items) | Expr::Tuple(items) | Expr::IndependentProduct(items, _) => {
1017 for i in items {
1018 walk(i, out);
1019 }
1020 }
1021 Expr::MapLiteral(pairs) => {
1022 for (k, v) in pairs {
1023 walk(k, out);
1024 walk(v, out);
1025 }
1026 }
1027 Expr::RecordCreate { fields, .. } => {
1028 for (_, v) in fields {
1029 walk(v, out);
1030 }
1031 }
1032 Expr::RecordUpdate { base, updates, .. } => {
1033 walk(base, out);
1034 for (_, v) in updates {
1035 walk(v, out);
1036 }
1037 }
1038 Expr::InterpolatedStr(parts) => {
1039 for part in parts {
1040 if let crate::ast::StrPart::Parsed(inner) = part {
1041 walk(inner, out);
1042 }
1043 }
1044 }
1045 _ => {}
1046 }
1047 }
1048 for stmt in body.stmts() {
1049 match stmt {
1050 Stmt::Expr(e) => walk(e, &mut out),
1051 Stmt::Binding(_, _, e) => walk(e, &mut out),
1052 }
1053 }
1054 out
1055}
1056
/// Test-only: the full prelude with every helper forced on.
#[cfg(test)]
fn generate_prelude() -> String {
    generate_prelude_for_body("", true)
}
1061
/// Test-only mirror of `build_common_lean`: assembles the prelude for a given
/// generated body. With `include_all_helpers` set, every helper is emitted
/// regardless of whether the body references it. The helper-key match below
/// must stay in sync with the one in `build_common_lean`.
#[cfg(test)]
fn generate_prelude_for_body(body: &str, include_all_helpers: bool) -> String {
    let mut parts = vec![LEAN_PRELUDE_HEADER.to_string()];
    if include_all_helpers || crate::codegen::builtin_records::needs_trust_header(body) {
        // No effects are declared in this synthetic context, so the trust
        // header is rendered against an empty declaration set.
        let empty = crate::codegen::common::DeclaredEffects {
            bare_namespaces: std::collections::HashSet::new(),
            methods: std::collections::HashSet::new(),
        };
        let has_ip = body.contains("BranchPath");
        parts.push(
            crate::types::checker::proof_trust_header::generate_commented("-- ", &empty, has_ip),
        );
    }
    for record in crate::codegen::builtin_records::needed_records(body, include_all_helpers) {
        parts.push(crate::codegen::builtin_records::render_lean(record));
    }

    // Map each helper key reported as needed to its Lean prelude snippet.
    for helper in crate::codegen::builtin_helpers::needed_helpers(body, include_all_helpers) {
        match helper.key {
            "BranchPath" => parts.push(LEAN_PRELUDE_BRANCH_PATH.to_string()),
            "AverList" => parts.push(LEAN_PRELUDE_AVER_LIST.to_string()),
            "StringHelpers" => parts.push(LEAN_PRELUDE_STRING_HELPERS.to_string()),
            "NumericParse" => parts.push(LEAN_PRELUDE_NUMERIC_PARSE.to_string()),
            "CharByte" => parts.push(LEAN_PRELUDE_CHAR_BYTE.to_string()),
            "AverMeasure" => parts.push(LEAN_PRELUDE_AVER_MEASURE.to_string()),
            // The map prelude is itself pruned to the lemmas the body uses.
            "AverMap" => parts.push(generate_map_prelude(body, include_all_helpers)),
            "ProofFuel" => parts.push(LEAN_PRELUDE_PROOF_FUEL.to_string()),
            "FloatInstances" => parts.extend([
                LEAN_PRELUDE_FLOAT_COE.to_string(),
                LEAN_PRELUDE_FLOAT_DEC_EQ.to_string(),
            ]),
            "ExceptInstances" => parts.extend([
                LEAN_PRELUDE_EXCEPT_DEC_EQ.to_string(),
                LEAN_PRELUDE_EXCEPT_NS.to_string(),
                LEAN_PRELUDE_OPTION_TO_EXCEPT.to_string(),
            ]),
            "StringHadd" => parts.push(LEAN_PRELUDE_STRING_HADD.to_string()),
            // These keys are satisfied by datatype declarations elsewhere.
            "ResultDatatype" | "OptionDatatype" | "OptionToResult" | "BranchPathDatatype" => {}
            other => panic!(
                "Lean backend has no implementation for builtin helper key '{}'. \
                 Add a match arm in generate_prelude_for_body or remove the key \
                 from BUILTIN_HELPERS.",
                other
            ),
        }
    }

    parts.join("\n\n")
}
1127
1128fn generate_map_prelude(body: &str, include_all_helpers: bool) -> String {
1129 let mut parts = vec![AVER_MAP_PRELUDE_BASE.to_string()];
1130
1131 let needs_has_set_self = include_all_helpers || body.contains("AverMap.has_set_self");
1132 let needs_get_set_self = include_all_helpers || body.contains("AverMap.get_set_self");
1133 let needs_get_set_other = include_all_helpers
1134 || body.contains("AverMap.get_set_other")
1135 || body.contains("AverMap.has_set_other");
1136 let needs_has_set_other = include_all_helpers || body.contains("AverMap.has_set_other");
1137
1138 if needs_has_set_self {
1139 parts.push(AVER_MAP_PRELUDE_HAS_SET_SELF.to_string());
1140 }
1141 if needs_get_set_self {
1142 parts.push(AVER_MAP_PRELUDE_GET_SET_SELF.to_string());
1143 }
1144 if needs_get_set_other {
1145 parts.push(AVER_MAP_PRELUDE_GET_SET_OTHER.to_string());
1146 }
1147 if needs_has_set_other {
1148 parts.push(AVER_MAP_PRELUDE_HAS_SET_OTHER.to_string());
1149 }
1150
1151 parts.push(AVER_MAP_PRELUDE_END.to_string());
1152 parts.join("\n\n")
1153}
1154
/// Render the project's `lakefile.lean`. The package name is the lowercased
/// project name; the library keeps the original casing and lists the entry
/// root first, followed by `extra_roots`, each as a backtick name literal.
fn generate_lakefile_with_roots(project_name: &str, extra_roots: &[String]) -> String {
    let roots_str = std::iter::once(project_name.to_string())
        .chain(extra_roots.iter().cloned())
        .map(|r| format!("`{}", r))
        .collect::<Vec<_>>()
        .join(", ");
    format!(
        r#"import Lake
open Lake DSL

package «{}» where
  version := v!"0.1.0"

@[default_target]
lean_lib «{}» where
  srcDir := "."
  roots := #[{}]
"#,
        project_name.to_lowercase(),
        project_name,
        roots_str
    )
}
1178
/// Contents of the `lean-toolchain` file: the pinned Lean release for
/// generated projects.
fn generate_toolchain() -> String {
    String::from("leanprover/lean4:v4.15.0\n")
}
1182
/// Internal switch for how the unified pipeline emits function bodies.
#[derive(Clone, Copy)]
enum LeanEmitMode {
    /// Plain definitions; no recursion plans are computed.
    Standard,
    /// Proof-oriented output: recursion plans are analyzed and recursive
    /// types additionally get size measures.
    Proof,
}
1188
/// Single transpilation pipeline behind all public entry points.
///
/// Produces the full Lean project: one file per module, the entry file, the
/// shared `AverCommon` prelude (pruned against everything emitted), the
/// lakefile, and the toolchain pin.
fn transpile_unified(
    ctx: &CodegenContext,
    verify_mode: VerifyEmitMode,
    emit_mode: LeanEmitMode,
) -> ProjectOutput {
    use crate::codegen::recursion::RecursionPlan;

    // Recursion data: all recursive fns, the pure-only subset, and recursive
    // type names. Proof mode additionally computes per-function plans; the
    // issues are surfaced separately through `proof_mode_findings`.
    let recursive_fns: HashSet<String> = ctx.recursive_fns.clone();
    let recursive_names = recursive_pure_fn_names(ctx);
    let recursive_types = recursive_type_names(ctx);
    let (plans, _proof_issues) = match emit_mode {
        LeanEmitMode::Proof => crate::codegen::recursion::analyze_plans(ctx),
        LeanEmitMode::Standard => (HashMap::<String, RecursionPlan>::new(), Vec::new()),
    };

    // Emit pure functions per scope; each strongly-connected component goes
    // through the mutual-group emitter (size > 1) or the single emitter.
    let pure_per_scope = crate::codegen::common::route_pure_components_per_scope(
        ctx,
        toplevel::is_pure_fn,
        |comp| {
            let mut out: Vec<String> = Vec::new();
            if comp.len() > 1 {
                let code = match emit_mode {
                    LeanEmitMode::Proof => {
                        // Proof-grade mutual emission needs a plan for every
                        // member; otherwise fall back to the standard form.
                        let all_supported = comp.iter().all(|fd| plans.contains_key(&fd.name));
                        if all_supported {
                            toplevel::emit_mutual_group_proof(comp, ctx, &plans)
                        } else {
                            toplevel::emit_mutual_group(comp, ctx)
                        }
                    }
                    LeanEmitMode::Standard => toplevel::emit_mutual_group(comp, ctx),
                };
                out.push(code);
                out.push(String::new());
            } else if let Some(fd) = comp.first() {
                let emitted = match emit_mode {
                    LeanEmitMode::Proof => {
                        // A recursive fn without a plan cannot be emitted in
                        // proof style; use the standard emitter for it.
                        let is_recursive = recursive_names.contains(&fd.name);
                        if is_recursive && !plans.contains_key(&fd.name) {
                            toplevel::emit_fn_def(fd, &recursive_names, ctx)
                        } else {
                            toplevel::emit_fn_def_proof(fd, plans.get(&fd.name).cloned(), ctx)
                        }
                    }
                    LeanEmitMode::Standard => toplevel::emit_fn_def(fd, &recursive_fns, ctx),
                };
                if let Some(code) = emitted {
                    out.push(code);
                    out.push(String::new());
                }
            }
            out
        },
    );

    // Oracle-lifted classified-effect functions land in the entry file.
    let mut entry_lifted_sections: Vec<String> = Vec::new();
    let lifted_recursive_names = match emit_mode {
        LeanEmitMode::Proof => &recursive_names,
        LeanEmitMode::Standard => &recursive_fns,
    };
    emit_lifted_effectful_functions(ctx, lifted_recursive_names, &mut entry_lifted_sections);

    // Decision blocks from the entry file.
    let mut entry_decision_sections: Vec<String> = Vec::new();
    for item in &ctx.items {
        if let TopLevel::Decision(db) = item {
            entry_decision_sections.push(toplevel::emit_decision(db));
            entry_decision_sections.push(String::new());
        }
    }

    // Verify blocks: case numbering continues across blocks sharing a counter
    // key (see `verify_counter_key`), so theorem names never collide.
    let mut entry_verify_sections: Vec<String> = Vec::new();
    let mut verify_case_counters: HashMap<String, usize> = HashMap::new();
    for item in &ctx.items {
        if let TopLevel::Verify(vb) = item {
            let key = verify_counter_key(vb);
            let start_idx = *verify_case_counters.get(&key).unwrap_or(&0);
            let (emitted, next_idx) = toplevel::emit_verify_block(vb, ctx, verify_mode, start_idx);
            verify_case_counters.insert(key, next_idx);
            entry_verify_sections.push(emitted);
            entry_verify_sections.push(String::new());
        }
    }

    // Per-module files. `union_body` accumulates every emitted body so the
    // shared prelude can be pruned to what is actually referenced.
    let mut module_files: Vec<(String, String)> = Vec::new();
    let mut union_body = String::new();

    for module in &ctx.modules {
        let mut body_sections: Vec<String> = Vec::new();
        for td in &module.type_defs {
            body_sections.push(toplevel::emit_type_def(td));
            if toplevel::is_recursive_type_def(td) {
                // Recursive inductives need an explicit DecidableEq instance,
                // plus a size measure when emitting for proofs.
                body_sections.push(toplevel::emit_recursive_decidable_eq(
                    toplevel::type_def_name(td),
                ));
                if matches!(emit_mode, LeanEmitMode::Proof)
                    && let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types)
                {
                    body_sections.push(measure);
                }
            }
            body_sections.push(String::new());
        }
        if let Some(scope_sections) = pure_per_scope.by_scope.get(&module.prefix) {
            body_sections.extend(scope_sections.clone());
        }
        let body = body_sections.join("\n");
        union_body.push_str(&body);
        union_body.push('\n');

        // Every module imports the shared prelude and its dependencies, and
        // opens each dependency's namespace.
        let mut imports = vec!["import AverCommon".to_string()];
        for d in &module.depends {
            imports.push(format!("import {}", d));
        }
        let opens: Vec<String> = module
            .depends
            .iter()
            .map(|d| format!("open {}", d))
            .collect();

        let opens_str = if opens.is_empty() {
            String::new()
        } else {
            format!("\n{}\n", opens.join("\n"))
        };
        let content = format!(
            "{}\n{}\nnamespace {}\n\n{}\nend {}\n",
            imports.join("\n"),
            opens_str,
            module.prefix,
            body,
            module.prefix
        );
        // Dotted module prefixes become nested directories.
        let path = module.prefix.replace('.', "/");
        module_files.push((format!("{}.lean", path), content));
    }

    // Entry file body: entry-scope types, pure fns, lifted fns, decisions,
    // then verify sections — in that order.
    let mut entry_body_sections: Vec<String> = Vec::new();
    for td in &ctx.type_defs {
        entry_body_sections.push(toplevel::emit_type_def(td));
        if toplevel::is_recursive_type_def(td) {
            entry_body_sections.push(toplevel::emit_recursive_decidable_eq(
                toplevel::type_def_name(td),
            ));
            if matches!(emit_mode, LeanEmitMode::Proof)
                && let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types)
            {
                entry_body_sections.push(measure);
            }
        }
        entry_body_sections.push(String::new());
    }
    if let Some(entry_pure) = pure_per_scope.by_scope.get("") {
        entry_body_sections.extend(entry_pure.clone());
    }
    entry_body_sections.extend(entry_lifted_sections);
    entry_body_sections.extend(entry_decision_sections);
    entry_body_sections.extend(entry_verify_sections);

    let entry_body = entry_body_sections.join("\n");
    union_body.push_str(&entry_body);
    union_body.push('\n');

    let project_name = lean_project_name(ctx);
    let mut entry_imports = vec!["import AverCommon".to_string()];
    for m in &ctx.modules {
        entry_imports.push(format!("import {}", m.prefix));
    }
    let entry_opens: Vec<String> = ctx
        .modules
        .iter()
        .map(|m| format!("open {}", m.prefix))
        .collect();
    let mut entry_parts = vec![entry_imports.join("\n")];
    if !entry_opens.is_empty() {
        entry_parts.push(entry_opens.join("\n"));
    }
    // The trust header is only emitted when there is something to trust: an
    // independent-product BranchPath or a classified effect in proof scope.
    let declared = crate::codegen::common::collect_declared_effects(ctx);
    let has_ip = union_body.contains("BranchPath");
    let has_classified =
        crate::types::checker::effect_classification::classifications_for_proof_subset()
            .iter()
            .any(|c| declared.includes(c.method));
    if has_ip || has_classified {
        entry_parts.push(
            crate::types::checker::proof_trust_header::generate_commented("-- ", &declared, has_ip),
        );
    }
    let subtype_block = crate::types::checker::oracle_subtypes::lean_subtypes(&declared);
    if !subtype_block.is_empty() {
        entry_parts.push(subtype_block);
    }
    entry_parts.push(entry_body);
    let entry_content = entry_parts.join("\n\n");

    // Shared prelude, pruned against the union of all emitted bodies.
    let common_content = build_common_lean(&union_body);

    // Lakefile roots: the shared prelude plus every module prefix.
    let mut extra_roots: Vec<String> = vec!["AverCommon".to_string()];
    for m in &ctx.modules {
        extra_roots.push(m.prefix.clone());
    }
    let lakefile = generate_lakefile_with_roots(&project_name, &extra_roots);
    let toolchain = generate_toolchain();

    let mut files = module_files;
    files.push((format!("{}.lean", project_name), entry_content));
    files.push(("AverCommon.lean".to_string(), common_content));
    files.push(("lakefile.lean".to_string(), lakefile));
    files.push(("lean-toolchain".to_string(), toolchain));
    ProjectOutput { files }
}
1424
/// Build the shared `AverCommon.lean` prelude, pruned to the builtin records
/// and helpers that `union_body` (the concatenation of every emitted body)
/// actually references. The helper-key match must stay in sync with the one
/// in `generate_prelude_for_body`.
fn build_common_lean(union_body: &str) -> String {
    let mut parts = vec![LEAN_PRELUDE_HEADER.to_string()];
    for record in crate::codegen::builtin_records::needed_records(union_body, false) {
        parts.push(crate::codegen::builtin_records::render_lean(record));
    }
    for helper in crate::codegen::builtin_helpers::needed_helpers(union_body, false) {
        match helper.key {
            "BranchPath" => parts.push(LEAN_PRELUDE_BRANCH_PATH.to_string()),
            "AverList" => parts.push(LEAN_PRELUDE_AVER_LIST.to_string()),
            "StringHelpers" => parts.push(LEAN_PRELUDE_STRING_HELPERS.to_string()),
            "NumericParse" => parts.push(LEAN_PRELUDE_NUMERIC_PARSE.to_string()),
            "CharByte" => parts.push(LEAN_PRELUDE_CHAR_BYTE.to_string()),
            "AverMeasure" => parts.push(LEAN_PRELUDE_AVER_MEASURE.to_string()),
            // The map prelude is itself pruned to the lemmas the body uses.
            "AverMap" => parts.push(generate_map_prelude(union_body, false)),
            "ProofFuel" => parts.push(LEAN_PRELUDE_PROOF_FUEL.to_string()),
            "FloatInstances" => parts.extend([
                LEAN_PRELUDE_FLOAT_COE.to_string(),
                LEAN_PRELUDE_FLOAT_DEC_EQ.to_string(),
            ]),
            "ExceptInstances" => parts.extend([
                LEAN_PRELUDE_EXCEPT_DEC_EQ.to_string(),
                LEAN_PRELUDE_EXCEPT_NS.to_string(),
                LEAN_PRELUDE_OPTION_TO_EXCEPT.to_string(),
            ]),
            "StringHadd" => parts.push(LEAN_PRELUDE_STRING_HADD.to_string()),
            // These keys are satisfied by datatype declarations elsewhere.
            "ResultDatatype" | "OptionDatatype" | "OptionToResult" | "BranchPathDatatype" => {}
            other => panic!(
                "Lean backend has no implementation for builtin helper key '{}'. \
                 Add a match arm in build_common_lean or remove the key from BUILTIN_HELPERS.",
                other
            ),
        }
    }
    parts.join("\n\n")
}
1460
1461#[cfg(test)]
1462mod tests {
1463 use super::{
1464 VerifyEmitMode, generate_prelude, proof_mode_issues, recurrence, transpile,
1465 transpile_for_proof_mode, transpile_with_verify_mode,
1466 };
1467 use crate::ast::{
1468 BinOp, Expr, FnBody, FnDef, Literal, MatchArm, Pattern, Spanned, Stmt, TailCallData,
1469 TopLevel, TypeDef, TypeVariant, VerifyBlock, VerifyGiven, VerifyGivenDomain, VerifyKind,
1470 VerifyLaw,
1471 };
1472
    // Shorthand: wrap an `Expr` in a span-less `Spanned`.
    fn sb(e: Expr) -> Spanned<Expr> {
        Spanned::bare(e)
    }
    // Boxed variant of `sb` for AST fields taking `Box<Spanned<Expr>>`.
    fn sbb(e: Expr) -> Box<Spanned<Expr>> {
        Box::new(Spanned::bare(e))
    }
1481 use crate::codegen::{CodegenContext, build_context};
1482 use crate::source::parse_source;
1483 use crate::tco;
1484 use std::collections::{HashMap, HashSet};
1485 use std::sync::Arc as Rc;
1486
    // A minimal `CodegenContext` with every collection empty; the project
    // name "verify_mode" makes the entry file "Verify_mode.lean" in the
    // generated output, which the tests below look up by name.
    fn empty_ctx() -> CodegenContext {
        CodegenContext {
            items: vec![],
            fn_sigs: HashMap::new(),
            memo_fns: HashSet::new(),
            memo_safe_types: HashSet::new(),
            type_defs: vec![],
            fn_defs: vec![],
            project_name: "verify_mode".to_string(),
            modules: vec![],
            module_prefixes: HashSet::new(),
            policy: None,
            emit_replay_runtime: false,
            runtime_policy_from_env: false,
            guest_entry: None,
            emit_self_host_support: false,
            extra_fn_defs: Vec::new(),
            mutual_tco_members: HashSet::new(),
            recursive_fns: HashSet::new(),
            fn_analyses: HashMap::new(),
            buffer_build_sinks: HashMap::new(),
            buffer_fusion_sites: Vec::new(),
            synthesized_buffered_fns: Vec::new(),
        }
    }
1512
    // Parse Aver source, run TCO and a full typecheck (asserting no errors),
    // and build a codegen context under the given project name.
    fn ctx_from_source(source: &str, project_name: &str) -> CodegenContext {
        let mut items = parse_source(source).expect("source should parse");
        crate::ir::pipeline::tco(&mut items);
        let tc = crate::ir::pipeline::typecheck(
            &items,
            &crate::ir::TypecheckMode::Full { base_dir: None },
        );
        assert!(
            tc.errors.is_empty(),
            "source should typecheck without errors: {:?}",
            tc.errors
        );
        build_context(
            items,
            &tc,
            None,
            HashSet::new(),
            project_name.to_string(),
            vec![],
        )
    }
1534
1535 fn generated_lean_file(out: &crate::codegen::ProjectOutput) -> String {
1543 out.files
1544 .iter()
1545 .filter_map(|(name, content)| {
1546 (name.ends_with(".lean") && name != "lakefile.lean").then_some(content.as_str())
1547 })
1548 .collect::<Vec<&str>>()
1549 .join("\n")
1550 }
1551
    // Context containing a single `verify` block for a function `f` with one
    // trivial case `1 => 1` (the function itself is deliberately absent; the
    // emitted example is self-contained).
    fn empty_ctx_with_verify_case() -> CodegenContext {
        let mut ctx = empty_ctx();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 1,
            cases: vec![(
                sb(Expr::Literal(Literal::Int(1))),
                sb(Expr::Literal(Literal::Int(1))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Cases,
            trace: false,
            cases_givens: vec![],
        }));
        ctx
    }
1571
1572 fn empty_ctx_with_two_verify_blocks_same_fn() -> CodegenContext {
1573 let mut ctx = empty_ctx();
1574 ctx.items.push(TopLevel::Verify(VerifyBlock {
1575 fn_name: "f".to_string(),
1576 line: 1,
1577 cases: vec![(
1578 sb(Expr::Literal(Literal::Int(1))),
1579 sb(Expr::Literal(Literal::Int(1))),
1580 )],
1581 case_spans: vec![],
1582 case_givens: vec![],
1583 case_hostile_origins: vec![],
1584 case_hostile_profiles: vec![],
1585 kind: VerifyKind::Cases,
1586 trace: false,
1587 cases_givens: vec![],
1588 }));
1589 ctx.items.push(TopLevel::Verify(VerifyBlock {
1590 fn_name: "f".to_string(),
1591 line: 2,
1592 cases: vec![(
1593 sb(Expr::Literal(Literal::Int(2))),
1594 sb(Expr::Literal(Literal::Int(2))),
1595 )],
1596 case_spans: vec![],
1597 case_givens: vec![],
1598 case_hostile_origins: vec![],
1599 case_hostile_profiles: vec![],
1600 kind: VerifyKind::Cases,
1601 trace: false,
1602 cases_givens: vec![],
1603 }));
1604 ctx
1605 }
1606
    // Context with a pure `add(a, b) = a + b` function plus a `verify ... law
    // commutative` block: two sampled cases (1,2)/(2,1) and (2,3)/(3,2), a
    // range-domain given for `a` and an explicit-list given for `b`, and the
    // universal claim `add a b = add b a`.
    fn empty_ctx_with_verify_law() -> CodegenContext {
        let mut ctx = empty_ctx();
        let add = FnDef {
            name: "add".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("a".to_string())),
                sbb(Expr::Ident("b".to_string())),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(add.clone());
        ctx.items.push(TopLevel::FnDef(add));
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 1,
            // Sampled cases: add(1,2) => add(2,1) and add(2,3) => add(3,2).
            cases: vec![
                (
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("add".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(1))),
                            sb(Expr::Literal(Literal::Int(2))),
                        ],
                    )),
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("add".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(2))),
                            sb(Expr::Literal(Literal::Int(1))),
                        ],
                    )),
                ),
                (
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("add".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(2))),
                            sb(Expr::Literal(Literal::Int(3))),
                        ],
                    )),
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("add".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(3))),
                            sb(Expr::Literal(Literal::Int(2))),
                        ],
                    )),
                ),
            ],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "commutative".to_string(),
                // `a` ranges over 1..2; `b` is the explicit list [2, 3].
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![
                            sb(Expr::Literal(Literal::Int(2))),
                            sb(Expr::Literal(Literal::Int(3))),
                        ]),
                    },
                ],
                when: None,
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Ident("b".to_string())),
                    ],
                )),
                rhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("b".to_string())),
                        sb(Expr::Ident("a".to_string())),
                    ],
                )),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        ctx
    }
1708
    // The prelude must normalize Lean's float-to-string output via the
    // `normalizeFloatString` helper wired into `String.fromFloat`.
    #[test]
    fn prelude_normalizes_float_string_format() {
        let prelude = generate_prelude();
        assert!(
            prelude.contains("private def normalizeFloatString (s : String) : String :="),
            "missing normalizeFloatString helper in prelude"
        );
        assert!(
            prelude.contains(
                "def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)"
            ),
            "String.fromFloat should normalize Lean float formatting"
        );
    }

    // `Char.fromCode` must reject code points above the Unicode maximum and
    // the surrogate range.
    #[test]
    fn prelude_validates_char_from_code_unicode_bounds() {
        let prelude = generate_prelude();
        assert!(
            prelude.contains("if n < 0 || n > 1114111 then none"),
            "Char.fromCode should reject code points above Unicode max"
        );
        assert!(
            prelude.contains("else if n >= 55296 && n <= 57343 then none"),
            "Char.fromCode should reject surrogate code points"
        );
    }

    // The full prelude carries the AverMap set/get helper lemmas.
    #[test]
    fn prelude_includes_map_set_helper_lemmas() {
        let prelude = generate_prelude();
        assert!(
            prelude.contains("theorem has_set_self [DecidableEq α]"),
            "missing AverMap.has_set_self helper theorem"
        );
        assert!(
            prelude.contains("theorem get_set_self [DecidableEq α]"),
            "missing AverMap.get_set_self helper theorem"
        );
    }
1749
    // A program with no map usage should produce output with the AverMap
    // prelude pruned away entirely.
    #[test]
    fn lean_output_without_map_usage_omits_map_prelude() {
        let mut ctx = ctx_from_source(
            r#"
module NoMap
  intent = "Simple pure program without maps."

fn addOne(n: Int) -> Int
  n + 1

verify addOne
  addOne(1) => 2
"#,
            "nomap",
        );
        let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        assert!(
            !lean.contains("namespace AverMap"),
            "did not expect AverMap prelude in program without map usage:\n{}",
            lean
        );
    }

    // Default verify emission is `native_decide`.
    #[test]
    fn transpile_emits_native_decide_for_verify_by_default() {
        let mut ctx = empty_ctx_with_verify_case();
        let out = transpile(&mut ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("example : 1 = 1 := by native_decide"));
    }

    // `VerifyEmitMode::Sorry` replaces the proof with `sorry`.
    #[test]
    fn transpile_can_emit_sorry_for_verify_when_requested() {
        let mut ctx = empty_ctx_with_verify_case();
        let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::Sorry);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("example : 1 = 1 := by sorry"));
    }

    // `VerifyEmitMode::TheoremSkeleton` emits named theorems with `sorry`.
    #[test]
    fn transpile_can_emit_theorem_skeletons_for_verify() {
        let mut ctx = empty_ctx_with_verify_case();
        let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
        assert!(lean.contains(" sorry"));
    }

    // Theorem numbering continues across verify blocks sharing a function
    // (see `verify_counter_key`), so names never collide.
    #[test]
    fn theorem_skeleton_numbering_is_global_per_function_across_verify_blocks() {
        let mut ctx = empty_ctx_with_two_verify_blocks_same_fn();
        let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
        assert!(lean.contains("theorem f_verify_2 : 2 = 2 := by"));
    }
1824
    // A `verify ... law` block produces a named universal theorem plus one
    // `native_decide` sample theorem per case, with givens echoed as comments.
    #[test]
    fn transpile_emits_named_theorems_for_verify_law() {
        let mut ctx = empty_ctx_with_verify_law();
        let out = transpile(&mut ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("-- verify law add.commutative (2 cases)"));
        assert!(lean.contains("-- given a: Int = 1..2"));
        assert!(lean.contains("-- given b: Int = [2, 3]"));
        assert!(lean.contains(
            "theorem add_law_commutative : ∀ (a : Int) (b : Int), add a b = add b a := by"
        ));
        assert!(lean.contains(" intro a b"));
        assert!(lean.contains(" simp [add, Int.add_comm]"));
        assert!(lean.contains(
            "theorem add_law_commutative_sample_1 : add 1 2 = add 2 1 := by native_decide"
        ));
        assert!(lean.contains(
            "theorem add_law_commutative_sample_2 : add 2 3 = add 3 2 := by native_decide"
        ));
    }

    // Spot-check a handful of prelude theorems and helpers that later proofs
    // rely on (int round-trip, string splitting, digit lemmas).
    #[test]
    fn generate_prelude_emits_int_roundtrip_theorem() {
        let lean = generate_prelude();
        assert!(lean.contains(
            "theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n"
        ));
        assert!(lean.contains("theorem String.intercalate_empty_chars (s : String) :"));
        assert!(lean.contains("def splitOnCharGo"));
        assert!(lean.contains("theorem split_single_char_append"));
        assert!(lean.contains("theorem split_intercalate_trailing_single_char"));
        assert!(lean.contains("namespace AverDigits"));
        assert!(lean.contains("theorem String.charAt_length_none (s : String)"));
        assert!(lean.contains("theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->"));
    }
1864
    // A law with a `when` clause gets the guard threaded through both the
    // universal theorem and every sample theorem as a hypothesis.
    #[test]
    fn transpile_emits_guarded_theorems_for_verify_law_when_clause() {
        let mut ctx = ctx_from_source(
            r#"
module GuardedLaw
  intent =
    "verify law with precondition"

fn pickGreater(a: Int, b: Int) -> Int
  match a > b
    true -> a
    false -> b

verify pickGreater law ordered
  given a: Int = [1, 2]
  given b: Int = [1, 2]
  when a > b
  pickGreater(a, b) => a
"#,
            "guarded_law",
        );
        let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- when (a > b)"));
        assert!(lean.contains(
            "theorem pickGreater_law_ordered : ∀ (a : Int) (b : Int), a = 1 ∨ a = 2 -> b = 1 ∨ b = 2 -> (a > b) = true -> pickGreater a b = a := by"
        ));
        assert!(lean.contains(
            "theorem pickGreater_law_ordered_sample_1 : (1 > 1) = true -> pickGreater 1 1 = 1 := by"
        ));
        assert!(lean.contains(
            "theorem pickGreater_law_ordered_sample_4 : (2 > 2) = true -> pickGreater 2 2 = 2 := by"
        ));
    }

    // A law whose target is `fnSpec(x)` with matching arity is recognized as
    // a canonical spec law and named `<fn>_eq_<fn>Spec` instead of the
    // generic `<fn>_law_<law>` scheme.
    #[test]
    fn transpile_uses_spec_theorem_names_for_declared_spec_laws() {
        let mut ctx = ctx_from_source(
            r#"
module SpecDemo
  intent =
    "spec demo"

fn absVal(x: Int) -> Int
  match x < 0
    true -> 0 - x
    false -> x

fn absValSpec(x: Int) -> Int
  match x < 0
    true -> 0 - x
    false -> x

verify absVal law absValSpec
  given x: Int = [-2, -1, 0, 1, 2]
  absVal(x) => absValSpec(x)
"#,
            "spec_demo",
        );
        let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- verify law absVal.spec absValSpec (5 cases)"));
        assert!(
            lean.contains(
                "theorem absVal_eq_absValSpec : ∀ (x : Int), absVal x = absValSpec x := by"
            )
        );
        assert!(lean.contains("theorem absVal_eq_absValSpec_checked_domain :"));
        assert!(lean.contains("theorem absVal_eq_absValSpec_sample_1 :"));
        assert!(!lean.contains("theorem absVal_law_absValSpec :"));
    }
1938
1939 #[test]
1940 fn transpile_keeps_noncanonical_spec_laws_as_regular_law_names() {
1941 let mut ctx = ctx_from_source(
1942 r#"
1943module SpecLawShape
1944 intent =
1945 "shape probe"
1946
1947fn foo(x: Int) -> Int
1948 x + 1
1949
1950fn fooSpec(seed: Int, x: Int) -> Int
1951 x + seed
1952
1953verify foo law fooSpec
1954 given x: Int = [1, 2]
1955 foo(x) => fooSpec(1, x)
1956"#,
1957 "spec_law_shape",
1958 );
1959 let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
1960 let lean = generated_lean_file(&out);
1961
1962 assert!(lean.contains("-- verify law foo.fooSpec (2 cases)"));
1963 assert!(lean.contains("theorem foo_law_fooSpec : ∀ (x : Int), foo x = fooSpec 1 x := by"));
1964 assert!(!lean.contains("theorem foo_eq_fooSpec :"));
1965 }
1966
1967 #[test]
1968 fn transpile_auto_proves_linear_int_canonical_spec_law_in_auto_mode() {
1969 let mut ctx = ctx_from_source(
1970 r#"
1971module SpecGap
1972 intent =
1973 "nontrivial canonical spec law"
1974
1975fn inc(x: Int) -> Int
1976 x + 1
1977
1978fn incSpec(x: Int) -> Int
1979 x + 2 - 1
1980
1981verify inc law incSpec
1982 given x: Int = [0, 1, 2]
1983 inc(x) => incSpec(x)
1984"#,
1985 "spec_gap",
1986 );
1987 let out = transpile(&mut ctx);
1988 let lean = generated_lean_file(&out);
1989
1990 assert!(lean.contains("-- verify law inc.spec incSpec (3 cases)"));
1991 assert!(lean.contains("theorem inc_eq_incSpec : ∀ (x : Int), inc x = incSpec x := by"));
1992 assert!(lean.contains("change (x + 1) = ((x + 2) - 1)"));
1993 assert!(lean.contains("omega"));
1994 assert!(!lean.contains(
1995 "-- universal theorem inc_eq_incSpec omitted: sampled law shape is not auto-proved yet"
1996 ));
1997 assert!(lean.contains("theorem inc_eq_incSpec_checked_domain :"));
1998 }
1999
2000 #[test]
2001 fn transpile_auto_proves_guarded_canonical_spec_law_in_auto_mode() {
2002 let mut ctx = ctx_from_source(
2003 r#"
2004module GuardedSpecGap
2005 intent =
2006 "guarded canonical spec law"
2007
2008fn clampNonNegative(x: Int) -> Int
2009 match x < 0
2010 true -> 0
2011 false -> x
2012
2013fn clampNonNegativeSpec(x: Int) -> Int
2014 match x < 0
2015 true -> 0
2016 false -> x
2017
2018verify clampNonNegative law clampNonNegativeSpec
2019 given x: Int = [-2, -1, 0, 1, 2]
2020 when x >= 0
2021 clampNonNegative(x) => clampNonNegativeSpec(x)
2022"#,
2023 "guarded_spec_gap",
2024 );
2025 let out = transpile(&mut ctx);
2026 let lean = generated_lean_file(&out);
2027
2028 assert!(lean.contains("-- when (x >= 0)"));
2029 assert!(lean.contains(
2030 "theorem clampNonNegative_eq_clampNonNegativeSpec : ∀ (x : Int), x = (-2) ∨ x = (-1) ∨ x = 0 ∨ x = 1 ∨ x = 2 -> (x >= 0) = true -> clampNonNegative x = clampNonNegativeSpec x := by"
2031 ));
2032 assert!(lean.contains("intro x h_x h_when"));
2033 assert!(lean.contains("simpa [clampNonNegative, clampNonNegativeSpec]"));
2034 assert!(!lean.contains(
2035 "-- universal theorem clampNonNegative_eq_clampNonNegativeSpec omitted: sampled law shape is not auto-proved yet"
2036 ));
2037 assert!(!lean.contains("cases h_x"));
2038 }
2039
2040 #[test]
2041 fn transpile_auto_proves_simp_normalized_canonical_spec_law_in_auto_mode() {
2042 let mut ctx = ctx_from_source(
2043 r#"
2044module SpecGapNonlinear
2045 intent =
2046 "nonlinear canonical spec law"
2047
2048fn square(x: Int) -> Int
2049 x * x
2050
2051fn squareSpec(x: Int) -> Int
2052 x * x + 0
2053
2054verify square law squareSpec
2055 given x: Int = [0, 1, 2]
2056 square(x) => squareSpec(x)
2057"#,
2058 "spec_gap_nonlinear",
2059 );
2060 let out = transpile(&mut ctx);
2061 let lean = generated_lean_file(&out);
2062
2063 assert!(lean.contains("-- verify law square.spec squareSpec (3 cases)"));
2064 assert!(
2065 lean.contains(
2066 "theorem square_eq_squareSpec : ∀ (x : Int), square x = squareSpec x := by"
2067 )
2068 );
2069 assert!(lean.contains("simp [square, squareSpec]"));
2070 assert!(!lean.contains(
2071 "-- universal theorem square_eq_squareSpec omitted: sampled law shape is not auto-proved yet"
2072 ));
2073 assert!(lean.contains("theorem square_eq_squareSpec_checked_domain :"));
2074 assert!(lean.contains("theorem square_eq_squareSpec_sample_1 :"));
2075 }
2076
2077 #[test]
2078 fn transpile_auto_proves_reflexive_law_with_rfl() {
2079 let mut ctx = empty_ctx();
2080 ctx.items.push(TopLevel::Verify(VerifyBlock {
2081 fn_name: "idLaw".to_string(),
2082 line: 1,
2083 cases: vec![(
2084 sb(Expr::Literal(Literal::Int(1))),
2085 sb(Expr::Literal(Literal::Int(1))),
2086 )],
2087 case_spans: vec![],
2088 case_givens: vec![],
2089 case_hostile_origins: vec![],
2090 case_hostile_profiles: vec![],
2091 kind: VerifyKind::Law(Box::new(VerifyLaw {
2092 name: "reflexive".to_string(),
2093 givens: vec![VerifyGiven {
2094 name: "x".to_string(),
2095 type_name: "Int".to_string(),
2096 domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
2097 }],
2098 when: None,
2099 lhs: sb(Expr::Ident("x".to_string())),
2100 rhs: sb(Expr::Ident("x".to_string())),
2101 sample_guards: vec![],
2102 })),
2103 trace: false,
2104 cases_givens: vec![],
2105 }));
2106 let out = transpile(&mut ctx);
2107 let lean = out
2108 .files
2109 .iter()
2110 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2111 .expect("expected generated Lean file");
2112 assert!(lean.contains("theorem idLaw_law_reflexive : ∀ (x : Int), x = x := by"));
2113 assert!(lean.contains(" intro x"));
2114 assert!(lean.contains(" rfl"));
2115 }
2116
2117 #[test]
2118 fn transpile_auto_proves_identity_law_for_int_add_wrapper() {
2119 let mut ctx = empty_ctx_with_verify_law();
2120 ctx.items.push(TopLevel::Verify(VerifyBlock {
2121 fn_name: "add".to_string(),
2122 line: 10,
2123 cases: vec![(
2124 sb(Expr::FnCall(
2125 sbb(Expr::Ident("add".to_string())),
2126 vec![
2127 sb(Expr::Literal(Literal::Int(1))),
2128 sb(Expr::Literal(Literal::Int(0))),
2129 ],
2130 )),
2131 sb(Expr::Literal(Literal::Int(1))),
2132 )],
2133 case_spans: vec![],
2134 case_givens: vec![],
2135 case_hostile_origins: vec![],
2136 case_hostile_profiles: vec![],
2137 kind: VerifyKind::Law(Box::new(VerifyLaw {
2138 name: "identityZero".to_string(),
2139 givens: vec![VerifyGiven {
2140 name: "a".to_string(),
2141 type_name: "Int".to_string(),
2142 domain: VerifyGivenDomain::Explicit(vec![
2143 sb(Expr::Literal(Literal::Int(0))),
2144 sb(Expr::Literal(Literal::Int(1))),
2145 ]),
2146 }],
2147 when: None,
2148 lhs: sb(Expr::FnCall(
2149 sbb(Expr::Ident("add".to_string())),
2150 vec![
2151 sb(Expr::Ident("a".to_string())),
2152 sb(Expr::Literal(Literal::Int(0))),
2153 ],
2154 )),
2155 rhs: sb(Expr::Ident("a".to_string())),
2156 sample_guards: vec![],
2157 })),
2158 trace: false,
2159 cases_givens: vec![],
2160 }));
2161 let out = transpile(&mut ctx);
2162 let lean = out
2163 .files
2164 .iter()
2165 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2166 .expect("expected generated Lean file");
2167 assert!(lean.contains("theorem add_law_identityZero : ∀ (a : Int), add a 0 = a := by"));
2168 assert!(lean.contains(" intro a"));
2169 assert!(lean.contains(" simp [add]"));
2170 }
2171
2172 #[test]
2173 fn transpile_auto_proves_associative_law_for_int_add_wrapper() {
2174 let mut ctx = empty_ctx_with_verify_law();
2175 ctx.items.push(TopLevel::Verify(VerifyBlock {
2176 fn_name: "add".to_string(),
2177 line: 20,
2178 cases: vec![(
2179 sb(Expr::FnCall(
2180 sbb(Expr::Ident("add".to_string())),
2181 vec![
2182 sb(Expr::FnCall(
2183 sbb(Expr::Ident("add".to_string())),
2184 vec![
2185 sb(Expr::Literal(Literal::Int(1))),
2186 sb(Expr::Literal(Literal::Int(2))),
2187 ],
2188 )),
2189 sb(Expr::Literal(Literal::Int(3))),
2190 ],
2191 )),
2192 sb(Expr::FnCall(
2193 sbb(Expr::Ident("add".to_string())),
2194 vec![
2195 sb(Expr::Literal(Literal::Int(1))),
2196 sb(Expr::FnCall(
2197 sbb(Expr::Ident("add".to_string())),
2198 vec![
2199 sb(Expr::Literal(Literal::Int(2))),
2200 sb(Expr::Literal(Literal::Int(3))),
2201 ],
2202 )),
2203 ],
2204 )),
2205 )],
2206 case_spans: vec![],
2207 case_givens: vec![],
2208 case_hostile_origins: vec![],
2209 case_hostile_profiles: vec![],
2210 kind: VerifyKind::Law(Box::new(VerifyLaw {
2211 name: "associative".to_string(),
2212 givens: vec![
2213 VerifyGiven {
2214 name: "a".to_string(),
2215 type_name: "Int".to_string(),
2216 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
2217 1,
2218 )))]),
2219 },
2220 VerifyGiven {
2221 name: "b".to_string(),
2222 type_name: "Int".to_string(),
2223 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
2224 2,
2225 )))]),
2226 },
2227 VerifyGiven {
2228 name: "c".to_string(),
2229 type_name: "Int".to_string(),
2230 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
2231 3,
2232 )))]),
2233 },
2234 ],
2235 when: None,
2236 lhs: sb(Expr::FnCall(
2237 sbb(Expr::Ident("add".to_string())),
2238 vec![
2239 sb(Expr::FnCall(
2240 sbb(Expr::Ident("add".to_string())),
2241 vec![
2242 sb(Expr::Ident("a".to_string())),
2243 sb(Expr::Ident("b".to_string())),
2244 ],
2245 )),
2246 sb(Expr::Ident("c".to_string())),
2247 ],
2248 )),
2249 rhs: sb(Expr::FnCall(
2250 sbb(Expr::Ident("add".to_string())),
2251 vec![
2252 sb(Expr::Ident("a".to_string())),
2253 sb(Expr::FnCall(
2254 sbb(Expr::Ident("add".to_string())),
2255 vec![
2256 sb(Expr::Ident("b".to_string())),
2257 sb(Expr::Ident("c".to_string())),
2258 ],
2259 )),
2260 ],
2261 )),
2262 sample_guards: vec![],
2263 })),
2264 trace: false,
2265 cases_givens: vec![],
2266 }));
2267 let out = transpile(&mut ctx);
2268 let lean = out
2269 .files
2270 .iter()
2271 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2272 .expect("expected generated Lean file");
2273 assert!(lean.contains(
2274 "theorem add_law_associative : ∀ (a : Int) (b : Int) (c : Int), add (add a b) c = add a (add b c) := by"
2275 ));
2276 assert!(lean.contains(" intro a b c"));
2277 assert!(lean.contains(" simp [add, Int.add_assoc]"));
2278 }
2279
2280 #[test]
2281 fn transpile_auto_proves_sub_laws() {
2282 let mut ctx = empty_ctx();
2283 let sub = FnDef {
2284 name: "sub".to_string(),
2285 line: 1,
2286 params: vec![
2287 ("a".to_string(), "Int".to_string()),
2288 ("b".to_string(), "Int".to_string()),
2289 ],
2290 return_type: "Int".to_string(),
2291 effects: vec![],
2292 desc: None,
2293 body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
2294 BinOp::Sub,
2295 sbb(Expr::Ident("a".to_string())),
2296 sbb(Expr::Ident("b".to_string())),
2297 )))),
2298 resolution: None,
2299 };
2300 ctx.fn_defs.push(sub.clone());
2301 ctx.items.push(TopLevel::FnDef(sub));
2302
2303 ctx.items.push(TopLevel::Verify(VerifyBlock {
2304 fn_name: "sub".to_string(),
2305 line: 10,
2306 cases: vec![(
2307 sb(Expr::FnCall(
2308 sbb(Expr::Ident("sub".to_string())),
2309 vec![
2310 sb(Expr::Literal(Literal::Int(2))),
2311 sb(Expr::Literal(Literal::Int(0))),
2312 ],
2313 )),
2314 sb(Expr::Literal(Literal::Int(2))),
2315 )],
2316 case_spans: vec![],
2317 case_givens: vec![],
2318 case_hostile_origins: vec![],
2319 case_hostile_profiles: vec![],
2320 kind: VerifyKind::Law(Box::new(VerifyLaw {
2321 name: "rightIdentity".to_string(),
2322 givens: vec![VerifyGiven {
2323 name: "a".to_string(),
2324 type_name: "Int".to_string(),
2325 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
2326 }],
2327 when: None,
2328 lhs: sb(Expr::FnCall(
2329 sbb(Expr::Ident("sub".to_string())),
2330 vec![
2331 sb(Expr::Ident("a".to_string())),
2332 sb(Expr::Literal(Literal::Int(0))),
2333 ],
2334 )),
2335 rhs: sb(Expr::Ident("a".to_string())),
2336 sample_guards: vec![],
2337 })),
2338 trace: false,
2339 cases_givens: vec![],
2340 }));
2341 ctx.items.push(TopLevel::Verify(VerifyBlock {
2342 fn_name: "sub".to_string(),
2343 line: 20,
2344 cases: vec![(
2345 sb(Expr::FnCall(
2346 sbb(Expr::Ident("sub".to_string())),
2347 vec![
2348 sb(Expr::Literal(Literal::Int(2))),
2349 sb(Expr::Literal(Literal::Int(1))),
2350 ],
2351 )),
2352 sb(Expr::BinOp(
2353 BinOp::Sub,
2354 sbb(Expr::Literal(Literal::Int(0))),
2355 sbb(Expr::FnCall(
2356 sbb(Expr::Ident("sub".to_string())),
2357 vec![
2358 sb(Expr::Literal(Literal::Int(1))),
2359 sb(Expr::Literal(Literal::Int(2))),
2360 ],
2361 )),
2362 )),
2363 )],
2364 case_spans: vec![],
2365 case_givens: vec![],
2366 case_hostile_origins: vec![],
2367 case_hostile_profiles: vec![],
2368 kind: VerifyKind::Law(Box::new(VerifyLaw {
2369 name: "antiCommutative".to_string(),
2370 givens: vec![
2371 VerifyGiven {
2372 name: "a".to_string(),
2373 type_name: "Int".to_string(),
2374 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
2375 2,
2376 )))]),
2377 },
2378 VerifyGiven {
2379 name: "b".to_string(),
2380 type_name: "Int".to_string(),
2381 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
2382 1,
2383 )))]),
2384 },
2385 ],
2386 when: None,
2387 lhs: sb(Expr::FnCall(
2388 sbb(Expr::Ident("sub".to_string())),
2389 vec![
2390 sb(Expr::Ident("a".to_string())),
2391 sb(Expr::Ident("b".to_string())),
2392 ],
2393 )),
2394 rhs: sb(Expr::BinOp(
2395 BinOp::Sub,
2396 sbb(Expr::Literal(Literal::Int(0))),
2397 sbb(Expr::FnCall(
2398 sbb(Expr::Ident("sub".to_string())),
2399 vec![
2400 sb(Expr::Ident("b".to_string())),
2401 sb(Expr::Ident("a".to_string())),
2402 ],
2403 )),
2404 )),
2405 sample_guards: vec![],
2406 })),
2407 trace: false,
2408 cases_givens: vec![],
2409 }));
2410
2411 let out = transpile(&mut ctx);
2412 let lean = out
2413 .files
2414 .iter()
2415 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2416 .expect("expected generated Lean file");
2417 assert!(lean.contains("theorem sub_law_rightIdentity : ∀ (a : Int), sub a 0 = a := by"));
2418 assert!(lean.contains(" simp [sub]"));
2419 assert!(lean.contains(
2420 "theorem sub_law_antiCommutative : ∀ (a : Int) (b : Int), sub a b = (-sub b a) := by"
2421 ));
2422 assert!(lean.contains(" simpa [sub] using (Int.neg_sub b a).symm"));
2423 }
2424
2425 #[test]
2426 fn transpile_auto_proves_unary_wrapper_equivalence_law() {
2427 let mut ctx = empty_ctx();
2428 let add = FnDef {
2429 name: "add".to_string(),
2430 line: 1,
2431 params: vec![
2432 ("a".to_string(), "Int".to_string()),
2433 ("b".to_string(), "Int".to_string()),
2434 ],
2435 return_type: "Int".to_string(),
2436 effects: vec![],
2437 desc: None,
2438 body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
2439 BinOp::Add,
2440 sbb(Expr::Ident("a".to_string())),
2441 sbb(Expr::Ident("b".to_string())),
2442 )))),
2443 resolution: None,
2444 };
2445 let add_one = FnDef {
2446 name: "addOne".to_string(),
2447 line: 2,
2448 params: vec![("n".to_string(), "Int".to_string())],
2449 return_type: "Int".to_string(),
2450 effects: vec![],
2451 desc: None,
2452 body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
2453 BinOp::Add,
2454 sbb(Expr::Ident("n".to_string())),
2455 sbb(Expr::Literal(Literal::Int(1))),
2456 )))),
2457 resolution: None,
2458 };
2459 ctx.fn_defs.push(add.clone());
2460 ctx.fn_defs.push(add_one.clone());
2461 ctx.items.push(TopLevel::FnDef(add));
2462 ctx.items.push(TopLevel::FnDef(add_one));
2463 ctx.items.push(TopLevel::Verify(VerifyBlock {
2464 fn_name: "addOne".to_string(),
2465 line: 3,
2466 cases: vec![(
2467 sb(Expr::FnCall(
2468 sbb(Expr::Ident("addOne".to_string())),
2469 vec![sb(Expr::Literal(Literal::Int(2)))],
2470 )),
2471 sb(Expr::FnCall(
2472 sbb(Expr::Ident("add".to_string())),
2473 vec![
2474 sb(Expr::Literal(Literal::Int(2))),
2475 sb(Expr::Literal(Literal::Int(1))),
2476 ],
2477 )),
2478 )],
2479 case_spans: vec![],
2480 case_givens: vec![],
2481 case_hostile_origins: vec![],
2482 case_hostile_profiles: vec![],
2483 kind: VerifyKind::Law(Box::new(VerifyLaw {
2484 name: "identityViaAdd".to_string(),
2485 givens: vec![VerifyGiven {
2486 name: "n".to_string(),
2487 type_name: "Int".to_string(),
2488 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
2489 }],
2490 when: None,
2491 lhs: sb(Expr::FnCall(
2492 sbb(Expr::Ident("addOne".to_string())),
2493 vec![sb(Expr::Ident("n".to_string()))],
2494 )),
2495 rhs: sb(Expr::FnCall(
2496 sbb(Expr::Ident("add".to_string())),
2497 vec![
2498 sb(Expr::Ident("n".to_string())),
2499 sb(Expr::Literal(Literal::Int(1))),
2500 ],
2501 )),
2502 sample_guards: vec![],
2503 })),
2504 trace: false,
2505 cases_givens: vec![],
2506 }));
2507 let out = transpile(&mut ctx);
2508 let lean = out
2509 .files
2510 .iter()
2511 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2512 .expect("expected generated Lean file");
2513 assert!(
2514 lean.contains(
2515 "theorem addOne_law_identityViaAdd : ∀ (n : Int), addOne n = add n 1 := by"
2516 )
2517 );
2518 assert!(lean.contains(" simp [addOne, add]"));
2519 }
2520
2521 #[test]
2522 fn transpile_auto_proves_direct_map_set_laws() {
2523 let mut ctx = empty_ctx();
2524
2525 let map_set = |m: Spanned<Expr>, k: Spanned<Expr>, v: Spanned<Expr>| {
2526 sb(Expr::FnCall(
2527 sbb(Expr::Attr(
2528 sbb(Expr::Ident("Map".to_string())),
2529 "set".to_string(),
2530 )),
2531 vec![m, k, v],
2532 ))
2533 };
2534 let map_has = |m: Spanned<Expr>, k: Spanned<Expr>| {
2535 sb(Expr::FnCall(
2536 sbb(Expr::Attr(
2537 sbb(Expr::Ident("Map".to_string())),
2538 "has".to_string(),
2539 )),
2540 vec![m, k],
2541 ))
2542 };
2543 let map_get = |m: Spanned<Expr>, k: Spanned<Expr>| {
2544 sb(Expr::FnCall(
2545 sbb(Expr::Attr(
2546 sbb(Expr::Ident("Map".to_string())),
2547 "get".to_string(),
2548 )),
2549 vec![m, k],
2550 ))
2551 };
2552 let some = |v: Spanned<Expr>| {
2553 sb(Expr::FnCall(
2554 sbb(Expr::Attr(
2555 sbb(Expr::Ident("Option".to_string())),
2556 "Some".to_string(),
2557 )),
2558 vec![v],
2559 ))
2560 };
2561 let map_empty = || {
2562 sb(Expr::FnCall(
2563 sbb(Expr::Attr(
2564 sbb(Expr::Ident("Map".to_string())),
2565 "empty".to_string(),
2566 )),
2567 vec![],
2568 ))
2569 };
2570
2571 ctx.items.push(TopLevel::Verify(VerifyBlock {
2572 fn_name: "map".to_string(),
2573 line: 1,
2574 cases: vec![(
2575 map_has(
2576 map_set(
2577 sb(Expr::Ident("m".to_string())),
2578 sb(Expr::Ident("k".to_string())),
2579 sb(Expr::Ident("v".to_string())),
2580 ),
2581 sb(Expr::Ident("k".to_string())),
2582 ),
2583 sb(Expr::Literal(Literal::Bool(true))),
2584 )],
2585 case_spans: vec![],
2586 case_givens: vec![],
2587 case_hostile_origins: vec![],
2588 case_hostile_profiles: vec![],
2589 kind: VerifyKind::Law(Box::new(VerifyLaw {
2590 name: "setHasKey".to_string(),
2591 givens: vec![
2592 VerifyGiven {
2593 name: "m".to_string(),
2594 type_name: "Map<String, Int>".to_string(),
2595 domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
2596 },
2597 VerifyGiven {
2598 name: "k".to_string(),
2599 type_name: "String".to_string(),
2600 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
2601 "a".to_string(),
2602 )))]),
2603 },
2604 VerifyGiven {
2605 name: "v".to_string(),
2606 type_name: "Int".to_string(),
2607 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
2608 1,
2609 )))]),
2610 },
2611 ],
2612 when: None,
2613 lhs: map_has(
2614 map_set(
2615 sb(Expr::Ident("m".to_string())),
2616 sb(Expr::Ident("k".to_string())),
2617 sb(Expr::Ident("v".to_string())),
2618 ),
2619 sb(Expr::Ident("k".to_string())),
2620 ),
2621 rhs: sb(Expr::Literal(Literal::Bool(true))),
2622 sample_guards: vec![],
2623 })),
2624 trace: false,
2625 cases_givens: vec![],
2626 }));
2627
2628 ctx.items.push(TopLevel::Verify(VerifyBlock {
2629 fn_name: "map".to_string(),
2630 line: 2,
2631 cases: vec![(
2632 map_get(
2633 map_set(
2634 sb(Expr::Ident("m".to_string())),
2635 sb(Expr::Ident("k".to_string())),
2636 sb(Expr::Ident("v".to_string())),
2637 ),
2638 sb(Expr::Ident("k".to_string())),
2639 ),
2640 some(sb(Expr::Ident("v".to_string()))),
2641 )],
2642 case_spans: vec![],
2643 case_givens: vec![],
2644 case_hostile_origins: vec![],
2645 case_hostile_profiles: vec![],
2646 kind: VerifyKind::Law(Box::new(VerifyLaw {
2647 name: "setGetKey".to_string(),
2648 givens: vec![
2649 VerifyGiven {
2650 name: "m".to_string(),
2651 type_name: "Map<String, Int>".to_string(),
2652 domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
2653 },
2654 VerifyGiven {
2655 name: "k".to_string(),
2656 type_name: "String".to_string(),
2657 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
2658 "a".to_string(),
2659 )))]),
2660 },
2661 VerifyGiven {
2662 name: "v".to_string(),
2663 type_name: "Int".to_string(),
2664 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
2665 1,
2666 )))]),
2667 },
2668 ],
2669 when: None,
2670 lhs: map_get(
2671 map_set(
2672 sb(Expr::Ident("m".to_string())),
2673 sb(Expr::Ident("k".to_string())),
2674 sb(Expr::Ident("v".to_string())),
2675 ),
2676 sb(Expr::Ident("k".to_string())),
2677 ),
2678 rhs: some(sb(Expr::Ident("v".to_string()))),
2679 sample_guards: vec![],
2680 })),
2681 trace: false,
2682 cases_givens: vec![],
2683 }));
2684
2685 let out = transpile(&mut ctx);
2686 let lean = out
2687 .files
2688 .iter()
2689 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2690 .expect("expected generated Lean file");
2691 assert!(lean.contains("simpa using AverMap.has_set_self m k v"));
2692 assert!(lean.contains("simpa using AverMap.get_set_self m k v"));
2693 }
2694
2695 #[test]
2696 fn transpile_auto_proves_direct_recursive_sum_law_by_structural_induction() {
2697 let mut ctx = ctx_from_source(
2698 r#"
2699module Mirror
2700 intent =
2701 "direct recursive sum induction probe"
2702
2703type Tree
2704 Leaf(Int)
2705 Node(Tree, Tree)
2706
2707fn mirror(t: Tree) -> Tree
2708 match t
2709 Tree.Leaf(v) -> Tree.Leaf(v)
2710 Tree.Node(left, right) -> Tree.Node(mirror(right), mirror(left))
2711
2712verify mirror law involutive
2713 given t: Tree = [Tree.Leaf(1), Tree.Node(Tree.Leaf(1), Tree.Leaf(2))]
2714 mirror(mirror(t)) => t
2715"#,
2716 "mirror",
2717 );
2718 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
2719 let lean = generated_lean_file(&out);
2720
2721 assert!(
2722 lean.contains(
2723 "theorem mirror_law_involutive : ∀ (t : Tree), mirror (mirror t) = t := by"
2724 )
2725 );
2726 assert!(lean.contains(" induction t with"));
2727 assert!(lean.contains(" | leaf f0 => simp [mirror]"));
2728 assert!(lean.contains(" | node f0 f1 ih0 ih1 => simp_all [mirror]"));
2729 assert!(!lean.contains(
2730 "-- universal theorem mirror_law_involutive omitted: sampled law shape is not auto-proved yet"
2731 ));
2732 }
2733
    #[test]
    fn transpile_auto_proves_map_update_laws() {
        // End-to-end probe for auto-proving laws about a map-updating
        // function. Builds `incCount` (a read-modify-write of a word count)
        // directly as AST, states two laws about it, and checks that the
        // generated Lean proofs case-split on the prior `Map.get` and close
        // with the AverMap `has_set_self` / `get_set_self` lemmas.
        let mut ctx = empty_ctx();

        // AST-building helpers for the Aver builtin calls used below:
        // `Map.get`, `Map.set`, `Map.has`, `Option.Some`,
        // `Option.withDefault`, and `Map.empty`.
        let map_get = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "get".to_string(),
                )),
                vec![m, k],
            ))
        };
        let map_set = |m: Spanned<Expr>, k: Spanned<Expr>, v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "set".to_string(),
                )),
                vec![m, k, v],
            ))
        };
        let map_has = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "has".to_string(),
                )),
                vec![m, k],
            ))
        };
        let option_some = |v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Option".to_string())),
                    "Some".to_string(),
                )),
                vec![v],
            ))
        };
        let option_with_default = |opt: Spanned<Expr>, def: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Option".to_string())),
                    "withDefault".to_string(),
                )),
                vec![opt, def],
            ))
        };
        let map_empty = || {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "empty".to_string(),
                )),
                vec![],
            ))
        };

        // fn addOne(n: Int) -> Int = n + 1 — used on the expected side of the
        // second law.
        let add_one = FnDef {
            name: "addOne".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("n".to_string())),
                sbb(Expr::Literal(Literal::Int(1))),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(add_one.clone());
        ctx.items.push(TopLevel::FnDef(add_one));

        // fn incCount(counts, word):
        //   current = Map.get(counts, word)
        //   match current
        //     Option.Some(n) -> Map.set(counts, word, n + 1)
        //     Option.None    -> Map.set(counts, word, 1)
        let inc_count = FnDef {
            name: "incCount".to_string(),
            line: 2,
            params: vec![
                ("counts".to_string(), "Map<String, Int>".to_string()),
                ("word".to_string(), "String".to_string()),
            ],
            return_type: "Map<String, Int>".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::Block(vec![
                Stmt::Binding(
                    "current".to_string(),
                    None,
                    map_get(
                        sb(Expr::Ident("counts".to_string())),
                        sb(Expr::Ident("word".to_string())),
                    ),
                ),
                Stmt::Expr(sb(Expr::Match {
                    subject: sbb(Expr::Ident("current".to_string())),
                    arms: vec![
                        MatchArm {
                            pattern: Pattern::Constructor(
                                "Option.Some".to_string(),
                                vec!["n".to_string()],
                            ),
                            body: Box::new(map_set(
                                sb(Expr::Ident("counts".to_string())),
                                sb(Expr::Ident("word".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("n".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            )),
                        },
                        MatchArm {
                            pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
                            body: Box::new(map_set(
                                sb(Expr::Ident("counts".to_string())),
                                sb(Expr::Ident("word".to_string())),
                                sb(Expr::Literal(Literal::Int(1))),
                            )),
                        },
                    ],
                })),
            ])),
            resolution: None,
        };
        ctx.fn_defs.push(inc_count.clone());
        ctx.items.push(TopLevel::FnDef(inc_count));

        // Law keyPresent: Map.has(incCount(counts, word), word) => true,
        // quantified over counts = Map.empty() and word = "a".
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "incCount".to_string(),
            line: 10,
            cases: vec![(
                map_has(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Ident("word".to_string())),
                        ],
                    )),
                    sb(Expr::Ident("word".to_string())),
                ),
                sb(Expr::Literal(Literal::Bool(true))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "keyPresent".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "counts".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                    },
                    VerifyGiven {
                        name: "word".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
                            "a".to_string(),
                        )))]),
                    },
                ],
                when: None,
                lhs: map_has(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Ident("word".to_string())),
                        ],
                    )),
                    sb(Expr::Ident("word".to_string())),
                ),
                rhs: sb(Expr::Literal(Literal::Bool(true))),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        // Law existingKeyIncrements:
        //   Map.get(incCount(counts, "a"), "a")
        //     => Option.Some(addOne(Option.withDefault(Map.get(counts, "a"), 0)))
        // i.e. after incCount the stored value is the old value (default 0)
        // plus one.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "incCount".to_string(),
            line: 20,
            cases: vec![(
                map_get(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ],
                    )),
                    sb(Expr::Literal(Literal::Str("a".to_string()))),
                ),
                option_some(sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![option_with_default(
                        map_get(
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ),
                        sb(Expr::Literal(Literal::Int(0))),
                    )],
                ))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            case_hostile_origins: vec![],
            case_hostile_profiles: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "existingKeyIncrements".to_string(),
                givens: vec![VerifyGiven {
                    name: "counts".to_string(),
                    type_name: "Map<String, Int>".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                }],
                when: None,
                lhs: map_get(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ],
                    )),
                    sb(Expr::Literal(Literal::Str("a".to_string()))),
                ),
                rhs: option_some(sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![option_with_default(
                        map_get(
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ),
                        sb(Expr::Literal(Literal::Int(0))),
                    )],
                ))),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        let out = transpile(&mut ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        // Both proofs must case-split on the prior `Map.get` lookup (the
        // `cases h : …` form) before rewriting with the AverMap lemmas.
        assert!(
            lean.contains("cases h : AverMap.get counts word <;> simp [AverMap.has_set_self]"),
            "expected keyPresent auto-proof with has_set_self"
        );
        assert!(
            lean.contains("cases h : AverMap.get counts \"a\" <;> simp [AverMap.get_set_self, addOne, incCount]"),
            "expected existingKeyIncrements auto-proof with get_set_self"
        );
    }
2995
2996 #[test]
2997 fn transpile_parenthesizes_negative_int_call_args_in_law_samples() {
2998 let mut ctx = empty_ctx();
2999 let add = FnDef {
3000 name: "add".to_string(),
3001 line: 1,
3002 params: vec![
3003 ("a".to_string(), "Int".to_string()),
3004 ("b".to_string(), "Int".to_string()),
3005 ],
3006 return_type: "Int".to_string(),
3007 effects: vec![],
3008 desc: None,
3009 body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
3010 BinOp::Add,
3011 sbb(Expr::Ident("a".to_string())),
3012 sbb(Expr::Ident("b".to_string())),
3013 )))),
3014 resolution: None,
3015 };
3016 ctx.fn_defs.push(add.clone());
3017 ctx.items.push(TopLevel::FnDef(add));
3018 ctx.items.push(TopLevel::Verify(VerifyBlock {
3019 fn_name: "add".to_string(),
3020 line: 1,
3021 cases: vec![(
3022 sb(Expr::FnCall(
3023 sbb(Expr::Ident("add".to_string())),
3024 vec![
3025 sb(Expr::Literal(Literal::Int(-2))),
3026 sb(Expr::Literal(Literal::Int(-1))),
3027 ],
3028 )),
3029 sb(Expr::FnCall(
3030 sbb(Expr::Ident("add".to_string())),
3031 vec![
3032 sb(Expr::Literal(Literal::Int(-1))),
3033 sb(Expr::Literal(Literal::Int(-2))),
3034 ],
3035 )),
3036 )],
3037 case_spans: vec![],
3038 case_givens: vec![],
3039 case_hostile_origins: vec![],
3040 case_hostile_profiles: vec![],
3041 kind: VerifyKind::Law(Box::new(VerifyLaw {
3042 name: "commutative".to_string(),
3043 givens: vec![
3044 VerifyGiven {
3045 name: "a".to_string(),
3046 type_name: "Int".to_string(),
3047 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
3048 -2,
3049 )))]),
3050 },
3051 VerifyGiven {
3052 name: "b".to_string(),
3053 type_name: "Int".to_string(),
3054 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
3055 -1,
3056 )))]),
3057 },
3058 ],
3059 when: None,
3060 lhs: sb(Expr::FnCall(
3061 sbb(Expr::Ident("add".to_string())),
3062 vec![
3063 sb(Expr::Ident("a".to_string())),
3064 sb(Expr::Ident("b".to_string())),
3065 ],
3066 )),
3067 rhs: sb(Expr::FnCall(
3068 sbb(Expr::Ident("add".to_string())),
3069 vec![
3070 sb(Expr::Ident("b".to_string())),
3071 sb(Expr::Ident("a".to_string())),
3072 ],
3073 )),
3074 sample_guards: vec![],
3075 })),
3076 trace: false,
3077 cases_givens: vec![],
3078 }));
3079
3080 let out = transpile(&mut ctx);
3081 let lean = out
3082 .files
3083 .iter()
3084 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3085 .expect("expected generated Lean file");
3086 assert!(lean.contains(
3087 "theorem add_law_commutative_sample_1 : add (-2) (-1) = add (-1) (-2) := by native_decide"
3088 ));
3089 }
3090
3091 #[test]
3092 fn verify_law_numbering_is_scoped_per_law_name() {
3093 let mut ctx = empty_ctx();
3094 let f = FnDef {
3095 name: "f".to_string(),
3096 line: 1,
3097 params: vec![("x".to_string(), "Int".to_string())],
3098 return_type: "Int".to_string(),
3099 effects: vec![],
3100 desc: None,
3101 body: Rc::new(FnBody::from_expr(sb(Expr::Ident("x".to_string())))),
3102 resolution: None,
3103 };
3104 ctx.fn_defs.push(f.clone());
3105 ctx.items.push(TopLevel::FnDef(f));
3106 ctx.items.push(TopLevel::Verify(VerifyBlock {
3107 fn_name: "f".to_string(),
3108 line: 1,
3109 cases: vec![(
3110 sb(Expr::Literal(Literal::Int(1))),
3111 sb(Expr::Literal(Literal::Int(1))),
3112 )],
3113 case_spans: vec![],
3114 case_givens: vec![],
3115 case_hostile_origins: vec![],
3116 case_hostile_profiles: vec![],
3117 kind: VerifyKind::Cases,
3118 trace: false,
3119 cases_givens: vec![],
3120 }));
3121 ctx.items.push(TopLevel::Verify(VerifyBlock {
3122 fn_name: "f".to_string(),
3123 line: 2,
3124 cases: vec![(
3125 sb(Expr::Literal(Literal::Int(2))),
3126 sb(Expr::Literal(Literal::Int(2))),
3127 )],
3128 case_spans: vec![],
3129 case_givens: vec![],
3130 case_hostile_origins: vec![],
3131 case_hostile_profiles: vec![],
3132 kind: VerifyKind::Law(Box::new(VerifyLaw {
3133 name: "identity".to_string(),
3134 givens: vec![VerifyGiven {
3135 name: "x".to_string(),
3136 type_name: "Int".to_string(),
3137 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
3138 }],
3139 when: None,
3140 lhs: sb(Expr::Ident("x".to_string())),
3141 rhs: sb(Expr::Ident("x".to_string())),
3142 sample_guards: vec![],
3143 })),
3144 trace: false,
3145 cases_givens: vec![],
3146 }));
3147 let out = transpile_with_verify_mode(&mut ctx, VerifyEmitMode::TheoremSkeleton);
3148 let lean = out
3149 .files
3150 .iter()
3151 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3152 .expect("expected generated Lean file");
3153 assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
3154 assert!(lean.contains("theorem f_law_identity : ∀ (x : Int), x = x := by"));
3155 assert!(lean.contains("theorem f_law_identity_sample_1 : 2 = 2 := by"));
3156 assert!(!lean.contains("theorem f_law_identity_sample_2 : 2 = 2 := by"));
3157 }
3158
3159 #[test]
3160 fn proof_mode_accepts_single_int_countdown_recursion() {
3161 let mut ctx = empty_ctx();
3162 let down = FnDef {
3163 name: "down".to_string(),
3164 line: 1,
3165 params: vec![("n".to_string(), "Int".to_string())],
3166 return_type: "Int".to_string(),
3167 effects: vec![],
3168 desc: None,
3169 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
3170 subject: sbb(Expr::Ident("n".to_string())),
3171 arms: vec![
3172 MatchArm {
3173 pattern: Pattern::Literal(Literal::Int(0)),
3174 body: sbb(Expr::Literal(Literal::Int(0))),
3175 },
3176 MatchArm {
3177 pattern: Pattern::Wildcard,
3178 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
3179 "down".to_string(),
3180 vec![sb(Expr::BinOp(
3181 BinOp::Sub,
3182 sbb(Expr::Ident("n".to_string())),
3183 sbb(Expr::Literal(Literal::Int(1))),
3184 ))],
3185 )))),
3186 },
3187 ],
3188 }))),
3189 resolution: None,
3190 };
3191 ctx.items.push(TopLevel::FnDef(down.clone()));
3192 ctx.fn_defs.push(down);
3193
3194 ctx.refresh_facts();
3195 let issues = proof_mode_issues(&ctx);
3196 assert!(
3197 issues.is_empty(),
3198 "expected Int countdown recursion to be accepted, got: {:?}",
3199 issues
3200 );
3201
3202 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
3203 let lean = out
3204 .files
3205 .iter()
3206 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3207 .expect("expected generated Lean file");
3208 assert!(lean.contains("def down__fuel"));
3209 assert!(lean.contains("def down (n : Int) : Int :="));
3210 assert!(lean.contains("down__fuel ((Int.natAbs n) + 1) n"));
3211 }
3212
3213 #[test]
3214 fn proof_mode_accepts_single_int_countdown_on_nonfirst_param() {
3215 let mut ctx = empty_ctx();
3216 let repeat_like = FnDef {
3217 name: "repeatLike".to_string(),
3218 line: 1,
3219 params: vec![
3220 ("char".to_string(), "String".to_string()),
3221 ("n".to_string(), "Int".to_string()),
3222 ],
3223 return_type: "List<String>".to_string(),
3224 effects: vec![],
3225 desc: None,
3226 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
3227 subject: sbb(Expr::BinOp(
3228 BinOp::Lte,
3229 sbb(Expr::Ident("n".to_string())),
3230 sbb(Expr::Literal(Literal::Int(0))),
3231 )),
3232 arms: vec![
3233 MatchArm {
3234 pattern: Pattern::Literal(Literal::Bool(true)),
3235 body: sbb(Expr::List(vec![])),
3236 },
3237 MatchArm {
3238 pattern: Pattern::Literal(Literal::Bool(false)),
3239 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
3240 "repeatLike".to_string(),
3241 vec![
3242 sb(Expr::Ident("char".to_string())),
3243 sb(Expr::BinOp(
3244 BinOp::Sub,
3245 sbb(Expr::Ident("n".to_string())),
3246 sbb(Expr::Literal(Literal::Int(1))),
3247 )),
3248 ],
3249 )))),
3250 },
3251 ],
3252 }))),
3253 resolution: None,
3254 };
3255 ctx.items.push(TopLevel::FnDef(repeat_like.clone()));
3256 ctx.fn_defs.push(repeat_like);
3257
3258 ctx.refresh_facts();
3259 let issues = proof_mode_issues(&ctx);
3260 assert!(
3261 issues.is_empty(),
3262 "expected non-first Int countdown recursion to be accepted, got: {:?}",
3263 issues
3264 );
3265
3266 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
3267 let lean = out
3268 .files
3269 .iter()
3270 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3271 .expect("expected generated Lean file");
3272 assert!(lean.contains("def repeatLike__fuel"));
3273 assert!(lean.contains("def repeatLike (char : String) (n : Int) : List String :="));
3274 assert!(lean.contains("repeatLike__fuel ((Int.natAbs n) + 1) char n"));
3275 }
3276
3277 #[test]
3278 fn proof_mode_accepts_negative_guarded_int_ascent() {
3279 let mut ctx = empty_ctx();
3280 let normalize = FnDef {
3281 name: "normalize".to_string(),
3282 line: 1,
3283 params: vec![("angle".to_string(), "Int".to_string())],
3284 return_type: "Int".to_string(),
3285 effects: vec![],
3286 desc: None,
3287 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
3288 subject: sbb(Expr::BinOp(
3289 BinOp::Lt,
3290 sbb(Expr::Ident("angle".to_string())),
3291 sbb(Expr::Literal(Literal::Int(0))),
3292 )),
3293 arms: vec![
3294 MatchArm {
3295 pattern: Pattern::Literal(Literal::Bool(true)),
3296 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
3297 "normalize".to_string(),
3298 vec![sb(Expr::BinOp(
3299 BinOp::Add,
3300 sbb(Expr::Ident("angle".to_string())),
3301 sbb(Expr::Literal(Literal::Int(360))),
3302 ))],
3303 )))),
3304 },
3305 MatchArm {
3306 pattern: Pattern::Literal(Literal::Bool(false)),
3307 body: sbb(Expr::Ident("angle".to_string())),
3308 },
3309 ],
3310 }))),
3311 resolution: None,
3312 };
3313 ctx.items.push(TopLevel::FnDef(normalize.clone()));
3314 ctx.fn_defs.push(normalize);
3315
3316 ctx.refresh_facts();
3317 let issues = proof_mode_issues(&ctx);
3318 assert!(
3319 issues.is_empty(),
3320 "expected negative-guarded Int ascent recursion to be accepted, got: {:?}",
3321 issues
3322 );
3323
3324 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
3325 let lean = out
3326 .files
3327 .iter()
3328 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3329 .expect("expected generated Lean file");
3330 assert!(lean.contains("def normalize__fuel"));
3331 assert!(lean.contains("normalize__fuel ((Int.natAbs angle) + 1) angle"));
3332 }
3333
    #[test]
    // Classic structural recursion over a list ([] base case, cons arm that
    // tail-calls on the tail) must be inside the proof subset with no blockers.
    fn proof_mode_accepts_single_list_structural_recursion() {
        let mut ctx = empty_ctx();
        // fn len(xs: List<Int>) -> Int, tail-recursive on the list tail.
        let len = FnDef {
            name: "len".to_string(),
            line: 1,
            params: vec![("xs".to_string(), "List<Int>".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("xs".to_string())),
                arms: vec![
                    MatchArm {
                        // [] -> 0
                        pattern: Pattern::EmptyList,
                        body: sbb(Expr::Literal(Literal::Int(0))),
                    },
                    MatchArm {
                        // h :: t -> len(t) — structurally smaller argument.
                        pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "len".to_string(),
                            vec![sb(Expr::Ident("t".to_string()))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(len.clone()));
        ctx.fn_defs.push(len);

        ctx.refresh_facts();
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected List structural recursion to be accepted, got: {:?}",
            issues
        );
    }
3373
    #[test]
    // Structural recursion where the decreasing List argument is the *second*
    // parameter (an accumulator comes first) must still be accepted, and the
    // export must use a `termination_by` on the list instead of `partial`.
    fn proof_mode_accepts_single_list_structural_recursion_on_nonfirst_param() {
        let mut ctx = empty_ctx();
        // fn lenFrom(count: Int, xs: List<Int>) -> Int — accumulator-style length.
        let len_from = FnDef {
            name: "lenFrom".to_string(),
            line: 1,
            params: vec![
                ("count".to_string(), "Int".to_string()),
                ("xs".to_string(), "List<Int>".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("xs".to_string())),
                arms: vec![
                    MatchArm {
                        // [] -> count
                        pattern: Pattern::EmptyList,
                        body: sbb(Expr::Ident("count".to_string())),
                    },
                    MatchArm {
                        // h :: t -> lenFrom(count + 1, t)
                        pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "lenFrom".to_string(),
                            vec![
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("count".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                                sb(Expr::Ident("t".to_string())),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(len_from.clone()));
        ctx.fn_defs.push(len_from);

        ctx.refresh_facts();
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected non-first List structural recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // Total def with an explicit termination measure, not `partial def`.
        assert!(lean.contains("termination_by xs.length"));
        assert!(!lean.contains("partial def lenFrom"));
    }
3428
    #[test]
    // String-scanner recursion: the position advances until charAt yields
    // Option.None. This shape must be accepted and fuelized, not `partial`.
    fn proof_mode_accepts_single_string_pos_advance_recursion() {
        let mut ctx = empty_ctx();
        // fn skipWs(s: String, pos: Int) -> Int — advance pos while a char exists.
        let skip_ws = FnDef {
            name: "skipWs".to_string(),
            line: 1,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                // match String.charAt(s, pos)
                subject: sbb(Expr::FnCall(
                    sbb(Expr::Attr(
                        sbb(Expr::Ident("String".to_string())),
                        "charAt".to_string(),
                    )),
                    vec![
                        sb(Expr::Ident("s".to_string())),
                        sb(Expr::Ident("pos".to_string())),
                    ],
                )),
                arms: vec![
                    MatchArm {
                        // Option.None -> pos (past the end of the string).
                        pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
                        body: sbb(Expr::Ident("pos".to_string())),
                    },
                    MatchArm {
                        // _ -> skipWs(s, pos + 1)
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "skipWs".to_string(),
                            vec![
                                sb(Expr::Ident("s".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("pos".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(skip_ws.clone()));
        ctx.fn_defs.push(skip_ws);

        ctx.refresh_facts();
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected String+pos recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("def skipWs__fuel"));
        assert!(!lean.contains("partial def skipWs"));
    }
3492
    #[test]
    // Mutual recursion (even/odd) where each function counts the shared Int
    // down before calling the other must be accepted and fuelized as a pair.
    fn proof_mode_accepts_mutual_int_countdown_recursion() {
        let mut ctx = empty_ctx();
        // fn even(n: Int) -> Bool: 0 -> true, _ -> odd(n - 1)
        let even = FnDef {
            name: "even".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Bool".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: sbb(Expr::Literal(Literal::Bool(true))),
                    },
                    MatchArm {
                        // _ -> odd(n - 1): cross-call with a smaller argument.
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "odd".to_string(),
                            vec![sb(Expr::BinOp(
                                BinOp::Sub,
                                sbb(Expr::Ident("n".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            ))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        // fn odd(n: Int) -> Bool: 0 -> false, _ -> even(n - 1)
        let odd = FnDef {
            name: "odd".to_string(),
            line: 2,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Bool".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: sbb(Expr::Literal(Literal::Bool(false))),
                    },
                    MatchArm {
                        // _ -> even(n - 1): the mirror-image cross-call.
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "even".to_string(),
                            vec![sb(Expr::BinOp(
                                BinOp::Sub,
                                sbb(Expr::Ident("n".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            ))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(even.clone()));
        ctx.items.push(TopLevel::FnDef(odd.clone()));
        ctx.fn_defs.push(even);
        ctx.fn_defs.push(odd);

        ctx.refresh_facts();
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual Int countdown recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // Both members get fuel workers; the wrapper seeds |n| + 1 fuel.
        assert!(lean.contains("def even__fuel"));
        assert!(lean.contains("def odd__fuel"));
        assert!(lean.contains("def even (n : Int) : Bool :="));
        assert!(lean.contains("even__fuel ((Int.natAbs n) + 1) n"));
    }
3574
    #[test]
    // Mutual recursion over (String, pos) where f forwards to g with pos
    // *unchanged* (a "same" edge) and only g's call back to f increments pos.
    // The analysis must rank the same-edge so the pair still terminates.
    fn proof_mode_accepts_mutual_string_pos_recursion_with_ranked_same_edges() {
        let mut ctx = empty_ctx();
        // fn f(s: String, pos: Int) -> Int: pos >= 3 -> pos, else g(s, pos).
        let f = FnDef {
            name: "f".to_string(),
            line: 1,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::BinOp(
                    BinOp::Gte,
                    sbb(Expr::Ident("pos".to_string())),
                    sbb(Expr::Literal(Literal::Int(3))),
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        body: sbb(Expr::Ident("pos".to_string())),
                    },
                    MatchArm {
                        // _ -> g(s, pos): no decrease on this edge.
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "g".to_string(),
                            vec![
                                sb(Expr::Ident("s".to_string())),
                                sb(Expr::Ident("pos".to_string())),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        // fn g(s: String, pos: Int) -> Int: pos >= 3 -> pos, else f(s, pos + 1).
        let g = FnDef {
            name: "g".to_string(),
            line: 2,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::BinOp(
                    BinOp::Gte,
                    sbb(Expr::Ident("pos".to_string())),
                    sbb(Expr::Literal(Literal::Int(3))),
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        body: sbb(Expr::Ident("pos".to_string())),
                    },
                    MatchArm {
                        // _ -> f(s, pos + 1): the advancing edge of the cycle.
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "f".to_string(),
                            vec![
                                sb(Expr::Ident("s".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("pos".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(f.clone()));
        ctx.items.push(TopLevel::FnDef(g.clone()));
        ctx.fn_defs.push(f);
        ctx.fn_defs.push(g);

        ctx.refresh_facts();
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual String+pos recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("def f__fuel"));
        assert!(lean.contains("def g__fuel"));
        assert!(!lean.contains("partial def f"));
    }
3671
    #[test]
    // Mutual recursion where f immediately forwards its list to g (same size)
    // and g recurses back into f on the tail; accepted via a ranked sizeOf
    // measure, emitted as a total Lean `mutual` block.
    fn proof_mode_accepts_mutual_ranked_sizeof_recursion() {
        let mut ctx = empty_ctx();
        // fn f(xs: List<Int>) -> Int = g("acc", xs) — no decrease on this edge.
        let f = FnDef {
            name: "f".to_string(),
            line: 1,
            params: vec![("xs".to_string(), "List<Int>".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::TailCall(Box::new(
                TailCallData::new(
                    "g".to_string(),
                    vec![
                        sb(Expr::Literal(Literal::Str("acc".to_string()))),
                        sb(Expr::Ident("xs".to_string())),
                    ],
                ),
            ))))),
            resolution: None,
        };
        // fn g(acc: String, xs: List<Int>) -> Int: [] -> 0, h :: t -> f(t).
        let g = FnDef {
            name: "g".to_string(),
            line: 2,
            params: vec![
                ("acc".to_string(), "String".to_string()),
                ("xs".to_string(), "List<Int>".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("xs".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::EmptyList,
                        body: sbb(Expr::Literal(Literal::Int(0))),
                    },
                    MatchArm {
                        // h :: t -> f(t): the structurally smaller edge.
                        pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "f".to_string(),
                            vec![sb(Expr::Ident("t".to_string()))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(f.clone()));
        ctx.items.push(TopLevel::FnDef(g.clone()));
        ctx.fn_defs.push(f);
        ctx.fn_defs.push(g);

        ctx.refresh_facts();
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual ranked-sizeOf recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("mutual"));
        assert!(lean.contains("def f__fuel"));
        assert!(lean.contains("def g__fuel"));
        assert!(!lean.contains("partial def f"));
        assert!(!lean.contains("partial def g"));
    }
3742
3743 #[test]
3744 fn proof_mode_rejects_recursive_pure_functions() {
3745 let mut ctx = empty_ctx();
3746 let recursive_fn = FnDef {
3747 name: "loop".to_string(),
3748 line: 1,
3749 params: vec![("n".to_string(), "Int".to_string())],
3750 return_type: "Int".to_string(),
3751 effects: vec![],
3752 desc: None,
3753 body: Rc::new(FnBody::from_expr(sb(Expr::FnCall(
3754 sbb(Expr::Ident("loop".to_string())),
3755 vec![sb(Expr::Ident("n".to_string()))],
3756 )))),
3757 resolution: None,
3758 };
3759 ctx.items.push(TopLevel::FnDef(recursive_fn.clone()));
3760 ctx.fn_defs.push(recursive_fn);
3761
3762 ctx.refresh_facts();
3763 let issues = proof_mode_issues(&ctx);
3764 assert!(
3765 issues.iter().any(|i| i.contains("outside proof subset")),
3766 "expected recursive function blocker, got: {:?}",
3767 issues
3768 );
3769 }
3770
3771 #[test]
3772 fn proof_mode_allows_recursive_types() {
3773 let mut ctx = empty_ctx();
3774 let recursive_type = TypeDef::Sum {
3775 name: "Node".to_string(),
3776 variants: vec![TypeVariant {
3777 name: "Cons".to_string(),
3778 fields: vec!["Node".to_string()],
3779 }],
3780 line: 1,
3781 };
3782 ctx.items.push(TopLevel::TypeDef(recursive_type.clone()));
3783 ctx.type_defs.push(recursive_type);
3784
3785 ctx.refresh_facts();
3786 let issues = proof_mode_issues(&ctx);
3787 assert!(
3788 issues
3789 .iter()
3790 .all(|i| !i.contains("recursive types require unsafe DecidableEq shim")),
3791 "did not expect recursive type blocker, got: {:?}",
3792 issues
3793 );
3794 }
3795
3796 #[test]
3797 fn law_auto_example_exports_real_proof_artifacts() {
3798 let mut ctx = ctx_from_source(
3799 include_str!("../../../examples/formal/law_auto.av"),
3800 "law_auto",
3801 );
3802 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
3803 let lean = generated_lean_file(&out);
3804
3805 assert!(lean.contains("theorem add_law_commutative :"));
3806 assert!(lean.contains("theorem id'_law_reflexive : ∀ (x : Int), x = x := by"));
3807 assert!(lean.contains("theorem incCount_law_keyPresent :"));
3808 assert!(lean.contains("AverMap.has_set_self"));
3809 assert!(lean.contains("theorem add_law_commutative_sample_1 :"));
3810 assert!(lean.contains(":= by native_decide"));
3811 }
3812
3813 #[test]
3814 fn json_example_stays_inside_proof_subset() {
3815 let mut ctx = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
3816 ctx.refresh_facts();
3817 let issues = proof_mode_issues(&ctx);
3818 assert!(
3819 issues.is_empty(),
3820 "expected json example to stay inside proof subset, got: {:?}",
3821 issues
3822 );
3823 }
3824
    #[test]
    // The json example must compile to fully total Lean defs (fuelized or
    // measured where needed) and export its laws as domain-guarded universal
    // theorems plus native_decide-discharged samples.
    fn json_example_uses_total_defs_and_domain_guarded_laws_in_proof_mode() {
        let mut ctx = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
        let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        // No escape hatch: every recursive def is fuelized or measured.
        assert!(!lean.contains("partial def"));
        assert!(lean.contains("def skipWs__fuel"));
        assert!(lean.contains("def parseValue__fuel"));
        assert!(lean.contains("def toString' (j : Json) : String :="));
        // Synthesized size measure for the nested object-entry structure.
        assert!(
            lean.contains(
                "def averMeasureJsonEntries_String (items : List (String × Json)) : Nat :="
            )
        );
        assert!(lean.contains(
            "| .jsonObject x0 => (averMeasureJsonEntries_String (AverMap.entries x0)) + 1"
        ));
        // Guard condition surfaces as a comment; no leftover hints or aux stubs.
        assert!(lean.contains("-- when jsonRoundtripSafe j"));
        assert!(!lean.contains("-- hint: verify law '"));
        assert!(!lean.contains("private theorem toString'_law_parseRoundtrip_aux"));
        // Domain-guarded universal roundtrip theorems.
        assert!(
            lean.contains(
                "theorem toString'_law_parseRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
            )
        );
        assert!(lean.contains(
            "jsonRoundtripSafe j = true -> fromString (toString' j) = Except.ok j := by"
        ));
        assert!(
            lean.contains("theorem finishFloat_law_fromCanonicalFloat : ∀ (f : Float), f = 3.5 ∨")
        );
        assert!(lean.contains("theorem finishInt_law_fromCanonicalInt_checked_domain :"));
        assert!(lean.contains(
            "theorem toString'_law_parseValueRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
        ));
        // Sampled instances are still emitted and discharged by native_decide.
        assert!(lean.contains("theorem toString'_law_parseRoundtrip_sample_1 :"));
        assert!(lean.contains(
            "example : fromString \"null\" = Except.ok Json.jsonNull := by native_decide"
        ));
    }
3866
    #[test]
    // Transpiling source that uses the builtin network types (HttpResponse,
    // HttpRequest, Tcp.Connection) and Vector.get must inject matching Lean
    // structure declarations.
    // NOTE(review): the embedded Aver snippet's indentation was reconstructed
    // (2 spaces per level) — confirm against the original source.
    fn transpile_injects_builtin_network_types_and_vector_get_support() {
        let mut ctx = ctx_from_source(
            r#"
fn firstOrMissing(xs: Vector<String>) -> Result<String, String>
  Option.toResult(Vector.get(xs, 0), "missing")

fn defaultHeaders() -> Map<String, List<String>>
  {"content-type" => ["application/json"]}

fn mkResponse(body: String) -> HttpResponse
  HttpResponse(status = 200, body = body, headers = defaultHeaders())

fn requestPath(req: HttpRequest) -> String
  req.path

fn connPort(conn: Tcp.Connection) -> Int
  conn.port
"#,
            "network_helpers",
        );
        let out = transpile(&mut ctx);
        let lean = generated_lean_file(&out);

        // Builtin record types become Lean structures (dotted names flattened).
        assert!(lean.contains("structure HttpResponse where"));
        assert!(lean.contains("structure HttpRequest where"));
        assert!(lean.contains("structure Tcp_Connection where"));
        assert!(lean.contains("port : Int"));
        assert!(lean.contains("List (String × List String)"));
    }
3898
3899 #[test]
3900 fn law_auto_example_has_no_sorry_in_proof_mode() {
3901 let mut ctx = ctx_from_source(
3902 include_str!("../../../examples/formal/law_auto.av"),
3903 "law_auto",
3904 );
3905 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
3906 let lean = generated_lean_file(&out);
3907 assert!(
3908 !lean.contains("sorry"),
3909 "expected law_auto proof export to avoid sorry, got:\n{}",
3910 lean
3911 );
3912 }
3913
3914 #[test]
3915 fn map_example_has_no_sorry_in_proof_mode() {
3916 let mut ctx = ctx_from_source(include_str!("../../../examples/data/map.av"), "map");
3917 ctx.refresh_facts();
3918 let issues = proof_mode_issues(&ctx);
3919 assert!(
3920 issues.is_empty(),
3921 "expected map example to stay inside proof subset, got: {:?}",
3922 issues
3923 );
3924
3925 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
3926 let lean = generated_lean_file(&out);
3927 assert!(lean.contains("theorem incCount_law_trackedCountStepsByOne :"));
3929 assert!(lean.contains("sorry"));
3930 assert!(lean.contains("theorem countWords_law_presenceMatchesContains_sample_1 :"));
3932 assert!(lean.contains("theorem countWords_law_trackedWordCount_sample_1 :"));
3933 assert!(lean.contains("AverMap.has_set_self"));
3934 assert!(lean.contains("AverMap.get_set_self"));
3935 }
3936
3937 #[test]
3938 fn spec_laws_example_has_no_sorry_in_proof_mode() {
3939 let mut ctx = ctx_from_source(
3940 include_str!("../../../examples/formal/spec_laws.av"),
3941 "spec_laws",
3942 );
3943 ctx.refresh_facts();
3944 let issues = proof_mode_issues(&ctx);
3945 assert!(
3946 issues.is_empty(),
3947 "expected spec_laws example to stay inside proof subset, got: {:?}",
3948 issues
3949 );
3950
3951 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
3952 let lean = generated_lean_file(&out);
3953 assert!(
3954 !lean.contains("sorry"),
3955 "expected spec_laws proof export to avoid sorry, got:\n{}",
3956 lean
3957 );
3958 assert!(lean.contains("theorem absVal_eq_absValSpec :"));
3959 assert!(lean.contains("theorem clampNonNegative_eq_clampNonNegativeSpec :"));
3960 }
3961
    #[test]
    // NOTE(review): "without_sorry" presumably refers to the *sampled* laws,
    // which are discharged by native_decide; the export as a whole is asserted
    // to contain `sorry` for the unproved universal theorems — confirm the
    // intended reading and consider renaming for clarity.
    fn rle_example_exports_sampled_roundtrip_laws_without_sorry() {
        let mut ctx = ctx_from_source(include_str!("../../../examples/data/rle.av"), "rle");
        let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        assert!(
            lean.contains("sorry"),
            "expected rle proof export to contain sorry for unproved universal theorems"
        );
        // Sampled roundtrip instances are proved outright by native_decide.
        assert!(lean.contains(
            "theorem encode_law_roundtrip_sample_1 : decode (encode []) = [] := by native_decide"
        ));
        assert!(lean.contains(
            "theorem encodeString_law_string_roundtrip_sample_1 : decodeString (encodeString \"\") = \"\" := by native_decide"
        ));
    }
3979
3980 #[test]
3981 fn fibonacci_example_uses_fuelized_int_countdown_in_proof_mode() {
3982 let mut ctx = ctx_from_source(
3983 include_str!("../../../examples/data/fibonacci.av"),
3984 "fibonacci",
3985 );
3986 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
3987 let lean = generated_lean_file(&out);
3988
3989 assert!(lean.contains("def fibTR__fuel"));
3990 assert!(lean.contains("def fibTR (n : Int) (a : Int) (b : Int) : Int :="));
3991 assert!(lean.contains("fibTR__fuel ((Int.natAbs n) + 1) n a b"));
3992 assert!(!lean.contains("partial def fibTR"));
3993 }
3994
3995 #[test]
3996 fn fibonacci_example_stays_inside_proof_subset() {
3997 let mut ctx = ctx_from_source(
3998 include_str!("../../../examples/data/fibonacci.av"),
3999 "fibonacci",
4000 );
4001 ctx.refresh_facts();
4002 let issues = proof_mode_issues(&ctx);
4003 assert!(
4004 issues.is_empty(),
4005 "expected fibonacci example to stay inside proof subset, got: {:?}",
4006 issues
4007 );
4008 }
4009
4010 #[test]
4011 fn fibonacci_example_matches_general_linear_recurrence_shapes() {
4012 let mut ctx = ctx_from_source(
4013 include_str!("../../../examples/data/fibonacci.av"),
4014 "fibonacci",
4015 );
4016 let fib = ctx.fn_defs.iter().find(|fd| fd.name == "fib").unwrap();
4017 let fib_tr = ctx.fn_defs.iter().find(|fd| fd.name == "fibTR").unwrap();
4018 let fib_spec = ctx.fn_defs.iter().find(|fd| fd.name == "fibSpec").unwrap();
4019
4020 assert!(recurrence::detect_tailrec_int_linear_pair_wrapper(fib).is_some());
4021 assert!(recurrence::detect_tailrec_int_linear_pair_worker(fib_tr).is_some());
4022 assert!(recurrence::detect_second_order_int_linear_recurrence(fib_spec).is_some());
4023 }
4024
4025 #[test]
4026 fn fibonacci_example_auto_proves_general_linear_recurrence_spec_law() {
4027 let mut ctx = ctx_from_source(
4028 include_str!("../../../examples/data/fibonacci.av"),
4029 "fibonacci",
4030 );
4031 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4032 let lean = generated_lean_file(&out);
4033
4034 assert!(lean.contains("private def fibSpec__nat : Nat -> Int"));
4035 assert!(!lean.contains("partial def fibSpec"));
4036 assert!(lean.contains("private theorem fib_eq_fibSpec__worker_nat_shift"));
4037 assert!(lean.contains("private theorem fib_eq_fibSpec__helper_nat"));
4038 assert!(lean.contains("private theorem fib_eq_fibSpec__helper_seed"));
4039 assert!(lean.contains("theorem fib_eq_fibSpec : ∀ (n : Int), fib n = fibSpec n := by"));
4040 assert!(!lean.contains(
4041 "-- universal theorem fib_eq_fibSpec omitted: sampled law shape is not auto-proved yet"
4042 ));
4043 }
4044
    #[test]
    // A Pell-style recurrence (same wrapper/worker/second-order-spec shape as
    // fibonacci, different coefficients) must also be auto-proved by the
    // general linear-recurrence prover.
    // NOTE(review): the embedded Aver snippet's indentation was reconstructed
    // (2 spaces per level) — confirm against the original source.
    fn pell_like_example_auto_proves_same_general_shape() {
        let mut ctx = ctx_from_source(
            r#"
module Pell
  intent =
    "linear recurrence probe"

fn pellTR(n: Int, a: Int, b: Int) -> Int
  match n
    0 -> a
    _ -> pellTR(n - 1, b, a + 2 * b)

fn pell(n: Int) -> Int
  match n < 0
    true -> 0
    false -> pellTR(n, 0, 1)

fn pellSpec(n: Int) -> Int
  match n < 0
    true -> 0
    false -> match n
      0 -> 0
      1 -> 1
      _ -> pellSpec(n - 2) + 2 * pellSpec(n - 1)

verify pell law pellSpec
  given n: Int = [0, 1, 2, 3]
  pell(n) => pellSpec(n)
"#,
            "pell",
        );
        ctx.refresh_facts();
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected pell example to stay inside proof subset, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // Same artifact set the fibonacci shape produces, for the pell names.
        assert!(lean.contains("private def pellSpec__nat : Nat -> Int"));
        assert!(lean.contains("private theorem pell_eq_pellSpec__worker_nat_shift"));
        assert!(lean.contains("theorem pell_eq_pellSpec : ∀ (n : Int), pell n = pellSpec n := by"));
        assert!(!lean.contains(
            "-- universal theorem pell_eq_pellSpec omitted: sampled law shape is not auto-proved yet"
        ));
    }
4094
    #[test]
    // A pair-state recurrence with a *multiplicative* update (a * b) must not
    // be matched by the linear-recurrence prover: no worker shift lemma is
    // emitted and `sorry` remains, while sampled instances are still proved.
    // NOTE(review): the embedded Aver snippet's indentation was reconstructed
    // (2 spaces per level) — confirm against the original source.
    fn nonlinear_pair_state_recurrence_is_not_auto_proved_as_linear_shape() {
        let mut ctx = ctx_from_source(
            r#"
module WeirdRec
  intent =
    "reject nonlinear pair-state recurrence from linear recurrence prover"

fn weirdTR(n: Int, a: Int, b: Int) -> Int
  match n
    0 -> a
    _ -> weirdTR(n - 1, b, a * b)

fn weird(n: Int) -> Int
  match n < 0
    true -> 0
    false -> weirdTR(n, 0, 1)

fn weirdSpec(n: Int) -> Int
  match n < 0
    true -> 0
    false -> match n
      0 -> 0
      1 -> 1
      _ -> weirdSpec(n - 1) * weirdSpec(n - 2)

verify weird law weirdSpec
  given n: Int = [0, 1, 2, 3]
  weird(n) => weirdSpec(n)
"#,
            "weirdrec",
        );
        let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("sorry"));
        assert!(!lean.contains("private theorem weird_eq_weirdSpec__worker_nat_shift"));
        assert!(lean.contains("theorem weird_eq_weirdSpec_sample_1 :"));
    }
4135
4136 #[test]
4137 fn date_example_stays_inside_proof_subset() {
4138 let mut ctx = ctx_from_source(include_str!("../../../examples/data/date.av"), "date");
4139 ctx.refresh_facts();
4140 let issues = proof_mode_issues(&ctx);
4141 assert!(
4142 issues.is_empty(),
4143 "expected date example to stay inside proof subset, got: {:?}",
4144 issues
4145 );
4146
4147 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4148 let lean = generated_lean_file(&out);
4149 assert!(!lean.contains("partial def"));
4150 assert!(lean.contains("def parseIntSlice (s : String) (from' : Int) (to : Int) : Int :="));
4151 }
4152
4153 #[test]
4154 fn temperature_example_stays_inside_proof_subset() {
4155 let mut ctx = ctx_from_source(
4156 include_str!("../../../examples/core/temperature.av"),
4157 "temperature",
4158 );
4159 ctx.refresh_facts();
4160 let issues = proof_mode_issues(&ctx);
4161 assert!(
4162 issues.is_empty(),
4163 "expected temperature example to stay inside proof subset, got: {:?}",
4164 issues
4165 );
4166
4167 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4168 let lean = generated_lean_file(&out);
4169 assert!(!lean.contains("partial def"));
4170 assert!(
4171 lean.contains("example : celsiusToFahr 0.0 = 32.0 := by native_decide"),
4172 "expected verify examples to survive proof export, got:\n{}",
4173 lean
4174 );
4175 }
4176
4177 #[test]
4178 fn quicksort_example_stays_inside_proof_subset() {
4179 let mut ctx = ctx_from_source(
4180 include_str!("../../../examples/data/quicksort.av"),
4181 "quicksort",
4182 );
4183 ctx.refresh_facts();
4184 let issues = proof_mode_issues(&ctx);
4185 assert!(
4186 issues.is_empty(),
4187 "expected quicksort example to stay inside proof subset, got: {:?}",
4188 issues
4189 );
4190
4191 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4192 let lean = generated_lean_file(&out);
4193 assert!(lean.contains("def isOrderedFrom"));
4194 assert!(!lean.contains("partial def isOrderedFrom"));
4195 assert!(lean.contains("termination_by xs.length"));
4196 }
4197
4198 #[test]
4199 fn grok_s_language_example_uses_total_ranked_sizeof_mutual_recursion() {
4200 let mut ctx = ctx_from_source(
4201 include_str!("../../../examples/core/grok_s_language.av"),
4202 "grok_s_language",
4203 );
4204 ctx.refresh_facts();
4205 let issues = proof_mode_issues(&ctx);
4206 assert!(
4207 issues.is_empty(),
4208 "expected grok_s_language example to stay inside proof subset, got: {:?}",
4209 issues
4210 );
4211
4212 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4213 let lean = generated_lean_file(&out);
4214 assert!(lean.contains("mutual"));
4215 assert!(lean.contains("def eval__fuel"));
4216 assert!(lean.contains("def parseListItems__fuel"));
4217 assert!(!lean.contains("partial def eval"));
4218 assert!(!lean.contains("termination_by (sizeOf e,"));
4219 assert!(lean.contains("-- when validSymbolNames e"));
4220 assert!(!lean.contains("private theorem toString'_law_parseRoundtrip_aux"));
4221 assert!(lean.contains(
4222 "theorem toString'_law_parseRoundtrip : ∀ (e : Sexpr), e = Sexpr.atomNum 42 ∨"
4223 ));
4224 assert!(
4225 lean.contains("validSymbolNames e = true -> parse (toString' e) = Except.ok e := by")
4226 );
4227 assert!(lean.contains("theorem toString'_law_parseSexprRoundtrip :"));
4228 assert!(lean.contains("theorem toString'_law_parseRoundtrip_sample_1 :"));
4229 }
4230
4231 #[test]
4232 fn lambda_example_keeps_only_eval_outside_proof_subset() {
4233 let mut ctx = ctx_from_source(include_str!("../../../examples/core/lambda.av"), "lambda");
4234 ctx.refresh_facts();
4235 let issues = proof_mode_issues(&ctx);
4236 assert_eq!(
4237 issues,
4238 vec!["recursive function 'eval' is outside proof subset (currently supported: Int countdown, second-order affine Int recurrences with pair-state worker, structural recursion on List/recursive ADTs, String+position, mutual Int countdown, mutual String+position, and ranked sizeOf recursion)".to_string()]
4239 );
4240
4241 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4242 let lean = generated_lean_file(&out);
4243 assert!(lean.contains("def termToString__fuel"));
4244 assert!(lean.contains("def subst__fuel"));
4245 assert!(lean.contains("def countS__fuel"));
4246 assert!(!lean.contains("partial def termToString"));
4247 assert!(!lean.contains("partial def subst"));
4248 assert!(!lean.contains("partial def countS"));
4249 assert!(lean.contains("partial def eval"));
4250 }
4251
4252 #[test]
4253 fn mission_control_example_stays_inside_proof_subset() {
4254 let mut ctx = ctx_from_source(
4255 include_str!("../../../examples/apps/mission_control.av"),
4256 "mission_control",
4257 );
4258 ctx.refresh_facts();
4259 let issues = proof_mode_issues(&ctx);
4260 assert!(
4261 issues.is_empty(),
4262 "expected mission_control example to stay inside proof subset, got: {:?}",
4263 issues
4264 );
4265
4266 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4267 let lean = generated_lean_file(&out);
4268 assert!(!lean.contains("partial def normalizeAngle"));
4269 assert!(lean.contains("def normalizeAngle__fuel"));
4270 }
4271
4272 #[test]
4273 fn notepad_store_example_stays_inside_proof_subset() {
4274 let mut ctx = ctx_from_source(
4275 include_str!("../../../examples/apps/notepad/store.av"),
4276 "notepad_store",
4277 );
4278 ctx.refresh_facts();
4279 let issues = proof_mode_issues(&ctx);
4280 assert!(
4281 issues.is_empty(),
4282 "expected notepad/store example to stay inside proof subset, got: {:?}",
4283 issues
4284 );
4285
4286 let out = transpile_for_proof_mode(&mut ctx, VerifyEmitMode::NativeDecide);
4287 let lean = generated_lean_file(&out);
4288 assert!(lean.contains("def deserializeLine (line : String) : Except String Note :="));
4289 assert!(lean.contains("Except String (List Note)"));
4290 assert!(!lean.contains("partial def deserializeLine"));
4291 assert!(lean.contains("-- when noteRoundtripSafe note"));
4292 assert!(lean.contains("-- when notesRoundtripSafe notes"));
4293 assert!(lean.contains(
4294 "theorem serializeLine_law_lineRoundtrip : ∀ (note : Note), note = { id' := 1, title := \"Hello\", body := \"World\" : Note } ∨"
4295 ));
4296 assert!(lean.contains(
4297 "theorem serializeLines_law_notesRoundtrip : ∀ (notes : List Note), notes = [] ∨"
4298 ));
4299 assert!(lean.contains("notesRoundtripSafe notes = true ->"));
4300 assert!(lean.contains("parseNotes (s!\"{String.intercalate \"\\n\" (serializeLines notes)}\\n\") = Except.ok notes"));
4301 assert!(lean.contains("theorem serializeLine_law_lineRoundtrip_sample_1 :"));
4302 assert!(lean.contains("theorem serializeLines_law_notesRoundtrip_sample_1 :"));
4303 }
4304}