1mod builtins;
7mod expr;
8mod law_auto;
9mod pattern;
10pub(crate) mod recurrence;
11mod shared;
12mod toplevel;
13mod types;
14
15use std::collections::{HashMap, HashSet};
16
17use crate::ast::{Expr, FnDef, Spanned, TopLevel, TypeDef, VerifyKind};
18use crate::call_graph;
19use crate::codegen::{CodegenContext, ProjectOutput};
20
/// How `verify` blocks discharge their obligations in the emitted Lean source.
/// Consumed by `toplevel::emit_verify_block`; the exact Lean text emitted per
/// mode lives there.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum VerifyEmitMode {
    /// Close goals automatically (name suggests Lean's `native_decide`;
    /// confirm against `emit_verify_block`).
    NativeDecide,
    /// Emit `sorry` placeholders in place of proofs.
    Sorry,
    /// Emit theorem statements as skeletons for hand-written proofs.
    TheoremSkeleton,
}
33
34pub use crate::codegen::recursion::{ProofModeIssue, RecursionPlan};
39
/// Banner emitted at the top of every generated Lean file; also silences the
/// unused-variables linter, since generated code frequently binds unused names.
const LEAN_PRELUDE_HEADER: &str = r#"-- Generated by the Aver → Lean 4 transpiler
-- Pure core logic plus Oracle-lifted classified effects

set_option linter.unusedVariables false

-- Prelude: helper definitions for Aver builtins"#;

/// Int → Float coercion plus an explicit named conversion helper.
const LEAN_PRELUDE_FLOAT_COE: &str = r#"instance : Coe Int Float := ⟨fun n => Float.ofInt n⟩

def Float.fromInt (n : Int) : Float := Float.ofInt n"#;

/// DecidableEq for Float: an unsafe `==`-based decision procedure hidden
/// behind an opaque constant via `implemented_by`, so the kernel never
/// unfolds the unsafe code.
const LEAN_PRELUDE_FLOAT_DEC_EQ: &str = r#"private unsafe def Float.unsafeDecEq (a b : Float) : Decidable (a = b) :=
  if a == b then isTrue (unsafeCast ()) else isFalse (unsafeCast ())
@[implemented_by Float.unsafeDecEq]
private opaque Float.compDecEq (a b : Float) : Decidable (a = b)
instance : DecidableEq Float := Float.compDecEq"#;

/// Structural DecidableEq instance for `Except`, by constructor case split.
const LEAN_PRELUDE_EXCEPT_DEC_EQ: &str = r#"instance [DecidableEq ε] [DecidableEq α] : DecidableEq (Except ε α)
  | .ok a, .ok b =>
    if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
  | .error a, .error b =>
    if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
  | .ok _, .error _ => isFalse (by intro h; cases h)
  | .error _, .ok _ => isFalse (by intro h; cases h)"#;

/// `Except.withDefault`: unwrap an `Except`, falling back to `d` on error.
const LEAN_PRELUDE_EXCEPT_NS: &str = r#"namespace Except
def withDefault (r : Except ε α) (d : α) : α :=
  match r with
  | .ok v => v
  | .error _ => d
end Except"#;

/// `Option.toExcept`: lift an `Option` into `Except` with a supplied error.
const LEAN_PRELUDE_OPTION_TO_EXCEPT: &str = r#"def Option.toExcept (o : Option α) (e : ε) : Except ε α :=
  match o with
  | some v => .ok v
  | none => .error e"#;

/// Heterogeneous `+` on strings so generated code can use `+` for append.
const LEAN_PRELUDE_STRING_HADD: &str = r#"instance : HAdd String String String := ⟨String.append⟩"#;

/// Dewey-decimal style branch identifier (e.g. "1.2.1") for decision branches.
const LEAN_PRELUDE_BRANCH_PATH: &str = r#"structure BranchPath where
  dewey : String
  deriving Repr, BEq, DecidableEq

def BranchPath.Root : BranchPath := { dewey := "" }

def BranchPath.child (p : BranchPath) (idx : Int) : BranchPath :=
  if p.dewey.isEmpty then { dewey := toString idx }
  else { dewey := p.dewey ++ "." ++ toString idx }

def BranchPath.parse (s : String) : BranchPath := { dewey := s }"#;

/// Fuel bound for string-position loops, scaled by a caller-chosen rank budget.
const LEAN_PRELUDE_PROOF_FUEL: &str = r#"def averStringPosFuel (s : String) (pos : Int) (rankBudget : Nat) : Nat :=
  (((s.data.length) - pos.toNat) + 1) * rankBudget"#;
96
/// Size measures for container types (list/option/except), each strictly
/// positive; used as termination measures for recursive functions.
const LEAN_PRELUDE_AVER_MEASURE: &str = r#"namespace AverMeasure
def list (elemMeasure : α → Nat) : List α → Nat
  | [] => 1
  | x :: xs => elemMeasure x + list elemMeasure xs + 1
def option (elemMeasure : α → Nat) : Option α → Nat
  | none => 1
  | some x => elemMeasure x + 1
def except (errMeasure : ε → Nat) (okMeasure : α → Nat) : Except ε α → Nat
  | .error e => errMeasure e + 1
  | .ok v => okMeasure v + 1
end AverMeasure"#;

/// Association-list map backing Aver's Map builtin. `set` replaces an
/// existing binding in place (via the local `go`) or appends a new one.
const AVER_MAP_PRELUDE_BASE: &str = r#"namespace AverMap
def empty : List (α × β) := []
def get [DecidableEq α] (m : List (α × β)) (k : α) : Option β :=
  match m with
  | [] => none
  | (k', v) :: rest => if k = k' then some v else AverMap.get rest k
def set [DecidableEq α] (m : List (α × β)) (k : α) (v : β) : List (α × β) :=
  let rec go : List (α × β) → List (α × β)
    | [] => [(k, v)]
    | (k', v') :: rest => if k = k' then (k, v) :: rest else (k', v') :: go rest
  go m
def has [DecidableEq α] (m : List (α × β)) (k : α) : Bool :=
  m.any (fun p => decide (k = p.1))
def remove [DecidableEq α] (m : List (α × β)) (k : α) : List (α × β) :=
  m.filter (fun p => !(decide (k = p.1)))
def keys (m : List (α × β)) : List α := m.map Prod.fst
def values (m : List (α × β)) : List β := m.map Prod.snd
def entries (m : List (α × β)) : List (α × β) := m
def len (m : List (α × β)) : Nat := m.length
def fromList (entries : List (α × β)) : List (α × β) := entries"#;
129
/// Lemma: a key just written by `set` is reported present by `has`.
const AVER_MAP_PRELUDE_HAS_SET_SELF: &str = r#"private theorem any_set_go_self [DecidableEq α] (k : α) (v : β) :
  ∀ (m : List (α × β)), List.any (AverMap.set.go k v m) (fun p => decide (k = p.1)) = true := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, List.any]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.set.go, List.any, h]
      · simp [AverMap.set.go, List.any, h, ih]

theorem has_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
  AverMap.has (AverMap.set m k v) k = true := by
  simpa [AverMap.has, AverMap.set] using any_set_go_self k v m"#;

/// Lemma: `get` of a key just written by `set` returns the written value.
const AVER_MAP_PRELUDE_GET_SET_SELF: &str = r#"private theorem get_set_go_self [DecidableEq α] (k : α) (v : β) :
  ∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) k = some v := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, AverMap.get]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.set.go, AverMap.get, h]
      · simp [AverMap.set.go, AverMap.get, h, ih]

theorem get_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
  AverMap.get (AverMap.set m k v) k = some v := by
  simpa [AverMap.set] using get_set_go_self k v m"#;

/// Lemma: `set` leaves `get` of any *other* key unchanged.
const AVER_MAP_PRELUDE_GET_SET_OTHER: &str = r#"private theorem get_set_go_other [DecidableEq α] (k key : α) (v : β) (h : key ≠ k) :
  ∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) key = AverMap.get m key := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, AverMap.get, h]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases hk : k = k'
      · have hkey : key ≠ k' := by simpa [hk] using h
        simp [AverMap.set.go, AverMap.get, hk, hkey]
      · by_cases hkey : key = k'
        · simp [AverMap.set.go, AverMap.get, hk, hkey]
        · simp [AverMap.set.go, AverMap.get, hk, hkey, ih]

theorem get_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
  AverMap.get (AverMap.set m k v) key = AverMap.get m key := by
  simpa [AverMap.set] using get_set_go_other k key v h m"#;

/// `has` is `(get …).isSome`; hence `set` preserves `has` for other keys.
const AVER_MAP_PRELUDE_HAS_SET_OTHER: &str = r#"theorem has_eq_isSome_get [DecidableEq α] (m : List (α × β)) (k : α) :
  AverMap.has m k = (AverMap.get m k).isSome := by
  induction m with
  | nil =>
    simp [AverMap.has, AverMap.get]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.has, AverMap.get, List.any, h]
      · simpa [AverMap.has, AverMap.get, List.any, h] using ih

theorem has_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
  AverMap.has (AverMap.set m k v) key = AverMap.has m key := by
  rw [AverMap.has_eq_isSome_get, AverMap.has_eq_isSome_get]
  simp [AverMap.get_set_other, h]"#;

/// Closes the `AverMap` namespace opened in `AVER_MAP_PRELUDE_BASE`.
const AVER_MAP_PRELUDE_END: &str = r#"end AverMap"#;
202
/// List helpers: bounds-checked indexed `get` (negative index → none) and a
/// stable insertion sort for `Ord` elements.
const LEAN_PRELUDE_AVER_LIST: &str = r#"namespace AverList
def get (xs : List α) (i : Int) : Option α :=
  if i < 0 then none else xs.get? i.toNat
private def insertSorted [Ord α] (x : α) : List α → List α
  | [] => [x]
  | y :: ys =>
    if compare x y == Ordering.lt || compare x y == Ordering.eq then
      x :: y :: ys
    else
      y :: insertSorted x ys
def sort [Ord α] (xs : List α) : List α :=
  xs.foldl (fun acc x => insertSorted x acc) []
end AverList"#;

/// HTTP header name/value pair.
const LEAN_PRELUDE_HEADER_TYPE: &str = r#"structure Header where
  name : String
  value : String
  deriving Repr, BEq, Inhabited, DecidableEq"#;

/// HTTP response record (status, body, headers).
const LEAN_PRELUDE_HTTP_RESPONSE_TYPE: &str = r#"structure HttpResponse where
  status : Int
  body : String
  headers : List Header
  deriving Repr, BEq, Inhabited, DecidableEq"#;

/// HTTP request record (method, path, body, headers).
const LEAN_PRELUDE_HTTP_REQUEST_TYPE: &str = r#"structure HttpRequest where
  method : String
  path : String
  body : String
  headers : List Header
  deriving Repr, BEq, Inhabited, DecidableEq"#;

/// TCP connection handle record (opaque id plus host/port).
const LEAN_PRELUDE_TCP_CONNECTION_TYPE: &str = r#"structure Tcp_Connection where
  id : String
  host : String
  port : Int
  deriving Repr, BEq, Inhabited, DecidableEq"#;
240
/// String builtins (charAt/slice/chars/fromFloat) and the `AverString`
/// namespace with a character-level `split`, plus lemmas relating `split` and
/// `String.intercalate` used by generated round-trip proofs.
const LEAN_PRELUDE_STRING_HELPERS: &str = r#"def String.charAt (s : String) (i : Int) : Option String :=
  if i < 0 then none
  else (s.toList.get? i.toNat).map Char.toString
theorem String.charAt_length_none (s : String) : String.charAt s s.length = none := by
  have hs : ¬ ((s.length : Int) < 0) := by omega
  unfold String.charAt
  simp [hs]
  change s.data.length ≤ s.length
  exact Nat.le_refl _
def String.slice (s : String) (start stop : Int) : String :=
  let startN := if start < 0 then 0 else start.toNat
  let stopN := if stop < 0 then 0 else stop.toNat
  let chars := s.toList
  String.mk ((chars.drop startN).take (stopN - startN))
private def trimFloatTrailingZerosChars (chars : List Char) : List Char :=
  let noZeros := (chars.reverse.dropWhile (fun c => c == '0')).reverse
  match noZeros.reverse with
  | '.' :: rest => rest.reverse
  | _ => noZeros
private def normalizeFloatString (s : String) : String :=
  if s.toList.any (fun c => c == '.') then
    String.mk (trimFloatTrailingZerosChars s.toList)
  else s
def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)
def String.chars (s : String) : List String := s.toList.map Char.toString
private theorem char_to_string_append_mk (c : Char) (chars : List Char) :
  Char.toString c ++ String.mk chars = String.mk (c :: chars) := by
  rfl
private theorem string_intercalate_empty_char_strings_go (acc : String) (chars : List Char) :
  String.intercalate.go acc "" (chars.map Char.toString) = acc ++ String.mk chars := by
  induction chars generalizing acc with
  | nil =>
    simp [String.intercalate.go]
  | cons c rest ih =>
    calc
      String.intercalate.go acc "" ((c :: rest).map Char.toString)
        = String.intercalate.go (acc ++ Char.toString c) "" (rest.map Char.toString) := by
          simp [String.intercalate.go]
      _ = (acc ++ Char.toString c) ++ String.mk rest := by
          simpa using ih (acc ++ Char.toString c)
      _ = acc ++ String.mk (c :: rest) := by
          simp [String.append_assoc, char_to_string_append_mk]
private theorem string_intercalate_empty_char_strings (chars : List Char) :
  String.intercalate "" (chars.map Char.toString) = String.mk chars := by
  cases chars with
  | nil =>
    simp [String.intercalate]
  | cons c rest =>
    simpa [String.intercalate] using string_intercalate_empty_char_strings_go c.toString rest
theorem String.intercalate_empty_chars (s : String) :
  String.intercalate "" (String.chars s) = s := by
  cases s with
  | mk chars =>
    simpa [String.chars] using string_intercalate_empty_char_strings chars
namespace AverString
def splitOnCharGo (currentRev : List Char) (sep : Char) : List Char → List String
  | [] => [String.mk currentRev.reverse]
  | c :: cs =>
    if c == sep then
      String.mk currentRev.reverse :: splitOnCharGo [] sep cs
    else
      splitOnCharGo (c :: currentRev) sep cs
def splitOnChar (s : String) (sep : Char) : List String :=
  splitOnCharGo [] sep s.toList
def split (s delim : String) : List String :=
  match delim.toList with
  | [] => "" :: (s.toList.map Char.toString) ++ [""]
  | [c] => splitOnChar s c
  | _ => s.splitOn delim
@[simp] private theorem char_toString_data (c : Char) : c.toString.data = [c] := by
  rfl
private theorem splitOnCharGo_until_sep
  (prefixRev part tail : List Char) (sep : Char) :
  part.all (fun c => c != sep) = true ->
  splitOnCharGo prefixRev sep (part ++ sep :: tail) =
  String.mk (prefixRev.reverse ++ part) :: splitOnCharGo [] sep tail := by
  intro h_safe
  induction part generalizing prefixRev with
  | nil =>
    simp [splitOnCharGo]
  | cons c rest ih =>
    simp at h_safe
    have h_rest : (rest.all fun c => c != sep) = true := by
      simpa using h_safe.2
    simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
      (ih (c :: prefixRev) h_rest)
private theorem splitOnCharGo_no_sep
  (prefixRev chars : List Char) (sep : Char) :
  chars.all (fun c => c != sep) = true ->
  splitOnCharGo prefixRev sep chars =
  [String.mk (prefixRev.reverse ++ chars)] := by
  intro h_safe
  induction chars generalizing prefixRev with
  | nil =>
    simp [splitOnCharGo]
  | cons c rest ih =>
    simp at h_safe
    have h_rest : (rest.all fun c => c != sep) = true := by
      simpa using h_safe.2
    simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
      (ih (c :: prefixRev) h_rest)
@[simp] theorem split_single_char_append
  (head tail : String) (sep : Char) :
  head.toList.all (fun c => c != sep) = true ->
  split (head ++ Char.toString sep ++ tail) (Char.toString sep) =
  head :: split tail (Char.toString sep) := by
  intro h_safe
  simpa [split, splitOnChar] using
    (splitOnCharGo_until_sep [] head.data tail.data sep h_safe)
@[simp] theorem split_single_char_no_sep
  (s : String) (sep : Char) :
  s.toList.all (fun c => c != sep) = true ->
  split s (Char.toString sep) = [s] := by
  intro h_safe
  simpa [split, splitOnChar] using
    (splitOnCharGo_no_sep [] s.data sep h_safe)
private theorem intercalate_go_prefix
  (pref acc sep : String) (rest : List String) :
  String.intercalate.go (pref ++ sep ++ acc) sep rest =
  pref ++ sep ++ String.intercalate.go acc sep rest := by
  induction rest generalizing acc with
  | nil =>
    simp [String.intercalate.go, String.append_assoc]
  | cons x xs ih =>
    simpa [String.intercalate.go, String.append_assoc] using
      (ih (acc ++ sep ++ x))
@[simp] theorem split_intercalate_trailing_single_char
  (parts : List String) (sep : Char) :
  parts.all (fun part => part.toList.all (fun c => c != sep)) = true ->
  split (String.intercalate (Char.toString sep) parts ++ Char.toString sep) (Char.toString sep) =
  match parts with
  | [] => ["", ""]
  | _ => parts ++ [""] := by
  intro h_safe
  induction parts with
  | nil =>
    simp [split, splitOnChar, splitOnCharGo]
  | cons part rest ih =>
    simp at h_safe
    have h_part : (part.toList.all fun c => c != sep) = true := by
      simpa using h_safe.1
    cases rest with
    | nil =>
      have h_empty : ("".toList.all fun c => c != sep) = true := by simp
      calc
        split (String.intercalate.go part (Char.toString sep) [] ++ Char.toString sep) (Char.toString sep)
          = split (part ++ Char.toString sep) (Char.toString sep) := by
            simp [String.intercalate.go]
        _ = part :: split "" (Char.toString sep) := by
            simpa using split_single_char_append part "" sep h_part
        _ = [part, ""] := by
            simp [split_single_char_no_sep, h_empty]
    | cons next rest' =>
      have h_rest : ((next :: rest').all fun part => part.toList.all fun c => c != sep) = true := by
        simpa using h_safe.2
      calc
        split
            (String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep)
            (Char.toString sep)
          =
          split
            (part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep))
            (Char.toString sep) := by
            have h_join :
              String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
                = part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
              calc
                String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
                  = String.intercalate.go (part ++ Char.toString sep ++ next) (Char.toString sep) rest' ++ Char.toString sep := by
                    simp [String.intercalate.go]
                _ = part ++ Char.toString sep ++ (String.intercalate.go next (Char.toString sep) rest' ++ Char.toString sep) := by
                    rw [intercalate_go_prefix part next (Char.toString sep) rest']
                    simp [String.append_assoc]
                _ = part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
                    simp [String.intercalate, String.intercalate.go]
            simpa using congrArg (fun s => split s (Char.toString sep)) h_join
        _ = part :: split
            (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
            (Char.toString sep) := by
            simpa using split_single_char_append
              part
              (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
              sep
              h_part
        _ = part :: (next :: rest' ++ [""]) := by
            simpa using ih h_rest
end AverString"#;
428
/// Verified decimal digit machinery (`AverDigits`), `String.fromInt` /
/// `Int.fromString` with a round-trip theorem, and a best-effort
/// `Float.fromString` supporting sign, fraction, and exponent parts.
const LEAN_PRELUDE_NUMERIC_PARSE: &str = r#"namespace AverDigits
def foldDigitsAcc (acc : Nat) : List Nat -> Nat
  | [] => acc
  | d :: ds => foldDigitsAcc (acc * 10 + d) ds

def foldDigits (digits : List Nat) : Nat :=
  foldDigitsAcc 0 digits

private theorem foldDigitsAcc_append_singleton (acc : Nat) (xs : List Nat) (d : Nat) :
  foldDigitsAcc acc (xs ++ [d]) = foldDigitsAcc acc xs * 10 + d := by
  induction xs generalizing acc with
  | nil =>
    simp [foldDigitsAcc]
  | cons x xs ih =>
    simp [foldDigitsAcc, ih, Nat.left_distrib, Nat.add_assoc, Nat.add_left_comm]

private theorem foldDigits_append_singleton (xs : List Nat) (d : Nat) :
  foldDigits (xs ++ [d]) = foldDigits xs * 10 + d := by
  simpa [foldDigits] using foldDigitsAcc_append_singleton 0 xs d

def natDigits : Nat -> List Nat
  | n =>
    if n < 10 then
      [n]
    else
      natDigits (n / 10) ++ [n % 10]
termination_by
  n => n

theorem natDigits_nonempty (n : Nat) : natDigits n ≠ [] := by
  by_cases h : n < 10
  · rw [natDigits.eq_1]
    simp [h]
  · rw [natDigits.eq_1]
    simp [h]

theorem natDigits_digits_lt_ten : ∀ n : Nat, ∀ d ∈ natDigits n, d < 10 := by
  intro n d hd
  by_cases h : n < 10
  · rw [natDigits.eq_1] at hd
    simp [h] at hd
    rcases hd with rfl
    exact h
  · rw [natDigits.eq_1] at hd
    simp [h] at hd
    rcases hd with hd | hd
    · exact natDigits_digits_lt_ten (n / 10) d hd
    · subst hd
      exact Nat.mod_lt n (by omega)

theorem foldDigits_natDigits : ∀ n : Nat, foldDigits (natDigits n) = n := by
  intro n
  by_cases h : n < 10
  · rw [natDigits.eq_1]
    simp [h, foldDigits, foldDigitsAcc]
  · rw [natDigits.eq_1]
    simp [h]
    rw [foldDigits_append_singleton]
    rw [foldDigits_natDigits (n / 10)]
    omega

def digitChar : Nat -> Char
  | 0 => '0' | 1 => '1' | 2 => '2' | 3 => '3' | 4 => '4'
  | 5 => '5' | 6 => '6' | 7 => '7' | 8 => '8' | 9 => '9'
  | _ => '0'

def charToDigit? : Char -> Option Nat
  | '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3 | '4' => some 4
  | '5' => some 5 | '6' => some 6 | '7' => some 7 | '8' => some 8 | '9' => some 9
  | _ => none

theorem charToDigit_digitChar : ∀ d : Nat, d < 10 -> charToDigit? (digitChar d) = some d
  | 0, _ => by simp [digitChar, charToDigit?]
  | 1, _ => by simp [digitChar, charToDigit?]
  | 2, _ => by simp [digitChar, charToDigit?]
  | 3, _ => by simp [digitChar, charToDigit?]
  | 4, _ => by simp [digitChar, charToDigit?]
  | 5, _ => by simp [digitChar, charToDigit?]
  | 6, _ => by simp [digitChar, charToDigit?]
  | 7, _ => by simp [digitChar, charToDigit?]
  | 8, _ => by simp [digitChar, charToDigit?]
  | 9, _ => by simp [digitChar, charToDigit?]
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem digitChar_ne_minus : ∀ d : Nat, d < 10 -> digitChar d ≠ '-'
  | 0, _ => by decide
  | 1, _ => by decide
  | 2, _ => by decide
  | 3, _ => by decide
  | 4, _ => by decide
  | 5, _ => by decide
  | 6, _ => by decide
  | 7, _ => by decide
  | 8, _ => by decide
  | 9, _ => by decide
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->
  Char.toString (digitChar d) ≠ " " ∧
  Char.toString (digitChar d) ≠ "\t" ∧
  Char.toString (digitChar d) ≠ "\n" ∧
  Char.toString (digitChar d) ≠ "\r"
  | 0, _ => by decide
  | 1, _ => by decide
  | 2, _ => by decide
  | 3, _ => by decide
  | 4, _ => by decide
  | 5, _ => by decide
  | 6, _ => by decide
  | 7, _ => by decide
  | 8, _ => by decide
  | 9, _ => by decide
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem mapM_charToDigit_digits : ∀ ds : List Nat,
  (∀ d ∈ ds, d < 10) -> List.mapM charToDigit? (ds.map digitChar) = some ds := by
  intro ds hds
  induction ds with
  | nil =>
    simp
  | cons d ds ih =>
    have hd : d < 10 := hds d (by simp)
    have htail : ∀ x ∈ ds, x < 10 := by
      intro x hx
      exact hds x (by simp [hx])
    simp [charToDigit_digitChar d hd, ih htail]

def natDigitsChars (n : Nat) : List Char :=
  (natDigits n).map digitChar

def parseNatChars (chars : List Char) : Option Nat :=
  match chars with
  | [] => none
  | _ => do
    let digits <- List.mapM charToDigit? chars
    pure (foldDigits digits)

theorem parseNatChars_nat (n : Nat) :
  parseNatChars (natDigitsChars n) = some n := by
  unfold parseNatChars natDigitsChars
  cases h : (natDigits n).map digitChar with
  | nil =>
    exfalso
    exact natDigits_nonempty n (List.map_eq_nil_iff.mp h)
  | cons hd tl =>
    have hdigits : List.mapM charToDigit? (List.map digitChar (natDigits n)) = some (natDigits n) :=
      mapM_charToDigit_digits (natDigits n) (fun d hd => natDigits_digits_lt_ten n d hd)
    rw [h] at hdigits
    simp [h, hdigits, foldDigits_natDigits]
end AverDigits

def String.fromInt (n : Int) : String :=
  match n with
  | .ofNat m => String.mk (AverDigits.natDigitsChars m)
  | .negSucc m => String.mk ('-' :: AverDigits.natDigitsChars (m + 1))

def Int.fromString (s : String) : Except String Int :=
  match s.toList with
  | [] => .error ("Cannot parse '" ++ s ++ "' as Int")
  | '-' :: rest =>
    match AverDigits.parseNatChars rest with
    | some n => .ok (-Int.ofNat n)
    | none => .error ("Cannot parse '" ++ s ++ "' as Int")
  | chars =>
    match AverDigits.parseNatChars chars with
    | some n => .ok (Int.ofNat n)
    | none => .error ("Cannot parse '" ++ s ++ "' as Int")

theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n
  | .ofNat m => by
    cases h : AverDigits.natDigits m with
    | nil =>
      exfalso
      exact AverDigits.natDigits_nonempty m h
    | cons d ds =>
      have hd : d < 10 := AverDigits.natDigits_digits_lt_ten m d (by simp [h])
      have hne : AverDigits.digitChar d ≠ '-' := AverDigits.digitChar_ne_minus d hd
      have hparse : AverDigits.parseNatChars (AverDigits.digitChar d :: List.map AverDigits.digitChar ds) = some m := by
        simpa [AverDigits.natDigitsChars, h] using AverDigits.parseNatChars_nat m
      simp [String.fromInt, Int.fromString, AverDigits.natDigitsChars, h, hne, hparse]
  | .negSucc m => by
    simp [String.fromInt, Int.fromString, AverDigits.parseNatChars_nat]
    rfl

private def charDigitsToNat (cs : List Char) : Nat :=
  cs.foldl (fun acc c => acc * 10 + (c.toNat - '0'.toNat)) 0

private def parseExpPart : List Char → (Bool × List Char)
  | '-' :: rest => (true, rest.takeWhile Char.isDigit)
  | '+' :: rest => (false, rest.takeWhile Char.isDigit)
  | rest => (false, rest.takeWhile Char.isDigit)

def Float.fromString (s : String) : Except String Float :=
  let chars := s.toList
  let (neg, chars) := match chars with
    | '-' :: rest => (true, rest)
    | _ => (false, chars)
  let intPart := chars.takeWhile Char.isDigit
  let rest := chars.dropWhile Char.isDigit
  let (fracPart, rest) := match rest with
    | '.' :: rest => (rest.takeWhile Char.isDigit, rest.dropWhile Char.isDigit)
    | _ => ([], rest)
  let (expNeg, expDigits) := match rest with
    | 'e' :: rest => parseExpPart rest
    | 'E' :: rest => parseExpPart rest
    | _ => (false, [])
  if intPart.isEmpty && fracPart.isEmpty then .error ("Invalid float: " ++ s)
  else
    let mantissa := charDigitsToNat (intPart ++ fracPart)
    let fracLen : Int := fracPart.length
    let expVal : Int := charDigitsToNat expDigits
    let shift : Int := (if expNeg then -expVal else expVal) - fracLen
    let f := if shift >= 0 then Float.ofScientific mantissa false shift.toNat
      else Float.ofScientific mantissa true ((-shift).toNat)
    .ok (if neg then -f else f)"#;
647
/// Char code-point conversions (rejecting out-of-range and surrogate values)
/// and byte ↔ two-digit-hex helpers in the `AverByte` namespace.
const LEAN_PRELUDE_CHAR_BYTE: &str = r#"def Char.toCode (s : String) : Int :=
  match s.toList.head? with
  | some c => (c.toNat : Int)
  | none => panic! "Char.toCode: string is empty"
def Char.fromCode (n : Int) : Option String :=
  if n < 0 || n > 1114111 then none
  else if n >= 55296 && n <= 57343 then none
  else some (Char.toString (Char.ofNat n.toNat))

def hexDigit (n : Int) : String :=
  match n with
  | 0 => "0" | 1 => "1" | 2 => "2" | 3 => "3"
  | 4 => "4" | 5 => "5" | 6 => "6" | 7 => "7"
  | 8 => "8" | 9 => "9" | 10 => "a" | 11 => "b"
  | 12 => "c" | 13 => "d" | 14 => "e" | 15 => "f"
  | _ => "?"

def byteToHex (code : Int) : String :=
  hexDigit (code / 16) ++ hexDigit (code % 16)

namespace AverByte
private def hexValue (c : Char) : Option Int :=
  match c with
  | '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3
  | '4' => some 4 | '5' => some 5 | '6' => some 6 | '7' => some 7
  | '8' => some 8 | '9' => some 9 | 'a' => some 10 | 'b' => some 11
  | 'c' => some 12 | 'd' => some 13 | 'e' => some 14 | 'f' => some 15
  | 'A' => some 10 | 'B' => some 11 | 'C' => some 12 | 'D' => some 13
  | 'E' => some 14 | 'F' => some 15
  | _ => none
def toHex (n : Int) : Except String String :=
  if n < 0 || n > 255 then
    .error ("Byte.toHex: " ++ toString n ++ " is out of range 0-255")
  else
    .ok (byteToHex n)
def fromHex (s : String) : Except String Int :=
  match s.toList with
  | [hi, lo] =>
    match hexValue hi, hexValue lo with
    | some h, some l => .ok (h * 16 + l)
    | _, _ => .error ("Byte.fromHex: invalid hex '" ++ s ++ "'")
  | _ => .error ("Byte.fromHex: expected exactly 2 hex chars, got '" ++ s ++ "'")
end AverByte"#;
691
692pub(crate) fn pure_fns(ctx: &CodegenContext) -> Vec<&FnDef> {
693 ctx.modules
694 .iter()
695 .flat_map(|m| m.fn_defs.iter())
696 .chain(ctx.fn_defs.iter())
697 .filter(|fd| toplevel::is_pure_fn(fd))
698 .collect()
699}
700
701pub(crate) fn recursive_type_names(ctx: &CodegenContext) -> HashSet<String> {
702 ctx.modules
703 .iter()
704 .flat_map(|m| m.type_defs.iter())
705 .chain(ctx.type_defs.iter())
706 .filter(|td| toplevel::is_recursive_type_def(td))
707 .map(|td| toplevel::type_def_name(td).to_string())
708 .collect()
709}
710
711pub(crate) fn recursive_pure_fn_names(ctx: &CodegenContext) -> HashSet<String> {
712 let pure_names: HashSet<String> = pure_fns(ctx)
713 .into_iter()
714 .map(|fd| fd.name.clone())
715 .collect();
716 let mut callgraph_items = ctx.items.clone();
717 for module in &ctx.modules {
718 for fd in &module.fn_defs {
719 callgraph_items.push(TopLevel::FnDef(fd.clone()));
720 }
721 }
722 call_graph::find_recursive_fns(&callgraph_items)
723 .into_iter()
724 .filter(|name| pure_names.contains(name))
725 .collect()
726}
727
728fn verify_counter_key(vb: &crate::ast::VerifyBlock) -> String {
729 match &vb.kind {
730 VerifyKind::Cases => format!("fn:{}", vb.fn_name),
731 VerifyKind::Law(law) => format!("law:{}::{}", vb.fn_name, law.name),
732 }
733}
734
735fn lean_project_name(ctx: &CodegenContext) -> String {
736 ctx.items
737 .iter()
738 .find_map(|item| match item {
739 TopLevel::Module(m) => Some(m.name.clone()),
740 _ => None,
741 })
742 .unwrap_or_else(|| capitalize_first(&ctx.project_name))
743}
744
745pub(crate) fn find_type_def<'a>(ctx: &'a CodegenContext, type_name: &str) -> Option<&'a TypeDef> {
746 ctx.modules
747 .iter()
748 .flat_map(|m| m.type_defs.iter())
749 .chain(ctx.type_defs.iter())
750 .find(|td| toplevel::type_def_name(td) == type_name)
751}
752
753pub(super) fn bound_expr_to_lean(expr: &Spanned<Expr>) -> String {
754 match &expr.node {
755 Expr::Literal(crate::ast::Literal::Int(n)) => format!("{}", n),
756 Expr::Ident(name) => expr::aver_name_to_lean(name),
757 Expr::FnCall(f, args) => {
758 if let Some(dotted) = crate::codegen::common::expr_to_dotted_name(&f.node) {
759 if dotted == "List.len" && args.len() == 1 {
761 return format!("{}.length", bound_expr_to_lean(&args[0]));
762 }
763 let lean_args: Vec<String> = args.iter().map(bound_expr_to_lean).collect();
764 format!(
765 "({} {})",
766 expr::aver_name_to_lean(&dotted),
767 lean_args.join(" ")
768 )
769 } else {
770 "0".to_string()
771 }
772 }
773 Expr::Attr(obj, field) => format!(
774 "{}.{}",
775 bound_expr_to_lean(obj),
776 expr::aver_name_to_lean(field)
777 ),
778 _ => "0".to_string(),
779 }
780}
781
782pub(crate) fn sizeof_measure_param_indices(fd: &FnDef) -> Vec<usize> {
783 fd.params
784 .iter()
785 .enumerate()
786 .filter_map(|(idx, (_, type_name))| {
787 (!crate::codegen::recursion::detect::is_scalar_like_type(type_name)).then_some(idx)
788 })
789 .collect()
790}
791
792pub fn proof_mode_findings(ctx: &CodegenContext) -> Vec<ProofModeIssue> {
797 let (_plans, issues) = crate::codegen::recursion::analyze_plans(ctx);
798 issues
799}
800
801pub fn proof_mode_issues(ctx: &CodegenContext) -> Vec<String> {
802 proof_mode_findings(ctx)
803 .into_iter()
804 .map(|issue| issue.message)
805 .collect()
806}
807
/// Transpiles the project with the default verify mode
/// (`VerifyEmitMode::NativeDecide`). Thin wrapper over
/// `transpile_with_verify_mode`.
pub fn transpile(ctx: &CodegenContext) -> ProjectOutput {
    transpile_with_verify_mode(ctx, VerifyEmitMode::NativeDecide)
}
812
813pub fn transpile_for_proof_mode(
818 ctx: &CodegenContext,
819 verify_mode: VerifyEmitMode,
820) -> ProjectOutput {
821 let (plans, _issues) = crate::codegen::recursion::analyze_plans(ctx);
822 let recursive_names = recursive_pure_fn_names(ctx);
823 let recursive_types = recursive_type_names(ctx);
824
825 let mut sections = Vec::new();
826
827 for module in &ctx.modules {
829 for td in &module.type_defs {
830 sections.push(toplevel::emit_type_def(td));
831 if toplevel::is_recursive_type_def(td) {
832 sections.push(toplevel::emit_recursive_decidable_eq(
833 toplevel::type_def_name(td),
834 ));
835 if let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types) {
836 sections.push(measure);
837 }
838 }
839 sections.push(String::new());
840 }
841 }
842 for td in &ctx.type_defs {
843 sections.push(toplevel::emit_type_def(td));
844 if toplevel::is_recursive_type_def(td) {
845 sections.push(toplevel::emit_recursive_decidable_eq(
846 toplevel::type_def_name(td),
847 ));
848 if let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types) {
849 sections.push(measure);
850 }
851 }
852 sections.push(String::new());
853 }
854
855 emit_pure_functions_proof(ctx, &plans, &recursive_names, &mut sections);
856
857 emit_lifted_effectful_functions(ctx, &recursive_names, &mut sections);
863
864 for item in &ctx.items {
865 if let TopLevel::Decision(db) = item {
866 sections.push(toplevel::emit_decision(db));
867 sections.push(String::new());
868 }
869 }
870
871 let mut verify_case_counters: HashMap<String, usize> = HashMap::new();
872 for item in &ctx.items {
873 if let TopLevel::Verify(vb) = item {
874 let key = verify_counter_key(vb);
875 let start_idx = *verify_case_counters.get(&key).unwrap_or(&0);
876 let (emitted, next_idx) = toplevel::emit_verify_block(vb, ctx, verify_mode, start_idx);
877 verify_case_counters.insert(key, next_idx);
878 sections.push(emitted);
879 sections.push(String::new());
880 }
881 }
882
883 let lean_body = sections.join("\n");
884 let prelude = generate_prelude_for_body(&lean_body, false);
885 let lean_source = format!("{prelude}\n\n{lean_body}");
886 let project_name = lean_project_name(ctx);
887 let lakefile = generate_lakefile(&project_name);
888 let toolchain = generate_toolchain();
889 ProjectOutput {
890 files: vec![
891 ("lakefile.lean".to_string(), lakefile),
892 ("lean-toolchain".to_string(), toolchain),
893 (format!("{}.lean", project_name), lean_source),
894 ],
895 }
896}
897
898pub fn transpile_with_verify_mode(
904 ctx: &CodegenContext,
905 verify_mode: VerifyEmitMode,
906) -> ProjectOutput {
907 let mut sections = Vec::new();
908
909 let recursive_fns = call_graph::find_recursive_fns(&ctx.items);
911
912 for module in &ctx.modules {
914 for td in &module.type_defs {
915 sections.push(toplevel::emit_type_def(td));
916 if toplevel::is_recursive_type_def(td) {
918 sections.push(toplevel::emit_recursive_decidable_eq(
919 toplevel::type_def_name(td),
920 ));
921 }
922 sections.push(String::new());
923 }
924 }
925
926 for td in &ctx.type_defs {
928 sections.push(toplevel::emit_type_def(td));
929 if toplevel::is_recursive_type_def(td) {
931 sections.push(toplevel::emit_recursive_decidable_eq(
932 toplevel::type_def_name(td),
933 ));
934 }
935 sections.push(String::new());
936 }
937
938 emit_pure_functions(ctx, &recursive_fns, &mut sections);
941
942 emit_lifted_effectful_functions(ctx, &recursive_fns, &mut sections);
947
948 for item in &ctx.items {
950 if let TopLevel::Decision(db) = item {
951 sections.push(toplevel::emit_decision(db));
952 sections.push(String::new());
953 }
954 }
955
956 let mut verify_case_counters: HashMap<String, usize> = HashMap::new();
958 for item in &ctx.items {
959 if let TopLevel::Verify(vb) = item {
960 let key = verify_counter_key(vb);
961 let start_idx = *verify_case_counters.get(&key).unwrap_or(&0);
962 let (emitted, next_idx) = toplevel::emit_verify_block(vb, ctx, verify_mode, start_idx);
963 verify_case_counters.insert(key, next_idx);
964 sections.push(emitted);
965 sections.push(String::new());
966 }
967 }
968
969 let lean_body = sections.join("\n");
970 let prelude = generate_prelude_for_body(&lean_body, false);
971 let lean_source = format!("{prelude}\n\n{lean_body}");
972
973 let project_name = lean_project_name(ctx);
975 let lakefile = generate_lakefile(&project_name);
976 let toolchain = generate_toolchain();
977
978 ProjectOutput {
979 files: vec![
980 ("lakefile.lean".to_string(), lakefile),
981 ("lean-toolchain".to_string(), toolchain),
982 (format!("{}.lean", project_name), lean_source),
983 ],
984 }
985}
986
987fn emit_lifted_effectful_functions(
996 ctx: &CodegenContext,
997 recursive_fns: &HashSet<String>,
998 sections: &mut Vec<String>,
999) {
1000 use crate::types::checker::effect_classification::is_classified;
1001
1002 let reachable = crate::codegen::common::verify_reachable_fn_names(&ctx.items);
1008
1009 let mut helpers: std::collections::HashMap<String, Vec<String>> =
1014 std::collections::HashMap::new();
1015 for item in &ctx.items {
1016 let TopLevel::FnDef(fd) = item else { continue };
1017 if fd.effects.is_empty() || fd.name == "main" {
1018 continue;
1019 }
1020 if !fd.effects.iter().all(|e| is_classified(&e.node)) {
1021 continue;
1022 }
1023 if !reachable.contains(&fd.name) {
1024 continue;
1025 }
1026 helpers.insert(
1027 fd.name.clone(),
1028 fd.effects.iter().map(|e| e.node.clone()).collect(),
1029 );
1030 }
1031
1032 let mut lifted_fns: Vec<(String, crate::ast::FnDef)> = Vec::new();
1033 for item in &ctx.items {
1034 let TopLevel::FnDef(fd) = item else { continue };
1035 if fd.effects.is_empty() || fd.name == "main" {
1036 continue;
1037 }
1038 if !fd.effects.iter().all(|e| is_classified(&e.node)) {
1039 continue;
1040 }
1041 if !reachable.contains(&fd.name) {
1042 continue;
1043 }
1044 let Ok(Some(lifted)) =
1045 crate::types::checker::effect_lifting::lift_fn_def_with_helpers(fd, &helpers)
1046 else {
1047 continue;
1048 };
1049 lifted_fns.push((fd.name.clone(), lifted));
1050 }
1051
1052 let eligible_names: std::collections::HashSet<String> =
1060 lifted_fns.iter().map(|(n, _)| n.clone()).collect();
1061 let mut emitted: std::collections::HashSet<String> = std::collections::HashSet::new();
1062 let mut order: Vec<usize> = Vec::new();
1063 let mut remaining: Vec<usize> = (0..lifted_fns.len()).collect();
1064 while !remaining.is_empty() {
1065 let before = remaining.len();
1066 remaining.retain(|&idx| {
1067 let body_calls = collect_called_idents_in_body(&lifted_fns[idx].1.body);
1068 let ready = body_calls
1069 .iter()
1070 .all(|name| !eligible_names.contains(name) || emitted.contains(name));
1071 if ready {
1072 emitted.insert(lifted_fns[idx].0.clone());
1073 order.push(idx);
1074 false
1075 } else {
1076 true
1077 }
1078 });
1079 if remaining.len() == before {
1080 order.append(&mut remaining);
1084 }
1085 }
1086
1087 for idx in order {
1088 let (_, lifted) = &lifted_fns[idx];
1089 if let Some(code) = toplevel::emit_fn_def(lifted, recursive_fns, ctx) {
1090 sections.push(code);
1091 sections.push(String::new());
1092 }
1093 }
1094}
1095
1096fn collect_called_idents_in_body(body: &crate::ast::FnBody) -> std::collections::HashSet<String> {
1097 use crate::ast::{Expr, Spanned, Stmt};
1098 let mut out = std::collections::HashSet::new();
1099 fn walk(expr: &Spanned<Expr>, out: &mut std::collections::HashSet<String>) {
1100 match &expr.node {
1101 Expr::FnCall(callee, args) => {
1102 if let Expr::Ident(name) | Expr::Resolved { name, .. } = &callee.node {
1103 out.insert(name.clone());
1104 }
1105 walk(callee, out);
1106 for a in args {
1107 walk(a, out);
1108 }
1109 }
1110 Expr::BinOp(_, l, r) => {
1111 walk(l, out);
1112 walk(r, out);
1113 }
1114 Expr::Match { subject, arms } => {
1115 walk(subject, out);
1116 for arm in arms {
1117 walk(&arm.body, out);
1118 }
1119 }
1120 Expr::Attr(inner, _) | Expr::ErrorProp(inner) => walk(inner, out),
1121 Expr::Constructor(_, Some(inner)) => walk(inner, out),
1122 Expr::List(items) | Expr::Tuple(items) | Expr::IndependentProduct(items, _) => {
1123 for i in items {
1124 walk(i, out);
1125 }
1126 }
1127 Expr::MapLiteral(pairs) => {
1128 for (k, v) in pairs {
1129 walk(k, out);
1130 walk(v, out);
1131 }
1132 }
1133 Expr::RecordCreate { fields, .. } => {
1134 for (_, v) in fields {
1135 walk(v, out);
1136 }
1137 }
1138 Expr::RecordUpdate { base, updates, .. } => {
1139 walk(base, out);
1140 for (_, v) in updates {
1141 walk(v, out);
1142 }
1143 }
1144 Expr::InterpolatedStr(parts) => {
1145 for part in parts {
1146 if let crate::ast::StrPart::Parsed(inner) = part {
1147 walk(inner, out);
1148 }
1149 }
1150 }
1151 _ => {}
1152 }
1153 }
1154 for stmt in body.stmts() {
1155 match stmt {
1156 Stmt::Expr(e) => walk(e, &mut out),
1157 Stmt::Binding(_, _, e) => walk(e, &mut out),
1158 }
1159 }
1160 out
1161}
1162
1163fn emit_pure_functions(
1164 ctx: &CodegenContext,
1165 recursive_fns: &HashSet<String>,
1166 sections: &mut Vec<String>,
1167) {
1168 let all_fns: Vec<&FnDef> = ctx
1169 .modules
1170 .iter()
1171 .flat_map(|m| m.fn_defs.iter())
1172 .chain(ctx.fn_defs.iter())
1173 .filter(|fd| toplevel::is_pure_fn(fd))
1174 .collect();
1175 if all_fns.is_empty() {
1176 return;
1177 }
1178
1179 let components = call_graph::ordered_fn_components(&all_fns, &ctx.module_prefixes);
1180 for fns in components {
1181 if fns.is_empty() {
1182 continue;
1183 }
1184
1185 if fns.len() > 1 {
1187 sections.push(toplevel::emit_mutual_group(&fns, ctx));
1188 sections.push(String::new());
1189 continue;
1190 }
1191
1192 if let Some(code) = toplevel::emit_fn_def(fns[0], recursive_fns, ctx) {
1195 sections.push(code);
1196 sections.push(String::new());
1197 }
1198 }
1199}
1200
1201fn emit_pure_functions_proof(
1202 ctx: &CodegenContext,
1203 plans: &HashMap<String, RecursionPlan>,
1204 recursive_names: &HashSet<String>,
1205 sections: &mut Vec<String>,
1206) {
1207 let all_fns: Vec<&FnDef> = pure_fns(ctx);
1208 if all_fns.is_empty() {
1209 return;
1210 }
1211
1212 let components = call_graph::ordered_fn_components(&all_fns, &ctx.module_prefixes);
1213 for fns in components {
1214 if fns.is_empty() {
1215 continue;
1216 }
1217 if fns.len() > 1 {
1218 let all_supported = fns.iter().all(|fd| plans.contains_key(&fd.name));
1219 if all_supported {
1220 sections.push(toplevel::emit_mutual_group_proof(&fns, ctx, plans));
1221 } else {
1222 sections.push(toplevel::emit_mutual_group(&fns, ctx));
1223 }
1224 sections.push(String::new());
1225 continue;
1226 }
1227
1228 let fd = fns[0];
1229 let is_recursive = recursive_names.contains(&fd.name);
1230 let emitted = if is_recursive && !plans.contains_key(&fd.name) {
1231 toplevel::emit_fn_def(fd, recursive_names, ctx)
1232 } else {
1233 toplevel::emit_fn_def_proof(fd, plans.get(&fd.name).cloned(), ctx)
1234 };
1235 if let Some(code) = emitted {
1236 sections.push(code);
1237 sections.push(String::new());
1238 }
1239 }
1240}
1241
/// Test-only helper: builds a prelude with every optional helper section
/// included (empty body, `include_all_helpers = true`).
#[cfg(test)]
fn generate_prelude() -> String {
    generate_prelude_for_body("", true)
}
1246
1247fn generate_prelude_for_body(body: &str, include_all_helpers: bool) -> String {
1248 let trust_header = crate::types::checker::proof_trust_header::generate_commented("-- ");
1251 let mut parts = vec![
1252 LEAN_PRELUDE_HEADER.to_string(),
1253 trust_header,
1254 LEAN_PRELUDE_FLOAT_COE.to_string(),
1255 LEAN_PRELUDE_FLOAT_DEC_EQ.to_string(),
1256 LEAN_PRELUDE_EXCEPT_DEC_EQ.to_string(),
1257 LEAN_PRELUDE_EXCEPT_NS.to_string(),
1258 LEAN_PRELUDE_OPTION_TO_EXCEPT.to_string(),
1259 LEAN_PRELUDE_STRING_HADD.to_string(),
1260 LEAN_PRELUDE_BRANCH_PATH.to_string(),
1261 ];
1262
1263 let needs_header = needs_builtin_named_type(body, "Header")
1270 || needs_builtin_named_type(body, "HttpResponse")
1271 || needs_builtin_named_type(body, "HttpRequest");
1272 if include_all_helpers || needs_header {
1273 parts.push(LEAN_PRELUDE_HEADER_TYPE.to_string());
1274 }
1275 if include_all_helpers || needs_builtin_named_type(body, "HttpResponse") {
1276 parts.push(LEAN_PRELUDE_HTTP_RESPONSE_TYPE.to_string());
1277 }
1278 if include_all_helpers || needs_builtin_named_type(body, "HttpRequest") {
1279 parts.push(LEAN_PRELUDE_HTTP_REQUEST_TYPE.to_string());
1280 }
1281 if include_all_helpers || needs_builtin_named_type(body, "Tcp_Connection") {
1282 parts.push(LEAN_PRELUDE_TCP_CONNECTION_TYPE.to_string());
1283 }
1284
1285 if include_all_helpers || body.contains("averStringPosFuel") {
1286 parts.push(LEAN_PRELUDE_PROOF_FUEL.to_string());
1287 }
1288
1289 if include_all_helpers || body.contains("AverMeasure.") {
1290 parts.push(LEAN_PRELUDE_AVER_MEASURE.to_string());
1291 }
1292
1293 if include_all_helpers || body.contains("AverMap.") {
1294 parts.push(generate_map_prelude(body, include_all_helpers));
1295 }
1296
1297 parts.extend([
1298 LEAN_PRELUDE_AVER_LIST.to_string(),
1299 LEAN_PRELUDE_STRING_HELPERS.to_string(),
1300 LEAN_PRELUDE_NUMERIC_PARSE.to_string(),
1301 LEAN_PRELUDE_CHAR_BYTE.to_string(),
1302 ]);
1303
1304 parts.join("\n\n")
1305}
1306
1307fn needs_builtin_named_type(body: &str, type_name: &str) -> bool {
1308 references_named_type(body, type_name) && !declares_named_type(body, type_name)
1309}
1310
/// True when `body` contains `type_name` as a complete identifier token.
///
/// Tokens are maximal runs of `[A-Za-z0-9_.]`, so `Header` does not match
/// inside `HeaderType` or `x.Header` (the `.` is part of the token).
fn references_named_type(body: &str, type_name: &str) -> bool {
    body.split(|ch: char| !(ch.is_ascii_alphanumeric() || ch == '_' || ch == '.'))
        .any(|token| token == type_name)
}
1325
/// True when some line of `body` (ignoring leading whitespace) is exactly a
/// `structure <name> where` or `inductive <name> where` declaration header.
fn declares_named_type(body: &str, type_name: &str) -> bool {
    let declaration_headers = [
        format!("structure {} where", type_name),
        format!("inductive {} where", type_name),
    ];
    body.lines().any(|line| {
        let line = line.trim_start();
        declaration_headers.iter().any(|decl| line == decl)
    })
}
1333
1334fn generate_map_prelude(body: &str, include_all_helpers: bool) -> String {
1335 let mut parts = vec![AVER_MAP_PRELUDE_BASE.to_string()];
1336
1337 let needs_has_set_self = include_all_helpers || body.contains("AverMap.has_set_self");
1338 let needs_get_set_self = include_all_helpers || body.contains("AverMap.get_set_self");
1339 let needs_get_set_other = include_all_helpers
1340 || body.contains("AverMap.get_set_other")
1341 || body.contains("AverMap.has_set_other");
1342 let needs_has_set_other = include_all_helpers || body.contains("AverMap.has_set_other");
1343
1344 if needs_has_set_self {
1345 parts.push(AVER_MAP_PRELUDE_HAS_SET_SELF.to_string());
1346 }
1347 if needs_get_set_self {
1348 parts.push(AVER_MAP_PRELUDE_GET_SET_SELF.to_string());
1349 }
1350 if needs_get_set_other {
1351 parts.push(AVER_MAP_PRELUDE_GET_SET_OTHER.to_string());
1352 }
1353 if needs_has_set_other {
1354 parts.push(AVER_MAP_PRELUDE_HAS_SET_OTHER.to_string());
1355 }
1356
1357 parts.push(AVER_MAP_PRELUDE_END.to_string());
1358 parts.join("\n\n")
1359}
1360
/// Render the project's `lakefile.lean`. The Lake package name is the
/// lowercased project name; the library keeps the original casing.
fn generate_lakefile(project_name: &str) -> String {
    let package = project_name.to_lowercase();
    format!(
        r#"import Lake
open Lake DSL

package «{package}» where
  version := v!"0.1.0"

@[default_target]
lean_lib «{project_name}» where
  srcDir := "."
"#
    )
}
1377
/// Pinned Lean toolchain for generated projects; bump deliberately together
/// with prelude compatibility.
fn generate_toolchain() -> String {
    String::from("leanprover/lean4:v4.15.0\n")
}
1381
/// Uppercase the first character of `s` (Unicode-aware: a single char may
/// uppercase to several, e.g. `ß` → `SS`); empty input yields empty output.
fn capitalize_first(s: &str) -> String {
    let mut rest = s.chars();
    match rest.next() {
        Some(first) => format!("{}{}", first.to_uppercase(), rest.as_str()),
        None => String::new(),
    }
}
1389
1390#[cfg(test)]
1391mod tests {
    use super::{
        VerifyEmitMode, generate_prelude, proof_mode_issues, recurrence, transpile,
        transpile_for_proof_mode, transpile_with_verify_mode,
    };
    use crate::ast::{
        BinOp, Expr, FnBody, FnDef, Literal, MatchArm, Pattern, Spanned, Stmt, TailCallData,
        TopLevel, TypeDef, TypeVariant, VerifyBlock, VerifyGiven, VerifyGivenDomain, VerifyKind,
        VerifyLaw,
    };

    // Shorthand: wrap an expression in a span-less `Spanned`.
    fn sb(e: Expr) -> Spanned<Expr> {
        Spanned::bare(e)
    }
    // Shorthand: boxed variant of `sb` for child-expression slots.
    fn sbb(e: Expr) -> Box<Spanned<Expr>> {
        Box::new(Spanned::bare(e))
    }
    use crate::codegen::{CodegenContext, build_context};
    use crate::source::parse_source;
    use crate::tco;
    use crate::types::checker::run_type_check_full;
    use std::collections::{HashMap, HashSet};
    use std::sync::Arc as Rc;
1416
    // Minimal codegen context with no items. Project name "verify_mode" makes
    // the generated file `Verify_mode.lean`, which tests look up by name.
    fn empty_ctx() -> CodegenContext {
        CodegenContext {
            items: vec![],
            fn_sigs: HashMap::new(),
            memo_fns: HashSet::new(),
            memo_safe_types: HashSet::new(),
            type_defs: vec![],
            fn_defs: vec![],
            project_name: "verify_mode".to_string(),
            modules: vec![],
            module_prefixes: HashSet::new(),
            policy: None,
            emit_replay_runtime: false,
            runtime_policy_from_env: false,
            guest_entry: None,
            emit_self_host_support: false,
            extra_fn_defs: Vec::new(),
            mutual_tco_members: HashSet::new(),
        }
    }
1437
    // Parse, TCO-transform, and typecheck `source`, then build a codegen
    // context. Panics when the source does not parse or typecheck.
    fn ctx_from_source(source: &str, project_name: &str) -> CodegenContext {
        let mut items = parse_source(source).expect("source should parse");
        tco::transform_program(&mut items);
        let tc = run_type_check_full(&items, None);
        assert!(
            tc.errors.is_empty(),
            "source should typecheck without errors: {:?}",
            tc.errors
        );
        build_context(items, &tc, HashSet::new(), project_name.to_string(), vec![])
    }

    // Extract the generated Lean source file (the `.lean` file that is not
    // the lakefile) from a project output.
    fn generated_lean_file(out: &crate::codegen::ProjectOutput) -> &str {
        out.files
            .iter()
            .find_map(|(name, content)| {
                (name.ends_with(".lean") && name != "lakefile.lean").then_some(content.as_str())
            })
            .expect("expected generated Lean file")
    }
1458
    // Context with one `verify f` cases-block containing the case `1 => 1`.
    fn empty_ctx_with_verify_case() -> CodegenContext {
        let mut ctx = empty_ctx();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 1,
            cases: vec![(
                sb(Expr::Literal(Literal::Int(1))),
                sb(Expr::Literal(Literal::Int(1))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Cases,
            trace: false,
            cases_givens: vec![],
        }));
        ctx
    }

    // Two separate verify blocks targeting the same function `f`, used to
    // check that theorem numbering continues across blocks.
    fn empty_ctx_with_two_verify_blocks_same_fn() -> CodegenContext {
        let mut ctx = empty_ctx();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 1,
            cases: vec![(
                sb(Expr::Literal(Literal::Int(1))),
                sb(Expr::Literal(Literal::Int(1))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Cases,
            trace: false,
            cases_givens: vec![],
        }));
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 2,
            cases: vec![(
                sb(Expr::Literal(Literal::Int(2))),
                sb(Expr::Literal(Literal::Int(2))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Cases,
            trace: false,
            cases_givens: vec![],
        }));
        ctx
    }
1507
    // Context with an `add(a, b) = a + b` function and a commutativity law:
    // two givens (an int range 1..2 and an explicit list [2, 3]) and two
    // sampled cases, add(1,2)=add(2,1) and add(2,3)=add(3,2).
    fn empty_ctx_with_verify_law() -> CodegenContext {
        let mut ctx = empty_ctx();
        let add = FnDef {
            name: "add".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("a".to_string())),
                sbb(Expr::Ident("b".to_string())),
            )))),
            resolution: None,
        };
        // `add` must be registered both as a fn_def and as a top-level item.
        ctx.fn_defs.push(add.clone());
        ctx.items.push(TopLevel::FnDef(add));
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 1,
            cases: vec![
                (
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("add".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(1))),
                            sb(Expr::Literal(Literal::Int(2))),
                        ],
                    )),
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("add".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(2))),
                            sb(Expr::Literal(Literal::Int(1))),
                        ],
                    )),
                ),
                (
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("add".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(2))),
                            sb(Expr::Literal(Literal::Int(3))),
                        ],
                    )),
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("add".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(3))),
                            sb(Expr::Literal(Literal::Int(2))),
                        ],
                    )),
                ),
            ],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "commutative".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![
                            sb(Expr::Literal(Literal::Int(2))),
                            sb(Expr::Literal(Literal::Int(3))),
                        ]),
                    },
                ],
                when: None,
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Ident("b".to_string())),
                    ],
                )),
                rhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("b".to_string())),
                        sb(Expr::Ident("a".to_string())),
                    ],
                )),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        ctx
    }
1607
    // The prelude must rewrite Lean's float formatting into Aver's format.
    #[test]
    fn prelude_normalizes_float_string_format() {
        let prelude = generate_prelude();
        assert!(
            prelude.contains("private def normalizeFloatString (s : String) : String :="),
            "missing normalizeFloatString helper in prelude"
        );
        assert!(
            prelude.contains(
                "def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)"
            ),
            "String.fromFloat should normalize Lean float formatting"
        );
    }

    // Char.fromCode must reject out-of-range and surrogate code points.
    #[test]
    fn prelude_validates_char_from_code_unicode_bounds() {
        let prelude = generate_prelude();
        assert!(
            prelude.contains("if n < 0 || n > 1114111 then none"),
            "Char.fromCode should reject code points above Unicode max"
        );
        assert!(
            prelude.contains("else if n >= 55296 && n <= 57343 then none"),
            "Char.fromCode should reject surrogate code points"
        );
    }

    // The full prelude carries the AverMap set/get helper lemmas.
    #[test]
    fn prelude_includes_map_set_helper_lemmas() {
        let prelude = generate_prelude();
        assert!(
            prelude.contains("theorem has_set_self [DecidableEq α]"),
            "missing AverMap.has_set_self helper theorem"
        );
        assert!(
            prelude.contains("theorem get_set_self [DecidableEq α]"),
            "missing AverMap.get_set_self helper theorem"
        );
    }

    // The map prelude is pruned when the generated body never uses AverMap.
    #[test]
    fn lean_output_without_map_usage_omits_map_prelude() {
        let ctx = ctx_from_source(
            r#"
module NoMap
    intent = "Simple pure program without maps."

fn addOne(n: Int) -> Int
    n + 1

verify addOne
    addOne(1) => 2
"#,
            "nomap",
        );
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        assert!(
            !lean.contains("namespace AverMap"),
            "did not expect AverMap prelude in program without map usage:\n{}",
            lean
        );
    }
1673
    // Default transpile proves verify cases with `native_decide`.
    #[test]
    fn transpile_emits_native_decide_for_verify_by_default() {
        let out = transpile(&empty_ctx_with_verify_case());
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("example : 1 = 1 := by native_decide"));
    }

    // Sorry mode stubs out each verify proof with `sorry`.
    #[test]
    fn transpile_can_emit_sorry_for_verify_when_requested() {
        let out = transpile_with_verify_mode(&empty_ctx_with_verify_case(), VerifyEmitMode::Sorry);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("example : 1 = 1 := by sorry"));
    }

    // Skeleton mode names each verify case as a theorem ending in `sorry`.
    #[test]
    fn transpile_can_emit_theorem_skeletons_for_verify() {
        let out = transpile_with_verify_mode(
            &empty_ctx_with_verify_case(),
            VerifyEmitMode::TheoremSkeleton,
        );
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
        assert!(lean.contains(" sorry"));
    }

    // Theorem numbering continues across verify blocks for the same function.
    #[test]
    fn theorem_skeleton_numbering_is_global_per_function_across_verify_blocks() {
        let out = transpile_with_verify_mode(
            &empty_ctx_with_two_verify_blocks_same_fn(),
            VerifyEmitMode::TheoremSkeleton,
        );
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
        assert!(lean.contains("theorem f_verify_2 : 2 = 2 := by"));
    }

    // Laws become universally quantified named theorems plus per-sample
    // `native_decide` theorems; given-domains appear as comments.
    #[test]
    fn transpile_emits_named_theorems_for_verify_law() {
        let out = transpile(&empty_ctx_with_verify_law());
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("-- verify law add.commutative (2 cases)"));
        assert!(lean.contains("-- given a: Int = 1..2"));
        assert!(lean.contains("-- given b: Int = [2, 3]"));
        assert!(lean.contains(
            "theorem add_law_commutative : ∀ (a : Int) (b : Int), add a b = add b a := by"
        ));
        assert!(lean.contains(" intro a b"));
        assert!(lean.contains(" simp [add, Int.add_comm]"));
        assert!(lean.contains(
            "theorem add_law_commutative_sample_1 : add 1 2 = add 2 1 := by native_decide"
        ));
        assert!(lean.contains(
            "theorem add_law_commutative_sample_2 : add 2 3 = add 3 2 := by native_decide"
        ));
    }

    // The full prelude ships the int/string round-trip and split lemmas.
    #[test]
    fn generate_prelude_emits_int_roundtrip_theorem() {
        let lean = generate_prelude();
        assert!(lean.contains(
            "theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n"
        ));
        assert!(lean.contains("theorem String.intercalate_empty_chars (s : String) :"));
        assert!(lean.contains("def splitOnCharGo"));
        assert!(lean.contains("theorem split_single_char_append"));
        assert!(lean.contains("theorem split_intercalate_trailing_single_char"));
        assert!(lean.contains("namespace AverDigits"));
        assert!(lean.contains("theorem String.charAt_length_none (s : String)"));
        assert!(lean.contains("theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->"));
    }
1764
    // A `when` clause becomes an extra hypothesis on the law theorem and on
    // every sampled case theorem.
    #[test]
    fn transpile_emits_guarded_theorems_for_verify_law_when_clause() {
        let ctx = ctx_from_source(
            r#"
module GuardedLaw
    intent =
        "verify law with precondition"

fn pickGreater(a: Int, b: Int) -> Int
    match a > b
        true -> a
        false -> b

verify pickGreater law ordered
    given a: Int = [1, 2]
    given b: Int = [1, 2]
    when a > b
    pickGreater(a, b) => a
"#,
            "guarded_law",
        );
        let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- when (a > b)"));
        assert!(lean.contains(
            "theorem pickGreater_law_ordered : ∀ (a : Int) (b : Int), a = 1 ∨ a = 2 -> b = 1 ∨ b = 2 -> (a > b) = true -> pickGreater a b = a := by"
        ));
        assert!(lean.contains(
            "theorem pickGreater_law_ordered_sample_1 : (1 > 1) = true -> pickGreater 1 1 = 1 := by"
        ));
        assert!(lean.contains(
            "theorem pickGreater_law_ordered_sample_4 : (2 > 2) = true -> pickGreater 2 2 = 2 := by"
        ));
    }

    // A law named exactly `<fn>Spec` with the canonical shape gets the
    // `<fn>_eq_<fn>Spec` theorem family instead of a plain law name.
    #[test]
    fn transpile_uses_spec_theorem_names_for_declared_spec_laws() {
        let ctx = ctx_from_source(
            r#"
module SpecDemo
    intent =
        "spec demo"

fn absVal(x: Int) -> Int
    match x < 0
        true -> 0 - x
        false -> x

fn absValSpec(x: Int) -> Int
    match x < 0
        true -> 0 - x
        false -> x

verify absVal law absValSpec
    given x: Int = [-2, -1, 0, 1, 2]
    absVal(x) => absValSpec(x)
"#,
            "spec_demo",
        );
        let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- verify law absVal.spec absValSpec (5 cases)"));
        assert!(
            lean.contains(
                "theorem absVal_eq_absValSpec : ∀ (x : Int), absVal x = absValSpec x := by"
            )
        );
        assert!(lean.contains("theorem absVal_eq_absValSpec_checked_domain :"));
        assert!(lean.contains("theorem absVal_eq_absValSpec_sample_1 :"));
        assert!(!lean.contains("theorem absVal_law_absValSpec :"));
    }

    // A spec-named law whose call shape is not canonical (extra args) keeps
    // the regular `<fn>_law_<name>` naming.
    #[test]
    fn transpile_keeps_noncanonical_spec_laws_as_regular_law_names() {
        let ctx = ctx_from_source(
            r#"
module SpecLawShape
    intent =
        "shape probe"

fn foo(x: Int) -> Int
    x + 1

fn fooSpec(seed: Int, x: Int) -> Int
    x + seed

verify foo law fooSpec
    given x: Int = [1, 2]
    foo(x) => fooSpec(1, x)
"#,
            "spec_law_shape",
        );
        let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- verify law foo.fooSpec (2 cases)"));
        assert!(lean.contains("theorem foo_law_fooSpec : ∀ (x : Int), foo x = fooSpec 1 x := by"));
        assert!(!lean.contains("theorem foo_eq_fooSpec :"));
    }

    // Auto mode proves a linear-integer canonical spec law with `omega`.
    #[test]
    fn transpile_auto_proves_linear_int_canonical_spec_law_in_auto_mode() {
        let ctx = ctx_from_source(
            r#"
module SpecGap
    intent =
        "nontrivial canonical spec law"

fn inc(x: Int) -> Int
    x + 1

fn incSpec(x: Int) -> Int
    x + 2 - 1

verify inc law incSpec
    given x: Int = [0, 1, 2]
    inc(x) => incSpec(x)
"#,
            "spec_gap",
        );
        let out = transpile(&ctx);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- verify law inc.spec incSpec (3 cases)"));
        assert!(lean.contains("theorem inc_eq_incSpec : ∀ (x : Int), inc x = incSpec x := by"));
        assert!(lean.contains("change (x + 1) = ((x + 2) - 1)"));
        assert!(lean.contains("omega"));
        assert!(!lean.contains(
            "-- universal theorem inc_eq_incSpec omitted: sampled law shape is not auto-proved yet"
        ));
        assert!(lean.contains("theorem inc_eq_incSpec_checked_domain :"));
    }
1899
1900 #[test]
1901 fn transpile_auto_proves_guarded_canonical_spec_law_in_auto_mode() {
1902 let ctx = ctx_from_source(
1903 r#"
1904module GuardedSpecGap
1905 intent =
1906 "guarded canonical spec law"
1907
1908fn clampNonNegative(x: Int) -> Int
1909 match x < 0
1910 true -> 0
1911 false -> x
1912
1913fn clampNonNegativeSpec(x: Int) -> Int
1914 match x < 0
1915 true -> 0
1916 false -> x
1917
1918verify clampNonNegative law clampNonNegativeSpec
1919 given x: Int = [-2, -1, 0, 1, 2]
1920 when x >= 0
1921 clampNonNegative(x) => clampNonNegativeSpec(x)
1922"#,
1923 "guarded_spec_gap",
1924 );
1925 let out = transpile(&ctx);
1926 let lean = generated_lean_file(&out);
1927
1928 assert!(lean.contains("-- when (x >= 0)"));
1929 assert!(lean.contains(
1930 "theorem clampNonNegative_eq_clampNonNegativeSpec : ∀ (x : Int), x = (-2) ∨ x = (-1) ∨ x = 0 ∨ x = 1 ∨ x = 2 -> (x >= 0) = true -> clampNonNegative x = clampNonNegativeSpec x := by"
1931 ));
1932 assert!(lean.contains("intro x h_x h_when"));
1933 assert!(lean.contains("simpa [clampNonNegative, clampNonNegativeSpec]"));
1934 assert!(!lean.contains(
1935 "-- universal theorem clampNonNegative_eq_clampNonNegativeSpec omitted: sampled law shape is not auto-proved yet"
1936 ));
1937 assert!(!lean.contains("cases h_x"));
1938 }
1939
1940 #[test]
1941 fn transpile_auto_proves_simp_normalized_canonical_spec_law_in_auto_mode() {
1942 let ctx = ctx_from_source(
1943 r#"
1944module SpecGapNonlinear
1945 intent =
1946 "nonlinear canonical spec law"
1947
1948fn square(x: Int) -> Int
1949 x * x
1950
1951fn squareSpec(x: Int) -> Int
1952 x * x + 0
1953
1954verify square law squareSpec
1955 given x: Int = [0, 1, 2]
1956 square(x) => squareSpec(x)
1957"#,
1958 "spec_gap_nonlinear",
1959 );
1960 let out = transpile(&ctx);
1961 let lean = generated_lean_file(&out);
1962
1963 assert!(lean.contains("-- verify law square.spec squareSpec (3 cases)"));
1964 assert!(
1965 lean.contains(
1966 "theorem square_eq_squareSpec : ∀ (x : Int), square x = squareSpec x := by"
1967 )
1968 );
1969 assert!(lean.contains("simp [square, squareSpec]"));
1970 assert!(!lean.contains(
1971 "-- universal theorem square_eq_squareSpec omitted: sampled law shape is not auto-proved yet"
1972 ));
1973 assert!(lean.contains("theorem square_eq_squareSpec_checked_domain :"));
1974 assert!(lean.contains("theorem square_eq_squareSpec_sample_1 :"));
1975 }
1976
1977 #[test]
1978 fn transpile_auto_proves_reflexive_law_with_rfl() {
1979 let mut ctx = empty_ctx();
1980 ctx.items.push(TopLevel::Verify(VerifyBlock {
1981 fn_name: "idLaw".to_string(),
1982 line: 1,
1983 cases: vec![(
1984 sb(Expr::Literal(Literal::Int(1))),
1985 sb(Expr::Literal(Literal::Int(1))),
1986 )],
1987 case_spans: vec![],
1988 case_givens: vec![],
1989 kind: VerifyKind::Law(Box::new(VerifyLaw {
1990 name: "reflexive".to_string(),
1991 givens: vec![VerifyGiven {
1992 name: "x".to_string(),
1993 type_name: "Int".to_string(),
1994 domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
1995 }],
1996 when: None,
1997 lhs: sb(Expr::Ident("x".to_string())),
1998 rhs: sb(Expr::Ident("x".to_string())),
1999 sample_guards: vec![],
2000 })),
2001 trace: false,
2002 cases_givens: vec![],
2003 }));
2004 let out = transpile(&ctx);
2005 let lean = out
2006 .files
2007 .iter()
2008 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2009 .expect("expected generated Lean file");
2010 assert!(lean.contains("theorem idLaw_law_reflexive : ∀ (x : Int), x = x := by"));
2011 assert!(lean.contains(" intro x"));
2012 assert!(lean.contains(" rfl"));
2013 }
2014
    // A right-identity law on an Int `add` wrapper (`add(a, 0) => a`) should be
    // auto-proved universally via `simp [add]`.
    // NOTE(review): `empty_ctx_with_verify_law()` presumably seeds the context
    // with an `add` FnDef that the law references — confirm against the helper.
    #[test]
    fn transpile_auto_proves_identity_law_for_int_add_wrapper() {
        let mut ctx = empty_ctx_with_verify_law();
        // AST for: verify add law identityZero
        //   given a: Int = [0, 1]
        //   add(a, 0) => a
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 10,
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(1))),
                        sb(Expr::Literal(Literal::Int(0))),
                    ],
                )),
                sb(Expr::Literal(Literal::Int(1))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "identityZero".to_string(),
                givens: vec![VerifyGiven {
                    name: "a".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![
                        sb(Expr::Literal(Literal::Int(0))),
                        sb(Expr::Literal(Literal::Int(1))),
                    ]),
                }],
                when: None,
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Literal(Literal::Int(0))),
                    ],
                )),
                rhs: sb(Expr::Ident("a".to_string())),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem add_law_identityZero : ∀ (a : Int), add a 0 = a := by"));
        assert!(lean.contains(" intro a"));
        assert!(lean.contains(" simp [add]"));
    }
2067
    // An associativity law on the `add` wrapper should be auto-proved
    // universally via `simp [add, Int.add_assoc]`.
    #[test]
    fn transpile_auto_proves_associative_law_for_int_add_wrapper() {
        let mut ctx = empty_ctx_with_verify_law();
        // AST for: verify add law associative
        //   given a = [1], b = [2], c = [3]
        //   add(add(a, b), c) => add(a, add(b, c))
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 20,
            // Single sampled case: add(add(1, 2), 3) => add(1, add(2, 3)).
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::FnCall(
                            sbb(Expr::Ident("add".to_string())),
                            vec![
                                sb(Expr::Literal(Literal::Int(1))),
                                sb(Expr::Literal(Literal::Int(2))),
                            ],
                        )),
                        sb(Expr::Literal(Literal::Int(3))),
                    ],
                )),
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(1))),
                        sb(Expr::FnCall(
                            sbb(Expr::Ident("add".to_string())),
                            vec![
                                sb(Expr::Literal(Literal::Int(2))),
                                sb(Expr::Literal(Literal::Int(3))),
                            ],
                        )),
                    ],
                )),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "associative".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            1,
                        )))]),
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            2,
                        )))]),
                    },
                    VerifyGiven {
                        name: "c".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            3,
                        )))]),
                    },
                ],
                when: None,
                // lhs: add(add(a, b), c)
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::FnCall(
                            sbb(Expr::Ident("add".to_string())),
                            vec![
                                sb(Expr::Ident("a".to_string())),
                                sb(Expr::Ident("b".to_string())),
                            ],
                        )),
                        sb(Expr::Ident("c".to_string())),
                    ],
                )),
                // rhs: add(a, add(b, c))
                rhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::FnCall(
                            sbb(Expr::Ident("add".to_string())),
                            vec![
                                sb(Expr::Ident("b".to_string())),
                                sb(Expr::Ident("c".to_string())),
                            ],
                        )),
                    ],
                )),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains(
            "theorem add_law_associative : ∀ (a : Int) (b : Int) (c : Int), add (add a b) c = add a (add b c) := by"
        ));
        assert!(lean.contains(" intro a b c"));
        assert!(lean.contains(" simp [add, Int.add_assoc]"));
    }
2173
    // Two laws about subtraction on a user-defined `sub(a, b) = a - b`:
    //   rightIdentity:   sub(a, 0) => a              — closed by `simp [sub]`
    //   antiCommutative: sub(a, b) => 0 - sub(b, a)  — closed via Int.neg_sub
    #[test]
    fn transpile_auto_proves_sub_laws() {
        let mut ctx = empty_ctx();
        // fn sub(a: Int, b: Int) -> Int = a - b
        let sub = FnDef {
            name: "sub".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Sub,
                sbb(Expr::Ident("a".to_string())),
                sbb(Expr::Ident("b".to_string())),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(sub.clone());
        ctx.items.push(TopLevel::FnDef(sub));

        // Law 1: rightIdentity — sub(a, 0) => a, sampled at a = 2.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "sub".to_string(),
            line: 10,
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(2))),
                        sb(Expr::Literal(Literal::Int(0))),
                    ],
                )),
                sb(Expr::Literal(Literal::Int(2))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "rightIdentity".to_string(),
                givens: vec![VerifyGiven {
                    name: "a".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
                }],
                when: None,
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Literal(Literal::Int(0))),
                    ],
                )),
                rhs: sb(Expr::Ident("a".to_string())),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        // Law 2: antiCommutative — sub(a, b) => 0 - sub(b, a), sampled at (2, 1).
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "sub".to_string(),
            line: 20,
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(2))),
                        sb(Expr::Literal(Literal::Int(1))),
                    ],
                )),
                sb(Expr::BinOp(
                    BinOp::Sub,
                    sbb(Expr::Literal(Literal::Int(0))),
                    sbb(Expr::FnCall(
                        sbb(Expr::Ident("sub".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(1))),
                            sb(Expr::Literal(Literal::Int(2))),
                        ],
                    )),
                )),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "antiCommutative".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            2,
                        )))]),
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            1,
                        )))]),
                    },
                ],
                when: None,
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Ident("b".to_string())),
                    ],
                )),
                // rhs encodes negation as 0 - sub(b, a).
                rhs: sb(Expr::BinOp(
                    BinOp::Sub,
                    sbb(Expr::Literal(Literal::Int(0))),
                    sbb(Expr::FnCall(
                        sbb(Expr::Ident("sub".to_string())),
                        vec![
                            sb(Expr::Ident("b".to_string())),
                            sb(Expr::Ident("a".to_string())),
                        ],
                    )),
                )),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem sub_law_rightIdentity : ∀ (a : Int), sub a 0 = a := by"));
        assert!(lean.contains(" simp [sub]"));
        assert!(lean.contains(
            "theorem sub_law_antiCommutative : ∀ (a : Int) (b : Int), sub a b = (-sub b a) := by"
        ));
        assert!(lean.contains(" simpa [sub] using (Int.neg_sub b a).symm"));
    }
2314
    // A law equating a unary wrapper with a call to a binary wrapper
    // (`addOne(n) => add(n, 1)`) should be auto-proved by unfolding both
    // definitions: `simp [addOne, add]`.
    #[test]
    fn transpile_auto_proves_unary_wrapper_equivalence_law() {
        let mut ctx = empty_ctx();
        // fn add(a: Int, b: Int) -> Int = a + b
        let add = FnDef {
            name: "add".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("a".to_string())),
                sbb(Expr::Ident("b".to_string())),
            )))),
            resolution: None,
        };
        // fn addOne(n: Int) -> Int = n + 1
        let add_one = FnDef {
            name: "addOne".to_string(),
            line: 2,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("n".to_string())),
                sbb(Expr::Literal(Literal::Int(1))),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(add.clone());
        ctx.fn_defs.push(add_one.clone());
        ctx.items.push(TopLevel::FnDef(add));
        ctx.items.push(TopLevel::FnDef(add_one));
        // verify addOne law identityViaAdd
        //   given n: Int = [2]
        //   addOne(n) => add(n, 1)
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "addOne".to_string(),
            line: 3,
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![sb(Expr::Literal(Literal::Int(2)))],
                )),
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(2))),
                        sb(Expr::Literal(Literal::Int(1))),
                    ],
                )),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "identityViaAdd".to_string(),
                givens: vec![VerifyGiven {
                    name: "n".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
                }],
                when: None,
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![sb(Expr::Ident("n".to_string()))],
                )),
                rhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("n".to_string())),
                        sb(Expr::Literal(Literal::Int(1))),
                    ],
                )),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(
            lean.contains(
                "theorem addOne_law_identityViaAdd : ∀ (n : Int), addOne n = add n 1 := by"
            )
        );
        assert!(lean.contains(" simp [addOne, add]"));
    }
2408
    // Laws stated directly over Map builtins (no user function in between):
    //   setHasKey: Map.has(Map.set(m, k, v), k) => true
    //   setGetKey: Map.get(Map.set(m, k, v), k) => Option.Some(v)
    // Both should be discharged via the AverMap lemmas has_set_self /
    // get_set_self rather than by sampling.
    #[test]
    fn transpile_auto_proves_direct_map_set_laws() {
        let mut ctx = empty_ctx();

        // Closure builders for the Map/Option builtin call ASTs used below.
        let map_set = |m: Spanned<Expr>, k: Spanned<Expr>, v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "set".to_string(),
                )),
                vec![m, k, v],
            ))
        };
        let map_has = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "has".to_string(),
                )),
                vec![m, k],
            ))
        };
        let map_get = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "get".to_string(),
                )),
                vec![m, k],
            ))
        };
        let some = |v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Option".to_string())),
                    "Some".to_string(),
                )),
                vec![v],
            ))
        };
        let map_empty = || {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "empty".to_string(),
                )),
                vec![],
            ))
        };

        // Law 1: setHasKey — has(set(m, k, v), k) => true.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "map".to_string(),
            line: 1,
            cases: vec![(
                map_has(
                    map_set(
                        sb(Expr::Ident("m".to_string())),
                        sb(Expr::Ident("k".to_string())),
                        sb(Expr::Ident("v".to_string())),
                    ),
                    sb(Expr::Ident("k".to_string())),
                ),
                sb(Expr::Literal(Literal::Bool(true))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "setHasKey".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "m".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                    },
                    VerifyGiven {
                        name: "k".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
                            "a".to_string(),
                        )))]),
                    },
                    VerifyGiven {
                        name: "v".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            1,
                        )))]),
                    },
                ],
                when: None,
                lhs: map_has(
                    map_set(
                        sb(Expr::Ident("m".to_string())),
                        sb(Expr::Ident("k".to_string())),
                        sb(Expr::Ident("v".to_string())),
                    ),
                    sb(Expr::Ident("k".to_string())),
                ),
                rhs: sb(Expr::Literal(Literal::Bool(true))),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        // Law 2: setGetKey — get(set(m, k, v), k) => Some(v).
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "map".to_string(),
            line: 2,
            cases: vec![(
                map_get(
                    map_set(
                        sb(Expr::Ident("m".to_string())),
                        sb(Expr::Ident("k".to_string())),
                        sb(Expr::Ident("v".to_string())),
                    ),
                    sb(Expr::Ident("k".to_string())),
                ),
                some(sb(Expr::Ident("v".to_string()))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "setGetKey".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "m".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                    },
                    VerifyGiven {
                        name: "k".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
                            "a".to_string(),
                        )))]),
                    },
                    VerifyGiven {
                        name: "v".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            1,
                        )))]),
                    },
                ],
                when: None,
                lhs: map_get(
                    map_set(
                        sb(Expr::Ident("m".to_string())),
                        sb(Expr::Ident("k".to_string())),
                        sb(Expr::Ident("v".to_string())),
                    ),
                    sb(Expr::Ident("k".to_string())),
                ),
                rhs: some(sb(Expr::Ident("v".to_string()))),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("simpa using AverMap.has_set_self m k v"));
        assert!(lean.contains("simpa using AverMap.get_set_self m k v"));
    }
2578
    // An involution law on a structurally recursive function over a sum type
    // (`mirror(mirror(t)) => t`) should be proved by `induction t with`, with a
    // `simp` branch per constructor — not left as a sampled-only fallback.
    #[test]
    fn transpile_auto_proves_direct_recursive_sum_law_by_structural_induction() {
        let ctx = ctx_from_source(
            r#"
module Mirror
  intent =
    "direct recursive sum induction probe"

type Tree
    Leaf(Int)
    Node(Tree, Tree)

fn mirror(t: Tree) -> Tree
    match t
        Tree.Leaf(v) -> Tree.Leaf(v)
        Tree.Node(left, right) -> Tree.Node(mirror(right), mirror(left))

verify mirror law involutive
    given t: Tree = [Tree.Leaf(1), Tree.Node(Tree.Leaf(1), Tree.Leaf(2))]
    mirror(mirror(t)) => t
"#,
            "mirror",
        );
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        assert!(
            lean.contains(
                "theorem mirror_law_involutive : ∀ (t : Tree), mirror (mirror t) = t := by"
            )
        );
        assert!(lean.contains("  induction t with"));
        assert!(lean.contains("  | leaf f0 => simp [mirror]"));
        assert!(lean.contains("  | node f0 f1 ih0 ih1 => simp_all [mirror]"));
        // The "omitted" marker would mean induction was not attempted.
        assert!(!lean.contains(
            "-- universal theorem mirror_law_involutive omitted: sampled law shape is not auto-proved yet"
        ));
    }
2617
    // Laws over a user function (`incCount`) whose body branches on a Map
    // lookup. The auto-prover is expected to case-split on the lookup result
    // (`cases h : AverMap.get ...`) and then close each branch with the
    // AverMap set/get lemmas:
    //   keyPresent:            has(incCount(counts, word), word) => true
    //   existingKeyIncrements: get(incCount(counts, "a"), "a")
    //                            => Some(addOne(withDefault(get(counts, "a"), 0)))
    #[test]
    fn transpile_auto_proves_map_update_laws() {
        let mut ctx = empty_ctx();

        // Closure builders for the Map/Option builtin call ASTs used below.
        let map_get = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "get".to_string(),
                )),
                vec![m, k],
            ))
        };
        let map_set = |m: Spanned<Expr>, k: Spanned<Expr>, v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "set".to_string(),
                )),
                vec![m, k, v],
            ))
        };
        let map_has = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "has".to_string(),
                )),
                vec![m, k],
            ))
        };
        let option_some = |v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Option".to_string())),
                    "Some".to_string(),
                )),
                vec![v],
            ))
        };
        let option_with_default = |opt: Spanned<Expr>, def: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Option".to_string())),
                    "withDefault".to_string(),
                )),
                vec![opt, def],
            ))
        };
        let map_empty = || {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "empty".to_string(),
                )),
                vec![],
            ))
        };

        // fn addOne(n: Int) -> Int = n + 1  (used by the expected rhs).
        let add_one = FnDef {
            name: "addOne".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("n".to_string())),
                sbb(Expr::Literal(Literal::Int(1))),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(add_one.clone());
        ctx.items.push(TopLevel::FnDef(add_one));

        // fn incCount(counts, word): look up `word` and either bump the count
        // (Some branch) or insert 1 (None branch).
        let inc_count = FnDef {
            name: "incCount".to_string(),
            line: 2,
            params: vec![
                ("counts".to_string(), "Map<String, Int>".to_string()),
                ("word".to_string(), "String".to_string()),
            ],
            return_type: "Map<String, Int>".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::Block(vec![
                Stmt::Binding(
                    "current".to_string(),
                    None,
                    map_get(
                        sb(Expr::Ident("counts".to_string())),
                        sb(Expr::Ident("word".to_string())),
                    ),
                ),
                Stmt::Expr(sb(Expr::Match {
                    subject: sbb(Expr::Ident("current".to_string())),
                    arms: vec![
                        MatchArm {
                            pattern: Pattern::Constructor(
                                "Option.Some".to_string(),
                                vec!["n".to_string()],
                            ),
                            body: Box::new(map_set(
                                sb(Expr::Ident("counts".to_string())),
                                sb(Expr::Ident("word".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("n".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            )),
                        },
                        MatchArm {
                            pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
                            body: Box::new(map_set(
                                sb(Expr::Ident("counts".to_string())),
                                sb(Expr::Ident("word".to_string())),
                                sb(Expr::Literal(Literal::Int(1))),
                            )),
                        },
                    ],
                })),
            ])),
            resolution: None,
        };
        ctx.fn_defs.push(inc_count.clone());
        ctx.items.push(TopLevel::FnDef(inc_count));

        // Law 1: keyPresent — has(incCount(counts, word), word) => true.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "incCount".to_string(),
            line: 10,
            cases: vec![(
                map_has(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Ident("word".to_string())),
                        ],
                    )),
                    sb(Expr::Ident("word".to_string())),
                ),
                sb(Expr::Literal(Literal::Bool(true))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "keyPresent".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "counts".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                    },
                    VerifyGiven {
                        name: "word".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
                            "a".to_string(),
                        )))]),
                    },
                ],
                when: None,
                lhs: map_has(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Ident("word".to_string())),
                        ],
                    )),
                    sb(Expr::Ident("word".to_string())),
                ),
                rhs: sb(Expr::Literal(Literal::Bool(true))),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        // Law 2: existingKeyIncrements — get after incCount at a literal key
        // equals Some(addOne(withDefault(previous value, 0))).
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "incCount".to_string(),
            line: 20,
            cases: vec![(
                map_get(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ],
                    )),
                    sb(Expr::Literal(Literal::Str("a".to_string()))),
                ),
                option_some(sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![option_with_default(
                        map_get(
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ),
                        sb(Expr::Literal(Literal::Int(0))),
                    )],
                ))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "existingKeyIncrements".to_string(),
                givens: vec![VerifyGiven {
                    name: "counts".to_string(),
                    type_name: "Map<String, Int>".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                }],
                when: None,
                lhs: map_get(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ],
                    )),
                    sb(Expr::Literal(Literal::Str("a".to_string()))),
                ),
                rhs: option_some(sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![option_with_default(
                        map_get(
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ),
                        sb(Expr::Literal(Literal::Int(0))),
                    )],
                ))),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(
            lean.contains("cases h : AverMap.get counts word <;> simp [AverMap.has_set_self]"),
            "expected keyPresent auto-proof with has_set_self"
        );
        assert!(
            lean.contains("cases h : AverMap.get counts \"a\" <;> simp [AverMap.get_set_self, addOne, incCount]"),
            "expected existingKeyIncrements auto-proof with get_set_self"
        );
    }
2875
    // Negative integer literals passed as call arguments must be emitted with
    // parentheses in sampled law theorems — `add (-2) (-1)`, not `add -2 -1`,
    // which Lean would parse as subtraction.
    #[test]
    fn transpile_parenthesizes_negative_int_call_args_in_law_samples() {
        let mut ctx = empty_ctx();
        // fn add(a: Int, b: Int) -> Int = a + b
        let add = FnDef {
            name: "add".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("a".to_string())),
                sbb(Expr::Ident("b".to_string())),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(add.clone());
        ctx.items.push(TopLevel::FnDef(add));
        // verify add law commutative with negative samples a = -2, b = -1.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 1,
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(-2))),
                        sb(Expr::Literal(Literal::Int(-1))),
                    ],
                )),
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(-1))),
                        sb(Expr::Literal(Literal::Int(-2))),
                    ],
                )),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "commutative".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            -2,
                        )))]),
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            -1,
                        )))]),
                    },
                ],
                when: None,
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Ident("b".to_string())),
                    ],
                )),
                rhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("b".to_string())),
                        sb(Expr::Ident("a".to_string())),
                    ],
                )),
                sample_guards: vec![],
            })),
            trace: false,
            cases_givens: vec![],
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains(
            "theorem add_law_commutative_sample_1 : add (-2) (-1) = add (-1) (-2) := by native_decide"
        ));
    }
2968
2969 #[test]
2970 fn verify_law_numbering_is_scoped_per_law_name() {
2971 let mut ctx = empty_ctx();
2972 let f = FnDef {
2973 name: "f".to_string(),
2974 line: 1,
2975 params: vec![("x".to_string(), "Int".to_string())],
2976 return_type: "Int".to_string(),
2977 effects: vec![],
2978 desc: None,
2979 body: Rc::new(FnBody::from_expr(sb(Expr::Ident("x".to_string())))),
2980 resolution: None,
2981 };
2982 ctx.fn_defs.push(f.clone());
2983 ctx.items.push(TopLevel::FnDef(f));
2984 ctx.items.push(TopLevel::Verify(VerifyBlock {
2985 fn_name: "f".to_string(),
2986 line: 1,
2987 cases: vec![(
2988 sb(Expr::Literal(Literal::Int(1))),
2989 sb(Expr::Literal(Literal::Int(1))),
2990 )],
2991 case_spans: vec![],
2992 case_givens: vec![],
2993 kind: VerifyKind::Cases,
2994 trace: false,
2995 cases_givens: vec![],
2996 }));
2997 ctx.items.push(TopLevel::Verify(VerifyBlock {
2998 fn_name: "f".to_string(),
2999 line: 2,
3000 cases: vec![(
3001 sb(Expr::Literal(Literal::Int(2))),
3002 sb(Expr::Literal(Literal::Int(2))),
3003 )],
3004 case_spans: vec![],
3005 case_givens: vec![],
3006 kind: VerifyKind::Law(Box::new(VerifyLaw {
3007 name: "identity".to_string(),
3008 givens: vec![VerifyGiven {
3009 name: "x".to_string(),
3010 type_name: "Int".to_string(),
3011 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
3012 }],
3013 when: None,
3014 lhs: sb(Expr::Ident("x".to_string())),
3015 rhs: sb(Expr::Ident("x".to_string())),
3016 sample_guards: vec![],
3017 })),
3018 trace: false,
3019 cases_givens: vec![],
3020 }));
3021 let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
3022 let lean = out
3023 .files
3024 .iter()
3025 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3026 .expect("expected generated Lean file");
3027 assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
3028 assert!(lean.contains("theorem f_law_identity : ∀ (x : Int), x = x := by"));
3029 assert!(lean.contains("theorem f_law_identity_sample_1 : 2 = 2 := by"));
3030 assert!(!lean.contains("theorem f_law_identity_sample_2 : 2 = 2 := by"));
3031 }
3032
    // A self-recursive function that counts an Int parameter down to a literal
    // base case must pass the proof-mode recursion check and be lowered to a
    // fuel-based Lean definition seeded with `natAbs n + 1`.
    #[test]
    fn proof_mode_accepts_single_int_countdown_recursion() {
        let mut ctx = empty_ctx();
        // fn down(n: Int) -> Int = match n { 0 -> 0, _ -> down(n - 1) }
        let down = FnDef {
            name: "down".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: sbb(Expr::Literal(Literal::Int(0))),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        // Tail-recursive step: down(n - 1).
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "down".to_string(),
                            vec![sb(Expr::BinOp(
                                BinOp::Sub,
                                sbb(Expr::Ident("n".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            ))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(down.clone()));
        ctx.fn_defs.push(down);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected Int countdown recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        // The wrapper must delegate to the fuel variant with enough fuel.
        assert!(lean.contains("def down__fuel"));
        assert!(lean.contains("def down (n : Int) : Int :="));
        assert!(lean.contains("down__fuel ((Int.natAbs n) + 1) n"));
    }
3085
    // Same countdown acceptance as above, but the decreasing Int parameter is
    // the second parameter — the recursion analysis must not assume the fuel
    // variable is the first argument.
    #[test]
    fn proof_mode_accepts_single_int_countdown_on_nonfirst_param() {
        let mut ctx = empty_ctx();
        // fn repeatLike(char: String, n: Int) -> List<String> =
        //   match n <= 0 { true -> [], false -> repeatLike(char, n - 1) }
        let repeat_like = FnDef {
            name: "repeatLike".to_string(),
            line: 1,
            params: vec![
                ("char".to_string(), "String".to_string()),
                ("n".to_string(), "Int".to_string()),
            ],
            return_type: "List<String>".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::BinOp(
                    BinOp::Lte,
                    sbb(Expr::Ident("n".to_string())),
                    sbb(Expr::Literal(Literal::Int(0))),
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        body: sbb(Expr::List(vec![])),
                    },
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(false)),
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "repeatLike".to_string(),
                            vec![
                                sb(Expr::Ident("char".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Sub,
                                    sbb(Expr::Ident("n".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(repeat_like.clone()));
        ctx.fn_defs.push(repeat_like);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected non-first Int countdown recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("def repeatLike__fuel"));
        assert!(lean.contains("def repeatLike (char : String) (n : Int) : List String :="));
        // Fuel is derived from `n` even though it is the second parameter.
        assert!(lean.contains("repeatLike__fuel ((Int.natAbs n) + 1) char n"));
    }
3148
    // A recursion that ADDS to a negative Int until it is non-negative
    // (angle normalization) is also terminating; proof mode must accept it
    // and emit a fuel variant seeded from `natAbs angle + 1`.
    #[test]
    fn proof_mode_accepts_negative_guarded_int_ascent() {
        let mut ctx = empty_ctx();
        // fn normalize(angle: Int) -> Int =
        //   match angle < 0 { true -> normalize(angle + 360), false -> angle }
        let normalize = FnDef {
            name: "normalize".to_string(),
            line: 1,
            params: vec![("angle".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::BinOp(
                    BinOp::Lt,
                    sbb(Expr::Ident("angle".to_string())),
                    sbb(Expr::Literal(Literal::Int(0))),
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        // Ascending step: normalize(angle + 360).
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "normalize".to_string(),
                            vec![sb(Expr::BinOp(
                                BinOp::Add,
                                sbb(Expr::Ident("angle".to_string())),
                                sbb(Expr::Literal(Literal::Int(360))),
                            ))],
                        )))),
                    },
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(false)),
                        body: sbb(Expr::Ident("angle".to_string())),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(normalize.clone()));
        ctx.fn_defs.push(normalize);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected negative-guarded Int ascent recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("def normalize__fuel"));
        assert!(lean.contains("normalize__fuel ((Int.natAbs angle) + 1) angle"));
    }
3204
3205 #[test]
3206 fn proof_mode_accepts_single_list_structural_recursion() {
3207 let mut ctx = empty_ctx();
3208 let len = FnDef {
3209 name: "len".to_string(),
3210 line: 1,
3211 params: vec![("xs".to_string(), "List<Int>".to_string())],
3212 return_type: "Int".to_string(),
3213 effects: vec![],
3214 desc: None,
3215 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
3216 subject: sbb(Expr::Ident("xs".to_string())),
3217 arms: vec![
3218 MatchArm {
3219 pattern: Pattern::EmptyList,
3220 body: sbb(Expr::Literal(Literal::Int(0))),
3221 },
3222 MatchArm {
3223 pattern: Pattern::Cons("h".to_string(), "t".to_string()),
3224 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
3225 "len".to_string(),
3226 vec![sb(Expr::Ident("t".to_string()))],
3227 )))),
3228 },
3229 ],
3230 }))),
3231 resolution: None,
3232 };
3233 ctx.items.push(TopLevel::FnDef(len.clone()));
3234 ctx.fn_defs.push(len);
3235
3236 let issues = proof_mode_issues(&ctx);
3237 assert!(
3238 issues.is_empty(),
3239 "expected List structural recursion to be accepted, got: {:?}",
3240 issues
3241 );
3242 }
3243
    #[test]
    fn proof_mode_accepts_single_list_structural_recursion_on_nonfirst_param() {
        let mut ctx = empty_ctx();
        // lenFrom(count, xs): accumulator-style length where the structurally
        // decreasing List argument is the *second* parameter.
        let len_from = FnDef {
            name: "lenFrom".to_string(),
            line: 1,
            params: vec![
                ("count".to_string(), "Int".to_string()),
                ("xs".to_string(), "List<Int>".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("xs".to_string())),
                arms: vec![
                    MatchArm {
                        // [] -> count
                        pattern: Pattern::EmptyList,
                        body: sbb(Expr::Ident("count".to_string())),
                    },
                    MatchArm {
                        // h :: t -> lenFrom(count + 1, t)
                        pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "lenFrom".to_string(),
                            vec![
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("count".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                                sb(Expr::Ident("t".to_string())),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(len_from.clone()));
        ctx.fn_defs.push(len_from);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected non-first List structural recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // Structural recursion compiles to a total def with a length-based
        // termination measure, not a `partial def`.
        assert!(lean.contains("termination_by xs.length"));
        assert!(!lean.contains("partial def lenFrom"));
    }
3297
    #[test]
    fn proof_mode_accepts_single_string_pos_advance_recursion() {
        let mut ctx = empty_ctx();
        // skipWs(s, pos): recursion advances an Int cursor over a String and
        // stops when String.charAt yields None — the String+position shape.
        let skip_ws = FnDef {
            name: "skipWs".to_string(),
            line: 1,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                // match String.charAt(s, pos)
                subject: sbb(Expr::FnCall(
                    sbb(Expr::Attr(
                        sbb(Expr::Ident("String".to_string())),
                        "charAt".to_string(),
                    )),
                    vec![
                        sb(Expr::Ident("s".to_string())),
                        sb(Expr::Ident("pos".to_string())),
                    ],
                )),
                arms: vec![
                    MatchArm {
                        // None -> pos (cursor ran off the end of the string)
                        pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
                        body: sbb(Expr::Ident("pos".to_string())),
                    },
                    MatchArm {
                        // _ -> skipWs(s, pos + 1)
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "skipWs".to_string(),
                            vec![
                                sb(Expr::Ident("s".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("pos".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(skip_ws.clone()));
        ctx.fn_defs.push(skip_ws);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected String+pos recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // The recursion is fuelized instead of being emitted as partial.
        assert!(lean.contains("def skipWs__fuel"));
        assert!(!lean.contains("partial def skipWs"));
    }
3360
    #[test]
    fn proof_mode_accepts_mutual_int_countdown_recursion() {
        let mut ctx = empty_ctx();
        // even/odd: each function bottoms out at 0 and otherwise calls the
        // *other* one with n - 1 — the mutual Int countdown shape.
        let even = FnDef {
            name: "even".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Bool".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        // 0 -> true
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: sbb(Expr::Literal(Literal::Bool(true))),
                    },
                    MatchArm {
                        // _ -> odd(n - 1)
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "odd".to_string(),
                            vec![sb(Expr::BinOp(
                                BinOp::Sub,
                                sbb(Expr::Ident("n".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            ))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        let odd = FnDef {
            name: "odd".to_string(),
            line: 2,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Bool".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        // 0 -> false
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: sbb(Expr::Literal(Literal::Bool(false))),
                    },
                    MatchArm {
                        // _ -> even(n - 1)
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "even".to_string(),
                            vec![sb(Expr::BinOp(
                                BinOp::Sub,
                                sbb(Expr::Ident("n".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            ))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(even.clone()));
        ctx.items.push(TopLevel::FnDef(odd.clone()));
        ctx.fn_defs.push(even);
        ctx.fn_defs.push(odd);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual Int countdown recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // Both members of the cycle get fuelized workers; the `even` wrapper
        // seeds the fuel from |n| + 1.
        assert!(lean.contains("def even__fuel"));
        assert!(lean.contains("def odd__fuel"));
        assert!(lean.contains("def even (n : Int) : Bool :="));
        assert!(lean.contains("even__fuel ((Int.natAbs n) + 1) n"));
    }
3441
    #[test]
    fn proof_mode_accepts_mutual_string_pos_recursion_with_ranked_same_edges() {
        let mut ctx = empty_ctx();
        // f and g scan a String with an Int cursor and stop once pos >= 3.
        // f -> g keeps pos unchanged (a "same" edge resolved by ranking);
        // g -> f advances pos by 1, so each round trip makes progress.
        let f = FnDef {
            name: "f".to_string(),
            line: 1,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::BinOp(
                    BinOp::Gte,
                    sbb(Expr::Ident("pos".to_string())),
                    sbb(Expr::Literal(Literal::Int(3))),
                )),
                arms: vec![
                    MatchArm {
                        // pos >= 3 -> pos (terminate)
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        body: sbb(Expr::Ident("pos".to_string())),
                    },
                    MatchArm {
                        // _ -> g(s, pos)  (same-rank edge)
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "g".to_string(),
                            vec![
                                sb(Expr::Ident("s".to_string())),
                                sb(Expr::Ident("pos".to_string())),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        let g = FnDef {
            name: "g".to_string(),
            line: 2,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::BinOp(
                    BinOp::Gte,
                    sbb(Expr::Ident("pos".to_string())),
                    sbb(Expr::Literal(Literal::Int(3))),
                )),
                arms: vec![
                    MatchArm {
                        // pos >= 3 -> pos (terminate)
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        body: sbb(Expr::Ident("pos".to_string())),
                    },
                    MatchArm {
                        // _ -> f(s, pos + 1)  (strictly advancing edge)
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "f".to_string(),
                            vec![
                                sb(Expr::Ident("s".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("pos".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            ],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(f.clone()));
        ctx.items.push(TopLevel::FnDef(g.clone()));
        ctx.fn_defs.push(f);
        ctx.fn_defs.push(g);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual String+pos recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // Both functions are fuelized; nothing is left partial.
        assert!(lean.contains("def f__fuel"));
        assert!(lean.contains("def g__fuel"));
        assert!(!lean.contains("partial def f"));
    }
3537
    #[test]
    fn proof_mode_accepts_mutual_ranked_sizeof_recursion() {
        let mut ctx = empty_ctx();
        // f forwards its whole list to g (same-size edge, resolved by rank);
        // g pattern-matches and calls back into f with the strictly smaller
        // tail, so the sizeOf measure decreases around the cycle.
        let f = FnDef {
            name: "f".to_string(),
            line: 1,
            params: vec![("xs".to_string(), "List<Int>".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::TailCall(Box::new(
                TailCallData::new(
                    "g".to_string(),
                    vec![
                        sb(Expr::Literal(Literal::Str("acc".to_string()))),
                        sb(Expr::Ident("xs".to_string())),
                    ],
                ),
            ))))),
            resolution: None,
        };
        let g = FnDef {
            name: "g".to_string(),
            line: 2,
            params: vec![
                ("acc".to_string(), "String".to_string()),
                ("xs".to_string(), "List<Int>".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("xs".to_string())),
                arms: vec![
                    MatchArm {
                        // [] -> 0
                        pattern: Pattern::EmptyList,
                        body: sbb(Expr::Literal(Literal::Int(0))),
                    },
                    MatchArm {
                        // h :: t -> f(t)
                        pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "f".to_string(),
                            vec![sb(Expr::Ident("t".to_string()))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(f.clone()));
        ctx.items.push(TopLevel::FnDef(g.clone()));
        ctx.fn_defs.push(f);
        ctx.fn_defs.push(g);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual ranked-sizeOf recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // Emitted inside a `mutual` block with fuelized workers, not partial.
        assert!(lean.contains("mutual"));
        assert!(lean.contains("def f__fuel"));
        assert!(lean.contains("def g__fuel"));
        assert!(!lean.contains("partial def f"));
        assert!(!lean.contains("partial def g"));
    }
3607
3608 #[test]
3609 fn proof_mode_rejects_recursive_pure_functions() {
3610 let mut ctx = empty_ctx();
3611 let recursive_fn = FnDef {
3612 name: "loop".to_string(),
3613 line: 1,
3614 params: vec![("n".to_string(), "Int".to_string())],
3615 return_type: "Int".to_string(),
3616 effects: vec![],
3617 desc: None,
3618 body: Rc::new(FnBody::from_expr(sb(Expr::FnCall(
3619 sbb(Expr::Ident("loop".to_string())),
3620 vec![sb(Expr::Ident("n".to_string()))],
3621 )))),
3622 resolution: None,
3623 };
3624 ctx.items.push(TopLevel::FnDef(recursive_fn.clone()));
3625 ctx.fn_defs.push(recursive_fn);
3626
3627 let issues = proof_mode_issues(&ctx);
3628 assert!(
3629 issues.iter().any(|i| i.contains("outside proof subset")),
3630 "expected recursive function blocker, got: {:?}",
3631 issues
3632 );
3633 }
3634
3635 #[test]
3636 fn proof_mode_allows_recursive_types() {
3637 let mut ctx = empty_ctx();
3638 let recursive_type = TypeDef::Sum {
3639 name: "Node".to_string(),
3640 variants: vec![TypeVariant {
3641 name: "Cons".to_string(),
3642 fields: vec!["Node".to_string()],
3643 }],
3644 line: 1,
3645 };
3646 ctx.items.push(TopLevel::TypeDef(recursive_type.clone()));
3647 ctx.type_defs.push(recursive_type);
3648
3649 let issues = proof_mode_issues(&ctx);
3650 assert!(
3651 issues
3652 .iter()
3653 .all(|i| !i.contains("recursive types require unsafe DecidableEq shim")),
3654 "did not expect recursive type blocker, got: {:?}",
3655 issues
3656 );
3657 }
3658
3659 #[test]
3660 fn law_auto_example_exports_real_proof_artifacts() {
3661 let ctx = ctx_from_source(
3662 include_str!("../../../examples/formal/law_auto.av"),
3663 "law_auto",
3664 );
3665 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3666 let lean = generated_lean_file(&out);
3667
3668 assert!(lean.contains("theorem add_law_commutative :"));
3669 assert!(lean.contains("theorem id'_law_reflexive : ∀ (x : Int), x = x := by"));
3670 assert!(lean.contains("theorem incCount_law_keyPresent :"));
3671 assert!(lean.contains("AverMap.has_set_self"));
3672 assert!(lean.contains("theorem add_law_commutative_sample_1 :"));
3673 assert!(lean.contains(":= by native_decide"));
3674 }
3675
3676 #[test]
3677 fn json_example_stays_inside_proof_subset() {
3678 let ctx = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
3679 let issues = proof_mode_issues(&ctx);
3680 assert!(
3681 issues.is_empty(),
3682 "expected json example to stay inside proof subset, got: {:?}",
3683 issues
3684 );
3685 }
3686
3687 #[test]
3688 fn json_example_uses_total_defs_and_domain_guarded_laws_in_proof_mode() {
3689 let ctx = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
3690 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3691 let lean = generated_lean_file(&out);
3692
3693 assert!(!lean.contains("partial def"));
3694 assert!(lean.contains("def skipWs__fuel"));
3695 assert!(lean.contains("def parseValue__fuel"));
3696 assert!(lean.contains("def toString' (j : Json) : String :="));
3697 assert!(
3698 lean.contains(
3699 "def averMeasureJsonEntries_String (items : List (String × Json)) : Nat :="
3700 )
3701 );
3702 assert!(lean.contains(
3703 "| .jsonObject x0 => (averMeasureJsonEntries_String (AverMap.entries x0)) + 1"
3704 ));
3705 assert!(lean.contains("-- when jsonRoundtripSafe j"));
3706 assert!(!lean.contains("-- hint: verify law '"));
3707 assert!(!lean.contains("private theorem toString'_law_parseRoundtrip_aux"));
3708 assert!(
3709 lean.contains(
3710 "theorem toString'_law_parseRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
3711 )
3712 );
3713 assert!(lean.contains(
3714 "jsonRoundtripSafe j = true -> fromString (toString' j) = Except.ok j := by"
3715 ));
3716 assert!(
3717 lean.contains("theorem finishFloat_law_fromCanonicalFloat : ∀ (f : Float), f = 3.5 ∨")
3718 );
3719 assert!(lean.contains("theorem finishInt_law_fromCanonicalInt_checked_domain :"));
3720 assert!(lean.contains(
3721 "theorem toString'_law_parseValueRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
3722 ));
3723 assert!(lean.contains("theorem toString'_law_parseRoundtrip_sample_1 :"));
3724 assert!(lean.contains(
3725 "example : fromString \"null\" = Except.ok Json.jsonNull := by native_decide"
3726 ));
3727 }
3728
    #[test]
    fn transpile_injects_builtin_network_types_and_vector_get_support() {
        // Source touching each builtin network record type (Header,
        // HttpResponse, HttpRequest, Tcp.Connection) plus Vector.get, so the
        // transpiler must inject the corresponding Lean structures.
        let ctx = ctx_from_source(
            r#"
fn firstOrMissing(xs: Vector<String>) -> Result<String, String>
  Option.toResult(Vector.get(xs, 0), "missing")

fn defaultHeader() -> Header
  Header(name = "Content-Type", value = "application/json")

fn mkResponse(body: String) -> HttpResponse
  HttpResponse(status = 200, body = body, headers = [defaultHeader()])

fn requestPath(req: HttpRequest) -> String
  req.path

fn connPort(conn: Tcp.Connection) -> Int
  conn.port
"#,
            "network_helpers",
        );
        // Plain transpile (not proof mode): builtin structures are emitted.
        let out = transpile(&ctx);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("structure Header where"));
        assert!(lean.contains("structure HttpResponse where"));
        assert!(lean.contains("structure HttpRequest where"));
        assert!(lean.contains("structure Tcp_Connection where"));
        assert!(lean.contains("port : Int"));
    }
3759
3760 #[test]
3761 fn law_auto_example_has_no_sorry_in_proof_mode() {
3762 let ctx = ctx_from_source(
3763 include_str!("../../../examples/formal/law_auto.av"),
3764 "law_auto",
3765 );
3766 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3767 let lean = generated_lean_file(&out);
3768 assert!(
3769 !lean.contains("sorry"),
3770 "expected law_auto proof export to avoid sorry, got:\n{}",
3771 lean
3772 );
3773 }
3774
    #[test]
    fn map_example_has_no_sorry_in_proof_mode() {
        // NOTE(review): the test name claims "has_no_sorry", but the assertion
        // below expects `sorry` to be PRESENT in the export. One of the two is
        // stale — confirm intent and either rename the test or flip the
        // assertion.
        let ctx = ctx_from_source(include_str!("../../../examples/data/map.av"), "map");
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected map example to stay inside proof subset, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("theorem incCount_law_trackedCountStepsByOne :"));
        // See NOTE above: asserts that a sorry placeholder remains.
        assert!(lean.contains("sorry"));
        assert!(lean.contains("theorem countWords_law_presenceMatchesContains_sample_1 :"));
        assert!(lean.contains("theorem countWords_law_trackedWordCount_sample_1 :"));
        // Map lemmas from the prelude are actually referenced by the proofs.
        assert!(lean.contains("AverMap.has_set_self"));
        assert!(lean.contains("AverMap.get_set_self"));
    }
3796
3797 #[test]
3798 fn spec_laws_example_has_no_sorry_in_proof_mode() {
3799 let ctx = ctx_from_source(
3800 include_str!("../../../examples/formal/spec_laws.av"),
3801 "spec_laws",
3802 );
3803 let issues = proof_mode_issues(&ctx);
3804 assert!(
3805 issues.is_empty(),
3806 "expected spec_laws example to stay inside proof subset, got: {:?}",
3807 issues
3808 );
3809
3810 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3811 let lean = generated_lean_file(&out);
3812 assert!(
3813 !lean.contains("sorry"),
3814 "expected spec_laws proof export to avoid sorry, got:\n{}",
3815 lean
3816 );
3817 assert!(lean.contains("theorem absVal_eq_absValSpec :"));
3818 assert!(lean.contains("theorem clampNonNegative_eq_clampNonNegativeSpec :"));
3819 }
3820
    #[test]
    fn rle_example_exports_sampled_roundtrip_laws_without_sorry() {
        // NOTE(review): the name says "without_sorry" yet the first assertion
        // requires `sorry` to be present. Presumably the name refers only to
        // the *sampled* laws (proved by native_decide below) while universal
        // roundtrip theorems still carry sorry — confirm and consider renaming.
        let ctx = ctx_from_source(include_str!("../../../examples/data/rle.av"), "rle");
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        assert!(
            lean.contains("sorry"),
            "expected rle proof export to contain sorry for unproved universal theorems"
        );
        // Sampled instances are discharged concretely via native_decide.
        assert!(lean.contains(
            "theorem encode_law_roundtrip_sample_1 : decode (encode []) = [] := by native_decide"
        ));
        assert!(lean.contains(
            "theorem encodeString_law_string_roundtrip_sample_1 : decodeString (encodeString \"\") = \"\" := by native_decide"
        ));
    }
3838
3839 #[test]
3840 fn fibonacci_example_uses_fuelized_int_countdown_in_proof_mode() {
3841 let ctx = ctx_from_source(
3842 include_str!("../../../examples/data/fibonacci.av"),
3843 "fibonacci",
3844 );
3845 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3846 let lean = generated_lean_file(&out);
3847
3848 assert!(lean.contains("def fibTR__fuel"));
3849 assert!(lean.contains("def fibTR (n : Int) (a : Int) (b : Int) : Int :="));
3850 assert!(lean.contains("fibTR__fuel ((Int.natAbs n) + 1) n a b"));
3851 assert!(!lean.contains("partial def fibTR"));
3852 }
3853
3854 #[test]
3855 fn fibonacci_example_stays_inside_proof_subset() {
3856 let ctx = ctx_from_source(
3857 include_str!("../../../examples/data/fibonacci.av"),
3858 "fibonacci",
3859 );
3860 let issues = proof_mode_issues(&ctx);
3861 assert!(
3862 issues.is_empty(),
3863 "expected fibonacci example to stay inside proof subset, got: {:?}",
3864 issues
3865 );
3866 }
3867
3868 #[test]
3869 fn fibonacci_example_matches_general_linear_recurrence_shapes() {
3870 let ctx = ctx_from_source(
3871 include_str!("../../../examples/data/fibonacci.av"),
3872 "fibonacci",
3873 );
3874 let fib = ctx.fn_defs.iter().find(|fd| fd.name == "fib").unwrap();
3875 let fib_tr = ctx.fn_defs.iter().find(|fd| fd.name == "fibTR").unwrap();
3876 let fib_spec = ctx.fn_defs.iter().find(|fd| fd.name == "fibSpec").unwrap();
3877
3878 assert!(recurrence::detect_tailrec_int_linear_pair_wrapper(fib).is_some());
3879 assert!(recurrence::detect_tailrec_int_linear_pair_worker(fib_tr).is_some());
3880 assert!(recurrence::detect_second_order_int_linear_recurrence(fib_spec).is_some());
3881 }
3882
3883 #[test]
3884 fn fibonacci_example_auto_proves_general_linear_recurrence_spec_law() {
3885 let ctx = ctx_from_source(
3886 include_str!("../../../examples/data/fibonacci.av"),
3887 "fibonacci",
3888 );
3889 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3890 let lean = generated_lean_file(&out);
3891
3892 assert!(lean.contains("private def fibSpec__nat : Nat -> Int"));
3893 assert!(!lean.contains("partial def fibSpec"));
3894 assert!(lean.contains("private theorem fib_eq_fibSpec__worker_nat_shift"));
3895 assert!(lean.contains("private theorem fib_eq_fibSpec__helper_nat"));
3896 assert!(lean.contains("private theorem fib_eq_fibSpec__helper_seed"));
3897 assert!(lean.contains("theorem fib_eq_fibSpec : ∀ (n : Int), fib n = fibSpec n := by"));
3898 assert!(!lean.contains(
3899 "-- universal theorem fib_eq_fibSpec omitted: sampled law shape is not auto-proved yet"
3900 ));
3901 }
3902
    #[test]
    fn pell_like_example_auto_proves_same_general_shape() {
        // A Pell-number variant with the same wrapper/worker/spec layout as
        // fibonacci: the generic linear-recurrence prover should handle any
        // affine pair-state update, not just fib's.
        let ctx = ctx_from_source(
            r#"
module Pell
  intent =
    "linear recurrence probe"

fn pellTR(n: Int, a: Int, b: Int) -> Int
  match n
    0 -> a
    _ -> pellTR(n - 1, b, a + 2 * b)

fn pell(n: Int) -> Int
  match n < 0
    true -> 0
    false -> pellTR(n, 0, 1)

fn pellSpec(n: Int) -> Int
  match n < 0
    true -> 0
    false -> match n
      0 -> 0
      1 -> 1
      _ -> pellSpec(n - 2) + 2 * pellSpec(n - 1)

verify pell law pellSpec
  given n: Int = [0, 1, 2, 3]
  pell(n) => pellSpec(n)
"#,
            "pell",
        );
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected pell example to stay inside proof subset, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // The spec is lowered to a Nat-indexed worker and the universal law is
        // proved via the shifted-worker lemma rather than omitted.
        assert!(lean.contains("private def pellSpec__nat : Nat -> Int"));
        assert!(lean.contains("private theorem pell_eq_pellSpec__worker_nat_shift"));
        assert!(lean.contains("theorem pell_eq_pellSpec : ∀ (n : Int), pell n = pellSpec n := by"));
        assert!(!lean.contains(
            "-- universal theorem pell_eq_pellSpec omitted: sampled law shape is not auto-proved yet"
        ));
    }
3951
    #[test]
    fn nonlinear_pair_state_recurrence_is_not_auto_proved_as_linear_shape() {
        // weirdTR updates its pair state with a *product* (a * b), so the
        // affine linear-recurrence prover must reject it: only the sampled
        // obligations get proved, and the universal law keeps a sorry.
        let ctx = ctx_from_source(
            r#"
module WeirdRec
  intent =
    "reject nonlinear pair-state recurrence from linear recurrence prover"

fn weirdTR(n: Int, a: Int, b: Int) -> Int
  match n
    0 -> a
    _ -> weirdTR(n - 1, b, a * b)

fn weird(n: Int) -> Int
  match n < 0
    true -> 0
    false -> weirdTR(n, 0, 1)

fn weirdSpec(n: Int) -> Int
  match n < 0
    true -> 0
    false -> match n
      0 -> 0
      1 -> 1
      _ -> weirdSpec(n - 1) * weirdSpec(n - 2)

verify weird law weirdSpec
  given n: Int = [0, 1, 2, 3]
  weird(n) => weirdSpec(n)
"#,
            "weirdrec",
        );
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        // Universal theorem falls back to sorry; no shifted-worker lemma is
        // generated, but the sampled instance is still checked concretely.
        assert!(lean.contains("sorry"));
        assert!(!lean.contains("private theorem weird_eq_weirdSpec__worker_nat_shift"));
        assert!(lean.contains("theorem weird_eq_weirdSpec_sample_1 :"));
    }
3992
3993 #[test]
3994 fn date_example_stays_inside_proof_subset() {
3995 let ctx = ctx_from_source(include_str!("../../../examples/data/date.av"), "date");
3996 let issues = proof_mode_issues(&ctx);
3997 assert!(
3998 issues.is_empty(),
3999 "expected date example to stay inside proof subset, got: {:?}",
4000 issues
4001 );
4002
4003 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4004 let lean = generated_lean_file(&out);
4005 assert!(!lean.contains("partial def"));
4006 assert!(lean.contains("def parseIntSlice (s : String) (from' : Int) (to : Int) : Int :="));
4007 }
4008
4009 #[test]
4010 fn temperature_example_stays_inside_proof_subset() {
4011 let ctx = ctx_from_source(
4012 include_str!("../../../examples/core/temperature.av"),
4013 "temperature",
4014 );
4015 let issues = proof_mode_issues(&ctx);
4016 assert!(
4017 issues.is_empty(),
4018 "expected temperature example to stay inside proof subset, got: {:?}",
4019 issues
4020 );
4021
4022 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4023 let lean = generated_lean_file(&out);
4024 assert!(!lean.contains("partial def"));
4025 assert!(
4026 lean.contains("example : celsiusToFahr 0.0 = 32.0 := by native_decide"),
4027 "expected verify examples to survive proof export, got:\n{}",
4028 lean
4029 );
4030 }
4031
4032 #[test]
4033 fn quicksort_example_stays_inside_proof_subset() {
4034 let ctx = ctx_from_source(
4035 include_str!("../../../examples/data/quicksort.av"),
4036 "quicksort",
4037 );
4038 let issues = proof_mode_issues(&ctx);
4039 assert!(
4040 issues.is_empty(),
4041 "expected quicksort example to stay inside proof subset, got: {:?}",
4042 issues
4043 );
4044
4045 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4046 let lean = generated_lean_file(&out);
4047 assert!(lean.contains("def isOrderedFrom"));
4048 assert!(!lean.contains("partial def isOrderedFrom"));
4049 assert!(lean.contains("termination_by xs.length"));
4050 }
4051
4052 #[test]
4053 fn grok_s_language_example_uses_total_ranked_sizeof_mutual_recursion() {
4054 let ctx = ctx_from_source(
4055 include_str!("../../../examples/core/grok_s_language.av"),
4056 "grok_s_language",
4057 );
4058 let issues = proof_mode_issues(&ctx);
4059 assert!(
4060 issues.is_empty(),
4061 "expected grok_s_language example to stay inside proof subset, got: {:?}",
4062 issues
4063 );
4064
4065 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4066 let lean = generated_lean_file(&out);
4067 assert!(lean.contains("mutual"));
4068 assert!(lean.contains("def eval__fuel"));
4069 assert!(lean.contains("def parseListItems__fuel"));
4070 assert!(!lean.contains("partial def eval"));
4071 assert!(!lean.contains("termination_by (sizeOf e,"));
4072 assert!(lean.contains("-- when validSymbolNames e"));
4073 assert!(!lean.contains("private theorem toString'_law_parseRoundtrip_aux"));
4074 assert!(lean.contains(
4075 "theorem toString'_law_parseRoundtrip : ∀ (e : Sexpr), e = Sexpr.atomNum 42 ∨"
4076 ));
4077 assert!(
4078 lean.contains("validSymbolNames e = true -> parse (toString' e) = Except.ok e := by")
4079 );
4080 assert!(lean.contains("theorem toString'_law_parseSexprRoundtrip :"));
4081 assert!(lean.contains("theorem toString'_law_parseRoundtrip_sample_1 :"));
4082 }
4083
4084 #[test]
4085 fn lambda_example_keeps_only_eval_outside_proof_subset() {
4086 let ctx = ctx_from_source(include_str!("../../../examples/core/lambda.av"), "lambda");
4087 let issues = proof_mode_issues(&ctx);
4088 assert_eq!(
4089 issues,
4090 vec!["recursive function 'eval' is outside proof subset (currently supported: Int countdown, second-order affine Int recurrences with pair-state worker, structural recursion on List/recursive ADTs, String+position, mutual Int countdown, mutual String+position, and ranked sizeOf recursion)".to_string()]
4091 );
4092
4093 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4094 let lean = generated_lean_file(&out);
4095 assert!(lean.contains("def termToString__fuel"));
4096 assert!(lean.contains("def subst__fuel"));
4097 assert!(lean.contains("def countS__fuel"));
4098 assert!(!lean.contains("partial def termToString"));
4099 assert!(!lean.contains("partial def subst"));
4100 assert!(!lean.contains("partial def countS"));
4101 assert!(lean.contains("partial def eval"));
4102 }
4103
4104 #[test]
4105 fn mission_control_example_stays_inside_proof_subset() {
4106 let ctx = ctx_from_source(
4107 include_str!("../../../examples/apps/mission_control.av"),
4108 "mission_control",
4109 );
4110 let issues = proof_mode_issues(&ctx);
4111 assert!(
4112 issues.is_empty(),
4113 "expected mission_control example to stay inside proof subset, got: {:?}",
4114 issues
4115 );
4116
4117 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4118 let lean = generated_lean_file(&out);
4119 assert!(!lean.contains("partial def normalizeAngle"));
4120 assert!(lean.contains("def normalizeAngle__fuel"));
4121 }
4122
    #[test]
    fn notepad_store_example_stays_inside_proof_subset() {
        let ctx = ctx_from_source(
            include_str!("../../../examples/apps/notepad/store.av"),
            "notepad_store",
        );
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected notepad/store example to stay inside proof subset, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // The line (de)serializer compiles to a total Except-returning def.
        assert!(lean.contains("def deserializeLine (line : String) : Except String Note :="));
        assert!(lean.contains("Except String (List Note)"));
        assert!(!lean.contains("partial def deserializeLine"));
        // Roundtrip laws are domain-guarded by the *RoundtripSafe predicates.
        assert!(lean.contains("-- when noteRoundtripSafe note"));
        assert!(lean.contains("-- when notesRoundtripSafe notes"));
        assert!(lean.contains(
            "theorem serializeLine_law_lineRoundtrip : ∀ (note : Note), note = { id' := 1, title := \"Hello\", body := \"World\" : Note } ∨"
        ));
        assert!(lean.contains(
            "theorem serializeLines_law_notesRoundtrip : ∀ (notes : List Note), notes = [] ∨"
        ));
        assert!(lean.contains("notesRoundtripSafe notes = true ->"));
        // Multi-line serialization roundtrips through an intercalated string.
        assert!(lean.contains("parseNotes (s!\"{String.intercalate \"\\n\" (serializeLines notes)}\\n\") = Except.ok notes"));
        // Sampled instances of both laws are exported as well.
        assert!(lean.contains("theorem serializeLine_law_lineRoundtrip_sample_1 :"));
        assert!(lean.contains("theorem serializeLines_law_notesRoundtrip_sample_1 :"));
    }
4154}