1mod builtins;
7mod expr;
8mod law_auto;
9mod pattern;
10mod recurrence;
11mod shared;
12mod toplevel;
13mod types;
14
15use std::collections::{HashMap, HashSet};
16
17use crate::ast::{
18 BinOp, Expr, FnBody, FnDef, MatchArm, Pattern, Stmt, TopLevel, TypeDef, VerifyKind,
19};
20use crate::call_graph;
21use crate::codegen::{CodegenContext, ProjectOutput};
22
/// How a `verify` block is discharged in the generated Lean output.
// NOTE(review): variant semantics inferred from the names — confirm against
// the emitter that matches on this enum.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum VerifyEmitMode {
    /// Presumably: state the property so Lean can decide it natively.
    NativeDecide,
    /// Presumably: emit the goal closed with `sorry` (stated, unproven).
    Sorry,
    /// Presumably: emit a bare theorem statement for a manual proof.
    TheoremSkeleton,
}
35
/// Termination strategy chosen for a recursive pure function when it is
/// translated to Lean.
// NOTE(review): per-variant semantics inferred from names and from the
// measure helpers in the Lean prelude (`averStringPosFuel`, `AverMeasure`) —
// confirm against the planners in the sibling modules.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum RecursionPlan {
    /// Recursion on an `Int` parameter that counts down toward a base case.
    IntCountdown { param_index: usize },
    /// Recursion on an `Int` parameter that ascends toward an upper bound;
    /// `bound_lean` is that bound already rendered as Lean source.
    IntAscending {
        param_index: usize,
        bound_lean: String,
    },
    /// Two-term linear recurrence shape (Fibonacci-like) — TODO confirm.
    LinearRecurrence2,
    /// Structural recursion on a list parameter.
    ListStructural { param_index: usize },
    /// Termination via a decreasing `sizeOf` measure.
    SizeOfStructural,
    /// Recursion over a string with an advancing position index.
    StringPosAdvance,
    /// Mutual recursion where each function counts an `Int` down.
    MutualIntCountdown,
    /// Mutual string-position recursion; `rank` orders functions within the
    /// mutual block for the combined fuel measure.
    MutualStringPosAdvance { rank: usize },
    /// Mutual `sizeOf`-based recursion, ranked within the mutual block.
    MutualSizeOfRanked { rank: usize },
}
67
/// A diagnostic raised while checking proof-mode constraints on the input
/// program.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ProofModeIssue {
    // Source line the issue points at (assumed 1-based — confirm at emit site).
    pub line: usize,
    // Human-readable description of the problem.
    pub message: String,
}
73
/// Banner comment and lint settings placed at the top of every generated
/// Lean file.
const LEAN_PRELUDE_HEADER: &str = r#"-- Generated by the Aver → Lean 4 transpiler
-- Pure core logic only (effectful functions are omitted)

set_option linter.unusedVariables false

-- Prelude: helper definitions for Aver builtins"#;

/// `Int → Float` coercion instance plus an explicit `Float.fromInt` helper.
const LEAN_PRELUDE_FLOAT_COE: &str = r#"instance : Coe Int Float := ⟨fun n => Float.ofInt n⟩

def Float.fromInt (n : Int) : Float := Float.ofInt n"#;

/// `DecidableEq Float` backed (via `implemented_by`) by the unsafe `==` test.
const LEAN_PRELUDE_FLOAT_DEC_EQ: &str = r#"private unsafe def Float.unsafeDecEq (a b : Float) : Decidable (a = b) :=
  if a == b then isTrue (unsafeCast ()) else isFalse (unsafeCast ())
@[implemented_by Float.unsafeDecEq]
private opaque Float.compDecEq (a b : Float) : Decidable (a = b)
instance : DecidableEq Float := Float.compDecEq"#;

/// `DecidableEq` instance for `Except`, case-split over both constructors.
const LEAN_PRELUDE_EXCEPT_DEC_EQ: &str = r#"instance [DecidableEq ε] [DecidableEq α] : DecidableEq (Except ε α)
  | .ok a, .ok b =>
    if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
  | .error a, .error b =>
    if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
  | .ok _, .error _ => isFalse (by intro h; cases h)
  | .error _, .ok _ => isFalse (by intro h; cases h)"#;

/// `Except.withDefault`: unwrap an `Except`, falling back to `d` on error.
const LEAN_PRELUDE_EXCEPT_NS: &str = r#"namespace Except
def withDefault (r : Except ε α) (d : α) : α :=
  match r with
  | .ok v => v
  | .error _ => d
end Except"#;

/// `Option.toExcept`: upgrade an `Option` to an `Except` with a fixed error.
const LEAN_PRELUDE_OPTION_TO_EXCEPT: &str = r#"def Option.toExcept (o : Option α) (e : ε) : Except ε α :=
  match o with
  | some v => .ok v
  | none => .error e"#;

/// Lets generated code use `+` on strings by delegating to `String.append`.
const LEAN_PRELUDE_STRING_HADD: &str = r#"instance : HAdd String String String := ⟨String.append⟩"#;

/// Fuel measure used for string-position-advance termination proofs.
const LEAN_PRELUDE_PROOF_FUEL: &str = r#"def averStringPosFuel (s : String) (pos : Int) (rankBudget : Nat) : Nat :=
  (((s.data.length) - pos.toNat) + 1) * rankBudget"#;

/// Size measures over lists/options/exceptions for `sizeOf`-style
/// termination arguments.
const LEAN_PRELUDE_AVER_MEASURE: &str = r#"namespace AverMeasure
def list (elemMeasure : α → Nat) : List α → Nat
  | [] => 1
  | x :: xs => elemMeasure x + list elemMeasure xs + 1
def option (elemMeasure : α → Nat) : Option α → Nat
  | none => 1
  | some x => elemMeasure x + 1
def except (errMeasure : ε → Nat) (okMeasure : α → Nat) : Except ε α → Nat
  | .error e => errMeasure e + 1
  | .ok v => okMeasure v + 1
end AverMeasure"#;
127
/// Association-list map: the core operations backing Aver's map builtin.
const AVER_MAP_PRELUDE_BASE: &str = r#"namespace AverMap
def empty : List (α × β) := []
def get [DecidableEq α] (m : List (α × β)) (k : α) : Option β :=
  match m with
  | [] => none
  | (k', v) :: rest => if k = k' then some v else AverMap.get rest k
def set [DecidableEq α] (m : List (α × β)) (k : α) (v : β) : List (α × β) :=
  let rec go : List (α × β) → List (α × β)
    | [] => [(k, v)]
    | (k', v') :: rest => if k = k' then (k, v) :: rest else (k', v') :: go rest
  go m
def has [DecidableEq α] (m : List (α × β)) (k : α) : Bool :=
  m.any (fun p => decide (k = p.1))
def remove [DecidableEq α] (m : List (α × β)) (k : α) : List (α × β) :=
  m.filter (fun p => !(decide (k = p.1)))
def keys (m : List (α × β)) : List α := m.map Prod.fst
def values (m : List (α × β)) : List β := m.map Prod.snd
def entries (m : List (α × β)) : List (α × β) := m
def len (m : List (α × β)) : Nat := m.length
def fromList (entries : List (α × β)) : List (α × β) := entries"#;

/// Lemma: a key just `set` is reported present by `has`.
const AVER_MAP_PRELUDE_HAS_SET_SELF: &str = r#"private theorem any_set_go_self [DecidableEq α] (k : α) (v : β) :
    ∀ (m : List (α × β)), List.any (AverMap.set.go k v m) (fun p => decide (k = p.1)) = true := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, List.any]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.set.go, List.any, h]
      · simp [AverMap.set.go, List.any, h, ih]

theorem has_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
    AverMap.has (AverMap.set m k v) k = true := by
  simpa [AverMap.has, AverMap.set] using any_set_go_self k v m"#;

/// Lemma: `get` after `set` of the same key yields the new value.
const AVER_MAP_PRELUDE_GET_SET_SELF: &str = r#"private theorem get_set_go_self [DecidableEq α] (k : α) (v : β) :
    ∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) k = some v := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, AverMap.get]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.set.go, AverMap.get, h]
      · simp [AverMap.set.go, AverMap.get, h, ih]

theorem get_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
    AverMap.get (AverMap.set m k v) k = some v := by
  simpa [AverMap.set] using get_set_go_self k v m"#;

/// Lemma: `set` leaves `get` of a *different* key unchanged.
const AVER_MAP_PRELUDE_GET_SET_OTHER: &str = r#"private theorem get_set_go_other [DecidableEq α] (k key : α) (v : β) (h : key ≠ k) :
    ∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) key = AverMap.get m key := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, AverMap.get, h]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases hk : k = k'
      · have hkey : key ≠ k' := by simpa [hk] using h
        simp [AverMap.set.go, AverMap.get, hk, hkey]
      · by_cases hkey : key = k'
        · simp [AverMap.set.go, AverMap.get, hk, hkey]
        · simp [AverMap.set.go, AverMap.get, hk, hkey, ih]

theorem get_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
    AverMap.get (AverMap.set m k v) key = AverMap.get m key := by
  simpa [AverMap.set] using get_set_go_other k key v h m"#;

/// Lemmas: `has` agrees with `get`'s `isSome`, and `has` is stable under a
/// `set` of a different key.
const AVER_MAP_PRELUDE_HAS_SET_OTHER: &str = r#"theorem has_eq_isSome_get [DecidableEq α] (m : List (α × β)) (k : α) :
    AverMap.has m k = (AverMap.get m k).isSome := by
  induction m with
  | nil =>
    simp [AverMap.has, AverMap.get]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.has, AverMap.get, List.any, h]
      · simpa [AverMap.has, AverMap.get, List.any, h] using ih

theorem has_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
    AverMap.has (AverMap.set m k v) key = AverMap.has m key := by
  rw [AverMap.has_eq_isSome_get, AverMap.has_eq_isSome_get]
  simp [AverMap.get_set_other, h]"#;

/// Closes the `AverMap` namespace opened by `AVER_MAP_PRELUDE_BASE`.
const AVER_MAP_PRELUDE_END: &str = r#"end AverMap"#;
221
/// List helpers: negative-index-safe `get` and an insertion sort.
const LEAN_PRELUDE_AVER_LIST: &str = r#"namespace AverList
def get (xs : List α) (i : Int) : Option α :=
  if i < 0 then none else xs.get? i.toNat
private def insertSorted [Ord α] (x : α) : List α → List α
  | [] => [x]
  | y :: ys =>
    if compare x y == Ordering.lt || compare x y == Ordering.eq then
      x :: y :: ys
    else
      y :: insertSorted x ys
def sort [Ord α] (xs : List α) : List α :=
  xs.foldl (fun acc x => insertSorted x acc) []
end AverList"#;

/// Builtin record type mirrored in Lean: an HTTP-style header pair.
const LEAN_PRELUDE_HEADER_TYPE: &str = r#"structure Header where
  name : String
  value : String
  deriving Repr, BEq, Inhabited, DecidableEq"#;

/// Builtin record type mirrored in Lean: an HTTP response.
const LEAN_PRELUDE_HTTP_RESPONSE_TYPE: &str = r#"structure HttpResponse where
  status : Int
  body : String
  headers : List Header
  deriving Repr, BEq, Inhabited, DecidableEq"#;

/// Builtin record type mirrored in Lean: an HTTP request.
const LEAN_PRELUDE_HTTP_REQUEST_TYPE: &str = r#"structure HttpRequest where
  method : String
  path : String
  body : String
  headers : List Header
  deriving Repr, BEq, Inhabited, DecidableEq"#;

/// Builtin record type mirrored in Lean: a TCP connection handle.
const LEAN_PRELUDE_TCP_CONNECTION_TYPE: &str = r#"structure Tcp_Connection where
  id : String
  host : String
  port : Int
  deriving Repr, BEq, Inhabited, DecidableEq"#;
259
/// String builtins (`charAt`/`slice`/`chars`/float formatting) and the
/// `AverString` namespace with `split`, plus lemmas relating `split` and
/// `String.intercalate` (round-trip properties used by generated proofs).
const LEAN_PRELUDE_STRING_HELPERS: &str = r#"def String.charAt (s : String) (i : Int) : Option String :=
  if i < 0 then none
  else (s.toList.get? i.toNat).map Char.toString
theorem String.charAt_length_none (s : String) : String.charAt s s.length = none := by
  have hs : ¬ ((s.length : Int) < 0) := by omega
  unfold String.charAt
  simp [hs]
  change s.data.length ≤ s.length
  exact Nat.le_refl _
def String.slice (s : String) (start stop : Int) : String :=
  let startN := if start < 0 then 0 else start.toNat
  let stopN := if stop < 0 then 0 else stop.toNat
  let chars := s.toList
  String.mk ((chars.drop startN).take (stopN - startN))
private def trimFloatTrailingZerosChars (chars : List Char) : List Char :=
  let noZeros := (chars.reverse.dropWhile (fun c => c == '0')).reverse
  match noZeros.reverse with
  | '.' :: rest => rest.reverse
  | _ => noZeros
private def normalizeFloatString (s : String) : String :=
  if s.toList.any (fun c => c == '.') then
    String.mk (trimFloatTrailingZerosChars s.toList)
  else s
def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)
def String.chars (s : String) : List String := s.toList.map Char.toString
private theorem char_to_string_append_mk (c : Char) (chars : List Char) :
    Char.toString c ++ String.mk chars = String.mk (c :: chars) := by
  rfl
private theorem string_intercalate_empty_char_strings_go (acc : String) (chars : List Char) :
    String.intercalate.go acc "" (chars.map Char.toString) = acc ++ String.mk chars := by
  induction chars generalizing acc with
  | nil =>
    simp [String.intercalate.go]
  | cons c rest ih =>
    calc
      String.intercalate.go acc "" ((c :: rest).map Char.toString)
          = String.intercalate.go (acc ++ Char.toString c) "" (rest.map Char.toString) := by
        simp [String.intercalate.go]
      _ = (acc ++ Char.toString c) ++ String.mk rest := by
        simpa using ih (acc ++ Char.toString c)
      _ = acc ++ String.mk (c :: rest) := by
        simp [String.append_assoc, char_to_string_append_mk]
private theorem string_intercalate_empty_char_strings (chars : List Char) :
    String.intercalate "" (chars.map Char.toString) = String.mk chars := by
  cases chars with
  | nil =>
    simp [String.intercalate]
  | cons c rest =>
    simpa [String.intercalate] using string_intercalate_empty_char_strings_go c.toString rest
theorem String.intercalate_empty_chars (s : String) :
    String.intercalate "" (String.chars s) = s := by
  cases s with
  | mk chars =>
    simpa [String.chars] using string_intercalate_empty_char_strings chars
namespace AverString
def splitOnCharGo (currentRev : List Char) (sep : Char) : List Char → List String
  | [] => [String.mk currentRev.reverse]
  | c :: cs =>
    if c == sep then
      String.mk currentRev.reverse :: splitOnCharGo [] sep cs
    else
      splitOnCharGo (c :: currentRev) sep cs
def splitOnChar (s : String) (sep : Char) : List String :=
  splitOnCharGo [] sep s.toList
def split (s delim : String) : List String :=
  match delim.toList with
  | [] => "" :: (s.toList.map Char.toString) ++ [""]
  | [c] => splitOnChar s c
  | _ => s.splitOn delim
@[simp] private theorem char_toString_data (c : Char) : c.toString.data = [c] := by
  rfl
private theorem splitOnCharGo_until_sep
    (prefixRev part tail : List Char) (sep : Char) :
    part.all (fun c => c != sep) = true ->
    splitOnCharGo prefixRev sep (part ++ sep :: tail) =
    String.mk (prefixRev.reverse ++ part) :: splitOnCharGo [] sep tail := by
  intro h_safe
  induction part generalizing prefixRev with
  | nil =>
    simp [splitOnCharGo]
  | cons c rest ih =>
    simp at h_safe
    have h_rest : (rest.all fun c => c != sep) = true := by
      simpa using h_safe.2
    simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
      (ih (c :: prefixRev) h_rest)
private theorem splitOnCharGo_no_sep
    (prefixRev chars : List Char) (sep : Char) :
    chars.all (fun c => c != sep) = true ->
    splitOnCharGo prefixRev sep chars =
    [String.mk (prefixRev.reverse ++ chars)] := by
  intro h_safe
  induction chars generalizing prefixRev with
  | nil =>
    simp [splitOnCharGo]
  | cons c rest ih =>
    simp at h_safe
    have h_rest : (rest.all fun c => c != sep) = true := by
      simpa using h_safe.2
    simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
      (ih (c :: prefixRev) h_rest)
@[simp] theorem split_single_char_append
    (head tail : String) (sep : Char) :
    head.toList.all (fun c => c != sep) = true ->
    split (head ++ Char.toString sep ++ tail) (Char.toString sep) =
    head :: split tail (Char.toString sep) := by
  intro h_safe
  simpa [split, splitOnChar] using
    (splitOnCharGo_until_sep [] head.data tail.data sep h_safe)
@[simp] theorem split_single_char_no_sep
    (s : String) (sep : Char) :
    s.toList.all (fun c => c != sep) = true ->
    split s (Char.toString sep) = [s] := by
  intro h_safe
  simpa [split, splitOnChar] using
    (splitOnCharGo_no_sep [] s.data sep h_safe)
private theorem intercalate_go_prefix
    (pref acc sep : String) (rest : List String) :
    String.intercalate.go (pref ++ sep ++ acc) sep rest =
    pref ++ sep ++ String.intercalate.go acc sep rest := by
  induction rest generalizing acc with
  | nil =>
    simp [String.intercalate.go, String.append_assoc]
  | cons x xs ih =>
    simpa [String.intercalate.go, String.append_assoc] using
      (ih (acc ++ sep ++ x))
@[simp] theorem split_intercalate_trailing_single_char
    (parts : List String) (sep : Char) :
    parts.all (fun part => part.toList.all (fun c => c != sep)) = true ->
    split (String.intercalate (Char.toString sep) parts ++ Char.toString sep) (Char.toString sep) =
    match parts with
    | [] => ["", ""]
    | _ => parts ++ [""] := by
  intro h_safe
  induction parts with
  | nil =>
    simp [split, splitOnChar, splitOnCharGo]
  | cons part rest ih =>
    simp at h_safe
    have h_part : (part.toList.all fun c => c != sep) = true := by
      simpa using h_safe.1
    cases rest with
    | nil =>
      have h_empty : ("".toList.all fun c => c != sep) = true := by simp
      calc
        split (String.intercalate.go part (Char.toString sep) [] ++ Char.toString sep) (Char.toString sep)
            = split (part ++ Char.toString sep) (Char.toString sep) := by
          simp [String.intercalate.go]
        _ = part :: split "" (Char.toString sep) := by
          simpa using split_single_char_append part "" sep h_part
        _ = [part, ""] := by
          simp [split_single_char_no_sep, h_empty]
    | cons next rest' =>
      have h_rest : ((next :: rest').all fun part => part.toList.all fun c => c != sep) = true := by
        simpa using h_safe.2
      calc
        split
            (String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep)
            (Char.toString sep)
            =
            split
            (part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep))
            (Char.toString sep) := by
          have h_join :
              String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
              = part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
            calc
              String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
                  = String.intercalate.go (part ++ Char.toString sep ++ next) (Char.toString sep) rest' ++ Char.toString sep := by
                simp [String.intercalate.go]
              _ = part ++ Char.toString sep ++ (String.intercalate.go next (Char.toString sep) rest' ++ Char.toString sep) := by
                rw [intercalate_go_prefix part next (Char.toString sep) rest']
                simp [String.append_assoc]
              _ = part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
                simp [String.intercalate, String.intercalate.go]
          simpa using congrArg (fun s => split s (Char.toString sep)) h_join
        _ = part :: split
            (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
            (Char.toString sep) := by
          simpa using split_single_char_append
            part
            (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
            sep
            h_part
        _ = part :: (next :: rest' ++ [""]) := by
          simpa using ih h_rest
end AverString"#;
447
/// Decimal-digit machinery with proofs that `Int → String → Int`
/// round-trips (`Int.fromString_fromInt`), plus best-effort `Float` parsing.
const LEAN_PRELUDE_NUMERIC_PARSE: &str = r#"namespace AverDigits
def foldDigitsAcc (acc : Nat) : List Nat -> Nat
  | [] => acc
  | d :: ds => foldDigitsAcc (acc * 10 + d) ds

def foldDigits (digits : List Nat) : Nat :=
  foldDigitsAcc 0 digits

private theorem foldDigitsAcc_append_singleton (acc : Nat) (xs : List Nat) (d : Nat) :
    foldDigitsAcc acc (xs ++ [d]) = foldDigitsAcc acc xs * 10 + d := by
  induction xs generalizing acc with
  | nil =>
    simp [foldDigitsAcc]
  | cons x xs ih =>
    simp [foldDigitsAcc, ih, Nat.left_distrib, Nat.add_assoc, Nat.add_left_comm]

private theorem foldDigits_append_singleton (xs : List Nat) (d : Nat) :
    foldDigits (xs ++ [d]) = foldDigits xs * 10 + d := by
  simpa [foldDigits] using foldDigitsAcc_append_singleton 0 xs d

def natDigits : Nat -> List Nat
  | n =>
    if n < 10 then
      [n]
    else
      natDigits (n / 10) ++ [n % 10]
termination_by
  n => n

theorem natDigits_nonempty (n : Nat) : natDigits n ≠ [] := by
  by_cases h : n < 10
  · rw [natDigits.eq_1]
    simp [h]
  · rw [natDigits.eq_1]
    simp [h]

theorem natDigits_digits_lt_ten : ∀ n : Nat, ∀ d ∈ natDigits n, d < 10 := by
  intro n d hd
  by_cases h : n < 10
  · rw [natDigits.eq_1] at hd
    simp [h] at hd
    rcases hd with rfl
    exact h
  · rw [natDigits.eq_1] at hd
    simp [h] at hd
    rcases hd with hd | hd
    · exact natDigits_digits_lt_ten (n / 10) d hd
    · subst hd
      exact Nat.mod_lt n (by omega)

theorem foldDigits_natDigits : ∀ n : Nat, foldDigits (natDigits n) = n := by
  intro n
  by_cases h : n < 10
  · rw [natDigits.eq_1]
    simp [h, foldDigits, foldDigitsAcc]
  · rw [natDigits.eq_1]
    simp [h]
    rw [foldDigits_append_singleton]
    rw [foldDigits_natDigits (n / 10)]
    omega

def digitChar : Nat -> Char
  | 0 => '0' | 1 => '1' | 2 => '2' | 3 => '3' | 4 => '4'
  | 5 => '5' | 6 => '6' | 7 => '7' | 8 => '8' | 9 => '9'
  | _ => '0'

def charToDigit? : Char -> Option Nat
  | '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3 | '4' => some 4
  | '5' => some 5 | '6' => some 6 | '7' => some 7 | '8' => some 8 | '9' => some 9
  | _ => none

theorem charToDigit_digitChar : ∀ d : Nat, d < 10 -> charToDigit? (digitChar d) = some d
  | 0, _ => by simp [digitChar, charToDigit?]
  | 1, _ => by simp [digitChar, charToDigit?]
  | 2, _ => by simp [digitChar, charToDigit?]
  | 3, _ => by simp [digitChar, charToDigit?]
  | 4, _ => by simp [digitChar, charToDigit?]
  | 5, _ => by simp [digitChar, charToDigit?]
  | 6, _ => by simp [digitChar, charToDigit?]
  | 7, _ => by simp [digitChar, charToDigit?]
  | 8, _ => by simp [digitChar, charToDigit?]
  | 9, _ => by simp [digitChar, charToDigit?]
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem digitChar_ne_minus : ∀ d : Nat, d < 10 -> digitChar d ≠ '-'
  | 0, _ => by decide
  | 1, _ => by decide
  | 2, _ => by decide
  | 3, _ => by decide
  | 4, _ => by decide
  | 5, _ => by decide
  | 6, _ => by decide
  | 7, _ => by decide
  | 8, _ => by decide
  | 9, _ => by decide
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->
    Char.toString (digitChar d) ≠ " " ∧
    Char.toString (digitChar d) ≠ "\t" ∧
    Char.toString (digitChar d) ≠ "\n" ∧
    Char.toString (digitChar d) ≠ "\r"
  | 0, _ => by decide
  | 1, _ => by decide
  | 2, _ => by decide
  | 3, _ => by decide
  | 4, _ => by decide
  | 5, _ => by decide
  | 6, _ => by decide
  | 7, _ => by decide
  | 8, _ => by decide
  | 9, _ => by decide
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem mapM_charToDigit_digits : ∀ ds : List Nat,
    (∀ d ∈ ds, d < 10) -> List.mapM charToDigit? (ds.map digitChar) = some ds := by
  intro ds hds
  induction ds with
  | nil =>
    simp
  | cons d ds ih =>
    have hd : d < 10 := hds d (by simp)
    have htail : ∀ x ∈ ds, x < 10 := by
      intro x hx
      exact hds x (by simp [hx])
    simp [charToDigit_digitChar d hd, ih htail]

def natDigitsChars (n : Nat) : List Char :=
  (natDigits n).map digitChar

def parseNatChars (chars : List Char) : Option Nat :=
  match chars with
  | [] => none
  | _ => do
    let digits <- List.mapM charToDigit? chars
    pure (foldDigits digits)

theorem parseNatChars_nat (n : Nat) :
    parseNatChars (natDigitsChars n) = some n := by
  unfold parseNatChars natDigitsChars
  cases h : (natDigits n).map digitChar with
  | nil =>
    exfalso
    exact natDigits_nonempty n (List.map_eq_nil_iff.mp h)
  | cons hd tl =>
    have hdigits : List.mapM charToDigit? (List.map digitChar (natDigits n)) = some (natDigits n) :=
      mapM_charToDigit_digits (natDigits n) (fun d hd => natDigits_digits_lt_ten n d hd)
    rw [h] at hdigits
    simp [h, hdigits, foldDigits_natDigits]
end AverDigits

def String.fromInt (n : Int) : String :=
  match n with
  | .ofNat m => String.mk (AverDigits.natDigitsChars m)
  | .negSucc m => String.mk ('-' :: AverDigits.natDigitsChars (m + 1))

def Int.fromString (s : String) : Except String Int :=
  match s.toList with
  | [] => .error ("Cannot parse '" ++ s ++ "' as Int")
  | '-' :: rest =>
    match AverDigits.parseNatChars rest with
    | some n => .ok (-Int.ofNat n)
    | none => .error ("Cannot parse '" ++ s ++ "' as Int")
  | chars =>
    match AverDigits.parseNatChars chars with
    | some n => .ok (Int.ofNat n)
    | none => .error ("Cannot parse '" ++ s ++ "' as Int")

theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n
  | .ofNat m => by
    cases h : AverDigits.natDigits m with
    | nil =>
      exfalso
      exact AverDigits.natDigits_nonempty m h
    | cons d ds =>
      have hd : d < 10 := AverDigits.natDigits_digits_lt_ten m d (by simp [h])
      have hne : AverDigits.digitChar d ≠ '-' := AverDigits.digitChar_ne_minus d hd
      have hparse : AverDigits.parseNatChars (AverDigits.digitChar d :: List.map AverDigits.digitChar ds) = some m := by
        simpa [AverDigits.natDigitsChars, h] using AverDigits.parseNatChars_nat m
      simp [String.fromInt, Int.fromString, AverDigits.natDigitsChars, h, hne, hparse]
  | .negSucc m => by
    simp [String.fromInt, Int.fromString, AverDigits.parseNatChars_nat]
    rfl

private def charDigitsToNat (cs : List Char) : Nat :=
  cs.foldl (fun acc c => acc * 10 + (c.toNat - '0'.toNat)) 0

private def parseExpPart : List Char → (Bool × List Char)
  | '-' :: rest => (true, rest.takeWhile Char.isDigit)
  | '+' :: rest => (false, rest.takeWhile Char.isDigit)
  | rest => (false, rest.takeWhile Char.isDigit)

def Float.fromString (s : String) : Except String Float :=
  let chars := s.toList
  let (neg, chars) := match chars with
    | '-' :: rest => (true, rest)
    | _ => (false, chars)
  let intPart := chars.takeWhile Char.isDigit
  let rest := chars.dropWhile Char.isDigit
  let (fracPart, rest) := match rest with
    | '.' :: rest => (rest.takeWhile Char.isDigit, rest.dropWhile Char.isDigit)
    | _ => ([], rest)
  let (expNeg, expDigits) := match rest with
    | 'e' :: rest => parseExpPart rest
    | 'E' :: rest => parseExpPart rest
    | _ => (false, [])
  if intPart.isEmpty && fracPart.isEmpty then .error ("Invalid float: " ++ s)
  else
    let mantissa := charDigitsToNat (intPart ++ fracPart)
    let fracLen : Int := fracPart.length
    let expVal : Int := charDigitsToNat expDigits
    let shift : Int := (if expNeg then -expVal else expVal) - fracLen
    let f := if shift >= 0 then Float.ofScientific mantissa false shift.toNat
      else Float.ofScientific mantissa true ((-shift).toNat)
    .ok (if neg then -f else f)"#;
666
/// Char-code conversions (with surrogate/range checks) and byte ↔ hex
/// helpers in the `AverByte` namespace.
const LEAN_PRELUDE_CHAR_BYTE: &str = r#"def Char.toCode (s : String) : Int :=
  match s.toList.head? with
  | some c => (c.toNat : Int)
  | none => panic! "Char.toCode: string is empty"
def Char.fromCode (n : Int) : Option String :=
  if n < 0 || n > 1114111 then none
  else if n >= 55296 && n <= 57343 then none
  else some (Char.toString (Char.ofNat n.toNat))

def hexDigit (n : Int) : String :=
  match n with
  | 0 => "0" | 1 => "1" | 2 => "2" | 3 => "3"
  | 4 => "4" | 5 => "5" | 6 => "6" | 7 => "7"
  | 8 => "8" | 9 => "9" | 10 => "a" | 11 => "b"
  | 12 => "c" | 13 => "d" | 14 => "e" | 15 => "f"
  | _ => "?"

def byteToHex (code : Int) : String :=
  hexDigit (code / 16) ++ hexDigit (code % 16)

namespace AverByte
private def hexValue (c : Char) : Option Int :=
  match c with
  | '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3
  | '4' => some 4 | '5' => some 5 | '6' => some 6 | '7' => some 7
  | '8' => some 8 | '9' => some 9 | 'a' => some 10 | 'b' => some 11
  | 'c' => some 12 | 'd' => some 13 | 'e' => some 14 | 'f' => some 15
  | 'A' => some 10 | 'B' => some 11 | 'C' => some 12 | 'D' => some 13
  | 'E' => some 14 | 'F' => some 15
  | _ => none
def toHex (n : Int) : Except String String :=
  if n < 0 || n > 255 then
    .error ("Byte.toHex: " ++ toString n ++ " is out of range 0-255")
  else
    .ok (byteToHex n)
def fromHex (s : String) : Except String Int :=
  match s.toList with
  | [hi, lo] =>
    match hexValue hi, hexValue lo with
    | some h, some l => .ok (h * 16 + l)
    | _, _ => .error ("Byte.fromHex: invalid hex '" ++ s ++ "'")
  | _ => .error ("Byte.fromHex: expected exactly 2 hex chars, got '" ++ s ++ "'")
end AverByte"#;
710
711fn pure_fns(ctx: &CodegenContext) -> Vec<&FnDef> {
712 ctx.modules
713 .iter()
714 .flat_map(|m| m.fn_defs.iter())
715 .chain(ctx.fn_defs.iter())
716 .filter(|fd| toplevel::is_pure_fn(fd))
717 .collect()
718}
719
720fn recursive_type_names(ctx: &CodegenContext) -> HashSet<String> {
721 ctx.modules
722 .iter()
723 .flat_map(|m| m.type_defs.iter())
724 .chain(ctx.type_defs.iter())
725 .filter(|td| toplevel::is_recursive_type_def(td))
726 .map(|td| toplevel::type_def_name(td).to_string())
727 .collect()
728}
729
730fn recursive_pure_fn_names(ctx: &CodegenContext) -> HashSet<String> {
731 let pure_names: HashSet<String> = pure_fns(ctx)
732 .into_iter()
733 .map(|fd| fd.name.clone())
734 .collect();
735 let mut callgraph_items = ctx.items.clone();
736 for module in &ctx.modules {
737 for fd in &module.fn_defs {
738 callgraph_items.push(TopLevel::FnDef(fd.clone()));
739 }
740 }
741 call_graph::find_recursive_fns(&callgraph_items)
742 .into_iter()
743 .filter(|name| pure_names.contains(name))
744 .collect()
745}
746
747fn verify_counter_key(vb: &crate::ast::VerifyBlock) -> String {
748 match &vb.kind {
749 VerifyKind::Cases => format!("fn:{}", vb.fn_name),
750 VerifyKind::Law(law) => format!("law:{}::{}", vb.fn_name, law.name),
751 }
752}
753
754fn lean_project_name(ctx: &CodegenContext) -> String {
755 ctx.items
756 .iter()
757 .find_map(|item| match item {
758 TopLevel::Module(m) => Some(m.name.clone()),
759 _ => None,
760 })
761 .unwrap_or_else(|| capitalize_first(&ctx.project_name))
762}
763
764fn expr_to_dotted_name(expr: &Expr) -> Option<String> {
765 match expr {
766 Expr::Ident(name) => Some(name.clone()),
767 Expr::Attr(obj, field) => expr_to_dotted_name(obj).map(|p| format!("{}.{}", p, field)),
768 _ => None,
769 }
770}
771
/// True when `name` is `target` exactly, or when `target` equals the last
/// segment of a dotted `name` (so `List.map` matches target `map`).
fn call_matches(name: &str, target: &str) -> bool {
    if name == target {
        return true;
    }
    let last_segment = name.rsplit('.').next();
    last_segment == Some(target)
}
775
/// True when `name` — either in full or by its final dotted segment — is a
/// member of `targets` (same check as `call_matches_any`, inlined).
fn call_is_in_set(name: &str, targets: &HashSet<String>) -> bool {
    targets.contains(name)
        || name
            .rsplit('.')
            .next()
            .map_or(false, |last| targets.contains(last))
}
779
/// Resolves `name` to the member of `targets` it refers to: the full name
/// when present, otherwise its last dotted segment when that is present.
fn canonical_callee_name(name: &str, targets: &HashSet<String>) -> Option<String> {
    if targets.contains(name) {
        return Some(name.to_string());
    }
    match name.rsplit('.').next() {
        Some(last) if targets.contains(last) => Some(last.to_string()),
        _ => None,
    }
}
789
/// True when `name` — either in full or by its final dotted segment — is a
/// member of `targets`.
fn call_matches_any(name: &str, targets: &HashSet<String>) -> bool {
    if targets.contains(name) {
        return true;
    }
    name.rsplit('.')
        .next()
        .map_or(false, |last| targets.contains(last))
}
799
800fn is_int_minus_positive(expr: &Expr, param_name: &str) -> bool {
801 match expr {
802 Expr::BinOp(BinOp::Sub, left, right) => {
803 matches!(left.as_ref(), Expr::Ident(id) if id == param_name)
804 && matches!(right.as_ref(), Expr::Literal(crate::ast::Literal::Int(n)) if *n >= 1)
805 }
806 Expr::FnCall(callee, args) => {
807 let Some(name) = expr_to_dotted_name(callee) else {
808 return false;
809 };
810 (name == "Int.sub" || name == "int.sub")
811 && args.len() == 2
812 && matches!(&args[0], Expr::Ident(id) if id == param_name)
813 && matches!(&args[1], Expr::Literal(crate::ast::Literal::Int(n)) if *n >= 1)
814 }
815 _ => false,
816 }
817}
818
/// Recursively walks `expr` and appends every call site found, in source
/// order, as a `(callee_name, args)` pair onto `out`.
///
/// A `FnCall` is recorded only when its callee flattens to a plain or dotted
/// identifier (see `expr_to_dotted_name`); the callee and arguments are
/// traversed regardless, so nested calls are always found. `TailCall`s carry
/// their name directly and are always recorded.
fn collect_calls_from_expr<'a>(expr: &'a Expr, out: &mut Vec<(String, Vec<&'a Expr>)>) {
    match expr {
        Expr::FnCall(callee, args) => {
            // Record the call itself when the callee is a (dotted) name…
            if let Some(name) = expr_to_dotted_name(callee) {
                out.push((name, args.iter().collect()));
            }
            // …then keep walking both the callee and every argument.
            collect_calls_from_expr(callee, out);
            for arg in args {
                collect_calls_from_expr(arg, out);
            }
        }
        Expr::TailCall(boxed) => {
            let (name, args) = boxed.as_ref();
            out.push((name.clone(), args.iter().collect()));
            for arg in args {
                collect_calls_from_expr(arg, out);
            }
        }
        Expr::Attr(obj, _) => collect_calls_from_expr(obj, out),
        Expr::BinOp(_, left, right) => {
            collect_calls_from_expr(left, out);
            collect_calls_from_expr(right, out);
        }
        Expr::Match { subject, arms, .. } => {
            collect_calls_from_expr(subject, out);
            for arm in arms {
                collect_calls_from_expr(&arm.body, out);
            }
        }
        Expr::Constructor(_, inner) => {
            if let Some(inner) = inner {
                collect_calls_from_expr(inner, out);
            }
        }
        Expr::ErrorProp(inner) => collect_calls_from_expr(inner, out),
        Expr::InterpolatedStr(parts) => {
            // Only the parsed (interpolated-expression) parts can hold calls.
            for p in parts {
                if let crate::ast::StrPart::Parsed(e) = p {
                    collect_calls_from_expr(e, out);
                }
            }
        }
        Expr::List(items) | Expr::Tuple(items) => {
            for item in items {
                collect_calls_from_expr(item, out);
            }
        }
        Expr::MapLiteral(entries) => {
            for (k, v) in entries {
                collect_calls_from_expr(k, out);
                collect_calls_from_expr(v, out);
            }
        }
        Expr::RecordCreate { fields, .. } => {
            for (_, v) in fields {
                collect_calls_from_expr(v, out);
            }
        }
        Expr::RecordUpdate { base, updates, .. } => {
            collect_calls_from_expr(base, out);
            for (_, v) in updates {
                collect_calls_from_expr(v, out);
            }
        }
        // Leaves: no sub-expressions to visit.
        Expr::Literal(_) | Expr::Ident(_) | Expr::Resolved(_) => {}
    }
}
886
887fn collect_calls_from_body(body: &FnBody) -> Vec<(String, Vec<&Expr>)> {
888 let mut out = Vec::new();
889 for stmt in body.stmts() {
890 match stmt {
891 Stmt::Binding(_, _, expr) | Stmt::Expr(expr) => collect_calls_from_expr(expr, &mut out),
892 }
893 }
894 out
895}
896
/// Collects the tail binder names of `Cons` patterns used when matching
/// directly on `list_param_name`, recursing into every sub-expression.
///
/// These binders name strictly-shorter list values, so a recursive call
/// passing one of them back in the list position is structurally
/// decreasing.
fn collect_list_tail_binders_from_expr(
    expr: &Expr,
    list_param_name: &str,
    tails: &mut HashSet<String>,
) {
    match expr {
        Expr::Match { subject, arms, .. } => {
            // Only matches whose subject is exactly the list parameter
            // contribute tail binders.
            if matches!(subject.as_ref(), Expr::Ident(id) if id == list_param_name) {
                for MatchArm { pattern, .. } in arms {
                    if let Pattern::Cons(_, tail) = pattern {
                        tails.insert(tail.clone());
                    }
                }
            }
            for arm in arms {
                collect_list_tail_binders_from_expr(&arm.body, list_param_name, tails);
            }
            collect_list_tail_binders_from_expr(subject, list_param_name, tails);
        }
        Expr::FnCall(callee, args) => {
            collect_list_tail_binders_from_expr(callee, list_param_name, tails);
            for arg in args {
                collect_list_tail_binders_from_expr(arg, list_param_name, tails);
            }
        }
        Expr::TailCall(boxed) => {
            let (_, args) = boxed.as_ref();
            for arg in args {
                collect_list_tail_binders_from_expr(arg, list_param_name, tails);
            }
        }
        Expr::Attr(obj, _) => collect_list_tail_binders_from_expr(obj, list_param_name, tails),
        Expr::BinOp(_, left, right) => {
            collect_list_tail_binders_from_expr(left, list_param_name, tails);
            collect_list_tail_binders_from_expr(right, list_param_name, tails);
        }
        Expr::Constructor(_, inner) => {
            if let Some(inner) = inner {
                collect_list_tail_binders_from_expr(inner, list_param_name, tails);
            }
        }
        Expr::ErrorProp(inner) => {
            collect_list_tail_binders_from_expr(inner, list_param_name, tails)
        }
        Expr::InterpolatedStr(parts) => {
            for p in parts {
                if let crate::ast::StrPart::Parsed(e) = p {
                    collect_list_tail_binders_from_expr(e, list_param_name, tails);
                }
            }
        }
        Expr::List(items) | Expr::Tuple(items) => {
            for item in items {
                collect_list_tail_binders_from_expr(item, list_param_name, tails);
            }
        }
        Expr::MapLiteral(entries) => {
            for (k, v) in entries {
                collect_list_tail_binders_from_expr(k, list_param_name, tails);
                collect_list_tail_binders_from_expr(v, list_param_name, tails);
            }
        }
        Expr::RecordCreate { fields, .. } => {
            for (_, v) in fields {
                collect_list_tail_binders_from_expr(v, list_param_name, tails);
            }
        }
        Expr::RecordUpdate { base, updates, .. } => {
            collect_list_tail_binders_from_expr(base, list_param_name, tails);
            for (_, v) in updates {
                collect_list_tail_binders_from_expr(v, list_param_name, tails);
            }
        }
        // Leaves: nothing to visit.
        Expr::Literal(_) | Expr::Ident(_) | Expr::Resolved(_) => {}
    }
}
973
974fn collect_list_tail_binders(fd: &FnDef, list_param_name: &str) -> HashSet<String> {
975 let mut tails = HashSet::new();
976 for stmt in fd.body.stmts() {
977 match stmt {
978 Stmt::Binding(_, _, expr) | Stmt::Expr(expr) => {
979 collect_list_tail_binders_from_expr(expr, list_param_name, &mut tails)
980 }
981 }
982 }
983 tails
984}
985
986fn find_type_def<'a>(ctx: &'a CodegenContext, type_name: &str) -> Option<&'a TypeDef> {
987 ctx.modules
988 .iter()
989 .flat_map(|m| m.type_defs.iter())
990 .chain(ctx.type_defs.iter())
991 .find(|td| toplevel::type_def_name(td) == type_name)
992}
993
994fn recursive_constructor_binders(
995 td: &TypeDef,
996 variant_name: &str,
997 binders: &[String],
998) -> Vec<String> {
999 let variant_short = variant_name.rsplit('.').next().unwrap_or(variant_name);
1000 match td {
1001 TypeDef::Sum { name, variants, .. } => variants
1002 .iter()
1003 .find(|variant| variant.name == variant_short)
1004 .map(|variant| {
1005 variant
1006 .fields
1007 .iter()
1008 .zip(binders.iter())
1009 .filter_map(|(field_ty, binder)| {
1010 (field_ty.trim() == name).then_some(binder.clone())
1011 })
1012 .collect()
1013 })
1014 .unwrap_or_default(),
1015 TypeDef::Product { .. } => Vec::new(),
1016 }
1017}
1018
/// One pass of the fixpoint in `collect_recursive_subterm_binders`: for each
/// `match` whose subject is a currently-`tracked` binder, records into `out`
/// the constructor-pattern binders that are recursive occurrences of the ADT
/// `td`, then recurses into every sub-expression.
fn grow_recursive_subterm_binders_from_expr(
    expr: &Expr,
    tracked: &HashSet<String>,
    td: &TypeDef,
    out: &mut HashSet<String>,
) {
    match expr {
        Expr::Match { subject, arms, .. } => {
            // Matching on a tracked subterm exposes strictly smaller
            // subterms via the recursive fields of each constructor pattern.
            if let Expr::Ident(subject_name) = subject.as_ref()
                && tracked.contains(subject_name)
            {
                for arm in arms {
                    if let Pattern::Constructor(variant_name, binders) = &arm.pattern {
                        out.extend(recursive_constructor_binders(td, variant_name, binders));
                    }
                }
            }
            grow_recursive_subterm_binders_from_expr(subject, tracked, td, out);
            for arm in arms {
                grow_recursive_subterm_binders_from_expr(&arm.body, tracked, td, out);
            }
        }
        Expr::FnCall(callee, args) => {
            grow_recursive_subterm_binders_from_expr(callee, tracked, td, out);
            for arg in args {
                grow_recursive_subterm_binders_from_expr(arg, tracked, td, out);
            }
        }
        Expr::Attr(obj, _) => grow_recursive_subterm_binders_from_expr(obj, tracked, td, out),
        Expr::BinOp(_, left, right) => {
            grow_recursive_subterm_binders_from_expr(left, tracked, td, out);
            grow_recursive_subterm_binders_from_expr(right, tracked, td, out);
        }
        Expr::Constructor(_, Some(inner)) | Expr::ErrorProp(inner) => {
            grow_recursive_subterm_binders_from_expr(inner, tracked, td, out)
        }
        Expr::InterpolatedStr(parts) => {
            for part in parts {
                if let crate::ast::StrPart::Parsed(inner) = part {
                    grow_recursive_subterm_binders_from_expr(inner, tracked, td, out);
                }
            }
        }
        Expr::List(items) | Expr::Tuple(items) => {
            for item in items {
                grow_recursive_subterm_binders_from_expr(item, tracked, td, out);
            }
        }
        Expr::MapLiteral(entries) => {
            for (k, v) in entries {
                grow_recursive_subterm_binders_from_expr(k, tracked, td, out);
                grow_recursive_subterm_binders_from_expr(v, tracked, td, out);
            }
        }
        Expr::RecordCreate { fields, .. } => {
            for (_, v) in fields {
                grow_recursive_subterm_binders_from_expr(v, tracked, td, out);
            }
        }
        Expr::RecordUpdate { base, updates, .. } => {
            grow_recursive_subterm_binders_from_expr(base, tracked, td, out);
            for (_, v) in updates {
                grow_recursive_subterm_binders_from_expr(v, tracked, td, out);
            }
        }
        Expr::TailCall(boxed) => {
            for arg in &boxed.1 {
                grow_recursive_subterm_binders_from_expr(arg, tracked, td, out);
            }
        }
        // Leaves (and nullary constructors): nothing to visit.
        Expr::Literal(_) | Expr::Ident(_) | Expr::Resolved(_) | Expr::Constructor(_, None) => {}
    }
}
1092
1093fn collect_recursive_subterm_binders(
1094 fd: &FnDef,
1095 param_name: &str,
1096 param_type: &str,
1097 ctx: &CodegenContext,
1098) -> HashSet<String> {
1099 let Some(td) = find_type_def(ctx, param_type) else {
1100 return HashSet::new();
1101 };
1102 let mut tracked: HashSet<String> = HashSet::from([param_name.to_string()]);
1103 loop {
1104 let mut discovered = HashSet::new();
1105 for stmt in fd.body.stmts() {
1106 match stmt {
1107 Stmt::Binding(_, _, expr) | Stmt::Expr(expr) => {
1108 grow_recursive_subterm_binders_from_expr(expr, &tracked, td, &mut discovered);
1109 }
1110 }
1111 }
1112 let before = tracked.len();
1113 tracked.extend(discovered);
1114 if tracked.len() == before {
1115 break;
1116 }
1117 }
1118 tracked.remove(param_name);
1119 tracked
1120}
1121
1122fn single_int_countdown_param_index(fd: &FnDef) -> Option<usize> {
1123 let recursive_calls: Vec<Vec<&Expr>> = collect_calls_from_body(fd.body.as_ref())
1124 .into_iter()
1125 .filter(|(name, _)| call_matches(name, &fd.name))
1126 .map(|(_, args)| args)
1127 .collect();
1128 if recursive_calls.is_empty() {
1129 return None;
1130 }
1131
1132 fd.params
1133 .iter()
1134 .enumerate()
1135 .find_map(|(idx, (param_name, param_ty))| {
1136 if param_ty != "Int" {
1137 return None;
1138 }
1139 let countdown_ok = recursive_calls.iter().all(|args| {
1140 args.get(idx)
1141 .cloned()
1142 .is_some_and(|arg| is_int_minus_positive(arg, param_name))
1143 });
1144 if countdown_ok {
1145 return Some(idx);
1146 }
1147
1148 let ascent_ok = recursive_calls.iter().all(|args| {
1151 args.get(idx)
1152 .copied()
1153 .is_some_and(|arg| is_int_plus_positive(arg, param_name))
1154 });
1155 (ascent_ok && has_negative_guarded_ascent(fd, param_name)).then_some(idx)
1156 })
1157}
1158
/// Detects the "ascend while negative" guard shape required by the
/// ascending fallback of `single_int_countdown_param_index`: the body's
/// tail expression must be `match (param < 0) { true => ..., false => ... }`
/// where the `true` arm contains at least one recursive self-call and the
/// `false` arm contains none (so recursion stops once the parameter reaches
/// zero or above).
fn has_negative_guarded_ascent(fd: &FnDef, param_name: &str) -> bool {
    let Some(Expr::Match { subject, arms, .. }) = fd.body.tail_expr() else {
        return false;
    };
    // The match subject must be exactly `param < 0`.
    let Expr::BinOp(BinOp::Lt, left, right) = subject.as_ref() else {
        return false;
    };
    if !is_ident(left, param_name)
        || !matches!(right.as_ref(), Expr::Literal(crate::ast::Literal::Int(0)))
    {
        return false;
    }

    // Only literal `true`/`false` arms are allowed; any other pattern
    // disqualifies the shape.
    let mut true_arm = None;
    let mut false_arm = None;
    for arm in arms {
        match arm.pattern {
            Pattern::Literal(crate::ast::Literal::Bool(true)) => true_arm = Some(arm.body.as_ref()),
            Pattern::Literal(crate::ast::Literal::Bool(false)) => {
                false_arm = Some(arm.body.as_ref())
            }
            _ => return false,
        }
    }

    let Some(true_arm) = true_arm else {
        return false;
    };
    let Some(false_arm) = false_arm else {
        return false;
    };

    let mut true_calls = Vec::new();
    collect_calls_from_expr(true_arm, &mut true_calls);
    let mut false_calls = Vec::new();
    collect_calls_from_expr(false_arm, &mut false_calls);

    // Recursion must occur in the guarded (`true`) arm only.
    true_calls
        .iter()
        .any(|(name, _)| call_matches(name, &fd.name))
        && false_calls
            .iter()
            .all(|(name, _)| !call_matches(name, &fd.name))
}
1203
/// Finds an `Int` parameter on which every recursive self-call strictly
/// ascends (`p + k`, `k >= 1`) toward an equality bound `p == bound`
/// extracted from the body's tail match. Returns the parameter index
/// together with the bound rendered as Lean source (see
/// `extract_equality_bound_lean`).
fn single_int_ascending_param(fd: &FnDef) -> Option<(usize, String)> {
    let recursive_calls: Vec<Vec<&Expr>> = collect_calls_from_body(fd.body.as_ref())
        .into_iter()
        .filter(|(name, _)| call_matches(name, &fd.name))
        .map(|(_, args)| args)
        .collect();
    if recursive_calls.is_empty() {
        return None;
    }

    for (idx, (param_name, param_ty)) in fd.params.iter().enumerate() {
        if param_ty != "Int" {
            continue;
        }
        // Every recursive call must pass `param + k` in this position.
        let ascent_ok = recursive_calls.iter().all(|args| {
            args.get(idx)
                .cloned()
                .is_some_and(|arg| is_int_plus_positive(arg, param_name))
        });
        if !ascent_ok {
            continue;
        }
        // The ascent is only well-founded if the body stops at an equality
        // bound on this same parameter.
        if let Some(bound_lean) = extract_equality_bound_lean(fd, param_name) {
            return Some((idx, bound_lean));
        }
    }
    None
}
1234
/// Extracts the Lean rendering of `bound` when the body's tail expression
/// has the shape `match (param == bound) { true => base, false => recurse }`
/// — i.e. recursion stops exactly when the parameter reaches the bound.
/// Returns `None` for any other shape, including recursion inside the
/// `true` arm (which would not terminate at the bound).
fn extract_equality_bound_lean(fd: &FnDef, param_name: &str) -> Option<String> {
    let Expr::Match { subject, arms, .. } = fd.body.tail_expr()? else {
        return None;
    };
    // The match subject must be `param == <bound>`.
    let Expr::BinOp(BinOp::Eq, left, right) = subject.as_ref() else {
        return None;
    };
    if !is_ident(left, param_name) {
        return None;
    }
    // Track which boolean arm contains a recursive self-call; any
    // non-boolean pattern disqualifies the shape.
    let mut true_has_self = false;
    let mut false_has_self = false;
    for arm in arms {
        match arm.pattern {
            Pattern::Literal(crate::ast::Literal::Bool(true)) => {
                let mut calls = Vec::new();
                collect_calls_from_expr(&arm.body, &mut calls);
                true_has_self = calls.iter().any(|(n, _)| call_matches(n, &fd.name));
            }
            Pattern::Literal(crate::ast::Literal::Bool(false)) => {
                let mut calls = Vec::new();
                collect_calls_from_expr(&arm.body, &mut calls);
                false_has_self = calls.iter().any(|(n, _)| call_matches(n, &fd.name));
            }
            _ => return None,
        }
    }
    // The `true` arm must be the base case and the `false` arm must recurse.
    if true_has_self || !false_has_self {
        return None;
    }
    Some(bound_expr_to_lean(right))
}
1270
/// Best-effort rendering of a bound expression into Lean source.
///
/// Supports integer literals, identifiers, `List.len` (mapped to Lean's
/// `.length`), generic dotted-name calls, and attribute access. Any other
/// shape falls back to `"0"`, keeping the emitted Lean syntactically valid
/// even when the bound cannot be represented faithfully.
fn bound_expr_to_lean(expr: &Expr) -> String {
    match expr {
        Expr::Literal(crate::ast::Literal::Int(n)) => format!("{}", n),
        Expr::Ident(name) => expr::aver_name_to_lean(name),
        Expr::FnCall(f, args) => {
            if let Some(dotted) = crate::codegen::common::expr_to_dotted_name(f) {
                // Special case: `List.len xs` becomes `xs.length` in Lean.
                if dotted == "List.len" && args.len() == 1 {
                    return format!("{}.length", bound_expr_to_lean(&args[0]));
                }
                let lean_args: Vec<String> = args.iter().map(bound_expr_to_lean).collect();
                format!(
                    "({} {})",
                    expr::aver_name_to_lean(&dotted),
                    lean_args.join(" ")
                )
            } else {
                // Non-dotted callee: unrepresentable, fall back to "0".
                "0".to_string()
            }
        }
        Expr::Attr(obj, field) => format!(
            "{}.{}",
            bound_expr_to_lean(obj),
            expr::aver_name_to_lean(field)
        ),
        _ => "0".to_string(),
    }
}
1299
/// Checks whether `fd` recurses structurally under a combined `sizeOf`-style
/// measure: at every recursive self-call, each non-scalar ("measure")
/// argument is either passed through unchanged or replaced by a strict
/// recursive subterm binder of that parameter, with at least one argument
/// strictly smaller per call.
fn supports_single_sizeof_structural(fd: &FnDef, ctx: &CodegenContext) -> bool {
    let recursive_calls: Vec<Vec<&Expr>> = collect_calls_from_body(fd.body.as_ref())
        .into_iter()
        .filter(|(name, _)| call_matches(name, &fd.name))
        .map(|(_, args)| args)
        .collect();
    if recursive_calls.is_empty() {
        return false;
    }

    // Parameters that participate in the measure (non-scalar types).
    let metric_indices = sizeof_measure_param_indices(fd);
    if metric_indices.is_empty() {
        return false;
    }

    // For each measure parameter of a known recursive ADT type, compute the
    // binders that denote strict subterms of it.
    let binder_sets: HashMap<usize, HashSet<String>> = metric_indices
        .iter()
        .filter_map(|idx| {
            let (param_name, param_type) = fd.params.get(*idx)?;
            recursive_type_names(ctx).contains(param_type).then(|| {
                (
                    *idx,
                    collect_recursive_subterm_binders(fd, param_name, param_type, ctx),
                )
            })
        })
        .collect();

    // Without any subterm binders no call can ever be strictly smaller.
    if binder_sets.values().all(HashSet::is_empty) {
        return false;
    }

    recursive_calls.iter().all(|args| {
        let mut strictly_smaller = false;
        for idx in &metric_indices {
            let Some((param_name, _)) = fd.params.get(*idx) else {
                return false;
            };
            let Some(arg) = args.get(*idx).cloned() else {
                return false;
            };
            // Passing the parameter through unchanged keeps the measure equal.
            if is_ident(arg, param_name) {
                continue;
            }
            let Some(binders) = binder_sets.get(idx) else {
                return false;
            };
            // Passing a strict subterm binder decreases the measure.
            if matches!(arg, Expr::Ident(id) if binders.contains(id)) {
                strictly_smaller = true;
                continue;
            }
            // Any other argument shape is unverifiable.
            return false;
        }
        strictly_smaller
    })
}
1356
1357fn single_list_structural_param_index(fd: &FnDef) -> Option<usize> {
1358 fd.params
1359 .iter()
1360 .enumerate()
1361 .find_map(|(param_index, (param_name, param_ty))| {
1362 if !(param_ty.starts_with("List<") || param_ty == "List") {
1363 return None;
1364 }
1365
1366 let tails = collect_list_tail_binders(fd, param_name);
1367 if tails.is_empty() {
1368 return None;
1369 }
1370
1371 let recursive_calls: Vec<Option<&Expr>> = collect_calls_from_body(fd.body.as_ref())
1372 .into_iter()
1373 .filter(|(name, _)| call_matches(name, &fd.name))
1374 .map(|(_, args)| args.get(param_index).cloned())
1375 .collect();
1376 if recursive_calls.is_empty() {
1377 return None;
1378 }
1379
1380 recursive_calls
1381 .into_iter()
1382 .all(|arg| {
1383 matches!(
1384 arg,
1385 Some(Expr::Ident(id)) if tails.contains(id)
1386 )
1387 })
1388 .then_some(param_index)
1389 })
1390}
1391
1392fn is_ident(expr: &Expr, name: &str) -> bool {
1393 matches!(expr, Expr::Ident(id) if id == name)
1394}
1395
1396fn is_int_plus_positive(expr: &Expr, param_name: &str) -> bool {
1397 match expr {
1398 Expr::BinOp(BinOp::Add, left, right) => {
1399 matches!(left.as_ref(), Expr::Ident(id) if id == param_name)
1400 && matches!(right.as_ref(), Expr::Literal(crate::ast::Literal::Int(n)) if *n >= 1)
1401 }
1402 Expr::FnCall(callee, args) => {
1403 let Some(name) = expr_to_dotted_name(callee) else {
1404 return false;
1405 };
1406 (name == "Int.add" || name == "int.add")
1407 && args.len() == 2
1408 && matches!(&args[0], Expr::Ident(id) if id == param_name)
1409 && matches!(&args[1], Expr::Literal(crate::ast::Literal::Int(n)) if *n >= 1)
1410 }
1411 _ => false,
1412 }
1413}
1414
1415fn is_skip_ws_advance(expr: &Expr, string_param: &str, pos_param: &str) -> bool {
1416 let Expr::FnCall(callee, args) = expr else {
1417 return false;
1418 };
1419 let Some(name) = expr_to_dotted_name(callee) else {
1420 return false;
1421 };
1422 if !call_matches(&name, "skipWs") || args.len() != 2 {
1423 return false;
1424 }
1425 is_ident(&args[0], string_param) && is_int_plus_positive(&args[1], pos_param)
1426}
1427
1428fn is_skip_ws_same(expr: &Expr, string_param: &str, pos_param: &str) -> bool {
1429 let Expr::FnCall(callee, args) = expr else {
1430 return false;
1431 };
1432 let Some(name) = expr_to_dotted_name(callee) else {
1433 return false;
1434 };
1435 if !call_matches(&name, "skipWs") || args.len() != 2 {
1436 return false;
1437 }
1438 is_ident(&args[0], string_param) && is_ident(&args[1], pos_param)
1439}
1440
1441fn is_string_pos_advance(expr: &Expr, string_param: &str, pos_param: &str) -> bool {
1442 is_int_plus_positive(expr, pos_param) || is_skip_ws_advance(expr, string_param, pos_param)
1443}
1444
/// How a recursive call's position argument relates to the caller's
/// position parameter, for the String+position termination argument.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum StringPosEdge {
    /// Position passed through unchanged (`pos` or `skipWs(s, pos)`).
    Same,
    /// Position strictly increases.
    Advance,
}
1450
/// Classifies the position argument of an intra-component recursive call
/// relative to the caller's `pos_param`: `Same` when the position is passed
/// through unchanged (`pos` itself or `skipWs(s, pos)`), `Advance` for
/// `pos + k`, advancing `skipWs` forms, `skipWs` over a different
/// identifier, or any bare identifier other than `pos_param`. Returns
/// `None` for unrecognized shapes.
fn classify_string_pos_edge(
    expr: &Expr,
    string_param: &str,
    pos_param: &str,
) -> Option<StringPosEdge> {
    if is_ident(expr, pos_param) || is_skip_ws_same(expr, string_param, pos_param) {
        return Some(StringPosEdge::Same);
    }
    if is_string_pos_advance(expr, string_param, pos_param) {
        return Some(StringPosEdge::Advance);
    }
    if let Expr::FnCall(callee, args) = expr {
        let name = expr_to_dotted_name(callee)?;
        // `skipWs` over an identifier that is *not* the position parameter:
        // classified as an advance — presumably the identifier holds an
        // already-advanced position; TODO(review) confirm this assumption.
        if call_matches(&name, "skipWs")
            && args.len() == 2
            && is_ident(&args[0], string_param)
            && matches!(&args[1], Expr::Ident(id) if id != pos_param)
        {
            return Some(StringPosEdge::Advance);
        }
    }
    // A bare identifier other than the position parameter is likewise
    // classified as an advance.
    if matches!(expr, Expr::Ident(id) if id != pos_param) {
        return Some(StringPosEdge::Advance);
    }
    None
}
1477
/// Assigns a strictly decreasing rank to each function along "same measure"
/// call edges via a topological sort (Kahn's algorithm).
///
/// An edge `f -> g` in `same_edges` means `f` calls `g` without making
/// progress on the termination measure, so `g` must receive a strictly
/// smaller rank than `f`. Ranks run from `names.len()` (sources) down to 1
/// (sinks). Returns `None` when the edges contain a cycle (no valid ranking
/// exists) or reference a name outside `names`.
///
/// Fix: the closure pattern previously read `|(name, °)|` — an HTML-entity
/// corruption (`&deg` → `°`) that does not compile; restored to
/// `|(name, &deg)|`.
fn ranks_from_same_edges(
    names: &HashSet<String>,
    same_edges: &HashMap<String, HashSet<String>>,
) -> Option<HashMap<String, usize>> {
    // In-degree of every node under the "same" edges.
    let mut indegree: HashMap<String, usize> = names.iter().map(|n| (n.clone(), 0)).collect();
    for outs in same_edges.values() {
        for to in outs {
            if let Some(entry) = indegree.get_mut(to) {
                *entry += 1;
            } else {
                // Edge target is not a known component member.
                return None;
            }
        }
    }

    let mut queue: Vec<String> = indegree
        .iter()
        .filter_map(|(name, &deg)| (deg == 0).then_some(name.clone()))
        .collect();
    // Sort so the pop order — and therefore the ranking — is deterministic.
    queue.sort();
    let mut topo = Vec::new();
    while let Some(node) = queue.pop() {
        topo.push(node.clone());
        let outs = same_edges.get(&node).cloned().unwrap_or_default();
        let mut newly_zero = Vec::new();
        for to in outs {
            if let Some(entry) = indegree.get_mut(&to) {
                *entry -= 1;
                if *entry == 0 {
                    newly_zero.push(to);
                }
            } else {
                return None;
            }
        }
        newly_zero.sort();
        queue.extend(newly_zero);
    }

    // If not every node was emitted, the "same" edges contain a cycle.
    if topo.len() != names.len() {
        return None;
    }

    // Earlier in topological order => larger rank (n down to 1).
    let n = topo.len();
    let mut ranks = HashMap::new();
    for (idx, name) in topo.into_iter().enumerate() {
        ranks.insert(name, n - idx);
    }
    Some(ranks)
}
1528
1529fn supports_single_string_pos_advance(fd: &FnDef) -> bool {
1530 let Some((string_param, string_ty)) = fd.params.first() else {
1531 return false;
1532 };
1533 let Some((pos_param, pos_ty)) = fd.params.get(1) else {
1534 return false;
1535 };
1536 if string_ty != "String" || pos_ty != "Int" {
1537 return false;
1538 }
1539
1540 let recursive_calls: Vec<(Option<&Expr>, Option<&Expr>)> =
1541 collect_calls_from_body(fd.body.as_ref())
1542 .into_iter()
1543 .filter(|(name, _)| call_matches(name, &fd.name))
1544 .map(|(_, args)| (args.first().cloned(), args.get(1).cloned()))
1545 .collect();
1546 if recursive_calls.is_empty() {
1547 return false;
1548 }
1549
1550 recursive_calls.into_iter().all(|(arg0, arg1)| {
1551 arg0.is_some_and(|e| is_ident(e, string_param))
1552 && arg1.is_some_and(|e| is_string_pos_advance(e, string_param, pos_param))
1553 })
1554}
1555
/// Checks the mutual Int-countdown shape for a component of two or more
/// functions: every member takes `Int` as its first parameter, and every
/// intra-component call passes `firstParam - k` (`k >= 1`) in that
/// position. At least one intra-component call must exist.
fn supports_mutual_int_countdown(component: &[&FnDef]) -> bool {
    if component.len() < 2 {
        return false;
    }
    if component
        .iter()
        .any(|fd| !matches!(fd.params.first(), Some((_, t)) if t == "Int"))
    {
        return false;
    }
    let names: HashSet<String> = component.iter().map(|fd| fd.name.clone()).collect();
    let mut any_intra = false;
    for fd in component {
        let param_name = &fd.params[0].0;
        for (callee, args) in collect_calls_from_body(fd.body.as_ref()) {
            // Calls leaving the component are irrelevant to termination.
            if !call_is_in_set(&callee, &names) {
                continue;
            }
            any_intra = true;
            let Some(arg0) = args.first().cloned() else {
                return false;
            };
            if !is_int_minus_positive(arg0, param_name) {
                return false;
            }
        }
    }
    any_intra
}
1585
/// Checks the mutual String+position shape for a component of two or more
/// functions: every member takes `(String, Int)` as its first two
/// parameters, every intra-component call passes the string unchanged, and
/// each call's position edge classifies as `Same` or `Advance`. The `Same`
/// edges must form a DAG; on success, returns per-function ranks such that
/// every `Same` call targets a strictly smaller rank (see
/// `ranks_from_same_edges`).
fn supports_mutual_string_pos_advance(component: &[&FnDef]) -> Option<HashMap<String, usize>> {
    if component.len() < 2 {
        return None;
    }
    if component.iter().any(|fd| {
        !matches!(fd.params.first(), Some((_, t)) if t == "String")
            || !matches!(fd.params.get(1), Some((_, t)) if t == "Int")
    }) {
        return None;
    }

    let names: HashSet<String> = component.iter().map(|fd| fd.name.clone()).collect();
    // `Same` edges: caller -> callees reached without advancing the position.
    let mut same_edges: HashMap<String, HashSet<String>> =
        names.iter().map(|n| (n.clone(), HashSet::new())).collect();
    let mut any_intra = false;

    for fd in component {
        let string_param = &fd.params[0].0;
        let pos_param = &fd.params[1].0;
        for (callee_raw, args) in collect_calls_from_body(fd.body.as_ref()) {
            // Calls leaving the component are irrelevant to termination.
            let Some(callee) = canonical_callee_name(&callee_raw, &names) else {
                continue;
            };
            any_intra = true;

            let arg0 = args.first().cloned()?;
            let arg1 = args.get(1).cloned()?;

            // The string must be threaded through unchanged.
            if !is_ident(arg0, string_param) {
                return None;
            }

            match classify_string_pos_edge(arg1, string_param, pos_param) {
                Some(StringPosEdge::Same) => {
                    if let Some(edges) = same_edges.get_mut(&fd.name) {
                        edges.insert(callee);
                    } else {
                        return None;
                    }
                }
                // Advancing calls always decrease the remaining input.
                Some(StringPosEdge::Advance) => {}
                None => return None,
            }
        }
    }

    if !any_intra {
        return None;
    }

    ranks_from_same_edges(&names, &same_edges)
}
1638
/// True for primitive, scalar-like type names that never contribute to a
/// sizeOf-style termination measure.
fn is_scalar_like_type(type_name: &str) -> bool {
    const SCALARS: [&str; 7] = ["Int", "Float", "Bool", "String", "Char", "Byte", "Unit"];
    SCALARS.contains(&type_name)
}
1645
1646pub(super) fn sizeof_measure_param_indices(fd: &FnDef) -> Vec<usize> {
1647 fd.params
1648 .iter()
1649 .enumerate()
1650 .filter_map(|(idx, (_, type_name))| (!is_scalar_like_type(type_name)).then_some(idx))
1651 .collect()
1652}
1653
/// Checks the ranked mutual sizeOf shape for a component of two or more
/// functions: every member has at least one measure (non-scalar) parameter,
/// and each intra-component call that forwards *all* measure parameters
/// unchanged (position-for-position) is a "same" edge. The same edges must
/// form a DAG; on success, returns per-function ranks so that same-measure
/// calls always target a strictly smaller rank.
fn supports_mutual_sizeof_ranked(component: &[&FnDef]) -> Option<HashMap<String, usize>> {
    if component.len() < 2 {
        return None;
    }
    let names: HashSet<String> = component.iter().map(|fd| fd.name.clone()).collect();
    let metric_indices: HashMap<String, Vec<usize>> = component
        .iter()
        .map(|fd| (fd.name.clone(), sizeof_measure_param_indices(fd)))
        .collect();
    // Every member needs at least one measure parameter.
    if component.iter().any(|fd| {
        metric_indices
            .get(&fd.name)
            .is_none_or(|indices| indices.is_empty())
    }) {
        return None;
    }

    let mut same_edges: HashMap<String, HashSet<String>> =
        names.iter().map(|n| (n.clone(), HashSet::new())).collect();
    let mut any_intra = false;
    for fd in component {
        let caller_metric_indices = metric_indices.get(&fd.name)?;
        let caller_metric_params: Vec<&str> = caller_metric_indices
            .iter()
            .filter_map(|idx| fd.params.get(*idx).map(|(name, _)| name.as_str()))
            .collect();
        for (callee_raw, args) in collect_calls_from_body(fd.body.as_ref()) {
            // Calls leaving the component are irrelevant to termination.
            let Some(callee) = canonical_callee_name(&callee_raw, &names) else {
                continue;
            };
            any_intra = true;
            let callee_metric_indices = metric_indices.get(&callee)?;
            // A "same" edge forwards every measure parameter verbatim, in
            // the same order, into the callee's measure positions.
            let is_same_edge = callee_metric_indices.len() == caller_metric_params.len()
                && callee_metric_indices
                    .iter()
                    .enumerate()
                    .all(|(pos, callee_idx)| {
                        let Some(arg) = args.get(*callee_idx).cloned() else {
                            return false;
                        };
                        is_ident(arg, caller_metric_params[pos])
                    });
            if is_same_edge {
                if let Some(edges) = same_edges.get_mut(&fd.name) {
                    edges.insert(callee);
                } else {
                    return None;
                }
            }
        }
    }
    if !any_intra {
        return None;
    }

    let ranks = ranks_from_same_edges(&names, &same_edges)?;
    let mut out = HashMap::new();
    for fd in component {
        let rank = ranks.get(&fd.name).cloned()?;
        out.insert(fd.name.clone(), rank);
    }
    Some(out)
}
1717
/// Analyzes every recursive pure-function component and assigns each
/// function a `RecursionPlan` describing how its termination will be
/// justified in Lean. Components fitting no supported shape yield a
/// `ProofModeIssue` (keyed to the function's source line) instead of a
/// plan.
fn proof_mode_recursion_analysis(
    ctx: &CodegenContext,
) -> (HashMap<String, RecursionPlan>, Vec<ProofModeIssue>) {
    let mut plans = HashMap::new();
    let mut issues = Vec::new();

    let all_pure = pure_fns(ctx);
    let recursive_names = recursive_pure_fn_names(ctx);
    let components = call_graph::ordered_fn_components(&all_pure, &ctx.module_prefixes);

    for component in components {
        if component.is_empty() {
            continue;
        }
        // Multi-function components are mutually recursive by construction;
        // singletons are recursive only if flagged by the name analysis.
        let is_recursive_component =
            component.len() > 1 || recursive_names.contains(&component[0].name);
        if !is_recursive_component {
            continue;
        }

        if component.len() > 1 {
            // Mutual groups: try each supported shape, most specific first.
            if supports_mutual_int_countdown(&component) {
                for fd in &component {
                    plans.insert(fd.name.clone(), RecursionPlan::MutualIntCountdown);
                }
            } else if let Some(ranks) = supports_mutual_string_pos_advance(&component) {
                for fd in &component {
                    if let Some(rank) = ranks.get(&fd.name).cloned() {
                        plans.insert(
                            fd.name.clone(),
                            RecursionPlan::MutualStringPosAdvance { rank },
                        );
                    }
                }
            } else if let Some(rankings) = supports_mutual_sizeof_ranked(&component) {
                for fd in &component {
                    if let Some(rank) = rankings.get(&fd.name).cloned() {
                        plans.insert(fd.name.clone(), RecursionPlan::MutualSizeOfRanked { rank });
                    }
                }
            } else {
                // No supported mutual shape: report one issue for the whole
                // group, anchored at its earliest source line.
                let names = component
                    .iter()
                    .map(|fd| fd.name.clone())
                    .collect::<Vec<_>>()
                    .join(", ");
                let line = component.iter().map(|fd| fd.line).min().unwrap_or(1);
                issues.push(ProofModeIssue {
                    line,
                    message: format!(
                        "unsupported mutual recursion group (currently supported in proof mode: Int countdown on first param): {}",
                        names
                    ),
                });
            }
            continue;
        }

        // Single recursive function: try shapes from most to least specific.
        let fd = component[0];
        if recurrence::detect_second_order_int_linear_recurrence(fd).is_some() {
            plans.insert(fd.name.clone(), RecursionPlan::LinearRecurrence2);
        } else if let Some((param_index, bound_lean)) = single_int_ascending_param(fd) {
            plans.insert(
                fd.name.clone(),
                RecursionPlan::IntAscending {
                    param_index,
                    bound_lean,
                },
            );
        } else if let Some(param_index) = single_int_countdown_param_index(fd) {
            plans.insert(fd.name.clone(), RecursionPlan::IntCountdown { param_index });
        } else if supports_single_sizeof_structural(fd, ctx) {
            plans.insert(fd.name.clone(), RecursionPlan::SizeOfStructural);
        } else if let Some(param_index) = single_list_structural_param_index(fd) {
            plans.insert(
                fd.name.clone(),
                RecursionPlan::ListStructural { param_index },
            );
        } else if supports_single_string_pos_advance(fd) {
            plans.insert(fd.name.clone(), RecursionPlan::StringPosAdvance);
        } else {
            issues.push(ProofModeIssue {
                line: fd.line,
                message: format!(
                    "recursive function '{}' is outside proof subset (currently supported: Int countdown, second-order affine Int recurrences with pair-state worker, structural recursion on List/recursive ADTs, String+position, mutual Int countdown, mutual String+position, and ranked sizeOf recursion)",
                    fd.name
                ),
            });
        }
    }

    (plans, issues)
}
1811
1812pub fn proof_mode_findings(ctx: &CodegenContext) -> Vec<ProofModeIssue> {
1817 let (_plans, issues) = proof_mode_recursion_analysis(ctx);
1818 issues
1819}
1820
1821pub fn proof_mode_issues(ctx: &CodegenContext) -> Vec<String> {
1822 proof_mode_findings(ctx)
1823 .into_iter()
1824 .map(|issue| issue.message)
1825 .collect()
1826}
1827
/// Transpiles with the default verify emission mode (`NativeDecide`).
pub fn transpile(ctx: &CodegenContext) -> ProjectOutput {
    transpile_with_verify_mode(ctx, VerifyEmitMode::NativeDecide)
}
1832
/// Proof-mode variant of `transpile`: in addition to the standard output it
/// emits, for recursive types, a `DecidableEq` instance and (when
/// applicable) a termination measure, and emits pure functions through the
/// proof-mode emitter using the per-function `RecursionPlan`s. Analysis
/// issues are ignored here; callers surface them via
/// `proof_mode_findings`.
pub fn transpile_for_proof_mode(
    ctx: &CodegenContext,
    verify_mode: VerifyEmitMode,
) -> ProjectOutput {
    let (plans, _issues) = proof_mode_recursion_analysis(ctx);
    let recursive_names = recursive_pure_fn_names(ctx);
    let recursive_types = recursive_type_names(ctx);

    let mut sections = Vec::new();

    // Type definitions: module-level first, then top-level.
    for module in &ctx.modules {
        for td in &module.type_defs {
            sections.push(toplevel::emit_type_def(td));
            if toplevel::is_recursive_type_def(td) {
                sections.push(toplevel::emit_recursive_decidable_eq(
                    toplevel::type_def_name(td),
                ));
                if let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types) {
                    sections.push(measure);
                }
            }
            sections.push(String::new());
        }
    }
    // NOTE: mirrors the module loop above — keep the two in sync.
    for td in &ctx.type_defs {
        sections.push(toplevel::emit_type_def(td));
        if toplevel::is_recursive_type_def(td) {
            sections.push(toplevel::emit_recursive_decidable_eq(
                toplevel::type_def_name(td),
            ));
            if let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types) {
                sections.push(measure);
            }
        }
        sections.push(String::new());
    }

    emit_pure_functions_proof(ctx, &plans, &recursive_names, &mut sections);

    for item in &ctx.items {
        if let TopLevel::Decision(db) = item {
            sections.push(toplevel::emit_decision(db));
            sections.push(String::new());
        }
    }

    // Verify blocks sharing a counter key continue their case numbering
    // across blocks.
    let mut verify_case_counters: HashMap<String, usize> = HashMap::new();
    for item in &ctx.items {
        if let TopLevel::Verify(vb) = item {
            let key = verify_counter_key(vb);
            let start_idx = *verify_case_counters.get(&key).unwrap_or(&0);
            let (emitted, next_idx) = toplevel::emit_verify_block(vb, ctx, verify_mode, start_idx);
            verify_case_counters.insert(key, next_idx);
            sections.push(emitted);
            sections.push(String::new());
        }
    }

    // Assemble the Lean source plus the Lake project scaffolding.
    let lean_body = sections.join("\n");
    let prelude = generate_prelude_for_body(&lean_body, false);
    let lean_source = format!("{prelude}\n\n{lean_body}");
    let project_name = lean_project_name(ctx);
    let lakefile = generate_lakefile(&project_name);
    let toolchain = generate_toolchain();
    ProjectOutput {
        files: vec![
            ("lakefile.lean".to_string(), lakefile),
            ("lean-toolchain".to_string(), toolchain),
            (format!("{}.lean", project_name), lean_source),
        ],
    }
}
1910
/// Transpiles the whole context into a Lean 4 Lake project: type
/// definitions (with `DecidableEq` for recursive types), pure functions,
/// decisions, and verify blocks emitted per `verify_mode`, all prefixed by
/// a prelude generated from what the body actually uses.
pub fn transpile_with_verify_mode(
    ctx: &CodegenContext,
    verify_mode: VerifyEmitMode,
) -> ProjectOutput {
    let mut sections = Vec::new();

    let recursive_fns = call_graph::find_recursive_fns(&ctx.items);

    // Type definitions: module-level first, then top-level.
    for module in &ctx.modules {
        for td in &module.type_defs {
            sections.push(toplevel::emit_type_def(td));
            if toplevel::is_recursive_type_def(td) {
                sections.push(toplevel::emit_recursive_decidable_eq(
                    toplevel::type_def_name(td),
                ));
            }
            sections.push(String::new());
        }
    }

    // NOTE: mirrors the module loop above — keep the two in sync.
    for td in &ctx.type_defs {
        sections.push(toplevel::emit_type_def(td));
        if toplevel::is_recursive_type_def(td) {
            sections.push(toplevel::emit_recursive_decidable_eq(
                toplevel::type_def_name(td),
            ));
        }
        sections.push(String::new());
    }

    emit_pure_functions(ctx, &recursive_fns, &mut sections);

    for item in &ctx.items {
        if let TopLevel::Decision(db) = item {
            sections.push(toplevel::emit_decision(db));
            sections.push(String::new());
        }
    }

    // Verify blocks sharing a counter key continue their case numbering
    // across blocks.
    let mut verify_case_counters: HashMap<String, usize> = HashMap::new();
    for item in &ctx.items {
        if let TopLevel::Verify(vb) = item {
            let key = verify_counter_key(vb);
            let start_idx = *verify_case_counters.get(&key).unwrap_or(&0);
            let (emitted, next_idx) = toplevel::emit_verify_block(vb, ctx, verify_mode, start_idx);
            verify_case_counters.insert(key, next_idx);
            sections.push(emitted);
            sections.push(String::new());
        }
    }

    // Assemble the Lean source plus the Lake project scaffolding.
    let lean_body = sections.join("\n");
    let prelude = generate_prelude_for_body(&lean_body, false);
    let lean_source = format!("{prelude}\n\n{lean_body}");

    let project_name = lean_project_name(ctx);
    let lakefile = generate_lakefile(&project_name);
    let toolchain = generate_toolchain();

    ProjectOutput {
        files: vec![
            ("lakefile.lean".to_string(), lakefile),
            ("lean-toolchain".to_string(), toolchain),
            (format!("{}.lean", project_name), lean_source),
        ],
    }
}
1993
1994fn emit_pure_functions(
1995 ctx: &CodegenContext,
1996 recursive_fns: &HashSet<String>,
1997 sections: &mut Vec<String>,
1998) {
1999 let all_fns: Vec<&FnDef> = ctx
2000 .modules
2001 .iter()
2002 .flat_map(|m| m.fn_defs.iter())
2003 .chain(ctx.fn_defs.iter())
2004 .filter(|fd| toplevel::is_pure_fn(fd))
2005 .collect();
2006 if all_fns.is_empty() {
2007 return;
2008 }
2009
2010 let components = call_graph::ordered_fn_components(&all_fns, &ctx.module_prefixes);
2011 for fns in components {
2012 if fns.is_empty() {
2013 continue;
2014 }
2015
2016 if fns.len() > 1 {
2018 sections.push(toplevel::emit_mutual_group(&fns, ctx));
2019 sections.push(String::new());
2020 continue;
2021 }
2022
2023 if let Some(code) = toplevel::emit_fn_def(fns[0], recursive_fns, ctx) {
2026 sections.push(code);
2027 sections.push(String::new());
2028 }
2029 }
2030}
2031
2032fn emit_pure_functions_proof(
2033 ctx: &CodegenContext,
2034 plans: &HashMap<String, RecursionPlan>,
2035 recursive_names: &HashSet<String>,
2036 sections: &mut Vec<String>,
2037) {
2038 let all_fns: Vec<&FnDef> = pure_fns(ctx);
2039 if all_fns.is_empty() {
2040 return;
2041 }
2042
2043 let components = call_graph::ordered_fn_components(&all_fns, &ctx.module_prefixes);
2044 for fns in components {
2045 if fns.is_empty() {
2046 continue;
2047 }
2048 if fns.len() > 1 {
2049 let all_supported = fns.iter().all(|fd| plans.contains_key(&fd.name));
2050 if all_supported {
2051 sections.push(toplevel::emit_mutual_group_proof(&fns, ctx, plans));
2052 } else {
2053 sections.push(toplevel::emit_mutual_group(&fns, ctx));
2054 }
2055 sections.push(String::new());
2056 continue;
2057 }
2058
2059 let fd = fns[0];
2060 let is_recursive = recursive_names.contains(&fd.name);
2061 let emitted = if is_recursive && !plans.contains_key(&fd.name) {
2062 toplevel::emit_fn_def(fd, recursive_names, ctx)
2063 } else {
2064 toplevel::emit_fn_def_proof(fd, plans.get(&fd.name).cloned(), ctx)
2065 };
2066 if let Some(code) = emitted {
2067 sections.push(code);
2068 sections.push(String::new());
2069 }
2070 }
2071}
2072
#[cfg(test)]
fn generate_prelude() -> String {
    // Test-only helper: empty body + include_all_helpers=true forces every
    // optional prelude section to be emitted so tests can assert on all of them.
    generate_prelude_for_body("", true)
}
2077
2078fn generate_prelude_for_body(body: &str, include_all_helpers: bool) -> String {
2079 let mut parts = vec![
2080 LEAN_PRELUDE_HEADER.to_string(),
2081 LEAN_PRELUDE_FLOAT_COE.to_string(),
2082 LEAN_PRELUDE_FLOAT_DEC_EQ.to_string(),
2083 LEAN_PRELUDE_EXCEPT_DEC_EQ.to_string(),
2084 LEAN_PRELUDE_EXCEPT_NS.to_string(),
2085 LEAN_PRELUDE_OPTION_TO_EXCEPT.to_string(),
2086 LEAN_PRELUDE_STRING_HADD.to_string(),
2087 ];
2088
2089 if include_all_helpers || needs_builtin_named_type(body, "Header") {
2090 parts.push(LEAN_PRELUDE_HEADER_TYPE.to_string());
2091 }
2092 if include_all_helpers || needs_builtin_named_type(body, "HttpResponse") {
2093 parts.push(LEAN_PRELUDE_HTTP_RESPONSE_TYPE.to_string());
2094 }
2095 if include_all_helpers || needs_builtin_named_type(body, "HttpRequest") {
2096 parts.push(LEAN_PRELUDE_HTTP_REQUEST_TYPE.to_string());
2097 }
2098 if include_all_helpers || needs_builtin_named_type(body, "Tcp_Connection") {
2099 parts.push(LEAN_PRELUDE_TCP_CONNECTION_TYPE.to_string());
2100 }
2101
2102 if include_all_helpers || body.contains("averStringPosFuel") {
2103 parts.push(LEAN_PRELUDE_PROOF_FUEL.to_string());
2104 }
2105
2106 if include_all_helpers || body.contains("AverMeasure.") {
2107 parts.push(LEAN_PRELUDE_AVER_MEASURE.to_string());
2108 }
2109
2110 if include_all_helpers || body.contains("AverMap.") {
2111 parts.push(generate_map_prelude(body, include_all_helpers));
2112 }
2113
2114 parts.extend([
2115 LEAN_PRELUDE_AVER_LIST.to_string(),
2116 LEAN_PRELUDE_STRING_HELPERS.to_string(),
2117 LEAN_PRELUDE_NUMERIC_PARSE.to_string(),
2118 LEAN_PRELUDE_CHAR_BYTE.to_string(),
2119 ]);
2120
2121 parts.join("\n\n")
2122}
2123
// A builtin type's prelude section is needed only when the body uses the name
// as a standalone token and does not declare its own structure/inductive
// version of it.
fn needs_builtin_named_type(body: &str, type_name: &str) -> bool {
    references_named_type(body, type_name) && !declares_named_type(body, type_name)
}
2127
/// Whether `body` contains `type_name` as a standalone identifier token.
///
/// Tokens may contain ASCII alphanumerics, `_`, and `.` (for qualified Lean
/// names), so `MyHeader` and `Foo.Header` do NOT count as references to
/// `Header` — only an exact token match does.
fn references_named_type(body: &str, type_name: &str) -> bool {
    body.split(|ch: char| !(ch.is_ascii_alphanumeric() || ch == '_' || ch == '.'))
        .any(|token| token == type_name)
}
2142
/// Whether `body` itself declares `type_name`: some line, after stripping
/// leading indentation, is exactly `structure <name> where` or
/// `inductive <name> where`.
fn declares_named_type(body: &str, type_name: &str) -> bool {
    let declarations = [
        format!("structure {type_name} where"),
        format!("inductive {type_name} where"),
    ];
    body.lines()
        .any(|line| declarations.iter().any(|decl| line.trim_start() == *decl))
}
2150
2151fn generate_map_prelude(body: &str, include_all_helpers: bool) -> String {
2152 let mut parts = vec![AVER_MAP_PRELUDE_BASE.to_string()];
2153
2154 let needs_has_set_self = include_all_helpers || body.contains("AverMap.has_set_self");
2155 let needs_get_set_self = include_all_helpers || body.contains("AverMap.get_set_self");
2156 let needs_get_set_other = include_all_helpers
2157 || body.contains("AverMap.get_set_other")
2158 || body.contains("AverMap.has_set_other");
2159 let needs_has_set_other = include_all_helpers || body.contains("AverMap.has_set_other");
2160
2161 if needs_has_set_self {
2162 parts.push(AVER_MAP_PRELUDE_HAS_SET_SELF.to_string());
2163 }
2164 if needs_get_set_self {
2165 parts.push(AVER_MAP_PRELUDE_GET_SET_SELF.to_string());
2166 }
2167 if needs_get_set_other {
2168 parts.push(AVER_MAP_PRELUDE_GET_SET_OTHER.to_string());
2169 }
2170 if needs_has_set_other {
2171 parts.push(AVER_MAP_PRELUDE_HAS_SET_OTHER.to_string());
2172 }
2173
2174 parts.push(AVER_MAP_PRELUDE_END.to_string());
2175 parts.join("\n\n")
2176}
2177
/// Render the `lakefile.lean` for the generated project. The package name is
/// emitted lowercased while the library target keeps the original casing.
fn generate_lakefile(project_name: &str) -> String {
    let package = project_name.to_lowercase();
    format!(
        r#"import Lake
open Lake DSL

package «{package}» where
  version := v!"0.1.0"

@[default_target]
lean_lib «{project_name}» where
  srcDir := "."
"#
    )
}
2194
/// Contents of the `lean-toolchain` file: the pinned Lean 4 version used by
/// all generated projects.
fn generate_toolchain() -> String {
    String::from("leanprover/lean4:v4.15.0\n")
}
2198
/// Uppercase the first character of `s` (which may expand to several
/// characters, e.g. 'ß' -> "SS") and keep the remainder unchanged. Returns an
/// empty string for empty input.
fn capitalize_first(s: &str) -> String {
    let mut rest = s.chars();
    rest.next()
        .map(|head| head.to_uppercase().chain(rest).collect())
        .unwrap_or_default()
}
2206
2207#[cfg(test)]
2208mod tests {
2209 use super::{
2210 VerifyEmitMode, generate_prelude, proof_mode_issues, recurrence, transpile,
2211 transpile_for_proof_mode, transpile_with_verify_mode,
2212 };
2213 use crate::ast::{
2214 BinOp, Expr, FnBody, FnDef, Literal, MatchArm, Pattern, Stmt, TopLevel, TypeDef,
2215 TypeVariant, VerifyBlock, VerifyGiven, VerifyGivenDomain, VerifyKind, VerifyLaw,
2216 };
2217 use crate::codegen::{CodegenContext, build_context};
2218 use crate::source::parse_source;
2219 use crate::tco;
2220 use crate::types::checker::run_type_check_full;
2221 use std::collections::{HashMap, HashSet};
2222 use std::rc::Rc;
2223
    // Bare CodegenContext with no items. The project name "verify_mode" makes
    // the generated Lean file `Verify_mode.lean`, which the tests look up.
    fn empty_ctx() -> CodegenContext {
        CodegenContext {
            items: vec![],
            fn_sigs: HashMap::new(),
            memo_fns: HashSet::new(),
            memo_safe_types: HashSet::new(),
            type_defs: vec![],
            fn_defs: vec![],
            project_name: "verify_mode".to_string(),
            modules: vec![],
            module_prefixes: HashSet::new(),
            policy: None,
            emit_replay_runtime: false,
            runtime_policy_from_env: false,
            guest_entry: None,
            emit_self_host_support: false,
            extra_fn_defs: Vec::new(),
            mutual_tco_members: HashSet::new(),
        }
    }
2244
    // Parse, TCO-transform, and type-check Aver source, then build a
    // CodegenContext. Panics if parsing or type-checking fails.
    fn ctx_from_source(source: &str, project_name: &str) -> CodegenContext {
        let mut items = parse_source(source).expect("source should parse");
        tco::transform_program(&mut items);
        let tc = run_type_check_full(&items, None);
        assert!(
            tc.errors.is_empty(),
            "source should typecheck without errors: {:?}",
            tc.errors
        );
        build_context(items, &tc, HashSet::new(), project_name.to_string(), vec![])
    }
2256
    // Find the generated Lean source in a ProjectOutput, skipping
    // `lakefile.lean` (which also ends in `.lean`).
    fn generated_lean_file(out: &crate::codegen::ProjectOutput) -> &str {
        out.files
            .iter()
            .find_map(|(name, content)| {
                (name.ends_with(".lean") && name != "lakefile.lean").then_some(content.as_str())
            })
            .expect("expected generated Lean file")
    }
2265
    // Context with one `verify` cases block asserting `1 => 1` for a function
    // `f` that is never defined — enough to exercise verify emission.
    fn empty_ctx_with_verify_case() -> CodegenContext {
        let mut ctx = empty_ctx();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 1,
            cases: vec![(
                Expr::Literal(Literal::Int(1)),
                Expr::Literal(Literal::Int(1)),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Cases,
        }));
        ctx
    }
2281
    // Two separate verify blocks for the same function `f`; used to check
    // that theorem-skeleton numbering continues across blocks.
    fn empty_ctx_with_two_verify_blocks_same_fn() -> CodegenContext {
        let mut ctx = empty_ctx();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 1,
            cases: vec![(
                Expr::Literal(Literal::Int(1)),
                Expr::Literal(Literal::Int(1)),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Cases,
        }));
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 2,
            cases: vec![(
                Expr::Literal(Literal::Int(2)),
                Expr::Literal(Literal::Int(2)),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Cases,
        }));
        ctx
    }
2308
    // Context with `add(a, b) = a + b` plus a `commutative` law verify block
    // carrying two sampled cases (add 1 2 = add 2 1, add 2 3 = add 3 2) and
    // two givens: `a` over the range 1..2 and `b` over the explicit list [2, 3].
    fn empty_ctx_with_verify_law() -> CodegenContext {
        let mut ctx = empty_ctx();
        // add(a: Int, b: Int) -> Int = a + b
        let add = FnDef {
            name: "add".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::BinOp(
                BinOp::Add,
                Box::new(Expr::Ident("a".to_string())),
                Box::new(Expr::Ident("b".to_string())),
            ))),
            resolution: None,
        };
        ctx.fn_defs.push(add.clone());
        ctx.items.push(TopLevel::FnDef(add));
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 1,
            // Sampled cases: add 1 2 = add 2 1, add 2 3 = add 3 2.
            cases: vec![
                (
                    Expr::FnCall(
                        Box::new(Expr::Ident("add".to_string())),
                        vec![
                            Expr::Literal(Literal::Int(1)),
                            Expr::Literal(Literal::Int(2)),
                        ],
                    ),
                    Expr::FnCall(
                        Box::new(Expr::Ident("add".to_string())),
                        vec![
                            Expr::Literal(Literal::Int(2)),
                            Expr::Literal(Literal::Int(1)),
                        ],
                    ),
                ),
                (
                    Expr::FnCall(
                        Box::new(Expr::Ident("add".to_string())),
                        vec![
                            Expr::Literal(Literal::Int(2)),
                            Expr::Literal(Literal::Int(3)),
                        ],
                    ),
                    Expr::FnCall(
                        Box::new(Expr::Ident("add".to_string())),
                        vec![
                            Expr::Literal(Literal::Int(3)),
                            Expr::Literal(Literal::Int(2)),
                        ],
                    ),
                ),
            ],
            case_spans: vec![],
            case_givens: vec![],
            // Universal statement: ∀ a b, add a b = add b a.
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "commutative".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![
                            Expr::Literal(Literal::Int(2)),
                            Expr::Literal(Literal::Int(3)),
                        ]),
                    },
                ],
                when: None,
                lhs: Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![Expr::Ident("a".to_string()), Expr::Ident("b".to_string())],
                ),
                rhs: Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![Expr::Ident("b".to_string()), Expr::Ident("a".to_string())],
                ),
                sample_guards: vec![],
            })),
        }));
        ctx
    }
2400
    // The prelude must define normalizeFloatString and route String.fromFloat
    // through it so Lean's float formatting is normalized.
    #[test]
    fn prelude_normalizes_float_string_format() {
        let prelude = generate_prelude();
        assert!(
            prelude.contains("private def normalizeFloatString (s : String) : String :="),
            "missing normalizeFloatString helper in prelude"
        );
        assert!(
            prelude.contains(
                "def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)"
            ),
            "String.fromFloat should normalize Lean float formatting"
        );
    }
2415
    // Char.fromCode in the prelude must reject code points above the Unicode
    // maximum (0x10FFFF = 1114111) and the surrogate range (0xD800-0xDFFF).
    #[test]
    fn prelude_validates_char_from_code_unicode_bounds() {
        let prelude = generate_prelude();
        assert!(
            prelude.contains("if n < 0 || n > 1114111 then none"),
            "Char.fromCode should reject code points above Unicode max"
        );
        assert!(
            prelude.contains("else if n >= 55296 && n <= 57343 then none"),
            "Char.fromCode should reject surrogate code points"
        );
    }
2428
    // With include_all_helpers, the prelude carries the AverMap set lemmas.
    #[test]
    fn prelude_includes_map_set_helper_lemmas() {
        let prelude = generate_prelude();
        assert!(
            prelude.contains("theorem has_set_self [DecidableEq α]"),
            "missing AverMap.has_set_self helper theorem"
        );
        assert!(
            prelude.contains("theorem get_set_self [DecidableEq α]"),
            "missing AverMap.get_set_self helper theorem"
        );
    }
2441
    // A program that never uses maps must not pull the AverMap namespace into
    // its generated prelude (prelude sections are body-dependent).
    #[test]
    fn lean_output_without_map_usage_omits_map_prelude() {
        let ctx = ctx_from_source(
            r#"
module NoMap
    intent = "Simple pure program without maps."

fn addOne(n: Int) -> Int
    n + 1

verify addOne
    addOne(1) => 2
"#,
            "nomap",
        );
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        assert!(
            !lean.contains("namespace AverMap"),
            "did not expect AverMap prelude in program without map usage:\n{}",
            lean
        );
    }
2466
    // Default transpile path emits `by native_decide` for verify cases.
    #[test]
    fn transpile_emits_native_decide_for_verify_by_default() {
        let out = transpile(&empty_ctx_with_verify_case());
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("example : 1 = 1 := by native_decide"));
    }
2477
    // VerifyEmitMode::Sorry swaps the proof tactic to `sorry`.
    #[test]
    fn transpile_can_emit_sorry_for_verify_when_requested() {
        let out = transpile_with_verify_mode(&empty_ctx_with_verify_case(), VerifyEmitMode::Sorry);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("example : 1 = 1 := by sorry"));
    }
2488
    // TheoremSkeleton mode emits a named theorem per case with a sorry body.
    #[test]
    fn transpile_can_emit_theorem_skeletons_for_verify() {
        let out = transpile_with_verify_mode(
            &empty_ctx_with_verify_case(),
            VerifyEmitMode::TheoremSkeleton,
        );
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
        assert!(lean.contains("  sorry"));
    }
2503
    // Two verify blocks for the same function share one counter: the second
    // block's case becomes f_verify_2, not a second f_verify_1.
    #[test]
    fn theorem_skeleton_numbering_is_global_per_function_across_verify_blocks() {
        let out = transpile_with_verify_mode(
            &empty_ctx_with_two_verify_blocks_same_fn(),
            VerifyEmitMode::TheoremSkeleton,
        );
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
        assert!(lean.contains("theorem f_verify_2 : 2 = 2 := by"));
    }
2518
    // A verify law produces a universal named theorem (proved via simp with
    // Int.add_comm for the add wrapper) plus one native_decide sample theorem
    // per sampled case, and comment lines documenting the law and givens.
    #[test]
    fn transpile_emits_named_theorems_for_verify_law() {
        let out = transpile(&empty_ctx_with_verify_law());
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("-- verify law add.commutative (2 cases)"));
        assert!(lean.contains("-- given a: Int = 1..2"));
        assert!(lean.contains("-- given b: Int = [2, 3]"));
        assert!(lean.contains(
            "theorem add_law_commutative : ∀ (a : Int) (b : Int), add a b = add b a := by"
        ));
        assert!(lean.contains("  intro a b"));
        assert!(lean.contains("  simp [add, Int.add_comm]"));
        assert!(lean.contains(
            "theorem add_law_commutative_sample_1 : add 1 2 = add 2 1 := by native_decide"
        ));
        assert!(lean.contains(
            "theorem add_law_commutative_sample_2 : add 2 3 = add 3 2 := by native_decide"
        ));
    }
2542
    // Spot-checks assorted prelude theorems and helpers that proof mode
    // relies on (Int/String round-trips, split helpers, digit lemmas).
    #[test]
    fn generate_prelude_emits_int_roundtrip_theorem() {
        let lean = generate_prelude();
        assert!(lean.contains(
            "theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n"
        ));
        assert!(lean.contains("theorem String.intercalate_empty_chars (s : String) :"));
        assert!(lean.contains("def splitOnCharGo"));
        assert!(lean.contains("theorem split_single_char_append"));
        assert!(lean.contains("theorem split_intercalate_trailing_single_char"));
        assert!(lean.contains("namespace AverDigits"));
        assert!(lean.contains("theorem String.charAt_length_none (s : String)"));
        assert!(lean.contains("theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->"));
    }
2557
    // A law with a `when` clause emits the guard as a hypothesis in both the
    // universal theorem and every per-sample theorem.
    #[test]
    fn transpile_emits_guarded_theorems_for_verify_law_when_clause() {
        let ctx = ctx_from_source(
            r#"
module GuardedLaw
    intent =
        "verify law with precondition"

fn pickGreater(a: Int, b: Int) -> Int
    match a > b
        true -> a
        false -> b

verify pickGreater law ordered
    given a: Int = [1, 2]
    given b: Int = [1, 2]
    when a > b
    pickGreater(a, b) => a
"#,
            "guarded_law",
        );
        let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- when (a > b)"));
        assert!(lean.contains(
            "theorem pickGreater_law_ordered : ∀ (a : Int) (b : Int), a = 1 ∨ a = 2 -> b = 1 ∨ b = 2 -> (a > b) = true -> pickGreater a b = a := by"
        ));
        assert!(lean.contains(
            "theorem pickGreater_law_ordered_sample_1 : (1 > 1) = true -> pickGreater 1 1 = 1 := by"
        ));
        assert!(lean.contains(
            "theorem pickGreater_law_ordered_sample_4 : (2 > 2) = true -> pickGreater 2 2 = 2 := by"
        ));
    }
2593
    // A law named `<fn>Spec` whose shape is canonical (fn(x) => fnSpec(x))
    // gets the `<fn>_eq_<fn>Spec` theorem family instead of the generic
    // `<fn>_law_<name>` naming.
    #[test]
    fn transpile_uses_spec_theorem_names_for_declared_spec_laws() {
        let ctx = ctx_from_source(
            r#"
module SpecDemo
    intent =
        "spec demo"

fn absVal(x: Int) -> Int
    match x < 0
        true -> 0 - x
        false -> x

fn absValSpec(x: Int) -> Int
    match x < 0
        true -> 0 - x
        false -> x

verify absVal law absValSpec
    given x: Int = [-2, -1, 0, 1, 2]
    absVal(x) => absValSpec(x)
"#,
            "spec_demo",
        );
        let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- verify law absVal.spec absValSpec (5 cases)"));
        assert!(
            lean.contains(
                "theorem absVal_eq_absValSpec : ∀ (x : Int), absVal x = absValSpec x := by"
            )
        );
        assert!(lean.contains("theorem absVal_eq_absValSpec_checked_domain :"));
        assert!(lean.contains("theorem absVal_eq_absValSpec_sample_1 :"));
        assert!(!lean.contains("theorem absVal_law_absValSpec :"));
    }
2631
    // A `<fn>Spec` law whose shape is NOT canonical (extra argument on the
    // spec call) keeps the generic `<fn>_law_<name>` naming.
    #[test]
    fn transpile_keeps_noncanonical_spec_laws_as_regular_law_names() {
        let ctx = ctx_from_source(
            r#"
module SpecLawShape
    intent =
        "shape probe"

fn foo(x: Int) -> Int
    x + 1

fn fooSpec(seed: Int, x: Int) -> Int
    x + seed

verify foo law fooSpec
    given x: Int = [1, 2]
    foo(x) => fooSpec(1, x)
"#,
            "spec_law_shape",
        );
        let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- verify law foo.fooSpec (2 cases)"));
        assert!(lean.contains("theorem foo_law_fooSpec : ∀ (x : Int), foo x = fooSpec 1 x := by"));
        assert!(!lean.contains("theorem foo_eq_fooSpec :"));
    }
2659
    // Auto mode proves a linear-arithmetic canonical spec law with
    // change + omega instead of leaving the universal theorem out.
    #[test]
    fn transpile_auto_proves_linear_int_canonical_spec_law_in_auto_mode() {
        let ctx = ctx_from_source(
            r#"
module SpecGap
    intent =
        "nontrivial canonical spec law"

fn inc(x: Int) -> Int
    x + 1

fn incSpec(x: Int) -> Int
    x + 2 - 1

verify inc law incSpec
    given x: Int = [0, 1, 2]
    inc(x) => incSpec(x)
"#,
            "spec_gap",
        );
        let out = transpile(&ctx);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- verify law inc.spec incSpec (3 cases)"));
        assert!(lean.contains("theorem inc_eq_incSpec : ∀ (x : Int), inc x = incSpec x := by"));
        assert!(lean.contains("change (x + 1) = ((x + 2) - 1)"));
        assert!(lean.contains("omega"));
        assert!(!lean.contains(
            "-- universal theorem inc_eq_incSpec omitted: sampled law shape is not auto-proved yet"
        ));
        assert!(lean.contains("theorem inc_eq_incSpec_checked_domain :"));
    }
2692
    // Auto mode proves a guarded canonical spec law directly via simpa,
    // without case-splitting on the sampled domain hypothesis.
    #[test]
    fn transpile_auto_proves_guarded_canonical_spec_law_in_auto_mode() {
        let ctx = ctx_from_source(
            r#"
module GuardedSpecGap
    intent =
        "guarded canonical spec law"

fn clampNonNegative(x: Int) -> Int
    match x < 0
        true -> 0
        false -> x

fn clampNonNegativeSpec(x: Int) -> Int
    match x < 0
        true -> 0
        false -> x

verify clampNonNegative law clampNonNegativeSpec
    given x: Int = [-2, -1, 0, 1, 2]
    when x >= 0
    clampNonNegative(x) => clampNonNegativeSpec(x)
"#,
            "guarded_spec_gap",
        );
        let out = transpile(&ctx);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- when (x >= 0)"));
        assert!(lean.contains(
            "theorem clampNonNegative_eq_clampNonNegativeSpec : ∀ (x : Int), x = (-2) ∨ x = (-1) ∨ x = 0 ∨ x = 1 ∨ x = 2 -> (x >= 0) = true -> clampNonNegative x = clampNonNegativeSpec x := by"
        ));
        assert!(lean.contains("intro x h_x h_when"));
        assert!(lean.contains("simpa [clampNonNegative, clampNonNegativeSpec]"));
        assert!(!lean.contains(
            "-- universal theorem clampNonNegative_eq_clampNonNegativeSpec omitted: sampled law shape is not auto-proved yet"
        ));
        assert!(!lean.contains("cases h_x"));
    }
2732
    // A nonlinear canonical spec law whose sides simp-normalize to the same
    // term is auto-proved by unfolding both definitions with simp.
    #[test]
    fn transpile_auto_proves_simp_normalized_canonical_spec_law_in_auto_mode() {
        let ctx = ctx_from_source(
            r#"
module SpecGapNonlinear
    intent =
        "nonlinear canonical spec law"

fn square(x: Int) -> Int
    x * x

fn squareSpec(x: Int) -> Int
    x * x + 0

verify square law squareSpec
    given x: Int = [0, 1, 2]
    square(x) => squareSpec(x)
"#,
            "spec_gap_nonlinear",
        );
        let out = transpile(&ctx);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- verify law square.spec squareSpec (3 cases)"));
        assert!(
            lean.contains(
                "theorem square_eq_squareSpec : ∀ (x : Int), square x = squareSpec x := by"
            )
        );
        assert!(lean.contains("simp [square, squareSpec]"));
        assert!(!lean.contains(
            "-- universal theorem square_eq_squareSpec omitted: sampled law shape is not auto-proved yet"
        ));
        assert!(lean.contains("theorem square_eq_squareSpec_checked_domain :"));
        assert!(lean.contains("theorem square_eq_squareSpec_sample_1 :"));
    }
2769
    // A law whose lhs and rhs are the same variable (x => x) is discharged
    // with plain `rfl` after introducing the binder.
    #[test]
    fn transpile_auto_proves_reflexive_law_with_rfl() {
        let mut ctx = empty_ctx();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "idLaw".to_string(),
            line: 1,
            cases: vec![(
                Expr::Literal(Literal::Int(1)),
                Expr::Literal(Literal::Int(1)),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "reflexive".to_string(),
                givens: vec![VerifyGiven {
                    name: "x".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
                }],
                when: None,
                lhs: Expr::Ident("x".to_string()),
                rhs: Expr::Ident("x".to_string()),
                sample_guards: vec![],
            })),
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem idLaw_law_reflexive : ∀ (x : Int), x = x := by"));
        assert!(lean.contains("  intro x"));
        assert!(lean.contains("  rfl"));
    }
2805
    // An identity law (add a 0 = a) over the Int add wrapper is auto-proved
    // by simp-unfolding the wrapper definition.
    #[test]
    fn transpile_auto_proves_identity_law_for_int_add_wrapper() {
        let mut ctx = empty_ctx_with_verify_law();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 10,
            // Sampled case: add 1 0 = 1.
            cases: vec![(
                Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![
                        Expr::Literal(Literal::Int(1)),
                        Expr::Literal(Literal::Int(0)),
                    ],
                ),
                Expr::Literal(Literal::Int(1)),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "identityZero".to_string(),
                givens: vec![VerifyGiven {
                    name: "a".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![
                        Expr::Literal(Literal::Int(0)),
                        Expr::Literal(Literal::Int(1)),
                    ]),
                }],
                when: None,
                lhs: Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![Expr::Ident("a".to_string()), Expr::Literal(Literal::Int(0))],
                ),
                rhs: Expr::Ident("a".to_string()),
                sample_guards: vec![],
            })),
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem add_law_identityZero : ∀ (a : Int), add a 0 = a := by"));
        assert!(lean.contains("  intro a"));
        assert!(lean.contains("  simp [add]"));
    }
2853
    // An associativity law (add (add a b) c = add a (add b c)) over the Int
    // add wrapper is auto-proved via simp with Int.add_assoc.
    #[test]
    fn transpile_auto_proves_associative_law_for_int_add_wrapper() {
        let mut ctx = empty_ctx_with_verify_law();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 20,
            // Sampled case: add (add 1 2) 3 = add 1 (add 2 3).
            cases: vec![(
                Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![
                        Expr::FnCall(
                            Box::new(Expr::Ident("add".to_string())),
                            vec![
                                Expr::Literal(Literal::Int(1)),
                                Expr::Literal(Literal::Int(2)),
                            ],
                        ),
                        Expr::Literal(Literal::Int(3)),
                    ],
                ),
                Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![
                        Expr::Literal(Literal::Int(1)),
                        Expr::FnCall(
                            Box::new(Expr::Ident("add".to_string())),
                            vec![
                                Expr::Literal(Literal::Int(2)),
                                Expr::Literal(Literal::Int(3)),
                            ],
                        ),
                    ],
                ),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "associative".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(1))]),
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(2))]),
                    },
                    VerifyGiven {
                        name: "c".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(3))]),
                    },
                ],
                when: None,
                lhs: Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![
                        Expr::FnCall(
                            Box::new(Expr::Ident("add".to_string())),
                            vec![Expr::Ident("a".to_string()), Expr::Ident("b".to_string())],
                        ),
                        Expr::Ident("c".to_string()),
                    ],
                ),
                rhs: Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![
                        Expr::Ident("a".to_string()),
                        Expr::FnCall(
                            Box::new(Expr::Ident("add".to_string())),
                            vec![Expr::Ident("b".to_string()), Expr::Ident("c".to_string())],
                        ),
                    ],
                ),
                sample_guards: vec![],
            })),
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains(
            "theorem add_law_associative : ∀ (a : Int) (b : Int) (c : Int), add (add a b) c = add a (add b c) := by"
        ));
        assert!(lean.contains("  intro a b c"));
        assert!(lean.contains("  simp [add, Int.add_assoc]"));
    }
2945
    // Covers two subtraction laws: right identity (sub a 0 = a, via simp) and
    // anti-commutativity (sub a b = -(sub b a), via Int.neg_sub).
    #[test]
    fn transpile_auto_proves_sub_laws() {
        let mut ctx = empty_ctx();
        // sub(a: Int, b: Int) -> Int = a - b
        let sub = FnDef {
            name: "sub".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::BinOp(
                BinOp::Sub,
                Box::new(Expr::Ident("a".to_string())),
                Box::new(Expr::Ident("b".to_string())),
            ))),
            resolution: None,
        };
        ctx.fn_defs.push(sub.clone());
        ctx.items.push(TopLevel::FnDef(sub));

        // Law rightIdentity: sub(a, 0) => a, sampled at a = 2.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "sub".to_string(),
            line: 10,
            cases: vec![(
                Expr::FnCall(
                    Box::new(Expr::Ident("sub".to_string())),
                    vec![
                        Expr::Literal(Literal::Int(2)),
                        Expr::Literal(Literal::Int(0)),
                    ],
                ),
                Expr::Literal(Literal::Int(2)),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "rightIdentity".to_string(),
                givens: vec![VerifyGiven {
                    name: "a".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(2))]),
                }],
                when: None,
                lhs: Expr::FnCall(
                    Box::new(Expr::Ident("sub".to_string())),
                    vec![Expr::Ident("a".to_string()), Expr::Literal(Literal::Int(0))],
                ),
                rhs: Expr::Ident("a".to_string()),
                sample_guards: vec![],
            })),
        }));
        // Law antiCommutative: sub(a, b) => 0 - sub(b, a), sampled at (2, 1).
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "sub".to_string(),
            line: 20,
            cases: vec![(
                Expr::FnCall(
                    Box::new(Expr::Ident("sub".to_string())),
                    vec![
                        Expr::Literal(Literal::Int(2)),
                        Expr::Literal(Literal::Int(1)),
                    ],
                ),
                Expr::BinOp(
                    BinOp::Sub,
                    Box::new(Expr::Literal(Literal::Int(0))),
                    Box::new(Expr::FnCall(
                        Box::new(Expr::Ident("sub".to_string())),
                        vec![
                            Expr::Literal(Literal::Int(1)),
                            Expr::Literal(Literal::Int(2)),
                        ],
                    )),
                ),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "antiCommutative".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(2))]),
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(1))]),
                    },
                ],
                when: None,
                lhs: Expr::FnCall(
                    Box::new(Expr::Ident("sub".to_string())),
                    vec![Expr::Ident("a".to_string()), Expr::Ident("b".to_string())],
                ),
                rhs: Expr::BinOp(
                    BinOp::Sub,
                    Box::new(Expr::Literal(Literal::Int(0))),
                    Box::new(Expr::FnCall(
                        Box::new(Expr::Ident("sub".to_string())),
                        vec![Expr::Ident("b".to_string()), Expr::Ident("a".to_string())],
                    )),
                ),
                sample_guards: vec![],
            })),
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem sub_law_rightIdentity : ∀ (a : Int), sub a 0 = a := by"));
        assert!(lean.contains("  simp [sub]"));
        assert!(lean.contains(
            "theorem sub_law_antiCommutative : ∀ (a : Int) (b : Int), sub a b = (-sub b a) := by"
        ));
        assert!(lean.contains("  simpa [sub] using (Int.neg_sub b a).symm"));
    }
3069
3070 #[test]
3071 fn transpile_auto_proves_unary_wrapper_equivalence_law() {
3072 let mut ctx = empty_ctx();
3073 let add = FnDef {
3074 name: "add".to_string(),
3075 line: 1,
3076 params: vec![
3077 ("a".to_string(), "Int".to_string()),
3078 ("b".to_string(), "Int".to_string()),
3079 ],
3080 return_type: "Int".to_string(),
3081 effects: vec![],
3082 desc: None,
3083 body: Rc::new(FnBody::from_expr(Expr::BinOp(
3084 BinOp::Add,
3085 Box::new(Expr::Ident("a".to_string())),
3086 Box::new(Expr::Ident("b".to_string())),
3087 ))),
3088 resolution: None,
3089 };
3090 let add_one = FnDef {
3091 name: "addOne".to_string(),
3092 line: 2,
3093 params: vec![("n".to_string(), "Int".to_string())],
3094 return_type: "Int".to_string(),
3095 effects: vec![],
3096 desc: None,
3097 body: Rc::new(FnBody::from_expr(Expr::BinOp(
3098 BinOp::Add,
3099 Box::new(Expr::Ident("n".to_string())),
3100 Box::new(Expr::Literal(Literal::Int(1))),
3101 ))),
3102 resolution: None,
3103 };
3104 ctx.fn_defs.push(add.clone());
3105 ctx.fn_defs.push(add_one.clone());
3106 ctx.items.push(TopLevel::FnDef(add));
3107 ctx.items.push(TopLevel::FnDef(add_one));
3108 ctx.items.push(TopLevel::Verify(VerifyBlock {
3109 fn_name: "addOne".to_string(),
3110 line: 3,
3111 cases: vec![(
3112 Expr::FnCall(
3113 Box::new(Expr::Ident("addOne".to_string())),
3114 vec![Expr::Literal(Literal::Int(2))],
3115 ),
3116 Expr::FnCall(
3117 Box::new(Expr::Ident("add".to_string())),
3118 vec![
3119 Expr::Literal(Literal::Int(2)),
3120 Expr::Literal(Literal::Int(1)),
3121 ],
3122 ),
3123 )],
3124 case_spans: vec![],
3125 case_givens: vec![],
3126 kind: VerifyKind::Law(Box::new(VerifyLaw {
3127 name: "identityViaAdd".to_string(),
3128 givens: vec![VerifyGiven {
3129 name: "n".to_string(),
3130 type_name: "Int".to_string(),
3131 domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(2))]),
3132 }],
3133 when: None,
3134 lhs: Expr::FnCall(
3135 Box::new(Expr::Ident("addOne".to_string())),
3136 vec![Expr::Ident("n".to_string())],
3137 ),
3138 rhs: Expr::FnCall(
3139 Box::new(Expr::Ident("add".to_string())),
3140 vec![Expr::Ident("n".to_string()), Expr::Literal(Literal::Int(1))],
3141 ),
3142 sample_guards: vec![],
3143 })),
3144 }));
3145 let out = transpile(&ctx);
3146 let lean = out
3147 .files
3148 .iter()
3149 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3150 .expect("expected generated Lean file");
3151 assert!(
3152 lean.contains(
3153 "theorem addOne_law_identityViaAdd : ∀ (n : Int), addOne n = add n 1 := by"
3154 )
3155 );
3156 assert!(lean.contains(" simp [addOne, add]"));
3157 }
3158
    #[test]
    fn transpile_auto_proves_direct_map_set_laws() {
        // Builds two `verify … law` blocks over the builtin Map API and checks
        // that the transpiler discharges them with the dedicated AverMap
        // lemmas (`has_set_self` / `get_set_self`) instead of falling back to
        // sample-only proofs.
        let mut ctx = empty_ctx();

        // Closure helpers building `Map.set` / `Map.has` / `Map.get` /
        // `Option.Some` call expressions, to keep the AST literals readable.
        let map_set = |m: Expr, k: Expr, v: Expr| {
            Expr::FnCall(
                Box::new(Expr::Attr(
                    Box::new(Expr::Ident("Map".to_string())),
                    "set".to_string(),
                )),
                vec![m, k, v],
            )
        };
        let map_has = |m: Expr, k: Expr| {
            Expr::FnCall(
                Box::new(Expr::Attr(
                    Box::new(Expr::Ident("Map".to_string())),
                    "has".to_string(),
                )),
                vec![m, k],
            )
        };
        let map_get = |m: Expr, k: Expr| {
            Expr::FnCall(
                Box::new(Expr::Attr(
                    Box::new(Expr::Ident("Map".to_string())),
                    "get".to_string(),
                )),
                vec![m, k],
            )
        };
        let some = |v: Expr| {
            Expr::FnCall(
                Box::new(Expr::Attr(
                    Box::new(Expr::Ident("Option".to_string())),
                    "Some".to_string(),
                )),
                vec![v],
            )
        };

        // Law 1, `setHasKey`: Map.has(Map.set(m, k, v), k) => true.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "map".to_string(),
            line: 1,
            cases: vec![(
                map_has(
                    map_set(
                        Expr::Ident("m".to_string()),
                        Expr::Ident("k".to_string()),
                        Expr::Ident("v".to_string()),
                    ),
                    Expr::Ident("k".to_string()),
                ),
                Expr::Literal(Literal::Bool(true)),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "setHasKey".to_string(),
                // Sampled at m = Map.empty(), k = "a", v = 1.
                givens: vec![
                    VerifyGiven {
                        name: "m".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::FnCall(
                            Box::new(Expr::Attr(
                                Box::new(Expr::Ident("Map".to_string())),
                                "empty".to_string(),
                            )),
                            vec![],
                        )]),
                    },
                    VerifyGiven {
                        name: "k".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Str(
                            "a".to_string(),
                        ))]),
                    },
                    VerifyGiven {
                        name: "v".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(1))]),
                    },
                ],
                when: None,
                lhs: map_has(
                    map_set(
                        Expr::Ident("m".to_string()),
                        Expr::Ident("k".to_string()),
                        Expr::Ident("v".to_string()),
                    ),
                    Expr::Ident("k".to_string()),
                ),
                rhs: Expr::Literal(Literal::Bool(true)),
                sample_guards: vec![],
            })),
        }));

        // Law 2, `setGetKey`: Map.get(Map.set(m, k, v), k) => Option.Some(v).
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "map".to_string(),
            line: 2,
            cases: vec![(
                map_get(
                    map_set(
                        Expr::Ident("m".to_string()),
                        Expr::Ident("k".to_string()),
                        Expr::Ident("v".to_string()),
                    ),
                    Expr::Ident("k".to_string()),
                ),
                some(Expr::Ident("v".to_string())),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "setGetKey".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "m".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::FnCall(
                            Box::new(Expr::Attr(
                                Box::new(Expr::Ident("Map".to_string())),
                                "empty".to_string(),
                            )),
                            vec![],
                        )]),
                    },
                    VerifyGiven {
                        name: "k".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Str(
                            "a".to_string(),
                        ))]),
                    },
                    VerifyGiven {
                        name: "v".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(1))]),
                    },
                ],
                when: None,
                lhs: map_get(
                    map_set(
                        Expr::Ident("m".to_string()),
                        Expr::Ident("k".to_string()),
                        Expr::Ident("v".to_string()),
                    ),
                    Expr::Ident("k".to_string()),
                ),
                rhs: some(Expr::Ident("v".to_string())),
                sample_guards: vec![],
            })),
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        // Both laws should be closed directly via the AverMap set/get lemmas.
        assert!(lean.contains("simpa using AverMap.has_set_self m k v"));
        assert!(lean.contains("simpa using AverMap.get_set_self m k v"));
    }
3323
3324 #[test]
3325 fn transpile_auto_proves_direct_recursive_sum_law_by_structural_induction() {
3326 let ctx = ctx_from_source(
3327 r#"
3328module Mirror
3329 intent =
3330 "direct recursive sum induction probe"
3331
3332type Tree
3333 Leaf(Int)
3334 Node(Tree, Tree)
3335
3336fn mirror(t: Tree) -> Tree
3337 match t
3338 Tree.Leaf(v) -> Tree.Leaf(v)
3339 Tree.Node(left, right) -> Tree.Node(mirror(right), mirror(left))
3340
3341verify mirror law involutive
3342 given t: Tree = [Tree.Leaf(1), Tree.Node(Tree.Leaf(1), Tree.Leaf(2))]
3343 mirror(mirror(t)) => t
3344"#,
3345 "mirror",
3346 );
3347 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3348 let lean = generated_lean_file(&out);
3349
3350 assert!(
3351 lean.contains(
3352 "theorem mirror_law_involutive : ∀ (t : Tree), mirror (mirror t) = t := by"
3353 )
3354 );
3355 assert!(lean.contains(" induction t with"));
3356 assert!(lean.contains(" | leaf f0 => simp [mirror]"));
3357 assert!(lean.contains(" | node f0 f1 ih0 ih1 => simp_all [mirror]"));
3358 assert!(!lean.contains(
3359 "-- universal theorem mirror_law_involutive omitted: sampled law shape is not auto-proved yet"
3360 ));
3361 }
3362
    #[test]
    fn transpile_auto_proves_map_update_laws() {
        // A realistic map-update function (`incCount`) that reads a key,
        // branches on Option, and re-inserts should still get its laws
        // auto-proved via case analysis on the original lookup plus the
        // AverMap set lemmas.
        let mut ctx = empty_ctx();

        // Closure helpers for builtin Map / Option call expressions.
        let map_get = |m: Expr, k: Expr| {
            Expr::FnCall(
                Box::new(Expr::Attr(
                    Box::new(Expr::Ident("Map".to_string())),
                    "get".to_string(),
                )),
                vec![m, k],
            )
        };
        let map_set = |m: Expr, k: Expr, v: Expr| {
            Expr::FnCall(
                Box::new(Expr::Attr(
                    Box::new(Expr::Ident("Map".to_string())),
                    "set".to_string(),
                )),
                vec![m, k, v],
            )
        };
        let map_has = |m: Expr, k: Expr| {
            Expr::FnCall(
                Box::new(Expr::Attr(
                    Box::new(Expr::Ident("Map".to_string())),
                    "has".to_string(),
                )),
                vec![m, k],
            )
        };
        let option_some = |v: Expr| {
            Expr::FnCall(
                Box::new(Expr::Attr(
                    Box::new(Expr::Ident("Option".to_string())),
                    "Some".to_string(),
                )),
                vec![v],
            )
        };
        let option_with_default = |opt: Expr, def: Expr| {
            Expr::FnCall(
                Box::new(Expr::Attr(
                    Box::new(Expr::Ident("Option".to_string())),
                    "withDefault".to_string(),
                )),
                vec![opt, def],
            )
        };

        // Plain helper `addOne n = n + 1`, referenced by the second law's RHS.
        let add_one = FnDef {
            name: "addOne".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::BinOp(
                BinOp::Add,
                Box::new(Expr::Ident("n".to_string())),
                Box::new(Expr::Literal(Literal::Int(1))),
            ))),
            resolution: None,
        };
        ctx.fn_defs.push(add_one.clone());
        ctx.items.push(TopLevel::FnDef(add_one));

        // `incCount counts word`: binds `current = Map.get counts word`, then
        // matches — Some(n) re-inserts n + 1, None inserts 1.
        let inc_count = FnDef {
            name: "incCount".to_string(),
            line: 2,
            params: vec![
                ("counts".to_string(), "Map<String, Int>".to_string()),
                ("word".to_string(), "String".to_string()),
            ],
            return_type: "Map<String, Int>".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::Block(vec![
                Stmt::Binding(
                    "current".to_string(),
                    None,
                    map_get(
                        Expr::Ident("counts".to_string()),
                        Expr::Ident("word".to_string()),
                    ),
                ),
                Stmt::Expr(Expr::Match {
                    subject: Box::new(Expr::Ident("current".to_string())),
                    arms: vec![
                        MatchArm {
                            pattern: Pattern::Constructor(
                                "Option.Some".to_string(),
                                vec!["n".to_string()],
                            ),
                            body: Box::new(map_set(
                                Expr::Ident("counts".to_string()),
                                Expr::Ident("word".to_string()),
                                Expr::BinOp(
                                    BinOp::Add,
                                    Box::new(Expr::Ident("n".to_string())),
                                    Box::new(Expr::Literal(Literal::Int(1))),
                                ),
                            )),
                        },
                        MatchArm {
                            pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
                            body: Box::new(map_set(
                                Expr::Ident("counts".to_string()),
                                Expr::Ident("word".to_string()),
                                Expr::Literal(Literal::Int(1)),
                            )),
                        },
                    ],
                    line: 2,
                }),
            ])),
            resolution: None,
        };
        ctx.fn_defs.push(inc_count.clone());
        ctx.items.push(TopLevel::FnDef(inc_count));

        // Law 1, `keyPresent`: Map.has(incCount(counts, word), word) => true.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "incCount".to_string(),
            line: 10,
            cases: vec![(
                map_has(
                    Expr::FnCall(
                        Box::new(Expr::Ident("incCount".to_string())),
                        vec![
                            Expr::Ident("counts".to_string()),
                            Expr::Ident("word".to_string()),
                        ],
                    ),
                    Expr::Ident("word".to_string()),
                ),
                Expr::Literal(Literal::Bool(true)),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "keyPresent".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "counts".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::FnCall(
                            Box::new(Expr::Attr(
                                Box::new(Expr::Ident("Map".to_string())),
                                "empty".to_string(),
                            )),
                            vec![],
                        )]),
                    },
                    VerifyGiven {
                        name: "word".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Str(
                            "a".to_string(),
                        ))]),
                    },
                ],
                when: None,
                lhs: map_has(
                    Expr::FnCall(
                        Box::new(Expr::Ident("incCount".to_string())),
                        vec![
                            Expr::Ident("counts".to_string()),
                            Expr::Ident("word".to_string()),
                        ],
                    ),
                    Expr::Ident("word".to_string()),
                ),
                rhs: Expr::Literal(Literal::Bool(true)),
                sample_guards: vec![],
            })),
        }));

        // Law 2, `existingKeyIncrements`:
        // Map.get(incCount(counts, "a"), "a")
        //   => Option.Some(addOne(Option.withDefault(Map.get(counts, "a"), 0))).
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "incCount".to_string(),
            line: 20,
            cases: vec![(
                map_get(
                    Expr::FnCall(
                        Box::new(Expr::Ident("incCount".to_string())),
                        vec![
                            Expr::Ident("counts".to_string()),
                            Expr::Literal(Literal::Str("a".to_string())),
                        ],
                    ),
                    Expr::Literal(Literal::Str("a".to_string())),
                ),
                option_some(Expr::FnCall(
                    Box::new(Expr::Ident("addOne".to_string())),
                    vec![option_with_default(
                        map_get(
                            Expr::Ident("counts".to_string()),
                            Expr::Literal(Literal::Str("a".to_string())),
                        ),
                        Expr::Literal(Literal::Int(0)),
                    )],
                )),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "existingKeyIncrements".to_string(),
                givens: vec![VerifyGiven {
                    name: "counts".to_string(),
                    type_name: "Map<String, Int>".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![Expr::FnCall(
                        Box::new(Expr::Attr(
                            Box::new(Expr::Ident("Map".to_string())),
                            "empty".to_string(),
                        )),
                        vec![],
                    )]),
                }],
                when: None,
                lhs: map_get(
                    Expr::FnCall(
                        Box::new(Expr::Ident("incCount".to_string())),
                        vec![
                            Expr::Ident("counts".to_string()),
                            Expr::Literal(Literal::Str("a".to_string())),
                        ],
                    ),
                    Expr::Literal(Literal::Str("a".to_string())),
                ),
                rhs: option_some(Expr::FnCall(
                    Box::new(Expr::Ident("addOne".to_string())),
                    vec![option_with_default(
                        map_get(
                            Expr::Ident("counts".to_string()),
                            Expr::Literal(Literal::Str("a".to_string())),
                        ),
                        Expr::Literal(Literal::Int(0)),
                    )],
                )),
                sample_guards: vec![],
            })),
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        // Both proofs should case-split on the original lookup and close with
        // the AverMap set lemmas.
        assert!(
            lean.contains("cases h : AverMap.get counts word <;> simp [AverMap.has_set_self]"),
            "expected keyPresent auto-proof with has_set_self"
        );
        assert!(
            lean.contains("cases h : AverMap.get counts \"a\" <;> simp [AverMap.get_set_self, addOne, incCount]"),
            "expected existingKeyIncrements auto-proof with get_set_self"
        );
    }
3620
3621 #[test]
3622 fn transpile_parenthesizes_negative_int_call_args_in_law_samples() {
3623 let mut ctx = empty_ctx();
3624 let add = FnDef {
3625 name: "add".to_string(),
3626 line: 1,
3627 params: vec![
3628 ("a".to_string(), "Int".to_string()),
3629 ("b".to_string(), "Int".to_string()),
3630 ],
3631 return_type: "Int".to_string(),
3632 effects: vec![],
3633 desc: None,
3634 body: Rc::new(FnBody::from_expr(Expr::BinOp(
3635 BinOp::Add,
3636 Box::new(Expr::Ident("a".to_string())),
3637 Box::new(Expr::Ident("b".to_string())),
3638 ))),
3639 resolution: None,
3640 };
3641 ctx.fn_defs.push(add.clone());
3642 ctx.items.push(TopLevel::FnDef(add));
3643 ctx.items.push(TopLevel::Verify(VerifyBlock {
3644 fn_name: "add".to_string(),
3645 line: 1,
3646 cases: vec![(
3647 Expr::FnCall(
3648 Box::new(Expr::Ident("add".to_string())),
3649 vec![
3650 Expr::Literal(Literal::Int(-2)),
3651 Expr::Literal(Literal::Int(-1)),
3652 ],
3653 ),
3654 Expr::FnCall(
3655 Box::new(Expr::Ident("add".to_string())),
3656 vec![
3657 Expr::Literal(Literal::Int(-1)),
3658 Expr::Literal(Literal::Int(-2)),
3659 ],
3660 ),
3661 )],
3662 case_spans: vec![],
3663 case_givens: vec![],
3664 kind: VerifyKind::Law(Box::new(VerifyLaw {
3665 name: "commutative".to_string(),
3666 givens: vec![
3667 VerifyGiven {
3668 name: "a".to_string(),
3669 type_name: "Int".to_string(),
3670 domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(-2))]),
3671 },
3672 VerifyGiven {
3673 name: "b".to_string(),
3674 type_name: "Int".to_string(),
3675 domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(-1))]),
3676 },
3677 ],
3678 when: None,
3679 lhs: Expr::FnCall(
3680 Box::new(Expr::Ident("add".to_string())),
3681 vec![Expr::Ident("a".to_string()), Expr::Ident("b".to_string())],
3682 ),
3683 rhs: Expr::FnCall(
3684 Box::new(Expr::Ident("add".to_string())),
3685 vec![Expr::Ident("b".to_string()), Expr::Ident("a".to_string())],
3686 ),
3687 sample_guards: vec![],
3688 })),
3689 }));
3690
3691 let out = transpile(&ctx);
3692 let lean = out
3693 .files
3694 .iter()
3695 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3696 .expect("expected generated Lean file");
3697 assert!(lean.contains(
3698 "theorem add_law_commutative_sample_1 : add (-2) (-1) = add (-1) (-2) := by native_decide"
3699 ));
3700 }
3701
3702 #[test]
3703 fn verify_law_numbering_is_scoped_per_law_name() {
3704 let mut ctx = empty_ctx();
3705 let f = FnDef {
3706 name: "f".to_string(),
3707 line: 1,
3708 params: vec![("x".to_string(), "Int".to_string())],
3709 return_type: "Int".to_string(),
3710 effects: vec![],
3711 desc: None,
3712 body: Rc::new(FnBody::from_expr(Expr::Ident("x".to_string()))),
3713 resolution: None,
3714 };
3715 ctx.fn_defs.push(f.clone());
3716 ctx.items.push(TopLevel::FnDef(f));
3717 ctx.items.push(TopLevel::Verify(VerifyBlock {
3718 fn_name: "f".to_string(),
3719 line: 1,
3720 cases: vec![(
3721 Expr::Literal(Literal::Int(1)),
3722 Expr::Literal(Literal::Int(1)),
3723 )],
3724 case_spans: vec![],
3725 case_givens: vec![],
3726 kind: VerifyKind::Cases,
3727 }));
3728 ctx.items.push(TopLevel::Verify(VerifyBlock {
3729 fn_name: "f".to_string(),
3730 line: 2,
3731 cases: vec![(
3732 Expr::Literal(Literal::Int(2)),
3733 Expr::Literal(Literal::Int(2)),
3734 )],
3735 case_spans: vec![],
3736 case_givens: vec![],
3737 kind: VerifyKind::Law(Box::new(VerifyLaw {
3738 name: "identity".to_string(),
3739 givens: vec![VerifyGiven {
3740 name: "x".to_string(),
3741 type_name: "Int".to_string(),
3742 domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(2))]),
3743 }],
3744 when: None,
3745 lhs: Expr::Ident("x".to_string()),
3746 rhs: Expr::Ident("x".to_string()),
3747 sample_guards: vec![],
3748 })),
3749 }));
3750 let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
3751 let lean = out
3752 .files
3753 .iter()
3754 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3755 .expect("expected generated Lean file");
3756 assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
3757 assert!(lean.contains("theorem f_law_identity : ∀ (x : Int), x = x := by"));
3758 assert!(lean.contains("theorem f_law_identity_sample_1 : 2 = 2 := by"));
3759 assert!(!lean.contains("theorem f_law_identity_sample_2 : 2 = 2 := by"));
3760 }
3761
3762 #[test]
3763 fn proof_mode_accepts_single_int_countdown_recursion() {
3764 let mut ctx = empty_ctx();
3765 let down = FnDef {
3766 name: "down".to_string(),
3767 line: 1,
3768 params: vec![("n".to_string(), "Int".to_string())],
3769 return_type: "Int".to_string(),
3770 effects: vec![],
3771 desc: None,
3772 body: Rc::new(FnBody::from_expr(Expr::Match {
3773 subject: Box::new(Expr::Ident("n".to_string())),
3774 arms: vec![
3775 MatchArm {
3776 pattern: Pattern::Literal(Literal::Int(0)),
3777 body: Box::new(Expr::Literal(Literal::Int(0))),
3778 },
3779 MatchArm {
3780 pattern: Pattern::Wildcard,
3781 body: Box::new(Expr::TailCall(Box::new((
3782 "down".to_string(),
3783 vec![Expr::BinOp(
3784 BinOp::Sub,
3785 Box::new(Expr::Ident("n".to_string())),
3786 Box::new(Expr::Literal(Literal::Int(1))),
3787 )],
3788 )))),
3789 },
3790 ],
3791 line: 1,
3792 })),
3793 resolution: None,
3794 };
3795 ctx.items.push(TopLevel::FnDef(down.clone()));
3796 ctx.fn_defs.push(down);
3797
3798 let issues = proof_mode_issues(&ctx);
3799 assert!(
3800 issues.is_empty(),
3801 "expected Int countdown recursion to be accepted, got: {:?}",
3802 issues
3803 );
3804
3805 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3806 let lean = out
3807 .files
3808 .iter()
3809 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3810 .expect("expected generated Lean file");
3811 assert!(lean.contains("def down__fuel"));
3812 assert!(lean.contains("def down (n : Int) : Int :="));
3813 assert!(lean.contains("down__fuel ((Int.natAbs n) + 1) n"));
3814 }
3815
3816 #[test]
3817 fn proof_mode_accepts_single_int_countdown_on_nonfirst_param() {
3818 let mut ctx = empty_ctx();
3819 let repeat_like = FnDef {
3820 name: "repeatLike".to_string(),
3821 line: 1,
3822 params: vec![
3823 ("char".to_string(), "String".to_string()),
3824 ("n".to_string(), "Int".to_string()),
3825 ],
3826 return_type: "List<String>".to_string(),
3827 effects: vec![],
3828 desc: None,
3829 body: Rc::new(FnBody::from_expr(Expr::Match {
3830 subject: Box::new(Expr::BinOp(
3831 BinOp::Lte,
3832 Box::new(Expr::Ident("n".to_string())),
3833 Box::new(Expr::Literal(Literal::Int(0))),
3834 )),
3835 arms: vec![
3836 MatchArm {
3837 pattern: Pattern::Literal(Literal::Bool(true)),
3838 body: Box::new(Expr::List(vec![])),
3839 },
3840 MatchArm {
3841 pattern: Pattern::Literal(Literal::Bool(false)),
3842 body: Box::new(Expr::TailCall(Box::new((
3843 "repeatLike".to_string(),
3844 vec![
3845 Expr::Ident("char".to_string()),
3846 Expr::BinOp(
3847 BinOp::Sub,
3848 Box::new(Expr::Ident("n".to_string())),
3849 Box::new(Expr::Literal(Literal::Int(1))),
3850 ),
3851 ],
3852 )))),
3853 },
3854 ],
3855 line: 1,
3856 })),
3857 resolution: None,
3858 };
3859 ctx.items.push(TopLevel::FnDef(repeat_like.clone()));
3860 ctx.fn_defs.push(repeat_like);
3861
3862 let issues = proof_mode_issues(&ctx);
3863 assert!(
3864 issues.is_empty(),
3865 "expected non-first Int countdown recursion to be accepted, got: {:?}",
3866 issues
3867 );
3868
3869 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3870 let lean = out
3871 .files
3872 .iter()
3873 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3874 .expect("expected generated Lean file");
3875 assert!(lean.contains("def repeatLike__fuel"));
3876 assert!(lean.contains("def repeatLike (char : String) (n : Int) : List String :="));
3877 assert!(lean.contains("repeatLike__fuel ((Int.natAbs n) + 1) char n"));
3878 }
3879
3880 #[test]
3881 fn proof_mode_accepts_negative_guarded_int_ascent() {
3882 let mut ctx = empty_ctx();
3883 let normalize = FnDef {
3884 name: "normalize".to_string(),
3885 line: 1,
3886 params: vec![("angle".to_string(), "Int".to_string())],
3887 return_type: "Int".to_string(),
3888 effects: vec![],
3889 desc: None,
3890 body: Rc::new(FnBody::from_expr(Expr::Match {
3891 subject: Box::new(Expr::BinOp(
3892 BinOp::Lt,
3893 Box::new(Expr::Ident("angle".to_string())),
3894 Box::new(Expr::Literal(Literal::Int(0))),
3895 )),
3896 arms: vec![
3897 MatchArm {
3898 pattern: Pattern::Literal(Literal::Bool(true)),
3899 body: Box::new(Expr::TailCall(Box::new((
3900 "normalize".to_string(),
3901 vec![Expr::BinOp(
3902 BinOp::Add,
3903 Box::new(Expr::Ident("angle".to_string())),
3904 Box::new(Expr::Literal(Literal::Int(360))),
3905 )],
3906 )))),
3907 },
3908 MatchArm {
3909 pattern: Pattern::Literal(Literal::Bool(false)),
3910 body: Box::new(Expr::Ident("angle".to_string())),
3911 },
3912 ],
3913 line: 1,
3914 })),
3915 resolution: None,
3916 };
3917 ctx.items.push(TopLevel::FnDef(normalize.clone()));
3918 ctx.fn_defs.push(normalize);
3919
3920 let issues = proof_mode_issues(&ctx);
3921 assert!(
3922 issues.is_empty(),
3923 "expected negative-guarded Int ascent recursion to be accepted, got: {:?}",
3924 issues
3925 );
3926
3927 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3928 let lean = out
3929 .files
3930 .iter()
3931 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3932 .expect("expected generated Lean file");
3933 assert!(lean.contains("def normalize__fuel"));
3934 assert!(lean.contains("normalize__fuel ((Int.natAbs angle) + 1) angle"));
3935 }
3936
3937 #[test]
3938 fn proof_mode_accepts_single_list_structural_recursion() {
3939 let mut ctx = empty_ctx();
3940 let len = FnDef {
3941 name: "len".to_string(),
3942 line: 1,
3943 params: vec![("xs".to_string(), "List<Int>".to_string())],
3944 return_type: "Int".to_string(),
3945 effects: vec![],
3946 desc: None,
3947 body: Rc::new(FnBody::from_expr(Expr::Match {
3948 subject: Box::new(Expr::Ident("xs".to_string())),
3949 arms: vec![
3950 MatchArm {
3951 pattern: Pattern::EmptyList,
3952 body: Box::new(Expr::Literal(Literal::Int(0))),
3953 },
3954 MatchArm {
3955 pattern: Pattern::Cons("h".to_string(), "t".to_string()),
3956 body: Box::new(Expr::TailCall(Box::new((
3957 "len".to_string(),
3958 vec![Expr::Ident("t".to_string())],
3959 )))),
3960 },
3961 ],
3962 line: 1,
3963 })),
3964 resolution: None,
3965 };
3966 ctx.items.push(TopLevel::FnDef(len.clone()));
3967 ctx.fn_defs.push(len);
3968
3969 let issues = proof_mode_issues(&ctx);
3970 assert!(
3971 issues.is_empty(),
3972 "expected List structural recursion to be accepted, got: {:?}",
3973 issues
3974 );
3975 }
3976
    #[test]
    fn proof_mode_accepts_single_list_structural_recursion_on_nonfirst_param() {
        // `lenFrom` carries an accumulator in its first slot and recurses on
        // the tail of its *second* (List) parameter; proof mode should still
        // detect the structural descent and emit a total definition with a
        // `termination_by` on the list length (never a `partial def`).
        let mut ctx = empty_ctx();
        let len_from = FnDef {
            name: "lenFrom".to_string(),
            line: 1,
            params: vec![
                ("count".to_string(), "Int".to_string()),
                ("xs".to_string(), "List<Int>".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::Match {
                subject: Box::new(Expr::Ident("xs".to_string())),
                arms: vec![
                    // Base case: empty list returns the accumulator.
                    MatchArm {
                        pattern: Pattern::EmptyList,
                        body: Box::new(Expr::Ident("count".to_string())),
                    },
                    // Recursive case: bump the accumulator, recurse on tail.
                    MatchArm {
                        pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                        body: Box::new(Expr::TailCall(Box::new((
                            "lenFrom".to_string(),
                            vec![
                                Expr::BinOp(
                                    BinOp::Add,
                                    Box::new(Expr::Ident("count".to_string())),
                                    Box::new(Expr::Literal(Literal::Int(1))),
                                ),
                                Expr::Ident("t".to_string()),
                            ],
                        )))),
                    },
                ],
                line: 1,
            })),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(len_from.clone()));
        ctx.fn_defs.push(len_from);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected non-first List structural recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("termination_by xs.length"));
        assert!(!lean.contains("partial def lenFrom"));
    }
4031
    #[test]
    fn proof_mode_accepts_single_string_pos_advance_recursion() {
        // `skipWs` scans a string by recursing with `pos + 1` and terminating
        // when `String.charAt` returns Option.None; proof mode should accept
        // this String+pos pattern and compile it through a fuel definition
        // rather than falling back to `partial def`.
        let mut ctx = empty_ctx();
        let skip_ws = FnDef {
            name: "skipWs".to_string(),
            line: 1,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::Match {
                // Subject: String.charAt(s, pos) — the end-of-string probe.
                subject: Box::new(Expr::FnCall(
                    Box::new(Expr::Attr(
                        Box::new(Expr::Ident("String".to_string())),
                        "charAt".to_string(),
                    )),
                    vec![Expr::Ident("s".to_string()), Expr::Ident("pos".to_string())],
                )),
                arms: vec![
                    // Out of bounds: stop and return the current position.
                    MatchArm {
                        pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
                        body: Box::new(Expr::Ident("pos".to_string())),
                    },
                    // Any character: advance the position by one and recurse.
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: Box::new(Expr::TailCall(Box::new((
                            "skipWs".to_string(),
                            vec![
                                Expr::Ident("s".to_string()),
                                Expr::BinOp(
                                    BinOp::Add,
                                    Box::new(Expr::Ident("pos".to_string())),
                                    Box::new(Expr::Literal(Literal::Int(1))),
                                ),
                            ],
                        )))),
                    },
                ],
                line: 1,
            })),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(skip_ws.clone()));
        ctx.fn_defs.push(skip_ws);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected String+pos recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("def skipWs__fuel"));
        assert!(!lean.contains("partial def skipWs"));
    }
4092
    #[test]
    fn proof_mode_accepts_mutual_int_countdown_recursion() {
        // Classic mutual recursion: `even` tail-calls `odd (n - 1)` and vice
        // versa, each with a literal `0` base case. Proof mode should accept
        // the mutual Int countdown and emit fuel companions for *both*
        // functions plus a seeded wrapper.
        let mut ctx = empty_ctx();
        let even = FnDef {
            name: "even".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Bool".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::Match {
                subject: Box::new(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: Box::new(Expr::Literal(Literal::Bool(true))),
                    },
                    // Cross-call into `odd` with a decremented argument.
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: Box::new(Expr::TailCall(Box::new((
                            "odd".to_string(),
                            vec![Expr::BinOp(
                                BinOp::Sub,
                                Box::new(Expr::Ident("n".to_string())),
                                Box::new(Expr::Literal(Literal::Int(1))),
                            )],
                        )))),
                    },
                ],
                line: 1,
            })),
            resolution: None,
        };
        let odd = FnDef {
            name: "odd".to_string(),
            line: 2,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Bool".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::Match {
                subject: Box::new(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: Box::new(Expr::Literal(Literal::Bool(false))),
                    },
                    // Cross-call back into `even` with a decremented argument.
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: Box::new(Expr::TailCall(Box::new((
                            "even".to_string(),
                            vec![Expr::BinOp(
                                BinOp::Sub,
                                Box::new(Expr::Ident("n".to_string())),
                                Box::new(Expr::Literal(Literal::Int(1))),
                            )],
                        )))),
                    },
                ],
                line: 2,
            })),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(even.clone()));
        ctx.items.push(TopLevel::FnDef(odd.clone()));
        ctx.fn_defs.push(even);
        ctx.fn_defs.push(odd);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual Int countdown recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // Both members of the cycle get a fuel companion; the `even` wrapper
        // seeds the shared fuel with |n| + 1.
        assert!(lean.contains("def even__fuel"));
        assert!(lean.contains("def odd__fuel"));
        assert!(lean.contains("def even (n : Int) : Bool :="));
        assert!(lean.contains("even__fuel ((Int.natAbs n) + 1) n"));
    }
4175
    #[test]
    fn proof_mode_accepts_mutual_string_pos_recursion_with_ranked_same_edges() {
        // Builds two mutually recursive functions over (s: String, pos: Int):
        //   f: if pos >= 3 return pos, else tail-call g(s, pos)      -- pos unchanged
        //   g: if pos >= 3 return pos, else tail-call f(s, pos + 1)  -- pos advances
        // The f -> g edge does NOT advance the position, so acceptance requires
        // the mutual String+position plan to rank the group and tolerate
        // "same"-position edges as long as every cycle strictly advances pos.
        let mut ctx = empty_ctx();
        let f = FnDef {
            name: "f".to_string(),
            line: 1,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::Match {
                // subject: the guard expression (pos >= 3)
                subject: Box::new(Expr::BinOp(
                    BinOp::Gte,
                    Box::new(Expr::Ident("pos".to_string())),
                    Box::new(Expr::Literal(Literal::Int(3))),
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        body: Box::new(Expr::Ident("pos".to_string())),
                    },
                    MatchArm {
                        // Tail call g(s, pos): the "same" edge of the cycle.
                        pattern: Pattern::Wildcard,
                        body: Box::new(Expr::TailCall(Box::new((
                            "g".to_string(),
                            vec![Expr::Ident("s".to_string()), Expr::Ident("pos".to_string())],
                        )))),
                    },
                ],
                line: 1,
            })),
            resolution: None,
        };
        let g = FnDef {
            name: "g".to_string(),
            line: 2,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::Match {
                subject: Box::new(Expr::BinOp(
                    BinOp::Gte,
                    Box::new(Expr::Ident("pos".to_string())),
                    Box::new(Expr::Literal(Literal::Int(3))),
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        body: Box::new(Expr::Ident("pos".to_string())),
                    },
                    MatchArm {
                        // Tail call f(s, pos + 1): the strictly-advancing edge.
                        pattern: Pattern::Wildcard,
                        body: Box::new(Expr::TailCall(Box::new((
                            "f".to_string(),
                            vec![
                                Expr::Ident("s".to_string()),
                                Expr::BinOp(
                                    BinOp::Add,
                                    Box::new(Expr::Ident("pos".to_string())),
                                    Box::new(Expr::Literal(Literal::Int(1))),
                                ),
                            ],
                        )))),
                    },
                ],
                line: 2,
            })),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(f.clone()));
        ctx.items.push(TopLevel::FnDef(g.clone()));
        ctx.fn_defs.push(f);
        ctx.fn_defs.push(g);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual String+pos recursion to be accepted, got: {:?}",
            issues
        );

        // Both functions must be emitted as total fuelized defs, never `partial`.
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("def f__fuel"));
        assert!(lean.contains("def g__fuel"));
        assert!(!lean.contains("partial def f"));
    }
4270
    #[test]
    fn proof_mode_accepts_mutual_ranked_sizeof_recursion() {
        // Mutual recursion where only one edge shrinks the data:
        //   f(xs)      -> g("acc", xs)                       -- list passed through unchanged
        //   g(acc, xs) -> match xs: [] -> 0 | h :: t -> f(t) -- strictly smaller tail
        // Acceptance requires the ranked-sizeOf plan: ordering the group by rank
        // lets the "same-size" f -> g edge be justified by a rank decrease.
        let mut ctx = empty_ctx();
        let f = FnDef {
            name: "f".to_string(),
            line: 1,
            params: vec![("xs".to_string(), "List<Int>".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            // body: g("acc", xs)
            body: Rc::new(FnBody::from_expr(Expr::TailCall(Box::new((
                "g".to_string(),
                vec![
                    Expr::Literal(Literal::Str("acc".to_string())),
                    Expr::Ident("xs".to_string()),
                ],
            ))))),
            resolution: None,
        };
        let g = FnDef {
            name: "g".to_string(),
            line: 2,
            params: vec![
                ("acc".to_string(), "String".to_string()),
                ("xs".to_string(), "List<Int>".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::Match {
                subject: Box::new(Expr::Ident("xs".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::EmptyList,
                        body: Box::new(Expr::Literal(Literal::Int(0))),
                    },
                    MatchArm {
                        // h :: t -> f(t): recurse on the strictly smaller tail.
                        pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                        body: Box::new(Expr::TailCall(Box::new((
                            "f".to_string(),
                            vec![Expr::Ident("t".to_string())],
                        )))),
                    },
                ],
                line: 2,
            })),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(f.clone()));
        ctx.items.push(TopLevel::FnDef(g.clone()));
        ctx.fn_defs.push(f);
        ctx.fn_defs.push(g);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual ranked-sizeOf recursion to be accepted, got: {:?}",
            issues
        );

        // Both functions are emitted as total fuelized defs inside a `mutual` block.
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("mutual"));
        assert!(lean.contains("def f__fuel"));
        assert!(lean.contains("def g__fuel"));
        assert!(!lean.contains("partial def f"));
        assert!(!lean.contains("partial def g"));
    }
4339
    #[test]
    fn proof_mode_rejects_recursive_pure_functions() {
        // A self-recursive function whose argument never changes
        // (`loop(n) = loop(n)`) matches none of the supported recursion plans
        // and must surface as an "outside proof subset" blocker.
        let mut ctx = empty_ctx();
        let recursive_fn = FnDef {
            name: "loop".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            // body: loop(n) -- calls itself with the unmodified parameter.
            body: Rc::new(FnBody::from_expr(Expr::FnCall(
                Box::new(Expr::Ident("loop".to_string())),
                vec![Expr::Ident("n".to_string())],
            ))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(recursive_fn.clone()));
        ctx.fn_defs.push(recursive_fn);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.iter().any(|i| i.contains("outside proof subset")),
            "expected recursive function blocker, got: {:?}",
            issues
        );
    }
4366
    #[test]
    fn proof_mode_allows_recursive_types() {
        // A recursive sum type (Node whose Cons variant contains a Node) must
        // NOT trigger the "recursive types require unsafe DecidableEq shim"
        // blocker in proof mode.
        let mut ctx = empty_ctx();
        let recursive_type = TypeDef::Sum {
            name: "Node".to_string(),
            variants: vec![TypeVariant {
                name: "Cons".to_string(),
                // The field references the enclosing type, making it recursive.
                fields: vec!["Node".to_string()],
            }],
            line: 1,
        };
        ctx.items.push(TopLevel::TypeDef(recursive_type.clone()));
        ctx.type_defs.push(recursive_type);

        let issues = proof_mode_issues(&ctx);
        // Other issues may exist; only the DecidableEq-shim blocker is forbidden.
        assert!(
            issues
                .iter()
                .all(|i| !i.contains("recursive types require unsafe DecidableEq shim")),
            "did not expect recursive type blocker, got: {:?}",
            issues
        );
    }
4390
4391 #[test]
4392 fn law_auto_example_exports_real_proof_artifacts() {
4393 let ctx = ctx_from_source(
4394 include_str!("../../../examples/formal/law_auto.av"),
4395 "law_auto",
4396 );
4397 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4398 let lean = generated_lean_file(&out);
4399
4400 assert!(lean.contains("theorem add_law_commutative :"));
4401 assert!(lean.contains("theorem id'_law_reflexive : ∀ (x : Int), x = x := by"));
4402 assert!(lean.contains("theorem incCount_law_keyPresent :"));
4403 assert!(lean.contains("AverMap.has_set_self"));
4404 assert!(lean.contains("theorem add_law_commutative_sample_1 :"));
4405 assert!(lean.contains(":= by native_decide"));
4406 }
4407
4408 #[test]
4409 fn json_example_stays_inside_proof_subset() {
4410 let ctx = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
4411 let issues = proof_mode_issues(&ctx);
4412 assert!(
4413 issues.is_empty(),
4414 "expected json example to stay inside proof subset, got: {:?}",
4415 issues
4416 );
4417 }
4418
4419 #[test]
4420 fn json_example_uses_total_defs_and_domain_guarded_laws_in_proof_mode() {
4421 let ctx = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
4422 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4423 let lean = generated_lean_file(&out);
4424
4425 assert!(!lean.contains("partial def"));
4426 assert!(lean.contains("def skipWs__fuel"));
4427 assert!(lean.contains("def parseValue__fuel"));
4428 assert!(lean.contains("def toString' (j : Json) : String :="));
4429 assert!(
4430 lean.contains(
4431 "def averMeasureJsonEntries_String (items : List (String × Json)) : Nat :="
4432 )
4433 );
4434 assert!(lean.contains(
4435 "| .jsonObject x0 => (averMeasureJsonEntries_String (AverMap.entries x0)) + 1"
4436 ));
4437 assert!(lean.contains("-- when jsonRoundtripSafe j"));
4438 assert!(!lean.contains("-- hint: verify law '"));
4439 assert!(!lean.contains("private theorem toString'_law_parseRoundtrip_aux"));
4440 assert!(
4441 lean.contains(
4442 "theorem toString'_law_parseRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
4443 )
4444 );
4445 assert!(lean.contains(
4446 "jsonRoundtripSafe j = true -> fromString (toString' j) = Except.ok j := by"
4447 ));
4448 assert!(
4449 lean.contains("theorem finishFloat_law_fromCanonicalFloat : ∀ (f : Float), f = 3.5 ∨")
4450 );
4451 assert!(lean.contains("theorem finishInt_law_fromCanonicalInt_checked_domain :"));
4452 assert!(lean.contains(
4453 "theorem toString'_law_parseValueRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
4454 ));
4455 assert!(lean.contains("theorem toString'_law_parseRoundtrip_sample_1 :"));
4456 assert!(lean.contains(
4457 "example : fromString \"null\" = Except.ok Json.jsonNull := by native_decide"
4458 ));
4459 }
4460
    #[test]
    fn transpile_injects_builtin_network_types_and_vector_get_support() {
        // Plain transpile (not proof mode) of a program referencing the builtin
        // network types: the matching Lean structures must be injected, with
        // the dotted name Tcp.Connection flattened to Tcp_Connection.
        let ctx = ctx_from_source(
            r#"
fn firstOrMissing(xs: Vector<String>) -> Result<String, String>
    Option.toResult(Vector.get(xs, 0), "missing")

fn defaultHeader() -> Header
    Header(name = "Content-Type", value = "application/json")

fn mkResponse(body: String) -> HttpResponse
    HttpResponse(status = 200, body = body, headers = [defaultHeader()])

fn requestPath(req: HttpRequest) -> String
    req.path

fn connPort(conn: Tcp.Connection) -> Int
    conn.port
"#,
            "network_helpers",
        );
        let out = transpile(&ctx);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("structure Header where"));
        assert!(lean.contains("structure HttpResponse where"));
        assert!(lean.contains("structure HttpRequest where"));
        assert!(lean.contains("structure Tcp_Connection where"));
        assert!(lean.contains("port : Int"));
    }
4491
4492 #[test]
4493 fn law_auto_example_has_no_sorry_in_proof_mode() {
4494 let ctx = ctx_from_source(
4495 include_str!("../../../examples/formal/law_auto.av"),
4496 "law_auto",
4497 );
4498 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4499 let lean = generated_lean_file(&out);
4500 assert!(
4501 !lean.contains("sorry"),
4502 "expected law_auto proof export to avoid sorry, got:\n{}",
4503 lean
4504 );
4505 }
4506
    #[test]
    fn map_example_has_no_sorry_in_proof_mode() {
        // NOTE(review): despite the name, this test asserts the export DOES
        // contain `sorry` — presumably some universal map laws are not yet
        // auto-proved while the sampled laws and AverMap lemmas still export.
        // Consider renaming the test (or removing the sorry assert once the
        // universal laws are proved).
        let ctx = ctx_from_source(include_str!("../../../examples/data/map.av"), "map");
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected map example to stay inside proof subset, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("theorem incCount_law_trackedCountStepsByOne :"));
        // Unproved universal theorems fall back to `sorry`.
        assert!(lean.contains("sorry"));
        // Sampled law instances are still exported.
        assert!(lean.contains("theorem countWords_law_presenceMatchesContains_sample_1 :"));
        assert!(lean.contains("theorem countWords_law_trackedWordCount_sample_1 :"));
        // The AverMap helper lemmas the proofs rely on are present.
        assert!(lean.contains("AverMap.has_set_self"));
        assert!(lean.contains("AverMap.get_set_self"));
    }
4528
4529 #[test]
4530 fn spec_laws_example_has_no_sorry_in_proof_mode() {
4531 let ctx = ctx_from_source(
4532 include_str!("../../../examples/formal/spec_laws.av"),
4533 "spec_laws",
4534 );
4535 let issues = proof_mode_issues(&ctx);
4536 assert!(
4537 issues.is_empty(),
4538 "expected spec_laws example to stay inside proof subset, got: {:?}",
4539 issues
4540 );
4541
4542 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4543 let lean = generated_lean_file(&out);
4544 assert!(
4545 !lean.contains("sorry"),
4546 "expected spec_laws proof export to avoid sorry, got:\n{}",
4547 lean
4548 );
4549 assert!(lean.contains("theorem absVal_eq_absValSpec :"));
4550 assert!(lean.contains("theorem clampNonNegative_eq_clampNonNegativeSpec :"));
4551 }
4552
    #[test]
    fn rle_example_exports_sampled_roundtrip_laws_without_sorry() {
        // NOTE(review): the name reads as "without sorry" but — per the assert
        // message below — the intent is that only the SAMPLED laws avoid sorry;
        // the universal roundtrip theorems are unproved and do contain it.
        let ctx = ctx_from_source(include_str!("../../../examples/data/rle.av"), "rle");
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        assert!(
            lean.contains("sorry"),
            "expected rle proof export to contain sorry for unproved universal theorems"
        );
        // The concrete sampled instances close via native_decide.
        assert!(lean.contains(
            "theorem encode_law_roundtrip_sample_1 : decode (encode []) = [] := by native_decide"
        ));
        assert!(lean.contains(
            "theorem encodeString_law_string_roundtrip_sample_1 : decodeString (encodeString \"\") = \"\" := by native_decide"
        ));
    }
4570
4571 #[test]
4572 fn fibonacci_example_uses_fuelized_int_countdown_in_proof_mode() {
4573 let ctx = ctx_from_source(
4574 include_str!("../../../examples/data/fibonacci.av"),
4575 "fibonacci",
4576 );
4577 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4578 let lean = generated_lean_file(&out);
4579
4580 assert!(lean.contains("def fibTR__fuel"));
4581 assert!(lean.contains("def fibTR (n : Int) (a : Int) (b : Int) : Int :="));
4582 assert!(lean.contains("fibTR__fuel ((Int.natAbs n) + 1) n a b"));
4583 assert!(!lean.contains("partial def fibTR"));
4584 }
4585
4586 #[test]
4587 fn fibonacci_example_stays_inside_proof_subset() {
4588 let ctx = ctx_from_source(
4589 include_str!("../../../examples/data/fibonacci.av"),
4590 "fibonacci",
4591 );
4592 let issues = proof_mode_issues(&ctx);
4593 assert!(
4594 issues.is_empty(),
4595 "expected fibonacci example to stay inside proof subset, got: {:?}",
4596 issues
4597 );
4598 }
4599
4600 #[test]
4601 fn fibonacci_example_matches_general_linear_recurrence_shapes() {
4602 let ctx = ctx_from_source(
4603 include_str!("../../../examples/data/fibonacci.av"),
4604 "fibonacci",
4605 );
4606 let fib = ctx.fn_defs.iter().find(|fd| fd.name == "fib").unwrap();
4607 let fib_tr = ctx.fn_defs.iter().find(|fd| fd.name == "fibTR").unwrap();
4608 let fib_spec = ctx.fn_defs.iter().find(|fd| fd.name == "fibSpec").unwrap();
4609
4610 assert!(recurrence::detect_tailrec_int_linear_pair_wrapper(fib).is_some());
4611 assert!(recurrence::detect_tailrec_int_linear_pair_worker(fib_tr).is_some());
4612 assert!(recurrence::detect_second_order_int_linear_recurrence(fib_spec).is_some());
4613 }
4614
4615 #[test]
4616 fn fibonacci_example_auto_proves_general_linear_recurrence_spec_law() {
4617 let ctx = ctx_from_source(
4618 include_str!("../../../examples/data/fibonacci.av"),
4619 "fibonacci",
4620 );
4621 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4622 let lean = generated_lean_file(&out);
4623
4624 assert!(lean.contains("private def fibSpec__nat : Nat -> Int"));
4625 assert!(!lean.contains("partial def fibSpec"));
4626 assert!(lean.contains("private theorem fib_eq_fibSpec__worker_nat_shift"));
4627 assert!(lean.contains("private theorem fib_eq_fibSpec__helper_nat"));
4628 assert!(lean.contains("private theorem fib_eq_fibSpec__helper_seed"));
4629 assert!(lean.contains("theorem fib_eq_fibSpec : ∀ (n : Int), fib n = fibSpec n := by"));
4630 assert!(!lean.contains(
4631 "-- universal theorem fib_eq_fibSpec omitted: sampled law shape is not auto-proved yet"
4632 ));
4633 }
4634
    #[test]
    fn pell_like_example_auto_proves_same_general_shape() {
        // A Pell-like recurrence (p(n) = p(n-2) + 2*p(n-1)) has the same
        // pair-state worker + second-order spec shape as fibonacci, so the
        // general linear-recurrence prover should close its universal law too.
        let ctx = ctx_from_source(
            r#"
module Pell
    intent =
        "linear recurrence probe"

fn pellTR(n: Int, a: Int, b: Int) -> Int
    match n
        0 -> a
        _ -> pellTR(n - 1, b, a + 2 * b)

fn pell(n: Int) -> Int
    match n < 0
        true -> 0
        false -> pellTR(n, 0, 1)

fn pellSpec(n: Int) -> Int
    match n < 0
        true -> 0
        false -> match n
            0 -> 0
            1 -> 1
            _ -> pellSpec(n - 2) + 2 * pellSpec(n - 1)

verify pell law pellSpec
    given n: Int = [0, 1, 2, 3]
    pell(n) => pellSpec(n)
"#,
            "pell",
        );
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected pell example to stay inside proof subset, got: {:?}",
            issues
        );

        // The universal theorem must be proved (via the nat-shift helper),
        // not emitted as an "omitted" comment.
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("private def pellSpec__nat : Nat -> Int"));
        assert!(lean.contains("private theorem pell_eq_pellSpec__worker_nat_shift"));
        assert!(lean.contains("theorem pell_eq_pellSpec : ∀ (n : Int), pell n = pellSpec n := by"));
        assert!(!lean.contains(
            "-- universal theorem pell_eq_pellSpec omitted: sampled law shape is not auto-proved yet"
        ));
    }
4683
    #[test]
    fn nonlinear_pair_state_recurrence_is_not_auto_proved_as_linear_shape() {
        // Negative control for the linear-recurrence prover: the worker updates
        // its state with a product (a * b) and the spec multiplies its recursive
        // calls, so the shape is NOT second-order affine. The universal law must
        // stay unproved (sorry) while the sampled instance is still exported.
        let ctx = ctx_from_source(
            r#"
module WeirdRec
    intent =
        "reject nonlinear pair-state recurrence from linear recurrence prover"

fn weirdTR(n: Int, a: Int, b: Int) -> Int
    match n
        0 -> a
        _ -> weirdTR(n - 1, b, a * b)

fn weird(n: Int) -> Int
    match n < 0
        true -> 0
        false -> weirdTR(n, 0, 1)

fn weirdSpec(n: Int) -> Int
    match n < 0
        true -> 0
        false -> match n
            0 -> 0
            1 -> 1
            _ -> weirdSpec(n - 1) * weirdSpec(n - 2)

verify weird law weirdSpec
    given n: Int = [0, 1, 2, 3]
    weird(n) => weirdSpec(n)
"#,
            "weirdrec",
        );
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        // Universal law unproved; no linear nat-shift helper generated.
        assert!(lean.contains("sorry"));
        assert!(!lean.contains("private theorem weird_eq_weirdSpec__worker_nat_shift"));
        assert!(lean.contains("theorem weird_eq_weirdSpec_sample_1 :"));
    }
4724
4725 #[test]
4726 fn date_example_stays_inside_proof_subset() {
4727 let ctx = ctx_from_source(include_str!("../../../examples/data/date.av"), "date");
4728 let issues = proof_mode_issues(&ctx);
4729 assert!(
4730 issues.is_empty(),
4731 "expected date example to stay inside proof subset, got: {:?}",
4732 issues
4733 );
4734
4735 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4736 let lean = generated_lean_file(&out);
4737 assert!(!lean.contains("partial def"));
4738 assert!(lean.contains("def parseIntSlice (s : String) (from' : Int) (to : Int) : Int :="));
4739 }
4740
4741 #[test]
4742 fn temperature_example_stays_inside_proof_subset() {
4743 let ctx = ctx_from_source(
4744 include_str!("../../../examples/core/temperature.av"),
4745 "temperature",
4746 );
4747 let issues = proof_mode_issues(&ctx);
4748 assert!(
4749 issues.is_empty(),
4750 "expected temperature example to stay inside proof subset, got: {:?}",
4751 issues
4752 );
4753
4754 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4755 let lean = generated_lean_file(&out);
4756 assert!(!lean.contains("partial def"));
4757 assert!(
4758 lean.contains("example : celsiusToFahr 0.0 = 32.0 := by native_decide"),
4759 "expected verify examples to survive proof export, got:\n{}",
4760 lean
4761 );
4762 }
4763
4764 #[test]
4765 fn quicksort_example_stays_inside_proof_subset() {
4766 let ctx = ctx_from_source(
4767 include_str!("../../../examples/data/quicksort.av"),
4768 "quicksort",
4769 );
4770 let issues = proof_mode_issues(&ctx);
4771 assert!(
4772 issues.is_empty(),
4773 "expected quicksort example to stay inside proof subset, got: {:?}",
4774 issues
4775 );
4776
4777 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4778 let lean = generated_lean_file(&out);
4779 assert!(lean.contains("def isOrderedFrom"));
4780 assert!(!lean.contains("partial def isOrderedFrom"));
4781 assert!(lean.contains("termination_by xs.length"));
4782 }
4783
4784 #[test]
4785 fn grok_s_language_example_uses_total_ranked_sizeof_mutual_recursion() {
4786 let ctx = ctx_from_source(
4787 include_str!("../../../examples/core/grok_s_language.av"),
4788 "grok_s_language",
4789 );
4790 let issues = proof_mode_issues(&ctx);
4791 assert!(
4792 issues.is_empty(),
4793 "expected grok_s_language example to stay inside proof subset, got: {:?}",
4794 issues
4795 );
4796
4797 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4798 let lean = generated_lean_file(&out);
4799 assert!(lean.contains("mutual"));
4800 assert!(lean.contains("def eval__fuel"));
4801 assert!(lean.contains("def parseListItems__fuel"));
4802 assert!(!lean.contains("partial def eval"));
4803 assert!(!lean.contains("termination_by (sizeOf e,"));
4804 assert!(lean.contains("-- when validSymbolNames e"));
4805 assert!(!lean.contains("private theorem toString'_law_parseRoundtrip_aux"));
4806 assert!(lean.contains(
4807 "theorem toString'_law_parseRoundtrip : ∀ (e : Sexpr), e = Sexpr.atomNum 42 ∨"
4808 ));
4809 assert!(
4810 lean.contains("validSymbolNames e = true -> parse (toString' e) = Except.ok e := by")
4811 );
4812 assert!(lean.contains("theorem toString'_law_parseSexprRoundtrip :"));
4813 assert!(lean.contains("theorem toString'_law_parseRoundtrip_sample_1 :"));
4814 }
4815
    #[test]
    fn lambda_example_keeps_only_eval_outside_proof_subset() {
        // Untyped lambda-calculus evaluation may diverge, so `eval` is expected
        // to be the single proof-subset blocker and to remain `partial`, while
        // every other recursive helper in the example is fuelized into a total
        // definition.
        let ctx = ctx_from_source(include_str!("../../../examples/core/lambda.av"), "lambda");
        let issues = proof_mode_issues(&ctx);
        // The blocker message doubles as a catalogue of supported recursion
        // plans; this assert pins the exact wording.
        assert_eq!(
            issues,
            vec!["recursive function 'eval' is outside proof subset (currently supported: Int countdown, second-order affine Int recurrences with pair-state worker, structural recursion on List/recursive ADTs, String+position, mutual Int countdown, mutual String+position, and ranked sizeOf recursion)".to_string()]
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("def termToString__fuel"));
        assert!(lean.contains("def subst__fuel"));
        assert!(lean.contains("def countS__fuel"));
        assert!(!lean.contains("partial def termToString"));
        assert!(!lean.contains("partial def subst"));
        assert!(!lean.contains("partial def countS"));
        // Only eval stays partial.
        assert!(lean.contains("partial def eval"));
    }
4835
4836 #[test]
4837 fn mission_control_example_stays_inside_proof_subset() {
4838 let ctx = ctx_from_source(
4839 include_str!("../../../examples/apps/mission_control.av"),
4840 "mission_control",
4841 );
4842 let issues = proof_mode_issues(&ctx);
4843 assert!(
4844 issues.is_empty(),
4845 "expected mission_control example to stay inside proof subset, got: {:?}",
4846 issues
4847 );
4848
4849 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4850 let lean = generated_lean_file(&out);
4851 assert!(!lean.contains("partial def normalizeAngle"));
4852 assert!(lean.contains("def normalizeAngle__fuel"));
4853 }
4854
    #[test]
    fn notepad_store_example_stays_inside_proof_subset() {
        // The notepad store's (de)serialization must be total, and its
        // roundtrip laws must export as domain-guarded theorems plus sampled
        // instances.
        let ctx = ctx_from_source(
            include_str!("../../../examples/apps/notepad/store.av"),
            "notepad_store",
        );
        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected notepad/store example to stay inside proof subset, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("def deserializeLine (line : String) : Except String Note :="));
        assert!(lean.contains("Except String (List Note)"));
        assert!(!lean.contains("partial def deserializeLine"));
        // Domain guards surface as comments on the guarded laws.
        assert!(lean.contains("-- when noteRoundtripSafe note"));
        assert!(lean.contains("-- when notesRoundtripSafe notes"));
        assert!(lean.contains(
            "theorem serializeLine_law_lineRoundtrip : ∀ (note : Note), note = { id' := 1, title := \"Hello\", body := \"World\" : Note } ∨"
        ));
        assert!(lean.contains(
            "theorem serializeLines_law_notesRoundtrip : ∀ (notes : List Note), notes = [] ∨"
        ));
        assert!(lean.contains("notesRoundtripSafe notes = true ->"));
        assert!(lean.contains("parseNotes (s!\"{String.intercalate \"\\n\" (serializeLines notes)}\\n\") = Except.ok notes"));
        assert!(lean.contains("theorem serializeLine_law_lineRoundtrip_sample_1 :"));
        assert!(lean.contains("theorem serializeLines_law_notesRoundtrip_sample_1 :"));
    }
4886}