1mod builtins;
7mod expr;
8mod law_auto;
9mod pattern;
10mod recurrence;
11mod shared;
12mod toplevel;
13mod types;
14
15use std::collections::{HashMap, HashSet};
16
17use crate::ast::{
18 BinOp, Expr, FnBody, FnDef, MatchArm, Pattern, Spanned, Stmt, TailCallData, TopLevel, TypeDef,
19 VerifyKind,
20};
21use crate::call_graph;
22use crate::codegen::{CodegenContext, ProjectOutput};
23
/// How verification obligations from `verify` blocks are emitted into the
/// generated Lean file. Variant semantics are inferred from their names —
/// confirm against the emitter that consumes this mode.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum VerifyEmitMode {
    /// Close goals via Lean's `native_decide` (per the variant name).
    NativeDecide,
    /// Close goals with `sorry` placeholders.
    Sorry,
    /// Emit named theorem skeletons to be proven manually.
    TheoremSkeleton,
}
36
/// Termination strategy chosen for a recursive pure function when translating
/// it to Lean. Variant/field meanings below are inferred from names and from
/// the prelude helpers in this file (e.g. `averStringPosFuel`, `AverMeasure`);
/// confirm against the planner that constructs these values.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum RecursionPlan {
    /// Recursion on the `Int` parameter at `param_index`, counting down.
    IntCountdown { param_index: usize },
    /// Recursion on an ascending `Int` parameter; `bound_lean` is a Lean
    /// expression for the upper bound.
    IntAscending {
        param_index: usize,
        bound_lean: String,
    },
    /// Two-term linear recurrence (Fibonacci-style).
    LinearRecurrence2,
    /// Structural recursion on the list parameter at `param_index`.
    ListStructural { param_index: usize },
    /// Termination via `sizeOf` of a structurally shrinking argument.
    SizeOfStructural,
    /// String scan whose position argument advances (see `averStringPosFuel`).
    StringPosAdvance,
    /// Mutual-recursion counterparts of the strategies above; `rank`
    /// presumably orders the functions within the mutual group — confirm.
    MutualIntCountdown,
    MutualStringPosAdvance { rank: usize },
    MutualSizeOfRanked { rank: usize },
}
68
/// A diagnostic produced while preparing proof-mode output, tied to a source
/// location.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ProofModeIssue {
    // Source line the issue refers to (presumably 1-based — confirm with the reporter).
    pub line: usize,
    // Human-readable description of the issue.
    pub message: String,
}
74
/// File header comment and `set_option` preamble emitted at the top of every
/// generated Lean file.
const LEAN_PRELUDE_HEADER: &str = r#"-- Generated by the Aver → Lean 4 transpiler
-- Pure core logic only (effectful functions are omitted)

set_option linter.unusedVariables false

-- Prelude: helper definitions for Aver builtins"#;
81
/// Coercion from `Int` to `Float` plus a named `Float.fromInt` helper.
const LEAN_PRELUDE_FLOAT_COE: &str = r#"instance : Coe Int Float := ⟨fun n => Float.ofInt n⟩

def Float.fromInt (n : Int) : Float := Float.ofInt n"#;
85
/// `DecidableEq Float` via an unsafe `==`-based decision procedure hidden
/// behind an opaque constant (Lean cannot decide float equality natively).
const LEAN_PRELUDE_FLOAT_DEC_EQ: &str = r#"private unsafe def Float.unsafeDecEq (a b : Float) : Decidable (a = b) :=
  if a == b then isTrue (unsafeCast ()) else isFalse (unsafeCast ())
@[implemented_by Float.unsafeDecEq]
private opaque Float.compDecEq (a b : Float) : Decidable (a = b)
instance : DecidableEq Float := Float.compDecEq"#;
91
/// `DecidableEq` instance for `Except ε α`, case-splitting on constructors.
const LEAN_PRELUDE_EXCEPT_DEC_EQ: &str = r#"instance [DecidableEq ε] [DecidableEq α] : DecidableEq (Except ε α)
  | .ok a, .ok b =>
    if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
  | .error a, .error b =>
    if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
  | .ok _, .error _ => isFalse (by intro h; cases h)
  | .error _, .ok _ => isFalse (by intro h; cases h)"#;
99
/// `Except.withDefault`: unwrap an `Except`, falling back to `d` on error.
const LEAN_PRELUDE_EXCEPT_NS: &str = r#"namespace Except
def withDefault (r : Except ε α) (d : α) : α :=
  match r with
  | .ok v => v
  | .error _ => d
end Except"#;
106
/// `Option.toExcept`: lift an `Option` into `Except`, using `e` when `none`.
const LEAN_PRELUDE_OPTION_TO_EXCEPT: &str = r#"def Option.toExcept (o : Option α) (e : ε) : Except ε α :=
  match o with
  | some v => .ok v
  | none => .error e"#;
111
/// `HAdd` instance so that `+` concatenates strings in generated code.
const LEAN_PRELUDE_STRING_HADD: &str = r#"instance : HAdd String String String := ⟨String.append⟩"#;
113
/// Fuel measure for string-position-advancing recursion: remaining characters
/// (plus one) scaled by a rank budget.
const LEAN_PRELUDE_PROOF_FUEL: &str = r#"def averStringPosFuel (s : String) (pos : Int) (rankBudget : Nat) : Nat :=
  (((s.data.length) - pos.toNat) + 1) * rankBudget"#;
116
/// Structural size measures over `List`/`Option`/`Except` (each adds 1 per
/// constructor), used in termination arguments.
const LEAN_PRELUDE_AVER_MEASURE: &str = r#"namespace AverMeasure
def list (elemMeasure : α → Nat) : List α → Nat
  | [] => 1
  | x :: xs => elemMeasure x + list elemMeasure xs + 1
def option (elemMeasure : α → Nat) : Option α → Nat
  | none => 1
  | some x => elemMeasure x + 1
def except (errMeasure : ε → Nat) (okMeasure : α → Nat) : Except ε α → Nat
  | .error e => errMeasure e + 1
  | .ok v => okMeasure v + 1
end AverMeasure"#;
128
/// Association-list implementation of Aver's map builtins. `set` replaces an
/// existing key in place (via `go`) or appends at the end.
const AVER_MAP_PRELUDE_BASE: &str = r#"namespace AverMap
def empty : List (α × β) := []
def get [DecidableEq α] (m : List (α × β)) (k : α) : Option β :=
  match m with
  | [] => none
  | (k', v) :: rest => if k = k' then some v else AverMap.get rest k
def set [DecidableEq α] (m : List (α × β)) (k : α) (v : β) : List (α × β) :=
  let rec go : List (α × β) → List (α × β)
    | [] => [(k, v)]
    | (k', v') :: rest => if k = k' then (k, v) :: rest else (k', v') :: go rest
  go m
def has [DecidableEq α] (m : List (α × β)) (k : α) : Bool :=
  m.any (fun p => decide (k = p.1))
def remove [DecidableEq α] (m : List (α × β)) (k : α) : List (α × β) :=
  m.filter (fun p => !(decide (k = p.1)))
def keys (m : List (α × β)) : List α := m.map Prod.fst
def values (m : List (α × β)) : List β := m.map Prod.snd
def entries (m : List (α × β)) : List (α × β) := m
def len (m : List (α × β)) : Nat := m.length
def fromList (entries : List (α × β)) : List (α × β) := entries"#;
149
/// Lemma: after `set m k v`, `has · k` is `true`.
const AVER_MAP_PRELUDE_HAS_SET_SELF: &str = r#"private theorem any_set_go_self [DecidableEq α] (k : α) (v : β) :
    ∀ (m : List (α × β)), List.any (AverMap.set.go k v m) (fun p => decide (k = p.1)) = true := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, List.any]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.set.go, List.any, h]
      · simp [AverMap.set.go, List.any, h, ih]

theorem has_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
    AverMap.has (AverMap.set m k v) k = true := by
  simpa [AverMap.has, AverMap.set] using any_set_go_self k v m"#;
166
/// Lemma: `get` after `set` of the same key yields the new value.
const AVER_MAP_PRELUDE_GET_SET_SELF: &str = r#"private theorem get_set_go_self [DecidableEq α] (k : α) (v : β) :
    ∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) k = some v := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, AverMap.get]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.set.go, AverMap.get, h]
      · simp [AverMap.set.go, AverMap.get, h, ih]

theorem get_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
    AverMap.get (AverMap.set m k v) k = some v := by
  simpa [AverMap.set] using get_set_go_self k v m"#;
183
/// Lemma: `set` leaves `get` of every *other* key unchanged.
const AVER_MAP_PRELUDE_GET_SET_OTHER: &str = r#"private theorem get_set_go_other [DecidableEq α] (k key : α) (v : β) (h : key ≠ k) :
    ∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) key = AverMap.get m key := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, AverMap.get, h]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases hk : k = k'
      · have hkey : key ≠ k' := by simpa [hk] using h
        simp [AverMap.set.go, AverMap.get, hk, hkey]
      · by_cases hkey : key = k'
        · simp [AverMap.set.go, AverMap.get, hk, hkey]
        · simp [AverMap.set.go, AverMap.get, hk, hkey, ih]

theorem get_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
    AverMap.get (AverMap.set m k v) key = AverMap.get m key := by
  simpa [AverMap.set] using get_set_go_other k key v h m"#;
203
/// Lemmas: `has` agrees with `(get ·).isSome`, hence `set` of a different key
/// leaves `has` unchanged (relies on `get_set_other`).
const AVER_MAP_PRELUDE_HAS_SET_OTHER: &str = r#"theorem has_eq_isSome_get [DecidableEq α] (m : List (α × β)) (k : α) :
    AverMap.has m k = (AverMap.get m k).isSome := by
  induction m with
  | nil =>
    simp [AverMap.has, AverMap.get]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.has, AverMap.get, List.any, h]
      · simpa [AverMap.has, AverMap.get, List.any, h] using ih

theorem has_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
    AverMap.has (AverMap.set m k v) key = AverMap.has m key := by
  rw [AverMap.has_eq_isSome_get, AverMap.has_eq_isSome_get]
  simp [AverMap.get_set_other, h]"#;
220
/// Closes the `AverMap` namespace opened by the preceding map fragments.
const AVER_MAP_PRELUDE_END: &str = r#"end AverMap"#;
222
/// List helpers: `Int`-indexed checked access and a stable-by-construction
/// insertion sort.
const LEAN_PRELUDE_AVER_LIST: &str = r#"namespace AverList
def get (xs : List α) (i : Int) : Option α :=
  if i < 0 then none else xs.get? i.toNat
private def insertSorted [Ord α] (x : α) : List α → List α
  | [] => [x]
  | y :: ys =>
    if compare x y == Ordering.lt || compare x y == Ordering.eq then
      x :: y :: ys
    else
      y :: insertSorted x ys
def sort [Ord α] (xs : List α) : List α :=
  xs.foldl (fun acc x => insertSorted x acc) []
end AverList"#;
236
/// Lean record for an HTTP header name/value pair.
const LEAN_PRELUDE_HEADER_TYPE: &str = r#"structure Header where
  name : String
  value : String
  deriving Repr, BEq, Inhabited, DecidableEq"#;
241
/// Lean record for an HTTP response (status, body, headers).
const LEAN_PRELUDE_HTTP_RESPONSE_TYPE: &str = r#"structure HttpResponse where
  status : Int
  body : String
  headers : List Header
  deriving Repr, BEq, Inhabited, DecidableEq"#;
247
/// Lean record for an HTTP request (method, path, body, headers).
const LEAN_PRELUDE_HTTP_REQUEST_TYPE: &str = r#"structure HttpRequest where
  method : String
  path : String
  body : String
  headers : List Header
  deriving Repr, BEq, Inhabited, DecidableEq"#;
254
/// Lean record describing a TCP connection handle.
const LEAN_PRELUDE_TCP_CONNECTION_TYPE: &str = r#"structure Tcp_Connection where
  id : String
  host : String
  port : Int
  deriving Repr, BEq, Inhabited, DecidableEq"#;
260
/// String helpers (`charAt`, `slice`, `chars`, float formatting) and the
/// `AverString` namespace with `split`/`splitOnChar`, plus simp lemmas proving
/// split/intercalate round-trip facts used by generated proofs.
const LEAN_PRELUDE_STRING_HELPERS: &str = r#"def String.charAt (s : String) (i : Int) : Option String :=
  if i < 0 then none
  else (s.toList.get? i.toNat).map Char.toString
theorem String.charAt_length_none (s : String) : String.charAt s s.length = none := by
  have hs : ¬ ((s.length : Int) < 0) := by omega
  unfold String.charAt
  simp [hs]
  change s.data.length ≤ s.length
  exact Nat.le_refl _
def String.slice (s : String) (start stop : Int) : String :=
  let startN := if start < 0 then 0 else start.toNat
  let stopN := if stop < 0 then 0 else stop.toNat
  let chars := s.toList
  String.mk ((chars.drop startN).take (stopN - startN))
private def trimFloatTrailingZerosChars (chars : List Char) : List Char :=
  let noZeros := (chars.reverse.dropWhile (fun c => c == '0')).reverse
  match noZeros.reverse with
  | '.' :: rest => rest.reverse
  | _ => noZeros
private def normalizeFloatString (s : String) : String :=
  if s.toList.any (fun c => c == '.') then
    String.mk (trimFloatTrailingZerosChars s.toList)
  else s
def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)
def String.chars (s : String) : List String := s.toList.map Char.toString
private theorem char_to_string_append_mk (c : Char) (chars : List Char) :
    Char.toString c ++ String.mk chars = String.mk (c :: chars) := by
  rfl
private theorem string_intercalate_empty_char_strings_go (acc : String) (chars : List Char) :
    String.intercalate.go acc "" (chars.map Char.toString) = acc ++ String.mk chars := by
  induction chars generalizing acc with
  | nil =>
    simp [String.intercalate.go]
  | cons c rest ih =>
    calc
      String.intercalate.go acc "" ((c :: rest).map Char.toString)
          = String.intercalate.go (acc ++ Char.toString c) "" (rest.map Char.toString) := by
        simp [String.intercalate.go]
      _ = (acc ++ Char.toString c) ++ String.mk rest := by
        simpa using ih (acc ++ Char.toString c)
      _ = acc ++ String.mk (c :: rest) := by
        simp [String.append_assoc, char_to_string_append_mk]
private theorem string_intercalate_empty_char_strings (chars : List Char) :
    String.intercalate "" (chars.map Char.toString) = String.mk chars := by
  cases chars with
  | nil =>
    simp [String.intercalate]
  | cons c rest =>
    simpa [String.intercalate] using string_intercalate_empty_char_strings_go c.toString rest
theorem String.intercalate_empty_chars (s : String) :
    String.intercalate "" (String.chars s) = s := by
  cases s with
  | mk chars =>
    simpa [String.chars] using string_intercalate_empty_char_strings chars
namespace AverString
def splitOnCharGo (currentRev : List Char) (sep : Char) : List Char → List String
  | [] => [String.mk currentRev.reverse]
  | c :: cs =>
    if c == sep then
      String.mk currentRev.reverse :: splitOnCharGo [] sep cs
    else
      splitOnCharGo (c :: currentRev) sep cs
def splitOnChar (s : String) (sep : Char) : List String :=
  splitOnCharGo [] sep s.toList
def split (s delim : String) : List String :=
  match delim.toList with
  | [] => "" :: (s.toList.map Char.toString) ++ [""]
  | [c] => splitOnChar s c
  | _ => s.splitOn delim
@[simp] private theorem char_toString_data (c : Char) : c.toString.data = [c] := by
  rfl
private theorem splitOnCharGo_until_sep
    (prefixRev part tail : List Char) (sep : Char) :
    part.all (fun c => c != sep) = true ->
    splitOnCharGo prefixRev sep (part ++ sep :: tail) =
      String.mk (prefixRev.reverse ++ part) :: splitOnCharGo [] sep tail := by
  intro h_safe
  induction part generalizing prefixRev with
  | nil =>
    simp [splitOnCharGo]
  | cons c rest ih =>
    simp at h_safe
    have h_rest : (rest.all fun c => c != sep) = true := by
      simpa using h_safe.2
    simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
      (ih (c :: prefixRev) h_rest)
private theorem splitOnCharGo_no_sep
    (prefixRev chars : List Char) (sep : Char) :
    chars.all (fun c => c != sep) = true ->
    splitOnCharGo prefixRev sep chars =
      [String.mk (prefixRev.reverse ++ chars)] := by
  intro h_safe
  induction chars generalizing prefixRev with
  | nil =>
    simp [splitOnCharGo]
  | cons c rest ih =>
    simp at h_safe
    have h_rest : (rest.all fun c => c != sep) = true := by
      simpa using h_safe.2
    simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
      (ih (c :: prefixRev) h_rest)
@[simp] theorem split_single_char_append
    (head tail : String) (sep : Char) :
    head.toList.all (fun c => c != sep) = true ->
    split (head ++ Char.toString sep ++ tail) (Char.toString sep) =
      head :: split tail (Char.toString sep) := by
  intro h_safe
  simpa [split, splitOnChar] using
    (splitOnCharGo_until_sep [] head.data tail.data sep h_safe)
@[simp] theorem split_single_char_no_sep
    (s : String) (sep : Char) :
    s.toList.all (fun c => c != sep) = true ->
    split s (Char.toString sep) = [s] := by
  intro h_safe
  simpa [split, splitOnChar] using
    (splitOnCharGo_no_sep [] s.data sep h_safe)
private theorem intercalate_go_prefix
    (pref acc sep : String) (rest : List String) :
    String.intercalate.go (pref ++ sep ++ acc) sep rest =
      pref ++ sep ++ String.intercalate.go acc sep rest := by
  induction rest generalizing acc with
  | nil =>
    simp [String.intercalate.go, String.append_assoc]
  | cons x xs ih =>
    simpa [String.intercalate.go, String.append_assoc] using
      (ih (acc ++ sep ++ x))
@[simp] theorem split_intercalate_trailing_single_char
    (parts : List String) (sep : Char) :
    parts.all (fun part => part.toList.all (fun c => c != sep)) = true ->
    split (String.intercalate (Char.toString sep) parts ++ Char.toString sep) (Char.toString sep) =
      match parts with
      | [] => ["", ""]
      | _ => parts ++ [""] := by
  intro h_safe
  induction parts with
  | nil =>
    simp [split, splitOnChar, splitOnCharGo]
  | cons part rest ih =>
    simp at h_safe
    have h_part : (part.toList.all fun c => c != sep) = true := by
      simpa using h_safe.1
    cases rest with
    | nil =>
      have h_empty : ("".toList.all fun c => c != sep) = true := by simp
      calc
        split (String.intercalate.go part (Char.toString sep) [] ++ Char.toString sep) (Char.toString sep)
            = split (part ++ Char.toString sep) (Char.toString sep) := by
          simp [String.intercalate.go]
        _ = part :: split "" (Char.toString sep) := by
          simpa using split_single_char_append part "" sep h_part
        _ = [part, ""] := by
          simp [split_single_char_no_sep, h_empty]
    | cons next rest' =>
      have h_rest : ((next :: rest').all fun part => part.toList.all fun c => c != sep) = true := by
        simpa using h_safe.2
      calc
        split
            (String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep)
            (Char.toString sep)
          =
          split
            (part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep))
            (Char.toString sep) := by
          have h_join :
            String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
              = part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
            calc
              String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
                  = String.intercalate.go (part ++ Char.toString sep ++ next) (Char.toString sep) rest' ++ Char.toString sep := by
                simp [String.intercalate.go]
              _ = part ++ Char.toString sep ++ (String.intercalate.go next (Char.toString sep) rest' ++ Char.toString sep) := by
                rw [intercalate_go_prefix part next (Char.toString sep) rest']
                simp [String.append_assoc]
              _ = part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
                simp [String.intercalate, String.intercalate.go]
          simpa using congrArg (fun s => split s (Char.toString sep)) h_join
        _ = part :: split
              (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
              (Char.toString sep) := by
          simpa using split_single_char_append
            part
            (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
            sep
            h_part
        _ = part :: (next :: rest' ++ [""]) := by
          simpa using ih h_rest
end AverString"#;
448
/// Decimal digit machinery (`AverDigits`), `String.fromInt`/`Int.fromString`
/// with a proved round-trip theorem, and a hand-rolled `Float.fromString`
/// supporting sign, fraction, and exponent parts.
const LEAN_PRELUDE_NUMERIC_PARSE: &str = r#"namespace AverDigits
def foldDigitsAcc (acc : Nat) : List Nat -> Nat
  | [] => acc
  | d :: ds => foldDigitsAcc (acc * 10 + d) ds

def foldDigits (digits : List Nat) : Nat :=
  foldDigitsAcc 0 digits

private theorem foldDigitsAcc_append_singleton (acc : Nat) (xs : List Nat) (d : Nat) :
    foldDigitsAcc acc (xs ++ [d]) = foldDigitsAcc acc xs * 10 + d := by
  induction xs generalizing acc with
  | nil =>
    simp [foldDigitsAcc]
  | cons x xs ih =>
    simp [foldDigitsAcc, ih, Nat.left_distrib, Nat.add_assoc, Nat.add_left_comm]

private theorem foldDigits_append_singleton (xs : List Nat) (d : Nat) :
    foldDigits (xs ++ [d]) = foldDigits xs * 10 + d := by
  simpa [foldDigits] using foldDigitsAcc_append_singleton 0 xs d

def natDigits : Nat -> List Nat
  | n =>
    if n < 10 then
      [n]
    else
      natDigits (n / 10) ++ [n % 10]
termination_by
  n => n

theorem natDigits_nonempty (n : Nat) : natDigits n ≠ [] := by
  by_cases h : n < 10
  · rw [natDigits.eq_1]
    simp [h]
  · rw [natDigits.eq_1]
    simp [h]

theorem natDigits_digits_lt_ten : ∀ n : Nat, ∀ d ∈ natDigits n, d < 10 := by
  intro n d hd
  by_cases h : n < 10
  · rw [natDigits.eq_1] at hd
    simp [h] at hd
    rcases hd with rfl
    exact h
  · rw [natDigits.eq_1] at hd
    simp [h] at hd
    rcases hd with hd | hd
    · exact natDigits_digits_lt_ten (n / 10) d hd
    · subst hd
      exact Nat.mod_lt n (by omega)

theorem foldDigits_natDigits : ∀ n : Nat, foldDigits (natDigits n) = n := by
  intro n
  by_cases h : n < 10
  · rw [natDigits.eq_1]
    simp [h, foldDigits, foldDigitsAcc]
  · rw [natDigits.eq_1]
    simp [h]
    rw [foldDigits_append_singleton]
    rw [foldDigits_natDigits (n / 10)]
    omega

def digitChar : Nat -> Char
  | 0 => '0' | 1 => '1' | 2 => '2' | 3 => '3' | 4 => '4'
  | 5 => '5' | 6 => '6' | 7 => '7' | 8 => '8' | 9 => '9'
  | _ => '0'

def charToDigit? : Char -> Option Nat
  | '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3 | '4' => some 4
  | '5' => some 5 | '6' => some 6 | '7' => some 7 | '8' => some 8 | '9' => some 9
  | _ => none

theorem charToDigit_digitChar : ∀ d : Nat, d < 10 -> charToDigit? (digitChar d) = some d
  | 0, _ => by simp [digitChar, charToDigit?]
  | 1, _ => by simp [digitChar, charToDigit?]
  | 2, _ => by simp [digitChar, charToDigit?]
  | 3, _ => by simp [digitChar, charToDigit?]
  | 4, _ => by simp [digitChar, charToDigit?]
  | 5, _ => by simp [digitChar, charToDigit?]
  | 6, _ => by simp [digitChar, charToDigit?]
  | 7, _ => by simp [digitChar, charToDigit?]
  | 8, _ => by simp [digitChar, charToDigit?]
  | 9, _ => by simp [digitChar, charToDigit?]
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem digitChar_ne_minus : ∀ d : Nat, d < 10 -> digitChar d ≠ '-'
  | 0, _ => by decide
  | 1, _ => by decide
  | 2, _ => by decide
  | 3, _ => by decide
  | 4, _ => by decide
  | 5, _ => by decide
  | 6, _ => by decide
  | 7, _ => by decide
  | 8, _ => by decide
  | 9, _ => by decide
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->
    Char.toString (digitChar d) ≠ " " ∧
    Char.toString (digitChar d) ≠ "\t" ∧
    Char.toString (digitChar d) ≠ "\n" ∧
    Char.toString (digitChar d) ≠ "\r"
  | 0, _ => by decide
  | 1, _ => by decide
  | 2, _ => by decide
  | 3, _ => by decide
  | 4, _ => by decide
  | 5, _ => by decide
  | 6, _ => by decide
  | 7, _ => by decide
  | 8, _ => by decide
  | 9, _ => by decide
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem mapM_charToDigit_digits : ∀ ds : List Nat,
    (∀ d ∈ ds, d < 10) -> List.mapM charToDigit? (ds.map digitChar) = some ds := by
  intro ds hds
  induction ds with
  | nil =>
    simp
  | cons d ds ih =>
    have hd : d < 10 := hds d (by simp)
    have htail : ∀ x ∈ ds, x < 10 := by
      intro x hx
      exact hds x (by simp [hx])
    simp [charToDigit_digitChar d hd, ih htail]

def natDigitsChars (n : Nat) : List Char :=
  (natDigits n).map digitChar

def parseNatChars (chars : List Char) : Option Nat :=
  match chars with
  | [] => none
  | _ => do
    let digits <- List.mapM charToDigit? chars
    pure (foldDigits digits)

theorem parseNatChars_nat (n : Nat) :
    parseNatChars (natDigitsChars n) = some n := by
  unfold parseNatChars natDigitsChars
  cases h : (natDigits n).map digitChar with
  | nil =>
    exfalso
    exact natDigits_nonempty n (List.map_eq_nil_iff.mp h)
  | cons hd tl =>
    have hdigits : List.mapM charToDigit? (List.map digitChar (natDigits n)) = some (natDigits n) :=
      mapM_charToDigit_digits (natDigits n) (fun d hd => natDigits_digits_lt_ten n d hd)
    rw [h] at hdigits
    simp [h, hdigits, foldDigits_natDigits]
end AverDigits

def String.fromInt (n : Int) : String :=
  match n with
  | .ofNat m => String.mk (AverDigits.natDigitsChars m)
  | .negSucc m => String.mk ('-' :: AverDigits.natDigitsChars (m + 1))

def Int.fromString (s : String) : Except String Int :=
  match s.toList with
  | [] => .error ("Cannot parse '" ++ s ++ "' as Int")
  | '-' :: rest =>
    match AverDigits.parseNatChars rest with
    | some n => .ok (-Int.ofNat n)
    | none => .error ("Cannot parse '" ++ s ++ "' as Int")
  | chars =>
    match AverDigits.parseNatChars chars with
    | some n => .ok (Int.ofNat n)
    | none => .error ("Cannot parse '" ++ s ++ "' as Int")

theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n
  | .ofNat m => by
    cases h : AverDigits.natDigits m with
    | nil =>
      exfalso
      exact AverDigits.natDigits_nonempty m h
    | cons d ds =>
      have hd : d < 10 := AverDigits.natDigits_digits_lt_ten m d (by simp [h])
      have hne : AverDigits.digitChar d ≠ '-' := AverDigits.digitChar_ne_minus d hd
      have hparse : AverDigits.parseNatChars (AverDigits.digitChar d :: List.map AverDigits.digitChar ds) = some m := by
        simpa [AverDigits.natDigitsChars, h] using AverDigits.parseNatChars_nat m
      simp [String.fromInt, Int.fromString, AverDigits.natDigitsChars, h, hne, hparse]
  | .negSucc m => by
    simp [String.fromInt, Int.fromString, AverDigits.parseNatChars_nat]
    rfl

private def charDigitsToNat (cs : List Char) : Nat :=
  cs.foldl (fun acc c => acc * 10 + (c.toNat - '0'.toNat)) 0

private def parseExpPart : List Char → (Bool × List Char)
  | '-' :: rest => (true, rest.takeWhile Char.isDigit)
  | '+' :: rest => (false, rest.takeWhile Char.isDigit)
  | rest => (false, rest.takeWhile Char.isDigit)

def Float.fromString (s : String) : Except String Float :=
  let chars := s.toList
  let (neg, chars) := match chars with
    | '-' :: rest => (true, rest)
    | _ => (false, chars)
  let intPart := chars.takeWhile Char.isDigit
  let rest := chars.dropWhile Char.isDigit
  let (fracPart, rest) := match rest with
    | '.' :: rest => (rest.takeWhile Char.isDigit, rest.dropWhile Char.isDigit)
    | _ => ([], rest)
  let (expNeg, expDigits) := match rest with
    | 'e' :: rest => parseExpPart rest
    | 'E' :: rest => parseExpPart rest
    | _ => (false, [])
  if intPart.isEmpty && fracPart.isEmpty then .error ("Invalid float: " ++ s)
  else
    let mantissa := charDigitsToNat (intPart ++ fracPart)
    let fracLen : Int := fracPart.length
    let expVal : Int := charDigitsToNat expDigits
    let shift : Int := (if expNeg then -expVal else expVal) - fracLen
    let f := if shift >= 0 then Float.ofScientific mantissa false shift.toNat
             else Float.ofScientific mantissa true ((-shift).toNat)
    .ok (if neg then -f else f)"#;
667
/// Char code-point conversion helpers (`Char.toCode`/`Char.fromCode`, which
/// rejects out-of-range and surrogate code points) and byte/hex conversion
/// in the `AverByte` namespace.
const LEAN_PRELUDE_CHAR_BYTE: &str = r#"def Char.toCode (s : String) : Int :=
  match s.toList.head? with
  | some c => (c.toNat : Int)
  | none => panic! "Char.toCode: string is empty"
def Char.fromCode (n : Int) : Option String :=
  if n < 0 || n > 1114111 then none
  else if n >= 55296 && n <= 57343 then none
  else some (Char.toString (Char.ofNat n.toNat))

def hexDigit (n : Int) : String :=
  match n with
  | 0 => "0" | 1 => "1" | 2 => "2" | 3 => "3"
  | 4 => "4" | 5 => "5" | 6 => "6" | 7 => "7"
  | 8 => "8" | 9 => "9" | 10 => "a" | 11 => "b"
  | 12 => "c" | 13 => "d" | 14 => "e" | 15 => "f"
  | _ => "?"

def byteToHex (code : Int) : String :=
  hexDigit (code / 16) ++ hexDigit (code % 16)

namespace AverByte
private def hexValue (c : Char) : Option Int :=
  match c with
  | '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3
  | '4' => some 4 | '5' => some 5 | '6' => some 6 | '7' => some 7
  | '8' => some 8 | '9' => some 9 | 'a' => some 10 | 'b' => some 11
  | 'c' => some 12 | 'd' => some 13 | 'e' => some 14 | 'f' => some 15
  | 'A' => some 10 | 'B' => some 11 | 'C' => some 12 | 'D' => some 13
  | 'E' => some 14 | 'F' => some 15
  | _ => none
def toHex (n : Int) : Except String String :=
  if n < 0 || n > 255 then
    .error ("Byte.toHex: " ++ toString n ++ " is out of range 0-255")
  else
    .ok (byteToHex n)
def fromHex (s : String) : Except String Int :=
  match s.toList with
  | [hi, lo] =>
    match hexValue hi, hexValue lo with
    | some h, some l => .ok (h * 16 + l)
    | _, _ => .error ("Byte.fromHex: invalid hex '" ++ s ++ "'")
  | _ => .error ("Byte.fromHex: expected exactly 2 hex chars, got '" ++ s ++ "'")
end AverByte"#;
711
712fn pure_fns(ctx: &CodegenContext) -> Vec<&FnDef> {
713 ctx.modules
714 .iter()
715 .flat_map(|m| m.fn_defs.iter())
716 .chain(ctx.fn_defs.iter())
717 .filter(|fd| toplevel::is_pure_fn(fd))
718 .collect()
719}
720
721fn recursive_type_names(ctx: &CodegenContext) -> HashSet<String> {
722 ctx.modules
723 .iter()
724 .flat_map(|m| m.type_defs.iter())
725 .chain(ctx.type_defs.iter())
726 .filter(|td| toplevel::is_recursive_type_def(td))
727 .map(|td| toplevel::type_def_name(td).to_string())
728 .collect()
729}
730
731fn recursive_pure_fn_names(ctx: &CodegenContext) -> HashSet<String> {
732 let pure_names: HashSet<String> = pure_fns(ctx)
733 .into_iter()
734 .map(|fd| fd.name.clone())
735 .collect();
736 let mut callgraph_items = ctx.items.clone();
737 for module in &ctx.modules {
738 for fd in &module.fn_defs {
739 callgraph_items.push(TopLevel::FnDef(fd.clone()));
740 }
741 }
742 call_graph::find_recursive_fns(&callgraph_items)
743 .into_iter()
744 .filter(|name| pure_names.contains(name))
745 .collect()
746}
747
748fn verify_counter_key(vb: &crate::ast::VerifyBlock) -> String {
749 match &vb.kind {
750 VerifyKind::Cases => format!("fn:{}", vb.fn_name),
751 VerifyKind::Law(law) => format!("law:{}::{}", vb.fn_name, law.name),
752 }
753}
754
755fn lean_project_name(ctx: &CodegenContext) -> String {
756 ctx.items
757 .iter()
758 .find_map(|item| match item {
759 TopLevel::Module(m) => Some(m.name.clone()),
760 _ => None,
761 })
762 .unwrap_or_else(|| capitalize_first(&ctx.project_name))
763}
764
765fn expr_to_dotted_name(expr: &Spanned<Expr>) -> Option<String> {
766 match &expr.node {
767 Expr::Ident(name) => Some(name.clone()),
768 Expr::Attr(obj, field) => expr_to_dotted_name(obj).map(|p| format!("{}.{}", p, field)),
769 _ => None,
770 }
771}
772
/// True when `name` equals `target` exactly, or when the final `.`-separated
/// segment of `name` equals `target` (so `Mod.foo` matches target `foo`).
fn call_matches(name: &str, target: &str) -> bool {
    if name == target {
        return true;
    }
    let last_segment = name.rsplit('.').next();
    last_segment == Some(target)
}
776
/// True when `name` occurs in `targets`, either verbatim or by its final
/// `.`-separated segment (mirrors `call_matches_any`).
fn call_is_in_set(name: &str, targets: &HashSet<String>) -> bool {
    targets.contains(name)
        || name
            .rsplit('.')
            .next()
            .map_or(false, |last| targets.contains(last))
}
780
/// Resolves `name` to the canonical target name it refers to: the full name
/// if it is in `targets`, otherwise its last `.`-separated segment if that is
/// in `targets`, otherwise `None`.
fn canonical_callee_name(name: &str, targets: &HashSet<String>) -> Option<String> {
    if targets.contains(name) {
        return Some(name.to_string());
    }
    let last = name.rsplit('.').next()?;
    if targets.contains(last) {
        Some(last.to_string())
    } else {
        None
    }
}
790
/// True when `name` occurs in `targets`, either verbatim (supporting dotted
/// names) or by its final `.`-separated segment.
fn call_matches_any(name: &str, targets: &HashSet<String>) -> bool {
    if targets.contains(name) {
        return true;
    }
    // `str::rsplit` always yields at least one item (even for ""), so the
    // original `match`'s `None => false` arm was unreachable; `map_or`
    // expresses the same check without the dead branch.
    name.rsplit('.')
        .next()
        .map_or(false, |last| targets.contains(last))
}
800
801fn is_int_minus_positive(expr: &Spanned<Expr>, param_name: &str) -> bool {
802 match &expr.node {
803 Expr::BinOp(BinOp::Sub, left, right) => {
804 matches!(&left.node, Expr::Ident(id) if id == param_name)
805 && matches!(&right.node, Expr::Literal(crate::ast::Literal::Int(n)) if *n >= 1)
806 }
807 Expr::FnCall(callee, args) => {
808 let Some(name) = expr_to_dotted_name(callee) else {
809 return false;
810 };
811 (name == "Int.sub" || name == "int.sub")
812 && args.len() == 2
813 && matches!(&args[0].node, Expr::Ident(id) if id == param_name)
814 && matches!(&args[1].node, Expr::Literal(crate::ast::Literal::Int(n)) if *n >= 1)
815 }
816 _ => false,
817 }
818}
819
/// Recursively walks `expr` and appends every call it contains to `out` as a
/// `(dotted-name, argument-list)` pair, preserving traversal order (callers
/// rely on `out` being a flat, ordered list).
///
/// Only calls whose callee renders as a dotted identifier path (see
/// `expr_to_dotted_name`) are recorded; other callees are still traversed so
/// calls nested inside them are not missed.
fn collect_calls_from_expr<'a>(
    expr: &'a Spanned<Expr>,
    out: &mut Vec<(String, Vec<&'a Spanned<Expr>>)>,
) {
    match &expr.node {
        Expr::FnCall(callee, args) => {
            if let Some(name) = expr_to_dotted_name(callee) {
                out.push((name, args.iter().collect()));
            }
            // The callee expression itself may contain further calls.
            collect_calls_from_expr(callee, out);
            for arg in args {
                collect_calls_from_expr(arg, out);
            }
        }
        Expr::TailCall(boxed) => {
            // Tail calls carry an explicit target name, so they are always
            // recorded (no dotted-name rendering needed).
            let TailCallData {
                target: name, args, ..
            } = boxed.as_ref();
            out.push((name.clone(), args.iter().collect()));
            for arg in args {
                collect_calls_from_expr(arg, out);
            }
        }
        Expr::Attr(obj, _) => collect_calls_from_expr(obj, out),
        Expr::BinOp(_, left, right) => {
            collect_calls_from_expr(left, out);
            collect_calls_from_expr(right, out);
        }
        Expr::Match { subject, arms, .. } => {
            collect_calls_from_expr(subject, out);
            for arm in arms {
                collect_calls_from_expr(&arm.body, out);
            }
        }
        Expr::Constructor(_, inner) => {
            if let Some(inner) = inner {
                collect_calls_from_expr(inner, out);
            }
        }
        Expr::ErrorProp(inner) => collect_calls_from_expr(inner, out),
        Expr::InterpolatedStr(parts) => {
            // Only parsed interpolation segments hold expressions.
            for p in parts {
                if let crate::ast::StrPart::Parsed(e) = p {
                    collect_calls_from_expr(e, out);
                }
            }
        }
        Expr::List(items) | Expr::Tuple(items) | Expr::IndependentProduct(items, _) => {
            for item in items {
                collect_calls_from_expr(item, out);
            }
        }
        Expr::MapLiteral(entries) => {
            for (k, v) in entries {
                collect_calls_from_expr(k, out);
                collect_calls_from_expr(v, out);
            }
        }
        Expr::RecordCreate { fields, .. } => {
            for (_, v) in fields {
                collect_calls_from_expr(v, out);
            }
        }
        Expr::RecordUpdate { base, updates, .. } => {
            collect_calls_from_expr(base, out);
            for (_, v) in updates {
                collect_calls_from_expr(v, out);
            }
        }
        // Leaves: no calls to collect.
        Expr::Literal(_) | Expr::Ident(_) | Expr::Resolved { .. } => {}
    }
}
892
893fn collect_calls_from_body(body: &FnBody) -> Vec<(String, Vec<&Spanned<Expr>>)> {
894 let mut out = Vec::new();
895 for stmt in body.stmts() {
896 match stmt {
897 Stmt::Binding(_, _, expr) | Stmt::Expr(expr) => collect_calls_from_expr(expr, &mut out),
898 }
899 }
900 out
901}
902
/// Recursively collects the names bound to the *tail* of `list_param_name`
/// by cons-patterns in `match` expressions over that parameter.
///
/// Whenever a `match` subject is exactly the identifier `list_param_name`,
/// every `Pattern::Cons(_, tail)` arm contributes its tail binder to
/// `tails`. All sub-expressions are visited so nested matches are found.
fn collect_list_tail_binders_from_expr(
    expr: &Spanned<Expr>,
    list_param_name: &str,
    tails: &mut HashSet<String>,
) {
    match &expr.node {
        Expr::Match { subject, arms, .. } => {
            if matches!(&subject.node, Expr::Ident(id) if id == list_param_name) {
                for MatchArm { pattern, .. } in arms {
                    if let Pattern::Cons(_, tail) = pattern {
                        tails.insert(tail.clone());
                    }
                }
            }
            // Arm bodies and the subject may contain further matches.
            for arm in arms {
                collect_list_tail_binders_from_expr(&arm.body, list_param_name, tails);
            }
            collect_list_tail_binders_from_expr(subject, list_param_name, tails);
        }
        Expr::FnCall(callee, args) => {
            collect_list_tail_binders_from_expr(callee, list_param_name, tails);
            for arg in args {
                collect_list_tail_binders_from_expr(arg, list_param_name, tails);
            }
        }
        Expr::TailCall(boxed) => {
            let TailCallData {
                target: _, args, ..
            } = boxed.as_ref();
            for arg in args {
                collect_list_tail_binders_from_expr(arg, list_param_name, tails);
            }
        }
        Expr::Attr(obj, _) => collect_list_tail_binders_from_expr(obj, list_param_name, tails),
        Expr::BinOp(_, left, right) => {
            collect_list_tail_binders_from_expr(left, list_param_name, tails);
            collect_list_tail_binders_from_expr(right, list_param_name, tails);
        }
        Expr::Constructor(_, inner) => {
            if let Some(inner) = inner {
                collect_list_tail_binders_from_expr(inner, list_param_name, tails);
            }
        }
        Expr::ErrorProp(inner) => {
            collect_list_tail_binders_from_expr(inner, list_param_name, tails)
        }
        Expr::InterpolatedStr(parts) => {
            for p in parts {
                if let crate::ast::StrPart::Parsed(e) = p {
                    collect_list_tail_binders_from_expr(e, list_param_name, tails);
                }
            }
        }
        Expr::List(items) | Expr::Tuple(items) | Expr::IndependentProduct(items, _) => {
            for item in items {
                collect_list_tail_binders_from_expr(item, list_param_name, tails);
            }
        }
        Expr::MapLiteral(entries) => {
            for (k, v) in entries {
                collect_list_tail_binders_from_expr(k, list_param_name, tails);
                collect_list_tail_binders_from_expr(v, list_param_name, tails);
            }
        }
        Expr::RecordCreate { fields, .. } => {
            for (_, v) in fields {
                collect_list_tail_binders_from_expr(v, list_param_name, tails);
            }
        }
        Expr::RecordUpdate { base, updates, .. } => {
            collect_list_tail_binders_from_expr(base, list_param_name, tails);
            for (_, v) in updates {
                collect_list_tail_binders_from_expr(v, list_param_name, tails);
            }
        }
        // Leaves: nothing to collect.
        Expr::Literal(_) | Expr::Ident(_) | Expr::Resolved { .. } => {}
    }
}
981
982fn collect_list_tail_binders(fd: &FnDef, list_param_name: &str) -> HashSet<String> {
983 let mut tails = HashSet::new();
984 for stmt in fd.body.stmts() {
985 match stmt {
986 Stmt::Binding(_, _, expr) | Stmt::Expr(expr) => {
987 collect_list_tail_binders_from_expr(expr, list_param_name, &mut tails)
988 }
989 }
990 }
991 tails
992}
993
994fn find_type_def<'a>(ctx: &'a CodegenContext, type_name: &str) -> Option<&'a TypeDef> {
995 ctx.modules
996 .iter()
997 .flat_map(|m| m.type_defs.iter())
998 .chain(ctx.type_defs.iter())
999 .find(|td| toplevel::type_def_name(td) == type_name)
1000}
1001
1002fn recursive_constructor_binders(
1003 td: &TypeDef,
1004 variant_name: &str,
1005 binders: &[String],
1006) -> Vec<String> {
1007 let variant_short = variant_name.rsplit('.').next().unwrap_or(variant_name);
1008 match td {
1009 TypeDef::Sum { name, variants, .. } => variants
1010 .iter()
1011 .find(|variant| variant.name == variant_short)
1012 .map(|variant| {
1013 variant
1014 .fields
1015 .iter()
1016 .zip(binders.iter())
1017 .filter_map(|(field_ty, binder)| {
1018 (field_ty.trim() == name).then_some(binder.clone())
1019 })
1020 .collect()
1021 })
1022 .unwrap_or_default(),
1023 TypeDef::Product { .. } => Vec::new(),
1024 }
1025}
1026
/// One growth step of the recursive-subterm analysis: whenever a `match`
/// subject is an identifier in `tracked`, the binders at recursive positions
/// of each constructor arm (per `recursive_constructor_binders`) are added
/// to `out`. All sub-expressions are visited recursively.
fn grow_recursive_subterm_binders_from_expr(
    expr: &Spanned<Expr>,
    tracked: &HashSet<String>,
    td: &TypeDef,
    out: &mut HashSet<String>,
) {
    match &expr.node {
        Expr::Match { subject, arms, .. } => {
            if let Expr::Ident(subject_name) = &subject.node
                && tracked.contains(subject_name)
            {
                for arm in arms {
                    if let Pattern::Constructor(variant_name, binders) = &arm.pattern {
                        out.extend(recursive_constructor_binders(td, variant_name, binders));
                    }
                }
            }
            grow_recursive_subterm_binders_from_expr(subject, tracked, td, out);
            for arm in arms {
                grow_recursive_subterm_binders_from_expr(&arm.body, tracked, td, out);
            }
        }
        Expr::FnCall(callee, args) => {
            grow_recursive_subterm_binders_from_expr(callee, tracked, td, out);
            for arg in args {
                grow_recursive_subterm_binders_from_expr(arg, tracked, td, out);
            }
        }
        Expr::Attr(obj, _) => grow_recursive_subterm_binders_from_expr(obj, tracked, td, out),
        Expr::BinOp(_, left, right) => {
            grow_recursive_subterm_binders_from_expr(left, tracked, td, out);
            grow_recursive_subterm_binders_from_expr(right, tracked, td, out);
        }
        Expr::Constructor(_, Some(inner)) | Expr::ErrorProp(inner) => {
            grow_recursive_subterm_binders_from_expr(inner, tracked, td, out)
        }
        Expr::InterpolatedStr(parts) => {
            for part in parts {
                if let crate::ast::StrPart::Parsed(inner) = part {
                    grow_recursive_subterm_binders_from_expr(inner, tracked, td, out);
                }
            }
        }
        Expr::List(items) | Expr::Tuple(items) | Expr::IndependentProduct(items, _) => {
            for item in items {
                grow_recursive_subterm_binders_from_expr(item, tracked, td, out);
            }
        }
        Expr::MapLiteral(entries) => {
            for (k, v) in entries {
                grow_recursive_subterm_binders_from_expr(k, tracked, td, out);
                grow_recursive_subterm_binders_from_expr(v, tracked, td, out);
            }
        }
        Expr::RecordCreate { fields, .. } => {
            for (_, v) in fields {
                grow_recursive_subterm_binders_from_expr(v, tracked, td, out);
            }
        }
        Expr::RecordUpdate { base, updates, .. } => {
            grow_recursive_subterm_binders_from_expr(base, tracked, td, out);
            for (_, v) in updates {
                grow_recursive_subterm_binders_from_expr(v, tracked, td, out);
            }
        }
        Expr::TailCall(boxed) => {
            for arg in &boxed.args {
                grow_recursive_subterm_binders_from_expr(arg, tracked, td, out);
            }
        }
        // Leaves and nullary constructors: nothing to grow.
        Expr::Literal(_) | Expr::Ident(_) | Expr::Resolved { .. } | Expr::Constructor(_, None) => {}
    }
}
1100
/// Computes, to a fixpoint, the set of binder names that are strict
/// recursive subterms of `param_name` (of type `param_type`): binders bound
/// at recursive constructor positions of the parameter, or of any already
/// discovered subterm. The parameter itself is removed from the result.
/// Returns an empty set when `param_type` has no known type definition.
fn collect_recursive_subterm_binders(
    fd: &FnDef,
    param_name: &str,
    param_type: &str,
    ctx: &CodegenContext,
) -> HashSet<String> {
    let Some(td) = find_type_def(ctx, param_type) else {
        return HashSet::new();
    };
    // Seed with the parameter; grow until no new binders are discovered.
    let mut tracked: HashSet<String> = HashSet::from([param_name.to_string()]);
    loop {
        let mut discovered = HashSet::new();
        for stmt in fd.body.stmts() {
            match stmt {
                Stmt::Binding(_, _, expr) | Stmt::Expr(expr) => {
                    grow_recursive_subterm_binders_from_expr(expr, &tracked, td, &mut discovered);
                }
            }
        }
        let before = tracked.len();
        tracked.extend(discovered);
        if tracked.len() == before {
            break;
        }
    }
    // Only strict subterms count toward a decreasing measure.
    tracked.remove(param_name);
    tracked
}
1129
1130fn single_int_countdown_param_index(fd: &FnDef) -> Option<usize> {
1131 let recursive_calls: Vec<Vec<&Spanned<Expr>>> = collect_calls_from_body(fd.body.as_ref())
1132 .into_iter()
1133 .filter(|(name, _)| call_matches(name, &fd.name))
1134 .map(|(_, args)| args)
1135 .collect();
1136 if recursive_calls.is_empty() {
1137 return None;
1138 }
1139
1140 fd.params
1141 .iter()
1142 .enumerate()
1143 .find_map(|(idx, (param_name, param_ty))| {
1144 if param_ty != "Int" {
1145 return None;
1146 }
1147 let countdown_ok = recursive_calls.iter().all(|args| {
1148 args.get(idx)
1149 .cloned()
1150 .is_some_and(|arg| is_int_minus_positive(arg, param_name))
1151 });
1152 if countdown_ok {
1153 return Some(idx);
1154 }
1155
1156 let ascent_ok = recursive_calls.iter().all(|args| {
1159 args.get(idx)
1160 .copied()
1161 .is_some_and(|arg| is_int_plus_positive(arg, param_name))
1162 });
1163 (ascent_ok && has_negative_guarded_ascent(fd, param_name)).then_some(idx)
1164 })
1165}
1166
/// Checks that `fd`'s body ends in a `match` on `param_name < 0` whose
/// `true` arm recurses and whose `false` arm does not — i.e. an ascending
/// recursion that provably stops once the parameter reaches zero.
fn has_negative_guarded_ascent(fd: &FnDef, param_name: &str) -> bool {
    let Some(tail) = fd.body.tail_expr() else {
        return false;
    };
    let Expr::Match { subject, arms, .. } = &tail.node else {
        return false;
    };
    // The guard must be literally `param < 0`.
    let Expr::BinOp(BinOp::Lt, left, right) = &subject.node else {
        return false;
    };
    if !is_ident(left, param_name)
        || !matches!(&right.node, Expr::Literal(crate::ast::Literal::Int(0)))
    {
        return false;
    }

    // Exactly the boolean-literal arms are allowed; anything else bails out.
    let mut true_arm = None;
    let mut false_arm = None;
    for arm in arms {
        match arm.pattern {
            Pattern::Literal(crate::ast::Literal::Bool(true)) => true_arm = Some(arm.body.as_ref()),
            Pattern::Literal(crate::ast::Literal::Bool(false)) => {
                false_arm = Some(arm.body.as_ref())
            }
            _ => return false,
        }
    }

    let Some(true_arm) = true_arm else {
        return false;
    };
    let Some(false_arm) = false_arm else {
        return false;
    };

    let mut true_calls = Vec::new();
    collect_calls_from_expr(true_arm, &mut true_calls);
    let mut false_calls = Vec::new();
    collect_calls_from_expr(false_arm, &mut false_calls);

    // Recursion only inside the guarded (negative) branch.
    true_calls
        .iter()
        .any(|(name, _)| call_matches(name, &fd.name))
        && false_calls
            .iter()
            .all(|(name, _)| !call_matches(name, &fd.name))
}
1214
/// Finds an `Int` parameter that every self-recursive call strictly
/// increases (`p + k`, k >= 1) and for which the body is guarded by an
/// equality test against an upper bound (see `extract_equality_bound_lean`).
/// Returns the parameter index and the bound rendered as Lean text.
fn single_int_ascending_param(fd: &FnDef) -> Option<(usize, String)> {
    let recursive_calls: Vec<Vec<&Spanned<Expr>>> = collect_calls_from_body(fd.body.as_ref())
        .into_iter()
        .filter(|(name, _)| call_matches(name, &fd.name))
        .map(|(_, args)| args)
        .collect();
    if recursive_calls.is_empty() {
        return None;
    }

    for (idx, (param_name, param_ty)) in fd.params.iter().enumerate() {
        if param_ty != "Int" {
            continue;
        }
        // Every recursive call must pass `param + k` at this position.
        let ascent_ok = recursive_calls.iter().all(|args| {
            args.get(idx)
                .cloned()
                .is_some_and(|arg| is_int_plus_positive(arg, param_name))
        });
        if !ascent_ok {
            continue;
        }
        if let Some(bound_lean) = extract_equality_bound_lean(fd, param_name) {
            return Some((idx, bound_lean));
        }
    }
    None
}
1245
/// When `fd`'s body ends in `match param == bound { true => base, false =>
/// recurse }` — recursion only in the `false` arm — returns `bound`
/// rendered as Lean text. This is the stopping bound for an ascending
/// recursion. Returns `None` for any other shape.
fn extract_equality_bound_lean(fd: &FnDef, param_name: &str) -> Option<String> {
    let tail = fd.body.tail_expr()?;
    let Expr::Match { subject, arms, .. } = &tail.node else {
        return None;
    };
    let Expr::BinOp(BinOp::Eq, left, right) = &subject.node else {
        return None;
    };
    if !is_ident(left, param_name) {
        return None;
    }
    // Determine which boolean arm contains the self-recursive call.
    let mut true_has_self = false;
    let mut false_has_self = false;
    for arm in arms {
        match arm.pattern {
            Pattern::Literal(crate::ast::Literal::Bool(true)) => {
                let mut calls = Vec::new();
                collect_calls_from_expr(&arm.body, &mut calls);
                true_has_self = calls.iter().any(|(n, _)| call_matches(n, &fd.name));
            }
            Pattern::Literal(crate::ast::Literal::Bool(false)) => {
                let mut calls = Vec::new();
                collect_calls_from_expr(&arm.body, &mut calls);
                false_has_self = calls.iter().any(|(n, _)| call_matches(n, &fd.name));
            }
            _ => return None,
        }
    }
    // Recursion must happen exactly while `param != bound`.
    if true_has_self || !false_has_self {
        return None;
    }
    Some(bound_expr_to_lean(right))
}
1282
1283fn bound_expr_to_lean(expr: &Spanned<Expr>) -> String {
1284 match &expr.node {
1285 Expr::Literal(crate::ast::Literal::Int(n)) => format!("{}", n),
1286 Expr::Ident(name) => expr::aver_name_to_lean(name),
1287 Expr::FnCall(f, args) => {
1288 if let Some(dotted) = crate::codegen::common::expr_to_dotted_name(&f.node) {
1289 if dotted == "List.len" && args.len() == 1 {
1291 return format!("{}.length", bound_expr_to_lean(&args[0]));
1292 }
1293 let lean_args: Vec<String> = args.iter().map(bound_expr_to_lean).collect();
1294 format!(
1295 "({} {})",
1296 expr::aver_name_to_lean(&dotted),
1297 lean_args.join(" ")
1298 )
1299 } else {
1300 "0".to_string()
1301 }
1302 }
1303 Expr::Attr(obj, field) => format!(
1304 "{}.{}",
1305 bound_expr_to_lean(obj),
1306 expr::aver_name_to_lean(field)
1307 ),
1308 _ => "0".to_string(),
1309 }
1310}
1311
/// Decides whether `fd`'s self-recursion is well-founded under a `sizeOf`
/// measure over its non-scalar parameters: every recursive call must pass
/// each measured parameter either unchanged or as a strict recursive
/// subterm binder, with at least one strictly smaller position per call.
fn supports_single_sizeof_structural(fd: &FnDef, ctx: &CodegenContext) -> bool {
    let recursive_calls: Vec<Vec<&Spanned<Expr>>> = collect_calls_from_body(fd.body.as_ref())
        .into_iter()
        .filter(|(name, _)| call_matches(name, &fd.name))
        .map(|(_, args)| args)
        .collect();
    if recursive_calls.is_empty() {
        return false;
    }

    let metric_indices = sizeof_measure_param_indices(fd);
    if metric_indices.is_empty() {
        return false;
    }

    // For each measured parameter of a known recursive type, precompute the
    // binder names that are strict subterms of it.
    let binder_sets: HashMap<usize, HashSet<String>> = metric_indices
        .iter()
        .filter_map(|idx| {
            let (param_name, param_type) = fd.params.get(*idx)?;
            recursive_type_names(ctx).contains(param_type).then(|| {
                (
                    *idx,
                    collect_recursive_subterm_binders(fd, param_name, param_type, ctx),
                )
            })
        })
        .collect();

    // Without any subterm binders no call can ever be strictly smaller.
    if binder_sets.values().all(HashSet::is_empty) {
        return false;
    }

    recursive_calls.iter().all(|args| {
        let mut strictly_smaller = false;
        for idx in &metric_indices {
            let Some((param_name, _)) = fd.params.get(*idx) else {
                return false;
            };
            let Some(arg) = args.get(*idx).cloned() else {
                return false;
            };
            // Passing the parameter through unchanged is allowed (equal size).
            if is_ident(arg, param_name) {
                continue;
            }
            let Some(binders) = binder_sets.get(idx) else {
                return false;
            };
            // A strict subterm binder makes this call strictly smaller.
            if matches!(&arg.node, Expr::Ident(id) if binders.contains(id)) {
                strictly_smaller = true;
                continue;
            }
            return false;
        }
        strictly_smaller
    })
}
1368
/// Finds the first `List`-typed parameter on which `fd` recurses
/// structurally: every self-recursive call must pass, at that position, a
/// binder introduced as the tail of a cons-pattern match on the parameter.
/// Returns the parameter index, or `None` if no parameter qualifies.
fn single_list_structural_param_index(fd: &FnDef) -> Option<usize> {
    fd.params
        .iter()
        .enumerate()
        .find_map(|(param_index, (param_name, param_ty))| {
            // Accept both parameterized (`List<T>`) and bare `List` types.
            if !(param_ty.starts_with("List<") || param_ty == "List") {
                return None;
            }

            let tails = collect_list_tail_binders(fd, param_name);
            if tails.is_empty() {
                return None;
            }

            let recursive_calls: Vec<Option<&Spanned<Expr>>> =
                collect_calls_from_body(fd.body.as_ref())
                    .into_iter()
                    .filter(|(name, _)| call_matches(name, &fd.name))
                    .map(|(_, args)| args.get(param_index).cloned())
                    .collect();
            if recursive_calls.is_empty() {
                return None;
            }

            // Every call must recurse on a tail binder at this position.
            recursive_calls
                .into_iter()
                .all(|arg| {
                    arg.is_some_and(|a| matches!(&a.node, Expr::Ident(id) if tails.contains(id)))
                })
                .then_some(param_index)
        })
}
1401
1402fn is_ident(expr: &Spanned<Expr>, name: &str) -> bool {
1403 matches!(&expr.node, Expr::Ident(id) if id == name)
1404}
1405
1406fn is_int_plus_positive(expr: &Spanned<Expr>, param_name: &str) -> bool {
1407 match &expr.node {
1408 Expr::BinOp(BinOp::Add, left, right) => {
1409 matches!(&left.node, Expr::Ident(id) if id == param_name)
1410 && matches!(&right.node, Expr::Literal(crate::ast::Literal::Int(n)) if *n >= 1)
1411 }
1412 Expr::FnCall(callee, args) => {
1413 let Some(name) = expr_to_dotted_name(callee) else {
1414 return false;
1415 };
1416 (name == "Int.add" || name == "int.add")
1417 && args.len() == 2
1418 && matches!(&args[0].node, Expr::Ident(id) if id == param_name)
1419 && matches!(&args[1].node, Expr::Literal(crate::ast::Literal::Int(n)) if *n >= 1)
1420 }
1421 _ => false,
1422 }
1423}
1424
1425fn is_skip_ws_advance(expr: &Spanned<Expr>, string_param: &str, pos_param: &str) -> bool {
1426 let Expr::FnCall(callee, args) = &expr.node else {
1427 return false;
1428 };
1429 let Some(name) = expr_to_dotted_name(callee) else {
1430 return false;
1431 };
1432 if !call_matches(&name, "skipWs") || args.len() != 2 {
1433 return false;
1434 }
1435 is_ident(&args[0], string_param) && is_int_plus_positive(&args[1], pos_param)
1436}
1437
1438fn is_skip_ws_same(expr: &Spanned<Expr>, string_param: &str, pos_param: &str) -> bool {
1439 let Expr::FnCall(callee, args) = &expr.node else {
1440 return false;
1441 };
1442 let Some(name) = expr_to_dotted_name(callee) else {
1443 return false;
1444 };
1445 if !call_matches(&name, "skipWs") || args.len() != 2 {
1446 return false;
1447 }
1448 is_ident(&args[0], string_param) && is_ident(&args[1], pos_param)
1449}
1450
1451fn is_string_pos_advance(expr: &Spanned<Expr>, string_param: &str, pos_param: &str) -> bool {
1452 is_int_plus_positive(expr, pos_param) || is_skip_ws_advance(expr, string_param, pos_param)
1453}
1454
/// How a recursive call moves the String-parser position argument.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum StringPosEdge {
    /// Position passed through unchanged (same termination measure).
    Same,
    /// Position strictly increases (less remaining input).
    Advance,
}
1460
/// Classifies a call site's position argument relative to `pos_param`:
/// unchanged (`Same`), strictly advanced (`Advance`), or unrecognized
/// (`None`). Note the checks are ordered — `Same` shapes are tested before
/// `Advance` shapes. Any identifier other than the position parameter is
/// optimistically treated as an advance; presumably such a binder always
/// holds an already-advanced position — TODO confirm callers guarantee this.
fn classify_string_pos_edge(
    expr: &Spanned<Expr>,
    string_param: &str,
    pos_param: &str,
) -> Option<StringPosEdge> {
    if is_ident(expr, pos_param) || is_skip_ws_same(expr, string_param, pos_param) {
        return Some(StringPosEdge::Same);
    }
    if is_string_pos_advance(expr, string_param, pos_param) {
        return Some(StringPosEdge::Advance);
    }
    // `skipWs(s, other_binder)` with a non-parameter position: advance.
    if let Expr::FnCall(callee, args) = &expr.node {
        let name = expr_to_dotted_name(callee)?;
        if call_matches(&name, "skipWs")
            && args.len() == 2
            && is_ident(&args[0], string_param)
            && matches!(&args[1].node, Expr::Ident(id) if id != pos_param)
        {
            return Some(StringPosEdge::Advance);
        }
    }
    if matches!(&expr.node, Expr::Ident(id) if id != pos_param) {
        return Some(StringPosEdge::Advance);
    }
    None
}
1487
/// Assigns each function in `names` a strictly positive rank such that every
/// "same" edge (a call made at an unchanged measure) goes from a higher rank
/// to a strictly lower one. Returns `None` when the same-edges contain a
/// cycle or reference a name outside `names`.
///
/// Uses Kahn's topological sort; the work queue is kept sorted so the
/// resulting ranking is deterministic across runs.
fn ranks_from_same_edges(
    names: &HashSet<String>,
    same_edges: &HashMap<String, HashSet<String>>,
) -> Option<HashMap<String, usize>> {
    // In-degree over the "same" edges; an edge to an unknown name is an error.
    let mut indegree: HashMap<String, usize> = names.iter().map(|n| (n.clone(), 0)).collect();
    for outs in same_edges.values() {
        for to in outs {
            if let Some(entry) = indegree.get_mut(to) {
                *entry += 1;
            } else {
                return None;
            }
        }
    }

    let mut queue: Vec<String> = indegree
        .iter()
        .filter_map(|(name, &deg)| (deg == 0).then_some(name.clone()))
        .collect();
    queue.sort();
    let mut topo = Vec::new();
    while let Some(node) = queue.pop() {
        topo.push(node.clone());
        let outs = same_edges.get(&node).cloned().unwrap_or_default();
        let mut newly_zero = Vec::new();
        for to in outs {
            if let Some(entry) = indegree.get_mut(&to) {
                *entry -= 1;
                if *entry == 0 {
                    newly_zero.push(to);
                }
            } else {
                return None;
            }
        }
        newly_zero.sort();
        queue.extend(newly_zero);
    }

    // Nodes left over after the sort mean a cycle: no ranking exists.
    if topo.len() != names.len() {
        return None;
    }

    // Earlier in topological order => larger rank; ranks are 1..=n.
    let n = topo.len();
    let mut ranks = HashMap::new();
    for (idx, name) in topo.into_iter().enumerate() {
        ranks.insert(name, n - idx);
    }
    Some(ranks)
}
1538
/// Decides whether `fd` is a String-parser recursion: the first two
/// parameters are `(String, Int)`, and every self-recursive call passes the
/// string unchanged while strictly advancing the position argument.
fn supports_single_string_pos_advance(fd: &FnDef) -> bool {
    let Some((string_param, string_ty)) = fd.params.first() else {
        return false;
    };
    let Some((pos_param, pos_ty)) = fd.params.get(1) else {
        return false;
    };
    if string_ty != "String" || pos_ty != "Int" {
        return false;
    }

    // Pairs of (string argument, position argument) at each recursive call.
    type CallPair<'a> = (Option<&'a Spanned<Expr>>, Option<&'a Spanned<Expr>>);
    let recursive_calls: Vec<CallPair<'_>> = collect_calls_from_body(fd.body.as_ref())
        .into_iter()
        .filter(|(name, _)| call_matches(name, &fd.name))
        .map(|(_, args)| (args.first().cloned(), args.get(1).cloned()))
        .collect();
    if recursive_calls.is_empty() {
        return false;
    }

    recursive_calls.into_iter().all(|(arg0, arg1)| {
        arg0.is_some_and(|e| is_ident(e, string_param))
            && arg1.is_some_and(|e| is_string_pos_advance(e, string_param, pos_param))
    })
}
1565
/// Decides whether a mutual-recursion component terminates by Int countdown:
/// every function's first parameter is `Int`, and every intra-component call
/// passes `first_param - k` (k >= 1) in that position. Requires at least one
/// intra-component call to succeed.
fn supports_mutual_int_countdown(component: &[&FnDef]) -> bool {
    if component.len() < 2 {
        return false;
    }
    if component
        .iter()
        .any(|fd| !matches!(fd.params.first(), Some((_, t)) if t == "Int"))
    {
        return false;
    }
    let names: HashSet<String> = component.iter().map(|fd| fd.name.clone()).collect();
    let mut any_intra = false;
    for fd in component {
        let param_name = &fd.params[0].0;
        for (callee, args) in collect_calls_from_body(fd.body.as_ref()) {
            // Calls out of the component don't affect termination here.
            if !call_is_in_set(&callee, &names) {
                continue;
            }
            any_intra = true;
            let Some(arg0) = args.first().cloned() else {
                return false;
            };
            if !is_int_minus_positive(arg0, param_name) {
                return false;
            }
        }
    }
    any_intra
}
1595
/// Decides whether a mutual component is a String-parser group: every
/// function takes `(String, Int, ...)`, every intra-component call keeps the
/// string and either advances the position (always fine) or keeps it the
/// same — in which case a "same" edge is recorded and the same-edge graph
/// must be acyclic. On success returns a rank per function (see
/// `ranks_from_same_edges`); returns `None` otherwise. Note: a call with a
/// missing first or second argument aborts the whole analysis via `?`.
fn supports_mutual_string_pos_advance(component: &[&FnDef]) -> Option<HashMap<String, usize>> {
    if component.len() < 2 {
        return None;
    }
    if component.iter().any(|fd| {
        !matches!(fd.params.first(), Some((_, t)) if t == "String")
            || !matches!(fd.params.get(1), Some((_, t)) if t == "Int")
    }) {
        return None;
    }

    let names: HashSet<String> = component.iter().map(|fd| fd.name.clone()).collect();
    let mut same_edges: HashMap<String, HashSet<String>> =
        names.iter().map(|n| (n.clone(), HashSet::new())).collect();
    let mut any_intra = false;

    for fd in component {
        let string_param = &fd.params[0].0;
        let pos_param = &fd.params[1].0;
        for (callee_raw, args) in collect_calls_from_body(fd.body.as_ref()) {
            // Skip calls that leave the component.
            let Some(callee) = canonical_callee_name(&callee_raw, &names) else {
                continue;
            };
            any_intra = true;

            let arg0 = args.first().cloned()?;
            let arg1 = args.get(1).cloned()?;

            // The string argument must be threaded through unchanged.
            if !is_ident(arg0, string_param) {
                return None;
            }

            match classify_string_pos_edge(arg1, string_param, pos_param) {
                Some(StringPosEdge::Same) => {
                    if let Some(edges) = same_edges.get_mut(&fd.name) {
                        edges.insert(callee);
                    } else {
                        return None;
                    }
                }
                Some(StringPosEdge::Advance) => {}
                None => return None,
            }
        }
    }

    if !any_intra {
        return None;
    }

    ranks_from_same_edges(&names, &same_edges)
}
1648
/// Types whose values have no recursive structure and therefore cannot serve
/// as a `sizeOf` termination measure.
fn is_scalar_like_type(type_name: &str) -> bool {
    const SCALARS: [&str; 7] = ["Int", "Float", "Bool", "String", "Char", "Byte", "Unit"];
    SCALARS.contains(&type_name)
}
1655
1656pub(super) fn sizeof_measure_param_indices(fd: &FnDef) -> Vec<usize> {
1657 fd.params
1658 .iter()
1659 .enumerate()
1660 .filter_map(|(idx, (_, type_name))| (!is_scalar_like_type(type_name)).then_some(idx))
1661 .collect()
1662}
1663
/// Decides whether a mutual component terminates under a ranked `sizeOf`
/// measure. A call is a "same" edge when the callee's measured parameters
/// exactly mirror the caller's measured parameters (same count, identical
/// identifiers position-by-position); the same-edge graph must then be
/// acyclic. Every function needs at least one non-scalar parameter.
/// On success returns a rank per function; `None` otherwise.
fn supports_mutual_sizeof_ranked(component: &[&FnDef]) -> Option<HashMap<String, usize>> {
    if component.len() < 2 {
        return None;
    }
    let names: HashSet<String> = component.iter().map(|fd| fd.name.clone()).collect();
    let metric_indices: HashMap<String, Vec<usize>> = component
        .iter()
        .map(|fd| (fd.name.clone(), sizeof_measure_param_indices(fd)))
        .collect();
    // Every member must have at least one measured (non-scalar) parameter.
    if component.iter().any(|fd| {
        metric_indices
            .get(&fd.name)
            .is_none_or(|indices| indices.is_empty())
    }) {
        return None;
    }

    let mut same_edges: HashMap<String, HashSet<String>> =
        names.iter().map(|n| (n.clone(), HashSet::new())).collect();
    let mut any_intra = false;
    for fd in component {
        let caller_metric_indices = metric_indices.get(&fd.name)?;
        let caller_metric_params: Vec<&str> = caller_metric_indices
            .iter()
            .filter_map(|idx| fd.params.get(*idx).map(|(name, _)| name.as_str()))
            .collect();
        for (callee_raw, args) in collect_calls_from_body(fd.body.as_ref()) {
            // Skip calls that leave the component.
            let Some(callee) = canonical_callee_name(&callee_raw, &names) else {
                continue;
            };
            any_intra = true;
            let callee_metric_indices = metric_indices.get(&callee)?;
            // "Same" edge: measured arguments are exactly the caller's
            // measured parameters, in order.
            let is_same_edge = callee_metric_indices.len() == caller_metric_params.len()
                && callee_metric_indices
                    .iter()
                    .enumerate()
                    .all(|(pos, callee_idx)| {
                        let Some(arg) = args.get(*callee_idx).cloned() else {
                            return false;
                        };
                        is_ident(arg, caller_metric_params[pos])
                    });
            if is_same_edge {
                if let Some(edges) = same_edges.get_mut(&fd.name) {
                    edges.insert(callee);
                } else {
                    return None;
                }
            }
        }
    }
    if !any_intra {
        return None;
    }

    let ranks = ranks_from_same_edges(&names, &same_edges)?;
    let mut out = HashMap::new();
    for fd in component {
        let rank = ranks.get(&fd.name).cloned()?;
        out.insert(fd.name.clone(), rank);
    }
    Some(out)
}
1727
/// Analyzes every recursive pure-function component and assigns each member
/// a `RecursionPlan`, or records a `ProofModeIssue` when no supported
/// termination scheme applies.
///
/// Mutual components are tried against the mutual schemes (Int countdown,
/// String+position, ranked sizeOf) in that order; singleton recursive
/// functions are tried against the single-function schemes (linear
/// recurrence, Int ascending, Int countdown, sizeOf structural, List
/// structural, String+position), also in order. The first matching scheme
/// wins.
fn proof_mode_recursion_analysis(
    ctx: &CodegenContext,
) -> (HashMap<String, RecursionPlan>, Vec<ProofModeIssue>) {
    let mut plans = HashMap::new();
    let mut issues = Vec::new();

    let all_pure = pure_fns(ctx);
    let recursive_names = recursive_pure_fn_names(ctx);
    let components = call_graph::ordered_fn_components(&all_pure, &ctx.module_prefixes);

    for component in components {
        if component.is_empty() {
            continue;
        }
        // Multi-function components are recursive by construction; singletons
        // only when the function calls itself.
        let is_recursive_component =
            component.len() > 1 || recursive_names.contains(&component[0].name);
        if !is_recursive_component {
            continue;
        }

        if component.len() > 1 {
            if supports_mutual_int_countdown(&component) {
                for fd in &component {
                    plans.insert(fd.name.clone(), RecursionPlan::MutualIntCountdown);
                }
            } else if let Some(ranks) = supports_mutual_string_pos_advance(&component) {
                for fd in &component {
                    if let Some(rank) = ranks.get(&fd.name).cloned() {
                        plans.insert(
                            fd.name.clone(),
                            RecursionPlan::MutualStringPosAdvance { rank },
                        );
                    }
                }
            } else if let Some(rankings) = supports_mutual_sizeof_ranked(&component) {
                for fd in &component {
                    if let Some(rank) = rankings.get(&fd.name).cloned() {
                        plans.insert(fd.name.clone(), RecursionPlan::MutualSizeOfRanked { rank });
                    }
                }
            } else {
                // No mutual scheme matched: report the whole group at its
                // earliest source line.
                let names = component
                    .iter()
                    .map(|fd| fd.name.clone())
                    .collect::<Vec<_>>()
                    .join(", ");
                let line = component.iter().map(|fd| fd.line).min().unwrap_or(1);
                issues.push(ProofModeIssue {
                    line,
                    message: format!(
                        "unsupported mutual recursion group (currently supported in proof mode: Int countdown on first param): {}",
                        names
                    ),
                });
            }
            continue;
        }

        let fd = component[0];
        if recurrence::detect_second_order_int_linear_recurrence(fd).is_some() {
            plans.insert(fd.name.clone(), RecursionPlan::LinearRecurrence2);
        } else if let Some((param_index, bound_lean)) = single_int_ascending_param(fd) {
            plans.insert(
                fd.name.clone(),
                RecursionPlan::IntAscending {
                    param_index,
                    bound_lean,
                },
            );
        } else if let Some(param_index) = single_int_countdown_param_index(fd) {
            plans.insert(fd.name.clone(), RecursionPlan::IntCountdown { param_index });
        } else if supports_single_sizeof_structural(fd, ctx) {
            plans.insert(fd.name.clone(), RecursionPlan::SizeOfStructural);
        } else if let Some(param_index) = single_list_structural_param_index(fd) {
            plans.insert(
                fd.name.clone(),
                RecursionPlan::ListStructural { param_index },
            );
        } else if supports_single_string_pos_advance(fd) {
            plans.insert(fd.name.clone(), RecursionPlan::StringPosAdvance);
        } else {
            issues.push(ProofModeIssue {
                line: fd.line,
                message: format!(
                    "recursive function '{}' is outside proof subset (currently supported: Int countdown, second-order affine Int recurrences with pair-state worker, structural recursion on List/recursive ADTs, String+position, mutual Int countdown, mutual String+position, and ranked sizeOf recursion)",
                    fd.name
                ),
            });
        }
    }

    (plans, issues)
}
1821
1822pub fn proof_mode_findings(ctx: &CodegenContext) -> Vec<ProofModeIssue> {
1827 let (_plans, issues) = proof_mode_recursion_analysis(ctx);
1828 issues
1829}
1830
1831pub fn proof_mode_issues(ctx: &CodegenContext) -> Vec<String> {
1832 proof_mode_findings(ctx)
1833 .into_iter()
1834 .map(|issue| issue.message)
1835 .collect()
1836}
1837
1838pub fn transpile(ctx: &CodegenContext) -> ProjectOutput {
1840 transpile_with_verify_mode(ctx, VerifyEmitMode::NativeDecide)
1841}
1842
/// Proof-mode transpilation: emits type definitions (with `DecidableEq` and,
/// where applicable, `sizeOf`-style measures for recursive types), pure
/// functions guided by the recursion plans, decision blocks, and verify
/// blocks, then assembles the Lean project files (lakefile, toolchain,
/// single `.lean` source). Recursion-analysis issues are ignored here; use
/// `proof_mode_findings` to surface them.
pub fn transpile_for_proof_mode(
    ctx: &CodegenContext,
    verify_mode: VerifyEmitMode,
) -> ProjectOutput {
    let (plans, _issues) = proof_mode_recursion_analysis(ctx);
    let recursive_names = recursive_pure_fn_names(ctx);
    let recursive_types = recursive_type_names(ctx);

    let mut sections = Vec::new();

    // Type definitions: module-local first, then root-context ones.
    for module in &ctx.modules {
        for td in &module.type_defs {
            sections.push(toplevel::emit_type_def(td));
            if toplevel::is_recursive_type_def(td) {
                sections.push(toplevel::emit_recursive_decidable_eq(
                    toplevel::type_def_name(td),
                ));
                if let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types) {
                    sections.push(measure);
                }
            }
            sections.push(String::new());
        }
    }
    for td in &ctx.type_defs {
        sections.push(toplevel::emit_type_def(td));
        if toplevel::is_recursive_type_def(td) {
            sections.push(toplevel::emit_recursive_decidable_eq(
                toplevel::type_def_name(td),
            ));
            if let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types) {
                sections.push(measure);
            }
        }
        sections.push(String::new());
    }

    emit_pure_functions_proof(ctx, &plans, &recursive_names, &mut sections);

    for item in &ctx.items {
        if let TopLevel::Decision(db) = item {
            sections.push(toplevel::emit_decision(db));
            sections.push(String::new());
        }
    }

    // Per-key counters keep generated verify-case names unique across blocks.
    let mut verify_case_counters: HashMap<String, usize> = HashMap::new();
    for item in &ctx.items {
        if let TopLevel::Verify(vb) = item {
            let key = verify_counter_key(vb);
            let start_idx = *verify_case_counters.get(&key).unwrap_or(&0);
            let (emitted, next_idx) = toplevel::emit_verify_block(vb, ctx, verify_mode, start_idx);
            verify_case_counters.insert(key, next_idx);
            sections.push(emitted);
            sections.push(String::new());
        }
    }

    let lean_body = sections.join("\n");
    let prelude = generate_prelude_for_body(&lean_body, false);
    let lean_source = format!("{prelude}\n\n{lean_body}");
    let project_name = lean_project_name(ctx);
    let lakefile = generate_lakefile(&project_name);
    let toolchain = generate_toolchain();
    ProjectOutput {
        files: vec![
            ("lakefile.lean".to_string(), lakefile),
            ("lean-toolchain".to_string(), toolchain),
            (format!("{}.lean", project_name), lean_source),
        ],
    }
}
1920
/// Standard (non-proof-mode) transpilation: emits type definitions (with
/// `DecidableEq` for recursive ones, but no measures), pure functions,
/// decision blocks, and verify blocks, then assembles the Lean project files
/// (lakefile, toolchain, single `.lean` source).
pub fn transpile_with_verify_mode(
    ctx: &CodegenContext,
    verify_mode: VerifyEmitMode,
) -> ProjectOutput {
    let mut sections = Vec::new();

    let recursive_fns = call_graph::find_recursive_fns(&ctx.items);

    // Type definitions: module-local first, then root-context ones.
    for module in &ctx.modules {
        for td in &module.type_defs {
            sections.push(toplevel::emit_type_def(td));
            if toplevel::is_recursive_type_def(td) {
                sections.push(toplevel::emit_recursive_decidable_eq(
                    toplevel::type_def_name(td),
                ));
            }
            sections.push(String::new());
        }
    }

    for td in &ctx.type_defs {
        sections.push(toplevel::emit_type_def(td));
        if toplevel::is_recursive_type_def(td) {
            sections.push(toplevel::emit_recursive_decidable_eq(
                toplevel::type_def_name(td),
            ));
        }
        sections.push(String::new());
    }

    emit_pure_functions(ctx, &recursive_fns, &mut sections);

    for item in &ctx.items {
        if let TopLevel::Decision(db) = item {
            sections.push(toplevel::emit_decision(db));
            sections.push(String::new());
        }
    }

    // Per-key counters keep generated verify-case names unique across blocks.
    let mut verify_case_counters: HashMap<String, usize> = HashMap::new();
    for item in &ctx.items {
        if let TopLevel::Verify(vb) = item {
            let key = verify_counter_key(vb);
            let start_idx = *verify_case_counters.get(&key).unwrap_or(&0);
            let (emitted, next_idx) = toplevel::emit_verify_block(vb, ctx, verify_mode, start_idx);
            verify_case_counters.insert(key, next_idx);
            sections.push(emitted);
            sections.push(String::new());
        }
    }

    let lean_body = sections.join("\n");
    let prelude = generate_prelude_for_body(&lean_body, false);
    let lean_source = format!("{prelude}\n\n{lean_body}");

    let project_name = lean_project_name(ctx);
    let lakefile = generate_lakefile(&project_name);
    let toolchain = generate_toolchain();

    ProjectOutput {
        files: vec![
            ("lakefile.lean".to_string(), lakefile),
            ("lean-toolchain".to_string(), toolchain),
            (format!("{}.lean", project_name), lean_source),
        ],
    }
}
2003
2004fn emit_pure_functions(
2005 ctx: &CodegenContext,
2006 recursive_fns: &HashSet<String>,
2007 sections: &mut Vec<String>,
2008) {
2009 let all_fns: Vec<&FnDef> = ctx
2010 .modules
2011 .iter()
2012 .flat_map(|m| m.fn_defs.iter())
2013 .chain(ctx.fn_defs.iter())
2014 .filter(|fd| toplevel::is_pure_fn(fd))
2015 .collect();
2016 if all_fns.is_empty() {
2017 return;
2018 }
2019
2020 let components = call_graph::ordered_fn_components(&all_fns, &ctx.module_prefixes);
2021 for fns in components {
2022 if fns.is_empty() {
2023 continue;
2024 }
2025
2026 if fns.len() > 1 {
2028 sections.push(toplevel::emit_mutual_group(&fns, ctx));
2029 sections.push(String::new());
2030 continue;
2031 }
2032
2033 if let Some(code) = toplevel::emit_fn_def(fns[0], recursive_fns, ctx) {
2036 sections.push(code);
2037 sections.push(String::new());
2038 }
2039 }
2040}
2041
2042fn emit_pure_functions_proof(
2043 ctx: &CodegenContext,
2044 plans: &HashMap<String, RecursionPlan>,
2045 recursive_names: &HashSet<String>,
2046 sections: &mut Vec<String>,
2047) {
2048 let all_fns: Vec<&FnDef> = pure_fns(ctx);
2049 if all_fns.is_empty() {
2050 return;
2051 }
2052
2053 let components = call_graph::ordered_fn_components(&all_fns, &ctx.module_prefixes);
2054 for fns in components {
2055 if fns.is_empty() {
2056 continue;
2057 }
2058 if fns.len() > 1 {
2059 let all_supported = fns.iter().all(|fd| plans.contains_key(&fd.name));
2060 if all_supported {
2061 sections.push(toplevel::emit_mutual_group_proof(&fns, ctx, plans));
2062 } else {
2063 sections.push(toplevel::emit_mutual_group(&fns, ctx));
2064 }
2065 sections.push(String::new());
2066 continue;
2067 }
2068
2069 let fd = fns[0];
2070 let is_recursive = recursive_names.contains(&fd.name);
2071 let emitted = if is_recursive && !plans.contains_key(&fd.name) {
2072 toplevel::emit_fn_def(fd, recursive_names, ctx)
2073 } else {
2074 toplevel::emit_fn_def_proof(fd, plans.get(&fd.name).cloned(), ctx)
2075 };
2076 if let Some(code) = emitted {
2077 sections.push(code);
2078 sections.push(String::new());
2079 }
2080 }
2081}
2082
#[cfg(test)]
fn generate_prelude() -> String {
    // Test-only shim: build the prelude with every optional helper forced in
    // (empty body, include_all_helpers = true).
    generate_prelude_for_body("", true)
}
2087
2088fn generate_prelude_for_body(body: &str, include_all_helpers: bool) -> String {
2089 let mut parts = vec![
2090 LEAN_PRELUDE_HEADER.to_string(),
2091 LEAN_PRELUDE_FLOAT_COE.to_string(),
2092 LEAN_PRELUDE_FLOAT_DEC_EQ.to_string(),
2093 LEAN_PRELUDE_EXCEPT_DEC_EQ.to_string(),
2094 LEAN_PRELUDE_EXCEPT_NS.to_string(),
2095 LEAN_PRELUDE_OPTION_TO_EXCEPT.to_string(),
2096 LEAN_PRELUDE_STRING_HADD.to_string(),
2097 ];
2098
2099 if include_all_helpers || needs_builtin_named_type(body, "Header") {
2100 parts.push(LEAN_PRELUDE_HEADER_TYPE.to_string());
2101 }
2102 if include_all_helpers || needs_builtin_named_type(body, "HttpResponse") {
2103 parts.push(LEAN_PRELUDE_HTTP_RESPONSE_TYPE.to_string());
2104 }
2105 if include_all_helpers || needs_builtin_named_type(body, "HttpRequest") {
2106 parts.push(LEAN_PRELUDE_HTTP_REQUEST_TYPE.to_string());
2107 }
2108 if include_all_helpers || needs_builtin_named_type(body, "Tcp_Connection") {
2109 parts.push(LEAN_PRELUDE_TCP_CONNECTION_TYPE.to_string());
2110 }
2111
2112 if include_all_helpers || body.contains("averStringPosFuel") {
2113 parts.push(LEAN_PRELUDE_PROOF_FUEL.to_string());
2114 }
2115
2116 if include_all_helpers || body.contains("AverMeasure.") {
2117 parts.push(LEAN_PRELUDE_AVER_MEASURE.to_string());
2118 }
2119
2120 if include_all_helpers || body.contains("AverMap.") {
2121 parts.push(generate_map_prelude(body, include_all_helpers));
2122 }
2123
2124 parts.extend([
2125 LEAN_PRELUDE_AVER_LIST.to_string(),
2126 LEAN_PRELUDE_STRING_HELPERS.to_string(),
2127 LEAN_PRELUDE_NUMERIC_PARSE.to_string(),
2128 LEAN_PRELUDE_CHAR_BYTE.to_string(),
2129 ]);
2130
2131 parts.join("\n\n")
2132}
2133
2134fn needs_builtin_named_type(body: &str, type_name: &str) -> bool {
2135 references_named_type(body, type_name) && !declares_named_type(body, type_name)
2136}
2137
/// Token-level scan for a bare reference to `type_name` in `body`.
///
/// A token is a maximal run of ASCII alphanumerics, `_`, or `.`; dots are
/// token characters so that a qualified name like `Foo.Header` does not count
/// as a reference to `Header`, and `HeaderX` does not match `Header`.
fn references_named_type(body: &str, type_name: &str) -> bool {
    let is_token_char = |c: char| c.is_ascii_alphanumeric() || c == '_' || c == '.';
    body.split(|c: char| !is_token_char(c))
        .any(|token| token == type_name)
}
2152
/// True when `body` itself declares `type_name` as a Lean `structure` or
/// `inductive` (exact line match after stripping leading indentation).
fn declares_named_type(body: &str, type_name: &str) -> bool {
    let decls = [
        format!("structure {type_name} where"),
        format!("inductive {type_name} where"),
    ];
    body.lines()
        .any(|line| decls.iter().any(|decl| line.trim_start() == decl.as_str()))
}
2160
2161fn generate_map_prelude(body: &str, include_all_helpers: bool) -> String {
2162 let mut parts = vec![AVER_MAP_PRELUDE_BASE.to_string()];
2163
2164 let needs_has_set_self = include_all_helpers || body.contains("AverMap.has_set_self");
2165 let needs_get_set_self = include_all_helpers || body.contains("AverMap.get_set_self");
2166 let needs_get_set_other = include_all_helpers
2167 || body.contains("AverMap.get_set_other")
2168 || body.contains("AverMap.has_set_other");
2169 let needs_has_set_other = include_all_helpers || body.contains("AverMap.has_set_other");
2170
2171 if needs_has_set_self {
2172 parts.push(AVER_MAP_PRELUDE_HAS_SET_SELF.to_string());
2173 }
2174 if needs_get_set_self {
2175 parts.push(AVER_MAP_PRELUDE_GET_SET_SELF.to_string());
2176 }
2177 if needs_get_set_other {
2178 parts.push(AVER_MAP_PRELUDE_GET_SET_OTHER.to_string());
2179 }
2180 if needs_has_set_other {
2181 parts.push(AVER_MAP_PRELUDE_HAS_SET_OTHER.to_string());
2182 }
2183
2184 parts.push(AVER_MAP_PRELUDE_END.to_string());
2185 parts.join("\n\n")
2186}
2187
/// Renders `lakefile.lean` for the generated project.
///
/// The Lake package name is lowercased; the library target keeps the original
/// casing so it matches the generated `<ProjectName>.lean` source file.
fn generate_lakefile(project_name: &str) -> String {
    let package = project_name.to_lowercase();
    format!(
        "import Lake\nopen Lake DSL\n\npackage «{package}» where\n  version := v!\"0.1.0\"\n\n@[default_target]\nlean_lib «{project_name}» where\n  srcDir := \".\"\n"
    )
}
2204
/// Renders the `lean-toolchain` file pinning the Lean 4 version the generated
/// project builds against.
fn generate_toolchain() -> String {
    String::from("leanprover/lean4:v4.15.0\n")
}
2208
/// Uppercases the first character of `s`, leaving the rest untouched.
///
/// Uses Unicode uppercasing, so the first character may expand to several
/// (e.g. 'ß' -> "SS"). Returns an empty string for empty input.
fn capitalize_first(s: &str) -> String {
    let mut chars = s.chars();
    chars.next().map_or_else(String::new, |first| {
        let mut out: String = first.to_uppercase().collect();
        out.push_str(chars.as_str());
        out
    })
}
2216
2217#[cfg(test)]
2218mod tests {
2219 use super::{
2220 VerifyEmitMode, generate_prelude, proof_mode_issues, recurrence, transpile,
2221 transpile_for_proof_mode, transpile_with_verify_mode,
2222 };
2223 use crate::ast::{
2224 BinOp, Expr, FnBody, FnDef, Literal, MatchArm, Pattern, Spanned, Stmt, TailCallData,
2225 TopLevel, TypeDef, TypeVariant, VerifyBlock, VerifyGiven, VerifyGivenDomain, VerifyKind,
2226 VerifyLaw,
2227 };
2228
2229 fn sb(e: Expr) -> Spanned<Expr> {
2231 Spanned::bare(e)
2232 }
2233 fn sbb(e: Expr) -> Box<Spanned<Expr>> {
2235 Box::new(Spanned::bare(e))
2236 }
2237 use crate::codegen::{CodegenContext, build_context};
2238 use crate::source::parse_source;
2239 use crate::tco;
2240 use crate::types::checker::run_type_check_full;
2241 use std::collections::{HashMap, HashSet};
2242 use std::sync::Arc as Rc;
2243
2244 fn empty_ctx() -> CodegenContext {
2245 CodegenContext {
2246 items: vec![],
2247 fn_sigs: HashMap::new(),
2248 memo_fns: HashSet::new(),
2249 memo_safe_types: HashSet::new(),
2250 type_defs: vec![],
2251 fn_defs: vec![],
2252 project_name: "verify_mode".to_string(),
2253 modules: vec![],
2254 module_prefixes: HashSet::new(),
2255 policy: None,
2256 emit_replay_runtime: false,
2257 runtime_policy_from_env: false,
2258 guest_entry: None,
2259 emit_self_host_support: false,
2260 extra_fn_defs: Vec::new(),
2261 mutual_tco_members: HashSet::new(),
2262 }
2263 }
2264
2265 fn ctx_from_source(source: &str, project_name: &str) -> CodegenContext {
2266 let mut items = parse_source(source).expect("source should parse");
2267 tco::transform_program(&mut items);
2268 let tc = run_type_check_full(&items, None);
2269 assert!(
2270 tc.errors.is_empty(),
2271 "source should typecheck without errors: {:?}",
2272 tc.errors
2273 );
2274 build_context(items, &tc, HashSet::new(), project_name.to_string(), vec![])
2275 }
2276
2277 fn generated_lean_file(out: &crate::codegen::ProjectOutput) -> &str {
2278 out.files
2279 .iter()
2280 .find_map(|(name, content)| {
2281 (name.ends_with(".lean") && name != "lakefile.lean").then_some(content.as_str())
2282 })
2283 .expect("expected generated Lean file")
2284 }
2285
2286 fn empty_ctx_with_verify_case() -> CodegenContext {
2287 let mut ctx = empty_ctx();
2288 ctx.items.push(TopLevel::Verify(VerifyBlock {
2289 fn_name: "f".to_string(),
2290 line: 1,
2291 cases: vec![(
2292 sb(Expr::Literal(Literal::Int(1))),
2293 sb(Expr::Literal(Literal::Int(1))),
2294 )],
2295 case_spans: vec![],
2296 case_givens: vec![],
2297 kind: VerifyKind::Cases,
2298 }));
2299 ctx
2300 }
2301
2302 fn empty_ctx_with_two_verify_blocks_same_fn() -> CodegenContext {
2303 let mut ctx = empty_ctx();
2304 ctx.items.push(TopLevel::Verify(VerifyBlock {
2305 fn_name: "f".to_string(),
2306 line: 1,
2307 cases: vec![(
2308 sb(Expr::Literal(Literal::Int(1))),
2309 sb(Expr::Literal(Literal::Int(1))),
2310 )],
2311 case_spans: vec![],
2312 case_givens: vec![],
2313 kind: VerifyKind::Cases,
2314 }));
2315 ctx.items.push(TopLevel::Verify(VerifyBlock {
2316 fn_name: "f".to_string(),
2317 line: 2,
2318 cases: vec![(
2319 sb(Expr::Literal(Literal::Int(2))),
2320 sb(Expr::Literal(Literal::Int(2))),
2321 )],
2322 case_spans: vec![],
2323 case_givens: vec![],
2324 kind: VerifyKind::Cases,
2325 }));
2326 ctx
2327 }
2328
2329 fn empty_ctx_with_verify_law() -> CodegenContext {
2330 let mut ctx = empty_ctx();
2331 let add = FnDef {
2332 name: "add".to_string(),
2333 line: 1,
2334 params: vec![
2335 ("a".to_string(), "Int".to_string()),
2336 ("b".to_string(), "Int".to_string()),
2337 ],
2338 return_type: "Int".to_string(),
2339 effects: vec![],
2340 desc: None,
2341 body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
2342 BinOp::Add,
2343 sbb(Expr::Ident("a".to_string())),
2344 sbb(Expr::Ident("b".to_string())),
2345 )))),
2346 resolution: None,
2347 };
2348 ctx.fn_defs.push(add.clone());
2349 ctx.items.push(TopLevel::FnDef(add));
2350 ctx.items.push(TopLevel::Verify(VerifyBlock {
2351 fn_name: "add".to_string(),
2352 line: 1,
2353 cases: vec![
2354 (
2355 sb(Expr::FnCall(
2356 sbb(Expr::Ident("add".to_string())),
2357 vec![
2358 sb(Expr::Literal(Literal::Int(1))),
2359 sb(Expr::Literal(Literal::Int(2))),
2360 ],
2361 )),
2362 sb(Expr::FnCall(
2363 sbb(Expr::Ident("add".to_string())),
2364 vec![
2365 sb(Expr::Literal(Literal::Int(2))),
2366 sb(Expr::Literal(Literal::Int(1))),
2367 ],
2368 )),
2369 ),
2370 (
2371 sb(Expr::FnCall(
2372 sbb(Expr::Ident("add".to_string())),
2373 vec![
2374 sb(Expr::Literal(Literal::Int(2))),
2375 sb(Expr::Literal(Literal::Int(3))),
2376 ],
2377 )),
2378 sb(Expr::FnCall(
2379 sbb(Expr::Ident("add".to_string())),
2380 vec![
2381 sb(Expr::Literal(Literal::Int(3))),
2382 sb(Expr::Literal(Literal::Int(2))),
2383 ],
2384 )),
2385 ),
2386 ],
2387 case_spans: vec![],
2388 case_givens: vec![],
2389 kind: VerifyKind::Law(Box::new(VerifyLaw {
2390 name: "commutative".to_string(),
2391 givens: vec![
2392 VerifyGiven {
2393 name: "a".to_string(),
2394 type_name: "Int".to_string(),
2395 domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
2396 },
2397 VerifyGiven {
2398 name: "b".to_string(),
2399 type_name: "Int".to_string(),
2400 domain: VerifyGivenDomain::Explicit(vec![
2401 sb(Expr::Literal(Literal::Int(2))),
2402 sb(Expr::Literal(Literal::Int(3))),
2403 ]),
2404 },
2405 ],
2406 when: None,
2407 lhs: sb(Expr::FnCall(
2408 sbb(Expr::Ident("add".to_string())),
2409 vec![
2410 sb(Expr::Ident("a".to_string())),
2411 sb(Expr::Ident("b".to_string())),
2412 ],
2413 )),
2414 rhs: sb(Expr::FnCall(
2415 sbb(Expr::Ident("add".to_string())),
2416 vec![
2417 sb(Expr::Ident("b".to_string())),
2418 sb(Expr::Ident("a".to_string())),
2419 ],
2420 )),
2421 sample_guards: vec![],
2422 })),
2423 }));
2424 ctx
2425 }
2426
2427 #[test]
2428 fn prelude_normalizes_float_string_format() {
2429 let prelude = generate_prelude();
2430 assert!(
2431 prelude.contains("private def normalizeFloatString (s : String) : String :="),
2432 "missing normalizeFloatString helper in prelude"
2433 );
2434 assert!(
2435 prelude.contains(
2436 "def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)"
2437 ),
2438 "String.fromFloat should normalize Lean float formatting"
2439 );
2440 }
2441
2442 #[test]
2443 fn prelude_validates_char_from_code_unicode_bounds() {
2444 let prelude = generate_prelude();
2445 assert!(
2446 prelude.contains("if n < 0 || n > 1114111 then none"),
2447 "Char.fromCode should reject code points above Unicode max"
2448 );
2449 assert!(
2450 prelude.contains("else if n >= 55296 && n <= 57343 then none"),
2451 "Char.fromCode should reject surrogate code points"
2452 );
2453 }
2454
2455 #[test]
2456 fn prelude_includes_map_set_helper_lemmas() {
2457 let prelude = generate_prelude();
2458 assert!(
2459 prelude.contains("theorem has_set_self [DecidableEq α]"),
2460 "missing AverMap.has_set_self helper theorem"
2461 );
2462 assert!(
2463 prelude.contains("theorem get_set_self [DecidableEq α]"),
2464 "missing AverMap.get_set_self helper theorem"
2465 );
2466 }
2467
2468 #[test]
2469 fn lean_output_without_map_usage_omits_map_prelude() {
2470 let ctx = ctx_from_source(
2471 r#"
2472module NoMap
2473 intent = "Simple pure program without maps."
2474
2475fn addOne(n: Int) -> Int
2476 n + 1
2477
2478verify addOne
2479 addOne(1) => 2
2480"#,
2481 "nomap",
2482 );
2483 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
2484 let lean = generated_lean_file(&out);
2485
2486 assert!(
2487 !lean.contains("namespace AverMap"),
2488 "did not expect AverMap prelude in program without map usage:\n{}",
2489 lean
2490 );
2491 }
2492
2493 #[test]
2494 fn transpile_emits_native_decide_for_verify_by_default() {
2495 let out = transpile(&empty_ctx_with_verify_case());
2496 let lean = out
2497 .files
2498 .iter()
2499 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2500 .expect("expected generated Lean file");
2501 assert!(lean.contains("example : 1 = 1 := by native_decide"));
2502 }
2503
2504 #[test]
2505 fn transpile_can_emit_sorry_for_verify_when_requested() {
2506 let out = transpile_with_verify_mode(&empty_ctx_with_verify_case(), VerifyEmitMode::Sorry);
2507 let lean = out
2508 .files
2509 .iter()
2510 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2511 .expect("expected generated Lean file");
2512 assert!(lean.contains("example : 1 = 1 := by sorry"));
2513 }
2514
2515 #[test]
2516 fn transpile_can_emit_theorem_skeletons_for_verify() {
2517 let out = transpile_with_verify_mode(
2518 &empty_ctx_with_verify_case(),
2519 VerifyEmitMode::TheoremSkeleton,
2520 );
2521 let lean = out
2522 .files
2523 .iter()
2524 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2525 .expect("expected generated Lean file");
2526 assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
2527 assert!(lean.contains(" sorry"));
2528 }
2529
2530 #[test]
2531 fn theorem_skeleton_numbering_is_global_per_function_across_verify_blocks() {
2532 let out = transpile_with_verify_mode(
2533 &empty_ctx_with_two_verify_blocks_same_fn(),
2534 VerifyEmitMode::TheoremSkeleton,
2535 );
2536 let lean = out
2537 .files
2538 .iter()
2539 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2540 .expect("expected generated Lean file");
2541 assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
2542 assert!(lean.contains("theorem f_verify_2 : 2 = 2 := by"));
2543 }
2544
2545 #[test]
2546 fn transpile_emits_named_theorems_for_verify_law() {
2547 let out = transpile(&empty_ctx_with_verify_law());
2548 let lean = out
2549 .files
2550 .iter()
2551 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2552 .expect("expected generated Lean file");
2553 assert!(lean.contains("-- verify law add.commutative (2 cases)"));
2554 assert!(lean.contains("-- given a: Int = 1..2"));
2555 assert!(lean.contains("-- given b: Int = [2, 3]"));
2556 assert!(lean.contains(
2557 "theorem add_law_commutative : ∀ (a : Int) (b : Int), add a b = add b a := by"
2558 ));
2559 assert!(lean.contains(" intro a b"));
2560 assert!(lean.contains(" simp [add, Int.add_comm]"));
2561 assert!(lean.contains(
2562 "theorem add_law_commutative_sample_1 : add 1 2 = add 2 1 := by native_decide"
2563 ));
2564 assert!(lean.contains(
2565 "theorem add_law_commutative_sample_2 : add 2 3 = add 3 2 := by native_decide"
2566 ));
2567 }
2568
2569 #[test]
2570 fn generate_prelude_emits_int_roundtrip_theorem() {
2571 let lean = generate_prelude();
2572 assert!(lean.contains(
2573 "theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n"
2574 ));
2575 assert!(lean.contains("theorem String.intercalate_empty_chars (s : String) :"));
2576 assert!(lean.contains("def splitOnCharGo"));
2577 assert!(lean.contains("theorem split_single_char_append"));
2578 assert!(lean.contains("theorem split_intercalate_trailing_single_char"));
2579 assert!(lean.contains("namespace AverDigits"));
2580 assert!(lean.contains("theorem String.charAt_length_none (s : String)"));
2581 assert!(lean.contains("theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->"));
2582 }
2583
2584 #[test]
2585 fn transpile_emits_guarded_theorems_for_verify_law_when_clause() {
2586 let ctx = ctx_from_source(
2587 r#"
2588module GuardedLaw
2589 intent =
2590 "verify law with precondition"
2591
2592fn pickGreater(a: Int, b: Int) -> Int
2593 match a > b
2594 true -> a
2595 false -> b
2596
2597verify pickGreater law ordered
2598 given a: Int = [1, 2]
2599 given b: Int = [1, 2]
2600 when a > b
2601 pickGreater(a, b) => a
2602"#,
2603 "guarded_law",
2604 );
2605 let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
2606 let lean = generated_lean_file(&out);
2607
2608 assert!(lean.contains("-- when (a > b)"));
2609 assert!(lean.contains(
2610 "theorem pickGreater_law_ordered : ∀ (a : Int) (b : Int), a = 1 ∨ a = 2 -> b = 1 ∨ b = 2 -> (a > b) = true -> pickGreater a b = a := by"
2611 ));
2612 assert!(lean.contains(
2613 "theorem pickGreater_law_ordered_sample_1 : (1 > 1) = true -> pickGreater 1 1 = 1 := by"
2614 ));
2615 assert!(lean.contains(
2616 "theorem pickGreater_law_ordered_sample_4 : (2 > 2) = true -> pickGreater 2 2 = 2 := by"
2617 ));
2618 }
2619
2620 #[test]
2621 fn transpile_uses_spec_theorem_names_for_declared_spec_laws() {
2622 let ctx = ctx_from_source(
2623 r#"
2624module SpecDemo
2625 intent =
2626 "spec demo"
2627
2628fn absVal(x: Int) -> Int
2629 match x < 0
2630 true -> 0 - x
2631 false -> x
2632
2633fn absValSpec(x: Int) -> Int
2634 match x < 0
2635 true -> 0 - x
2636 false -> x
2637
2638verify absVal law absValSpec
2639 given x: Int = [-2, -1, 0, 1, 2]
2640 absVal(x) => absValSpec(x)
2641"#,
2642 "spec_demo",
2643 );
2644 let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
2645 let lean = generated_lean_file(&out);
2646
2647 assert!(lean.contains("-- verify law absVal.spec absValSpec (5 cases)"));
2648 assert!(
2649 lean.contains(
2650 "theorem absVal_eq_absValSpec : ∀ (x : Int), absVal x = absValSpec x := by"
2651 )
2652 );
2653 assert!(lean.contains("theorem absVal_eq_absValSpec_checked_domain :"));
2654 assert!(lean.contains("theorem absVal_eq_absValSpec_sample_1 :"));
2655 assert!(!lean.contains("theorem absVal_law_absValSpec :"));
2656 }
2657
2658 #[test]
2659 fn transpile_keeps_noncanonical_spec_laws_as_regular_law_names() {
2660 let ctx = ctx_from_source(
2661 r#"
2662module SpecLawShape
2663 intent =
2664 "shape probe"
2665
2666fn foo(x: Int) -> Int
2667 x + 1
2668
2669fn fooSpec(seed: Int, x: Int) -> Int
2670 x + seed
2671
2672verify foo law fooSpec
2673 given x: Int = [1, 2]
2674 foo(x) => fooSpec(1, x)
2675"#,
2676 "spec_law_shape",
2677 );
2678 let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
2679 let lean = generated_lean_file(&out);
2680
2681 assert!(lean.contains("-- verify law foo.fooSpec (2 cases)"));
2682 assert!(lean.contains("theorem foo_law_fooSpec : ∀ (x : Int), foo x = fooSpec 1 x := by"));
2683 assert!(!lean.contains("theorem foo_eq_fooSpec :"));
2684 }
2685
2686 #[test]
2687 fn transpile_auto_proves_linear_int_canonical_spec_law_in_auto_mode() {
2688 let ctx = ctx_from_source(
2689 r#"
2690module SpecGap
2691 intent =
2692 "nontrivial canonical spec law"
2693
2694fn inc(x: Int) -> Int
2695 x + 1
2696
2697fn incSpec(x: Int) -> Int
2698 x + 2 - 1
2699
2700verify inc law incSpec
2701 given x: Int = [0, 1, 2]
2702 inc(x) => incSpec(x)
2703"#,
2704 "spec_gap",
2705 );
2706 let out = transpile(&ctx);
2707 let lean = generated_lean_file(&out);
2708
2709 assert!(lean.contains("-- verify law inc.spec incSpec (3 cases)"));
2710 assert!(lean.contains("theorem inc_eq_incSpec : ∀ (x : Int), inc x = incSpec x := by"));
2711 assert!(lean.contains("change (x + 1) = ((x + 2) - 1)"));
2712 assert!(lean.contains("omega"));
2713 assert!(!lean.contains(
2714 "-- universal theorem inc_eq_incSpec omitted: sampled law shape is not auto-proved yet"
2715 ));
2716 assert!(lean.contains("theorem inc_eq_incSpec_checked_domain :"));
2717 }
2718
2719 #[test]
2720 fn transpile_auto_proves_guarded_canonical_spec_law_in_auto_mode() {
2721 let ctx = ctx_from_source(
2722 r#"
2723module GuardedSpecGap
2724 intent =
2725 "guarded canonical spec law"
2726
2727fn clampNonNegative(x: Int) -> Int
2728 match x < 0
2729 true -> 0
2730 false -> x
2731
2732fn clampNonNegativeSpec(x: Int) -> Int
2733 match x < 0
2734 true -> 0
2735 false -> x
2736
2737verify clampNonNegative law clampNonNegativeSpec
2738 given x: Int = [-2, -1, 0, 1, 2]
2739 when x >= 0
2740 clampNonNegative(x) => clampNonNegativeSpec(x)
2741"#,
2742 "guarded_spec_gap",
2743 );
2744 let out = transpile(&ctx);
2745 let lean = generated_lean_file(&out);
2746
2747 assert!(lean.contains("-- when (x >= 0)"));
2748 assert!(lean.contains(
2749 "theorem clampNonNegative_eq_clampNonNegativeSpec : ∀ (x : Int), x = (-2) ∨ x = (-1) ∨ x = 0 ∨ x = 1 ∨ x = 2 -> (x >= 0) = true -> clampNonNegative x = clampNonNegativeSpec x := by"
2750 ));
2751 assert!(lean.contains("intro x h_x h_when"));
2752 assert!(lean.contains("simpa [clampNonNegative, clampNonNegativeSpec]"));
2753 assert!(!lean.contains(
2754 "-- universal theorem clampNonNegative_eq_clampNonNegativeSpec omitted: sampled law shape is not auto-proved yet"
2755 ));
2756 assert!(!lean.contains("cases h_x"));
2757 }
2758
2759 #[test]
2760 fn transpile_auto_proves_simp_normalized_canonical_spec_law_in_auto_mode() {
2761 let ctx = ctx_from_source(
2762 r#"
2763module SpecGapNonlinear
2764 intent =
2765 "nonlinear canonical spec law"
2766
2767fn square(x: Int) -> Int
2768 x * x
2769
2770fn squareSpec(x: Int) -> Int
2771 x * x + 0
2772
2773verify square law squareSpec
2774 given x: Int = [0, 1, 2]
2775 square(x) => squareSpec(x)
2776"#,
2777 "spec_gap_nonlinear",
2778 );
2779 let out = transpile(&ctx);
2780 let lean = generated_lean_file(&out);
2781
2782 assert!(lean.contains("-- verify law square.spec squareSpec (3 cases)"));
2783 assert!(
2784 lean.contains(
2785 "theorem square_eq_squareSpec : ∀ (x : Int), square x = squareSpec x := by"
2786 )
2787 );
2788 assert!(lean.contains("simp [square, squareSpec]"));
2789 assert!(!lean.contains(
2790 "-- universal theorem square_eq_squareSpec omitted: sampled law shape is not auto-proved yet"
2791 ));
2792 assert!(lean.contains("theorem square_eq_squareSpec_checked_domain :"));
2793 assert!(lean.contains("theorem square_eq_squareSpec_sample_1 :"));
2794 }
2795
2796 #[test]
2797 fn transpile_auto_proves_reflexive_law_with_rfl() {
2798 let mut ctx = empty_ctx();
2799 ctx.items.push(TopLevel::Verify(VerifyBlock {
2800 fn_name: "idLaw".to_string(),
2801 line: 1,
2802 cases: vec![(
2803 sb(Expr::Literal(Literal::Int(1))),
2804 sb(Expr::Literal(Literal::Int(1))),
2805 )],
2806 case_spans: vec![],
2807 case_givens: vec![],
2808 kind: VerifyKind::Law(Box::new(VerifyLaw {
2809 name: "reflexive".to_string(),
2810 givens: vec![VerifyGiven {
2811 name: "x".to_string(),
2812 type_name: "Int".to_string(),
2813 domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
2814 }],
2815 when: None,
2816 lhs: sb(Expr::Ident("x".to_string())),
2817 rhs: sb(Expr::Ident("x".to_string())),
2818 sample_guards: vec![],
2819 })),
2820 }));
2821 let out = transpile(&ctx);
2822 let lean = out
2823 .files
2824 .iter()
2825 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2826 .expect("expected generated Lean file");
2827 assert!(lean.contains("theorem idLaw_law_reflexive : ∀ (x : Int), x = x := by"));
2828 assert!(lean.contains(" intro x"));
2829 assert!(lean.contains(" rfl"));
2830 }
2831
2832 #[test]
2833 fn transpile_auto_proves_identity_law_for_int_add_wrapper() {
2834 let mut ctx = empty_ctx_with_verify_law();
2835 ctx.items.push(TopLevel::Verify(VerifyBlock {
2836 fn_name: "add".to_string(),
2837 line: 10,
2838 cases: vec![(
2839 sb(Expr::FnCall(
2840 sbb(Expr::Ident("add".to_string())),
2841 vec![
2842 sb(Expr::Literal(Literal::Int(1))),
2843 sb(Expr::Literal(Literal::Int(0))),
2844 ],
2845 )),
2846 sb(Expr::Literal(Literal::Int(1))),
2847 )],
2848 case_spans: vec![],
2849 case_givens: vec![],
2850 kind: VerifyKind::Law(Box::new(VerifyLaw {
2851 name: "identityZero".to_string(),
2852 givens: vec![VerifyGiven {
2853 name: "a".to_string(),
2854 type_name: "Int".to_string(),
2855 domain: VerifyGivenDomain::Explicit(vec![
2856 sb(Expr::Literal(Literal::Int(0))),
2857 sb(Expr::Literal(Literal::Int(1))),
2858 ]),
2859 }],
2860 when: None,
2861 lhs: sb(Expr::FnCall(
2862 sbb(Expr::Ident("add".to_string())),
2863 vec![
2864 sb(Expr::Ident("a".to_string())),
2865 sb(Expr::Literal(Literal::Int(0))),
2866 ],
2867 )),
2868 rhs: sb(Expr::Ident("a".to_string())),
2869 sample_guards: vec![],
2870 })),
2871 }));
2872 let out = transpile(&ctx);
2873 let lean = out
2874 .files
2875 .iter()
2876 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2877 .expect("expected generated Lean file");
2878 assert!(lean.contains("theorem add_law_identityZero : ∀ (a : Int), add a 0 = a := by"));
2879 assert!(lean.contains(" intro a"));
2880 assert!(lean.contains(" simp [add]"));
2881 }
2882
2883 #[test]
2884 fn transpile_auto_proves_associative_law_for_int_add_wrapper() {
2885 let mut ctx = empty_ctx_with_verify_law();
2886 ctx.items.push(TopLevel::Verify(VerifyBlock {
2887 fn_name: "add".to_string(),
2888 line: 20,
2889 cases: vec![(
2890 sb(Expr::FnCall(
2891 sbb(Expr::Ident("add".to_string())),
2892 vec![
2893 sb(Expr::FnCall(
2894 sbb(Expr::Ident("add".to_string())),
2895 vec![
2896 sb(Expr::Literal(Literal::Int(1))),
2897 sb(Expr::Literal(Literal::Int(2))),
2898 ],
2899 )),
2900 sb(Expr::Literal(Literal::Int(3))),
2901 ],
2902 )),
2903 sb(Expr::FnCall(
2904 sbb(Expr::Ident("add".to_string())),
2905 vec![
2906 sb(Expr::Literal(Literal::Int(1))),
2907 sb(Expr::FnCall(
2908 sbb(Expr::Ident("add".to_string())),
2909 vec![
2910 sb(Expr::Literal(Literal::Int(2))),
2911 sb(Expr::Literal(Literal::Int(3))),
2912 ],
2913 )),
2914 ],
2915 )),
2916 )],
2917 case_spans: vec![],
2918 case_givens: vec![],
2919 kind: VerifyKind::Law(Box::new(VerifyLaw {
2920 name: "associative".to_string(),
2921 givens: vec![
2922 VerifyGiven {
2923 name: "a".to_string(),
2924 type_name: "Int".to_string(),
2925 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
2926 1,
2927 )))]),
2928 },
2929 VerifyGiven {
2930 name: "b".to_string(),
2931 type_name: "Int".to_string(),
2932 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
2933 2,
2934 )))]),
2935 },
2936 VerifyGiven {
2937 name: "c".to_string(),
2938 type_name: "Int".to_string(),
2939 domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
2940 3,
2941 )))]),
2942 },
2943 ],
2944 when: None,
2945 lhs: sb(Expr::FnCall(
2946 sbb(Expr::Ident("add".to_string())),
2947 vec![
2948 sb(Expr::FnCall(
2949 sbb(Expr::Ident("add".to_string())),
2950 vec![
2951 sb(Expr::Ident("a".to_string())),
2952 sb(Expr::Ident("b".to_string())),
2953 ],
2954 )),
2955 sb(Expr::Ident("c".to_string())),
2956 ],
2957 )),
2958 rhs: sb(Expr::FnCall(
2959 sbb(Expr::Ident("add".to_string())),
2960 vec![
2961 sb(Expr::Ident("a".to_string())),
2962 sb(Expr::FnCall(
2963 sbb(Expr::Ident("add".to_string())),
2964 vec![
2965 sb(Expr::Ident("b".to_string())),
2966 sb(Expr::Ident("c".to_string())),
2967 ],
2968 )),
2969 ],
2970 )),
2971 sample_guards: vec![],
2972 })),
2973 }));
2974 let out = transpile(&ctx);
2975 let lean = out
2976 .files
2977 .iter()
2978 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
2979 .expect("expected generated Lean file");
2980 assert!(lean.contains(
2981 "theorem add_law_associative : ∀ (a : Int) (b : Int) (c : Int), add (add a b) c = add a (add b c) := by"
2982 ));
2983 assert!(lean.contains(" intro a b c"));
2984 assert!(lean.contains(" simp [add, Int.add_assoc]"));
2985 }
2986
    #[test]
    // Builds `sub(a, b) = a - b` plus two verified laws and checks that the
    // transpiler emits a closed auto-proof for each (no `sorry`/skeleton).
    fn transpile_auto_proves_sub_laws() {
        let mut ctx = empty_ctx();
        // sub(a: Int, b: Int) -> Int = a - b
        let sub = FnDef {
            name: "sub".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Sub,
                sbb(Expr::Ident("a".to_string())),
                sbb(Expr::Ident("b".to_string())),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(sub.clone());
        ctx.items.push(TopLevel::FnDef(sub));

        // Law rightIdentity: ∀ a, sub a 0 = a, sampled at a = 2.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "sub".to_string(),
            line: 10,
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(2))),
                        sb(Expr::Literal(Literal::Int(0))),
                    ],
                )),
                sb(Expr::Literal(Literal::Int(2))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "rightIdentity".to_string(),
                givens: vec![VerifyGiven {
                    name: "a".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
                }],
                when: None,
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Literal(Literal::Int(0))),
                    ],
                )),
                rhs: sb(Expr::Ident("a".to_string())),
                sample_guards: vec![],
            })),
        }));
        // Law antiCommutative: ∀ a b, sub a b = 0 - sub b a, sampled at (2, 1).
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "sub".to_string(),
            line: 20,
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(2))),
                        sb(Expr::Literal(Literal::Int(1))),
                    ],
                )),
                sb(Expr::BinOp(
                    BinOp::Sub,
                    sbb(Expr::Literal(Literal::Int(0))),
                    sbb(Expr::FnCall(
                        sbb(Expr::Ident("sub".to_string())),
                        vec![
                            sb(Expr::Literal(Literal::Int(1))),
                            sb(Expr::Literal(Literal::Int(2))),
                        ],
                    )),
                )),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "antiCommutative".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            2,
                        )))]),
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            1,
                        )))]),
                    },
                ],
                when: None,
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("sub".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Ident("b".to_string())),
                    ],
                )),
                rhs: sb(Expr::BinOp(
                    BinOp::Sub,
                    sbb(Expr::Literal(Literal::Int(0))),
                    sbb(Expr::FnCall(
                        sbb(Expr::Ident("sub".to_string())),
                        vec![
                            sb(Expr::Ident("b".to_string())),
                            sb(Expr::Ident("a".to_string())),
                        ],
                    )),
                )),
                sample_guards: vec![],
            })),
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        // rightIdentity closes by unfolding `sub`; antiCommutative needs
        // `Int.neg_sub` via `simpa`.
        assert!(lean.contains("theorem sub_law_rightIdentity : ∀ (a : Int), sub a 0 = a := by"));
        assert!(lean.contains(" simp [sub]"));
        assert!(lean.contains(
            "theorem sub_law_antiCommutative : ∀ (a : Int) (b : Int), sub a b = (-sub b a) := by"
        ));
        assert!(lean.contains(" simpa [sub] using (Int.neg_sub b a).symm"));
    }
3123
    #[test]
    // A unary wrapper (`addOne`) whose law equates it with a call to a binary
    // function (`add`) should be auto-proved by unfolding both definitions.
    fn transpile_auto_proves_unary_wrapper_equivalence_law() {
        let mut ctx = empty_ctx();
        // add(a: Int, b: Int) -> Int = a + b
        let add = FnDef {
            name: "add".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("a".to_string())),
                sbb(Expr::Ident("b".to_string())),
            )))),
            resolution: None,
        };
        // addOne(n: Int) -> Int = n + 1
        let add_one = FnDef {
            name: "addOne".to_string(),
            line: 2,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("n".to_string())),
                sbb(Expr::Literal(Literal::Int(1))),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(add.clone());
        ctx.fn_defs.push(add_one.clone());
        ctx.items.push(TopLevel::FnDef(add));
        ctx.items.push(TopLevel::FnDef(add_one));
        // Law identityViaAdd: ∀ n, addOne n = add n 1, sampled at n = 2.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "addOne".to_string(),
            line: 3,
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![sb(Expr::Literal(Literal::Int(2)))],
                )),
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(2))),
                        sb(Expr::Literal(Literal::Int(1))),
                    ],
                )),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "identityViaAdd".to_string(),
                givens: vec![VerifyGiven {
                    name: "n".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
                }],
                when: None,
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![sb(Expr::Ident("n".to_string()))],
                )),
                rhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("n".to_string())),
                        sb(Expr::Literal(Literal::Int(1))),
                    ],
                )),
                sample_guards: vec![],
            })),
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        // The proof should simp with both definitions unfolded.
        assert!(
            lean.contains(
                "theorem addOne_law_identityViaAdd : ∀ (n : Int), addOne n = add n 1 := by"
            )
        );
        assert!(lean.contains(" simp [addOne, add]"));
    }
3215
    #[test]
    // Laws stated directly over builtin Map operations (no user function) should
    // be discharged by the prelude's AverMap lemmas.
    fn transpile_auto_proves_direct_map_set_laws() {
        let mut ctx = empty_ctx();

        // Closures that build the `Map.*` / `Option.Some` call expressions used
        // by both law blocks below.
        let map_set = |m: Spanned<Expr>, k: Spanned<Expr>, v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "set".to_string(),
                )),
                vec![m, k, v],
            ))
        };
        let map_has = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "has".to_string(),
                )),
                vec![m, k],
            ))
        };
        let map_get = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "get".to_string(),
                )),
                vec![m, k],
            ))
        };
        let some = |v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Option".to_string())),
                    "Some".to_string(),
                )),
                vec![v],
            ))
        };
        let map_empty = || {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "empty".to_string(),
                )),
                vec![],
            ))
        };

        // Law setHasKey: Map.has (Map.set m k v) k = true, sampled at
        // m = Map.empty(), k = "a", v = 1.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "map".to_string(),
            line: 1,
            cases: vec![(
                map_has(
                    map_set(
                        sb(Expr::Ident("m".to_string())),
                        sb(Expr::Ident("k".to_string())),
                        sb(Expr::Ident("v".to_string())),
                    ),
                    sb(Expr::Ident("k".to_string())),
                ),
                sb(Expr::Literal(Literal::Bool(true))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "setHasKey".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "m".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                    },
                    VerifyGiven {
                        name: "k".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
                            "a".to_string(),
                        )))]),
                    },
                    VerifyGiven {
                        name: "v".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            1,
                        )))]),
                    },
                ],
                when: None,
                lhs: map_has(
                    map_set(
                        sb(Expr::Ident("m".to_string())),
                        sb(Expr::Ident("k".to_string())),
                        sb(Expr::Ident("v".to_string())),
                    ),
                    sb(Expr::Ident("k".to_string())),
                ),
                rhs: sb(Expr::Literal(Literal::Bool(true))),
                sample_guards: vec![],
            })),
        }));

        // Law setGetKey: Map.get (Map.set m k v) k = Option.Some v, same samples.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "map".to_string(),
            line: 2,
            cases: vec![(
                map_get(
                    map_set(
                        sb(Expr::Ident("m".to_string())),
                        sb(Expr::Ident("k".to_string())),
                        sb(Expr::Ident("v".to_string())),
                    ),
                    sb(Expr::Ident("k".to_string())),
                ),
                some(sb(Expr::Ident("v".to_string()))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "setGetKey".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "m".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                    },
                    VerifyGiven {
                        name: "k".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
                            "a".to_string(),
                        )))]),
                    },
                    VerifyGiven {
                        name: "v".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            1,
                        )))]),
                    },
                ],
                when: None,
                lhs: map_get(
                    map_set(
                        sb(Expr::Ident("m".to_string())),
                        sb(Expr::Ident("k".to_string())),
                        sb(Expr::Ident("v".to_string())),
                    ),
                    sb(Expr::Ident("k".to_string())),
                ),
                rhs: some(sb(Expr::Ident("v".to_string()))),
                sample_guards: vec![],
            })),
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        // Each law is closed by the matching AverMap prelude lemma.
        assert!(lean.contains("simpa using AverMap.has_set_self m k v"));
        assert!(lean.contains("simpa using AverMap.get_set_self m k v"));
    }
3381
3382 #[test]
3383 fn transpile_auto_proves_direct_recursive_sum_law_by_structural_induction() {
3384 let ctx = ctx_from_source(
3385 r#"
3386module Mirror
3387 intent =
3388 "direct recursive sum induction probe"
3389
3390type Tree
3391 Leaf(Int)
3392 Node(Tree, Tree)
3393
3394fn mirror(t: Tree) -> Tree
3395 match t
3396 Tree.Leaf(v) -> Tree.Leaf(v)
3397 Tree.Node(left, right) -> Tree.Node(mirror(right), mirror(left))
3398
3399verify mirror law involutive
3400 given t: Tree = [Tree.Leaf(1), Tree.Node(Tree.Leaf(1), Tree.Leaf(2))]
3401 mirror(mirror(t)) => t
3402"#,
3403 "mirror",
3404 );
3405 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3406 let lean = generated_lean_file(&out);
3407
3408 assert!(
3409 lean.contains(
3410 "theorem mirror_law_involutive : ∀ (t : Tree), mirror (mirror t) = t := by"
3411 )
3412 );
3413 assert!(lean.contains(" induction t with"));
3414 assert!(lean.contains(" | leaf f0 => simp [mirror]"));
3415 assert!(lean.contains(" | node f0 f1 ih0 ih1 => simp_all [mirror]"));
3416 assert!(!lean.contains(
3417 "-- universal theorem mirror_law_involutive omitted: sampled law shape is not auto-proved yet"
3418 ));
3419 }
3420
    #[test]
    // A read-modify-write map function (`incCount`) with two laws; both should
    // be auto-proved by case-splitting on the `Map.get` result and applying the
    // AverMap set/get lemmas.
    fn transpile_auto_proves_map_update_laws() {
        let mut ctx = empty_ctx();

        // Closures that build the builtin Map/Option call expressions used below.
        let map_get = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "get".to_string(),
                )),
                vec![m, k],
            ))
        };
        let map_set = |m: Spanned<Expr>, k: Spanned<Expr>, v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "set".to_string(),
                )),
                vec![m, k, v],
            ))
        };
        let map_has = |m: Spanned<Expr>, k: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "has".to_string(),
                )),
                vec![m, k],
            ))
        };
        let option_some = |v: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Option".to_string())),
                    "Some".to_string(),
                )),
                vec![v],
            ))
        };
        let option_with_default = |opt: Spanned<Expr>, def: Spanned<Expr>| {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Option".to_string())),
                    "withDefault".to_string(),
                )),
                vec![opt, def],
            ))
        };
        let map_empty = || {
            sb(Expr::FnCall(
                sbb(Expr::Attr(
                    sbb(Expr::Ident("Map".to_string())),
                    "empty".to_string(),
                )),
                vec![],
            ))
        };

        // addOne(n: Int) -> Int = n + 1 (referenced by the second law's rhs).
        let add_one = FnDef {
            name: "addOne".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("n".to_string())),
                sbb(Expr::Literal(Literal::Int(1))),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(add_one.clone());
        ctx.items.push(TopLevel::FnDef(add_one));

        // incCount(counts, word): look up `word`, then set it to n + 1 when
        // present (Option.Some n) or to 1 when absent (Option.None).
        let inc_count = FnDef {
            name: "incCount".to_string(),
            line: 2,
            params: vec![
                ("counts".to_string(), "Map<String, Int>".to_string()),
                ("word".to_string(), "String".to_string()),
            ],
            return_type: "Map<String, Int>".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::Block(vec![
                Stmt::Binding(
                    "current".to_string(),
                    None,
                    map_get(
                        sb(Expr::Ident("counts".to_string())),
                        sb(Expr::Ident("word".to_string())),
                    ),
                ),
                Stmt::Expr(sb(Expr::Match {
                    subject: sbb(Expr::Ident("current".to_string())),
                    arms: vec![
                        MatchArm {
                            pattern: Pattern::Constructor(
                                "Option.Some".to_string(),
                                vec!["n".to_string()],
                            ),
                            body: Box::new(map_set(
                                sb(Expr::Ident("counts".to_string())),
                                sb(Expr::Ident("word".to_string())),
                                sb(Expr::BinOp(
                                    BinOp::Add,
                                    sbb(Expr::Ident("n".to_string())),
                                    sbb(Expr::Literal(Literal::Int(1))),
                                )),
                            )),
                        },
                        MatchArm {
                            pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
                            body: Box::new(map_set(
                                sb(Expr::Ident("counts".to_string())),
                                sb(Expr::Ident("word".to_string())),
                                sb(Expr::Literal(Literal::Int(1))),
                            )),
                        },
                    ],
                })),
            ])),
            resolution: None,
        };
        ctx.fn_defs.push(inc_count.clone());
        ctx.items.push(TopLevel::FnDef(inc_count));

        // Law keyPresent: Map.has (incCount counts word) word = true.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "incCount".to_string(),
            line: 10,
            cases: vec![(
                map_has(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Ident("word".to_string())),
                        ],
                    )),
                    sb(Expr::Ident("word".to_string())),
                ),
                sb(Expr::Literal(Literal::Bool(true))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "keyPresent".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "counts".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                    },
                    VerifyGiven {
                        name: "word".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Str(
                            "a".to_string(),
                        )))]),
                    },
                ],
                when: None,
                lhs: map_has(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Ident("word".to_string())),
                        ],
                    )),
                    sb(Expr::Ident("word".to_string())),
                ),
                rhs: sb(Expr::Literal(Literal::Bool(true))),
                sample_guards: vec![],
            })),
        }));

        // Law existingKeyIncrements: getting "a" after incCount equals
        // Option.Some (addOne (Option.withDefault (Map.get counts "a") 0)).
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "incCount".to_string(),
            line: 20,
            cases: vec![(
                map_get(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ],
                    )),
                    sb(Expr::Literal(Literal::Str("a".to_string()))),
                ),
                option_some(sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![option_with_default(
                        map_get(
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ),
                        sb(Expr::Literal(Literal::Int(0))),
                    )],
                ))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "existingKeyIncrements".to_string(),
                givens: vec![VerifyGiven {
                    name: "counts".to_string(),
                    type_name: "Map<String, Int>".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![map_empty()]),
                }],
                when: None,
                lhs: map_get(
                    sb(Expr::FnCall(
                        sbb(Expr::Ident("incCount".to_string())),
                        vec![
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ],
                    )),
                    sb(Expr::Literal(Literal::Str("a".to_string()))),
                ),
                rhs: option_some(sb(Expr::FnCall(
                    sbb(Expr::Ident("addOne".to_string())),
                    vec![option_with_default(
                        map_get(
                            sb(Expr::Ident("counts".to_string())),
                            sb(Expr::Literal(Literal::Str("a".to_string()))),
                        ),
                        sb(Expr::Literal(Literal::Int(0))),
                    )],
                ))),
                sample_guards: vec![],
            })),
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        // Both proofs case-split on the Map.get result before simp.
        assert!(
            lean.contains("cases h : AverMap.get counts word <;> simp [AverMap.has_set_self]"),
            "expected keyPresent auto-proof with has_set_self"
        );
        assert!(
            lean.contains("cases h : AverMap.get counts \"a\" <;> simp [AverMap.get_set_self, addOne, incCount]"),
            "expected existingKeyIncrements auto-proof with get_set_self"
        );
    }
3674
    #[test]
    // Negative integer literals used as call arguments in sampled law theorems
    // must be parenthesized in the generated Lean (`add (-2) (-1)`, not
    // `add -2 -1`, which Lean would parse as subtraction).
    fn transpile_parenthesizes_negative_int_call_args_in_law_samples() {
        let mut ctx = empty_ctx();
        // add(a: Int, b: Int) -> Int = a + b
        let add = FnDef {
            name: "add".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::BinOp(
                BinOp::Add,
                sbb(Expr::Ident("a".to_string())),
                sbb(Expr::Ident("b".to_string())),
            )))),
            resolution: None,
        };
        ctx.fn_defs.push(add.clone());
        ctx.items.push(TopLevel::FnDef(add));
        // Law commutative with negative samples a = -2, b = -1.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 1,
            cases: vec![(
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(-2))),
                        sb(Expr::Literal(Literal::Int(-1))),
                    ],
                )),
                sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Literal(Literal::Int(-1))),
                        sb(Expr::Literal(Literal::Int(-2))),
                    ],
                )),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "commutative".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            -2,
                        )))]),
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(
                            -1,
                        )))]),
                    },
                ],
                when: None,
                lhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("a".to_string())),
                        sb(Expr::Ident("b".to_string())),
                    ],
                )),
                rhs: sb(Expr::FnCall(
                    sbb(Expr::Ident("add".to_string())),
                    vec![
                        sb(Expr::Ident("b".to_string())),
                        sb(Expr::Ident("a".to_string())),
                    ],
                )),
                sample_guards: vec![],
            })),
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains(
            "theorem add_law_commutative_sample_1 : add (-2) (-1) = add (-1) (-2) := by native_decide"
        ));
    }
3765
    #[test]
    // A plain verify-cases block and a law block for the same function must be
    // numbered independently: case theorems use the `f_verify_N` sequence while
    // law samples are numbered within the law's own name (`f_law_identity_sample_N`),
    // so the single sample here is `_sample_1`, never `_sample_2`.
    fn verify_law_numbering_is_scoped_per_law_name() {
        let mut ctx = empty_ctx();
        // f(x: Int) -> Int = x
        let f = FnDef {
            name: "f".to_string(),
            line: 1,
            params: vec![("x".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Ident("x".to_string())))),
            resolution: None,
        };
        ctx.fn_defs.push(f.clone());
        ctx.items.push(TopLevel::FnDef(f));
        // Plain case verification: 1 => 1.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 1,
            cases: vec![(
                sb(Expr::Literal(Literal::Int(1))),
                sb(Expr::Literal(Literal::Int(1))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Cases,
        }));
        // Law "identity" with a single sample x = 2.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 2,
            cases: vec![(
                sb(Expr::Literal(Literal::Int(2))),
                sb(Expr::Literal(Literal::Int(2))),
            )],
            case_spans: vec![],
            case_givens: vec![],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "identity".to_string(),
                givens: vec![VerifyGiven {
                    name: "x".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![sb(Expr::Literal(Literal::Int(2)))]),
                }],
                when: None,
                lhs: sb(Expr::Ident("x".to_string())),
                rhs: sb(Expr::Ident("x".to_string())),
                sample_guards: vec![],
            })),
        }));
        let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
        assert!(lean.contains("theorem f_law_identity : ∀ (x : Int), x = x := by"));
        assert!(lean.contains("theorem f_law_identity_sample_1 : 2 = 2 := by"));
        assert!(!lean.contains("theorem f_law_identity_sample_2 : 2 = 2 := by"));
    }
3825
3826 #[test]
3827 fn proof_mode_accepts_single_int_countdown_recursion() {
3828 let mut ctx = empty_ctx();
3829 let down = FnDef {
3830 name: "down".to_string(),
3831 line: 1,
3832 params: vec![("n".to_string(), "Int".to_string())],
3833 return_type: "Int".to_string(),
3834 effects: vec![],
3835 desc: None,
3836 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
3837 subject: sbb(Expr::Ident("n".to_string())),
3838 arms: vec![
3839 MatchArm {
3840 pattern: Pattern::Literal(Literal::Int(0)),
3841 body: sbb(Expr::Literal(Literal::Int(0))),
3842 },
3843 MatchArm {
3844 pattern: Pattern::Wildcard,
3845 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
3846 "down".to_string(),
3847 vec![sb(Expr::BinOp(
3848 BinOp::Sub,
3849 sbb(Expr::Ident("n".to_string())),
3850 sbb(Expr::Literal(Literal::Int(1))),
3851 ))],
3852 )))),
3853 },
3854 ],
3855 }))),
3856 resolution: None,
3857 };
3858 ctx.items.push(TopLevel::FnDef(down.clone()));
3859 ctx.fn_defs.push(down);
3860
3861 let issues = proof_mode_issues(&ctx);
3862 assert!(
3863 issues.is_empty(),
3864 "expected Int countdown recursion to be accepted, got: {:?}",
3865 issues
3866 );
3867
3868 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3869 let lean = out
3870 .files
3871 .iter()
3872 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3873 .expect("expected generated Lean file");
3874 assert!(lean.contains("def down__fuel"));
3875 assert!(lean.contains("def down (n : Int) : Int :="));
3876 assert!(lean.contains("down__fuel ((Int.natAbs n) + 1) n"));
3877 }
3878
3879 #[test]
3880 fn proof_mode_accepts_single_int_countdown_on_nonfirst_param() {
3881 let mut ctx = empty_ctx();
3882 let repeat_like = FnDef {
3883 name: "repeatLike".to_string(),
3884 line: 1,
3885 params: vec![
3886 ("char".to_string(), "String".to_string()),
3887 ("n".to_string(), "Int".to_string()),
3888 ],
3889 return_type: "List<String>".to_string(),
3890 effects: vec![],
3891 desc: None,
3892 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
3893 subject: sbb(Expr::BinOp(
3894 BinOp::Lte,
3895 sbb(Expr::Ident("n".to_string())),
3896 sbb(Expr::Literal(Literal::Int(0))),
3897 )),
3898 arms: vec![
3899 MatchArm {
3900 pattern: Pattern::Literal(Literal::Bool(true)),
3901 body: sbb(Expr::List(vec![])),
3902 },
3903 MatchArm {
3904 pattern: Pattern::Literal(Literal::Bool(false)),
3905 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
3906 "repeatLike".to_string(),
3907 vec![
3908 sb(Expr::Ident("char".to_string())),
3909 sb(Expr::BinOp(
3910 BinOp::Sub,
3911 sbb(Expr::Ident("n".to_string())),
3912 sbb(Expr::Literal(Literal::Int(1))),
3913 )),
3914 ],
3915 )))),
3916 },
3917 ],
3918 }))),
3919 resolution: None,
3920 };
3921 ctx.items.push(TopLevel::FnDef(repeat_like.clone()));
3922 ctx.fn_defs.push(repeat_like);
3923
3924 let issues = proof_mode_issues(&ctx);
3925 assert!(
3926 issues.is_empty(),
3927 "expected non-first Int countdown recursion to be accepted, got: {:?}",
3928 issues
3929 );
3930
3931 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3932 let lean = out
3933 .files
3934 .iter()
3935 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3936 .expect("expected generated Lean file");
3937 assert!(lean.contains("def repeatLike__fuel"));
3938 assert!(lean.contains("def repeatLike (char : String) (n : Int) : List String :="));
3939 assert!(lean.contains("repeatLike__fuel ((Int.natAbs n) + 1) char n"));
3940 }
3941
3942 #[test]
3943 fn proof_mode_accepts_negative_guarded_int_ascent() {
3944 let mut ctx = empty_ctx();
3945 let normalize = FnDef {
3946 name: "normalize".to_string(),
3947 line: 1,
3948 params: vec![("angle".to_string(), "Int".to_string())],
3949 return_type: "Int".to_string(),
3950 effects: vec![],
3951 desc: None,
3952 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
3953 subject: sbb(Expr::BinOp(
3954 BinOp::Lt,
3955 sbb(Expr::Ident("angle".to_string())),
3956 sbb(Expr::Literal(Literal::Int(0))),
3957 )),
3958 arms: vec![
3959 MatchArm {
3960 pattern: Pattern::Literal(Literal::Bool(true)),
3961 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
3962 "normalize".to_string(),
3963 vec![sb(Expr::BinOp(
3964 BinOp::Add,
3965 sbb(Expr::Ident("angle".to_string())),
3966 sbb(Expr::Literal(Literal::Int(360))),
3967 ))],
3968 )))),
3969 },
3970 MatchArm {
3971 pattern: Pattern::Literal(Literal::Bool(false)),
3972 body: sbb(Expr::Ident("angle".to_string())),
3973 },
3974 ],
3975 }))),
3976 resolution: None,
3977 };
3978 ctx.items.push(TopLevel::FnDef(normalize.clone()));
3979 ctx.fn_defs.push(normalize);
3980
3981 let issues = proof_mode_issues(&ctx);
3982 assert!(
3983 issues.is_empty(),
3984 "expected negative-guarded Int ascent recursion to be accepted, got: {:?}",
3985 issues
3986 );
3987
3988 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3989 let lean = out
3990 .files
3991 .iter()
3992 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3993 .expect("expected generated Lean file");
3994 assert!(lean.contains("def normalize__fuel"));
3995 assert!(lean.contains("normalize__fuel ((Int.natAbs angle) + 1) angle"));
3996 }
3997
3998 #[test]
3999 fn proof_mode_accepts_single_list_structural_recursion() {
4000 let mut ctx = empty_ctx();
4001 let len = FnDef {
4002 name: "len".to_string(),
4003 line: 1,
4004 params: vec![("xs".to_string(), "List<Int>".to_string())],
4005 return_type: "Int".to_string(),
4006 effects: vec![],
4007 desc: None,
4008 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
4009 subject: sbb(Expr::Ident("xs".to_string())),
4010 arms: vec![
4011 MatchArm {
4012 pattern: Pattern::EmptyList,
4013 body: sbb(Expr::Literal(Literal::Int(0))),
4014 },
4015 MatchArm {
4016 pattern: Pattern::Cons("h".to_string(), "t".to_string()),
4017 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
4018 "len".to_string(),
4019 vec![sb(Expr::Ident("t".to_string()))],
4020 )))),
4021 },
4022 ],
4023 }))),
4024 resolution: None,
4025 };
4026 ctx.items.push(TopLevel::FnDef(len.clone()));
4027 ctx.fn_defs.push(len);
4028
4029 let issues = proof_mode_issues(&ctx);
4030 assert!(
4031 issues.is_empty(),
4032 "expected List structural recursion to be accepted, got: {:?}",
4033 issues
4034 );
4035 }
4036
4037 #[test]
4038 fn proof_mode_accepts_single_list_structural_recursion_on_nonfirst_param() {
4039 let mut ctx = empty_ctx();
4040 let len_from = FnDef {
4041 name: "lenFrom".to_string(),
4042 line: 1,
4043 params: vec![
4044 ("count".to_string(), "Int".to_string()),
4045 ("xs".to_string(), "List<Int>".to_string()),
4046 ],
4047 return_type: "Int".to_string(),
4048 effects: vec![],
4049 desc: None,
4050 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
4051 subject: sbb(Expr::Ident("xs".to_string())),
4052 arms: vec![
4053 MatchArm {
4054 pattern: Pattern::EmptyList,
4055 body: sbb(Expr::Ident("count".to_string())),
4056 },
4057 MatchArm {
4058 pattern: Pattern::Cons("h".to_string(), "t".to_string()),
4059 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
4060 "lenFrom".to_string(),
4061 vec![
4062 sb(Expr::BinOp(
4063 BinOp::Add,
4064 sbb(Expr::Ident("count".to_string())),
4065 sbb(Expr::Literal(Literal::Int(1))),
4066 )),
4067 sb(Expr::Ident("t".to_string())),
4068 ],
4069 )))),
4070 },
4071 ],
4072 }))),
4073 resolution: None,
4074 };
4075 ctx.items.push(TopLevel::FnDef(len_from.clone()));
4076 ctx.fn_defs.push(len_from);
4077
4078 let issues = proof_mode_issues(&ctx);
4079 assert!(
4080 issues.is_empty(),
4081 "expected non-first List structural recursion to be accepted, got: {:?}",
4082 issues
4083 );
4084
4085 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4086 let lean = generated_lean_file(&out);
4087 assert!(lean.contains("termination_by xs.length"));
4088 assert!(!lean.contains("partial def lenFrom"));
4089 }
4090
4091 #[test]
4092 fn proof_mode_accepts_single_string_pos_advance_recursion() {
4093 let mut ctx = empty_ctx();
4094 let skip_ws = FnDef {
4095 name: "skipWs".to_string(),
4096 line: 1,
4097 params: vec![
4098 ("s".to_string(), "String".to_string()),
4099 ("pos".to_string(), "Int".to_string()),
4100 ],
4101 return_type: "Int".to_string(),
4102 effects: vec![],
4103 desc: None,
4104 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
4105 subject: sbb(Expr::FnCall(
4106 sbb(Expr::Attr(
4107 sbb(Expr::Ident("String".to_string())),
4108 "charAt".to_string(),
4109 )),
4110 vec![
4111 sb(Expr::Ident("s".to_string())),
4112 sb(Expr::Ident("pos".to_string())),
4113 ],
4114 )),
4115 arms: vec![
4116 MatchArm {
4117 pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
4118 body: sbb(Expr::Ident("pos".to_string())),
4119 },
4120 MatchArm {
4121 pattern: Pattern::Wildcard,
4122 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
4123 "skipWs".to_string(),
4124 vec![
4125 sb(Expr::Ident("s".to_string())),
4126 sb(Expr::BinOp(
4127 BinOp::Add,
4128 sbb(Expr::Ident("pos".to_string())),
4129 sbb(Expr::Literal(Literal::Int(1))),
4130 )),
4131 ],
4132 )))),
4133 },
4134 ],
4135 }))),
4136 resolution: None,
4137 };
4138 ctx.items.push(TopLevel::FnDef(skip_ws.clone()));
4139 ctx.fn_defs.push(skip_ws);
4140
4141 let issues = proof_mode_issues(&ctx);
4142 assert!(
4143 issues.is_empty(),
4144 "expected String+pos recursion to be accepted, got: {:?}",
4145 issues
4146 );
4147
4148 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4149 let lean = generated_lean_file(&out);
4150 assert!(lean.contains("def skipWs__fuel"));
4151 assert!(!lean.contains("partial def skipWs"));
4152 }
4153
    #[test]
    // Mutually-recursive even/odd countdown: each function matches on its Int
    // parameter and tail-calls the OTHER with n - 1. Proof mode must accept the
    // mutual cycle and emit fuel workers for both functions.
    fn proof_mode_accepts_mutual_int_countdown_recursion() {
        let mut ctx = empty_ctx();
        // even(n): 0 -> true, _ -> odd(n - 1)
        let even = FnDef {
            name: "even".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Bool".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: sbb(Expr::Literal(Literal::Bool(true))),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "odd".to_string(),
                            vec![sb(Expr::BinOp(
                                BinOp::Sub,
                                sbb(Expr::Ident("n".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            ))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        // odd(n): 0 -> false, _ -> even(n - 1)
        let odd = FnDef {
            name: "odd".to_string(),
            line: 2,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Bool".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(sb(Expr::Match {
                subject: sbb(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: sbb(Expr::Literal(Literal::Bool(false))),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: sbb(Expr::TailCall(Box::new(TailCallData::new(
                            "even".to_string(),
                            vec![sb(Expr::BinOp(
                                BinOp::Sub,
                                sbb(Expr::Ident("n".to_string())),
                                sbb(Expr::Literal(Literal::Int(1))),
                            ))],
                        )))),
                    },
                ],
            }))),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(even.clone()));
        ctx.items.push(TopLevel::FnDef(odd.clone()));
        ctx.fn_defs.push(even);
        ctx.fn_defs.push(odd);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual Int countdown recursion to be accepted, got: {:?}",
            issues
        );

        // Both members of the cycle get fuel workers; the public wrapper feeds
        // |n| + 1 units of fuel.
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("def even__fuel"));
        assert!(lean.contains("def odd__fuel"));
        assert!(lean.contains("def even (n : Int) : Bool :="));
        assert!(lean.contains("even__fuel ((Int.natAbs n) + 1) n"));
    }
4234
4235 #[test]
4236 fn proof_mode_accepts_mutual_string_pos_recursion_with_ranked_same_edges() {
4237 let mut ctx = empty_ctx();
4238 let f = FnDef {
4239 name: "f".to_string(),
4240 line: 1,
4241 params: vec![
4242 ("s".to_string(), "String".to_string()),
4243 ("pos".to_string(), "Int".to_string()),
4244 ],
4245 return_type: "Int".to_string(),
4246 effects: vec![],
4247 desc: None,
4248 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
4249 subject: sbb(Expr::BinOp(
4250 BinOp::Gte,
4251 sbb(Expr::Ident("pos".to_string())),
4252 sbb(Expr::Literal(Literal::Int(3))),
4253 )),
4254 arms: vec![
4255 MatchArm {
4256 pattern: Pattern::Literal(Literal::Bool(true)),
4257 body: sbb(Expr::Ident("pos".to_string())),
4258 },
4259 MatchArm {
4260 pattern: Pattern::Wildcard,
4261 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
4262 "g".to_string(),
4263 vec![
4264 sb(Expr::Ident("s".to_string())),
4265 sb(Expr::Ident("pos".to_string())),
4266 ],
4267 )))),
4268 },
4269 ],
4270 }))),
4271 resolution: None,
4272 };
4273 let g = FnDef {
4274 name: "g".to_string(),
4275 line: 2,
4276 params: vec![
4277 ("s".to_string(), "String".to_string()),
4278 ("pos".to_string(), "Int".to_string()),
4279 ],
4280 return_type: "Int".to_string(),
4281 effects: vec![],
4282 desc: None,
4283 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
4284 subject: sbb(Expr::BinOp(
4285 BinOp::Gte,
4286 sbb(Expr::Ident("pos".to_string())),
4287 sbb(Expr::Literal(Literal::Int(3))),
4288 )),
4289 arms: vec![
4290 MatchArm {
4291 pattern: Pattern::Literal(Literal::Bool(true)),
4292 body: sbb(Expr::Ident("pos".to_string())),
4293 },
4294 MatchArm {
4295 pattern: Pattern::Wildcard,
4296 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
4297 "f".to_string(),
4298 vec![
4299 sb(Expr::Ident("s".to_string())),
4300 sb(Expr::BinOp(
4301 BinOp::Add,
4302 sbb(Expr::Ident("pos".to_string())),
4303 sbb(Expr::Literal(Literal::Int(1))),
4304 )),
4305 ],
4306 )))),
4307 },
4308 ],
4309 }))),
4310 resolution: None,
4311 };
4312 ctx.items.push(TopLevel::FnDef(f.clone()));
4313 ctx.items.push(TopLevel::FnDef(g.clone()));
4314 ctx.fn_defs.push(f);
4315 ctx.fn_defs.push(g);
4316
4317 let issues = proof_mode_issues(&ctx);
4318 assert!(
4319 issues.is_empty(),
4320 "expected mutual String+pos recursion to be accepted, got: {:?}",
4321 issues
4322 );
4323
4324 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4325 let lean = generated_lean_file(&out);
4326 assert!(lean.contains("def f__fuel"));
4327 assert!(lean.contains("def g__fuel"));
4328 assert!(!lean.contains("partial def f"));
4329 }
4330
4331 #[test]
4332 fn proof_mode_accepts_mutual_ranked_sizeof_recursion() {
4333 let mut ctx = empty_ctx();
4334 let f = FnDef {
4335 name: "f".to_string(),
4336 line: 1,
4337 params: vec![("xs".to_string(), "List<Int>".to_string())],
4338 return_type: "Int".to_string(),
4339 effects: vec![],
4340 desc: None,
4341 body: Rc::new(FnBody::from_expr(sb(Expr::TailCall(Box::new(
4342 TailCallData::new(
4343 "g".to_string(),
4344 vec![
4345 sb(Expr::Literal(Literal::Str("acc".to_string()))),
4346 sb(Expr::Ident("xs".to_string())),
4347 ],
4348 ),
4349 ))))),
4350 resolution: None,
4351 };
4352 let g = FnDef {
4353 name: "g".to_string(),
4354 line: 2,
4355 params: vec![
4356 ("acc".to_string(), "String".to_string()),
4357 ("xs".to_string(), "List<Int>".to_string()),
4358 ],
4359 return_type: "Int".to_string(),
4360 effects: vec![],
4361 desc: None,
4362 body: Rc::new(FnBody::from_expr(sb(Expr::Match {
4363 subject: sbb(Expr::Ident("xs".to_string())),
4364 arms: vec![
4365 MatchArm {
4366 pattern: Pattern::EmptyList,
4367 body: sbb(Expr::Literal(Literal::Int(0))),
4368 },
4369 MatchArm {
4370 pattern: Pattern::Cons("h".to_string(), "t".to_string()),
4371 body: sbb(Expr::TailCall(Box::new(TailCallData::new(
4372 "f".to_string(),
4373 vec![sb(Expr::Ident("t".to_string()))],
4374 )))),
4375 },
4376 ],
4377 }))),
4378 resolution: None,
4379 };
4380 ctx.items.push(TopLevel::FnDef(f.clone()));
4381 ctx.items.push(TopLevel::FnDef(g.clone()));
4382 ctx.fn_defs.push(f);
4383 ctx.fn_defs.push(g);
4384
4385 let issues = proof_mode_issues(&ctx);
4386 assert!(
4387 issues.is_empty(),
4388 "expected mutual ranked-sizeOf recursion to be accepted, got: {:?}",
4389 issues
4390 );
4391
4392 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4393 let lean = generated_lean_file(&out);
4394 assert!(lean.contains("mutual"));
4395 assert!(lean.contains("def f__fuel"));
4396 assert!(lean.contains("def g__fuel"));
4397 assert!(!lean.contains("partial def f"));
4398 assert!(!lean.contains("partial def g"));
4399 }
4400
4401 #[test]
4402 fn proof_mode_rejects_recursive_pure_functions() {
4403 let mut ctx = empty_ctx();
4404 let recursive_fn = FnDef {
4405 name: "loop".to_string(),
4406 line: 1,
4407 params: vec![("n".to_string(), "Int".to_string())],
4408 return_type: "Int".to_string(),
4409 effects: vec![],
4410 desc: None,
4411 body: Rc::new(FnBody::from_expr(sb(Expr::FnCall(
4412 sbb(Expr::Ident("loop".to_string())),
4413 vec![sb(Expr::Ident("n".to_string()))],
4414 )))),
4415 resolution: None,
4416 };
4417 ctx.items.push(TopLevel::FnDef(recursive_fn.clone()));
4418 ctx.fn_defs.push(recursive_fn);
4419
4420 let issues = proof_mode_issues(&ctx);
4421 assert!(
4422 issues.iter().any(|i| i.contains("outside proof subset")),
4423 "expected recursive function blocker, got: {:?}",
4424 issues
4425 );
4426 }
4427
4428 #[test]
4429 fn proof_mode_allows_recursive_types() {
4430 let mut ctx = empty_ctx();
4431 let recursive_type = TypeDef::Sum {
4432 name: "Node".to_string(),
4433 variants: vec![TypeVariant {
4434 name: "Cons".to_string(),
4435 fields: vec!["Node".to_string()],
4436 }],
4437 line: 1,
4438 };
4439 ctx.items.push(TopLevel::TypeDef(recursive_type.clone()));
4440 ctx.type_defs.push(recursive_type);
4441
4442 let issues = proof_mode_issues(&ctx);
4443 assert!(
4444 issues
4445 .iter()
4446 .all(|i| !i.contains("recursive types require unsafe DecidableEq shim")),
4447 "did not expect recursive type blocker, got: {:?}",
4448 issues
4449 );
4450 }
4451
4452 #[test]
4453 fn law_auto_example_exports_real_proof_artifacts() {
4454 let ctx = ctx_from_source(
4455 include_str!("../../../examples/formal/law_auto.av"),
4456 "law_auto",
4457 );
4458 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4459 let lean = generated_lean_file(&out);
4460
4461 assert!(lean.contains("theorem add_law_commutative :"));
4462 assert!(lean.contains("theorem id'_law_reflexive : ∀ (x : Int), x = x := by"));
4463 assert!(lean.contains("theorem incCount_law_keyPresent :"));
4464 assert!(lean.contains("AverMap.has_set_self"));
4465 assert!(lean.contains("theorem add_law_commutative_sample_1 :"));
4466 assert!(lean.contains(":= by native_decide"));
4467 }
4468
4469 #[test]
4470 fn json_example_stays_inside_proof_subset() {
4471 let ctx = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
4472 let issues = proof_mode_issues(&ctx);
4473 assert!(
4474 issues.is_empty(),
4475 "expected json example to stay inside proof subset, got: {:?}",
4476 issues
4477 );
4478 }
4479
4480 #[test]
4481 fn json_example_uses_total_defs_and_domain_guarded_laws_in_proof_mode() {
4482 let ctx = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
4483 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4484 let lean = generated_lean_file(&out);
4485
4486 assert!(!lean.contains("partial def"));
4487 assert!(lean.contains("def skipWs__fuel"));
4488 assert!(lean.contains("def parseValue__fuel"));
4489 assert!(lean.contains("def toString' (j : Json) : String :="));
4490 assert!(
4491 lean.contains(
4492 "def averMeasureJsonEntries_String (items : List (String × Json)) : Nat :="
4493 )
4494 );
4495 assert!(lean.contains(
4496 "| .jsonObject x0 => (averMeasureJsonEntries_String (AverMap.entries x0)) + 1"
4497 ));
4498 assert!(lean.contains("-- when jsonRoundtripSafe j"));
4499 assert!(!lean.contains("-- hint: verify law '"));
4500 assert!(!lean.contains("private theorem toString'_law_parseRoundtrip_aux"));
4501 assert!(
4502 lean.contains(
4503 "theorem toString'_law_parseRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
4504 )
4505 );
4506 assert!(lean.contains(
4507 "jsonRoundtripSafe j = true -> fromString (toString' j) = Except.ok j := by"
4508 ));
4509 assert!(
4510 lean.contains("theorem finishFloat_law_fromCanonicalFloat : ∀ (f : Float), f = 3.5 ∨")
4511 );
4512 assert!(lean.contains("theorem finishInt_law_fromCanonicalInt_checked_domain :"));
4513 assert!(lean.contains(
4514 "theorem toString'_law_parseValueRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
4515 ));
4516 assert!(lean.contains("theorem toString'_law_parseRoundtrip_sample_1 :"));
4517 assert!(lean.contains(
4518 "example : fromString \"null\" = Except.ok Json.jsonNull := by native_decide"
4519 ));
4520 }
4521
4522 #[test]
4523 fn transpile_injects_builtin_network_types_and_vector_get_support() {
4524 let ctx = ctx_from_source(
4525 r#"
4526fn firstOrMissing(xs: Vector<String>) -> Result<String, String>
4527 Option.toResult(Vector.get(xs, 0), "missing")
4528
4529fn defaultHeader() -> Header
4530 Header(name = "Content-Type", value = "application/json")
4531
4532fn mkResponse(body: String) -> HttpResponse
4533 HttpResponse(status = 200, body = body, headers = [defaultHeader()])
4534
4535fn requestPath(req: HttpRequest) -> String
4536 req.path
4537
4538fn connPort(conn: Tcp.Connection) -> Int
4539 conn.port
4540"#,
4541 "network_helpers",
4542 );
4543 let out = transpile(&ctx);
4544 let lean = generated_lean_file(&out);
4545
4546 assert!(lean.contains("structure Header where"));
4547 assert!(lean.contains("structure HttpResponse where"));
4548 assert!(lean.contains("structure HttpRequest where"));
4549 assert!(lean.contains("structure Tcp_Connection where"));
4550 assert!(lean.contains("port : Int"));
4551 }
4552
4553 #[test]
4554 fn law_auto_example_has_no_sorry_in_proof_mode() {
4555 let ctx = ctx_from_source(
4556 include_str!("../../../examples/formal/law_auto.av"),
4557 "law_auto",
4558 );
4559 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4560 let lean = generated_lean_file(&out);
4561 assert!(
4562 !lean.contains("sorry"),
4563 "expected law_auto proof export to avoid sorry, got:\n{}",
4564 lean
4565 );
4566 }
4567
4568 #[test]
4569 fn map_example_has_no_sorry_in_proof_mode() {
4570 let ctx = ctx_from_source(include_str!("../../../examples/data/map.av"), "map");
4571 let issues = proof_mode_issues(&ctx);
4572 assert!(
4573 issues.is_empty(),
4574 "expected map example to stay inside proof subset, got: {:?}",
4575 issues
4576 );
4577
4578 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4579 let lean = generated_lean_file(&out);
4580 assert!(lean.contains("theorem incCount_law_trackedCountStepsByOne :"));
4582 assert!(lean.contains("sorry"));
4583 assert!(lean.contains("theorem countWords_law_presenceMatchesContains_sample_1 :"));
4585 assert!(lean.contains("theorem countWords_law_trackedWordCount_sample_1 :"));
4586 assert!(lean.contains("AverMap.has_set_self"));
4587 assert!(lean.contains("AverMap.get_set_self"));
4588 }
4589
4590 #[test]
4591 fn spec_laws_example_has_no_sorry_in_proof_mode() {
4592 let ctx = ctx_from_source(
4593 include_str!("../../../examples/formal/spec_laws.av"),
4594 "spec_laws",
4595 );
4596 let issues = proof_mode_issues(&ctx);
4597 assert!(
4598 issues.is_empty(),
4599 "expected spec_laws example to stay inside proof subset, got: {:?}",
4600 issues
4601 );
4602
4603 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4604 let lean = generated_lean_file(&out);
4605 assert!(
4606 !lean.contains("sorry"),
4607 "expected spec_laws proof export to avoid sorry, got:\n{}",
4608 lean
4609 );
4610 assert!(lean.contains("theorem absVal_eq_absValSpec :"));
4611 assert!(lean.contains("theorem clampNonNegative_eq_clampNonNegativeSpec :"));
4612 }
4613
4614 #[test]
4615 fn rle_example_exports_sampled_roundtrip_laws_without_sorry() {
4616 let ctx = ctx_from_source(include_str!("../../../examples/data/rle.av"), "rle");
4617 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4618 let lean = generated_lean_file(&out);
4619
4620 assert!(
4621 lean.contains("sorry"),
4622 "expected rle proof export to contain sorry for unproved universal theorems"
4623 );
4624 assert!(lean.contains(
4625 "theorem encode_law_roundtrip_sample_1 : decode (encode []) = [] := by native_decide"
4626 ));
4627 assert!(lean.contains(
4628 "theorem encodeString_law_string_roundtrip_sample_1 : decodeString (encodeString \"\") = \"\" := by native_decide"
4629 ));
4630 }
4631
4632 #[test]
4633 fn fibonacci_example_uses_fuelized_int_countdown_in_proof_mode() {
4634 let ctx = ctx_from_source(
4635 include_str!("../../../examples/data/fibonacci.av"),
4636 "fibonacci",
4637 );
4638 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4639 let lean = generated_lean_file(&out);
4640
4641 assert!(lean.contains("def fibTR__fuel"));
4642 assert!(lean.contains("def fibTR (n : Int) (a : Int) (b : Int) : Int :="));
4643 assert!(lean.contains("fibTR__fuel ((Int.natAbs n) + 1) n a b"));
4644 assert!(!lean.contains("partial def fibTR"));
4645 }
4646
4647 #[test]
4648 fn fibonacci_example_stays_inside_proof_subset() {
4649 let ctx = ctx_from_source(
4650 include_str!("../../../examples/data/fibonacci.av"),
4651 "fibonacci",
4652 );
4653 let issues = proof_mode_issues(&ctx);
4654 assert!(
4655 issues.is_empty(),
4656 "expected fibonacci example to stay inside proof subset, got: {:?}",
4657 issues
4658 );
4659 }
4660
4661 #[test]
4662 fn fibonacci_example_matches_general_linear_recurrence_shapes() {
4663 let ctx = ctx_from_source(
4664 include_str!("../../../examples/data/fibonacci.av"),
4665 "fibonacci",
4666 );
4667 let fib = ctx.fn_defs.iter().find(|fd| fd.name == "fib").unwrap();
4668 let fib_tr = ctx.fn_defs.iter().find(|fd| fd.name == "fibTR").unwrap();
4669 let fib_spec = ctx.fn_defs.iter().find(|fd| fd.name == "fibSpec").unwrap();
4670
4671 assert!(recurrence::detect_tailrec_int_linear_pair_wrapper(fib).is_some());
4672 assert!(recurrence::detect_tailrec_int_linear_pair_worker(fib_tr).is_some());
4673 assert!(recurrence::detect_second_order_int_linear_recurrence(fib_spec).is_some());
4674 }
4675
4676 #[test]
4677 fn fibonacci_example_auto_proves_general_linear_recurrence_spec_law() {
4678 let ctx = ctx_from_source(
4679 include_str!("../../../examples/data/fibonacci.av"),
4680 "fibonacci",
4681 );
4682 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4683 let lean = generated_lean_file(&out);
4684
4685 assert!(lean.contains("private def fibSpec__nat : Nat -> Int"));
4686 assert!(!lean.contains("partial def fibSpec"));
4687 assert!(lean.contains("private theorem fib_eq_fibSpec__worker_nat_shift"));
4688 assert!(lean.contains("private theorem fib_eq_fibSpec__helper_nat"));
4689 assert!(lean.contains("private theorem fib_eq_fibSpec__helper_seed"));
4690 assert!(lean.contains("theorem fib_eq_fibSpec : ∀ (n : Int), fib n = fibSpec n := by"));
4691 assert!(!lean.contains(
4692 "-- universal theorem fib_eq_fibSpec omitted: sampled law shape is not auto-proved yet"
4693 ));
4694 }
4695
4696 #[test]
4697 fn pell_like_example_auto_proves_same_general_shape() {
4698 let ctx = ctx_from_source(
4699 r#"
4700module Pell
4701 intent =
4702 "linear recurrence probe"
4703
4704fn pellTR(n: Int, a: Int, b: Int) -> Int
4705 match n
4706 0 -> a
4707 _ -> pellTR(n - 1, b, a + 2 * b)
4708
4709fn pell(n: Int) -> Int
4710 match n < 0
4711 true -> 0
4712 false -> pellTR(n, 0, 1)
4713
4714fn pellSpec(n: Int) -> Int
4715 match n < 0
4716 true -> 0
4717 false -> match n
4718 0 -> 0
4719 1 -> 1
4720 _ -> pellSpec(n - 2) + 2 * pellSpec(n - 1)
4721
4722verify pell law pellSpec
4723 given n: Int = [0, 1, 2, 3]
4724 pell(n) => pellSpec(n)
4725"#,
4726 "pell",
4727 );
4728 let issues = proof_mode_issues(&ctx);
4729 assert!(
4730 issues.is_empty(),
4731 "expected pell example to stay inside proof subset, got: {:?}",
4732 issues
4733 );
4734
4735 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4736 let lean = generated_lean_file(&out);
4737 assert!(lean.contains("private def pellSpec__nat : Nat -> Int"));
4738 assert!(lean.contains("private theorem pell_eq_pellSpec__worker_nat_shift"));
4739 assert!(lean.contains("theorem pell_eq_pellSpec : ∀ (n : Int), pell n = pellSpec n := by"));
4740 assert!(!lean.contains(
4741 "-- universal theorem pell_eq_pellSpec omitted: sampled law shape is not auto-proved yet"
4742 ));
4743 }
4744
4745 #[test]
4746 fn nonlinear_pair_state_recurrence_is_not_auto_proved_as_linear_shape() {
4747 let ctx = ctx_from_source(
4748 r#"
4749module WeirdRec
4750 intent =
4751 "reject nonlinear pair-state recurrence from linear recurrence prover"
4752
4753fn weirdTR(n: Int, a: Int, b: Int) -> Int
4754 match n
4755 0 -> a
4756 _ -> weirdTR(n - 1, b, a * b)
4757
4758fn weird(n: Int) -> Int
4759 match n < 0
4760 true -> 0
4761 false -> weirdTR(n, 0, 1)
4762
4763fn weirdSpec(n: Int) -> Int
4764 match n < 0
4765 true -> 0
4766 false -> match n
4767 0 -> 0
4768 1 -> 1
4769 _ -> weirdSpec(n - 1) * weirdSpec(n - 2)
4770
4771verify weird law weirdSpec
4772 given n: Int = [0, 1, 2, 3]
4773 weird(n) => weirdSpec(n)
4774"#,
4775 "weirdrec",
4776 );
4777 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4778 let lean = generated_lean_file(&out);
4779
4780 assert!(lean.contains("sorry"));
4782 assert!(!lean.contains("private theorem weird_eq_weirdSpec__worker_nat_shift"));
4783 assert!(lean.contains("theorem weird_eq_weirdSpec_sample_1 :"));
4784 }
4785
4786 #[test]
4787 fn date_example_stays_inside_proof_subset() {
4788 let ctx = ctx_from_source(include_str!("../../../examples/data/date.av"), "date");
4789 let issues = proof_mode_issues(&ctx);
4790 assert!(
4791 issues.is_empty(),
4792 "expected date example to stay inside proof subset, got: {:?}",
4793 issues
4794 );
4795
4796 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4797 let lean = generated_lean_file(&out);
4798 assert!(!lean.contains("partial def"));
4799 assert!(lean.contains("def parseIntSlice (s : String) (from' : Int) (to : Int) : Int :="));
4800 }
4801
4802 #[test]
4803 fn temperature_example_stays_inside_proof_subset() {
4804 let ctx = ctx_from_source(
4805 include_str!("../../../examples/core/temperature.av"),
4806 "temperature",
4807 );
4808 let issues = proof_mode_issues(&ctx);
4809 assert!(
4810 issues.is_empty(),
4811 "expected temperature example to stay inside proof subset, got: {:?}",
4812 issues
4813 );
4814
4815 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4816 let lean = generated_lean_file(&out);
4817 assert!(!lean.contains("partial def"));
4818 assert!(
4819 lean.contains("example : celsiusToFahr 0.0 = 32.0 := by native_decide"),
4820 "expected verify examples to survive proof export, got:\n{}",
4821 lean
4822 );
4823 }
4824
4825 #[test]
4826 fn quicksort_example_stays_inside_proof_subset() {
4827 let ctx = ctx_from_source(
4828 include_str!("../../../examples/data/quicksort.av"),
4829 "quicksort",
4830 );
4831 let issues = proof_mode_issues(&ctx);
4832 assert!(
4833 issues.is_empty(),
4834 "expected quicksort example to stay inside proof subset, got: {:?}",
4835 issues
4836 );
4837
4838 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4839 let lean = generated_lean_file(&out);
4840 assert!(lean.contains("def isOrderedFrom"));
4841 assert!(!lean.contains("partial def isOrderedFrom"));
4842 assert!(lean.contains("termination_by xs.length"));
4843 }
4844
4845 #[test]
4846 fn grok_s_language_example_uses_total_ranked_sizeof_mutual_recursion() {
4847 let ctx = ctx_from_source(
4848 include_str!("../../../examples/core/grok_s_language.av"),
4849 "grok_s_language",
4850 );
4851 let issues = proof_mode_issues(&ctx);
4852 assert!(
4853 issues.is_empty(),
4854 "expected grok_s_language example to stay inside proof subset, got: {:?}",
4855 issues
4856 );
4857
4858 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4859 let lean = generated_lean_file(&out);
4860 assert!(lean.contains("mutual"));
4861 assert!(lean.contains("def eval__fuel"));
4862 assert!(lean.contains("def parseListItems__fuel"));
4863 assert!(!lean.contains("partial def eval"));
4864 assert!(!lean.contains("termination_by (sizeOf e,"));
4865 assert!(lean.contains("-- when validSymbolNames e"));
4866 assert!(!lean.contains("private theorem toString'_law_parseRoundtrip_aux"));
4867 assert!(lean.contains(
4868 "theorem toString'_law_parseRoundtrip : ∀ (e : Sexpr), e = Sexpr.atomNum 42 ∨"
4869 ));
4870 assert!(
4871 lean.contains("validSymbolNames e = true -> parse (toString' e) = Except.ok e := by")
4872 );
4873 assert!(lean.contains("theorem toString'_law_parseSexprRoundtrip :"));
4874 assert!(lean.contains("theorem toString'_law_parseRoundtrip_sample_1 :"));
4875 }
4876
4877 #[test]
4878 fn lambda_example_keeps_only_eval_outside_proof_subset() {
4879 let ctx = ctx_from_source(include_str!("../../../examples/core/lambda.av"), "lambda");
4880 let issues = proof_mode_issues(&ctx);
4881 assert_eq!(
4882 issues,
4883 vec!["recursive function 'eval' is outside proof subset (currently supported: Int countdown, second-order affine Int recurrences with pair-state worker, structural recursion on List/recursive ADTs, String+position, mutual Int countdown, mutual String+position, and ranked sizeOf recursion)".to_string()]
4884 );
4885
4886 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4887 let lean = generated_lean_file(&out);
4888 assert!(lean.contains("def termToString__fuel"));
4889 assert!(lean.contains("def subst__fuel"));
4890 assert!(lean.contains("def countS__fuel"));
4891 assert!(!lean.contains("partial def termToString"));
4892 assert!(!lean.contains("partial def subst"));
4893 assert!(!lean.contains("partial def countS"));
4894 assert!(lean.contains("partial def eval"));
4895 }
4896
4897 #[test]
4898 fn mission_control_example_stays_inside_proof_subset() {
4899 let ctx = ctx_from_source(
4900 include_str!("../../../examples/apps/mission_control.av"),
4901 "mission_control",
4902 );
4903 let issues = proof_mode_issues(&ctx);
4904 assert!(
4905 issues.is_empty(),
4906 "expected mission_control example to stay inside proof subset, got: {:?}",
4907 issues
4908 );
4909
4910 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4911 let lean = generated_lean_file(&out);
4912 assert!(!lean.contains("partial def normalizeAngle"));
4913 assert!(lean.contains("def normalizeAngle__fuel"));
4914 }
4915
4916 #[test]
4917 fn notepad_store_example_stays_inside_proof_subset() {
4918 let ctx = ctx_from_source(
4919 include_str!("../../../examples/apps/notepad/store.av"),
4920 "notepad_store",
4921 );
4922 let issues = proof_mode_issues(&ctx);
4923 assert!(
4924 issues.is_empty(),
4925 "expected notepad/store example to stay inside proof subset, got: {:?}",
4926 issues
4927 );
4928
4929 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4930 let lean = generated_lean_file(&out);
4931 assert!(lean.contains("def deserializeLine (line : String) : Except String Note :="));
4932 assert!(lean.contains("Except String (List Note)"));
4933 assert!(!lean.contains("partial def deserializeLine"));
4934 assert!(lean.contains("-- when noteRoundtripSafe note"));
4935 assert!(lean.contains("-- when notesRoundtripSafe notes"));
4936 assert!(lean.contains(
4937 "theorem serializeLine_law_lineRoundtrip : ∀ (note : Note), note = { id' := 1, title := \"Hello\", body := \"World\" : Note } ∨"
4938 ));
4939 assert!(lean.contains(
4940 "theorem serializeLines_law_notesRoundtrip : ∀ (notes : List Note), notes = [] ∨"
4941 ));
4942 assert!(lean.contains("notesRoundtripSafe notes = true ->"));
4943 assert!(lean.contains("parseNotes (s!\"{String.intercalate \"\\n\" (serializeLines notes)}\\n\") = Except.ok notes"));
4944 assert!(lean.contains("theorem serializeLine_law_lineRoundtrip_sample_1 :"));
4945 assert!(lean.contains("theorem serializeLines_law_notesRoundtrip_sample_1 :"));
4946 }
4947}