1mod builtins;
7mod expr;
8mod law_auto;
9mod pattern;
10mod recurrence;
11mod shared;
12mod toplevel;
13mod types;
14
15use std::collections::{HashMap, HashSet};
16
17use crate::ast::{
18 BinOp, Expr, FnBody, FnDef, MatchArm, Pattern, Stmt, TopLevel, TypeDef, VerifyKind,
19};
20use crate::call_graph;
21use crate::codegen::{CodegenContext, ProjectOutput};
22
/// How a `verify` block's obligation is emitted into the generated Lean file.
///
/// Variant meanings are inferred from the names; the emitter modules
/// (`law_auto`, `toplevel`) are authoritative — confirm there.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum VerifyEmitMode {
    /// Discharge the obligation with a native decision procedure
    /// (presumably Lean's `decide`/`native_decide` — TODO confirm).
    NativeDecide,
    /// Emit the obligation with a `sorry` placeholder proof.
    Sorry,
    /// Emit only a theorem statement skeleton for a human to prove.
    TheoremSkeleton,
}
35
/// Termination strategy chosen for a recursive pure function when it is
/// translated to Lean.
///
/// Variant meanings below are inferred from the names; see the
/// `recurrence` module for the authoritative selection logic.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum RecursionPlan {
    /// An integer parameter (at `param_index`) counts down toward a base case.
    IntCountdown { param_index: usize },
    /// Two-term linear recurrence (Fibonacci-style).
    LinearRecurrence2,
    /// Structural recursion on the list parameter at `param_index`.
    ListStructural { param_index: usize },
    /// Termination measured by a structural size-of function.
    SizeOfStructural,
    /// Recursion advances a position within a string.
    StringPosAdvance,
    /// Mutual recursion where each function counts an integer down.
    MutualIntCountdown,
    /// Mutual recursion advancing a string position; `rank` orders the
    /// function within its mutual group.
    MutualStringPosAdvance { rank: usize },
    /// Mutual recursion on a size measure; `rank` orders the function
    /// within its mutual group.
    MutualSizeOfRanked { rank: usize },
}
61
/// A diagnostic raised while checking a program for proof-mode emission.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct ProofModeIssue {
    // Source line the issue refers to — presumably 1-based; confirm
    // against the code that constructs these.
    pub line: usize,
    // Human-readable description of the problem.
    pub message: String,
}
67
/// File header comment and linter options emitted at the top of every
/// generated Lean file.
const LEAN_PRELUDE_HEADER: &str = r#"-- Generated by the Aver → Lean 4 transpiler
-- Pure core logic only (effectful functions are omitted)

set_option linter.unusedVariables false

-- Prelude: helper definitions for Aver builtins"#;

/// Lean instance coercing `Int` to `Float`.
const LEAN_PRELUDE_FLOAT_COE: &str = r#"instance : Coe Int Float := ⟨fun n => Float.ofInt n⟩"#;

/// `DecidableEq Float` shim: decides propositional equality by runtime
/// `==`, bridged through an `opaque` def via `@[implemented_by]`.
const LEAN_PRELUDE_FLOAT_DEC_EQ: &str = r#"private unsafe def Float.unsafeDecEq (a b : Float) : Decidable (a = b) :=
  if a == b then isTrue (unsafeCast ()) else isFalse (unsafeCast ())
@[implemented_by Float.unsafeDecEq]
private opaque Float.compDecEq (a b : Float) : Decidable (a = b)
instance : DecidableEq Float := Float.compDecEq"#;

/// Structural `DecidableEq` instance for `Except`.
const LEAN_PRELUDE_EXCEPT_DEC_EQ: &str = r#"instance [DecidableEq ε] [DecidableEq α] : DecidableEq (Except ε α)
  | .ok a, .ok b =>
    if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
  | .error a, .error b =>
    if h : a = b then isTrue (h ▸ rfl) else isFalse (by intro h'; cases h'; exact h rfl)
  | .ok _, .error _ => isFalse (by intro h; cases h)
  | .error _, .ok _ => isFalse (by intro h; cases h)"#;

/// `Except.withDefault`: unwrap an `Except`, falling back to `d` on error.
const LEAN_PRELUDE_EXCEPT_NS: &str = r#"namespace Except
def withDefault (r : Except ε α) (d : α) : α :=
  match r with
  | .ok v => v
  | .error _ => d
end Except"#;

/// `Option.toExcept`: turn `none` into `.error e`.
const LEAN_PRELUDE_OPTION_TO_EXCEPT: &str = r#"def Option.toExcept (o : Option α) (e : ε) : Except ε α :=
  match o with
  | some v => .ok v
  | none => .error e"#;

/// `+` on strings, delegating to `String.append`.
const LEAN_PRELUDE_STRING_HADD: &str = r#"instance : HAdd String String String := ⟨String.append⟩"#;

/// Fuel bound for string-position recursion proofs: remaining characters
/// (plus one) scaled by a per-rank budget.
const LEAN_PRELUDE_PROOF_FUEL: &str = r#"def averStringPosFuel (s : String) (pos : Int) (rankBudget : Nat) : Nat :=
  (((s.data.length) - pos.toNat) + 1) * rankBudget"#;

/// Size measures over `List`/`Option`/`Except`, used as termination
/// measures for generated recursive functions.
const LEAN_PRELUDE_AVER_MEASURE: &str = r#"namespace AverMeasure
def list (elemMeasure : α → Nat) : List α → Nat
  | [] => 1
  | x :: xs => elemMeasure x + list elemMeasure xs + 1
def option (elemMeasure : α → Nat) : Option α → Nat
  | none => 1
  | some x => elemMeasure x + 1
def except (errMeasure : ε → Nat) (okMeasure : α → Nat) : Except ε α → Nat
  | .error e => errMeasure e + 1
  | .ok v => okMeasure v + 1
end AverMeasure"#;
119
/// `AverMap`: maps modeled as association lists (`List (α × β)`), with the
/// usual get/set/has/remove/keys/values operations. `set` preserves entry
/// order and replaces in place via the nested `go`.
const AVER_MAP_PRELUDE_BASE: &str = r#"namespace AverMap
def empty : List (α × β) := []
def get [DecidableEq α] (m : List (α × β)) (k : α) : Option β :=
  match m with
  | [] => none
  | (k', v) :: rest => if k = k' then some v else AverMap.get rest k
def set [DecidableEq α] (m : List (α × β)) (k : α) (v : β) : List (α × β) :=
  let rec go : List (α × β) → List (α × β)
    | [] => [(k, v)]
    | (k', v') :: rest => if k = k' then (k, v) :: rest else (k', v') :: go rest
  go m
def has [DecidableEq α] (m : List (α × β)) (k : α) : Bool :=
  m.any (fun p => decide (k = p.1))
def remove [DecidableEq α] (m : List (α × β)) (k : α) : List (α × β) :=
  m.filter (fun p => !(decide (k = p.1)))
def keys (m : List (α × β)) : List α := m.map Prod.fst
def values (m : List (α × β)) : List β := m.map Prod.snd
def entries (m : List (α × β)) : List (α × β) := m
def len (m : List (α × β)) : Nat := m.length
def fromList (entries : List (α × β)) : List (α × β) := entries"#;

/// Lemma `has_set_self`: after `set m k v`, `has … k` holds.
const AVER_MAP_PRELUDE_HAS_SET_SELF: &str = r#"private theorem any_set_go_self [DecidableEq α] (k : α) (v : β) :
    ∀ (m : List (α × β)), List.any (AverMap.set.go k v m) (fun p => decide (k = p.1)) = true := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, List.any]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.set.go, List.any, h]
      · simp [AverMap.set.go, List.any, h, ih]

theorem has_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
    AverMap.has (AverMap.set m k v) k = true := by
  simpa [AverMap.has, AverMap.set] using any_set_go_self k v m"#;

/// Lemma `get_set_self`: after `set m k v`, `get … k = some v`.
const AVER_MAP_PRELUDE_GET_SET_SELF: &str = r#"private theorem get_set_go_self [DecidableEq α] (k : α) (v : β) :
    ∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) k = some v := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, AverMap.get]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.set.go, AverMap.get, h]
      · simp [AverMap.set.go, AverMap.get, h, ih]

theorem get_set_self [DecidableEq α] (m : List (α × β)) (k : α) (v : β) :
    AverMap.get (AverMap.set m k v) k = some v := by
  simpa [AverMap.set] using get_set_go_self k v m"#;

/// Lemma `get_set_other`: `set` does not disturb lookups of other keys.
const AVER_MAP_PRELUDE_GET_SET_OTHER: &str = r#"private theorem get_set_go_other [DecidableEq α] (k key : α) (v : β) (h : key ≠ k) :
    ∀ (m : List (α × β)), AverMap.get (AverMap.set.go k v m) key = AverMap.get m key := by
  intro m
  induction m with
  | nil =>
    simp [AverMap.set.go, AverMap.get, h]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases hk : k = k'
      · have hkey : key ≠ k' := by simpa [hk] using h
        simp [AverMap.set.go, AverMap.get, hk, hkey]
      · by_cases hkey : key = k'
        · simp [AverMap.set.go, AverMap.get, hk, hkey]
        · simp [AverMap.set.go, AverMap.get, hk, hkey, ih]

theorem get_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
    AverMap.get (AverMap.set m k v) key = AverMap.get m key := by
  simpa [AverMap.set] using get_set_go_other k key v h m"#;

/// Lemmas relating `has` to `get` and `has_set_other` (membership of other
/// keys is unchanged by `set`).
const AVER_MAP_PRELUDE_HAS_SET_OTHER: &str = r#"theorem has_eq_isSome_get [DecidableEq α] (m : List (α × β)) (k : α) :
    AverMap.has m k = (AverMap.get m k).isSome := by
  induction m with
  | nil =>
    simp [AverMap.has, AverMap.get]
  | cons p tl ih =>
    cases p with
    | mk k' v' =>
      by_cases h : k = k'
      · simp [AverMap.has, AverMap.get, List.any, h]
      · simpa [AverMap.has, AverMap.get, List.any, h] using ih

theorem has_set_other [DecidableEq α] (m : List (α × β)) (k key : α) (v : β) (h : key ≠ k) :
    AverMap.has (AverMap.set m k v) key = AverMap.has m key := by
  rw [AverMap.has_eq_isSome_get, AverMap.has_eq_isSome_get]
  simp [AverMap.get_set_other, h]"#;

/// Closes the `AverMap` namespace opened by `AVER_MAP_PRELUDE_BASE`.
const AVER_MAP_PRELUDE_END: &str = r#"end AverMap"#;
213
/// `AverList`: checked indexing by `Int` and an insertion sort built on
/// `Ord.compare`.
const LEAN_PRELUDE_AVER_LIST: &str = r#"namespace AverList
def get (xs : List α) (i : Int) : Option α :=
  if i < 0 then none else xs.get? i.toNat
private def insertSorted [Ord α] (x : α) : List α → List α
  | [] => [x]
  | y :: ys =>
    if compare x y == Ordering.lt || compare x y == Ordering.eq then
      x :: y :: ys
    else
      y :: insertSorted x ys
def sort [Ord α] (xs : List α) : List α :=
  xs.foldl (fun acc x => insertSorted x acc) []
end AverList"#;

/// `Header` record emitted for programs using HTTP builtins.
const LEAN_PRELUDE_HEADER_TYPE: &str = r#"structure Header where
  name : String
  value : String
  deriving Repr, BEq, Inhabited, DecidableEq"#;

/// `HttpResponse` record (status/body/headers).
const LEAN_PRELUDE_HTTP_RESPONSE_TYPE: &str = r#"structure HttpResponse where
  status : Int
  body : String
  headers : List Header
  deriving Repr, BEq, Inhabited, DecidableEq"#;

/// `HttpRequest` record (method/path/body/headers).
const LEAN_PRELUDE_HTTP_REQUEST_TYPE: &str = r#"structure HttpRequest where
  method : String
  path : String
  body : String
  headers : List Header
  deriving Repr, BEq, Inhabited, DecidableEq"#;

/// `Tcp_Connection` record (id/host/port).
const LEAN_PRELUDE_TCP_CONNECTION_TYPE: &str = r#"structure Tcp_Connection where
  id : String
  host : String
  port : Int
  deriving Repr, BEq, Inhabited, DecidableEq"#;
251
/// String helpers for generated code: `charAt`, `slice`, float formatting,
/// `chars`, and the `AverString` namespace with a `split` that mirrors the
/// Aver semantics (empty delimiter explodes into characters, single-char
/// delimiter uses a hand-rolled splitter with round-trip lemmas against
/// `String.intercalate`).
const LEAN_PRELUDE_STRING_HELPERS: &str = r#"def String.charAt (s : String) (i : Int) : Option String :=
  if i < 0 then none
  else (s.toList.get? i.toNat).map Char.toString
theorem String.charAt_length_none (s : String) : String.charAt s s.length = none := by
  have hs : ¬ ((s.length : Int) < 0) := by omega
  unfold String.charAt
  simp [hs]
  change s.data.length ≤ s.length
  exact Nat.le_refl _
def String.slice (s : String) (start stop : Int) : String :=
  let startN := if start < 0 then 0 else start.toNat
  let stopN := if stop < 0 then 0 else stop.toNat
  let chars := s.toList
  String.mk ((chars.drop startN).take (stopN - startN))
private def trimFloatTrailingZerosChars (chars : List Char) : List Char :=
  let noZeros := (chars.reverse.dropWhile (fun c => c == '0')).reverse
  match noZeros.reverse with
  | '.' :: rest => rest.reverse
  | _ => noZeros
private def normalizeFloatString (s : String) : String :=
  if s.toList.any (fun c => c == '.') then
    String.mk (trimFloatTrailingZerosChars s.toList)
  else s
def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)
def String.chars (s : String) : List String := s.toList.map Char.toString
private theorem char_to_string_append_mk (c : Char) (chars : List Char) :
    Char.toString c ++ String.mk chars = String.mk (c :: chars) := by
  rfl
private theorem string_intercalate_empty_char_strings_go (acc : String) (chars : List Char) :
    String.intercalate.go acc "" (chars.map Char.toString) = acc ++ String.mk chars := by
  induction chars generalizing acc with
  | nil =>
    simp [String.intercalate.go]
  | cons c rest ih =>
    calc
      String.intercalate.go acc "" ((c :: rest).map Char.toString)
          = String.intercalate.go (acc ++ Char.toString c) "" (rest.map Char.toString) := by
        simp [String.intercalate.go]
      _ = (acc ++ Char.toString c) ++ String.mk rest := by
        simpa using ih (acc ++ Char.toString c)
      _ = acc ++ String.mk (c :: rest) := by
        simp [String.append_assoc, char_to_string_append_mk]
private theorem string_intercalate_empty_char_strings (chars : List Char) :
    String.intercalate "" (chars.map Char.toString) = String.mk chars := by
  cases chars with
  | nil =>
    simp [String.intercalate]
  | cons c rest =>
    simpa [String.intercalate] using string_intercalate_empty_char_strings_go c.toString rest
theorem String.intercalate_empty_chars (s : String) :
    String.intercalate "" (String.chars s) = s := by
  cases s with
  | mk chars =>
    simpa [String.chars] using string_intercalate_empty_char_strings chars
namespace AverString
def splitOnCharGo (currentRev : List Char) (sep : Char) : List Char → List String
  | [] => [String.mk currentRev.reverse]
  | c :: cs =>
    if c == sep then
      String.mk currentRev.reverse :: splitOnCharGo [] sep cs
    else
      splitOnCharGo (c :: currentRev) sep cs
def splitOnChar (s : String) (sep : Char) : List String :=
  splitOnCharGo [] sep s.toList
def split (s delim : String) : List String :=
  match delim.toList with
  | [] => "" :: (s.toList.map Char.toString) ++ [""]
  | [c] => splitOnChar s c
  | _ => s.splitOn delim
@[simp] private theorem char_toString_data (c : Char) : c.toString.data = [c] := by
  rfl
private theorem splitOnCharGo_until_sep
    (prefixRev part tail : List Char) (sep : Char) :
    part.all (fun c => c != sep) = true ->
    splitOnCharGo prefixRev sep (part ++ sep :: tail) =
    String.mk (prefixRev.reverse ++ part) :: splitOnCharGo [] sep tail := by
  intro h_safe
  induction part generalizing prefixRev with
  | nil =>
    simp [splitOnCharGo]
  | cons c rest ih =>
    simp at h_safe
    have h_rest : (rest.all fun c => c != sep) = true := by
      simpa using h_safe.2
    simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
      (ih (c :: prefixRev) h_rest)
private theorem splitOnCharGo_no_sep
    (prefixRev chars : List Char) (sep : Char) :
    chars.all (fun c => c != sep) = true ->
    splitOnCharGo prefixRev sep chars =
    [String.mk (prefixRev.reverse ++ chars)] := by
  intro h_safe
  induction chars generalizing prefixRev with
  | nil =>
    simp [splitOnCharGo]
  | cons c rest ih =>
    simp at h_safe
    have h_rest : (rest.all fun c => c != sep) = true := by
      simpa using h_safe.2
    simpa [splitOnCharGo, h_safe.1, List.reverse_cons, List.append_assoc] using
      (ih (c :: prefixRev) h_rest)
@[simp] theorem split_single_char_append
    (head tail : String) (sep : Char) :
    head.toList.all (fun c => c != sep) = true ->
    split (head ++ Char.toString sep ++ tail) (Char.toString sep) =
    head :: split tail (Char.toString sep) := by
  intro h_safe
  simpa [split, splitOnChar] using
    (splitOnCharGo_until_sep [] head.data tail.data sep h_safe)
@[simp] theorem split_single_char_no_sep
    (s : String) (sep : Char) :
    s.toList.all (fun c => c != sep) = true ->
    split s (Char.toString sep) = [s] := by
  intro h_safe
  simpa [split, splitOnChar] using
    (splitOnCharGo_no_sep [] s.data sep h_safe)
private theorem intercalate_go_prefix
    (pref acc sep : String) (rest : List String) :
    String.intercalate.go (pref ++ sep ++ acc) sep rest =
    pref ++ sep ++ String.intercalate.go acc sep rest := by
  induction rest generalizing acc with
  | nil =>
    simp [String.intercalate.go, String.append_assoc]
  | cons x xs ih =>
    simpa [String.intercalate.go, String.append_assoc] using
      (ih (acc ++ sep ++ x))
@[simp] theorem split_intercalate_trailing_single_char
    (parts : List String) (sep : Char) :
    parts.all (fun part => part.toList.all (fun c => c != sep)) = true ->
    split (String.intercalate (Char.toString sep) parts ++ Char.toString sep) (Char.toString sep) =
    match parts with
    | [] => ["", ""]
    | _ => parts ++ [""] := by
  intro h_safe
  induction parts with
  | nil =>
    simp [split, splitOnChar, splitOnCharGo]
  | cons part rest ih =>
    simp at h_safe
    have h_part : (part.toList.all fun c => c != sep) = true := by
      simpa using h_safe.1
    cases rest with
    | nil =>
      have h_empty : ("".toList.all fun c => c != sep) = true := by simp
      calc
        split (String.intercalate.go part (Char.toString sep) [] ++ Char.toString sep) (Char.toString sep)
            = split (part ++ Char.toString sep) (Char.toString sep) := by
          simp [String.intercalate.go]
        _ = part :: split "" (Char.toString sep) := by
          simpa using split_single_char_append part "" sep h_part
        _ = [part, ""] := by
          simp [split_single_char_no_sep, h_empty]
    | cons next rest' =>
      have h_rest : ((next :: rest').all fun part => part.toList.all fun c => c != sep) = true := by
        simpa using h_safe.2
      calc
        split
            (String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep)
            (Char.toString sep)
            =
            split
            (part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep))
            (Char.toString sep) := by
          have h_join :
              String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
              = part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
            calc
              String.intercalate.go part (Char.toString sep) (next :: rest') ++ Char.toString sep
                  = String.intercalate.go (part ++ Char.toString sep ++ next) (Char.toString sep) rest' ++ Char.toString sep := by
                simp [String.intercalate.go]
              _ = part ++ Char.toString sep ++ (String.intercalate.go next (Char.toString sep) rest' ++ Char.toString sep) := by
                rw [intercalate_go_prefix part next (Char.toString sep) rest']
                simp [String.append_assoc]
              _ = part ++ Char.toString sep ++ (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep) := by
                simp [String.intercalate, String.intercalate.go]
          simpa using congrArg (fun s => split s (Char.toString sep)) h_join
        _ = part :: split
            (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
            (Char.toString sep) := by
          simpa using split_single_char_append
            part
            (String.intercalate (Char.toString sep) (next :: rest') ++ Char.toString sep)
            sep
            h_part
        _ = part :: (next :: rest' ++ [""]) := by
          simpa using ih h_rest
end AverString"#;
439
/// Numeric parsing/formatting prelude: `AverDigits` (digit lists with
/// round-trip proofs), `String.fromInt` / `Int.fromString` with the
/// `fromString_fromInt` inverse theorem, and a best-effort
/// `Float.fromString` supporting sign, fraction and exponent parts.
const LEAN_PRELUDE_NUMERIC_PARSE: &str = r#"namespace AverDigits
def foldDigitsAcc (acc : Nat) : List Nat -> Nat
  | [] => acc
  | d :: ds => foldDigitsAcc (acc * 10 + d) ds

def foldDigits (digits : List Nat) : Nat :=
  foldDigitsAcc 0 digits

private theorem foldDigitsAcc_append_singleton (acc : Nat) (xs : List Nat) (d : Nat) :
    foldDigitsAcc acc (xs ++ [d]) = foldDigitsAcc acc xs * 10 + d := by
  induction xs generalizing acc with
  | nil =>
    simp [foldDigitsAcc]
  | cons x xs ih =>
    simp [foldDigitsAcc, ih, Nat.left_distrib, Nat.add_assoc, Nat.add_left_comm]

private theorem foldDigits_append_singleton (xs : List Nat) (d : Nat) :
    foldDigits (xs ++ [d]) = foldDigits xs * 10 + d := by
  simpa [foldDigits] using foldDigitsAcc_append_singleton 0 xs d

def natDigits : Nat -> List Nat
  | n =>
    if n < 10 then
      [n]
    else
      natDigits (n / 10) ++ [n % 10]
termination_by
  n => n

theorem natDigits_nonempty (n : Nat) : natDigits n ≠ [] := by
  by_cases h : n < 10
  · rw [natDigits.eq_1]
    simp [h]
  · rw [natDigits.eq_1]
    simp [h]

theorem natDigits_digits_lt_ten : ∀ n : Nat, ∀ d ∈ natDigits n, d < 10 := by
  intro n d hd
  by_cases h : n < 10
  · rw [natDigits.eq_1] at hd
    simp [h] at hd
    rcases hd with rfl
    exact h
  · rw [natDigits.eq_1] at hd
    simp [h] at hd
    rcases hd with hd | hd
    · exact natDigits_digits_lt_ten (n / 10) d hd
    · subst hd
      exact Nat.mod_lt n (by omega)

theorem foldDigits_natDigits : ∀ n : Nat, foldDigits (natDigits n) = n := by
  intro n
  by_cases h : n < 10
  · rw [natDigits.eq_1]
    simp [h, foldDigits, foldDigitsAcc]
  · rw [natDigits.eq_1]
    simp [h]
    rw [foldDigits_append_singleton]
    rw [foldDigits_natDigits (n / 10)]
    omega

def digitChar : Nat -> Char
  | 0 => '0' | 1 => '1' | 2 => '2' | 3 => '3' | 4 => '4'
  | 5 => '5' | 6 => '6' | 7 => '7' | 8 => '8' | 9 => '9'
  | _ => '0'

def charToDigit? : Char -> Option Nat
  | '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3 | '4' => some 4
  | '5' => some 5 | '6' => some 6 | '7' => some 7 | '8' => some 8 | '9' => some 9
  | _ => none

theorem charToDigit_digitChar : ∀ d : Nat, d < 10 -> charToDigit? (digitChar d) = some d
  | 0, _ => by simp [digitChar, charToDigit?]
  | 1, _ => by simp [digitChar, charToDigit?]
  | 2, _ => by simp [digitChar, charToDigit?]
  | 3, _ => by simp [digitChar, charToDigit?]
  | 4, _ => by simp [digitChar, charToDigit?]
  | 5, _ => by simp [digitChar, charToDigit?]
  | 6, _ => by simp [digitChar, charToDigit?]
  | 7, _ => by simp [digitChar, charToDigit?]
  | 8, _ => by simp [digitChar, charToDigit?]
  | 9, _ => by simp [digitChar, charToDigit?]
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem digitChar_ne_minus : ∀ d : Nat, d < 10 -> digitChar d ≠ '-'
  | 0, _ => by decide
  | 1, _ => by decide
  | 2, _ => by decide
  | 3, _ => by decide
  | 4, _ => by decide
  | 5, _ => by decide
  | 6, _ => by decide
  | 7, _ => by decide
  | 8, _ => by decide
  | 9, _ => by decide
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->
    Char.toString (digitChar d) ≠ " " ∧
    Char.toString (digitChar d) ≠ "\t" ∧
    Char.toString (digitChar d) ≠ "\n" ∧
    Char.toString (digitChar d) ≠ "\r"
  | 0, _ => by decide
  | 1, _ => by decide
  | 2, _ => by decide
  | 3, _ => by decide
  | 4, _ => by decide
  | 5, _ => by decide
  | 6, _ => by decide
  | 7, _ => by decide
  | 8, _ => by decide
  | 9, _ => by decide
  | Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ (Nat.succ n))))))))), h => by
    omega

theorem mapM_charToDigit_digits : ∀ ds : List Nat,
    (∀ d ∈ ds, d < 10) -> List.mapM charToDigit? (ds.map digitChar) = some ds := by
  intro ds hds
  induction ds with
  | nil =>
    simp
  | cons d ds ih =>
    have hd : d < 10 := hds d (by simp)
    have htail : ∀ x ∈ ds, x < 10 := by
      intro x hx
      exact hds x (by simp [hx])
    simp [charToDigit_digitChar d hd, ih htail]

def natDigitsChars (n : Nat) : List Char :=
  (natDigits n).map digitChar

def parseNatChars (chars : List Char) : Option Nat :=
  match chars with
  | [] => none
  | _ => do
    let digits <- List.mapM charToDigit? chars
    pure (foldDigits digits)

theorem parseNatChars_nat (n : Nat) :
    parseNatChars (natDigitsChars n) = some n := by
  unfold parseNatChars natDigitsChars
  cases h : (natDigits n).map digitChar with
  | nil =>
    exfalso
    exact natDigits_nonempty n (List.map_eq_nil_iff.mp h)
  | cons hd tl =>
    have hdigits : List.mapM charToDigit? (List.map digitChar (natDigits n)) = some (natDigits n) :=
      mapM_charToDigit_digits (natDigits n) (fun d hd => natDigits_digits_lt_ten n d hd)
    rw [h] at hdigits
    simp [h, hdigits, foldDigits_natDigits]
end AverDigits

def String.fromInt (n : Int) : String :=
  match n with
  | .ofNat m => String.mk (AverDigits.natDigitsChars m)
  | .negSucc m => String.mk ('-' :: AverDigits.natDigitsChars (m + 1))

def Int.fromString (s : String) : Except String Int :=
  match s.toList with
  | [] => .error ("Cannot parse '" ++ s ++ "' as Int")
  | '-' :: rest =>
    match AverDigits.parseNatChars rest with
    | some n => .ok (-Int.ofNat n)
    | none => .error ("Cannot parse '" ++ s ++ "' as Int")
  | chars =>
    match AverDigits.parseNatChars chars with
    | some n => .ok (Int.ofNat n)
    | none => .error ("Cannot parse '" ++ s ++ "' as Int")

theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n
  | .ofNat m => by
    cases h : AverDigits.natDigits m with
    | nil =>
      exfalso
      exact AverDigits.natDigits_nonempty m h
    | cons d ds =>
      have hd : d < 10 := AverDigits.natDigits_digits_lt_ten m d (by simp [h])
      have hne : AverDigits.digitChar d ≠ '-' := AverDigits.digitChar_ne_minus d hd
      have hparse : AverDigits.parseNatChars (AverDigits.digitChar d :: List.map AverDigits.digitChar ds) = some m := by
        simpa [AverDigits.natDigitsChars, h] using AverDigits.parseNatChars_nat m
      simp [String.fromInt, Int.fromString, AverDigits.natDigitsChars, h, hne, hparse]
  | .negSucc m => by
    simp [String.fromInt, Int.fromString, AverDigits.parseNatChars_nat]
    rfl

private def charDigitsToNat (cs : List Char) : Nat :=
  cs.foldl (fun acc c => acc * 10 + (c.toNat - '0'.toNat)) 0

private def parseExpPart : List Char → (Bool × List Char)
  | '-' :: rest => (true, rest.takeWhile Char.isDigit)
  | '+' :: rest => (false, rest.takeWhile Char.isDigit)
  | rest => (false, rest.takeWhile Char.isDigit)

def Float.fromString (s : String) : Except String Float :=
  let chars := s.toList
  let (neg, chars) := match chars with
    | '-' :: rest => (true, rest)
    | _ => (false, chars)
  let intPart := chars.takeWhile Char.isDigit
  let rest := chars.dropWhile Char.isDigit
  let (fracPart, rest) := match rest with
    | '.' :: rest => (rest.takeWhile Char.isDigit, rest.dropWhile Char.isDigit)
    | _ => ([], rest)
  let (expNeg, expDigits) := match rest with
    | 'e' :: rest => parseExpPart rest
    | 'E' :: rest => parseExpPart rest
    | _ => (false, [])
  if intPart.isEmpty && fracPart.isEmpty then .error ("Invalid float: " ++ s)
  else
    let mantissa := charDigitsToNat (intPart ++ fracPart)
    let fracLen : Int := fracPart.length
    let expVal : Int := charDigitsToNat expDigits
    let shift : Int := (if expNeg then -expVal else expVal) - fracLen
    let f := if shift >= 0 then Float.ofScientific mantissa false shift.toNat
      else Float.ofScientific mantissa true ((-shift).toNat)
    .ok (if neg then -f else f)"#;
658
/// Char/byte helpers: code-point conversion (`Char.toCode`/`fromCode`,
/// rejecting out-of-range and surrogate values 55296–57343) and hex
/// byte encoding in the `AverByte` namespace.
const LEAN_PRELUDE_CHAR_BYTE: &str = r#"def Char.toCode (s : String) : Int :=
  match s.toList.head? with
  | some c => (c.toNat : Int)
  | none => panic! "Char.toCode: string is empty"
def Char.fromCode (n : Int) : Option String :=
  if n < 0 || n > 1114111 then none
  else if n >= 55296 && n <= 57343 then none
  else some (Char.toString (Char.ofNat n.toNat))

def hexDigit (n : Int) : String :=
  match n with
  | 0 => "0" | 1 => "1" | 2 => "2" | 3 => "3"
  | 4 => "4" | 5 => "5" | 6 => "6" | 7 => "7"
  | 8 => "8" | 9 => "9" | 10 => "a" | 11 => "b"
  | 12 => "c" | 13 => "d" | 14 => "e" | 15 => "f"
  | _ => "?"

def byteToHex (code : Int) : String :=
  hexDigit (code / 16) ++ hexDigit (code % 16)

namespace AverByte
private def hexValue (c : Char) : Option Int :=
  match c with
  | '0' => some 0 | '1' => some 1 | '2' => some 2 | '3' => some 3
  | '4' => some 4 | '5' => some 5 | '6' => some 6 | '7' => some 7
  | '8' => some 8 | '9' => some 9 | 'a' => some 10 | 'b' => some 11
  | 'c' => some 12 | 'd' => some 13 | 'e' => some 14 | 'f' => some 15
  | 'A' => some 10 | 'B' => some 11 | 'C' => some 12 | 'D' => some 13
  | 'E' => some 14 | 'F' => some 15
  | _ => none
def toHex (n : Int) : Except String String :=
  if n < 0 || n > 255 then
    .error ("Byte.toHex: " ++ toString n ++ " is out of range 0-255")
  else
    .ok (byteToHex n)
def fromHex (s : String) : Except String Int :=
  match s.toList with
  | [hi, lo] =>
    match hexValue hi, hexValue lo with
    | some h, some l => .ok (h * 16 + l)
    | _, _ => .error ("Byte.fromHex: invalid hex '" ++ s ++ "'")
  | _ => .error ("Byte.fromHex: expected exactly 2 hex chars, got '" ++ s ++ "'")
end AverByte"#;
702
703fn pure_fns(ctx: &CodegenContext) -> Vec<&FnDef> {
704 ctx.modules
705 .iter()
706 .flat_map(|m| m.fn_defs.iter())
707 .chain(ctx.fn_defs.iter())
708 .filter(|fd| toplevel::is_pure_fn(fd))
709 .collect()
710}
711
712fn recursive_type_names(ctx: &CodegenContext) -> HashSet<String> {
713 ctx.modules
714 .iter()
715 .flat_map(|m| m.type_defs.iter())
716 .chain(ctx.type_defs.iter())
717 .filter(|td| toplevel::is_recursive_type_def(td))
718 .map(|td| toplevel::type_def_name(td).to_string())
719 .collect()
720}
721
722fn recursive_pure_fn_names(ctx: &CodegenContext) -> HashSet<String> {
723 let pure_names: HashSet<String> = pure_fns(ctx)
724 .into_iter()
725 .map(|fd| fd.name.clone())
726 .collect();
727 let mut callgraph_items = ctx.items.clone();
728 for module in &ctx.modules {
729 for fd in &module.fn_defs {
730 callgraph_items.push(TopLevel::FnDef(fd.clone()));
731 }
732 }
733 call_graph::find_recursive_fns(&callgraph_items)
734 .into_iter()
735 .filter(|name| pure_names.contains(name))
736 .collect()
737}
738
739fn verify_counter_key(vb: &crate::ast::VerifyBlock) -> String {
740 match &vb.kind {
741 VerifyKind::Cases => format!("fn:{}", vb.fn_name),
742 VerifyKind::Law(law) => format!("law:{}::{}", vb.fn_name, law.name),
743 }
744}
745
746fn lean_project_name(ctx: &CodegenContext) -> String {
747 ctx.items
748 .iter()
749 .find_map(|item| match item {
750 TopLevel::Module(m) => Some(m.name.clone()),
751 _ => None,
752 })
753 .unwrap_or_else(|| capitalize_first(&ctx.project_name))
754}
755
756fn expr_to_dotted_name(expr: &Expr) -> Option<String> {
757 match expr {
758 Expr::Ident(name) => Some(name.clone()),
759 Expr::Attr(obj, field) => expr_to_dotted_name(obj).map(|p| format!("{}.{}", p, field)),
760 _ => None,
761 }
762}
763
/// True when `name` equals `target` outright, or when the last dotted
/// segment of `name` equals `target` (so `Mod.foo` matches `foo`).
fn call_matches(name: &str, target: &str) -> bool {
    if name == target {
        return true;
    }
    match name.rsplit('.').next() {
        Some(last) => last == target,
        None => false,
    }
}
767
/// True when `name` (or its last dotted segment) is a member of `targets`.
fn call_is_in_set(name: &str, targets: &HashSet<String>) -> bool {
    targets.contains(name)
        || name
            .rsplit('.')
            .next()
            .map_or(false, |last| targets.contains(last))
}
771
/// Resolve `name` to the form it takes inside `targets`: the full name if
/// present, otherwise its last dotted segment if that is present, else `None`.
fn canonical_callee_name(name: &str, targets: &HashSet<String>) -> Option<String> {
    if targets.contains(name) {
        return Some(name.to_string());
    }
    let last = name.rsplit('.').next()?;
    if targets.contains(last) {
        Some(last.to_string())
    } else {
        None
    }
}
781
/// True when `name` itself, or its last dotted segment, is in `targets`.
fn call_matches_any(name: &str, targets: &HashSet<String>) -> bool {
    if targets.contains(name) {
        return true;
    }
    name.rsplit('.')
        .next()
        .map_or(false, |last| targets.contains(last))
}
791
792fn is_int_minus_positive(expr: &Expr, param_name: &str) -> bool {
793 match expr {
794 Expr::BinOp(BinOp::Sub, left, right) => {
795 matches!(left.as_ref(), Expr::Ident(id) if id == param_name)
796 && matches!(right.as_ref(), Expr::Literal(crate::ast::Literal::Int(n)) if *n >= 1)
797 }
798 Expr::FnCall(callee, args) => {
799 let Some(name) = expr_to_dotted_name(callee) else {
800 return false;
801 };
802 (name == "Int.sub" || name == "int.sub")
803 && args.len() == 2
804 && matches!(&args[0], Expr::Ident(id) if id == param_name)
805 && matches!(&args[1], Expr::Literal(crate::ast::Literal::Int(n)) if *n >= 1)
806 }
807 _ => false,
808 }
809}
810
/// Recursively walk `expr` and append every call site to `out` as
/// `(callee_name, argument_exprs)` pairs.
///
/// Only callees that flatten to a dotted identifier path (`a.b.c`) are
/// recorded; calls through other expression shapes are not recorded, but
/// their sub-expressions are still traversed for nested calls.
fn collect_calls_from_expr<'a>(expr: &'a Expr, out: &mut Vec<(String, Vec<&'a Expr>)>) {
    match expr {
        Expr::FnCall(callee, args) => {
            // Record the call itself when the callee is a plain dotted name,
            // then keep walking both the callee and each argument.
            if let Some(name) = expr_to_dotted_name(callee) {
                out.push((name, args.iter().collect()));
            }
            collect_calls_from_expr(callee, out);
            for arg in args {
                collect_calls_from_expr(arg, out);
            }
        }
        Expr::TailCall(boxed) => {
            // Tail calls carry their callee name directly.
            let (name, args) = boxed.as_ref();
            out.push((name.clone(), args.iter().collect()));
            for arg in args {
                collect_calls_from_expr(arg, out);
            }
        }
        Expr::Attr(obj, _) => collect_calls_from_expr(obj, out),
        Expr::BinOp(_, left, right) => {
            collect_calls_from_expr(left, out);
            collect_calls_from_expr(right, out);
        }
        Expr::Match { subject, arms, .. } => {
            collect_calls_from_expr(subject, out);
            for arm in arms {
                collect_calls_from_expr(&arm.body, out);
            }
        }
        Expr::Constructor(_, inner) => {
            if let Some(inner) = inner {
                collect_calls_from_expr(inner, out);
            }
        }
        Expr::ErrorProp(inner) => collect_calls_from_expr(inner, out),
        Expr::InterpolatedStr(parts) => {
            // Only parsed interpolation holes contain expressions.
            for p in parts {
                if let crate::ast::StrPart::Parsed(e) = p {
                    collect_calls_from_expr(e, out);
                }
            }
        }
        Expr::List(items) | Expr::Tuple(items) => {
            for item in items {
                collect_calls_from_expr(item, out);
            }
        }
        Expr::MapLiteral(entries) => {
            for (k, v) in entries {
                collect_calls_from_expr(k, out);
                collect_calls_from_expr(v, out);
            }
        }
        Expr::RecordCreate { fields, .. } => {
            for (_, v) in fields {
                collect_calls_from_expr(v, out);
            }
        }
        Expr::RecordUpdate { base, updates, .. } => {
            collect_calls_from_expr(base, out);
            for (_, v) in updates {
                collect_calls_from_expr(v, out);
            }
        }
        // Leaves: no sub-expressions to visit.
        Expr::Literal(_) | Expr::Ident(_) | Expr::Resolved(_) => {}
    }
}
878
879fn collect_calls_from_body(body: &FnBody) -> Vec<(String, Vec<&Expr>)> {
880 let mut out = Vec::new();
881 for stmt in body.stmts() {
882 match stmt {
883 Stmt::Binding(_, _, expr) | Stmt::Expr(expr) => collect_calls_from_expr(expr, &mut out),
884 }
885 }
886 out
887}
888
/// Recursively scans `expr` for `match` expressions whose subject is exactly
/// the identifier `list_param_name`, and records into `tails` the tail binder
/// of every cons pattern in those matches.
///
/// The collected binders are the names that may legally appear as the list
/// argument of a structurally-decreasing recursive call (see
/// `single_list_structural_param_index`).
fn collect_list_tail_binders_from_expr(
    expr: &Expr,
    list_param_name: &str,
    tails: &mut HashSet<String>,
) {
    match expr {
        Expr::Match { subject, arms, .. } => {
            // Only matches directly on the list parameter introduce usable
            // tail binders.
            if matches!(subject.as_ref(), Expr::Ident(id) if id == list_param_name) {
                for MatchArm { pattern, .. } in arms {
                    if let Pattern::Cons(_, tail) = pattern {
                        tails.insert(tail.clone());
                    }
                }
            }
            // Keep scanning nested expressions regardless of the subject.
            for arm in arms {
                collect_list_tail_binders_from_expr(&arm.body, list_param_name, tails);
            }
            collect_list_tail_binders_from_expr(subject, list_param_name, tails);
        }
        Expr::FnCall(callee, args) => {
            collect_list_tail_binders_from_expr(callee, list_param_name, tails);
            for arg in args {
                collect_list_tail_binders_from_expr(arg, list_param_name, tails);
            }
        }
        Expr::TailCall(boxed) => {
            let (_, args) = boxed.as_ref();
            for arg in args {
                collect_list_tail_binders_from_expr(arg, list_param_name, tails);
            }
        }
        Expr::Attr(obj, _) => collect_list_tail_binders_from_expr(obj, list_param_name, tails),
        Expr::BinOp(_, left, right) => {
            collect_list_tail_binders_from_expr(left, list_param_name, tails);
            collect_list_tail_binders_from_expr(right, list_param_name, tails);
        }
        Expr::Constructor(_, inner) => {
            if let Some(inner) = inner {
                collect_list_tail_binders_from_expr(inner, list_param_name, tails);
            }
        }
        Expr::ErrorProp(inner) => {
            collect_list_tail_binders_from_expr(inner, list_param_name, tails)
        }
        Expr::InterpolatedStr(parts) => {
            // Only parsed interpolation segments contain expressions.
            for p in parts {
                if let crate::ast::StrPart::Parsed(e) = p {
                    collect_list_tail_binders_from_expr(e, list_param_name, tails);
                }
            }
        }
        Expr::List(items) | Expr::Tuple(items) => {
            for item in items {
                collect_list_tail_binders_from_expr(item, list_param_name, tails);
            }
        }
        Expr::MapLiteral(entries) => {
            for (k, v) in entries {
                collect_list_tail_binders_from_expr(k, list_param_name, tails);
                collect_list_tail_binders_from_expr(v, list_param_name, tails);
            }
        }
        Expr::RecordCreate { fields, .. } => {
            for (_, v) in fields {
                collect_list_tail_binders_from_expr(v, list_param_name, tails);
            }
        }
        Expr::RecordUpdate { base, updates, .. } => {
            collect_list_tail_binders_from_expr(base, list_param_name, tails);
            for (_, v) in updates {
                collect_list_tail_binders_from_expr(v, list_param_name, tails);
            }
        }
        // Leaf expressions: nothing to recurse into.
        Expr::Literal(_) | Expr::Ident(_) | Expr::Resolved(_) => {}
    }
}
965
966fn collect_list_tail_binders(fd: &FnDef, list_param_name: &str) -> HashSet<String> {
967 let mut tails = HashSet::new();
968 for stmt in fd.body.stmts() {
969 match stmt {
970 Stmt::Binding(_, _, expr) | Stmt::Expr(expr) => {
971 collect_list_tail_binders_from_expr(expr, list_param_name, &mut tails)
972 }
973 }
974 }
975 tails
976}
977
978fn find_type_def<'a>(ctx: &'a CodegenContext, type_name: &str) -> Option<&'a TypeDef> {
979 ctx.modules
980 .iter()
981 .flat_map(|m| m.type_defs.iter())
982 .chain(ctx.type_defs.iter())
983 .find(|td| toplevel::type_def_name(td) == type_name)
984}
985
986fn recursive_constructor_binders(
987 td: &TypeDef,
988 variant_name: &str,
989 binders: &[String],
990) -> Vec<String> {
991 let variant_short = variant_name.rsplit('.').next().unwrap_or(variant_name);
992 match td {
993 TypeDef::Sum { name, variants, .. } => variants
994 .iter()
995 .find(|variant| variant.name == variant_short)
996 .map(|variant| {
997 variant
998 .fields
999 .iter()
1000 .zip(binders.iter())
1001 .filter_map(|(field_ty, binder)| {
1002 (field_ty.trim() == name).then_some(binder.clone())
1003 })
1004 .collect()
1005 })
1006 .unwrap_or_default(),
1007 TypeDef::Product { .. } => Vec::new(),
1008 }
1009}
1010
/// Walks `expr` and records into `out` every pattern binder that names a
/// recursive subterm of a value already present in `tracked`.
///
/// A binder qualifies when a `match` subject is a tracked identifier and the
/// arm's constructor pattern binds a field whose type is the ADT `td` itself
/// (per `recursive_constructor_binders`). `collect_recursive_subterm_binders`
/// iterates this to a fixpoint so binders-of-binders are found too.
fn grow_recursive_subterm_binders_from_expr(
    expr: &Expr,
    tracked: &HashSet<String>,
    td: &TypeDef,
    out: &mut HashSet<String>,
) {
    match expr {
        Expr::Match { subject, arms, .. } => {
            // Matching on a tracked value exposes its recursive fields.
            if let Expr::Ident(subject_name) = subject.as_ref()
                && tracked.contains(subject_name)
            {
                for arm in arms {
                    if let Pattern::Constructor(variant_name, binders) = &arm.pattern {
                        out.extend(recursive_constructor_binders(td, variant_name, binders));
                    }
                }
            }
            grow_recursive_subterm_binders_from_expr(subject, tracked, td, out);
            for arm in arms {
                grow_recursive_subterm_binders_from_expr(&arm.body, tracked, td, out);
            }
        }
        Expr::FnCall(callee, args) => {
            grow_recursive_subterm_binders_from_expr(callee, tracked, td, out);
            for arg in args {
                grow_recursive_subterm_binders_from_expr(arg, tracked, td, out);
            }
        }
        Expr::Attr(obj, _) => grow_recursive_subterm_binders_from_expr(obj, tracked, td, out),
        Expr::BinOp(_, left, right) => {
            grow_recursive_subterm_binders_from_expr(left, tracked, td, out);
            grow_recursive_subterm_binders_from_expr(right, tracked, td, out);
        }
        Expr::Constructor(_, Some(inner)) | Expr::ErrorProp(inner) => {
            grow_recursive_subterm_binders_from_expr(inner, tracked, td, out)
        }
        Expr::InterpolatedStr(parts) => {
            // Only parsed interpolation segments contain expressions.
            for part in parts {
                if let crate::ast::StrPart::Parsed(inner) = part {
                    grow_recursive_subterm_binders_from_expr(inner, tracked, td, out);
                }
            }
        }
        Expr::List(items) | Expr::Tuple(items) => {
            for item in items {
                grow_recursive_subterm_binders_from_expr(item, tracked, td, out);
            }
        }
        Expr::MapLiteral(entries) => {
            for (k, v) in entries {
                grow_recursive_subterm_binders_from_expr(k, tracked, td, out);
                grow_recursive_subterm_binders_from_expr(v, tracked, td, out);
            }
        }
        Expr::RecordCreate { fields, .. } => {
            for (_, v) in fields {
                grow_recursive_subterm_binders_from_expr(v, tracked, td, out);
            }
        }
        Expr::RecordUpdate { base, updates, .. } => {
            grow_recursive_subterm_binders_from_expr(base, tracked, td, out);
            for (_, v) in updates {
                grow_recursive_subterm_binders_from_expr(v, tracked, td, out);
            }
        }
        Expr::TailCall(boxed) => {
            // boxed.1 is the argument list of the tail call.
            for arg in &boxed.1 {
                grow_recursive_subterm_binders_from_expr(arg, tracked, td, out);
            }
        }
        // Leaf expressions: nothing to recurse into.
        Expr::Literal(_) | Expr::Ident(_) | Expr::Resolved(_) | Expr::Constructor(_, None) => {}
    }
}
1084
1085fn collect_recursive_subterm_binders(
1086 fd: &FnDef,
1087 param_name: &str,
1088 param_type: &str,
1089 ctx: &CodegenContext,
1090) -> HashSet<String> {
1091 let Some(td) = find_type_def(ctx, param_type) else {
1092 return HashSet::new();
1093 };
1094 let mut tracked: HashSet<String> = HashSet::from([param_name.to_string()]);
1095 loop {
1096 let mut discovered = HashSet::new();
1097 for stmt in fd.body.stmts() {
1098 match stmt {
1099 Stmt::Binding(_, _, expr) | Stmt::Expr(expr) => {
1100 grow_recursive_subterm_binders_from_expr(expr, &tracked, td, &mut discovered);
1101 }
1102 }
1103 }
1104 let before = tracked.len();
1105 tracked.extend(discovered);
1106 if tracked.len() == before {
1107 break;
1108 }
1109 }
1110 tracked.remove(param_name);
1111 tracked
1112}
1113
/// Returns the index of an `Int` parameter on which every recursive call is
/// well-founded: either all calls strictly count it down (`p - k`, k >= 1),
/// or all calls count it up (`p + k`) under the guarded-ascent shape checked
/// by `has_negative_guarded_ascent`.
///
/// Returns `None` when the function has no self-calls or no parameter
/// satisfies either discipline.
fn single_int_countdown_param_index(fd: &FnDef) -> Option<usize> {
    // Argument lists of all self-calls found in the body.
    let recursive_calls: Vec<Vec<&Expr>> = collect_calls_from_body(fd.body.as_ref())
        .into_iter()
        .filter(|(name, _)| call_matches(name, &fd.name))
        .map(|(_, args)| args)
        .collect();
    if recursive_calls.is_empty() {
        return None;
    }

    fd.params
        .iter()
        .enumerate()
        .find_map(|(idx, (param_name, param_ty))| {
            if param_ty != "Int" {
                return None;
            }
            // Preferred discipline: every call passes `param - k` here.
            let countdown_ok = recursive_calls.iter().all(|args| {
                args.get(idx)
                    .copied()
                    .is_some_and(|arg| is_int_minus_positive(arg, param_name))
            });
            if countdown_ok {
                return Some(idx);
            }

            // Fallback: every call passes `param + k`, acceptable only when
            // recursion is guarded by `param < 0` (ascent toward zero).
            let ascent_ok = recursive_calls.iter().all(|args| {
                args.get(idx)
                    .copied()
                    .is_some_and(|arg| is_int_plus_positive(arg, param_name))
            });
            (ascent_ok && has_negative_guarded_ascent(fd, param_name)).then_some(idx)
        })
}
1148
/// Detects the "guarded ascent" shape that justifies an ascending Int
/// recursion: the function's tail expression must be a boolean match on
/// exactly `param < 0`, where the `true` arm contains a self-call and the
/// `false` arm contains none.
///
/// Any arm pattern other than the literal booleans disqualifies the shape,
/// as does a missing `true` or `false` arm.
fn has_negative_guarded_ascent(fd: &FnDef, param_name: &str) -> bool {
    // The guard must be the function's final expression.
    let Some(Expr::Match { subject, arms, .. }) = fd.body.tail_expr() else {
        return false;
    };
    // The subject must be exactly `param < 0`.
    let Expr::BinOp(BinOp::Lt, left, right) = subject.as_ref() else {
        return false;
    };
    if !is_ident(left, param_name)
        || !matches!(right.as_ref(), Expr::Literal(crate::ast::Literal::Int(0)))
    {
        return false;
    }

    // Locate the two literal-boolean arms; anything else is disqualifying.
    let mut true_arm = None;
    let mut false_arm = None;
    for arm in arms {
        match arm.pattern {
            Pattern::Literal(crate::ast::Literal::Bool(true)) => true_arm = Some(arm.body.as_ref()),
            Pattern::Literal(crate::ast::Literal::Bool(false)) => {
                false_arm = Some(arm.body.as_ref())
            }
            _ => return false,
        }
    }

    let Some(true_arm) = true_arm else {
        return false;
    };
    let Some(false_arm) = false_arm else {
        return false;
    };

    let mut true_calls = Vec::new();
    collect_calls_from_expr(true_arm, &mut true_calls);
    let mut false_calls = Vec::new();
    collect_calls_from_expr(false_arm, &mut false_calls);

    // Recursion only on the negative side; the non-negative side terminates.
    true_calls
        .iter()
        .any(|(name, _)| call_matches(name, &fd.name))
        && false_calls
            .iter()
            .all(|(name, _)| !call_matches(name, &fd.name))
}
1193
/// Checks whether a self-recursive function terminates by a combined `sizeOf`
/// measure over its non-scalar parameters: every self-call must pass each
/// measured parameter either unchanged or as a known strict recursive subterm
/// binder, with at least one strictly smaller argument per call.
///
/// Returns `false` when the function is not recursive, has no measured
/// parameters, or no measured parameter ever shrinks.
fn supports_single_sizeof_structural(fd: &FnDef, ctx: &CodegenContext) -> bool {
    let recursive_calls: Vec<Vec<&Expr>> = collect_calls_from_body(fd.body.as_ref())
        .into_iter()
        .filter(|(name, _)| call_matches(name, &fd.name))
        .map(|(_, args)| args)
        .collect();
    if recursive_calls.is_empty() {
        return false;
    }

    // Indices of parameters that participate in the sizeOf measure.
    let metric_indices = sizeof_measure_param_indices(fd);
    if metric_indices.is_empty() {
        return false;
    }

    // For each measured parameter of a known recursive ADT, the binders that
    // name strict subterms of it anywhere in the body.
    let binder_sets: HashMap<usize, HashSet<String>> = metric_indices
        .iter()
        .filter_map(|idx| {
            let (param_name, param_type) = fd.params.get(*idx)?;
            recursive_type_names(ctx).contains(param_type).then(|| {
                (
                    *idx,
                    collect_recursive_subterm_binders(fd, param_name, param_type, ctx),
                )
            })
        })
        .collect();

    // Without any subterm binders no call can ever get strictly smaller.
    if binder_sets.values().all(HashSet::is_empty) {
        return false;
    }

    recursive_calls.iter().all(|args| {
        let mut strictly_smaller = false;
        for idx in &metric_indices {
            let Some((param_name, _)) = fd.params.get(*idx) else {
                return false;
            };
            let Some(arg) = args.get(*idx).copied() else {
                return false;
            };
            // Passing the parameter through unchanged is allowed.
            if is_ident(arg, param_name) {
                continue;
            }
            let Some(binders) = binder_sets.get(idx) else {
                return false;
            };
            // A recognized subterm binder makes this call strictly smaller.
            if matches!(arg, Expr::Ident(id) if binders.contains(id)) {
                strictly_smaller = true;
                continue;
            }
            // Anything else breaks the measure argument.
            return false;
        }
        strictly_smaller
    })
}
1250
1251fn single_list_structural_param_index(fd: &FnDef) -> Option<usize> {
1252 fd.params
1253 .iter()
1254 .enumerate()
1255 .find_map(|(param_index, (param_name, param_ty))| {
1256 if !(param_ty.starts_with("List<") || param_ty == "List") {
1257 return None;
1258 }
1259
1260 let tails = collect_list_tail_binders(fd, param_name);
1261 if tails.is_empty() {
1262 return None;
1263 }
1264
1265 let recursive_calls: Vec<Option<&Expr>> = collect_calls_from_body(fd.body.as_ref())
1266 .into_iter()
1267 .filter(|(name, _)| call_matches(name, &fd.name))
1268 .map(|(_, args)| args.get(param_index).copied())
1269 .collect();
1270 if recursive_calls.is_empty() {
1271 return None;
1272 }
1273
1274 recursive_calls
1275 .into_iter()
1276 .all(|arg| {
1277 matches!(
1278 arg,
1279 Some(Expr::Ident(id)) if tails.contains(id)
1280 )
1281 })
1282 .then_some(param_index)
1283 })
1284}
1285
1286fn is_ident(expr: &Expr, name: &str) -> bool {
1287 matches!(expr, Expr::Ident(id) if id == name)
1288}
1289
1290fn is_int_plus_positive(expr: &Expr, param_name: &str) -> bool {
1291 match expr {
1292 Expr::BinOp(BinOp::Add, left, right) => {
1293 matches!(left.as_ref(), Expr::Ident(id) if id == param_name)
1294 && matches!(right.as_ref(), Expr::Literal(crate::ast::Literal::Int(n)) if *n >= 1)
1295 }
1296 Expr::FnCall(callee, args) => {
1297 let Some(name) = expr_to_dotted_name(callee) else {
1298 return false;
1299 };
1300 (name == "Int.add" || name == "int.add")
1301 && args.len() == 2
1302 && matches!(&args[0], Expr::Ident(id) if id == param_name)
1303 && matches!(&args[1], Expr::Literal(crate::ast::Literal::Int(n)) if *n >= 1)
1304 }
1305 _ => false,
1306 }
1307}
1308
1309fn is_skip_ws_advance(expr: &Expr, string_param: &str, pos_param: &str) -> bool {
1310 let Expr::FnCall(callee, args) = expr else {
1311 return false;
1312 };
1313 let Some(name) = expr_to_dotted_name(callee) else {
1314 return false;
1315 };
1316 if !call_matches(&name, "skipWs") || args.len() != 2 {
1317 return false;
1318 }
1319 is_ident(&args[0], string_param) && is_int_plus_positive(&args[1], pos_param)
1320}
1321
1322fn is_skip_ws_same(expr: &Expr, string_param: &str, pos_param: &str) -> bool {
1323 let Expr::FnCall(callee, args) = expr else {
1324 return false;
1325 };
1326 let Some(name) = expr_to_dotted_name(callee) else {
1327 return false;
1328 };
1329 if !call_matches(&name, "skipWs") || args.len() != 2 {
1330 return false;
1331 }
1332 is_ident(&args[0], string_param) && is_ident(&args[1], pos_param)
1333}
1334
1335fn is_string_pos_advance(expr: &Expr, string_param: &str, pos_param: &str) -> bool {
1336 is_int_plus_positive(expr, pos_param) || is_skip_ws_advance(expr, string_param, pos_param)
1337}
1338
/// Classification of the position argument of an in-group call in a
/// String+position recursion (see `supports_mutual_string_pos_advance`).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
enum StringPosEdge {
    // The callee receives the same position; progress must come from rank.
    Same,
    // The callee receives a strictly advanced position.
    Advance,
}
1344
/// Classifies the position argument `expr` of an in-group call as a `Same`
/// edge (position preserved) or an `Advance` edge (position strictly moved),
/// or `None` when the argument fits neither shape.
///
/// Identifiers other than the position parameter — and `skipWs` applied to
/// such identifiers — are treated as `Advance`; this relies on the caller's
/// surrounding checks rather than on a local proof of progress.
fn classify_string_pos_edge(
    expr: &Expr,
    string_param: &str,
    pos_param: &str,
) -> Option<StringPosEdge> {
    if is_ident(expr, pos_param) || is_skip_ws_same(expr, string_param, pos_param) {
        return Some(StringPosEdge::Same);
    }
    if is_string_pos_advance(expr, string_param, pos_param) {
        return Some(StringPosEdge::Advance);
    }
    // skipWs over some other (already advanced) binder also advances.
    if let Expr::FnCall(callee, args) = expr {
        let name = expr_to_dotted_name(callee)?;
        if call_matches(&name, "skipWs")
            && args.len() == 2
            && is_ident(&args[0], string_param)
            && matches!(&args[1], Expr::Ident(id) if id != pos_param)
        {
            return Some(StringPosEdge::Advance);
        }
    }
    // Any other identifier is assumed to carry an advanced position.
    if matches!(expr, Expr::Ident(id) if id != pos_param) {
        return Some(StringPosEdge::Advance);
    }
    None
}
1371
/// Assigns each function in `names` a positive rank (1..=n) such that every
/// "same-position" call edge goes from a higher-ranked caller to a strictly
/// lower-ranked callee.
///
/// Implemented as Kahn's topological sort over the same-edge graph, with
/// sorted tie-breaking for deterministic output. Returns `None` when an edge
/// references a name outside `names` or the edges contain a cycle (in which
/// case no valid ranking exists).
///
/// Bug fixed: the in-degree-zero seed closure contained a mojibake-garbled
/// pattern (`°` in place of `&deg`), which does not compile.
fn ranks_from_same_edges(
    names: &HashSet<String>,
    same_edges: &HashMap<String, HashSet<String>>,
) -> Option<HashMap<String, usize>> {
    // Count incoming same-edges per function.
    let mut indegree: HashMap<String, usize> = names.iter().map(|n| (n.clone(), 0)).collect();
    for outs in same_edges.values() {
        for to in outs {
            if let Some(entry) = indegree.get_mut(to) {
                *entry += 1;
            } else {
                // Edge to an unknown function: ranking is meaningless.
                return None;
            }
        }
    }

    // Seed with the sources (no incoming same-edges), sorted so that the
    // resulting ranks are deterministic.
    let mut queue: Vec<String> = indegree
        .iter()
        .filter_map(|(name, &deg)| (deg == 0).then_some(name.clone()))
        .collect();
    queue.sort();
    let mut topo = Vec::new();
    while let Some(node) = queue.pop() {
        topo.push(node.clone());
        let outs = same_edges.get(&node).cloned().unwrap_or_default();
        let mut newly_zero = Vec::new();
        for to in outs {
            if let Some(entry) = indegree.get_mut(&to) {
                *entry -= 1;
                if *entry == 0 {
                    newly_zero.push(to);
                }
            } else {
                return None;
            }
        }
        newly_zero.sort();
        queue.extend(newly_zero);
    }

    // Leftover nodes sit on a cycle of same-edges: no ranking exists.
    if topo.len() != names.len() {
        return None;
    }

    // Earlier in topological order means higher rank.
    let n = topo.len();
    let mut ranks = HashMap::new();
    for (idx, name) in topo.into_iter().enumerate() {
        ranks.insert(name, n - idx);
    }
    Some(ranks)
}
1422
1423fn supports_single_string_pos_advance(fd: &FnDef) -> bool {
1424 let Some((string_param, string_ty)) = fd.params.first() else {
1425 return false;
1426 };
1427 let Some((pos_param, pos_ty)) = fd.params.get(1) else {
1428 return false;
1429 };
1430 if string_ty != "String" || pos_ty != "Int" {
1431 return false;
1432 }
1433
1434 let recursive_calls: Vec<(Option<&Expr>, Option<&Expr>)> =
1435 collect_calls_from_body(fd.body.as_ref())
1436 .into_iter()
1437 .filter(|(name, _)| call_matches(name, &fd.name))
1438 .map(|(_, args)| (args.first().copied(), args.get(1).copied()))
1439 .collect();
1440 if recursive_calls.is_empty() {
1441 return false;
1442 }
1443
1444 recursive_calls.into_iter().all(|(arg0, arg1)| {
1445 arg0.is_some_and(|e| is_ident(e, string_param))
1446 && arg1.is_some_and(|e| is_string_pos_advance(e, string_param, pos_param))
1447 })
1448}
1449
1450fn supports_mutual_int_countdown(component: &[&FnDef]) -> bool {
1451 if component.len() < 2 {
1452 return false;
1453 }
1454 if component
1455 .iter()
1456 .any(|fd| !matches!(fd.params.first(), Some((_, t)) if t == "Int"))
1457 {
1458 return false;
1459 }
1460 let names: HashSet<String> = component.iter().map(|fd| fd.name.clone()).collect();
1461 let mut any_intra = false;
1462 for fd in component {
1463 let param_name = &fd.params[0].0;
1464 for (callee, args) in collect_calls_from_body(fd.body.as_ref()) {
1465 if !call_is_in_set(&callee, &names) {
1466 continue;
1467 }
1468 any_intra = true;
1469 let Some(arg0) = args.first().copied() else {
1470 return false;
1471 };
1472 if !is_int_minus_positive(arg0, param_name) {
1473 return false;
1474 }
1475 }
1476 }
1477 any_intra
1478}
1479
/// Checks the mutual String+position discipline and, when it holds, returns
/// a rank per function such that every same-position in-group call goes to a
/// strictly lower-ranked function (advancing calls may go anywhere).
///
/// Requirements: at least two members, each with `(String, Int)` leading
/// parameters; every in-group call passes the string unchanged and a
/// classifiable position argument; and the same-position edges are acyclic.
fn supports_mutual_string_pos_advance(component: &[&FnDef]) -> Option<HashMap<String, usize>> {
    if component.len() < 2 {
        return None;
    }
    if component.iter().any(|fd| {
        !matches!(fd.params.first(), Some((_, t)) if t == "String")
            || !matches!(fd.params.get(1), Some((_, t)) if t == "Int")
    }) {
        return None;
    }

    let names: HashSet<String> = component.iter().map(|fd| fd.name.clone()).collect();
    // Edges caller -> callee for calls that preserve the position.
    let mut same_edges: HashMap<String, HashSet<String>> =
        names.iter().map(|n| (n.clone(), HashSet::new())).collect();
    let mut any_intra = false;

    for fd in component {
        let string_param = &fd.params[0].0;
        let pos_param = &fd.params[1].0;
        for (callee_raw, args) in collect_calls_from_body(fd.body.as_ref()) {
            // Calls leaving the component are irrelevant to termination.
            let Some(callee) = canonical_callee_name(&callee_raw, &names) else {
                continue;
            };
            any_intra = true;

            // An in-group call missing either argument aborts the analysis.
            let arg0 = args.first().copied()?;
            let arg1 = args.get(1).copied()?;

            if !is_ident(arg0, string_param) {
                return None;
            }

            match classify_string_pos_edge(arg1, string_param, pos_param) {
                Some(StringPosEdge::Same) => {
                    if let Some(edges) = same_edges.get_mut(&fd.name) {
                        edges.insert(callee);
                    } else {
                        return None;
                    }
                }
                // Advancing calls are always fine; no edge recorded.
                Some(StringPosEdge::Advance) => {}
                None => return None,
            }
        }
    }

    if !any_intra {
        return None;
    }

    ranks_from_same_edges(&names, &same_edges)
}
1532
/// True for builtin types with no structural content to recurse into; these
/// are excluded from sizeOf-based termination measures.
fn is_scalar_like_type(type_name: &str) -> bool {
    const SCALAR_TYPES: [&str; 7] = ["Int", "Float", "Bool", "String", "Char", "Byte", "Unit"];
    SCALAR_TYPES.contains(&type_name)
}
1539
1540pub(super) fn sizeof_measure_param_indices(fd: &FnDef) -> Vec<usize> {
1541 fd.params
1542 .iter()
1543 .enumerate()
1544 .filter_map(|(idx, (_, type_name))| (!is_scalar_like_type(type_name)).then_some(idx))
1545 .collect()
1546}
1547
/// Checks the ranked-sizeOf discipline for a mutual group and, when it
/// holds, returns a rank per function such that every "same measure" call
/// (all measured arguments passed through verbatim) goes to a strictly
/// lower-ranked function.
///
/// Every member must have at least one measured (non-scalar) parameter.
/// Calls that are not same-measure are assumed to shrink the combined
/// measure; only same-measure edges need the acyclic ranking.
fn supports_mutual_sizeof_ranked(component: &[&FnDef]) -> Option<HashMap<String, usize>> {
    if component.len() < 2 {
        return None;
    }
    let names: HashSet<String> = component.iter().map(|fd| fd.name.clone()).collect();
    // Measured parameter indices per function, keyed by name.
    let metric_indices: HashMap<String, Vec<usize>> = component
        .iter()
        .map(|fd| (fd.name.clone(), sizeof_measure_param_indices(fd)))
        .collect();
    // Every member needs something to measure.
    if component.iter().any(|fd| {
        metric_indices
            .get(&fd.name)
            .is_none_or(|indices| indices.is_empty())
    }) {
        return None;
    }

    // Edges caller -> callee for calls that keep the measure unchanged.
    let mut same_edges: HashMap<String, HashSet<String>> =
        names.iter().map(|n| (n.clone(), HashSet::new())).collect();
    let mut any_intra = false;
    for fd in component {
        let caller_metric_indices = metric_indices.get(&fd.name)?;
        let caller_metric_params: Vec<&str> = caller_metric_indices
            .iter()
            .filter_map(|idx| fd.params.get(*idx).map(|(name, _)| name.as_str()))
            .collect();
        for (callee_raw, args) in collect_calls_from_body(fd.body.as_ref()) {
            // Calls leaving the component are irrelevant to termination.
            let Some(callee) = canonical_callee_name(&callee_raw, &names) else {
                continue;
            };
            any_intra = true;
            let callee_metric_indices = metric_indices.get(&callee)?;
            // Same edge: positionally identical measured arguments.
            let is_same_edge = callee_metric_indices.len() == caller_metric_params.len()
                && callee_metric_indices
                    .iter()
                    .enumerate()
                    .all(|(pos, callee_idx)| {
                        let Some(arg) = args.get(*callee_idx).copied() else {
                            return false;
                        };
                        is_ident(arg, caller_metric_params[pos])
                    });
            if is_same_edge {
                if let Some(edges) = same_edges.get_mut(&fd.name) {
                    edges.insert(callee);
                } else {
                    return None;
                }
            }
        }
    }
    if !any_intra {
        return None;
    }

    // Project the ranking back onto the component's members.
    let ranks = ranks_from_same_edges(&names, &same_edges)?;
    let mut out = HashMap::new();
    for fd in component {
        let rank = ranks.get(&fd.name).copied()?;
        out.insert(fd.name.clone(), rank);
    }
    Some(out)
}
1611
/// Analyzes every recursive pure-function component and either assigns it a
/// `RecursionPlan` (how the emitted Lean justifies termination) or records a
/// `ProofModeIssue` explaining why it falls outside the proof subset.
///
/// Plans are tried from most to least specific; a component receives entries
/// in the returned map only when a plan applies. Non-recursive singleton
/// components are skipped entirely.
fn proof_mode_recursion_analysis(
    ctx: &CodegenContext,
) -> (HashMap<String, RecursionPlan>, Vec<ProofModeIssue>) {
    let mut plans = HashMap::new();
    let mut issues = Vec::new();

    let all_pure = pure_fns(ctx);
    let recursive_names = recursive_pure_fn_names(ctx);
    let components = call_graph::ordered_fn_components(&all_pure, &ctx.module_prefixes);

    for component in components {
        if component.is_empty() {
            continue;
        }
        // Multi-member components are mutual by construction; singletons are
        // only interesting when self-recursive.
        let is_recursive_component =
            component.len() > 1 || recursive_names.contains(&component[0].name);
        if !is_recursive_component {
            continue;
        }

        if component.len() > 1 {
            // Mutual plans, most specific first.
            if supports_mutual_int_countdown(&component) {
                for fd in &component {
                    plans.insert(fd.name.clone(), RecursionPlan::MutualIntCountdown);
                }
            } else if let Some(ranks) = supports_mutual_string_pos_advance(&component) {
                for fd in &component {
                    if let Some(rank) = ranks.get(&fd.name).copied() {
                        plans.insert(
                            fd.name.clone(),
                            RecursionPlan::MutualStringPosAdvance { rank },
                        );
                    }
                }
            } else if let Some(rankings) = supports_mutual_sizeof_ranked(&component) {
                for fd in &component {
                    if let Some(rank) = rankings.get(&fd.name).copied() {
                        plans.insert(fd.name.clone(), RecursionPlan::MutualSizeOfRanked { rank });
                    }
                }
            } else {
                // No mutual plan applies; report the whole group at its
                // earliest source line.
                let names = component
                    .iter()
                    .map(|fd| fd.name.clone())
                    .collect::<Vec<_>>()
                    .join(", ");
                let line = component.iter().map(|fd| fd.line).min().unwrap_or(1);
                issues.push(ProofModeIssue {
                    line,
                    message: format!(
                        "unsupported mutual recursion group (currently supported in proof mode: Int countdown on first param): {}",
                        names
                    ),
                });
            }
            continue;
        }

        // Singleton plans, most specific first.
        let fd = component[0];
        if recurrence::detect_second_order_int_linear_recurrence(fd).is_some() {
            plans.insert(fd.name.clone(), RecursionPlan::LinearRecurrence2);
        } else if let Some(param_index) = single_int_countdown_param_index(fd) {
            plans.insert(fd.name.clone(), RecursionPlan::IntCountdown { param_index });
        } else if supports_single_sizeof_structural(fd, ctx) {
            plans.insert(fd.name.clone(), RecursionPlan::SizeOfStructural);
        } else if let Some(param_index) = single_list_structural_param_index(fd) {
            plans.insert(
                fd.name.clone(),
                RecursionPlan::ListStructural { param_index },
            );
        } else if supports_single_string_pos_advance(fd) {
            plans.insert(fd.name.clone(), RecursionPlan::StringPosAdvance);
        } else {
            issues.push(ProofModeIssue {
                line: fd.line,
                message: format!(
                    "recursive function '{}' is outside proof subset (currently supported: Int countdown, second-order affine Int recurrences with pair-state worker, structural recursion on List/recursive ADTs, String+position, mutual Int countdown, mutual String+position, and ranked sizeOf recursion)",
                    fd.name
                ),
            });
        }
    }

    (plans, issues)
}
1697
1698pub fn proof_mode_findings(ctx: &CodegenContext) -> Vec<ProofModeIssue> {
1703 let (_plans, issues) = proof_mode_recursion_analysis(ctx);
1704 issues
1705}
1706
1707pub fn proof_mode_issues(ctx: &CodegenContext) -> Vec<String> {
1708 proof_mode_findings(ctx)
1709 .into_iter()
1710 .map(|issue| issue.message)
1711 .collect()
1712}
1713
/// Transpiles the project with the default verification emit mode
/// (`native_decide`-backed verify blocks).
pub fn transpile(ctx: &CodegenContext) -> ProjectOutput {
    transpile_with_verify_mode(ctx, VerifyEmitMode::NativeDecide)
}
1718
/// Transpiles the project in proof mode: recursive functions are emitted
/// according to their `RecursionPlan` so the Lean output type-checks without
/// escape hatches where a plan exists, recursive ADTs get `DecidableEq` and
/// (when applicable) measure helpers, and verify blocks use `verify_mode`.
///
/// Diagnostics for unplannable recursion are intentionally dropped here;
/// callers surface them separately via `proof_mode_findings`.
pub fn transpile_for_proof_mode(
    ctx: &CodegenContext,
    verify_mode: VerifyEmitMode,
) -> ProjectOutput {
    let (plans, _issues) = proof_mode_recursion_analysis(ctx);
    let recursive_names = recursive_pure_fn_names(ctx);
    let recursive_types = recursive_type_names(ctx);

    let mut sections = Vec::new();

    // Type definitions: module-level first, then top-level. Recursive ADTs
    // also get a DecidableEq instance and, when needed, a measure.
    for module in &ctx.modules {
        for td in &module.type_defs {
            sections.push(toplevel::emit_type_def(td));
            if toplevel::is_recursive_type_def(td) {
                sections.push(toplevel::emit_recursive_decidable_eq(
                    toplevel::type_def_name(td),
                ));
                if let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types) {
                    sections.push(measure);
                }
            }
            sections.push(String::new());
        }
    }
    for td in &ctx.type_defs {
        sections.push(toplevel::emit_type_def(td));
        if toplevel::is_recursive_type_def(td) {
            sections.push(toplevel::emit_recursive_decidable_eq(
                toplevel::type_def_name(td),
            ));
            if let Some(measure) = toplevel::emit_recursive_measure(td, &recursive_types) {
                sections.push(measure);
            }
        }
        sections.push(String::new());
    }

    emit_pure_functions_proof(ctx, &plans, &recursive_names, &mut sections);

    for item in &ctx.items {
        if let TopLevel::Decision(db) = item {
            sections.push(toplevel::emit_decision(db));
            sections.push(String::new());
        }
    }

    // Verify blocks sharing a counter key continue their case numbering
    // across blocks so emitted theorem names stay unique.
    let mut verify_case_counters: HashMap<String, usize> = HashMap::new();
    for item in &ctx.items {
        if let TopLevel::Verify(vb) = item {
            let key = verify_counter_key(vb);
            let start_idx = *verify_case_counters.get(&key).unwrap_or(&0);
            let (emitted, next_idx) = toplevel::emit_verify_block(vb, ctx, verify_mode, start_idx);
            verify_case_counters.insert(key, next_idx);
            sections.push(emitted);
            sections.push(String::new());
        }
    }

    // Assemble the Lean file plus the lake project scaffolding.
    let lean_body = sections.join("\n");
    let prelude = generate_prelude_for_body(&lean_body, false);
    let lean_source = format!("{prelude}\n\n{lean_body}");
    let project_name = lean_project_name(ctx);
    let lakefile = generate_lakefile(&project_name);
    let toolchain = generate_toolchain();
    ProjectOutput {
        files: vec![
            ("lakefile.lean".to_string(), lakefile),
            ("lean-toolchain".to_string(), toolchain),
            (format!("{}.lean", project_name), lean_source),
        ],
    }
}
1796
/// Transpiles the project in the default (non-proof) style: recursive
/// functions are detected only to be emitted appropriately, verify blocks
/// use `verify_mode`, and no termination plans or measures are generated.
pub fn transpile_with_verify_mode(
    ctx: &CodegenContext,
    verify_mode: VerifyEmitMode,
) -> ProjectOutput {
    let mut sections = Vec::new();

    let recursive_fns = call_graph::find_recursive_fns(&ctx.items);

    // Type definitions: module-level first, then top-level. Recursive ADTs
    // also get a DecidableEq instance.
    for module in &ctx.modules {
        for td in &module.type_defs {
            sections.push(toplevel::emit_type_def(td));
            if toplevel::is_recursive_type_def(td) {
                sections.push(toplevel::emit_recursive_decidable_eq(
                    toplevel::type_def_name(td),
                ));
            }
            sections.push(String::new());
        }
    }

    for td in &ctx.type_defs {
        sections.push(toplevel::emit_type_def(td));
        if toplevel::is_recursive_type_def(td) {
            sections.push(toplevel::emit_recursive_decidable_eq(
                toplevel::type_def_name(td),
            ));
        }
        sections.push(String::new());
    }

    emit_pure_functions(ctx, &recursive_fns, &mut sections);

    for item in &ctx.items {
        if let TopLevel::Decision(db) = item {
            sections.push(toplevel::emit_decision(db));
            sections.push(String::new());
        }
    }

    // Verify blocks sharing a counter key continue their case numbering
    // across blocks so emitted theorem names stay unique.
    let mut verify_case_counters: HashMap<String, usize> = HashMap::new();
    for item in &ctx.items {
        if let TopLevel::Verify(vb) = item {
            let key = verify_counter_key(vb);
            let start_idx = *verify_case_counters.get(&key).unwrap_or(&0);
            let (emitted, next_idx) = toplevel::emit_verify_block(vb, ctx, verify_mode, start_idx);
            verify_case_counters.insert(key, next_idx);
            sections.push(emitted);
            sections.push(String::new());
        }
    }

    // Assemble the Lean file plus the lake project scaffolding.
    let lean_body = sections.join("\n");
    let prelude = generate_prelude_for_body(&lean_body, false);
    let lean_source = format!("{prelude}\n\n{lean_body}");

    let project_name = lean_project_name(ctx);
    let lakefile = generate_lakefile(&project_name);
    let toolchain = generate_toolchain();

    ProjectOutput {
        files: vec![
            ("lakefile.lean".to_string(), lakefile),
            ("lean-toolchain".to_string(), toolchain),
            (format!("{}.lean", project_name), lean_source),
        ],
    }
}
1879
1880fn emit_pure_functions(
1881 ctx: &CodegenContext,
1882 recursive_fns: &HashSet<String>,
1883 sections: &mut Vec<String>,
1884) {
1885 let all_fns: Vec<&FnDef> = ctx
1886 .modules
1887 .iter()
1888 .flat_map(|m| m.fn_defs.iter())
1889 .chain(ctx.fn_defs.iter())
1890 .filter(|fd| toplevel::is_pure_fn(fd))
1891 .collect();
1892 if all_fns.is_empty() {
1893 return;
1894 }
1895
1896 let components = call_graph::ordered_fn_components(&all_fns, &ctx.module_prefixes);
1897 for fns in components {
1898 if fns.is_empty() {
1899 continue;
1900 }
1901
1902 if fns.len() > 1 {
1904 sections.push(toplevel::emit_mutual_group(&fns, ctx));
1905 sections.push(String::new());
1906 continue;
1907 }
1908
1909 if let Some(code) = toplevel::emit_fn_def(fns[0], recursive_fns, ctx) {
1912 sections.push(code);
1913 sections.push(String::new());
1914 }
1915 }
1916}
1917
/// Emits every pure function into `sections` for proof mode, in dependency
/// order. Functions (or whole mutual groups) that have a `RecursionPlan`
/// use the proof emitters; anything recursive without a plan falls back to
/// the default emitters so the output still builds.
fn emit_pure_functions_proof(
    ctx: &CodegenContext,
    plans: &HashMap<String, RecursionPlan>,
    recursive_names: &HashSet<String>,
    sections: &mut Vec<String>,
) {
    let all_fns: Vec<&FnDef> = pure_fns(ctx);
    if all_fns.is_empty() {
        return;
    }

    let components = call_graph::ordered_fn_components(&all_fns, &ctx.module_prefixes);
    for fns in components {
        if fns.is_empty() {
            continue;
        }
        if fns.len() > 1 {
            // A mutual group only gets the proof emitter when every member
            // is covered by a plan.
            let all_supported = fns.iter().all(|fd| plans.contains_key(&fd.name));
            if all_supported {
                sections.push(toplevel::emit_mutual_group_proof(&fns, ctx, plans));
            } else {
                sections.push(toplevel::emit_mutual_group(&fns, ctx));
            }
            sections.push(String::new());
            continue;
        }

        let fd = fns[0];
        let is_recursive = recursive_names.contains(&fd.name);
        // Recursive but unplanned: fall back to the default emitter.
        let emitted = if is_recursive && !plans.contains_key(&fd.name) {
            toplevel::emit_fn_def(fd, recursive_names, ctx)
        } else {
            toplevel::emit_fn_def_proof(fd, plans.get(&fd.name).copied(), ctx)
        };
        if let Some(code) = emitted {
            sections.push(code);
            sections.push(String::new());
        }
    }
}
1958
/// Test-only helper: builds the prelude with every optional helper included,
/// regardless of what any particular body references.
#[cfg(test)]
fn generate_prelude() -> String {
    generate_prelude_for_body("", true)
}
1963
/// Assembles the Lean prelude for a generated file.
///
/// Core helpers are always included; optional helpers (builtin record types,
/// proof fuel, measure support, map support) are appended only when `body`
/// actually references them — unless `include_all_helpers` forces everything
/// in (used by tests via `generate_prelude`).
fn generate_prelude_for_body(body: &str, include_all_helpers: bool) -> String {
    // Unconditional core: header, Float coercion/equality, Except support,
    // string concatenation.
    let mut parts = vec![
        LEAN_PRELUDE_HEADER.to_string(),
        LEAN_PRELUDE_FLOAT_COE.to_string(),
        LEAN_PRELUDE_FLOAT_DEC_EQ.to_string(),
        LEAN_PRELUDE_EXCEPT_DEC_EQ.to_string(),
        LEAN_PRELUDE_EXCEPT_NS.to_string(),
        LEAN_PRELUDE_OPTION_TO_EXCEPT.to_string(),
        LEAN_PRELUDE_STRING_HADD.to_string(),
    ];

    // Builtin record types: only when referenced and not declared in-body.
    if include_all_helpers || needs_builtin_named_type(body, "Header") {
        parts.push(LEAN_PRELUDE_HEADER_TYPE.to_string());
    }
    if include_all_helpers || needs_builtin_named_type(body, "HttpResponse") {
        parts.push(LEAN_PRELUDE_HTTP_RESPONSE_TYPE.to_string());
    }
    if include_all_helpers || needs_builtin_named_type(body, "HttpRequest") {
        parts.push(LEAN_PRELUDE_HTTP_REQUEST_TYPE.to_string());
    }
    if include_all_helpers || needs_builtin_named_type(body, "Tcp_Connection") {
        parts.push(LEAN_PRELUDE_TCP_CONNECTION_TYPE.to_string());
    }

    // Fuel helper for String+position recursion proofs.
    if include_all_helpers || body.contains("averStringPosFuel") {
        parts.push(LEAN_PRELUDE_PROOF_FUEL.to_string());
    }

    // Measure helpers for sizeOf-based termination.
    if include_all_helpers || body.contains("AverMeasure.") {
        parts.push(LEAN_PRELUDE_AVER_MEASURE.to_string());
    }

    // Map support is itself body-sensitive; see generate_map_prelude.
    if include_all_helpers || body.contains("AverMap.") {
        parts.push(generate_map_prelude(body, include_all_helpers));
    }

    // Unconditional tail: list/string/numeric/char helpers.
    parts.extend([
        LEAN_PRELUDE_AVER_LIST.to_string(),
        LEAN_PRELUDE_STRING_HELPERS.to_string(),
        LEAN_PRELUDE_NUMERIC_PARSE.to_string(),
        LEAN_PRELUDE_CHAR_BYTE.to_string(),
    ]);

    parts.join("\n\n")
}
2009
2010fn needs_builtin_named_type(body: &str, type_name: &str) -> bool {
2011 references_named_type(body, type_name) && !declares_named_type(body, type_name)
2012}
2013
/// Check whether `body` contains `type_name` as a complete identifier-like
/// token. Token characters are ASCII alphanumerics, `_`, and `.`, so
/// "Header" does not match inside "HeaderLike" or "Http.Header".
fn references_named_type(body: &str, type_name: &str) -> bool {
    // Splitting on every non-token character yields exactly the maximal
    // token runs (plus empty slices between adjacent separators), which is
    // what the comparison needs.
    body.split(|c: char| !(c.is_ascii_alphanumeric() || c == '_' || c == '.'))
        .any(|token| token == type_name)
}
2028
/// True when `body` itself declares `type_name` as a `structure` or
/// `inductive` (matching the declaration header line exactly, ignoring
/// leading indentation), in which case the prelude must not re-declare it.
fn declares_named_type(body: &str, type_name: &str) -> bool {
    // Build both accepted header forms once, outside the scan.
    let wanted = [
        format!("structure {} where", type_name),
        format!("inductive {} where", type_name),
    ];
    body.lines()
        .any(|line| wanted.iter().any(|decl| line.trim_start() == decl.as_str()))
}
2036
2037fn generate_map_prelude(body: &str, include_all_helpers: bool) -> String {
2038 let mut parts = vec![AVER_MAP_PRELUDE_BASE.to_string()];
2039
2040 let needs_has_set_self = include_all_helpers || body.contains("AverMap.has_set_self");
2041 let needs_get_set_self = include_all_helpers || body.contains("AverMap.get_set_self");
2042 let needs_get_set_other = include_all_helpers
2043 || body.contains("AverMap.get_set_other")
2044 || body.contains("AverMap.has_set_other");
2045 let needs_has_set_other = include_all_helpers || body.contains("AverMap.has_set_other");
2046
2047 if needs_has_set_self {
2048 parts.push(AVER_MAP_PRELUDE_HAS_SET_SELF.to_string());
2049 }
2050 if needs_get_set_self {
2051 parts.push(AVER_MAP_PRELUDE_GET_SET_SELF.to_string());
2052 }
2053 if needs_get_set_other {
2054 parts.push(AVER_MAP_PRELUDE_GET_SET_OTHER.to_string());
2055 }
2056 if needs_has_set_other {
2057 parts.push(AVER_MAP_PRELUDE_HAS_SET_OTHER.to_string());
2058 }
2059
2060 parts.push(AVER_MAP_PRELUDE_END.to_string());
2061 parts.join("\n\n")
2062}
2063
/// Render the project's `lakefile.lean`. The Lake package name is the
/// lowercased project name, while the default-target library keeps the
/// caller's original casing.
fn generate_lakefile(project_name: &str) -> String {
    let package_name = project_name.to_lowercase();
    format!(
        r#"import Lake
open Lake DSL

package «{}» where
  version := v!"0.1.0"

@[default_target]
lean_lib «{}» where
  srcDir := "."
"#,
        package_name, project_name
    )
}
2080
/// Pin the Lean toolchain for the generated project
/// (`lean-toolchain` file contents, trailing newline included).
fn generate_toolchain() -> String {
    String::from("leanprover/lean4:v4.15.0\n")
}
2084
/// Uppercase the first character of `s`, leaving the rest untouched.
/// Characters whose uppercase form is multi-character (e.g. 'ß' → "SS")
/// expand accordingly; an empty input yields an empty string.
fn capitalize_first(s: &str) -> String {
    let mut rest = s.chars();
    match rest.next() {
        Some(first) => {
            let mut out: String = first.to_uppercase().collect();
            out.push_str(rest.as_str());
            out
        }
        None => String::new(),
    }
}
2092
2093#[cfg(test)]
2094mod tests {
2095 use super::{
2096 VerifyEmitMode, generate_prelude, proof_mode_issues, recurrence, transpile,
2097 transpile_for_proof_mode, transpile_with_verify_mode,
2098 };
2099 use crate::ast::{
2100 BinOp, Expr, FnBody, FnDef, Literal, MatchArm, Pattern, Stmt, TopLevel, TypeDef,
2101 TypeVariant, VerifyBlock, VerifyGiven, VerifyGivenDomain, VerifyKind, VerifyLaw,
2102 };
2103 use crate::codegen::{CodegenContext, build_context};
2104 use crate::source::parse_source;
2105 use crate::tco;
2106 use crate::types::checker::run_type_check_full;
2107 use std::collections::{HashMap, HashSet};
2108 use std::rc::Rc;
2109
    /// Minimal codegen context with no items at all; the base fixture for
    /// verify-emission tests. Project name is fixed to "verify_mode" so the
    /// generated file is "Verify_mode.lean".
    fn empty_ctx() -> CodegenContext {
        CodegenContext {
            items: vec![],
            fn_sigs: HashMap::new(),
            memo_fns: HashSet::new(),
            memo_safe_types: HashSet::new(),
            type_defs: vec![],
            fn_defs: vec![],
            project_name: "verify_mode".to_string(),
            modules: vec![],
            module_prefixes: HashSet::new(),
            policy: None,
        }
    }

    /// Parse, TCO-transform, and type-check Aver `source`, then build a
    /// codegen context for it. Panics (fails the test) if parsing or type
    /// checking reports errors.
    fn ctx_from_source(source: &str, project_name: &str) -> CodegenContext {
        let mut items = parse_source(source).expect("source should parse");
        tco::transform_program(&mut items);
        let tc = run_type_check_full(&items, None);
        assert!(
            tc.errors.is_empty(),
            "source should typecheck without errors: {:?}",
            tc.errors
        );
        build_context(items, &tc, HashSet::new(), project_name.to_string(), vec![])
    }

    /// Locate the main generated `.lean` file in a project output,
    /// skipping the lakefile.
    fn generated_lean_file(out: &crate::codegen::ProjectOutput) -> &str {
        out.files
            .iter()
            .find_map(|(name, content)| {
                (name.ends_with(".lean") && name != "lakefile.lean").then_some(content.as_str())
            })
            .expect("expected generated Lean file")
    }

    /// Context containing a single `verify f` cases block asserting `1 => 1`.
    fn empty_ctx_with_verify_case() -> CodegenContext {
        let mut ctx = empty_ctx();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 1,
            cases: vec![(
                Expr::Literal(Literal::Int(1)),
                Expr::Literal(Literal::Int(1)),
            )],
            kind: VerifyKind::Cases,
        }));
        ctx
    }

    /// Context with two separate `verify f` cases blocks, used to check that
    /// skeleton theorem numbering is global per function rather than
    /// restarting per block.
    fn empty_ctx_with_two_verify_blocks_same_fn() -> CodegenContext {
        let mut ctx = empty_ctx();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 1,
            cases: vec![(
                Expr::Literal(Literal::Int(1)),
                Expr::Literal(Literal::Int(1)),
            )],
            kind: VerifyKind::Cases,
        }));
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "f".to_string(),
            line: 2,
            cases: vec![(
                Expr::Literal(Literal::Int(2)),
                Expr::Literal(Literal::Int(2)),
            )],
            kind: VerifyKind::Cases,
        }));
        ctx
    }
2182
    /// Context with `add(a, b) = a + b` plus a `commutative` law verify
    /// block: two sampled cases, `a` ranging over 1..2 and `b` over the
    /// explicit domain [2, 3].
    fn empty_ctx_with_verify_law() -> CodegenContext {
        let mut ctx = empty_ctx();
        let add = FnDef {
            name: "add".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::BinOp(
                BinOp::Add,
                Box::new(Expr::Ident("a".to_string())),
                Box::new(Expr::Ident("b".to_string())),
            ))),
            resolution: None,
        };
        // The definition is registered both as an item and in fn_defs, as
        // codegen consults both.
        ctx.fn_defs.push(add.clone());
        ctx.items.push(TopLevel::FnDef(add));
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 1,
            // Sampled cases: add(1,2) = add(2,1) and add(2,3) = add(3,2).
            cases: vec![
                (
                    Expr::FnCall(
                        Box::new(Expr::Ident("add".to_string())),
                        vec![
                            Expr::Literal(Literal::Int(1)),
                            Expr::Literal(Literal::Int(2)),
                        ],
                    ),
                    Expr::FnCall(
                        Box::new(Expr::Ident("add".to_string())),
                        vec![
                            Expr::Literal(Literal::Int(2)),
                            Expr::Literal(Literal::Int(1)),
                        ],
                    ),
                ),
                (
                    Expr::FnCall(
                        Box::new(Expr::Ident("add".to_string())),
                        vec![
                            Expr::Literal(Literal::Int(2)),
                            Expr::Literal(Literal::Int(3)),
                        ],
                    ),
                    Expr::FnCall(
                        Box::new(Expr::Ident("add".to_string())),
                        vec![
                            Expr::Literal(Literal::Int(3)),
                            Expr::Literal(Literal::Int(2)),
                        ],
                    ),
                ),
            ],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "commutative".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![
                            Expr::Literal(Literal::Int(2)),
                            Expr::Literal(Literal::Int(3)),
                        ]),
                    },
                ],
                when: None,
                // Universal claim: add a b = add b a.
                lhs: Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![Expr::Ident("a".to_string()), Expr::Ident("b".to_string())],
                ),
                rhs: Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![Expr::Ident("b".to_string()), Expr::Ident("a".to_string())],
                ),
                sample_guards: vec![],
            })),
        }));
        ctx
    }
2272
    /// The prelude must normalize Lean's float formatting through a
    /// `normalizeFloatString` helper and route `String.fromFloat` via it.
    #[test]
    fn prelude_normalizes_float_string_format() {
        let prelude = generate_prelude();
        assert!(
            prelude.contains("private def normalizeFloatString (s : String) : String :="),
            "missing normalizeFloatString helper in prelude"
        );
        assert!(
            prelude.contains(
                "def String.fromFloat (f : Float) : String := normalizeFloatString (toString f)"
            ),
            "String.fromFloat should normalize Lean float formatting"
        );
    }

    /// `Char.fromCode` must reject negative / above-U+10FFFF code points
    /// and the surrogate range U+D800..U+DFFF.
    #[test]
    fn prelude_validates_char_from_code_unicode_bounds() {
        let prelude = generate_prelude();
        assert!(
            prelude.contains("if n < 0 || n > 1114111 then none"),
            "Char.fromCode should reject code points above Unicode max"
        );
        assert!(
            prelude.contains("else if n >= 55296 && n <= 57343 then none"),
            "Char.fromCode should reject surrogate code points"
        );
    }

    /// The full prelude includes the AverMap set/get helper lemmas.
    #[test]
    fn prelude_includes_map_set_helper_lemmas() {
        let prelude = generate_prelude();
        assert!(
            prelude.contains("theorem has_set_self [DecidableEq α]"),
            "missing AverMap.has_set_self helper theorem"
        );
        assert!(
            prelude.contains("theorem get_set_self [DecidableEq α]"),
            "missing AverMap.get_set_self helper theorem"
        );
    }

    /// A program that never uses maps must not pull the AverMap prelude
    /// into the generated Lean file.
    #[test]
    fn lean_output_without_map_usage_omits_map_prelude() {
        let ctx = ctx_from_source(
            r#"
module NoMap
    intent = "Simple pure program without maps."

fn addOne(n: Int) -> Int
    n + 1

verify addOne
    addOne(1) => 2
"#,
            "nomap",
        );
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        assert!(
            !lean.contains("namespace AverMap"),
            "did not expect AverMap prelude in program without map usage:\n{}",
            lean
        );
    }

    /// Default verify emission uses `native_decide` for sampled cases.
    #[test]
    fn transpile_emits_native_decide_for_verify_by_default() {
        let out = transpile(&empty_ctx_with_verify_case());
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("example : 1 = 1 := by native_decide"));
    }

    /// `VerifyEmitMode::Sorry` replaces the proof tactic with `sorry`.
    #[test]
    fn transpile_can_emit_sorry_for_verify_when_requested() {
        let out = transpile_with_verify_mode(&empty_ctx_with_verify_case(), VerifyEmitMode::Sorry);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("example : 1 = 1 := by sorry"));
    }

    /// `VerifyEmitMode::TheoremSkeleton` emits named theorems with a
    /// `sorry` body instead of anonymous examples.
    #[test]
    fn transpile_can_emit_theorem_skeletons_for_verify() {
        let out = transpile_with_verify_mode(
            &empty_ctx_with_verify_case(),
            VerifyEmitMode::TheoremSkeleton,
        );
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
        assert!(lean.contains("  sorry"));
    }

    /// Skeleton theorem numbering continues across multiple verify blocks
    /// for the same function (f_verify_1, f_verify_2, ...).
    #[test]
    fn theorem_skeleton_numbering_is_global_per_function_across_verify_blocks() {
        let out = transpile_with_verify_mode(
            &empty_ctx_with_two_verify_blocks_same_fn(),
            VerifyEmitMode::TheoremSkeleton,
        );
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
        assert!(lean.contains("theorem f_verify_2 : 2 = 2 := by"));
    }
2390
    /// A named law produces a commented header, per-given domain comments,
    /// a universal theorem (auto-proved via simp + Int.add_comm here), and
    /// one `native_decide` sample theorem per case.
    #[test]
    fn transpile_emits_named_theorems_for_verify_law() {
        let out = transpile(&empty_ctx_with_verify_law());
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("-- verify law add.commutative (2 cases)"));
        assert!(lean.contains("-- given a: Int = 1..2"));
        assert!(lean.contains("-- given b: Int = [2, 3]"));
        assert!(lean.contains(
            "theorem add_law_commutative : ∀ (a : Int) (b : Int), add a b = add b a := by"
        ));
        assert!(lean.contains("  intro a b"));
        assert!(lean.contains("  simp [add, Int.add_comm]"));
        assert!(lean.contains(
            "theorem add_law_commutative_sample_1 : add 1 2 = add 2 1 := by native_decide"
        ));
        assert!(lean.contains(
            "theorem add_law_commutative_sample_2 : add 2 3 = add 3 2 := by native_decide"
        ));
    }

    /// Smoke-check that the full prelude carries the proved helper
    /// theorems used by downstream generated proofs.
    #[test]
    fn generate_prelude_emits_int_roundtrip_theorem() {
        let lean = generate_prelude();
        assert!(lean.contains(
            "theorem Int.fromString_fromInt : ∀ n : Int, Int.fromString (String.fromInt n) = .ok n"
        ));
        assert!(lean.contains("theorem String.intercalate_empty_chars (s : String) :"));
        assert!(lean.contains("def splitOnCharGo"));
        assert!(lean.contains("theorem split_single_char_append"));
        assert!(lean.contains("theorem split_intercalate_trailing_single_char"));
        assert!(lean.contains("namespace AverDigits"));
        assert!(lean.contains("theorem String.charAt_length_none (s : String)"));
        assert!(lean.contains("theorem digitChar_not_ws : ∀ d : Nat, d < 10 ->"));
    }

    /// A law with a `when` clause emits the guard both as a comment and as
    /// a hypothesis in the universal theorem, and threads it through each
    /// sample theorem.
    #[test]
    fn transpile_emits_guarded_theorems_for_verify_law_when_clause() {
        let ctx = ctx_from_source(
            r#"
module GuardedLaw
    intent =
        "verify law with precondition"

fn pickGreater(a: Int, b: Int) -> Int
    match a > b
        true -> a
        false -> b

verify pickGreater law ordered
    given a: Int = [1, 2]
    given b: Int = [1, 2]
    when a > b
    pickGreater(a, b) => a
"#,
            "guarded_law",
        );
        let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- when (a > b)"));
        assert!(lean.contains(
            "theorem pickGreater_law_ordered : ∀ (a : Int) (b : Int), a = 1 ∨ a = 2 -> b = 1 ∨ b = 2 -> (a > b) = true -> pickGreater a b = a := by"
        ));
        assert!(lean.contains(
            "theorem pickGreater_law_ordered_sample_1 : (1 > 1) = true -> pickGreater 1 1 = 1 := by"
        ));
        assert!(lean.contains(
            "theorem pickGreater_law_ordered_sample_4 : (2 > 2) = true -> pickGreater 2 2 = 2 := by"
        ));
    }
2465
    /// A law whose RHS is `<fn>Spec(x)` in the canonical unary shape gets
    /// the dedicated `<fn>_eq_<fn>Spec` theorem family instead of the
    /// generic `<fn>_law_<name>` naming.
    #[test]
    fn transpile_uses_spec_theorem_names_for_declared_spec_laws() {
        let ctx = ctx_from_source(
            r#"
module SpecDemo
    intent =
        "spec demo"

fn absVal(x: Int) -> Int
    match x < 0
        true -> 0 - x
        false -> x

fn absValSpec(x: Int) -> Int
    match x < 0
        true -> 0 - x
        false -> x

verify absVal law absValSpec
    given x: Int = [-2, -1, 0, 1, 2]
    absVal(x) => absValSpec(x)
"#,
            "spec_demo",
        );
        let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- verify law absVal.spec absValSpec (5 cases)"));
        assert!(
            lean.contains(
                "theorem absVal_eq_absValSpec : ∀ (x : Int), absVal x = absValSpec x := by"
            )
        );
        assert!(lean.contains("theorem absVal_eq_absValSpec_checked_domain :"));
        assert!(lean.contains("theorem absVal_eq_absValSpec_sample_1 :"));
        assert!(!lean.contains("theorem absVal_law_absValSpec :"));
    }

    /// A spec-named law whose call shape is NOT canonical (extra seed
    /// argument) keeps the ordinary `<fn>_law_<name>` theorem naming.
    #[test]
    fn transpile_keeps_noncanonical_spec_laws_as_regular_law_names() {
        let ctx = ctx_from_source(
            r#"
module SpecLawShape
    intent =
        "shape probe"

fn foo(x: Int) -> Int
    x + 1

fn fooSpec(seed: Int, x: Int) -> Int
    x + seed

verify foo law fooSpec
    given x: Int = [1, 2]
    foo(x) => fooSpec(1, x)
"#,
            "spec_law_shape",
        );
        let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- verify law foo.fooSpec (2 cases)"));
        assert!(lean.contains("theorem foo_law_fooSpec : ∀ (x : Int), foo x = fooSpec 1 x := by"));
        assert!(!lean.contains("theorem foo_eq_fooSpec :"));
    }

    /// Auto mode proves a linear-arithmetic canonical spec law via
    /// `change` + `omega` rather than omitting the universal theorem.
    #[test]
    fn transpile_auto_proves_linear_int_canonical_spec_law_in_auto_mode() {
        let ctx = ctx_from_source(
            r#"
module SpecGap
    intent =
        "nontrivial canonical spec law"

fn inc(x: Int) -> Int
    x + 1

fn incSpec(x: Int) -> Int
    x + 2 - 1

verify inc law incSpec
    given x: Int = [0, 1, 2]
    inc(x) => incSpec(x)
"#,
            "spec_gap",
        );
        let out = transpile(&ctx);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- verify law inc.spec incSpec (3 cases)"));
        assert!(lean.contains("theorem inc_eq_incSpec : ∀ (x : Int), inc x = incSpec x := by"));
        assert!(lean.contains("change (x + 1) = ((x + 2) - 1)"));
        assert!(lean.contains("omega"));
        assert!(!lean.contains(
            "-- universal theorem inc_eq_incSpec omitted: sampled law shape is not auto-proved yet"
        ));
        assert!(lean.contains("theorem inc_eq_incSpec_checked_domain :"));
    }
2564
    /// Auto mode proves a guarded canonical spec law with `simpa` on both
    /// definitions, without case-splitting on the sampled domain.
    #[test]
    fn transpile_auto_proves_guarded_canonical_spec_law_in_auto_mode() {
        let ctx = ctx_from_source(
            r#"
module GuardedSpecGap
    intent =
        "guarded canonical spec law"

fn clampNonNegative(x: Int) -> Int
    match x < 0
        true -> 0
        false -> x

fn clampNonNegativeSpec(x: Int) -> Int
    match x < 0
        true -> 0
        false -> x

verify clampNonNegative law clampNonNegativeSpec
    given x: Int = [-2, -1, 0, 1, 2]
    when x >= 0
    clampNonNegative(x) => clampNonNegativeSpec(x)
"#,
            "guarded_spec_gap",
        );
        let out = transpile(&ctx);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- when (x >= 0)"));
        assert!(lean.contains(
            "theorem clampNonNegative_eq_clampNonNegativeSpec : ∀ (x : Int), x = (-2) ∨ x = (-1) ∨ x = 0 ∨ x = 1 ∨ x = 2 -> (x >= 0) = true -> clampNonNegative x = clampNonNegativeSpec x := by"
        ));
        assert!(lean.contains("intro x h_x h_when"));
        assert!(lean.contains("simpa [clampNonNegative, clampNonNegativeSpec]"));
        assert!(!lean.contains(
            "-- universal theorem clampNonNegative_eq_clampNonNegativeSpec omitted: sampled law shape is not auto-proved yet"
        ));
        assert!(!lean.contains("cases h_x"));
    }

    /// Nonlinear bodies that simp can normalize (e.g. `+ 0`) are proved
    /// via `simp [lhs, rhs]` rather than omitted.
    #[test]
    fn transpile_auto_proves_simp_normalized_canonical_spec_law_in_auto_mode() {
        let ctx = ctx_from_source(
            r#"
module SpecGapNonlinear
    intent =
        "nonlinear canonical spec law"

fn square(x: Int) -> Int
    x * x

fn squareSpec(x: Int) -> Int
    x * x + 0

verify square law squareSpec
    given x: Int = [0, 1, 2]
    square(x) => squareSpec(x)
"#,
            "spec_gap_nonlinear",
        );
        let out = transpile(&ctx);
        let lean = generated_lean_file(&out);

        assert!(lean.contains("-- verify law square.spec squareSpec (3 cases)"));
        assert!(
            lean.contains(
                "theorem square_eq_squareSpec : ∀ (x : Int), square x = squareSpec x := by"
            )
        );
        assert!(lean.contains("simp [square, squareSpec]"));
        assert!(!lean.contains(
            "-- universal theorem square_eq_squareSpec omitted: sampled law shape is not auto-proved yet"
        ));
        assert!(lean.contains("theorem square_eq_squareSpec_checked_domain :"));
        assert!(lean.contains("theorem square_eq_squareSpec_sample_1 :"));
    }

    /// A law whose LHS and RHS are the same variable is closed by `rfl`.
    #[test]
    fn transpile_auto_proves_reflexive_law_with_rfl() {
        let mut ctx = empty_ctx();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "idLaw".to_string(),
            line: 1,
            cases: vec![(
                Expr::Literal(Literal::Int(1)),
                Expr::Literal(Literal::Int(1)),
            )],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "reflexive".to_string(),
                givens: vec![VerifyGiven {
                    name: "x".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::IntRange { start: 1, end: 2 },
                }],
                when: None,
                lhs: Expr::Ident("x".to_string()),
                rhs: Expr::Ident("x".to_string()),
                sample_guards: vec![],
            })),
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem idLaw_law_reflexive : ∀ (x : Int), x = x := by"));
        assert!(lean.contains("  intro x"));
        assert!(lean.contains("  rfl"));
    }
2675
    /// `add a 0 = a` (identity law on an Int addition wrapper) is
    /// auto-proved with `simp [add]`.
    #[test]
    fn transpile_auto_proves_identity_law_for_int_add_wrapper() {
        let mut ctx = empty_ctx_with_verify_law();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 10,
            // Single sampled case: add(1, 0) = 1.
            cases: vec![(
                Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![
                        Expr::Literal(Literal::Int(1)),
                        Expr::Literal(Literal::Int(0)),
                    ],
                ),
                Expr::Literal(Literal::Int(1)),
            )],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "identityZero".to_string(),
                givens: vec![VerifyGiven {
                    name: "a".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![
                        Expr::Literal(Literal::Int(0)),
                        Expr::Literal(Literal::Int(1)),
                    ]),
                }],
                when: None,
                lhs: Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![Expr::Ident("a".to_string()), Expr::Literal(Literal::Int(0))],
                ),
                rhs: Expr::Ident("a".to_string()),
                sample_guards: vec![],
            })),
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem add_law_identityZero : ∀ (a : Int), add a 0 = a := by"));
        assert!(lean.contains("  intro a"));
        assert!(lean.contains("  simp [add]"));
    }

    /// `add (add a b) c = add a (add b c)` (associativity of the wrapper)
    /// is auto-proved with `simp [add, Int.add_assoc]`.
    #[test]
    fn transpile_auto_proves_associative_law_for_int_add_wrapper() {
        let mut ctx = empty_ctx_with_verify_law();
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "add".to_string(),
            line: 20,
            // Single sampled case: add(add(1, 2), 3) = add(1, add(2, 3)).
            cases: vec![(
                Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![
                        Expr::FnCall(
                            Box::new(Expr::Ident("add".to_string())),
                            vec![
                                Expr::Literal(Literal::Int(1)),
                                Expr::Literal(Literal::Int(2)),
                            ],
                        ),
                        Expr::Literal(Literal::Int(3)),
                    ],
                ),
                Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![
                        Expr::Literal(Literal::Int(1)),
                        Expr::FnCall(
                            Box::new(Expr::Ident("add".to_string())),
                            vec![
                                Expr::Literal(Literal::Int(2)),
                                Expr::Literal(Literal::Int(3)),
                            ],
                        ),
                    ],
                ),
            )],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "associative".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(1))]),
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(2))]),
                    },
                    VerifyGiven {
                        name: "c".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(3))]),
                    },
                ],
                when: None,
                lhs: Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![
                        Expr::FnCall(
                            Box::new(Expr::Ident("add".to_string())),
                            vec![Expr::Ident("a".to_string()), Expr::Ident("b".to_string())],
                        ),
                        Expr::Ident("c".to_string()),
                    ],
                ),
                rhs: Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![
                        Expr::Ident("a".to_string()),
                        Expr::FnCall(
                            Box::new(Expr::Ident("add".to_string())),
                            vec![Expr::Ident("b".to_string()), Expr::Ident("c".to_string())],
                        ),
                    ],
                ),
                sample_guards: vec![],
            })),
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains(
            "theorem add_law_associative : ∀ (a : Int) (b : Int) (c : Int), add (add a b) c = add a (add b c) := by"
        ));
        assert!(lean.contains("  intro a b c"));
        assert!(lean.contains("  simp [add, Int.add_assoc]"));
    }
2811
    /// Subtraction-wrapper laws: `sub a 0 = a` is proved with
    /// `simp [sub]`, and anti-commutativity `sub a b = -(sub b a)` is
    /// proved via `Int.neg_sub`.
    #[test]
    fn transpile_auto_proves_sub_laws() {
        let mut ctx = empty_ctx();
        let sub = FnDef {
            name: "sub".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::BinOp(
                BinOp::Sub,
                Box::new(Expr::Ident("a".to_string())),
                Box::new(Expr::Ident("b".to_string())),
            ))),
            resolution: None,
        };
        ctx.fn_defs.push(sub.clone());
        ctx.items.push(TopLevel::FnDef(sub));

        // Law 1: right identity — sub(a, 0) = a, sampled at a = 2.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "sub".to_string(),
            line: 10,
            cases: vec![(
                Expr::FnCall(
                    Box::new(Expr::Ident("sub".to_string())),
                    vec![
                        Expr::Literal(Literal::Int(2)),
                        Expr::Literal(Literal::Int(0)),
                    ],
                ),
                Expr::Literal(Literal::Int(2)),
            )],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "rightIdentity".to_string(),
                givens: vec![VerifyGiven {
                    name: "a".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(2))]),
                }],
                when: None,
                lhs: Expr::FnCall(
                    Box::new(Expr::Ident("sub".to_string())),
                    vec![Expr::Ident("a".to_string()), Expr::Literal(Literal::Int(0))],
                ),
                rhs: Expr::Ident("a".to_string()),
                sample_guards: vec![],
            })),
        }));
        // Law 2: anti-commutativity — sub(a, b) = 0 - sub(b, a),
        // sampled at (a, b) = (2, 1).
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "sub".to_string(),
            line: 20,
            cases: vec![(
                Expr::FnCall(
                    Box::new(Expr::Ident("sub".to_string())),
                    vec![
                        Expr::Literal(Literal::Int(2)),
                        Expr::Literal(Literal::Int(1)),
                    ],
                ),
                Expr::BinOp(
                    BinOp::Sub,
                    Box::new(Expr::Literal(Literal::Int(0))),
                    Box::new(Expr::FnCall(
                        Box::new(Expr::Ident("sub".to_string())),
                        vec![
                            Expr::Literal(Literal::Int(1)),
                            Expr::Literal(Literal::Int(2)),
                        ],
                    )),
                ),
            )],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "antiCommutative".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "a".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(2))]),
                    },
                    VerifyGiven {
                        name: "b".to_string(),
                        type_name: "Int".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(1))]),
                    },
                ],
                when: None,
                lhs: Expr::FnCall(
                    Box::new(Expr::Ident("sub".to_string())),
                    vec![Expr::Ident("a".to_string()), Expr::Ident("b".to_string())],
                ),
                rhs: Expr::BinOp(
                    BinOp::Sub,
                    Box::new(Expr::Literal(Literal::Int(0))),
                    Box::new(Expr::FnCall(
                        Box::new(Expr::Ident("sub".to_string())),
                        vec![Expr::Ident("b".to_string()), Expr::Ident("a".to_string())],
                    )),
                ),
                sample_guards: vec![],
            })),
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(lean.contains("theorem sub_law_rightIdentity : ∀ (a : Int), sub a 0 = a := by"));
        assert!(lean.contains("  simp [sub]"));
        assert!(lean.contains(
            "theorem sub_law_antiCommutative : ∀ (a : Int) (b : Int), sub a b = (-sub b a) := by"
        ));
        assert!(lean.contains("  simpa [sub] using (Int.neg_sub b a).symm"));
    }
2931
    /// A law equating a unary wrapper with a call to another pure function
    /// (`addOne n = add n 1`) is auto-proved by unfolding both definitions
    /// with `simp [addOne, add]`.
    #[test]
    fn transpile_auto_proves_unary_wrapper_equivalence_law() {
        let mut ctx = empty_ctx();
        let add = FnDef {
            name: "add".to_string(),
            line: 1,
            params: vec![
                ("a".to_string(), "Int".to_string()),
                ("b".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::BinOp(
                BinOp::Add,
                Box::new(Expr::Ident("a".to_string())),
                Box::new(Expr::Ident("b".to_string())),
            ))),
            resolution: None,
        };
        let add_one = FnDef {
            name: "addOne".to_string(),
            line: 2,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::BinOp(
                BinOp::Add,
                Box::new(Expr::Ident("n".to_string())),
                Box::new(Expr::Literal(Literal::Int(1))),
            ))),
            resolution: None,
        };
        ctx.fn_defs.push(add.clone());
        ctx.fn_defs.push(add_one.clone());
        ctx.items.push(TopLevel::FnDef(add));
        ctx.items.push(TopLevel::FnDef(add_one));
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "addOne".to_string(),
            line: 3,
            // Sampled case: addOne(2) = add(2, 1).
            cases: vec![(
                Expr::FnCall(
                    Box::new(Expr::Ident("addOne".to_string())),
                    vec![Expr::Literal(Literal::Int(2))],
                ),
                Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![
                        Expr::Literal(Literal::Int(2)),
                        Expr::Literal(Literal::Int(1)),
                    ],
                ),
            )],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "identityViaAdd".to_string(),
                givens: vec![VerifyGiven {
                    name: "n".to_string(),
                    type_name: "Int".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(2))]),
                }],
                when: None,
                lhs: Expr::FnCall(
                    Box::new(Expr::Ident("addOne".to_string())),
                    vec![Expr::Ident("n".to_string())],
                ),
                rhs: Expr::FnCall(
                    Box::new(Expr::Ident("add".to_string())),
                    vec![Expr::Ident("n".to_string()), Expr::Literal(Literal::Int(1))],
                ),
                sample_guards: vec![],
            })),
        }));
        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        assert!(
            lean.contains(
                "theorem addOne_law_identityViaAdd : ∀ (n : Int), addOne n = add n 1 := by"
            )
        );
        assert!(lean.contains("  simp [addOne, add]"));
    }
3018
3019 #[test]
3020 fn transpile_auto_proves_direct_map_set_laws() {
3021 let mut ctx = empty_ctx();
3022
3023 let map_set = |m: Expr, k: Expr, v: Expr| {
3024 Expr::FnCall(
3025 Box::new(Expr::Attr(
3026 Box::new(Expr::Ident("Map".to_string())),
3027 "set".to_string(),
3028 )),
3029 vec![m, k, v],
3030 )
3031 };
3032 let map_has = |m: Expr, k: Expr| {
3033 Expr::FnCall(
3034 Box::new(Expr::Attr(
3035 Box::new(Expr::Ident("Map".to_string())),
3036 "has".to_string(),
3037 )),
3038 vec![m, k],
3039 )
3040 };
3041 let map_get = |m: Expr, k: Expr| {
3042 Expr::FnCall(
3043 Box::new(Expr::Attr(
3044 Box::new(Expr::Ident("Map".to_string())),
3045 "get".to_string(),
3046 )),
3047 vec![m, k],
3048 )
3049 };
3050 let some = |v: Expr| {
3051 Expr::FnCall(
3052 Box::new(Expr::Attr(
3053 Box::new(Expr::Ident("Option".to_string())),
3054 "Some".to_string(),
3055 )),
3056 vec![v],
3057 )
3058 };
3059
3060 ctx.items.push(TopLevel::Verify(VerifyBlock {
3061 fn_name: "map".to_string(),
3062 line: 1,
3063 cases: vec![(
3064 map_has(
3065 map_set(
3066 Expr::Ident("m".to_string()),
3067 Expr::Ident("k".to_string()),
3068 Expr::Ident("v".to_string()),
3069 ),
3070 Expr::Ident("k".to_string()),
3071 ),
3072 Expr::Literal(Literal::Bool(true)),
3073 )],
3074 kind: VerifyKind::Law(Box::new(VerifyLaw {
3075 name: "setHasKey".to_string(),
3076 givens: vec![
3077 VerifyGiven {
3078 name: "m".to_string(),
3079 type_name: "Map<String, Int>".to_string(),
3080 domain: VerifyGivenDomain::Explicit(vec![Expr::FnCall(
3081 Box::new(Expr::Attr(
3082 Box::new(Expr::Ident("Map".to_string())),
3083 "empty".to_string(),
3084 )),
3085 vec![],
3086 )]),
3087 },
3088 VerifyGiven {
3089 name: "k".to_string(),
3090 type_name: "String".to_string(),
3091 domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Str(
3092 "a".to_string(),
3093 ))]),
3094 },
3095 VerifyGiven {
3096 name: "v".to_string(),
3097 type_name: "Int".to_string(),
3098 domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(1))]),
3099 },
3100 ],
3101 when: None,
3102 lhs: map_has(
3103 map_set(
3104 Expr::Ident("m".to_string()),
3105 Expr::Ident("k".to_string()),
3106 Expr::Ident("v".to_string()),
3107 ),
3108 Expr::Ident("k".to_string()),
3109 ),
3110 rhs: Expr::Literal(Literal::Bool(true)),
3111 sample_guards: vec![],
3112 })),
3113 }));
3114
3115 ctx.items.push(TopLevel::Verify(VerifyBlock {
3116 fn_name: "map".to_string(),
3117 line: 2,
3118 cases: vec![(
3119 map_get(
3120 map_set(
3121 Expr::Ident("m".to_string()),
3122 Expr::Ident("k".to_string()),
3123 Expr::Ident("v".to_string()),
3124 ),
3125 Expr::Ident("k".to_string()),
3126 ),
3127 some(Expr::Ident("v".to_string())),
3128 )],
3129 kind: VerifyKind::Law(Box::new(VerifyLaw {
3130 name: "setGetKey".to_string(),
3131 givens: vec![
3132 VerifyGiven {
3133 name: "m".to_string(),
3134 type_name: "Map<String, Int>".to_string(),
3135 domain: VerifyGivenDomain::Explicit(vec![Expr::FnCall(
3136 Box::new(Expr::Attr(
3137 Box::new(Expr::Ident("Map".to_string())),
3138 "empty".to_string(),
3139 )),
3140 vec![],
3141 )]),
3142 },
3143 VerifyGiven {
3144 name: "k".to_string(),
3145 type_name: "String".to_string(),
3146 domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Str(
3147 "a".to_string(),
3148 ))]),
3149 },
3150 VerifyGiven {
3151 name: "v".to_string(),
3152 type_name: "Int".to_string(),
3153 domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(1))]),
3154 },
3155 ],
3156 when: None,
3157 lhs: map_get(
3158 map_set(
3159 Expr::Ident("m".to_string()),
3160 Expr::Ident("k".to_string()),
3161 Expr::Ident("v".to_string()),
3162 ),
3163 Expr::Ident("k".to_string()),
3164 ),
3165 rhs: some(Expr::Ident("v".to_string())),
3166 sample_guards: vec![],
3167 })),
3168 }));
3169
3170 let out = transpile(&ctx);
3171 let lean = out
3172 .files
3173 .iter()
3174 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3175 .expect("expected generated Lean file");
3176 assert!(lean.contains("simpa using AverMap.has_set_self m k v"));
3177 assert!(lean.contains("simpa using AverMap.get_set_self m k v"));
3178 }
3179
    #[test]
    fn transpile_auto_proves_direct_recursive_sum_law_by_structural_induction() {
        // An involution law over a recursive sum type (mirror (mirror t) = t)
        // should be auto-proved by structural induction on the Tree argument,
        // emitting an `induction t with` proof with one branch per variant —
        // and must NOT fall back to the "omitted" comment.
        let ctx = ctx_from_source(
            r#"
module Mirror
  intent =
    "direct recursive sum induction probe"

type Tree
  Leaf(Int)
  Node(Tree, Tree)

fn mirror(t: Tree) -> Tree
  match t
    Tree.Leaf(v) -> Tree.Leaf(v)
    Tree.Node(left, right) -> Tree.Node(mirror(right), mirror(left))

verify mirror law involutive
  given t: Tree = [Tree.Leaf(1), Tree.Node(Tree.Leaf(1), Tree.Leaf(2))]
  mirror(mirror(t)) => t
"#,
            "mirror",
        );
        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);

        // Universal theorem plus the per-constructor induction branches.
        assert!(
            lean.contains(
                "theorem mirror_law_involutive : ∀ (t : Tree), mirror (mirror t) = t := by"
            )
        );
        assert!(lean.contains("  induction t with"));
        assert!(lean.contains("  | leaf f0 => simp [mirror]"));
        assert!(lean.contains("  | node f0 f1 ih0 ih1 => simp_all [mirror]"));
        // The sampled-fallback marker must be absent.
        assert!(!lean.contains(
            "-- universal theorem mirror_law_involutive omitted: sampled law shape is not auto-proved yet"
        ));
    }
3218
    #[test]
    fn transpile_auto_proves_map_update_laws() {
        // Laws over a user function (incCount) that updates a Map with the
        // get-then-match-then-set pattern. Both laws should be auto-proved
        // with a case split on the original lookup plus the AverMap
        // set/get lemmas.
        let mut ctx = empty_ctx();

        // Expression builders for the Map/Option builtin calls used below.
        let map_get = |m: Expr, k: Expr| {
            Expr::FnCall(
                Box::new(Expr::Attr(
                    Box::new(Expr::Ident("Map".to_string())),
                    "get".to_string(),
                )),
                vec![m, k],
            )
        };
        let map_set = |m: Expr, k: Expr, v: Expr| {
            Expr::FnCall(
                Box::new(Expr::Attr(
                    Box::new(Expr::Ident("Map".to_string())),
                    "set".to_string(),
                )),
                vec![m, k, v],
            )
        };
        let map_has = |m: Expr, k: Expr| {
            Expr::FnCall(
                Box::new(Expr::Attr(
                    Box::new(Expr::Ident("Map".to_string())),
                    "has".to_string(),
                )),
                vec![m, k],
            )
        };
        let option_some = |v: Expr| {
            Expr::FnCall(
                Box::new(Expr::Attr(
                    Box::new(Expr::Ident("Option".to_string())),
                    "Some".to_string(),
                )),
                vec![v],
            )
        };
        let option_with_default = |opt: Expr, def: Expr| {
            Expr::FnCall(
                Box::new(Expr::Attr(
                    Box::new(Expr::Ident("Option".to_string())),
                    "withDefault".to_string(),
                )),
                vec![opt, def],
            )
        };

        // addOne(n) = n + 1 — referenced by the second law's rhs.
        let add_one = FnDef {
            name: "addOne".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::BinOp(
                BinOp::Add,
                Box::new(Expr::Ident("n".to_string())),
                Box::new(Expr::Literal(Literal::Int(1))),
            ))),
            resolution: None,
        };
        ctx.fn_defs.push(add_one.clone());
        ctx.items.push(TopLevel::FnDef(add_one));

        // incCount(counts, word): bump word's count, inserting 1 if absent.
        // Body: bind `current = get counts word`, then match Some/None.
        let inc_count = FnDef {
            name: "incCount".to_string(),
            line: 2,
            params: vec![
                ("counts".to_string(), "Map<String, Int>".to_string()),
                ("word".to_string(), "String".to_string()),
            ],
            return_type: "Map<String, Int>".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::Block(vec![
                Stmt::Binding(
                    "current".to_string(),
                    None,
                    map_get(
                        Expr::Ident("counts".to_string()),
                        Expr::Ident("word".to_string()),
                    ),
                ),
                Stmt::Expr(Expr::Match {
                    subject: Box::new(Expr::Ident("current".to_string())),
                    arms: vec![
                        MatchArm {
                            pattern: Pattern::Constructor(
                                "Option.Some".to_string(),
                                vec!["n".to_string()],
                            ),
                            body: Box::new(map_set(
                                Expr::Ident("counts".to_string()),
                                Expr::Ident("word".to_string()),
                                Expr::BinOp(
                                    BinOp::Add,
                                    Box::new(Expr::Ident("n".to_string())),
                                    Box::new(Expr::Literal(Literal::Int(1))),
                                ),
                            )),
                        },
                        MatchArm {
                            pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
                            body: Box::new(map_set(
                                Expr::Ident("counts".to_string()),
                                Expr::Ident("word".to_string()),
                                Expr::Literal(Literal::Int(1)),
                            )),
                        },
                    ],
                    line: 2,
                }),
            ])),
            resolution: None,
        };
        ctx.fn_defs.push(inc_count.clone());
        ctx.items.push(TopLevel::FnDef(inc_count));

        // Law 1 (keyPresent): has (incCount counts word) word = true.
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "incCount".to_string(),
            line: 10,
            cases: vec![(
                map_has(
                    Expr::FnCall(
                        Box::new(Expr::Ident("incCount".to_string())),
                        vec![
                            Expr::Ident("counts".to_string()),
                            Expr::Ident("word".to_string()),
                        ],
                    ),
                    Expr::Ident("word".to_string()),
                ),
                Expr::Literal(Literal::Bool(true)),
            )],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "keyPresent".to_string(),
                givens: vec![
                    VerifyGiven {
                        name: "counts".to_string(),
                        type_name: "Map<String, Int>".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::FnCall(
                            Box::new(Expr::Attr(
                                Box::new(Expr::Ident("Map".to_string())),
                                "empty".to_string(),
                            )),
                            vec![],
                        )]),
                    },
                    VerifyGiven {
                        name: "word".to_string(),
                        type_name: "String".to_string(),
                        domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Str(
                            "a".to_string(),
                        ))]),
                    },
                ],
                when: None,
                lhs: map_has(
                    Expr::FnCall(
                        Box::new(Expr::Ident("incCount".to_string())),
                        vec![
                            Expr::Ident("counts".to_string()),
                            Expr::Ident("word".to_string()),
                        ],
                    ),
                    Expr::Ident("word".to_string()),
                ),
                rhs: Expr::Literal(Literal::Bool(true)),
                sample_guards: vec![],
            })),
        }));

        // Law 2 (existingKeyIncrements): for a concrete key "a",
        //   get (incCount counts "a") "a"
        //     = Some (addOne (withDefault (get counts "a") 0)).
        ctx.items.push(TopLevel::Verify(VerifyBlock {
            fn_name: "incCount".to_string(),
            line: 20,
            cases: vec![(
                map_get(
                    Expr::FnCall(
                        Box::new(Expr::Ident("incCount".to_string())),
                        vec![
                            Expr::Ident("counts".to_string()),
                            Expr::Literal(Literal::Str("a".to_string())),
                        ],
                    ),
                    Expr::Literal(Literal::Str("a".to_string())),
                ),
                option_some(Expr::FnCall(
                    Box::new(Expr::Ident("addOne".to_string())),
                    vec![option_with_default(
                        map_get(
                            Expr::Ident("counts".to_string()),
                            Expr::Literal(Literal::Str("a".to_string())),
                        ),
                        Expr::Literal(Literal::Int(0)),
                    )],
                )),
            )],
            kind: VerifyKind::Law(Box::new(VerifyLaw {
                name: "existingKeyIncrements".to_string(),
                givens: vec![VerifyGiven {
                    name: "counts".to_string(),
                    type_name: "Map<String, Int>".to_string(),
                    domain: VerifyGivenDomain::Explicit(vec![Expr::FnCall(
                        Box::new(Expr::Attr(
                            Box::new(Expr::Ident("Map".to_string())),
                            "empty".to_string(),
                        )),
                        vec![],
                    )]),
                }],
                when: None,
                lhs: map_get(
                    Expr::FnCall(
                        Box::new(Expr::Ident("incCount".to_string())),
                        vec![
                            Expr::Ident("counts".to_string()),
                            Expr::Literal(Literal::Str("a".to_string())),
                        ],
                    ),
                    Expr::Literal(Literal::Str("a".to_string())),
                ),
                rhs: option_some(Expr::FnCall(
                    Box::new(Expr::Ident("addOne".to_string())),
                    vec![option_with_default(
                        map_get(
                            Expr::Ident("counts".to_string()),
                            Expr::Literal(Literal::Str("a".to_string())),
                        ),
                        Expr::Literal(Literal::Int(0)),
                    )],
                )),
                sample_guards: vec![],
            })),
        }));

        let out = transpile(&ctx);
        let lean = out
            .files
            .iter()
            .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
            .expect("expected generated Lean file");
        // Both proofs case-split on the original lookup (`cases h : ...`).
        assert!(
            lean.contains("cases h : AverMap.get counts word <;> simp [AverMap.has_set_self]"),
            "expected keyPresent auto-proof with has_set_self"
        );
        assert!(
            lean.contains("cases h : AverMap.get counts \"a\" <;> simp [AverMap.get_set_self, addOne, incCount]"),
            "expected existingKeyIncrements auto-proof with get_set_self"
        );
    }
3472
3473 #[test]
3474 fn transpile_parenthesizes_negative_int_call_args_in_law_samples() {
3475 let mut ctx = empty_ctx();
3476 let add = FnDef {
3477 name: "add".to_string(),
3478 line: 1,
3479 params: vec![
3480 ("a".to_string(), "Int".to_string()),
3481 ("b".to_string(), "Int".to_string()),
3482 ],
3483 return_type: "Int".to_string(),
3484 effects: vec![],
3485 desc: None,
3486 body: Rc::new(FnBody::from_expr(Expr::BinOp(
3487 BinOp::Add,
3488 Box::new(Expr::Ident("a".to_string())),
3489 Box::new(Expr::Ident("b".to_string())),
3490 ))),
3491 resolution: None,
3492 };
3493 ctx.fn_defs.push(add.clone());
3494 ctx.items.push(TopLevel::FnDef(add));
3495 ctx.items.push(TopLevel::Verify(VerifyBlock {
3496 fn_name: "add".to_string(),
3497 line: 1,
3498 cases: vec![(
3499 Expr::FnCall(
3500 Box::new(Expr::Ident("add".to_string())),
3501 vec![
3502 Expr::Literal(Literal::Int(-2)),
3503 Expr::Literal(Literal::Int(-1)),
3504 ],
3505 ),
3506 Expr::FnCall(
3507 Box::new(Expr::Ident("add".to_string())),
3508 vec![
3509 Expr::Literal(Literal::Int(-1)),
3510 Expr::Literal(Literal::Int(-2)),
3511 ],
3512 ),
3513 )],
3514 kind: VerifyKind::Law(Box::new(VerifyLaw {
3515 name: "commutative".to_string(),
3516 givens: vec![
3517 VerifyGiven {
3518 name: "a".to_string(),
3519 type_name: "Int".to_string(),
3520 domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(-2))]),
3521 },
3522 VerifyGiven {
3523 name: "b".to_string(),
3524 type_name: "Int".to_string(),
3525 domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(-1))]),
3526 },
3527 ],
3528 when: None,
3529 lhs: Expr::FnCall(
3530 Box::new(Expr::Ident("add".to_string())),
3531 vec![Expr::Ident("a".to_string()), Expr::Ident("b".to_string())],
3532 ),
3533 rhs: Expr::FnCall(
3534 Box::new(Expr::Ident("add".to_string())),
3535 vec![Expr::Ident("b".to_string()), Expr::Ident("a".to_string())],
3536 ),
3537 sample_guards: vec![],
3538 })),
3539 }));
3540
3541 let out = transpile(&ctx);
3542 let lean = out
3543 .files
3544 .iter()
3545 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3546 .expect("expected generated Lean file");
3547 assert!(lean.contains(
3548 "theorem add_law_commutative_sample_1 : add (-2) (-1) = add (-1) (-2) := by native_decide"
3549 ));
3550 }
3551
3552 #[test]
3553 fn verify_law_numbering_is_scoped_per_law_name() {
3554 let mut ctx = empty_ctx();
3555 let f = FnDef {
3556 name: "f".to_string(),
3557 line: 1,
3558 params: vec![("x".to_string(), "Int".to_string())],
3559 return_type: "Int".to_string(),
3560 effects: vec![],
3561 desc: None,
3562 body: Rc::new(FnBody::from_expr(Expr::Ident("x".to_string()))),
3563 resolution: None,
3564 };
3565 ctx.fn_defs.push(f.clone());
3566 ctx.items.push(TopLevel::FnDef(f));
3567 ctx.items.push(TopLevel::Verify(VerifyBlock {
3568 fn_name: "f".to_string(),
3569 line: 1,
3570 cases: vec![(
3571 Expr::Literal(Literal::Int(1)),
3572 Expr::Literal(Literal::Int(1)),
3573 )],
3574 kind: VerifyKind::Cases,
3575 }));
3576 ctx.items.push(TopLevel::Verify(VerifyBlock {
3577 fn_name: "f".to_string(),
3578 line: 2,
3579 cases: vec![(
3580 Expr::Literal(Literal::Int(2)),
3581 Expr::Literal(Literal::Int(2)),
3582 )],
3583 kind: VerifyKind::Law(Box::new(VerifyLaw {
3584 name: "identity".to_string(),
3585 givens: vec![VerifyGiven {
3586 name: "x".to_string(),
3587 type_name: "Int".to_string(),
3588 domain: VerifyGivenDomain::Explicit(vec![Expr::Literal(Literal::Int(2))]),
3589 }],
3590 when: None,
3591 lhs: Expr::Ident("x".to_string()),
3592 rhs: Expr::Ident("x".to_string()),
3593 sample_guards: vec![],
3594 })),
3595 }));
3596 let out = transpile_with_verify_mode(&ctx, VerifyEmitMode::TheoremSkeleton);
3597 let lean = out
3598 .files
3599 .iter()
3600 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3601 .expect("expected generated Lean file");
3602 assert!(lean.contains("theorem f_verify_1 : 1 = 1 := by"));
3603 assert!(lean.contains("theorem f_law_identity : ∀ (x : Int), x = x := by"));
3604 assert!(lean.contains("theorem f_law_identity_sample_1 : 2 = 2 := by"));
3605 assert!(!lean.contains("theorem f_law_identity_sample_2 : 2 = 2 := by"));
3606 }
3607
3608 #[test]
3609 fn proof_mode_accepts_single_int_countdown_recursion() {
3610 let mut ctx = empty_ctx();
3611 let down = FnDef {
3612 name: "down".to_string(),
3613 line: 1,
3614 params: vec![("n".to_string(), "Int".to_string())],
3615 return_type: "Int".to_string(),
3616 effects: vec![],
3617 desc: None,
3618 body: Rc::new(FnBody::from_expr(Expr::Match {
3619 subject: Box::new(Expr::Ident("n".to_string())),
3620 arms: vec![
3621 MatchArm {
3622 pattern: Pattern::Literal(Literal::Int(0)),
3623 body: Box::new(Expr::Literal(Literal::Int(0))),
3624 },
3625 MatchArm {
3626 pattern: Pattern::Wildcard,
3627 body: Box::new(Expr::TailCall(Box::new((
3628 "down".to_string(),
3629 vec![Expr::BinOp(
3630 BinOp::Sub,
3631 Box::new(Expr::Ident("n".to_string())),
3632 Box::new(Expr::Literal(Literal::Int(1))),
3633 )],
3634 )))),
3635 },
3636 ],
3637 line: 1,
3638 })),
3639 resolution: None,
3640 };
3641 ctx.items.push(TopLevel::FnDef(down.clone()));
3642 ctx.fn_defs.push(down);
3643
3644 let issues = proof_mode_issues(&ctx);
3645 assert!(
3646 issues.is_empty(),
3647 "expected Int countdown recursion to be accepted, got: {:?}",
3648 issues
3649 );
3650
3651 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3652 let lean = out
3653 .files
3654 .iter()
3655 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3656 .expect("expected generated Lean file");
3657 assert!(lean.contains("def down__fuel"));
3658 assert!(lean.contains("def down (n : Int) : Int :="));
3659 assert!(lean.contains("down__fuel ((Int.natAbs n) + 1) n"));
3660 }
3661
3662 #[test]
3663 fn proof_mode_accepts_single_int_countdown_on_nonfirst_param() {
3664 let mut ctx = empty_ctx();
3665 let repeat_like = FnDef {
3666 name: "repeatLike".to_string(),
3667 line: 1,
3668 params: vec![
3669 ("char".to_string(), "String".to_string()),
3670 ("n".to_string(), "Int".to_string()),
3671 ],
3672 return_type: "List<String>".to_string(),
3673 effects: vec![],
3674 desc: None,
3675 body: Rc::new(FnBody::from_expr(Expr::Match {
3676 subject: Box::new(Expr::BinOp(
3677 BinOp::Lte,
3678 Box::new(Expr::Ident("n".to_string())),
3679 Box::new(Expr::Literal(Literal::Int(0))),
3680 )),
3681 arms: vec![
3682 MatchArm {
3683 pattern: Pattern::Literal(Literal::Bool(true)),
3684 body: Box::new(Expr::List(vec![])),
3685 },
3686 MatchArm {
3687 pattern: Pattern::Literal(Literal::Bool(false)),
3688 body: Box::new(Expr::TailCall(Box::new((
3689 "repeatLike".to_string(),
3690 vec![
3691 Expr::Ident("char".to_string()),
3692 Expr::BinOp(
3693 BinOp::Sub,
3694 Box::new(Expr::Ident("n".to_string())),
3695 Box::new(Expr::Literal(Literal::Int(1))),
3696 ),
3697 ],
3698 )))),
3699 },
3700 ],
3701 line: 1,
3702 })),
3703 resolution: None,
3704 };
3705 ctx.items.push(TopLevel::FnDef(repeat_like.clone()));
3706 ctx.fn_defs.push(repeat_like);
3707
3708 let issues = proof_mode_issues(&ctx);
3709 assert!(
3710 issues.is_empty(),
3711 "expected non-first Int countdown recursion to be accepted, got: {:?}",
3712 issues
3713 );
3714
3715 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3716 let lean = out
3717 .files
3718 .iter()
3719 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3720 .expect("expected generated Lean file");
3721 assert!(lean.contains("def repeatLike__fuel"));
3722 assert!(lean.contains("def repeatLike (char : String) (n : Int) : List String :="));
3723 assert!(lean.contains("repeatLike__fuel ((Int.natAbs n) + 1) char n"));
3724 }
3725
3726 #[test]
3727 fn proof_mode_accepts_negative_guarded_int_ascent() {
3728 let mut ctx = empty_ctx();
3729 let normalize = FnDef {
3730 name: "normalize".to_string(),
3731 line: 1,
3732 params: vec![("angle".to_string(), "Int".to_string())],
3733 return_type: "Int".to_string(),
3734 effects: vec![],
3735 desc: None,
3736 body: Rc::new(FnBody::from_expr(Expr::Match {
3737 subject: Box::new(Expr::BinOp(
3738 BinOp::Lt,
3739 Box::new(Expr::Ident("angle".to_string())),
3740 Box::new(Expr::Literal(Literal::Int(0))),
3741 )),
3742 arms: vec![
3743 MatchArm {
3744 pattern: Pattern::Literal(Literal::Bool(true)),
3745 body: Box::new(Expr::TailCall(Box::new((
3746 "normalize".to_string(),
3747 vec![Expr::BinOp(
3748 BinOp::Add,
3749 Box::new(Expr::Ident("angle".to_string())),
3750 Box::new(Expr::Literal(Literal::Int(360))),
3751 )],
3752 )))),
3753 },
3754 MatchArm {
3755 pattern: Pattern::Literal(Literal::Bool(false)),
3756 body: Box::new(Expr::Ident("angle".to_string())),
3757 },
3758 ],
3759 line: 1,
3760 })),
3761 resolution: None,
3762 };
3763 ctx.items.push(TopLevel::FnDef(normalize.clone()));
3764 ctx.fn_defs.push(normalize);
3765
3766 let issues = proof_mode_issues(&ctx);
3767 assert!(
3768 issues.is_empty(),
3769 "expected negative-guarded Int ascent recursion to be accepted, got: {:?}",
3770 issues
3771 );
3772
3773 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3774 let lean = out
3775 .files
3776 .iter()
3777 .find_map(|(name, content)| (name == "Verify_mode.lean").then_some(content))
3778 .expect("expected generated Lean file");
3779 assert!(lean.contains("def normalize__fuel"));
3780 assert!(lean.contains("normalize__fuel ((Int.natAbs angle) + 1) angle"));
3781 }
3782
3783 #[test]
3784 fn proof_mode_accepts_single_list_structural_recursion() {
3785 let mut ctx = empty_ctx();
3786 let len = FnDef {
3787 name: "len".to_string(),
3788 line: 1,
3789 params: vec![("xs".to_string(), "List<Int>".to_string())],
3790 return_type: "Int".to_string(),
3791 effects: vec![],
3792 desc: None,
3793 body: Rc::new(FnBody::from_expr(Expr::Match {
3794 subject: Box::new(Expr::Ident("xs".to_string())),
3795 arms: vec![
3796 MatchArm {
3797 pattern: Pattern::EmptyList,
3798 body: Box::new(Expr::Literal(Literal::Int(0))),
3799 },
3800 MatchArm {
3801 pattern: Pattern::Cons("h".to_string(), "t".to_string()),
3802 body: Box::new(Expr::TailCall(Box::new((
3803 "len".to_string(),
3804 vec![Expr::Ident("t".to_string())],
3805 )))),
3806 },
3807 ],
3808 line: 1,
3809 })),
3810 resolution: None,
3811 };
3812 ctx.items.push(TopLevel::FnDef(len.clone()));
3813 ctx.fn_defs.push(len);
3814
3815 let issues = proof_mode_issues(&ctx);
3816 assert!(
3817 issues.is_empty(),
3818 "expected List structural recursion to be accepted, got: {:?}",
3819 issues
3820 );
3821 }
3822
3823 #[test]
3824 fn proof_mode_accepts_single_list_structural_recursion_on_nonfirst_param() {
3825 let mut ctx = empty_ctx();
3826 let len_from = FnDef {
3827 name: "lenFrom".to_string(),
3828 line: 1,
3829 params: vec![
3830 ("count".to_string(), "Int".to_string()),
3831 ("xs".to_string(), "List<Int>".to_string()),
3832 ],
3833 return_type: "Int".to_string(),
3834 effects: vec![],
3835 desc: None,
3836 body: Rc::new(FnBody::from_expr(Expr::Match {
3837 subject: Box::new(Expr::Ident("xs".to_string())),
3838 arms: vec![
3839 MatchArm {
3840 pattern: Pattern::EmptyList,
3841 body: Box::new(Expr::Ident("count".to_string())),
3842 },
3843 MatchArm {
3844 pattern: Pattern::Cons("h".to_string(), "t".to_string()),
3845 body: Box::new(Expr::TailCall(Box::new((
3846 "lenFrom".to_string(),
3847 vec![
3848 Expr::BinOp(
3849 BinOp::Add,
3850 Box::new(Expr::Ident("count".to_string())),
3851 Box::new(Expr::Literal(Literal::Int(1))),
3852 ),
3853 Expr::Ident("t".to_string()),
3854 ],
3855 )))),
3856 },
3857 ],
3858 line: 1,
3859 })),
3860 resolution: None,
3861 };
3862 ctx.items.push(TopLevel::FnDef(len_from.clone()));
3863 ctx.fn_defs.push(len_from);
3864
3865 let issues = proof_mode_issues(&ctx);
3866 assert!(
3867 issues.is_empty(),
3868 "expected non-first List structural recursion to be accepted, got: {:?}",
3869 issues
3870 );
3871
3872 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3873 let lean = generated_lean_file(&out);
3874 assert!(lean.contains("termination_by xs.length"));
3875 assert!(!lean.contains("partial def lenFrom"));
3876 }
3877
3878 #[test]
3879 fn proof_mode_accepts_single_string_pos_advance_recursion() {
3880 let mut ctx = empty_ctx();
3881 let skip_ws = FnDef {
3882 name: "skipWs".to_string(),
3883 line: 1,
3884 params: vec![
3885 ("s".to_string(), "String".to_string()),
3886 ("pos".to_string(), "Int".to_string()),
3887 ],
3888 return_type: "Int".to_string(),
3889 effects: vec![],
3890 desc: None,
3891 body: Rc::new(FnBody::from_expr(Expr::Match {
3892 subject: Box::new(Expr::FnCall(
3893 Box::new(Expr::Attr(
3894 Box::new(Expr::Ident("String".to_string())),
3895 "charAt".to_string(),
3896 )),
3897 vec![Expr::Ident("s".to_string()), Expr::Ident("pos".to_string())],
3898 )),
3899 arms: vec![
3900 MatchArm {
3901 pattern: Pattern::Constructor("Option.None".to_string(), vec![]),
3902 body: Box::new(Expr::Ident("pos".to_string())),
3903 },
3904 MatchArm {
3905 pattern: Pattern::Wildcard,
3906 body: Box::new(Expr::TailCall(Box::new((
3907 "skipWs".to_string(),
3908 vec![
3909 Expr::Ident("s".to_string()),
3910 Expr::BinOp(
3911 BinOp::Add,
3912 Box::new(Expr::Ident("pos".to_string())),
3913 Box::new(Expr::Literal(Literal::Int(1))),
3914 ),
3915 ],
3916 )))),
3917 },
3918 ],
3919 line: 1,
3920 })),
3921 resolution: None,
3922 };
3923 ctx.items.push(TopLevel::FnDef(skip_ws.clone()));
3924 ctx.fn_defs.push(skip_ws);
3925
3926 let issues = proof_mode_issues(&ctx);
3927 assert!(
3928 issues.is_empty(),
3929 "expected String+pos recursion to be accepted, got: {:?}",
3930 issues
3931 );
3932
3933 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
3934 let lean = generated_lean_file(&out);
3935 assert!(lean.contains("def skipWs__fuel"));
3936 assert!(!lean.contains("partial def skipWs"));
3937 }
3938
    #[test]
    fn proof_mode_accepts_mutual_int_countdown_recursion() {
        // Classic mutual recursion (even/odd) where each function tail-calls
        // the other with n - 1 and bottoms out at 0: accepted into the proof
        // subset and compiled via paired fuel helpers.
        let mut ctx = empty_ctx();
        // even(n): 0 -> true, _ -> odd(n - 1).
        let even = FnDef {
            name: "even".to_string(),
            line: 1,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Bool".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::Match {
                subject: Box::new(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: Box::new(Expr::Literal(Literal::Bool(true))),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: Box::new(Expr::TailCall(Box::new((
                            "odd".to_string(),
                            vec![Expr::BinOp(
                                BinOp::Sub,
                                Box::new(Expr::Ident("n".to_string())),
                                Box::new(Expr::Literal(Literal::Int(1))),
                            )],
                        )))),
                    },
                ],
                line: 1,
            })),
            resolution: None,
        };
        // odd(n): 0 -> false, _ -> even(n - 1).
        let odd = FnDef {
            name: "odd".to_string(),
            line: 2,
            params: vec![("n".to_string(), "Int".to_string())],
            return_type: "Bool".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::Match {
                subject: Box::new(Expr::Ident("n".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Int(0)),
                        body: Box::new(Expr::Literal(Literal::Bool(false))),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: Box::new(Expr::TailCall(Box::new((
                            "even".to_string(),
                            vec![Expr::BinOp(
                                BinOp::Sub,
                                Box::new(Expr::Ident("n".to_string())),
                                Box::new(Expr::Literal(Literal::Int(1))),
                            )],
                        )))),
                    },
                ],
                line: 2,
            })),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(even.clone()));
        ctx.items.push(TopLevel::FnDef(odd.clone()));
        ctx.fn_defs.push(even);
        ctx.fn_defs.push(odd);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual Int countdown recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        // Both functions get fuel helpers; the public wrapper seeds fuel
        // from natAbs of the countdown argument.
        assert!(lean.contains("def even__fuel"));
        assert!(lean.contains("def odd__fuel"));
        assert!(lean.contains("def even (n : Int) : Bool :="));
        assert!(lean.contains("even__fuel ((Int.natAbs n) + 1) n"));
    }
4021
    #[test]
    fn proof_mode_accepts_mutual_string_pos_recursion_with_ranked_same_edges() {
        // Mutual String+pos recursion where one edge (f -> g) keeps pos
        // UNCHANGED and the other (g -> f) advances it. Termination relies on
        // ranking the functions so same-pos edges always go to a
        // strictly-lower-ranked callee; both should compile to fuel helpers.
        let mut ctx = empty_ctx();
        // f(s, pos): pos >= 3 -> pos, else g(s, pos)  [same-pos edge].
        let f = FnDef {
            name: "f".to_string(),
            line: 1,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::Match {
                subject: Box::new(Expr::BinOp(
                    BinOp::Gte,
                    Box::new(Expr::Ident("pos".to_string())),
                    Box::new(Expr::Literal(Literal::Int(3))),
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        body: Box::new(Expr::Ident("pos".to_string())),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: Box::new(Expr::TailCall(Box::new((
                            "g".to_string(),
                            vec![Expr::Ident("s".to_string()), Expr::Ident("pos".to_string())],
                        )))),
                    },
                ],
                line: 1,
            })),
            resolution: None,
        };
        // g(s, pos): pos >= 3 -> pos, else f(s, pos + 1)  [advancing edge].
        let g = FnDef {
            name: "g".to_string(),
            line: 2,
            params: vec![
                ("s".to_string(), "String".to_string()),
                ("pos".to_string(), "Int".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::Match {
                subject: Box::new(Expr::BinOp(
                    BinOp::Gte,
                    Box::new(Expr::Ident("pos".to_string())),
                    Box::new(Expr::Literal(Literal::Int(3))),
                )),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::Literal(Literal::Bool(true)),
                        body: Box::new(Expr::Ident("pos".to_string())),
                    },
                    MatchArm {
                        pattern: Pattern::Wildcard,
                        body: Box::new(Expr::TailCall(Box::new((
                            "f".to_string(),
                            vec![
                                Expr::Ident("s".to_string()),
                                Expr::BinOp(
                                    BinOp::Add,
                                    Box::new(Expr::Ident("pos".to_string())),
                                    Box::new(Expr::Literal(Literal::Int(1))),
                                ),
                            ],
                        )))),
                    },
                ],
                line: 2,
            })),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(f.clone()));
        ctx.items.push(TopLevel::FnDef(g.clone()));
        ctx.fn_defs.push(f);
        ctx.fn_defs.push(g);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual String+pos recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("def f__fuel"));
        assert!(lean.contains("def g__fuel"));
        assert!(!lean.contains("partial def f"));
    }
4116
    #[test]
    fn proof_mode_accepts_mutual_ranked_sizeof_recursion() {
        // Mutual recursion where f forwards the list to g unchanged
        // (same-size edge, resolved by ranking) and g recurses into f with
        // the strictly-smaller tail. Should be emitted inside a `mutual`
        // block with fuel helpers, not as partial defs.
        let mut ctx = empty_ctx();
        // f(xs) = g("acc", xs) — same-size forwarding edge.
        let f = FnDef {
            name: "f".to_string(),
            line: 1,
            params: vec![("xs".to_string(), "List<Int>".to_string())],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::TailCall(Box::new((
                "g".to_string(),
                vec![
                    Expr::Literal(Literal::Str("acc".to_string())),
                    Expr::Ident("xs".to_string()),
                ],
            ))))),
            resolution: None,
        };
        // g(acc, xs): [] -> 0, h :: t -> f(t) — structurally decreasing edge.
        let g = FnDef {
            name: "g".to_string(),
            line: 2,
            params: vec![
                ("acc".to_string(), "String".to_string()),
                ("xs".to_string(), "List<Int>".to_string()),
            ],
            return_type: "Int".to_string(),
            effects: vec![],
            desc: None,
            body: Rc::new(FnBody::from_expr(Expr::Match {
                subject: Box::new(Expr::Ident("xs".to_string())),
                arms: vec![
                    MatchArm {
                        pattern: Pattern::EmptyList,
                        body: Box::new(Expr::Literal(Literal::Int(0))),
                    },
                    MatchArm {
                        pattern: Pattern::Cons("h".to_string(), "t".to_string()),
                        body: Box::new(Expr::TailCall(Box::new((
                            "f".to_string(),
                            vec![Expr::Ident("t".to_string())],
                        )))),
                    },
                ],
                line: 2,
            })),
            resolution: None,
        };
        ctx.items.push(TopLevel::FnDef(f.clone()));
        ctx.items.push(TopLevel::FnDef(g.clone()));
        ctx.fn_defs.push(f);
        ctx.fn_defs.push(g);

        let issues = proof_mode_issues(&ctx);
        assert!(
            issues.is_empty(),
            "expected mutual ranked-sizeOf recursion to be accepted, got: {:?}",
            issues
        );

        let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
        let lean = generated_lean_file(&out);
        assert!(lean.contains("mutual"));
        assert!(lean.contains("def f__fuel"));
        assert!(lean.contains("def g__fuel"));
        assert!(!lean.contains("partial def f"));
        assert!(!lean.contains("partial def g"));
    }
4185
4186 #[test]
4187 fn proof_mode_rejects_recursive_pure_functions() {
4188 let mut ctx = empty_ctx();
4189 let recursive_fn = FnDef {
4190 name: "loop".to_string(),
4191 line: 1,
4192 params: vec![("n".to_string(), "Int".to_string())],
4193 return_type: "Int".to_string(),
4194 effects: vec![],
4195 desc: None,
4196 body: Rc::new(FnBody::from_expr(Expr::FnCall(
4197 Box::new(Expr::Ident("loop".to_string())),
4198 vec![Expr::Ident("n".to_string())],
4199 ))),
4200 resolution: None,
4201 };
4202 ctx.items.push(TopLevel::FnDef(recursive_fn.clone()));
4203 ctx.fn_defs.push(recursive_fn);
4204
4205 let issues = proof_mode_issues(&ctx);
4206 assert!(
4207 issues.iter().any(|i| i.contains("outside proof subset")),
4208 "expected recursive function blocker, got: {:?}",
4209 issues
4210 );
4211 }
4212
4213 #[test]
4214 fn proof_mode_allows_recursive_types() {
4215 let mut ctx = empty_ctx();
4216 let recursive_type = TypeDef::Sum {
4217 name: "Node".to_string(),
4218 variants: vec![TypeVariant {
4219 name: "Cons".to_string(),
4220 fields: vec!["Node".to_string()],
4221 }],
4222 line: 1,
4223 };
4224 ctx.items.push(TopLevel::TypeDef(recursive_type.clone()));
4225 ctx.type_defs.push(recursive_type);
4226
4227 let issues = proof_mode_issues(&ctx);
4228 assert!(
4229 issues
4230 .iter()
4231 .all(|i| !i.contains("recursive types require unsafe DecidableEq shim")),
4232 "did not expect recursive type blocker, got: {:?}",
4233 issues
4234 );
4235 }
4236
4237 #[test]
4238 fn law_auto_example_exports_real_proof_artifacts() {
4239 let ctx = ctx_from_source(
4240 include_str!("../../../examples/formal/law_auto.av"),
4241 "law_auto",
4242 );
4243 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4244 let lean = generated_lean_file(&out);
4245
4246 assert!(lean.contains("theorem add_law_commutative :"));
4247 assert!(lean.contains("theorem id'_law_reflexive : ∀ (x : Int), x = x := by"));
4248 assert!(lean.contains("theorem incCount_law_keyPresent :"));
4249 assert!(lean.contains("AverMap.has_set_self"));
4250 assert!(lean.contains("theorem add_law_commutative_sample_1 :"));
4251 assert!(lean.contains(":= by native_decide"));
4252 }
4253
4254 #[test]
4255 fn json_example_stays_inside_proof_subset() {
4256 let ctx = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
4257 let issues = proof_mode_issues(&ctx);
4258 assert!(
4259 issues.is_empty(),
4260 "expected json example to stay inside proof subset, got: {:?}",
4261 issues
4262 );
4263 }
4264
4265 #[test]
4266 fn json_example_uses_total_defs_and_domain_guarded_laws_in_proof_mode() {
4267 let ctx = ctx_from_source(include_str!("../../../examples/data/json.av"), "json");
4268 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4269 let lean = generated_lean_file(&out);
4270
4271 assert!(!lean.contains("partial def"));
4272 assert!(lean.contains("def skipWs__fuel"));
4273 assert!(lean.contains("def parseValue__fuel"));
4274 assert!(lean.contains("def toString (j : Json) : String :="));
4275 assert!(
4276 lean.contains(
4277 "def averMeasureJsonEntries_String (items : List (String × Json)) : Nat :="
4278 )
4279 );
4280 assert!(lean.contains(
4281 "| .jsonObject x0 => (averMeasureJsonEntries_String (AverMap.entries x0)) + 1"
4282 ));
4283 assert!(lean.contains("-- when jsonRoundtripSafe j"));
4284 assert!(!lean.contains("-- hint: verify law '"));
4285 assert!(!lean.contains("private theorem toString_law_parseRoundtrip_aux"));
4286 assert!(
4287 lean.contains(
4288 "theorem toString_law_parseRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
4289 )
4290 );
4291 assert!(
4292 lean.contains(
4293 "jsonRoundtripSafe j = true -> fromString (toString j) = Except.ok j := by"
4294 )
4295 );
4296 assert!(
4297 lean.contains("theorem finishFloat_law_fromCanonicalFloat : ∀ (f : Float), f = 3.5 ∨")
4298 );
4299 assert!(lean.contains("theorem finishInt_law_fromCanonicalInt_checked_domain :"));
4300 assert!(lean.contains(
4301 "theorem toString_law_parseValueRoundtrip : ∀ (j : Json), j = Json.jsonNull ∨"
4302 ));
4303 assert!(lean.contains("theorem toString_law_parseRoundtrip_sample_1 :"));
4304 assert!(lean.contains(
4305 "example : fromString \"null\" = Except.ok Json.jsonNull := by native_decide"
4306 ));
4307 }
4308
4309 #[test]
4310 fn transpile_injects_builtin_network_types_and_int_list_get_support() {
4311 let ctx = ctx_from_source(
4312 r#"
4313fn firstOrMissing(xs: List<String>) -> Result<String, String>
4314 Option.toResult(List.get(xs, -1), "missing")
4315
4316fn defaultHeader() -> Header
4317 Header(name = "Content-Type", value = "application/json")
4318
4319fn mkResponse(body: String) -> HttpResponse
4320 HttpResponse(status = 200, body = body, headers = [defaultHeader()])
4321
4322fn requestPath(req: HttpRequest) -> String
4323 req.path
4324
4325fn connPort(conn: Tcp.Connection) -> Int
4326 conn.port
4327"#,
4328 "network_helpers",
4329 );
4330 let out = transpile(&ctx);
4331 let lean = generated_lean_file(&out);
4332
4333 assert!(lean.contains("structure Header where"));
4334 assert!(lean.contains("structure HttpResponse where"));
4335 assert!(lean.contains("structure HttpRequest where"));
4336 assert!(lean.contains("structure Tcp_Connection where"));
4337 assert!(lean.contains("port : Int"));
4338 assert!(lean.contains("def get (xs : List α) (i : Int) : Option α :="));
4339 assert!(lean.contains("AverList.get xs (-1)"));
4340 }
4341
4342 #[test]
4343 fn law_auto_example_has_no_sorry_in_proof_mode() {
4344 let ctx = ctx_from_source(
4345 include_str!("../../../examples/formal/law_auto.av"),
4346 "law_auto",
4347 );
4348 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4349 let lean = generated_lean_file(&out);
4350 assert!(
4351 !lean.contains("sorry"),
4352 "expected law_auto proof export to avoid sorry, got:\n{}",
4353 lean
4354 );
4355 }
4356
4357 #[test]
4358 fn map_example_has_no_sorry_in_proof_mode() {
4359 let ctx = ctx_from_source(include_str!("../../../examples/data/map.av"), "map");
4360 let issues = proof_mode_issues(&ctx);
4361 assert!(
4362 issues.is_empty(),
4363 "expected map example to stay inside proof subset, got: {:?}",
4364 issues
4365 );
4366
4367 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4368 let lean = generated_lean_file(&out);
4369 assert!(
4370 !lean.contains("sorry"),
4371 "expected map proof export to avoid sorry, got:\n{}",
4372 lean
4373 );
4374 assert!(lean.contains("theorem incCount_law_trackedCountStepsByOne :"));
4375 assert!(lean.contains(
4376 "-- universal theorem countWords_law_presenceMatchesContains omitted: sampled law shape is not auto-proved yet"
4377 ));
4378 assert!(lean.contains(
4379 "-- universal theorem countWords_law_trackedWordCount omitted: sampled law shape is not auto-proved yet"
4380 ));
4381 assert!(lean.contains("theorem countWords_law_presenceMatchesContains_sample_1 :"));
4382 assert!(lean.contains("theorem countWords_law_trackedWordCount_sample_1 :"));
4383 assert!(lean.contains("AverMap.has_set_self"));
4384 assert!(lean.contains("AverMap.get_set_self"));
4385 }
4386
4387 #[test]
4388 fn spec_laws_example_has_no_sorry_in_proof_mode() {
4389 let ctx = ctx_from_source(
4390 include_str!("../../../examples/formal/spec_laws.av"),
4391 "spec_laws",
4392 );
4393 let issues = proof_mode_issues(&ctx);
4394 assert!(
4395 issues.is_empty(),
4396 "expected spec_laws example to stay inside proof subset, got: {:?}",
4397 issues
4398 );
4399
4400 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4401 let lean = generated_lean_file(&out);
4402 assert!(
4403 !lean.contains("sorry"),
4404 "expected spec_laws proof export to avoid sorry, got:\n{}",
4405 lean
4406 );
4407 assert!(lean.contains("theorem absVal_eq_absValSpec :"));
4408 assert!(lean.contains("theorem clampNonNegative_eq_clampNonNegativeSpec :"));
4409 }
4410
4411 #[test]
4412 fn rle_example_exports_sampled_roundtrip_laws_without_sorry() {
4413 let ctx = ctx_from_source(include_str!("../../../examples/data/rle.av"), "rle");
4414 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4415 let lean = generated_lean_file(&out);
4416
4417 assert!(
4418 !lean.contains("sorry"),
4419 "expected rle proof export to avoid sorry, got:\n{}",
4420 lean
4421 );
4422 assert!(lean.contains(
4423 "-- universal theorem encode_law_roundtrip omitted: sampled law shape is not auto-proved yet"
4424 ));
4425 assert!(lean.contains(
4426 "theorem encode_law_roundtrip_sample_1 : decode (encode []) = [] := by native_decide"
4427 ));
4428 assert!(lean.contains(
4429 "-- universal theorem encodeString_law_string_roundtrip omitted: sampled law shape is not auto-proved yet"
4430 ));
4431 assert!(lean.contains(
4432 "theorem encodeString_law_string_roundtrip_sample_1 : decodeString (encodeString \"\") = \"\" := by native_decide"
4433 ));
4434 }
4435
4436 #[test]
4437 fn fibonacci_example_uses_fuelized_int_countdown_in_proof_mode() {
4438 let ctx = ctx_from_source(
4439 include_str!("../../../examples/data/fibonacci.av"),
4440 "fibonacci",
4441 );
4442 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4443 let lean = generated_lean_file(&out);
4444
4445 assert!(lean.contains("def fibTR__fuel"));
4446 assert!(lean.contains("def fibTR (n : Int) (a : Int) (b : Int) : Int :="));
4447 assert!(lean.contains("fibTR__fuel ((Int.natAbs n) + 1) n a b"));
4448 assert!(!lean.contains("partial def fibTR"));
4449 }
4450
4451 #[test]
4452 fn fibonacci_example_stays_inside_proof_subset() {
4453 let ctx = ctx_from_source(
4454 include_str!("../../../examples/data/fibonacci.av"),
4455 "fibonacci",
4456 );
4457 let issues = proof_mode_issues(&ctx);
4458 assert!(
4459 issues.is_empty(),
4460 "expected fibonacci example to stay inside proof subset, got: {:?}",
4461 issues
4462 );
4463 }
4464
4465 #[test]
4466 fn fibonacci_example_matches_general_linear_recurrence_shapes() {
4467 let ctx = ctx_from_source(
4468 include_str!("../../../examples/data/fibonacci.av"),
4469 "fibonacci",
4470 );
4471 let fib = ctx.fn_defs.iter().find(|fd| fd.name == "fib").unwrap();
4472 let fib_tr = ctx.fn_defs.iter().find(|fd| fd.name == "fibTR").unwrap();
4473 let fib_spec = ctx.fn_defs.iter().find(|fd| fd.name == "fibSpec").unwrap();
4474
4475 assert!(recurrence::detect_tailrec_int_linear_pair_wrapper(fib).is_some());
4476 assert!(recurrence::detect_tailrec_int_linear_pair_worker(fib_tr).is_some());
4477 assert!(recurrence::detect_second_order_int_linear_recurrence(fib_spec).is_some());
4478 }
4479
4480 #[test]
4481 fn fibonacci_example_auto_proves_general_linear_recurrence_spec_law() {
4482 let ctx = ctx_from_source(
4483 include_str!("../../../examples/data/fibonacci.av"),
4484 "fibonacci",
4485 );
4486 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4487 let lean = generated_lean_file(&out);
4488
4489 assert!(lean.contains("private def fibSpec__nat : Nat -> Int"));
4490 assert!(!lean.contains("partial def fibSpec"));
4491 assert!(lean.contains("private theorem fib_eq_fibSpec__worker_nat_shift"));
4492 assert!(lean.contains("private theorem fib_eq_fibSpec__helper_nat"));
4493 assert!(lean.contains("private theorem fib_eq_fibSpec__helper_seed"));
4494 assert!(lean.contains("theorem fib_eq_fibSpec : ∀ (n : Int), fib n = fibSpec n := by"));
4495 assert!(!lean.contains(
4496 "-- universal theorem fib_eq_fibSpec omitted: sampled law shape is not auto-proved yet"
4497 ));
4498 }
4499
4500 #[test]
4501 fn pell_like_example_auto_proves_same_general_shape() {
4502 let ctx = ctx_from_source(
4503 r#"
4504module Pell
4505 intent =
4506 "linear recurrence probe"
4507
4508fn pellTR(n: Int, a: Int, b: Int) -> Int
4509 match n
4510 0 -> a
4511 _ -> pellTR(n - 1, b, a + 2 * b)
4512
4513fn pell(n: Int) -> Int
4514 match n < 0
4515 true -> 0
4516 false -> pellTR(n, 0, 1)
4517
4518fn pellSpec(n: Int) -> Int
4519 match n < 0
4520 true -> 0
4521 false -> match n
4522 0 -> 0
4523 1 -> 1
4524 _ -> pellSpec(n - 2) + 2 * pellSpec(n - 1)
4525
4526verify pell law pellSpec
4527 given n: Int = [0, 1, 2, 3]
4528 pell(n) => pellSpec(n)
4529"#,
4530 "pell",
4531 );
4532 let issues = proof_mode_issues(&ctx);
4533 assert!(
4534 issues.is_empty(),
4535 "expected pell example to stay inside proof subset, got: {:?}",
4536 issues
4537 );
4538
4539 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4540 let lean = generated_lean_file(&out);
4541 assert!(lean.contains("private def pellSpec__nat : Nat -> Int"));
4542 assert!(lean.contains("private theorem pell_eq_pellSpec__worker_nat_shift"));
4543 assert!(lean.contains("theorem pell_eq_pellSpec : ∀ (n : Int), pell n = pellSpec n := by"));
4544 assert!(!lean.contains(
4545 "-- universal theorem pell_eq_pellSpec omitted: sampled law shape is not auto-proved yet"
4546 ));
4547 }
4548
4549 #[test]
4550 fn nonlinear_pair_state_recurrence_is_not_auto_proved_as_linear_shape() {
4551 let ctx = ctx_from_source(
4552 r#"
4553module WeirdRec
4554 intent =
4555 "reject nonlinear pair-state recurrence from linear recurrence prover"
4556
4557fn weirdTR(n: Int, a: Int, b: Int) -> Int
4558 match n
4559 0 -> a
4560 _ -> weirdTR(n - 1, b, a * b)
4561
4562fn weird(n: Int) -> Int
4563 match n < 0
4564 true -> 0
4565 false -> weirdTR(n, 0, 1)
4566
4567fn weirdSpec(n: Int) -> Int
4568 match n < 0
4569 true -> 0
4570 false -> match n
4571 0 -> 0
4572 1 -> 1
4573 _ -> weirdSpec(n - 1) * weirdSpec(n - 2)
4574
4575verify weird law weirdSpec
4576 given n: Int = [0, 1, 2, 3]
4577 weird(n) => weirdSpec(n)
4578"#,
4579 "weirdrec",
4580 );
4581 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4582 let lean = generated_lean_file(&out);
4583
4584 assert!(lean.contains(
4585 "-- universal theorem weird_eq_weirdSpec omitted: sampled law shape is not auto-proved yet"
4586 ));
4587 assert!(!lean.contains("private theorem weird_eq_weirdSpec__worker_nat_shift"));
4588 assert!(lean.contains("theorem weird_eq_weirdSpec_sample_1 :"));
4589 }
4590
4591 #[test]
4592 fn date_example_stays_inside_proof_subset() {
4593 let ctx = ctx_from_source(include_str!("../../../examples/data/date.av"), "date");
4594 let issues = proof_mode_issues(&ctx);
4595 assert!(
4596 issues.is_empty(),
4597 "expected date example to stay inside proof subset, got: {:?}",
4598 issues
4599 );
4600
4601 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4602 let lean = generated_lean_file(&out);
4603 assert!(!lean.contains("partial def"));
4604 assert!(lean.contains("def parseIntSlice (s : String) (from' : Int) (to : Int) : Int :="));
4605 }
4606
4607 #[test]
4608 fn temperature_example_stays_inside_proof_subset() {
4609 let ctx = ctx_from_source(
4610 include_str!("../../../examples/core/temperature.av"),
4611 "temperature",
4612 );
4613 let issues = proof_mode_issues(&ctx);
4614 assert!(
4615 issues.is_empty(),
4616 "expected temperature example to stay inside proof subset, got: {:?}",
4617 issues
4618 );
4619
4620 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4621 let lean = generated_lean_file(&out);
4622 assert!(!lean.contains("partial def"));
4623 assert!(
4624 lean.contains("example : celsiusToFahr 0.0 = 32.0 := by native_decide"),
4625 "expected verify examples to survive proof export, got:\n{}",
4626 lean
4627 );
4628 }
4629
4630 #[test]
4631 fn quicksort_example_stays_inside_proof_subset() {
4632 let ctx = ctx_from_source(
4633 include_str!("../../../examples/data/quicksort.av"),
4634 "quicksort",
4635 );
4636 let issues = proof_mode_issues(&ctx);
4637 assert!(
4638 issues.is_empty(),
4639 "expected quicksort example to stay inside proof subset, got: {:?}",
4640 issues
4641 );
4642
4643 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4644 let lean = generated_lean_file(&out);
4645 assert!(lean.contains("def isOrderedFrom"));
4646 assert!(!lean.contains("partial def isOrderedFrom"));
4647 assert!(lean.contains("termination_by xs.length"));
4648 }
4649
4650 #[test]
4651 fn grok_s_language_example_uses_total_ranked_sizeof_mutual_recursion() {
4652 let ctx = ctx_from_source(
4653 include_str!("../../../examples/core/grok_s_language.av"),
4654 "grok_s_language",
4655 );
4656 let issues = proof_mode_issues(&ctx);
4657 assert!(
4658 issues.is_empty(),
4659 "expected grok_s_language example to stay inside proof subset, got: {:?}",
4660 issues
4661 );
4662
4663 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4664 let lean = generated_lean_file(&out);
4665 assert!(lean.contains("mutual"));
4666 assert!(lean.contains("def eval__fuel"));
4667 assert!(lean.contains("def parseListItems__fuel"));
4668 assert!(!lean.contains("partial def eval"));
4669 assert!(!lean.contains("termination_by (sizeOf e,"));
4670 assert!(lean.contains("-- when validSymbolNames e"));
4671 assert!(!lean.contains("private theorem toString_law_parseRoundtrip_aux"));
4672 assert!(lean.contains(
4673 "theorem toString_law_parseRoundtrip : ∀ (e : Sexpr), e = Sexpr.atomNum 42 ∨"
4674 ));
4675 assert!(
4676 lean.contains("validSymbolNames e = true -> parse (toString e) = Except.ok e := by")
4677 );
4678 assert!(lean.contains("theorem toString_law_parseSexprRoundtrip :"));
4679 assert!(lean.contains("theorem toString_law_parseRoundtrip_sample_1 :"));
4680 }
4681
4682 #[test]
4683 fn lambda_example_keeps_only_eval_outside_proof_subset() {
4684 let ctx = ctx_from_source(include_str!("../../../examples/core/lambda.av"), "lambda");
4685 let issues = proof_mode_issues(&ctx);
4686 assert_eq!(
4687 issues,
4688 vec!["recursive function 'eval' is outside proof subset (currently supported: Int countdown, second-order affine Int recurrences with pair-state worker, structural recursion on List/recursive ADTs, String+position, mutual Int countdown, mutual String+position, and ranked sizeOf recursion)".to_string()]
4689 );
4690
4691 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4692 let lean = generated_lean_file(&out);
4693 assert!(lean.contains("def termToString__fuel"));
4694 assert!(lean.contains("def subst__fuel"));
4695 assert!(lean.contains("def countS__fuel"));
4696 assert!(!lean.contains("partial def termToString"));
4697 assert!(!lean.contains("partial def subst"));
4698 assert!(!lean.contains("partial def countS"));
4699 assert!(lean.contains("partial def eval"));
4700 }
4701
4702 #[test]
4703 fn mission_control_example_stays_inside_proof_subset() {
4704 let ctx = ctx_from_source(
4705 include_str!("../../../examples/apps/mission_control.av"),
4706 "mission_control",
4707 );
4708 let issues = proof_mode_issues(&ctx);
4709 assert!(
4710 issues.is_empty(),
4711 "expected mission_control example to stay inside proof subset, got: {:?}",
4712 issues
4713 );
4714
4715 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4716 let lean = generated_lean_file(&out);
4717 assert!(!lean.contains("partial def normalizeAngle"));
4718 assert!(lean.contains("def normalizeAngle__fuel"));
4719 }
4720
4721 #[test]
4722 fn notepad_store_example_stays_inside_proof_subset() {
4723 let ctx = ctx_from_source(
4724 include_str!("../../../examples/apps/notepad/store.av"),
4725 "notepad_store",
4726 );
4727 let issues = proof_mode_issues(&ctx);
4728 assert!(
4729 issues.is_empty(),
4730 "expected notepad/store example to stay inside proof subset, got: {:?}",
4731 issues
4732 );
4733
4734 let out = transpile_for_proof_mode(&ctx, VerifyEmitMode::NativeDecide);
4735 let lean = generated_lean_file(&out);
4736 assert!(lean.contains("def deserializeLine (line : String) : Except String Note :=\n do"));
4737 assert!(lean.contains("Except String (List Note)"));
4738 assert!(!lean.contains("partial def deserializeLine"));
4739 assert!(lean.contains("-- when noteRoundtripSafe note"));
4740 assert!(lean.contains("-- when notesRoundtripSafe notes"));
4741 assert!(lean.contains(
4742 "theorem serializeLine_law_lineRoundtrip : ∀ (note : Note), note = { id' := 1, title := \"Hello\", body := \"World\" : Note } ∨"
4743 ));
4744 assert!(lean.contains(
4745 "theorem serializeLines_law_notesRoundtrip : ∀ (notes : List Note), notes = [] ∨"
4746 ));
4747 assert!(lean.contains("notesRoundtripSafe notes = true ->"));
4748 assert!(lean.contains("parseNotes (s!\"{String.intercalate \"\\n\" (serializeLines notes)}\\n\") = Except.ok notes"));
4749 assert!(lean.contains("theorem serializeLine_law_lineRoundtrip_sample_1 :"));
4750 assert!(lean.contains("theorem serializeLines_law_notesRoundtrip_sample_1 :"));
4751 }
4752}