1use serde::{Deserialize, Serialize};
31use std::collections::HashSet;
32
33use logicaffeine_base::{Arena, Interner};
34use logicaffeine_language::{
35 ast::{self, LogicExpr, Term},
36 analysis::DiscoveryPass,
37 arena_ctx::AstContext,
38 compile::{compile_forest, compile_forest_with_options},
39 drs,
40 error::socratic_explanation,
41 lexer::Lexer,
42 mwe,
43 parser::Parser,
44 pragmatics,
45 registry::SymbolRegistry,
46 semantics,
47 token::TokenType,
48 CompileOptions, OutputFormat, ParseError,
49};
50use logicaffeine_proof::{BackwardChainer, DerivationTree, ProofExpr};
51
52pub use crate::interpreter::InterpreterResult;
54
/// Coarse lexical category assigned to each token for UI display
/// (e.g. syntax highlighting). Produced by `categorize_token`.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum TokenCategory {
    /// Quantifying words: all/some/no/any/most/few/many and cardinality bounds.
    Quantifier,
    /// Common nouns.
    Noun,
    /// Verbs (any form).
    Verb,
    /// Adjectives, including non-intersective ones.
    Adjective,
    /// Logical connectives: and/or/not/if/then/iff/because.
    Connective,
    /// Articles.
    Determiner,
    /// Prepositions.
    Preposition,
    /// Pronouns.
    Pronoun,
    /// Modal auxiliaries: must/can/should/shall/would/could/may/cannot/might.
    Modal,
    /// Periods and commas.
    Punctuation,
    /// Proper names.
    Proper,
    /// Anything not covered by the categories above.
    Other,
}
87
/// One lexed token prepared for the UI: byte offsets into the original
/// input, the covered source text, and a coarse display category.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TokenInfo {
    /// Byte offset of the token's start in the input string.
    pub start: usize,
    /// Byte offset one past the token's end.
    pub end: usize,
    /// Raw source slice `input[start..end]`.
    pub text: String,
    /// Coarse category used for highlighting.
    pub category: TokenCategory,
}
100
/// Maps a lexer `TokenType` to the coarse `TokenCategory` shown in the UI.
/// The interner parameter is currently unused (note the underscore); it is
/// presumably kept so future categories can resolve interned names — TODO confirm.
fn categorize_token(kind: &TokenType, _interner: &Interner) -> TokenCategory {
    match kind {
        TokenType::All | TokenType::Some | TokenType::No | TokenType::Any
        | TokenType::Most | TokenType::Few | TokenType::Many
        | TokenType::Cardinal(_) | TokenType::AtLeast(_) | TokenType::AtMost(_) => TokenCategory::Quantifier,
        TokenType::Noun(_) => TokenCategory::Noun,
        TokenType::Verb { .. } => TokenCategory::Verb,
        TokenType::Adjective(_) | TokenType::NonIntersectiveAdjective(_) => TokenCategory::Adjective,
        TokenType::And | TokenType::Or | TokenType::Not | TokenType::If | TokenType::Then
        | TokenType::Iff | TokenType::Because => TokenCategory::Connective,
        TokenType::Article(_) => TokenCategory::Determiner,
        TokenType::Preposition(_) => TokenCategory::Preposition,
        TokenType::Pronoun { .. } => TokenCategory::Pronoun,
        TokenType::Must | TokenType::Can | TokenType::Should | TokenType::Shall
        | TokenType::Would | TokenType::Could | TokenType::May | TokenType::Cannot
        | TokenType::Might => TokenCategory::Modal,
        TokenType::Period | TokenType::Comma => TokenCategory::Punctuation,
        TokenType::ProperName(_) => TokenCategory::Proper,
        // Everything else (numbers, keywords, unknown words, …) is lumped together.
        _ => TokenCategory::Other,
    }
}
122
123pub fn tokenize_for_ui(input: &str) -> Vec<TokenInfo> {
128 let mut interner = Interner::new();
129 let mut lexer = Lexer::new(input, &mut interner);
130 let tokens = lexer.tokenize();
131
132 tokens.iter().map(|t| TokenInfo {
133 start: t.span.start,
134 end: t.span.end,
135 text: input[t.span.start..t.span.end].to_string(),
136 category: categorize_token(&t.kind, &interner),
137 }).collect()
138}
139
/// A serializable, labeled tree node used to render the logical AST in the UI.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct AstNode {
    /// Human-readable label (e.g. "∀x", "∧", "Loves(2)").
    pub label: String,
    /// Node kind tag consumed by the UI ("predicate", "quantifier", …).
    pub node_type: String,
    /// Child nodes; empty for leaves.
    pub children: Vec<AstNode>,
}
154
155impl AstNode {
156 pub fn leaf(label: &str, node_type: &str) -> Self {
157 AstNode {
158 label: label.to_string(),
159 node_type: node_type.to_string(),
160 children: Vec::new(),
161 }
162 }
163
164 pub fn with_children(label: &str, node_type: &str, children: Vec<AstNode>) -> Self {
165 AstNode {
166 label: label.to_string(),
167 node_type: node_type.to_string(),
168 children,
169 }
170 }
171}
172
/// Converts a compiled `LogicExpr` into a serializable `AstNode` tree for
/// UI display, resolving interned symbols to their string forms.
/// Recurses via `term_to_ast_node` for term-level arguments.
pub fn expr_to_ast_node(expr: &LogicExpr, interner: &Interner) -> AstNode {
    match expr {
        // Predicates render as "Name(arity)" with one child per argument.
        LogicExpr::Predicate { name, args, .. } => {
            let name_str = interner.resolve(*name);
            let arg_nodes: Vec<AstNode> = args.iter()
                .map(|t| term_to_ast_node(t, interner))
                .collect();
            AstNode::with_children(
                &format!("{}({})", name_str, args.len()),
                "predicate",
                arg_nodes,
            )
        }
        LogicExpr::Quantifier { kind, variable, body, .. } => {
            let var_str = interner.resolve(*variable);
            // Counting quantifiers embed a numeral in the label, so they
            // return early; the remaining kinds share the symbol+var form below.
            let symbol = match kind {
                ast::QuantifierKind::Universal => "∀",
                ast::QuantifierKind::Existential => "∃",
                ast::QuantifierKind::Most => "MOST",
                ast::QuantifierKind::Few => "FEW",
                ast::QuantifierKind::Many => "MANY",
                ast::QuantifierKind::Cardinal(n) => return AstNode::with_children(
                    &format!("∃={}{}", n, var_str),
                    "quantifier",
                    vec![expr_to_ast_node(body, interner)],
                ),
                ast::QuantifierKind::AtLeast(n) => return AstNode::with_children(
                    &format!("∃≥{}{}", n, var_str),
                    "quantifier",
                    vec![expr_to_ast_node(body, interner)],
                ),
                ast::QuantifierKind::AtMost(n) => return AstNode::with_children(
                    &format!("∃≤{}{}", n, var_str),
                    "quantifier",
                    vec![expr_to_ast_node(body, interner)],
                ),
                ast::QuantifierKind::Generic => "GEN",
            };
            AstNode::with_children(
                &format!("{}{}", symbol, var_str),
                "quantifier",
                vec![expr_to_ast_node(body, interner)],
            )
        }
        // Binary connectives become a symbol node with two children.
        // Note: both If and Then map to "→".
        LogicExpr::BinaryOp { left, op, right } => {
            let op_str = match op {
                TokenType::And => "∧",
                TokenType::Or => "∨",
                TokenType::If | TokenType::Then => "→",
                TokenType::Iff => "↔",
                _ => "?",
            };
            AstNode::with_children(
                op_str,
                "binary_op",
                vec![
                    expr_to_ast_node(left, interner),
                    expr_to_ast_node(right, interner),
                ],
            )
        }
        // Only negation is expected here; anything else shows as "?".
        LogicExpr::UnaryOp { op, operand } => {
            let op_str = match op {
                TokenType::Not => "¬",
                _ => "?",
            };
            AstNode::with_children(
                op_str,
                "unary_op",
                vec![expr_to_ast_node(operand, interner)],
            )
        }
        // Identity is term-level equality; children are terms, not formulas.
        LogicExpr::Identity { left, right } => {
            AstNode::with_children(
                "=",
                "identity",
                vec![
                    term_to_ast_node(left, interner),
                    term_to_ast_node(right, interner),
                ],
            )
        }
        // Modal operators are labeled by their domain's Debug form.
        LogicExpr::Modal { vector, operand } => {
            AstNode::with_children(
                &format!("□{:?}", vector.domain),
                "modal",
                vec![expr_to_ast_node(operand, interner)],
            )
        }
        LogicExpr::Lambda { variable, body } => {
            let var_str = interner.resolve(*variable);
            AstNode::with_children(
                &format!("λ{}", var_str),
                "lambda",
                vec![expr_to_ast_node(body, interner)],
            )
        }
        // Fallback: anything unhandled is shown via its Debug representation.
        _ => AstNode::leaf(&format!("{:?}", expr), "other"),
    }
}
278
/// Converts a term to an `AstNode` leaf or subtree: constants and
/// variables become leaves, functions and groups recurse into their parts.
fn term_to_ast_node(term: &Term, interner: &Interner) -> AstNode {
    match term {
        Term::Constant(sym) => AstNode::leaf(interner.resolve(*sym), "constant"),
        Term::Variable(sym) => AstNode::leaf(interner.resolve(*sym), "variable"),
        Term::Function(name, args) => {
            let name_str = interner.resolve(*name);
            let arg_nodes: Vec<AstNode> = args.iter()
                .map(|t| term_to_ast_node(t, interner))
                .collect();
            // Label shows empty parens; arity is conveyed by the children.
            AstNode::with_children(&format!("{}()", name_str), "function", arg_nodes)
        }
        // Plural/group terms render under a "⊕" (sum) node.
        Term::Group(terms) => {
            let term_nodes: Vec<AstNode> = terms.iter()
                .map(|t| term_to_ast_node(t, interner))
                .collect();
            AstNode::with_children("⊕", "group", term_nodes)
        }
        // Fallback: Debug representation for unhandled term kinds.
        _ => AstNode::leaf(&format!("{:?}", term), "term"),
    }
}
299
/// Aggregate result of `compile_for_ui`: the same input rendered in three
/// output formats, plus tokens and an AST for display. On parse failure
/// the logic/AST fields are `None` and `error` carries an explanation;
/// `tokens` is populated either way.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompileResult {
    /// Primary (Unicode) rendering of the parsed discourse, if parsing succeeded.
    pub logic: Option<String>,
    /// SimpleFOL rendering of the same discourse.
    pub simple_logic: Option<String>,
    /// Kripke-lowered rendering of the same discourse.
    pub kripke_logic: Option<String>,
    /// Display tree of the post-pragmatics AST.
    pub ast: Option<AstNode>,
    /// All forest readings in the default format.
    pub readings: Vec<String>,
    /// Forest readings in SimpleFOL format, deduplicated (first-seen order kept).
    pub simple_readings: Vec<String>,
    /// Forest readings in Kripke format (not deduplicated).
    pub kripke_readings: Vec<String>,
    /// Lexed tokens for highlighting; available even when parsing fails.
    pub tokens: Vec<TokenInfo>,
    /// Socratic explanation of the parse error, if any.
    pub error: Option<String>,
}
326
/// Full UI compilation pipeline: tokenizes `input`, compiles the reading
/// forest in three formats, then independently parses the input and
/// applies the semantic and pragmatic passes to render discourse-level
/// logic plus a display AST. Parse failures are reported via
/// `CompileResult::error` (Socratic explanation) with tokens still populated.
pub fn compile_for_ui(input: &str) -> CompileResult {
    // These use their own lexer/compiler invocations, independent of the
    // parse performed below.
    let tokens = tokenize_for_ui(input);
    let readings = compile_forest(input);

    // Deduplicate SimpleFOL readings while preserving first-seen order.
    // NOTE(review): the default and Kripke reading lists are NOT
    // deduplicated — confirm whether that asymmetry is intentional.
    let simple_readings: Vec<String> = {
        let raw = compile_forest_with_options(input, CompileOptions { format: OutputFormat::SimpleFOL });
        let mut seen = HashSet::new();
        raw.into_iter().filter(|r| seen.insert(r.clone())).collect()
    };

    let kripke_readings = compile_forest_with_options(input, CompileOptions { format: OutputFormat::Kripke });

    let mut interner = Interner::new();
    let mut lexer = Lexer::new(input, &mut interner);
    let lex_tokens = lexer.tokenize();

    // Fold multi-word expressions into single tokens before discovery/parsing.
    let mwe_trie = mwe::build_mwe_trie();
    let lex_tokens = mwe::apply_mwe_pipeline(lex_tokens, &mwe_trie, &mut interner);

    // Pre-parse discovery pass builds the type registry the parser needs.
    let type_registry = {
        let mut discovery = DiscoveryPass::new(&lex_tokens, &mut interner);
        discovery.run()
    };

    // Arena-backed AST storage; the arenas must outlive the parser and
    // every AST reference derived from it.
    let expr_arena = Arena::new();
    let term_arena = Arena::new();
    let np_arena = Arena::new();
    let sym_arena = Arena::new();
    let role_arena = Arena::new();
    let pp_arena = Arena::new();

    let ctx = AstContext::new(
        &expr_arena,
        &term_arena,
        &np_arena,
        &sym_arena,
        &role_arena,
        &pp_arena,
    );

    let mut world_state = drs::WorldState::new();
    let mut parser = Parser::new(lex_tokens, &mut world_state, &mut interner, ctx, type_registry);

    match parser.parse() {
        Ok(ast) => {
            // Pass order: axioms first, then pragmatics, then rendering.
            let ast = semantics::apply_axioms(ast, ctx.exprs, ctx.terms, &mut interner);
            let ast = pragmatics::apply_pragmatics(ast, ctx.exprs, &interner);
            let ast_node = expr_to_ast_node(ast, &interner);
            // One registry is shared across all three transpilations —
            // presumably to keep symbol naming consistent between formats; confirm.
            let mut registry = SymbolRegistry::new();
            let logic = ast.transpile_discourse(&mut registry, &interner, OutputFormat::Unicode);
            let simple_logic = ast.transpile_discourse(&mut registry, &interner, OutputFormat::SimpleFOL);

            // Kripke output requires an extra lowering pass on the same AST.
            let kripke_ast = semantics::apply_kripke_lowering(ast, ctx.exprs, ctx.terms, &mut interner);
            let kripke_logic = kripke_ast.transpile_discourse(&mut registry, &interner, OutputFormat::Kripke);

            CompileResult {
                logic: Some(logic),
                simple_logic: Some(simple_logic),
                kripke_logic: Some(kripke_logic),
                ast: Some(ast_node),
                readings,
                simple_readings,
                kripke_readings,
                tokens,
                error: None,
            }
        }
        Err(e) => {
            // Surface a didactic explanation instead of the raw parser error.
            let advice = socratic_explanation(&e, &interner);
            CompileResult {
                logic: None,
                simple_logic: None,
                kripke_logic: None,
                ast: None,
                readings: Vec::new(),
                simple_readings: Vec::new(),
                kripke_readings: Vec::new(),
                tokens,
                error: Some(advice),
            }
        }
    }
}
415
/// Result of `compile_for_proof`: the input as a proof-engine expression
/// plus its SimpleFOL rendering, or an error explanation on parse failure.
#[derive(Debug, Clone)]
pub struct ProofCompileResult {
    /// Proof-engine form of the parsed expression.
    pub proof_expr: Option<ProofExpr>,
    /// SimpleFOL rendering of the same expression.
    pub logic_string: Option<String>,
    /// Socratic explanation of the parse error, if any.
    pub error: Option<String>,
}
434
/// Parses `input`, applies the semantic and pragmatic passes, and converts
/// the result into a `ProofExpr` for the proof engine, alongside its
/// SimpleFOL string form. Parse failures yield a Socratic explanation.
pub fn compile_for_proof(input: &str) -> ProofCompileResult {
    use logicaffeine_language::proof_convert::logic_expr_to_proof_expr;

    let mut interner = Interner::new();
    let mut lexer = Lexer::new(input, &mut interner);
    let lex_tokens = lexer.tokenize();

    // Fold multi-word expressions into single tokens before discovery/parsing.
    let mwe_trie = mwe::build_mwe_trie();
    let lex_tokens = mwe::apply_mwe_pipeline(lex_tokens, &mwe_trie, &mut interner);

    // Pre-parse discovery pass builds the type registry the parser needs.
    let type_registry = {
        let mut discovery = DiscoveryPass::new(&lex_tokens, &mut interner);
        discovery.run()
    };

    // Arena-backed AST storage; arenas must outlive the parser and AST.
    let expr_arena = Arena::new();
    let term_arena = Arena::new();
    let np_arena = Arena::new();
    let sym_arena = Arena::new();
    let role_arena = Arena::new();
    let pp_arena = Arena::new();

    let ctx = AstContext::new(
        &expr_arena,
        &term_arena,
        &np_arena,
        &sym_arena,
        &role_arena,
        &pp_arena,
    );

    let mut world_state = drs::WorldState::new();
    let mut parser = Parser::new(lex_tokens, &mut world_state, &mut interner, ctx, type_registry);

    match parser.parse() {
        Ok(ast) => {
            // Same pass order as compile_for_ui: axioms, then pragmatics.
            let ast = semantics::apply_axioms(ast, ctx.exprs, ctx.terms, &mut interner);
            let ast = pragmatics::apply_pragmatics(ast, ctx.exprs, &interner);

            // Note: `transpile`, not `transpile_discourse` as in compile_for_ui.
            let mut registry = SymbolRegistry::new();
            let logic_string = ast.transpile(&mut registry, &interner, OutputFormat::SimpleFOL);
            let proof_expr = logic_expr_to_proof_expr(ast, &interner);

            ProofCompileResult {
                proof_expr: Some(proof_expr),
                logic_string: Some(logic_string),
                error: None,
            }
        }
        Err(e) => {
            let advice = socratic_explanation(&e, &interner);
            ProofCompileResult {
                proof_expr: None,
                logic_string: None,
                error: Some(advice),
            }
        }
    }
}
495
/// Result of `compile_theorem_for_ui`: the first theorem block found in
/// the input, converted to proof-engine form, with an optional automatic
/// derivation. On failure only `error` is populated.
#[derive(Debug, Clone)]
pub struct TheoremCompileResult {
    /// Theorem name as declared in the source.
    pub name: String,
    /// Premises converted to proof-engine expressions.
    pub premises: Vec<ProofExpr>,
    /// Goal converted to a proof-engine expression.
    pub goal: Option<ProofExpr>,
    /// SimpleFOL rendering of the goal.
    pub goal_string: Option<String>,
    /// Derivation found by backward chaining (Auto strategy only; None if
    /// the strategy is manual or the proof search failed).
    pub derivation: Option<DerivationTree>,
    /// Error message when parsing fails or no theorem block exists.
    pub error: Option<String>,
}
515
516pub fn compile_theorem_for_ui(input: &str) -> TheoremCompileResult {
518 use logicaffeine_language::proof_convert::logic_expr_to_proof_expr;
519
520 let mut interner = Interner::new();
521 let mut lexer = Lexer::new(input, &mut interner);
522 let tokens = lexer.tokenize();
523
524 let mwe_trie = mwe::build_mwe_trie();
525 let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);
526
527 let type_registry = {
528 let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
529 discovery.run()
530 };
531
532 let expr_arena = Arena::new();
533 let term_arena = Arena::new();
534 let np_arena = Arena::new();
535 let sym_arena = Arena::new();
536 let role_arena = Arena::new();
537 let pp_arena = Arena::new();
538
539 let ctx = AstContext::new(
540 &expr_arena,
541 &term_arena,
542 &np_arena,
543 &sym_arena,
544 &role_arena,
545 &pp_arena,
546 );
547
548 let mut world_state = drs::WorldState::new();
549 let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ctx, type_registry);
550
551 let statements = match parser.parse_program() {
552 Ok(stmts) => stmts,
553 Err(e) => {
554 return TheoremCompileResult {
555 name: String::new(),
556 premises: Vec::new(),
557 goal: None,
558 goal_string: None,
559 derivation: None,
560 error: Some(format!("Parse error: {:?}", e)),
561 };
562 }
563 };
564
565 let theorem = match statements.iter().find_map(|stmt| {
566 if let ast::Stmt::Theorem(t) = stmt {
567 Some(t)
568 } else {
569 None
570 }
571 }) {
572 Some(t) => t,
573 None => {
574 return TheoremCompileResult {
575 name: String::new(),
576 premises: Vec::new(),
577 goal: None,
578 goal_string: None,
579 derivation: None,
580 error: Some("No theorem block found".to_string()),
581 };
582 }
583 };
584
585 let premises: Vec<ProofExpr> = theorem
586 .premises
587 .iter()
588 .map(|p| logic_expr_to_proof_expr(p, &interner))
589 .collect();
590
591 let goal = logic_expr_to_proof_expr(theorem.goal, &interner);
592
593 let mut registry = SymbolRegistry::new();
594 let goal_string = theorem.goal.transpile(&mut registry, &interner, OutputFormat::SimpleFOL);
595
596 let derivation = if matches!(theorem.strategy, ast::theorem::ProofStrategy::Auto) {
597 let mut engine = BackwardChainer::new();
598 for premise in &premises {
599 engine.add_axiom(premise.clone());
600 }
601 engine.prove(goal.clone()).ok()
602 } else {
603 None
604 };
605
606 TheoremCompileResult {
607 name: theorem.name.clone(),
608 premises,
609 goal: Some(goal),
610 goal_string: Some(goal_string),
611 derivation,
612 error: None,
613 }
614}
615
/// Compiles a LOGOS program to Rust source text (feature `codegen` only).
/// Runs the full discovery pass (types + policies), parses the program
/// with the typed `AstContext`, and hands the statements to the code
/// generator. Returns the first parse error on failure.
#[cfg(feature = "codegen")]
pub fn generate_rust_code(source: &str) -> Result<String, ParseError> {
    use logicaffeine_language::ast::stmt::{Stmt, Expr, TypeExpr};

    let mut interner = Interner::new();
    let mut lexer = Lexer::new(source, &mut interner);
    let tokens = lexer.tokenize();

    // Fold multi-word expressions into single tokens before discovery/parsing.
    let mwe_trie = mwe::build_mwe_trie();
    let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);

    // Full discovery yields both type and policy registries.
    let (type_registry, policy_registry) = {
        let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
        let result = discovery.run_full();
        (result.types, result.policies)
    };
    // Clones are needed because Parser::new consumes the registries while
    // codegen still needs them afterwards.
    let codegen_registry = type_registry.clone();
    let codegen_policies = policy_registry.clone();

    let mut world_state = drs::WorldState::new();
    // Arena-backed AST storage, including the statement/expression/type
    // arenas required by the imperative (typed) parser mode.
    let expr_arena = Arena::new();
    let term_arena = Arena::new();
    let np_arena = Arena::new();
    let sym_arena = Arena::new();
    let role_arena = Arena::new();
    let pp_arena = Arena::new();
    let stmt_arena: Arena<Stmt> = Arena::new();
    let imperative_expr_arena: Arena<Expr> = Arena::new();
    let type_expr_arena: Arena<TypeExpr> = Arena::new();

    let ast_ctx = AstContext::with_types(
        &expr_arena,
        &term_arena,
        &np_arena,
        &sym_arena,
        &role_arena,
        &pp_arena,
        &stmt_arena,
        &imperative_expr_arena,
        &type_expr_arena,
    );

    let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ast_ctx, type_registry);
    let stmts = parser.parse_program()?;

    let rust_code = crate::codegen::codegen_program(&stmts, &codegen_registry, &codegen_policies, &interner);
    Ok(rust_code)
}
669
/// Parses and asynchronously interprets a LOGOS program, returning the
/// interpreter's captured output lines plus any runtime error. Parse
/// failures yield no output lines and a Socratic explanation.
pub async fn interpret_for_ui(input: &str) -> InterpreterResult {
    use logicaffeine_language::ast::stmt::{Stmt, Expr, TypeExpr};

    let mut interner = Interner::new();
    let mut lexer = Lexer::new(input, &mut interner);
    let tokens = lexer.tokenize();

    // Fold multi-word expressions into single tokens before discovery/parsing.
    let mwe_trie = mwe::build_mwe_trie();
    let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);

    // Full discovery yields both type and policy registries.
    let (type_registry, policy_registry) = {
        let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
        let result = discovery.run_full();
        (result.types, result.policies)
    };

    // Arena-backed AST storage, including the typed (imperative) arenas.
    let expr_arena = Arena::new();
    let term_arena = Arena::new();
    let np_arena = Arena::new();
    let sym_arena = Arena::new();
    let role_arena = Arena::new();
    let pp_arena = Arena::new();
    let stmt_arena: Arena<Stmt> = Arena::new();
    let imperative_expr_arena: Arena<Expr> = Arena::new();
    let type_expr_arena: Arena<TypeExpr> = Arena::new();

    let ctx = AstContext::with_types(
        &expr_arena,
        &term_arena,
        &np_arena,
        &sym_arena,
        &role_arena,
        &pp_arena,
        &stmt_arena,
        &imperative_expr_arena,
        &type_expr_arena,
    );

    let mut world_state = drs::WorldState::new();
    // Clone needed: Parser::new consumes the registry, but the interpreter
    // needs it too.
    let type_registry_for_interp = type_registry.clone();
    let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ctx, type_registry);

    match parser.parse_program() {
        Ok(stmts) => {
            let mut interp = crate::interpreter::Interpreter::new(&interner)
                .with_type_registry(&type_registry_for_interp)
                .with_policies(policy_registry);
            // Output accumulated so far is returned even when execution fails.
            match interp.run(&stmts).await {
                Ok(()) => InterpreterResult {
                    lines: interp.output,
                    error: None,
                },
                Err(e) => InterpreterResult {
                    lines: interp.output,
                    error: Some(e),
                },
            }
        }
        Err(e) => {
            let advice = socratic_explanation(&e, &interner);
            InterpreterResult {
                lines: vec![],
                error: Some(advice),
            }
        }
    }
}
742
743pub fn interpret_for_ui_sync(input: &str) -> InterpreterResult {
748 use logicaffeine_language::ast::stmt::{Stmt, Expr, TypeExpr};
749
750 let mut interner = Interner::new();
751 let mut lexer = Lexer::new(input, &mut interner);
752 let tokens = lexer.tokenize();
753
754 let mwe_trie = mwe::build_mwe_trie();
755 let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);
756
757 let (type_registry, policy_registry) = {
758 let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
759 let result = discovery.run_full();
760 (result.types, result.policies)
761 };
762
763 let expr_arena = Arena::new();
764 let term_arena = Arena::new();
765 let np_arena = Arena::new();
766 let sym_arena = Arena::new();
767 let role_arena = Arena::new();
768 let pp_arena = Arena::new();
769 let stmt_arena: Arena<Stmt> = Arena::new();
770 let imperative_expr_arena: Arena<Expr> = Arena::new();
771 let type_expr_arena: Arena<TypeExpr> = Arena::new();
772
773 let ctx = AstContext::with_types(
774 &expr_arena,
775 &term_arena,
776 &np_arena,
777 &sym_arena,
778 &role_arena,
779 &pp_arena,
780 &stmt_arena,
781 &imperative_expr_arena,
782 &type_expr_arena,
783 );
784
785 let mut world_state = drs::WorldState::new();
786 let type_registry_for_interp = type_registry.clone();
787 let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ctx, type_registry);
788
789 match parser.parse_program() {
790 Ok(stmts) => {
791 let mut interp = crate::interpreter::Interpreter::new(&interner)
792 .with_type_registry(&type_registry_for_interp)
793 .with_policies(policy_registry);
794
795 if crate::interpreter::needs_async(&stmts) {
796 use futures::executor::block_on;
798 match block_on(interp.run(&stmts)) {
799 Ok(()) => InterpreterResult {
800 lines: interp.output,
801 error: None,
802 },
803 Err(e) => InterpreterResult {
804 lines: interp.output,
805 error: Some(e),
806 },
807 }
808 } else {
809 match interp.run_sync(&stmts) {
811 Ok(()) => InterpreterResult {
812 lines: interp.output,
813 error: None,
814 },
815 Err(e) => InterpreterResult {
816 lines: interp.output,
817 error: Some(e),
818 },
819 }
820 }
821 }
822 Err(e) => {
823 let advice = socratic_explanation(&e, &interner);
824 InterpreterResult {
825 lines: vec![],
826 error: Some(advice),
827 }
828 }
829 }
830}
831
/// Like [`interpret_for_ui`], but delivers each output line to `on_output`
/// as it is produced, in addition to returning the full buffered output in
/// the final `InterpreterResult`. The callback is shared via `Rc<RefCell>`
/// so the caller can retain access to its own state.
pub async fn interpret_streaming<F>(input: &str, on_output: std::rc::Rc<std::cell::RefCell<F>>) -> InterpreterResult
where
    F: FnMut(String) + 'static,
{
    use logicaffeine_language::ast::stmt::{Stmt, Expr, TypeExpr};
    use crate::interpreter::OutputCallback;

    let mut interner = Interner::new();
    let mut lexer = Lexer::new(input, &mut interner);
    let tokens = lexer.tokenize();

    // Fold multi-word expressions into single tokens before discovery/parsing.
    let mwe_trie = mwe::build_mwe_trie();
    let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);

    // Full discovery yields both type and policy registries.
    let (type_registry, policy_registry) = {
        let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
        let result = discovery.run_full();
        (result.types, result.policies)
    };

    // Arena-backed AST storage, including the typed (imperative) arenas.
    let expr_arena = Arena::new();
    let term_arena = Arena::new();
    let np_arena = Arena::new();
    let sym_arena = Arena::new();
    let role_arena = Arena::new();
    let pp_arena = Arena::new();
    let stmt_arena: Arena<Stmt> = Arena::new();
    let imperative_expr_arena: Arena<Expr> = Arena::new();
    let type_expr_arena: Arena<TypeExpr> = Arena::new();

    let ctx = AstContext::with_types(
        &expr_arena,
        &term_arena,
        &np_arena,
        &sym_arena,
        &role_arena,
        &pp_arena,
        &stmt_arena,
        &imperative_expr_arena,
        &type_expr_arena,
    );

    let mut world_state = drs::WorldState::new();
    // Clone needed: Parser::new consumes the registry, but the interpreter
    // needs it too.
    let type_registry_for_interp = type_registry.clone();
    let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ctx, type_registry);

    match parser.parse_program() {
        Ok(stmts) => {
            // Adapt the caller's closure to the interpreter's OutputCallback type.
            let callback: OutputCallback = std::rc::Rc::new(std::cell::RefCell::new(move |line: String| {
                (on_output.borrow_mut())(line);
            }));

            let mut interp = crate::interpreter::Interpreter::new(&interner)
                .with_type_registry(&type_registry_for_interp)
                .with_policies(policy_registry)
                .with_output_callback(callback);

            // Output accumulated so far is returned even when execution fails.
            match interp.run(&stmts).await {
                Ok(()) => InterpreterResult {
                    lines: interp.output,
                    error: None,
                },
                Err(e) => InterpreterResult {
                    lines: interp.output,
                    error: Some(e),
                },
            }
        }
        Err(e) => {
            let advice = socratic_explanation(&e, &interner);
            InterpreterResult {
                lines: vec![],
                error: Some(advice),
            }
        }
    }
}
932
933use logicaffeine_language::ast::Stmt;
938use logicaffeine_language::proof_convert::logic_expr_to_proof_expr;
939use crate::kernel;
940
/// End-to-end theorem verification: parses `input`, finds the first
/// theorem block, proves its goal from its premises via backward
/// chaining, certifies the derivation into a kernel proof term, and
/// type-checks that term against the kernel context.
///
/// Returns the certified proof term together with the kernel context it
/// was checked in. All failure modes (parse, missing theorem, proof
/// search, certification, type check) are reported as `ParseError`s with
/// a `Custom` kind.
pub fn verify_theorem(input: &str) -> Result<(kernel::Term, kernel::Context), ParseError> {
    let mut interner = Interner::new();
    let mut lexer = Lexer::new(input, &mut interner);
    let tokens = lexer.tokenize();

    // Fold multi-word expressions into single tokens before discovery/parsing.
    let mwe_trie = mwe::build_mwe_trie();
    let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);

    // Pre-parse discovery pass builds the type registry the parser needs.
    let type_registry = {
        let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
        discovery.run()
    };

    // Arena-backed AST storage; arenas must outlive the parser and AST.
    let expr_arena = Arena::new();
    let term_arena = Arena::new();
    let np_arena = Arena::new();
    let sym_arena = Arena::new();
    let role_arena = Arena::new();
    let pp_arena = Arena::new();

    let ctx = AstContext::new(
        &expr_arena,
        &term_arena,
        &np_arena,
        &sym_arena,
        &role_arena,
        &pp_arena,
    );

    let mut world_state = drs::WorldState::new();
    let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ctx, type_registry);
    let statements = parser.parse_program()?;

    // Only the first theorem block is verified.
    let theorem = statements
        .iter()
        .find_map(|stmt| {
            if let Stmt::Theorem(t) = stmt {
                Some(t)
            } else {
                None
            }
        })
        .ok_or_else(|| ParseError {
            kind: logicaffeine_language::error::ParseErrorKind::Custom("No theorem block found in input".to_string()),
            span: logicaffeine_language::token::Span::default(),
        })?;

    // Fresh kernel context seeded with the standard library.
    let mut kernel_ctx = kernel::Context::new();
    kernel::prelude::StandardLibrary::register(&mut kernel_ctx);

    // Convert premises and goal to proof-engine expressions.
    let mut proof_exprs: Vec<ProofExpr> = Vec::new();
    for premise in &theorem.premises {
        let proof_expr = logic_expr_to_proof_expr(premise, &interner);
        proof_exprs.push(proof_expr);
    }
    let goal_expr = logic_expr_to_proof_expr(theorem.goal, &interner);

    // Collect every predicate/constant symbol so they can be declared in
    // the kernel context before type checking.
    let mut collector = SymbolCollector::new();
    for expr in &proof_exprs {
        collector.collect(expr);
    }
    collector.collect(&goal_expr);

    for pred_name in collector.predicates() {
        register_predicate(&mut kernel_ctx, pred_name);
    }

    for const_name in collector.constants() {
        register_constant(&mut kernel_ctx, const_name);
    }

    // Premises become numbered hypotheses (h1, h2, …) in the kernel
    // context and axioms for the proof search engine.
    let mut engine = BackwardChainer::new();
    for (i, proof_expr) in proof_exprs.iter().enumerate() {
        let hyp_name = format!("h{}", i + 1);
        let hyp_type = proof_expr_to_kernel_type(proof_expr)?;
        kernel_ctx.add_declaration(&hyp_name, hyp_type);
        engine.add_axiom(proof_expr.clone());
    }

    let derivation = engine.prove(goal_expr.clone()).map_err(|e| ParseError {
        kind: logicaffeine_language::error::ParseErrorKind::Custom(format!("Proof failed: {}", e)),
        span: logicaffeine_language::token::Span::default(),
    })?;

    // Certify the search-level derivation into a kernel proof term.
    let cert_ctx = logicaffeine_proof::certifier::CertificationContext::new(&kernel_ctx);
    let proof_term = logicaffeine_proof::certifier::certify(&derivation, &cert_ctx).map_err(|e| ParseError {
        kind: logicaffeine_language::error::ParseErrorKind::Custom(format!("Certification failed: {}", e)),
        span: logicaffeine_language::token::Span::default(),
    })?;

    // Final safety net: the kernel re-checks the certified term; the
    // inferred type itself is not needed, only that inference succeeds.
    let _ = kernel::infer_type(&kernel_ctx, &proof_term).map_err(|e| ParseError {
        kind: logicaffeine_language::error::ParseErrorKind::Custom(format!("Type check failed: {}", e)),
        span: logicaffeine_language::token::Span::default(),
    })?;

    Ok((proof_term, kernel_ctx))
}
1058
/// Accumulates the predicate and constant names occurring in a set of
/// proof expressions, so they can be declared in the kernel context.
struct SymbolCollector {
    /// Predicate and function symbol names.
    predicates: HashSet<String>,
    /// Capitalized constant names (see `collect_term` for the heuristic).
    constants: HashSet<String>,
}
1064
1065impl SymbolCollector {
1066 fn new() -> Self {
1067 SymbolCollector {
1068 predicates: HashSet::new(),
1069 constants: HashSet::new(),
1070 }
1071 }
1072
1073 fn collect(&mut self, expr: &ProofExpr) {
1074 match expr {
1075 ProofExpr::Predicate { name, args, .. } => {
1076 self.predicates.insert(name.clone());
1077 for arg in args {
1078 self.collect_term(arg);
1079 }
1080 }
1081 ProofExpr::And(l, r)
1082 | ProofExpr::Or(l, r)
1083 | ProofExpr::Implies(l, r)
1084 | ProofExpr::Iff(l, r) => {
1085 self.collect(l);
1086 self.collect(r);
1087 }
1088 ProofExpr::Not(inner) => {
1089 self.collect(inner);
1090 }
1091 ProofExpr::ForAll { body, .. } | ProofExpr::Exists { body, .. } => {
1092 self.collect(body);
1093 }
1094 ProofExpr::Atom(_) => {
1095 }
1097 ProofExpr::Identity(l, r) => {
1098 self.collect_term(l);
1100 self.collect_term(r);
1101 }
1102 _ => {}
1103 }
1104 }
1105
1106 fn collect_term(&mut self, term: &logicaffeine_proof::ProofTerm) {
1107 match term {
1108 logicaffeine_proof::ProofTerm::Constant(name) => {
1109 if name.chars().next().map(|c| c.is_uppercase()).unwrap_or(false) {
1111 self.constants.insert(name.clone());
1112 }
1113 }
1114 logicaffeine_proof::ProofTerm::Function(name, args) => {
1115 self.predicates.insert(name.clone());
1116 for arg in args {
1117 self.collect_term(arg);
1118 }
1119 }
1120 _ => {}
1121 }
1122 }
1123
1124 fn predicates(&self) -> impl Iterator<Item = &String> {
1125 self.predicates.iter()
1126 }
1127
1128 fn constants(&self) -> impl Iterator<Item = &String> {
1129 self.constants.iter()
1130 }
1131}
1132
1133fn register_predicate(ctx: &mut kernel::Context, name: &str) {
1136 if ctx.get_global(name).is_some() {
1138 return;
1139 }
1140
1141 let pred_type = kernel::Term::Pi {
1142 param: "_".to_string(),
1143 param_type: Box::new(kernel::Term::Global("Entity".to_string())),
1144 body_type: Box::new(kernel::Term::Sort(kernel::Universe::Prop)),
1145 };
1146 ctx.add_declaration(name, pred_type);
1147}
1148
1149fn register_constant(ctx: &mut kernel::Context, name: &str) {
1152 if ctx.get_global(name).is_some() {
1154 return;
1155 }
1156
1157 ctx.add_declaration(name, kernel::Term::Global("Entity".to_string()));
1158}
1159
1160fn proof_expr_to_kernel_type(expr: &ProofExpr) -> Result<kernel::Term, ParseError> {
1162 match expr {
1163 ProofExpr::Predicate { name, args, .. } => {
1164 let mut term = kernel::Term::Global(name.clone());
1166 for arg in args {
1167 let arg_term = proof_term_to_kernel_term(arg)?;
1168 term = kernel::Term::App(Box::new(term), Box::new(arg_term));
1169 }
1170 Ok(term)
1171 }
1172 ProofExpr::ForAll { variable, body } => {
1173 let body_type = proof_expr_to_kernel_type(body)?;
1175 Ok(kernel::Term::Pi {
1176 param: variable.clone(),
1177 param_type: Box::new(kernel::Term::Global("Entity".to_string())),
1178 body_type: Box::new(body_type),
1179 })
1180 }
1181 ProofExpr::Implies(ante, cons) => {
1182 let ante_type = proof_expr_to_kernel_type(ante)?;
1184 let cons_type = proof_expr_to_kernel_type(cons)?;
1185 Ok(kernel::Term::Pi {
1186 param: "_".to_string(),
1187 param_type: Box::new(ante_type),
1188 body_type: Box::new(cons_type),
1189 })
1190 }
1191 ProofExpr::And(l, r) => {
1192 let l_type = proof_expr_to_kernel_type(l)?;
1194 let r_type = proof_expr_to_kernel_type(r)?;
1195 Ok(kernel::Term::App(
1196 Box::new(kernel::Term::App(
1197 Box::new(kernel::Term::Global("And".to_string())),
1198 Box::new(l_type),
1199 )),
1200 Box::new(r_type),
1201 ))
1202 }
1203 ProofExpr::Or(l, r) => {
1204 let l_type = proof_expr_to_kernel_type(l)?;
1206 let r_type = proof_expr_to_kernel_type(r)?;
1207 Ok(kernel::Term::App(
1208 Box::new(kernel::Term::App(
1209 Box::new(kernel::Term::Global("Or".to_string())),
1210 Box::new(l_type),
1211 )),
1212 Box::new(r_type),
1213 ))
1214 }
1215 ProofExpr::Atom(name) => {
1216 Ok(kernel::Term::Global(name.clone()))
1218 }
1219 ProofExpr::Identity(l, r) => {
1220 let l_term = proof_term_to_kernel_term(l)?;
1222 let r_term = proof_term_to_kernel_term(r)?;
1223 Ok(kernel::Term::App(
1224 Box::new(kernel::Term::App(
1225 Box::new(kernel::Term::App(
1226 Box::new(kernel::Term::Global("Eq".to_string())),
1227 Box::new(kernel::Term::Global("Entity".to_string())),
1228 )),
1229 Box::new(l_term),
1230 )),
1231 Box::new(r_term),
1232 ))
1233 }
1234 _ => Err(ParseError {
1235 kind: logicaffeine_language::error::ParseErrorKind::Custom(format!(
1236 "Unsupported ProofExpr for kernel type: {:?}",
1237 expr
1238 )),
1239 span: logicaffeine_language::token::Span::default(),
1240 }),
1241 }
1242}
1243
1244fn proof_term_to_kernel_term(term: &logicaffeine_proof::ProofTerm) -> Result<kernel::Term, ParseError> {
1246 match term {
1247 logicaffeine_proof::ProofTerm::Constant(name) => Ok(kernel::Term::Global(name.clone())),
1248 logicaffeine_proof::ProofTerm::Variable(name) => Ok(kernel::Term::Var(name.clone())),
1249 logicaffeine_proof::ProofTerm::Function(name, args) => {
1250 let mut t = kernel::Term::Global(name.clone());
1251 for arg in args {
1252 let arg_term = proof_term_to_kernel_term(arg)?;
1253 t = kernel::Term::App(Box::new(t), Box::new(arg_term));
1254 }
1255 Ok(t)
1256 }
1257 _ => Err(ParseError {
1258 kind: logicaffeine_language::error::ParseErrorKind::Custom(format!(
1259 "Unsupported ProofTerm for kernel: {:?}",
1260 term
1261 )),
1262 span: logicaffeine_language::token::Span::default(),
1263 }),
1264 }
1265}