// logicaffeine_compile/ui_bridge.rs
//! UI bridge for web interface integration.
//!
//! This module provides high-level, UI-friendly wrappers around the compilation
//! pipeline, returning structured results suitable for display in the browser.
//!
//! # Key Functions
//!
//! | Function | Purpose |
//! |----------|---------|
//! | [`compile_for_ui`] | Compile LOGOS to FOL (UI format) |
//! | [`compile_for_proof`] | Compile and search for proofs |
//! | [`compile_theorem_for_ui`] | Compile theorems with derivation trees |
//! | [`verify_theorem`] | Verify a theorem is provable |
//! | [`interpret_for_ui`] | Run imperative code and return output |
//! | [`generate_rust_code`] | Generate Rust source (requires `codegen` feature) |
//!
//! # Result Types
//!
//! All functions return serializable result types:
//! - [`CompileResult`] - Tokens, FOL, AST, errors
//! - [`ProofCompileResult`] - FOL with proof search results
//! - [`TheoremCompileResult`] - Theorem verification with derivation tree
//! - [`InterpreterResult`] - Program output lines and errors
//!
//! # Token Categories
//!
//! The [`TokenCategory`] enum provides syntactic highlighting categories:
//! Quantifier, Noun, Verb, Adjective, Connective, Determiner, etc.
30use serde::{Deserialize, Serialize};
31use std::collections::HashSet;
32
33use logicaffeine_base::{Arena, Interner};
34use logicaffeine_language::{
35    ast::{self, LogicExpr, Term},
36    analysis::DiscoveryPass,
37    arena_ctx::AstContext,
38    compile::{compile_forest, compile_forest_with_options},
39    drs,
40    error::socratic_explanation,
41    lexer::Lexer,
42    mwe,
43    parser::Parser,
44    pragmatics,
45    registry::SymbolRegistry,
46    semantics,
47    token::TokenType,
48    CompileOptions, OutputFormat, ParseError,
49};
50use logicaffeine_proof::{BackwardChainer, DerivationTree, ProofExpr};
51
52// Re-export interpreter result from our interpreter module
53pub use crate::interpreter::InterpreterResult;
54
55// ═══════════════════════════════════════════════════════════════════
56// Token Visualization
57// ═══════════════════════════════════════════════════════════════════
58
59/// Syntactic category of a token for UI highlighting.
60#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
61pub enum TokenCategory {
62    /// Universal/existential quantifiers: every, some, no, most
63    Quantifier,
64    /// Common nouns: dog, cat, person
65    Noun,
66    /// Main verbs: runs, loves, gives
67    Verb,
68    /// Adjective modifiers: tall, happy, red
69    Adjective,
70    /// Logical connectives: and, or, not, if
71    Connective,
72    /// Articles and determiners: a, an, the
73    Determiner,
74    /// Prepositional words: in, on, to, with
75    Preposition,
76    /// Personal and relative pronouns: he, she, who
77    Pronoun,
78    /// Modal auxiliaries: can, must, might
79    Modal,
80    /// Sentence-ending punctuation
81    Punctuation,
82    /// Proper names (capitalized)
83    Proper,
84    /// Uncategorized tokens
85    Other,
86}
87
88/// Token information for UI display with position and category.
89#[derive(Debug, Clone, Serialize, Deserialize)]
90pub struct TokenInfo {
91    /// Byte offset of token start in source string.
92    pub start: usize,
93    /// Byte offset of token end (exclusive) in source string.
94    pub end: usize,
95    /// The actual text of the token.
96    pub text: String,
97    /// Syntactic category for highlighting.
98    pub category: TokenCategory,
99}
100
101fn categorize_token(kind: &TokenType, _interner: &Interner) -> TokenCategory {
102    match kind {
103        TokenType::All | TokenType::Some | TokenType::No | TokenType::Any
104        | TokenType::Most | TokenType::Few | TokenType::Many
105        | TokenType::Cardinal(_) | TokenType::AtLeast(_) | TokenType::AtMost(_) => TokenCategory::Quantifier,
106        TokenType::Noun(_) => TokenCategory::Noun,
107        TokenType::Verb { .. } => TokenCategory::Verb,
108        TokenType::Adjective(_) | TokenType::NonIntersectiveAdjective(_) => TokenCategory::Adjective,
109        TokenType::And | TokenType::Or | TokenType::Not | TokenType::If | TokenType::Then
110        | TokenType::Iff | TokenType::Because => TokenCategory::Connective,
111        TokenType::Article(_) => TokenCategory::Determiner,
112        TokenType::Preposition(_) => TokenCategory::Preposition,
113        TokenType::Pronoun { .. } => TokenCategory::Pronoun,
114        TokenType::Must | TokenType::Can | TokenType::Should | TokenType::Shall
115        | TokenType::Would | TokenType::Could | TokenType::May | TokenType::Cannot
116        | TokenType::Might => TokenCategory::Modal,
117        TokenType::Period | TokenType::Comma => TokenCategory::Punctuation,
118        TokenType::ProperName(_) => TokenCategory::Proper,
119        _ => TokenCategory::Other,
120    }
121}
122
123/// Tokenizes input text and returns token information for UI display.
124///
125/// Each token includes its byte position, text content, and syntactic
126/// category for syntax highlighting in the browser interface.
127pub fn tokenize_for_ui(input: &str) -> Vec<TokenInfo> {
128    let mut interner = Interner::new();
129    let mut lexer = Lexer::new(input, &mut interner);
130    let tokens = lexer.tokenize();
131
132    tokens.iter().map(|t| TokenInfo {
133        start: t.span.start,
134        end: t.span.end,
135        text: input[t.span.start..t.span.end].to_string(),
136        category: categorize_token(&t.kind, &interner),
137    }).collect()
138}
139
140// ═══════════════════════════════════════════════════════════════════
141// AST Visualization
142// ═══════════════════════════════════════════════════════════════════
143
/// AST node for tree visualization in the UI.
///
/// Nodes form a recursive tree: each carries a human-readable label, a
/// `node_type` string the frontend uses to select styling, and zero or
/// more children. The whole tree is serde-serializable for transport to
/// the browser.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct AstNode {
    /// Display label for this node (e.g., "∀x", "Run(x)").
    pub label: String,
    /// Node type for styling (e.g., "quantifier", "predicate", "connective").
    pub node_type: String,
    /// Child nodes in the AST.
    pub children: Vec<AstNode>,
}
154
155impl AstNode {
156    pub fn leaf(label: &str, node_type: &str) -> Self {
157        AstNode {
158            label: label.to_string(),
159            node_type: node_type.to_string(),
160            children: Vec::new(),
161        }
162    }
163
164    pub fn with_children(label: &str, node_type: &str, children: Vec<AstNode>) -> Self {
165        AstNode {
166            label: label.to_string(),
167            node_type: node_type.to_string(),
168            children,
169        }
170    }
171}
172
/// Converts a logic expression to an AST node for tree visualization.
///
/// Recursively builds a tree structure with labeled nodes suitable for
/// rendering in the UI. Each node includes a display label, node type
/// for styling, and child nodes.
pub fn expr_to_ast_node(expr: &LogicExpr, interner: &Interner) -> AstNode {
    match expr {
        // Predicate label shows the name and its arity; the argument
        // terms become child nodes.
        LogicExpr::Predicate { name, args, .. } => {
            let name_str = interner.resolve(*name);
            let arg_nodes: Vec<AstNode> = args.iter()
                .map(|t| term_to_ast_node(t, interner))
                .collect();
            AstNode::with_children(
                &format!("{}({})", name_str, args.len()),
                "predicate",
                arg_nodes,
            )
        }
        // Quantifiers render as a symbol + bound variable with the body
        // as the single child. Numeric kinds (Cardinal/AtLeast/AtMost)
        // carry a count, so they return early with their own labels.
        LogicExpr::Quantifier { kind, variable, body, .. } => {
            let var_str = interner.resolve(*variable);
            let symbol = match kind {
                ast::QuantifierKind::Universal => "∀",
                ast::QuantifierKind::Existential => "∃",
                ast::QuantifierKind::Most => "MOST",
                ast::QuantifierKind::Few => "FEW",
                ast::QuantifierKind::Many => "MANY",
                // "exactly n" reading
                ast::QuantifierKind::Cardinal(n) => return AstNode::with_children(
                    &format!("∃={}{}", n, var_str),
                    "quantifier",
                    vec![expr_to_ast_node(body, interner)],
                ),
                // "at least n" reading
                ast::QuantifierKind::AtLeast(n) => return AstNode::with_children(
                    &format!("∃≥{}{}", n, var_str),
                    "quantifier",
                    vec![expr_to_ast_node(body, interner)],
                ),
                // "at most n" reading
                ast::QuantifierKind::AtMost(n) => return AstNode::with_children(
                    &format!("∃≤{}{}", n, var_str),
                    "quantifier",
                    vec![expr_to_ast_node(body, interner)],
                ),
                ast::QuantifierKind::Generic => "GEN",
            };
            AstNode::with_children(
                &format!("{}{}", symbol, var_str),
                "quantifier",
                vec![expr_to_ast_node(body, interner)],
            )
        }
        // Binary connectives become a two-child node labeled with the
        // logical symbol; unknown operators display as "?".
        LogicExpr::BinaryOp { left, op, right } => {
            let op_str = match op {
                TokenType::And => "∧",
                TokenType::Or => "∨",
                TokenType::If | TokenType::Then => "→",
                TokenType::Iff => "↔",
                _ => "?",
            };
            AstNode::with_children(
                op_str,
                "binary_op",
                vec![
                    expr_to_ast_node(left, interner),
                    expr_to_ast_node(right, interner),
                ],
            )
        }
        // Negation is the only labeled unary operator; others show "?".
        LogicExpr::UnaryOp { op, operand } => {
            let op_str = match op {
                TokenType::Not => "¬",
                _ => "?",
            };
            AstNode::with_children(
                op_str,
                "unary_op",
                vec![expr_to_ast_node(operand, interner)],
            )
        }
        // Identity between two terms renders as "=" with term children.
        LogicExpr::Identity { left, right } => {
            AstNode::with_children(
                "=",
                "identity",
                vec![
                    term_to_ast_node(left, interner),
                    term_to_ast_node(right, interner),
                ],
            )
        }
        // Modal operator labeled with its domain (Debug-formatted).
        LogicExpr::Modal { vector, operand } => {
            AstNode::with_children(
                &format!("□{:?}", vector.domain),
                "modal",
                vec![expr_to_ast_node(operand, interner)],
            )
        }
        // Lambda abstraction: "λx" with the body as the single child.
        LogicExpr::Lambda { variable, body } => {
            let var_str = interner.resolve(*variable);
            AstNode::with_children(
                &format!("λ{}", var_str),
                "lambda",
                vec![expr_to_ast_node(body, interner)],
            )
        }
        // Fallback: Debug representation for variants without a
        // dedicated rendering.
        _ => AstNode::leaf(&format!("{:?}", expr), "other"),
    }
}
278
279fn term_to_ast_node(term: &Term, interner: &Interner) -> AstNode {
280    match term {
281        Term::Constant(sym) => AstNode::leaf(interner.resolve(*sym), "constant"),
282        Term::Variable(sym) => AstNode::leaf(interner.resolve(*sym), "variable"),
283        Term::Function(name, args) => {
284            let name_str = interner.resolve(*name);
285            let arg_nodes: Vec<AstNode> = args.iter()
286                .map(|t| term_to_ast_node(t, interner))
287                .collect();
288            AstNode::with_children(&format!("{}()", name_str), "function", arg_nodes)
289        }
290        Term::Group(terms) => {
291            let term_nodes: Vec<AstNode> = terms.iter()
292                .map(|t| term_to_ast_node(t, interner))
293                .collect();
294            AstNode::with_children("⊕", "group", term_nodes)
295        }
296        _ => AstNode::leaf(&format!("{:?}", term), "term"),
297    }
298}
299
300// ═══════════════════════════════════════════════════════════════════
301// Compilation Results
302// ═══════════════════════════════════════════════════════════════════
303
304#[derive(Debug, Clone, Serialize, Deserialize)]
305/// Result of compiling English input to FOL with UI metadata.
306pub struct CompileResult {
307    /// Primary FOL output (Unicode format), if compilation succeeded.
308    pub logic: Option<String>,
309    /// Simplified FOL with modals stripped (for verification).
310    pub simple_logic: Option<String>,
311    /// Kripke semantics output with explicit world quantification.
312    pub kripke_logic: Option<String>,
313    /// AST tree representation for visualization.
314    pub ast: Option<AstNode>,
315    /// All scope readings in Unicode format.
316    pub readings: Vec<String>,
317    /// All scope readings in simplified format.
318    pub simple_readings: Vec<String>,
319    /// All scope readings in Kripke format.
320    pub kripke_readings: Vec<String>,
321    /// Tokenization with categories for syntax highlighting.
322    pub tokens: Vec<TokenInfo>,
323    /// Parse/compile error message, if any.
324    pub error: Option<String>,
325}
326
/// Compile English input to FOL with full UI metadata.
///
/// Produces the primary Unicode FOL output plus simplified and Kripke
/// renderings, an AST visualization tree, all alternative scope readings,
/// and categorized tokens for highlighting. On a parse error the result
/// carries a Socratic explanation and only the token list.
pub fn compile_for_ui(input: &str) -> CompileResult {
    // Tokens for highlighting use their own throwaway interner inside
    // `tokenize_for_ui`; the main pipeline re-lexes below.
    let tokens = tokenize_for_ui(input);
    let readings = compile_forest(input);

    // Generate Simple readings (modals stripped) - deduplicated
    let simple_readings: Vec<String> = {
        let raw = compile_forest_with_options(input, CompileOptions { format: OutputFormat::SimpleFOL });
        let mut seen = HashSet::new();
        // `insert` returns false for duplicates, so this keeps the first
        // occurrence of each reading in order.
        raw.into_iter().filter(|r| seen.insert(r.clone())).collect()
    };

    // Generate Kripke readings with explicit world quantification
    let kripke_readings = compile_forest_with_options(input, CompileOptions { format: OutputFormat::Kripke });

    // Fresh lex of the same input to drive the main compilation pipeline.
    let mut interner = Interner::new();
    let mut lexer = Lexer::new(input, &mut interner);
    let lex_tokens = lexer.tokenize();

    // Collapse multi-word expressions into single tokens before discovery.
    let mwe_trie = mwe::build_mwe_trie();
    let lex_tokens = mwe::apply_mwe_pipeline(lex_tokens, &mwe_trie, &mut interner);

    // Pass 1: Discovery
    let type_registry = {
        let mut discovery = DiscoveryPass::new(&lex_tokens, &mut interner);
        discovery.run()
    };

    // Arenas back the AST context and must outlive the parser.
    let expr_arena = Arena::new();
    let term_arena = Arena::new();
    let np_arena = Arena::new();
    let sym_arena = Arena::new();
    let role_arena = Arena::new();
    let pp_arena = Arena::new();

    let ctx = AstContext::new(
        &expr_arena,
        &term_arena,
        &np_arena,
        &sym_arena,
        &role_arena,
        &pp_arena,
    );

    // Pass 2: Parse
    let mut world_state = drs::WorldState::new();
    let mut parser = Parser::new(lex_tokens, &mut world_state, &mut interner, ctx, type_registry);

    match parser.parse() {
        Ok(ast) => {
            // Post-parse passes: axioms, then pragmatics, applied in order.
            let ast = semantics::apply_axioms(ast, ctx.exprs, ctx.terms, &mut interner);
            let ast = pragmatics::apply_pragmatics(ast, ctx.exprs, &interner);
            let ast_node = expr_to_ast_node(ast, &interner);
            let mut registry = SymbolRegistry::new();
            let logic = ast.transpile_discourse(&mut registry, &interner, OutputFormat::Unicode);
            let simple_logic = ast.transpile_discourse(&mut registry, &interner, OutputFormat::SimpleFOL);

            // The Kripke rendering requires a separate lowering pass over
            // the (already axiom/pragmatics-processed) AST.
            let kripke_ast = semantics::apply_kripke_lowering(ast, ctx.exprs, ctx.terms, &mut interner);
            let kripke_logic = kripke_ast.transpile_discourse(&mut registry, &interner, OutputFormat::Kripke);

            CompileResult {
                logic: Some(logic),
                simple_logic: Some(simple_logic),
                kripke_logic: Some(kripke_logic),
                ast: Some(ast_node),
                readings,
                simple_readings,
                kripke_readings,
                tokens,
                error: None,
            }
        }
        Err(e) => {
            // Convert the raw parse error into a Socratic, user-facing
            // explanation; the token list is still returned for display.
            let advice = socratic_explanation(&e, &interner);
            CompileResult {
                logic: None,
                simple_logic: None,
                kripke_logic: None,
                ast: None,
                readings: Vec::new(),
                simple_readings: Vec::new(),
                kripke_readings: Vec::new(),
                tokens,
                error: Some(advice),
            }
        }
    }
}
415
416// ═══════════════════════════════════════════════════════════════════
417// Proof Integration
418// ═══════════════════════════════════════════════════════════════════
419
/// Result of compiling English to a proof expression.
///
/// Used by the proof engine to search for derivations. Contains the
/// converted proof expression, the simplified FOL string representation,
/// and any compilation error. Unlike [`CompileResult`], this type does
/// not derive serde traits — it is consumed directly by the proof engine
/// rather than serialized to the browser.
#[derive(Debug, Clone)]
pub struct ProofCompileResult {
    /// The compiled proof expression, or `None` on error.
    pub proof_expr: Option<ProofExpr>,
    /// Simplified FOL string representation for display.
    pub logic_string: Option<String>,
    /// Error message if compilation failed.
    pub error: Option<String>,
}
434
/// Compile English input to ProofExpr for the proof engine.
///
/// Runs the same front-end pipeline as [`compile_for_ui`] (lex → MWE →
/// discovery → parse → axioms → pragmatics), then converts the resulting
/// logic expression to a [`ProofExpr`] instead of rendering UI metadata.
pub fn compile_for_proof(input: &str) -> ProofCompileResult {
    use logicaffeine_language::proof_convert::logic_expr_to_proof_expr;

    let mut interner = Interner::new();
    let mut lexer = Lexer::new(input, &mut interner);
    let lex_tokens = lexer.tokenize();

    // Collapse multi-word expressions into single tokens before discovery.
    let mwe_trie = mwe::build_mwe_trie();
    let lex_tokens = mwe::apply_mwe_pipeline(lex_tokens, &mwe_trie, &mut interner);

    // Pass 1: Discovery — collects type information for the parser.
    let type_registry = {
        let mut discovery = DiscoveryPass::new(&lex_tokens, &mut interner);
        discovery.run()
    };

    // Arenas back the AST context and must outlive the parser.
    let expr_arena = Arena::new();
    let term_arena = Arena::new();
    let np_arena = Arena::new();
    let sym_arena = Arena::new();
    let role_arena = Arena::new();
    let pp_arena = Arena::new();

    let ctx = AstContext::new(
        &expr_arena,
        &term_arena,
        &np_arena,
        &sym_arena,
        &role_arena,
        &pp_arena,
    );

    // Pass 2: Parse.
    let mut world_state = drs::WorldState::new();
    let mut parser = Parser::new(lex_tokens, &mut world_state, &mut interner, ctx, type_registry);

    match parser.parse() {
        Ok(ast) => {
            // Post-parse passes, in the same order as `compile_for_ui`.
            let ast = semantics::apply_axioms(ast, ctx.exprs, ctx.terms, &mut interner);
            let ast = pragmatics::apply_pragmatics(ast, ctx.exprs, &interner);

            // Note: uses `transpile` (single expression), not
            // `transpile_discourse` as the UI path does.
            let mut registry = SymbolRegistry::new();
            let logic_string = ast.transpile(&mut registry, &interner, OutputFormat::SimpleFOL);
            let proof_expr = logic_expr_to_proof_expr(ast, &interner);

            ProofCompileResult {
                proof_expr: Some(proof_expr),
                logic_string: Some(logic_string),
                error: None,
            }
        }
        Err(e) => {
            // Convert the raw parse error into a Socratic explanation.
            let advice = socratic_explanation(&e, &interner);
            ProofCompileResult {
                proof_expr: None,
                logic_string: None,
                error: Some(advice),
            }
        }
    }
}
495
/// Result of compiling and verifying a theorem block.
///
/// Contains the parsed theorem structure (name, premises, goal) along
/// with the proof derivation tree if automatic proof search succeeded.
/// On failure, `name` is empty, the collections are empty, and only
/// `error` is populated.
#[derive(Debug, Clone)]
pub struct TheoremCompileResult {
    /// The theorem's declared name.
    pub name: String,
    /// Compiled premise expressions (axioms).
    pub premises: Vec<ProofExpr>,
    /// The goal expression to prove, or `None` on parse error.
    pub goal: Option<ProofExpr>,
    /// Simplified FOL string of the goal for display.
    pub goal_string: Option<String>,
    /// Derivation tree from backward chaining, if proof found.
    pub derivation: Option<DerivationTree>,
    /// Error message if compilation or proof failed.
    pub error: Option<String>,
}
515
516/// Compile a theorem block for UI display.
517pub fn compile_theorem_for_ui(input: &str) -> TheoremCompileResult {
518    use logicaffeine_language::proof_convert::logic_expr_to_proof_expr;
519
520    let mut interner = Interner::new();
521    let mut lexer = Lexer::new(input, &mut interner);
522    let tokens = lexer.tokenize();
523
524    let mwe_trie = mwe::build_mwe_trie();
525    let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);
526
527    let type_registry = {
528        let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
529        discovery.run()
530    };
531
532    let expr_arena = Arena::new();
533    let term_arena = Arena::new();
534    let np_arena = Arena::new();
535    let sym_arena = Arena::new();
536    let role_arena = Arena::new();
537    let pp_arena = Arena::new();
538
539    let ctx = AstContext::new(
540        &expr_arena,
541        &term_arena,
542        &np_arena,
543        &sym_arena,
544        &role_arena,
545        &pp_arena,
546    );
547
548    let mut world_state = drs::WorldState::new();
549    let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ctx, type_registry);
550
551    let statements = match parser.parse_program() {
552        Ok(stmts) => stmts,
553        Err(e) => {
554            return TheoremCompileResult {
555                name: String::new(),
556                premises: Vec::new(),
557                goal: None,
558                goal_string: None,
559                derivation: None,
560                error: Some(format!("Parse error: {:?}", e)),
561            };
562        }
563    };
564
565    let theorem = match statements.iter().find_map(|stmt| {
566        if let ast::Stmt::Theorem(t) = stmt {
567            Some(t)
568        } else {
569            None
570        }
571    }) {
572        Some(t) => t,
573        None => {
574            return TheoremCompileResult {
575                name: String::new(),
576                premises: Vec::new(),
577                goal: None,
578                goal_string: None,
579                derivation: None,
580                error: Some("No theorem block found".to_string()),
581            };
582        }
583    };
584
585    let premises: Vec<ProofExpr> = theorem
586        .premises
587        .iter()
588        .map(|p| logic_expr_to_proof_expr(p, &interner))
589        .collect();
590
591    let goal = logic_expr_to_proof_expr(theorem.goal, &interner);
592
593    let mut registry = SymbolRegistry::new();
594    let goal_string = theorem.goal.transpile(&mut registry, &interner, OutputFormat::SimpleFOL);
595
596    let derivation = if matches!(theorem.strategy, ast::theorem::ProofStrategy::Auto) {
597        let mut engine = BackwardChainer::new();
598        for premise in &premises {
599            engine.add_axiom(premise.clone());
600        }
601        engine.prove(goal.clone()).ok()
602    } else {
603        None
604    };
605
606    TheoremCompileResult {
607        name: theorem.name.clone(),
608        premises,
609        goal: Some(goal),
610        goal_string: Some(goal_string),
611        derivation,
612        error: None,
613    }
614}
615
616// ═══════════════════════════════════════════════════════════════════
617// Code Generation
618// ═══════════════════════════════════════════════════════════════════
619
/// Generate Rust code from LOGOS imperative source.
///
/// Runs the same front-end pipeline as the interpreter (lex → MWE →
/// full discovery → parse) and hands the parsed program to the code
/// generator. Only compiled when the `codegen` feature is enabled.
///
/// # Errors
///
/// Returns the parser's [`ParseError`] if the program fails to parse.
#[cfg(feature = "codegen")]
pub fn generate_rust_code(source: &str) -> Result<String, ParseError> {
    use logicaffeine_language::ast::stmt::{Stmt, Expr, TypeExpr};

    let mut interner = Interner::new();
    let mut lexer = Lexer::new(source, &mut interner);
    let tokens = lexer.tokenize();

    // Collapse multi-word expressions into single tokens before discovery.
    let mwe_trie = mwe::build_mwe_trie();
    let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);

    // Full discovery: collects both the type and policy registries.
    let (type_registry, policy_registry) = {
        let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
        let result = discovery.run_full();
        (result.types, result.policies)
    };
    // Only the type registry is consumed by the parser, so clone just
    // that one; the policy registry is passed to codegen by reference.
    let codegen_registry = type_registry.clone();

    let mut world_state = drs::WorldState::new();
    // Arenas back the AST context and must outlive the parser. The
    // imperative pipeline needs the statement/expression/type arenas too.
    let expr_arena = Arena::new();
    let term_arena = Arena::new();
    let np_arena = Arena::new();
    let sym_arena = Arena::new();
    let role_arena = Arena::new();
    let pp_arena = Arena::new();
    let stmt_arena: Arena<Stmt> = Arena::new();
    let imperative_expr_arena: Arena<Expr> = Arena::new();
    let type_expr_arena: Arena<TypeExpr> = Arena::new();

    let ast_ctx = AstContext::with_types(
        &expr_arena,
        &term_arena,
        &np_arena,
        &sym_arena,
        &role_arena,
        &pp_arena,
        &stmt_arena,
        &imperative_expr_arena,
        &type_expr_arena,
    );

    let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ast_ctx, type_registry);
    let stmts = parser.parse_program()?;

    let rust_code = crate::codegen::codegen_program(&stmts, &codegen_registry, &policy_registry, &interner);
    Ok(rust_code)
}
669
670// ═══════════════════════════════════════════════════════════════════
671// Interpreter (async)
672// ═══════════════════════════════════════════════════════════════════
673
674/// Interpret LOGOS imperative code and return output lines.
675pub async fn interpret_for_ui(input: &str) -> InterpreterResult {
676    use logicaffeine_language::ast::stmt::{Stmt, Expr, TypeExpr};
677
678    let mut interner = Interner::new();
679    let mut lexer = Lexer::new(input, &mut interner);
680    let tokens = lexer.tokenize();
681
682    let mwe_trie = mwe::build_mwe_trie();
683    let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);
684
685    let (type_registry, policy_registry) = {
686        let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
687        let result = discovery.run_full();
688        (result.types, result.policies)
689    };
690
691    let expr_arena = Arena::new();
692    let term_arena = Arena::new();
693    let np_arena = Arena::new();
694    let sym_arena = Arena::new();
695    let role_arena = Arena::new();
696    let pp_arena = Arena::new();
697    let stmt_arena: Arena<Stmt> = Arena::new();
698    let imperative_expr_arena: Arena<Expr> = Arena::new();
699    let type_expr_arena: Arena<TypeExpr> = Arena::new();
700
701    let ctx = AstContext::with_types(
702        &expr_arena,
703        &term_arena,
704        &np_arena,
705        &sym_arena,
706        &role_arena,
707        &pp_arena,
708        &stmt_arena,
709        &imperative_expr_arena,
710        &type_expr_arena,
711    );
712
713    let mut world_state = drs::WorldState::new();
714    let type_registry_for_interp = type_registry.clone();
715    let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ctx, type_registry);
716
717    match parser.parse_program() {
718        Ok(stmts) => {
719            let mut interp = crate::interpreter::Interpreter::new(&interner)
720                .with_type_registry(&type_registry_for_interp)
721                .with_policies(policy_registry);
722            match interp.run(&stmts).await {
723                Ok(()) => InterpreterResult {
724                    lines: interp.output,
725                    error: None,
726                },
727                Err(e) => InterpreterResult {
728                    lines: interp.output,
729                    error: Some(e),
730                },
731            }
732        }
733        Err(e) => {
734            let advice = socratic_explanation(&e, &interner);
735            InterpreterResult {
736                lines: vec![],
737                error: Some(advice),
738            }
739        }
740    }
741}
742
743/// Interpret LOGOS imperative code synchronously (no async runtime needed).
744///
745/// Uses the sync execution path when the program has no async operations
746/// (no file I/O, sleep, or mount). Falls back to async via block_on otherwise.
747pub fn interpret_for_ui_sync(input: &str) -> InterpreterResult {
748    use logicaffeine_language::ast::stmt::{Stmt, Expr, TypeExpr};
749
750    let mut interner = Interner::new();
751    let mut lexer = Lexer::new(input, &mut interner);
752    let tokens = lexer.tokenize();
753
754    let mwe_trie = mwe::build_mwe_trie();
755    let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);
756
757    let (type_registry, policy_registry) = {
758        let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
759        let result = discovery.run_full();
760        (result.types, result.policies)
761    };
762
763    let expr_arena = Arena::new();
764    let term_arena = Arena::new();
765    let np_arena = Arena::new();
766    let sym_arena = Arena::new();
767    let role_arena = Arena::new();
768    let pp_arena = Arena::new();
769    let stmt_arena: Arena<Stmt> = Arena::new();
770    let imperative_expr_arena: Arena<Expr> = Arena::new();
771    let type_expr_arena: Arena<TypeExpr> = Arena::new();
772
773    let ctx = AstContext::with_types(
774        &expr_arena,
775        &term_arena,
776        &np_arena,
777        &sym_arena,
778        &role_arena,
779        &pp_arena,
780        &stmt_arena,
781        &imperative_expr_arena,
782        &type_expr_arena,
783    );
784
785    let mut world_state = drs::WorldState::new();
786    let type_registry_for_interp = type_registry.clone();
787    let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ctx, type_registry);
788
789    match parser.parse_program() {
790        Ok(stmts) => {
791            let mut interp = crate::interpreter::Interpreter::new(&interner)
792                .with_type_registry(&type_registry_for_interp)
793                .with_policies(policy_registry);
794
795            if crate::interpreter::needs_async(&stmts) {
796                // Fall back to async path
797                use futures::executor::block_on;
798                match block_on(interp.run(&stmts)) {
799                    Ok(()) => InterpreterResult {
800                        lines: interp.output,
801                        error: None,
802                    },
803                    Err(e) => InterpreterResult {
804                        lines: interp.output,
805                        error: Some(e),
806                    },
807                }
808            } else {
809                // Use sync path — no Future allocation overhead
810                match interp.run_sync(&stmts) {
811                    Ok(()) => InterpreterResult {
812                        lines: interp.output,
813                        error: None,
814                    },
815                    Err(e) => InterpreterResult {
816                        lines: interp.output,
817                        error: Some(e),
818                    },
819                }
820            }
821        }
822        Err(e) => {
823            let advice = socratic_explanation(&e, &interner);
824            InterpreterResult {
825                lines: vec![],
826                error: Some(advice),
827            }
828        }
829    }
830}
831
/// Interpret LOGOS imperative code with streaming output.
///
/// The `on_output` callback is called each time `Show` executes, allowing
/// real-time output display like a REPL. The callback receives the output line.
/// The full transcript is also accumulated in the returned
/// [`InterpreterResult`]'s `lines`, so callers get both streaming and batch views.
///
/// On a parse error, no statements run and the error is rendered through
/// `socratic_explanation` for user-friendly display.
///
/// # Example
/// ```no_run
/// use std::rc::Rc;
/// use std::cell::RefCell;
/// # use logicaffeine_compile::interpret_streaming;
///
/// # fn main() {}
/// # async fn example() {
/// # let source = "## Main\nShow \"hello\".";
/// let lines = Rc::new(RefCell::new(Vec::new()));
/// let lines_clone = lines.clone();
///
/// interpret_streaming(source, Rc::new(RefCell::new(move |line: String| {
///     lines_clone.borrow_mut().push(line);
/// }))).await;
/// # }
/// ```
pub async fn interpret_streaming<F>(input: &str, on_output: std::rc::Rc<std::cell::RefCell<F>>) -> InterpreterResult
where
    F: FnMut(String) + 'static,
{
    use logicaffeine_language::ast::stmt::{Stmt, Expr, TypeExpr};
    use crate::interpreter::OutputCallback;

    // Lex the raw input, then merge multi-word expressions into single tokens.
    let mut interner = Interner::new();
    let mut lexer = Lexer::new(input, &mut interner);
    let tokens = lexer.tokenize();

    let mwe_trie = mwe::build_mwe_trie();
    let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);

    // Pre-parse discovery pass: collect user-declared types and policies
    // before the parser consumes the token stream.
    let (type_registry, policy_registry) = {
        let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
        let result = discovery.run_full();
        (result.types, result.policies)
    };

    // The arenas back every AST node; they are declared here (not in a
    // helper) so they outlive `ctx` and the parsed statements below.
    let expr_arena = Arena::new();
    let term_arena = Arena::new();
    let np_arena = Arena::new();
    let sym_arena = Arena::new();
    let role_arena = Arena::new();
    let pp_arena = Arena::new();
    let stmt_arena: Arena<Stmt> = Arena::new();
    let imperative_expr_arena: Arena<Expr> = Arena::new();
    let type_expr_arena: Arena<TypeExpr> = Arena::new();

    let ctx = AstContext::with_types(
        &expr_arena,
        &term_arena,
        &np_arena,
        &sym_arena,
        &role_arena,
        &pp_arena,
        &stmt_arena,
        &imperative_expr_arena,
        &type_expr_arena,
    );

    let mut world_state = drs::WorldState::new();
    // The parser takes ownership of `type_registry`; keep a copy for the
    // interpreter, which needs the same type information.
    let type_registry_for_interp = type_registry.clone();
    let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ctx, type_registry);

    match parser.parse_program() {
        Ok(stmts) => {
            // Create the callback wrapper that calls the user's callback
            let callback: OutputCallback = std::rc::Rc::new(std::cell::RefCell::new(move |line: String| {
                (on_output.borrow_mut())(line);
            }));

            let mut interp = crate::interpreter::Interpreter::new(&interner)
                .with_type_registry(&type_registry_for_interp)
                .with_policies(policy_registry)
                .with_output_callback(callback);

            // Always run the async path here: the callback is invoked live as
            // output is produced, and `interp.output` keeps the full transcript
            // whether or not execution failed partway.
            match interp.run(&stmts).await {
                Ok(()) => InterpreterResult {
                    lines: interp.output,
                    error: None,
                },
                Err(e) => InterpreterResult {
                    lines: interp.output,
                    error: Some(e),
                },
            }
        }
        Err(e) => {
            // Parse failure: translate the raw error into Socratic advice.
            let advice = socratic_explanation(&e, &interner);
            InterpreterResult {
                lines: vec![],
                error: Some(advice),
            }
        }
    }
}
932
933// ═══════════════════════════════════════════════════════════════════
934// Theorem Verification (Kernel-certified)
935// ═══════════════════════════════════════════════════════════════════
936
937use logicaffeine_language::ast::Stmt;
938use logicaffeine_language::proof_convert::logic_expr_to_proof_expr;
939use crate::kernel;
940
/// Phase 78: Verify a theorem with full kernel certification.
///
/// Pipeline:
/// 1. Parse theorem block
/// 2. Extract symbols and build kernel context
/// 3. Run proof engine
/// 4. Certify derivation tree to kernel term
/// 5. Type-check the term
/// 6. Return (proof_term, context)
///
/// # Errors
/// Returns a [`ParseError`] if the input fails to parse, contains no theorem
/// block, the proof search fails, certification fails, or the certified term
/// does not type-check. Proof/certification/type-check failures are wrapped
/// as `ParseErrorKind::Custom` with a default span.
pub fn verify_theorem(input: &str) -> Result<(kernel::Term, kernel::Context), ParseError> {
    // === STEP 1: Parse ===
    let mut interner = Interner::new();
    let mut lexer = Lexer::new(input, &mut interner);
    let tokens = lexer.tokenize();

    // Merge multi-word expressions into single tokens before parsing.
    let mwe_trie = mwe::build_mwe_trie();
    let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);

    // Discovery here uses `run()` (types only) — unlike the interpreter
    // entry points, no policy registry is needed for theorem verification.
    let type_registry = {
        let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
        discovery.run()
    };

    // Arenas must outlive `ctx` and the parsed statements, hence declared
    // at function scope.
    let expr_arena = Arena::new();
    let term_arena = Arena::new();
    let np_arena = Arena::new();
    let sym_arena = Arena::new();
    let role_arena = Arena::new();
    let pp_arena = Arena::new();

    let ctx = AstContext::new(
        &expr_arena,
        &term_arena,
        &np_arena,
        &sym_arena,
        &role_arena,
        &pp_arena,
    );

    let mut world_state = drs::WorldState::new();
    let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ctx, type_registry);
    let statements = parser.parse_program()?;

    // Only the first theorem block is verified; any additional theorems in
    // the input are ignored here.
    let theorem = statements
        .iter()
        .find_map(|stmt| {
            if let Stmt::Theorem(t) = stmt {
                Some(t)
            } else {
                None
            }
        })
        .ok_or_else(|| ParseError {
            kind: logicaffeine_language::error::ParseErrorKind::Custom("No theorem block found in input".to_string()),
            span: logicaffeine_language::token::Span::default(),
        })?;

    // === STEP 2: Build Kernel Context ===
    let mut kernel_ctx = kernel::Context::new();
    kernel::prelude::StandardLibrary::register(&mut kernel_ctx);

    // Convert premises and goal to ProofExpr
    let mut proof_exprs: Vec<ProofExpr> = Vec::new();
    for premise in &theorem.premises {
        let proof_expr = logic_expr_to_proof_expr(premise, &interner);
        proof_exprs.push(proof_expr);
    }
    let goal_expr = logic_expr_to_proof_expr(theorem.goal, &interner);

    // Collect symbols from all expressions
    let mut collector = SymbolCollector::new();
    for expr in &proof_exprs {
        collector.collect(expr);
    }
    collector.collect(&goal_expr);

    // Register predicates: P : Entity → Prop
    for pred_name in collector.predicates() {
        register_predicate(&mut kernel_ctx, pred_name);
    }

    // Register constants: Socrates : Entity
    for const_name in collector.constants() {
        register_constant(&mut kernel_ctx, const_name);
    }

    // Register axiom hypotheses and build engine.
    // NOTE(review): hypotheses are named "h1", "h2", … — assumes user-level
    // symbols never use those names; confirm there is no collision risk.
    let mut engine = BackwardChainer::new();
    for (i, proof_expr) in proof_exprs.iter().enumerate() {
        let hyp_name = format!("h{}", i + 1);
        let hyp_type = proof_expr_to_kernel_type(proof_expr)?;
        kernel_ctx.add_declaration(&hyp_name, hyp_type);
        engine.add_axiom(proof_expr.clone());
    }

    // === STEP 3: Prove ===
    let derivation = engine.prove(goal_expr.clone()).map_err(|e| ParseError {
        kind: logicaffeine_language::error::ParseErrorKind::Custom(format!("Proof failed: {}", e)),
        span: logicaffeine_language::token::Span::default(),
    })?;

    // === STEP 4: Certify ===
    let cert_ctx = logicaffeine_proof::certifier::CertificationContext::new(&kernel_ctx);
    let proof_term = logicaffeine_proof::certifier::certify(&derivation, &cert_ctx).map_err(|e| ParseError {
        kind: logicaffeine_language::error::ParseErrorKind::Custom(format!("Certification failed: {}", e)),
        span: logicaffeine_language::token::Span::default(),
    })?;

    // === STEP 5: Type-Check ===
    // The inferred type itself is discarded; only success matters — it proves
    // the certified term is well-typed in the kernel context.
    let _ = kernel::infer_type(&kernel_ctx, &proof_term).map_err(|e| ParseError {
        kind: logicaffeine_language::error::ParseErrorKind::Custom(format!("Type check failed: {}", e)),
        span: logicaffeine_language::token::Span::default(),
    })?;

    // === STEP 6: Return ===
    Ok((proof_term, kernel_ctx))
}
1058
1059/// Collects predicates and constants from ProofExpr
1060struct SymbolCollector {
1061    predicates: HashSet<String>,
1062    constants: HashSet<String>,
1063}
1064
1065impl SymbolCollector {
1066    fn new() -> Self {
1067        SymbolCollector {
1068            predicates: HashSet::new(),
1069            constants: HashSet::new(),
1070        }
1071    }
1072
1073    fn collect(&mut self, expr: &ProofExpr) {
1074        match expr {
1075            ProofExpr::Predicate { name, args, .. } => {
1076                self.predicates.insert(name.clone());
1077                for arg in args {
1078                    self.collect_term(arg);
1079                }
1080            }
1081            ProofExpr::And(l, r)
1082            | ProofExpr::Or(l, r)
1083            | ProofExpr::Implies(l, r)
1084            | ProofExpr::Iff(l, r) => {
1085                self.collect(l);
1086                self.collect(r);
1087            }
1088            ProofExpr::Not(inner) => {
1089                self.collect(inner);
1090            }
1091            ProofExpr::ForAll { body, .. } | ProofExpr::Exists { body, .. } => {
1092                self.collect(body);
1093            }
1094            ProofExpr::Atom(_) => {
1095                // Atoms are propositional constants, not FOL predicates
1096            }
1097            ProofExpr::Identity(l, r) => {
1098                // Collect constants from identity terms
1099                self.collect_term(l);
1100                self.collect_term(r);
1101            }
1102            _ => {}
1103        }
1104    }
1105
1106    fn collect_term(&mut self, term: &logicaffeine_proof::ProofTerm) {
1107        match term {
1108            logicaffeine_proof::ProofTerm::Constant(name) => {
1109                // Only add if it looks like a proper name (capitalized)
1110                if name.chars().next().map(|c| c.is_uppercase()).unwrap_or(false) {
1111                    self.constants.insert(name.clone());
1112                }
1113            }
1114            logicaffeine_proof::ProofTerm::Function(name, args) => {
1115                self.predicates.insert(name.clone());
1116                for arg in args {
1117                    self.collect_term(arg);
1118                }
1119            }
1120            _ => {}
1121        }
1122    }
1123
1124    fn predicates(&self) -> impl Iterator<Item = &String> {
1125        self.predicates.iter()
1126    }
1127
1128    fn constants(&self) -> impl Iterator<Item = &String> {
1129        self.constants.iter()
1130    }
1131}
1132
1133/// Register a predicate in the kernel context.
1134/// P : Entity → Prop
1135fn register_predicate(ctx: &mut kernel::Context, name: &str) {
1136    // Don't re-register if already present
1137    if ctx.get_global(name).is_some() {
1138        return;
1139    }
1140
1141    let pred_type = kernel::Term::Pi {
1142        param: "_".to_string(),
1143        param_type: Box::new(kernel::Term::Global("Entity".to_string())),
1144        body_type: Box::new(kernel::Term::Sort(kernel::Universe::Prop)),
1145    };
1146    ctx.add_declaration(name, pred_type);
1147}
1148
1149/// Register a constant in the kernel context.
1150/// Socrates : Entity
1151fn register_constant(ctx: &mut kernel::Context, name: &str) {
1152    // Don't re-register if already present
1153    if ctx.get_global(name).is_some() {
1154        return;
1155    }
1156
1157    ctx.add_declaration(name, kernel::Term::Global("Entity".to_string()));
1158}
1159
1160/// Convert ProofExpr (engine) to kernel Term (type)
1161fn proof_expr_to_kernel_type(expr: &ProofExpr) -> Result<kernel::Term, ParseError> {
1162    match expr {
1163        ProofExpr::Predicate { name, args, .. } => {
1164            // P(a, b, c) → ((P a) b) c
1165            let mut term = kernel::Term::Global(name.clone());
1166            for arg in args {
1167                let arg_term = proof_term_to_kernel_term(arg)?;
1168                term = kernel::Term::App(Box::new(term), Box::new(arg_term));
1169            }
1170            Ok(term)
1171        }
1172        ProofExpr::ForAll { variable, body } => {
1173            // ∀x.P(x) → Π(x:Entity). P(x)
1174            let body_type = proof_expr_to_kernel_type(body)?;
1175            Ok(kernel::Term::Pi {
1176                param: variable.clone(),
1177                param_type: Box::new(kernel::Term::Global("Entity".to_string())),
1178                body_type: Box::new(body_type),
1179            })
1180        }
1181        ProofExpr::Implies(ante, cons) => {
1182            // P → Q → Π(_:P). Q
1183            let ante_type = proof_expr_to_kernel_type(ante)?;
1184            let cons_type = proof_expr_to_kernel_type(cons)?;
1185            Ok(kernel::Term::Pi {
1186                param: "_".to_string(),
1187                param_type: Box::new(ante_type),
1188                body_type: Box::new(cons_type),
1189            })
1190        }
1191        ProofExpr::And(l, r) => {
1192            // P ∧ Q → And P Q
1193            let l_type = proof_expr_to_kernel_type(l)?;
1194            let r_type = proof_expr_to_kernel_type(r)?;
1195            Ok(kernel::Term::App(
1196                Box::new(kernel::Term::App(
1197                    Box::new(kernel::Term::Global("And".to_string())),
1198                    Box::new(l_type),
1199                )),
1200                Box::new(r_type),
1201            ))
1202        }
1203        ProofExpr::Or(l, r) => {
1204            // P ∨ Q → Or P Q
1205            let l_type = proof_expr_to_kernel_type(l)?;
1206            let r_type = proof_expr_to_kernel_type(r)?;
1207            Ok(kernel::Term::App(
1208                Box::new(kernel::Term::App(
1209                    Box::new(kernel::Term::Global("Or".to_string())),
1210                    Box::new(l_type),
1211                )),
1212                Box::new(r_type),
1213            ))
1214        }
1215        ProofExpr::Atom(name) => {
1216            // Propositional atoms: P → P (as a global)
1217            Ok(kernel::Term::Global(name.clone()))
1218        }
1219        ProofExpr::Identity(l, r) => {
1220            // a = b → Eq Entity a b
1221            let l_term = proof_term_to_kernel_term(l)?;
1222            let r_term = proof_term_to_kernel_term(r)?;
1223            Ok(kernel::Term::App(
1224                Box::new(kernel::Term::App(
1225                    Box::new(kernel::Term::App(
1226                        Box::new(kernel::Term::Global("Eq".to_string())),
1227                        Box::new(kernel::Term::Global("Entity".to_string())),
1228                    )),
1229                    Box::new(l_term),
1230                )),
1231                Box::new(r_term),
1232            ))
1233        }
1234        _ => Err(ParseError {
1235            kind: logicaffeine_language::error::ParseErrorKind::Custom(format!(
1236                "Unsupported ProofExpr for kernel type: {:?}",
1237                expr
1238            )),
1239            span: logicaffeine_language::token::Span::default(),
1240        }),
1241    }
1242}
1243
1244/// Convert ProofTerm to kernel Term
1245fn proof_term_to_kernel_term(term: &logicaffeine_proof::ProofTerm) -> Result<kernel::Term, ParseError> {
1246    match term {
1247        logicaffeine_proof::ProofTerm::Constant(name) => Ok(kernel::Term::Global(name.clone())),
1248        logicaffeine_proof::ProofTerm::Variable(name) => Ok(kernel::Term::Var(name.clone())),
1249        logicaffeine_proof::ProofTerm::Function(name, args) => {
1250            let mut t = kernel::Term::Global(name.clone());
1251            for arg in args {
1252                let arg_term = proof_term_to_kernel_term(arg)?;
1253                t = kernel::Term::App(Box::new(t), Box::new(arg_term));
1254            }
1255            Ok(t)
1256        }
1257        _ => Err(ParseError {
1258            kind: logicaffeine_language::error::ParseErrorKind::Custom(format!(
1259                "Unsupported ProofTerm for kernel: {:?}",
1260                term
1261            )),
1262            span: logicaffeine_language::token::Span::default(),
1263        }),
1264    }
1265}