use serde::{Deserialize, Serialize};
use std::collections::HashSet;
use logicaffeine_base::{Arena, Interner};
use logicaffeine_language::{
ast::{self, LogicExpr, Term},
analysis::DiscoveryPass,
arena_ctx::AstContext,
compile::{compile_forest, compile_forest_with_options},
drs,
error::socratic_explanation,
lexer::Lexer,
mwe,
parser::Parser,
pragmatics,
registry::SymbolRegistry,
semantics,
token::TokenType,
CompileOptions, OutputFormat, ParseError,
};
use logicaffeine_proof::{BackwardChainer, DerivationTree, ProofExpr};
pub use crate::interpreter::InterpreterResult;
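/// Coarse grammatical category assigned to each lexer token for UI highlighting.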
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum TokenCategory {
Quantifier,
Noun,
Verb,
Adjective,
Connective,
Determiner,
Preposition,
Pronoun,
Modal,
Punctuation,
Proper,
Other,
}
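/// A single token as presented to the UI: its byte span in the source, the text
/// it covers, and its coarse category.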
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TokenInfo {
pub start: usize,
pub end: usize,
pub text: String,
pub category: TokenCategory,
}
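/// Maps a lexer `TokenType` to the coarse `TokenCategory` used for display.
/// Token kinds without a dedicated bucket fall back to `TokenCategory::Other`.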
fn categorize_token(kind: &TokenType, _interner: &Interner) -> TokenCategory {
match kind {
TokenType::All | TokenType::Some | TokenType::No | TokenType::Any
| TokenType::Most | TokenType::Few | TokenType::Many
| TokenType::Cardinal(_) | TokenType::AtLeast(_) | TokenType::AtMost(_) => TokenCategory::Quantifier,
TokenType::Noun(_) => TokenCategory::Noun,
TokenType::Verb { .. } => TokenCategory::Verb,
TokenType::Adjective(_) | TokenType::NonIntersectiveAdjective(_) => TokenCategory::Adjective,
TokenType::And | TokenType::Or | TokenType::Not | TokenType::If | TokenType::Then
| TokenType::Iff | TokenType::Because => TokenCategory::Connective,
TokenType::Article(_) => TokenCategory::Determiner,
TokenType::Preposition(_) => TokenCategory::Preposition,
TokenType::Pronoun { .. } => TokenCategory::Pronoun,
TokenType::Must | TokenType::Can | TokenType::Should | TokenType::Shall
| TokenType::Would | TokenType::Could | TokenType::May | TokenType::Cannot
| TokenType::Might => TokenCategory::Modal,
TokenType::Period | TokenType::Comma => TokenCategory::Punctuation,
TokenType::ProperName(_) => TokenCategory::Proper,
_ => TokenCategory::Other,
}
}
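/// Lexes `input` with a fresh interner and returns one `TokenInfo` per token,
/// carrying its byte span, source text, and category.
///
/// Sketch of intended use (not run as a doctest; assumes the sentence is inside
/// the supported English fragment):
///
/// ```ignore
/// for tok in tokenize_for_ui("Socrates is mortal.") {
///     println!("{} -> {:?}", tok.text, tok.category);
/// }
/// ```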
pub fn tokenize_for_ui(input: &str) -> Vec<TokenInfo> {
let mut interner = Interner::new();
let mut lexer = Lexer::new(input, &mut interner);
let tokens = lexer.tokenize();
tokens.iter().map(|t| TokenInfo {
start: t.span.start,
end: t.span.end,
text: input[t.span.start..t.span.end].to_string(),
category: categorize_token(&t.kind, &interner),
}).collect()
}
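/// A serialisable tree node used to render the parsed logical form in the UI.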
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct AstNode {
pub label: String,
pub node_type: String,
pub children: Vec<AstNode>,
}
impl AstNode {
pub fn leaf(label: &str, node_type: &str) -> Self {
AstNode {
label: label.to_string(),
node_type: node_type.to_string(),
children: Vec::new(),
}
}
pub fn with_children(label: &str, node_type: &str, children: Vec<AstNode>) -> Self {
AstNode {
label: label.to_string(),
node_type: node_type.to_string(),
children,
}
}
}
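/// Converts a `LogicExpr` into a displayable `AstNode` tree, resolving interned
/// symbols to strings and labelling each node with its kind ("predicate",
/// "quantifier", "binary_op", ...). Variants without a dedicated rendering
/// become a `Debug`-formatted leaf.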
pub fn expr_to_ast_node(expr: &LogicExpr, interner: &Interner) -> AstNode {
match expr {
LogicExpr::Predicate { name, args, .. } => {
let name_str = interner.resolve(*name);
let arg_nodes: Vec<AstNode> = args.iter()
.map(|t| term_to_ast_node(t, interner))
.collect();
AstNode::with_children(
&format!("{}({})", name_str, args.len()),
"predicate",
arg_nodes,
)
}
LogicExpr::Quantifier { kind, variable, body, .. } => {
let var_str = interner.resolve(*variable);
let symbol = match kind {
ast::QuantifierKind::Universal => "∀".to_string(),
ast::QuantifierKind::Existential => "∃".to_string(),
ast::QuantifierKind::Most => "MOST".to_string(),
ast::QuantifierKind::Few => "FEW".to_string(),
ast::QuantifierKind::Many => "MANY".to_string(),
ast::QuantifierKind::Cardinal(n) => format!("∃={}", n),
ast::QuantifierKind::AtLeast(n) => format!("∃≥{}", n),
ast::QuantifierKind::AtMost(n) => format!("∃≤{}", n),
ast::QuantifierKind::Generic => "GEN".to_string(),
};
AstNode::with_children(
&format!("{}{}", symbol, var_str),
"quantifier",
vec![expr_to_ast_node(body, interner)],
)
}
LogicExpr::BinaryOp { left, op, right } => {
let op_str = match op {
TokenType::And => "∧",
TokenType::Or => "∨",
TokenType::If | TokenType::Then => "→",
TokenType::Iff => "↔",
_ => "?",
};
AstNode::with_children(
op_str,
"binary_op",
vec![
expr_to_ast_node(left, interner),
expr_to_ast_node(right, interner),
],
)
}
LogicExpr::UnaryOp { op, operand } => {
let op_str = match op {
TokenType::Not => "¬",
_ => "?",
};
AstNode::with_children(
op_str,
"unary_op",
vec![expr_to_ast_node(operand, interner)],
)
}
LogicExpr::Identity { left, right } => {
AstNode::with_children(
"=",
"identity",
vec![
term_to_ast_node(left, interner),
term_to_ast_node(right, interner),
],
)
}
LogicExpr::Modal { vector, operand } => {
AstNode::with_children(
&format!("□{:?}", vector.domain),
"modal",
vec![expr_to_ast_node(operand, interner)],
)
}
LogicExpr::Lambda { variable, body } => {
let var_str = interner.resolve(*variable);
AstNode::with_children(
&format!("λ{}", var_str),
"lambda",
vec![expr_to_ast_node(body, interner)],
)
}
_ => AstNode::leaf(&format!("{:?}", expr), "other"),
}
}
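/// Converts a `Term` into a displayable `AstNode`: constants and variables become
/// leaves, functions and plural groups become subtrees.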
fn term_to_ast_node(term: &Term, interner: &Interner) -> AstNode {
match term {
Term::Constant(sym) => AstNode::leaf(interner.resolve(*sym), "constant"),
Term::Variable(sym) => AstNode::leaf(interner.resolve(*sym), "variable"),
Term::Function(name, args) => {
let name_str = interner.resolve(*name);
let arg_nodes: Vec<AstNode> = args.iter()
.map(|t| term_to_ast_node(t, interner))
.collect();
AstNode::with_children(&format!("{}()", name_str), "function", arg_nodes)
}
Term::Group(terms) => {
let term_nodes: Vec<AstNode> = terms.iter()
.map(|t| term_to_ast_node(t, interner))
.collect();
AstNode::with_children("⊕", "group", term_nodes)
}
_ => AstNode::leaf(&format!("{:?}", term), "term"),
}
}
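/// Everything the UI needs after compiling a sentence: the primary logic string
/// in each output format, the rendered AST, the alternative readings, the token
/// list, and an error message if parsing failed.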
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompileResult {
pub logic: Option<String>,
pub simple_logic: Option<String>,
pub kripke_logic: Option<String>,
pub ast: Option<AstNode>,
pub readings: Vec<String>,
pub simple_readings: Vec<String>,
pub kripke_readings: Vec<String>,
pub tokens: Vec<TokenInfo>,
pub error: Option<String>,
}
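/// Compiles `input` end to end for the UI: tokenisation, multi-word-expression
/// handling, discovery, parsing, semantic and pragmatic passes, then
/// transpilation to Unicode, SimpleFOL, and Kripke output. Alternative readings
/// come from `compile_forest*`; the SimpleFOL readings are deduplicated while
/// preserving order. On a parse error the result carries a Socratic explanation
/// and the token list only.
///
/// Sketch of intended use (not run as a doctest; assumes the sentence is inside
/// the supported English fragment):
///
/// ```ignore
/// let result = compile_for_ui("Every dog barks.");
/// if let Some(logic) = &result.logic {
///     println!("{logic}");
/// } else if let Some(err) = &result.error {
///     eprintln!("{err}");
/// }
/// ```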
pub fn compile_for_ui(input: &str) -> CompileResult {
let tokens = tokenize_for_ui(input);
let readings = compile_forest(input);
let simple_readings: Vec<String> = {
let raw = compile_forest_with_options(input, CompileOptions { format: OutputFormat::SimpleFOL });
let mut seen = HashSet::new();
raw.into_iter().filter(|r| seen.insert(r.clone())).collect()
};
let kripke_readings = compile_forest_with_options(input, CompileOptions { format: OutputFormat::Kripke });
let mut interner = Interner::new();
let mut lexer = Lexer::new(input, &mut interner);
let lex_tokens = lexer.tokenize();
let mwe_trie = mwe::build_mwe_trie();
let lex_tokens = mwe::apply_mwe_pipeline(lex_tokens, &mwe_trie, &mut interner);
let type_registry = {
let mut discovery = DiscoveryPass::new(&lex_tokens, &mut interner);
discovery.run()
};
let expr_arena = Arena::new();
let term_arena = Arena::new();
let np_arena = Arena::new();
let sym_arena = Arena::new();
let role_arena = Arena::new();
let pp_arena = Arena::new();
let ctx = AstContext::new(
&expr_arena,
&term_arena,
&np_arena,
&sym_arena,
&role_arena,
&pp_arena,
);
let mut world_state = drs::WorldState::new();
let mut parser = Parser::new(lex_tokens, &mut world_state, &mut interner, ctx, type_registry);
match parser.parse() {
Ok(ast) => {
let ast = semantics::apply_axioms(ast, ctx.exprs, ctx.terms, &mut interner);
let ast = pragmatics::apply_pragmatics(ast, ctx.exprs, &interner);
let ast_node = expr_to_ast_node(ast, &interner);
let mut registry = SymbolRegistry::new();
let logic = ast.transpile_discourse(&mut registry, &interner, OutputFormat::Unicode);
let simple_logic = ast.transpile_discourse(&mut registry, &interner, OutputFormat::SimpleFOL);
let kripke_ast = semantics::apply_kripke_lowering(ast, ctx.exprs, ctx.terms, &mut interner);
let kripke_logic = kripke_ast.transpile_discourse(&mut registry, &interner, OutputFormat::Kripke);
CompileResult {
logic: Some(logic),
simple_logic: Some(simple_logic),
kripke_logic: Some(kripke_logic),
ast: Some(ast_node),
readings,
simple_readings,
kripke_readings,
tokens,
error: None,
}
}
Err(e) => {
let advice = socratic_explanation(&e, &interner);
CompileResult {
logic: None,
simple_logic: None,
kripke_logic: None,
ast: None,
readings: Vec::new(),
simple_readings: Vec::new(),
kripke_readings: Vec::new(),
tokens,
error: Some(advice),
}
}
}
}
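/// Result of compiling a sentence for the proof engine: the `ProofExpr`, its
/// SimpleFOL rendering, or an error explanation.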
#[derive(Debug, Clone)]
pub struct ProofCompileResult {
pub proof_expr: Option<ProofExpr>,
pub logic_string: Option<String>,
pub error: Option<String>,
}
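/// Compiles `input` through the same pipeline as `compile_for_ui`, but stops
/// after converting the logical form into a `ProofExpr` for the proof engine,
/// returned alongside its SimpleFOL string.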
pub fn compile_for_proof(input: &str) -> ProofCompileResult {
use logicaffeine_language::proof_convert::logic_expr_to_proof_expr;
let mut interner = Interner::new();
let mut lexer = Lexer::new(input, &mut interner);
let lex_tokens = lexer.tokenize();
let mwe_trie = mwe::build_mwe_trie();
let lex_tokens = mwe::apply_mwe_pipeline(lex_tokens, &mwe_trie, &mut interner);
let type_registry = {
let mut discovery = DiscoveryPass::new(&lex_tokens, &mut interner);
discovery.run()
};
let expr_arena = Arena::new();
let term_arena = Arena::new();
let np_arena = Arena::new();
let sym_arena = Arena::new();
let role_arena = Arena::new();
let pp_arena = Arena::new();
let ctx = AstContext::new(
&expr_arena,
&term_arena,
&np_arena,
&sym_arena,
&role_arena,
&pp_arena,
);
let mut world_state = drs::WorldState::new();
let mut parser = Parser::new(lex_tokens, &mut world_state, &mut interner, ctx, type_registry);
match parser.parse() {
Ok(ast) => {
let ast = semantics::apply_axioms(ast, ctx.exprs, ctx.terms, &mut interner);
let ast = pragmatics::apply_pragmatics(ast, ctx.exprs, &interner);
let mut registry = SymbolRegistry::new();
let logic_string = ast.transpile(&mut registry, &interner, OutputFormat::SimpleFOL);
let proof_expr = logic_expr_to_proof_expr(ast, &interner);
ProofCompileResult {
proof_expr: Some(proof_expr),
logic_string: Some(logic_string),
error: None,
}
}
Err(e) => {
let advice = socratic_explanation(&e, &interner);
ProofCompileResult {
proof_expr: None,
logic_string: None,
error: Some(advice),
}
}
}
}
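/// Result of compiling a theorem block: its name, premises and goal as
/// `ProofExpr`s, the goal's SimpleFOL rendering, an automatic derivation if one
/// was attempted and found, and an error message if anything failed.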
#[derive(Debug, Clone)]
pub struct TheoremCompileResult {
pub name: String,
pub premises: Vec<ProofExpr>,
pub goal: Option<ProofExpr>,
pub goal_string: Option<String>,
pub derivation: Option<DerivationTree>,
pub error: Option<String>,
}
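/// Parses `input` as a program, extracts the first `Theorem` statement, and
/// converts its premises and goal into `ProofExpr`s. When the theorem's strategy
/// is `Auto`, a `BackwardChainer` seeded with the premises attempts a derivation
/// of the goal; a failed proof simply leaves `derivation` as `None`.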
pub fn compile_theorem_for_ui(input: &str) -> TheoremCompileResult {
use logicaffeine_language::proof_convert::logic_expr_to_proof_expr;
let mut interner = Interner::new();
let mut lexer = Lexer::new(input, &mut interner);
let tokens = lexer.tokenize();
let mwe_trie = mwe::build_mwe_trie();
let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);
let type_registry = {
let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
discovery.run()
};
let expr_arena = Arena::new();
let term_arena = Arena::new();
let np_arena = Arena::new();
let sym_arena = Arena::new();
let role_arena = Arena::new();
let pp_arena = Arena::new();
let ctx = AstContext::new(
&expr_arena,
&term_arena,
&np_arena,
&sym_arena,
&role_arena,
&pp_arena,
);
let mut world_state = drs::WorldState::new();
let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ctx, type_registry);
let statements = match parser.parse_program() {
Ok(stmts) => stmts,
Err(e) => {
return TheoremCompileResult {
name: String::new(),
premises: Vec::new(),
goal: None,
goal_string: None,
derivation: None,
error: Some(format!("Parse error: {:?}", e)),
};
}
};
let theorem = match statements.iter().find_map(|stmt| {
if let ast::Stmt::Theorem(t) = stmt {
Some(t)
} else {
None
}
}) {
Some(t) => t,
None => {
return TheoremCompileResult {
name: String::new(),
premises: Vec::new(),
goal: None,
goal_string: None,
derivation: None,
error: Some("No theorem block found".to_string()),
};
}
};
let premises: Vec<ProofExpr> = theorem
.premises
.iter()
.map(|p| logic_expr_to_proof_expr(p, &interner))
.collect();
let goal = logic_expr_to_proof_expr(theorem.goal, &interner);
let mut registry = SymbolRegistry::new();
let goal_string = theorem.goal.transpile(&mut registry, &interner, OutputFormat::SimpleFOL);
let derivation = if matches!(theorem.strategy, ast::theorem::ProofStrategy::Auto) {
let mut engine = BackwardChainer::new();
for premise in &premises {
engine.add_axiom(premise.clone());
}
engine.prove(goal.clone()).ok()
} else {
None
};
TheoremCompileResult {
name: theorem.name.clone(),
premises,
goal: Some(goal),
goal_string: Some(goal_string),
derivation,
error: None,
}
}
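/// Compiles `source` to Rust code (available only with the `codegen` feature).
/// Runs the full discovery pass to collect the type and policy registries,
/// parses the program, infers types, and hands everything to the code generator.
/// Parse errors are returned unchanged.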
#[cfg(feature = "codegen")]
pub fn generate_rust_code(source: &str) -> Result<String, ParseError> {
use logicaffeine_language::ast::stmt::{Stmt, Expr, TypeExpr};
let mut interner = Interner::new();
let mut lexer = Lexer::new(source, &mut interner);
let tokens = lexer.tokenize();
let mwe_trie = mwe::build_mwe_trie();
let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);
let (type_registry, policy_registry) = {
let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
let result = discovery.run_full();
(result.types, result.policies)
};
let codegen_registry = type_registry.clone();
let codegen_policies = policy_registry.clone();
let mut world_state = drs::WorldState::new();
let expr_arena = Arena::new();
let term_arena = Arena::new();
let np_arena = Arena::new();
let sym_arena = Arena::new();
let role_arena = Arena::new();
let pp_arena = Arena::new();
let stmt_arena: Arena<Stmt> = Arena::new();
let imperative_expr_arena: Arena<Expr> = Arena::new();
let type_expr_arena: Arena<TypeExpr> = Arena::new();
let ast_ctx = AstContext::with_types(
&expr_arena,
&term_arena,
&np_arena,
&sym_arena,
&role_arena,
&pp_arena,
&stmt_arena,
&imperative_expr_arena,
&type_expr_arena,
);
let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ast_ctx, type_registry);
let stmts = parser.parse_program()?;
let type_env = crate::analysis::types::TypeEnv::infer_program(&stmts, &interner, &codegen_registry);
let rust_code = crate::codegen::codegen_program(&stmts, &codegen_registry, &codegen_policies, &interner, &type_env);
Ok(rust_code)
}
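/// Parses `input` as a program and runs it in the async interpreter, returning
/// the collected output lines together with any parse or runtime error.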
pub async fn interpret_for_ui(input: &str) -> InterpreterResult {
use logicaffeine_language::ast::stmt::{Stmt, Expr, TypeExpr};
let mut interner = Interner::new();
let mut lexer = Lexer::new(input, &mut interner);
let tokens = lexer.tokenize();
let mwe_trie = mwe::build_mwe_trie();
let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);
let (type_registry, policy_registry) = {
let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
let result = discovery.run_full();
(result.types, result.policies)
};
let expr_arena = Arena::new();
let term_arena = Arena::new();
let np_arena = Arena::new();
let sym_arena = Arena::new();
let role_arena = Arena::new();
let pp_arena = Arena::new();
let stmt_arena: Arena<Stmt> = Arena::new();
let imperative_expr_arena: Arena<Expr> = Arena::new();
let type_expr_arena: Arena<TypeExpr> = Arena::new();
let ctx = AstContext::with_types(
&expr_arena,
&term_arena,
&np_arena,
&sym_arena,
&role_arena,
&pp_arena,
&stmt_arena,
&imperative_expr_arena,
&type_expr_arena,
);
let mut world_state = drs::WorldState::new();
let type_registry_for_interp = type_registry.clone();
let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ctx, type_registry);
match parser.parse_program() {
Ok(stmts) => {
let mut interp = crate::interpreter::Interpreter::new(&interner)
.with_type_registry(&type_registry_for_interp)
.with_policies(policy_registry);
match interp.run(&stmts).await {
Ok(()) => InterpreterResult {
lines: interp.output,
error: None,
},
Err(e) => InterpreterResult {
lines: interp.output,
error: Some(e),
},
}
}
Err(e) => {
let advice = socratic_explanation(&e, &interner);
InterpreterResult {
lines: vec![],
error: Some(advice),
}
}
}
}
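/// Synchronous counterpart of `interpret_for_ui`: programs that need async
/// features are driven to completion with `futures::executor::block_on`; all
/// other programs go through the interpreter's synchronous path.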
pub fn interpret_for_ui_sync(input: &str) -> InterpreterResult {
use logicaffeine_language::ast::stmt::{Stmt, Expr, TypeExpr};
let mut interner = Interner::new();
let mut lexer = Lexer::new(input, &mut interner);
let tokens = lexer.tokenize();
let mwe_trie = mwe::build_mwe_trie();
let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);
let (type_registry, policy_registry) = {
let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
let result = discovery.run_full();
(result.types, result.policies)
};
let expr_arena = Arena::new();
let term_arena = Arena::new();
let np_arena = Arena::new();
let sym_arena = Arena::new();
let role_arena = Arena::new();
let pp_arena = Arena::new();
let stmt_arena: Arena<Stmt> = Arena::new();
let imperative_expr_arena: Arena<Expr> = Arena::new();
let type_expr_arena: Arena<TypeExpr> = Arena::new();
let ctx = AstContext::with_types(
&expr_arena,
&term_arena,
&np_arena,
&sym_arena,
&role_arena,
&pp_arena,
&stmt_arena,
&imperative_expr_arena,
&type_expr_arena,
);
let mut world_state = drs::WorldState::new();
let type_registry_for_interp = type_registry.clone();
let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ctx, type_registry);
match parser.parse_program() {
Ok(stmts) => {
let mut interp = crate::interpreter::Interpreter::new(&interner)
.with_type_registry(&type_registry_for_interp)
.with_policies(policy_registry);
if crate::interpreter::needs_async(&stmts) {
use futures::executor::block_on;
match block_on(interp.run(&stmts)) {
Ok(()) => InterpreterResult {
lines: interp.output,
error: None,
},
Err(e) => InterpreterResult {
lines: interp.output,
error: Some(e),
},
}
} else {
match interp.run_sync(&stmts) {
Ok(()) => InterpreterResult {
lines: interp.output,
error: None,
},
Err(e) => InterpreterResult {
lines: interp.output,
error: Some(e),
},
}
}
}
Err(e) => {
let advice = socratic_explanation(&e, &interner);
InterpreterResult {
lines: vec![],
error: Some(advice),
}
}
}
}
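/// Like `interpret_for_ui`, but invokes `on_output` for each line as it is
/// produced, in addition to returning the collected output at the end.
///
/// Sketch of intended use (not run as a doctest; `source` is a placeholder for a
/// program in the supported language):
///
/// ```ignore
/// use std::{cell::RefCell, rc::Rc};
///
/// let on_line = Rc::new(RefCell::new(|line: String| println!("{line}")));
/// let result = interpret_streaming(source, on_line).await;
/// ```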
pub async fn interpret_streaming<F>(input: &str, on_output: std::rc::Rc<std::cell::RefCell<F>>) -> InterpreterResult
where
F: FnMut(String) + 'static,
{
use logicaffeine_language::ast::stmt::{Stmt, Expr, TypeExpr};
use crate::interpreter::OutputCallback;
let mut interner = Interner::new();
let mut lexer = Lexer::new(input, &mut interner);
let tokens = lexer.tokenize();
let mwe_trie = mwe::build_mwe_trie();
let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);
let (type_registry, policy_registry) = {
let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
let result = discovery.run_full();
(result.types, result.policies)
};
let expr_arena = Arena::new();
let term_arena = Arena::new();
let np_arena = Arena::new();
let sym_arena = Arena::new();
let role_arena = Arena::new();
let pp_arena = Arena::new();
let stmt_arena: Arena<Stmt> = Arena::new();
let imperative_expr_arena: Arena<Expr> = Arena::new();
let type_expr_arena: Arena<TypeExpr> = Arena::new();
let ctx = AstContext::with_types(
&expr_arena,
&term_arena,
&np_arena,
&sym_arena,
&role_arena,
&pp_arena,
&stmt_arena,
&imperative_expr_arena,
&type_expr_arena,
);
let mut world_state = drs::WorldState::new();
let type_registry_for_interp = type_registry.clone();
let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ctx, type_registry);
match parser.parse_program() {
Ok(stmts) => {
let callback: OutputCallback = std::rc::Rc::new(std::cell::RefCell::new(move |line: String| {
(on_output.borrow_mut())(line);
}));
let mut interp = crate::interpreter::Interpreter::new(&interner)
.with_type_registry(&type_registry_for_interp)
.with_policies(policy_registry)
.with_output_callback(callback);
match interp.run(&stmts).await {
Ok(()) => InterpreterResult {
lines: interp.output,
error: None,
},
Err(e) => InterpreterResult {
lines: interp.output,
error: Some(e),
},
}
}
Err(e) => {
let advice = socratic_explanation(&e, &interner);
InterpreterResult {
lines: vec![],
error: Some(advice),
}
}
}
}
use logicaffeine_language::ast::Stmt;
use logicaffeine_language::proof_convert::logic_expr_to_proof_expr;
use crate::kernel;
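/// Full verification pipeline for a theorem block: parse the program, find the
/// first `Theorem` statement, register the predicates and constants it mentions
/// in a kernel context preloaded with the standard prelude, add the premises as
/// hypotheses, prove the goal with a `BackwardChainer`, certify the derivation
/// into a kernel term, and type-check that term. Returns the proof term together
/// with the kernel context it was checked in.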
pub fn verify_theorem(input: &str) -> Result<(kernel::Term, kernel::Context), ParseError> {
let mut interner = Interner::new();
let mut lexer = Lexer::new(input, &mut interner);
let tokens = lexer.tokenize();
let mwe_trie = mwe::build_mwe_trie();
let tokens = mwe::apply_mwe_pipeline(tokens, &mwe_trie, &mut interner);
let type_registry = {
let mut discovery = DiscoveryPass::new(&tokens, &mut interner);
discovery.run()
};
let expr_arena = Arena::new();
let term_arena = Arena::new();
let np_arena = Arena::new();
let sym_arena = Arena::new();
let role_arena = Arena::new();
let pp_arena = Arena::new();
let ctx = AstContext::new(
&expr_arena,
&term_arena,
&np_arena,
&sym_arena,
&role_arena,
&pp_arena,
);
let mut world_state = drs::WorldState::new();
let mut parser = Parser::new(tokens, &mut world_state, &mut interner, ctx, type_registry);
let statements = parser.parse_program()?;
let theorem = statements
.iter()
.find_map(|stmt| {
if let Stmt::Theorem(t) = stmt {
Some(t)
} else {
None
}
})
.ok_or_else(|| ParseError {
kind: logicaffeine_language::error::ParseErrorKind::Custom("No theorem block found in input".to_string()),
span: logicaffeine_language::token::Span::default(),
})?;
let mut kernel_ctx = kernel::Context::new();
kernel::prelude::StandardLibrary::register(&mut kernel_ctx);
let mut proof_exprs: Vec<ProofExpr> = Vec::new();
for premise in &theorem.premises {
let proof_expr = logic_expr_to_proof_expr(premise, &interner);
proof_exprs.push(proof_expr);
}
let goal_expr = logic_expr_to_proof_expr(theorem.goal, &interner);
let mut collector = SymbolCollector::new();
for expr in &proof_exprs {
collector.collect(expr);
}
collector.collect(&goal_expr);
for pred_name in collector.predicates() {
register_predicate(&mut kernel_ctx, pred_name);
}
for const_name in collector.constants() {
register_constant(&mut kernel_ctx, const_name);
}
let mut engine = BackwardChainer::new();
for (i, proof_expr) in proof_exprs.iter().enumerate() {
let hyp_name = format!("h{}", i + 1);
let hyp_type = proof_expr_to_kernel_type(proof_expr)?;
kernel_ctx.add_declaration(&hyp_name, hyp_type);
engine.add_axiom(proof_expr.clone());
}
let derivation = engine.prove(goal_expr.clone()).map_err(|e| ParseError {
kind: logicaffeine_language::error::ParseErrorKind::Custom(format!("Proof failed: {}", e)),
span: logicaffeine_language::token::Span::default(),
})?;
let cert_ctx = logicaffeine_proof::certifier::CertificationContext::new(&kernel_ctx);
let proof_term = logicaffeine_proof::certifier::certify(&derivation, &cert_ctx).map_err(|e| ParseError {
kind: logicaffeine_language::error::ParseErrorKind::Custom(format!("Certification failed: {}", e)),
span: logicaffeine_language::token::Span::default(),
})?;
let _ = kernel::infer_type(&kernel_ctx, &proof_term).map_err(|e| ParseError {
kind: logicaffeine_language::error::ParseErrorKind::Custom(format!("Type check failed: {}", e)),
span: logicaffeine_language::token::Span::default(),
})?;
Ok((proof_term, kernel_ctx))
}
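/// Walks `ProofExpr`s to gather the predicate and constant names that must be
/// declared in the kernel context. Only constants whose name begins with an
/// uppercase letter are collected; function names are registered as predicates.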
struct SymbolCollector {
predicates: HashSet<String>,
constants: HashSet<String>,
}
impl SymbolCollector {
fn new() -> Self {
SymbolCollector {
predicates: HashSet::new(),
constants: HashSet::new(),
}
}
fn collect(&mut self, expr: &ProofExpr) {
match expr {
ProofExpr::Predicate { name, args, .. } => {
self.predicates.insert(name.clone());
for arg in args {
self.collect_term(arg);
}
}
ProofExpr::And(l, r)
| ProofExpr::Or(l, r)
| ProofExpr::Implies(l, r)
| ProofExpr::Iff(l, r) => {
self.collect(l);
self.collect(r);
}
ProofExpr::Not(inner) => {
self.collect(inner);
}
ProofExpr::ForAll { body, .. } | ProofExpr::Exists { body, .. } => {
self.collect(body);
}
ProofExpr::Atom(_) => {
}
ProofExpr::Identity(l, r) => {
self.collect_term(l);
self.collect_term(r);
}
_ => {}
}
}
fn collect_term(&mut self, term: &logicaffeine_proof::ProofTerm) {
match term {
logicaffeine_proof::ProofTerm::Constant(name) => {
if name.chars().next().map(|c| c.is_uppercase()).unwrap_or(false) {
self.constants.insert(name.clone());
}
}
logicaffeine_proof::ProofTerm::Function(name, args) => {
self.predicates.insert(name.clone());
for arg in args {
self.collect_term(arg);
}
}
_ => {}
}
}
fn predicates(&self) -> impl Iterator<Item = &String> {
self.predicates.iter()
}
fn constants(&self) -> impl Iterator<Item = &String> {
self.constants.iter()
}
}
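/// Declares `name` as a unary predicate `Entity -> Prop` unless the context
/// already contains it.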
fn register_predicate(ctx: &mut kernel::Context, name: &str) {
if ctx.get_global(name).is_some() {
return;
}
let pred_type = kernel::Term::Pi {
param: "_".to_string(),
param_type: Box::new(kernel::Term::Global("Entity".to_string())),
body_type: Box::new(kernel::Term::Sort(kernel::Universe::Prop)),
};
ctx.add_declaration(name, pred_type);
}
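/// Declares `name` as an `Entity` global unless the context already contains it.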
fn register_constant(ctx: &mut kernel::Context, name: &str) {
if ctx.get_global(name).is_some() {
return;
}
ctx.add_declaration(name, kernel::Term::Global("Entity".to_string()));
}
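/// Encodes a `ProofExpr` as a kernel type: predicates and atoms become
/// applications of globals, universal quantification and implication become `Pi`
/// types, and conjunction, disjunction, and identity use the `And`, `Or`, and
/// `Eq` globals. Variants without an encoding here (negation, `Iff`,
/// existentials) are reported as unsupported.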
fn proof_expr_to_kernel_type(expr: &ProofExpr) -> Result<kernel::Term, ParseError> {
match expr {
ProofExpr::Predicate { name, args, .. } => {
let mut term = kernel::Term::Global(name.clone());
for arg in args {
let arg_term = proof_term_to_kernel_term(arg)?;
term = kernel::Term::App(Box::new(term), Box::new(arg_term));
}
Ok(term)
}
ProofExpr::ForAll { variable, body } => {
let body_type = proof_expr_to_kernel_type(body)?;
Ok(kernel::Term::Pi {
param: variable.clone(),
param_type: Box::new(kernel::Term::Global("Entity".to_string())),
body_type: Box::new(body_type),
})
}
ProofExpr::Implies(ante, cons) => {
let ante_type = proof_expr_to_kernel_type(ante)?;
let cons_type = proof_expr_to_kernel_type(cons)?;
Ok(kernel::Term::Pi {
param: "_".to_string(),
param_type: Box::new(ante_type),
body_type: Box::new(cons_type),
})
}
ProofExpr::And(l, r) => {
let l_type = proof_expr_to_kernel_type(l)?;
let r_type = proof_expr_to_kernel_type(r)?;
Ok(kernel::Term::App(
Box::new(kernel::Term::App(
Box::new(kernel::Term::Global("And".to_string())),
Box::new(l_type),
)),
Box::new(r_type),
))
}
ProofExpr::Or(l, r) => {
let l_type = proof_expr_to_kernel_type(l)?;
let r_type = proof_expr_to_kernel_type(r)?;
Ok(kernel::Term::App(
Box::new(kernel::Term::App(
Box::new(kernel::Term::Global("Or".to_string())),
Box::new(l_type),
)),
Box::new(r_type),
))
}
ProofExpr::Atom(name) => {
Ok(kernel::Term::Global(name.clone()))
}
ProofExpr::Identity(l, r) => {
let l_term = proof_term_to_kernel_term(l)?;
let r_term = proof_term_to_kernel_term(r)?;
Ok(kernel::Term::App(
Box::new(kernel::Term::App(
Box::new(kernel::Term::App(
Box::new(kernel::Term::Global("Eq".to_string())),
Box::new(kernel::Term::Global("Entity".to_string())),
)),
Box::new(l_term),
)),
Box::new(r_term),
))
}
_ => Err(ParseError {
kind: logicaffeine_language::error::ParseErrorKind::Custom(format!(
"Unsupported ProofExpr for kernel type: {:?}",
expr
)),
span: logicaffeine_language::token::Span::default(),
}),
}
}
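/// Encodes a `ProofTerm` as a kernel term: constants become globals, variables
/// become kernel variables, and functions become curried applications. Other
/// term forms are reported as unsupported.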
fn proof_term_to_kernel_term(term: &logicaffeine_proof::ProofTerm) -> Result<kernel::Term, ParseError> {
match term {
logicaffeine_proof::ProofTerm::Constant(name) => Ok(kernel::Term::Global(name.clone())),
logicaffeine_proof::ProofTerm::Variable(name) => Ok(kernel::Term::Var(name.clone())),
logicaffeine_proof::ProofTerm::Function(name, args) => {
let mut t = kernel::Term::Global(name.clone());
for arg in args {
let arg_term = proof_term_to_kernel_term(arg)?;
t = kernel::Term::App(Box::new(t), Box::new(arg_term));
}
Ok(t)
}
_ => Err(ParseError {
kind: logicaffeine_language::error::ParseErrorKind::Custom(format!(
"Unsupported ProofTerm for kernel: {:?}",
term
)),
span: logicaffeine_language::token::Span::default(),
}),
}
}