tensorlogic_compiler/
lib.rs

//! TLExpr → EinsumGraph compiler (planning only).
//!
//! This crate compiles logical expressions into tensor computation graphs
//! represented as einsum operations. It provides a bridge between symbolic
//! logic and numeric tensor computations.
//!
//! # Overview
//!
//! The tensorlogic-compiler translates high-level logical expressions (predicates,
//! quantifiers, implications) into low-level tensor operations that can be executed
//! efficiently on various backends (CPU, GPU, etc.).
//!
//! **Key Features:**
//! - Logic-to-tensor mapping with configurable strategies
//! - Type checking and scope analysis
//! - Optimization passes (negation, CSE, einsum optimization)
//! - Enhanced diagnostics with helpful error messages
//! - Support for arithmetic, comparison, and conditional expressions
//!
//! # Quick Start
//!
//! ```rust
//! use tensorlogic_compiler::{compile_to_einsum_with_context, CompilerContext};
//! use tensorlogic_ir::{TLExpr, Term};
//!
//! let mut ctx = CompilerContext::new();
//! ctx.add_domain("Person", 100);
//!
//! // Define a logic rule: ∃y. knows(x, y)
//! // "Find all persons x who know someone"
//! let rule = TLExpr::exists(
//!     "y",
//!     "Person",
//!     TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]),
//! );
//!
//! // Compile to tensor operations
//! let graph = compile_to_einsum_with_context(&rule, &mut ctx).unwrap();
//! ```
//!
//! # Compilation Pipeline
//!
//! The compiler follows a multi-stage pipeline (see the sketch after this list):
//!
//! 1. **Pre-compilation passes**:
//!    - Scope analysis (detect unbound variables)
//!    - Type checking (validate predicate arity and types)
//!    - Expression optimization (negation optimization, CSE)
//!
//! 2. **Compilation**:
//!    - Variable axis assignment
//!    - Logic-to-tensor mapping (using configurable strategies)
//!    - Einsum graph construction
//!
//! 3. **Post-compilation passes**:
//!    - Dead code elimination
//!    - Einsum operation merging
//!    - Identity elimination
//!
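//! For example, here is a minimal sketch of running the expression-level
//! optimization stage yourself before compiling (the entry points below accept
//! either the original or the optimized expression):
//!
//! ```rust
//! use tensorlogic_compiler::optimize::OptimizationPipeline;
//! use tensorlogic_compiler::{compile_to_einsum_with_context, CompilerContext};
//! use tensorlogic_ir::{TLExpr, Term};
//!
//! let mut ctx = CompilerContext::new();
//! ctx.add_domain("Person", 100);
//!
//! // A doubly negated rule that the expression optimizer can simplify.
//! let rule = TLExpr::negate(TLExpr::negate(TLExpr::exists(
//!     "y",
//!     "Person",
//!     TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]),
//! )));
//!
//! // Stage 1, run explicitly: expression-level optimization.
//! let (optimized, stats) = OptimizationPipeline::new().optimize(&rule);
//! println!("Applied {} optimizations", stats.total_optimizations());
//!
//! // Stage 2: compile the optimized expression into an einsum graph.
//! let graph = compile_to_einsum_with_context(&optimized, &mut ctx).unwrap();
//! ```
//!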
//! # Modules
//!
//! - [`config`]: Compilation configuration and strategy selection (see the sketch after this list)
//! - [`optimize`]: Expression-level optimization passes
//! - [`passes`]: Analysis and validation passes
//!
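//! A rough sketch of what strategy selection in [`config`] looks like. The
//! builder methods and enum variants named here are placeholders, not the
//! real API; see the [`config`] module docs for the actual names:
//!
//! ```rust,ignore
//! use tensorlogic_compiler::{CompilationConfigBuilder, ExistsStrategy, OrStrategy};
//!
//! // Placeholder method and variant names -- consult `config` for the real ones.
//! let config = CompilationConfigBuilder::default()
//!     .exists_strategy(ExistsStrategy::Sum)
//!     .or_strategy(OrStrategy::Max)
//!     .build();
//! ```
//!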
//! # Examples
//!
//! ## Basic Predicate Compilation
//!
//! ```rust
//! use tensorlogic_compiler::compile_to_einsum;
//! use tensorlogic_ir::{TLExpr, Term};
//!
//! let expr = TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]);
//! let graph = compile_to_einsum(&expr).unwrap();
//! ```
//!
//! ## Compilation with Context
//!
//! ```rust
//! use tensorlogic_compiler::{compile_to_einsum_with_context, CompilerContext};
//! use tensorlogic_ir::{TLExpr, Term};
//!
//! let mut ctx = CompilerContext::new();
//! ctx.add_domain("Person", 100);
//!
//! let expr = TLExpr::exists(
//!     "y",
//!     "Person",
//!     TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]),
//! );
//!
//! let graph = compile_to_einsum_with_context(&expr, &mut ctx).unwrap();
//! ```
//!
//! ## Using Optimization Passes
//!
//! ### Unified Pipeline (Recommended)
//!
//! ```rust
//! use tensorlogic_compiler::optimize::OptimizationPipeline;
//! use tensorlogic_ir::TLExpr;
//!
//! let pipeline = OptimizationPipeline::new();
//! let expr = TLExpr::add(
//!     TLExpr::mul(TLExpr::Constant(2.0), TLExpr::Constant(3.0)),
//!     TLExpr::Constant(0.0)
//! );
//! let (optimized, stats) = pipeline.optimize(&expr);
//! println!("Applied {} optimizations", stats.total_optimizations());
//! ```
//!
//! ### Individual Passes
//!
//! ```rust
//! use tensorlogic_compiler::optimize::optimize_negations;
//! use tensorlogic_ir::{TLExpr, Term};
//!
//! let expr = TLExpr::negate(TLExpr::negate(
//!     TLExpr::pred("p", vec![Term::var("x")])
//! ));
//!
//! let (optimized, stats) = optimize_negations(&expr);
//! assert_eq!(stats.double_negations_eliminated, 1);
//! ```

pub mod cache;
pub mod compile;
pub mod config;
mod context;
pub mod debug;
pub mod incremental;
pub mod optimize;
pub mod passes;

#[cfg(test)]
mod property_tests;
#[cfg(test)]
mod tests;
#[cfg(test)]
mod tests_math_ops;

use anyhow::Result;
use tensorlogic_ir::{EinsumGraph, TLExpr};

pub use cache::{CacheStats, CompilationCache};
pub use config::{
    AndStrategy, CompilationConfig, CompilationConfigBuilder, ExistsStrategy, ForallStrategy,
    ImplicationStrategy, ModalStrategy, NotStrategy, OrStrategy, TemporalStrategy,
};
pub use context::{CompilerContext, DomainInfo};

// Re-export adapter types for convenience
pub use passes::validate_arity;
pub use tensorlogic_adapters::{PredicateInfo, SymbolTable};

use compile::{compile_expr, infer_domain};

/// Compile a TLExpr into an EinsumGraph with an empty context.
///
/// This is the simplest entry point for compilation. It creates a new
/// compiler context automatically and infers domains where possible.
///
/// # Example
///
/// ```
/// use tensorlogic_compiler::compile_to_einsum;
/// use tensorlogic_ir::{TLExpr, Term};
///
/// let expr = TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]);
/// let graph = compile_to_einsum(&expr).unwrap();
/// ```
pub fn compile_to_einsum(expr: &TLExpr) -> Result<EinsumGraph> {
    let mut ctx = CompilerContext::new();
    compile_to_einsum_with_context(expr, &mut ctx)
}

/// Compile a TLExpr into an EinsumGraph with an existing context.
///
/// Use this when you need fine-grained control over domains, variable bindings,
/// or when compiling multiple related expressions with shared context.
///
/// # Example
///
/// ```
/// use tensorlogic_compiler::{compile_to_einsum_with_context, CompilerContext};
/// use tensorlogic_ir::{TLExpr, Term};
///
/// let mut ctx = CompilerContext::new();
/// ctx.add_domain("Person", 100);
///
/// let expr = TLExpr::exists(
///     "y",
///     "Person",
///     TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]),
/// );
///
/// let graph = compile_to_einsum_with_context(&expr, &mut ctx).unwrap();
/// ```
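///
/// Reusing one context across calls lets related rules share the same domains
/// and variable bindings. A minimal sketch compiling two rules against the
/// same `Person` domain:
///
/// ```
/// use tensorlogic_compiler::{compile_to_einsum_with_context, CompilerContext};
/// use tensorlogic_ir::{TLExpr, Term};
///
/// let mut ctx = CompilerContext::new();
/// ctx.add_domain("Person", 100);
///
/// let knows_someone = TLExpr::exists(
///     "y",
///     "Person",
///     TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]),
/// );
/// let likes_someone = TLExpr::exists(
///     "y",
///     "Person",
///     TLExpr::pred("likes", vec![Term::var("x"), Term::var("y")]),
/// );
///
/// // Both rules are compiled with the same domains and variable bindings.
/// let knows_graph = compile_to_einsum_with_context(&knows_someone, &mut ctx).unwrap();
/// let likes_graph = compile_to_einsum_with_context(&likes_someone, &mut ctx).unwrap();
/// ```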
pub fn compile_to_einsum_with_context(
    expr: &TLExpr,
    ctx: &mut CompilerContext,
) -> Result<EinsumGraph> {
    let mut graph = EinsumGraph::new();

    // Bind each free variable to a domain where one can be inferred (unless
    // it is already bound in the context), then assign it a tensor axis.
    let free_vars = expr.free_vars();
    for var in free_vars.iter() {
        if !ctx.var_to_domain.contains_key(var) {
            if let Some(domain) = infer_domain(expr, var) {
                ctx.bind_var(var, &domain)?;
            }
        }
        ctx.assign_axis(var);
    }

    let result = compile_expr(expr, ctx, &mut graph)?;

    // Mark the result tensor as an output
    graph.outputs.push(result.tensor_idx);

    Ok(graph)
}