// tensorlogic_compiler/lib.rs
//! TLExpr → EinsumGraph compiler (planning only).
//!
//! **Version**: 0.1.0-beta.1 | **Status**: Production Ready
//!
//! This crate compiles logical expressions into tensor computation graphs
//! represented as einsum operations. It provides a bridge between symbolic
//! logic and numeric tensor computations.
//!
//! # Overview
//!
//! The tensorlogic-compiler translates high-level logical expressions (predicates,
//! quantifiers, implications) into low-level tensor operations that can be executed
//! efficiently on various backends (CPU, GPU, etc.).
//!
//! **Key Features:**
//! - Logic-to-tensor mapping with configurable strategies
//! - Type checking and scope analysis
//! - Optimization passes (negation, CSE, einsum optimization)
//! - Enhanced diagnostics with helpful error messages
//! - Support for arithmetic, comparison, and conditional expressions
//!
//! # Quick Start
//!
//! ```rust
//! use tensorlogic_compiler::{compile_to_einsum_with_context, CompilerContext};
//! use tensorlogic_ir::{TLExpr, Term};
//!
//! let mut ctx = CompilerContext::new();
//! ctx.add_domain("Person", 100);
//!
//! // Define a logic rule: ∃y. knows(x, y)
//! // "Find all persons x who know someone"
//! let rule = TLExpr::exists(
//!     "y",
//!     "Person",
//!     TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]),
//! );
//!
//! // Compile to tensor operations
//! let graph = compile_to_einsum_with_context(&rule, &mut ctx).unwrap();
//! ```
//!
//! # Compilation Pipeline
//!
//! The compiler follows a multi-stage pipeline:
//!
//! 1. **Pre-compilation passes**:
//!    - Scope analysis (detect unbound variables)
//!    - Type checking (validate predicate arity and types)
//!    - Expression optimization (negation optimization, CSE)
//!
//! 2. **Compilation**:
//!    - Variable axis assignment
//!    - Logic-to-tensor mapping (using configurable strategies)
//!    - Einsum graph construction
//!
//! 3. **Post-compilation passes**:
//!    - Dead code elimination
//!    - Einsum operation merging
//!    - Identity elimination
//!
//! # Modules
//!
//! - [`config`]: Compilation configuration and strategy selection
//! - [`optimize`]: Expression-level optimization passes
//! - [`passes`]: Analysis and validation passes
//!
//! # Examples
//!
//! ## Basic Predicate Compilation
//!
//! ```rust
//! use tensorlogic_compiler::compile_to_einsum;
//! use tensorlogic_ir::{TLExpr, Term};
//!
//! let expr = TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]);
//! let graph = compile_to_einsum(&expr).unwrap();
//! ```
//!
//! ## Compilation with Context
//!
//! ```rust
//! use tensorlogic_compiler::{compile_to_einsum_with_context, CompilerContext};
//! use tensorlogic_ir::{TLExpr, Term};
//!
//! let mut ctx = CompilerContext::new();
//! ctx.add_domain("Person", 100);
//!
//! let expr = TLExpr::exists(
//!     "y",
//!     "Person",
//!     TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]),
//! );
//!
//! let graph = compile_to_einsum_with_context(&expr, &mut ctx).unwrap();
//! ```
//!
//! ## Using Optimization Passes
//!
//! ### Unified Pipeline (Recommended)
//!
//! ```rust
//! use tensorlogic_compiler::optimize::{OptimizationPipeline, PipelineConfig};
//! use tensorlogic_ir::{TLExpr, Term};
//!
//! let pipeline = OptimizationPipeline::new();
//! let expr = TLExpr::add(
//!     TLExpr::mul(TLExpr::Constant(2.0), TLExpr::Constant(3.0)),
//!     TLExpr::Constant(0.0)
//! );
//! let (optimized, stats) = pipeline.optimize(&expr);
//! println!("Applied {} optimizations", stats.total_optimizations());
//! ```
//!
//! ### Individual Passes
//!
//! ```rust
//! use tensorlogic_compiler::optimize::optimize_negations;
//! use tensorlogic_ir::{TLExpr, Term};
//!
//! let expr = TLExpr::negate(TLExpr::negate(
//!     TLExpr::pred("p", vec![Term::var("x")])
//! ));
//!
//! let (optimized, stats) = optimize_negations(&expr);
//! assert_eq!(stats.double_negations_eliminated, 1);
//! ```

129pub mod cache;
130pub mod compile;
131pub mod config;
132mod context;
133pub mod debug;
134pub mod export;
135pub mod import;
136pub mod incremental;
137pub mod optimize;
138#[cfg(feature = "parallel")]
139pub mod parallel;
140pub mod passes;
141pub mod profiling;
142
143#[cfg(test)]
144mod property_tests;
145#[cfg(test)]
146mod tests;
147#[cfg(test)]
148mod tests_math_ops;
149
150use anyhow::Result;
151use tensorlogic_ir::{EinsumGraph, TLExpr};
152
153pub use cache::{CacheStats, CompilationCache};
154pub use config::{
155 AndStrategy, CompilationConfig, CompilationConfigBuilder, ExistsStrategy, ForallStrategy,
156 ImplicationStrategy, ModalStrategy, NotStrategy, OrStrategy, TemporalStrategy,
157};
158pub use context::{CompilerContext, DomainInfo};
159
160// Re-export adapter types for convenience
161pub use passes::validate_arity;
162pub use tensorlogic_adapters::{PredicateInfo, SymbolTable};
163
164use compile::{compile_expr, infer_domain};
165
166/// Compile a TLExpr into an EinsumGraph with an empty context.
167///
168/// This is the simplest entry point for compilation. It creates a new
169/// compiler context automatically and infers domains where possible.
170///
171/// # Example
172///
173/// ```
174/// use tensorlogic_compiler::compile_to_einsum;
175/// use tensorlogic_ir::{TLExpr, Term};
176///
177/// let expr = TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]);
178/// let graph = compile_to_einsum(&expr).unwrap();
179/// ```
180pub fn compile_to_einsum(expr: &TLExpr) -> Result<EinsumGraph> {
181 let mut ctx = CompilerContext::new();
182 compile_to_einsum_with_context(expr, &mut ctx)
183}
184
185/// Compile a TLExpr into an EinsumGraph with a custom compilation configuration.
186///
187/// This allows you to control how logical operations are compiled to tensor operations,
188/// using different strategies for AND, OR, NOT, quantifiers, and other logic operators.
189///
190/// # Arguments
191///
192/// * `expr` - The logical expression to compile
193/// * `config` - Compilation configuration specifying strategies
194///
195/// # Returns
196///
197/// An `EinsumGraph` representing the compiled tensor computation.
198///
199/// # Example
200///
201/// ```
202/// use tensorlogic_compiler::{compile_to_einsum_with_config, CompilationConfig};
203/// use tensorlogic_ir::{TLExpr, Term};
204///
205/// // Use Łukasiewicz fuzzy logic
206/// let config = CompilationConfig::fuzzy_lukasiewicz();
207/// let expr = TLExpr::and(
208/// TLExpr::pred("P", vec![Term::var("x")]),
209/// TLExpr::pred("Q", vec![Term::var("x")]),
210/// );
211/// let graph = compile_to_einsum_with_config(&expr, &config).unwrap();
212///
213/// // Use hard Boolean logic
214/// let config = CompilationConfig::hard_boolean();
215/// let graph = compile_to_einsum_with_config(&expr, &config).unwrap();
216///
217/// // Use probabilistic logic
218/// let config = CompilationConfig::probabilistic();
219/// let graph = compile_to_einsum_with_config(&expr, &config).unwrap();
220/// ```
221pub fn compile_to_einsum_with_config(
222 expr: &TLExpr,
223 config: &CompilationConfig,
224) -> Result<EinsumGraph> {
225 let mut ctx = CompilerContext::with_config(config.clone());
226 compile_to_einsum_with_context(expr, &mut ctx)
227}
228
229/// Compile a TLExpr into an EinsumGraph with an existing context.
230///
231/// Use this when you need fine-grained control over domains, variable bindings,
232/// or when compiling multiple related expressions with shared context.
233///
234/// # Example
235///
236/// ```
237/// use tensorlogic_compiler::{compile_to_einsum_with_context, CompilerContext};
238/// use tensorlogic_ir::{TLExpr, Term};
239///
240/// let mut ctx = CompilerContext::new();
241/// ctx.add_domain("Person", 100);
242///
243/// let expr = TLExpr::exists(
244/// "y",
245/// "Person",
246/// TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]),
247/// );
248///
249/// let graph = compile_to_einsum_with_context(&expr, &mut ctx).unwrap();
250/// ```
251pub fn compile_to_einsum_with_context(
252 expr: &TLExpr,
253 ctx: &mut CompilerContext,
254) -> Result<EinsumGraph> {
255 let mut graph = EinsumGraph::new();
256
257 let free_vars = expr.free_vars();
258 for var in free_vars.iter() {
259 if !ctx.var_to_domain.contains_key(var) {
260 if let Some(domain) = infer_domain(expr, var) {
261 ctx.bind_var(var, &domain)?;
262 }
263 }
264 ctx.assign_axis(var);
265 }
266
267 let result = compile_expr(expr, ctx, &mut graph)?;
268
269 // Mark the result tensor as an output
270 graph.outputs.push(result.tensor_idx);
271
272 Ok(graph)
273}