// tensorlogic_compiler/lib.rs
1//! TLExpr → EinsumGraph compiler (planning only).
2//!
3//! **Version**: 0.1.0 | **Status**: Production Ready
4//!
5//! This crate compiles logical expressions into tensor computation graphs
6//! represented as einsum operations. It provides a bridge between symbolic
7//! logic and numeric tensor computations.
8//!
9//! # Overview
10//!
11//! The tensorlogic-compiler translates high-level logical expressions (predicates,
12//! quantifiers, implications) into low-level tensor operations that can be executed
13//! efficiently on various backends (CPU, GPU, etc.).
14//!
15//! **Key Features:**
16//! - Logic-to-tensor mapping with configurable strategies
17//! - Type checking and scope analysis
18//! - Optimization passes (negation, CSE, einsum optimization)
19//! - Enhanced diagnostics with helpful error messages
20//! - Support for arithmetic, comparison, and conditional expressions
21//!
22//! # Quick Start
23//!
24//! ```rust
25//! use tensorlogic_compiler::{compile_to_einsum_with_context, CompilerContext};
26//! use tensorlogic_ir::{TLExpr, Term};
27//!
28//! let mut ctx = CompilerContext::new();
29//! ctx.add_domain("Person", 100);
30//!
31//! // Define a logic rule: ∃y. knows(x, y)
32//! // "Find all persons x who know someone"
33//! let rule = TLExpr::exists(
34//! "y",
35//! "Person",
36//! TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]),
37//! );
38//!
39//! // Compile to tensor operations
40//! let graph = compile_to_einsum_with_context(&rule, &mut ctx).unwrap();
41//! ```
42//!
43//! # Compilation Pipeline
44//!
45//! The compiler follows a multi-stage pipeline:
46//!
47//! 1. **Pre-compilation passes**:
48//! - Scope analysis (detect unbound variables)
49//! - Type checking (validate predicate arity and types)
50//! - Expression optimization (negation optimization, CSE)
51//!
52//! 2. **Compilation**:
53//! - Variable axis assignment
54//! - Logic-to-tensor mapping (using configurable strategies)
55//! - Einsum graph construction
56//!
57//! 3. **Post-compilation passes**:
58//! - Dead code elimination
59//! - Einsum operation merging
60//! - Identity elimination
61//!
62//! # Modules
63//!
64//! - [`config`]: Compilation configuration and strategy selection
65//! - [`optimize`]: Expression-level optimization passes
66//! - [`passes`]: Analysis and validation passes
67//!
68//! # Examples
69//!
70//! ## Basic Predicate Compilation
71//!
72//! ```rust
73//! use tensorlogic_compiler::compile_to_einsum;
74//! use tensorlogic_ir::{TLExpr, Term};
75//!
76//! let expr = TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]);
77//! let graph = compile_to_einsum(&expr).unwrap();
78//! ```
79//!
80//! ## Compilation with Context
81//!
82//! ```rust
83//! use tensorlogic_compiler::{compile_to_einsum_with_context, CompilerContext};
84//! use tensorlogic_ir::{TLExpr, Term};
85//!
86//! let mut ctx = CompilerContext::new();
87//! ctx.add_domain("Person", 100);
88//!
89//! let expr = TLExpr::exists(
90//! "y",
91//! "Person",
92//! TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]),
93//! );
94//!
95//! let graph = compile_to_einsum_with_context(&expr, &mut ctx).unwrap();
96//! ```
97//!
98//! ## Using Optimization Passes
99//!
100//! ### Unified Pipeline (Recommended)
101//!
102//! ```rust
103//! use tensorlogic_compiler::optimize::{OptimizationPipeline, PipelineConfig};
//! use tensorlogic_ir::TLExpr;
105//!
106//! let pipeline = OptimizationPipeline::new();
107//! let expr = TLExpr::add(
108//! TLExpr::mul(TLExpr::Constant(2.0), TLExpr::Constant(3.0)),
109//! TLExpr::Constant(0.0)
110//! );
111//! let (optimized, stats) = pipeline.optimize(&expr);
112//! println!("Applied {} optimizations", stats.total_optimizations());
113//! ```
114//!
115//! ### Individual Passes
116//!
117//! ```rust
118//! use tensorlogic_compiler::optimize::optimize_negations;
119//! use tensorlogic_ir::{TLExpr, Term};
120//!
121//! let expr = TLExpr::negate(TLExpr::negate(
122//! TLExpr::pred("p", vec![Term::var("x")])
123//! ));
124//!
125//! let (optimized, stats) = optimize_negations(&expr);
126//! assert_eq!(stats.double_negations_eliminated, 1);
127//! ```
128
129pub mod bytecode;
130pub mod cache;
131pub mod compile;
132pub mod complexity;
133pub mod config;
134pub mod const_prop;
135mod context;
136pub mod dead_code;
137pub mod debug;
138pub mod error_recovery;
139pub mod export;
140pub mod expr_diff;
141pub mod import;
142pub mod incremental;
143pub mod inline;
144pub mod jit;
145pub mod optimize;
146#[cfg(feature = "parallel")]
147pub mod parallel;
148pub mod partial_eval;
149pub mod passes;
150pub mod pipeline;
151pub mod profiling;
152pub mod rewrite;
153pub mod symbolic_diff;
154pub mod type_infer;
155
156#[cfg(test)]
157mod property_tests;
158#[cfg(test)]
159mod tests;
160#[cfg(test)]
161mod tests_math_ops;
162
163use anyhow::Result;
164use tensorlogic_ir::{EinsumGraph, TLExpr};
165
166pub use cache::{
167 CacheStats, CachedResult, CachingCompiler, CompilationCache, ExprFingerprint,
168 LruCompilationCache,
169};
170pub use complexity::{
171 check_complexity, BatchComplexityStats, ComplexityComparison, ComplexityThresholds,
172 ComplexityWarning, ExprComplexity, WarningSeverity,
173};
174pub use config::{
175 AndStrategy, CompilationConfig, CompilationConfigBuilder, ExistsStrategy, ForallStrategy,
176 ImplicationStrategy, ModalStrategy, NotStrategy, OrStrategy, TemporalStrategy,
177};
178pub use const_prop::{ConstPropConfig, ConstPropStats, ConstantPropagator};
179pub use context::{CompilerContext, DomainInfo};
180pub use dead_code::{DceConfig, DceStats, DeadCodeEliminator};
181pub use error_recovery::{
182 compile_tolerant, compile_tolerant_with_strategy, Diagnostic, DiagnosticCollector,
183 PartialCompilationResult, RecoveryAction, RecoveryStrategy, Severity, SourceSpan,
184 TolerantCompiler,
185};
186pub use expr_diff::{expr_diff, DiffEntry, DiffKind, ExprDiff};
187pub use inline::{InlineConfig, InlineStats, LetInliner};
188pub use jit::{JitCompiler, JitError, JitStats};
189pub use partial_eval::{
190 partially_evaluate, specialize, specialize_batch, PEConfig, PEEnv, PEResult, PEStats, PEValue,
191};
192pub use pipeline::{
193 CompilerPassId, CompilerPassOrder, CompilerPassStats, CompilerPipeline, CompilerPipelineConfig,
194 CompilerPipelineResult, CompilerPipelineStats, PassBenchmark,
195};
196pub use profiling::{profile_phase, CompilationProfiler, ProfileEntry, ProfileReport};
197pub use rewrite::{
198 builtin_rules, EliminateAndTrue, EliminateDoubleNeg, EliminateOrFalse, FlattenNestedAnd,
199 FlattenNestedOr, RewriteEngine, RewriteRule, RewriteStats,
200};
201pub use symbolic_diff::{
202 differentiate, jacobian, simplify_derivative, DiffConfig, DiffError, DiffResult,
203};
204pub use type_infer::{
205 annotate, infer_type, Substitution as TypeSubstitution, TLType, TyVarCounter, TypeEnv,
206 TypeInferError, TypeInferResult, TypedExpr,
207};
208
209// Re-export adapter types for convenience
210pub use passes::validate_arity;
211pub use tensorlogic_adapters::{PredicateInfo, SymbolTable};
212
213// Re-export bytecode VM types and functions
214pub use bytecode::{
215 compile, compile_with_config, execute, execute_with_stats, BytecodeProgram, CompileError,
216 Instruction, VmEnv, VmError, VmStats, VmValue,
217};
218
219use compile::{compile_expr, infer_domain};
220
221/// Compile a TLExpr into an EinsumGraph with an empty context.
222///
223/// This is the simplest entry point for compilation. It creates a new
224/// compiler context automatically and infers domains where possible.
225///
226/// # Example
227///
228/// ```
229/// use tensorlogic_compiler::compile_to_einsum;
230/// use tensorlogic_ir::{TLExpr, Term};
231///
232/// let expr = TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]);
233/// let graph = compile_to_einsum(&expr).unwrap();
234/// ```
235pub fn compile_to_einsum(expr: &TLExpr) -> Result<EinsumGraph> {
236 let mut ctx = CompilerContext::new();
237 compile_to_einsum_with_context(expr, &mut ctx)
238}
239
240/// Compile a TLExpr into an EinsumGraph with a custom compilation configuration.
241///
242/// This allows you to control how logical operations are compiled to tensor operations,
243/// using different strategies for AND, OR, NOT, quantifiers, and other logic operators.
244///
245/// # Arguments
246///
247/// * `expr` - The logical expression to compile
248/// * `config` - Compilation configuration specifying strategies
249///
250/// # Returns
251///
252/// An `EinsumGraph` representing the compiled tensor computation.
253///
254/// # Example
255///
256/// ```
257/// use tensorlogic_compiler::{compile_to_einsum_with_config, CompilationConfig};
258/// use tensorlogic_ir::{TLExpr, Term};
259///
260/// // Use Łukasiewicz fuzzy logic
261/// let config = CompilationConfig::fuzzy_lukasiewicz();
262/// let expr = TLExpr::and(
263/// TLExpr::pred("P", vec![Term::var("x")]),
264/// TLExpr::pred("Q", vec![Term::var("x")]),
265/// );
266/// let graph = compile_to_einsum_with_config(&expr, &config).unwrap();
267///
268/// // Use hard Boolean logic
269/// let config = CompilationConfig::hard_boolean();
270/// let graph = compile_to_einsum_with_config(&expr, &config).unwrap();
271///
272/// // Use probabilistic logic
273/// let config = CompilationConfig::probabilistic();
274/// let graph = compile_to_einsum_with_config(&expr, &config).unwrap();
275/// ```
276pub fn compile_to_einsum_with_config(
277 expr: &TLExpr,
278 config: &CompilationConfig,
279) -> Result<EinsumGraph> {
280 let mut ctx = CompilerContext::with_config(config.clone());
281 compile_to_einsum_with_context(expr, &mut ctx)
282}
283
284/// Compile a TLExpr into an EinsumGraph with an existing context.
285///
286/// Use this when you need fine-grained control over domains, variable bindings,
287/// or when compiling multiple related expressions with shared context.
288///
289/// # Example
290///
291/// ```
292/// use tensorlogic_compiler::{compile_to_einsum_with_context, CompilerContext};
293/// use tensorlogic_ir::{TLExpr, Term};
294///
295/// let mut ctx = CompilerContext::new();
296/// ctx.add_domain("Person", 100);
297///
298/// let expr = TLExpr::exists(
299/// "y",
300/// "Person",
301/// TLExpr::pred("knows", vec![Term::var("x"), Term::var("y")]),
302/// );
303///
304/// let graph = compile_to_einsum_with_context(&expr, &mut ctx).unwrap();
305/// ```
306pub fn compile_to_einsum_with_context(
307 expr: &TLExpr,
308 ctx: &mut CompilerContext,
309) -> Result<EinsumGraph> {
310 let mut graph = EinsumGraph::new();
311
312 let free_vars = expr.free_vars();
313 for var in free_vars.iter() {
314 if !ctx.var_to_domain.contains_key(var) {
315 if let Some(domain) = infer_domain(expr, var) {
316 ctx.bind_var(var, &domain)?;
317 }
318 }
319 ctx.assign_axis(var);
320 }
321
322 let result = compile_expr(expr, ctx, &mut graph)?;
323
324 // Mark the result tensor as an output
325 graph.outputs.push(result.tensor_idx);
326
327 Ok(graph)
328}