//! eenn 0.1.0
//!
//! A hybrid neural-symbolic constraint solver with cognitive reasoning capabilities.
//!
//! Self-Learning Lightning Strike Demo
//!
//! This demo shows how Lightning Strike can learn from itself by training
//! a neural surrogate on its own constraint-solving experience. Over time,
//! the neural predictor gets better and Lightning Strike gets faster!

use anyhow::Result;
use std::time::Instant;

// Import from main EENN crate (no circular dependency!)
use eenn::{SelfLearningConfig, SelfLearningLightningStrike};

// Import theory core for constraint creation
use theory_core::{
    BinaryOp, CognitiveConfig, ConstValue, Constraint, ConstraintIR, Domain, Expr, TheoryTag,
    Variable, VariableMetadata,
};

/// Demonstrates Lightning Strike's self-learning loop in four phases:
/// an initial solve (before any neural training), a training phase over
/// ten problems, a post-learning test phase on unseen problems, and a
/// final timing comparison between the two phases.
fn main() -> Result<()> {
    println!("🧠⚡ Self-Learning Lightning Strike Demo");
    println!("=====================================\n");

    println!("🔧 Creating Self-Learning Lightning Strike...");

    // Create a self-learning engine with a fast-learning configuration so the
    // neural surrogate starts training within this short demo run.
    let mut config = SelfLearningConfig::default();
    config.cognitive_config = CognitiveConfig::fastest();
    config.min_examples_for_training = 8; // Train quickly for demo
    config.retrain_frequency = 4; // Retrain often
    config.confidence_threshold = 0.6; // Lower threshold for demo

    let mut self_learning_engine = SelfLearningLightningStrike::with_config(config)?;

    println!("   ✅ Created with fast learning configuration");
    println!("   📊 Will start neural training after 8 examples");
    println!("   🔄 Will retrain every 4 new examples\n");

    // Phase 1: Initial performance (before neural training)
    println!("📊 Phase 1: Initial Performance (No Neural Training Yet)");
    println!("-------------------------------------------------------");

    let test_problem = create_simple_constraint_problem("x + y <= 5");

    let start_time = Instant::now();
    let initial_result = self_learning_engine.solve_and_learn(&test_problem, false)?;
    let initial_duration = start_time.elapsed();

    println!(
        "   Initial solve: {:.2}ms",
        initial_duration.as_secs_f64() * 1000.0
    );
    println!(
        "   Result: {} (confidence: {:.1}%)",
        if initial_result.satisfiable {
            "SAT"
        } else {
            "UNSAT"
        },
        initial_result.confidence * 100.0
    );

    let stats = self_learning_engine.get_performance_stats();
    println!("   Status: {}", stats.summary());

    // Phase 2: Learning phase — solve a batch of problems so the engine
    // collects training examples and (after 8 examples) trains its surrogate.
    println!("\n🏋️ Phase 2: Learning Phase (Solving Training Problems)");
    println!("-----------------------------------------------------");

    let training_problems = vec![
        ("x + y <= 10", "Easy constraint"),
        ("x * 2 + y <= 8", "Moderate constraint"),
        ("x >= 0 AND y >= 0", "Non-negativity"),
        ("x + y >= 15", "Likely unsatisfiable"),
        ("2*x + 3*y <= 12", "Linear combination"),
        ("x <= 5 AND y <= 3", "Upper bounds"),
        ("x + y = 7", "Equality constraint"),
        ("x >= 10 OR y >= 10", "Disjunctive constraint"),
        ("x^2 + y^2 <= 25", "Nonlinear constraint"),
        ("x + 2*y <= 6", "Another linear"),
    ];

    let mut solve_times = Vec::with_capacity(training_problems.len());

    for (i, (problem_desc, description)) in training_problems.iter().enumerate() {
        let problem = create_simple_constraint_problem(problem_desc);

        let start = Instant::now();
        let result = self_learning_engine.solve_and_learn(&problem, false)?;
        let duration = start.elapsed();
        solve_times.push(duration);

        let stats = self_learning_engine.get_performance_stats();

        println!(
            "   Problem {}/10: {} - {:.2}ms - {} - {}",
            i + 1,
            description,
            duration.as_secs_f64() * 1000.0,
            if result.satisfiable { "SAT" } else { "UNSAT" },
            // Problems 1–8 (i < 7 is 0-indexed) only collect data; from the
            // 8th problem on, min_examples_for_training is reached.
            if i >= 7 {
                stats.summary()
            } else {
                "Collecting training data...".to_string()
            }
        );

        // Show when neural training kicks in (after the 8th example).
        if i == 7 {
            println!("      🧠 Neural training should start now!");
        }
    }

    // Phase 3: Post-learning performance on problems similar to (but not in)
    // the training set.
    println!("\n🚀 Phase 3: Post-Learning Performance Test");
    println!("----------------------------------------");

    let learned_stats = self_learning_engine.get_performance_stats();
    println!("   Final Status: {}", learned_stats.summary());

    println!("   Testing neural predictions on new problems...");

    let test_problems = vec![
        ("x + y <= 6", "Similar to training"),
        ("2*x + y <= 9", "Variation on training"),
        ("x <= 4 AND y <= 2", "Similar bounds"),
        ("x + y >= 20", "Should be UNSAT"),
        ("x + 3*y <= 15", "New coefficient pattern"),
    ];

    let mut post_learning_times = Vec::with_capacity(test_problems.len());

    for (i, (problem_desc, description)) in test_problems.iter().enumerate() {
        let problem = create_simple_constraint_problem(problem_desc);

        let start = Instant::now();
        let result = self_learning_engine.solve_and_learn(&problem, false)?;
        let duration = start.elapsed();
        post_learning_times.push(duration);

        println!(
            "   Test {}: {} - {:.2}ms - {} (conf: {:.1}%)",
            i + 1,
            description,
            duration.as_secs_f64() * 1000.0,
            if result.satisfiable { "SAT" } else { "UNSAT" },
            result.confidence * 100.0
        );
    }

    // Phase 4: Performance analysis. Re-fetch stats here: the test-phase
    // solves above may have updated them since Phase 3.
    println!("\n📈 Phase 4: Performance Analysis");
    println!("-------------------------------");

    let final_stats = self_learning_engine.get_performance_stats();

    // Average over the last (up to) five training solves — the ones after
    // neural training has kicked in. Deriving both the slice start and the
    // divisor from the data avoids the hardcoded `/ 5.0` getting out of sync
    // with the training-set size (and avoids a panic if it ever shrinks).
    let late_training = &solve_times[solve_times.len().saturating_sub(5)..];
    let avg_training_time: f64 = late_training
        .iter()
        .map(|d| d.as_secs_f64())
        .sum::<f64>()
        / late_training.len().max(1) as f64;

    let avg_test_time: f64 = post_learning_times
        .iter()
        .map(|d| d.as_secs_f64())
        .sum::<f64>()
        / post_learning_times.len().max(1) as f64;

    println!("   📊 Performance Summary:");
    println!(
        "     Training phase avg:  {:.2}ms",
        avg_training_time * 1000.0
    );
    println!("     Test phase avg:      {:.2}ms", avg_test_time * 1000.0);

    if avg_test_time < avg_training_time {
        let speedup = avg_training_time / avg_test_time;
        println!("     🚀 Neural speedup:   {:.1}x FASTER!", speedup);
    } else {
        println!("     📈 Performance stable (neural learning continues)");
    }

    println!(
        "     Neural success rate: {:.1}%",
        final_stats.neural_success_rate * 100.0
    );
    println!(
        "     Training examples:   {}",
        final_stats.training_examples_collected
    );
    println!(
        "     Total problems:      {}",
        final_stats.total_problems_solved
    );

    // Show the key insight
    println!("\n✨ Key Insight: Self-Improving Constraint Solver");
    println!("===============================================");
    println!("   🧠 Lightning Strike learned from its own SMT solutions");
    println!("   ⚡ Neural surrogate now provides fast constraint predictions");
    println!("   🔄 System gets faster the more problems it solves");
    println!("   📈 No external training data needed - learns from experience!");

    if final_stats.neural_success_rate > 0.3 {
        println!("\n🎉 SUCCESS: Neural surrogate is being used for fast predictions!");
    } else {
        println!("\n📝 Note: Neural surrogate needs more training for higher confidence");
    }

    println!("\n✅ Self-Learning Lightning Strike Demo Complete!");

    Ok(())
}

/// Create a simple constraint problem for demonstration
/// Create a simple two-variable constraint problem for demonstration.
///
/// Builds an IR with two real variables `x, y ∈ [0, 10]` and one linear
/// constraint over `x + y`. The bound is picked by substring-matching a
/// number in `description`; the constraint direction follows its operator:
/// - contains `>=` → `bound <= x + y`
/// - contains a bare `=` (no `<=`/`>=`) → `x + y = bound`
/// - otherwise → `x + y <= bound`
fn create_simple_constraint_problem(description: &str) -> ConstraintIR {
    let mut ir = ConstraintIR::new();

    // Add two real variables x, y ∈ [0, 10]
    let x_var = Variable {
        name: "x".to_string(),
        domain: Domain::Real {
            min: Some(0.0),
            max: Some(10.0),
        },
        metadata: VariableMetadata::default(),
    };

    let y_var = Variable {
        name: "y".to_string(),
        domain: Domain::Real {
            min: Some(0.0),
            max: Some(10.0),
        },
        metadata: VariableMetadata::default(),
    };

    let x_id = ir.add_variable(x_var);
    let y_id = ir.add_variable(y_var);

    // Pick the constraint bound from the description. Two-digit bounds MUST
    // be checked before single-digit ones: with the single digits first,
    // "x + y >= 15" would match the "5" substring and silently get bound 5.0
    // instead of the intended 15.0 (same for "x + 3*y <= 15").
    let constraint_bound = match description {
        desc if desc.contains("20") => 20.0, // x + y >= 20 is unsatisfiable over [0,10]²
        desc if desc.contains("15") => 15.0,
        desc if desc.contains("12") => 12.0,
        desc if desc.contains("10") => 10.0,
        desc if desc.contains("8") => 8.0,
        desc if desc.contains("6") => 6.0,
        desc if desc.contains("5") => 5.0,
        _ => 7.0, // Default when no known bound appears
    };

    let constraint = if description.contains(">=") {
        // x + y >= bound, expressed as bound <= x + y (reverse the operands)
        Constraint::LessEqual {
            lhs: Expr::Const(ConstValue::Real(constraint_bound)),
            rhs: Expr::Binary {
                op: BinaryOp::Add,
                lhs: Box::new(Expr::Var(x_id)),
                rhs: Box::new(Expr::Var(y_id)),
            },
        }
    } else if description.contains("=")
        && !description.contains("<=")
        && !description.contains(">=")
    {
        // x + y = bound
        Constraint::Equal {
            lhs: Expr::Binary {
                op: BinaryOp::Add,
                lhs: Box::new(Expr::Var(x_id)),
                rhs: Box::new(Expr::Var(y_id)),
            },
            rhs: Expr::Const(ConstValue::Real(constraint_bound)),
        }
    } else {
        // x + y <= bound (default)
        Constraint::LessEqual {
            lhs: Expr::Binary {
                op: BinaryOp::Add,
                lhs: Box::new(Expr::Var(x_id)),
                rhs: Box::new(Expr::Var(y_id)),
            },
            rhs: Expr::Const(ConstValue::Real(constraint_bound)),
        }
    };

    ir.add_constraint(constraint);
    ir.add_theory_tag(TheoryTag::LRA); // Linear Real Arithmetic

    ir
}