eenn 0.1.0

A hybrid neural-symbolic constraint solver with cognitive reasoning capabilities
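
The example below walks the Phase 2C performance-optimization engine end to end: it builds four ConstraintIR workloads of increasing difficulty, asks the PerformanceOptimizer for a backend, strategy, and parameter recommendation for each, records simulated SessionPerformance feedback across backends, and then re-queries the optimizer to show how its recommendations adapt.
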
// Performance Optimization Engine Demo - Phase 2C
// Demonstrates intelligent backend selection and performance optimization

use theory_core::{
    BinaryOp, ComputeBackend, ConstValue, Constraint, ConstraintIR, Domain, Expr,
    OptimizationConfig, PerformanceOptimizer, SessionPerformance, TheoryTag, Variable,
};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("🚀 Performance Optimization Engine Demo - Phase 2C");
    println!("==================================================\n");

    // Initialize performance optimizer
    let config = OptimizationConfig {
        enable_backend_selection: true,
        enable_parameter_tuning: true,
        enable_performance_prediction: true,
        backend_switch_threshold: 0.3,
        max_analysis_time_ms: 100.0,
        calibration_sample_size: 50,
    };

    let mut optimizer = PerformanceOptimizer::new(config);

    // Create test workloads of varying characteristics
    let simple_workload = create_simple_linear_problem();
    let medium_workload = create_nonlinear_optimization();
    let complex_workload = create_mixed_integer_problem();
    let extreme_workload = create_large_scale_problem();

    println!("📊 Testing Workload Analysis and Backend Selection");
    println!("==================================================\n");

    // Test optimization recommendations for different workload types
    test_optimization(&mut optimizer, &simple_workload, "Simple Linear Problem")?;
    test_optimization(&mut optimizer, &medium_workload, "Nonlinear Optimization")?;
    test_optimization(&mut optimizer, &complex_workload, "Mixed Integer Problem")?;
    test_optimization(&mut optimizer, &extreme_workload, "Large Scale Problem")?;

    println!("\n🎯 Testing Performance Learning");
    println!("================================\n");

    // Simulate solving with performance feedback
    simulate_performance_learning(&mut optimizer)?;

    println!("\n🔧 Testing Adaptive Parameter Tuning");
    println!("=====================================\n");

    // Test how parameters adapt to different workloads
    test_parameter_adaptation(&mut optimizer)?;

    println!("\n✅ Performance Optimization Demo Complete!");
    println!("Phase 2C implementation successfully demonstrates:");
    println!("- Intelligent workload analysis and backend selection");
    println!("- Performance prediction and resource optimization");
    println!("- Adaptive parameter tuning based on historical data");
    println!("- Multi-dimensional optimization (time, memory, accuracy)");

    Ok(())
}

fn test_optimization(
    optimizer: &mut PerformanceOptimizer,
    constraints: &ConstraintIR,
    description: &str,
) -> Result<(), Box<dyn std::error::Error>> {
    println!("🔍 Optimizing {} workload:", description);

    let recommendation = optimizer.optimize(constraints, false)?;

    println!("  Recommended Backend: {:?}", recommendation.backend);
    println!("  Recommended Strategy: {:?}", recommendation.strategy);
    println!("  Confidence: {:.2}", recommendation.confidence);
    println!("  Reasoning: {}", recommendation.reasoning);

    let perf = &recommendation.expected_performance;
    println!("  Expected Performance:");
    println!("    Time: {:.1}ms", perf.estimated_time_ms);
    println!(
        "    Success Probability: {:.1}%",
        perf.success_probability * 100.0
    );
    println!("    Memory Usage: {:.1}MB", perf.memory_usage_mb);
    println!(
        "    Accuracy Confidence: {:.1}%",
        perf.accuracy_confidence * 100.0
    );

    let params = &recommendation.parameters;
    println!("  Optimized Parameters:");
    println!("    Timeout: {:.0}ms", params.timeout_ms);
    println!("    Worker Threads: {}", params.worker_threads);
    println!("    Memory Budget: {:.0}MB", params.memory_budget_mb);
    println!("    Precision: {:?}", params.precision_level);
    println!("    Caching: {:?}", params.caching_strategy);

    println!();
    Ok(())
}

fn simulate_performance_learning(
    optimizer: &mut PerformanceOptimizer,
) -> Result<(), Box<dyn std::error::Error>> {
    println!("🎓 Simulating performance learning from solving feedback:");

    let workload = create_simple_linear_problem();

    // Simulate multiple solve sessions with different backends and outcomes
    let scenarios = vec![
        (ComputeBackend::CPU, 150.0, true, 0.95), // CPU: fast and reliable
        (ComputeBackend::GPU, 80.0, true, 0.90),  // GPU: faster but slightly less reliable
        (ComputeBackend::Neural, 25.0, true, 0.85), // Neural: very fast, decent accuracy
        (ComputeBackend::SMT, 800.0, true, 0.99), // SMT: slow but very accurate
        (ComputeBackend::CPU, 200.0, false, 0.10), // CPU failure case
        (ComputeBackend::GPU, 70.0, true, 0.92),  // GPU improvement
        (ComputeBackend::Neural, 30.0, true, 0.88), // Neural consistency
    ];

    println!(
        "  Recording {} solve sessions across different backends...",
        scenarios.len()
    );

    for (backend, time_ms, success, confidence) in scenarios {
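        // For the demo, attribute a nominal 30% of the solve time to the neural
        // phase and 70% to the SMT phase.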
        let performance = SessionPerformance {
            total_solve_time_ms: time_ms,
            neural_time_ms: time_ms * 0.3,
            smt_time_ms: time_ms * 0.7,
            confidence_achieved: confidence,
            success,
            iterations_required: 1,
        };

        optimizer.record_performance(&workload, &backend, &performance)?;
    }

    // Get an updated recommendation to see the effect of learning
    let updated_rec = optimizer.optimize(&workload, false)?;
    println!(
        "  After learning, recommended backend: {:?}",
        updated_rec.backend
    );
    println!(
        "  Expected performance: {:.1}ms with {:.1}% success rate",
        updated_rec.expected_performance.estimated_time_ms,
        updated_rec.expected_performance.success_probability * 100.0
    );

    Ok(())
}

fn test_parameter_adaptation(
    optimizer: &mut PerformanceOptimizer,
) -> Result<(), Box<dyn std::error::Error>> {
    println!("🔧 Testing parameter adaptation for different workload patterns:");

    // Test how parameters adapt to workload characteristics
    let workloads = vec![
        (create_simple_linear_problem(), "Simple Linear"),
        (create_nonlinear_optimization(), "Nonlinear Optimization"),
        (create_large_scale_problem(), "Large Scale"),
    ];

    for (workload, name) in workloads {
        let rec = optimizer.optimize(&workload, false)?;
        println!(
            "  {} -> Timeout: {:.0}ms, Threads: {}, Memory: {:.0}MB",
            name,
            rec.parameters.timeout_ms,
            rec.parameters.worker_threads,
            rec.parameters.memory_budget_mb
        );
    }

    Ok(())
}

fn create_simple_linear_problem() -> ConstraintIR {
    // Linear feasibility problem: x + y >= 1 with x >= 0, y >= 0 (no objective is encoded in the IR)
    let mut ir = ConstraintIR::new();

    let x = Variable {
        name: "x".to_string(),
        domain: Domain::Real {
            min: Some(0.0),
            max: None,
        },
        metadata: Default::default(),
    };
    let y = Variable {
        name: "y".to_string(),
        domain: Domain::Real {
            min: Some(0.0),
            max: None,
        },
        metadata: Default::default(),
    };

    let x_id = ir.add_variable(x);
    let y_id = ir.add_variable(y);

    // Constraint: x + y >= 1
    let sum = Expr::Binary {
        op: BinaryOp::Add,
        lhs: Box::new(Expr::Var(x_id)),
        rhs: Box::new(Expr::Var(y_id)),
    };
    let one = Expr::Const(ConstValue::Real(1.0));
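    // x + y >= 1 is expressed here as 1 <= x + y.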
    let constraint = Constraint::LessEqual { lhs: one, rhs: sum };

    ir.add_constraint(constraint);
    ir.add_theory_tag(TheoryTag::LRA);

    ir
}

fn create_nonlinear_optimization() -> ConstraintIR {
    // Nonlinear problem with a quadratic (circle) constraint
    let mut ir = ConstraintIR::new();

    let x = Variable {
        name: "x".to_string(),
        domain: Domain::Real {
            min: Some(-5.0),
            max: Some(5.0),
        },
        metadata: Default::default(),
    };
    let y = Variable {
        name: "y".to_string(),
        domain: Domain::Real {
            min: Some(-5.0),
            max: Some(5.0),
        },
        metadata: Default::default(),
    };

    let x_id = ir.add_variable(x);
    let y_id = ir.add_variable(y);

    // Constraint: x^2 + y^2 <= 9 (circle constraint)
    let x_squared = Expr::Binary {
        op: BinaryOp::Mul,
        lhs: Box::new(Expr::Var(x_id)),
        rhs: Box::new(Expr::Var(x_id)),
    };
    let y_squared = Expr::Binary {
        op: BinaryOp::Mul,
        lhs: Box::new(Expr::Var(y_id)),
        rhs: Box::new(Expr::Var(y_id)),
    };
    let sum_squares = Expr::Binary {
        op: BinaryOp::Add,
        lhs: Box::new(x_squared),
        rhs: Box::new(y_squared),
    };
    let nine = Expr::Const(ConstValue::Real(9.0));
    let constraint = Constraint::LessEqual {
        lhs: sum_squares,
        rhs: nine,
    };

    ir.add_constraint(constraint);
    ir.add_theory_tag(TheoryTag::NLA);

    ir
}

fn create_mixed_integer_problem() -> ConstraintIR {
    // Mixed integer programming problem
    let mut ir = ConstraintIR::new();

    // Integer variables
    for i in 0..5 {
        let var = Variable {
            name: format!("x{}", i),
            domain: Domain::Integer {
                min: Some(0),
                max: Some(10),
            },
            metadata: Default::default(),
        };
        ir.add_variable(var);
    }

    // Add some linear constraints
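    // This indexes the first three variables directly and relies on add_variable
    // assigning sequential VarIds starting at 0.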
    for i in 0..3 {
        let lhs = Expr::Var(theory_core::VarId(i));
        let rhs = Expr::Const(ConstValue::Integer(5));
        let constraint = Constraint::LessEqual { lhs, rhs };
        ir.add_constraint(constraint);
    }

    ir.add_theory_tag(TheoryTag::LIA);
    ir.add_theory_tag(TheoryTag::BV);

    ir
}

fn create_large_scale_problem() -> ConstraintIR {
    // Large scale optimization problem
    let mut ir = ConstraintIR::new();

    // Create many variables
    for i in 0..50 {
        let var = Variable {
            name: format!("x{}", i),
            domain: Domain::Real {
                min: Some(0.0),
                max: Some(100.0),
            },
            metadata: Default::default(),
        };
        ir.add_variable(var);
    }

    // Add many constraints
    for i in 0..30 {
        let lhs = Expr::Var(theory_core::VarId(i % 50));
        let rhs = Expr::Const(ConstValue::Real(50.0));
        let constraint = Constraint::LessEqual { lhs, rhs };
        ir.add_constraint(constraint);
    }

    ir.add_theory_tag(TheoryTag::LRA);

    ir
}
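
As a usage note, the sketch below shows one way a caller might act on the recommendation returned by optimizer.optimize: dispatch to the recommended backend and apply the tuned timeout. It assumes recommendation.backend is a ComputeBackend and parameters.timeout_ms an f64, as the demo's output formatting suggests; solve_on_backend is a hypothetical placeholder, not a theory_core API.

use theory_core::{ComputeBackend, ConstraintIR, PerformanceOptimizer};

// Hypothetical dispatch helper: theory_core's actual solve entry points are not
// shown in the demo above, so this signature is a placeholder for illustration.
fn solve_on_backend(_ir: &ConstraintIR, backend: &ComputeBackend, timeout_ms: f64) {
    println!("dispatching to {:?} with a {:.0}ms timeout", backend, timeout_ms);
}

fn solve_with_recommendation(
    optimizer: &mut PerformanceOptimizer,
    ir: &ConstraintIR,
) -> Result<(), Box<dyn std::error::Error>> {
    // Ask the optimizer for a recommendation, then act on it: pick the
    // suggested backend and apply the tuned timeout.
    let rec = optimizer.optimize(ir, false)?;
    solve_on_backend(ir, &rec.backend, rec.parameters.timeout_ms);
    Ok(())
}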