use anyhow::Result;
use std::time::Instant;
use eenn::{SelfLearningConfig, SelfLearningLightningStrike};
use theory_core::{
BinaryOp, CognitiveConfig, ConstValue, Constraint, ConstraintIR, Domain, Expr, TheoryTag,
Variable, VariableMetadata,
};
/// Demo driver for the self-learning Lightning Strike constraint solver.
///
/// Runs four phases: (1) a cold solve before any neural training, (2) a
/// training loop over ten problems so the engine can collect examples and
/// start retraining, (3) a post-learning test set, and (4) a timing and
/// statistics summary comparing the two phases.
fn main() -> Result<()> {
    println!("🧠⚡ Self-Learning Lightning Strike Demo");
    println!("=====================================\n");

    // Configure for fast learning so the demo shows retraining quickly:
    // start neural training after 8 examples, retrain every 4 thereafter.
    println!("🔧 Creating Self-Learning Lightning Strike...");
    let mut config = SelfLearningConfig::default();
    config.cognitive_config = CognitiveConfig::fastest();
    config.min_examples_for_training = 8;
    config.retrain_frequency = 4;
    config.confidence_threshold = 0.6;
    let mut self_learning_engine = SelfLearningLightningStrike::with_config(config)?;
    println!("   ✅ Created with fast learning configuration");
    println!("   📊 Will start neural training after 8 examples");
    println!("   🔄 Will retrain every 4 new examples\n");

    // Phase 1: baseline solve before any training data exists.
    println!("📊 Phase 1: Initial Performance (No Neural Training Yet)");
    println!("-------------------------------------------------------");
    let test_problem = create_simple_constraint_problem("x + y <= 5");
    let start_time = Instant::now();
    let initial_result = self_learning_engine.solve_and_learn(&test_problem, false)?;
    let initial_duration = start_time.elapsed();
    println!(
        "   Initial solve: {:.2}ms",
        initial_duration.as_secs_f64() * 1000.0
    );
    println!(
        "   Result: {} (confidence: {:.1}%)",
        if initial_result.satisfiable {
            "SAT"
        } else {
            "UNSAT"
        },
        initial_result.confidence * 100.0
    );
    let stats = self_learning_engine.get_performance_stats();
    println!("   Status: {}", stats.summary());

    // Phase 2: feed the engine a varied training set so it can learn from
    // its own SMT solutions.
    println!("\n🏋️ Phase 2: Learning Phase (Solving Training Problems)");
    println!("-----------------------------------------------------");
    let training_problems = vec![
        ("x + y <= 10", "Easy constraint"),
        ("x * 2 + y <= 8", "Moderate constraint"),
        ("x >= 0 AND y >= 0", "Non-negativity"),
        ("x + y >= 15", "Likely unsatisfiable"),
        ("2*x + 3*y <= 12", "Linear combination"),
        ("x <= 5 AND y <= 3", "Upper bounds"),
        ("x + y = 7", "Equality constraint"),
        ("x >= 10 OR y >= 10", "Disjunctive constraint"),
        ("x^2 + y^2 <= 25", "Nonlinear constraint"),
        ("x + 2*y <= 6", "Another linear"),
    ];
    let mut solve_times = Vec::with_capacity(training_problems.len());
    for (i, (problem_desc, description)) in training_problems.iter().enumerate() {
        let problem = create_simple_constraint_problem(problem_desc);
        let start = Instant::now();
        let result = self_learning_engine.solve_and_learn(&problem, false)?;
        let duration = start.elapsed();
        solve_times.push(duration);
        let stats = self_learning_engine.get_performance_stats();
        println!(
            "   Problem {}/10: {} - {:.2}ms - {} - {}",
            i + 1,
            description,
            duration.as_secs_f64() * 1000.0,
            if result.satisfiable { "SAT" } else { "UNSAT" },
            // Before the 8th example the engine is only collecting data.
            if i >= 7 {
                stats.summary()
            } else {
                "Collecting training data...".to_string()
            }
        );
        if i == 7 {
            // min_examples_for_training (8) has just been reached.
            println!("   🧠 Neural training should start now!");
        }
    }

    // Phase 3: fresh problems after training, to exercise neural predictions.
    println!("\n🚀 Phase 3: Post-Learning Performance Test");
    println!("----------------------------------------");
    let final_stats = self_learning_engine.get_performance_stats();
    println!("   Final Status: {}", final_stats.summary());
    println!("   Testing neural predictions on new problems...");
    let test_problems = vec![
        ("x + y <= 6", "Similar to training"),
        ("2*x + y <= 9", "Variation on training"),
        ("x <= 4 AND y <= 2", "Similar bounds"),
        ("x + y >= 20", "Should be UNSAT"),
        ("x + 3*y <= 15", "New coefficient pattern"),
    ];
    let mut post_learning_times = Vec::with_capacity(test_problems.len());
    for (i, (problem_desc, description)) in test_problems.iter().enumerate() {
        let problem = create_simple_constraint_problem(problem_desc);
        let start = Instant::now();
        let result = self_learning_engine.solve_and_learn(&problem, false)?;
        let duration = start.elapsed();
        post_learning_times.push(duration);
        println!(
            "   Test {}: {} - {:.2}ms - {} (conf: {:.1}%)",
            i + 1,
            description,
            duration.as_secs_f64() * 1000.0,
            if result.satisfiable { "SAT" } else { "UNSAT" },
            result.confidence * 100.0
        );
    }

    // Phase 4: compare the back half of the training phase (solves 6-10,
    // after learning has kicked in) against the post-learning test set.
    println!("\n📈 Phase 4: Performance Analysis");
    println!("-------------------------------");
    // Re-fetch: the phase-3 solves updated the statistics.
    let final_stats = self_learning_engine.get_performance_stats();
    let training_tail = &solve_times[5..];
    // Divide by the actual slice lengths (the old code hard-coded 5.0,
    // which would silently skew the average if the problem lists changed).
    let avg_training_time: f64 = training_tail.iter().map(|d| d.as_secs_f64()).sum::<f64>()
        / training_tail.len().max(1) as f64;
    let avg_test_time: f64 = post_learning_times
        .iter()
        .map(|d| d.as_secs_f64())
        .sum::<f64>()
        / post_learning_times.len().max(1) as f64;
    println!("   📊 Performance Summary:");
    println!(
        "      Training phase avg: {:.2}ms",
        avg_training_time * 1000.0
    );
    println!("      Test phase avg: {:.2}ms", avg_test_time * 1000.0);
    if avg_test_time < avg_training_time {
        let speedup = avg_training_time / avg_test_time;
        println!("      🚀 Neural speedup: {:.1}x FASTER!", speedup);
    } else {
        println!("      📈 Performance stable (neural learning continues)");
    }
    println!(
        "      Neural success rate: {:.1}%",
        final_stats.neural_success_rate * 100.0
    );
    println!(
        "      Training examples: {}",
        final_stats.training_examples_collected
    );
    println!(
        "      Total problems: {}",
        final_stats.total_problems_solved
    );

    println!("\n✨ Key Insight: Self-Improving Constraint Solver");
    println!("===============================================");
    println!("   🧠 Lightning Strike learned from its own SMT solutions");
    println!("   ⚡ Neural surrogate now provides fast constraint predictions");
    println!("   🔄 System gets faster the more problems it solves");
    println!("   📈 No external training data needed - learns from experience!");
    if final_stats.neural_success_rate > 0.3 {
        println!("\n🎉 SUCCESS: Neural surrogate is being used for fast predictions!");
    } else {
        println!("\n📝 Note: Neural surrogate needs more training for higher confidence");
    }
    println!("\n✅ Self-Learning Lightning Strike Demo Complete!");
    Ok(())
}
/// Builds a tiny two-variable constraint problem from a textual description.
///
/// Creates real variables `x` and `y`, each bounded to `[0, 10]`, then derives
/// a single constraint over `x + y` from the description text:
/// - contains `>=`  → `bound <= x + y` (lower bound, expressed as LessEqual)
/// - contains a bare `=` (no `<=`/`>=`) → `x + y == bound`
/// - otherwise → `x + y <= bound`
///
/// The numeric bound is picked by substring matching against a fixed set of
/// known values, falling back to `7.0` when none match. Note this is a crude
/// heuristic: the full expression in the description (coefficients, OR-terms,
/// nonlinearities) is NOT parsed — only `x + y` vs. a bound is encoded.
fn create_simple_constraint_problem(description: &str) -> ConstraintIR {
    let mut ir = ConstraintIR::new();

    // Both variables range over the reals in [0, 10].
    let x_var = Variable {
        name: "x".to_string(),
        domain: Domain::Real {
            min: Some(0.0),
            max: Some(10.0),
        },
        metadata: VariableMetadata::default(),
    };
    let y_var = Variable {
        name: "y".to_string(),
        domain: Domain::Real {
            min: Some(0.0),
            max: Some(10.0),
        },
        metadata: VariableMetadata::default(),
    };
    let x_id = ir.add_variable(x_var);
    let y_id = ir.add_variable(y_var);

    // Check multi-digit numbers BEFORE their single-digit substrings.
    // The previous ordering tested "5" first, so "x + y >= 15" matched the
    // "5" arm and got a bound of 5.0 instead of 15.0 — which made the
    // intended "likely unsatisfiable" training problem trivially SAT.
    let constraint_bound = match description {
        desc if desc.contains("20") => 20.0,
        desc if desc.contains("15") => 15.0,
        desc if desc.contains("12") => 12.0,
        desc if desc.contains("10") => 10.0,
        desc if desc.contains("8") => 8.0,
        desc if desc.contains("6") => 6.0,
        desc if desc.contains("5") => 5.0,
        _ => 7.0,
    };

    let constraint = if description.contains(">=") {
        // Lower bound: encode `bound <= x + y`.
        Constraint::LessEqual {
            lhs: Expr::Const(ConstValue::Real(constraint_bound)),
            rhs: Expr::Binary {
                op: BinaryOp::Add,
                lhs: Box::new(Expr::Var(x_id)),
                rhs: Box::new(Expr::Var(y_id)),
            },
        }
    } else if description.contains("=")
        && !description.contains("<=")
        && !description.contains(">=")
    {
        // Plain equality: `x + y == bound`.
        Constraint::Equal {
            lhs: Expr::Binary {
                op: BinaryOp::Add,
                lhs: Box::new(Expr::Var(x_id)),
                rhs: Box::new(Expr::Var(y_id)),
            },
            rhs: Expr::Const(ConstValue::Real(constraint_bound)),
        }
    } else {
        // Default upper bound: `x + y <= bound`.
        Constraint::LessEqual {
            lhs: Expr::Binary {
                op: BinaryOp::Add,
                lhs: Box::new(Expr::Var(x_id)),
                rhs: Box::new(Expr::Var(y_id)),
            },
            rhs: Expr::Const(ConstValue::Real(constraint_bound)),
        }
    };
    ir.add_constraint(constraint);
    // Tag the problem as linear real arithmetic for theory dispatch.
    ir.add_theory_tag(TheoryTag::LRA);
    ir
}