use anyhow::Result;
use eenn::{SelfLearningConfig, SelfLearningLightningStrike};
use std::time::Instant;
use theory_core::{
BinaryOp, ConstValue, Constraint, ConstraintIR, Domain, Expr, TheoryTag, Variable,
VariableMetadata,
};
/// Demo entry point: exercises the self-learning solver through four phases
/// (cold solve, training, post-training measurement, generalization test)
/// and prints timing/learning statistics after each.
fn main() -> Result<()> {
    println!("🧠⚡ Self-Learning Lightning Strike Demo");
    println!("=======================================\n");

    // Fast-learning configuration so the whole lifecycle fits in one demo run.
    let mut config = SelfLearningConfig::default();
    config.min_examples_for_training = 10;
    config.retrain_frequency = 5;
    config.confidence_threshold = 0.6;
    let mut self_learning_engine = SelfLearningLightningStrike::with_config(config)?;
    println!("🔧 Created self-learning engine with fast learning configuration");
    println!(" 📊 Will start neural training after 10 examples");
    println!(" 🔄 Will retrain every 5 new examples\n");

    // Phase 1: baseline solve before any training data has been collected.
    println!("📊 Phase 1: Initial Performance (No Neural Training Yet)");
    println!("-------------------------------------------------------");
    let test_problem = create_simple_linear_problem();
    let start_time = Instant::now();
    let initial_result = self_learning_engine.solve_and_learn(&test_problem, false)?;
    let initial_duration = start_time.elapsed();
    println!(
        " Initial solve: {:.2}ms",
        initial_duration.as_secs_f64() * 1000.0
    );
    println!(
        " Result: {}",
        if initial_result.satisfiable {
            "SAT"
        } else {
            "UNSAT"
        }
    );
    let stats = self_learning_engine.get_performance_stats();
    println!(" Status: {}\n", stats.summary());

    // Phase 2: feed a stream of varied problems so the engine can collect
    // training examples and (eventually) train its neural surrogate.
    println!("🏋️ Phase 2: Learning Phase (Solving Training Problems)");
    println!("-----------------------------------------------------");
    let num_training_problems = 20;
    let mut solve_times = Vec::new();
    // Tracks whether we've already printed the one-time training banner.
    let mut training_announced = false;
    for i in 0..num_training_problems {
        let problem = create_varied_problem(i);
        let start = Instant::now();
        let result = self_learning_engine.solve_and_learn(&problem, false)?;
        let duration = start.elapsed();
        solve_times.push(duration);
        let stats = self_learning_engine.get_performance_stats();
        println!(
            " Problem {}/{}: {:.2}ms - {} - {}",
            i + 1,
            num_training_problems,
            duration.as_secs_f64() * 1000.0,
            if result.satisfiable { "SAT" } else { "UNSAT" },
            if stats.is_neural_trained {
                format!(
                    "Neural trained ({} examples)",
                    stats.training_examples_collected
                )
            } else {
                format!(
                    "Collecting data ({} examples)",
                    stats.training_examples_collected
                )
            }
        );
        // Announce training when the engine actually reports it, rather than
        // assuming it happens at a hard-coded iteration index (which would be
        // wrong if the configured threshold changes or training is deferred).
        if stats.is_neural_trained && !training_announced {
            training_announced = true;
            println!(
                "\n ✨ Neural network trained after {} examples!\n",
                stats.training_examples_collected
            );
        }
    }

    // Phase 3: aggregate statistics now that the surrogate has been trained.
    println!("\n📊 Phase 3: Performance After Learning");
    println!("-------------------------------------");
    let final_stats = self_learning_engine.get_performance_stats();
    println!(" {}", final_stats.summary());
    println!(
        " Neural predictions: {}",
        final_stats.neural_predictions_used
    );
    println!(" SMT fallbacks: {}", final_stats.smt_fallbacks_used);
    let avg_time: f64 =
        solve_times.iter().map(|d| d.as_secs_f64()).sum::<f64>() / solve_times.len() as f64;
    println!(" Average solve time: {:.2}ms", avg_time * 1000.0);

    // Phase 4: problems with a different structure (coefficient on x) to see
    // how the trained engine behaves on inputs unlike the training stream.
    println!("\n🧪 Phase 4: Testing on New Problems");
    println!("-----------------------------------");
    for i in 0..5 {
        let test_problem = create_test_problem(i);
        let start = Instant::now();
        let result = self_learning_engine.solve_and_learn(&test_problem, false)?;
        let duration = start.elapsed();
        println!(
            " Test {}: {:.2}ms - {}",
            i + 1,
            duration.as_secs_f64() * 1000.0,
            if result.satisfiable { "SAT" } else { "UNSAT" }
        );
    }

    let final_stats = self_learning_engine.get_performance_stats();
    println!("\n Final: {}", final_stats.summary());
    println!("\n✅ Demo complete!");
    println!(
        " The neural surrogate learned from {} problems",
        final_stats.training_examples_collected
    );
    println!(
        " Neural success rate: {:.1}%",
        final_stats.neural_success_rate * 100.0
    );
    Ok(())
}
/// Builds the Phase-1 warm-up problem: two real variables x, y in [0, 10]
/// with the single constraint x + y <= 8 (tagged as linear real arithmetic).
fn create_simple_linear_problem() -> ConstraintIR {
    let x_id = theory_core::VarId(0);
    let y_id = theory_core::VarId(1);

    // Both variables share the same [0, 10] real domain.
    let real_var = |name: &str| Variable {
        name: name.to_string(),
        domain: Domain::Real {
            min: Some(0.0),
            max: Some(10.0),
        },
        metadata: VariableMetadata::default(),
    };

    let mut variables = indexmap::IndexMap::new();
    variables.insert(x_id, real_var("x"));
    variables.insert(y_id, real_var("y"));

    // x + y
    let sum = Expr::Binary {
        op: BinaryOp::Add,
        lhs: Box::new(Expr::Var(x_id)),
        rhs: Box::new(Expr::Var(y_id)),
    };

    ConstraintIR {
        variables,
        constraints: vec![Constraint::LessEqual {
            lhs: sum,
            rhs: Expr::Const(ConstValue::Real(8.0)),
        }],
        version: 1,
        objective: None,
        theory_tags: smallvec::smallvec![TheoryTag::LRA],
    }
}
/// Builds the `index`-th training problem. The domain upper bound and the
/// constraint right-hand side both grow with `index`, so the training stream
/// contains a family of structurally identical but numerically varied
/// problems: x, y in [0, 10 + 0.5*index] with x + y <= 5 + index.
fn create_varied_problem(index: usize) -> ConstraintIR {
    let x_id = theory_core::VarId(0);
    let y_id = theory_core::VarId(1);

    // Scale the domain with the problem index to vary the training data.
    let bound = 10.0 + (index as f64 * 0.5);
    let bounded_var = |name: &str| Variable {
        name: name.to_string(),
        domain: Domain::Real {
            min: Some(0.0),
            max: Some(bound),
        },
        metadata: VariableMetadata::default(),
    };

    let mut variables = indexmap::IndexMap::new();
    variables.insert(x_id, bounded_var("x"));
    variables.insert(y_id, bounded_var("y"));

    // x + y <= 5 + index
    let rhs_value = 5.0 + (index as f64);
    let sum = Expr::Binary {
        op: BinaryOp::Add,
        lhs: Box::new(Expr::Var(x_id)),
        rhs: Box::new(Expr::Var(y_id)),
    };

    ConstraintIR {
        variables,
        constraints: vec![Constraint::LessEqual {
            lhs: sum,
            rhs: Expr::Const(ConstValue::Real(rhs_value)),
        }],
        version: 1,
        objective: None,
        theory_tags: smallvec::smallvec![TheoryTag::LRA],
    }
}
/// Builds a held-out test problem unlike the training stream: x, y range over
/// [-5, 15] and x carries a multiplier (2 for even `index`, 3 for odd),
/// giving the constraint mult*x + y <= 20.
fn create_test_problem(index: usize) -> ConstraintIR {
    let x_id = theory_core::VarId(0);
    let y_id = theory_core::VarId(1);

    // Wider domain than the training problems, including negative values.
    let wide_var = |name: &str| Variable {
        name: name.to_string(),
        domain: Domain::Real {
            min: Some(-5.0),
            max: Some(15.0),
        },
        metadata: VariableMetadata::default(),
    };

    let mut variables = indexmap::IndexMap::new();
    variables.insert(x_id, wide_var("x"));
    variables.insert(y_id, wide_var("y"));

    // Alternate the coefficient on x between 2 and 3 across test cases.
    let mult = if index % 2 == 0 { 2.0 } else { 3.0 };

    // mult * x
    let scaled_x = Expr::Binary {
        op: BinaryOp::Mul,
        lhs: Box::new(Expr::Const(ConstValue::Real(mult))),
        rhs: Box::new(Expr::Var(x_id)),
    };
    // mult * x + y
    let lhs = Expr::Binary {
        op: BinaryOp::Add,
        lhs: Box::new(scaled_x),
        rhs: Box::new(Expr::Var(y_id)),
    };

    ConstraintIR {
        variables,
        constraints: vec![Constraint::LessEqual {
            lhs,
            rhs: Expr::Const(ConstValue::Real(20.0)),
        }],
        version: 1,
        objective: None,
        theory_tags: smallvec::smallvec![TheoryTag::LRA],
    }
}