use std::collections::HashMap;
use tensorlogic_train::*;
/// Synthetic objective used as a stand-in for model validation accuracy.
///
/// The surface has a known optimum at `lr = 0.01`, `batch_size = 64`,
/// `dropout = 0.2`, with quadratic fall-off around each target plus a small
/// sinusoidal ripple so the landscape is not perfectly smooth.
fn objective_function(lr: f64, batch_size: i64, dropout: f64) -> f64 {
    // Quadratic penalty: zero at `target`, increasingly negative away from it,
    // with a per-parameter `scale` controlling how sharply it falls off.
    let penalty = |value: f64, target: f64, scale: f64| -> f64 { -(value - target).powi(2) / scale };
    let base = penalty(lr, 0.01, 0.001)
        + penalty(batch_size as f64, 64.0, 500.0)
        + penalty(dropout, 0.2, 0.05);
    // Shift and normalize so the optimum sits near 10/12 ≈ 0.83.
    let normalized = (base + 10.0) / 12.0;
    // Ripple term adds mild non-convexity to make the search non-trivial.
    normalized + (lr * batch_size as f64 * dropout).sin() * 0.05
}
/// Demonstrates Bayesian hyperparameter optimization on a synthetic objective:
/// three optimizer configurations (EI, UCB, Matérn 3/2 kernel), followed by a
/// comparison against grid and random search baselines under the same budget.
fn main() {
    println!("╔══════════════════════════════════════════════════════════════╗");
    println!("║ Bayesian Optimization for Hyperparameter Tuning ║");
    println!("╚══════════════════════════════════════════════════════════════╝\n");
    println!("📋 Defining hyperparameter search space...\n");

    // Three-dimensional search space matching objective_function's parameters.
    let mut param_space = HashMap::new();
    param_space.insert(
        "lr".to_string(),
        HyperparamSpace::log_uniform(1e-4, 1e-1).expect("unwrap"),
    );
    param_space.insert(
        "batch_size".to_string(),
        HyperparamSpace::int_range(16, 128).expect("unwrap"),
    );
    param_space.insert(
        "dropout".to_string(),
        HyperparamSpace::continuous(0.0, 0.5).expect("unwrap"),
    );
    println!("Search space:");
    println!(" • lr: log-uniform [1e-4, 1e-1]");
    println!(" • batch_size: int [16, 128]");
    println!(" • dropout: continuous [0.0, 0.5]\n");

    println!("╔══════════════════════════════════════════════════════════════╗");
    println!("║ Example 1: Expected Improvement (EI) ║");
    println!("╚══════════════════════════════════════════════════════════════╝\n");
    // 15 total evaluations, 5 of them random warm-up, RNG seed 42.
    let mut bayes_opt_ei = BayesianOptimization::new(param_space.clone(), 15, 5, 42)
        .with_acquisition(AcquisitionFunction::ExpectedImprovement { xi: 0.01 })
        .with_kernel(GpKernel::Rbf {
            sigma: 1.0,
            length_scale: 1.0,
        });
    println!("Configuration:");
    println!(" • Acquisition: Expected Improvement (xi=0.01)");
    println!(" • Kernel: RBF (σ²=1.0, l=1.0)");
    println!(" • Budget: {} evaluations\n", bayes_opt_ei.total_budget());
    run_optimization(&mut bayes_opt_ei, "EI");

    println!("\n╔══════════════════════════════════════════════════════════════╗");
    println!("║ Example 2: Upper Confidence Bound (UCB) ║");
    println!("╚══════════════════════════════════════════════════════════════╝\n");
    let mut bayes_opt_ucb = BayesianOptimization::new(param_space.clone(), 15, 5, 123)
        .with_acquisition(AcquisitionFunction::UpperConfidenceBound { kappa: 2.0 })
        .with_kernel(GpKernel::Rbf {
            sigma: 1.0,
            // Shorter length scale than Example 1 → more local model.
            length_scale: 0.5,
        });
    println!("Configuration:");
    println!(" • Acquisition: Upper Confidence Bound (κ=2.0)");
    println!(" • Kernel: RBF (σ²=1.0, l=0.5)");
    println!(" • Budget: {} evaluations\n", bayes_opt_ucb.total_budget());
    run_optimization(&mut bayes_opt_ucb, "UCB");

    println!("\n╔══════════════════════════════════════════════════════════════╗");
    println!("║ Example 3: Matérn 3/2 Kernel ║");
    println!("╚══════════════════════════════════════════════════════════════╝\n");
    let mut bayes_opt_matern = BayesianOptimization::new(param_space.clone(), 15, 5, 456)
        .with_acquisition(AcquisitionFunction::ExpectedImprovement { xi: 0.01 })
        .with_kernel(GpKernel::Matern32 {
            sigma: 1.0,
            length_scale: 1.0,
        });
    println!("Configuration:");
    println!(" • Acquisition: Expected Improvement (xi=0.01)");
    println!(" • Kernel: Matérn 3/2 (σ²=1.0, l=1.0)");
    println!(
        " • Budget: {} evaluations\n",
        bayes_opt_matern.total_budget()
    );
    run_optimization(&mut bayes_opt_matern, "Matérn");

    println!("\n╔══════════════════════════════════════════════════════════════╗");
    println!("║ Comparison: Bayesian vs Grid vs Random ║");
    println!("╚══════════════════════════════════════════════════════════════╝\n");

    // Grid search baseline: 3 points per dimension, capped at 20 evaluations
    // to match the Bayesian budget.
    let mut grid_search = GridSearch::new(param_space.clone(), 3);
    let grid_configs = grid_search.generate_configs();
    let grid_configs = grid_configs.into_iter().take(20).collect::<Vec<_>>();
    // Start at -inf: the objective can be negative, so a 0.0 start would
    // silently misreport the best score of an all-negative run.
    let mut grid_best_score = f64::NEG_INFINITY;
    for config in grid_configs {
        let lr = config
            .get("lr")
            .expect("unwrap")
            .as_float()
            .expect("unwrap");
        let batch_size = config
            .get("batch_size")
            .expect("unwrap")
            .as_int()
            .expect("unwrap");
        let dropout = config
            .get("dropout")
            .expect("unwrap")
            .as_float()
            .expect("unwrap");
        let score = objective_function(lr, batch_size, dropout);
        grid_search.add_result(HyperparamResult::new(config, score));
        grid_best_score = grid_best_score.max(score);
    }

    // Random search baseline: 20 samples, seed 789.
    let mut random_search = RandomSearch::new(param_space.clone(), 20, 789);
    let random_configs = random_search.generate_configs();
    // Same -inf start as the grid baseline, for the same reason.
    let mut random_best_score = f64::NEG_INFINITY;
    for config in random_configs {
        let lr = config
            .get("lr")
            .expect("unwrap")
            .as_float()
            .expect("unwrap");
        let batch_size = config
            .get("batch_size")
            .expect("unwrap")
            .as_int()
            .expect("unwrap");
        let dropout = config
            .get("dropout")
            .expect("unwrap")
            .as_float()
            .expect("unwrap");
        let score = objective_function(lr, batch_size, dropout);
        random_search.add_result(HyperparamResult::new(config, score));
        random_best_score = random_best_score.max(score);
    }

    // 0.0 fallback is only hit if the optimizer recorded no results at all.
    let bayes_best_score = bayes_opt_ei.best_result().map(|r| r.score).unwrap_or(0.0);
    println!("Results after 20 evaluations:");
    println!(" • Grid Search: {:.6}", grid_best_score);
    println!(" • Random Search: {:.6}", random_best_score);
    println!(" • Bayesian Opt (EI): {:.6}", bayes_best_score);
    println!();
    // Relative improvement in percent; meaningful only when the baseline
    // best is a positive score (true for this objective's usual runs).
    let improvement_vs_grid = (bayes_best_score - grid_best_score) / grid_best_score * 100.0;
    let improvement_vs_random = (bayes_best_score - random_best_score) / random_best_score * 100.0;
    println!("Improvement:");
    println!(" • vs Grid: {:>6.2}%", improvement_vs_grid);
    println!(" • vs Random: {:>6.2}%", improvement_vs_random);

    println!("\n╔══════════════════════════════════════════════════════════════╗");
    println!("║ Best Hyperparameters Found ║");
    println!("╚══════════════════════════════════════════════════════════════╝\n");
    if let Some(best) = bayes_opt_ei.best_result() {
        let lr = best
            .config
            .get("lr")
            .expect("unwrap")
            .as_float()
            .expect("unwrap");
        let batch_size = best
            .config
            .get("batch_size")
            .expect("unwrap")
            .as_int()
            .expect("unwrap");
        let dropout = best
            .config
            .get("dropout")
            .expect("unwrap")
            .as_float()
            .expect("unwrap");
        println!("Bayesian Optimization (EI) found:");
        println!(" • Learning rate: {:.6}", lr);
        println!(" • Batch size: {}", batch_size);
        println!(" • Dropout: {:.4}", dropout);
        println!(" • Score: {:.6}", best.score);
        println!();
        println!("Known optimum (for reference):");
        println!(" • Learning rate: 0.010000");
        println!(" • Batch size: 64");
        println!(" • Dropout: 0.2000");
    }

    println!("\n✅ Bayesian Optimization demonstration complete!");
    println!("\nKey takeaways:");
    println!(" 1. Bayesian Optimization intelligently explores the search space");
    println!(" 2. Uses Gaussian Processes to model the objective function");
    println!(" 3. Acquisition functions balance exploration vs exploitation");
    println!(" 4. More efficient than grid/random search for expensive objectives");
    println!(" 5. Different kernels and acquisition functions suit different problems");
}
/// Drives `bayes_opt` to completion against `objective_function`, logging a
/// progress line for each new best (and periodically otherwise), then prints a
/// summary with the top 3 configurations.
///
/// `_name` is currently unused; kept for call-site readability.
fn run_optimization(bayes_opt: &mut BayesianOptimization, _name: &str) {
    println!("Running optimization...\n");
    let mut iteration = 0;
    // Start at -inf so a negative first score still counts as the best so far;
    // starting at 0.0 would hide every result of an all-negative run and
    // misreport "Best score" as 0.0 at the end.
    let mut best_score_so_far = f64::NEG_INFINITY;
    while !bayes_opt.is_complete() {
        let config = bayes_opt.suggest().expect("unwrap");
        let lr = config
            .get("lr")
            .expect("unwrap")
            .as_float()
            .expect("unwrap");
        let batch_size = config
            .get("batch_size")
            .expect("unwrap")
            .as_int()
            .expect("unwrap");
        let dropout = config
            .get("dropout")
            .expect("unwrap")
            .as_float()
            .expect("unwrap");
        let score = objective_function(lr, batch_size, dropout);
        if score > best_score_so_far {
            best_score_so_far = score;
            println!(
                " ✨ Iter {:2}: score={:.6} (NEW BEST!) lr={:.6}, bs={:3}, dp={:.4}",
                iteration, score, lr, batch_size, dropout
            );
        } else if iteration % 5 == 0 || iteration < 5 {
            // Throttled progress line: always for the first 5 iterations,
            // then every 5th.
            println!(
                " Iter {:2}: score={:.6} lr={:.6}, bs={:3}, dp={:.4}",
                iteration, score, lr, batch_size, dropout
            );
        }
        // Feed the observation back so the GP surrogate can update.
        bayes_opt.add_result(HyperparamResult::new(config, score));
        iteration += 1;
    }
    println!();
    println!("Optimization complete!");
    println!(" • Total evaluations: {}", bayes_opt.current_iteration());
    // NOTE: prints -inf if the loop never ran (zero-budget optimizer).
    println!(" • Best score: {:.6}", best_score_so_far);
    println!("\nTop 3 configurations:");
    for (i, result) in bayes_opt.sorted_results().iter().take(3).enumerate() {
        let lr = result
            .config
            .get("lr")
            .expect("unwrap")
            .as_float()
            .expect("unwrap");
        let batch_size = result
            .config
            .get("batch_size")
            .expect("unwrap")
            .as_int()
            .expect("unwrap");
        let dropout = result
            .config
            .get("dropout")
            .expect("unwrap")
            .as_float()
            .expect("unwrap");
        println!(
            " {}. Score={:.6}: lr={:.6}, bs={:3}, dp={:.4}",
            i + 1,
            result.score,
            lr,
            batch_size,
            dropout
        );
    }
}