use optimizer::prelude::*;
/// Example: tune `learning_rate` and `momentum` against a synthetic
/// training curve using random search (30 trials) with median pruning.
///
/// The objective simulates a 20-epoch training run whose loss decays
/// exponentially toward a floor determined by the sampled hyperparameters;
/// intermediate losses are reported so the pruner can stop unpromising
/// trials early.
fn main() -> optimizer::Result<()> {
    // Seeded sampler for reproducibility. The median pruner ignores the
    // first 3 epochs of each trial and needs 3 completed trials before it
    // starts pruning.
    let study: Study<f64> = Study::builder()
        .minimize()
        .sampler(RandomSampler::with_seed(42))
        .pruner(
            MedianPruner::new(Direction::Minimize)
                .n_warmup_steps(3)
                .n_min_trials(3),
        )
        .build();

    // Search space: log-ish range for the learning rate, [0, 0.99] for momentum.
    let lr = FloatParam::new(1e-4, 1.0).name("learning_rate");
    let momentum = FloatParam::new(0.0, 0.99).name("momentum");
    let n_epochs: u64 = 20;

    study.optimize(30, |trial: &mut optimizer::Trial| {
        let lr_val = lr.suggest(trial)?;
        let mom = momentum.suggest(trial)?;

        // The loss floor depends only on the sampled hyperparameters, not on
        // the epoch, so compute it once instead of on every iteration.
        // The floor is minimized at lr = 1e-2 and momentum = 0.8.
        let lr_penalty = (lr_val.log10() - 0.01_f64.log10()).powi(2);
        let mom_penalty = (mom - 0.8).powi(2);
        let base_loss = 0.02 + 0.05 * lr_penalty + 1.5 * mom_penalty;

        let mut loss = 1.0;
        for epoch in 0..n_epochs {
            // Exponential decay from 1.0 toward `base_loss` over the run.
            let progress = (epoch as f64 + 1.0) / n_epochs as f64;
            loss = base_loss + (1.0 - base_loss) * (-3.5 * progress).exp();

            // Report the intermediate value so the pruner can compare this
            // trial against the running median at the same step.
            trial.report(epoch, loss);
            if trial.should_prune() {
                Err(TrialPruned)?;
            }
        }
        Ok::<_, Error>(loss)
    })?;

    // Summarize the search. Both parameters were suggested in every
    // completed trial, so their absence on the best trial would be a bug —
    // state that invariant instead of a bare unwrap.
    let best = study.best_trial()?;
    println!(
        "Recorded {} trials ({} pruned)",
        study.n_trials(),
        study.n_pruned_trials()
    );
    println!("Best trial #{}: loss = {:.6}", best.id, best.value);
    println!(
        " learning_rate = {:.6}",
        best.get(&lr)
            .expect("learning_rate is suggested in every trial")
    );
    println!(
        " momentum = {:.4}",
        best.get(&momentum)
            .expect("momentum is suggested in every trial")
    );
    Ok(())
}