Expand description
§hyperopt
Tree-of-Parzen-estimators hyperparameter optimization for Rust
§Examples
§Continuous
use std::f64::consts::{FRAC_PI_2, PI};
use approx::assert_abs_diff_eq;
use fastrand::Rng;
use ordered_float::NotNan;
use hyperopt::Optimizer;
use hyperopt::kernel::continuous::Epanechnikov;
use hyperopt::kernel::universal::Uniform;
fn main() {
    // Parameter search limits: one full period of the cosine around its minimum at π.
    let min = NotNan::new(FRAC_PI_2).unwrap();
    let max = NotNan::new(PI + FRAC_PI_2).unwrap();
    let mut optimizer = Optimizer::new(
        min, max, // parameter search limits
        Uniform::<NotNan<f64>>::with_bounds(min, max), // our initial guess is just as bad
    );
    // Run 50 trials for the cosine function and try to find the point `(π, -1)`:
    let mut rng = Rng::with_seed(42);
    for _ in 0..50 {
        // Generate new trials using Epanechnikov kernel:
        let x = optimizer.new_trial::<Epanechnikov<_>, NotNan<f64>>(&mut rng);
        // Report the objective value back so the estimator can refine its model.
        optimizer.feed_back(x, NotNan::new(x.cos()).unwrap());
    }
    // The best trial should have converged close to the true minimum `(π, -1)`.
    let best_trial = optimizer.best_trial().unwrap();
    assert_abs_diff_eq!(best_trial.parameter.into_inner(), PI, epsilon = 0.02);
    assert_abs_diff_eq!(best_trial.metric.into_inner(), -1.0, epsilon = 0.01);
}
§Discrete
use fastrand::Rng;
use ordered_float::OrderedFloat;
use hyperopt::Optimizer;
use hyperopt::kernel::discrete::Binomial;
use hyperopt::kernel::universal::Uniform;
fn main() {
    // Search the integer range [-100, 100], starting from a uniform prior.
    let mut optimizer = Optimizer::new(-100, 100, Uniform::with_bounds(-100, 100));
    let mut rng = Rng::with_seed(42);
    // Minimize x² - 4x over 30 trials using the binomial kernel.
    // https://www.wolframalpha.com/input?i=x%5E2+-+4x
    for _ in 0..30 {
        let parameter = optimizer.new_trial::<Binomial<i32, OrderedFloat<f64>>, _>(&mut rng);
        optimizer.feed_back(parameter, parameter * parameter - 4 * parameter);
    }
    // The optimum of x² - 4x is at x = 2, where the metric equals -4.
    let best = optimizer.best_trial().unwrap();
    assert_eq!(best.parameter, 2);
    assert_eq!(best.metric, -4);
}
§Features
ordered-float
enables support for the `OrderedFloat` and `NotNan` types
Re-exports§
Modules§
- Additional constants that are missing in the standard library.
- Kernel density estimator implementation.
- Different kernels for `crate::kde::KernelDensityEstimator`.
Structs§
- ✨ Hyperparameter optimizer.