// NOTE(review): this span contained only bare sequential integers (1..43),
// an extraction/paste artifact that is not valid Rust — removed.
//! Hyperparameter Optimization Module (MLOPS-011)
//!
//! Bayesian optimization with TPE and Hyperband schedulers.
//!
//! # Toyota Way: Kaizen
//!
//! Continuous improvement through intelligent search. Each trial informs the next,
//! building knowledge iteratively rather than wasteful exhaustive search.
//!
//! # Example
//!
//! ```ignore
//! use entrenar::optim::hpo::{HyperparameterSpace, ParameterDomain, TPEOptimizer};
//!
//! let mut space = HyperparameterSpace::new();
//! space.add("learning_rate", ParameterDomain::Continuous {
//!     low: 1e-5, high: 1e-1, log_scale: true,
//! });
//! space.add("batch_size", ParameterDomain::Discrete { low: 8, high: 128 });
//!
//! let optimizer = TPEOptimizer::new(space);
//! let config = optimizer.suggest(&trials);
//! ```
//!
//! # References
//!
//! \[1\] Bergstra et al. (2011) - Algorithms for Hyper-Parameter Optimization (TPE)
//! \[2\] Li et al. (2018) - Hyperband: A Novel Bandit-Based Approach
pub use ;
pub use GridSearch;
pub use HyperbandScheduler;
pub use TPEOptimizer;
pub use ;