use scirs2_core::ndarray::{Array1, Array2};
use scirs2_core::numeric::{Float, FromPrimitive};
use std::fmt::Debug;
use crate::error::Result;
/// Hyperparameter optimizer: searches a [`SearchSpace`] for the parameter
/// set that maximizes a caller-supplied objective (higher score is better).
#[derive(Debug)]
pub struct HyperparameterOptimizer<F: Float + Debug + scirs2_core::ndarray::ScalarOperand> {
/// Strategy used to propose candidate parameter sets.
method: OptimizationMethod,
/// Parameter ranges and choices that candidates are drawn from.
search_space: SearchSpace<F>,
/// Best parameter set seen so far; `None` until the first trial completes.
best_params: Option<HyperparameterSet<F>>,
/// Score of `best_params`; `None` until the first trial completes.
best_score: Option<F>,
/// All completed trials, in execution order.
history: Vec<OptimizationStep<F>>,
/// Maximum number of objective evaluations `optimize` will perform.
max_trials: usize,
}
/// Search strategy used to propose hyperparameter candidates.
#[derive(Debug, Clone)]
pub enum OptimizationMethod {
/// Uniform random sampling from the search space.
RandomSearch,
/// Grid enumeration (currently falls back to random sampling).
GridSearch,
/// UCB-acquisition selection over a batch of random proposals.
BayesianOptimization,
/// Crossover/mutation of the best historical trials.
EvolutionarySearch,
/// Tree-structured Parzen Estimator (currently falls back to random sampling).
TPE,
}
/// Definition of the searchable hyperparameter space.
#[derive(Debug, Clone)]
pub struct SearchSpace<F: Float + Debug> {
/// Continuous parameters as `(name, lower_bound, upper_bound)` triples.
pub continuous: Vec<(String, F, F)>,
/// Integer parameters as `(name, min, max)` triples; both ends are sampled.
pub integer: Vec<(String, i32, i32)>,
/// Categorical parameters as `(name, choices)` pairs.
pub categorical: Vec<(String, Vec<String>)>,
}
/// One concrete assignment of values to hyperparameters.
#[derive(Debug, Clone)]
pub struct HyperparameterSet<F: Float + Debug> {
/// Continuous values as `(name, value)` pairs.
pub continuous: Vec<(String, F)>,
/// Integer values as `(name, value)` pairs.
pub integer: Vec<(String, i32)>,
/// Categorical values as `(name, chosen_option)` pairs.
pub categorical: Vec<(String, String)>,
}
/// Record of a single completed optimization trial.
#[derive(Debug, Clone)]
pub struct OptimizationStep<F: Float + Debug> {
/// Zero-based index of the trial within the run.
pub trial_id: usize,
/// Parameter set evaluated in this trial.
pub params: HyperparameterSet<F>,
/// Objective value returned for `params` (higher is better).
pub score: F,
/// Wall-clock duration of the objective evaluation, in seconds.
pub training_time: F,
}
/// Summary of an optimization run, produced by `get_results`.
#[derive(Debug)]
pub struct OptimizationResults<F: Float + Debug> {
/// Best parameter set found, if any trial ran.
pub best_params: Option<HyperparameterSet<F>>,
/// Score of `best_params`, if any trial ran.
pub best_score: Option<F>,
/// All completed trials, in execution order.
pub history: Vec<OptimizationStep<F>>,
/// Best score seen up to and including each trial (running maximum).
pub convergence_curve: Vec<F>,
}
impl<F: Float + Debug + Clone + FromPrimitive + scirs2_core::ndarray::ScalarOperand>
    HyperparameterOptimizer<F>
{
    /// Creates an optimizer with the given strategy, search space, and trial budget.
    pub fn new(
        method: OptimizationMethod,
        search_space: SearchSpace<F>,
        max_trials: usize,
    ) -> Self {
        Self {
            method,
            search_space,
            best_params: None,
            best_score: None,
            history: Vec::new(),
            max_trials,
        }
    }

    /// Runs up to `max_trials` evaluations of `objectivefn` and returns the
    /// best-scoring parameter set found (higher score is better).
    ///
    /// Every trial is appended to the history; the incumbent best is updated
    /// whenever a trial strictly improves on it.
    ///
    /// # Errors
    ///
    /// Propagates errors from candidate generation or from `objectivefn`,
    /// and returns `InvalidOperation` if no trial produced a parameter set.
    pub fn optimize<ModelFn>(&mut self, objectivefn: ModelFn) -> Result<HyperparameterSet<F>>
    where
        ModelFn: Fn(&HyperparameterSet<F>) -> Result<F>,
    {
        for trial in 0..self.max_trials {
            let params = match self.method {
                OptimizationMethod::RandomSearch => self.random_search()?,
                OptimizationMethod::GridSearch => self.grid_search(trial)?,
                OptimizationMethod::BayesianOptimization => self.bayesian_optimization()?,
                OptimizationMethod::EvolutionarySearch => self.evolutionary_search()?,
                OptimizationMethod::TPE => self.tpe_search()?,
            };
            let start_time = std::time::Instant::now();
            // Fix: this call previously read `objectivefn(¶ms)` — the `&` had
            // been mangled into U+00B6, which does not compile.
            let score = objectivefn(&params)?;
            let training_time =
                F::from(start_time.elapsed().as_secs_f64()).expect("Operation failed");
            // The first trial always wins; afterwards require strict improvement.
            let is_better = self.best_score.is_none_or(|best| score > best);
            if is_better {
                self.best_params = Some(params.clone());
                self.best_score = Some(score);
            }
            self.history.push(OptimizationStep {
                trial_id: trial,
                params,
                score,
                training_time,
            });
            // Fix: `clippy::println_print` is not a real lint name; the lint
            // that fires on `println!` is `clippy::print_stdout`.
            #[allow(clippy::print_stdout)]
            {
                println!(
                    "Trial {}: Score = {:.6}, Best = {:.6}",
                    trial,
                    score.to_f64().unwrap_or(0.0),
                    self.best_score
                        .expect("best_score is set by the first trial")
                        .to_f64()
                        .unwrap_or(0.0)
                );
            }
        }
        self.best_params.clone().ok_or_else(|| {
            crate::error::TimeSeriesError::InvalidOperation("No successful trials".to_string())
        })
    }

    /// Draws one candidate uniformly at random from the search space.
    fn random_search(&self) -> Result<HyperparameterSet<F>> {
        let mut params = HyperparameterSet {
            continuous: Vec::new(),
            integer: Vec::new(),
            categorical: Vec::new(),
        };
        for (name, min_val, max_val) in &self.search_space.continuous {
            let range = *max_val - *min_val;
            let random_val =
                F::from(scirs2_core::random::random::<f64>()).expect("Operation failed");
            let value = *min_val + range * random_val;
            params.continuous.push((name.clone(), value));
        }
        for (name, min_val, max_val) in &self.search_space.integer {
            // `range + 1` buckets so that `max_val` itself can be drawn.
            let range = max_val - min_val;
            let random_val = (scirs2_core::random::random::<f64>() * (range + 1) as f64) as i32;
            let value = min_val + random_val;
            params.integer.push((name.clone(), value));
        }
        for (name, choices) in &self.search_space.categorical {
            let idx = (scirs2_core::random::random::<f64>() * choices.len() as f64) as usize;
            // Clamp guards the edge case idx == len; panics if `choices` is
            // empty — NOTE(review): empty choice lists are assumed invalid.
            let value = choices[idx.min(choices.len() - 1)].clone();
            params.categorical.push((name.clone(), value));
        }
        Ok(params)
    }

    /// Grid search is not yet implemented; falls back to random sampling.
    fn grid_search(&self, _trial: usize) -> Result<HyperparameterSet<F>> {
        self.random_search()
    }

    /// Picks, among a small batch of random candidates, the one with the
    /// highest UCB acquisition value. Falls back to pure random sampling
    /// until at least one observation exists.
    fn bayesian_optimization(&self) -> Result<HyperparameterSet<F>> {
        if self.history.is_empty() {
            return self.random_search();
        }
        let mut best_candidate = None;
        let mut best_acquisition = F::from(-f64::INFINITY).expect("Failed to convert to float");
        for _ in 0..10 {
            let candidate = self.random_search()?;
            let acquisition = self.compute_acquisition_ucb(&candidate)?;
            if acquisition > best_acquisition {
                best_acquisition = acquisition;
                best_candidate = Some(candidate);
            }
        }
        best_candidate.ok_or_else(|| {
            crate::error::TimeSeriesError::InvalidOperation("Failed to find candidate".to_string())
        })
    }

    /// Upper-confidence-bound acquisition: `mean + beta * std` with beta = 2.
    fn compute_acquisition_ucb(&self, params: &HyperparameterSet<F>) -> Result<F> {
        let mean = self.predict_mean(params)?;
        let std = self.predict_std(params)?;
        let beta = F::from(2.0).expect("Failed to convert constant to float");
        Ok(mean + beta * std)
    }

    /// Surrogate mean prediction. NOTE(review): currently ignores `_params`
    /// and returns the mean score over the whole history — a placeholder
    /// until a real surrogate model is wired in.
    fn predict_mean(&self, _params: &HyperparameterSet<F>) -> Result<F> {
        if self.history.is_empty() {
            return Ok(F::zero());
        }
        let sum: F = self
            .history
            .iter()
            .map(|step| step.score)
            .fold(F::zero(), |acc, x| acc + x);
        Ok(sum / F::from(self.history.len()).expect("Operation failed"))
    }

    /// Surrogate standard-deviation prediction. Placeholder: constant 1.
    fn predict_std(&self, _params: &HyperparameterSet<F>) -> Result<F> {
        Ok(F::one())
    }

    /// Crossover of the two best historical parameter sets, with mutation.
    /// Falls back to random sampling until 5 observations exist.
    fn evolutionary_search(&self) -> Result<HyperparameterSet<F>> {
        if self.history.len() < 5 {
            return self.random_search();
        }
        let mut sorted_history = self.history.clone();
        // Descending by score; `expect` only fires if a score is NaN.
        sorted_history.sort_by(|a, b| b.score.partial_cmp(&a.score).expect("Operation failed"));
        let parent1 = &sorted_history[0].params;
        let parent2 = &sorted_history[1].params;
        self.crossover_mutate(parent1, parent2)
    }

    /// Blend-crossover on continuous values (with a 10% chance of a small
    /// additive mutation); integer and categorical genes are copied from
    /// `parent1` unchanged.
    fn crossover_mutate(
        &self,
        parent1: &HyperparameterSet<F>,
        parent2: &HyperparameterSet<F>,
    ) -> Result<HyperparameterSet<F>> {
        let mut child = HyperparameterSet {
            continuous: Vec::new(),
            integer: Vec::new(),
            categorical: Vec::new(),
        };
        for ((name1, val1), (_, val2)) in parent1.continuous.iter().zip(&parent2.continuous) {
            let alpha = F::from(scirs2_core::random::random::<f64>()).expect("Operation failed");
            let crossed_val = *val1 + alpha * (*val2 - *val1);
            // 10% mutation rate; perturbation uniform in [-0.1, 0.1).
            let mutation = if scirs2_core::random::random::<f64>() < 0.1 {
                F::from((scirs2_core::random::random::<f64>() - 0.5) * 0.2)
                    .expect("Operation failed")
            } else {
                F::zero()
            };
            child
                .continuous
                .push((name1.clone(), crossed_val + mutation));
        }
        for (name, val) in &parent1.integer {
            child.integer.push((name.clone(), *val));
        }
        for (name, val) in &parent1.categorical {
            child.categorical.push((name.clone(), val.clone()));
        }
        Ok(child)
    }

    /// TPE is not yet implemented; falls back to random sampling.
    fn tpe_search(&self) -> Result<HyperparameterSet<F>> {
        self.random_search()
    }

    /// Snapshot of the run: best params/score, full history, and the
    /// best-so-far convergence curve.
    pub fn get_results(&self) -> OptimizationResults<F> {
        OptimizationResults {
            best_params: self.best_params.clone(),
            best_score: self.best_score,
            history: self.history.clone(),
            convergence_curve: self.get_convergence_curve(),
        }
    }

    /// Running maximum of scores over the trial history.
    fn get_convergence_curve(&self) -> Vec<F> {
        let mut best_so_far = Vec::with_capacity(self.history.len());
        let mut current_best = F::from(-f64::INFINITY).expect("Failed to convert to float");
        for step in &self.history {
            if step.score > current_best {
                current_best = step.score;
            }
            best_so_far.push(current_best);
        }
        best_so_far
    }

    /// Best parameter set found so far, if any trial has completed.
    pub fn best_params(&self) -> Option<&HyperparameterSet<F>> {
        self.best_params.as_ref()
    }

    /// Best score observed so far, if any trial has completed.
    pub fn best_score(&self) -> Option<F> {
        self.best_score
    }

    /// All completed trials, in execution order.
    pub fn history(&self) -> &[OptimizationStep<F>] {
        &self.history
    }
}
impl<F: Float + Debug> SearchSpace<F> {
pub fn new() -> Self {
Self {
continuous: Vec::new(),
integer: Vec::new(),
categorical: Vec::new(),
}
}
pub fn add_continuous(&mut self, name: String, min_val: F, max_val: F) {
self.continuous.push((name, min_val, max_val));
}
pub fn add_integer(&mut self, name: String, min_val: i32, max_val: i32) {
self.integer.push((name, min_val, max_val));
}
pub fn add_categorical(&mut self, name: String, choices: Vec<String>) {
self.categorical.push((name, choices));
}
}
impl<F: Float + Debug> Default for SearchSpace<F> {
fn default() -> Self {
Self::new()
}
}
impl<F: Float + Debug> HyperparameterSet<F> {
pub fn new() -> Self {
Self {
continuous: Vec::new(),
integer: Vec::new(),
categorical: Vec::new(),
}
}
pub fn get_continuous(&self, name: &str) -> Option<F> {
self.continuous
.iter()
.find(|(param_name, _)| param_name == name)
.map(|(_, value)| *value)
}
pub fn get_integer(&self, name: &str) -> Option<i32> {
self.integer
.iter()
.find(|(param_name, _)| param_name == name)
.map(|(_, value)| *value)
}
pub fn get_categorical(&self, name: &str) -> Option<&str> {
self.categorical
.iter()
.find(|(param_name, _)| param_name == name)
.map(|(_, value)| value.as_str())
}
}
impl<F: Float + Debug> Default for HyperparameterSet<F> {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod tests {
    // Fix: removed `use approx::assert_abs_diff_eq;` — it was never used.
    use super::*;

    #[test]
    fn test_search_space_creation() {
        let mut search_space = SearchSpace::<f64>::new();
        search_space.add_continuous("learning_rate".to_string(), 0.001, 0.1);
        search_space.add_integer("hidden_size".to_string(), 32, 256);
        search_space.add_categorical(
            "optimizer".to_string(),
            vec!["adam".to_string(), "sgd".to_string()],
        );
        assert_eq!(search_space.continuous.len(), 1);
        assert_eq!(search_space.integer.len(), 1);
        assert_eq!(search_space.categorical.len(), 1);
    }

    #[test]
    fn test_hyperparameter_set() {
        let mut params = HyperparameterSet::<f64>::new();
        params.continuous.push(("learning_rate".to_string(), 0.01));
        params.integer.push(("hidden_size".to_string(), 128));
        params
            .categorical
            .push(("optimizer".to_string(), "adam".to_string()));
        assert_eq!(params.get_continuous("learning_rate"), Some(0.01));
        assert_eq!(params.get_integer("hidden_size"), Some(128));
        assert_eq!(params.get_categorical("optimizer"), Some("adam"));
        assert_eq!(params.get_continuous("nonexistent"), None);
    }

    #[test]
    fn test_random_search() {
        let search_space = SearchSpace {
            continuous: vec![
                ("learning_rate".to_string(), 0.001, 0.1),
                ("dropout".to_string(), 0.0, 0.5),
            ],
            integer: vec![
                ("hidden_size".to_string(), 32, 256),
                ("num_layers".to_string(), 1, 6),
            ],
            categorical: vec![(
                "optimizer".to_string(),
                vec!["adam".to_string(), "sgd".to_string()],
            )],
        };
        let optimizer =
            HyperparameterOptimizer::new(OptimizationMethod::RandomSearch, search_space, 10);
        let params = optimizer.random_search().expect("Operation failed");
        assert_eq!(params.continuous.len(), 2);
        assert_eq!(params.integer.len(), 2);
        assert_eq!(params.categorical.len(), 1);
        // Fix: `¶ms` (a mangled `&params`) restored to a plain borrow.
        for (name, value) in &params.continuous {
            if name == "learning_rate" {
                assert!((0.001..=0.1).contains(value));
            } else if name == "dropout" {
                assert!((0.0..=0.5).contains(value));
            }
        }
        for (name, value) in &params.integer {
            if name == "hidden_size" {
                assert!((32..=256).contains(value));
            } else if name == "num_layers" {
                assert!((1..=6).contains(value));
            }
        }
    }

    #[test]
    fn test_hyperparameter_optimization() {
        let search_space = SearchSpace {
            continuous: vec![
                ("learning_rate".to_string(), 0.001, 0.1),
                ("dropout".to_string(), 0.0, 0.5),
            ],
            integer: vec![
                ("hidden_size".to_string(), 32, 256),
                ("num_layers".to_string(), 1, 6),
            ],
            categorical: vec![(
                "optimizer".to_string(),
                vec!["adam".to_string(), "sgd".to_string()],
            )],
        };
        let mut optimizer =
            HyperparameterOptimizer::new(OptimizationMethod::RandomSearch, search_space, 5);
        let objective = |params: &HyperparameterSet<f64>| -> Result<f64> {
            let mut score = 0.5;
            // Fix: `¶ms` (a mangled `&params`) restored to a plain borrow.
            for (name, value) in &params.continuous {
                if name == "learning_rate" {
                    score += 0.1 * (0.01 - value).abs();
                }
            }
            Ok(score)
        };
        let best_params = optimizer.optimize(objective).expect("Operation failed");
        assert!(!best_params.continuous.is_empty());
        let results = optimizer.get_results();
        assert!(results.best_score.is_some());
        assert_eq!(results.history.len(), 5);
        assert_eq!(results.convergence_curve.len(), 5);
    }

    #[test]
    fn test_evolutionary_search() {
        let search_space = SearchSpace {
            continuous: vec![("x".to_string(), -5.0, 5.0)],
            integer: vec![],
            categorical: vec![],
        };
        let mut optimizer =
            HyperparameterOptimizer::new(OptimizationMethod::EvolutionarySearch, search_space, 10);
        // Maximizing -x^2 should drive x toward 0.
        let objective = |params: &HyperparameterSet<f64>| -> Result<f64> {
            let x = params.get_continuous("x").unwrap_or(0.0);
            Ok(-x * x)
        };
        let best_params = optimizer.optimize(objective).expect("Operation failed");
        let best_x = best_params.get_continuous("x").expect("Operation failed");
        assert!(best_x.abs() < 4.0, "Expected |x| < 4.0, got x = {best_x}");
    }

    #[test]
    fn test_convergence_curve() {
        let search_space = SearchSpace {
            continuous: vec![("x".to_string(), 0.0, 1.0)],
            integer: vec![],
            categorical: vec![],
        };
        let mut optimizer =
            HyperparameterOptimizer::new(OptimizationMethod::RandomSearch, search_space, 3);
        let objective = |params: &HyperparameterSet<f64>| -> Result<f64> {
            let x = params.get_continuous("x").unwrap_or(0.0);
            Ok(x)
        };
        optimizer.optimize(objective).expect("Operation failed");
        let convergence = optimizer.get_convergence_curve();
        assert_eq!(convergence.len(), 3);
        // The running best must be monotonically non-decreasing.
        for i in 1..convergence.len() {
            assert!(convergence[i] >= convergence[i - 1]);
        }
    }
}