use scirs2_core::ndarray::{Array1, Array2};
use scirs2_core::numeric::{Float, FromPrimitive};
use std::fmt::Debug;

use crate::error::Result;

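/// Hyperparameter optimizer for time series models.
///
/// Samples candidate hyperparameter sets from a [`SearchSpace`] according to the
/// configured [`OptimizationMethod`], evaluates them with a user-supplied objective
/// function, and tracks the best-scoring configuration found so far.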
#[derive(Debug)]
pub struct HyperparameterOptimizer<F: Float + Debug + scirs2_core::ndarray::ScalarOperand> {
    method: OptimizationMethod,
    search_space: SearchSpace<F>,
    best_params: Option<HyperparameterSet<F>>,
    best_score: Option<F>,
    history: Vec<OptimizationStep<F>>,
    max_trials: usize,
}

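/// Search strategies supported by [`HyperparameterOptimizer`].
///
/// `GridSearch` and `TPE` are currently placeholders that fall back to random
/// sampling; `BayesianOptimization` uses a simple upper-confidence-bound
/// surrogate, and `EvolutionarySearch` recombines the best trials seen so far.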
#[derive(Debug, Clone)]
pub enum OptimizationMethod {
    RandomSearch,
    GridSearch,
    BayesianOptimization,
    EvolutionarySearch,
    TPE,
}

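/// Definition of the space to search over: continuous parameters as
/// `(name, min, max)` ranges, integer parameters as inclusive `(name, min, max)`
/// ranges, and categorical parameters as `(name, choices)` lists.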
#[derive(Debug, Clone)]
pub struct SearchSpace<F: Float + Debug> {
    pub continuous: Vec<(String, F, F)>,
    pub integer: Vec<(String, i32, i32)>,
    pub categorical: Vec<(String, Vec<String>)>,
}

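/// A single, concrete assignment of values to the parameters in a [`SearchSpace`].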
#[derive(Debug, Clone)]
pub struct HyperparameterSet<F: Float + Debug> {
    pub continuous: Vec<(String, F)>,
    pub integer: Vec<(String, i32)>,
    pub categorical: Vec<(String, String)>,
}

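/// Record of one completed trial: the sampled parameters, the objective score,
/// and the wall-clock training time in seconds.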
#[derive(Debug, Clone)]
pub struct OptimizationStep<F: Float + Debug> {
    pub trial_id: usize,
    pub params: HyperparameterSet<F>,
    pub score: F,
    pub training_time: F,
}

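/// Summary of an optimization run: the best parameters and score, the full
/// trial history, and the running-best convergence curve.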
#[derive(Debug)]
pub struct OptimizationResults<F: Float + Debug> {
    pub best_params: Option<HyperparameterSet<F>>,
    pub best_score: Option<F>,
    pub history: Vec<OptimizationStep<F>>,
    pub convergence_curve: Vec<F>,
}

impl<F: Float + Debug + Clone + FromPrimitive + scirs2_core::ndarray::ScalarOperand>
    HyperparameterOptimizer<F>
{
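    /// Creates a new optimizer with the given search method, search space, and
    /// trial budget.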
    pub fn new(
        method: OptimizationMethod,
        search_space: SearchSpace<F>,
        max_trials: usize,
    ) -> Self {
        Self {
            method,
            search_space,
            best_params: None,
            best_score: None,
            history: Vec::new(),
            max_trials,
        }
    }

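    /// Runs the configured search for up to `max_trials` trials, calling
    /// `objectivefn` on each sampled parameter set. Higher scores are better;
    /// the best-scoring parameter set seen is returned.
    ///
    /// Illustrative sketch (marked `ignore` because the import paths depend on
    /// how this module is re-exported by the crate):
    ///
    /// ```ignore
    /// let mut space = SearchSpace::<f64>::new();
    /// space.add_continuous("learning_rate".to_string(), 1e-3, 1e-1);
    /// space.add_integer("hidden_size".to_string(), 32, 256);
    ///
    /// let mut opt = HyperparameterOptimizer::new(OptimizationMethod::RandomSearch, space, 20);
    /// let best = opt.optimize(|params| {
    ///     let lr = params.get_continuous("learning_rate").unwrap_or(0.01);
    ///     // Score peaks at learning_rate == 0.01 (higher is better).
    ///     Ok(-(lr - 0.01).abs())
    /// })?;
    /// ```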
    pub fn optimize<ModelFn>(&mut self, objectivefn: ModelFn) -> Result<HyperparameterSet<F>>
    where
        ModelFn: Fn(&HyperparameterSet<F>) -> Result<F>,
    {
        for trial in 0..self.max_trials {
            let params = match self.method {
                OptimizationMethod::RandomSearch => self.random_search()?,
                OptimizationMethod::GridSearch => self.grid_search(trial)?,
                OptimizationMethod::BayesianOptimization => self.bayesian_optimization()?,
                OptimizationMethod::EvolutionarySearch => self.evolutionary_search()?,
                OptimizationMethod::TPE => self.tpe_search()?,
            };

            let start_time = std::time::Instant::now();
            let score = objectivefn(&params)?;
            let training_time = F::from(start_time.elapsed().as_secs_f64()).unwrap();

            let is_better = self.best_score.is_none_or(|best| score > best);
            if is_better {
                self.best_params = Some(params.clone());
                self.best_score = Some(score);
            }

            self.history.push(OptimizationStep {
                trial_id: trial,
                params,
                score,
                training_time,
            });

            #[allow(clippy::print_stdout)]
            {
                println!(
                    "Trial {}: Score = {:.6}, Best = {:.6}",
                    trial,
                    score.to_f64().unwrap_or(0.0),
                    self.best_score.unwrap().to_f64().unwrap_or(0.0)
                );
            }
        }

        self.best_params.clone().ok_or_else(|| {
            crate::error::TimeSeriesError::InvalidOperation("No successful trials".to_string())
        })
    }

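    /// Samples a parameter set uniformly at random from the search space.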
    fn random_search(&self) -> Result<HyperparameterSet<F>> {
        let mut params = HyperparameterSet {
            continuous: Vec::new(),
            integer: Vec::new(),
            categorical: Vec::new(),
        };

        for (name, min_val, max_val) in &self.search_space.continuous {
            let range = *max_val - *min_val;
            let random_val = F::from(scirs2_core::random::random::<f64>()).unwrap();
            let value = *min_val + range * random_val;
            params.continuous.push((name.clone(), value));
        }

        for (name, min_val, max_val) in &self.search_space.integer {
            let range = max_val - min_val;
            let random_val = (scirs2_core::random::random::<f64>() * (range + 1) as f64) as i32;
            let value = min_val + random_val;
            params.integer.push((name.clone(), value));
        }

        for (name, choices) in &self.search_space.categorical {
            let idx = (scirs2_core::random::random::<f64>() * choices.len() as f64) as usize;
            let value = choices[idx.min(choices.len() - 1)].clone();
            params.categorical.push((name.clone(), value));
        }

        Ok(params)
    }

    fn grid_search(&self, _trial: usize) -> Result<HyperparameterSet<F>> {
        // Grid enumeration is not implemented yet; fall back to random sampling.
        self.random_search()
    }

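    /// Bayesian-optimization-style proposal: samples a handful of random
    /// candidates and keeps the one with the highest UCB acquisition value,
    /// falling back to pure random sampling until there is any history.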
    fn bayesian_optimization(&self) -> Result<HyperparameterSet<F>> {
        if self.history.is_empty() {
            return self.random_search();
        }

        let mut best_candidate = None;
        let mut best_acquisition = F::from(-f64::INFINITY).unwrap();

        for _ in 0..10 {
            let candidate = self.random_search()?;
            let acquisition = self.compute_acquisition_ucb(&candidate)?;

            if acquisition > best_acquisition {
                best_acquisition = acquisition;
                best_candidate = Some(candidate);
            }
        }

        best_candidate.ok_or_else(|| {
            crate::error::TimeSeriesError::InvalidOperation("Failed to find candidate".to_string())
        })
    }

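    /// Upper confidence bound (UCB) acquisition: `UCB(x) = mu(x) + beta * sigma(x)`,
    /// with the exploration weight fixed at `beta = 2.0`.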
    fn compute_acquisition_ucb(&self, params: &HyperparameterSet<F>) -> Result<F> {
        let mean = self.predict_mean(params)?;
        let std = self.predict_std(params)?;
        let beta = F::from(2.0).unwrap();
        Ok(mean + beta * std)
    }

    fn predict_mean(&self, _params: &HyperparameterSet<F>) -> Result<F> {
        if self.history.is_empty() {
            return Ok(F::zero());
        }

        // Simple surrogate: the mean score over all trials so far, independent
        // of the candidate parameters.
        let sum: F = self
            .history
            .iter()
            .map(|step| step.score)
            .fold(F::zero(), |acc, x| acc + x);
        Ok(sum / F::from(self.history.len()).unwrap())
    }

    fn predict_std(&self, _params: &HyperparameterSet<F>) -> Result<F> {
        // Placeholder surrogate uncertainty: constant standard deviation of 1.
        Ok(F::one())
    }

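    /// Evolutionary proposal: once at least five trials exist, recombines the
    /// two best parameter sets found so far; otherwise samples randomly.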
    fn evolutionary_search(&self) -> Result<HyperparameterSet<F>> {
        if self.history.len() < 5 {
            return self.random_search();
        }

        let mut sorted_history = self.history.clone();
        sorted_history.sort_by(|a, b| b.score.partial_cmp(&a.score).unwrap());

        let parent1 = &sorted_history[0].params;
        let parent2 = &sorted_history[1].params;

        self.crossover_mutate(parent1, parent2)
    }

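    /// Blend crossover for continuous parameters (a random interpolation between
    /// the two parents, with a 10% chance of a small additive mutation); integer
    /// and categorical parameters are copied from the first parent.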
    fn crossover_mutate(
        &self,
        parent1: &HyperparameterSet<F>,
        parent2: &HyperparameterSet<F>,
    ) -> Result<HyperparameterSet<F>> {
        let mut child = HyperparameterSet {
            continuous: Vec::new(),
            integer: Vec::new(),
            categorical: Vec::new(),
        };

        for ((name1, val1), (_, val2)) in parent1.continuous.iter().zip(&parent2.continuous) {
            // Interpolate between the parents with a random blend factor.
            let alpha = F::from(scirs2_core::random::random::<f64>()).unwrap();
            let crossed_val = *val1 + alpha * (*val2 - *val1);

            // With 10% probability, add a small perturbation in [-0.1, 0.1).
            let mutation = if scirs2_core::random::random::<f64>() < 0.1 {
                F::from((scirs2_core::random::random::<f64>() - 0.5) * 0.2).unwrap()
            } else {
                F::zero()
            };

            child
                .continuous
                .push((name1.clone(), crossed_val + mutation));
        }

        for (name, val) in &parent1.integer {
            child.integer.push((name.clone(), *val));
        }

        for (name, val) in &parent1.categorical {
            child.categorical.push((name.clone(), val.clone()));
        }

        Ok(child)
    }

    fn tpe_search(&self) -> Result<HyperparameterSet<F>> {
        // Tree-structured Parzen Estimator is not implemented yet; fall back to
        // random sampling.
        self.random_search()
    }

    pub fn get_results(&self) -> OptimizationResults<F> {
        OptimizationResults {
            best_params: self.best_params.clone(),
            best_score: self.best_score,
            history: self.history.clone(),
            convergence_curve: self.get_convergence_curve(),
        }
    }

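    /// Running maximum of the trial scores, i.e. the best score seen up to and
    /// including each trial.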
    fn get_convergence_curve(&self) -> Vec<F> {
        let mut best_so_far = Vec::new();
        let mut current_best = F::from(-f64::INFINITY).unwrap();

        for step in &self.history {
            if step.score > current_best {
                current_best = step.score;
            }
            best_so_far.push(current_best);
        }

        best_so_far
    }

    pub fn best_params(&self) -> Option<&HyperparameterSet<F>> {
        self.best_params.as_ref()
    }

    pub fn best_score(&self) -> Option<F> {
        self.best_score
    }

    pub fn history(&self) -> &[OptimizationStep<F>] {
        &self.history
    }
}

impl<F: Float + Debug> SearchSpace<F> {
    pub fn new() -> Self {
        Self {
            continuous: Vec::new(),
            integer: Vec::new(),
            categorical: Vec::new(),
        }
    }

    pub fn add_continuous(&mut self, name: String, min_val: F, max_val: F) {
        self.continuous.push((name, min_val, max_val));
    }

    pub fn add_integer(&mut self, name: String, min_val: i32, max_val: i32) {
        self.integer.push((name, min_val, max_val));
    }

    pub fn add_categorical(&mut self, name: String, choices: Vec<String>) {
        self.categorical.push((name, choices));
    }
}

impl<F: Float + Debug> Default for SearchSpace<F> {
    fn default() -> Self {
        Self::new()
    }
}

impl<F: Float + Debug> HyperparameterSet<F> {
    pub fn new() -> Self {
        Self {
            continuous: Vec::new(),
            integer: Vec::new(),
            categorical: Vec::new(),
        }
    }

    pub fn get_continuous(&self, name: &str) -> Option<F> {
        self.continuous
            .iter()
            .find(|(param_name, _)| param_name == name)
            .map(|(_, value)| *value)
    }

    pub fn get_integer(&self, name: &str) -> Option<i32> {
        self.integer
            .iter()
            .find(|(param_name, _)| param_name == name)
            .map(|(_, value)| *value)
    }

    pub fn get_categorical(&self, name: &str) -> Option<&str> {
        self.categorical
            .iter()
            .find(|(param_name, _)| param_name == name)
            .map(|(_, value)| value.as_str())
    }
}

impl<F: Float + Debug> Default for HyperparameterSet<F> {
    fn default() -> Self {
        Self::new()
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use approx::assert_abs_diff_eq;

    #[test]
    fn test_search_space_creation() {
        let mut search_space = SearchSpace::<f64>::new();
        search_space.add_continuous("learning_rate".to_string(), 0.001, 0.1);
        search_space.add_integer("hidden_size".to_string(), 32, 256);
        search_space.add_categorical(
            "optimizer".to_string(),
            vec!["adam".to_string(), "sgd".to_string()],
        );

        assert_eq!(search_space.continuous.len(), 1);
        assert_eq!(search_space.integer.len(), 1);
        assert_eq!(search_space.categorical.len(), 1);
    }

    #[test]
    fn test_hyperparameter_set() {
        let mut params = HyperparameterSet::<f64>::new();
        params.continuous.push(("learning_rate".to_string(), 0.01));
        params.integer.push(("hidden_size".to_string(), 128));
        params
            .categorical
            .push(("optimizer".to_string(), "adam".to_string()));

        assert_eq!(params.get_continuous("learning_rate"), Some(0.01));
        assert_eq!(params.get_integer("hidden_size"), Some(128));
        assert_eq!(params.get_categorical("optimizer"), Some("adam"));
        assert_eq!(params.get_continuous("nonexistent"), None);
    }

    #[test]
    fn test_random_search() {
        let search_space = SearchSpace {
            continuous: vec![
                ("learning_rate".to_string(), 0.001, 0.1),
                ("dropout".to_string(), 0.0, 0.5),
            ],
            integer: vec![
                ("hidden_size".to_string(), 32, 256),
                ("num_layers".to_string(), 1, 6),
            ],
            categorical: vec![(
                "optimizer".to_string(),
                vec!["adam".to_string(), "sgd".to_string()],
            )],
        };

        let optimizer =
            HyperparameterOptimizer::new(OptimizationMethod::RandomSearch, search_space, 10);

        let params = optimizer.random_search().unwrap();
        assert_eq!(params.continuous.len(), 2);
        assert_eq!(params.integer.len(), 2);
        assert_eq!(params.categorical.len(), 1);

        for (name, value) in &params.continuous {
            if name == "learning_rate" {
                assert!(*value >= 0.001 && *value <= 0.1);
            } else if name == "dropout" {
                assert!(*value >= 0.0 && *value <= 0.5);
            }
        }

        for (name, value) in &params.integer {
            if name == "hidden_size" {
                assert!(*value >= 32 && *value <= 256);
            } else if name == "num_layers" {
                assert!(*value >= 1 && *value <= 6);
            }
        }
    }

    #[test]
    fn test_hyperparameter_optimization() {
        let search_space = SearchSpace {
            continuous: vec![
                ("learning_rate".to_string(), 0.001, 0.1),
                ("dropout".to_string(), 0.0, 0.5),
            ],
            integer: vec![
                ("hidden_size".to_string(), 32, 256),
                ("num_layers".to_string(), 1, 6),
            ],
            categorical: vec![(
                "optimizer".to_string(),
                vec!["adam".to_string(), "sgd".to_string()],
            )],
        };

        let mut optimizer =
            HyperparameterOptimizer::new(OptimizationMethod::RandomSearch, search_space, 5);

        let objective = |params: &HyperparameterSet<f64>| -> Result<f64> {
            let mut score = 0.5;

            for (name, value) in &params.continuous {
                if name == "learning_rate" {
                    score += 0.1 * (0.01 - value).abs();
                }
            }

            Ok(score)
        };

        let best_params = optimizer.optimize(objective).unwrap();
        assert!(!best_params.continuous.is_empty());

        let results = optimizer.get_results();
        assert!(results.best_score.is_some());
        assert_eq!(results.history.len(), 5);
        assert_eq!(results.convergence_curve.len(), 5);
    }

    #[test]
    fn test_evolutionary_search() {
        let search_space = SearchSpace {
            continuous: vec![("x".to_string(), -5.0, 5.0)],
            integer: vec![],
            categorical: vec![],
        };

        let mut optimizer =
            HyperparameterOptimizer::new(OptimizationMethod::EvolutionarySearch, search_space, 10);

        let objective = |params: &HyperparameterSet<f64>| -> Result<f64> {
            let x = params.get_continuous("x").unwrap_or(0.0);
            Ok(-x * x)
        };

        let best_params = optimizer.optimize(objective).unwrap();
        let best_x = best_params.get_continuous("x").unwrap();

        assert!(best_x.abs() < 3.0);
    }

    #[test]
    fn test_convergence_curve() {
        let search_space = SearchSpace {
            continuous: vec![("x".to_string(), 0.0, 1.0)],
            integer: vec![],
            categorical: vec![],
        };

        let mut optimizer =
            HyperparameterOptimizer::new(OptimizationMethod::RandomSearch, search_space, 3);

        let objective = |params: &HyperparameterSet<f64>| -> Result<f64> {
            let x = params.get_continuous("x").unwrap_or(0.0);
            Ok(x)
        };

        optimizer.optimize(objective).unwrap();
        let convergence = optimizer.get_convergence_curve();

        assert_eq!(convergence.len(), 3);

        for i in 1..convergence.len() {
            assert!(convergence[i] >= convergence[i - 1]);
        }
    }
}