use crate::error::{ScirsError, ScirsResult};
use std::collections::{HashMap, VecDeque};
use std::time::Instant;

/// Configuration for the self-tuning optimizer.
#[derive(Debug, Clone)]
pub struct SelfTuningConfig {
    /// Strategy used to adapt parameters.
    pub adaptation_strategy: AdaptationStrategy,
    /// Adapt parameters every `update_frequency` iterations.
    pub update_frequency: usize,
    /// Learning rate for parameter updates.
    pub learning_rate: f64,
    /// Number of recent samples kept for performance statistics.
    pub memory_window: usize,
    /// Whether Bayesian tuning is enabled (used by the hybrid strategy).
    pub use_bayesian_tuning: bool,
    /// Exploration factor for stochastic strategies.
    pub exploration_factor: f64,
}

impl Default for SelfTuningConfig {
    fn default() -> Self {
        Self {
            adaptation_strategy: AdaptationStrategy::PerformanceBased,
            update_frequency: 50,
            learning_rate: 0.1,
            memory_window: 100,
            use_bayesian_tuning: true,
            exploration_factor: 0.1,
        }
    }
}

/// Strategies for adapting optimizer parameters at runtime.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum AdaptationStrategy {
    /// Adapt based on recent performance metrics.
    PerformanceBased,
    /// Adapt based on convergence behavior.
    ConvergenceBased,
    /// Adapt using a reinforcement-learning agent.
    ReinforcementLearning,
    /// Adapt using Bayesian optimization over parameter values.
    BayesianOptimization,
    /// Combine the strategies above depending on the current metrics.
    Hybrid,
}

/// Self-tuning optimizer that adapts its parameters from observed performance.
pub struct SelfTuningOptimizer {
    config: SelfTuningConfig,
    parameter_manager: ParameterManager,
    performance_tracker: PerformanceTracker,
    adaptation_engine: AdaptationEngine,
    tuning_history: TuningHistory,
}

impl SelfTuningOptimizer {
    /// Creates a new self-tuning optimizer with the given configuration.
    pub fn new(config: SelfTuningConfig) -> Self {
        Self {
            parameter_manager: ParameterManager::new(),
            performance_tracker: PerformanceTracker::new(config.memory_window),
            adaptation_engine: AdaptationEngine::new(config.adaptation_strategy),
            tuning_history: TuningHistory::new(),
            config,
        }
    }

    /// Registers a tunable parameter under the given name.
    pub fn register_parameter<T>(
        &mut self,
        name: &str,
        param: TunableParameter<T>,
    ) -> ScirsResult<()>
    where
        T: Clone + PartialOrd + std::fmt::Debug + 'static + Send + Sync,
    {
        self.parameter_manager.register(name, param)
    }

    /// Returns the current values of all registered parameters.
    pub fn get_parameters(&self) -> &HashMap<String, ParameterValue> {
        self.parameter_manager.current_values()
    }

    /// Records the latest performance sample and, every `update_frequency`
    /// iterations, lets the adaptation engine adjust the parameters.
    /// Returns `Ok(true)` if any parameter changed.
    pub fn update_parameters(
        &mut self,
        iteration: usize,
        function_value: f64,
        gradient_norm: Option<f64>,
        improvement: f64,
    ) -> ScirsResult<bool> {
        self.performance_tracker.record_performance(
            iteration,
            function_value,
            gradient_norm,
            improvement,
        );

        if iteration % self.config.update_frequency != 0 {
            return Ok(false);
        }

        let performance_metrics = self.performance_tracker.compute_metrics();
        let adaptation_result = self.adaptation_engine.adapt_parameters(
            &mut self.parameter_manager,
            &performance_metrics,
            &self.config,
        )?;

        if adaptation_result.parameters_changed {
            self.tuning_history.record_adaptation(
                iteration,
                adaptation_result.clone(),
                performance_metrics.clone(),
            );
        }

        Ok(adaptation_result.parameters_changed)
    }

    /// Returns the performance tracker for inspection.
    pub fn performance_stats(&self) -> &PerformanceTracker {
        &self.performance_tracker
    }

    /// Returns the history of parameter adaptations.
    pub fn tuning_history(&self) -> &TuningHistory {
        &self.tuning_history
    }

    /// Generates a human-readable report of the current tuning state.
    pub fn generate_report(&self) -> String {
        let mut report = String::from("Self-Tuning Optimization Report\n");
        report.push_str("===============================\n\n");

        report.push_str("Current Parameters:\n");
        for (name, value) in self.parameter_manager.current_values() {
            report.push_str(&format!(" {}: {:?}\n", name, value));
        }
        report.push('\n');

        let metrics = self.performance_tracker.compute_metrics();
        report.push_str("Performance Metrics:\n");
        report.push_str(&format!(
            " Convergence Rate: {:.6}\n",
            metrics.convergence_rate
        ));
        report.push_str(&format!(
            " Average Improvement: {:.6e}\n",
            metrics.average_improvement
        ));
        report.push_str(&format!(
            " Stability Score: {:.3}\n",
            metrics.stability_score
        ));
        report.push('\n');

        report.push_str(&format!(
            "Total Adaptations: {}\n",
            self.tuning_history.adaptations.len()
        ));
        if let Some(last_adaptation) = self.tuning_history.adaptations.last() {
            report.push_str(&format!(
                "Last Adaptation at Iteration: {}\n",
                last_adaptation.iteration
            ));
        }

        report
    }
}

/// Manages registered parameters, their current values, and their bounds.
struct ParameterManager {
    parameters: HashMap<String, Box<dyn TunableParam>>,
    current_values: HashMap<String, ParameterValue>,
    parameter_bounds: HashMap<String, (ParameterValue, ParameterValue)>,
}

impl ParameterManager {
    fn new() -> Self {
        Self {
            parameters: HashMap::new(),
            current_values: HashMap::new(),
            parameter_bounds: HashMap::new(),
        }
    }

    fn register<T>(&mut self, name: &str, param: TunableParameter<T>) -> ScirsResult<()>
    where
        T: Clone + PartialOrd + std::fmt::Debug + 'static + Send + Sync,
    {
        let value = ParameterValue::from_typed(&param.current_value);
        let min_bound = ParameterValue::from_typed(&param.min_value);
        let max_bound = ParameterValue::from_typed(&param.max_value);

        self.current_values.insert(name.to_string(), value);
        self.parameter_bounds
            .insert(name.to_string(), (min_bound, max_bound));
        self.parameters.insert(name.to_string(), Box::new(param));

        Ok(())
    }

    fn update_parameter(&mut self, name: &str, new_value: ParameterValue) -> ScirsResult<()> {
        if let Some((min_bound, max_bound)) = self.parameter_bounds.get(name) {
            if new_value < *min_bound || new_value > *max_bound {
                return Err(ScirsError::InvalidInput(
                    scirs2_core::error::ErrorContext::new(format!(
                        "Parameter {} value {:?} is out of bounds [{:?}, {:?}]",
                        name, new_value, min_bound, max_bound
                    )),
                ));
            }
        }

        self.current_values
            .insert(name.to_string(), new_value.clone());

        if let Some(param) = self.parameters.get_mut(name) {
            param.set_value(new_value)?;
        }

        Ok(())
    }

    fn current_values(&self) -> &HashMap<String, ParameterValue> {
        &self.current_values
    }

    fn get_bounds(&self, name: &str) -> Option<&(ParameterValue, ParameterValue)> {
        self.parameter_bounds.get(name)
    }
}

/// Type-erased interface over a tunable parameter.
trait TunableParam {
    fn set_value(&mut self, value: ParameterValue) -> ScirsResult<()>;
    fn get_value(&self) -> ParameterValue;
    fn get_bounds(&self) -> (ParameterValue, ParameterValue);
}

/// A typed tunable parameter with inclusive bounds and an adaptation rate.
#[derive(Debug, Clone)]
pub struct TunableParameter<T> {
    pub current_value: T,
    pub min_value: T,
    pub max_value: T,
    pub adaptation_rate: f64,
}

impl<T> TunableParameter<T>
where
    T: Clone + PartialOrd + std::fmt::Debug + 'static + Send + Sync,
{
    /// Creates a parameter with the given current value and inclusive bounds.
    pub fn new(current: T, min: T, max: T) -> Self {
        Self {
            current_value: current,
            min_value: min,
            max_value: max,
            adaptation_rate: 0.1,
        }
    }

    /// Sets the adaptation rate (builder style).
    pub fn with_adaptation_rate(mut self, rate: f64) -> Self {
        self.adaptation_rate = rate;
        self
    }
}

impl<T> TunableParam for TunableParameter<T>
where
    T: Clone + PartialOrd + std::fmt::Debug + 'static + Send + Sync,
{
    fn set_value(&mut self, value: ParameterValue) -> ScirsResult<()> {
        use std::any::{Any, TypeId};

        // Dispatch on the concrete type of T and convert the incoming value
        // accordingly; unsupported combinations fall through to the error below.
        let type_id = TypeId::of::<T>();

        if type_id == TypeId::of::<f64>() {
            if let Some(f_val) = value.as_f64() {
                if let Some(self_any) =
                    (&mut self.current_value as &mut dyn Any).downcast_mut::<f64>()
                {
                    *self_any = f_val;
                    return Ok(());
                }
            }
        } else if type_id == TypeId::of::<f32>() {
            if let Some(f_val) = value.as_f64() {
                if let Some(self_any) =
                    (&mut self.current_value as &mut dyn Any).downcast_mut::<f32>()
                {
                    *self_any = f_val as f32;
                    return Ok(());
                }
            }
        } else if type_id == TypeId::of::<i64>() {
            if let Some(i_val) = value.as_i64() {
                if let Some(self_any) =
                    (&mut self.current_value as &mut dyn Any).downcast_mut::<i64>()
                {
                    *self_any = i_val;
                    return Ok(());
                }
            }
        } else if type_id == TypeId::of::<i32>() {
            if let Some(i_val) = value.as_i64() {
                if let Some(self_any) =
                    (&mut self.current_value as &mut dyn Any).downcast_mut::<i32>()
                {
                    *self_any = i_val as i32;
                    return Ok(());
                }
            }
        } else if type_id == TypeId::of::<usize>() {
            if let Some(i_val) = value.as_i64() {
                if i_val >= 0 {
                    if let Some(self_any) =
                        (&mut self.current_value as &mut dyn Any).downcast_mut::<usize>()
                    {
                        *self_any = i_val as usize;
                        return Ok(());
                    }
                }
            }
        } else if type_id == TypeId::of::<bool>() {
            if let Some(b_val) = value.as_bool() {
                if let Some(self_any) =
                    (&mut self.current_value as &mut dyn Any).downcast_mut::<bool>()
                {
                    *self_any = b_val;
                    return Ok(());
                }
            }
        } else if type_id == TypeId::of::<String>() {
            if let ParameterValue::String(ref s_val) = value {
                if let Some(self_any) =
                    (&mut self.current_value as &mut dyn Any).downcast_mut::<String>()
                {
                    *self_any = s_val.clone();
                    return Ok(());
                }
            }
        }

        Err(ScirsError::InvalidInput(
            scirs2_core::error::ErrorContext::new(format!(
                "Cannot convert parameter value {:?} to type {}",
                value,
                std::any::type_name::<T>()
            )),
        ))
    }

    fn get_value(&self) -> ParameterValue {
        ParameterValue::from_typed(&self.current_value)
    }

    fn get_bounds(&self) -> (ParameterValue, ParameterValue) {
        (
            ParameterValue::from_typed(&self.min_value),
            ParameterValue::from_typed(&self.max_value),
        )
    }
}

/// A type-erased parameter value.
#[derive(Debug, Clone, PartialEq, PartialOrd)]
pub enum ParameterValue {
    Float(f64),
    Integer(i64),
    Boolean(bool),
    String(String),
}

impl ParameterValue {
    fn from_typed<T>(value: &T) -> Self
    where
        T: std::fmt::Debug + 'static,
    {
        use std::any::{Any, TypeId};

        let type_id = TypeId::of::<T>();

        if type_id == TypeId::of::<f64>() {
            if let Some(f_val) = (value as &dyn Any).downcast_ref::<f64>() {
                return ParameterValue::Float(*f_val);
            }
        } else if type_id == TypeId::of::<f32>() {
            if let Some(f_val) = (value as &dyn Any).downcast_ref::<f32>() {
                return ParameterValue::Float(*f_val as f64);
            }
        } else if type_id == TypeId::of::<i64>() {
            if let Some(i_val) = (value as &dyn Any).downcast_ref::<i64>() {
                return ParameterValue::Integer(*i_val);
            }
        } else if type_id == TypeId::of::<i32>() {
            if let Some(i_val) = (value as &dyn Any).downcast_ref::<i32>() {
                return ParameterValue::Integer(*i_val as i64);
            }
        } else if type_id == TypeId::of::<usize>() {
            if let Some(u_val) = (value as &dyn Any).downcast_ref::<usize>() {
                return ParameterValue::Integer(*u_val as i64);
            }
        } else if type_id == TypeId::of::<bool>() {
            if let Some(b_val) = (value as &dyn Any).downcast_ref::<bool>() {
                return ParameterValue::Boolean(*b_val);
            }
        } else if type_id == TypeId::of::<String>() {
            if let Some(s_val) = (value as &dyn Any).downcast_ref::<String>() {
                return ParameterValue::String(s_val.clone());
            }
        } else if type_id == TypeId::of::<&str>() {
            if let Some(s_val) = (value as &dyn Any).downcast_ref::<&str>() {
                return ParameterValue::String(s_val.to_string());
            }
        }

        // Fallback: try to parse the Debug representation as a float;
        // otherwise default to 0.0.
        let debug_str = format!("{:?}", value);
        if let Ok(f_val) = debug_str.parse::<f64>() {
            ParameterValue::Float(f_val)
        } else {
            ParameterValue::Float(0.0)
        }
    }
453
454 pub fn as_f64(&self) -> Option<f64> {
456 match self {
457 ParameterValue::Float(f) => Some(*f),
458 ParameterValue::Integer(i) => Some(*i as f64),
459 _ => None,
460 }
461 }
462
463 pub fn as_i64(&self) -> Option<i64> {
465 match self {
466 ParameterValue::Integer(i) => Some(*i),
467 ParameterValue::Float(f) => Some(*f as i64),
468 _ => None,
469 }
470 }
471
472 pub fn as_bool(&self) -> Option<bool> {
474 match self {
475 ParameterValue::Boolean(b) => Some(*b),
476 _ => None,
477 }
478 }
479}

/// Tracks recent optimization performance within a sliding window.
pub struct PerformanceTracker {
    memory_window: usize,
    function_values: VecDeque<f64>,
    gradient_norms: VecDeque<f64>,
    improvements: VecDeque<f64>,
    iterations: VecDeque<usize>,
    timestamps: VecDeque<Instant>,
}

impl PerformanceTracker {
    fn new(memory_window: usize) -> Self {
        Self {
            memory_window,
            function_values: VecDeque::new(),
            gradient_norms: VecDeque::new(),
            improvements: VecDeque::new(),
            iterations: VecDeque::new(),
            timestamps: VecDeque::new(),
        }
    }

    fn record_performance(
        &mut self,
        iteration: usize,
        function_value: f64,
        gradient_norm: Option<f64>,
        improvement: f64,
    ) {
        // Evict the oldest sample once the window is full.
        if self.function_values.len() >= self.memory_window {
            self.function_values.pop_front();
            self.improvements.pop_front();
            self.iterations.pop_front();
            self.timestamps.pop_front();
            if !self.gradient_norms.is_empty() {
                self.gradient_norms.pop_front();
            }
        }

        self.function_values.push_back(function_value);
        self.improvements.push_back(improvement);
        self.iterations.push_back(iteration);
        self.timestamps.push_back(Instant::now());

        if let Some(grad_norm) = gradient_norm {
            self.gradient_norms.push_back(grad_norm);
        }
    }

    fn compute_metrics(&self) -> PerformanceMetrics {
        let convergence_rate = self.compute_convergence_rate();
        // Guard against division by zero when no samples have been recorded.
        let average_improvement = if self.improvements.is_empty() {
            0.0
        } else {
            self.improvements.iter().sum::<f64>() / self.improvements.len() as f64
        };
        let stability_score = self.compute_stability_score();
        let progress_rate = self.compute_progress_rate();

        PerformanceMetrics {
            convergence_rate,
            average_improvement,
            stability_score,
            progress_rate,
            current_function_value: self.function_values.back().copied().unwrap_or(0.0),
            current_gradient_norm: self.gradient_norms.back().copied(),
        }
    }

    fn compute_convergence_rate(&self) -> f64 {
        if self.function_values.len() < 2 {
            return 0.0;
        }

        let mut rates = Vec::new();
        let values: Vec<f64> = self.function_values.iter().copied().collect();

        for i in 1..values.len() {
            if values[i - 1] != 0.0 && values[i - 1] != values[i] {
                let rate = (values[i - 1] - values[i]).abs() / values[i - 1].abs();
                if rate.is_finite() {
                    rates.push(rate);
                }
            }
        }

        if rates.is_empty() {
            0.0
        } else {
            rates.iter().sum::<f64>() / rates.len() as f64
        }
    }

    // Stability is scored as 1 / (1 + sigma), where sigma is the standard
    // deviation of recent improvements; constant improvements score 1.0.
    fn compute_stability_score(&self) -> f64 {
        if self.improvements.len() < 2 {
            return 1.0;
        }

        let improvements: Vec<f64> = self.improvements.iter().copied().collect();
        let mean = improvements.iter().sum::<f64>() / improvements.len() as f64;
        let variance = improvements
            .iter()
            .map(|&x| (x - mean).powi(2))
            .sum::<f64>()
            / improvements.len() as f64;

        let std_dev = variance.sqrt();

        if std_dev == 0.0 {
            1.0
        } else {
            1.0 / (1.0 + std_dev)
        }
    }

    fn compute_progress_rate(&self) -> f64 {
        if self.iterations.len() < 2 || self.timestamps.len() < 2 {
            return 0.0;
        }

        let time_elapsed = self
            .timestamps
            .back()
            .unwrap()
            .duration_since(*self.timestamps.front().unwrap())
            .as_secs_f64();

        if time_elapsed == 0.0 {
            return 0.0;
        }

        // Iterations processed per second over the window.
        let iteration_count = self.iterations.len() as f64;
        iteration_count / time_elapsed
    }
}

/// Summary metrics computed over the recent performance window.
#[derive(Debug, Clone)]
pub struct PerformanceMetrics {
    /// Mean relative change in the objective between consecutive samples.
    pub convergence_rate: f64,
    /// Mean improvement per recorded sample.
    pub average_improvement: f64,
    /// Stability of recent improvements in (0, 1]; higher is steadier.
    pub stability_score: f64,
    /// Iterations processed per second over the window.
    pub progress_rate: f64,
    /// Most recent objective value.
    pub current_function_value: f64,
    /// Most recent gradient norm, if available.
    pub current_gradient_norm: Option<f64>,
}

/// Applies the configured adaptation strategy to the parameter set.
struct AdaptationEngine {
    strategy: AdaptationStrategy,
    rl_agent: Option<ReinforcementLearningAgent>,
    bayesian_optimizer: Option<BayesianParameterOptimizer>,
}

impl AdaptationEngine {
    fn new(strategy: AdaptationStrategy) -> Self {
        let rl_agent = if matches!(
            strategy,
            AdaptationStrategy::ReinforcementLearning | AdaptationStrategy::Hybrid
        ) {
            Some(ReinforcementLearningAgent::new())
        } else {
            None
        };

        let bayesian_optimizer = if matches!(
            strategy,
            AdaptationStrategy::BayesianOptimization | AdaptationStrategy::Hybrid
        ) {
            Some(BayesianParameterOptimizer::new())
        } else {
            None
        };

        Self {
            strategy,
            rl_agent,
            bayesian_optimizer,
        }
    }

    fn adapt_parameters(
        &mut self,
        parameter_manager: &mut ParameterManager,
        metrics: &PerformanceMetrics,
        config: &SelfTuningConfig,
    ) -> ScirsResult<AdaptationResult> {
        match self.strategy {
            AdaptationStrategy::PerformanceBased => {
                self.performance_based_adaptation(parameter_manager, metrics, config)
            }
            AdaptationStrategy::ConvergenceBased => {
                self.convergence_based_adaptation(parameter_manager, metrics, config)
            }
            AdaptationStrategy::ReinforcementLearning => {
                // Temporarily take the agent so it can be borrowed mutably
                // alongside `self`.
                if let Some(mut agent) = self.rl_agent.take() {
                    let result =
                        self.rl_based_adaptation(&mut agent, parameter_manager, metrics, config);
                    self.rl_agent = Some(agent);
                    result
                } else {
                    self.performance_based_adaptation(parameter_manager, metrics, config)
                }
            }
            AdaptationStrategy::BayesianOptimization => {
                if self.bayesian_optimizer.is_some() {
                    self.bayesian_adaptation(parameter_manager, metrics, config)
                } else {
                    self.performance_based_adaptation(parameter_manager, metrics, config)
                }
            }
            AdaptationStrategy::Hybrid => {
                self.hybrid_adaptation(parameter_manager, metrics, config)
            }
        }
    }

    fn performance_based_adaptation(
        &self,
        parameter_manager: &mut ParameterManager,
        metrics: &PerformanceMetrics,
        config: &SelfTuningConfig,
    ) -> ScirsResult<AdaptationResult> {
        let mut changes = Vec::new();
        let mut parameters_changed = false;

        if metrics.convergence_rate < 0.001 {
            // Convergence has stalled: increase step-size-like parameters.
            for (name, value) in parameter_manager.current_values().clone() {
                if name.contains("learning_rate") || name.contains("step_size") {
                    let old_value = value.clone();
                    if let Some(new_value) =
                        self.increase_parameter(value, 1.1, parameter_manager.get_bounds(&name))
                    {
                        parameter_manager.update_parameter(&name, new_value.clone())?;
                        changes.push(ParameterChange {
                            name: name.clone(),
                            old_value,
                            new_value,
                            reason: "Increase step size for slow convergence".to_string(),
                        });
                        parameters_changed = true;
                    }
                }
            }
        } else if metrics.convergence_rate > 0.1 {
            // Convergence is fast but possibly unstable: decrease step sizes.
            for (name, value) in parameter_manager.current_values().clone() {
                if name.contains("learning_rate") || name.contains("step_size") {
                    let old_value = value.clone();
                    if let Some(new_value) =
                        self.decrease_parameter(value, 0.9, parameter_manager.get_bounds(&name))
                    {
                        parameter_manager.update_parameter(&name, new_value.clone())?;
                        changes.push(ParameterChange {
                            name: name.clone(),
                            old_value,
                            new_value,
                            reason: "Decrease step size for fast convergence".to_string(),
                        });
                        parameters_changed = true;
                    }
                }
            }
        }

        Ok(AdaptationResult {
            parameters_changed,
            changes,
            strategy_used: AdaptationStrategy::PerformanceBased,
        })
    }

    fn convergence_based_adaptation(
        &self,
        parameter_manager: &mut ParameterManager,
        metrics: &PerformanceMetrics,
        config: &SelfTuningConfig,
    ) -> ScirsResult<AdaptationResult> {
        // Currently delegates to the performance-based heuristics.
        self.performance_based_adaptation(parameter_manager, metrics, config)
    }

    fn rl_based_adaptation(
        &mut self,
        agent: &mut ReinforcementLearningAgent,
        parameter_manager: &mut ParameterManager,
        metrics: &PerformanceMetrics,
        config: &SelfTuningConfig,
    ) -> ScirsResult<AdaptationResult> {
        let action = agent.select_action(metrics);
        let changes = agent.apply_action(action, parameter_manager)?;

        Ok(AdaptationResult {
            parameters_changed: !changes.is_empty(),
            changes,
            strategy_used: AdaptationStrategy::ReinforcementLearning,
        })
    }

    fn bayesian_adaptation(
        &mut self,
        parameter_manager: &mut ParameterManager,
        metrics: &PerformanceMetrics,
        config: &SelfTuningConfig,
    ) -> ScirsResult<AdaptationResult> {
        let suggestions = if let Some(ref mut optimizer) = self.bayesian_optimizer {
            optimizer.suggest_parameters(parameter_manager.current_values(), metrics)?
        } else {
            return Err(ScirsError::ComputationError(
                scirs2_core::error::ErrorContext::new("Bayesian optimizer not available"),
            ));
        };
        let mut changes = Vec::new();

        for (name, new_value) in suggestions {
            if let Some(old_value) = parameter_manager.current_values().get(&name) {
                let old_value_clone = old_value.clone();
                parameter_manager.update_parameter(&name, new_value.clone())?;
                changes.push(ParameterChange {
                    name: name.clone(),
                    old_value: old_value_clone,
                    new_value,
                    reason: "Bayesian optimization suggestion".to_string(),
                });
            }
        }

        Ok(AdaptationResult {
            parameters_changed: !changes.is_empty(),
            changes,
            strategy_used: AdaptationStrategy::BayesianOptimization,
        })
    }

    fn hybrid_adaptation(
        &mut self,
        parameter_manager: &mut ParameterManager,
        metrics: &PerformanceMetrics,
        config: &SelfTuningConfig,
    ) -> ScirsResult<AdaptationResult> {
        if metrics.stability_score < 0.5 {
            // Unstable progress: fall back to the conservative heuristics.
            self.performance_based_adaptation(parameter_manager, metrics, config)
        } else if config.use_bayesian_tuning && self.bayesian_optimizer.is_some() {
            self.bayesian_adaptation(parameter_manager, metrics, config)
        } else {
            self.performance_based_adaptation(parameter_manager, metrics, config)
        }
    }

    fn increase_parameter(
        &self,
        value: ParameterValue,
        factor: f64,
        bounds: Option<&(ParameterValue, ParameterValue)>,
    ) -> Option<ParameterValue> {
        match value {
            ParameterValue::Float(f) => {
                let new_value = f * factor;
                if let Some((_, max_bound)) = bounds {
                    if let Some(max_f) = max_bound.as_f64() {
                        if new_value <= max_f {
                            Some(ParameterValue::Float(new_value))
                        } else {
                            None
                        }
                    } else {
                        Some(ParameterValue::Float(new_value))
                    }
                } else {
                    Some(ParameterValue::Float(new_value))
                }
            }
            ParameterValue::Integer(i) => {
                let new_value = ((i as f64) * factor) as i64;
                if let Some((_, max_bound)) = bounds {
                    if let Some(max_i) = max_bound.as_i64() {
                        if new_value <= max_i {
                            Some(ParameterValue::Integer(new_value))
                        } else {
                            None
                        }
                    } else {
                        Some(ParameterValue::Integer(new_value))
                    }
                } else {
                    Some(ParameterValue::Integer(new_value))
                }
            }
            _ => None,
        }
    }

    fn decrease_parameter(
        &self,
        value: ParameterValue,
        factor: f64,
        bounds: Option<&(ParameterValue, ParameterValue)>,
    ) -> Option<ParameterValue> {
        match value {
            ParameterValue::Float(f) => {
                let new_value = f * factor;
                if let Some((min_bound, _)) = bounds {
                    if let Some(min_f) = min_bound.as_f64() {
                        if new_value >= min_f {
                            Some(ParameterValue::Float(new_value))
                        } else {
                            None
                        }
                    } else {
                        Some(ParameterValue::Float(new_value))
                    }
                } else {
                    Some(ParameterValue::Float(new_value))
                }
            }
            ParameterValue::Integer(i) => {
                let new_value = ((i as f64) * factor) as i64;
                if let Some((min_bound, _)) = bounds {
                    if let Some(min_i) = min_bound.as_i64() {
                        if new_value >= min_i {
                            Some(ParameterValue::Integer(new_value))
                        } else {
                            None
                        }
                    } else {
                        Some(ParameterValue::Integer(new_value))
                    }
                } else {
                    Some(ParameterValue::Integer(new_value))
                }
            }
            _ => None,
        }
    }
}

/// Result of one adaptation pass.
#[derive(Debug, Clone)]
pub struct AdaptationResult {
    /// Whether any parameter was changed.
    pub parameters_changed: bool,
    /// The individual parameter changes that were applied.
    pub changes: Vec<ParameterChange>,
    /// The strategy that produced the changes.
    pub strategy_used: AdaptationStrategy,
}

/// A single recorded change to a parameter.
#[derive(Debug, Clone)]
pub struct ParameterChange {
    /// Name of the parameter.
    pub name: String,
    /// Value before the change.
    pub old_value: ParameterValue,
    /// Value after the change.
    pub new_value: ParameterValue,
    /// Human-readable reason for the change.
    pub reason: String,
}

/// Chronological record of parameter adaptations.
pub struct TuningHistory {
    adaptations: Vec<AdaptationRecord>,
}

impl TuningHistory {
    fn new() -> Self {
        Self {
            adaptations: Vec::new(),
        }
    }

    fn record_adaptation(
        &mut self,
        iteration: usize,
        result: AdaptationResult,
        metrics: PerformanceMetrics,
    ) {
        self.adaptations.push(AdaptationRecord {
            iteration,
            result,
            metrics,
            timestamp: Instant::now(),
        });
    }
}

#[derive(Debug, Clone)]
struct AdaptationRecord {
    iteration: usize,
    result: AdaptationResult,
    metrics: PerformanceMetrics,
    timestamp: Instant,
}

/// Minimal reinforcement-learning agent for parameter adaptation.
struct ReinforcementLearningAgent {
    q_table: HashMap<String, f64>,
    epsilon: f64,
    learning_rate: f64,
    discount_factor: f64,
}

impl ReinforcementLearningAgent {
    fn new() -> Self {
        Self {
            q_table: HashMap::new(),
            epsilon: 0.1,
            learning_rate: 0.1,
            discount_factor: 0.9,
        }
    }

    // Simple heuristic policy: explore more when convergence stalls,
    // less when it is already fast.
    fn select_action(&self, metrics: &PerformanceMetrics) -> RLAction {
        if metrics.convergence_rate < 0.01 {
            RLAction::IncreaseExploration
        } else if metrics.convergence_rate > 0.1 {
            RLAction::DecreaseExploration
        } else {
            RLAction::MaintainParameters
        }
    }

    fn apply_action(
        &self,
        action: RLAction,
        parameter_manager: &mut ParameterManager,
    ) -> ScirsResult<Vec<ParameterChange>> {
        let mut changes = Vec::new();

        match action {
            RLAction::IncreaseExploration => {
                for (name, value) in parameter_manager.current_values().clone() {
                    if name.contains("step_size")
                        || name.contains("learning_rate")
                        || name.contains("f_scale")
                    {
                        if let Some(new_value) = self.multiply_parameter(
                            value.clone(),
                            1.2,
                            parameter_manager.get_bounds(&name),
                        ) {
                            parameter_manager.update_parameter(&name, new_value.clone())?;
                            changes.push(ParameterChange {
                                name: name.clone(),
                                old_value: value,
                                new_value,
                                reason: "RL: Increase exploration".to_string(),
                            });
                        }
                    }
                }
            }
            RLAction::DecreaseExploration => {
                for (name, value) in parameter_manager.current_values().clone() {
                    if name.contains("step_size")
                        || name.contains("learning_rate")
                        || name.contains("f_scale")
                    {
                        if let Some(new_value) = self.multiply_parameter(
                            value.clone(),
                            0.8,
                            parameter_manager.get_bounds(&name),
                        ) {
                            parameter_manager.update_parameter(&name, new_value.clone())?;
                            changes.push(ParameterChange {
                                name: name.clone(),
                                old_value: value,
                                new_value,
                                reason: "RL: Decrease exploration".to_string(),
                            });
                        }
                    }
                }
            }
            RLAction::MaintainParameters => {}
        }

        Ok(changes)
    }

    fn multiply_parameter(
        &self,
        value: ParameterValue,
        factor: f64,
        bounds: Option<&(ParameterValue, ParameterValue)>,
    ) -> Option<ParameterValue> {
        match value {
            ParameterValue::Float(f) => {
                let new_value = f * factor;
                if let Some((min_bound, max_bound)) = bounds {
                    if let (Some(min_f), Some(max_f)) = (min_bound.as_f64(), max_bound.as_f64()) {
                        if new_value >= min_f && new_value <= max_f {
                            Some(ParameterValue::Float(new_value))
                        } else {
                            None
                        }
                    } else {
                        Some(ParameterValue::Float(new_value))
                    }
                } else {
                    Some(ParameterValue::Float(new_value))
                }
            }
            ParameterValue::Integer(i) => {
                let new_value = ((i as f64) * factor) as i64;
                if let Some((min_bound, max_bound)) = bounds {
                    if let (Some(min_i), Some(max_i)) = (min_bound.as_i64(), max_bound.as_i64()) {
                        if new_value >= min_i && new_value <= max_i {
                            Some(ParameterValue::Integer(new_value))
                        } else {
                            None
                        }
                    } else {
                        Some(ParameterValue::Integer(new_value))
                    }
                } else {
                    Some(ParameterValue::Integer(new_value))
                }
            }
            _ => None,
        }
    }
}

#[derive(Debug, Clone, Copy)]
enum RLAction {
    IncreaseExploration,
    DecreaseExploration,
    MaintainParameters,
}

/// Lightweight parameter suggester that exploits the best observation so far;
/// a stand-in for a full Bayesian optimizer.
struct BayesianParameterOptimizer {
    observations: Vec<(HashMap<String, ParameterValue>, f64)>,
}

impl BayesianParameterOptimizer {
    fn new() -> Self {
        Self {
            observations: Vec::new(),
        }
    }

    fn suggest_parameters(
        &mut self,
        current_params: &HashMap<String, ParameterValue>,
        metrics: &PerformanceMetrics,
    ) -> ScirsResult<HashMap<String, ParameterValue>> {
        // Record the current configuration with its latest objective value.
        self.observations
            .push((current_params.clone(), metrics.current_function_value));

        let mut suggestions = HashMap::new();

        if self.observations.len() >= 2 {
            // Exploit: move each parameter toward the best configuration
            // observed so far.
            let best_observation = self
                .observations
                .iter()
                .min_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal));

            if let Some(best_params) = best_observation {
                for (name, value) in current_params {
                    if let Some(best_value) = best_params.0.get(name) {
                        let suggested_value =
                            self.interpolate_toward_best(value.clone(), best_value.clone(), 0.3);
                        if let Some(new_value) = suggested_value {
                            if new_value != *value {
                                suggestions.insert(name.clone(), new_value);
                            }
                        }
                    }
                }
            }
        } else {
            // Explore: too few observations, so randomly perturb
            // step-size-like parameters.
            for (name, value) in current_params {
                if name.contains("step_size") || name.contains("learning_rate") {
                    use rand::Rng;
                    let mut rng = rand::rng();
                    let perturbation_factor = 1.0 + rng.random_range(-0.1..=0.1);

                    let perturbed_value = match value {
                        ParameterValue::Float(f) => {
                            Some(ParameterValue::Float(f * perturbation_factor))
                        }
                        ParameterValue::Integer(i) => {
                            let new_val = ((*i as f64) * perturbation_factor) as i64;
                            Some(ParameterValue::Integer(new_val.max(1)))
                        }
                        _ => None,
                    };

                    if let Some(new_value) = perturbed_value {
                        if new_value != *value {
                            suggestions.insert(name.clone(), new_value);
                        }
                    }
                }
            }
        }

        Ok(suggestions)
    }

    // Linear interpolation: new = (1 - alpha) * current + alpha * best.
    fn interpolate_toward_best(
        &self,
        current: ParameterValue,
        best: ParameterValue,
        alpha: f64,
    ) -> Option<ParameterValue> {
        match (current, best) {
            (ParameterValue::Float(curr), ParameterValue::Float(best_val)) => {
                let interpolated = curr * (1.0 - alpha) + best_val * alpha;
                Some(ParameterValue::Float(interpolated))
            }
            (ParameterValue::Integer(curr), ParameterValue::Integer(best_val)) => {
                let interpolated =
                    ((curr as f64) * (1.0 - alpha) + (best_val as f64) * alpha) as i64;
                Some(ParameterValue::Integer(interpolated))
            }
            _ => None,
        }
    }
}

/// Ready-made configurations and parameter sets for common optimizers.
pub mod presets {
    use super::*;

    /// Self-tuning configuration suited to differential evolution.
    pub fn differential_evolution_config() -> SelfTuningConfig {
        SelfTuningConfig {
            adaptation_strategy: AdaptationStrategy::PerformanceBased,
            update_frequency: 25,
            learning_rate: 0.05,
            memory_window: 50,
            use_bayesian_tuning: false,
            exploration_factor: 0.15,
        }
    }

    /// Self-tuning configuration suited to gradient-based methods.
    pub fn gradient_based_config() -> SelfTuningConfig {
        SelfTuningConfig {
            adaptation_strategy: AdaptationStrategy::ConvergenceBased,
            update_frequency: 10,
            learning_rate: 0.2,
            memory_window: 20,
            use_bayesian_tuning: true,
            exploration_factor: 0.05,
        }
    }

    /// Self-tuning configuration suited to particle swarm optimization.
    pub fn particle_swarm_config() -> SelfTuningConfig {
        SelfTuningConfig {
            adaptation_strategy: AdaptationStrategy::Hybrid,
            update_frequency: 30,
            learning_rate: 0.1,
            memory_window: 75,
            use_bayesian_tuning: true,
            exploration_factor: 0.2,
        }
    }

    /// Tunable parameters for BFGS-style optimizers.
    pub fn bfgs_parameters() -> HashMap<String, TunableParameter<f64>> {
        let mut params = HashMap::new();

        params.insert(
            "line_search_tolerance".to_string(),
            TunableParameter::new(1e-4, 1e-8, 1e-1),
        );

        params.insert(
            "gradient_tolerance".to_string(),
            TunableParameter::new(1e-5, 1e-12, 1e-2),
        );

        params
    }

    /// Tunable parameters for differential evolution.
    pub fn differential_evolution_parameters() -> HashMap<String, TunableParameter<f64>> {
        let mut params = HashMap::new();

        params.insert("f_scale".to_string(), TunableParameter::new(0.8, 0.1, 2.0));

        params.insert(
            "crossover_rate".to_string(),
            TunableParameter::new(0.7, 0.1, 1.0),
        );

        params
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_self_tuning_config() {
        let config = SelfTuningConfig::default();
        assert_eq!(
            config.adaptation_strategy,
            AdaptationStrategy::PerformanceBased
        );
        assert_eq!(config.update_frequency, 50);
        assert!(config.use_bayesian_tuning);
    }

    #[test]
    fn test_parameter_value() {
        let float_val = ParameterValue::Float(3.5);
        assert_eq!(float_val.as_f64(), Some(3.5));

        let int_val = ParameterValue::Integer(42);
        assert_eq!(int_val.as_i64(), Some(42));
        assert_eq!(int_val.as_f64(), Some(42.0));

        let bool_val = ParameterValue::Boolean(true);
        assert_eq!(bool_val.as_bool(), Some(true));
    }
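
    // Added check of the type-erased conversion path; `from_typed` is private
    // but reachable from this in-file test module. The expected values follow
    // directly from the `from_typed` branches above.
    #[test]
    fn test_parameter_value_from_typed() {
        assert_eq!(
            ParameterValue::from_typed(&1.5f64),
            ParameterValue::Float(1.5)
        );
        assert_eq!(
            ParameterValue::from_typed(&7usize),
            ParameterValue::Integer(7)
        );
        assert_eq!(
            ParameterValue::from_typed(&true),
            ParameterValue::Boolean(true)
        );
        assert_eq!(
            ParameterValue::from_typed(&"adam".to_string()),
            ParameterValue::String("adam".to_string())
        );
    }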

    #[test]
    fn test_tunable_parameter() {
        let param = TunableParameter::new(1.0, 0.0, 10.0).with_adaptation_rate(0.2);

        assert_eq!(param.adaptation_rate, 0.2);
        assert_eq!(param.current_value, 1.0);
    }
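
    // Added check of the RL agent's bounded multiplicative updates: a factor
    // that would push a value past its max bound yields `None`, leaving the
    // parameter untouched. (0.5 * 1.2 is exactly representable as 0.6 in f64,
    // so the exact equality below is safe.)
    #[test]
    fn test_multiply_parameter_respects_bounds() {
        let agent = ReinforcementLearningAgent::new();
        let bounds = (ParameterValue::Float(0.0), ParameterValue::Float(1.0));

        let scaled = agent.multiply_parameter(ParameterValue::Float(0.5), 1.2, Some(&bounds));
        assert_eq!(scaled, Some(ParameterValue::Float(0.6)));

        let clipped = agent.multiply_parameter(ParameterValue::Float(0.9), 1.2, Some(&bounds));
        assert_eq!(clipped, None);
    }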

    #[test]
    fn test_performance_tracker() {
        let mut tracker = PerformanceTracker::new(10);

        tracker.record_performance(1, 100.0, Some(10.0), 5.0);
        tracker.record_performance(2, 95.0, Some(8.0), 5.0);
        tracker.record_performance(3, 90.0, Some(6.0), 5.0);

        let metrics = tracker.compute_metrics();
        assert!(metrics.convergence_rate > 0.0);
        assert_eq!(metrics.average_improvement, 5.0);
        assert!(metrics.stability_score > 0.0);
    }
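
    // Added check of the stability score: constant improvements give exactly
    // 1.0, while noisy improvements score strictly below 1.0, matching the
    // 1 / (1 + sigma) formula in `compute_stability_score`.
    #[test]
    fn test_stability_score_reflects_noise() {
        let mut steady = PerformanceTracker::new(10);
        steady.record_performance(1, 100.0, None, 5.0);
        steady.record_performance(2, 95.0, None, 5.0);
        assert_eq!(steady.compute_metrics().stability_score, 1.0);

        let mut noisy = PerformanceTracker::new(10);
        noisy.record_performance(1, 100.0, None, 1.0);
        noisy.record_performance(2, 90.0, None, 10.0);
        noisy.record_performance(3, 89.0, None, 1.0);
        assert!(noisy.compute_metrics().stability_score < 1.0);
    }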

    #[test]
    fn test_parameter_manager() {
        let mut manager = ParameterManager::new();
        let param = TunableParameter::new(1.0, 0.0, 10.0);

        manager.register("test_param", param).unwrap();
        assert!(manager.current_values().contains_key("test_param"));

        let new_value = ParameterValue::Float(2.0);
        manager
            .update_parameter("test_param", new_value.clone())
            .unwrap();
        assert_eq!(manager.current_values()["test_param"], new_value);
    }
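
    // Added check that out-of-bounds updates are rejected; this exercises the
    // bounds validation in `ParameterManager::update_parameter`.
    #[test]
    fn test_parameter_manager_rejects_out_of_bounds() {
        let mut manager = ParameterManager::new();
        manager
            .register("test_param", TunableParameter::new(1.0, 0.0, 10.0))
            .unwrap();

        // 20.0 exceeds the max bound of 10.0, so the update must fail and
        // the stored value must remain unchanged.
        let result = manager.update_parameter("test_param", ParameterValue::Float(20.0));
        assert!(result.is_err());
        assert_eq!(
            manager.current_values()["test_param"],
            ParameterValue::Float(1.0)
        );
    }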

    #[test]
    fn test_presets() {
        let de_config = presets::differential_evolution_config();
        assert_eq!(de_config.update_frequency, 25);

        let grad_config = presets::gradient_based_config();
        assert_eq!(
            grad_config.adaptation_strategy,
            AdaptationStrategy::ConvergenceBased
        );

        let de_params = presets::differential_evolution_parameters();
        assert!(de_params.contains_key("f_scale"));
        assert!(de_params.contains_key("crossover_rate"));
    }
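
    // An end-to-end sketch of the tuning loop: with the performance-based
    // strategy, a flat objective keeps the measured convergence rate below
    // 0.001, so learning-rate-like parameters should be nudged up by the 1.1
    // factor used in `performance_based_adaptation`.
    #[test]
    fn test_self_tuning_loop_increases_stalled_learning_rate() {
        let config = SelfTuningConfig {
            adaptation_strategy: AdaptationStrategy::PerformanceBased,
            update_frequency: 5,
            ..SelfTuningConfig::default()
        };
        let mut optimizer = SelfTuningOptimizer::new(config);
        optimizer
            .register_parameter("learning_rate", TunableParameter::new(0.1, 1e-6, 1.0))
            .unwrap();

        // Feed a flat objective; adaptation fires at iteration 5.
        let mut changed = false;
        for iteration in 1..=5 {
            changed = optimizer
                .update_parameters(iteration, 100.0, Some(1.0), 0.0)
                .unwrap();
        }

        assert!(changed);
        let lr = optimizer.get_parameters()["learning_rate"]
            .as_f64()
            .unwrap();
        assert!(lr > 0.1);
    }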
}