1use crate::error::OptimizeError;
14use scirs2_core::ndarray::ArrayView1;
15use std::collections::VecDeque;
16use std::time::{Duration, Instant};
17
/// Final, assembled diagnostics report for a completed optimization run.
///
/// Produced by [`DiagnosticCollector::finalize`].
#[derive(Debug, Clone)]
pub struct ConvergenceDiagnostics {
    /// Per-iteration records in chronological order.
    pub nit: Vec<IterationDiagnostic>,
    /// Whole-run counters and timing statistics.
    pub performance_metrics: PerformanceMetrics,
    /// Convergence-rate / phase / stagnation analysis.
    pub convergence_analysis: ConvergenceAnalysis,
    /// Heuristic assessment of the problem itself (difficulty, conditioning, noise).
    pub problem_analysis: ProblemAnalysis,
    /// Issues detected during the run, ordered as generated.
    pub warnings: Vec<DiagnosticWarning>,
}
32
/// Snapshot of a single optimizer iteration as recorded by the collector.
#[derive(Debug, Clone)]
pub struct IterationDiagnostic {
    /// Zero-based iteration index.
    pub iteration: usize,
    /// Objective value at this iterate.
    pub f_value: f64,
    /// Gradient norm (the collector uses the L1 norm: sum of absolute components).
    pub grad_norm: f64,
    /// Step norm (L1 norm of the step vector), used as the step size.
    pub step_size: f64,
    /// L1 norm of the search direction.
    pub direction_norm: f64,
    /// Line-search bookkeeping for this iteration.
    pub line_search: LineSearchDiagnostic,
    /// Change/optimality metrics computed against the previous iterate.
    pub convergence_metrics: ConvergenceMetrics,
    /// Wall-clock time attributed to this iteration.
    pub iteration_time: Duration,
    /// Wall-clock time since the collector was created.
    pub total_time: Duration,
    /// Memory usage in bytes, if tracked (the collector currently records `None`).
    pub memory_usage: Option<usize>,
}
57
/// Line-search statistics for one iteration.
#[derive(Debug, Clone)]
pub struct LineSearchDiagnostic {
    /// Function evaluations consumed by the line search.
    pub n_fev: usize,
    /// Gradient evaluations consumed by the line search.
    pub n_gev: usize,
    /// Accepted step length.
    pub alpha: f64,
    /// Initial trial step length.
    pub alpha_init: f64,
    /// Whether the line search terminated successfully.
    pub success: bool,
    /// (sufficient-decrease, curvature) Wolfe condition flags.
    pub wolfe_satisfied: (bool, bool),
}
74
/// Per-iteration convergence measurements.
#[derive(Debug, Clone)]
pub struct ConvergenceMetrics {
    /// |Δf| / |f_prev| (falls back to |Δf| when f_prev is near zero).
    pub f_rel_change: f64,
    /// Absolute change in the objective value.
    pub f_abs_change: f64,
    /// Gradient norm scaled by 1 + |f|.
    pub grad_rel_norm: f64,
    /// Step norm scaled by 1 + ||x|| (the collector currently uses a placeholder ||x|| = 1).
    pub x_rel_change: f64,
    /// First-order optimality measure (the collector uses the gradient norm).
    pub optimality: f64,
    /// Condition-number estimate if available (currently never populated).
    pub condition_estimate: Option<f64>,
}
91
/// Whole-run counters and timing statistics.
#[derive(Debug, Clone)]
pub struct PerformanceMetrics {
    /// Total number of recorded iterations.
    pub total_nit: usize,
    /// Total function evaluations (summed from line-search diagnostics).
    pub total_fev: usize,
    /// Total gradient evaluations (summed from line-search diagnostics).
    pub total_gev: usize,
    /// Total Hessian evaluations — presumably; the collector always sets 0. TODO confirm.
    pub total_hev: usize,
    /// Total wall-clock time of the run.
    pub total_time: Duration,
    /// Mean wall-clock time per iteration.
    pub avg_iteration_time: Duration,
    /// Function evaluations per second of wall-clock time.
    pub fev_rate: f64,
    /// Derived efficiency ratios.
    pub efficiency: EfficiencyMetrics,
}
112
/// Efficiency ratios derived from the iteration history.
#[derive(Debug, Clone)]
pub struct EfficiencyMetrics {
    /// Objective improvement per function evaluation.
    pub progress_per_fev: f64,
    /// Objective improvement per gradient evaluation.
    pub progress_per_gev: f64,
    /// Mean accepted-step / initial-step ratio across line searches.
    pub line_search_efficiency: f64,
    /// Fraction of iterations whose line search succeeded.
    pub step_acceptance_rate: f64,
    /// Mean relative function reduction per iteration.
    pub avg_reduction_ratio: f64,
}
127
/// High-level convergence assessment for the run.
#[derive(Debug, Clone)]
pub struct ConvergenceAnalysis {
    /// Estimated asymptotic convergence rate.
    pub convergence_rate: ConvergenceRate,
    /// Predicted iterations remaining until convergence, if estimable.
    pub predicted_nit: Option<usize>,
    /// Confidence in the convergence trend, in [0, 1].
    pub confidence_score: f64,
    /// Current phase of the optimization.
    pub convergence_phase: ConvergencePhase,
    /// Stagnation detection result and recommendations.
    pub stagnation: StagnationAnalysis,
}
142
/// Estimated convergence-rate class; the payload is the measured rate value.
#[derive(Debug, Clone)]
pub enum ConvergenceRate {
    /// Faster than linear (estimator threshold: rate > 1.5).
    Superlinear(f64),
    /// Linear convergence (estimator threshold: 0 < rate < 1).
    Linear(f64),
    /// Slower than linear.
    Sublinear(f64),
    /// Not enough data, or no consistent trend.
    Unclear,
}
155
/// Qualitative phase of the optimization trajectory.
#[derive(Debug, Clone, Copy)]
pub enum ConvergencePhase {
    /// Early phase; no clear descent trend yet.
    Exploration,
    /// Large relative function decreases per iteration.
    RapidDescent,
    /// Small gradient, small improvements — polishing the solution.
    FineTuning,
    /// Gradient and function change below convergence thresholds.
    Converged,
    /// Progress has effectively stopped.
    Stagnated,
}
170
/// Result of stagnation detection.
#[derive(Debug, Clone)]
pub struct StagnationAnalysis {
    /// True when recent function values are essentially unchanged.
    pub is_stagnated: bool,
    /// Number of trailing iterations with negligible relative change.
    pub stagnant_nit: usize,
    /// Diagnosed cause of the stagnation.
    pub stagnation_type: StagnationType,
    /// Human-readable remediation suggestions.
    pub recommendations: Vec<String>,
}
183
/// Diagnosed cause of stagnation.
#[derive(Debug, Clone, Copy)]
pub enum StagnationType {
    /// No stagnation detected.
    None,
    /// Function values flat while the gradient is not tiny.
    FunctionPlateau,
    /// Gradient tiny while function values still move.
    GradientPlateau,
    /// Function values alternate up and down.
    Oscillation,
    /// Repeated line-search failures.
    LineSearchFailure,
    /// Both function and gradient at numerical precision limits.
    NumericalLimit,
}
200
/// Heuristic characterization of the optimization problem.
#[derive(Debug, Clone)]
pub struct ProblemAnalysis {
    /// Overall difficulty classification.
    pub difficulty: ProblemDifficulty,
    /// Conditioning estimates, when available.
    pub conditioning: ConditioningAnalysis,
    /// Estimated noise level in function values, if detectable.
    pub noise_level: Option<f64>,
    /// Structural features inferred from the run.
    pub features: Vec<ProblemFeature>,
}
213
/// Overall difficulty classification, from a heuristic score.
#[derive(Debug, Clone, Copy)]
pub enum ProblemDifficulty {
    /// Score < 1: smooth, well-behaved convergence.
    Easy,
    /// Score in [1, 3).
    Moderate,
    /// Score in [3, 5).
    Difficult,
    /// Score >= 5: multiple adverse indicators.
    VeryDifficult,
}
226
/// Conditioning information gathered during the run.
#[derive(Debug, Clone)]
pub struct ConditioningAnalysis {
    /// Most recent condition-number estimate, if any was recorded.
    pub condition_number: Option<f64>,
    /// Full recorded history of condition-number estimates.
    pub condition_history: Vec<f64>,
    /// True when the latest estimate exceeds 1e6.
    pub is_ill_conditioned: bool,
}
237
/// Structural features inferred from the optimization trajectory.
#[derive(Debug, Clone, Copy)]
pub enum ProblemFeature {
    /// Condition estimate below 100.
    WellConditioned,
    /// Condition estimate above 1e6.
    IllConditioned,
    /// Function-value scatter suggests noisy evaluations.
    Noisy,
    /// Non-smooth behavior (not currently emitted by the detector).
    NonSmooth,
    /// Large variance in step sizes.
    StronglyNonlinear,
    /// Multiple scales present (not currently emitted by the detector).
    MultiScale,
    /// Tiny steps despite large gradients.
    NarrowValleys,
}
256
/// A single issue detected during the run.
#[derive(Debug, Clone)]
pub struct DiagnosticWarning {
    /// How serious the issue is.
    pub severity: WarningSeverity,
    /// Human-readable description.
    pub message: String,
    /// Iteration at which the issue was observed.
    pub iteration: usize,
    /// Suggested remediations.
    pub recommendations: Vec<String>,
}
269
/// Warning severity; variants are declared in ascending order so the
/// derived `Ord` sorts Info < Minor < Moderate < Severe.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum WarningSeverity {
    /// Informational only.
    Info,
    /// Minor issue; usually safe to ignore.
    Minor,
    /// Worth investigating.
    Moderate,
    /// Likely prevents a good result.
    Severe,
}
282
/// Incrementally gathers per-iteration diagnostics during optimization and
/// produces a [`ConvergenceDiagnostics`] report via `finalize`.
#[derive(Debug)]
pub struct DiagnosticCollector {
    /// Configuration controlling what is tracked.
    options: DiagnosticOptions,
    /// All recorded iteration snapshots.
    nit: Vec<IterationDiagnostic>,
    /// Wall-clock start of the run (set at construction).
    start_time: Instant,
    /// Rolling window of recent function values.
    f_history: VecDeque<f64>,
    /// Rolling window of recent gradient norms.
    grad_history: VecDeque<f64>,
    /// Rolling window of recent step sizes.
    step_history: VecDeque<f64>,
    /// Rolling window of condition-number estimates (never pushed to in this file).
    condition_history: VecDeque<f64>,
    /// Index assigned to the next recorded iteration.
    current_iteration: usize,
}
303
/// Configuration for [`DiagnosticCollector`].
#[derive(Debug, Clone)]
pub struct DiagnosticOptions {
    /// Track per-iteration diagnostics.
    pub track_nit: bool,
    /// Estimate problem conditioning.
    pub estimate_conditioning: bool,
    /// Attempt to detect noisy function evaluations.
    pub detect_noise: bool,
    /// Estimate the asymptotic convergence rate.
    pub analyze_convergence_rate: bool,
    /// Maximum number of samples kept in each rolling history buffer.
    pub history_window: usize,
    /// Track memory usage (not currently implemented by the collector).
    pub track_memory: bool,
    /// Preferred export format for the final report.
    pub export_format: ExportFormat,
}
322
/// Serialization formats for exporting diagnostics.
#[derive(Debug, Clone, Copy)]
pub enum ExportFormat {
    /// JSON text (see `ConvergenceDiagnostics::to_json`).
    Json,
    /// Comma-separated values (see `ConvergenceDiagnostics::to_csv`).
    Csv,
    /// Raw numeric arrays (see `ConvergenceDiagnostics::to_arrays`).
    Numpy,
}
333
334impl Default for DiagnosticOptions {
335 fn default() -> Self {
336 Self {
337 track_nit: true,
338 estimate_conditioning: true,
339 detect_noise: true,
340 analyze_convergence_rate: true,
341 history_window: 50,
342 track_memory: false,
343 export_format: ExportFormat::Json,
344 }
345 }
346}
347
348impl DiagnosticCollector {
349 pub fn new(options: DiagnosticOptions) -> Self {
351 let history_window = options.history_window;
352 Self {
353 options,
354 nit: Vec::new(),
355 start_time: Instant::now(),
356 f_history: VecDeque::with_capacity(history_window),
357 grad_history: VecDeque::with_capacity(history_window),
358 step_history: VecDeque::with_capacity(history_window),
359 condition_history: VecDeque::with_capacity(history_window),
360 current_iteration: 0,
361 }
362 }
363
364 pub fn record_iteration(
366 &mut self,
367 f_value: f64,
368 grad: &ArrayView1<f64>,
369 step: &ArrayView1<f64>,
370 direction: &ArrayView1<f64>,
371 line_search_info: LineSearchDiagnostic,
372 ) {
373 let iteration_start = Instant::now();
374
375 self.update_history(
377 f_value,
378 grad.mapv(|x| x.abs()).sum(),
379 step.mapv(|x| x.abs()).sum(),
380 );
381
382 let convergence_metrics = self.compute_convergence_metrics(f_value, grad, step);
384
385 let diagnostic = IterationDiagnostic {
387 iteration: self.current_iteration,
388 f_value,
389 grad_norm: grad.mapv(|x| x.abs()).sum(),
390 step_size: step.mapv(|x| x.abs()).sum(),
391 direction_norm: direction.mapv(|x| x.abs()).sum(),
392 line_search: line_search_info,
393 convergence_metrics,
394 iteration_time: iteration_start.elapsed(),
395 total_time: self.start_time.elapsed(),
396 memory_usage: None, };
398
399 self.nit.push(diagnostic);
400 self.current_iteration += 1;
401 }
402
403 fn update_history(&mut self, f_value: f64, grad_norm: f64, step_size: f64) {
405 self.f_history.push_back(f_value);
406 if self.f_history.len() > self.options.history_window {
407 self.f_history.pop_front();
408 }
409
410 self.grad_history.push_back(grad_norm);
411 if self.grad_history.len() > self.options.history_window {
412 self.grad_history.pop_front();
413 }
414
415 self.step_history.push_back(step_size);
416 if self.step_history.len() > self.options.history_window {
417 self.step_history.pop_front();
418 }
419 }
420
    /// Compute change/optimality metrics for the current iterate.
    ///
    /// NOTE(review): `f_prev` is taken from the most recent history entry.
    /// If the caller pushes the current `f_value` into the history *before*
    /// calling this method, `f_prev` equals `f_value` and the change metrics
    /// collapse to zero — verify the call order in `record_iteration`.
    fn compute_convergence_metrics(
        &self,
        f_value: f64,
        grad: &ArrayView1<f64>,
        step: &ArrayView1<f64>,
    ) -> ConvergenceMetrics {
        // On the very first iteration there is no history; fall back to the
        // current value so the change metrics come out as zero.
        let f_prev = self.f_history.back().copied().unwrap_or(f_value);
        let f_abs_change = (f_value - f_prev).abs();
        // Guard against division by a near-zero previous value.
        let f_rel_change = if f_prev.abs() > 1e-10 {
            f_abs_change / f_prev.abs()
        } else {
            f_abs_change
        };

        // L1 norm of the gradient, scaled by the function magnitude.
        let grad_norm = grad.mapv(|x| x.abs()).sum();
        let grad_rel_norm = grad_norm / (1.0 + f_value.abs());

        let step_norm = step.mapv(|x| x.abs()).sum();
        // NOTE(review): ||x|| is a hard-coded placeholder of 1.0 — the actual
        // iterate is not available here; confirm whether this is intended.
        let x_norm = 1.0;
        let x_rel_change = step_norm / (1.0 + x_norm);

        // First-order optimality is approximated by the gradient norm.
        let optimality = grad_norm;

        ConvergenceMetrics {
            f_rel_change,
            f_abs_change,
            grad_rel_norm,
            x_rel_change,
            optimality,
            // Conditioning estimation is not wired in at this level.
            condition_estimate: None,
        }
    }
454
455 pub fn finalize(self) -> ConvergenceDiagnostics {
457 let performance_metrics = self.compute_performance_metrics();
458 let convergence_analysis = self.analyze_convergence();
459 let problem_analysis = self.analyze_problem();
460 let warnings = self.generate_warnings();
461
462 ConvergenceDiagnostics {
463 nit: self.nit,
464 performance_metrics,
465 convergence_analysis,
466 problem_analysis,
467 warnings,
468 }
469 }
470
    /// Aggregate whole-run counters and timing statistics.
    fn compute_performance_metrics(&self) -> PerformanceMetrics {
        let total_nit = self.nit.len();
        let total_time = self.start_time.elapsed();

        // Evaluation counts come from the per-iteration line-search records.
        let total_fev = self.nit.iter().map(|it| it.line_search.n_fev).sum();
        let total_gev = self.nit.iter().map(|it| it.line_search.n_gev).sum();

        let avg_iteration_time = if total_nit > 0 {
            total_time / total_nit as u32
        } else {
            Duration::from_secs(0)
        };

        // Function evaluations per second of wall-clock time.
        let fev_rate = if total_time.as_secs_f64() > 0.0 {
            total_fev as f64 / total_time.as_secs_f64()
        } else {
            0.0
        };

        let efficiency = self.compute_efficiency_metrics();

        PerformanceMetrics {
            total_nit,
            total_fev,
            total_gev,
            // Hessian evaluations are not tracked by this collector.
            total_hev: 0,
            total_time,
            avg_iteration_time,
            fev_rate,
            efficiency,
        }
    }
504
    /// Derive efficiency ratios from the recorded histories.
    ///
    /// NOTE(review): `total_progress` is measured over the *windowed*
    /// `f_history` (last `history_window` samples), not the whole run —
    /// confirm this is intended for long runs.
    fn compute_efficiency_metrics(&self) -> EfficiencyMetrics {
        // Objective improvement between the oldest and newest retained values.
        let total_progress =
            if let (Some(first), Some(last)) = (self.f_history.front(), self.f_history.back()) {
                (first - last).abs()
            } else {
                0.0
            };

        let total_fev: usize = self.nit.iter().map(|it| it.line_search.n_fev).sum();
        let total_gev: usize = self.nit.iter().map(|it| it.line_search.n_gev).sum();

        let progress_per_fev = if total_fev > 0 {
            total_progress / total_fev as f64
        } else {
            0.0
        };

        let progress_per_gev = if total_gev > 0 {
            total_progress / total_gev as f64
        } else {
            0.0
        };

        // NOTE(review): the sum skips iterations with alpha_init <= 0 but the
        // divisor counts all iterations, which biases the mean downward.
        let line_search_efficiency = self
            .nit
            .iter()
            .filter(|it| it.line_search.alpha_init > 0.0)
            .map(|it| it.line_search.alpha / it.line_search.alpha_init)
            .sum::<f64>()
            / self.nit.len().max(1) as f64;

        // Fraction of iterations whose line search reported success.
        let step_acceptance_rate = self.nit.iter().filter(|it| it.line_search.success).count()
            as f64
            / self.nit.len().max(1) as f64;

        // Mean relative function reduction per iteration.
        let avg_reduction_ratio = self
            .nit
            .iter()
            .map(|it| it.convergence_metrics.f_rel_change)
            .sum::<f64>()
            / self.nit.len().max(1) as f64;

        EfficiencyMetrics {
            progress_per_fev,
            progress_per_gev,
            line_search_efficiency,
            step_acceptance_rate,
            avg_reduction_ratio,
        }
    }
556
557 fn analyze_convergence(&self) -> ConvergenceAnalysis {
559 let convergence_rate = self.estimate_convergence_rate();
560 let predicted_nit = self.predict_iterations_to_convergence();
561 let confidence_score = self.compute_confidence_score();
562 let convergence_phase = self.detect_convergence_phase();
563 let stagnation = self.analyze_stagnation();
564
565 ConvergenceAnalysis {
566 convergence_rate,
567 predicted_nit,
568 confidence_score,
569 convergence_phase,
570 stagnation,
571 }
572 }
573
    /// Classify the convergence rate from the recent function-value history.
    ///
    /// NOTE(review): the branch order means a measured rate in [1.0, 1.5]
    /// is classified `Sublinear`, while (0, 1) is `Linear` — confirm these
    /// thresholds match the intended interpretation of the ln-ratio metric.
    fn estimate_convergence_rate(&self) -> ConvergenceRate {
        // Need at least three samples for a meaningful trend.
        if self.f_history.len() < 3 {
            return ConvergenceRate::Unclear;
        }

        // Use at most the last five function values, newest first.
        let window = 5.min(self.f_history.len());
        let recent_f: Vec<f64> = self.f_history.iter().rev().take(window).copied().collect();

        if let Some(rate) = self.compute_rate_from_history(&recent_f) {
            if rate > 1.5 {
                ConvergenceRate::Superlinear(rate)
            } else if rate > 0.0 && rate < 1.0 {
                ConvergenceRate::Linear(rate)
            } else if rate > 0.0 {
                ConvergenceRate::Sublinear(rate)
            } else {
                ConvergenceRate::Unclear
            }
        } else {
            ConvergenceRate::Unclear
        }
    }
599
600 fn compute_rate_from_history(&self, values: &[f64]) -> Option<f64> {
602 if values.len() < 2 {
603 return None;
604 }
605
606 let mut rates = Vec::new();
608 for i in 1..values.len() {
609 if values[i - 1] > 0.0 && values[i] > 0.0 {
610 let rate = values[i].ln() / values[i - 1].ln();
611 if rate.is_finite() {
612 rates.push(rate);
613 }
614 }
615 }
616
617 if rates.is_empty() {
618 None
619 } else {
620 Some(rates.iter().sum::<f64>() / rates.len() as f64)
621 }
622 }
623
    /// Rough estimate of iterations remaining until convergence.
    ///
    /// NOTE(review): `last_f.ln()` is negative whenever `last_f < 1`, and the
    /// subsequent `as usize` cast saturates negatives to 0 — so runs with
    /// small positive objectives always predict 0 iterations. Verify this
    /// heuristic against the intended convergence model.
    fn predict_iterations_to_convergence(&self) -> Option<usize> {
        if let Some(last_f) = self.f_history.back() {
            if let Some(last_grad) = self.grad_history.back() {
                // Already effectively converged.
                if *last_grad < 1e-5 {
                    return Some(0);
                }

                if self.nit.len() > 2 {
                    let recent_rate = self.nit.last()?.convergence_metrics.f_rel_change;
                    if recent_rate > 0.0 {
                        let iterations_needed = (last_f.ln() / recent_rate).ceil() as usize;
                        return Some(iterations_needed);
                    }
                }
            }
        }
        None
    }
645
646 fn compute_confidence_score(&self) -> f64 {
648 if self.nit.len() < 5 {
650 return 0.0;
651 }
652
653 let mut score = 1.0;
654
655 let decreasing_count = self.count_decreasing_pairs(&self.f_history);
657 score *= decreasing_count as f64 / self.f_history.len().max(1) as f64;
658
659 let grad_decreasing = self.count_decreasing_pairs(&self.grad_history);
661 score *= grad_decreasing as f64 / self.grad_history.len().max(1) as f64;
662
663 score
664 }
665
666 fn count_decreasing_pairs(&self, values: &VecDeque<f64>) -> usize {
668 let mut count = 0;
669 for i in 1..values.len() {
670 if values[i] < values[i - 1] {
671 count += 1;
672 }
673 }
674 count
675 }
676
677 fn is_f_stagnant(&self) -> bool {
679 if self.f_history.len() < 2 {
680 return false;
681 }
682
683 let mut count = 0;
684 let n = self.f_history.len().min(6); for i in (self.f_history.len() - n + 1)..self.f_history.len() {
686 if (self.f_history[i] - self.f_history[i - 1]).abs() < 1e-12 {
687 count += 1;
688 }
689 }
690
691 count >= n - 1 }
693
694 fn detect_convergence_phase(&self) -> ConvergencePhase {
696 if self.nit.is_empty() {
697 return ConvergencePhase::Exploration;
698 }
699
700 let last_grad = self.grad_history.back().copied().unwrap_or(1.0);
701 let last_f_change = self
702 .nit
703 .last()
704 .map(|it| it.convergence_metrics.f_rel_change)
705 .unwrap_or(1.0);
706
707 if last_grad < 1e-8 && last_f_change < 1e-10 {
708 ConvergencePhase::Converged
709 } else if self.is_stagnated() {
710 ConvergencePhase::Stagnated
711 } else if last_f_change > 1e-3 {
712 ConvergencePhase::RapidDescent
713 } else if last_grad < 1e-3 {
714 ConvergencePhase::FineTuning
715 } else {
716 ConvergencePhase::Exploration
717 }
718 }
719
720 fn is_stagnated(&self) -> bool {
722 if self.f_history.len() < 5 {
723 return false;
724 }
725
726 let recent = self.f_history.iter().rev().take(5);
727 let values: Vec<f64> = recent.copied().collect();
728 let max_diff = values
729 .windows(2)
730 .map(|w| (w[1] - w[0]).abs())
731 .max_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal))
732 .unwrap_or(0.0);
733
734 max_diff < 1e-12
735 }
736
737 fn analyze_stagnation(&self) -> StagnationAnalysis {
739 let is_stagnated = self.is_stagnated();
740 let stagnant_nit = self.count_stagnant_nit();
741 let stagnation_type = self.detect_stagnation_type();
742 let recommendations = self.generate_stagnation_recommendations(&stagnation_type);
743
744 StagnationAnalysis {
745 is_stagnated,
746 stagnant_nit,
747 stagnation_type,
748 recommendations,
749 }
750 }
751
752 fn count_stagnant_nit(&self) -> usize {
754 let mut count = 0;
755 for it in self.nit.iter().rev() {
756 if it.convergence_metrics.f_rel_change < 1e-10 {
757 count += 1;
758 } else {
759 break;
760 }
761 }
762 count
763 }
764
    /// Diagnose the cause of the current stagnation.
    ///
    /// Checks are ordered by specificity: oscillation first, then repeated
    /// line-search failures, then plateau / numerical-limit classification
    /// from the gradient and function histories.
    fn detect_stagnation_type(&self) -> StagnationType {
        if !self.is_stagnated() {
            return StagnationType::None;
        }

        // Function values alternate between increases and decreases.
        if self.is_oscillating() {
            return StagnationType::Oscillation;
        }

        // Three or more of the last five line searches failed.
        let ls_failures = self
            .nit
            .iter()
            .rev()
            .take(5)
            .filter(|it| !it.line_search.success)
            .count();
        if ls_failures >= 3 {
            return StagnationType::LineSearchFailure;
        }

        // NOTE(review): "stagnant" gradient here means *tiny* (< 1e-10),
        // not unchanging — confirm this matches the intended semantics.
        let grad_stagnant = self.grad_history.iter().rev().take(5).all(|&g| g < 1e-10);
        let f_stagnant = self.is_f_stagnant();

        if grad_stagnant && f_stagnant {
            StagnationType::NumericalLimit
        } else if f_stagnant {
            StagnationType::FunctionPlateau
        } else if grad_stagnant {
            StagnationType::GradientPlateau
        } else {
            StagnationType::None
        }
    }
802
803 fn is_oscillating(&self) -> bool {
805 if self.f_history.len() < 4 {
806 return false;
807 }
808
809 let mut sign_changes = 0;
810 let mut diffs = Vec::new();
811
812 for i in 1..self.f_history.len() {
814 diffs.push(self.f_history[i] - self.f_history[i - 1]);
815 }
816
817 for i in 1..diffs.len() {
818 if diffs[i - 1] * diffs[i] < 0.0 {
819 sign_changes += 1;
820 }
821 }
822
823 sign_changes >= diffs.len() / 2
824 }
825
    /// Map a diagnosed stagnation cause to user-facing remediation advice.
    fn generate_stagnation_recommendations(&self, stagnation_type: &StagnationType) -> Vec<String> {
        match stagnation_type {
            StagnationType::None => vec![],
            StagnationType::FunctionPlateau => vec![
                "Consider tightening convergence tolerances".to_string(),
                "Try a different optimization algorithm".to_string(),
                "Check if at a saddle point".to_string(),
            ],
            StagnationType::GradientPlateau => vec![
                "May be near optimum with flat gradient".to_string(),
                "Consider using second-order methods".to_string(),
            ],
            StagnationType::Oscillation => vec![
                "Reduce step size or learning rate".to_string(),
                "Use momentum or averaging".to_string(),
                "Consider non-monotone line search".to_string(),
            ],
            StagnationType::LineSearchFailure => vec![
                "Relax line search parameters".to_string(),
                "Use trust region methods".to_string(),
                "Check gradient computation".to_string(),
            ],
            StagnationType::NumericalLimit => vec![
                "Reached numerical precision limit".to_string(),
                "Consider using higher precision arithmetic".to_string(),
                "Current solution may be optimal within tolerance".to_string(),
            ],
        }
    }
856
857 fn analyze_problem(&self) -> ProblemAnalysis {
859 let difficulty = self.assess_difficulty();
860 let conditioning = self.analyze_conditioning();
861 let noise_level = self.estimate_noise_level();
862 let features = self.detect_problem_features();
863
864 ProblemAnalysis {
865 difficulty,
866 conditioning,
867 noise_level,
868 features,
869 }
870 }
871
    /// Score adverse indicators and map the total to a difficulty class.
    ///
    /// NOTE(review): with no recorded iterations, `ls_efficiency` is 0 and
    /// the rate is `Unclear`, so an empty run scores 4.0 ("Difficult") —
    /// confirm this is acceptable.
    fn assess_difficulty(&self) -> ProblemDifficulty {
        let mut difficulty_score = 0.0;

        // Slow or unidentifiable convergence rate: +2.
        if matches!(
            self.estimate_convergence_rate(),
            ConvergenceRate::Sublinear(_) | ConvergenceRate::Unclear
        ) {
            difficulty_score += 2.0;
        }

        // Poor line-search step acceptance: up to +2.
        let ls_efficiency = self
            .nit
            .iter()
            .map(|it| it.line_search.alpha / it.line_search.alpha_init.max(1e-10))
            .sum::<f64>()
            / self.nit.len().max(1) as f64;
        if ls_efficiency < 0.1 {
            difficulty_score += 2.0;
        } else if ls_efficiency < 0.5 {
            difficulty_score += 1.0;
        }

        // Stagnation: +1.5.
        if self.is_stagnated() {
            difficulty_score += 1.5;
        }

        // Oscillation: +1.5.
        if self.is_oscillating() {
            difficulty_score += 1.5;
        }

        // Thresholds: <1 Easy, <3 Moderate, <5 Difficult, else VeryDifficult.
        if difficulty_score < 1.0 {
            ProblemDifficulty::Easy
        } else if difficulty_score < 3.0 {
            ProblemDifficulty::Moderate
        } else if difficulty_score < 5.0 {
            ProblemDifficulty::Difficult
        } else {
            ProblemDifficulty::VeryDifficult
        }
    }
917
918 fn analyze_conditioning(&self) -> ConditioningAnalysis {
920 let condition_history = self.condition_history.iter().copied().collect();
921 let condition_number = self.condition_history.back().copied();
922 let is_ill_conditioned = condition_number.map(|c| c > 1e6).unwrap_or(false);
923
924 ConditioningAnalysis {
925 condition_number,
926 condition_history,
927 is_ill_conditioned,
928 }
929 }
930
    /// Estimate the noise level as the standard deviation of the retained
    /// function values; `None` with fewer than ten samples.
    ///
    /// NOTE(review): this conflates the descent trend with noise — a cleanly
    /// decreasing sequence also has a large standard deviation. Consider
    /// detrending (e.g. residuals from a local fit) if this matters.
    fn estimate_noise_level(&self) -> Option<f64> {
        if self.f_history.len() < 10 {
            return None;
        }

        // Population variance of the windowed function values.
        let mean = self.f_history.iter().sum::<f64>() / self.f_history.len() as f64;
        let variance = self
            .f_history
            .iter()
            .map(|&f| (f - mean).powi(2))
            .sum::<f64>()
            / self.f_history.len() as f64;

        Some(variance.sqrt())
    }
948
    /// Infer structural features of the problem from the recorded histories.
    fn detect_problem_features(&self) -> Vec<ProblemFeature> {
        let mut features = Vec::new();

        // Conditioning: < 100 well-conditioned, > 1e6 ill-conditioned;
        // values in between produce no feature.
        if let Some(cond) = self.condition_history.back() {
            if *cond < 100.0 {
                features.push(ProblemFeature::WellConditioned);
            } else if *cond > 1e6 {
                features.push(ProblemFeature::IllConditioned);
            }
        }

        // Noise above 1e-3 flags a noisy objective.
        if let Some(noise) = self.estimate_noise_level() {
            if noise > 1e-3 {
                features.push(ProblemFeature::Noisy);
            }
        }

        // High step-size variance suggests strong nonlinearity.
        if self.step_history.len() > 5 {
            let step_variance = self.compute_variance(&self.step_history);
            if step_variance > 10.0 {
                features.push(ProblemFeature::StronglyNonlinear);
            }
        }

        // Tiny steps despite large gradients in most iterations suggest
        // narrow valleys (e.g. Rosenbrock-like geometry).
        if self.nit.len() > 5 {
            let narrow_valley_indicators = self
                .nit
                .iter()
                .filter(|it| it.step_size < 1e-3 && it.grad_norm > 1.0)
                .count();
            if narrow_valley_indicators > self.nit.len() / 2 {
                features.push(ProblemFeature::NarrowValleys);
            }
        }

        features
    }
991
992 fn compute_variance(&self, values: &VecDeque<f64>) -> f64 {
994 if values.is_empty() {
995 return 0.0;
996 }
997
998 let mean = values.iter().sum::<f64>() / values.len() as f64;
999 values.iter().map(|&v| (v - mean).powi(2)).sum::<f64>() / values.len() as f64
1000 }
1001
    /// Produce warnings for issues visible in the final state: a failed
    /// last line search, overall stagnation, and a gradient at machine
    /// precision.
    fn generate_warnings(&self) -> Vec<DiagnosticWarning> {
        let mut warnings = Vec::new();

        // Failed line search on the most recent iteration.
        if let Some(last_it) = self.nit.last() {
            if !last_it.line_search.success {
                warnings.push(DiagnosticWarning {
                    severity: WarningSeverity::Moderate,
                    message: "Line search failed in last iteration".to_string(),
                    iteration: last_it.iteration,
                    recommendations: vec![
                        "Consider relaxing line search parameters".to_string(),
                        "Check gradient computation accuracy".to_string(),
                    ],
                });
            }
        }

        // Overall stagnation of the function-value history.
        if self.is_stagnated() {
            warnings.push(DiagnosticWarning {
                severity: WarningSeverity::Severe,
                message: "Optimization appears to be stagnated".to_string(),
                iteration: self.current_iteration,
                recommendations: vec![
                    "Consider restarting with different initial point".to_string(),
                    "Try a different optimization algorithm".to_string(),
                    "Check problem formulation".to_string(),
                ],
            });
        }

        // Gradient below 1e-15: effectively machine precision.
        if self
            .grad_history
            .back()
            .map(|&g| g < 1e-15)
            .unwrap_or(false)
        {
            warnings.push(DiagnosticWarning {
                severity: WarningSeverity::Info,
                message: "Gradient norm is near machine precision".to_string(),
                iteration: self.current_iteration,
                recommendations: vec![
                    "Solution may be optimal within numerical precision".to_string()
                ],
            });
        }

        warnings
    }
1054}
1055
impl ConvergenceDiagnostics {
    /// Export the diagnostics as JSON.
    ///
    /// NOTE(review): this is a stub — it emits only `total_nit` and ignores
    /// everything else (and the configured `ExportFormat`). Replace with a
    /// full serialization when the format is settled.
    pub fn to_json(&self) -> Result<String, OptimizeError> {
        Ok(format!(
            "{{\"total_nit\": {}}}",
            self.performance_metrics.total_nit
        ))
    }

    /// Export the per-iteration trace as CSV with a fixed four-column header.
    pub fn to_csv(&self) -> Result<String, OptimizeError> {
        let mut csv = String::from("iteration,f_value,grad_norm,step_size\n");
        for it in &self.nit {
            csv.push_str(&format!(
                "{},{},{},{}\n",
                it.iteration, it.f_value, it.grad_norm, it.step_size
            ));
        }
        Ok(csv)
    }

    /// Extract (f_values, grad_norms, step_sizes) as parallel vectors,
    /// one entry per recorded iteration.
    pub fn to_arrays(&self) -> (Vec<f64>, Vec<f64>, Vec<f64>) {
        let f_values: Vec<f64> = self.nit.iter().map(|it| it.f_value).collect();
        let grad_norms: Vec<f64> = self.nit.iter().map(|it| it.grad_norm).collect();
        let step_sizes: Vec<f64> = self.nit.iter().map(|it| it.step_size).collect();
        (f_values, grad_norms, step_sizes)
    }

    /// Render a short human-readable summary of the run.
    /// Final f/gradient values fall back to 0.0 when no iterations exist.
    pub fn summary_report(&self) -> String {
        format!(
            "Optimization Summary:\n\
             Total nit: {}\n\
             Total function evaluations: {}\n\
             Total time: {:?}\n\
             Final function value: {:.6e}\n\
             Final gradient norm: {:.6e}\n\
             Convergence status: {:?}\n\
             Problem difficulty: {:?}\n\
             Warnings: {}",
            self.performance_metrics.total_nit,
            self.performance_metrics.total_fev,
            self.performance_metrics.total_time,
            self.nit.last().map(|it| it.f_value).unwrap_or(0.0),
            self.nit.last().map(|it| it.grad_norm).unwrap_or(0.0),
            self.convergence_analysis.convergence_phase,
            self.problem_analysis.difficulty,
            self.warnings.len()
        )
    }
}
1110
#[cfg(test)]
mod tests {
    use super::*;
    use scirs2_core::ndarray::Array1;

    /// End-to-end smoke test: recording one iteration and finalizing
    /// yields one diagnostic entry and matching performance counters.
    #[test]
    fn test_diagnostic_collector() {
        let mut collector = DiagnosticCollector::new(DiagnosticOptions::default());

        let grad = Array1::from_vec(vec![1.0, 2.0]);
        let step = Array1::from_vec(vec![0.1, 0.2]);
        let direction = Array1::from_vec(vec![-1.0, -2.0]);

        let ls_info = LineSearchDiagnostic {
            n_fev: 3,
            n_gev: 1,
            alpha: 0.5,
            alpha_init: 1.0,
            success: true,
            wolfe_satisfied: (true, true),
        };

        collector.record_iteration(10.0, &grad.view(), &step.view(), &direction.view(), ls_info);

        let diagnostics = collector.finalize();
        assert_eq!(diagnostics.nit.len(), 1);
        assert_eq!(diagnostics.performance_metrics.total_nit, 1);
    }

    /// A geometrically decreasing sequence should yield some finite
    /// rate estimate (the exact value is not pinned here).
    #[test]
    fn test_convergence_rate_estimation() {
        let collector = DiagnosticCollector::new(DiagnosticOptions::default());

        let values = vec![1.0, 0.1, 0.01, 0.001];
        let rate = collector.compute_rate_from_history(&values);
        assert!(rate.is_some());
    }
}