quantrs2_tytan/visualization/convergence.rs

//! Convergence visualization for optimization algorithms
//!
//! This module provides tools for tracking and visualizing the convergence
//! of quantum annealing and optimization algorithms.
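//!
//! A minimal usage sketch (the `quantrs2_tytan` crate path below is assumed
//! from this file's location, so the example is marked `ignore` rather than
//! run as a doc-test):
//!
//! ```ignore
//! use quantrs2_tytan::visualization::convergence::{ConvergenceConfig, ConvergencePlot};
//! use std::collections::HashMap;
//! use std::time::Duration;
//!
//! let mut plot = ConvergencePlot::new(ConvergenceConfig::default());
//! for (i, &objective) in [10.0, 4.0, 2.5, 2.1, 2.0_f64].iter().enumerate() {
//!     plot.add_iteration(
//!         objective,
//!         HashMap::new(), // constraint violations by name
//!         HashMap::new(), // parameter values by name
//!         Duration::from_millis(100 * (i as u64 + 1)),
//!     );
//! }
//! let analysis = plot.analyze().expect("at least one iteration was recorded");
//! println!("best objective: {}", analysis.best_objective);
//! ```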

use crate::optimization::{adaptive::PerformanceMetrics, tuning::TuningEvaluation};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

#[cfg(feature = "scirs")]
use crate::scirs_stub::scirs2_plot::{Annotation, MultiPlot, Plot2D};

/// Convergence plot configuration
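///
/// The fields mirror the plotting options below; a short sketch of overriding
/// a few of them while keeping the remaining defaults:
///
/// ```ignore
/// let config = ConvergenceConfig {
///     log_scale: true,
///     smoothing_window: 25,
///     ..ConvergenceConfig::default()
/// };
/// ```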
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConvergenceConfig {
    /// Window size for moving average
    pub smoothing_window: usize,
    /// Show confidence intervals
    pub show_confidence: bool,
    /// Confidence level (e.g., 0.95 for 95%)
    pub confidence_level: f64,
    /// Show constraint violations
    pub show_constraints: bool,
    /// Logarithmic y-axis for objective
    pub log_scale: bool,
    /// Show best so far line
    pub show_best: bool,
    /// Show theoretical bounds if available
    pub show_bounds: bool,
}

impl Default for ConvergenceConfig {
    fn default() -> Self {
        Self {
            smoothing_window: 10,
            show_confidence: true,
            confidence_level: 0.95,
            show_constraints: true,
            log_scale: false,
            show_best: true,
            show_bounds: false,
        }
    }
}

/// Convergence data tracker
pub struct ConvergencePlot {
    config: ConvergenceConfig,
    objective_history: Vec<f64>,
    constraint_history: Vec<HashMap<String, f64>>,
    parameter_history: Vec<HashMap<String, f64>>,
    time_history: Vec<std::time::Duration>,
    metadata: HashMap<String, String>,
}

/// Convergence analysis results
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConvergenceAnalysis {
    pub final_objective: f64,
    pub best_objective: f64,
    pub improvement_rate: f64,
    pub convergence_iteration: Option<usize>,
    pub total_iterations: usize,
    pub total_time: std::time::Duration,
    pub constraint_satisfaction: ConstraintSatisfaction,
    pub parameter_stability: ParameterStability,
    pub convergence_metrics: ConvergenceMetrics,
}

/// Constraint satisfaction metrics
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConstraintSatisfaction {
    pub final_violations: HashMap<String, f64>,
    pub satisfaction_rate: f64,
    pub convergence_iterations: HashMap<String, usize>,
    pub max_violation: f64,
}

/// Parameter stability analysis
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ParameterStability {
    pub final_values: HashMap<String, f64>,
    pub variation_coefficients: HashMap<String, f64>,
    pub convergence_windows: HashMap<String, (usize, usize)>,
    pub stable_parameters: Vec<String>,
}

/// Convergence metrics
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConvergenceMetrics {
    pub convergence_rate: f64,
    pub oscillation_index: f64,
    pub plateau_regions: Vec<(usize, usize)>,
    pub improvement_events: Vec<ImprovementEvent>,
    pub estimated_optimum: Option<f64>,
}

/// Improvement event in optimization
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ImprovementEvent {
    pub iteration: usize,
    pub old_value: f64,
    pub new_value: f64,
    pub improvement: f64,
    pub relative_improvement: f64,
}

impl ConvergencePlot {
    /// Create new convergence plotter
    pub fn new(config: ConvergenceConfig) -> Self {
        Self {
            config,
            objective_history: Vec::new(),
            constraint_history: Vec::new(),
            parameter_history: Vec::new(),
            time_history: Vec::new(),
            metadata: HashMap::new(),
        }
    }

    /// Add optimization iteration data
    pub fn add_iteration(
        &mut self,
        objective: f64,
        constraints: HashMap<String, f64>,
        parameters: HashMap<String, f64>,
        elapsed: std::time::Duration,
    ) {
        self.objective_history.push(objective);
        self.constraint_history.push(constraints);
        self.parameter_history.push(parameters);
        self.time_history.push(elapsed);
    }

    /// Add metadata
    pub fn set_metadata(&mut self, key: String, value: String) {
        self.metadata.insert(key, value);
    }

    /// Analyze convergence
    pub fn analyze(&self) -> Result<ConvergenceAnalysis, Box<dyn std::error::Error>> {
        if self.objective_history.is_empty() {
            return Err("No convergence data to analyze".into());
        }

        let final_objective = *self
            .objective_history
            .last()
            .expect("objective_history is non-empty (checked above)");
        let best_objective = self
            .objective_history
            .iter()
            .fold(f64::INFINITY, |a, &b| a.min(b));

        // Calculate improvement rate relative to the initial objective,
        // guarding against division by a (near-)zero initial value
        let initial_objective = self.objective_history[0];
        let improvement = initial_objective - final_objective;
        let improvement_rate = if initial_objective.abs() > 1e-10 {
            improvement / initial_objective.abs()
        } else {
            0.0
        };

        // Find convergence iteration
        let convergence_iteration = self.find_convergence_iteration()?;

        // Analyze constraints
        let constraint_satisfaction = self.analyze_constraint_satisfaction()?;

        // Analyze parameter stability
        let parameter_stability = self.analyze_parameter_stability()?;

        // Calculate convergence metrics
        let convergence_metrics = self.calculate_convergence_metrics()?;

        let total_time = self
            .time_history
            .last()
            .copied()
            .unwrap_or(std::time::Duration::from_secs(0));

        Ok(ConvergenceAnalysis {
            final_objective,
            best_objective,
            improvement_rate,
            convergence_iteration,
            total_iterations: self.objective_history.len(),
            total_time,
            constraint_satisfaction,
            parameter_stability,
            convergence_metrics,
        })
    }

    /// Find iteration where convergence occurred
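    ///
    /// Convergence is declared at the first index at which the standard
    /// deviation of the last `smoothing_window` smoothed values drops below
    /// `tolerance * |mean|` (with `tolerance` fixed at 1e-6 below); returns
    /// `Ok(None)` if no such window exists or the history is too short.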
    fn find_convergence_iteration(&self) -> Result<Option<usize>, Box<dyn std::error::Error>> {
        let tolerance = 1e-6;
        let window = self.config.smoothing_window;

        if self.objective_history.len() < window {
            return Ok(None);
        }

        // Look for plateau in moving average
        let smoothed = self.moving_average(&self.objective_history, window);

        for i in window..smoothed.len() {
            let recent_values = &smoothed[i - window..i];
            let mean = recent_values.iter().sum::<f64>() / window as f64;
            let variance = recent_values
                .iter()
                .map(|v| (v - mean).powi(2))
                .sum::<f64>()
                / window as f64;

            if variance.sqrt() < tolerance * mean.abs() {
                return Ok(Some(i));
            }
        }

        Ok(None)
    }

    /// Analyze constraint satisfaction over time
    fn analyze_constraint_satisfaction(
        &self,
    ) -> Result<ConstraintSatisfaction, Box<dyn std::error::Error>> {
        let final_violations = self.constraint_history.last().cloned().unwrap_or_default();

        let satisfied_count = final_violations
            .values()
            .filter(|&&v| v.abs() < 1e-6)
            .count();
        let total_constraints = final_violations.len().max(1);
        let satisfaction_rate = satisfied_count as f64 / total_constraints as f64;

        // Find when each constraint converged
        let mut convergence_iterations = HashMap::new();
        let tolerance = 1e-6;

        for name in final_violations.keys() {
            for (i, constraints) in self.constraint_history.iter().enumerate() {
                if let Some(&violation) = constraints.get(name) {
                    if violation.abs() < tolerance {
                        convergence_iterations.insert(name.clone(), i);
                        break;
                    }
                }
            }
        }

        let max_violation = final_violations
            .values()
            .map(|v| v.abs())
            .fold(0.0, f64::max);

        Ok(ConstraintSatisfaction {
            final_violations,
            satisfaction_rate,
            convergence_iterations,
            max_violation,
        })
    }

    /// Analyze parameter stability
    fn analyze_parameter_stability(
        &self,
    ) -> Result<ParameterStability, Box<dyn std::error::Error>> {
        if self.parameter_history.is_empty() {
            return Ok(ParameterStability {
                final_values: HashMap::new(),
                variation_coefficients: HashMap::new(),
                convergence_windows: HashMap::new(),
                stable_parameters: Vec::new(),
            });
        }

        let final_values = self.parameter_history.last().cloned().unwrap_or_default();

        let mut variation_coefficients = HashMap::new();
        let mut convergence_windows = HashMap::new();
        let mut stable_parameters = Vec::new();

        for param_name in final_values.keys() {
            // Extract parameter values over time
            let values: Vec<f64> = self
                .parameter_history
                .iter()
                .filter_map(|p| p.get(param_name).copied())
                .collect();

            if values.len() > 1 {
                // Calculate coefficient of variation
                let mean = values.iter().sum::<f64>() / values.len() as f64;
                let variance =
                    values.iter().map(|v| (v - mean).powi(2)).sum::<f64>() / values.len() as f64;
                let cv = if mean.abs() > 1e-10 {
                    variance.sqrt() / mean.abs()
                } else {
                    0.0
                };

                variation_coefficients.insert(param_name.clone(), cv);

                // Find convergence window
                let window = self.find_parameter_convergence_window(&values)?;
                if let Some((start, end)) = window {
                    convergence_windows.insert(param_name.clone(), (start, end));

                    // Consider stable if converged in first half
                    if end < values.len() / 2 {
                        stable_parameters.push(param_name.clone());
                    }
                }
            }
        }

        Ok(ParameterStability {
            final_values,
            variation_coefficients,
            convergence_windows,
            stable_parameters,
        })
    }

    /// Find convergence window for a parameter
    fn find_parameter_convergence_window(
        &self,
        values: &[f64],
    ) -> Result<Option<(usize, usize)>, Box<dyn std::error::Error>> {
        if values.len() < self.config.smoothing_window {
            return Ok(None);
        }

        let tolerance = 0.01; // 1% variation
        let window = self.config.smoothing_window;

        for start in 0..values.len() - window {
            let window_values = &values[start..start + window];
            let mean = window_values.iter().sum::<f64>() / window as f64;
            let max_dev = window_values
                .iter()
                .map(|v| (v - mean).abs())
                .fold(0.0, f64::max);

            if max_dev < tolerance * mean.abs() {
                // Found stable window, now find where it ends
                let mut end = start + window;
                while end < values.len() {
                    if (values[end] - mean).abs() > tolerance * mean.abs() {
                        break;
                    }
                    end += 1;
                }
                return Ok(Some((start, end)));
            }
        }

        Ok(None)
    }

    /// Calculate convergence metrics
    fn calculate_convergence_metrics(
        &self,
    ) -> Result<ConvergenceMetrics, Box<dyn std::error::Error>> {
        let objectives = &self.objective_history;

        // Convergence rate (exponential fit)
        let convergence_rate = self.estimate_convergence_rate(objectives)?;

        // Oscillation index
        let oscillation_index = self.calculate_oscillation_index(objectives)?;

        // Find plateau regions
        let plateau_regions = self.find_plateau_regions(objectives)?;

        // Identify improvement events
        let improvement_events = self.identify_improvement_events(objectives)?;

        // Estimate optimum (extrapolation)
        let estimated_optimum = self.estimate_optimum(objectives)?;

        Ok(ConvergenceMetrics {
            convergence_rate,
            oscillation_index,
            plateau_regions,
            improvement_events,
            estimated_optimum,
        })
    }

    /// Estimate convergence rate
    fn estimate_convergence_rate(
        &self,
        objectives: &[f64],
    ) -> Result<f64, Box<dyn std::error::Error>> {
        if objectives.len() < 10 {
            return Ok(0.0);
        }

        // Fit exponential decay: f(t) = a * exp(-rate * t) + c
        // Using simple linear regression on log scale
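        // Model: |f_best(i) - f_final| ~ a * exp(-rate * i), so on a log scale
        //   y_i = ln|f_best(i) - f_final| ~ ln(a) - rate * i.
        // Ordinary least squares over the collected (x, y) points gives
        //   slope = (n * xy_sum - x_sum * y_sum) / (n * xx_sum - x_sum^2),
        // and rate = -slope, as computed below.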
        let best_so_far = self.best_so_far(objectives);
        let final_value = best_so_far.last().copied().unwrap_or(0.0);

        let mut x_sum = 0.0;
        let mut y_sum = 0.0;
        let mut xy_sum = 0.0;
        let mut xx_sum = 0.0;
        let mut count = 0;

        for (i, &value) in best_so_far.iter().enumerate() {
            let diff = (value - final_value).abs();
            if diff > 1e-10 {
                let x = i as f64;
                let y = diff.ln();

                x_sum += x;
                y_sum += y;
                xy_sum += x * y;
                xx_sum += x * x;
                count += 1;
            }
        }

        if count < 2 {
            return Ok(0.0);
        }

        let n = count as f64;
        let rate = -(n * xy_sum - x_sum * y_sum) / (n * xx_sum - x_sum * x_sum);

        Ok(rate.max(0.0))
    }

    /// Calculate oscillation index
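    ///
    /// Defined as `(path_length - direct_distance) / path_length`, where the
    /// path length is the total variation of the objective trace and the
    /// direct distance is `|last - first|`. The index lies in `[0, 1]`: 0 for
    /// a monotone trace, values near 1 for a heavily oscillating one.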
    fn calculate_oscillation_index(
        &self,
        objectives: &[f64],
    ) -> Result<f64, Box<dyn std::error::Error>> {
        if objectives.len() < 3 {
            return Ok(0.0);
        }

        // Total variation (path length) of the objective trace over all
        // consecutive iterations
        let total_variation: f64 = objectives
            .windows(2)
            .map(|pair| (pair[1] - pair[0]).abs())
            .sum();

        let path_length = total_variation;
        let direct_distance = (objectives
            .last()
            .expect("objectives has at least 3 elements (checked above)")
            - objectives[0])
            .abs();

        if path_length > 0.0 {
            Ok((path_length - direct_distance) / path_length)
        } else {
            Ok(0.0)
        }
    }

    /// Find plateau regions
    fn find_plateau_regions(
        &self,
        objectives: &[f64],
    ) -> Result<Vec<(usize, usize)>, Box<dyn std::error::Error>> {
        let mut plateaus = Vec::new();
        let tolerance = 1e-6;
        let min_length = 5;

        let mut start = 0;
        while start < objectives.len() {
            let base_value = objectives[start];
            let mut end = start + 1;

            while end < objectives.len()
                && (objectives[end] - base_value).abs() < tolerance * base_value.abs()
            {
                end += 1;
            }

            if end - start >= min_length {
                plateaus.push((start, end));
            }

            start = end;
        }

        Ok(plateaus)
    }

    /// Identify significant improvement events
    fn identify_improvement_events(
        &self,
        objectives: &[f64],
    ) -> Result<Vec<ImprovementEvent>, Box<dyn std::error::Error>> {
        let mut events = Vec::new();
        let threshold = 0.01; // 1% improvement

        let best_so_far = self.best_so_far(objectives);

        for i in 1..best_so_far.len() {
            if best_so_far[i] < best_so_far[i - 1] {
                let improvement = best_so_far[i - 1] - best_so_far[i];
                let relative = improvement / best_so_far[i - 1].abs();

                if relative > threshold {
                    events.push(ImprovementEvent {
                        iteration: i,
                        old_value: best_so_far[i - 1],
                        new_value: best_so_far[i],
                        improvement,
                        relative_improvement: relative,
                    });
                }
            }
        }

        Ok(events)
    }

    /// Estimate optimum value
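    ///
    /// Heuristic exponential extrapolation of the best-so-far trace: with the
    /// fitted decay `rate`, the returned estimate is
    /// `current - (initial - current) * exp(-rate)`, i.e. the current best
    /// pushed one further decay step downward. Returns `Ok(None)` for short
    /// histories (fewer than 20 points) or a non-positive fitted rate.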
    fn estimate_optimum(
        &self,
        objectives: &[f64],
    ) -> Result<Option<f64>, Box<dyn std::error::Error>> {
        if objectives.len() < 20 {
            return Ok(None);
        }

        // Use exponential extrapolation on best-so-far
        let best_so_far = self.best_so_far(objectives);
        let rate = self.estimate_convergence_rate(objectives)?;

        if rate > 0.0 {
            let current = best_so_far
                .last()
                .copied()
                .expect("best_so_far has at least 20 elements (checked above)");
            let initial = best_so_far[0];

            // Estimate asymptotic value
            let estimated = (initial - current).mul_add(-(-rate).exp(), current);
            Ok(Some(estimated))
        } else {
            Ok(None)
        }
    }

    /// Calculate best-so-far trajectory
    fn best_so_far(&self, objectives: &[f64]) -> Vec<f64> {
        let mut best = Vec::with_capacity(objectives.len());
        let mut current_best = f64::INFINITY;

        for &obj in objectives {
            current_best = current_best.min(obj);
            best.push(current_best);
        }

        best
    }

    /// Calculate moving average
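    ///
    /// Uses a centered window of exactly `window` samples; the first and last
    /// `window / 2` entries are copied through unchanged.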
    fn moving_average(&self, data: &[f64], window: usize) -> Vec<f64> {
        if data.len() < window {
            return data.to_vec();
        }

        let mut result = Vec::with_capacity(data.len());

        // Fill initial values
        for item in data.iter().take(window / 2) {
            result.push(*item);
        }

        // Calculate moving average
        for i in window / 2..data.len() - window / 2 {
            // Average over a centered slice of exactly `window` samples so the
            // divisor matches the slice length for both odd and even windows
            let sum: f64 = data[i - window / 2..i - window / 2 + window].iter().sum();
            result.push(sum / window as f64);
        }

        // Fill final values
        for item in data.iter().skip(data.len() - window / 2) {
            result.push(*item);
        }

        result
    }

    /// Generate convergence plot
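    ///
    /// With the `scirs` feature enabled this renders a 2x2 figure (objective
    /// trace, constraint violations, parameter evolution, and a text summary
    /// of the analysis); without it, the data is exported to
    /// `convergence_plot.json` instead.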
    pub fn plot(&self) -> Result<(), Box<dyn std::error::Error>> {
        #[cfg(feature = "scirs")]
        {
            use crate::scirs_stub::scirs2_plot::{Figure, Subplot};

            let mut fig = Figure::new();
            let iterations: Vec<f64> = (0..self.objective_history.len())
                .map(|i| i as f64)
                .collect();

            // Main objective plot
            let subplot = fig.add_subplot(2, 2, 1)?;

            // Raw objectives
            subplot
                .plot(&iterations, &self.objective_history)
                .set_label("Objective")
                .set_alpha(0.5);

            // Best so far
            if self.config.show_best {
                let best_so_far = self.best_so_far(&self.objective_history);
                subplot
                    .plot(&iterations, &best_so_far)
                    .set_label("Best so far")
                    .set_linewidth(2.0);
            }

            // Smoothed line
            if self.config.smoothing_window > 1 {
                let smoothed =
                    self.moving_average(&self.objective_history, self.config.smoothing_window);
                subplot
                    .plot(&iterations, &smoothed)
                    .set_label("Smoothed")
                    .set_linestyle("--");
            }

            subplot
                .set_xlabel("Iteration")
                .set_ylabel("Objective")
                .set_title("Convergence Plot");
            subplot.legend();

            if self.config.log_scale {
                subplot.set_yscale("log");
            }

            // Constraint violations plot
            if self.config.show_constraints && !self.constraint_history.is_empty() {
                let subplot = fig.add_subplot(2, 2, 2)?;

                // Plot each constraint
                let mut constraint_names: Vec<_> = self
                    .constraint_history
                    .iter()
                    .flat_map(|c| c.keys())
                    .collect::<std::collections::HashSet<_>>()
                    .into_iter()
                    .collect();
                constraint_names.sort();

                for name in constraint_names {
                    let violations: Vec<f64> = self
                        .constraint_history
                        .iter()
                        .map(|c| c.get(name).copied().unwrap_or(0.0).abs())
                        .collect();

                    subplot.plot(&iterations, &violations).set_label(name);
                }

                subplot
                    .set_xlabel("Iteration")
                    .set_ylabel("Violation")
                    .set_title("Constraint Violations")
                    .set_yscale("log");
                subplot.legend();
            }

            // Parameter evolution
            if !self.parameter_history.is_empty() {
                let subplot = fig.add_subplot(2, 2, 3)?;

                let param_names: Vec<_> = self.parameter_history
                    .iter()
                    .flat_map(|p| p.keys())
                    .collect::<std::collections::HashSet<_>>()
                    .into_iter()
                    .take(5) // Limit to 5 parameters for clarity
                    .collect();

                for name in param_names {
                    let values: Vec<f64> = self
                        .parameter_history
                        .iter()
                        .map(|p| p.get(name).copied().unwrap_or(0.0))
                        .collect();

                    // Normalize to [0, 1]
                    let min_val = values.iter().fold(f64::INFINITY, |a, &b| a.min(b));
                    let max_val = values.iter().fold(f64::NEG_INFINITY, |a, &b| a.max(b));

                    if max_val > min_val {
                        let normalized: Vec<f64> = values
                            .iter()
                            .map(|v| (v - min_val) / (max_val - min_val))
                            .collect();

                        subplot.plot(&iterations, &normalized).set_label(name);
                    }
                }

                subplot
                    .set_xlabel("Iteration")
                    .set_ylabel("Normalized Value")
                    .set_title("Parameter Evolution");
                subplot.legend();
            }

            // Convergence metrics
            let analysis = self.analyze()?;
            let subplot = fig.add_subplot(2, 2, 4)?;

            // Display key metrics as text
            let text = format!(
                "Final objective: {:.4e}\n\
                 Best objective: {:.4e}\n\
                 Improvement: {:.2}%\n\
                 Convergence rate: {:.4}\n\
                 Oscillation: {:.2}\n\
                 Constraint satisfaction: {:.2}%",
                analysis.final_objective,
                analysis.best_objective,
                analysis.improvement_rate * 100.0,
                analysis.convergence_metrics.convergence_rate,
                analysis.convergence_metrics.oscillation_index,
                analysis.constraint_satisfaction.satisfaction_rate * 100.0
            );

            subplot
                .text(0.1, 0.9, &text)
                .set_fontsize(10)
                .set_verticalalignment("top");
            subplot.set_axis_off();

            fig.show()?;
        }

        #[cfg(not(feature = "scirs"))]
        {
            // Export plot data
            self.export_plot_data("convergence_plot.json")?;
            println!("Convergence data exported to convergence_plot.json");
        }

        Ok(())
    }

    /// Export plot data
    pub fn export_plot_data(&self, path: &str) -> Result<(), Box<dyn std::error::Error>> {
        let export = ConvergenceExport {
            config: self.config.clone(),
            objective_history: self.objective_history.clone(),
            constraint_history: self.constraint_history.clone(),
            parameter_history: self.parameter_history.clone(),
            time_history: self.time_history.clone(),
            metadata: self.metadata.clone(),
            analysis: self.analyze().ok(),
            timestamp: std::time::SystemTime::now(),
        };

        let json = serde_json::to_string_pretty(&export)?;
        std::fs::write(path, json)?;

        Ok(())
    }
}

/// Convergence data export format
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConvergenceExport {
    pub config: ConvergenceConfig,
    pub objective_history: Vec<f64>,
    pub constraint_history: Vec<HashMap<String, f64>>,
    pub parameter_history: Vec<HashMap<String, f64>>,
    pub time_history: Vec<std::time::Duration>,
    pub metadata: HashMap<String, String>,
    pub analysis: Option<ConvergenceAnalysis>,
    pub timestamp: std::time::SystemTime,
}

/// Create and plot convergence from optimization data
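///
/// A minimal call sketch (marked `ignore`; the crate path is assumed from this
/// file's location):
///
/// ```ignore
/// use quantrs2_tytan::visualization::convergence::plot_convergence;
///
/// let objectives = vec![12.0, 7.5, 5.0, 4.2, 4.0];
/// plot_convergence(objectives, None, None).unwrap();
/// ```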
pub fn plot_convergence(
    objectives: Vec<f64>,
    constraints: Option<Vec<HashMap<String, f64>>>,
    config: Option<ConvergenceConfig>,
) -> Result<(), Box<dyn std::error::Error>> {
    let config = config.unwrap_or_default();
    let mut plotter = ConvergencePlot::new(config);

    // Add data
    let constraints = constraints.unwrap_or_else(|| vec![HashMap::new(); objectives.len()]);
    let elapsed = std::time::Duration::from_secs(1);

    for (i, (obj, constr)) in objectives.iter().zip(constraints.iter()).enumerate() {
        plotter.add_iteration(*obj, constr.clone(), HashMap::new(), elapsed * i as u32);
    }

    plotter.plot()
}

/// Create convergence tracker from tuning evaluations
pub fn track_tuning_convergence(
    evaluations: &[TuningEvaluation],
    config: Option<ConvergenceConfig>,
) -> ConvergencePlot {
    let config = config.unwrap_or_default();
    let mut plotter = ConvergencePlot::new(config);

    for eval in evaluations {
        plotter.add_iteration(
            eval.objective_value,
            eval.constraint_violations.clone(),
            eval.parameters.clone(),
            eval.evaluation_time,
        );
    }

    plotter
}

/// Create convergence tracker from adaptive optimization
pub fn track_adaptive_convergence(
    performance_history: &[PerformanceMetrics],
    parameter_history: &[HashMap<String, f64>],
    config: Option<ConvergenceConfig>,
) -> ConvergencePlot {
    let config = config.unwrap_or_default();
    let mut plotter = ConvergencePlot::new(config);

    let elapsed = std::time::Duration::from_secs(1);

    for (i, (perf, params)) in performance_history
        .iter()
        .zip(parameter_history.iter())
        .enumerate()
    {
        plotter.add_iteration(
            perf.best_energy,
            perf.constraint_violations.clone(),
            params.clone(),
            elapsed * i as u32,
        );
    }

    plotter
}