use std::collections::{HashMap, VecDeque};
use std::time::Instant;

use super::config::{
    ConstraintHandling, MultiObjectiveConfig, OptimizationObjective, ParetoFrontierConfig,
    ScalarizationMethod,
};
use super::feature_extraction::{AlgorithmType, OptimizationConfiguration};

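/// Coordinates multi-objective optimization over candidate configurations:
/// evaluates objectives, maintains a Pareto frontier, scalarizes objective
/// vectors, handles constraints, and selects a final solution.
///
/// A minimal usage sketch (the `candidates` vector is assumed; marked
/// `ignore` because module paths depend on the crate layout):
///
/// ```ignore
/// let mut optimizer = MultiObjectiveOptimizer::new(MultiObjectiveConfig::default());
/// let solutions = optimizer.optimize(candidates)?;
/// let chosen = optimizer.make_decision(None)?;
/// ```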
pub struct MultiObjectiveOptimizer {
    /// Multi-objective optimization settings.
    pub config: MultiObjectiveConfig,
    /// Current set of non-dominated solutions.
    pub pareto_frontier: ParetoFrontier,
    /// Scalarization strategies for collapsing objective vectors to a scalar.
    pub scalarizers: Vec<Scalarizer>,
    /// Constraint-handling strategies.
    pub constraint_handlers: Vec<ConstraintHandler>,
    /// Strategy for picking a single solution from the frontier.
    pub decision_maker: DecisionMaker,
}

impl MultiObjectiveOptimizer {
    #[must_use]
    pub fn new(config: MultiObjectiveConfig) -> Self {
        Self {
            pareto_frontier: ParetoFrontier::new(&config.pareto_config),
            scalarizers: vec![Scalarizer::new(config.scalarization.clone())],
            constraint_handlers: vec![ConstraintHandler::new(config.constraint_handling.clone())],
            decision_maker: DecisionMaker::new(),
            config,
        }
    }

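    /// Evaluates each candidate configuration against all configured
    /// objectives, updates the Pareto frontier, and returns the scored
    /// solutions with dominance ranks and crowding distances attached.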
    pub fn optimize(
        &mut self,
        candidates: Vec<OptimizationConfiguration>,
    ) -> Result<Vec<MultiObjectiveSolution>, String> {
        let mut solutions = Vec::new();

        for (i, candidate) in candidates.iter().enumerate() {
            let objective_values = self.evaluate_objectives(candidate)?;

            let solution = MultiObjectiveSolution {
                id: format!("solution_{i}"),
                objective_values,
                decision_variables: candidate.clone(),
                dominance_rank: 0,
                crowding_distance: 0.0,
            };

            solutions.push(solution);
        }

        self.update_pareto_frontier(&solutions)?;

        self.calculate_dominance_ranks(&mut solutions);
        self.calculate_crowding_distances(&mut solutions);

        Ok(solutions)
    }

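    /// Scores a configuration on each configured objective. Quality,
    /// robustness, and scalability use fixed per-algorithm heuristics;
    /// runtime, resource usage, and energy are negated costs so that every
    /// objective is maximized.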
    fn evaluate_objectives(&self, config: &OptimizationConfiguration) -> Result<Vec<f64>, String> {
        let mut objective_values = Vec::new();

        for objective in &self.config.objectives {
            let value = match objective {
                OptimizationObjective::SolutionQuality => match config.algorithm {
                    AlgorithmType::SimulatedAnnealing => 0.8,
                    AlgorithmType::QuantumAnnealing => 0.9,
                    AlgorithmType::TabuSearch => 0.7,
                    _ => 0.6,
                },
                OptimizationObjective::Runtime => {
                    -config.resources.time.as_secs_f64() / 3600.0
                }
                OptimizationObjective::ResourceUsage => {
                    -(config.resources.memory as f64 / 1024.0 + config.resources.cpu)
                }
                OptimizationObjective::EnergyConsumption => {
                    -(config.resources.cpu * config.resources.time.as_secs_f64() / 3600.0)
                }
                OptimizationObjective::Robustness => match config.algorithm {
                    AlgorithmType::QuantumAnnealing => 0.9,
                    AlgorithmType::SimulatedAnnealing => 0.7,
                    _ => 0.6,
                },
                OptimizationObjective::Scalability => match config.algorithm {
                    AlgorithmType::TabuSearch => 0.9,
                    AlgorithmType::QuantumAnnealing => 0.6,
                    _ => 0.7,
                },
                OptimizationObjective::Custom(_) => 0.5,
            };

            objective_values.push(value);
        }

        Ok(objective_values)
    }

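    /// Inserts non-dominated candidates into the Pareto frontier, removes any
    /// frontier members they dominate, prunes the frontier to its configured
    /// maximum size, and refreshes the frontier statistics.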
    fn update_pareto_frontier(
        &mut self,
        solutions: &[MultiObjectiveSolution],
    ) -> Result<(), String> {
        let mut new_solutions = Vec::new();

        for solution in solutions {
            let mut is_dominated = false;
            let mut dominated_solutions = Vec::new();

            for frontier_solution in &self.pareto_frontier.solutions {
                if self.dominates(
                    &frontier_solution.objective_values,
                    &solution.objective_values,
                ) {
                    is_dominated = true;
                    break;
                } else if self.dominates(
                    &solution.objective_values,
                    &frontier_solution.objective_values,
                ) {
                    dominated_solutions.push(frontier_solution.id.clone());
                }
            }

            if !is_dominated {
                new_solutions.push(solution.clone());

                self.pareto_frontier
                    .solutions
                    .retain(|s| !dominated_solutions.contains(&s.id));
            }
        }

        self.pareto_frontier.solutions.extend(new_solutions);

        if self.pareto_frontier.solutions.len() > self.config.pareto_config.max_frontier_size {
            self.prune_frontier()?;
        }

        self.update_frontier_statistics();

        Ok(())
    }

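    /// Pareto dominance under maximization: `obj1` dominates `obj2` when it is
    /// at least as good in every objective (within the configured tolerance)
    /// and better by more than the tolerance in at least one.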
    fn dominates(&self, obj1: &[f64], obj2: &[f64]) -> bool {
        if obj1.len() != obj2.len() {
            return false;
        }

        let mut at_least_one_better = false;
        for (v1, v2) in obj1.iter().zip(obj2.iter()) {
            if *v1 < *v2 - self.config.pareto_config.dominance_tolerance {
                return false;
            }
            if *v1 > *v2 + self.config.pareto_config.dominance_tolerance {
                at_least_one_better = true;
            }
        }

        at_least_one_better
    }

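    /// Shrinks the frontier to `max_frontier_size`, keeping the solutions with
    /// the largest crowding distances to preserve spread.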
    fn prune_frontier(&mut self) -> Result<(), String> {
        self.calculate_crowding_distances_frontier();

        self.pareto_frontier.solutions.sort_by(|a, b| {
            b.crowding_distance
                .partial_cmp(&a.crowding_distance)
                .unwrap_or(std::cmp::Ordering::Equal)
        });

        self.pareto_frontier
            .solutions
            .truncate(self.config.pareto_config.max_frontier_size);

        Ok(())
    }

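    /// NSGA-II style crowding distance over the frontier: for each objective
    /// the solutions are sorted, boundary solutions get infinite distance, and
    /// interior solutions accumulate the normalized gap between their
    /// neighbors.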
    fn calculate_crowding_distances_frontier(&mut self) {
        let num_solutions = self.pareto_frontier.solutions.len();
        let num_objectives = if let Some(first) = self.pareto_frontier.solutions.first() {
            first.objective_values.len()
        } else {
            return;
        };

        for solution in &mut self.pareto_frontier.solutions {
            solution.crowding_distance = 0.0;
        }

        for obj_idx in 0..num_objectives {
            self.pareto_frontier.solutions.sort_by(|a, b| {
                a.objective_values[obj_idx]
                    .partial_cmp(&b.objective_values[obj_idx])
                    .unwrap_or(std::cmp::Ordering::Equal)
            });

            if num_solutions > 2 {
                self.pareto_frontier.solutions[0].crowding_distance = f64::INFINITY;
                self.pareto_frontier.solutions[num_solutions - 1].crowding_distance = f64::INFINITY;

                let obj_range = self.pareto_frontier.solutions[num_solutions - 1].objective_values
                    [obj_idx]
                    - self.pareto_frontier.solutions[0].objective_values[obj_idx];

                if obj_range > 0.0 {
                    for i in 1..num_solutions - 1 {
                        let distance = (self.pareto_frontier.solutions[i + 1].objective_values
                            [obj_idx]
                            - self.pareto_frontier.solutions[i - 1].objective_values[obj_idx])
                            / obj_range;
                        self.pareto_frontier.solutions[i].crowding_distance += distance;
                    }
                }
            }
        }
    }

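    /// Simplified placeholder: assigns dominance rank 1 to every solution in
    /// the returned set.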
    fn calculate_dominance_ranks(&self, solutions: &mut [MultiObjectiveSolution]) {
        for solution in solutions.iter_mut() {
            solution.dominance_rank = 1;
        }
    }

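    /// Simplified placeholder: uses the solution's index in the returned
    /// vector as its crowding distance.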
    fn calculate_crowding_distances(&self, solutions: &mut [MultiObjectiveSolution]) {
        for (i, solution) in solutions.iter_mut().enumerate() {
            solution.crowding_distance = i as f64;
        }
    }

    fn update_frontier_statistics(&mut self) {
        self.pareto_frontier.statistics.size = self.pareto_frontier.solutions.len();

        // Placeholder metrics: a full implementation would compute hypervolume,
        // spread, convergence, and coverage from the objective values.
        self.pareto_frontier.statistics.hypervolume =
            self.pareto_frontier.solutions.len() as f64 * 0.1;

        if self.pareto_frontier.solutions.len() > 1 {
            self.pareto_frontier.statistics.spread = 1.0;
        } else {
            self.pareto_frontier.statistics.spread = 0.0;
        }

        self.pareto_frontier.statistics.convergence = 0.8;
        self.pareto_frontier.statistics.coverage = 0.9;
    }

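    /// Picks a single solution from the Pareto frontier using the configured
    /// decision strategy and optional user preferences.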
    pub fn make_decision(
        &mut self,
        preferences: Option<UserPreferences>,
    ) -> Result<MultiObjectiveSolution, String> {
        if self.pareto_frontier.solutions.is_empty() {
            return Err("No solutions in Pareto frontier".to_string());
        }

        self.decision_maker
            .make_decision(&self.pareto_frontier.solutions, preferences)
    }

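    /// Collapses a solution's objective vector into a single score using the
    /// first configured scalarizer.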
    pub fn scalarize(
        &self,
        solution: &MultiObjectiveSolution,
        weights: &[f64],
    ) -> Result<f64, String> {
        if let Some(scalarizer) = self.scalarizers.first() {
            scalarizer.scalarize(&solution.objective_values, weights)
        } else {
            Err("No scalarizer available".to_string())
        }
    }
}

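/// The current approximation of the Pareto-optimal set, together with summary
/// statistics and a history of frontier updates.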
#[derive(Debug)]
pub struct ParetoFrontier {
    pub solutions: Vec<MultiObjectiveSolution>,
    pub statistics: FrontierStatistics,
    pub update_history: VecDeque<FrontierUpdate>,
}

impl ParetoFrontier {
    #[must_use]
    pub const fn new(_config: &ParetoFrontierConfig) -> Self {
        // The frontier configuration is currently not stored here.
        Self {
            solutions: Vec::new(),
            statistics: FrontierStatistics {
                size: 0,
                hypervolume: 0.0,
                spread: 0.0,
                convergence: 0.0,
                coverage: 0.0,
            },
            update_history: VecDeque::new(),
        }
    }
}

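/// A candidate configuration together with its objective scores, dominance
/// rank, and crowding distance.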
#[derive(Debug, Clone)]
pub struct MultiObjectiveSolution {
    pub id: String,
    pub objective_values: Vec<f64>,
    pub decision_variables: OptimizationConfiguration,
    pub dominance_rank: usize,
    pub crowding_distance: f64,
}

#[derive(Debug, Clone)]
pub struct FrontierStatistics {
    pub size: usize,
    pub hypervolume: f64,
    pub spread: f64,
    pub convergence: f64,
    pub coverage: f64,
}

#[derive(Debug, Clone)]
pub struct FrontierUpdate {
    pub timestamp: Instant,
    pub solutions_added: Vec<String>,
    pub solutions_removed: Vec<String>,
    pub reason: UpdateReason,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum UpdateReason {
    NewNonDominated,
    DominanceUpdate,
    CrowdingPruning,
    SizeLimitReached,
}

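/// Turns a vector of objective values into a single scalar score using one of
/// the supported scalarization methods.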
#[derive(Debug)]
pub struct Scalarizer {
    pub method: ScalarizationMethod,
    pub reference_point: Option<Vec<f64>>,
    pub weights: Vec<f64>,
}

impl Scalarizer {
    #[must_use]
    pub const fn new(method: ScalarizationMethod) -> Self {
        Self {
            method,
            reference_point: None,
            weights: Vec::new(),
        }
    }

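    /// Scalarizes `objectives` with the given `weights` according to
    /// `self.method`. A minimal weighted-sum sketch, mirroring the unit test
    /// below (marked `ignore` because module paths depend on the crate
    /// layout):
    ///
    /// ```ignore
    /// let scalarizer = Scalarizer::new(ScalarizationMethod::WeightedSum);
    /// let score = scalarizer.scalarize(&[0.8, 0.6, 0.9], &[0.5, 0.3, 0.2])?;
    /// assert!((score - 0.76).abs() < 1e-10);
    /// ```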
    pub fn scalarize(&self, objectives: &[f64], weights: &[f64]) -> Result<f64, String> {
        if objectives.len() != weights.len() {
            return Err("Objectives and weights length mismatch".to_string());
        }

        match &self.method {
            ScalarizationMethod::WeightedSum => Ok(objectives
                .iter()
                .zip(weights.iter())
                .map(|(obj, w)| obj * w)
                .sum()),
            ScalarizationMethod::WeightedTchebycheff => {
                let default_reference = vec![0.0; objectives.len()];
                let reference = self.reference_point.as_ref().unwrap_or(&default_reference);

                let mut max_weighted_diff: f64 = 0.0;
                for ((obj, ref_val), weight) in
                    objectives.iter().zip(reference.iter()).zip(weights.iter())
                {
                    let weighted_diff = weight * (ref_val - obj).abs();
                    max_weighted_diff = max_weighted_diff.max(weighted_diff);
                }
                Ok(max_weighted_diff)
            }
            ScalarizationMethod::AchievementScalarizing => {
                let weighted_sum: f64 = objectives
                    .iter()
                    .zip(weights.iter())
                    .map(|(obj, w)| obj * w)
                    .sum();
                let max_objective: f64 = objectives.iter().fold(0.0_f64, |acc, &obj| acc.max(obj));
                Ok(0.01f64.mul_add(max_objective, weighted_sum))
            }
            ScalarizationMethod::PenaltyBoundaryIntersection => {
                let weighted_sum: f64 = objectives
                    .iter()
                    .zip(weights.iter())
                    .map(|(obj, w)| obj * w)
                    .sum();
                Ok(weighted_sum)
            }
            ScalarizationMethod::ReferencePoint => {
                if let Some(ref_point) = &self.reference_point {
                    let distance: f64 = objectives
                        .iter()
                        .zip(ref_point.iter())
                        .map(|(obj, ref_val)| (obj - ref_val).powi(2))
                        .sum::<f64>()
                        .sqrt();
                    // Negated distance so that closer to the reference point scores higher.
                    Ok(-distance)
                } else {
                    Err("Reference point not set for reference point method".to_string())
                }
            }
        }
    }
}

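/// Applies the configured constraint-handling method to a solution; the
/// current handlers are placeholders that report a zero penalty.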
#[derive(Debug)]
pub struct ConstraintHandler {
    pub method: ConstraintHandling,
    pub constraints: Vec<Constraint>,
}

impl ConstraintHandler {
    #[must_use]
    pub const fn new(method: ConstraintHandling) -> Self {
        Self {
            method,
            constraints: Vec::new(),
        }
    }

    /// Placeholder: every handling method currently reports a zero penalty.
    pub const fn handle_constraints(
        &self,
        _solution: &MultiObjectiveSolution,
    ) -> Result<f64, String> {
        match self.method {
            ConstraintHandling::PenaltyMethod => Ok(0.0),
            ConstraintHandling::BarrierMethod => Ok(0.0),
            ConstraintHandling::LagrangianMethod => Ok(0.0),
            ConstraintHandling::FeasibilityRules => Ok(0.0),
            ConstraintHandling::MultiObjectiveConstraint => Ok(0.0),
        }
    }
}

#[derive(Debug, Clone)]
pub struct Constraint {
    pub id: String,
    pub constraint_type: ConstraintType,
    pub parameters: Vec<f64>,
    pub tolerance: f64,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ConstraintType {
    Equality,
    Inequality,
    Bound,
    Custom(String),
}

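/// Selects a single solution from a set of Pareto-optimal candidates using a
/// decision strategy, optional user preferences, and a bounded decision
/// history.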
#[derive(Debug)]
pub struct DecisionMaker {
    pub strategy: DecisionStrategy,
    pub preferences: Option<UserPreferences>,
    pub decision_history: VecDeque<Decision>,
}

impl DecisionMaker {
    #[must_use]
    pub const fn new() -> Self {
        Self {
            strategy: DecisionStrategy::WeightedSum,
            preferences: None,
            decision_history: VecDeque::new(),
        }
    }

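    /// Applies the current decision strategy to pick one solution, records the
    /// choice in the decision history (capped at 100 entries), and returns a
    /// clone of the selected solution.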
    pub fn make_decision(
        &mut self,
        solutions: &[MultiObjectiveSolution],
        preferences: Option<UserPreferences>,
    ) -> Result<MultiObjectiveSolution, String> {
        if solutions.is_empty() {
            return Err("No solutions to choose from".to_string());
        }

        let selected_solution = match &self.strategy {
            DecisionStrategy::WeightedSum => {
                let weights = if let Some(ref prefs) = preferences {
                    prefs.objective_weights.clone()
                } else {
                    // Default to equal weights across all objectives.
                    vec![
                        1.0 / solutions[0].objective_values.len() as f64;
                        solutions[0].objective_values.len()
                    ]
                };

                solutions
                    .iter()
                    .max_by(|a, b| {
                        let score_a: f64 = a
                            .objective_values
                            .iter()
                            .zip(weights.iter())
                            .map(|(obj, w)| obj * w)
                            .sum();
                        let score_b: f64 = b
                            .objective_values
                            .iter()
                            .zip(weights.iter())
                            .map(|(obj, w)| obj * w)
                            .sum();
                        score_a
                            .partial_cmp(&score_b)
                            .unwrap_or(std::cmp::Ordering::Equal)
                    })
                    .ok_or("Failed to select solution")?
            }
            DecisionStrategy::Lexicographic => {
                // Simplified: orders by the first objective only.
                solutions
                    .iter()
                    .max_by(|a, b| {
                        a.objective_values[0]
                            .partial_cmp(&b.objective_values[0])
                            .unwrap_or(std::cmp::Ordering::Equal)
                    })
                    .ok_or("Failed to select solution")?
            }
            DecisionStrategy::InteractiveMethod => {
                // Simplified: no user interaction, fall back to the first solution.
                &solutions[0]
            }
            DecisionStrategy::GoalProgramming => {
                // Minimizes distance to the ideal point (1.0 in every objective).
                solutions
                    .iter()
                    .min_by(|a, b| {
                        let dist_a: f64 = a
                            .objective_values
                            .iter()
                            .map(|obj| (1.0 - obj).powi(2))
                            .sum::<f64>()
                            .sqrt();
                        let dist_b: f64 = b
                            .objective_values
                            .iter()
                            .map(|obj| (1.0 - obj).powi(2))
                            .sum::<f64>()
                            .sqrt();
                        dist_a
                            .partial_cmp(&dist_b)
                            .unwrap_or(std::cmp::Ordering::Equal)
                    })
                    .ok_or("Failed to select solution")?
            }
            DecisionStrategy::TOPSIS => {
                // Simplified: selects by crowding distance rather than a full
                // TOPSIS computation.
                solutions
                    .iter()
                    .max_by(|a, b| {
                        a.crowding_distance
                            .partial_cmp(&b.crowding_distance)
                            .unwrap_or(std::cmp::Ordering::Equal)
                    })
                    .ok_or("Failed to select solution")?
            }
        };

        let decision = Decision {
            timestamp: Instant::now(),
            selected_solution_id: selected_solution.id.clone(),
            strategy_used: self.strategy.clone(),
            preferences_used: preferences,
            confidence: 0.8,
        };

        self.decision_history.push_back(decision);

        while self.decision_history.len() > 100 {
            self.decision_history.pop_front();
        }

        Ok(selected_solution.clone())
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum DecisionStrategy {
    WeightedSum,
    Lexicographic,
    InteractiveMethod,
    GoalProgramming,
    TOPSIS,
}

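/// User-supplied preference information: per-objective weights, preference
/// functions, and aspiration/reservation levels.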
#[derive(Debug, Clone)]
pub struct UserPreferences {
    pub objective_weights: Vec<f64>,
    pub preference_functions: Vec<PreferenceFunction>,
    pub aspiration_levels: Vec<f64>,
    pub reservation_levels: Vec<f64>,
}

#[derive(Debug, Clone)]
pub struct PreferenceFunction {
    pub function_type: PreferenceFunctionType,
    pub parameters: Vec<f64>,
    pub objective_index: usize,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum PreferenceFunctionType {
    Linear,
    PiecewiseLinear,
    Exponential,
    Gaussian,
    Step,
}

#[derive(Debug, Clone)]
pub struct Decision {
    pub timestamp: Instant,
    pub selected_solution_id: String,
    pub strategy_used: DecisionStrategy,
    pub preferences_used: Option<UserPreferences>,
    pub confidence: f64,
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::meta_learning_optimization::feature_extraction::ResourceAllocation;

    #[test]
    fn test_multi_objective_optimizer_creation() {
        let config = MultiObjectiveConfig::default();
        let optimizer = MultiObjectiveOptimizer::new(config);
        assert!(optimizer.config.enable_multi_objective);
    }

    #[test]
    fn test_pareto_frontier() {
        let config = ParetoFrontierConfig::default();
        let frontier = ParetoFrontier::new(&config);
        assert_eq!(frontier.solutions.len(), 0);
    }

    #[test]
    fn test_scalarizer() {
        let scalarizer = Scalarizer::new(ScalarizationMethod::WeightedSum);
        let objectives = vec![0.8, 0.6, 0.9];
        let weights = vec![0.5, 0.3, 0.2];

        let result = scalarizer.scalarize(&objectives, &weights);
        assert!(result.is_ok());

        // 0.8 * 0.5 + 0.6 * 0.3 + 0.9 * 0.2 = 0.76
        let score = result.expect("scalarize should succeed");
        assert!((score - 0.76).abs() < 1e-10);
    }

    #[test]
    fn test_decision_maker() {
        let mut decision_maker = DecisionMaker::new();

        let solutions = vec![MultiObjectiveSolution {
            id: "sol1".to_string(),
            objective_values: vec![0.8, 0.6],
            decision_variables: OptimizationConfiguration {
                algorithm: AlgorithmType::SimulatedAnnealing,
                hyperparameters: HashMap::new(),
                architecture: None,
                resources: ResourceAllocation {
                    cpu: 1.0,
                    memory: 256,
                    gpu: 0.0,
                    time: std::time::Duration::from_secs(60),
                },
            },
            dominance_rank: 1,
            crowding_distance: 1.0,
        }];

        let result = decision_maker.make_decision(&solutions, None);
        assert!(result.is_ok());
    }
}