sklears_multioutput/optimization/mod.rs
//! Multi-Output Learning Optimization Framework
//!
//! This module provides a comprehensive suite of optimization algorithms for multi-output
//! learning problems. It has been refactored from a monolithic 2697-line file into 5
//! specialized modules for improved maintainability and focused functionality.
//!
//! ## Module Organization
//!
//! - **joint_loss_optimization**: Joint loss optimization with multiple loss function combinations
//! - **multi_objective_optimization**: Genetic algorithm-based Pareto optimization
//! - **scalarization_methods**: Scalarization techniques for multi-objective problems
//! - **nsga2_algorithms**: NSGA-II evolutionary algorithms with advanced operators
//! - **evolutionary_multi_objective**: Complete NSGA-II implementation with SBX crossover and polynomial mutation (see the operator sketch after this list)
//! - **tests**: Comprehensive integration test suite
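//!
//! As a rough illustration of the variation operators named above, the doctest below sketches
//! single-variable SBX crossover and polynomial mutation using only the standard library. The
//! function names, the caller-supplied uniform draw `u`, and the distribution indices are
//! illustrative assumptions for this sketch, not this crate's API.
//!
//! ```
//! // Illustrative sketch only; not this crate's API.
//!
//! /// Simulated binary crossover (SBX) for one decision variable; `u` is a uniform draw in (0, 1).
//! fn sbx_crossover(p1: f64, p2: f64, eta_c: f64, u: f64) -> (f64, f64) {
//!     let beta = if u <= 0.5 {
//!         (2.0 * u).powf(1.0 / (eta_c + 1.0))
//!     } else {
//!         (1.0 / (2.0 * (1.0 - u))).powf(1.0 / (eta_c + 1.0))
//!     };
//!     let c1 = 0.5 * ((1.0 + beta) * p1 + (1.0 - beta) * p2);
//!     let c2 = 0.5 * ((1.0 - beta) * p1 + (1.0 + beta) * p2);
//!     (c1, c2)
//! }
//!
//! /// Polynomial mutation for one decision variable bounded by [lo, hi]; `u` is a uniform draw.
//! fn polynomial_mutation(x: f64, lo: f64, hi: f64, eta_m: f64, u: f64) -> f64 {
//!     let delta = if u < 0.5 {
//!         (2.0 * u).powf(1.0 / (eta_m + 1.0)) - 1.0
//!     } else {
//!         1.0 - (2.0 * (1.0 - u)).powf(1.0 / (eta_m + 1.0))
//!     };
//!     (x + delta * (hi - lo)).clamp(lo, hi)
//! }
//!
//! // SBX children stay centred on the parents: c1 + c2 == p1 + p2.
//! let (c1, c2) = sbx_crossover(1.0, 3.0, 15.0, 0.4);
//! assert!(((c1 + c2) - 4.0).abs() < 1e-12);
//! // A draw of exactly 0.5 produces no perturbation.
//! assert!((polynomial_mutation(0.5, 0.0, 1.0, 20.0, 0.5) - 0.5).abs() < 1e-12);
//! ```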
//!
//! ## Key Features
//!
//! - **Joint Loss Functions**: MSE, MAE, Huber, cross-entropy, and hinge losses
//! - **Loss Combination Strategies**: Sum, weighted sum, max, geometric mean, and adaptive
//! - **Multi-Objective Optimization**: Pareto-optimal solution discovery (dominance test sketched after this list)
//! - **Scalarization Methods**: Weighted sum, epsilon-constraint, and Tchebycheff approaches (sketched after this list)
//! - **Advanced Evolutionary Algorithms**: NSGA-II with SBX crossover and polynomial mutation
//! - **Performance Monitoring**: Convergence tracking with the hypervolume indicator
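//!
//! For minimization objectives, the sketch below illustrates the Pareto-dominance test behind
//! Pareto-optimal solution discovery, together with the weighted-sum and Tchebycheff
//! scalarizations listed above. It uses only the standard library; the function names and the
//! ideal point `z` are illustrative assumptions rather than this crate's API.
//!
//! ```
//! // Illustrative sketch only; not this crate's API.
//!
//! /// Returns true if `a` Pareto-dominates `b`: no worse in every objective, strictly better in one.
//! fn dominates(a: &[f64], b: &[f64]) -> bool {
//!     a.iter().zip(b).all(|(x, y)| x <= y) && a.iter().zip(b).any(|(x, y)| x < y)
//! }
//!
//! /// Weighted-sum scalarization: sum_i w_i * f_i(x).
//! fn weighted_sum(f: &[f64], w: &[f64]) -> f64 {
//!     f.iter().zip(w).map(|(fi, wi)| fi * wi).sum()
//! }
//!
//! /// Tchebycheff scalarization: max_i w_i * |f_i(x) - z_i| for an ideal point `z`.
//! fn tchebycheff(f: &[f64], w: &[f64], z: &[f64]) -> f64 {
//!     f.iter()
//!         .zip(w)
//!         .zip(z)
//!         .map(|((fi, wi), zi)| wi * (fi - zi).abs())
//!         .fold(f64::NEG_INFINITY, f64::max)
//! }
//!
//! assert!(dominates(&[0.2, 0.3], &[0.2, 0.5]));
//! assert!(!dominates(&[0.2, 0.6], &[0.3, 0.5]));
//! assert!((weighted_sum(&[0.8, 0.3], &[0.5, 0.5]) - 0.55).abs() < 1e-12);
//! assert!((tchebycheff(&[0.8, 0.3], &[0.5, 0.5], &[0.0, 0.0]) - 0.4).abs() < 1e-12);
//! ```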

pub mod evolutionary_multi_objective;
pub mod joint_loss_optimization;
pub mod multi_objective_optimization;
pub mod nsga2_algorithms;
pub mod scalarization_methods;

#[allow(non_snake_case)]
#[cfg(test)]
pub mod tests;

// Re-export main public items for backward compatibility
pub use joint_loss_optimization::{
    JointLossConfig, JointLossOptimizer, JointLossOptimizerTrained, LossCombination, LossFunction,
};

pub use multi_objective_optimization::{
    MultiObjectiveConfig, MultiObjectiveOptimizer, MultiObjectiveOptimizerTrained, ParetoSolution,
};

pub use scalarization_methods::{
    ScalarizationConfig, ScalarizationMethod, ScalarizationOptimizer, ScalarizationOptimizerTrained,
};

pub use nsga2_algorithms::{NSGA2Algorithm, NSGA2Config, NSGA2Optimizer, NSGA2OptimizerTrained};

pub use evolutionary_multi_objective::{GenerationStats, Individual, OptimizationResult, NSGAII};