// quantrs2_anneal/meta_learning_optimization/config.rs

//! Configuration types for meta-learning optimization

use std::time::Duration;

/// Meta-learning optimization engine configuration.
///
/// Top-level settings bundle: toggles for transfer / few-shot learning,
/// meta-update hyperparameters, and the sub-configurations for feature
/// extraction, neural architecture search, portfolio management, and
/// multi-objective optimization.
#[derive(Debug, Clone)]
pub struct MetaLearningConfig {
    /// Enable transfer learning
    pub enable_transfer_learning: bool,
    /// Enable few-shot learning
    pub enable_few_shot_learning: bool,
    /// Experience buffer size (number of stored experiences)
    pub experience_buffer_size: usize,
    /// Learning rate for meta-updates
    pub meta_learning_rate: f64,
    /// Number of inner optimization steps
    pub inner_steps: usize,
    /// Feature extraction configuration
    pub feature_config: FeatureExtractionConfig,
    /// Neural architecture search settings
    pub nas_config: NeuralArchitectureSearchConfig,
    /// Portfolio management settings
    pub portfolio_config: PortfolioManagementConfig,
    /// Multi-objective optimization settings
    pub multi_objective_config: MultiObjectiveConfig,
}

28impl Default for MetaLearningConfig {
29    fn default() -> Self {
30        Self {
31            enable_transfer_learning: true,
32            enable_few_shot_learning: true,
33            experience_buffer_size: 10_000,
34            meta_learning_rate: 0.001,
35            inner_steps: 5,
36            feature_config: FeatureExtractionConfig::default(),
37            nas_config: NeuralArchitectureSearchConfig::default(),
38            portfolio_config: PortfolioManagementConfig::default(),
39            multi_objective_config: MultiObjectiveConfig::default(),
40        }
41    }
42}
43
/// Feature extraction configuration.
///
/// Controls which feature families are enabled and how the extracted
/// features are selected, dimensionality-reduced, and normalized.
#[derive(Debug, Clone)]
pub struct FeatureExtractionConfig {
    /// Enable graph-based features
    pub enable_graph_features: bool,
    /// Enable statistical features
    pub enable_statistical_features: bool,
    /// Enable spectral features
    pub enable_spectral_features: bool,
    /// Enable domain-specific features
    pub enable_domain_features: bool,
    /// Feature selection method
    pub selection_method: FeatureSelectionMethod,
    /// Dimensionality reduction method
    pub reduction_method: DimensionalityReduction,
    /// Feature normalization
    pub normalization: FeatureNormalization,
}

63impl Default for FeatureExtractionConfig {
64    fn default() -> Self {
65        Self {
66            enable_graph_features: true,
67            enable_statistical_features: true,
68            enable_spectral_features: true,
69            enable_domain_features: true,
70            selection_method: FeatureSelectionMethod::AutomaticRelevance,
71            reduction_method: DimensionalityReduction::PCA,
72            normalization: FeatureNormalization::StandardScaling,
73        }
74    }
75}
76
/// Feature selection methods.
///
/// Fieldless value enum; `Copy` and `Hash` are derived so it can be
/// passed by value and used as a map/set key.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum FeatureSelectionMethod {
    /// Automatic relevance determination
    AutomaticRelevance,
    /// Mutual information
    MutualInformation,
    /// Recursive feature elimination
    RecursiveElimination,
    /// LASSO regularization
    LASSO,
    /// Random forest importance
    RandomForestImportance,
}

/// Dimensionality reduction methods.
///
/// Fieldless value enum; `Copy` and `Hash` derived for cheap passing
/// and keyed lookups.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum DimensionalityReduction {
    /// Principal Component Analysis
    PCA,
    /// Independent Component Analysis
    ICA,
    /// t-Distributed Stochastic Neighbor Embedding
    // Name kept for backward compatibility; renaming to camel case would
    // break existing callers, so the lint is silenced instead.
    #[allow(non_camel_case_types)]
    tSNE,
    /// Uniform Manifold Approximation and Projection
    UMAP,
    /// Linear Discriminant Analysis
    LDA,
    /// No reduction
    None,
}

/// Feature normalization methods.
///
/// Fieldless value enum; `Copy` and `Hash` derived so it can be passed
/// by value and used as a set/map key.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum FeatureNormalization {
    /// Standard scaling (z-score)
    StandardScaling,
    /// Min-max scaling
    MinMaxScaling,
    /// Robust scaling
    RobustScaling,
    /// Unit vector scaling
    UnitVector,
    /// No normalization
    None,
}

/// Neural Architecture Search configuration.
///
/// Bundles the search space, the strategy used to explore it, and the
/// iteration / stopping / resource limits applied during search.
#[derive(Debug, Clone)]
pub struct NeuralArchitectureSearchConfig {
    /// Enable NAS
    pub enable_nas: bool,
    /// Search space definition (layer types, widths, activations, ...)
    pub search_space: SearchSpace,
    /// Search strategy
    pub search_strategy: SearchStrategy,
    /// Maximum search iterations
    pub max_iterations: usize,
    /// Early stopping criteria
    pub early_stopping: EarlyStoppingCriteria,
    /// Resource constraints (memory, time, parameters, FLOPs)
    pub resource_constraints: ResourceConstraints,
}

141impl Default for NeuralArchitectureSearchConfig {
142    fn default() -> Self {
143        Self {
144            enable_nas: true,
145            search_space: SearchSpace::default(),
146            search_strategy: SearchStrategy::DifferentiableNAS,
147            max_iterations: 100,
148            early_stopping: EarlyStoppingCriteria::default(),
149            resource_constraints: ResourceConstraints::default(),
150        }
151    }
152}
153
/// Neural architecture search space.
///
/// Enumerates the candidate building blocks (layer kinds, widths,
/// activations, dropout rates) from which architectures are composed.
#[derive(Debug, Clone)]
pub struct SearchSpace {
    /// Layer types to consider
    pub layer_types: Vec<LayerType>,
    /// Number of layers range as (lower, upper) — bound inclusivity is
    /// decided by the consumer; TODO confirm against search code
    pub num_layers_range: (usize, usize),
    /// Hidden dimension options
    pub hidden_dims: Vec<usize>,
    /// Activation functions
    pub activations: Vec<ActivationFunction>,
    /// Dropout rates
    pub dropout_rates: Vec<f64>,
    /// Skip connection options
    pub skip_connections: bool,
}

171impl Default for SearchSpace {
172    fn default() -> Self {
173        Self {
174            layer_types: vec![
175                LayerType::Dense,
176                LayerType::LSTM,
177                LayerType::GRU,
178                LayerType::Attention,
179                LayerType::Convolution1D,
180            ],
181            num_layers_range: (2, 8),
182            hidden_dims: vec![64, 128, 256, 512],
183            activations: vec![
184                ActivationFunction::ReLU,
185                ActivationFunction::Tanh,
186                ActivationFunction::Swish,
187                ActivationFunction::GELU,
188            ],
189            dropout_rates: vec![0.0, 0.1, 0.2, 0.3],
190            skip_connections: true,
191        }
192    }
193}
194
/// Neural network layer types.
///
/// Fieldless value enum; `Copy` and `Hash` derived for cheap passing
/// and use as a map/set key.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum LayerType {
    /// Dense/Linear layer
    Dense,
    /// LSTM layer
    LSTM,
    /// GRU layer
    GRU,
    /// Attention layer
    Attention,
    /// 1D Convolution layer
    Convolution1D,
    /// Normalization layer
    Normalization,
    /// Residual block
    ResidualBlock,
}

/// Activation functions.
///
/// `Copy` is derived (all payloads are `f64`). Variants are documented
/// for consistency with the other enums in this module.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ActivationFunction {
    /// Rectified linear unit
    ReLU,
    /// Hyperbolic tangent
    Tanh,
    /// Logistic sigmoid
    Sigmoid,
    /// Swish (SiLU)
    Swish,
    /// Gaussian error linear unit
    GELU,
    /// Leaky ReLU with the given negative-side slope
    LeakyReLU(f64),
    /// Exponential linear unit with the given alpha parameter
    ELU(f64),
}

/// Search strategies for NAS.
///
/// Fieldless value enum; `Copy` and `Hash` derived for cheap passing
/// and keyed lookups.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum SearchStrategy {
    /// Differentiable NAS
    DifferentiableNAS,
    /// Evolutionary search
    EvolutionarySearch,
    /// Reinforcement learning
    ReinforcementLearning,
    /// Bayesian optimization
    BayesianOptimization,
    /// Random search
    RandomSearch,
    /// Progressive search
    ProgressiveSearch,
}

/// Early stopping criteria.
///
/// `PartialEq` is derived (all fields are comparable) so criteria can be
/// compared in tests and configuration diffs.
#[derive(Debug, Clone, PartialEq)]
pub struct EarlyStoppingCriteria {
    /// Patience (iterations without improvement before stopping)
    pub patience: usize,
    /// Minimum improvement threshold
    pub min_improvement: f64,
    /// Maximum runtime
    pub max_runtime: Duration,
    /// Target performance threshold; `None` disables this criterion
    pub target_performance: Option<f64>,
}

impl Default for EarlyStoppingCriteria {
    /// Defaults: 10-iteration patience, 0.1% minimum improvement,
    /// 2-hour runtime cap, no target-performance cutoff.
    fn default() -> Self {
        Self {
            patience: 10,
            min_improvement: 0.001,
            max_runtime: Duration::from_secs(2 * 3600), // 2 hours
            target_performance: None,
        }
    }
}

/// Resource constraints for NAS.
///
/// `PartialEq`/`Eq` are derived (all fields are integral or `Duration`)
/// so constraint sets can be compared directly.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ResourceConstraints {
    /// Maximum memory usage (MB)
    pub max_memory: usize,
    /// Maximum training time per architecture
    pub max_training_time: Duration,
    /// Maximum model parameters
    pub max_parameters: usize,
    /// Maximum FLOPs
    pub max_flops: usize,
}

impl Default for ResourceConstraints {
    /// Defaults: 2 GB memory, 10 minutes per architecture, 1M
    /// parameters, 1 GFLOP budget.
    fn default() -> Self {
        Self {
            max_memory: 2048,
            max_training_time: Duration::from_secs(10 * 60), // 10 minutes
            max_parameters: 1_000_000,
            max_flops: 1_000_000_000,
        }
    }
}

/// Portfolio management configuration.
///
/// Governs how the set of candidate algorithms is maintained: its size,
/// how members are selected, and the diversity the portfolio must keep.
#[derive(Debug, Clone)]
pub struct PortfolioManagementConfig {
    /// Enable dynamic portfolio
    pub enable_dynamic_portfolio: bool,
    /// Maximum portfolio size
    pub max_portfolio_size: usize,
    /// Algorithm selection strategy
    pub selection_strategy: AlgorithmSelectionStrategy,
    /// Performance evaluation window
    pub evaluation_window: Duration,
    /// Diversity criteria
    pub diversity_criteria: DiversityCriteria,
}

306impl Default for PortfolioManagementConfig {
307    fn default() -> Self {
308        Self {
309            enable_dynamic_portfolio: true,
310            max_portfolio_size: 10,
311            selection_strategy: AlgorithmSelectionStrategy::MultiArmedBandit,
312            evaluation_window: Duration::from_secs(24 * 3600), // 24 hours
313            diversity_criteria: DiversityCriteria::default(),
314        }
315    }
316}
317
/// Algorithm selection strategies.
///
/// `Copy` is derived (the only payload is `f64`); `Eq`/`Hash` are not,
/// because of the floating-point payload in `EpsilonGreedy`.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum AlgorithmSelectionStrategy {
    /// Multi-armed bandit
    MultiArmedBandit,
    /// Upper confidence bound
    UpperConfidenceBound,
    /// Thompson sampling
    ThompsonSampling,
    /// ε-greedy with the given exploration probability
    EpsilonGreedy(f64),
    /// Collaborative filtering
    CollaborativeFiltering,
    /// Meta-learning based
    MetaLearningBased,
}

335/// Diversity criteria for portfolio
336#[derive(Debug, Clone)]
337pub struct DiversityCriteria {
338    /// Minimum performance diversity
339    pub min_performance_diversity: f64,
340    /// Minimum algorithmic diversity
341    pub min_algorithmic_diversity: f64,
342    /// Diversity measurement method
343    pub diversity_method: DiversityMethod,
344}
345
346impl Default for DiversityCriteria {
347    fn default() -> Self {
348        Self {
349            min_performance_diversity: 0.1,
350            min_algorithmic_diversity: 0.2,
351            diversity_method: DiversityMethod::KullbackLeibler,
352        }
353    }
354}
355
/// Diversity measurement methods.
///
/// Fieldless value enum; `Copy` and `Hash` derived for cheap passing
/// and use as a map/set key.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum DiversityMethod {
    /// Kullback-Leibler divergence
    KullbackLeibler,
    /// Jensen-Shannon divergence
    JensenShannon,
    /// Cosine distance
    CosineDistance,
    /// Euclidean distance
    EuclideanDistance,
    /// Hamming distance
    HammingDistance,
}

/// Multi-objective optimization configuration.
///
/// Selects which objectives are optimized and how trade-offs between
/// them are handled: Pareto frontier upkeep, scalarization, and
/// constraint handling.
#[derive(Debug, Clone)]
pub struct MultiObjectiveConfig {
    /// Enable multi-objective optimization
    pub enable_multi_objective: bool,
    /// Objectives to optimize
    pub objectives: Vec<OptimizationObjective>,
    /// Pareto frontier management
    pub pareto_config: ParetoFrontierConfig,
    /// Scalarization method
    pub scalarization: ScalarizationMethod,
    /// Constraint handling
    pub constraint_handling: ConstraintHandling,
}

386impl Default for MultiObjectiveConfig {
387    fn default() -> Self {
388        Self {
389            enable_multi_objective: true,
390            objectives: vec![
391                OptimizationObjective::SolutionQuality,
392                OptimizationObjective::Runtime,
393                OptimizationObjective::ResourceUsage,
394            ],
395            pareto_config: ParetoFrontierConfig::default(),
396            scalarization: ScalarizationMethod::WeightedSum,
397            constraint_handling: ConstraintHandling::PenaltyMethod,
398        }
399    }
400}
401
/// Optimization objectives.
///
/// `Hash` is derived (the `Custom` payload is a `String`, which hashes)
/// so objectives can key maps/sets; `Copy` is not possible because of
/// the owned `String` payload.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum OptimizationObjective {
    /// Solution quality
    SolutionQuality,
    /// Runtime performance
    Runtime,
    /// Resource usage
    ResourceUsage,
    /// Energy consumption
    EnergyConsumption,
    /// Robustness
    Robustness,
    /// Scalability
    Scalability,
    /// Custom objective identified by name
    Custom(String),
}

/// Pareto frontier configuration.
///
/// Controls the size of the maintained frontier, the tolerance used in
/// dominance checks, and how/when the frontier is updated.
#[derive(Debug, Clone)]
pub struct ParetoFrontierConfig {
    /// Maximum frontier size
    pub max_frontier_size: usize,
    /// Dominance tolerance
    pub dominance_tolerance: f64,
    /// Frontier update strategy
    pub update_strategy: FrontierUpdateStrategy,
    /// Crowding distance weight
    pub crowding_weight: f64,
}

434impl Default for ParetoFrontierConfig {
435    fn default() -> Self {
436        Self {
437            max_frontier_size: 100,
438            dominance_tolerance: 1e-6,
439            update_strategy: FrontierUpdateStrategy::NonDominatedSort,
440            crowding_weight: 0.5,
441        }
442    }
443}
444
/// Frontier update strategies.
///
/// Fieldless value enum; `Copy` and `Hash` derived for cheap passing
/// and keyed lookups.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum FrontierUpdateStrategy {
    /// Non-dominated sorting
    NonDominatedSort,
    /// ε-dominance
    EpsilonDominance,
    /// Hypervolume-based
    HypervolumeBased,
    /// Reference point-based
    ReferencePointBased,
}

/// Scalarization methods.
///
/// Fieldless value enum; `Copy` and `Hash` derived for cheap passing
/// and keyed lookups.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ScalarizationMethod {
    /// Weighted sum
    WeightedSum,
    /// Weighted Tchebycheff
    WeightedTchebycheff,
    /// Achievement scalarizing function
    AchievementScalarizing,
    /// Penalty-based boundary intersection
    PenaltyBoundaryIntersection,
    /// Reference point method
    ReferencePoint,
}

/// Constraint handling methods.
///
/// Fieldless value enum; `Copy` and `Hash` derived for cheap passing
/// and keyed lookups.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ConstraintHandling {
    /// Penalty method
    PenaltyMethod,
    /// Barrier method
    BarrierMethod,
    /// Lagrangian method
    LagrangianMethod,
    /// Feasibility rules
    FeasibilityRules,
    /// Multi-objective constraint handling
    MultiObjectiveConstraint,
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_meta_learning_config_default() {
        let config = MetaLearningConfig::default();
        assert!(config.enable_transfer_learning);
        assert!(config.enable_few_shot_learning);
        assert_eq!(config.experience_buffer_size, 10_000);
        assert_eq!(config.inner_steps, 5);
    }

    #[test]
    fn test_feature_extraction_config() {
        let config = FeatureExtractionConfig::default();
        assert!(config.enable_graph_features);
        assert_eq!(
            config.selection_method,
            FeatureSelectionMethod::AutomaticRelevance
        );
        assert_eq!(config.reduction_method, DimensionalityReduction::PCA);
    }

    #[test]
    fn test_nas_config() {
        let config = NeuralArchitectureSearchConfig::default();
        assert!(config.enable_nas);
        assert_eq!(config.max_iterations, 100);
        assert_eq!(config.search_strategy, SearchStrategy::DifferentiableNAS);
    }

    // Previously uncovered defaults below.

    #[test]
    fn test_early_stopping_defaults() {
        let criteria = EarlyStoppingCriteria::default();
        assert_eq!(criteria.patience, 10);
        assert_eq!(criteria.max_runtime, Duration::from_secs(7200));
        assert!(criteria.target_performance.is_none());
    }

    #[test]
    fn test_portfolio_config_defaults() {
        let config = PortfolioManagementConfig::default();
        assert!(config.enable_dynamic_portfolio);
        assert_eq!(config.max_portfolio_size, 10);
        assert_eq!(config.evaluation_window, Duration::from_secs(86_400));
    }

    #[test]
    fn test_search_space_defaults() {
        let space = SearchSpace::default();
        assert_eq!(space.num_layers_range, (2, 8));
        assert!(space.skip_connections);
        assert_eq!(space.hidden_dims, vec![64, 128, 256, 512]);
    }
}