quantrs2_anneal/meta_learning_optimization/mod.rs

//! Meta-Learning Optimization Engine for Quantum Annealing Systems
//!
//! This module implements a meta-learning optimization engine that learns from
//! historical optimization experiences to automatically improve performance
//! across different problem types and configurations. It applies machine
//! learning techniques including transfer learning, few-shot learning, and
//! neural architecture search to optimize quantum annealing strategies.
//!
//! Key Features:
//! - Experience-based optimization strategy learning
//! - Transfer learning across problem domains
//! - Adaptive hyperparameter optimization
//! - Neural architecture search for annealing schedules
//! - Few-shot learning for new problem types
//! - Multi-objective optimization with Pareto frontiers
//! - Automated feature engineering and selection
//! - Dynamic algorithm portfolio management
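//!
//! # Example
//!
//! A minimal usage sketch (a non-authoritative illustration; the crate/module
//! path shown here is an assumption based on this file's location):
//!
//! ```rust,ignore
//! use quantrs2_anneal::meta_learning_optimization::{
//!     create_meta_learning_optimizer, MetaLearningConfig, MetaLearningOptimizer,
//! };
//!
//! // Convenience constructor with the default configuration.
//! let optimizer = create_meta_learning_optimizer();
//!
//! // Equivalent explicit construction from a (possibly customized) config.
//! let config = MetaLearningConfig::default();
//! let optimizer = MetaLearningOptimizer::new(config);
//! ```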

pub mod config;
pub mod feature_extraction;
pub mod meta_learning;
pub mod multi_objective;
pub mod neural_architecture_search;
pub mod portfolio_management;
pub mod transfer_learning;

// Re-export main types for public API
pub use config::*;
pub use feature_extraction::{
    AlgorithmType, ArchitectureSpec, ConvergenceMetrics, ExperienceDatabase, FeatureExtractor,
    FeatureVector, OptimizationConfiguration, OptimizationExperience, OptimizationResults,
    ProblemDomain, ProblemFeatures, QualityMetrics, ResourceAllocation, ResourceUsage,
    SuccessMetrics,
};
pub use meta_learning::{
    CrossValidationStrategy, EvaluationMetric, MetaLearner, MetaLearningAlgorithm,
    MetaLearningOptimizer, MetaOptimizationResult, PerformanceEvaluator, StatisticalTest,
    TrainingEpisode,
};
pub use multi_objective::{
    DecisionMaker, FrontierStatistics, FrontierUpdate, MultiObjectiveOptimizer,
    MultiObjectiveSolution, ParetoFrontier, UpdateReason, UserPreferences,
};
pub use neural_architecture_search::{
    ArchitectureCandidate, GenerationMethod, NeuralArchitectureSearch, PerformancePredictor,
    ResourceRequirements, SearchIteration,
};
pub use portfolio_management::{
    Algorithm, AlgorithmPerformanceStats, AlgorithmPortfolio, ApplicabilityConditions,
    GuaranteeType, PerformanceGuarantee, PerformanceRecord, PortfolioComposition,
};
pub use transfer_learning::{
    AdaptationMechanism, DomainCharacteristics, Knowledge, ModelType, SimilarityMethod,
    SimilarityMetric, SourceDomain, TransferLearner, TransferRecord, TransferStrategy,
    TransferableModel,
};

use std::collections::{BTreeMap, HashMap, VecDeque};
use std::sync::{Arc, Mutex, RwLock};
use std::thread;
use std::time::{Duration, Instant};

use crate::applications::{ApplicationError, ApplicationResult};
use crate::ising::{IsingModel, QuboModel};
use crate::simulator::{AnnealingParams, AnnealingResult, QuantumAnnealingSimulator};

/// Recommended optimization strategy
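///
/// A construction sketch with illustrative values; `"TabuSearch"` and the
/// `"initial_temperature"` key are placeholders for this example, not a fixed
/// vocabulary defined by this module.
///
/// ```rust,ignore
/// use std::collections::HashMap;
///
/// let mut hyperparameters = HashMap::new();
/// // Hypothetical hyperparameter, for illustration only.
/// hyperparameters.insert("initial_temperature".to_string(), 10.0);
///
/// let strategy = RecommendedStrategy {
///     algorithm: "SimulatedAnnealing".to_string(),
///     hyperparameters,
///     confidence: 0.8,
///     expected_performance: 0.95,
///     alternatives: vec![AlternativeStrategy {
///         algorithm: "TabuSearch".to_string(),
///         relative_performance: 0.9,
///     }],
/// };
/// ```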
#[derive(Debug, Clone)]
pub struct RecommendedStrategy {
    /// Primary optimization algorithm
    pub algorithm: String,
    /// Hyperparameters
    pub hyperparameters: HashMap<String, f64>,
    /// Confidence score
    pub confidence: f64,
    /// Expected performance
    pub expected_performance: f64,
    /// Alternative strategies
    pub alternatives: Vec<AlternativeStrategy>,
}

/// Alternative strategy option
#[derive(Debug, Clone)]
pub struct AlternativeStrategy {
    /// Algorithm name
    pub algorithm: String,
    /// Relative performance
    pub relative_performance: f64,
}

/// Meta-learning statistics
#[derive(Debug, Clone)]
pub struct MetaLearningStatistics {
    /// Total optimization episodes
    pub total_episodes: usize,
    /// Average improvement over baseline
    pub average_improvement: f64,
    /// Transfer learning success rate
    pub transfer_success_rate: f64,
    /// Feature extraction time
    pub feature_extraction_time: Duration,
    /// Model training time
    pub model_training_time: Duration,
    /// Prediction time
    pub prediction_time: Duration,
}

/// Create an example meta-learning optimizer using the default configuration
#[must_use]
pub fn create_meta_learning_optimizer() -> MetaLearningOptimizer {
    let config = MetaLearningConfig::default();
    MetaLearningOptimizer::new(config)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_meta_learning_optimizer_creation() {
        let optimizer = create_meta_learning_optimizer();
        // Basic creation test
        assert!(optimizer.config.enable_transfer_learning);
    }

    #[test]
    fn test_recommended_strategy() {
        let strategy = RecommendedStrategy {
            algorithm: "SimulatedAnnealing".to_string(),
            hyperparameters: HashMap::new(),
            confidence: 0.8,
            expected_performance: 0.95,
            alternatives: vec![],
        };

        assert_eq!(strategy.algorithm, "SimulatedAnnealing");
        assert_eq!(strategy.confidence, 0.8);
    }
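
    // A construction sketch for `MetaLearningStatistics`; the numeric values
    // below are illustrative placeholders, not reference results.
    #[test]
    fn test_meta_learning_statistics() {
        let stats = MetaLearningStatistics {
            total_episodes: 100,
            average_improvement: 0.15,
            transfer_success_rate: 0.7,
            feature_extraction_time: Duration::from_millis(5),
            model_training_time: Duration::from_secs(2),
            prediction_time: Duration::from_millis(1),
        };

        assert_eq!(stats.total_episodes, 100);
        assert!(stats.transfer_success_rate <= 1.0);
    }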
}