use std::time::Duration;
/// Top-level configuration for the meta-learning subsystem.
///
/// Bundles the global learning switches and hyperparameters with the
/// nested configurations for feature extraction, neural architecture
/// search, algorithm portfolio management, and multi-objective
/// optimization.
#[derive(Debug, Clone)]
pub struct MetaLearningConfig {
    /// Enable reuse of knowledge from previously seen problems.
    pub enable_transfer_learning: bool,
    /// Enable few-shot learning (adaptation from few examples).
    pub enable_few_shot_learning: bool,
    /// Capacity of the experience buffer (number of stored entries).
    pub experience_buffer_size: usize,
    /// Learning rate for the meta (outer) optimization loop.
    pub meta_learning_rate: f64,
    /// Number of inner-loop adaptation steps per task.
    pub inner_steps: usize,
    /// Feature-extraction settings.
    pub feature_config: FeatureExtractionConfig,
    /// Neural architecture search settings.
    pub nas_config: NeuralArchitectureSearchConfig,
    /// Algorithm portfolio management settings.
    pub portfolio_config: PortfolioManagementConfig,
    /// Multi-objective optimization settings.
    pub multi_objective_config: MultiObjectiveConfig,
}
impl Default for MetaLearningConfig {
    /// Defaults: transfer and few-shot learning both enabled, a
    /// 10 000-entry experience buffer, meta learning rate 0.001,
    /// 5 inner steps, and default nested configurations.
    fn default() -> Self {
        let feature_config = FeatureExtractionConfig::default();
        let nas_config = NeuralArchitectureSearchConfig::default();
        let portfolio_config = PortfolioManagementConfig::default();
        let multi_objective_config = MultiObjectiveConfig::default();
        Self {
            meta_learning_rate: 0.001,
            inner_steps: 5,
            experience_buffer_size: 10_000,
            enable_transfer_learning: true,
            enable_few_shot_learning: true,
            feature_config,
            nas_config,
            portfolio_config,
            multi_objective_config,
        }
    }
}
/// Controls which problem features are computed and how they are
/// post-processed (selection, dimensionality reduction, normalization).
#[derive(Debug, Clone)]
pub struct FeatureExtractionConfig {
    /// Compute graph-structure features.
    pub enable_graph_features: bool,
    /// Compute statistical summary features.
    pub enable_statistical_features: bool,
    /// Compute spectral features.
    pub enable_spectral_features: bool,
    /// Compute domain-specific features.
    pub enable_domain_features: bool,
    /// Strategy for selecting the most relevant features.
    pub selection_method: FeatureSelectionMethod,
    /// Technique for reducing the feature-space dimensionality.
    pub reduction_method: DimensionalityReduction,
    /// Normalization applied to feature values.
    pub normalization: FeatureNormalization,
}
impl Default for FeatureExtractionConfig {
    /// Defaults: every feature family enabled, automatic-relevance
    /// selection, PCA reduction, and standard scaling.
    fn default() -> Self {
        Self {
            selection_method: FeatureSelectionMethod::AutomaticRelevance,
            reduction_method: DimensionalityReduction::PCA,
            normalization: FeatureNormalization::StandardScaling,
            enable_graph_features: true,
            enable_statistical_features: true,
            enable_spectral_features: true,
            enable_domain_features: true,
        }
    }
}
/// Strategy for choosing which extracted features to keep.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum FeatureSelectionMethod {
    /// Automatic relevance determination.
    AutomaticRelevance,
    /// Rank features by mutual information.
    MutualInformation,
    /// Recursive feature elimination.
    RecursiveElimination,
    /// L1-regularized (LASSO) selection.
    LASSO,
    /// Importance scores derived from a random forest.
    RandomForestImportance,
}
/// Dimensionality-reduction technique applied to extracted features.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum DimensionalityReduction {
    /// Principal component analysis.
    PCA,
    /// Independent component analysis.
    ICA,
    /// t-distributed stochastic neighbor embedding.
    ///
    /// The conventional lowercase-t spelling trips the
    /// `non_camel_case_types` lint; renaming the variant would break
    /// the public API, so the lint is silenced here instead.
    #[allow(non_camel_case_types)]
    tSNE,
    /// Uniform manifold approximation and projection.
    UMAP,
    /// Linear discriminant analysis.
    LDA,
    /// Skip dimensionality reduction entirely.
    None,
}
/// Normalization applied to feature values before use.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum FeatureNormalization {
    /// Zero-mean, unit-variance scaling.
    StandardScaling,
    /// Rescale values into a fixed minimum/maximum range.
    MinMaxScaling,
    /// Outlier-robust scaling (e.g. median/IQR based).
    RobustScaling,
    /// Scale each feature vector to unit norm.
    UnitVector,
    /// Leave features unnormalized.
    None,
}
/// Configuration for neural architecture search (NAS).
#[derive(Debug, Clone)]
pub struct NeuralArchitectureSearchConfig {
    /// Master switch for architecture search.
    pub enable_nas: bool,
    /// The space of candidate architectures to explore.
    pub search_space: SearchSpace,
    /// Algorithm used to explore the search space.
    pub search_strategy: SearchStrategy,
    /// Upper bound on search iterations.
    pub max_iterations: usize,
    /// Conditions under which the search terminates early.
    pub early_stopping: EarlyStoppingCriteria,
    /// Hard resource limits imposed on candidates.
    pub resource_constraints: ResourceConstraints,
}
impl Default for NeuralArchitectureSearchConfig {
    /// Defaults: NAS enabled, differentiable search over the default
    /// space, at most 100 iterations, default early-stopping criteria
    /// and resource constraints.
    fn default() -> Self {
        let search_space = SearchSpace::default();
        let early_stopping = EarlyStoppingCriteria::default();
        let resource_constraints = ResourceConstraints::default();
        Self {
            enable_nas: true,
            max_iterations: 100,
            search_strategy: SearchStrategy::DifferentiableNAS,
            search_space,
            early_stopping,
            resource_constraints,
        }
    }
}
/// Describes the space of candidate network architectures.
#[derive(Debug, Clone)]
pub struct SearchSpace {
    /// Layer types candidates may use.
    pub layer_types: Vec<LayerType>,
    /// (min, max) bounds on network depth — presumably inclusive;
    /// TODO confirm against the search implementation.
    pub num_layers_range: (usize, usize),
    /// Candidate hidden-layer widths.
    pub hidden_dims: Vec<usize>,
    /// Candidate activation functions.
    pub activations: Vec<ActivationFunction>,
    /// Candidate dropout probabilities.
    pub dropout_rates: Vec<f64>,
    /// Whether skip connections are permitted.
    pub skip_connections: bool,
}
impl Default for SearchSpace {
fn default() -> Self {
Self {
layer_types: vec![
LayerType::Dense,
LayerType::LSTM,
LayerType::GRU,
LayerType::Attention,
LayerType::Convolution1D,
],
num_layers_range: (2, 8),
hidden_dims: vec![64, 128, 256, 512],
activations: vec![
ActivationFunction::ReLU,
ActivationFunction::Tanh,
ActivationFunction::Swish,
ActivationFunction::GELU,
],
dropout_rates: vec![0.0, 0.1, 0.2, 0.3],
skip_connections: true,
}
}
}
/// Neural network layer types available to the architecture search.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum LayerType {
    /// Fully connected (dense) layer.
    Dense,
    /// Long short-term memory recurrent layer.
    LSTM,
    /// Gated recurrent unit layer.
    GRU,
    /// Attention layer.
    Attention,
    /// One-dimensional convolution layer.
    Convolution1D,
    /// Normalization layer.
    Normalization,
    /// Residual block.
    ResidualBlock,
}
/// Activation functions available to candidate architectures.
///
/// `Eq` is intentionally absent: two variants carry `f64` parameters.
#[derive(Debug, Clone, PartialEq)]
pub enum ActivationFunction {
    /// Rectified linear unit.
    ReLU,
    /// Hyperbolic tangent.
    Tanh,
    /// Logistic sigmoid.
    Sigmoid,
    /// Swish activation.
    Swish,
    /// Gaussian error linear unit.
    GELU,
    /// Leaky ReLU; the parameter is presumably the negative-slope
    /// coefficient — confirm against the consumer of this config.
    LeakyReLU(f64),
    /// Exponential linear unit; the parameter is presumably alpha —
    /// confirm against the consumer of this config.
    ELU(f64),
}
/// Algorithm used to explore the architecture search space.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SearchStrategy {
    /// Gradient-based differentiable architecture search.
    DifferentiableNAS,
    /// Evolutionary (population-based) search.
    EvolutionarySearch,
    /// Reinforcement-learning-driven search.
    ReinforcementLearning,
    /// Bayesian optimization over architectures.
    BayesianOptimization,
    /// Uniform random sampling of the space.
    RandomSearch,
    /// Progressive (growing-complexity) search.
    ProgressiveSearch,
}
/// Criteria that terminate an architecture search early.
#[derive(Debug, Clone)]
pub struct EarlyStoppingCriteria {
    /// Number of consecutive non-improving iterations tolerated.
    pub patience: usize,
    /// Smallest score change counted as an improvement.
    pub min_improvement: f64,
    /// Hard wall-clock limit for the whole search.
    pub max_runtime: Duration,
    /// Stop immediately once this score is reached, if set.
    pub target_performance: Option<f64>,
}

impl Default for EarlyStoppingCriteria {
    /// Defaults: patience of 10, minimum improvement 0.001, a two-hour
    /// runtime cap, and no target score.
    fn default() -> Self {
        Self {
            patience: 10,
            min_improvement: 0.001,
            // Two hours, expressed in seconds. (Was crammed onto one
            // line with the next field; reformatted per rustfmt.)
            max_runtime: Duration::from_secs(2 * 3600),
            target_performance: None,
        }
    }
}
/// Hard resource limits imposed on candidate models during search.
#[derive(Debug, Clone)]
pub struct ResourceConstraints {
    /// Maximum memory budget; unit is not stated here — the default of
    /// 2048 suggests megabytes, TODO confirm against the consumer.
    pub max_memory: usize,
    /// Maximum wall-clock time allowed for training a candidate.
    pub max_training_time: Duration,
    /// Maximum number of trainable parameters.
    pub max_parameters: usize,
    /// Maximum floating-point operation budget.
    pub max_flops: usize,
}

impl Default for ResourceConstraints {
    /// Defaults: 2048 memory units, ten-minute training cap, one
    /// million parameters, one billion FLOPs.
    fn default() -> Self {
        Self {
            max_memory: 2048,
            // Ten minutes, expressed in seconds. (Was crammed onto one
            // line with the next field; reformatted per rustfmt.)
            max_training_time: Duration::from_secs(10 * 60),
            max_parameters: 1_000_000,
            max_flops: 1_000_000_000,
        }
    }
}
/// Configuration for managing a portfolio of candidate algorithms.
#[derive(Debug, Clone)]
pub struct PortfolioManagementConfig {
    /// Allow the portfolio composition to change over time.
    pub enable_dynamic_portfolio: bool,
    /// Maximum number of algorithms kept in the portfolio.
    pub max_portfolio_size: usize,
    /// Strategy used to pick an algorithm from the portfolio.
    pub selection_strategy: AlgorithmSelectionStrategy,
    /// Time window over which algorithm performance is evaluated.
    pub evaluation_window: Duration,
    /// Diversity requirements for portfolio membership.
    pub diversity_criteria: DiversityCriteria,
}
impl Default for PortfolioManagementConfig {
    /// Defaults: dynamic portfolio of at most 10 algorithms, selected
    /// by a multi-armed bandit, evaluated over a 24-hour window.
    fn default() -> Self {
        Self {
            enable_dynamic_portfolio: true,
            max_portfolio_size: 10,
            selection_strategy: AlgorithmSelectionStrategy::MultiArmedBandit,
            // 24 hours, expressed in seconds. (Was crammed onto one
            // line with the next field; reformatted per rustfmt.)
            evaluation_window: Duration::from_secs(24 * 3600),
            diversity_criteria: DiversityCriteria::default(),
        }
    }
}
/// Strategy for selecting which portfolio algorithm to run.
///
/// `Eq` is intentionally absent: `EpsilonGreedy` carries an `f64`.
#[derive(Debug, Clone, PartialEq)]
pub enum AlgorithmSelectionStrategy {
    /// Classic multi-armed-bandit selection.
    MultiArmedBandit,
    /// Upper-confidence-bound selection.
    UpperConfidenceBound,
    /// Thompson (posterior) sampling.
    ThompsonSampling,
    /// Epsilon-greedy; the parameter is presumably the exploration
    /// rate epsilon — confirm against the consumer of this config.
    EpsilonGreedy(f64),
    /// Selection informed by collaborative filtering.
    CollaborativeFiltering,
    /// Selection driven by the meta-learner itself.
    MetaLearningBased,
}
/// Diversity requirements for algorithms admitted to the portfolio.
#[derive(Debug, Clone)]
pub struct DiversityCriteria {
    /// Minimum required diversity in performance profiles.
    pub min_performance_diversity: f64,
    /// Minimum required diversity between the algorithms themselves.
    pub min_algorithmic_diversity: f64,
    /// Distance/divergence measure used to quantify diversity.
    pub diversity_method: DiversityMethod,
}
impl Default for DiversityCriteria {
    /// Defaults: 0.1 performance diversity, 0.2 algorithmic diversity,
    /// measured with Kullback-Leibler divergence.
    fn default() -> Self {
        let diversity_method = DiversityMethod::KullbackLeibler;
        Self {
            diversity_method,
            min_performance_diversity: 0.1,
            min_algorithmic_diversity: 0.2,
        }
    }
}
/// Measure used to quantify diversity between algorithms.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum DiversityMethod {
    /// Kullback-Leibler divergence.
    KullbackLeibler,
    /// Jensen-Shannon divergence.
    JensenShannon,
    /// Cosine distance.
    CosineDistance,
    /// Euclidean distance.
    EuclideanDistance,
    /// Hamming distance.
    HammingDistance,
}
/// Configuration for multi-objective optimization.
#[derive(Debug, Clone)]
pub struct MultiObjectiveConfig {
    /// Master switch for multi-objective mode.
    pub enable_multi_objective: bool,
    /// The objectives to optimize simultaneously.
    pub objectives: Vec<OptimizationObjective>,
    /// Settings for maintaining the Pareto frontier.
    pub pareto_config: ParetoFrontierConfig,
    /// Method for collapsing multiple objectives into one scalar.
    pub scalarization: ScalarizationMethod,
    /// How constraints are enforced during optimization.
    pub constraint_handling: ConstraintHandling,
}
impl Default for MultiObjectiveConfig {
fn default() -> Self {
Self {
enable_multi_objective: true,
objectives: vec![
OptimizationObjective::SolutionQuality,
OptimizationObjective::Runtime,
OptimizationObjective::ResourceUsage,
],
pareto_config: ParetoFrontierConfig::default(),
scalarization: ScalarizationMethod::WeightedSum,
constraint_handling: ConstraintHandling::PenaltyMethod,
}
}
}
/// An objective the optimizer can pursue.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum OptimizationObjective {
    /// Quality of the produced solution.
    SolutionQuality,
    /// Wall-clock runtime.
    Runtime,
    /// Consumption of compute/memory resources.
    ResourceUsage,
    /// Energy consumed.
    EnergyConsumption,
    /// Robustness of the solution.
    Robustness,
    /// Scalability with problem size.
    Scalability,
    /// A user-defined objective, identified by name.
    Custom(String),
}
/// Settings governing maintenance of the Pareto frontier.
#[derive(Debug, Clone)]
pub struct ParetoFrontierConfig {
    /// Maximum number of points retained on the frontier.
    pub max_frontier_size: usize,
    /// Numeric tolerance when testing dominance between points.
    pub dominance_tolerance: f64,
    /// Strategy for updating the frontier with new points.
    pub update_strategy: FrontierUpdateStrategy,
    /// Weight given to crowding distance — presumably used when
    /// pruning the frontier; confirm against the consumer.
    pub crowding_weight: f64,
}
impl Default for ParetoFrontierConfig {
    /// Defaults: frontier capped at 100 points, 1e-6 dominance
    /// tolerance, non-dominated-sort updates, crowding weight 0.5.
    fn default() -> Self {
        Self {
            update_strategy: FrontierUpdateStrategy::NonDominatedSort,
            max_frontier_size: 100,
            dominance_tolerance: 1e-6,
            crowding_weight: 0.5,
        }
    }
}
/// Strategy for updating the Pareto frontier with new candidates.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum FrontierUpdateStrategy {
    /// Non-dominated sorting.
    NonDominatedSort,
    /// Epsilon-dominance relaxation.
    EpsilonDominance,
    /// Hypervolume-contribution based updates.
    HypervolumeBased,
    /// Updates guided by reference points.
    ReferencePointBased,
}
/// Method for collapsing multiple objectives into a single scalar.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ScalarizationMethod {
    /// Weighted sum of objectives.
    WeightedSum,
    /// Weighted Tchebycheff scalarization.
    WeightedTchebycheff,
    /// Achievement scalarizing function.
    AchievementScalarizing,
    /// Penalty-based boundary intersection.
    PenaltyBoundaryIntersection,
    /// Reference-point based scalarization.
    ReferencePoint,
}
/// Technique for enforcing constraints during optimization.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ConstraintHandling {
    /// Add a penalty term for constraint violations.
    PenaltyMethod,
    /// Barrier (interior-point style) method.
    BarrierMethod,
    /// Lagrangian relaxation.
    LagrangianMethod,
    /// Feasibility rules (prefer feasible solutions).
    FeasibilityRules,
    /// Treat constraints as additional objectives.
    MultiObjectiveConstraint,
}
#[cfg(test)]
mod tests {
    use super::*;

    // Top-level defaults: both learning modes on, 10k-entry buffer.
    #[test]
    fn test_meta_learning_config_default() {
        let config = MetaLearningConfig::default();
        assert!(config.enable_transfer_learning);
        assert!(config.enable_few_shot_learning);
        assert_eq!(config.experience_buffer_size, 10_000);
    }

    // Feature-extraction defaults: graph features enabled, automatic
    // relevance determination as the selection method.
    #[test]
    fn test_feature_extraction_config() {
        let config = FeatureExtractionConfig::default();
        assert!(config.enable_graph_features);
        assert_eq!(
            config.selection_method,
            FeatureSelectionMethod::AutomaticRelevance
        );
    }

    // NAS defaults: enabled, 100 iterations, differentiable search.
    #[test]
    fn test_nas_config() {
        let config = NeuralArchitectureSearchConfig::default();
        assert!(config.enable_nas);
        assert_eq!(config.max_iterations, 100);
        assert_eq!(config.search_strategy, SearchStrategy::DifferentiableNAS);
    }
}