//! graphmind_optimization/algorithms/itlbo.rs
//!
//! Improved Teaching-Learning-Based Optimization (ITLBO) solver.
use crate::common::{Individual, OptimizationResult, Problem, SolverConfig};
use ndarray::Array1;
use rand::prelude::*;
use rayon::prelude::*;
6pub struct ITLBOSolver {
7    pub config: SolverConfig,
8    pub elite_size: usize,
9}
10
11impl ITLBOSolver {
12    pub fn new(config: SolverConfig) -> Self {
13        let elite_size = std::cmp::max(1, config.population_size / 10); // 10% elite
14        Self { config, elite_size }
15    }
16
17    pub fn solve<P: Problem>(&self, problem: &P) -> OptimizationResult {
18        let mut rng = thread_rng();
19        let dim = problem.dim();
20        let (lower, upper) = problem.bounds();
21
22        // Initialize population
23        let mut population: Vec<Individual> = (0..self.config.population_size)
24            .map(|_| {
25                let mut vars = Array1::zeros(dim);
26                for i in 0..dim {
27                    vars[i] = rng.gen_range(lower[i]..upper[i]);
28                }
29                let fitness = problem.fitness(&vars);
30                Individual::new(vars, fitness)
31            })
32            .collect();
33
34        let mut history = Vec::with_capacity(self.config.max_iterations);
35
36        for iter in 0..self.config.max_iterations {
37            if iter % 10 == 0 {
38                println!(
39                    "ITLBO Solver: Iteration {}/{}",
40                    iter, self.config.max_iterations
41                );
42            }
43            // Sort to find elites
44            population.sort_by(|a, b| a.fitness.partial_cmp(&b.fitness).unwrap());
45
46            // Save elites
47            let elites: Vec<Individual> =
48                population.iter().take(self.elite_size).cloned().collect();
49
50            let best_fitness = population[0].fitness;
51            let teacher_vars = population[0].variables.clone();
52            let mean_vars = self.calculate_mean(&population, dim);
53
54            history.push(best_fitness);
55
56            // 1. Teacher Phase
57            population = population
58                .into_par_iter()
59                .map(|mut ind| {
60                    let mut local_rng = thread_rng();
61                    // Adaptive TF: Usually between 1 and 2.
62                    let tf: f64 = local_rng.gen_range(1.0..2.0);
63
64                    let mut new_vars = Array1::zeros(dim);
65                    for j in 0..dim {
66                        let r: f64 = local_rng.gen();
67                        let delta = r * (teacher_vars[j] - tf * mean_vars[j]);
68                        new_vars[j] = (ind.variables[j] + delta).clamp(lower[j], upper[j]);
69                    }
70
71                    let new_fitness = problem.fitness(&new_vars);
72                    if new_fitness < ind.fitness {
73                        ind.variables = new_vars;
74                        ind.fitness = new_fitness;
75                    }
76                    ind
77                })
78                .collect();
79
80            // 2. Learner Phase (Enhanced)
81            let pop_len = population.len();
82
83            // Note: Parallel learner phase needs safe random access.
84            // We'll clone the "old" population for reading to allow parallel updates.
85            let old_population = population.clone();
86
87            population = population
88                .into_par_iter()
89                .enumerate()
90                .map(|(i, mut ind)| {
91                    let mut local_rng = thread_rng();
92
93                    let mut learner_j_idx;
94                    loop {
95                        learner_j_idx = local_rng.gen_range(0..pop_len);
96                        if learner_j_idx != i {
97                            break;
98                        }
99                    }
100                    let ind_j = &old_population[learner_j_idx];
101
102                    let mut new_vars = Array1::zeros(dim);
103                    for k in 0..dim {
104                        let r: f64 = local_rng.gen();
105                        let delta = if ind.fitness < ind_j.fitness {
106                            r * (ind.variables[k] - ind_j.variables[k])
107                        } else {
108                            r * (ind_j.variables[k] - ind.variables[k])
109                        };
110                        new_vars[k] = (ind.variables[k] + delta).clamp(lower[k], upper[k]);
111                    }
112
113                    let new_fitness = problem.fitness(&new_vars);
114                    if new_fitness < ind.fitness {
115                        ind.variables = new_vars;
116                        ind.fitness = new_fitness;
117                    }
118                    ind
119                })
120                .collect();
121
122            // 3. Elitism: Replace worst individuals with preserved elites
123            // We need to sort again to find the worst
124            population.sort_by(|a, b| a.fitness.partial_cmp(&b.fitness).unwrap());
125
126            let len = population.len();
127            for k in 0..self.elite_size {
128                // If the elite is better than the worst
129                if elites[k].fitness < population[len - 1 - k].fitness {
130                    population[len - 1 - k] = elites[k].clone();
131                }
132            }
133        }
134
135        let best_idx = 0; // Sorted
136        let final_best = &population[best_idx];
137
138        OptimizationResult {
139            best_variables: final_best.variables.clone(),
140            best_fitness: final_best.fitness,
141            history,
142        }
143    }
144
145    fn calculate_mean(&self, population: &[Individual], dim: usize) -> Array1<f64> {
146        let mut mean = Array1::zeros(dim);
147        for ind in population {
148            mean += &ind.variables;
149        }
150        mean / (population.len() as f64)
151    }
152}