// graphmind_optimization/algorithms/gotlbo.rs
use crate::common::{Individual, OptimizationResult, Problem, SolverConfig};
use ndarray::Array1;
use rand::prelude::*;
use rayon::prelude::*;
5
/// Generalized Opposition-based Teaching-Learning-Based Optimization (GOTLBO)
/// solver. Stateless apart from its configuration; call [`GOTLBOSolver::solve`]
/// to run the algorithm on a [`Problem`].
pub struct GOTLBOSolver {
    // Run parameters; `solve` reads `population_size` and `max_iterations`.
    pub config: SolverConfig,
}
9
10impl GOTLBOSolver {
11 pub fn new(config: SolverConfig) -> Self {
12 Self { config }
13 }
14
15 pub fn solve<P: Problem>(&self, problem: &P) -> OptimizationResult {
16 let mut rng = thread_rng();
17 let dim = problem.dim();
18 let (lower, upper) = problem.bounds();
19
20 let mut population: Vec<Individual> = (0..self.config.population_size)
22 .map(|_| {
23 let mut vars = Array1::zeros(dim);
24 for i in 0..dim {
25 vars[i] = rng.gen_range(lower[i]..upper[i]);
26 }
27 let fitness = problem.fitness(&vars);
28 Individual::new(vars, fitness)
29 })
30 .collect();
31
32 let mut history = Vec::with_capacity(self.config.max_iterations);
33
34 for iter in 0..self.config.max_iterations {
35 if iter % 10 == 0 {
36 }
38 let best_idx = self.find_best(&population);
39 let teacher_vars = population[best_idx].variables.clone();
40 let best_fitness = population[best_idx].fitness;
41 let mean_vars = self.calculate_mean(&population, dim);
42
43 history.push(best_fitness);
44
45 population = population
47 .into_par_iter()
48 .map(|mut ind| {
49 let mut local_rng = thread_rng();
50 let tf: f64 = local_rng.gen_range(1..3) as f64; let mut new_vars = Array1::zeros(dim);
52
53 for j in 0..dim {
55 let r: f64 = local_rng.gen();
56 let delta = r * (teacher_vars[j] - tf * mean_vars[j]);
57 new_vars[j] = (ind.variables[j] + delta).clamp(lower[j], upper[j]);
58 }
59
60 let new_fitness = problem.fitness(&new_vars);
61
62 if new_fitness < ind.fitness {
64 ind.variables = new_vars;
65 ind.fitness = new_fitness;
66 }
67
68 let mut opp_vars = Array1::zeros(dim);
70 for j in 0..dim {
71 opp_vars[j] =
73 (lower[j] + upper[j] - ind.variables[j]).clamp(lower[j], upper[j]);
74 }
75 let opp_fitness = problem.fitness(&opp_vars);
76
77 if opp_fitness < ind.fitness {
78 ind.variables = opp_vars;
79 ind.fitness = opp_fitness;
80 }
81
82 ind
83 })
84 .collect();
85
86 let pop_len = population.len();
88 for i in 0..pop_len {
89 let mut learner_j_idx;
90 loop {
91 learner_j_idx = rng.gen_range(0..pop_len);
92 if learner_j_idx != i {
93 break;
94 }
95 }
96
97 let ind_i = &population[i];
98 let ind_j = &population[learner_j_idx];
99
100 let mut new_vars = Array1::zeros(dim);
101 for k in 0..dim {
102 let r: f64 = rng.gen();
103 let delta = if ind_i.fitness < ind_j.fitness {
104 r * (ind_i.variables[k] - ind_j.variables[k])
105 } else {
106 r * (ind_j.variables[k] - ind_i.variables[k])
107 };
108 new_vars[k] = (ind_i.variables[k] + delta).clamp(lower[k], upper[k]);
109 }
110
111 let new_fitness = problem.fitness(&new_vars);
112 if new_fitness < population[i].fitness {
113 population[i].variables = new_vars;
114 population[i].fitness = new_fitness;
115 }
116
117 let ind_i_curr = &population[i];
119 let mut opp_vars = Array1::zeros(dim);
120 for k in 0..dim {
121 opp_vars[k] =
122 (lower[k] + upper[k] - ind_i_curr.variables[k]).clamp(lower[k], upper[k]);
123 }
124 let opp_fitness = problem.fitness(&opp_vars);
125
126 if opp_fitness < population[i].fitness {
127 population[i].variables = opp_vars;
128 population[i].fitness = opp_fitness;
129 }
130 }
131 }
132
133 let final_best_idx = self.find_best(&population);
134 let final_best = &population[final_best_idx];
135
136 OptimizationResult {
137 best_variables: final_best.variables.clone(),
138 best_fitness: final_best.fitness,
139 history,
140 }
141 }
142
143 fn find_best(&self, population: &[Individual]) -> usize {
144 let mut best_idx = 0;
145 for (i, ind) in population.iter().enumerate() {
146 if ind.fitness < population[best_idx].fitness {
147 best_idx = i;
148 }
149 }
150 best_idx
151 }
152
153 fn calculate_mean(&self, population: &[Individual], dim: usize) -> Array1<f64> {
154 let mut mean = Array1::zeros(dim);
155 for ind in population {
156 mean += &ind.variables;
157 }
158 mean / (population.len() as f64)
159 }
160}