samyama_optimization/algorithms/tlbo.rs

use crate::common::{Individual, OptimizationResult, Problem, SolverConfig};
use ndarray::Array1;
use rand::prelude::*;
use rayon::prelude::*;

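/// Teaching-Learning-Based Optimization (TLBO) solver.
///
/// Each iteration runs a teacher phase (learners move toward the current best
/// individual and away from the population mean, scaled by a random teaching
/// factor) followed by a learner phase (pairwise interaction with a random
/// partner). A candidate move is accepted only when it improves fitness.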
pub struct TLBOSolver {
    pub config: SolverConfig,
}

impl TLBOSolver {
    pub fn new(config: SolverConfig) -> Self {
        Self { config }
    }

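    /// Runs TLBO for `config.max_iterations` iterations and returns the best
    /// solution found, together with the per-iteration best-fitness history.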
    pub fn solve<P: Problem + Sync>(&self, problem: &P) -> OptimizationResult {
        // `P: Sync` lets the problem be shared across Rayon worker threads in
        // the parallel teacher phase below.
        let mut rng = thread_rng();
        let dim = problem.dim();
        let (lower, upper) = problem.bounds();

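        // Randomly initialize the population uniformly within the per-dimension bounds.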
        let mut population: Vec<Individual> = (0..self.config.population_size)
            .map(|_| {
                let mut vars = Array1::zeros(dim);
                for i in 0..dim {
                    vars[i] = rng.gen_range(lower[i]..upper[i]);
                }
                let fitness = problem.fitness(&vars);
                Individual::new(vars, fitness)
            })
            .collect();

        let mut history = Vec::with_capacity(self.config.max_iterations);

        for iter in 0..self.config.max_iterations {
            if iter % 10 == 0 {
                println!("TLBO Solver: Iteration {}/{}", iter, self.config.max_iterations);
            }
            let best_idx = self.find_best(&population);
            let teacher_vars = population[best_idx].variables.clone();
            let best_fitness = population[best_idx].fitness;
            let mean_vars = self.calculate_mean(&population, dim);

            history.push(best_fitness);

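            // Teacher phase: each learner moves toward the teacher (current best)
            // and away from TF times the population mean. Candidates are evaluated
            // in parallel with Rayon and kept only if they improve fitness.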
            population = population
                .into_par_iter()
                .map(|mut ind| {
                    let mut local_rng = thread_rng();
                    // Teaching factor TF is drawn uniformly from {1, 2}.
                    let tf: f64 = local_rng.gen_range(1..3) as f64;
                    let mut new_vars = Array1::zeros(dim);

                    for j in 0..dim {
                        let r: f64 = local_rng.gen();
                        let delta = r * (teacher_vars[j] - tf * mean_vars[j]);
                        new_vars[j] = (ind.variables[j] + delta).clamp(lower[j], upper[j]);
                    }

                    let new_fitness = problem.fitness(&new_vars);
                    if new_fitness < ind.fitness {
                        ind.variables = new_vars;
                        ind.fitness = new_fitness;
                    }
                    ind
                })
                .collect();

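            // Learner phase: each learner interacts with a randomly chosen distinct
            // partner, moving toward a better partner and away from a worse one.
            // This phase mutates the population in place, so it runs sequentially.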
            let pop_len = population.len();
            for i in 0..pop_len {
                // Pick a learning partner j distinct from i.
                let mut learner_j_idx;
                loop {
                    learner_j_idx = rng.gen_range(0..pop_len);
                    if learner_j_idx != i { break; }
                }

                let ind_i = &population[i];
                let ind_j = &population[learner_j_idx];

                let mut new_vars = Array1::zeros(dim);
                for k in 0..dim {
                    let r: f64 = rng.gen();
                    let delta = if ind_i.fitness < ind_j.fitness {
                        r * (ind_i.variables[k] - ind_j.variables[k])
                    } else {
                        r * (ind_j.variables[k] - ind_i.variables[k])
                    };
                    new_vars[k] = (ind_i.variables[k] + delta).clamp(lower[k], upper[k]);
                }

                let new_fitness = problem.fitness(&new_vars);
                if new_fitness < population[i].fitness {
                    population[i].variables = new_vars;
                    population[i].fitness = new_fitness;
                }
            }
        }

        let final_best_idx = self.find_best(&population);
        let final_best = &population[final_best_idx];

        OptimizationResult {
            best_variables: final_best.variables.clone(),
            best_fitness: final_best.fitness,
            history,
        }
    }

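    /// Returns the index of the individual with the lowest (best) fitness.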
    fn find_best(&self, population: &[Individual]) -> usize {
        let mut best_idx = 0;
        for (i, ind) in population.iter().enumerate() {
            if ind.fitness < population[best_idx].fitness {
                best_idx = i;
            }
        }
        best_idx
    }

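    /// Component-wise mean of all individuals' variables.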
    fn calculate_mean(&self, population: &[Individual], dim: usize) -> Array1<f64> {
        let mut mean = Array1::zeros(dim);
        for ind in population {
            mean += &ind.variables;
        }
        mean / (population.len() as f64)
    }
}