samyama_optimization/algorithms/firefly.rs

use crate::common::{Individual, OptimizationResult, Problem, SolverConfig};
use ndarray::Array1;
use rand::prelude::*;
use rayon::prelude::*;

/// Firefly Algorithm solver (Yang, 2008). Fireflies move toward brighter
/// (lower-fitness) neighbours, with attractiveness decaying with distance.
pub struct FireflySolver {
    pub config: SolverConfig,
    /// Strength of the random walk component.
    pub alpha: f64,
    /// Attractiveness at zero distance.
    pub beta0: f64,
    /// Light absorption coefficient controlling how fast attractiveness decays.
    pub gamma: f64,
}
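
// Each firefly i is pulled toward every brighter firefly j by the canonical
// update rule, which `solve` implements below:
//
//     x_i <- x_i + beta0 * exp(-gamma * r_ij^2) * (x_j - x_i)
//                + alpha * (rand - 0.5) * (upper - lower)
//
// where r_ij is the Euclidean distance between fireflies i and j.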

impl FireflySolver {
    /// Creates a solver with commonly used default parameters
    /// (alpha = 0.2, beta0 = 1.0, gamma = 1.0).
    pub fn new(config: SolverConfig) -> Self {
        Self {
            config,
            alpha: 0.2,
            beta0: 1.0,
            gamma: 1.0,
        }
    }

    pub fn with_params(config: SolverConfig, alpha: f64, beta0: f64, gamma: f64) -> Self {
        Self { config, alpha, beta0, gamma }
    }

    pub fn solve<P: Problem>(&self, problem: &P) -> OptimizationResult {
        let mut rng = thread_rng();
        let dim = problem.dim();
        let (lower, upper) = problem.bounds();

        // Initialize the swarm uniformly at random within the search bounds.
        let mut population: Vec<Individual> = (0..self.config.population_size)
            .map(|_| {
                let mut vars = Array1::zeros(dim);
                for i in 0..dim {
                    vars[i] = rng.gen_range(lower[i]..upper[i]);
                }
                let fitness = problem.fitness(&vars);
                Individual::new(vars, fitness)
            })
            .collect();

        let mut history = Vec::with_capacity(self.config.max_iterations);

        // Index of the current best (lowest-fitness) firefly.
        let mut best_idx = 0;
        for (i, ind) in population.iter().enumerate() {
            if ind.fitness < population[best_idx].fitness {
                best_idx = i;
            }
        }

        for _iter in 0..self.config.max_iterations {
            // Record the incumbent best before this iteration's moves.
            history.push(population[best_idx].fitness);

            // Snapshot the swarm so every move this iteration is computed
            // against the same set of brightness values and positions.
            let old_population = population.clone();
            let pop_size = self.config.population_size;

            // Move each firefly toward every brighter one, in parallel.
            // `Some(new position)` is returned only if firefly `i` moved;
            // rayon's indexed collect preserves the original ordering.
            let new_positions: Vec<Option<Array1<f64>>> = (0..pop_size)
                .into_par_iter()
                .map(|i| {
                    let mut rng = thread_rng();
                    let mut moved = false;
                    let mut new_vars = old_population[i].variables.clone();
                    let fitness_i = old_population[i].fitness;

                    for j in 0..pop_size {
                        if i == j {
                            continue;
                        }

                        let fitness_j = old_population[j].fitness;

                        // Only brighter (lower-fitness) fireflies attract.
                        if fitness_j < fitness_i {
                            moved = true;
                            let vars_j = &old_population[j].variables;

                            // Squared Euclidean distance. Moves accumulate:
                            // each attraction acts on the already-updated
                            // position of firefly i.
                            let mut r_sq = 0.0;
                            for k in 0..dim {
                                let diff = new_vars[k] - vars_j[k];
                                r_sq += diff * diff;
                            }
                            // Attractiveness decays with distance:
                            // beta = beta0 * exp(-gamma * r^2).
                            let beta = self.beta0 * (-self.gamma * r_sq).exp();

                            for k in 0..dim {
                                // Uniform random step centered at zero, scaled
                                // by the width of each dimension's bounds.
                                let random_step = self.alpha
                                    * (rng.gen::<f64>() - 0.5)
                                    * (upper[k] - lower[k]);
                                let move_step = beta * (vars_j[k] - new_vars[k]);

                                new_vars[k] = (new_vars[k] + move_step + random_step)
                                    .clamp(lower[k], upper[k]);
                            }
                        }
                    }

                    if moved {
                        Some(new_vars)
                    } else {
                        None
                    }
                })
                .collect();

            // Re-evaluate fitness only for the fireflies that moved.
            for (i, new_pos) in new_positions.into_iter().enumerate() {
                if let Some(vars) = new_pos {
                    let new_fitness = problem.fitness(&vars);
                    population[i].variables = vars;
                    population[i].fitness = new_fitness;
                }
            }

            // Refresh the best index; the best firefly itself never moves
            // (no strictly brighter neighbour exists), so it cannot get worse.
            for (i, ind) in population.iter().enumerate() {
                if ind.fitness < population[best_idx].fitness {
                    best_idx = i;
                }
            }
        }

        let best_ind = &population[best_idx];

        OptimizationResult {
            best_variables: best_ind.variables.clone(),
            best_fitness: best_ind.fitness,
            history,
        }
    }
}
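
// ---------------------------------------------------------------------------
// Usage sketch (illustrative only). The exact shapes of `Problem` and
// `SolverConfig` live in `crate::common`; the sketch assumes `Problem`
// requires only the three methods called in `solve` (`dim`, `bounds`,
// `fitness`), that `bounds` returns `(Array1<f64>, Array1<f64>)`, and that
// `SolverConfig` exposes exactly the two fields used above. Kept as a
// comment because those assumptions may not match the real definitions.
//
//     struct Sphere;
//
//     impl Problem for Sphere {
//         fn dim(&self) -> usize { 10 }
//         fn bounds(&self) -> (Array1<f64>, Array1<f64>) {
//             (Array1::from_elem(10, -5.0), Array1::from_elem(10, 5.0))
//         }
//         fn fitness(&self, x: &Array1<f64>) -> f64 {
//             x.iter().map(|v| v * v).sum() // minimum 0.0 at the origin
//         }
//     }
//
//     let config = SolverConfig { population_size: 40, max_iterations: 200 };
//     let result = FireflySolver::new(config).solve(&Sphere);
//     assert!(result.best_fitness >= 0.0);
// ---------------------------------------------------------------------------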