Skip to main content

graphmind_optimization/algorithms/
firefly.rs

1use crate::common::{Individual, OptimizationResult, Problem, SolverConfig};
2use ndarray::Array1;
3use rand::prelude::*;
4use rayon::prelude::*;
5
/// Firefly-algorithm solver for continuous minimization problems.
///
/// Candidate solutions ("fireflies") are attracted toward neighbors with
/// lower fitness; attractiveness decays exponentially with squared distance
/// (see `solve`). Defaults set by `new` are noted next to each field.
pub struct FireflySolver {
    pub config: SolverConfig,
    pub alpha: f64, // Randomization parameter — scales the random walk step (default 0.2)
    pub beta0: f64, // Attractiveness at distance r = 0 (default 1.0)
    pub gamma: f64, // Light absorption coefficient — how fast attractiveness decays (default 1.0)
}
12
13impl FireflySolver {
14    pub fn new(config: SolverConfig) -> Self {
15        Self {
16            config,
17            alpha: 0.2,
18            beta0: 1.0,
19            gamma: 1.0,
20        }
21    }
22
23    pub fn with_params(config: SolverConfig, alpha: f64, beta0: f64, gamma: f64) -> Self {
24        Self {
25            config,
26            alpha,
27            beta0,
28            gamma,
29        }
30    }
31
32    pub fn solve<P: Problem>(&self, problem: &P) -> OptimizationResult {
33        let mut rng = thread_rng();
34        let dim = problem.dim();
35        let (lower, upper) = problem.bounds();
36
37        // Initialize population
38        let mut population: Vec<Individual> = (0..self.config.population_size)
39            .map(|_| {
40                let mut vars = Array1::zeros(dim);
41                for i in 0..dim {
42                    vars[i] = rng.gen_range(lower[i]..upper[i]);
43                }
44                let fitness = problem.fitness(&vars);
45                Individual::new(vars, fitness)
46            })
47            .collect();
48
49        let mut history = Vec::with_capacity(self.config.max_iterations);
50        let mut best_idx = 0;
51
52        // Find initial best
53        for (i, ind) in population.iter().enumerate() {
54            if ind.fitness < population[best_idx].fitness {
55                best_idx = i;
56            }
57        }
58
59        for _iter in 0..self.config.max_iterations {
60            history.push(population[best_idx].fitness);
61
62            // Firefly algorithm loop: move i towards j if j is brighter (better fitness)
63            // Note: Parallelizing this double loop is tricky due to mutable updates.
64            // We'll calculate new positions and then update batch-wise.
65
66            let old_population = population.clone();
67            let pop_size = self.config.population_size;
68
69            // We can parallelize the outer loop (i)
70            let new_positions: Vec<Option<Array1<f64>>> = (0..pop_size)
71                .into_par_iter()
72                .map(|i| {
73                    let mut rng = thread_rng();
74                    let mut moved = false;
75                    let mut new_vars = old_population[i].variables.clone();
76                    let fitness_i = old_population[i].fitness;
77
78                    for (j, old_j) in old_population.iter().enumerate() {
79                        if i == j {
80                            continue;
81                        }
82
83                        let fitness_j = old_j.fitness;
84
85                        // For minimization, "brighter" means lower fitness value
86                        if fitness_j < fitness_i {
87                            moved = true;
88                            let vars_j = &old_j.variables;
89
90                            // Calculate distance r
91                            let mut r_sq = 0.0;
92                            for k in 0..dim {
93                                let diff = new_vars[k] - vars_j[k];
94                                r_sq += diff * diff;
95                            }
96                            // Avoid sqrt if possible, or just use r_sq in exp if formula allows
97                            // Standard: exp(-gamma * r^2)
98
99                            let beta = self.beta0 * (-self.gamma * r_sq).exp();
100
101                            for k in 0..dim {
102                                let random_step =
103                                    self.alpha * (rng.gen::<f64>() - 0.5) * (upper[k] - lower[k]);
104                                let move_step = beta * (vars_j[k] - new_vars[k]);
105
106                                new_vars[k] = (new_vars[k] + move_step + random_step)
107                                    .clamp(lower[k], upper[k]);
108                            }
109                        }
110                    }
111
112                    if moved {
113                        Some(new_vars)
114                    } else {
115                        None
116                    }
117                })
118                .collect();
119
120            // Apply updates
121            for (i, new_pos) in new_positions.into_iter().enumerate() {
122                if let Some(vars) = new_pos {
123                    let new_fitness = problem.fitness(&vars);
124                    // Selection: greedy acceptance? Standard FA moves anyway.
125                    // We'll accept if better or just move.
126                    // Standard FA just moves. But ensuring elitism is good.
127                    // Let's adopt standard: simple update.
128                    // But we keep track of global best.
129                    population[i].variables = vars;
130                    population[i].fitness = new_fitness;
131                }
132            }
133
134            // Update best
135            for (i, ind) in population.iter().enumerate() {
136                if ind.fitness < population[best_idx].fitness {
137                    best_idx = i;
138                }
139            }
140        }
141
142        let best_ind = &population[best_idx];
143
144        OptimizationResult {
145            best_variables: best_ind.variables.clone(),
146            best_fitness: best_ind.fitness,
147            history,
148        }
149    }
150}