graphmind_optimization/algorithms/
fpa.rs1use crate::common::{Individual, OptimizationResult, Problem, SolverConfig};
2use ndarray::Array1;
3use rand::prelude::*;
4use rand_distr::Distribution;
5use std::f64::consts::PI;
6
7pub struct FPASolver {
8 pub config: SolverConfig,
9 pub p: f64, }
11
12impl FPASolver {
13 pub fn new(config: SolverConfig) -> Self {
14 Self { config, p: 0.8 }
15 }
16
17 fn levy_flight(&self, dim: usize) -> Array1<f64> {
19 let beta = 1.5;
20 let sigma_u = ((gamma(1.0 + beta) * (PI * beta / 2.0).sin())
21 / (gamma((1.0 + beta) / 2.0) * beta * 2.0f64.powf((beta - 1.0) / 2.0)))
22 .powf(1.0 / beta);
23 let sigma_v = 1.0;
24
25 let mut step = Array1::zeros(dim);
26 let mut rng = thread_rng();
27
28 for i in 0..dim {
29 let u_n: f64 = rand_distr::Normal::new(0.0, sigma_u)
30 .unwrap()
31 .sample(&mut rng);
32 let v_n: f64 = rand_distr::Normal::new(0.0, sigma_v)
33 .unwrap()
34 .sample(&mut rng);
35 let s = u_n / v_n.abs().powf(1.0 / beta);
36 step[i] = s;
37 }
38 step
39 }
40
41 pub fn solve<P: Problem>(&self, problem: &P) -> OptimizationResult {
42 let mut rng = thread_rng();
43 let dim = problem.dim();
44 let (lower, upper) = problem.bounds();
45 let pop_size = self.config.population_size;
46
47 let mut population: Vec<Individual> = (0..pop_size)
49 .map(|_| {
50 let mut vars = Array1::zeros(dim);
51 for i in 0..dim {
52 vars[i] = rng.gen_range(lower[i]..upper[i]);
53 }
54 let fitness = problem.fitness(&vars);
55 Individual::new(vars, fitness)
56 })
57 .collect();
58
59 let mut best_idx = 0;
61 for i in 1..pop_size {
62 if population[i].fitness < population[best_idx].fitness {
63 best_idx = i;
64 }
65 }
66 let mut best_vars = population[best_idx].variables.clone();
67 let mut best_fitness = population[best_idx].fitness;
68
69 let mut history = Vec::with_capacity(self.config.max_iterations);
70
71 for _iter in 0..self.config.max_iterations {
72 history.push(best_fitness);
73
74 for i in 0..pop_size {
75 let mut new_vars = population[i].variables.clone();
76
77 if rng.gen::<f64>() < self.p {
78 let levy = self.levy_flight(dim);
80 for j in 0..dim {
81 let step = levy[j] * (population[i].variables[j] - best_vars[j]);
82 new_vars[j] = (population[i].variables[j] + step).clamp(lower[j], upper[j]);
83 }
84 } else {
85 let mut j_idx;
87 let mut k_idx;
88 loop {
89 j_idx = rng.gen_range(0..pop_size);
90 k_idx = rng.gen_range(0..pop_size);
91 if j_idx != k_idx {
92 break;
93 }
94 }
95
96 let epsilon: f64 = rng.gen();
97 for j in 0..dim {
98 let step = epsilon
99 * (population[j_idx].variables[j] - population[k_idx].variables[j]);
100 new_vars[j] = (population[i].variables[j] + step).clamp(lower[j], upper[j]);
101 }
102 }
103
104 let new_fitness = problem.fitness(&new_vars);
105 if new_fitness < population[i].fitness {
106 population[i] = Individual::new(new_vars, new_fitness);
107 if new_fitness < best_fitness {
108 best_fitness = new_fitness;
109 best_vars = population[i].variables.clone();
110 }
111 }
112 }
113 }
114
115 OptimizationResult {
116 best_variables: best_vars,
117 best_fitness,
118 history,
119 }
120 }
121}
122
/// Gamma function via the Lanczos approximation (g = 7, n = 9).
///
/// Accurate to ~15 significant digits for all positive arguments,
/// replacing the raw Stirling formula `sqrt(2π/x)·(x/e)^x`, which was
/// off by ~6% at x = 1.25 — a value `levy_flight` actually feeds in via
/// `gamma((1 + beta) / 2)` with beta = 1.5. The previous hard-coded
/// patches for 1.5 and 2.5 are no longer needed.
fn gamma(x: f64) -> f64 {
    const G: f64 = 7.0;
    const COEF: [f64; 9] = [
        0.999_999_999_999_809_93,
        676.520_368_121_885_1,
        -1_259.139_216_722_402_8,
        771.323_428_777_653_13,
        -176.615_029_162_140_59,
        12.507_343_278_686_905,
        -0.138_571_095_265_720_12,
        9.984_369_578_019_571_6e-6,
        1.505_632_735_149_311_6e-7,
    ];
    if x < 0.5 {
        // Reflection formula extends the approximation below 0.5.
        PI / ((PI * x).sin() * gamma(1.0 - x))
    } else {
        let x = x - 1.0;
        let mut acc = COEF[0];
        for (i, &c) in COEF.iter().enumerate().skip(1) {
            acc += c / (x + i as f64);
        }
        let t = x + G + 0.5;
        (2.0 * PI).sqrt() * t.powf(x + 0.5) * (-t).exp() * acc
    }
}