samyama_optimization/algorithms/
fpa.rs1use crate::common::{Individual, OptimizationResult, Problem, SolverConfig};
2use ndarray::Array1;
3use rand::prelude::*;
4use rand_distr::Distribution;
5use std::f64::consts::PI;
6
7pub struct FPASolver {
8 pub config: SolverConfig,
9 pub p: f64, }
11
12impl FPASolver {
13 pub fn new(config: SolverConfig) -> Self {
14 Self {
15 config,
16 p: 0.8,
17 }
18 }
19
20 fn levy_flight(&self, dim: usize) -> Array1<f64> {
22 let beta = 1.5;
23 let sigma_u = ((gamma(1.0 + beta) * (PI * beta / 2.0).sin()) /
24 (gamma((1.0 + beta) / 2.0) * beta * 2.0f64.powf((beta - 1.0) / 2.0)))
25 .powf(1.0 / beta);
26 let sigma_v = 1.0;
27
28 let mut step = Array1::zeros(dim);
29 let mut rng = thread_rng();
30
31 for i in 0..dim {
32 let u_n: f64 = rand_distr::Normal::new(0.0, sigma_u).unwrap().sample(&mut rng);
33 let v_n: f64 = rand_distr::Normal::new(0.0, sigma_v).unwrap().sample(&mut rng);
34 let s = u_n / v_n.abs().powf(1.0 / beta);
35 step[i] = s;
36 }
37 step
38 }
39
40 pub fn solve<P: Problem>(&self, problem: &P) -> OptimizationResult {
41 let mut rng = thread_rng();
42 let dim = problem.dim();
43 let (lower, upper) = problem.bounds();
44 let pop_size = self.config.population_size;
45
46 let mut population: Vec<Individual> = (0..pop_size)
48 .map(|_| {
49 let mut vars = Array1::zeros(dim);
50 for i in 0..dim {
51 vars[i] = rng.gen_range(lower[i]..upper[i]);
52 }
53 let fitness = problem.fitness(&vars);
54 Individual::new(vars, fitness)
55 })
56 .collect();
57
58 let mut best_idx = 0;
60 for i in 1..pop_size {
61 if population[i].fitness < population[best_idx].fitness {
62 best_idx = i;
63 }
64 }
65 let mut best_vars = population[best_idx].variables.clone();
66 let mut best_fitness = population[best_idx].fitness;
67
68 let mut history = Vec::with_capacity(self.config.max_iterations);
69
70 for _iter in 0..self.config.max_iterations {
71 history.push(best_fitness);
72
73 for i in 0..pop_size {
74 let mut new_vars = population[i].variables.clone();
75
76 if rng.gen::<f64>() < self.p {
77 let levy = self.levy_flight(dim);
79 for j in 0..dim {
80 let step = levy[j] * (population[i].variables[j] - best_vars[j]);
81 new_vars[j] = (population[i].variables[j] + step).clamp(lower[j], upper[j]);
82 }
83 } else {
84 let mut j_idx;
86 let mut k_idx;
87 loop {
88 j_idx = rng.gen_range(0..pop_size);
89 k_idx = rng.gen_range(0..pop_size);
90 if j_idx != k_idx { break; }
91 }
92
93 let epsilon: f64 = rng.gen();
94 for j in 0..dim {
95 let step = epsilon * (population[j_idx].variables[j] - population[k_idx].variables[j]);
96 new_vars[j] = (population[i].variables[j] + step).clamp(lower[j], upper[j]);
97 }
98 }
99
100 let new_fitness = problem.fitness(&new_vars);
101 if new_fitness < population[i].fitness {
102 population[i] = Individual::new(new_vars, new_fitness);
103 if new_fitness < best_fitness {
104 best_fitness = new_fitness;
105 best_vars = population[i].variables.clone();
106 }
107 }
108 }
109 }
110
111 OptimizationResult {
112 best_variables: best_vars,
113 best_fitness,
114 history,
115 }
116 }
117}
118
/// Gamma function via the Lanczos approximation (g = 7, 9 coefficients),
/// accurate to roughly machine precision for real arguments.
///
/// Replaces the bare Stirling formula, whose ~6% error at x = 1.25
/// (a value this file feeds in when computing the Mantegna Levy sigma
/// for beta = 1.5) skewed the step-size scale; the two hard-coded
/// special cases only masked the problem for x = 1.5 and x = 2.5.
///
/// Not defined at non-positive integers (the poles of Gamma); those
/// inputs yield inf/NaN.
fn gamma(x: f64) -> f64 {
    // Lanczos coefficients for g = 7 (standard published set).
    const COEF: [f64; 9] = [
        0.999_999_999_999_809_93,
        676.520_368_121_885_1,
        -1_259.139_216_722_402_8,
        771.323_428_777_653_13,
        -176.615_029_162_140_59,
        12.507_343_278_686_905,
        -0.138_571_095_265_720_12,
        9.984_369_578_019_571_6e-6,
        1.505_632_735_149_311_6e-7,
    ];
    if x < 0.5 {
        // Reflection formula: Gamma(x) * Gamma(1 - x) = pi / sin(pi * x).
        PI / ((PI * x).sin() * gamma(1.0 - x))
    } else {
        let x = x - 1.0;
        let mut acc = COEF[0];
        for (i, &c) in COEF.iter().enumerate().skip(1) {
            acc += c / (x + i as f64);
        }
        let t = x + 7.5; // x + g + 0.5 with g = 7
        (2.0 * PI).sqrt() * t.powf(x + 0.5) * (-t).exp() * acc
    }
}