samyama_optimization/algorithms/
cuckoo.rs1use crate::common::{Individual, OptimizationResult, Problem, SolverConfig};
2use ndarray::Array1;
3use rand::prelude::*;
4use rand_distr::Distribution;
5use std::f64::consts::PI;
6
/// Cuckoo Search optimizer.
///
/// Holds the shared solver settings plus the one algorithm-specific
/// parameter, the nest-abandonment probability `pa`.
pub struct CuckooSolver {
    /// Shared solver settings; `solve` reads `population_size` and
    /// `max_iterations` from it.
    pub config: SolverConfig,
    /// Fraction of the worst nests abandoned and rebuilt each iteration
    /// (0.25 by default via `new`; expected to lie in [0, 1]).
    pub pa: f64, }
11
12impl CuckooSolver {
13 pub fn new(config: SolverConfig) -> Self {
14 Self {
15 config,
16 pa: 0.25,
17 }
18 }
19
20 pub fn with_pa(config: SolverConfig, pa: f64) -> Self {
21 Self { config, pa }
22 }
23
24 fn levy_flight(&self, dim: usize) -> Array1<f64> {
26 let beta = 1.5;
28 let sigma_u = ((gamma(1.0 + beta) * (PI * beta / 2.0).sin()) /
29 (gamma((1.0 + beta) / 2.0) * beta * 2.0f64.powf((beta - 1.0) / 2.0)))
30 .powf(1.0 / beta);
31 let sigma_v = 1.0;
32
33 let mut step = Array1::zeros(dim);
34 let mut rng = thread_rng();
35
36 for i in 0..dim {
37 let _u: f64 = rng.gen_range(0.0..1.0) * sigma_u; let u_n: f64 = rand_distr::Normal::new(0.0, sigma_u).unwrap().sample(&mut rng);
40 let v_n: f64 = rand_distr::Normal::new(0.0, sigma_v).unwrap().sample(&mut rng);
41
42 let s = u_n / v_n.abs().powf(1.0 / beta);
43 step[i] = s;
44 }
45 step
46 }
47
48 pub fn solve<P: Problem>(&self, problem: &P) -> OptimizationResult {
49 let mut rng = thread_rng();
50 let dim = problem.dim();
51 let (lower, upper) = problem.bounds();
52
53 let mut nests: Vec<Individual> = (0..self.config.population_size)
55 .map(|_| {
56 let mut vars = Array1::zeros(dim);
57 for i in 0..dim {
58 vars[i] = rng.gen_range(lower[i]..upper[i]);
59 }
60 let fitness = problem.fitness(&vars);
61 Individual::new(vars, fitness)
62 })
63 .collect();
64
65 let mut best_idx = 0;
67 for i in 1..self.config.population_size {
68 if nests[i].fitness < nests[best_idx].fitness {
69 best_idx = i;
70 }
71 }
72 let mut best_ind = nests[best_idx].clone();
73
74 let mut history = Vec::with_capacity(self.config.max_iterations);
75
76 for _iter in 0..self.config.max_iterations {
77 history.push(best_ind.fitness);
78
79 for i in 0..self.config.population_size {
81 let step_size = 0.01; let levy = self.levy_flight(dim);
83 let current_vars = &nests[i].variables;
84
85 let mut new_vars = Array1::zeros(dim);
86 for j in 0..dim {
87 let diff = current_vars[j] - best_ind.variables[j];
92 let _delta = step_size * levy[j] * (if diff.abs() > 1e-6 { diff } else { 1.0 }); let delta_simple = step_size * levy[j] * (upper[j] - lower[j]);
97
98 new_vars[j] = (current_vars[j] + delta_simple).clamp(lower[j], upper[j]);
99 }
100
101 let new_fitness = problem.fitness(&new_vars);
102
103 let j = rng.gen_range(0..self.config.population_size);
106 if new_fitness < nests[j].fitness {
107 nests[j] = Individual::new(new_vars, new_fitness);
108 if nests[j].fitness < best_ind.fitness {
109 best_ind = nests[j].clone();
110 }
111 }
112 }
113
114 nests.sort_by(|a, b| a.fitness.partial_cmp(&b.fitness).unwrap());
118
119 let num_abandon = (self.config.population_size as f64 * self.pa) as usize;
121 let start_abandon_idx = self.config.population_size - num_abandon;
122
123 for i in start_abandon_idx..self.config.population_size {
124 let mut vars = Array1::zeros(dim);
127
128 let r1 = rng.gen_range(0..self.config.population_size);
130 let r2 = rng.gen_range(0..self.config.population_size);
131
132 for j in 0..dim {
133 let step = rng.gen::<f64>() * (nests[r1].variables[j] - nests[r2].variables[j]);
134 vars[j] = (nests[i].variables[j] + step).clamp(lower[j], upper[j]);
135 }
136
137 let fitness = problem.fitness(&vars);
138 nests[i] = Individual::new(vars, fitness);
139
140 if fitness < best_ind.fitness {
141 best_ind = nests[i].clone();
142 }
143 }
144 }
145
146 OptimizationResult {
147 best_variables: best_ind.variables,
148 best_fitness: best_ind.fitness,
149 history,
150 }
151 }
152}
153
/// Gamma function Γ(x) via the Lanczos approximation (g = 7, 9 coefficients),
/// accurate to roughly 15 significant digits for real arguments.
///
/// The previous Stirling-based version was ~6% off at x = 1.25 — exactly the
/// argument `levy_flight` passes as `gamma((1 + beta) / 2)` — so the Lévy
/// step scale `sigma_u` was noticeably wrong. Negative non-integer arguments
/// are handled through the reflection formula; at non-positive integers the
/// result is non-finite, matching the poles of Γ.
fn gamma(x: f64) -> f64 {
    const G: f64 = 7.0;
    const COEF: [f64; 9] = [
        0.99999999999980993,
        676.5203681218851,
        -1259.1392167224028,
        771.32342877765313,
        -176.61502916214059,
        12.507343278686905,
        -0.13857109526572012,
        9.9843695780195716e-6,
        1.5056327351493116e-7,
    ];

    if x < 0.5 {
        // Reflection formula: Γ(x) Γ(1-x) = π / sin(πx).
        PI / ((PI * x).sin() * gamma(1.0 - x))
    } else {
        let x = x - 1.0;
        let mut acc = COEF[0];
        for (i, &c) in COEF.iter().enumerate().skip(1) {
            acc += c / (x + i as f64);
        }
        let t = x + G + 0.5;
        (2.0 * PI).sqrt() * t.powf(x + 0.5) * (-t).exp() * acc
    }
}