use nalgebra::{allocator::Allocator, DefaultAllocator, Dim, OMatrix, OVector, U1};
use rand::{rngs::StdRng, SeedableRng};
use rayon::prelude::*;
use std::collections::VecDeque;

use crate::utils::alg_conf::de_conf::{DEConf, DEStrategy, MutationType};
use crate::utils::opt_prob::{FloatNumber as FloatNum, OptProb, OptimizationAlgorithm, State};

use crate::algorithms::differential_evolution::{
    archive::Archive,
    bounds::BoundsCache,
    mutation::{Best1Bin, Best2Bin, MutationStrategy, Rand1Bin, Rand2Bin, RandToBest1Bin},
    parameter_adaptation::{
        JADEParameterAdaptation, ParameterAdaptationType, StandardParameterAdaptation,
    },
};

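/// Differential Evolution (DE) optimizer over an `N x D` population matrix.
///
/// `conf` holds the DE configuration, `st` the evolving population, fitness,
/// feasibility flags and incumbent best, and `opt_prob` the problem being
/// optimized (higher fitness is treated as better throughout). `archive`,
/// `success_history`, `parameter_adaptation` and `bounds_cache` support the
/// adaptive machinery, while `seed` drives the per-thread RNGs.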
pub struct DE<T, N, D>
where
    T: FloatNum,
    N: Dim,
    D: Dim,
    OVector<T, D>: Send + Sync,
    OVector<T, N>: Send + Sync,
    OMatrix<T, N, D>: Send + Sync,
    DefaultAllocator: Allocator<D> + Allocator<N> + Allocator<N, D>,
{
    pub conf: DEConf,
    pub st: State<T, N, D>,
    pub opt_prob: OptProb<T, D>,
    pub archive: Archive<T, D>,
    success_history: VecDeque<bool>,
    parameter_adaptation: ParameterAdaptationType,
    bounds_cache: BoundsCache<T, D>,
    seed: u64,
}

impl<T, N, D> DE<T, N, D>
where
    T: FloatNum,
    N: Dim,
    D: Dim,
    OVector<T, D>: Send + Sync,
    OVector<T, N>: Send + Sync,
    OVector<bool, N>: Send + Sync,
    OMatrix<T, N, D>: Send + Sync,
    DefaultAllocator: Allocator<D> + Allocator<N> + Allocator<N, D>,
{
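    /// Builds a DE instance from an initial population.
    ///
    /// The initial individuals are evaluated in parallel, a scan that only
    /// lets feasible individuals displace the incumbent seeds `best_x` and
    /// `best_f`, and the parameter adaptation scheme is chosen from the
    /// configured mutation type (fixed F/CR or JADE).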
    pub fn new(
        conf: DEConf,
        init_pop: OMatrix<T, N, D>,
        opt_prob: OptProb<T, D>,
        seed: u64,
    ) -> Self {
        let population_size = init_pop.nrows();
        let mut fitness = OVector::<T, N>::zeros_generic(N::from_usize(population_size), U1);
        let mut constraints =
            OVector::<bool, N>::from_element_generic(N::from_usize(population_size), U1, true);

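        // Evaluate fitness and feasibility of every initial individual in parallel.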
        let evaluations: Vec<(T, bool)> = (0..population_size)
            .into_par_iter()
            .map(|i| {
                let x = init_pop.row(i).transpose();
                let fit = opt_prob.evaluate(&x);
                let constr = opt_prob.is_feasible(&x);
                (fit, constr)
            })
            .collect();

        for (i, (fit, constr)) in evaluations.into_iter().enumerate() {
            fitness[i] = fit;
            constraints[i] = constr;
        }

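        // Scan for the initial best individual; higher fitness wins, and only
        // feasible candidates may displace the current best.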
        let mut best_idx = 0;
        let mut best_fitness = fitness[0];
        for i in 1..population_size {
            if fitness[i] > best_fitness && constraints[i] {
                best_idx = i;
                best_fitness = fitness[i];
            }
        }

        let archive_size = conf.common.archive_size;
        let success_history_size = conf.common.success_history_size;

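        // Pick the parameter adaptation scheme: JADE when the adaptive config
        // requests it, otherwise standard adaptation with F/CR bounds derived
        // from the configured values.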
        let parameter_adaptation = match &conf.mutation_type {
            MutationType::Standard(standard) => {
                ParameterAdaptationType::Standard(StandardParameterAdaptation::new(
                    standard.f,
                    standard.cr,
                    standard.f * 0.5,
                    standard.f * 1.5,
                    standard.cr * 0.5,
                    standard.cr * 1.5,
                ))
            }
            MutationType::Adaptive(adaptive) => {
                if adaptive.use_jade {
                    ParameterAdaptationType::JADE(Box::new(JADEParameterAdaptation::new(
                        adaptive.memory_size,
                        seed,
                    )))
                } else {
                    ParameterAdaptationType::Standard(StandardParameterAdaptation::new(
                        (adaptive.f_min + adaptive.f_max) / 2.0,
                        (adaptive.cr_min + adaptive.cr_max) / 2.0,
                        adaptive.f_min,
                        adaptive.f_max,
                        adaptive.cr_min,
                        adaptive.cr_max,
                    ))
                }
            }
        };

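        // Iteration counting starts at 1; `best_x`/`best_f` come from the scan above.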
        Self {
            conf,
            st: State {
                pop: init_pop.clone(),
                fitness,
                constraints,
                best_x: init_pop.row(best_idx).transpose(),
                best_f: best_fitness,
                iter: 1,
            },
            opt_prob,
            archive: Archive::new(archive_size, seed),
            success_history: VecDeque::with_capacity(success_history_size),
            parameter_adaptation,
            bounds_cache: BoundsCache::new(init_pop.ncols()),
            seed,
        }
    }

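    /// Feasibility-first selection between a trial and its target.
    ///
    /// A feasible trial always replaces an infeasible target and never the
    /// other way round; when both are feasible the trial must beat the target
    /// fitness by a small epsilon, and when both are infeasible the raw
    /// fitness comparison decides.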
    fn select_trial(
        &self,
        trial_fitness: T,
        trial_constraint: bool,
        current_fitness: T,
        current_constraint: bool,
    ) -> bool {
        match (trial_constraint, current_constraint) {
            (true, true) => {
                let eps = T::from_f64(1e-10).unwrap();
                trial_fitness > current_fitness + eps
            }
            (true, false) => true,
            (false, true) => false,
            (false, false) => trial_fitness > current_fitness,
        }
    }
}

impl<T, N, D> OptimizationAlgorithm<T, N, D> for DE<T, N, D>
where
    T: FloatNum,
    N: Dim,
    D: Dim,
    OVector<T, D>: Send + Sync,
    OVector<T, N>: Send + Sync,
    OVector<bool, N>: Send + Sync,
    OMatrix<T, N, D>: Send + Sync,
    DefaultAllocator: Allocator<D> + Allocator<N> + Allocator<N, D> + Allocator<U1, D>,
{
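    /// Runs one DE generation: generate a trial per individual in parallel,
    /// apply feasibility-aware selection, update the archive, parameter
    /// adaptation and success history, and refresh the population and the
    /// best solution.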
    fn step(&mut self) {
        let pop_size = self.st.pop.nrows();

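        // Warm the bounds cache once per generation; the first individual only
        // serves as a shape sample and the returned bounds are not used here.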
        let sample_individual = self.st.pop.row(0).transpose();
        let _bounds = self
            .bounds_cache
            .get_bounds(&self.opt_prob, &sample_individual);

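        // Generate and evaluate one trial vector per target individual in
        // parallel. Each Rayon worker derives its RNG seed from the run seed,
        // the current iteration and its thread index.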
        let trials: Vec<_> = (0..pop_size)
            .into_par_iter()
            .map_init(
                || {
                    let thread_id = rayon::current_thread_index().unwrap_or(0);
                    StdRng::seed_from_u64(self.seed + self.st.iter as u64 * 1000 + thread_id as u64)
                },
                |rng, i| {
                    let strategy = match &self.conf.mutation_type {
                        MutationType::Standard(standard) => &standard.strategy,
                        MutationType::Adaptive(adaptive) => &adaptive.strategy,
                    };

                    let strategy: &dyn MutationStrategy<T, N, D> = match strategy {
                        DEStrategy::Rand1Bin => &Rand1Bin,
                        DEStrategy::Best1Bin => &Best1Bin,
                        DEStrategy::RandToBest1Bin => &RandToBest1Bin,
                        DEStrategy::Best2Bin => &Best2Bin,
                        DEStrategy::Rand2Bin => &Rand2Bin,
                    };

                    let mut local_pa = self.parameter_adaptation.clone();
                    let (f, cr) = local_pa.generate_parameters();

                    let trial = strategy.generate_trial(
                        &self.st.pop,
                        Some(&self.st.best_x),
                        i,
                        T::from_f64(f).unwrap(),
                        T::from_f64(cr).unwrap(),
                        rng,
                    );

                    let fitness = self.opt_prob.evaluate(&trial);
                    let constraint = self.opt_prob.is_feasible(&trial);

                    let success = self.select_trial(
                        fitness,
                        constraint,
                        self.st.fitness[i],
                        self.st.constraints[i],
                    );

                    (i, trial, fitness, constraint, success, f, cr)
                },
            )
            .collect();

        let mut successes = Vec::new();

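        // Feasible trials that improve on the target's raw fitness feed the
        // archive and the F/CR success memory; only trials accepted by
        // `select_trial` go on to replace their targets.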
        let updates: Vec<_> = trials
            .into_iter()
            .filter_map(
                |(i, trial, trial_fitness, trial_constraint, success, f, cr)| {
                    if trial_constraint && trial_fitness > self.st.fitness[i] {
                        self.archive.add_solution(trial.clone(), trial_fitness);
                        self.parameter_adaptation.record_success(f, cr);
                    }

                    successes.push(success);

                    if success {
                        Some((i, trial, trial_fitness, trial_constraint))
                    } else {
                        None
                    }
                },
            )
            .collect();

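        // Append this generation's outcomes to the bounded success history.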
        for success in successes {
            self.success_history.push_back(success);
            if self.success_history.len() > self.conf.common.success_history_size {
                self.success_history.pop_front();
            }
        }

        self.parameter_adaptation.update_parameters();

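        // Apply the accepted trials to copies of the population buffers, then
        // swap the copies in.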
        let mut new_population = self.st.pop.clone();
        let mut new_fitness = self.st.fitness.clone();
        let mut new_constraints = self.st.constraints.clone();

        for (i, trial, trial_fitness, trial_constraint) in updates {
            new_population.set_row(i, &trial.transpose());
            new_fitness[i] = trial_fitness;
            new_constraints[i] = trial_constraint;
        }

        self.st.pop = new_population;
        self.st.fitness = new_fitness;
        self.st.constraints = new_constraints;

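        // Refresh the global best from any feasible individual that now beats it.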
        for i in 0..pop_size {
            if self.st.constraints[i] && self.st.fitness[i] > self.st.best_f {
                self.st.best_f = self.st.fitness[i];
                self.st.best_x = self.st.pop.row(i).transpose();
            }
        }

        self.st.iter += 1;
    }

    fn state(&self) -> &State<T, N, D> {
        &self.st
    }
}