use ndarray::{Array1, Array2};
use rand::Rng;
use crate::{MetaheuristicError, Result};
use super::{ObjectiveFunction, OptimizationParams, OptimizationResult, argmin};
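
/// Parameters for particle swarm optimization: `w` is the inertia weight, `c1` the
/// cognitive (personal-best) coefficient, `c2` the social (global-best) coefficient,
/// and `w_decay` the amount subtracted from the inertia weight on each iteration.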
#[derive(Debug, Clone)]
pub struct PSOParams {
    pub opt_params: OptimizationParams,
    pub w: f64,
    pub c1: f64,
    pub c2: f64,
    pub w_decay: f64,
}

impl Default for PSOParams {
    fn default() -> Self {
        Self {
            opt_params: OptimizationParams::default(),
            w: 0.9,
            c1: 2.0,
            c2: 2.0,
            w_decay: 0.0,
        }
    }
}
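
/// Particle swarm optimizer over a box-constrained objective function.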
pub struct PSO<'a> {
    objective: &'a dyn ObjectiveFunction,
    params: PSOParams,
}

impl<'a> PSO<'a> {
    pub fn new(objective: &'a dyn ObjectiveFunction, params: PSOParams) -> Self {
        Self { objective, params }
    }
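
    /// Runs the swarm for up to `max_iterations` iterations, stopping early if the
    /// optional `target_value` is reached, and returns the best solution found.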
    pub fn optimize(&self) -> Result<OptimizationResult> {
        let mut rng = rand::thread_rng();
        // Start particles at random positions within the bounds, with velocities in [-1, 1].
        let mut positions = self.initialize_population(&mut rng)?;
        // The bounds do not change during a run, so fetch them once for clamping.
        let (lower_bounds, upper_bounds) = self.objective.bounds();
        let mut velocities: Array2<f64> =
            Array2::zeros((self.params.opt_params.population_size, self.objective.dimensions()));
        for i in 0..self.params.opt_params.population_size {
            for j in 0..self.objective.dimensions() {
                velocities[[i, j]] = rng.gen_range(-1.0..=1.0);
            }
        }

        // Seed the personal bests with the initial positions and pick the global best.
        let mut personal_best = positions.clone();
        let mut personal_best_values = self.evaluate_population(&positions);
        let best_idx = argmin(&personal_best_values).expect("Failed to find minimum fitness");
        let mut global_best_position = positions.row(best_idx).to_owned();
        let mut global_best_value = personal_best_values[best_idx];
        let mut evaluations = self.params.opt_params.population_size;
        let mut iterations = 0;
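
        // Main loop: move every particle, then refresh personal and global bests.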
        for iteration in 0..self.params.opt_params.max_iterations {
            iterations = iteration + 1;
            // Linearly decay the inertia weight; w_decay = 0.0 keeps it constant.
            let w = (self.params.w - self.params.w_decay * iteration as f64).max(0.0);
            for i in 0..self.params.opt_params.population_size {
                for j in 0..self.objective.dimensions() {
                    let r1 = rng.gen::<f64>();
                    let r2 = rng.gen::<f64>();
                    // Velocity update: inertia + cognitive (personal best) + social (global best) terms.
                    velocities[[i, j]] = w * velocities[[i, j]]
                        + self.params.c1 * r1 * (personal_best[[i, j]] - positions[[i, j]])
                        + self.params.c2 * r2 * (global_best_position[j] - positions[[i, j]]);
                    positions[[i, j]] += velocities[[i, j]];
                    // Keep the particle inside the search bounds.
                    positions[[i, j]] = positions[[i, j]].clamp(lower_bounds[j], upper_bounds[j]);
                }
            }
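
            // Re-evaluate the swarm after moving the particles.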
            let current_values = self.evaluate_population(&positions);
            evaluations += self.params.opt_params.population_size;
            for i in 0..self.params.opt_params.population_size {
                if current_values[i] < personal_best_values[i] {
                    personal_best_values[i] = current_values[i];
                    personal_best.row_mut(i).assign(&positions.row(i));
                    if current_values[i] < global_best_value {
                        global_best_value = current_values[i];
                        global_best_position = positions.row(i).to_owned();
                    }
                }
            }
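
            // Stop early once the target fitness (if any) has been reached.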
            if let Some(target) = self.params.opt_params.target_value {
                if global_best_value <= target {
                    break;
                }
            }
        }

        Ok(OptimizationResult {
            best_solution: global_best_position,
            best_fitness: global_best_value,
            iterations,
            evaluations,
        })
    }
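
    /// Samples an initial population uniformly at random within the objective's bounds,
    /// returning an error if the bound vectors do not match the problem dimensionality.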
    fn initialize_population(&self, rng: &mut impl Rng) -> Result<Array2<f64>> {
        let (lower_bounds, upper_bounds) = self.objective.bounds();
        let dims = self.objective.dimensions();
        if lower_bounds.len() != dims || upper_bounds.len() != dims {
            return Err(MetaheuristicError::InvalidDimension {
                expected: dims,
                got: lower_bounds.len(),
            });
        }
        let mut population = Array2::zeros((self.params.opt_params.population_size, dims));
        for i in 0..self.params.opt_params.population_size {
            for j in 0..dims {
                population[[i, j]] = rng.gen_range(lower_bounds[j]..=upper_bounds[j]);
            }
        }
        Ok(population)
    }
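
    /// Evaluates the objective for every particle (row) of the population.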
    fn evaluate_population(&self, population: &Array2<f64>) -> Array1<f64> {
        let mut fitness = Array1::zeros(population.nrows());
        for (i, row) in population.rows().into_iter().enumerate() {
            fitness[i] = self.objective.evaluate(&row.to_owned());
        }
        fitness
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::test_function::{Sphere, TestFunction};

    #[test]
    fn test_pso_optimization() {
        let problem = Sphere::new(2);
        let params = PSOParams::default();
        let optimizer = PSO::new(&problem, params);
        let result = optimizer.optimize().unwrap();

        // With the default parameters the swarm should land close to the global minimum.
        assert!(result.best_fitness < 1e-2);
        let global_min = problem.global_minimum_position();
        for (x, x_min) in result.best_solution.iter().zip(global_min.iter()) {
            assert!((x - x_min).abs() < 1e-1);
        }
    }
}