use parking_lot::Mutex;
use super::genetic::{
self, Candidate, EvolutionaryState, Phase, advance_generation, auto_divisions,
collect_evaluated_generation, crossover, das_dennis, extract_trial_params,
generate_random_candidates, mutate, sample_from_candidate, sample_random,
};
use crate::distribution::Distribution;
use crate::multi_objective::MultiObjectiveTrial;
use crate::param::ParamValue;
use crate::types::Direction;
/// Scalarization scheme used to collapse a multi-objective vector into a
/// single subproblem score (see the `scalarize_*` functions below).
#[derive(Debug, Clone, Default)]
pub enum Decomposition {
    /// Linear weighted sum of the objectives: `sum(w_i * f_i)`.
    WeightedSum,
    /// Weighted Chebyshev distance to the ideal point:
    /// `max(w_i * |f_i - z_i|)`. This is the default scheme.
    #[default]
    Tchebycheff,
    /// Penalty-based boundary intersection: distance along the weight
    /// direction plus `theta` times the perpendicular distance.
    Pbi {
        /// Penalty factor applied to the perpendicular-distance term.
        theta: f64,
    },
}
/// MOEA/D (decomposition-based multi-objective evolutionary algorithm)
/// sampler.
pub struct MoeadSampler {
    // All mutable algorithm state; the mutex lets `sample(&self, ...)`
    // mutate it behind a shared reference.
    state: Mutex<MoeadState>,
}
impl MoeadSampler {
    /// Creates a sampler with the default configuration and no fixed seed.
    #[must_use]
    pub fn new() -> Self {
        Self {
            state: Mutex::new(MoeadState::new(MoeadConfig::default(), None)),
        }
    }
    /// Creates a sampler with the default configuration and a fixed RNG
    /// seed, for reproducible runs.
    #[must_use]
    pub fn with_seed(seed: u64) -> Self {
        Self {
            state: Mutex::new(MoeadState::new(MoeadConfig::default(), Some(seed))),
        }
    }
    /// Returns a builder for customizing population size, neighborhood
    /// size, decomposition scheme, variation parameters, and seed.
    #[must_use]
    pub fn builder() -> MoeadSamplerBuilder {
        MoeadSamplerBuilder::default()
    }
}
impl Default for MoeadSampler {
fn default() -> Self {
Self::new()
}
}
/// Fluent builder for [`MoeadSampler`]. Any option left unset falls back to
/// the defaults applied in `build` / `MoeadConfig::default`.
#[derive(Debug, Clone, Default)]
pub struct MoeadSamplerBuilder {
    // Target population size; derived from the weight lattice when `None`.
    population_size: Option<usize>,
    // Mating-neighborhood size T; defaults to min(20, population size).
    neighborhood_size: Option<usize>,
    // Scalarization scheme; defaults to Tchebycheff.
    decomposition: Decomposition,
    // Probability passed to `crossover` (default 1.0).
    crossover_prob: Option<f64>,
    // Distribution index passed to `crossover` (default 20.0).
    crossover_eta: Option<f64>,
    // Distribution index passed to `mutate` (default 20.0).
    mutation_eta: Option<f64>,
    // Optional RNG seed for reproducibility.
    seed: Option<u64>,
}
impl MoeadSamplerBuilder {
    /// Sets the target population size.
    #[must_use]
    pub fn population_size(self, size: usize) -> Self {
        Self {
            population_size: Some(size),
            ..self
        }
    }
    /// Sets the mating-neighborhood size T.
    #[must_use]
    pub fn neighborhood_size(self, size: usize) -> Self {
        Self {
            neighborhood_size: Some(size),
            ..self
        }
    }
    /// Sets the decomposition (scalarization) scheme.
    #[must_use]
    pub fn decomposition(self, decomp: Decomposition) -> Self {
        Self {
            decomposition: decomp,
            ..self
        }
    }
    /// Sets the crossover probability.
    #[must_use]
    pub fn crossover_prob(self, prob: f64) -> Self {
        Self {
            crossover_prob: Some(prob),
            ..self
        }
    }
    /// Sets the crossover distribution index.
    #[must_use]
    pub fn crossover_eta(self, eta: f64) -> Self {
        Self {
            crossover_eta: Some(eta),
            ..self
        }
    }
    /// Sets the mutation distribution index.
    #[must_use]
    pub fn mutation_eta(self, eta: f64) -> Self {
        Self {
            mutation_eta: Some(eta),
            ..self
        }
    }
    /// Sets a fixed RNG seed for reproducible runs.
    #[must_use]
    pub fn seed(self, seed: u64) -> Self {
        Self {
            seed: Some(seed),
            ..self
        }
    }
    /// Consumes the builder and produces the configured sampler, filling in
    /// defaults (crossover prob 1.0, etas 20.0) for unset options.
    #[must_use]
    pub fn build(self) -> MoeadSampler {
        let Self {
            population_size,
            neighborhood_size,
            decomposition,
            crossover_prob,
            crossover_eta,
            mutation_eta,
            seed,
        } = self;
        let config = MoeadConfig {
            user_population_size: population_size,
            neighborhood_size,
            decomposition,
            crossover_prob: crossover_prob.unwrap_or(1.0),
            crossover_eta: crossover_eta.unwrap_or(20.0),
            mutation_eta: mutation_eta.unwrap_or(20.0),
        };
        MoeadSampler {
            state: Mutex::new(MoeadState::new(config, seed)),
        }
    }
}
/// Resolved MOEA/D settings, produced from the builder with numeric defaults
/// already applied.
#[derive(Debug, Clone)]
struct MoeadConfig {
    // Requested population size; `None` means derive from the weight lattice.
    user_population_size: Option<usize>,
    // Neighborhood size T; `None` means min(20, population size).
    neighborhood_size: Option<usize>,
    // Scalarization used to rank trials per subproblem.
    decomposition: Decomposition,
    // Probability handed to `crossover`.
    crossover_prob: f64,
    // Distribution index handed to `crossover`.
    crossover_eta: f64,
    // Distribution index handed to `mutate`.
    mutation_eta: f64,
}
impl Default for MoeadConfig {
fn default() -> Self {
Self {
user_population_size: None,
neighborhood_size: None,
decomposition: Decomposition::default(),
crossover_prob: 1.0,
crossover_eta: 20.0,
mutation_eta: 20.0,
}
}
}
/// Mutable algorithm state kept behind the sampler's mutex.
struct MoeadState {
    // Shared evolutionary bookkeeping (RNG, phase, candidates, dimensions).
    evo: EvolutionaryState,
    config: MoeadConfig,
    // One weight vector per subproblem (Das-Dennis lattice, possibly
    // truncated or padded to the population size).
    weight_vectors: Vec<Vec<f64>>,
    // For each subproblem, indices of its T nearest weight vectors
    // (Euclidean distance; includes itself at distance zero).
    neighborhoods: Vec<Vec<usize>>,
    // Best (minimization-space) value seen per objective; starts at +inf.
    ideal_point: Vec<f64>,
    // Objective values of the last evaluated generation (minimization space).
    population_values: Vec<Vec<f64>>,
    // Decoded parameter vectors of the last evaluated generation.
    population_params: Vec<Vec<ParamValue>>,
    // Set once `initialize_moead` has run.
    initialized: bool,
}
impl MoeadState {
fn new(config: MoeadConfig, seed: Option<u64>) -> Self {
Self {
evo: EvolutionaryState::new(seed),
config,
weight_vectors: Vec::new(),
neighborhoods: Vec::new(),
ideal_point: Vec::new(),
population_values: Vec::new(),
population_params: Vec::new(),
initialized: false,
}
}
}
impl crate::multi_objective::MultiObjectiveSampler for MoeadSampler {
    /// Draws a parameter value for `trial_id` from `distribution`.
    ///
    /// While in the discovery phase, delegates to the generic discovery
    /// sampler; once discovery is exhausted, sets up the MOEA/D structures
    /// and switches to candidate-driven sampling. In the active phase, a new
    /// generation is bred whenever the current one is fully evaluated.
    fn sample(
        &self,
        distribution: &Distribution,
        trial_id: u64,
        history: &[MultiObjectiveTrial],
        directions: &[Direction],
    ) -> ParamValue {
        let mut state = self.state.lock();
        match &state.evo.phase {
            Phase::Discovery => {
                // Discovery still has values to hand out — use them directly.
                if let Some(value) =
                    genetic::sample_discovery(&mut state.evo, distribution, trial_id)
                {
                    return value;
                }
                // Discovery exhausted: build weights/neighborhoods and seed
                // an initial random population.
                initialize_moead(&mut state, directions);
                generate_random_candidates(&mut state.evo);
            }
            Phase::Active => {
                maybe_generate_new_generation(&mut state, history, directions);
            }
        }
        sample_from_candidate(&mut state.evo, trial_id)
    }
}
/// One-time MOEA/D setup: builds the weight-vector lattice, sizes the
/// population, computes subproblem neighborhoods, and flips the phase to
/// `Active`.
fn initialize_moead(state: &mut MoeadState, directions: &[Direction]) {
    let n_obj = directions.len();
    let requested = state.config.user_population_size;
    // Structured Das-Dennis lattice sized toward the requested population
    // (100 when no explicit size was given).
    let divisions = auto_divisions(n_obj, requested.unwrap_or(100));
    state.weight_vectors = das_dennis(n_obj, divisions);
    // Final population size: user override, else lattice size; floored at 4.
    let pop_size = requested.unwrap_or(state.weight_vectors.len()).max(4);
    // Lattice larger than needed → trim; smaller → pad with clones of
    // randomly chosen existing vectors.
    state.weight_vectors.truncate(pop_size);
    while state.weight_vectors.len() < pop_size {
        let pick = state.evo.rng.usize(0..state.weight_vectors.len());
        let duplicate = state.weight_vectors[pick].clone();
        state.weight_vectors.push(duplicate);
    }
    // Neighborhood size T: configured value or 20, never above pop_size.
    let t = state
        .config
        .neighborhood_size
        .unwrap_or_else(|| 20.min(pop_size))
        .min(pop_size);
    state.neighborhoods = compute_neighborhoods(&state.weight_vectors, t);
    state.evo.population_size = pop_size;
    state.evo.phase = Phase::Active;
    // Ideal point starts at +inf per objective; tightened as trials arrive.
    state.ideal_point = vec![f64::INFINITY; n_obj];
    state.initialized = true;
}
/// For each weight vector, returns the indices of its `t` nearest weight
/// vectors by Euclidean distance (each vector is its own nearest neighbor).
/// Ties keep ascending index order because the sort is stable.
fn compute_neighborhoods(weights: &[Vec<f64>], t: usize) -> Vec<Vec<usize>> {
    let n = weights.len();
    let euclidean = |a: &[f64], b: &[f64]| -> f64 {
        a.iter()
            .zip(b)
            .map(|(&x, &y)| (x - y) * (x - y))
            .sum::<f64>()
            .sqrt()
    };
    let mut neighborhoods = Vec::with_capacity(n);
    for wi in weights {
        let dists: Vec<f64> = weights.iter().map(|wj| euclidean(wi, wj)).collect();
        let mut by_distance: Vec<usize> = (0..n).collect();
        by_distance.sort_by(|&a, &b| {
            dists[a]
                .partial_cmp(&dists[b])
                .unwrap_or(core::cmp::Ordering::Equal)
        });
        by_distance.truncate(t);
        neighborhoods.push(by_distance);
    }
    neighborhoods
}
/// Maps raw objective values into minimization space: values of maximized
/// objectives are negated so every scalarization can assume smaller = better.
fn to_minimize_space(values: &[f64], directions: &[Direction]) -> Vec<f64> {
    let mut flipped = Vec::with_capacity(values.len());
    for (&v, d) in values.iter().zip(directions) {
        let signed = match d {
            Direction::Minimize => v,
            Direction::Maximize => -v,
        };
        flipped.push(signed);
    }
    flipped
}
/// Advances the population if possible: seeds a random generation when no
/// candidates exist, or — once every candidate in the current generation has
/// been evaluated — breeds offspring and rolls the generation forward.
fn maybe_generate_new_generation(
    state: &mut MoeadState,
    history: &[MultiObjectiveTrial],
    directions: &[Direction],
) {
    if state.evo.candidates.is_empty() {
        // No live candidates: bootstrap with a fresh random population.
        generate_random_candidates(&mut state.evo);
    } else if let Some(evaluated) = collect_evaluated_generation(&state.evo, history) {
        // Full generation evaluated: breed and advance. Otherwise wait for
        // more trial results.
        let offspring = moead_generate_offspring(state, &evaluated, directions);
        advance_generation(&mut state.evo, offspring);
    }
}
/// Weighted-sum scalarization: the dot product of `values` and `weight`.
fn scalarize_weighted_sum(values: &[f64], weight: &[f64]) -> f64 {
    let mut total = 0.0;
    for (&v, &w) in values.iter().zip(weight) {
        total += w * v;
    }
    total
}
/// Tchebycheff scalarization: the largest weighted absolute deviation from
/// the ideal point. Weights are floored at 1e-6 so a zero weight cannot make
/// an objective disappear from its subproblem.
fn scalarize_tchebycheff(values: &[f64], weight: &[f64], ideal: &[f64]) -> f64 {
    let mut worst = f64::NEG_INFINITY;
    for ((&v, &w), &z) in values.iter().zip(weight).zip(ideal) {
        let w = if w < 1e-6 { 1e-6 } else { w };
        worst = worst.max(w * (v - z).abs());
    }
    worst
}
/// Penalty-based boundary intersection (PBI): `d1 + theta * d2`, where `d1`
/// is the projection of `values - ideal` onto the (normalized) weight
/// direction and `d2` is the perpendicular distance from that direction.
fn scalarize_pbi(values: &[f64], weight: &[f64], ideal: &[f64], theta: f64) -> f64 {
    // Translate so the ideal point sits at the origin.
    let diff: Vec<f64> = values.iter().zip(ideal).map(|(&v, &z)| v - z).collect();
    let w_norm = weight.iter().map(|&w| w * w).sum::<f64>().sqrt();
    // A (near-)zero weight vector has no projection direction: treat the
    // subproblem score as worst possible.
    if w_norm < 1e-30 {
        return f64::INFINITY;
    }
    let w_unit: Vec<f64> = weight.iter().map(|&w| w / w_norm).collect();
    // d1: signed length of the projection onto the weight direction.
    let d1: f64 = diff.iter().zip(&w_unit).map(|(&d, &u)| d * u).sum();
    // d2^2: squared norm of the component perpendicular to that direction.
    let d2_sq: f64 = diff
        .iter()
        .zip(&w_unit)
        .map(|(&d, &u)| {
            let residual = d - d1 * u;
            residual * residual
        })
        .sum();
    d1 + theta * d2_sq.sqrt()
}
/// Dispatches to the scalarization selected by `decomposition`.
fn scalarize(values: &[f64], weight: &[f64], ideal: &[f64], decomposition: &Decomposition) -> f64 {
    // Matching by value is fine: variants carry only Copy data (`theta`).
    match *decomposition {
        Decomposition::WeightedSum => scalarize_weighted_sum(values, weight),
        Decomposition::Tchebycheff => scalarize_tchebycheff(values, weight, ideal),
        Decomposition::Pbi { theta } => scalarize_pbi(values, weight, ideal, theta),
    }
}
/// Breeds the next generation of candidates using MOEA/D neighborhood mating.
///
/// For each weight-vector subproblem, the population member minimizing the
/// configured scalarization is located; offspring are then produced by
/// crossing two parents drawn from each subproblem's neighborhood and
/// mutating the result. Falls back to fully random candidates when fewer
/// than two evaluated trials are available.
///
/// NOTE(review): the loop over `current_values` originally read
/// `for vals in ¤t_values` — a mis-encoded `&current_values` (the
/// `&curr` prefix was swallowed by an HTML-entity pass). Restored below;
/// everything else is unchanged, preserving RNG call order.
fn moead_generate_offspring(
    state: &mut MoeadState,
    population: &[&MultiObjectiveTrial],
    directions: &[Direction],
) -> Vec<Candidate> {
    let pop_size = state.evo.population_size;
    // Not enough parents to cross over: emit a fully random generation.
    if population.len() < 2 {
        return (0..pop_size)
            .map(|_| {
                let params = state
                    .evo
                    .dimensions
                    .iter()
                    .map(|d| sample_random(&mut state.evo.rng, &d.distribution))
                    .collect();
                Candidate { params }
            })
            .collect();
    }
    // Decode each trial's parameters and map objective values into
    // minimization space so every scalarization can assume smaller = better.
    let current_params: Vec<Vec<ParamValue>> = population
        .iter()
        .map(|t| extract_trial_params(t, &state.evo.dimensions, &mut state.evo.rng))
        .collect();
    let current_values: Vec<Vec<f64>> = population
        .iter()
        .map(|t| to_minimize_space(&t.values, directions))
        .collect();
    // Tighten the ideal point with the best value seen per objective.
    for vals in &current_values {
        for (i, &v) in vals.iter().enumerate() {
            if i < state.ideal_point.len() && v < state.ideal_point[i] {
                state.ideal_point[i] = v;
            }
        }
    }
    // For every weight vector, find the population member with the lowest
    // scalarized score for that subproblem.
    let n_weights = state.weight_vectors.len();
    let mut best_for_subproblem: Vec<usize> = Vec::with_capacity(n_weights);
    for j in 0..n_weights {
        let mut best_idx = 0;
        let mut best_val = f64::INFINITY;
        for (k, vals) in current_values.iter().enumerate() {
            let s = scalarize(
                vals,
                &state.weight_vectors[j],
                &state.ideal_point,
                &state.config.decomposition,
            );
            if s < best_val {
                best_val = s;
                best_idx = k;
            }
        }
        best_for_subproblem.push(best_idx);
    }
    state.population_values = current_values;
    state.population_params = current_params;
    // One child per subproblem: pick two parents from the subproblem's
    // neighborhood, cross them, then mutate the first child.
    let mut offspring = Vec::with_capacity(pop_size);
    for i in 0..pop_size.min(state.neighborhoods.len()) {
        let neighborhood = &state.neighborhoods[i];
        let n1 = neighborhood[state.evo.rng.usize(0..neighborhood.len())];
        let n2 = neighborhood[state.evo.rng.usize(0..neighborhood.len())];
        // Modulo guards against index drift if the two lists ever disagree.
        let p1_idx = best_for_subproblem[n1 % best_for_subproblem.len()];
        let p2_idx = best_for_subproblem[n2 % best_for_subproblem.len()];
        let p1 = &state.population_params[p1_idx];
        let p2 = &state.population_params[p2_idx];
        let (mut child1, _child2) = crossover(
            &mut state.evo.rng,
            p1,
            p2,
            &state.evo.dimensions,
            state.config.crossover_prob,
            state.config.crossover_eta,
        );
        mutate(
            &mut state.evo.rng,
            &mut child1,
            &state.evo.dimensions,
            state.config.mutation_eta,
        );
        offspring.push(Candidate { params: child1 });
    }
    // Fewer neighborhoods than the target population size: top up by
    // breeding from randomly chosen neighborhoods.
    while offspring.len() < pop_size {
        let i = state.evo.rng.usize(0..state.neighborhoods.len());
        let neighborhood = &state.neighborhoods[i];
        let n1 = neighborhood[state.evo.rng.usize(0..neighborhood.len())];
        let n2 = neighborhood[state.evo.rng.usize(0..neighborhood.len())];
        let p1_idx = best_for_subproblem[n1 % best_for_subproblem.len()];
        let p2_idx = best_for_subproblem[n2 % best_for_subproblem.len()];
        let (mut child1, _) = crossover(
            &mut state.evo.rng,
            &state.population_params[p1_idx],
            &state.population_params[p2_idx],
            &state.evo.dimensions,
            state.config.crossover_prob,
            state.config.crossover_eta,
        );
        mutate(
            &mut state.evo.rng,
            &mut child1,
            &state.evo.dimensions,
            state.config.mutation_eta,
        );
        offspring.push(Candidate { params: child1 });
    }
    offspring
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn weighted_sum_is_dot_product() {
        let score = scalarize_weighted_sum(&[1.0, 2.0, 3.0], &[0.5, 0.3, 0.2]);
        assert!((score - (0.5 + 0.6 + 0.6)).abs() < 1e-10);
    }

    #[test]
    fn tchebycheff_takes_worst_weighted_deviation() {
        let score = scalarize_tchebycheff(&[3.0, 2.0], &[0.5, 0.5], &[1.0, 1.0]);
        assert!((score - 1.0).abs() < 1e-10);
    }

    #[test]
    fn pbi_on_axis_point_reduces_to_d1() {
        // [2, 2] lies on the direction of weight [1, 1], so the perpendicular
        // term is zero and the score equals d1 = ||(2, 2)|| = 2 * sqrt(2).
        let score = scalarize_pbi(&[2.0, 2.0], &[1.0, 1.0], &[0.0, 0.0], 5.0);
        let expected_d1 = 2.0 * (2.0_f64).sqrt();
        assert!((score - expected_d1).abs() < 1e-10);
    }

    #[test]
    fn neighborhoods_hold_nearest_weight_indices() {
        let weights = vec![vec![1.0, 0.0], vec![0.5, 0.5], vec![0.0, 1.0]];
        let neighborhoods = compute_neighborhoods(&weights, 2);
        assert_eq!(neighborhoods.len(), 3);
        assert!(neighborhoods.iter().all(|n| n.len() == 2));
        // A vector is always its own nearest neighbor (distance zero).
        assert_eq!(neighborhoods[0][0], 0);
        assert_eq!(neighborhoods[0][1], 1);
    }
}