use super::constraints::{
CeilingConstraintData, CrossoverMonotonicityConstraintData, MinGainConstraintData,
constraint_ceiling, constraint_crossover_monotonicity, constraint_min_gain,
};
use super::optim::{ObjectiveData, PenaltyMode, compute_fitness_penalties};
use crate::LossType;
use nlopt::{Algorithm, Nlopt, Target};
/// Runs an NLopt optimization of the filter parameter vector `x` in place.
///
/// Strategy: algorithms that cannot handle explicit nonlinear inequality
/// constraints (the derivative-free / global ones matched below) get the
/// ceiling / min-gain / crossover-monotonicity constraints folded into the
/// objective as penalty terms (`PenaltyMode::Standard`); all other
/// algorithms get them registered as proper NLopt inequality constraints
/// and the penalties disabled.
///
/// # Arguments
/// * `x` - parameter vector; overwritten with the best point found.
/// * `lower_bounds` / `upper_bounds` - box bounds, same length as `x`.
/// * `objective_data` - objective configuration; consumed by the optimizer.
/// * `algo` - NLopt algorithm to use.
/// * `population` - population size for population-based algorithms
///   (ignored with a warning by algorithms that do not support it).
/// * `maxeval` - maximum number of objective evaluations.
///
/// # Returns
/// `Ok((status, value))` on success or `Err((error, value))` on failure,
/// where `status`/`error` are the NLopt outcome formatted via `Debug` and
/// `value` is the final objective value.
pub fn optimize_filters_nlopt(
    x: &mut [f64],
    lower_bounds: &[f64],
    upper_bounds: &[f64],
    mut objective_data: ObjectiveData,
    algo: Algorithm,
    population: usize,
    maxeval: usize,
) -> Result<(String, f64), (String, f64)> {
    let num_params = x.len();
    // These algorithms do not support nonlinear inequality constraints, so
    // violations must be discouraged through objective penalties instead.
    let use_penalties = matches!(
        algo,
        Algorithm::Bobyqa
            | Algorithm::Crs2Lm
            | Algorithm::Direct
            | Algorithm::DirectL
            | Algorithm::GMlsl
            | Algorithm::GMlslLds
            | Algorithm::Sbplx
            | Algorithm::StoGo
            | Algorithm::StoGoRand
            | Algorithm::Neldermead
    );
    // Build the constraint payloads now, before `objective_data` is moved
    // into the optimizer below.
    let ceiling_data = CeilingConstraintData {
        freqs: objective_data.freqs.clone(),
        srate: objective_data.srate,
        max_db: objective_data.max_db,
        peq_model: objective_data.peq_model,
    };
    let min_gain_data = MinGainConstraintData {
        min_db: objective_data.min_db,
        peq_model: objective_data.peq_model,
    };
    // Crossover monotonicity only applies to the multi-driver flatness loss,
    // and only when driver data is actually present.
    let crossover_monotonicity_data = if objective_data.loss_type == LossType::DriversFlat {
        objective_data.drivers_data.as_ref().map(|drivers_data| {
            CrossoverMonotonicityConstraintData {
                n_drivers: drivers_data.drivers.len(),
                min_log_separation: 0.15,
            }
        })
    } else {
        None
    };
    let penalty_mode = if use_penalties {
        PenaltyMode::Standard
    } else {
        PenaltyMode::Disabled
    };
    objective_data.configure_penalties(penalty_mode);
    let mut optimizer = Nlopt::new(
        algo,
        num_params,
        compute_fitness_penalties,
        Target::Minimize,
        objective_data,
    );
    optimizer
        .set_lower_bounds(lower_bounds)
        .expect("lower bounds should have correct dimension");
    optimizer
        .set_upper_bounds(upper_bounds)
        .expect("upper bounds should have correct dimension");
    if !use_penalties {
        // Constraint registration failures are non-fatal: the optimization
        // still runs, just without that constraint enforced.
        if let Err(e) = optimizer.add_inequality_constraint(constraint_ceiling, ceiling_data, 1e-6)
        {
            log::warn!("Failed to add ceiling constraint: {:?}", e);
        }
        if let Err(e) =
            optimizer.add_inequality_constraint(constraint_min_gain, min_gain_data, 1e-6)
        {
            log::warn!("Failed to add min gain constraint: {:?}", e);
        }
        if let Some(xover_data) = crossover_monotonicity_data
            && let Err(e) = optimizer.add_inequality_constraint(
                constraint_crossover_monotonicity,
                xover_data,
                1e-6,
            )
        {
            log::warn!("Failed to add crossover monotonicity constraint: {:?}", e);
        }
    }
    if let Err(e) = optimizer.set_population(population) {
        log::warn!("Failed to set population size {}: {:?}", population, e);
    }
    // Clamp instead of `as`-casting: on 64-bit targets a huge `maxeval`
    // would otherwise wrap silently to a small evaluation budget.
    let maxeval_u32 = u32::try_from(maxeval).unwrap_or(u32::MAX);
    if let Err(e) = optimizer.set_maxeval(maxeval_u32) {
        log::warn!("Failed to set max evaluations {}: {:?}", maxeval, e);
    }
    // Stopping criteria: target objective value, relative objective
    // tolerance, and relative parameter tolerance.
    optimizer
        .set_stopval(1e-4)
        .expect("stopval should be valid");
    optimizer
        .set_ftol_rel(1e-6)
        .expect("ftol_rel should be valid");
    optimizer
        .set_xtol_rel(1e-4)
        .expect("xtol_rel should be valid");
    let result = optimizer.optimize(x);
    // Normalize both outcomes to a `Debug`-formatted string plus the final
    // objective value so callers do not depend on nlopt's types.
    match result {
        Ok((status, val)) => Ok((format!("{:?}", status), val)),
        Err((e, val)) => Err((format!("{:?}", e), val)),
    }
}