use crate::calibration::config::OptimizationConfig;
use crate::calibration::types::{MarketDataRow, ModelCalibrator};
use cmaes_lbfgsb::cmaes::{canonical_cmaes_optimize, CmaesCanonicalConfig};
use cmaes_lbfgsb::lbfgsb_optimize::lbfgsb_optimize;
/// Bundles everything needed for one calibration run: the model being fit,
/// the optimizer configuration, and the market data to fit against.
///
/// Construct with [`CalibrationProcess::new`], optionally seed with
/// `with_initial_guess`, then execute via `run`.
pub struct CalibrationProcess {
    // Model implementation supplying the objective function and parameter bounds.
    model: Box<dyn ModelCalibrator>,
    // CMA-ES / L-BFGS-B optimizer settings.
    config: OptimizationConfig,
    // Observed market quotes the model is calibrated against.
    market_data: Vec<MarketDataRow>,
    // Optional starting parameter vector; when present, the global CMA-ES stage
    // may be shrunk or skipped (see `calibrate_model`).
    initial_guess: Option<Vec<f64>>,
}
impl CalibrationProcess {
pub fn new(
model: Box<dyn ModelCalibrator>,
config: OptimizationConfig,
market_data: Vec<MarketDataRow>,
) -> Self {
Self {
model,
config,
market_data,
initial_guess: None,
}
}
pub fn with_initial_guess(mut self, guess: Vec<f64>) -> Self {
self.initial_guess = Some(guess);
self
}
pub fn run(&self) -> (f64, Vec<f64>) {
let (best_obj, best_params) = calibrate_model(
&*self.model,
&self.market_data,
&self.config,
self.initial_guess.clone(),
);
(best_obj, best_params)
}
}
/// Calibrates `model` to `market_data` in up to two stages:
///
/// 1. **Global stage (CMA-ES):** finds a promising parameter region. Skipped or
///    reduced to a "mini" seeded run when `initial_guess` is provided.
/// 2. **Local stage (L-BFGS-B):** refines the global-stage result, gated by
///    `config.cmaes.lbfgsb_enabled`. The refined point is kept only if it
///    strictly improves the objective.
///
/// Branching on `initial_guess`:
/// * `Some(guess)` and `config.cmaes.mini_cmaes_on_refinement` — CMA-ES is
///   launched seeded at the guess, then L-BFGS-B refines.
/// * `Some(guess)` otherwise — CMA-ES is skipped; the guess goes straight to
///   L-BFGS-B.
/// * `None` — a full CMA-ES run (with restarts as configured) is performed.
///
/// Returns `(best_objective, best_parameters)`.
pub fn calibrate_model(
    model: &dyn ModelCalibrator,
    market_data: &[MarketDataRow],
    config: &OptimizationConfig,
    initial_guess: Option<Vec<f64>>,
) -> (f64, Vec<f64>) {
    let bounds = model.param_bounds();
    let obj_fn = |x: &[f64]| model.evaluate_objective(x, market_data);
    let (best_obj, best_sol) = {
        // NOTE(review): `relaxed_bounds`/`relaxed_obj_fn` are currently identical
        // to `bounds`/`obj_fn` above (same `param_bounds()` / `evaluate_objective`
        // calls). The duplicate closure presumably exists so one copy can be moved
        // into `canonical_cmaes_optimize` while `obj_fn` stays usable for the
        // L-BFGS-B stage — confirm before consolidating. The "relaxed" naming
        // suggests a bounds-relaxation step that is not (or no longer) present.
        let relaxed_bounds = model.param_bounds();
        let relaxed_obj_fn = |x: &[f64]| model.evaluate_objective(x, market_data);
        let cmaes_config = CmaesCanonicalConfig {
            population_size: config.pop_size,
            max_generations: config.max_gen,
            // Fixed fallback seed keeps unseeded runs reproducible.
            seed: config.cmaes.seed.unwrap_or(123456),
            // All `None` fields below defer to the library's canonical defaults.
            c1: None, c_mu: None,
            c_sigma: None,
            d_sigma: None,
            parallel_eval: config.cmaes.parallel_eval,
            verbosity: config.cmaes.verbosity,
            ipop_restarts: config.cmaes.ipop_restarts,
            ipop_increase_factor: config.cmaes.ipop_increase_factor,
            bipop_restarts: config.cmaes.bipop_restarts,
            total_evals_budget: config.cmaes.total_evals_budget,
            use_subrun_budgeting: config.cmaes.use_subrun_budgeting,
            alpha_mu: None,
            hsig_threshold_factor: None,
            bipop_small_population_factor: None,
            bipop_small_budget_factor: None,
            bipop_large_budget_factor: None,
            bipop_large_pop_increase_factor: None,
            max_bound_iterations: None,
            eig_precision_threshold: None,
            min_eig_value: None,
            matrix_op_threshold: None,
            stagnation_limit: None,
            min_sigma: None,
        };
        if let Some(ref guess) = initial_guess {
            let use_mini_cmaes = config.cmaes.mini_cmaes_on_refinement;
            if use_mini_cmaes {
                // Seeded ("mini") CMA-ES around the provided guess.
                if config.cmaes.verbosity > 0 {
                    println!(
                        "Using provided initial guess => launching mini CMA-ES around it. \
                         Then local L-BFGS-B refinement."
                    );
                }
                let guess_obj = relaxed_obj_fn(guess);
                if config.cmaes.verbosity > 0 {
                    println!("  Initial guess objective = {:.6}", guess_obj);
                }
                let cmaes_result = canonical_cmaes_optimize(
                    relaxed_obj_fn,
                    relaxed_bounds,
                    cmaes_config,
                    Some(guess.clone()),
                );
                // Re-evaluate the CMA-ES winner with `obj_fn` (currently the same
                // objective — see the review note above).
                let (_, relaxed_params) = cmaes_result.best_solution;
                let standard_obj = obj_fn(&relaxed_params);
                (standard_obj, relaxed_params)
            } else {
                // Trust the guess as-is; global stage is skipped entirely.
                if config.cmaes.verbosity > 0 {
                    println!(
                        "Using provided initial guess => skipping mini CMA-ES and proceeding directly to L-BFGS-B."
                    );
                }
                let guess_obj = obj_fn(guess);
                if config.cmaes.verbosity > 0 {
                    println!("  Initial guess objective = {:.6}", guess_obj);
                }
                (guess_obj, guess.clone())
            }
        } else {
            // No guess: run the full global search with restarts.
            if config.cmaes.verbosity > 0 {
                println!("No initial guess provided => running full CMA-ES with BIPOP restarts");
            }
            let cmaes_result =
                canonical_cmaes_optimize(relaxed_obj_fn, relaxed_bounds, cmaes_config, None);
            let (_, relaxed_params) = cmaes_result.best_solution;
            let standard_obj = obj_fn(&relaxed_params);
            (standard_obj, relaxed_params)
        }
    };
    // Optional local refinement of the global-stage winner.
    if config.cmaes.lbfgsb_enabled {
        if config.cmaes.verbosity > 0 {
            println!("Running L-BFGS-B refinement on best CMA-ES solution...");
        }
        // L-BFGS-B mutates its starting point in place; work on a copy so the
        // CMA-ES solution survives if refinement fails or does not improve.
        let mut refined_solution = best_sol.clone();
        let refine_res = lbfgsb_optimize(
            &mut refined_solution,
            bounds,
            &obj_fn,
            config.cmaes.lbfgsb_max_iterations,
            config.tolerance,
            // Per-iteration progress callback, only when verbose.
            if config.cmaes.verbosity >= 1 {
                Some(|_current_x: &[f64], current_obj: f64| {
                    println!("L-BFGS-B iteration => objective = {:.6}", current_obj);
                })
            } else {
                None
            },
            None,
        );
        match refine_res {
            Ok((loc_obj, loc_sol)) => {
                // Keep the refined point only on strict improvement.
                if loc_obj < best_obj {
                    if config.cmaes.verbosity > 0 {
                        println!(
                            "L-BFGS-B improved objective: {:.6} -> {:.6}",
                            best_obj, loc_obj
                        );
                    }
                    (loc_obj, loc_sol)
                } else {
                    if config.cmaes.verbosity > 0 {
                        println!("L-BFGS-B did not improve objective, keeping CMA-ES solution");
                    }
                    (best_obj, best_sol)
                }
            }
            Err(e) => {
                // Refinement failure is non-fatal: fall back to the global result.
                if config.cmaes.verbosity > 0 {
                    println!("L-BFGS-B failed: {:?}, keeping CMA-ES solution", e);
                }
                (best_obj, best_sol)
            }
        }
    } else {
        if config.cmaes.verbosity > 0 {
            println!("L-BFGS-B refinement disabled, using CMA-ES solution directly");
        }
        (best_obj, best_sol)
    }
}
/// Like [`calibrate_model`], but when `config.adaptive_bounds.enabled` is set it
/// repeatedly re-calibrates, asking the model to expand any parameter bound the
/// best solution lands too close to, until no expansion is needed or
/// `max_iterations` is reached.
///
/// Returns `(best_objective, best_parameters, final_bounds)`, where
/// `final_bounds` reflects any expansions the model applied.
///
/// Fixes vs. previous revision:
/// * `¶ms` mojibake restored to `&params` (the previous text did not compile).
/// * The best result is tracked with an `Option` instead of `f64::MAX` plus an
///   empty `Vec`, so the first run's parameters are always captured — previously
///   a non-finite (NaN/inf) or `f64::MAX` objective on every run returned an
///   empty parameter vector.
pub fn calibrate_model_adaptive(
    mut model: Box<dyn ModelCalibrator>,
    market_data: &[MarketDataRow],
    config: &OptimizationConfig,
    initial_guess: Option<Vec<f64>>,
) -> (f64, Vec<f64>, Vec<(f64, f64)>) {
    // Fast path: adaptive bounds disabled => single calibration run.
    if !config.adaptive_bounds.enabled {
        let (obj, params) = calibrate_model(&*model, market_data, config, initial_guess);
        let bounds = model.param_bounds().to_vec();
        return (obj, params, bounds);
    }
    // Best (objective, parameters) seen across iterations; `None` until the
    // first run completes.
    let mut best: Option<(f64, Vec<f64>)> = None;
    for iter in 0..config.adaptive_bounds.max_iterations {
        let (obj, params) = calibrate_model(&*model, market_data, config, initial_guess.clone());
        // Always accept the first result; afterwards require strict improvement
        // (matches the original `<` comparison).
        let improved = best.as_ref().map_or(true, |(best_obj, _)| obj < *best_obj);
        if improved {
            best = Some((obj, params.clone()));
        }
        // Let the model widen any bound the solution sits near.
        let adjusted = model.expand_bounds_if_needed(
            &params,
            config.adaptive_bounds.proximity_threshold,
            config.adaptive_bounds.expansion_factor,
        );
        if config.cmaes.verbosity > 0 {
            if adjusted {
                println!(
                    "Adaptive iteration {}: Expanded bounds for next iteration",
                    iter + 1
                );
            } else {
                println!(
                    "Adaptive iteration {}: No expansion needed, stopping early",
                    iter + 1
                );
            }
        }
        if !adjusted {
            break;
        }
    }
    // `max_iterations == 0` leaves `best` empty; fall back to the original
    // sentinel values in that degenerate case.
    let (best_obj, best_params) = best.unwrap_or((f64::MAX, Vec::new()));
    let bounds = model.param_bounds().to_vec();
    (best_obj, best_params, bounds)
}