use scirs2_core::ndarray::{Array1, Array2, ArrayView1};
use scirs2_core::random::rngs::StdRng;
use scirs2_core::random::{Rng, RngExt, SeedableRng};
use crate::error::{OptimizeError, OptimizeResult};
use super::acquisition::{AcquisitionFn, AcquisitionType, ExpectedImprovement};
use super::gp::{GpSurrogate, GpSurrogateConfig, RbfKernel, SurrogateKernel};
use super::sampling::{generate_samples, SamplingConfig, SamplingStrategy};
/// Configuration for [`BayesianOptimizer`].
#[derive(Clone)]
pub struct BayesianOptimizerConfig {
    /// Acquisition function used to pick the next evaluation point.
    pub acquisition: AcquisitionType,
    /// Sampling strategy for the initial space-filling design.
    pub initial_design: SamplingStrategy,
    /// Number of initial design points evaluated before model-guided search.
    pub n_initial: usize,
    /// Number of top-ranked candidates refined by local golden-section search.
    pub acq_n_restarts: usize,
    /// Number of random candidates scored when maximizing the acquisition.
    pub acq_n_candidates: usize,
    /// Configuration forwarded to the GP surrogate model.
    pub gp_config: GpSurrogateConfig,
    /// RNG seed; a seed is drawn from the global RNG when `None`.
    pub seed: Option<u64>,
    /// Verbosity level (not consulted anywhere in this module).
    pub verbose: usize,
}
impl Default for BayesianOptimizerConfig {
    /// Defaults: EI acquisition (xi = 0.01), Latin-hypercube initial design
    /// of 10 points, 5 local refinement restarts over 200 candidates,
    /// default GP settings, unseeded RNG, verbose = 0.
    fn default() -> Self {
        Self {
            acquisition: AcquisitionType::EI { xi: 0.01 },
            initial_design: SamplingStrategy::LatinHypercube,
            n_initial: 10,
            acq_n_restarts: 5,
            acq_n_candidates: 200,
            gp_config: GpSurrogateConfig::default(),
            seed: None,
            verbose: 0,
        }
    }
}
/// A single evaluated point in the search space.
#[derive(Debug, Clone)]
pub struct Observation {
    /// Location that was evaluated.
    pub x: Array1<f64>,
    /// Observed objective value at `x`.
    pub y: f64,
    /// Raw constraint values; left empty by the recording paths in this file.
    pub constraints: Vec<f64>,
    /// Whether all registered constraints were satisfied at `x`.
    pub feasible: bool,
}
/// Outcome of a Bayesian optimization run.
#[derive(Debug, Clone)]
pub struct BayesianOptResult {
    /// Location of the best observation found.
    pub x_best: Array1<f64>,
    /// Objective (or scalarized) value at `x_best`.
    pub f_best: f64,
    /// Every observation collected during the run, in evaluation order.
    pub observations: Vec<Observation>,
    /// Total number of objective evaluations performed.
    pub n_evals: usize,
    /// Incumbent best value after the initial design and after each
    /// iteration/round.
    pub best_history: Vec<f64>,
    /// Whether the run completed successfully.
    pub success: bool,
    /// Human-readable summary of the run.
    pub message: String,
}
/// An inequality constraint, satisfied when `func(x) <= 0`.
pub struct Constraint {
    /// Constraint function; non-positive return values mean "satisfied".
    pub func: Box<dyn Fn(&ArrayView1<f64>) -> f64 + Send + Sync>,
    /// Identifier kept for reporting purposes.
    pub name: String,
}
/// How a search-space dimension is treated by the optimizer.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DimensionType {
    /// Real-valued dimension; candidate coordinates are used as-is.
    Continuous,
    /// Integer dimension; candidate coordinates are rounded to the nearest
    /// integer and clamped back into bounds (see `enforce_dim_types`).
    Integer,
}
/// Sequential model-based optimizer: fits a GP surrogate to the recorded
/// observations and selects new points by maximizing an acquisition function.
pub struct BayesianOptimizer {
    /// Per-dimension `(lower, upper)` search bounds.
    bounds: Vec<(f64, f64)>,
    /// User configuration.
    config: BayesianOptimizerConfig,
    /// GP surrogate fitted to the recorded observations.
    surrogate: GpSurrogate,
    /// All evaluated points, in evaluation order.
    observations: Vec<Observation>,
    /// Index into `observations` of the incumbent best, if any.
    best_idx: Option<usize>,
    /// Registered inequality constraints (`c(x) <= 0`).
    constraints: Vec<Constraint>,
    /// Seeded RNG driving the sampling and acquisition randomness.
    rng: StdRng,
    /// Optional per-dimension type declarations (continuous vs integer).
    dim_types: Option<Vec<DimensionType>>,
}
impl BayesianOptimizer {
/// Create an optimizer over `bounds` with the given configuration.
///
/// # Errors
/// Returns `OptimizeError::InvalidInput` when `bounds` is empty or any
/// interval is degenerate or inverted (`lo >= hi`).
pub fn new(bounds: Vec<(f64, f64)>, config: BayesianOptimizerConfig) -> OptimizeResult<Self> {
    if bounds.is_empty() {
        return Err(OptimizeError::InvalidInput(
            "Bounds must have at least one dimension".to_string(),
        ));
    }
    for (i, &(lo, hi)) in bounds.iter().enumerate() {
        if lo >= hi {
            return Err(OptimizeError::InvalidInput(format!(
                "Invalid bounds for dimension {}: [{}, {}]",
                i, lo, hi
            )));
        }
    }
    // Honor the configured seed; otherwise draw one from the global RNG so
    // the run is still reproducible once the drawn seed is known.
    let seed: u64 = match config.seed {
        Some(s) => s,
        None => scirs2_core::random::rng().random(),
    };
    // Default surrogate: GP with an RBF kernel; `with_kernel` swaps this.
    let surrogate = GpSurrogate::new(
        Box::new(RbfKernel::default()) as Box<dyn SurrogateKernel>,
        config.gp_config.clone(),
    );
    Ok(Self {
        bounds,
        config,
        surrogate,
        observations: Vec::new(),
        best_idx: None,
        constraints: Vec::new(),
        rng: StdRng::seed_from_u64(seed),
        dim_types: None,
    })
}
/// Create an optimizer that uses a caller-supplied surrogate kernel instead
/// of the default RBF kernel. Bounds validation is identical to [`Self::new`].
pub fn with_kernel(
    bounds: Vec<(f64, f64)>,
    kernel: Box<dyn SurrogateKernel>,
    config: BayesianOptimizerConfig,
) -> OptimizeResult<Self> {
    // Delegate validation and field setup to `new`, then replace the
    // default surrogate with one built around the custom kernel.
    let mut optimizer = Self::new(bounds, config)?;
    let gp_config = optimizer.config.gp_config.clone();
    optimizer.surrogate = GpSurrogate::new(kernel, gp_config);
    Ok(optimizer)
}
/// Register an inequality constraint `func(x) <= 0` (non-positive return
/// values mean the constraint is satisfied). `name` is kept for reporting.
pub fn add_constraint<F>(&mut self, name: &str, func: F)
where
    F: Fn(&ArrayView1<f64>) -> f64 + Send + Sync + 'static,
{
    let constraint = Constraint {
        func: Box::new(func),
        name: name.to_string(),
    };
    self.constraints.push(constraint);
}
/// Declare, per dimension, whether the variable is continuous or integer.
///
/// # Errors
/// Returns `OptimizeError::InvalidInput` when the number of entries differs
/// from the number of bound pairs.
pub fn set_dimension_types(&mut self, types: Vec<DimensionType>) -> OptimizeResult<()> {
    let n_dims = self.bounds.len();
    if types.len() != n_dims {
        return Err(OptimizeError::InvalidInput(format!(
            "dimension_types length ({}) must match bounds length ({})",
            types.len(),
            n_dims
        )));
    }
    self.dim_types = Some(types);
    Ok(())
}
fn enforce_dim_types(&self, x: &mut Array1<f64>) {
if let Some(ref types) = self.dim_types {
for (d, dt) in types.iter().enumerate() {
if *dt == DimensionType::Integer {
let (lo, hi) = self.bounds[d];
x[d] = x[d].round().clamp(lo, hi);
}
}
}
}
/// Seed the optimizer with previously evaluated points.
///
/// Row `i` of `x_data` pairs with `y_data[i]`. Warm-started points carry no
/// constraint values and are treated as feasible. The surrogate is refit
/// after ingesting the data.
///
/// # Errors
/// Returns `OptimizeError::InvalidInput` when the row count of `x_data`
/// differs from the length of `y_data`.
pub fn warm_start(&mut self, x_data: &Array2<f64>, y_data: &Array1<f64>) -> OptimizeResult<()> {
    if x_data.nrows() != y_data.len() {
        return Err(OptimizeError::InvalidInput(
            "x_data and y_data row counts must match".to_string(),
        ));
    }
    for (row, &y) in x_data.outer_iter().zip(y_data.iter()) {
        let idx = self.observations.len();
        // Warm-started points are all marked feasible, so the incumbent is
        // tracked by objective value alone (no feasibility tie-break here).
        let is_new_best = match self.best_idx {
            Some(best) => y < self.observations[best].y,
            None => true,
        };
        if is_new_best {
            self.best_idx = Some(idx);
        }
        self.observations.push(Observation {
            x: row.to_owned(),
            y,
            constraints: Vec::new(),
            feasible: true,
        });
    }
    if !self.observations.is_empty() {
        self.fit_surrogate()?;
    }
    Ok(())
}
/// Run sequential Bayesian optimization of `objective` for `n_iter`
/// model-guided iterations (after completing the initial design).
///
/// Pre-existing observations (e.g. from `warm_start`) count toward the
/// `n_initial` budget. Returns the incumbent best together with the full
/// observation history.
///
/// # Errors
/// Propagates sampling/surrogate errors, and fails with
/// `ComputationError` if no observations were ever collected
/// (possible when `n_initial == 0` and `n_iter == 0`).
pub fn optimize<F>(&mut self, objective: F, n_iter: usize) -> OptimizeResult<BayesianOptResult>
where
    F: Fn(&ArrayView1<f64>) -> f64,
{
    // Remaining initial-design budget after accounting for any
    // observations already present.
    let n_initial = if self.observations.is_empty() {
        self.config.n_initial
    } else {
        self.config
            .n_initial
            .saturating_sub(self.observations.len())
    };
    if n_initial > 0 {
        let sampling_config = SamplingConfig {
            // Derive the sampler seed from our own RNG for reproducibility.
            seed: Some(self.rng.random()),
            ..Default::default()
        };
        let initial_points = generate_samples(
            n_initial,
            &self.bounds,
            self.config.initial_design,
            Some(sampling_config),
        )?;
        for i in 0..initial_points.nrows() {
            let mut x = initial_points.row(i).to_owned();
            // Snap integer dimensions before evaluating.
            self.enforce_dim_types(&mut x);
            let y = objective(&x.view());
            self.record_observation(x, y);
        }
        self.fit_surrogate()?;
    }
    // First entry (if any) is the post-initial-design incumbent; one entry
    // is then appended per iteration.
    let mut best_history = Vec::with_capacity(n_iter);
    if let Some(best_idx) = self.best_idx {
        best_history.push(self.observations[best_idx].y);
    }
    for _iter in 0..n_iter {
        // Propose, evaluate, record, refit — in that order, so each
        // suggestion sees all earlier data.
        let next_x = self.suggest_next()?;
        let y = objective(&next_x.view());
        self.record_observation(next_x, y);
        self.fit_surrogate()?;
        if let Some(best_idx) = self.best_idx {
            best_history.push(self.observations[best_idx].y);
        }
    }
    let best_idx = self.best_idx.ok_or_else(|| {
        OptimizeError::ComputationError("No observations collected".to_string())
    })?;
    let best_obs = &self.observations[best_idx];
    Ok(BayesianOptResult {
        x_best: best_obs.x.clone(),
        f_best: best_obs.y,
        observations: self.observations.clone(),
        n_evals: self.observations.len(),
        best_history,
        success: true,
        message: format!(
            "Optimization completed: {} evaluations, best f = {:.6e}",
            self.observations.len(),
            best_obs.y
        ),
    })
}
/// Run batch Bayesian optimization: `n_rounds` rounds, each suggesting
/// `batch_size` points via `suggest_batch` (fantasy/Kriging-believer
/// batching) and evaluating `objective` at every suggested point.
///
/// The initial-design and result-assembly logic mirrors `optimize`.
///
/// # Errors
/// Propagates sampling/surrogate errors, and fails with
/// `ComputationError` if no observations were ever collected.
pub fn optimize_batch<F>(
    &mut self,
    objective: F,
    n_rounds: usize,
    batch_size: usize,
) -> OptimizeResult<BayesianOptResult>
where
    F: Fn(&ArrayView1<f64>) -> f64,
{
    // Guard against a caller-supplied zero batch size.
    let batch_size = batch_size.max(1);
    // Remaining initial-design budget (same accounting as `optimize`).
    let n_initial = if self.observations.is_empty() {
        self.config.n_initial
    } else {
        self.config
            .n_initial
            .saturating_sub(self.observations.len())
    };
    if n_initial > 0 {
        let sampling_config = SamplingConfig {
            seed: Some(self.rng.random()),
            ..Default::default()
        };
        let initial_points = generate_samples(
            n_initial,
            &self.bounds,
            self.config.initial_design,
            Some(sampling_config),
        )?;
        for i in 0..initial_points.nrows() {
            let mut x = initial_points.row(i).to_owned();
            self.enforce_dim_types(&mut x);
            let y = objective(&x.view());
            self.record_observation(x, y);
        }
        self.fit_surrogate()?;
    }
    // One history entry after the initial design, then one per round.
    let mut best_history = Vec::with_capacity(n_rounds);
    if let Some(best_idx) = self.best_idx {
        best_history.push(self.observations[best_idx].y);
    }
    for _round in 0..n_rounds {
        // All points of a round are proposed before any is evaluated.
        let batch = self.suggest_batch(batch_size)?;
        for x in &batch {
            let y = objective(&x.view());
            self.record_observation(x.clone(), y);
        }
        self.fit_surrogate()?;
        if let Some(best_idx) = self.best_idx {
            best_history.push(self.observations[best_idx].y);
        }
    }
    let best_idx = self.best_idx.ok_or_else(|| {
        OptimizeError::ComputationError("No observations collected".to_string())
    })?;
    let best_obs = &self.observations[best_idx];
    Ok(BayesianOptResult {
        x_best: best_obs.x.clone(),
        f_best: best_obs.y,
        observations: self.observations.clone(),
        n_evals: self.observations.len(),
        best_history,
        success: true,
        message: format!(
            "Batch optimization completed: {} evaluations, best f = {:.6e}",
            self.observations.len(),
            best_obs.y
        ),
    })
}
/// ParEGO-style multi-objective optimization: each new point's objective
/// vector is reduced to a scalar with an augmented Tchebycheff
/// scalarization under randomly drawn weights, and the scalarized problem
/// is driven by the single-objective machinery.
///
/// Falls back to `optimize` for a single objective.
///
/// # Errors
/// Returns `InvalidInput` when `objectives` is empty; otherwise propagates
/// the same errors as `optimize`.
pub fn optimize_multi_objective<F>(
    &mut self,
    objectives: &[F],
    n_iter: usize,
) -> OptimizeResult<BayesianOptResult>
where
    F: Fn(&ArrayView1<f64>) -> f64,
{
    if objectives.is_empty() {
        return Err(OptimizeError::InvalidInput(
            "At least one objective is required".to_string(),
        ));
    }
    if objectives.len() == 1 {
        return self.optimize(&objectives[0], n_iter);
    }
    let n_obj = objectives.len();
    let n_initial = if self.observations.is_empty() {
        self.config.n_initial
    } else {
        self.config
            .n_initial
            .saturating_sub(self.observations.len())
    };
    // Per-objective history of all raw values seen, used for min-max
    // normalization in the iteration loop below.
    let mut all_obj_values: Vec<Vec<f64>> = vec![Vec::new(); n_obj];
    if n_initial > 0 {
        let sampling_config = SamplingConfig {
            seed: Some(self.rng.random()),
            ..Default::default()
        };
        let initial_points = generate_samples(
            n_initial,
            &self.bounds,
            self.config.initial_design,
            Some(sampling_config),
        )?;
        for i in 0..initial_points.nrows() {
            let mut x = initial_points.row(i).to_owned();
            self.enforce_dim_types(&mut x);
            let obj_vals: Vec<f64> = objectives.iter().map(|f| f(&x.view())).collect();
            // NOTE(review): initial points are scalarized from RAW objective
            // values with uniform weights, while the loop below scalarizes
            // min-max-NORMALIZED values with random weights — the two scales
            // are not directly comparable; confirm this is intended.
            let scalarized = parego_scalarize(&obj_vals, &vec![1.0 / n_obj as f64; n_obj]);
            self.record_observation(x, scalarized);
            for (k, &v) in obj_vals.iter().enumerate() {
                all_obj_values[k].push(v);
            }
        }
        self.fit_surrogate()?;
    }
    let mut best_history = Vec::new();
    if let Some(best_idx) = self.best_idx {
        best_history.push(self.observations[best_idx].y);
    }
    for _iter in 0..n_iter {
        // Fresh random weight vector per iteration (ParEGO-style).
        let weights = random_simplex_point(n_obj, &mut self.rng);
        let next_x = self.suggest_next()?;
        let obj_vals: Vec<f64> = objectives.iter().map(|f| f(&next_x.view())).collect();
        for (k, &v) in obj_vals.iter().enumerate() {
            all_obj_values[k].push(v);
        }
        // Min-max normalize the new point's objectives against everything
        // seen so far; the epsilon guards against a zero range.
        let normalized: Vec<f64> = (0..n_obj)
            .map(|k| {
                let vals = &all_obj_values[k];
                let min_v = vals.iter().copied().fold(f64::INFINITY, f64::min);
                let max_v = vals.iter().copied().fold(f64::NEG_INFINITY, f64::max);
                let range = (max_v - min_v).max(1e-12);
                (obj_vals[k] - min_v) / range
            })
            .collect();
        // NOTE(review): only the NEW point is scalarized with this round's
        // weights; earlier observations keep their old scalarized values.
        // Canonical ParEGO re-scalarizes the whole archive each round —
        // confirm this approximation is intended.
        let scalarized = parego_scalarize(&normalized, &weights);
        self.record_observation(next_x, scalarized);
        self.fit_surrogate()?;
        if let Some(best_idx) = self.best_idx {
            best_history.push(self.observations[best_idx].y);
        }
    }
    let best_idx = self.best_idx.ok_or_else(|| {
        OptimizeError::ComputationError("No observations collected".to_string())
    })?;
    let best_obs = &self.observations[best_idx];
    Ok(BayesianOptResult {
        x_best: best_obs.x.clone(),
        f_best: best_obs.y,
        observations: self.observations.clone(),
        n_evals: self.observations.len(),
        best_history,
        success: true,
        message: format!(
            "ParEGO multi-objective optimization completed: {} evaluations",
            self.observations.len()
        ),
    })
}
/// Suggest the next point to evaluate (ask/tell interface).
///
/// While fewer than `n_initial` observations have been recorded, returns a
/// space-filling sample; afterwards, maximizes the acquisition function
/// over the fitted surrogate.
pub fn ask(&mut self) -> OptimizeResult<Array1<f64>> {
    let in_initial_design =
        self.observations.is_empty() || self.observations.len() < self.config.n_initial;
    if !in_initial_design {
        return self.suggest_next();
    }
    // Still filling the initial design: draw one space-filling sample.
    let sampling_config = SamplingConfig {
        seed: Some(self.rng.random()),
        ..Default::default()
    };
    let points = generate_samples(
        1,
        &self.bounds,
        self.config.initial_design,
        Some(sampling_config),
    )?;
    Ok(points.row(0).to_owned())
}
/// Report the objective value `y` observed at `x` (ask/tell interface).
///
/// The surrogate is refit once at least two observations are available.
pub fn tell(&mut self, x: Array1<f64>, y: f64) -> OptimizeResult<()> {
    self.record_observation(x, y);
    match self.observations.len() {
        0 | 1 => Ok(()),
        _ => self.fit_surrogate(),
    }
}
/// Incumbent best observation (feasible-first, then lowest objective),
/// or `None` before any observation has been recorded.
pub fn best(&self) -> Option<&Observation> {
    self.best_idx.map(|i| &self.observations[i])
}
/// All recorded observations, in evaluation order.
pub fn observations(&self) -> &[Observation] {
    &self.observations
}
/// Number of recorded observations.
pub fn n_observations(&self) -> usize {
    self.observations.len()
}
/// The underlying Gaussian-process surrogate.
pub fn surrogate(&self) -> &GpSurrogate {
    &self.surrogate
}
/// Append an observation at `x` with value `y`, evaluating the registered
/// constraints to set its feasibility, and update the incumbent.
///
/// Incumbent rule: a feasible point always beats an infeasible one; between
/// points of equal feasibility, the lower objective value wins.
fn record_observation(&mut self, x: Array1<f64>, y: f64) {
    let feasible = self.evaluate_constraints(&x);
    let idx = self.observations.len();
    let is_new_best = match self.best_idx {
        None => true,
        Some(best) => {
            let incumbent = &self.observations[best];
            if feasible != incumbent.feasible {
                // Feasibility dominates the objective comparison.
                feasible && !incumbent.feasible
            } else {
                y < incumbent.y
            }
        }
    };
    if is_new_best {
        self.best_idx = Some(idx);
    }
    self.observations.push(Observation {
        x,
        y,
        constraints: Vec::new(),
        feasible,
    });
}
/// True when `x` satisfies every registered constraint (`c(x) <= 0`);
/// vacuously true with no constraints. A NaN constraint value counts as a
/// violation (`NaN <= 0.0` is false).
fn evaluate_constraints(&self, x: &Array1<f64>) -> bool {
    let view = x.view();
    for constraint in &self.constraints {
        // Written as a negated `<=` (not `> 0.0`) so NaN marks infeasible.
        if !((constraint.func)(&view) <= 0.0) {
            return false;
        }
    }
    true
}
/// Rebuild the design matrix and target vector from the stored observations
/// and refit the GP surrogate. A no-op with zero observations.
fn fit_surrogate(&mut self) -> OptimizeResult<()> {
    let n_obs = self.observations.len();
    if n_obs == 0 {
        return Ok(());
    }
    // Dimensionality is taken from the first observation.
    let n_dims = self.observations[0].x.len();
    let mut x_data = Array2::zeros((n_obs, n_dims));
    let mut y_data = Array1::zeros(n_obs);
    for (i, obs) in self.observations.iter().enumerate() {
        for j in 0..n_dims {
            x_data[[i, j]] = obs.x[j];
        }
        y_data[i] = obs.y;
    }
    self.surrogate.fit(&x_data, &y_data)
}
/// Propose the next evaluation point by maximizing the configured
/// acquisition function against the current surrogate.
fn suggest_next(&mut self) -> OptimizeResult<Array1<f64>> {
    // Incumbent value for improvement-based acquisitions; 0.0 is a neutral
    // placeholder before any observation exists.
    let f_best = match self.best_idx {
        Some(i) => self.observations[i].y,
        None => 0.0,
    };
    let n = self.observations.len();
    let n_dims = self.bounds.len();
    // Matrix of all visited points, passed to acquisitions that need them.
    let ref_points = (n > 0).then(|| {
        let mut pts = Array2::zeros((n, n_dims));
        for (i, obs) in self.observations.iter().enumerate() {
            for j in 0..n_dims {
                pts[[i, j]] = obs.x[j];
            }
        }
        pts
    });
    let acq = self.config.acquisition.build(f_best, ref_points.as_ref());
    self.optimize_acquisition(acq.as_ref())
}
/// Suggest a batch of `batch_size` points using fantasy (Kriging-believer)
/// observations: each suggested point is temporarily recorded with the GP
/// posterior mean as its value so subsequent suggestions are pushed away
/// from it. The fantasy observations are removed before returning.
///
/// Fixes over the previous version: `best_idx` is always reset after the
/// fantasy points are dropped (previously it could be left pointing at a
/// removed fantasy observation when there were no real observations), and
/// the incumbent rebuild honors the same feasibility-first rule as
/// `record_observation` instead of comparing objective values only.
fn suggest_batch(&mut self, batch_size: usize) -> OptimizeResult<Vec<Array1<f64>>> {
    // Number of real observations before any fantasies are added.
    let n_real = self.observations.len();
    let mut batch = Vec::with_capacity(batch_size);
    for _ in 0..batch_size {
        let next = self.suggest_next()?;
        // Fantasize the outcome with the surrogate's posterior mean.
        let (mu, _sigma) = self.surrogate.predict_single(&next.view())?;
        self.record_observation(next.clone(), mu);
        self.fit_surrogate()?;
        batch.push(next);
    }
    // Drop the fantasy observations and rebuild the incumbent from the real
    // ones only. Reset to None first so no dangling index survives when
    // there are no real observations.
    self.observations.truncate(n_real);
    self.best_idx = None;
    for (i, obs) in self.observations.iter().enumerate() {
        let is_better = match self.best_idx {
            None => true,
            Some(best) => {
                let cur = &self.observations[best];
                if obs.feasible != cur.feasible {
                    // Feasible points dominate infeasible ones.
                    obs.feasible && !cur.feasible
                } else {
                    obs.y < cur.y
                }
            }
        };
        if is_better {
            self.best_idx = Some(i);
        }
    }
    if !self.observations.is_empty() {
        self.fit_surrogate()?;
    }
    Ok(batch)
}
/// Maximize the acquisition function over the search space.
///
/// Strategy: score a batch of random candidates, additionally seed from the
/// incumbent best observation, then locally refine the top
/// `acq_n_restarts` candidates with coordinate-wise golden-section search.
/// The result is clamped to bounds and snapped on integer dimensions.
///
/// Fix over the previous version: each candidate's acquisition value is now
/// computed exactly once (it was evaluated twice — once for the global best
/// scan and again to rank candidates for refinement — doubling the number
/// of GP predictions).
fn optimize_acquisition(&mut self, acq: &dyn AcquisitionFn) -> OptimizeResult<Array1<f64>> {
    let n_dims = self.bounds.len();
    let n_candidates = self.config.acq_n_candidates;
    let n_restarts = self.config.acq_n_restarts;
    let sampling_config = SamplingConfig {
        seed: Some(self.rng.random()),
        ..Default::default()
    };
    let candidates = generate_samples(
        n_candidates,
        &self.bounds,
        SamplingStrategy::Random,
        Some(sampling_config),
    )?;
    // Score every candidate once; keep (value, row index) pairs for both
    // the global best scan and the refinement ranking.
    let mut scored: Vec<(f64, usize)> = Vec::new();
    for i in 0..candidates.nrows() {
        if let Ok(val) = acq.evaluate(&candidates.row(i), &self.surrogate) {
            scored.push((val, i));
        }
    }
    let mut best_x = candidates.row(0).to_owned();
    let mut best_val = f64::NEG_INFINITY;
    for &(val, i) in &scored {
        // `>` skips NaN values, matching the refinement comparisons below.
        if val > best_val {
            best_val = val;
            best_x = candidates.row(i).to_owned();
        }
    }
    // Also consider the incumbent best observation as a starting point.
    if let Some(best_idx) = self.best_idx {
        let obs_x = &self.observations[best_idx].x;
        if let Ok(val) = acq.evaluate(&obs_x.view(), &self.surrogate) {
            if val > best_val {
                best_val = val;
                best_x = obs_x.clone();
            }
        }
    }
    // Refine the highest-scoring candidates with a few rounds of
    // coordinate-wise golden-section search.
    scored.sort_by(|a, b| b.0.partial_cmp(&a.0).unwrap_or(std::cmp::Ordering::Equal));
    let n_refine = n_restarts.min(scored.len());
    for k in 0..n_refine {
        let mut x_current = candidates.row(scored[k].1).to_owned();
        let mut f_current = scored[k].0;
        for _round in 0..3 {
            for d in 0..n_dims {
                let (lo, hi) = self.bounds[d];
                let (refined_x, refined_f) =
                    golden_section_1d(acq, &self.surrogate, &x_current, d, lo, hi, 20)?;
                if refined_f > f_current {
                    x_current[d] = refined_x;
                    f_current = refined_f;
                }
            }
        }
        if f_current > best_val {
            best_val = f_current;
            best_x = x_current;
        }
    }
    // Clamp to bounds and snap integer dimensions before returning.
    for (d, &(lo, hi)) in self.bounds.iter().enumerate() {
        best_x[d] = best_x[d].clamp(lo, hi);
    }
    self.enforce_dim_types(&mut best_x);
    Ok(best_x)
}
}
/// Maximize `acq` along coordinate `dim` of `x_base` over `[lo, hi]` with
/// golden-section search, holding every other coordinate fixed.
///
/// Returns the final bracket midpoint and its acquisition value after at
/// most `max_iters` shrink steps, or earlier once the bracket is narrower
/// than 1e-8.
fn golden_section_1d(
    acq: &dyn AcquisitionFn,
    surrogate: &GpSurrogate,
    x_base: &Array1<f64>,
    dim: usize,
    lo: f64,
    hi: f64,
    max_iters: usize,
) -> OptimizeResult<(f64, f64)> {
    // Inverse golden ratio, approximately 0.618.
    let gr = (5.0_f64.sqrt() - 1.0) / 2.0;
    let eval_at = |t: f64| -> OptimizeResult<f64> {
        let mut probe = x_base.clone();
        probe[dim] = t;
        acq.evaluate(&probe.view(), surrogate)
    };
    let (mut a, mut b) = (lo, hi);
    // Two interior probes at the golden-ratio positions (c < d).
    let mut c = b - gr * (b - a);
    let mut d = a + gr * (b - a);
    let mut fc = eval_at(c)?;
    let mut fd = eval_at(d)?;
    for _ in 0..max_iters {
        if (b - a).abs() < 1e-8 {
            break;
        }
        if fc < fd {
            // Maximum lies in [c, b]: shift the bracket right and reuse the
            // old right probe as the new left probe.
            a = c;
            c = d;
            fc = fd;
            d = a + gr * (b - a);
            fd = eval_at(d)?;
        } else {
            // Maximum lies in [a, d]: shift the bracket left and reuse the
            // old left probe as the new right probe.
            b = d;
            d = c;
            fd = fc;
            c = b - gr * (b - a);
            fc = eval_at(c)?;
        }
    }
    let mid = (a + b) / 2.0;
    let f_mid = eval_at(mid)?;
    Ok((mid, f_mid))
}
/// Augmented Tchebycheff scalarization used by ParEGO.
///
/// Collapses a vector of objective values into
/// `max_k(w_k * f_k) + rho * sum_k(w_k * f_k)`, where the small `rho`
/// augmentation term breaks ties between weakly Pareto-optimal points.
///
/// Mismatched lengths use only the common prefix (zip semantics); empty
/// input yields `f64::NEG_INFINITY`.
///
/// Fix over the previous version: the loop no longer calls `enumerate` for
/// an index that was never used (silences an `unused_variables` warning).
fn parego_scalarize(obj_values: &[f64], weights: &[f64]) -> f64 {
    // Small augmentation coefficient (standard ParEGO choice).
    let rho = 0.05;
    let mut max_wf = f64::NEG_INFINITY;
    let mut sum_wf = 0.0;
    for (&fk, &wk) in obj_values.iter().zip(weights.iter()) {
        let wf = wk * fk;
        if wf > max_wf {
            max_wf = wf;
        }
        sum_wf += wf;
    }
    max_wf + rho * sum_wf
}
/// Draw a uniformly distributed point on the (n-1)-simplex by normalizing
/// independent exponential variates.
///
/// Returns an empty vector for `n == 0` and `[1.0]` for `n == 1`. Falls
/// back to uniform weights in the (practically unreachable) zero-sum case.
fn random_simplex_point(n: usize, rng: &mut StdRng) -> Vec<f64> {
    match n {
        0 => return Vec::new(),
        1 => return vec![1.0],
        _ => {}
    }
    // Exponential variates via inverse CDF; the lower bound keeps ln finite.
    let mut weights: Vec<f64> = (0..n)
        .map(|_| -rng.random_range(1e-10..1.0_f64).ln())
        .collect();
    let total: f64 = weights.iter().sum();
    if total > 0.0 {
        for w in &mut weights {
            *w /= total;
        }
    } else {
        weights.fill(1.0 / n as f64);
    }
    weights
}
/// Convenience wrapper: build a [`BayesianOptimizer`] over `bounds` (with
/// `config`, or the defaults when `None`) and run it for `n_iter`
/// acquisition-driven iterations after the initial design.
pub fn optimize<F>(
    objective: F,
    bounds: &[(f64, f64)],
    n_iter: usize,
    config: Option<BayesianOptimizerConfig>,
) -> OptimizeResult<BayesianOptResult>
where
    F: Fn(&ArrayView1<f64>) -> f64,
{
    let mut optimizer = BayesianOptimizer::new(bounds.to_vec(), config.unwrap_or_default())?;
    optimizer.optimize(objective, n_iter)
}
#[cfg(test)]
mod tests {
    use super::*;
    use scirs2_core::ndarray::array;
    /// Convex quadratic test function with its minimum (0) at the origin.
    fn sphere(x: &ArrayView1<f64>) -> f64 {
        x.iter().map(|&v| v * v).sum()
    }
    /// Classic banana-valley function, minimum 0 at (1, 1).
    /// NOTE(review): currently unused by any test below.
    fn rosenbrock_2d(x: &ArrayView1<f64>) -> f64 {
        (1.0 - x[0]).powi(2) + 100.0 * (x[1] - x[0].powi(2)).powi(2)
    }
    // End-to-end run on the 2-D sphere via the free `optimize` wrapper.
    #[test]
    fn test_optimize_sphere_2d() {
        let config = BayesianOptimizerConfig {
            n_initial: 8,
            seed: Some(42),
            gp_config: GpSurrogateConfig {
                optimize_hyperparams: false,
                noise_variance: 1e-4,
                ..Default::default()
            },
            ..Default::default()
        };
        let result = optimize(sphere, &[(-5.0, 5.0), (-5.0, 5.0)], 25, Some(config))
            .expect("optimization should succeed");
        assert!(result.success);
        assert!(result.f_best < 2.0, "f_best = {:.4}", result.f_best);
    }
    // Drives the ask/tell interface manually for 15 evaluations.
    #[test]
    fn test_optimizer_ask_tell() {
        let config = BayesianOptimizerConfig {
            n_initial: 5,
            seed: Some(42),
            gp_config: GpSurrogateConfig {
                optimize_hyperparams: false,
                noise_variance: 1e-4,
                ..Default::default()
            },
            ..Default::default()
        };
        let mut opt =
            BayesianOptimizer::new(vec![(-5.0, 5.0), (-5.0, 5.0)], config).expect("create ok");
        for _ in 0..15 {
            let x = opt.ask().expect("ask ok");
            let y = sphere(&x.view());
            opt.tell(x, y).expect("tell ok");
        }
        let best = opt.best().expect("should have a best");
        assert!(best.y < 5.0, "best y = {:.4}", best.y);
    }
    // Warm-started points must count as observations and seed the incumbent.
    #[test]
    fn test_warm_start() {
        let config = BayesianOptimizerConfig {
            n_initial: 3,
            seed: Some(42),
            gp_config: GpSurrogateConfig {
                optimize_hyperparams: false,
                noise_variance: 1e-4,
                ..Default::default()
            },
            ..Default::default()
        };
        let mut opt =
            BayesianOptimizer::new(vec![(-5.0, 5.0), (-5.0, 5.0)], config).expect("create ok");
        let x_prev =
            Array2::from_shape_vec((3, 2), vec![0.1, 0.2, -0.3, 0.1, 0.5, -0.5]).expect("shape ok");
        let y_prev = array![0.05, 0.1, 0.5];
        opt.warm_start(&x_prev, &y_prev).expect("warm start ok");
        assert_eq!(opt.n_observations(), 3);
        let result = opt.optimize(sphere, 10).expect("optimize ok");
        assert!(result.f_best < 0.5);
    }
    // 5 rounds x batch 3 after 5 initial points = 20 total evaluations.
    #[test]
    fn test_batch_optimization() {
        let config = BayesianOptimizerConfig {
            n_initial: 5,
            seed: Some(42),
            gp_config: GpSurrogateConfig {
                optimize_hyperparams: false,
                noise_variance: 1e-4,
                ..Default::default()
            },
            ..Default::default()
        };
        let mut opt =
            BayesianOptimizer::new(vec![(-5.0, 5.0), (-5.0, 5.0)], config).expect("create ok");
        let result = opt
            .optimize_batch(sphere, 5, 3)
            .expect("batch optimization ok");
        assert!(result.success);
        assert_eq!(result.n_evals, 20);
    }
    // Constraint 1 - x0 <= 0 should push the best point toward x0 >= 1.
    #[test]
    fn test_constrained_optimization() {
        let config = BayesianOptimizerConfig {
            n_initial: 8,
            seed: Some(42),
            gp_config: GpSurrogateConfig {
                optimize_hyperparams: false,
                noise_variance: 1e-4,
                ..Default::default()
            },
            ..Default::default()
        };
        let mut opt =
            BayesianOptimizer::new(vec![(-5.0, 5.0), (-5.0, 5.0)], config).expect("create ok");
        opt.add_constraint("x0_ge_1", |x: &ArrayView1<f64>| 1.0 - x[0]);
        let result = opt.optimize(sphere, 20).expect("optimize ok");
        assert!(result.success);
        assert!(result.x_best[0] >= 0.5, "x[0] should be near >= 1");
    }
    // Two conflicting quadratics exercise the ParEGO scalarization path.
    #[test]
    fn test_multi_objective_parego() {
        let config = BayesianOptimizerConfig {
            n_initial: 8,
            seed: Some(42),
            gp_config: GpSurrogateConfig {
                optimize_hyperparams: false,
                noise_variance: 1e-4,
                ..Default::default()
            },
            ..Default::default()
        };
        let mut opt =
            BayesianOptimizer::new(vec![(-5.0, 5.0), (-5.0, 5.0)], config).expect("create ok");
        let f1 = |x: &ArrayView1<f64>| (x[0] - 1.0).powi(2) + x[1].powi(2);
        let f2 = |x: &ArrayView1<f64>| (x[0] + 1.0).powi(2) + x[1].powi(2);
        let objectives: Vec<Box<dyn Fn(&ArrayView1<f64>) -> f64>> =
            vec![Box::new(f1), Box::new(f2)];
        let obj_refs: Vec<&dyn Fn(&ArrayView1<f64>) -> f64> = objectives
            .iter()
            .map(|f| f.as_ref() as &dyn Fn(&ArrayView1<f64>) -> f64)
            .collect();
        let result = opt
            .optimize_multi_objective(&obj_refs[..], 15)
            .expect("multi-objective ok");
        assert!(result.success);
        assert!(result.x_best[0].abs() <= 5.0);
    }
    // Smoke-test each acquisition type on a 1-D quadratic.
    #[test]
    fn test_different_acquisition_functions() {
        let bounds = vec![(-3.0, 3.0)];
        for acq in &[
            AcquisitionType::EI { xi: 0.01 },
            AcquisitionType::PI { xi: 0.01 },
            AcquisitionType::UCB { kappa: 2.0 },
            AcquisitionType::Thompson { seed: 42 },
        ] {
            let config = BayesianOptimizerConfig {
                acquisition: acq.clone(),
                n_initial: 5,
                seed: Some(42),
                gp_config: GpSurrogateConfig {
                    optimize_hyperparams: false,
                    noise_variance: 1e-4,
                    ..Default::default()
                },
                ..Default::default()
            };
            let result = optimize(
                |x: &ArrayView1<f64>| x[0].powi(2),
                &bounds,
                10,
                Some(config),
            )
            .expect("optimize ok");
            assert!(
                result.f_best < 3.0,
                "Acquisition {:?} failed: f_best = {}",
                acq,
                result.f_best
            );
        }
    }
    // Inverted interval (lo > hi) must be rejected at construction time.
    #[test]
    fn test_invalid_bounds_rejected() {
        let result = BayesianOptimizer::new(
            vec![(5.0, 1.0)], BayesianOptimizerConfig::default(),
        );
        assert!(result.is_err());
    }
    // Zero-dimensional search space must be rejected.
    #[test]
    fn test_empty_bounds_rejected() {
        let result = BayesianOptimizer::new(vec![], BayesianOptimizerConfig::default());
        assert!(result.is_err());
    }
    // best_history tracks the incumbent, so it must never increase.
    #[test]
    fn test_best_history_monotonic() {
        let config = BayesianOptimizerConfig {
            n_initial: 5,
            seed: Some(42),
            gp_config: GpSurrogateConfig {
                optimize_hyperparams: false,
                noise_variance: 1e-4,
                ..Default::default()
            },
            ..Default::default()
        };
        let result =
            optimize(sphere, &[(-5.0, 5.0), (-5.0, 5.0)], 10, Some(config)).expect("optimize ok");
        for i in 1..result.best_history.len() {
            assert!(
                result.best_history[i] <= result.best_history[i - 1] + 1e-12,
                "Best history not monotonic at index {}: {} > {}",
                i,
                result.best_history[i],
                result.best_history[i - 1]
            );
        }
    }
    // max(0.15, 0.35) + 0.05 * (0.15 + 0.35) = 0.375.
    #[test]
    fn test_parego_scalarize() {
        let obj = [0.3, 0.7];
        let w = [0.5, 0.5];
        let s = parego_scalarize(&obj, &w);
        assert!((s - 0.375).abs() < 1e-10);
    }
    // Simplex draws must be non-negative and sum to one for every n.
    #[test]
    fn test_random_simplex_point_sums_to_one() {
        let mut rng = StdRng::seed_from_u64(42);
        for n in 1..6 {
            let pt = random_simplex_point(n, &mut rng);
            assert_eq!(pt.len(), n);
            let sum: f64 = pt.iter().sum();
            assert!((sum - 1.0).abs() < 1e-10, "Simplex sum = {}", sum);
            for &v in &pt {
                assert!(v >= 0.0, "Simplex component negative: {}", v);
            }
        }
    }
    // 1-D quadratic with an off-center minimum at x = 2.
    #[test]
    fn test_optimize_1d() {
        let config = BayesianOptimizerConfig {
            n_initial: 5,
            seed: Some(42),
            gp_config: GpSurrogateConfig {
                optimize_hyperparams: false,
                noise_variance: 1e-4,
                ..Default::default()
            },
            ..Default::default()
        };
        let result = optimize(
            |x: &ArrayView1<f64>| (x[0] - 2.0).powi(2),
            &[(-5.0, 5.0)],
            15,
            Some(config),
        )
        .expect("optimize ok");
        assert!(
            (result.x_best[0] - 2.0).abs() < 1.5,
            "x_best = {:.4}, expected ~2.0",
            result.x_best[0]
        );
        assert!(result.f_best < 2.0);
    }
    // Every evaluated point must be an in-bounds integer, and the best
    // integer in [0, 3] for (v - 2)^2 is exactly 2.
    #[test]
    fn test_integer_dimension_enforcement() {
        let bounds = vec![(0.0, 3.0)];
        let config = BayesianOptimizerConfig {
            n_initial: 4,
            seed: Some(42),
            acq_n_candidates: 50,
            ..Default::default()
        };
        let mut opt = BayesianOptimizer::new(bounds, config).expect("Failed to create optimizer");
        opt.set_dimension_types(vec![DimensionType::Integer])
            .expect("Failed to set dim types");
        let result = opt
            .optimize(
                |x| {
                    let v = x[0];
                    (v - 2.0).powi(2)
                },
                6,
            )
            .expect("Optimization failed");
        for obs in &result.observations {
            let v = obs.x[0];
            assert!(v >= 0.0 && v <= 3.0, "Out of bounds: {}", v);
            assert!((v - v.round()).abs() < 1e-12, "Not integer: {}", v);
        }
        assert!(
            (result.x_best[0] - 2.0).abs() < 1e-12,
            "Best x should be 2, got {}",
            result.x_best[0]
        );
    }
    // Type list shorter than the bounds list must be rejected.
    #[test]
    fn test_set_dimension_types_length_mismatch() {
        let bounds = vec![(0.0, 5.0), (0.0, 5.0)];
        let config = BayesianOptimizerConfig::default();
        let mut opt = BayesianOptimizer::new(bounds, config).expect("Failed to create optimizer");
        let result = opt.set_dimension_types(vec![DimensionType::Integer]);
        assert!(result.is_err());
    }
}