// scirs2_optimize/bayesian/mod.rs
//! Bayesian Optimization module for `scirs2-optimize`.
//!
//! Provides a comprehensive Bayesian optimization framework for black-box,
//! expensive-to-evaluate objective functions. The approach uses a Gaussian
//! Process surrogate to model the objective and acquisition functions to
//! decide where to sample next.
//!
//! # Architecture
//!
//! ```text
//! +-----------------+     +-------------------+     +------------------+
//! |  GP Surrogate   |<--->| Acquisition Func  |<--->|  Bayesian Optim  |
//! |    (gp.rs)      |     | (acquisition.rs)  |     |  (optimizer.rs)  |
//! +-----------------+     +-------------------+     +------------------+
//!         ^                                                   |
//!         |               +-------------------+               |
//!         +---------------|  Sampling Design  |<--------------+
//!                         |   (sampling.rs)   |
//!                         +-------------------+
//! ```
//!
//! # Modules
//!
//! - [`gp`] -- Gaussian Process surrogate with multiple kernels
//! - [`acquisition`] -- Acquisition functions (EI, PI, UCB, KG, Thompson, batch variants)
//! - [`optimizer`] -- Main optimizer loop (sequential, batch, constrained, multi-objective)
//! - [`sampling`] -- Initial design strategies (LHS, Sobol, Halton, random)
//!
//! # Quick Start
//!
//! ```rust
//! use scirs2_optimize::bayesian::optimize;
//! use scirs2_core::ndarray::ArrayView1;
//!
//! // Minimize a simple quadratic function
//! let result = optimize(
//!     |x: &ArrayView1<f64>| x[0].powi(2) + x[1].powi(2),
//!     &[(-5.0, 5.0), (-5.0, 5.0)],
//!     20,
//!     None,
//! ).expect("optimization failed");
//!
//! println!("Best x: {:?}", result.x_best);
//! println!("Best f: {:.6}", result.f_best);
//! ```
//!
//! # Advanced Usage
//!
//! ## Custom Kernel & Acquisition
//!
//! ```rust
//! use scirs2_optimize::bayesian::{
//!     BayesianOptimizer, BayesianOptimizerConfig,
//!     MaternKernel, MaternVariant,
//!     AcquisitionType, GpSurrogateConfig,
//! };
//! use scirs2_core::ndarray::ArrayView1;
//!
//! let config = BayesianOptimizerConfig {
//!     acquisition: AcquisitionType::UCB { kappa: 2.5 },
//!     n_initial: 8,
//!     seed: Some(42),
//!     gp_config: GpSurrogateConfig {
//!         noise_variance: 1e-4,
//!         optimize_hyperparams: false,
//!         ..Default::default()
//!     },
//!     ..Default::default()
//! };
//!
//! let kernel = Box::new(MaternKernel::new(MaternVariant::FiveHalves, 1.0, 1.0));
//! let mut opt = BayesianOptimizer::with_kernel(
//!     vec![(-5.0, 5.0), (-5.0, 5.0)],
//!     kernel,
//!     config,
//! ).expect("create optimizer");
//!
//! let result = opt.optimize(
//!     |x: &ArrayView1<f64>| x[0].powi(2) + x[1].powi(2),
//!     20,
//! ).expect("optimization ok");
//! ```

84pub mod acquisition;
85pub mod gp;
86pub mod optimizer;
87pub mod sampling;
88
89// ---- Re-exports for convenient access ----
90
91// GP surrogate
92pub use gp::{
93 GpSurrogate,
94 GpSurrogateConfig,
95 // Kernels
96 MaternKernel,
97 MaternVariant,
98 ProductKernel,
99 RationalQuadraticKernel,
100 RbfKernel,
101 SumKernel,
102 SurrogateKernel,
103};
104
105// Acquisition functions
106pub use acquisition::{
107 AcquisitionFn, AcquisitionType, BatchExpectedImprovement, BatchUpperConfidenceBound,
108 ExpectedImprovement, KnowledgeGradient, ProbabilityOfImprovement, ThompsonSampling,
109 UpperConfidenceBound,
110};
111
112// Optimizer
113pub use optimizer::{
114 optimize, BayesianOptResult, BayesianOptimizer, BayesianOptimizerConfig, Constraint,
115 Observation,
116};
117
118// Sampling
119pub use sampling::{generate_samples, SamplingConfig, SamplingStrategy};