scirs2_optimize/
lib.rs

//! Optimization module for SciRS
//!
//! This module provides implementations of various optimization algorithms,
//! modeled after SciPy's `optimize` module.
//!
//! ## Submodules
//!
//! * `unconstrained`: Unconstrained optimization algorithms
//! * `constrained`: Constrained optimization algorithms
//! * `least_squares`: Least squares minimization (including robust methods)
//! * `roots`: Root finding algorithms
//! * `scalar`: Scalar (univariate) optimization algorithms
//! * `global`: Global optimization algorithms
//!
//! ## Optimization Methods
//!
//! The following optimization methods are currently implemented:
//!
//! ### Unconstrained:
//! - **Nelder-Mead**: A derivative-free method using a simplex-based approach
//! - **Powell**: Derivative-free method using conjugate directions
//! - **BFGS**: Quasi-Newton method with BFGS update
//! - **CG**: Nonlinear conjugate gradient method
//!
//! ### Constrained:
//! - **SLSQP**: Sequential Least SQuares Programming
//! - **TrustConstr**: Trust-region constrained optimizer
//!
//! ### Scalar (Univariate) Optimization:
//! - **Brent**: Combines parabolic interpolation with golden section search
//! - **Bounded**: Brent's method with bounds constraints
//! - **Golden**: Golden section search
//!
//! ### Global:
//! - **Differential Evolution**: Stochastic global optimization method
//! - **Basin-hopping**: Random perturbations with local minimization
//! - **Dual Annealing**: Generalized simulated annealing with a fast annealing schedule
//! - **Particle Swarm**: Population-based optimization inspired by swarm behavior
//! - **Simulated Annealing**: Probabilistic optimization with a cooling schedule
//!
//! ### Least Squares:
//! - **Levenberg-Marquardt**: Trust-region algorithm for nonlinear least squares
//! - **Trust Region Reflective**: Bounds-constrained least squares
//! - **Robust Least Squares**: M-estimators for outlier-resistant regression
//!   - Huber loss: Reduces influence of moderate outliers
//!   - Bisquare loss: Completely rejects extreme outliers
//!   - Cauchy loss: Provides very strong outlier resistance
//! - **Weighted Least Squares**: Handles heteroscedastic data (varying variance)
//! - **Bounded Least Squares**: Box constraints on parameters
//! - **Separable Least Squares**: Variable projection for partially linear models
//! - **Total Least Squares**: Errors-in-variables regression
//!
//! ## Bounds Support
//!
//! The `unconstrained` module supports bounds constraints on variables.
//! You can specify lower and upper bounds for each variable, and the optimizer
//! will ensure that all iterates remain within these bounds.
//!
//! The following methods support bounds constraints:
//! - Powell
//! - Nelder-Mead
//! - BFGS
//! - CG (Conjugate Gradient)
//!
//! ## Examples
//!
//! ### Basic Optimization
//!
//! ```
//! // Example of minimizing a function using BFGS
//! use ndarray::ArrayView1;
//! use scirs2_optimize::unconstrained::{minimize, Method};
//!
//! fn rosenbrock(x: &ArrayView1<f64>) -> f64 {
//!     let a = 1.0;
//!     let b = 100.0;
//!     let x0 = x[0];
//!     let x1 = x[1];
//!     (a - x0).powi(2) + b * (x1 - x0.powi(2)).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let initial_guess = [0.0, 0.0];
//! let result = minimize(rosenbrock, &initial_guess, Method::BFGS, None)?;
//!
//! println!("Solution: {:?}", result.x);
//! println!("Function value at solution: {}", result.fun);
//! println!("Number of iterations: {}", result.iterations);
//! println!("Success: {}", result.success);
//! # Ok(())
//! # }
//! ```
//!
//! ### Optimization with Bounds
//!
//! ```
//! // Example of minimizing a function with bounds constraints
//! use ndarray::ArrayView1;
//! use scirs2_optimize::{Bounds, unconstrained::{minimize, Method, Options}};
//!
//! // A function with minimum at (-1, -1)
//! fn func(x: &ArrayView1<f64>) -> f64 {
//!     (x[0] + 1.0).powi(2) + (x[1] + 1.0).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Create bounds: x >= 0, y >= 0
//! // This will constrain the optimization to the positive quadrant
//! let bounds = Bounds::new(&[(Some(0.0), None), (Some(0.0), None)]);
//!
//! let initial_guess = [0.5, 0.5];
//! let mut options = Options::default();
//! options.bounds = Some(bounds);
//!
//! // Use Powell's method, which supports bounds
//! let result = minimize(func, &initial_guess, Method::Powell, Some(options))?;
//!
//! // The constrained minimum should be at [0, 0] with value 2.0
//! println!("Solution: {:?}", result.x);
//! println!("Function value at solution: {}", result.fun);
//! # Ok(())
//! # }
//! ```
//!
//! ### Bounds Creation Options
//!
//! ```
//! use scirs2_optimize::Bounds;
//!
//! // Create bounds from pairs
//! // Format: [(min_x1, max_x1), (min_x2, max_x2), ...] where None = unbounded
//! let bounds1 = Bounds::new(&[
//!     (Some(0.0), Some(1.0)),  // 0 <= x[0] <= 1
//!     (Some(-1.0), None),      // x[1] >= -1, no upper bound
//!     (None, Some(10.0)),      // x[2] <= 10, no lower bound
//!     (None, None)             // x[3] is completely unbounded
//! ]);
//!
//! // Alternative: create from separate lower and upper bound vectors
//! let lb = vec![Some(0.0), Some(-1.0), None, None];
//! let ub = vec![Some(1.0), None, Some(10.0), None];
//! let bounds2 = Bounds::from_vecs(lb, ub).unwrap();
//! ```
//!
//! ### Robust Least Squares Example
//!
//! ```
//! use ndarray::{array, Array1, Array2};
//! use scirs2_optimize::least_squares::{robust_least_squares, HuberLoss};
//!
//! // Define residual function for linear regression
//! fn residual(params: &[f64], data: &[f64]) -> Array1<f64> {
//!     let n = data.len() / 2;
//!     let x_vals = &data[0..n];
//!     let y_vals = &data[n..];
//!
//!     let mut res = Array1::zeros(n);
//!     for i in 0..n {
//!         res[i] = y_vals[i] - (params[0] + params[1] * x_vals[i]);
//!     }
//!     res
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Data: x values followed by y values; the last y-value (10.0) is an outlier
//! let data = array![0., 1., 2., 3., 4., 0.1, 0.9, 2.1, 2.9, 10.0];
//! let x0 = array![0.0, 0.0];
//!
//! // Use Huber loss for robustness
//! let huber_loss = HuberLoss::new(1.0);
//! let result = robust_least_squares(
//!     residual,
//!     &x0,
//!     huber_loss,
//!     None::<fn(&[f64], &[f64]) -> Array2<f64>>,
//!     &data,
//!     None
//! )?;
//!
//! println!("Robust solution: intercept={:.3}, slope={:.3}",
//!          result.x[0], result.x[1]);
//! # Ok(())
//! # }
//! ```
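//!
//! ### Scalar Minimization (Sketch)
//!
//! The `scalar` module provides the univariate minimizers listed above (Brent,
//! Bounded, Golden) through `minimize_scalar`. The snippet below is a minimal
//! sketch rather than a verified doctest: the exact argument list and result
//! fields of `minimize_scalar` may differ, so it is marked `ignore`; check the
//! `scalar` module documentation and adjust accordingly.
//!
//! ```ignore
//! use scirs2_optimize::scalar::{minimize_scalar, Method, Options};
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Minimize f(x) = (x - 2)^2, whose minimum is at x = 2.
//! let f = |x: f64| (x - 2.0).powi(2);
//!
//! // ASSUMPTION: `minimize_scalar` takes the objective, a method, and optional
//! // options; verify against `scalar::minimize_scalar` before use.
//! let result = minimize_scalar(f, Method::Brent, None::<Options>)?;
//! println!("Minimum near x = {}", result.x);
//! # Ok(())
//! # }
//! ```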
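//!
//! ### Global Optimization (Sketch)
//!
//! Global optimizers such as `differential_evolution` search a bounded domain
//! rather than starting from a single point. The call shape below (objective,
//! per-variable bounds, optional options) is an assumption modeled on SciPy's
//! interface and is marked `ignore`; consult `global::differential_evolution`
//! for the actual parameters.
//!
//! ```ignore
//! use ndarray::ArrayView1;
//! use scirs2_optimize::global::{differential_evolution, DifferentialEvolutionOptions};
//!
//! // Objective with its global minimum at (0, 0).
//! fn sphere(x: &ArrayView1<f64>) -> f64 {
//!     x.iter().map(|v| v * v).sum()
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // ASSUMPTION: bounds are supplied as (lower, upper) pairs, one per variable.
//! let bounds = vec![(-5.0, 5.0), (-5.0, 5.0)];
//! let result = differential_evolution(sphere, &bounds, None::<DifferentialEvolutionOptions>)?;
//! println!("Best point found: {:?}", result.x);
//! # Ok(())
//! # }
//! ```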
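//!
//! ### Root Finding (Sketch)
//!
//! The `roots` module provides a `root` function (re-exported at the crate
//! root) for solving systems of nonlinear equations. The sketch below assumes a
//! SciPy-like interface (residual function, initial guess, method, optional
//! Jacobian and options) and an assumed `Method::Hybr` variant; it is marked
//! `ignore` because the real signature lives in the `roots` module and may
//! differ.
//!
//! ```ignore
//! use ndarray::{array, Array1, Array2, ArrayView1};
//! use scirs2_optimize::roots::{root, Method};
//!
//! // System of equations with a root at (1, 1): f1 = x0 - 1, f2 = x1 - 1.
//! fn system(x: &ArrayView1<f64>) -> Array1<f64> {
//!     array![x[0] - 1.0, x[1] - 1.0]
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let x0 = array![0.0, 0.0];
//!
//! // ASSUMPTION: argument order, method variant, and Jacobian type are illustrative only.
//! let result = root(system, &x0, Method::Hybr,
//!                   None::<fn(&ArrayView1<f64>) -> Array2<f64>>, None)?;
//! println!("Root: {:?}", result.x);
//! # Ok(())
//! # }
//! ```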

#![allow(clippy::field_reassign_with_default)]

extern crate openblas_src;

// Export error types
pub mod error;
pub use error::{OptimizeError, OptimizeResult};

// Module structure
pub mod constrained;
pub mod global;
pub mod least_squares;
pub mod parallel;
pub mod roots;
pub mod roots_anderson;
pub mod roots_krylov;
pub mod scalar;
pub mod sparse_numdiff;
pub mod unconstrained;

// Common optimization result structure
pub mod result;
pub use result::OptimizeResults;

// Convenience re-exports for common functions
pub use constrained::minimize_constrained;
pub use global::{
    basinhopping, bayesian_optimization, differential_evolution, dual_annealing, multi_start,
    particle_swarm, simulated_annealing,
};
pub use least_squares::{
    bounded_least_squares, least_squares, robust_least_squares, separable_least_squares,
    total_least_squares, weighted_least_squares, BisquareLoss, CauchyLoss, HuberLoss,
};
pub use roots::root;
pub use scalar::minimize_scalar;
pub use sparse_numdiff::{sparse_hessian, sparse_jacobian, SparseFiniteDiffOptions};
pub use unconstrained::{minimize, Bounds};

// Prelude module for convenient imports
pub mod prelude {
    pub use crate::constrained::{minimize_constrained, Method as ConstrainedMethod};
    pub use crate::error::{OptimizeError, OptimizeResult};
    pub use crate::global::{
        basinhopping, bayesian_optimization, differential_evolution, dual_annealing,
        particle_swarm, simulated_annealing, AcquisitionFunctionType, BasinHoppingOptions,
        BayesianOptimizationOptions, BayesianOptimizer, DifferentialEvolutionOptions,
        DualAnnealingOptions, InitialPointGenerator, KernelType, Parameter, ParticleSwarmOptions,
        SimulatedAnnealingOptions, Space,
    };
    pub use crate::least_squares::{
        bounded_least_squares, least_squares, robust_least_squares, separable_least_squares,
        total_least_squares, weighted_least_squares, BisquareLoss, BoundedOptions, CauchyLoss,
        HuberLoss, LinearSolver, Method as LeastSquaresMethod, RobustLoss, RobustOptions,
        SeparableOptions, SeparableResult, TLSMethod, TotalLeastSquaresOptions,
        TotalLeastSquaresResult, WeightedOptions,
    };
    pub use crate::parallel::{
        parallel_evaluate_batch, parallel_finite_diff_gradient, ParallelOptions,
    };
    pub use crate::result::OptimizeResults;
    pub use crate::roots::{root, Method as RootMethod};
    pub use crate::scalar::{
        minimize_scalar, Method as ScalarMethod, Options as ScalarOptions, ScalarOptimizeResult,
    };
    pub use crate::sparse_numdiff::{sparse_hessian, sparse_jacobian, SparseFiniteDiffOptions};
    pub use crate::unconstrained::{minimize, Bounds, Method as UnconstrainedMethod, Options};
}

#[cfg(test)]
mod tests {
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}