scirs2_optimize/lib.rs
//! Optimization module for SciRS
//!
//! This module provides implementations of various optimization algorithms,
//! modeled after SciPy's `optimize` module.
//!

#![allow(clippy::field_reassign_with_default)]
// Allow common mathematical conventions in optimization code
#![allow(clippy::many_single_char_names)] // x, f, g, h, n, m etc. are standard in optimization
#![allow(clippy::similar_names)] // x_pp, x_pm, x_mp, x_mm are standard for finite differences
//! ## Submodules
//!
//! * `unconstrained`: Unconstrained optimization algorithms
//! * `constrained`: Constrained optimization algorithms
//! * `least_squares`: Least squares minimization (including robust methods)
//! * `roots`: Root finding algorithms
//! * `scalar`: Scalar (univariate) optimization algorithms
//! * `global`: Global optimization algorithms
//!
//! ## Optimization Methods
//!
//! The following optimization methods are currently implemented:
//!
//! ### Unconstrained:
//! - **Nelder-Mead**: A derivative-free method using a simplex-based approach
//! - **Powell**: Derivative-free method using conjugate directions
//! - **BFGS**: Quasi-Newton method with BFGS update
//! - **CG**: Nonlinear conjugate gradient method
//!
//! ### Constrained:
//! - **SLSQP**: Sequential Least SQuares Programming
//! - **TrustConstr**: Trust-region constrained optimizer
//!
//! ### Scalar (Univariate) Optimization:
//! - **Brent**: Combines parabolic interpolation with golden section search
//! - **Bounded**: Brent's method with bounds constraints
//! - **Golden**: Golden section search
//!
//! ### Global:
//! - **Differential Evolution**: Stochastic global optimization method
//! - **Basin-hopping**: Random perturbations with local minimization
//! - **Dual Annealing**: Combines classical and fast simulated annealing with local search
//! - **Particle Swarm**: Population-based optimization inspired by swarm behavior
//! - **Simulated Annealing**: Probabilistic optimization with a cooling schedule
//!
//! ### Least Squares:
//! - **Levenberg-Marquardt**: Trust-region algorithm for nonlinear least squares
//! - **Trust Region Reflective**: Bounds-constrained least squares
//! - **Robust Least Squares**: M-estimators for outlier-resistant regression
//!   - Huber loss: Reduces influence of moderate outliers
//!   - Bisquare loss: Completely rejects extreme outliers
//!   - Cauchy loss: Provides very strong outlier resistance
//! - **Weighted Least Squares**: Handles heteroscedastic data (varying variance)
//! - **Bounded Least Squares**: Box constraints on parameters
//! - **Separable Least Squares**: Variable projection for partially linear models
//! - **Total Least Squares**: Errors-in-variables regression
//!
//! ## Bounds Support
//!
//! The `unconstrained` module supports bounds constraints for variables.
//! You can specify lower and upper bounds for each variable, and the optimizer
//! will ensure that all iterates remain within these bounds.
//!
//! The following methods support bounds constraints:
//! - Powell
//! - Nelder-Mead
//! - BFGS
//! - CG (Conjugate Gradient)
//!
//! ## Examples
//!
//! ### Basic Optimization
//!
//! ```
//! // Example of minimizing a function using BFGS
//! use ndarray::ArrayView1;
//! use scirs2_optimize::unconstrained::{minimize, Method};
//!
//! fn rosenbrock(x: &ArrayView1<f64>) -> f64 {
//!     let a = 1.0;
//!     let b = 100.0;
//!     let x0 = x[0];
//!     let x1 = x[1];
//!     (a - x0).powi(2) + b * (x1 - x0.powi(2)).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let initial_guess = [0.0, 0.0];
//! let result = minimize(rosenbrock, &initial_guess, Method::BFGS, None)?;
//!
//! println!("Solution: {:?}", result.x);
//! println!("Function value at solution: {}", result.fun);
//! println!("Number of iterations: {}", result.iterations);
//! println!("Success: {}", result.success);
//! # Ok(())
//! # }
//! ```
//!
//! ### Optimization with Bounds
//!
//! ```
//! // Example of minimizing a function with bounds constraints
//! use ndarray::ArrayView1;
//! use scirs2_optimize::{Bounds, unconstrained::{minimize, Method, Options}};
//!
//! // A function with minimum at (-1, -1)
//! fn func(x: &ArrayView1<f64>) -> f64 {
//!     (x[0] + 1.0).powi(2) + (x[1] + 1.0).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Create bounds: x >= 0, y >= 0
//! // This will constrain the optimization to the positive quadrant
//! let bounds = Bounds::new(&[(Some(0.0), None), (Some(0.0), None)]);
//!
//! let initial_guess = [0.5, 0.5];
//! let mut options = Options::default();
//! options.bounds = Some(bounds);
//!
//! // Use Powell's method, which supports bounds
//! let result = minimize(func, &initial_guess, Method::Powell, Some(options))?;
//!
//! // The constrained minimum should be at [0, 0] with value 2.0
//! println!("Solution: {:?}", result.x);
//! println!("Function value at solution: {}", result.fun);
//! # Ok(())
//! # }
//! ```
//!
//! ### Bounds Creation Options
//!
//! ```
//! use scirs2_optimize::Bounds;
//!
//! // Create bounds from pairs
//! // Format: [(min_x1, max_x1), (min_x2, max_x2), ...] where None = unbounded
//! let bounds1 = Bounds::new(&[
//!     (Some(0.0), Some(1.0)),  // 0 <= x[0] <= 1
//!     (Some(-1.0), None),      // x[1] >= -1, no upper bound
//!     (None, Some(10.0)),      // x[2] <= 10, no lower bound
//!     (None, None)             // x[3] is completely unbounded
//! ]);
//!
//! // Alternative: create from separate lower and upper bound vectors
//! let lb = vec![Some(0.0), Some(-1.0), None, None];
//! let ub = vec![Some(1.0), None, Some(10.0), None];
//! let bounds2 = Bounds::from_vecs(lb, ub).unwrap();
//! ```
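//!
//! ### Global Optimization (Sketch)
//!
//! Global methods such as differential evolution explore the entire search
//! space rather than a single basin of attraction. The example below is a
//! minimal sketch and is marked `ignore`: the exact signature of
//! `differential_evolution` and the fields of `DifferentialEvolutionOptions`
//! are assumptions here and may differ from the actual API (see the `global`
//! module documentation).
//!
//! ```ignore
//! use ndarray::ArrayView1;
//! use scirs2_optimize::global::{differential_evolution, DifferentialEvolutionOptions};
//!
//! // Rastrigin function: highly multimodal, global minimum at the origin
//! fn rastrigin(x: &ArrayView1<f64>) -> f64 {
//!     x.iter()
//!         .map(|&xi| xi * xi - 10.0 * (2.0 * std::f64::consts::PI * xi).cos() + 10.0)
//!         .sum()
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Search box for each variable, assumed to be given as (lower, upper) pairs
//! let bounds = vec![(-5.12, 5.12), (-5.12, 5.12)];
//! // Assumed to provide sensible defaults (population size, tolerance, ...)
//! let options = DifferentialEvolutionOptions::default();
//!
//! let result = differential_evolution(rastrigin, &bounds, Some(options))?;
//! println!("Best point found: {:?}", result.x);
//! # Ok(())
//! # }
//! ```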
//!
//! ### Robust Least Squares Example
//!
//! ```
//! use ndarray::{array, Array1, Array2};
//! use scirs2_optimize::least_squares::{robust_least_squares, HuberLoss};
//!
//! // Define residual function for linear regression
//! fn residual(params: &[f64], data: &[f64]) -> Array1<f64> {
//!     let n = data.len() / 2;
//!     let x_vals = &data[0..n];
//!     let y_vals = &data[n..];
//!
//!     let mut res = Array1::zeros(n);
//!     for i in 0..n {
//!         res[i] = y_vals[i] - (params[0] + params[1] * x_vals[i]);
//!     }
//!     res
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Data with outliers
//! let data = array![0., 1., 2., 3., 4., 0.1, 0.9, 2.1, 2.9, 10.0];
//! let x0 = array![0.0, 0.0];
//!
//! // Use Huber loss for robustness
//! let huber_loss = HuberLoss::new(1.0);
//! let result = robust_least_squares(
//!     residual,
//!     &x0,
//!     huber_loss,
//!     None::<fn(&[f64], &[f64]) -> Array2<f64>>,
//!     &data,
//!     None
//! )?;
//!
//! println!("Robust solution: intercept={:.3}, slope={:.3}",
//!          result.x[0], result.x[1]);
//! # Ok(())
//! # }
//! ```
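//!
//! ### Scalar Minimization (Sketch)
//!
//! For one-dimensional problems the `scalar` module provides `minimize_scalar`,
//! the univariate counterpart of `minimize`. The example below is a minimal
//! sketch and is marked `ignore`: the exact call signature and the fields of
//! the returned result are assumptions here and may differ from the actual API
//! (see the `scalar` module documentation).
//!
//! ```ignore
//! use scirs2_optimize::scalar::{minimize_scalar, Method};
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Simple quadratic with its minimum at x = 2
//! let f = |x: f64| (x - 2.0).powi(2);
//!
//! let result = minimize_scalar(f, Method::Brent, None)?;
//! println!("Minimizer: {}", result.x);
//! # Ok(())
//! # }
//! ```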

// BLAS backend linking handled through scirs2-core

// Export error types
pub mod error;
pub use error::{OptimizeError, OptimizeResult};

// Module structure
#[cfg(feature = "async")]
pub mod async_parallel;
pub mod automatic_differentiation;
pub mod constrained;
pub mod global;
pub mod jit_optimization;
pub mod least_squares;
pub mod ml_optimizers;
pub mod multi_objective;
pub mod neural_integration;
pub mod parallel;
pub mod roots;
pub mod roots_anderson;
pub mod roots_krylov;
pub mod scalar;
pub mod simd_ops;
pub mod sparse_numdiff; // Refactored into a module with submodules
pub mod stochastic;
pub mod unconstrained;

// Common optimization result structure
pub mod result;
pub use result::OptimizeResults;

// Convenience re-exports for common functions
#[cfg(feature = "async")]
pub use async_parallel::{
    AsyncDifferentialEvolution, AsyncOptimizationConfig, AsyncOptimizationStats,
    SlowEvaluationStrategy,
};
pub use automatic_differentiation::{
    autodiff, create_ad_gradient, create_ad_hessian, optimize_ad_mode, ADMode, ADResult,
    AutoDiffFunction, AutoDiffOptions,
};
pub use constrained::minimize_constrained;
pub use global::{
    basinhopping, bayesian_optimization, differential_evolution, dual_annealing,
    generate_diverse_start_points, multi_start, multi_start_with_clustering, particle_swarm,
    simulated_annealing,
};
pub use jit_optimization::{optimize_function, FunctionPattern, JitCompiler, JitOptions, JitStats};
pub use least_squares::{
    bounded_least_squares, least_squares, robust_least_squares, separable_least_squares,
    total_least_squares, weighted_least_squares, BisquareLoss, CauchyLoss, HuberLoss,
};
pub use ml_optimizers::{
    ml_problems, ADMMOptimizer, CoordinateDescentOptimizer, ElasticNetOptimizer,
    GroupLassoOptimizer, LassoOptimizer,
};
pub use multi_objective::{
    scalarization, MultiObjectiveConfig, MultiObjectiveResult, MultiObjectiveSolution, NSGAII,
    NSGAIII,
};
pub use neural_integration::{optimizers, NeuralOptimizer, NeuralParameters, NeuralTrainer};
pub use roots::root;
pub use scalar::minimize_scalar;
pub use sparse_numdiff::{sparse_hessian, sparse_jacobian, SparseFiniteDiffOptions};
pub use stochastic::{
    minimize_adam, minimize_adamw, minimize_rmsprop, minimize_sgd, minimize_sgd_momentum,
    minimize_stochastic, AdamOptions, AdamWOptions, DataProvider, InMemoryDataProvider,
    LearningRateSchedule, MomentumOptions, RMSPropOptions, SGDOptions, StochasticGradientFunction,
    StochasticMethod, StochasticOptions,
};
pub use unconstrained::{minimize, Bounds};

// Prelude module for convenient imports
pub mod prelude {
    #[cfg(feature = "async")]
    pub use crate::async_parallel::{
        AsyncDifferentialEvolution, AsyncOptimizationConfig, AsyncOptimizationStats,
        SlowEvaluationStrategy,
    };
    pub use crate::automatic_differentiation::{
        autodiff, create_ad_gradient, create_ad_hessian, optimize_ad_mode, ADMode, ADResult,
        AutoDiffFunction, AutoDiffOptions, Dual, DualNumber,
    };
    pub use crate::constrained::{minimize_constrained, Method as ConstrainedMethod};
    pub use crate::error::{OptimizeError, OptimizeResult};
    pub use crate::global::{
        basinhopping, bayesian_optimization, differential_evolution, dual_annealing,
        generate_diverse_start_points, multi_start_with_clustering, particle_swarm,
        simulated_annealing, AcquisitionFunctionType, BasinHoppingOptions,
        BayesianOptimizationOptions, BayesianOptimizer, ClusterCentroid, ClusteringAlgorithm,
        ClusteringOptions, ClusteringResult, DifferentialEvolutionOptions, DualAnnealingOptions,
        InitialPointGenerator, KernelType, LocalMinimum, Parameter, ParticleSwarmOptions,
        SimulatedAnnealingOptions, Space, StartPointStrategy,
    };
    pub use crate::jit_optimization::{
        optimize_function, FunctionPattern, JitCompiler, JitOptions, JitStats,
    };
    pub use crate::least_squares::{
        bounded_least_squares, least_squares, robust_least_squares, separable_least_squares,
        total_least_squares, weighted_least_squares, BisquareLoss, BoundedOptions, CauchyLoss,
        HuberLoss, LinearSolver, Method as LeastSquaresMethod, RobustLoss, RobustOptions,
        SeparableOptions, SeparableResult, TLSMethod, TotalLeastSquaresOptions,
        TotalLeastSquaresResult, WeightedOptions,
    };
    pub use crate::ml_optimizers::{
        ml_problems, ADMMOptimizer, CoordinateDescentOptimizer, ElasticNetOptimizer,
        GroupLassoOptimizer, LassoOptimizer,
    };
    pub use crate::multi_objective::{
        scalarization, MultiObjectiveConfig, MultiObjectiveResult, MultiObjectiveSolution, NSGAII,
        NSGAIII,
    };
    pub use crate::neural_integration::{
        optimizers, NeuralOptimizer, NeuralParameters, NeuralTrainer,
    };
    pub use crate::parallel::{
        parallel_evaluate_batch, parallel_finite_diff_gradient, ParallelOptions,
    };
    pub use crate::result::OptimizeResults;
    pub use crate::roots::{root, Method as RootMethod};
    pub use crate::scalar::{
        minimize_scalar, Method as ScalarMethod, Options as ScalarOptions, ScalarOptimizeResult,
    };
    pub use crate::sparse_numdiff::{sparse_hessian, sparse_jacobian, SparseFiniteDiffOptions};
    pub use crate::unconstrained::{minimize, Bounds, Method as UnconstrainedMethod, Options};
}

#[cfg(test)]
mod tests {
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
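
    // Construction-only smoke test for `Bounds`, mirroring the module-level
    // documentation examples above. Only the `Bounds::new` and
    // `Bounds::from_vecs` calls shown in those examples are assumed here.
    #[test]
    fn bounds_construction() {
        use crate::Bounds;

        // Pairs form: (lower, upper) per variable, None = unbounded
        let _bounds1 = Bounds::new(&[(Some(0.0), Some(1.0)), (Some(-1.0), None), (None, None)]);

        // Separate lower/upper vectors, as in the doc example
        let lb = vec![Some(0.0), Some(-1.0), None];
        let ub = vec![Some(1.0), None, None];
        let _bounds2 = Bounds::from_vecs(lb, ub).unwrap();
    }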
}