//! Minimization algorithms for scalar objective functions (simplex, quasi-Newton,
//! trust-region, and large-scale variants), together with the shared [`Method`],
//! [`Options`], and [`Bounds`] types used by the [`minimize`] entry point.

use crate::error::OptimizeError;
use scirs2_core::ndarray::{Array1, ArrayView1};
use std::fmt;

pub mod adaptive_convergence;
pub mod advanced_line_search;
pub mod bfgs;
pub mod callback_diagnostics;
pub mod conjugate_gradient;
pub mod convergence_diagnostics;
pub mod efficient_sparse;
pub mod lbfgs;
pub mod line_search;
pub mod memory_efficient;
pub mod memory_efficient_sparse;
pub mod nelder_mead;
pub mod newton;
pub mod powell;
pub mod quasi_newton;
pub mod result;
pub mod robust_convergence;
pub mod simd_bfgs;
pub mod sparse_optimization;
pub mod strong_wolfe;
pub mod subspace_methods;
pub mod truncated_newton;
pub mod trust_region;
pub mod utils;

// Re-export the commonly used types and functions from the submodules.
pub use result::OptimizeResult;

pub use adaptive_convergence::{
    check_convergence_adaptive, create_adaptive_options_for_problem, AdaptationStats,
    AdaptiveToleranceOptions, AdaptiveToleranceState, ConvergenceStatus,
};
pub use advanced_line_search::{
    advanced_line_search, create_non_monotone_state, AdvancedLineSearchOptions,
    InterpolationStrategy, LineSearchMethod, LineSearchResult, LineSearchStats,
};
pub use bfgs::minimize_bfgs;
pub use callback_diagnostics::{
    minimize_with_diagnostics, optimize_with_diagnostics, CallbackInfo, CallbackResult,
    DiagnosticOptimizer, OptimizationCallback,
};
pub use conjugate_gradient::minimize_conjugate_gradient;
pub use convergence_diagnostics::{
    ConvergenceDiagnostics, DiagnosticCollector, DiagnosticOptions, DiagnosticWarning,
    ExportFormat, IterationDiagnostic, LineSearchDiagnostic, PerformanceMetrics, ProblemAnalysis,
    ProblemDifficulty, WarningSeverity,
};
pub use efficient_sparse::{
    minimize_efficient_sparse_newton, EfficientSparseOptions, SparsityInfo,
};
pub use lbfgs::{minimize_lbfgs, minimize_lbfgsb};
pub use memory_efficient::{
    create_memory_efficient_optimizer, minimize_memory_efficient_lbfgs, MemoryOptions,
};
pub use memory_efficient_sparse::{
    create_advanced_scale_optimizer, minimize_advanced_scale, AdvancedScaleOptions,
};
pub use nelder_mead::minimize_nelder_mead;
pub use newton::minimize_newton_cg;
pub use powell::minimize_powell;
pub use quasi_newton::{minimize_dfp, minimize_quasi_newton, minimize_sr1, UpdateFormula};
pub use robust_convergence::{
    create_robust_options_for_problem, RobustConvergenceOptions, RobustConvergenceResult,
    RobustConvergenceState,
};
pub use simd_bfgs::{minimize_simd_bfgs, minimize_simd_bfgs_default, SimdBfgsOptions};
pub use sparse_optimization::{
    auto_detect_sparsity, minimize_sparse_bfgs, SparseOptimizationOptions,
};
pub use strong_wolfe::{
    create_strong_wolfe_options_for_method, strong_wolfe_line_search, StrongWolfeOptions,
    StrongWolfeResult,
};
pub use subspace_methods::{
    minimize_adaptive_subspace, minimize_block_coordinate_descent,
    minimize_cyclical_coordinate_descent, minimize_random_coordinate_descent,
    minimize_random_subspace, minimize_subspace, SubspaceMethod, SubspaceOptions,
};
pub use truncated_newton::{
    minimize_truncated_newton, minimize_trust_region_newton, Preconditioner, TruncatedNewtonOptions,
};
pub use trust_region::{minimize_trust_exact, minimize_trust_krylov, minimize_trust_ncg};

/// Optimization method to use with [`minimize`].
#[derive(Debug, Clone, Copy)]
pub enum Method {
    /// Nelder-Mead simplex (derivative-free).
    NelderMead,
    /// Powell's conjugate direction method (derivative-free).
    Powell,
    /// Nonlinear conjugate gradient.
    CG,
    /// BFGS quasi-Newton method.
    BFGS,
    /// SR1 quasi-Newton update.
    SR1,
    /// DFP quasi-Newton update.
    DFP,
    /// Limited-memory BFGS.
    LBFGS,
    /// Limited-memory BFGS with bound constraints.
    LBFGSB,
    /// Newton conjugate gradient.
    NewtonCG,
    /// Trust-region Newton conjugate gradient.
    TrustNCG,
    /// Trust-region method with a Krylov subspace solver.
    TrustKrylov,
    /// Trust-region method with exact subproblem solves.
    TrustExact,
    /// Truncated Newton method.
    TruncatedNewton,
    /// Trust-region Newton method.
    TrustRegionNewton,
}

/// Variable bounds for bound-constrained problems.
///
/// Each entry is `None` for an unbounded side or `Some(value)` for a finite bound.
#[derive(Debug, Clone)]
pub struct Bounds {
    /// Lower bound for each variable.
    pub lower: Vec<Option<f64>>,
    /// Upper bound for each variable.
    pub upper: Vec<Option<f64>>,
}

/// Options shared by the minimization algorithms.
#[derive(Debug, Clone)]
pub struct Options {
    /// Maximum number of iterations.
    pub max_iter: usize,
    /// Maximum number of function evaluations, if limited.
    pub max_fev: Option<usize>,
    /// Tolerance on the change in function value.
    pub ftol: f64,
    /// Tolerance on the change in parameters.
    pub xtol: f64,
    /// Tolerance on the gradient norm.
    pub gtol: f64,
    /// Initial step size, if specified.
    pub initial_step: Option<f64>,
    /// Maximum step size, if specified.
    pub maxstep: Option<f64>,
    /// Whether to use finite differences for derivatives.
    pub finite_diff: bool,
    /// Step size for finite-difference approximations.
    pub eps: f64,
    /// Initial trust-region radius.
    pub trust_radius: Option<f64>,
    /// Maximum trust-region radius.
    pub max_trust_radius: Option<f64>,
    /// Minimum trust-region radius.
    pub min_trust_radius: Option<f64>,
    /// Tolerance for the trust-region subproblem.
    pub trust_tol: Option<f64>,
    /// Maximum iterations for the trust-region subproblem.
    pub trust_max_iter: Option<usize>,
    /// Acceptance threshold for trust-region steps.
    pub trust_eta: Option<f64>,
    /// Optional variable bounds.
    pub bounds: Option<Bounds>,
}

impl fmt::Display for Method {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Method::NelderMead => write!(f, "Nelder-Mead"),
            Method::Powell => write!(f, "Powell"),
            Method::CG => write!(f, "Conjugate Gradient"),
            Method::BFGS => write!(f, "BFGS"),
            Method::SR1 => write!(f, "SR1"),
            Method::DFP => write!(f, "DFP"),
            Method::LBFGS => write!(f, "L-BFGS"),
            Method::LBFGSB => write!(f, "L-BFGS-B"),
            Method::NewtonCG => write!(f, "Newton-CG"),
            Method::TrustNCG => write!(f, "Trust-NCG"),
            Method::TrustKrylov => write!(f, "Trust-Krylov"),
            Method::TrustExact => write!(f, "Trust-Exact"),
            Method::TruncatedNewton => write!(f, "Truncated Newton"),
            Method::TrustRegionNewton => write!(f, "Trust-Region Newton"),
        }
    }
}

impl Default for Options {
    fn default() -> Self {
        Options {
            max_iter: 1000,
            max_fev: None,
            ftol: 1e-8,
            xtol: 1e-8,
            gtol: 1e-5,
            initial_step: None,
            maxstep: None,
            finite_diff: false,
            // Square root of f64 machine epsilon.
            eps: 1.4901161193847656e-8,
            trust_radius: Some(1.0),
            max_trust_radius: Some(100.0),
            min_trust_radius: Some(1e-10),
            trust_tol: Some(1e-8),
            trust_max_iter: Some(100),
            trust_eta: Some(0.1),
            bounds: None,
        }
    }
}

impl Bounds {
    /// Creates bounds from `(lower, upper)` pairs, one per variable.
    pub fn new(bounds: &[(Option<f64>, Option<f64>)]) -> Self {
        let (lower, upper): (Vec<_>, Vec<_>) = bounds.iter().cloned().unzip();
        Self { lower, upper }
    }

    /// Creates bounds from separate lower and upper vectors, validating that they
    /// have equal length and that no lower bound exceeds its upper bound.
    pub fn from_vecs(lb: Vec<Option<f64>>, ub: Vec<Option<f64>>) -> Result<Self, OptimizeError> {
        if lb.len() != ub.len() {
            return Err(OptimizeError::ValueError(
                "Lower and upper bounds must have the same length".to_string(),
            ));
        }

        for (l, u) in lb.iter().zip(ub.iter()) {
            if let (Some(l_val), Some(u_val)) = (l, u) {
                if l_val > u_val {
                    return Err(OptimizeError::ValueError(
                        "Lower bound must be less than or equal to upper bound".to_string(),
                    ));
                }
            }
        }

        Ok(Self {
            lower: lb,
            upper: ub,
        })
    }

    /// Returns `true` if `x` has the right dimension and satisfies all bounds.
    pub fn is_feasible(&self, x: &[f64]) -> bool {
        if x.len() != self.lower.len() {
            return false;
        }

        for (&xi, (&lb, &ub)) in x.iter().zip(self.lower.iter().zip(self.upper.iter())) {
            if let Some(l) = lb {
                if xi < l {
                    return false;
                }
            }
            if let Some(u) = ub {
                if xi > u {
                    return false;
                }
            }
        }
        true
    }

    /// Projects `x` onto the feasible box by clamping each coordinate to its bounds.
    pub fn project(&self, x: &mut [f64]) {
        for (xi, (&lb, &ub)) in x.iter_mut().zip(self.lower.iter().zip(self.upper.iter())) {
            if let Some(l) = lb {
                if *xi < l {
                    *xi = l;
                }
            }
            if let Some(u) = ub {
                if *xi > u {
                    *xi = u;
                }
            }
        }
    }

    /// Returns `true` if at least one variable has a finite lower or upper bound.
    pub fn has_bounds(&self) -> bool {
        self.lower.iter().any(|b| b.is_some()) || self.upper.iter().any(|b| b.is_some())
    }
}

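// The doc example below is an illustrative sketch mirroring the unit test at the
// bottom of this file; the import path in its `use` line is assumed (hence the
// `ignore` marker) and should be adjusted to the actual crate and module layout.
/// Minimizes a scalar function of several variables with the chosen [`Method`],
/// dispatching to the corresponding solver in this module.
///
/// # Example
///
/// ```ignore
/// use scirs2_optimize::unconstrained::{minimize, Method};
/// use scirs2_core::ndarray::ArrayView1;
///
/// // Minimize the convex quadratic f(x) = x0^2 + x1^2 from the start point (1, 1).
/// let f = |x: &ArrayView1<f64>| -> f64 { x[0] * x[0] + x[1] * x[1] };
/// let result = minimize(f, &[1.0, 1.0], Method::BFGS, None).unwrap();
/// assert!(result.success);
/// ```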
#[allow(dead_code)]
pub fn minimize<F, S>(
    fun: F,
    x0: &[f64],
    method: Method,
    options: Option<Options>,
) -> Result<OptimizeResult<S>, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> S + Clone,
    S: Into<f64> + Clone + From<f64>,
{
    let options = &options.unwrap_or_default();
    let x0 = Array1::from_vec(x0.to_vec());

    if let Some(ref bounds) = options.bounds {
        if !bounds.is_feasible(x0.as_slice().unwrap()) {
            return Err(OptimizeError::ValueError(
                "Initial point is not feasible".to_string(),
            ));
        }
    }

    match method {
        Method::NelderMead => nelder_mead::minimize_nelder_mead(fun, x0, options),
        Method::Powell => powell::minimize_powell(fun, x0, options),
        Method::CG => conjugate_gradient::minimize_conjugate_gradient(fun, x0, options),
        Method::BFGS => bfgs::minimize_bfgs(fun, x0, options),
        Method::SR1 => quasi_newton::minimize_sr1(fun, x0, options),
        Method::DFP => quasi_newton::minimize_dfp(fun, x0, options),
        Method::LBFGS => lbfgs::minimize_lbfgs(fun, x0, options),
        Method::LBFGSB => lbfgs::minimize_lbfgsb(fun, x0, options),
        Method::NewtonCG => newton::minimize_newton_cg(fun, x0, options),
        Method::TrustNCG => trust_region::minimize_trust_ncg(fun, x0, options),
        Method::TrustKrylov => trust_region::minimize_trust_krylov(fun, x0, options),
        Method::TrustExact => trust_region::minimize_trust_exact(fun, x0, options),
        Method::TruncatedNewton => truncated_newton_wrapper(fun, x0, options),
        Method::TrustRegionNewton => trust_region_newton_wrapper(fun, x0, options),
    }
}

/// Adapts the generic [`Options`] to [`TruncatedNewtonOptions`] and dispatches to
/// `minimize_truncated_newton`.
#[allow(dead_code)]
fn truncated_newton_wrapper<F, S>(
    mut fun: F,
    x0: Array1<f64>,
    options: &Options,
) -> Result<OptimizeResult<S>, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> S + Clone,
    S: Into<f64> + Clone + From<f64>,
{
    let fun_f64 = move |x: &ArrayView1<f64>| fun(x).into();

    let truncated_options = TruncatedNewtonOptions {
        max_iter: options.max_iter,
        tol: options.gtol,
        max_cg_iter: options.trust_max_iter.unwrap_or(100),
        cg_tol: options.trust_tol.unwrap_or(0.1),
        ..Default::default()
    };

    let result = truncated_newton::minimize_truncated_newton(
        fun_f64,
        None::<fn(&ArrayView1<f64>) -> Array1<f64>>,
        x0,
        Some(truncated_options),
    )?;

    Ok(OptimizeResult {
        x: result.x,
        fun: result.fun.into(),
        nit: result.nit,
        func_evals: result.func_evals,
        nfev: result.nfev,
        jacobian: result.jacobian,
        hessian: result.hessian,
        success: result.success,
        message: result.message,
    })
}

/// Adapts the generic [`Options`] to [`TruncatedNewtonOptions`] (including the
/// trust-region radius) and dispatches to `minimize_trust_region_newton`.
#[allow(dead_code)]
fn trust_region_newton_wrapper<F, S>(
    mut fun: F,
    x0: Array1<f64>,
    options: &Options,
) -> Result<OptimizeResult<S>, OptimizeError>
where
    F: FnMut(&ArrayView1<f64>) -> S + Clone,
    S: Into<f64> + Clone + From<f64>,
{
    let fun_f64 = move |x: &ArrayView1<f64>| fun(x).into();

    let truncated_options = TruncatedNewtonOptions {
        max_iter: options.max_iter,
        tol: options.gtol,
        max_cg_iter: options.trust_max_iter.unwrap_or(100),
        cg_tol: options.trust_tol.unwrap_or(0.1),
        trust_radius: options.trust_radius,
        ..Default::default()
    };

    let result = truncated_newton::minimize_trust_region_newton(
        fun_f64,
        None::<fn(&ArrayView1<f64>) -> Array1<f64>>,
        x0,
        Some(truncated_options),
    )?;

    Ok(OptimizeResult {
        x: result.x,
        fun: result.fun.into(),
        nit: result.nit,
        func_evals: result.func_evals,
        nfev: result.nfev,
        jacobian: result.jacobian,
        hessian: result.hessian,
        success: result.success,
        message: result.message,
    })
}

#[cfg(test)]
mod tests {
    use super::*;
    use approx::assert_abs_diff_eq;

    #[test]
    fn test_simple_quadratic() {
        let quadratic = |x: &ArrayView1<f64>| -> f64 { x[0] * x[0] + x[1] * x[1] };

        let x0 = vec![1.0, 1.0];
        let result = minimize(quadratic, &x0, Method::BFGS, None);
        assert!(result.is_ok());

        let result = result.unwrap();
        assert!(result.success);
        assert_abs_diff_eq!(result.x[0], 0.0, epsilon = 1e-6);
        assert_abs_diff_eq!(result.x[1], 0.0, epsilon = 1e-6);
    }
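
    // Additional illustrative tests: quick sketches exercising the `Bounds` helpers
    // and the feasibility check in `minimize`; the bound values and starting points
    // below are arbitrary.
    #[test]
    fn test_bounds_feasibility_and_projection() {
        // x0 constrained to [0, 1]; x1 unbounded below and capped at 2.
        let bounds = Bounds::new(&[(Some(0.0), Some(1.0)), (None, Some(2.0))]);
        assert!(bounds.has_bounds());
        assert!(bounds.is_feasible(&[0.5, -3.0]));
        assert!(!bounds.is_feasible(&[1.5, 0.0]));

        // Projection clamps each coordinate onto its interval.
        let mut x = [1.5, 5.0];
        bounds.project(&mut x);
        assert_eq!(x, [1.0, 2.0]);

        // Inconsistent bounds (lower above upper) are rejected.
        assert!(Bounds::from_vecs(vec![Some(1.0)], vec![Some(0.0)]).is_err());
    }

    #[test]
    fn test_infeasible_initial_point_is_rejected() {
        let quadratic = |x: &ArrayView1<f64>| -> f64 { x[0] * x[0] };
        let options = Options {
            bounds: Some(Bounds::new(&[(Some(0.0), Some(1.0))])),
            ..Default::default()
        };

        // The starting point lies outside the bounds, so `minimize` returns an error
        // before dispatching to the solver.
        let result = minimize(quadratic, &[2.0], Method::BFGS, Some(options));
        assert!(result.is_err());
    }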
}