scirs2_optimize/lib.rs

#![allow(deprecated)]
#![allow(dead_code)]
#![allow(unreachable_patterns)]
#![allow(unused_assignments)]
#![allow(unused_variables)]
#![allow(private_interfaces)]
#![allow(clippy::field_reassign_with_default)]
#![recursion_limit = "512"]
// Allow common mathematical conventions in optimization code
#![allow(clippy::many_single_char_names)] // x, f, g, h, n, m etc. are standard in optimization
#![allow(clippy::similar_names)] // x_pp, x_pm, x_mp, x_mm are standard for finite differences

//! Optimization module for SciRS
//!
//! This module provides implementations of various optimization algorithms,
//! modeled after SciPy's `optimize` module.
//!
//! ## Submodules
//!
//! * `unconstrained`: Unconstrained optimization algorithms
//! * `constrained`: Constrained optimization algorithms
//! * `least_squares`: Least squares minimization (including robust methods)
//! * `roots`: Root finding algorithms
//! * `scalar`: Scalar (univariate) optimization algorithms
//! * `global`: Global optimization algorithms
//!
//! ## Optimization Methods
//!
//! The following optimization methods are currently implemented:
//!
//! ### Unconstrained:
//! - **Nelder-Mead**: A derivative-free method using a simplex-based approach
//! - **Powell**: A derivative-free method using conjugate directions
//! - **BFGS**: A quasi-Newton method with the BFGS update
//! - **CG**: A nonlinear conjugate gradient method
//!
//! ### Constrained:
//! - **SLSQP**: Sequential Least SQuares Programming
//! - **TrustConstr**: Trust-region constrained optimizer
//!
//! ### Scalar (Univariate) Optimization:
//! - **Brent**: Combines parabolic interpolation with golden section search
//! - **Bounded**: Brent's method with bounds constraints
//! - **Golden**: Golden section search
//!
//! ### Global:
//! - **Differential Evolution**: Stochastic, population-based global optimization
//! - **Basin-hopping**: Random perturbations combined with local minimization
//! - **Dual Annealing**: Generalized simulated annealing combining classical and fast annealing schedules
//! - **Particle Swarm**: Population-based optimization inspired by swarm behavior
//! - **Simulated Annealing**: Probabilistic optimization with a cooling schedule
//!
//! ### Least Squares:
//! - **Levenberg-Marquardt**: Trust-region algorithm for nonlinear least squares
//! - **Trust Region Reflective**: Bounds-constrained least squares
//! - **Robust Least Squares**: M-estimators for outlier-resistant regression
//!   - Huber loss: Reduces influence of moderate outliers
//!   - Bisquare loss: Completely rejects extreme outliers
//!   - Cauchy loss: Provides very strong outlier resistance
//! - **Weighted Least Squares**: Handles heteroscedastic data (varying variance)
//! - **Bounded Least Squares**: Box constraints on parameters
//! - **Separable Least Squares**: Variable projection for partially linear models
//! - **Total Least Squares**: Errors-in-variables regression
//!
//! ## Bounds Support
//!
//! The `unconstrained` module supports bounds constraints on variables: you can
//! specify lower and upper bounds for each variable, and the optimizer ensures
//! that all iterates remain within these bounds.
//!
//! The following methods support bounds constraints:
//! - Powell
//! - Nelder-Mead
//! - BFGS
//! - CG (Conjugate Gradient)
//!
//! ## Examples
//!
//! ### Basic Optimization
//!
//! ```
//! // Example of minimizing a function using BFGS
//! use ndarray::ArrayView1;
//! use scirs2_optimize::unconstrained::{minimize, Method};
//!
//! fn rosenbrock(x: &ArrayView1<f64>) -> f64 {
//!     let a = 1.0;
//!     let b = 100.0;
//!     let x0 = x[0];
//!     let x1 = x[1];
//!     (a - x0).powi(2) + b * (x1 - x0.powi(2)).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let initial_guess = [0.0, 0.0];
//! let result = minimize(rosenbrock, &initial_guess, Method::BFGS, None)?;
//!
//! println!("Solution: {:?}", result.x);
//! println!("Function value at solution: {}", result.fun);
//! println!("Number of iterations: {}", result.nit);
//! println!("Success: {}", result.success);
//! # Ok(())
//! # }
//! ```
//!
//! ### Optimization with Bounds
//!
//! ```
//! // Example of minimizing a function with bounds constraints
//! use ndarray::ArrayView1;
//! use scirs2_optimize::{Bounds, unconstrained::{minimize, Method, Options}};
//!
//! // A function with unconstrained minimum at (-1, -1)
//! fn func(x: &ArrayView1<f64>) -> f64 {
//!     (x[0] + 1.0).powi(2) + (x[1] + 1.0).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Create bounds: x >= 0, y >= 0
//! // This constrains the optimization to the positive quadrant
//! let bounds = Bounds::new(&[(Some(0.0), None), (Some(0.0), None)]);
//!
//! let initial_guess = [0.5, 0.5];
//! let mut options = Options::default();
//! options.bounds = Some(bounds);
//!
//! // Use Powell's method, which supports bounds
//! let result = minimize(func, &initial_guess, Method::Powell, Some(options))?;
//!
//! // The constrained minimum is at [0, 0] with value 2.0
//! println!("Solution: {:?}", result.x);
//! println!("Function value at solution: {}", result.fun);
//! # Ok(())
//! # }
//! ```
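//!
//! ### Constrained Optimization Sketch
//!
//! The constrained methods listed above (SLSQP, TrustConstr) are reached through
//! `minimize_constrained`. The sketch below is illustrative only and is marked
//! `ignore`: the constraint representation, the call shape, and the use of a plain
//! function for the constraint are assumptions, so consult the `constrained`
//! module documentation for the real API.
//!
//! ```ignore
//! use ndarray::{array, ArrayView1};
//! use scirs2_optimize::constrained::{minimize_constrained, Method};
//!
//! // Objective: (x - 1)^2 + (y - 2.5)^2
//! fn objective(x: &ArrayView1<f64>) -> f64 {
//!     (x[0] - 1.0).powi(2) + (x[1] - 2.5).powi(2)
//! }
//!
//! // Hypothetical inequality constraint g(x) >= 0: x + 2y <= 6, written as 6 - x - 2y >= 0
//! fn constraint(x: &ArrayView1<f64>) -> f64 {
//!     6.0 - x[0] - 2.0 * x[1]
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let x0 = array![2.0, 0.0];
//! // Assumed call shape: objective, start point, constraints, method, options
//! let result = minimize_constrained(objective, &x0, &[constraint], Method::SLSQP, None)?;
//! println!("Constrained solution: {:?}", result.x);
//! # Ok(())
//! # }
//! ```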
//!
//! ### Bounds Creation Options
//!
//! ```
//! use scirs2_optimize::Bounds;
//!
//! // Create bounds from pairs
//! // Format: [(min_x1, max_x1), (min_x2, max_x2), ...] where None = unbounded
//! let bounds1 = Bounds::new(&[
//!     (Some(0.0), Some(1.0)),  // 0 <= x[0] <= 1
//!     (Some(-1.0), None),      // x[1] >= -1, no upper bound
//!     (None, Some(10.0)),      // x[2] <= 10, no lower bound
//!     (None, None)             // x[3] is completely unbounded
//! ]);
//!
//! // Alternative: create from separate lower and upper bound vectors
//! let lb = vec![Some(0.0), Some(-1.0), None, None];
//! let ub = vec![Some(1.0), None, Some(10.0), None];
//! let bounds2 = Bounds::from_vecs(lb, ub).unwrap();
//! ```
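//!
//! ### Scalar Optimization Sketch
//!
//! The scalar methods listed above (Brent, Bounded, Golden) are reached through
//! `minimize_scalar`. The sketch below is illustrative and marked `ignore`: it
//! assumes a `(function, method, options)` calling convention mirroring `minimize`,
//! so check the `scalar` module documentation for the exact signature and options.
//!
//! ```ignore
//! use scirs2_optimize::scalar::{minimize_scalar, Method};
//!
//! // A univariate function with minimum at x = 2
//! fn f(x: f64) -> f64 {
//!     (x - 2.0).powi(2) + 1.0
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Brent's method combines parabolic interpolation with golden section search
//! let result = minimize_scalar(f, Method::Brent, None)?;
//! println!("Minimum near x = {}", result.x);
//! # Ok(())
//! # }
//! ```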
//!
//! ### Robust Least Squares Example
//!
//! ```
//! use ndarray::{array, Array1, Array2};
//! use scirs2_optimize::least_squares::{robust_least_squares, HuberLoss};
//!
//! // Residuals for the linear model y = params[0] + params[1] * x;
//! // `data` packs the x values followed by the y values
//! fn residual(params: &[f64], data: &[f64]) -> Array1<f64> {
//!     let n = data.len() / 2;
//!     let x_vals = &data[0..n];
//!     let y_vals = &data[n..];
//!
//!     let mut res = Array1::zeros(n);
//!     for i in 0..n {
//!         res[i] = y_vals[i] - (params[0] + params[1] * x_vals[i]);
//!     }
//!     res
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Five (x, y) points; the last y value (10.0) is an outlier
//! let data = array![0., 1., 2., 3., 4., 0.1, 0.9, 2.1, 2.9, 10.0];
//! let x0 = array![0.0, 0.0];
//!
//! // Use Huber loss for robustness
//! let huber_loss = HuberLoss::new(1.0);
//! let result = robust_least_squares(
//!     residual,
//!     &x0,
//!     huber_loss,
//!     None::<fn(&[f64], &[f64]) -> Array2<f64>>,
//!     &data,
//!     None
//! )?;
//!
//! println!("Robust solution: intercept={:.3}, slope={:.3}",
//!          result.x[0], result.x[1]);
//! # Ok(())
//! # }
//! ```
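//!
//! ### Global Optimization Sketch
//!
//! The global optimizers listed above are reached through free functions such as
//! `differential_evolution`, `basinhopping`, and `particle_swarm`. The sketch below
//! is illustrative and marked `ignore`: the `(function, bounds, options)` call shape
//! and the `DifferentialEvolutionOptions` defaults are assumptions patterned on
//! SciPy, so consult the `global` module documentation for the exact signatures.
//!
//! ```ignore
//! use ndarray::ArrayView1;
//! use scirs2_optimize::global::{differential_evolution, DifferentialEvolutionOptions};
//!
//! // Rastrigin function: highly multimodal, global minimum of 0 at the origin
//! fn rastrigin(x: &ArrayView1<f64>) -> f64 {
//!     x.iter()
//!         .map(|&xi| xi * xi - 10.0 * (2.0 * std::f64::consts::PI * xi).cos() + 10.0)
//!         .sum()
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Search box: each coordinate restricted to [-5.12, 5.12]
//! let bounds = vec![(-5.12, 5.12), (-5.12, 5.12)];
//! let options = DifferentialEvolutionOptions::default();
//! let result = differential_evolution(rastrigin, &bounds, Some(options))?;
//! println!("Global minimum near {:?}", result.x);
//! # Ok(())
//! # }
//! ```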

// BLAS backend linking handled through scirs2-core

// Export error types
pub mod error;
pub use error::{OptimizeError, OptimizeResult};

// Module structure
pub mod advanced_coordinator;
#[cfg(feature = "async")]
pub mod async_parallel;
pub mod automatic_differentiation;
pub mod benchmarking;
pub mod constrained;
pub mod distributed;
pub mod distributed_gpu;
pub mod global;
pub mod gpu;
pub mod jit_optimization;
pub mod learned_optimizers;
pub mod least_squares;
pub mod ml_optimizers;
pub mod multi_objective;
pub mod neural_integration;
pub mod neuromorphic;
pub mod parallel;
pub mod quantum_inspired;
pub mod reinforcement_learning;
pub mod roots;
pub mod roots_anderson;
pub mod roots_krylov;
pub mod scalar;
pub mod self_tuning;
pub mod simd_ops;
pub mod sparse_numdiff; // Refactored into a module with submodules
pub mod stochastic;
pub mod streaming;
pub mod unconstrained;
pub mod unified_pipeline;
pub mod visualization;

// Common optimization result structure
pub mod result;
pub use result::OptimizeResults;

// Convenience re-exports for common functions
pub use advanced_coordinator::{
    advanced_optimize, AdvancedConfig, AdvancedCoordinator, AdvancedStats, AdvancedStrategy,
    StrategyPerformance,
};
#[cfg(feature = "async")]
pub use async_parallel::{
    AsyncDifferentialEvolution, AsyncOptimizationConfig, AsyncOptimizationStats,
    SlowEvaluationStrategy,
};
pub use automatic_differentiation::{
    autodiff, create_ad_gradient, create_ad_hessian, optimize_ad_mode, ADMode, ADResult,
    AutoDiffFunction, AutoDiffOptions,
};
pub use benchmarking::{
    benchmark_suites, test_functions, AlgorithmRanking, BenchmarkConfig, BenchmarkResults,
    BenchmarkRun, BenchmarkSummary, BenchmarkSystem, ProblemCharacteristics, RuntimeStats,
    TestProblem,
};
pub use constrained::minimize_constrained;
pub use distributed::{
    algorithms::{DistributedDifferentialEvolution, DistributedParticleSwarm},
    DistributedConfig, DistributedOptimizationContext, DistributedStats, DistributionStrategy,
    MPIInterface, WorkAssignment,
};
pub use distributed_gpu::{
    DistributedGpuConfig, DistributedGpuOptimizer, DistributedGpuResults, DistributedGpuStats,
    GpuCommunicationStrategy, IterationStats,
};
pub use global::{
    basinhopping, bayesian_optimization, differential_evolution, dual_annealing,
    generate_diverse_start_points, multi_start, multi_start_with_clustering, particle_swarm,
    simulated_annealing,
};
pub use gpu::{
    acceleration::{
        AccelerationConfig, AccelerationManager, AccelerationStrategy, PerformanceStats,
    },
    algorithms::{GpuDifferentialEvolution, GpuParticleSwarm},
    GpuFunction, GpuOptimizationConfig, GpuOptimizationContext, GpuPrecision,
};
pub use jit_optimization::{optimize_function, FunctionPattern, JitCompiler, JitOptions, JitStats};
pub use learned_optimizers::{
    learned_optimize, ActivationType, AdaptationStatistics, AdaptiveNASSystem,
    AdaptiveTransformerOptimizer, FewShotLearningOptimizer, LearnedHyperparameterTuner,
    LearnedOptimizationConfig, LearnedOptimizer, MetaOptimizerState, NeuralAdaptiveOptimizer,
    OptimizationNetwork, OptimizationProblem, ParameterDistribution, ProblemEncoder, TrainingTask,
};
pub use least_squares::{
    bounded_least_squares, least_squares, robust_least_squares, separable_least_squares,
    total_least_squares, weighted_least_squares, BisquareLoss, CauchyLoss, HuberLoss,
};
pub use ml_optimizers::{
    ml_problems, ADMMOptimizer, CoordinateDescentOptimizer, ElasticNetOptimizer,
    GroupLassoOptimizer, LassoOptimizer,
};
pub use multi_objective::{
    MultiObjectiveConfig, MultiObjectiveResult, MultiObjectiveSolution, NSGAII, NSGAIII,
};
pub use neural_integration::{optimizers, NeuralOptimizer, NeuralParameters, NeuralTrainer};
pub use neuromorphic::{
    neuromorphic_optimize, BasicNeuromorphicOptimizer, NeuromorphicConfig, NeuromorphicNetwork,
    NeuromorphicOptimizer, NeuronState, SpikeEvent,
};
pub use quantum_inspired::{
    quantum_optimize, quantum_particle_swarm_optimize, Complex, CoolingSchedule,
    QuantumAnnealingSchedule, QuantumInspiredOptimizer, QuantumOptimizationStats, QuantumState,
};
pub use reinforcement_learning::{
    actor_critic_optimize, bandit_optimize, evolutionary_optimize, meta_learning_optimize,
    policy_gradient_optimize, BanditOptimizer, EvolutionaryStrategy, Experience,
    MetaLearningOptimizer, OptimizationAction, OptimizationState, QLearningOptimizer,
    RLOptimizationConfig, RLOptimizer,
};
pub use roots::root;
pub use scalar::minimize_scalar;
pub use self_tuning::{
    presets, AdaptationResult, AdaptationStrategy, ParameterChange, ParameterValue,
    PerformanceMetrics, SelfTuningConfig, SelfTuningOptimizer, TunableParameter,
};
pub use sparse_numdiff::{sparse_hessian, sparse_jacobian, SparseFiniteDiffOptions};
pub use stochastic::{
    minimize_adam, minimize_adamw, minimize_rmsprop, minimize_sgd, minimize_sgd_momentum,
    minimize_stochastic, AdamOptions, AdamWOptions, DataProvider, InMemoryDataProvider,
    LearningRateSchedule, MomentumOptions, RMSPropOptions, SGDOptions, StochasticGradientFunction,
    StochasticMethod, StochasticOptions,
};
pub use streaming::{
    exponentially_weighted_rls, incremental_bfgs, incremental_lbfgs,
    incremental_lbfgs_linear_regression, kalman_filter_estimator, online_gradient_descent,
    online_linear_regression, online_logistic_regression, real_time_linear_regression,
    recursive_least_squares, rolling_window_gradient_descent, rolling_window_least_squares,
    rolling_window_linear_regression, rolling_window_weighted_least_squares,
    streaming_trust_region_linear_regression, streaming_trust_region_logistic_regression,
    IncrementalNewton, IncrementalNewtonMethod, LinearRegressionObjective,
    LogisticRegressionObjective, RealTimeEstimator, RealTimeMethod, RollingWindowOptimizer,
    StreamingConfig, StreamingDataPoint, StreamingObjective, StreamingOptimizer, StreamingStats,
    StreamingTrustRegion,
};
pub use unconstrained::{minimize, Bounds};
pub use unified_pipeline::{
    presets as unified_presets, UnifiedOptimizationConfig, UnifiedOptimizationResults,
    UnifiedOptimizer,
};
pub use visualization::{
    tracking::TrajectoryTracker, ColorScheme, OptimizationTrajectory, OptimizationVisualizer,
    OutputFormat, VisualizationConfig,
};

// Prelude module for convenient imports
pub mod prelude {
    pub use crate::advanced_coordinator::{
        advanced_optimize, AdvancedConfig, AdvancedCoordinator, AdvancedStats, AdvancedStrategy,
        StrategyPerformance,
    };
    #[cfg(feature = "async")]
    pub use crate::async_parallel::{
        AsyncDifferentialEvolution, AsyncOptimizationConfig, AsyncOptimizationStats,
        SlowEvaluationStrategy,
    };
    pub use crate::automatic_differentiation::{
        autodiff, create_ad_gradient, create_ad_hessian, optimize_ad_mode, ADMode, ADResult,
        AutoDiffFunction, AutoDiffOptions, Dual, DualNumber,
    };
    pub use crate::benchmarking::{
        benchmark_suites, test_functions, AlgorithmRanking, BenchmarkConfig, BenchmarkResults,
        BenchmarkRun, BenchmarkSummary, BenchmarkSystem, ProblemCharacteristics, RuntimeStats,
        TestProblem,
    };
    pub use crate::constrained::{minimize_constrained, Method as ConstrainedMethod};
    pub use crate::distributed::{
        algorithms::{DistributedDifferentialEvolution, DistributedParticleSwarm},
        DistributedConfig, DistributedOptimizationContext, DistributedStats, DistributionStrategy,
        MPIInterface, WorkAssignment,
    };
    pub use crate::distributed_gpu::{
        DistributedGpuConfig, DistributedGpuOptimizer, DistributedGpuResults, DistributedGpuStats,
        GpuCommunicationStrategy, IterationStats,
    };
    pub use crate::error::{OptimizeError, OptimizeResult};
    pub use crate::global::{
        basinhopping, bayesian_optimization, differential_evolution, dual_annealing,
        generate_diverse_start_points, multi_start_with_clustering, particle_swarm,
        simulated_annealing, AcquisitionFunctionType, BasinHoppingOptions,
        BayesianOptimizationOptions, BayesianOptimizer, ClusterCentroid, ClusteringAlgorithm,
        ClusteringOptions, ClusteringResult, DifferentialEvolutionOptions, DualAnnealingOptions,
        InitialPointGenerator, KernelType, LocalMinimum, Parameter, ParticleSwarmOptions,
        SimulatedAnnealingOptions, Space, StartPointStrategy,
    };
    pub use crate::gpu::{
        acceleration::{
            AccelerationConfig, AccelerationManager, AccelerationStrategy, PerformanceStats,
        },
        algorithms::{GpuDifferentialEvolution, GpuParticleSwarm},
        GpuFunction, GpuOptimizationConfig, GpuOptimizationContext, GpuPrecision,
    };
    pub use crate::jit_optimization::{
        optimize_function, FunctionPattern, JitCompiler, JitOptions, JitStats,
    };
    pub use crate::learned_optimizers::{
        learned_optimize, ActivationType, AdaptationStatistics, AdaptiveNASSystem,
        AdaptiveTransformerOptimizer, FewShotLearningOptimizer, LearnedHyperparameterTuner,
        LearnedOptimizationConfig, LearnedOptimizer, MetaOptimizerState, NeuralAdaptiveOptimizer,
        OptimizationNetwork, OptimizationProblem, ParameterDistribution, ProblemEncoder,
        TrainingTask,
    };
    pub use crate::least_squares::{
        bounded_least_squares, least_squares, robust_least_squares, separable_least_squares,
        total_least_squares, weighted_least_squares, BisquareLoss, BoundedOptions, CauchyLoss,
        HuberLoss, LinearSolver, Method as LeastSquaresMethod, RobustLoss, RobustOptions,
        SeparableOptions, SeparableResult, TLSMethod, TotalLeastSquaresOptions,
        TotalLeastSquaresResult, WeightedOptions,
    };
    pub use crate::ml_optimizers::{
        ml_problems, ADMMOptimizer, CoordinateDescentOptimizer, ElasticNetOptimizer,
        GroupLassoOptimizer, LassoOptimizer,
    };
    pub use crate::multi_objective::{
        MultiObjectiveConfig, MultiObjectiveResult, MultiObjectiveSolution, NSGAII, NSGAIII,
    };
    pub use crate::neural_integration::{
        optimizers, NeuralOptimizer, NeuralParameters, NeuralTrainer,
    };
    pub use crate::neuromorphic::{
        neuromorphic_optimize, BasicNeuromorphicOptimizer, NeuromorphicConfig, NeuromorphicNetwork,
        NeuromorphicOptimizer, NeuronState, SpikeEvent,
    };
    pub use crate::parallel::{
        parallel_evaluate_batch, parallel_finite_diff_gradient, ParallelOptions,
    };
    pub use crate::quantum_inspired::{
        quantum_optimize, quantum_particle_swarm_optimize, Complex, CoolingSchedule,
        QuantumAnnealingSchedule, QuantumInspiredOptimizer, QuantumOptimizationStats, QuantumState,
    };
    pub use crate::reinforcement_learning::{
        bandit_optimize, evolutionary_optimize, meta_learning_optimize, policy_gradient_optimize,
        BanditOptimizer, EvolutionaryStrategy, Experience, MetaLearningOptimizer,
        OptimizationAction, OptimizationState, QLearningOptimizer, RLOptimizationConfig,
        RLOptimizer,
    };
    pub use crate::result::OptimizeResults;
    pub use crate::roots::{root, Method as RootMethod};
    pub use crate::scalar::{
        minimize_scalar, Method as ScalarMethod, Options as ScalarOptions, ScalarOptimizeResult,
    };
    pub use crate::self_tuning::{
        presets, AdaptationResult, AdaptationStrategy, ParameterChange, ParameterValue,
        PerformanceMetrics, SelfTuningConfig, SelfTuningOptimizer, TunableParameter,
    };
    pub use crate::sparse_numdiff::{sparse_hessian, sparse_jacobian, SparseFiniteDiffOptions};
    pub use crate::streaming::{
        exponentially_weighted_rls, incremental_bfgs, incremental_lbfgs,
        incremental_lbfgs_linear_regression, kalman_filter_estimator, online_gradient_descent,
        online_linear_regression, online_logistic_regression, real_time_linear_regression,
        recursive_least_squares, rolling_window_gradient_descent, rolling_window_least_squares,
        rolling_window_linear_regression, rolling_window_weighted_least_squares,
        streaming_trust_region_linear_regression, streaming_trust_region_logistic_regression,
        IncrementalNewton, IncrementalNewtonMethod, LinearRegressionObjective,
        LogisticRegressionObjective, RealTimeEstimator, RealTimeMethod, RollingWindowOptimizer,
        StreamingConfig, StreamingDataPoint, StreamingObjective, StreamingOptimizer,
        StreamingStats, StreamingTrustRegion,
    };
    pub use crate::unconstrained::{minimize, Bounds, Method as UnconstrainedMethod, Options};
    pub use crate::unified_pipeline::{
        presets as unified_presets, UnifiedOptimizationConfig, UnifiedOptimizationResults,
        UnifiedOptimizer,
    };
    pub use crate::visualization::{
        tracking::TrajectoryTracker, ColorScheme, OptimizationTrajectory, OptimizationVisualizer,
        OutputFormat, VisualizationConfig,
    };
}

#[cfg(test)]
mod tests {
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}