scirs2_optimize/lib.rs

#![allow(deprecated)]
#![allow(clippy::all)]
#![allow(dead_code)]
#![allow(unreachable_patterns)]
#![allow(unused_assignments)]
#![allow(unused_variables)]
#![allow(private_interfaces)]
#![allow(clippy::field_reassign_with_default)]
#![recursion_limit = "512"]
// Allow common mathematical conventions in optimization code
#![allow(clippy::many_single_char_names)] // x, f, g, h, n, m etc. are standard in optimization
#![allow(clippy::similar_names)] // x_pp, x_pm, x_mp, x_mm are standard for finite differences
//! # SciRS2 Optimize - Mathematical Optimization for Rust
//!
//! **scirs2-optimize** provides comprehensive optimization algorithms modeled after SciPy's
//! `optimize` module, offering everything from simple function minimization to complex
//! constrained optimization and global search.
//!
//! ## 🎯 Key Features
//!
//! - **Unconstrained Optimization**: BFGS, CG, Nelder-Mead, Powell
//! - **Constrained Optimization**: SLSQP, Trust-region methods
//! - **Global Optimization**: Differential Evolution, Basin-hopping, Simulated Annealing
//! - **Least Squares**: Levenberg-Marquardt, robust fitting, bounded problems
//! - **Root Finding**: Newton, Brent, Bisection methods
//! - **Scalar Optimization**: Brent, Golden section search
//! - **Bounds Support**: Box constraints for all major algorithms
//!
//! ## 📦 Module Overview
//!
//! | Module | Description | SciPy Equivalent |
//! |--------|-------------|------------------|
//! | [`unconstrained`] | Unconstrained minimization (BFGS, CG, Powell) | `scipy.optimize.minimize` |
//! | [`constrained`] | Constrained optimization (SLSQP, Trust-region) | `scipy.optimize.minimize` with constraints |
//! | [`global`] | Global optimization (DE, Basin-hopping) | `scipy.optimize.differential_evolution` |
//! | [`mod@least_squares`] | Nonlinear least squares (LM, robust methods) | `scipy.optimize.least_squares` |
//! | [`roots`] | Root finding algorithms | `scipy.optimize.root` |
//! | [`scalar`] | 1-D minimization | `scipy.optimize.minimize_scalar` |
//!
//! ## 🚀 Quick Start
//!
//! ### Installation
//!
//! ```toml
//! [dependencies]
//! scirs2-optimize = "0.1.0-rc.1"
//! ```
//!
//! ### Unconstrained Minimization (Rosenbrock Function)
//!
//! ```rust
//! use scirs2_optimize::unconstrained::{minimize, Method};
//! use scirs2_core::ndarray::ArrayView1;
//!
//! // Rosenbrock function: (1-x)² + 100(y-x²)²
//! fn rosenbrock(x: &ArrayView1<f64>) -> f64 {
//!     let x0 = x[0];
//!     let x1 = x[1];
//!     (1.0 - x0).powi(2) + 100.0 * (x1 - x0.powi(2)).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let initial_guess = [0.0, 0.0];
//! let result = minimize(rosenbrock, &initial_guess, Method::BFGS, None)?;
//!
//! println!("Minimum at: {:?}", result.x);
//! println!("Function value: {}", result.fun);
//! println!("Converged: {}", result.success);
//! # Ok(())
//! # }
//! ```
//!
//! ### Optimization with Bounds
//!
//! Constrain variables to specific ranges:
//!
//! ```rust
//! use scirs2_optimize::{Bounds, unconstrained::{minimize, Method, Options}};
//! use scirs2_core::ndarray::ArrayView1;
//!
//! fn objective(x: &ArrayView1<f64>) -> f64 {
//!     (x[0] + 1.0).powi(2) + (x[1] + 1.0).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Constrain to positive quadrant: x >= 0, y >= 0
//! let bounds = Bounds::new(&[
//!     (Some(0.0), None),  // x >= 0
//!     (Some(0.0), None),  // y >= 0
//! ]);
//!
//! let mut options = Options::default();
//! options.bounds = Some(bounds);
//!
//! let result = minimize(objective, &[0.5, 0.5], Method::Powell, Some(options))?;
//! println!("Constrained minimum: {:?}", result.x);  // [0.0, 0.0]
//! # Ok(())
//! # }
//! ```
//!
//! ### Robust Least Squares
//!
//! Fit data with outliers using robust loss functions:
//!
//! ```rust
//! use scirs2_optimize::least_squares::{robust_least_squares, HuberLoss};
//! use scirs2_core::ndarray::{array, Array1};
//!
//! // Linear model residual: y - (a + b*x)
//! fn residual(params: &[f64], data: &[f64]) -> Array1<f64> {
//!     let n = data.len() / 2;
//!     let x = &data[0..n];
//!     let y = &data[n..];
//!
//!     let mut res = Array1::zeros(n);
//!     for i in 0..n {
//!         res[i] = y[i] - (params[0] + params[1] * x[i]);
//!     }
//!     res
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Data: x = [0,1,2,3,4], y = [0.1,0.9,2.1,2.9,10.0] (last point is an outlier)
//! let data = array![0.,1.,2.,3.,4., 0.1,0.9,2.1,2.9,10.0];
//!
//! let huber = HuberLoss::new(1.0);  // Robust to outliers
//! let x0 = array![0.0, 0.0];
//! let result = robust_least_squares(
//!     residual, &x0, huber, None::<fn(&[f64], &[f64]) -> scirs2_core::ndarray::Array2<f64>>, &data, None
//! )?;
//!
//! println!("Robust fit: y = {:.3} + {:.3}x", result.x[0], result.x[1]);
//! # Ok(())
//! # }
//! ```
//!
//! ### Global Optimization
//!
//! Find the global minimum of multi-modal functions:
//!
//! ```rust,no_run
//! use scirs2_optimize::global::{differential_evolution, DifferentialEvolutionOptions};
//! use scirs2_core::ndarray::ArrayView1;
//!
//! // Rastrigin function (multiple local minima)
//! fn rastrigin(x: &ArrayView1<f64>) -> f64 {
//!     let n = x.len() as f64;
//!     10.0 * n + x.iter().map(|xi| xi.powi(2) - 10.0 * (2.0 * std::f64::consts::PI * xi).cos()).sum::<f64>()
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let bounds = vec![(-5.12, 5.12); 5];  // 5-dimensional search space
//! let options = Some(DifferentialEvolutionOptions::default());
//!
//! let result = differential_evolution(rastrigin, bounds, options, None)?;
//! println!("Global minimum: {:?}", result.x);
//! # Ok(())
//! # }
//! ```
//!
//! ### Root Finding
//!
//! Solve equations f(x) = 0:
//!
//! ```rust,no_run
//! use scirs2_optimize::roots::{root, Method};
//! use scirs2_core::ndarray::{array, Array1};
//!
//! // Find the root of x² - 2 = 0 (i.e., √2)
//! fn f(x: &[f64]) -> Array1<f64> {
//!     array![x[0] * x[0] - 2.0]
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let x0 = array![1.5];  // Initial guess
//! let result = root(f, &x0, Method::Hybr, None::<fn(&[f64]) -> scirs2_core::ndarray::Array2<f64>>, None)?;
//! println!("√2 ≈ {:.10}", result.x[0]);  // 1.4142135624
//! # Ok(())
//! # }
//! ```
//!
//! ## Submodules
//!
//! * `unconstrained`: Unconstrained optimization algorithms
//! * `constrained`: Constrained optimization algorithms
//! * `least_squares`: Least squares minimization (including robust methods)
//! * `roots`: Root finding algorithms
//! * `scalar`: Scalar (univariate) optimization algorithms
//! * `global`: Global optimization algorithms
//!
//! ## Optimization Methods
//!
//! The following optimization methods are currently implemented:
//!
//! ### Unconstrained:
//! - **Nelder-Mead**: A derivative-free method using a simplex-based approach
//! - **Powell**: Derivative-free method using conjugate directions
//! - **BFGS**: Quasi-Newton method with BFGS update
//! - **CG**: Nonlinear conjugate gradient method
//!
//! ### Constrained:
//! - **SLSQP**: Sequential Least SQuares Programming (see the sketch below)
//! - **TrustConstr**: Trust-region constrained optimizer
//!
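//! A minimal SLSQP sketch follows; the constraint-construction step is left as a
//! `todo!` because the concrete constraint type lives in the [`constrained`] module,
//! and the argument order shown here is an assumption rather than the verified API:
//!
//! ```rust,ignore
//! use scirs2_optimize::constrained::{minimize_constrained, Method};
//! use scirs2_core::ndarray::ArrayView1;
//!
//! // Minimize x² + y² subject to x + y >= 1.
//! fn objective(x: &ArrayView1<f64>) -> f64 {
//!     x[0].powi(2) + x[1].powi(2)
//! }
//!
//! // Hypothetical: build the constraint set with the `constrained` module's API.
//! let constraints = todo!("construct constraints via the `constrained` module");
//! let result = minimize_constrained(objective, &[0.5, 0.5], &constraints, Method::SLSQP, None)?;
//! println!("Constrained minimum: {:?}", result.x);
//! ```
//!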
//! ### Scalar (Univariate) Optimization:
//! - **Brent**: Combines parabolic interpolation with golden section search (see the sketch below)
//! - **Bounded**: Brent's method with bounds constraints
//! - **Golden**: Golden section search
//!
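//! A hedged sketch of 1-D minimization: `minimize_scalar` and `scalar::Method` are
//! re-exported by this crate, but the exact argument list used here is an assumption
//! (see [`scalar`] for the real signature):
//!
//! ```rust,ignore
//! use scirs2_optimize::scalar::{minimize_scalar, Method};
//!
//! // Minimize f(x) = (x - 2)²; the minimum is at x = 2.
//! let f = |x: f64| (x - 2.0).powi(2);
//! let result = minimize_scalar(f, None, Method::Brent, None)?;
//! println!("Minimum near x = {:.6}", result.x);
//! ```
//!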
//! ### Global:
//! - **Differential Evolution**: Stochastic global optimization method
//! - **Basin-hopping**: Random perturbations with local minimization (see the sketch below)
//! - **Dual Annealing**: Generalized simulated annealing combining classical and fast annealing schedules
//! - **Particle Swarm**: Population-based optimization inspired by swarm behavior
//! - **Simulated Annealing**: Probabilistic optimization with cooling schedule
//!
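//! Differential evolution is demonstrated in the Quick Start above; a hedged
//! basin-hopping sketch follows (the `basinhopping` argument list here is an
//! assumption, see [`global`] for the actual signature):
//!
//! ```rust,ignore
//! use scirs2_optimize::global::basinhopping;
//! use scirs2_core::ndarray::{array, ArrayView1};
//!
//! // A 1-D function with several local minima.
//! fn objective(x: &ArrayView1<f64>) -> f64 {
//!     x[0].powi(2) + 4.0 * (3.0 * x[0]).sin()
//! }
//!
//! let x0 = array![1.0];
//! let result = basinhopping(objective, x0, None)?;
//! println!("Best minimum found: {:?}", result.x);
//! ```
//!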
//! ### Least Squares:
//! - **Levenberg-Marquardt**: Trust-region algorithm for nonlinear least squares (see the sketch below)
//! - **Trust Region Reflective**: Bounds-constrained least squares
//! - **Robust Least Squares**: M-estimators for outlier-resistant regression
//!   - Huber loss: Reduces influence of moderate outliers
//!   - Bisquare loss: Completely rejects extreme outliers
//!   - Cauchy loss: Provides very strong outlier resistance
//! - **Weighted Least Squares**: Handles heteroscedastic data (varying variance)
//! - **Bounded Least Squares**: Box constraints on parameters
//! - **Separable Least Squares**: Variable projection for partially linear models
//! - **Total Least Squares**: Errors-in-variables regression
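//!
//! For clean data without outliers, plain `least_squares` with Levenberg-Marquardt
//! is the usual entry point. A hedged sketch: the argument list mirrors
//! `robust_least_squares` above, and the `Method` variant name is an assumption:
//!
//! ```rust,ignore
//! use scirs2_optimize::least_squares::{least_squares, Method};
//! use scirs2_core::ndarray::{array, Array1, Array2};
//!
//! // Residuals of the model y = a + b*x against packed data [x..., y...].
//! fn residual(params: &[f64], data: &[f64]) -> Array1<f64> {
//!     let n = data.len() / 2;
//!     Array1::from_iter((0..n).map(|i| data[n + i] - (params[0] + params[1] * data[i])))
//! }
//!
//! let data = array![0., 1., 2., 0.0, 1.1, 1.9];
//! let x0 = array![0.0, 0.0];
//! let result = least_squares(
//!     residual, &x0, Method::LevenbergMarquardt,
//!     None::<fn(&[f64], &[f64]) -> Array2<f64>>, &data, None,
//! )?;
//! println!("Fit: a = {:.3}, b = {:.3}", result.x[0], result.x[1]);
//! ```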
//!
//! ## Bounds Support
//!
//! The `unconstrained` module supports bounds constraints for variables.
//! You can specify lower and upper bounds for each variable, and the optimizer
//! will ensure that all iterates remain within these bounds.
//!
//! The following methods support bounds constraints:
//! - Powell
//! - Nelder-Mead
//! - BFGS
//! - CG (Conjugate Gradient)
//!
//! ## Examples
//!
//! ### Basic Optimization
//!
//! ```
//! // Example of minimizing a function using BFGS
//! use scirs2_core::ndarray::{array, ArrayView1};
//! use scirs2_optimize::unconstrained::{minimize, Method};
//!
//! fn rosenbrock(x: &ArrayView1<f64>) -> f64 {
//!     let a = 1.0;
//!     let b = 100.0;
//!     let x0 = x[0];
//!     let x1 = x[1];
//!     (a - x0).powi(2) + b * (x1 - x0.powi(2)).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let initial_guess = [0.0, 0.0];
//! let result = minimize(rosenbrock, &initial_guess, Method::BFGS, None)?;
//!
//! println!("Solution: {:?}", result.x);
//! println!("Function value at solution: {}", result.fun);
//! println!("Number of iterations: {}", result.nit);
//! println!("Success: {}", result.success);
//! # Ok(())
//! # }
//! ```
//!
//! ### Optimization with Bounds
//!
//! ```
//! // Example of minimizing a function with bounds constraints
//! use scirs2_core::ndarray::{array, ArrayView1};
//! use scirs2_optimize::{Bounds, unconstrained::{minimize, Method, Options}};
//!
//! // A function with minimum at (-1, -1)
//! fn func(x: &ArrayView1<f64>) -> f64 {
//!     (x[0] + 1.0).powi(2) + (x[1] + 1.0).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Create bounds: x >= 0, y >= 0
//! // This will constrain the optimization to the positive quadrant
//! let bounds = Bounds::new(&[(Some(0.0), None), (Some(0.0), None)]);
//!
//! let initial_guess = [0.5, 0.5];
//! let mut options = Options::default();
//! options.bounds = Some(bounds);
//!
//! // Use Powell's method, which supports bounds
//! let result = minimize(func, &initial_guess, Method::Powell, Some(options))?;
//!
//! // The constrained minimum should be at [0, 0] with value 2.0
//! println!("Solution: {:?}", result.x);
//! println!("Function value at solution: {}", result.fun);
//! # Ok(())
//! # }
//! ```
//!
//! ### Bounds Creation Options
//!
//! ```
//! use scirs2_optimize::Bounds;
//!
//! // Create bounds from pairs
//! // Format: [(min_x1, max_x1), (min_x2, max_x2), ...] where None = unbounded
//! let bounds1 = Bounds::new(&[
//!     (Some(0.0), Some(1.0)),  // 0 <= x[0] <= 1
//!     (Some(-1.0), None),      // x[1] >= -1, no upper bound
//!     (None, Some(10.0)),      // x[2] <= 10, no lower bound
//!     (None, None)             // x[3] is completely unbounded
//! ]);
//!
//! // Alternative: create from separate lower and upper bound vectors
//! let lb = vec![Some(0.0), Some(-1.0), None, None];
//! let ub = vec![Some(1.0), None, Some(10.0), None];
//! let bounds2 = Bounds::from_vecs(lb, ub).unwrap();
//! ```
//!
//! ### Robust Least Squares Example
//!
//! ```
//! use scirs2_core::ndarray::{array, Array1, Array2};
//! use scirs2_optimize::least_squares::{robust_least_squares, HuberLoss};
//!
//! // Define residual function for linear regression
//! fn residual(params: &[f64], data: &[f64]) -> Array1<f64> {
//!     let n = data.len() / 2;
//!     let x_vals = &data[0..n];
//!     let y_vals = &data[n..];
//!
//!     let mut res = Array1::zeros(n);
//!     for i in 0..n {
//!         res[i] = y_vals[i] - (params[0] + params[1] * x_vals[i]);
//!     }
//!     res
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Data with outliers
//! let data = array![0., 1., 2., 3., 4., 0.1, 0.9, 2.1, 2.9, 10.0];
//! let x0 = array![0.0, 0.0];
//!
//! // Use Huber loss for robustness
//! let huber_loss = HuberLoss::new(1.0);
//! let result = robust_least_squares(
//!     residual,
//!     &x0,
//!     huber_loss,
//!     None::<fn(&[f64], &[f64]) -> Array2<f64>>,
//!     &data,
//!     None
//! )?;
//!
//! println!("Robust solution: intercept={:.3}, slope={:.3}",
//!          result.x[0], result.x[1]);
//! # Ok(())
//! # }
//! ```

// BLAS backend linking handled through scirs2-core

// Export error types
pub mod error;
pub use error::{OptimizeError, OptimizeResult};

// Module structure
pub mod advanced_coordinator;
#[cfg(feature = "async")]
pub mod async_parallel;
pub mod automatic_differentiation;
pub mod benchmarking;
pub mod constrained;
pub mod distributed;
pub mod distributed_gpu;
pub mod global;
pub mod gpu;
pub mod jit_optimization;
pub mod learned_optimizers;
pub mod least_squares;
pub mod ml_optimizers;
pub mod multi_objective;
pub mod neural_integration;
pub mod neuromorphic;
pub mod parallel;
pub mod quantum_inspired;
pub mod reinforcement_learning;
pub mod roots;
pub mod roots_anderson;
pub mod roots_krylov;
pub mod scalar;
pub mod self_tuning;
pub mod simd_ops;
pub mod sparse_numdiff; // Refactored into a module with submodules
pub mod stochastic;
pub mod streaming;
pub mod unconstrained;
pub mod unified_pipeline;
pub mod visualization;

// Common optimization result structure
pub mod result;
pub use result::OptimizeResults;

// Convenience re-exports for common functions
pub use advanced_coordinator::{
    advanced_optimize, AdvancedConfig, AdvancedCoordinator, AdvancedStats, AdvancedStrategy,
    StrategyPerformance,
};
#[cfg(feature = "async")]
pub use async_parallel::{
    AsyncDifferentialEvolution, AsyncOptimizationConfig, AsyncOptimizationStats,
    SlowEvaluationStrategy,
};
pub use automatic_differentiation::{
    autodiff, create_ad_gradient, create_ad_hessian, optimize_ad_mode, ADMode, ADResult,
    AutoDiffFunction, AutoDiffOptions,
};
pub use benchmarking::{
    benchmark_suites, test_functions, AlgorithmRanking, BenchmarkConfig, BenchmarkResults,
    BenchmarkRun, BenchmarkSummary, BenchmarkSystem, ProblemCharacteristics, RuntimeStats,
    TestProblem,
};
pub use constrained::minimize_constrained;
pub use distributed::{
    algorithms::{DistributedDifferentialEvolution, DistributedParticleSwarm},
    DistributedConfig, DistributedOptimizationContext, DistributedStats, DistributionStrategy,
    MPIInterface, WorkAssignment,
};
pub use distributed_gpu::{
    DistributedGpuConfig, DistributedGpuOptimizer, DistributedGpuResults, DistributedGpuStats,
    GpuCommunicationStrategy, IterationStats,
};
pub use global::{
    basinhopping, bayesian_optimization, differential_evolution, dual_annealing,
    generate_diverse_start_points, multi_start, multi_start_with_clustering, particle_swarm,
    simulated_annealing,
};
pub use gpu::{
    acceleration::{
        AccelerationConfig, AccelerationManager, AccelerationStrategy, PerformanceStats,
    },
    algorithms::{GpuDifferentialEvolution, GpuParticleSwarm},
    GpuFunction, GpuOptimizationConfig, GpuOptimizationContext, GpuPrecision,
};
pub use jit_optimization::{optimize_function, FunctionPattern, JitCompiler, JitOptions, JitStats};
pub use learned_optimizers::{
    learned_optimize, ActivationType, AdaptationStatistics, AdaptiveNASSystem,
    AdaptiveTransformerOptimizer, FewShotLearningOptimizer, LearnedHyperparameterTuner,
    LearnedOptimizationConfig, LearnedOptimizer, MetaOptimizerState, NeuralAdaptiveOptimizer,
    OptimizationNetwork, OptimizationProblem, ParameterDistribution, ProblemEncoder, TrainingTask,
};
pub use least_squares::{
    bounded_least_squares, least_squares, robust_least_squares, separable_least_squares,
    total_least_squares, weighted_least_squares, BisquareLoss, CauchyLoss, HuberLoss,
};
pub use ml_optimizers::{
    ml_problems, ADMMOptimizer, CoordinateDescentOptimizer, ElasticNetOptimizer,
    GroupLassoOptimizer, LassoOptimizer,
};
pub use multi_objective::{
    MultiObjectiveConfig, MultiObjectiveResult, MultiObjectiveSolution, NSGAII, NSGAIII,
};
pub use neural_integration::{optimizers, NeuralOptimizer, NeuralParameters, NeuralTrainer};
pub use neuromorphic::{
    neuromorphic_optimize, BasicNeuromorphicOptimizer, NeuromorphicConfig, NeuromorphicNetwork,
    NeuromorphicOptimizer, NeuronState, SpikeEvent,
};
pub use quantum_inspired::{
    quantum_optimize, quantum_particle_swarm_optimize, Complex, CoolingSchedule,
    QuantumAnnealingSchedule, QuantumInspiredOptimizer, QuantumOptimizationStats, QuantumState,
};
pub use reinforcement_learning::{
    actor_critic_optimize, bandit_optimize, evolutionary_optimize, meta_learning_optimize,
    policy_gradient_optimize, BanditOptimizer, EvolutionaryStrategy, Experience,
    MetaLearningOptimizer, OptimizationAction, OptimizationState, QLearningOptimizer,
    RLOptimizationConfig, RLOptimizer,
};
pub use roots::root;
pub use scalar::minimize_scalar;
pub use self_tuning::{
    presets, AdaptationResult, AdaptationStrategy, ParameterChange, ParameterValue,
    PerformanceMetrics, SelfTuningConfig, SelfTuningOptimizer, TunableParameter,
};
pub use sparse_numdiff::{sparse_hessian, sparse_jacobian, SparseFiniteDiffOptions};
pub use stochastic::{
    minimize_adam, minimize_adamw, minimize_rmsprop, minimize_sgd, minimize_sgd_momentum,
    minimize_stochastic, AdamOptions, AdamWOptions, DataProvider, InMemoryDataProvider,
    LearningRateSchedule, MomentumOptions, RMSPropOptions, SGDOptions, StochasticGradientFunction,
    StochasticMethod, StochasticOptions,
};
pub use streaming::{
    exponentially_weighted_rls, incremental_bfgs, incremental_lbfgs,
    incremental_lbfgs_linear_regression, kalman_filter_estimator, online_gradient_descent,
    online_linear_regression, online_logistic_regression, real_time_linear_regression,
    recursive_least_squares, rolling_window_gradient_descent, rolling_window_least_squares,
    rolling_window_linear_regression, rolling_window_weighted_least_squares,
    streaming_trust_region_linear_regression, streaming_trust_region_logistic_regression,
    IncrementalNewton, IncrementalNewtonMethod, LinearRegressionObjective,
    LogisticRegressionObjective, RealTimeEstimator, RealTimeMethod, RollingWindowOptimizer,
    StreamingConfig, StreamingDataPoint, StreamingObjective, StreamingOptimizer, StreamingStats,
    StreamingTrustRegion,
};
pub use unconstrained::{minimize, Bounds};
pub use unified_pipeline::{
    presets as unified_presets, UnifiedOptimizationConfig, UnifiedOptimizationResults,
    UnifiedOptimizer,
};
pub use visualization::{
    tracking::TrajectoryTracker, ColorScheme, OptimizationTrajectory, OptimizationVisualizer,
    OutputFormat, VisualizationConfig,
};

/// Prelude module for convenient imports of the most common APIs.
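///
/// A quick usage sketch:
///
/// ```rust
/// use scirs2_optimize::prelude::*;
/// ```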
pub mod prelude {
    pub use crate::advanced_coordinator::{
        advanced_optimize, AdvancedConfig, AdvancedCoordinator, AdvancedStats, AdvancedStrategy,
        StrategyPerformance,
    };
    #[cfg(feature = "async")]
    pub use crate::async_parallel::{
        AsyncDifferentialEvolution, AsyncOptimizationConfig, AsyncOptimizationStats,
        SlowEvaluationStrategy,
    };
    pub use crate::automatic_differentiation::{
        autodiff, create_ad_gradient, create_ad_hessian, optimize_ad_mode, ADMode, ADResult,
        AutoDiffFunction, AutoDiffOptions, Dual, DualNumber,
    };
    pub use crate::benchmarking::{
        benchmark_suites, test_functions, AlgorithmRanking, BenchmarkConfig, BenchmarkResults,
        BenchmarkRun, BenchmarkSummary, BenchmarkSystem, ProblemCharacteristics, RuntimeStats,
        TestProblem,
    };
    pub use crate::constrained::{minimize_constrained, Method as ConstrainedMethod};
    pub use crate::distributed::{
        algorithms::{DistributedDifferentialEvolution, DistributedParticleSwarm},
        DistributedConfig, DistributedOptimizationContext, DistributedStats, DistributionStrategy,
        MPIInterface, WorkAssignment,
    };
    pub use crate::distributed_gpu::{
        DistributedGpuConfig, DistributedGpuOptimizer, DistributedGpuResults, DistributedGpuStats,
        GpuCommunicationStrategy, IterationStats,
    };
    pub use crate::error::{OptimizeError, OptimizeResult};
    pub use crate::global::{
        basinhopping, bayesian_optimization, differential_evolution, dual_annealing,
        generate_diverse_start_points, multi_start_with_clustering, particle_swarm,
        simulated_annealing, AcquisitionFunctionType, BasinHoppingOptions,
        BayesianOptimizationOptions, BayesianOptimizer, ClusterCentroid, ClusteringAlgorithm,
        ClusteringOptions, ClusteringResult, DifferentialEvolutionOptions, DualAnnealingOptions,
        InitialPointGenerator, KernelType, LocalMinimum, Parameter, ParticleSwarmOptions,
        SimulatedAnnealingOptions, Space, StartPointStrategy,
    };
    pub use crate::gpu::{
        acceleration::{
            AccelerationConfig, AccelerationManager, AccelerationStrategy, PerformanceStats,
        },
        algorithms::{GpuDifferentialEvolution, GpuParticleSwarm},
        GpuFunction, GpuOptimizationConfig, GpuOptimizationContext, GpuPrecision,
    };
    pub use crate::jit_optimization::{
        optimize_function, FunctionPattern, JitCompiler, JitOptions, JitStats,
    };
    pub use crate::learned_optimizers::{
        learned_optimize, ActivationType, AdaptationStatistics, AdaptiveNASSystem,
        AdaptiveTransformerOptimizer, FewShotLearningOptimizer, LearnedHyperparameterTuner,
        LearnedOptimizationConfig, LearnedOptimizer, MetaOptimizerState, NeuralAdaptiveOptimizer,
        OptimizationNetwork, OptimizationProblem, ParameterDistribution, ProblemEncoder,
        TrainingTask,
    };
    pub use crate::least_squares::{
        bounded_least_squares, least_squares, robust_least_squares, separable_least_squares,
        total_least_squares, weighted_least_squares, BisquareLoss, BoundedOptions, CauchyLoss,
        HuberLoss, LinearSolver, Method as LeastSquaresMethod, RobustLoss, RobustOptions,
        SeparableOptions, SeparableResult, TLSMethod, TotalLeastSquaresOptions,
        TotalLeastSquaresResult, WeightedOptions,
    };
    pub use crate::ml_optimizers::{
        ml_problems, ADMMOptimizer, CoordinateDescentOptimizer, ElasticNetOptimizer,
        GroupLassoOptimizer, LassoOptimizer,
    };
    pub use crate::multi_objective::{
        MultiObjectiveConfig, MultiObjectiveResult, MultiObjectiveSolution, NSGAII, NSGAIII,
    };
    pub use crate::neural_integration::{
        optimizers, NeuralOptimizer, NeuralParameters, NeuralTrainer,
    };
    pub use crate::neuromorphic::{
        neuromorphic_optimize, BasicNeuromorphicOptimizer, NeuromorphicConfig, NeuromorphicNetwork,
        NeuromorphicOptimizer, NeuronState, SpikeEvent,
    };
    pub use crate::parallel::{
        parallel_evaluate_batch, parallel_finite_diff_gradient, ParallelOptions,
    };
    pub use crate::quantum_inspired::{
        quantum_optimize, quantum_particle_swarm_optimize, Complex, CoolingSchedule,
        QuantumAnnealingSchedule, QuantumInspiredOptimizer, QuantumOptimizationStats, QuantumState,
    };
    pub use crate::reinforcement_learning::{
        bandit_optimize, evolutionary_optimize, meta_learning_optimize, policy_gradient_optimize,
        BanditOptimizer, EvolutionaryStrategy, Experience, MetaLearningOptimizer,
        OptimizationAction, OptimizationState, QLearningOptimizer, RLOptimizationConfig,
        RLOptimizer,
    };
    pub use crate::result::OptimizeResults;
    pub use crate::roots::{root, Method as RootMethod};
    pub use crate::scalar::{
        minimize_scalar, Method as ScalarMethod, Options as ScalarOptions, ScalarOptimizeResult,
    };
    pub use crate::self_tuning::{
        presets, AdaptationResult, AdaptationStrategy, ParameterChange, ParameterValue,
        PerformanceMetrics, SelfTuningConfig, SelfTuningOptimizer, TunableParameter,
    };
    pub use crate::sparse_numdiff::{sparse_hessian, sparse_jacobian, SparseFiniteDiffOptions};
    pub use crate::streaming::{
        exponentially_weighted_rls, incremental_bfgs, incremental_lbfgs,
        incremental_lbfgs_linear_regression, kalman_filter_estimator, online_gradient_descent,
        online_linear_regression, online_logistic_regression, real_time_linear_regression,
        recursive_least_squares, rolling_window_gradient_descent, rolling_window_least_squares,
        rolling_window_linear_regression, rolling_window_weighted_least_squares,
        streaming_trust_region_linear_regression, streaming_trust_region_logistic_regression,
        IncrementalNewton, IncrementalNewtonMethod, LinearRegressionObjective,
        LogisticRegressionObjective, RealTimeEstimator, RealTimeMethod, RollingWindowOptimizer,
        StreamingConfig, StreamingDataPoint, StreamingObjective, StreamingOptimizer,
        StreamingStats, StreamingTrustRegion,
    };
    pub use crate::unconstrained::{minimize, Bounds, Method as UnconstrainedMethod, Options};
    pub use crate::unified_pipeline::{
        presets as unified_presets, UnifiedOptimizationConfig, UnifiedOptimizationResults,
        UnifiedOptimizer,
    };
    pub use crate::visualization::{
        tracking::TrajectoryTracker, ColorScheme, OptimizationTrajectory, OptimizationVisualizer,
        OutputFormat, VisualizationConfig,
    };
}

#[cfg(test)]
mod tests {
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}