scirs2_optimize/
lib.rs

#![allow(deprecated)]
#![allow(clippy::all)]
#![allow(dead_code)]
#![allow(unreachable_patterns)]
#![allow(unused_assignments)]
#![allow(unused_variables)]
#![allow(private_interfaces)]
#![allow(clippy::field_reassign_with_default)]
// Allow common mathematical conventions in optimization code
#![allow(clippy::many_single_char_names)] // x, f, g, h, n, m etc. are standard in optimization
#![allow(clippy::similar_names)] // x_pp, x_pm, x_mp, x_mm are standard for finite differences
#![recursion_limit = "512"]

//! # SciRS2 Optimize - Mathematical Optimization for Rust
//!
//! **scirs2-optimize** provides comprehensive optimization algorithms modeled after SciPy's
//! `optimize` module, offering everything from simple function minimization to complex
//! constrained optimization and global search.
//!
//! ## 🎯 Key Features
//!
//! - **Unconstrained Optimization**: BFGS, CG, Nelder-Mead, Powell
//! - **Constrained Optimization**: SLSQP, Trust-region methods
//! - **Global Optimization**: Differential Evolution, Basin-hopping, Simulated Annealing
//! - **Least Squares**: Levenberg-Marquardt, robust fitting, bounded problems
//! - **Root Finding**: Newton, Brent, Bisection methods
//! - **Scalar Optimization**: Brent, Golden section search
//! - **Bounds Support**: Box constraints for all major algorithms
//!
//! ## 📦 Module Overview
//!
//! | Module | Description | SciPy Equivalent |
//! |--------|-------------|------------------|
//! | [`unconstrained`] | Unconstrained minimization (BFGS, CG, Powell) | `scipy.optimize.minimize` |
//! | [`constrained`] | Constrained optimization (SLSQP, Trust-region) | `scipy.optimize.minimize` with constraints |
//! | [`global`] | Global optimization (DE, Basin-hopping) | `scipy.optimize.differential_evolution` |
//! | [`mod@least_squares`] | Nonlinear least squares (LM, robust methods) | `scipy.optimize.least_squares` |
//! | [`roots`] | Root finding algorithms | `scipy.optimize.root` |
//! | [`scalar`] | 1-D minimization | `scipy.optimize.minimize_scalar` |
//!
//! ## 🚀 Quick Start
//!
//! ### Installation
//!
//! ```toml
//! [dependencies]
//! scirs2-optimize = "0.1.0-rc.4"
//! ```
//!
//! ### Unconstrained Minimization (Rosenbrock Function)
//!
//! ```rust
//! use scirs2_optimize::unconstrained::{minimize, Method};
//! use scirs2_core::ndarray::ArrayView1;
//!
//! // Rosenbrock function: (1-x)² + 100(y-x²)²
//! fn rosenbrock(x: &ArrayView1<f64>) -> f64 {
//!     let x0 = x[0];
//!     let x1 = x[1];
//!     (1.0 - x0).powi(2) + 100.0 * (x1 - x0.powi(2)).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let initial_guess = [0.0, 0.0];
//! let result = minimize(rosenbrock, &initial_guess, Method::BFGS, None)?;
//!
//! println!("Minimum at: {:?}", result.x);
//! println!("Function value: {}", result.fun);
//! println!("Converged: {}", result.success);
//! # Ok(())
//! # }
//! ```
//!
//! ### Optimization with Bounds
//!
//! Constrain variables to specific ranges:
//!
//! ```rust
//! use scirs2_optimize::{Bounds, unconstrained::{minimize, Method, Options}};
//! use scirs2_core::ndarray::ArrayView1;
//!
//! fn objective(x: &ArrayView1<f64>) -> f64 {
//!     (x[0] + 1.0).powi(2) + (x[1] + 1.0).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Constrain to positive quadrant: x >= 0, y >= 0
//! let bounds = Bounds::new(&[
//!     (Some(0.0), None),  // x >= 0
//!     (Some(0.0), None),  // y >= 0
//! ]);
//!
//! let mut options = Options::default();
//! options.bounds = Some(bounds);
//!
//! let result = minimize(objective, &[0.5, 0.5], Method::Powell, Some(options))?;
//! println!("Constrained minimum: {:?}", result.x);  // [0.0, 0.0]
//! # Ok(())
//! # }
//! ```
//!
//! ### Robust Least Squares
//!
//! Fit data with outliers using robust loss functions:
//!
//! ```rust
//! use scirs2_optimize::least_squares::{robust_least_squares, HuberLoss};
//! use scirs2_core::ndarray::{array, Array1};
//!
//! // Linear model residual: y - (a + b*x)
//! fn residual(params: &[f64], data: &[f64]) -> Array1<f64> {
//!     let n = data.len() / 2;
//!     let x = &data[0..n];
//!     let y = &data[n..];
//!
//!     let mut res = Array1::zeros(n);
//!     for i in 0..n {
//!         res[i] = y[i] - (params[0] + params[1] * x[i]);
//!     }
//!     res
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Data: x = [0,1,2,3,4], y = [0.1,0.9,2.1,2.9,10.0] (last point is outlier)
//! let data = array![0.,1.,2.,3.,4., 0.1,0.9,2.1,2.9,10.0];
//!
//! let huber = HuberLoss::new(1.0);  // Robust to outliers
//! let x0 = array![0.0, 0.0];
//! let result = robust_least_squares(
//!     residual, &x0, huber, None::<fn(&[f64], &[f64]) -> scirs2_core::ndarray::Array2<f64>>, &data, None
//! )?;
//!
//! println!("Robust fit: y = {:.3} + {:.3}x", result.x[0], result.x[1]);
//! # Ok(())
//! # }
//! ```
//!
//! ### Global Optimization
//!
//! Find the global minimum of multi-modal functions:
//!
//! ```rust,no_run
//! use scirs2_optimize::global::{differential_evolution, DifferentialEvolutionOptions};
//! use scirs2_core::ndarray::ArrayView1;
//!
//! // Rastrigin function (multiple local minima)
//! fn rastrigin(x: &ArrayView1<f64>) -> f64 {
//!     let n = x.len() as f64;
//!     10.0 * n + x.iter().map(|xi| xi.powi(2) - 10.0 * (2.0 * std::f64::consts::PI * xi).cos()).sum::<f64>()
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let bounds = vec![(-5.12, 5.12); 5];  // 5-dimensional search space
//! let options = Some(DifferentialEvolutionOptions::default());
//!
//! let result = differential_evolution(rastrigin, bounds, options, None)?;
//! println!("Global minimum: {:?}", result.x);
//! # Ok(())
//! # }
//! ```
//!
//! ### Root Finding
//!
//! Solve equations f(x) = 0:
//!
//! ```rust,no_run
//! use scirs2_optimize::roots::{root, Method};
//! use scirs2_core::ndarray::{array, Array1};
//!
//! // Find root of x² - 2 = 0 (i.e., √2)
//! fn f(x: &[f64]) -> Array1<f64> {
//!     array![x[0] * x[0] - 2.0]
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let x0 = array![1.5];  // Initial guess
//! let result = root(f, &x0, Method::Hybr, None::<fn(&[f64]) -> scirs2_core::ndarray::Array2<f64>>, None)?;
//! println!("√2 ≈ {:.10}", result.x[0]);  // 1.4142135624
//! # Ok(())
//! # }
//! ```
//!
//! ## Submodules
//!
//! * `unconstrained`: Unconstrained optimization algorithms
//! * `constrained`: Constrained optimization algorithms
//! * `least_squares`: Least squares minimization (including robust methods)
//! * `roots`: Root finding algorithms
//! * `scalar`: Scalar (univariate) optimization algorithms
//! * `global`: Global optimization algorithms
//!
//! ## Optimization Methods
//!
//! The following optimization methods are currently implemented:
//!
//! ### Unconstrained:
//! - **Nelder-Mead**: A derivative-free method using a simplex-based approach
//! - **Powell**: Derivative-free method using conjugate directions
//! - **BFGS**: Quasi-Newton method with BFGS update
//! - **CG**: Nonlinear conjugate gradient method
//!
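//! A minimal sketch of selecting one of these methods; the calling convention
//! matches the BFGS example above, but the `Method::NelderMead` variant name is
//! an assumption inferred from the variants shown elsewhere in these docs:
//!
//! ```rust,ignore
//! use scirs2_optimize::unconstrained::{minimize, Method};
//! use scirs2_core::ndarray::ArrayView1;
//!
//! fn sphere(x: &ArrayView1<f64>) -> f64 {
//!     x.iter().map(|xi| xi * xi).sum()
//! }
//!
//! // Same calling convention as the BFGS example above; only the method differs.
//! let result = minimize(sphere, &[1.0, 1.0], Method::NelderMead, None)?;
//! ```
//!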
//! ### Constrained:
//! - **SLSQP**: Sequential Least SQuares Programming
//! - **TrustConstr**: Trust-region constrained optimizer
//!
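//! A hypothetical sketch of calling the constrained driver. The actual
//! signature of `minimize_constrained`, including how constraints are
//! represented, is defined in the [`constrained`] module and may differ from
//! this outline; `f`, `x0`, and `constraints` are placeholders:
//!
//! ```rust,ignore
//! use scirs2_optimize::constrained::{minimize_constrained, Method};
//!
//! // Minimize f subject to constraints, starting from x0, using SLSQP.
//! let result = minimize_constrained(f, &x0, &constraints, Method::SLSQP, None)?;
//! ```
//!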
//! ### Scalar (Univariate) Optimization:
//! - **Brent**: Combines parabolic interpolation with golden section search
//! - **Bounded**: Brent's method with bounds constraints
//! - **Golden**: Golden section search
//!
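//! A hypothetical sketch for 1-D minimization. The precise arguments of
//! `minimize_scalar` are defined in the [`scalar`] module; the shape below is
//! an assumption modeled on SciPy's `minimize_scalar`:
//!
//! ```rust,ignore
//! use scirs2_optimize::scalar::{minimize_scalar, Method};
//!
//! // Minimize a univariate function, e.g. (x - 2)², with Brent's method.
//! let result = minimize_scalar(|x: f64| (x - 2.0).powi(2), Method::Brent, None)?;
//! println!("Minimum near x = {}", result.x);
//! ```
//!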
//! ### Global:
//! - **Differential Evolution**: Stochastic global optimization method
//! - **Basin-hopping**: Random perturbations with local minimization
//! - **Dual Annealing**: Combines classical and fast simulated annealing with local search
//! - **Particle Swarm**: Population-based optimization inspired by swarm behavior
//! - **Simulated Annealing**: Probabilistic optimization with cooling schedule
//!
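//! A hypothetical sketch of basin-hopping. The actual parameters of
//! `basinhopping` and `BasinHoppingOptions` live in the [`global`] module and
//! may differ; the `differential_evolution` example above shows the calling
//! convention these docs demonstrate concretely. `objective` and `x0` are
//! placeholders:
//!
//! ```rust,ignore
//! use scirs2_optimize::global::{basinhopping, BasinHoppingOptions};
//!
//! // Repeatedly perturb x0 and re-minimize locally, keeping the best result.
//! let result = basinhopping(objective, &x0, Some(BasinHoppingOptions::default()))?;
//! ```
//!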
//! ### Least Squares:
//! - **Levenberg-Marquardt**: Trust-region algorithm for nonlinear least squares
//! - **Trust Region Reflective**: Bounds-constrained least squares
//! - **Robust Least Squares**: M-estimators for outlier-resistant regression
//!   - Huber loss: Reduces influence of moderate outliers
//!   - Bisquare loss: Completely rejects extreme outliers
//!   - Cauchy loss: Provides very strong outlier resistance
//! - **Weighted Least Squares**: Handles heteroscedastic data (varying variance)
//! - **Bounded Least Squares**: Box constraints on parameters
//! - **Separable Least Squares**: Variable projection for partially linear models
//! - **Total Least Squares**: Errors-in-variables regression
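//!
//! A hypothetical sketch of a plain (non-robust) Levenberg-Marquardt fit. The
//! argument order is an assumption patterned after the `robust_least_squares`
//! example above, and the `Method::LevenbergMarquardt` variant name is not
//! confirmed here:
//!
//! ```rust,ignore
//! use scirs2_core::ndarray::Array2;
//! use scirs2_optimize::least_squares::{least_squares, Method};
//!
//! // residual(params, data) -> Array1<f64>, as in the robust example above.
//! let result = least_squares(
//!     residual, &x0, Method::LevenbergMarquardt,
//!     None::<fn(&[f64], &[f64]) -> Array2<f64>>, &data, None,
//! )?;
//! ```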
//!
//! ## Bounds Support
//!
//! The `unconstrained` module supports bounds constraints on variables.
//! You can specify lower and upper bounds for each variable, and the optimizer
//! will ensure that all iterates remain within these bounds.
//!
//! The following methods support bounds constraints:
//! - Powell
//! - Nelder-Mead
//! - BFGS
//! - CG (Conjugate Gradient)
//!
//! ## Examples
//!
//! ### Basic Optimization
//!
//! ```
//! // Example of minimizing a function using BFGS
//! use scirs2_core::ndarray::{array, ArrayView1};
//! use scirs2_optimize::unconstrained::{minimize, Method};
//!
//! fn rosenbrock(x: &ArrayView1<f64>) -> f64 {
//!     let a = 1.0;
//!     let b = 100.0;
//!     let x0 = x[0];
//!     let x1 = x[1];
//!     (a - x0).powi(2) + b * (x1 - x0.powi(2)).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let initial_guess = [0.0, 0.0];
//! let result = minimize(rosenbrock, &initial_guess, Method::BFGS, None)?;
//!
//! println!("Solution: {:?}", result.x);
//! println!("Function value at solution: {}", result.fun);
//! println!("Number of iterations: {}", result.nit);
//! println!("Success: {}", result.success);
//! # Ok(())
//! # }
//! ```
//!
//! ### Optimization with Bounds
//!
//! ```
//! // Example of minimizing a function with bounds constraints
//! use scirs2_core::ndarray::{array, ArrayView1};
//! use scirs2_optimize::{Bounds, unconstrained::{minimize, Method, Options}};
//!
//! // A function with minimum at (-1, -1)
//! fn func(x: &ArrayView1<f64>) -> f64 {
//!     (x[0] + 1.0).powi(2) + (x[1] + 1.0).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Create bounds: x >= 0, y >= 0
//! // This will constrain the optimization to the positive quadrant
//! let bounds = Bounds::new(&[(Some(0.0), None), (Some(0.0), None)]);
//!
//! let initial_guess = [0.5, 0.5];
//! let mut options = Options::default();
//! options.bounds = Some(bounds);
//!
//! // Use Powell's method, which supports bounds
//! let result = minimize(func, &initial_guess, Method::Powell, Some(options))?;
//!
//! // The constrained minimum should be at [0, 0] with value 2.0
//! println!("Solution: {:?}", result.x);
//! println!("Function value at solution: {}", result.fun);
//! # Ok(())
//! # }
//! ```
//!
//! ### Bounds Creation Options
//!
//! ```
//! use scirs2_optimize::Bounds;
//!
//! // Create bounds from pairs
//! // Format: [(min_x1, max_x1), (min_x2, max_x2), ...] where None = unbounded
//! let bounds1 = Bounds::new(&[
//!     (Some(0.0), Some(1.0)),  // 0 <= x[0] <= 1
//!     (Some(-1.0), None),      // x[1] >= -1, no upper bound
//!     (None, Some(10.0)),      // x[2] <= 10, no lower bound
//!     (None, None)             // x[3] is completely unbounded
//! ]);
//!
//! // Alternative: create from separate lower and upper bound vectors
//! let lb = vec![Some(0.0), Some(-1.0), None, None];
//! let ub = vec![Some(1.0), None, Some(10.0), None];
//! let bounds2 = Bounds::from_vecs(lb, ub).unwrap();
//! ```
//!
//! ### Robust Least Squares Example
//!
//! ```
//! use scirs2_core::ndarray::{array, Array1, Array2};
//! use scirs2_optimize::least_squares::{robust_least_squares, HuberLoss};
//!
//! // Define residual function for linear regression
//! fn residual(params: &[f64], data: &[f64]) -> Array1<f64> {
//!     let n = data.len() / 2;
//!     let x_vals = &data[0..n];
//!     let y_vals = &data[n..];
//!
//!     let mut res = Array1::zeros(n);
//!     for i in 0..n {
//!         res[i] = y_vals[i] - (params[0] + params[1] * x_vals[i]);
//!     }
//!     res
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Data with outliers
//! let data = array![0., 1., 2., 3., 4., 0.1, 0.9, 2.1, 2.9, 10.0];
//! let x0 = array![0.0, 0.0];
//!
//! // Use Huber loss for robustness
//! let huber_loss = HuberLoss::new(1.0);
//! let result = robust_least_squares(
//!     residual,
//!     &x0,
//!     huber_loss,
//!     None::<fn(&[f64], &[f64]) -> Array2<f64>>,
//!     &data,
//!     None
//! )?;
//!
//! println!("Robust solution: intercept={:.3}, slope={:.3}",
//!          result.x[0], result.x[1]);
//! # Ok(())
//! # }
//! ```

// BLAS backend linking handled through scirs2-core

// Export error types
pub mod error;
pub use error::{OptimizeError, OptimizeResult};

// Python API wrappers
// Note: python_api module not yet implemented
// #[cfg(feature = "python")]
// pub mod python_api;

// Module structure (used by other modules, must be unconditional)
pub mod advanced_coordinator;
#[cfg(feature = "async")]
pub mod async_parallel;
pub mod automatic_differentiation;
pub mod benchmarking;
pub mod constrained;
pub mod distributed;
pub mod distributed_gpu;
pub mod global;
pub mod gpu;
pub mod jit_optimization;
pub mod learned_optimizers;
pub mod least_squares;
pub mod ml_optimizers;
pub mod multi_objective;
pub mod neural_integration;
pub mod neuromorphic;
pub mod parallel;
pub mod quantum_inspired;
pub mod reinforcement_learning;
pub mod roots;
pub mod roots_anderson;
pub mod roots_krylov;
pub mod scalar;
pub mod self_tuning;
pub mod simd_ops;
pub mod sparse_numdiff; // Refactored into a module with submodules
pub mod stochastic;
pub mod streaming;
pub mod unconstrained;
pub mod unified_pipeline;
pub mod visualization;

// Common optimization result structure
pub mod result;
pub use result::OptimizeResults;

// Convenience re-exports for common functions
pub use advanced_coordinator::{
    advanced_optimize, AdvancedConfig, AdvancedCoordinator, AdvancedStats, AdvancedStrategy,
    StrategyPerformance,
};
#[cfg(feature = "async")]
pub use async_parallel::{
    AsyncDifferentialEvolution, AsyncOptimizationConfig, AsyncOptimizationStats,
    SlowEvaluationStrategy,
};
pub use automatic_differentiation::{
    autodiff, create_ad_gradient, create_ad_hessian, optimize_ad_mode, ADMode, ADResult,
    AutoDiffFunction, AutoDiffOptions,
};
pub use benchmarking::{
    benchmark_suites, test_functions, AlgorithmRanking, BenchmarkConfig, BenchmarkResults,
    BenchmarkRun, BenchmarkSummary, BenchmarkSystem, ProblemCharacteristics, RuntimeStats,
    TestProblem,
};
pub use constrained::minimize_constrained;
pub use distributed::{
    algorithms::{DistributedDifferentialEvolution, DistributedParticleSwarm},
    DistributedConfig, DistributedOptimizationContext, DistributedStats, DistributionStrategy,
    MPIInterface, WorkAssignment,
};
pub use distributed_gpu::{
    DistributedGpuConfig, DistributedGpuOptimizer, DistributedGpuResults, DistributedGpuStats,
    GpuCommunicationStrategy, IterationStats,
};
pub use global::{
    basinhopping, bayesian_optimization, differential_evolution, dual_annealing,
    generate_diverse_start_points, multi_start, multi_start_with_clustering, particle_swarm,
    simulated_annealing,
};
pub use gpu::{
    acceleration::{
        AccelerationConfig, AccelerationManager, AccelerationStrategy, PerformanceStats,
    },
    algorithms::{GpuDifferentialEvolution, GpuParticleSwarm},
    GpuFunction, GpuOptimizationConfig, GpuOptimizationContext, GpuPrecision,
};
pub use jit_optimization::{optimize_function, FunctionPattern, JitCompiler, JitOptions, JitStats};
pub use learned_optimizers::{
    learned_optimize, ActivationType, AdaptationStatistics, AdaptiveNASSystem,
    AdaptiveTransformerOptimizer, FewShotLearningOptimizer, LearnedHyperparameterTuner,
    LearnedOptimizationConfig, LearnedOptimizer, MetaOptimizerState, NeuralAdaptiveOptimizer,
    OptimizationNetwork, OptimizationProblem, ParameterDistribution, ProblemEncoder, TrainingTask,
};
pub use least_squares::{
    bounded_least_squares, least_squares, robust_least_squares, separable_least_squares,
    total_least_squares, weighted_least_squares, BisquareLoss, CauchyLoss, HuberLoss,
};
pub use ml_optimizers::{
    ml_problems, ADMMOptimizer, CoordinateDescentOptimizer, ElasticNetOptimizer,
    GroupLassoOptimizer, LassoOptimizer,
};
pub use multi_objective::{
    MultiObjectiveConfig, MultiObjectiveResult, MultiObjectiveSolution, NSGAII, NSGAIII,
};
pub use neural_integration::{optimizers, NeuralOptimizer, NeuralParameters, NeuralTrainer};
pub use neuromorphic::{
    neuromorphic_optimize, BasicNeuromorphicOptimizer, NeuromorphicConfig, NeuromorphicNetwork,
    NeuromorphicOptimizer, NeuronState, SpikeEvent,
};
pub use quantum_inspired::{
    quantum_optimize, quantum_particle_swarm_optimize, Complex, CoolingSchedule,
    QuantumAnnealingSchedule, QuantumInspiredOptimizer, QuantumOptimizationStats, QuantumState,
};
pub use reinforcement_learning::{
    actor_critic_optimize, bandit_optimize, evolutionary_optimize, meta_learning_optimize,
    policy_gradient_optimize, BanditOptimizer, EvolutionaryStrategy, Experience,
    MetaLearningOptimizer, OptimizationAction, OptimizationState, QLearningOptimizer,
    RLOptimizationConfig, RLOptimizer,
};
pub use roots::root;
pub use scalar::minimize_scalar;
pub use self_tuning::{
    presets, AdaptationResult, AdaptationStrategy, ParameterChange, ParameterValue,
    PerformanceMetrics, SelfTuningConfig, SelfTuningOptimizer, TunableParameter,
};
pub use sparse_numdiff::{sparse_hessian, sparse_jacobian, SparseFiniteDiffOptions};
pub use stochastic::{
    minimize_adam, minimize_adamw, minimize_rmsprop, minimize_sgd, minimize_sgd_momentum,
    minimize_stochastic, AdamOptions, AdamWOptions, DataProvider, InMemoryDataProvider,
    LearningRateSchedule, MomentumOptions, RMSPropOptions, SGDOptions, StochasticGradientFunction,
    StochasticMethod, StochasticOptions,
};
pub use streaming::{
    exponentially_weighted_rls, incremental_bfgs, incremental_lbfgs,
    incremental_lbfgs_linear_regression, kalman_filter_estimator, online_gradient_descent,
    online_linear_regression, online_logistic_regression, real_time_linear_regression,
    recursive_least_squares, rolling_window_gradient_descent, rolling_window_least_squares,
    rolling_window_linear_regression, rolling_window_weighted_least_squares,
    streaming_trust_region_linear_regression, streaming_trust_region_logistic_regression,
    IncrementalNewton, IncrementalNewtonMethod, LinearRegressionObjective,
    LogisticRegressionObjective, RealTimeEstimator, RealTimeMethod, RollingWindowOptimizer,
    StreamingConfig, StreamingDataPoint, StreamingObjective, StreamingOptimizer, StreamingStats,
    StreamingTrustRegion,
};
pub use unconstrained::{minimize, Bounds};
pub use unified_pipeline::{
    presets as unified_presets, UnifiedOptimizationConfig, UnifiedOptimizationResults,
    UnifiedOptimizer,
};
pub use visualization::{
    tracking::TrajectoryTracker, ColorScheme, OptimizationTrajectory, OptimizationVisualizer,
    OutputFormat, VisualizationConfig,
};

/// Prelude module for convenient imports.
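///
/// A typical glob import, bringing the main entry points, method enums, and
/// option types into scope:
///
/// ```
/// use scirs2_optimize::prelude::*;
/// ```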
pub mod prelude {
    pub use crate::advanced_coordinator::{
        advanced_optimize, AdvancedConfig, AdvancedCoordinator, AdvancedStats, AdvancedStrategy,
        StrategyPerformance,
    };
    #[cfg(feature = "async")]
    pub use crate::async_parallel::{
        AsyncDifferentialEvolution, AsyncOptimizationConfig, AsyncOptimizationStats,
        SlowEvaluationStrategy,
    };
    pub use crate::automatic_differentiation::{
        autodiff, create_ad_gradient, create_ad_hessian, optimize_ad_mode, ADMode, ADResult,
        AutoDiffFunction, AutoDiffOptions, Dual, DualNumber,
    };
    pub use crate::benchmarking::{
        benchmark_suites, test_functions, AlgorithmRanking, BenchmarkConfig, BenchmarkResults,
        BenchmarkRun, BenchmarkSummary, BenchmarkSystem, ProblemCharacteristics, RuntimeStats,
        TestProblem,
    };
    pub use crate::constrained::{minimize_constrained, Method as ConstrainedMethod};
    pub use crate::distributed::{
        algorithms::{DistributedDifferentialEvolution, DistributedParticleSwarm},
        DistributedConfig, DistributedOptimizationContext, DistributedStats, DistributionStrategy,
        MPIInterface, WorkAssignment,
    };
    pub use crate::distributed_gpu::{
        DistributedGpuConfig, DistributedGpuOptimizer, DistributedGpuResults, DistributedGpuStats,
        GpuCommunicationStrategy, IterationStats,
    };
    pub use crate::error::{OptimizeError, OptimizeResult};
    pub use crate::global::{
        basinhopping, bayesian_optimization, differential_evolution, dual_annealing,
        generate_diverse_start_points, multi_start_with_clustering, particle_swarm,
        simulated_annealing, AcquisitionFunctionType, BasinHoppingOptions,
        BayesianOptimizationOptions, BayesianOptimizer, ClusterCentroid, ClusteringAlgorithm,
        ClusteringOptions, ClusteringResult, DifferentialEvolutionOptions, DualAnnealingOptions,
        InitialPointGenerator, KernelType, LocalMinimum, Parameter, ParticleSwarmOptions,
        SimulatedAnnealingOptions, Space, StartPointStrategy,
    };
    pub use crate::gpu::{
        acceleration::{
            AccelerationConfig, AccelerationManager, AccelerationStrategy, PerformanceStats,
        },
        algorithms::{GpuDifferentialEvolution, GpuParticleSwarm},
        GpuFunction, GpuOptimizationConfig, GpuOptimizationContext, GpuPrecision,
    };
    pub use crate::jit_optimization::{
        optimize_function, FunctionPattern, JitCompiler, JitOptions, JitStats,
    };
    pub use crate::learned_optimizers::{
        learned_optimize, ActivationType, AdaptationStatistics, AdaptiveNASSystem,
        AdaptiveTransformerOptimizer, FewShotLearningOptimizer, LearnedHyperparameterTuner,
        LearnedOptimizationConfig, LearnedOptimizer, MetaOptimizerState, NeuralAdaptiveOptimizer,
        OptimizationNetwork, OptimizationProblem, ParameterDistribution, ProblemEncoder,
        TrainingTask,
    };
    pub use crate::least_squares::{
        bounded_least_squares, least_squares, robust_least_squares, separable_least_squares,
        total_least_squares, weighted_least_squares, BisquareLoss, BoundedOptions, CauchyLoss,
        HuberLoss, LinearSolver, Method as LeastSquaresMethod, RobustLoss, RobustOptions,
        SeparableOptions, SeparableResult, TLSMethod, TotalLeastSquaresOptions,
        TotalLeastSquaresResult, WeightedOptions,
    };
    pub use crate::ml_optimizers::{
        ml_problems, ADMMOptimizer, CoordinateDescentOptimizer, ElasticNetOptimizer,
        GroupLassoOptimizer, LassoOptimizer,
    };
    pub use crate::multi_objective::{
        MultiObjectiveConfig, MultiObjectiveResult, MultiObjectiveSolution, NSGAII, NSGAIII,
    };
    pub use crate::neural_integration::{
        optimizers, NeuralOptimizer, NeuralParameters, NeuralTrainer,
    };
    pub use crate::neuromorphic::{
        neuromorphic_optimize, BasicNeuromorphicOptimizer, NeuromorphicConfig, NeuromorphicNetwork,
        NeuromorphicOptimizer, NeuronState, SpikeEvent,
    };
    pub use crate::parallel::{
        parallel_evaluate_batch, parallel_finite_diff_gradient, ParallelOptions,
    };
    pub use crate::quantum_inspired::{
        quantum_optimize, quantum_particle_swarm_optimize, Complex, CoolingSchedule,
        QuantumAnnealingSchedule, QuantumInspiredOptimizer, QuantumOptimizationStats, QuantumState,
    };
    pub use crate::reinforcement_learning::{
        bandit_optimize, evolutionary_optimize, meta_learning_optimize, policy_gradient_optimize,
        BanditOptimizer, EvolutionaryStrategy, Experience, MetaLearningOptimizer,
        OptimizationAction, OptimizationState, QLearningOptimizer, RLOptimizationConfig,
        RLOptimizer,
    };
    pub use crate::result::OptimizeResults;
    pub use crate::roots::{root, Method as RootMethod};
    pub use crate::scalar::{
        minimize_scalar, Method as ScalarMethod, Options as ScalarOptions, ScalarOptimizeResult,
    };
    pub use crate::self_tuning::{
        presets, AdaptationResult, AdaptationStrategy, ParameterChange, ParameterValue,
        PerformanceMetrics, SelfTuningConfig, SelfTuningOptimizer, TunableParameter,
    };
    pub use crate::sparse_numdiff::{sparse_hessian, sparse_jacobian, SparseFiniteDiffOptions};
    pub use crate::streaming::{
        exponentially_weighted_rls, incremental_bfgs, incremental_lbfgs,
        incremental_lbfgs_linear_regression, kalman_filter_estimator, online_gradient_descent,
        online_linear_regression, online_logistic_regression, real_time_linear_regression,
        recursive_least_squares, rolling_window_gradient_descent, rolling_window_least_squares,
        rolling_window_linear_regression, rolling_window_weighted_least_squares,
        streaming_trust_region_linear_regression, streaming_trust_region_logistic_regression,
        IncrementalNewton, IncrementalNewtonMethod, LinearRegressionObjective,
        LogisticRegressionObjective, RealTimeEstimator, RealTimeMethod, RollingWindowOptimizer,
        StreamingConfig, StreamingDataPoint, StreamingObjective, StreamingOptimizer,
        StreamingStats, StreamingTrustRegion,
    };
    pub use crate::unconstrained::{minimize, Bounds, Method as UnconstrainedMethod, Options};
    pub use crate::unified_pipeline::{
        presets as unified_presets, UnifiedOptimizationConfig, UnifiedOptimizationResults,
        UnifiedOptimizer,
    };
    pub use crate::visualization::{
        tracking::TrajectoryTracker, ColorScheme, OptimizationTrajectory, OptimizationVisualizer,
        OutputFormat, VisualizationConfig,
    };
}

#[cfg(test)]
mod tests {
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}