scirs2_optimize/lib.rs

#![allow(deprecated)]
#![allow(dead_code)]
#![allow(unreachable_patterns)]
#![allow(unused_assignments)]
#![allow(unused_variables)]
#![allow(private_interfaces)]
#![allow(clippy::field_reassign_with_default)]
#![recursion_limit = "512"]
// Allow common mathematical conventions in optimization code
#![allow(clippy::many_single_char_names)] // x, f, g, h, n, m etc. are standard in optimization
#![allow(clippy::similar_names)] // x_pp, x_pm, x_mp, x_mm are standard for finite differences

//! # SciRS2 Optimize - Mathematical Optimization for Rust
//!
//! **scirs2-optimize** provides comprehensive optimization algorithms modeled after SciPy's
//! `optimize` module, offering everything from simple function minimization to complex
//! constrained optimization and global search.
//!
//! ## 🎯 Key Features
//!
//! - **Unconstrained Optimization**: BFGS, CG, Nelder-Mead, Powell
//! - **Constrained Optimization**: SLSQP, Trust-region methods
//! - **Global Optimization**: Differential Evolution, Basin-hopping, Simulated Annealing
//! - **Least Squares**: Levenberg-Marquardt, robust fitting, bounded problems
//! - **Root Finding**: Newton, Brent, Bisection methods
//! - **Scalar Optimization**: Brent, Golden section search
//! - **Bounds Support**: Box constraints for all major algorithms
//!
//! ## 📦 Module Overview
//!
//! | Module | Description | SciPy Equivalent |
//! |--------|-------------|------------------|
//! | [`unconstrained`] | Unconstrained minimization (BFGS, CG, Powell) | `scipy.optimize.minimize` |
//! | [`constrained`] | Constrained optimization (SLSQP, Trust-region) | `scipy.optimize.minimize` with constraints |
//! | [`global`] | Global optimization (DE, Basin-hopping) | `scipy.optimize.differential_evolution` |
//! | [`mod@least_squares`] | Nonlinear least squares (LM, robust methods) | `scipy.optimize.least_squares` |
//! | [`roots`] | Root finding algorithms | `scipy.optimize.root` |
//! | [`scalar`] | 1-D minimization | `scipy.optimize.minimize_scalar` |
//!
//! ## 🚀 Quick Start
//!
//! ### Installation
//!
//! ```toml
//! [dependencies]
//! scirs2-optimize = "0.1.0-beta.4"
//! ```
//!
//! ### Unconstrained Minimization (Rosenbrock Function)
//!
//! ```rust
//! use scirs2_optimize::unconstrained::{minimize, Method};
//! use ndarray::ArrayView1;
//!
//! // Rosenbrock function: (1-x)² + 100(y-x²)²
//! fn rosenbrock(x: &ArrayView1<f64>) -> f64 {
//!     let x0 = x[0];
//!     let x1 = x[1];
//!     (1.0 - x0).powi(2) + 100.0 * (x1 - x0.powi(2)).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let initial_guess = [0.0, 0.0];
//! let result = minimize(rosenbrock, &initial_guess, Method::BFGS, None)?;
//!
//! println!("Minimum at: {:?}", result.x);
//! println!("Function value: {}", result.fun);
//! println!("Converged: {}", result.success);
//! # Ok(())
//! # }
//! ```
//!
//! ### Optimization with Bounds
//!
//! Constrain variables to specific ranges:
//!
//! ```rust
//! use scirs2_optimize::{Bounds, unconstrained::{minimize, Method, Options}};
//! use ndarray::ArrayView1;
//!
//! fn objective(x: &ArrayView1<f64>) -> f64 {
//!     (x[0] + 1.0).powi(2) + (x[1] + 1.0).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Constrain to positive quadrant: x >= 0, y >= 0
//! let bounds = Bounds::new(&[
//!     (Some(0.0), None),  // x >= 0
//!     (Some(0.0), None),  // y >= 0
//! ]);
//!
//! let mut options = Options::default();
//! options.bounds = Some(bounds);
//!
//! let result = minimize(objective, &[0.5, 0.5], Method::Powell, Some(options))?;
//! println!("Constrained minimum: {:?}", result.x);  // [0.0, 0.0]
//! # Ok(())
//! # }
//! ```
//!
//! ### Robust Least Squares
//!
//! Fit data with outliers using robust loss functions:
//!
//! ```rust
//! use scirs2_optimize::least_squares::{robust_least_squares, HuberLoss};
//! use ndarray::{array, Array1};
//!
//! // Linear model residual: y - (a + b*x)
//! fn residual(params: &[f64], data: &[f64]) -> Array1<f64> {
//!     let n = data.len() / 2;
//!     let x = &data[0..n];
//!     let y = &data[n..];
//!
//!     let mut res = Array1::zeros(n);
//!     for i in 0..n {
//!         res[i] = y[i] - (params[0] + params[1] * x[i]);
//!     }
//!     res
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Data: x = [0,1,2,3,4], y = [0.1,0.9,2.1,2.9,10.0] (the last point is an outlier)
//! let data = array![0.,1.,2.,3.,4., 0.1,0.9,2.1,2.9,10.0];
//!
//! let huber = HuberLoss::new(1.0);  // Robust to outliers
//! let x0 = array![0.0, 0.0];
//! let result = robust_least_squares(
//!     residual, &x0, huber, None::<fn(&[f64], &[f64]) -> ndarray::Array2<f64>>, &data, None
//! )?;
//!
//! println!("Robust fit: y = {:.3} + {:.3}x", result.x[0], result.x[1]);
//! # Ok(())
//! # }
//! ```
//!
//! ### Global Optimization
//!
//! Find the global minimum of multi-modal functions:
//!
//! ```rust,no_run
//! use scirs2_optimize::global::{differential_evolution, DifferentialEvolutionOptions};
//! use ndarray::ArrayView1;
//!
//! // Rastrigin function (multiple local minima)
//! fn rastrigin(x: &ArrayView1<f64>) -> f64 {
//!     let n = x.len() as f64;
//!     10.0 * n + x.iter().map(|xi| xi.powi(2) - 10.0 * (2.0 * std::f64::consts::PI * xi).cos()).sum::<f64>()
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let bounds = vec![(-5.12, 5.12); 5];  // 5-dimensional search space
//! let options = Some(DifferentialEvolutionOptions::default());
//!
//! let result = differential_evolution(rastrigin, bounds, options, None)?;
//! println!("Global minimum: {:?}", result.x);
//! # Ok(())
//! # }
//! ```
//!
//! ### Root Finding
//!
//! Solve equations f(x) = 0:
//!
//! ```rust,no_run
//! use scirs2_optimize::roots::{root, Method};
//! use ndarray::{array, Array1};
//!
//! // Find root of x² - 2 = 0 (i.e., √2)
//! fn f(x: &[f64]) -> Array1<f64> {
//!     array![x[0] * x[0] - 2.0]
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let x0 = array![1.5];  // Initial guess
//! let result = root(f, &x0, Method::Hybr, None::<fn(&[f64]) -> ndarray::Array2<f64>>, None)?;
//! println!("√2 ≈ {:.10}", result.x[0]);  // 1.4142135624
//! # Ok(())
//! # }
//! ```
//!
//! ## Submodules
//!
//! * `unconstrained`: Unconstrained optimization algorithms
//! * `constrained`: Constrained optimization algorithms
//! * `least_squares`: Least squares minimization (including robust methods)
//! * `roots`: Root finding algorithms
//! * `scalar`: Scalar (univariate) optimization algorithms
//! * `global`: Global optimization algorithms
//!
//! ## Optimization Methods
//!
//! The following optimization methods are currently implemented:
//!
//! ### Unconstrained:
//! - **Nelder-Mead**: A derivative-free method using a simplex-based approach
//! - **Powell**: Derivative-free method using conjugate directions
//! - **BFGS**: Quasi-Newton method with BFGS update
//! - **CG**: Nonlinear conjugate gradient method
//!
//! ### Constrained:
//! - **SLSQP**: Sequential Least SQuares Programming
//! - **TrustConstr**: Trust-region constrained optimizer
//!
//! ### Scalar (Univariate) Optimization:
//! - **Brent**: Combines parabolic interpolation with golden section search
//! - **Bounded**: Brent's method with bounds constraints
//! - **Golden**: Golden section search
//!
//! ### Global:
//! - **Differential Evolution**: Stochastic global optimization method
//! - **Basin-hopping**: Random perturbations with local minimization
//! - **Dual Annealing**: Generalized simulated annealing combining classical and fast annealing schedules
//! - **Particle Swarm**: Population-based optimization inspired by swarm behavior
//! - **Simulated Annealing**: Probabilistic optimization with a cooling schedule
//!
//! ### Least Squares:
//! - **Levenberg-Marquardt**: Trust-region algorithm for nonlinear least squares
//! - **Trust Region Reflective**: Bounds-constrained least squares
//! - **Robust Least Squares**: M-estimators for outlier-resistant regression
//!   - Huber loss: Reduces the influence of moderate outliers
//!   - Bisquare loss: Completely rejects extreme outliers
//!   - Cauchy loss: Provides very strong outlier resistance
//! - **Weighted Least Squares**: Handles heteroscedastic data (varying variance)
//! - **Bounded Least Squares**: Box constraints on parameters
//! - **Separable Least Squares**: Variable projection for partially linear models
//! - **Total Least Squares**: Errors-in-variables regression
//!
//! ## Bounds Support
//!
//! The `unconstrained` module supports bounds constraints on variables.
//! You can specify lower and upper bounds for each variable, and the optimizer
//! will ensure that all iterates remain within these bounds.
//!
//! The following methods support bounds constraints (see the sketch after this list):
//! - Powell
//! - Nelder-Mead
//! - BFGS
//! - CG (Conjugate Gradient)
//!
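//! As a minimal sketch (reusing the `minimize` call from the Quick Start above, with
//! `Options::bounds` set the same way as in the Powell examples), a gradient-based
//! method such as BFGS can be combined with box bounds like this:
//!
//! ```rust,no_run
//! use scirs2_optimize::{Bounds, unconstrained::{minimize, Method, Options}};
//! use ndarray::ArrayView1;
//!
//! // Unconstrained minimum at (-1, -1); the bounds below push it to (0, 0)
//! fn objective(x: &ArrayView1<f64>) -> f64 {
//!     (x[0] + 1.0).powi(2) + (x[1] + 1.0).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let bounds = Bounds::new(&[(Some(0.0), None), (Some(0.0), None)]);
//! let mut options = Options::default();
//! options.bounds = Some(bounds);
//!
//! // BFGS is one of the bounds-aware methods listed above
//! let result = minimize(objective, &[0.5, 0.5], Method::BFGS, Some(options))?;
//! println!("Bounded BFGS minimum: {:?}", result.x);
//! # Ok(())
//! # }
//! ```
//!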
//! ## Examples
//!
//! ### Basic Optimization
//!
//! ```
//! // Example of minimizing a function using BFGS
//! use ndarray::{array, ArrayView1};
//! use scirs2_optimize::unconstrained::{minimize, Method};
//!
//! fn rosenbrock(x: &ArrayView1<f64>) -> f64 {
//!     let a = 1.0;
//!     let b = 100.0;
//!     let x0 = x[0];
//!     let x1 = x[1];
//!     (a - x0).powi(2) + b * (x1 - x0.powi(2)).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! let initial_guess = [0.0, 0.0];
//! let result = minimize(rosenbrock, &initial_guess, Method::BFGS, None)?;
//!
//! println!("Solution: {:?}", result.x);
//! println!("Function value at solution: {}", result.fun);
//! println!("Number of iterations: {}", result.nit);
//! println!("Success: {}", result.success);
//! # Ok(())
//! # }
//! ```
//!
//! ### Optimization with Bounds
//!
//! ```
//! // Example of minimizing a function with bounds constraints
//! use ndarray::{array, ArrayView1};
//! use scirs2_optimize::{Bounds, unconstrained::{minimize, Method, Options}};
//!
//! // A function with its unconstrained minimum at (-1, -1)
//! fn func(x: &ArrayView1<f64>) -> f64 {
//!     (x[0] + 1.0).powi(2) + (x[1] + 1.0).powi(2)
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Create bounds: x >= 0, y >= 0
//! // This constrains the optimization to the positive quadrant
//! let bounds = Bounds::new(&[(Some(0.0), None), (Some(0.0), None)]);
//!
//! let initial_guess = [0.5, 0.5];
//! let mut options = Options::default();
//! options.bounds = Some(bounds);
//!
//! // Use Powell's method, which supports bounds
//! let result = minimize(func, &initial_guess, Method::Powell, Some(options))?;
//!
//! // The constrained minimum should be at [0, 0] with value 2.0
//! println!("Solution: {:?}", result.x);
//! println!("Function value at solution: {}", result.fun);
//! # Ok(())
//! # }
//! ```
//!
//! ### Bounds Creation Options
//!
//! ```
//! use scirs2_optimize::Bounds;
//!
//! // Create bounds from pairs
//! // Format: [(min_x1, max_x1), (min_x2, max_x2), ...] where None = unbounded
//! let bounds1 = Bounds::new(&[
//!     (Some(0.0), Some(1.0)),  // 0 <= x[0] <= 1
//!     (Some(-1.0), None),      // x[1] >= -1, no upper bound
//!     (None, Some(10.0)),      // x[2] <= 10, no lower bound
//!     (None, None)             // x[3] is completely unbounded
//! ]);
//!
//! // Alternative: create from separate lower and upper bound vectors
//! let lb = vec![Some(0.0), Some(-1.0), None, None];
//! let ub = vec![Some(1.0), None, Some(10.0), None];
//! let bounds2 = Bounds::from_vecs(lb, ub).unwrap();
//! ```
//!
//! ### Robust Least Squares Example
//!
//! ```
//! use ndarray::{array, Array1, Array2};
//! use scirs2_optimize::least_squares::{robust_least_squares, HuberLoss};
//!
//! // Define residual function for linear regression
//! fn residual(params: &[f64], data: &[f64]) -> Array1<f64> {
//!     let n = data.len() / 2;
//!     let x_vals = &data[0..n];
//!     let y_vals = &data[n..];
//!
//!     let mut res = Array1::zeros(n);
//!     for i in 0..n {
//!         res[i] = y_vals[i] - (params[0] + params[1] * x_vals[i]);
//!     }
//!     res
//! }
//!
//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
//! // Data with outliers
//! let data = array![0., 1., 2., 3., 4., 0.1, 0.9, 2.1, 2.9, 10.0];
//! let x0 = array![0.0, 0.0];
//!
//! // Use Huber loss for robustness
//! let huber_loss = HuberLoss::new(1.0);
//! let result = robust_least_squares(
//!     residual,
//!     &x0,
//!     huber_loss,
//!     None::<fn(&[f64], &[f64]) -> Array2<f64>>,
//!     &data,
//!     None
//! )?;
//!
//! println!("Robust solution: intercept={:.3}, slope={:.3}",
//!          result.x[0], result.x[1]);
//! # Ok(())
//! # }
//! ```

// BLAS backend linking handled through scirs2-core

// Export error types
pub mod error;
pub use error::{OptimizeError, OptimizeResult};

// Module structure
pub mod advanced_coordinator;
#[cfg(feature = "async")]
pub mod async_parallel;
pub mod automatic_differentiation;
pub mod benchmarking;
pub mod constrained;
pub mod distributed;
pub mod distributed_gpu;
pub mod global;
pub mod gpu;
pub mod jit_optimization;
pub mod learned_optimizers;
pub mod least_squares;
pub mod ml_optimizers;
pub mod multi_objective;
pub mod neural_integration;
pub mod neuromorphic;
pub mod parallel;
pub mod quantum_inspired;
pub mod reinforcement_learning;
pub mod roots;
pub mod roots_anderson;
pub mod roots_krylov;
pub mod scalar;
pub mod self_tuning;
pub mod simd_ops;
pub mod sparse_numdiff; // Refactored into a module with submodules
pub mod stochastic;
pub mod streaming;
pub mod unconstrained;
pub mod unified_pipeline;
pub mod visualization;

// Common optimization result structure
pub mod result;
pub use result::OptimizeResults;

// Convenience re-exports for common functions
pub use advanced_coordinator::{
    advanced_optimize, AdvancedConfig, AdvancedCoordinator, AdvancedStats, AdvancedStrategy,
    StrategyPerformance,
};
#[cfg(feature = "async")]
pub use async_parallel::{
    AsyncDifferentialEvolution, AsyncOptimizationConfig, AsyncOptimizationStats,
    SlowEvaluationStrategy,
};
pub use automatic_differentiation::{
    autodiff, create_ad_gradient, create_ad_hessian, optimize_ad_mode, ADMode, ADResult,
    AutoDiffFunction, AutoDiffOptions,
};
pub use benchmarking::{
    benchmark_suites, test_functions, AlgorithmRanking, BenchmarkConfig, BenchmarkResults,
    BenchmarkRun, BenchmarkSummary, BenchmarkSystem, ProblemCharacteristics, RuntimeStats,
    TestProblem,
};
pub use constrained::minimize_constrained;
pub use distributed::{
    algorithms::{DistributedDifferentialEvolution, DistributedParticleSwarm},
    DistributedConfig, DistributedOptimizationContext, DistributedStats, DistributionStrategy,
    MPIInterface, WorkAssignment,
};
pub use distributed_gpu::{
    DistributedGpuConfig, DistributedGpuOptimizer, DistributedGpuResults, DistributedGpuStats,
    GpuCommunicationStrategy, IterationStats,
};
pub use global::{
    basinhopping, bayesian_optimization, differential_evolution, dual_annealing,
    generate_diverse_start_points, multi_start, multi_start_with_clustering, particle_swarm,
    simulated_annealing,
};
pub use gpu::{
    acceleration::{
        AccelerationConfig, AccelerationManager, AccelerationStrategy, PerformanceStats,
    },
    algorithms::{GpuDifferentialEvolution, GpuParticleSwarm},
    GpuFunction, GpuOptimizationConfig, GpuOptimizationContext, GpuPrecision,
};
pub use jit_optimization::{optimize_function, FunctionPattern, JitCompiler, JitOptions, JitStats};
pub use learned_optimizers::{
    learned_optimize, ActivationType, AdaptationStatistics, AdaptiveNASSystem,
    AdaptiveTransformerOptimizer, FewShotLearningOptimizer, LearnedHyperparameterTuner,
    LearnedOptimizationConfig, LearnedOptimizer, MetaOptimizerState, NeuralAdaptiveOptimizer,
    OptimizationNetwork, OptimizationProblem, ParameterDistribution, ProblemEncoder, TrainingTask,
};
pub use least_squares::{
    bounded_least_squares, least_squares, robust_least_squares, separable_least_squares,
    total_least_squares, weighted_least_squares, BisquareLoss, CauchyLoss, HuberLoss,
};
pub use ml_optimizers::{
    ml_problems, ADMMOptimizer, CoordinateDescentOptimizer, ElasticNetOptimizer,
    GroupLassoOptimizer, LassoOptimizer,
};
pub use multi_objective::{
    MultiObjectiveConfig, MultiObjectiveResult, MultiObjectiveSolution, NSGAII, NSGAIII,
};
pub use neural_integration::{optimizers, NeuralOptimizer, NeuralParameters, NeuralTrainer};
pub use neuromorphic::{
    neuromorphic_optimize, BasicNeuromorphicOptimizer, NeuromorphicConfig, NeuromorphicNetwork,
    NeuromorphicOptimizer, NeuronState, SpikeEvent,
};
pub use quantum_inspired::{
    quantum_optimize, quantum_particle_swarm_optimize, Complex, CoolingSchedule,
    QuantumAnnealingSchedule, QuantumInspiredOptimizer, QuantumOptimizationStats, QuantumState,
};
pub use reinforcement_learning::{
    actor_critic_optimize, bandit_optimize, evolutionary_optimize, meta_learning_optimize,
    policy_gradient_optimize, BanditOptimizer, EvolutionaryStrategy, Experience,
    MetaLearningOptimizer, OptimizationAction, OptimizationState, QLearningOptimizer,
    RLOptimizationConfig, RLOptimizer,
};
pub use roots::root;
pub use scalar::minimize_scalar;
pub use self_tuning::{
    presets, AdaptationResult, AdaptationStrategy, ParameterChange, ParameterValue,
    PerformanceMetrics, SelfTuningConfig, SelfTuningOptimizer, TunableParameter,
};
pub use sparse_numdiff::{sparse_hessian, sparse_jacobian, SparseFiniteDiffOptions};
pub use stochastic::{
    minimize_adam, minimize_adamw, minimize_rmsprop, minimize_sgd, minimize_sgd_momentum,
    minimize_stochastic, AdamOptions, AdamWOptions, DataProvider, InMemoryDataProvider,
    LearningRateSchedule, MomentumOptions, RMSPropOptions, SGDOptions, StochasticGradientFunction,
    StochasticMethod, StochasticOptions,
};
pub use streaming::{
    exponentially_weighted_rls, incremental_bfgs, incremental_lbfgs,
    incremental_lbfgs_linear_regression, kalman_filter_estimator, online_gradient_descent,
    online_linear_regression, online_logistic_regression, real_time_linear_regression,
    recursive_least_squares, rolling_window_gradient_descent, rolling_window_least_squares,
    rolling_window_linear_regression, rolling_window_weighted_least_squares,
    streaming_trust_region_linear_regression, streaming_trust_region_logistic_regression,
    IncrementalNewton, IncrementalNewtonMethod, LinearRegressionObjective,
    LogisticRegressionObjective, RealTimeEstimator, RealTimeMethod, RollingWindowOptimizer,
    StreamingConfig, StreamingDataPoint, StreamingObjective, StreamingOptimizer, StreamingStats,
    StreamingTrustRegion,
};
pub use unconstrained::{minimize, Bounds};
pub use unified_pipeline::{
    presets as unified_presets, UnifiedOptimizationConfig, UnifiedOptimizationResults,
    UnifiedOptimizer,
};
pub use visualization::{
    tracking::TrajectoryTracker, ColorScheme, OptimizationTrajectory, OptimizationVisualizer,
    OutputFormat, VisualizationConfig,
};

// Prelude module for convenient imports
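/// Convenient single-import access to the most commonly used types and functions.
///
/// A minimal usage sketch, assuming the re-exports listed in this module (note that
/// `unconstrained::Method` is re-exported here as `UnconstrainedMethod`):
///
/// ```rust
/// use scirs2_optimize::prelude::*;
/// use ndarray::ArrayView1;
///
/// // Simple convex objective with its minimum at (1, 2)
/// fn objective(x: &ArrayView1<f64>) -> f64 {
///     (x[0] - 1.0).powi(2) + (x[1] - 2.0).powi(2)
/// }
///
/// # fn main() -> Result<(), Box<dyn std::error::Error>> {
/// let result = minimize(objective, &[0.0, 0.0], UnconstrainedMethod::BFGS, None)?;
/// println!("Minimum at: {:?}", result.x);
/// # Ok(())
/// # }
/// ```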
pub mod prelude {
    pub use crate::advanced_coordinator::{
        advanced_optimize, AdvancedConfig, AdvancedCoordinator, AdvancedStats, AdvancedStrategy,
        StrategyPerformance,
    };
    #[cfg(feature = "async")]
    pub use crate::async_parallel::{
        AsyncDifferentialEvolution, AsyncOptimizationConfig, AsyncOptimizationStats,
        SlowEvaluationStrategy,
    };
    pub use crate::automatic_differentiation::{
        autodiff, create_ad_gradient, create_ad_hessian, optimize_ad_mode, ADMode, ADResult,
        AutoDiffFunction, AutoDiffOptions, Dual, DualNumber,
    };
    pub use crate::benchmarking::{
        benchmark_suites, test_functions, AlgorithmRanking, BenchmarkConfig, BenchmarkResults,
        BenchmarkRun, BenchmarkSummary, BenchmarkSystem, ProblemCharacteristics, RuntimeStats,
        TestProblem,
    };
    pub use crate::constrained::{minimize_constrained, Method as ConstrainedMethod};
    pub use crate::distributed::{
        algorithms::{DistributedDifferentialEvolution, DistributedParticleSwarm},
        DistributedConfig, DistributedOptimizationContext, DistributedStats, DistributionStrategy,
        MPIInterface, WorkAssignment,
    };
    pub use crate::distributed_gpu::{
        DistributedGpuConfig, DistributedGpuOptimizer, DistributedGpuResults, DistributedGpuStats,
        GpuCommunicationStrategy, IterationStats,
    };
    pub use crate::error::{OptimizeError, OptimizeResult};
    pub use crate::global::{
        basinhopping, bayesian_optimization, differential_evolution, dual_annealing,
        generate_diverse_start_points, multi_start_with_clustering, particle_swarm,
        simulated_annealing, AcquisitionFunctionType, BasinHoppingOptions,
        BayesianOptimizationOptions, BayesianOptimizer, ClusterCentroid, ClusteringAlgorithm,
        ClusteringOptions, ClusteringResult, DifferentialEvolutionOptions, DualAnnealingOptions,
        InitialPointGenerator, KernelType, LocalMinimum, Parameter, ParticleSwarmOptions,
        SimulatedAnnealingOptions, Space, StartPointStrategy,
    };
    pub use crate::gpu::{
        acceleration::{
            AccelerationConfig, AccelerationManager, AccelerationStrategy, PerformanceStats,
        },
        algorithms::{GpuDifferentialEvolution, GpuParticleSwarm},
        GpuFunction, GpuOptimizationConfig, GpuOptimizationContext, GpuPrecision,
    };
    pub use crate::jit_optimization::{
        optimize_function, FunctionPattern, JitCompiler, JitOptions, JitStats,
    };
    pub use crate::learned_optimizers::{
        learned_optimize, ActivationType, AdaptationStatistics, AdaptiveNASSystem,
        AdaptiveTransformerOptimizer, FewShotLearningOptimizer, LearnedHyperparameterTuner,
        LearnedOptimizationConfig, LearnedOptimizer, MetaOptimizerState, NeuralAdaptiveOptimizer,
        OptimizationNetwork, OptimizationProblem, ParameterDistribution, ProblemEncoder,
        TrainingTask,
    };
    pub use crate::least_squares::{
        bounded_least_squares, least_squares, robust_least_squares, separable_least_squares,
        total_least_squares, weighted_least_squares, BisquareLoss, BoundedOptions, CauchyLoss,
        HuberLoss, LinearSolver, Method as LeastSquaresMethod, RobustLoss, RobustOptions,
        SeparableOptions, SeparableResult, TLSMethod, TotalLeastSquaresOptions,
        TotalLeastSquaresResult, WeightedOptions,
    };
    pub use crate::ml_optimizers::{
        ml_problems, ADMMOptimizer, CoordinateDescentOptimizer, ElasticNetOptimizer,
        GroupLassoOptimizer, LassoOptimizer,
    };
    pub use crate::multi_objective::{
        MultiObjectiveConfig, MultiObjectiveResult, MultiObjectiveSolution, NSGAII, NSGAIII,
    };
    pub use crate::neural_integration::{
        optimizers, NeuralOptimizer, NeuralParameters, NeuralTrainer,
    };
    pub use crate::neuromorphic::{
        neuromorphic_optimize, BasicNeuromorphicOptimizer, NeuromorphicConfig, NeuromorphicNetwork,
        NeuromorphicOptimizer, NeuronState, SpikeEvent,
    };
    pub use crate::parallel::{
        parallel_evaluate_batch, parallel_finite_diff_gradient, ParallelOptions,
    };
    pub use crate::quantum_inspired::{
        quantum_optimize, quantum_particle_swarm_optimize, Complex, CoolingSchedule,
        QuantumAnnealingSchedule, QuantumInspiredOptimizer, QuantumOptimizationStats, QuantumState,
    };
    pub use crate::reinforcement_learning::{
        bandit_optimize, evolutionary_optimize, meta_learning_optimize, policy_gradient_optimize,
        BanditOptimizer, EvolutionaryStrategy, Experience, MetaLearningOptimizer,
        OptimizationAction, OptimizationState, QLearningOptimizer, RLOptimizationConfig,
        RLOptimizer,
    };
    pub use crate::result::OptimizeResults;
    pub use crate::roots::{root, Method as RootMethod};
    pub use crate::scalar::{
        minimize_scalar, Method as ScalarMethod, Options as ScalarOptions, ScalarOptimizeResult,
    };
    pub use crate::self_tuning::{
        presets, AdaptationResult, AdaptationStrategy, ParameterChange, ParameterValue,
        PerformanceMetrics, SelfTuningConfig, SelfTuningOptimizer, TunableParameter,
    };
    pub use crate::sparse_numdiff::{sparse_hessian, sparse_jacobian, SparseFiniteDiffOptions};
    pub use crate::streaming::{
        exponentially_weighted_rls, incremental_bfgs, incremental_lbfgs,
        incremental_lbfgs_linear_regression, kalman_filter_estimator, online_gradient_descent,
        online_linear_regression, online_logistic_regression, real_time_linear_regression,
        recursive_least_squares, rolling_window_gradient_descent, rolling_window_least_squares,
        rolling_window_linear_regression, rolling_window_weighted_least_squares,
        streaming_trust_region_linear_regression, streaming_trust_region_logistic_regression,
        IncrementalNewton, IncrementalNewtonMethod, LinearRegressionObjective,
        LogisticRegressionObjective, RealTimeEstimator, RealTimeMethod, RollingWindowOptimizer,
        StreamingConfig, StreamingDataPoint, StreamingObjective, StreamingOptimizer,
        StreamingStats, StreamingTrustRegion,
    };
    pub use crate::unconstrained::{minimize, Bounds, Method as UnconstrainedMethod, Options};
    pub use crate::unified_pipeline::{
        presets as unified_presets, UnifiedOptimizationConfig, UnifiedOptimizationResults,
        UnifiedOptimizer,
    };
    pub use crate::visualization::{
        tracking::TrajectoryTracker, ColorScheme, OptimizationTrajectory, OptimizationVisualizer,
        OutputFormat, VisualizationConfig,
    };
}

#[cfg(test)]
mod tests {
    #[test]
    fn it_works() {
        assert_eq!(2 + 2, 4);
    }
}