pub fn differential_evolution<F>(
func: &F,
bounds: &[(f64, f64)],
config: DEConfig,
) -> Result<DEReport>
Runs Differential Evolution optimization on a function.
This is a convenience function that mirrors SciPy’s differential_evolution API.
It creates a DE optimizer with the given bounds and configuration, then runs
the optimization to find the global minimum.
§Arguments
- `func` - The objective function to minimize, mapping `&Array1<f64>` to `f64`
- `bounds` - Vector of (lower, upper) bound pairs for each dimension
- `config` - DE configuration (use `DEConfigBuilder` to construct)
§Returns
Returns Ok(DEReport) containing the optimization result on success.
§Errors
Returns DEError::InvalidBounds if any bound pair has upper < lower.
§Example
use math_audio_optimisation::{differential_evolution, DEConfigBuilder};
let config = DEConfigBuilder::new()
.maxiter(50)
.seed(42)
.build()
.expect("invalid config");
let result = differential_evolution(
&|x| x[0].powi(2) + x[1].powi(2),
&[(-5.0, 5.0), (-5.0, 5.0)],
config,
).expect("optimization failed");
assert!(result.fun < 0.01);

Examples found in repository
examples/optde_linear_constraints.rs (line 40)
7fn main() {
8 // Objective: sphere in 2D
9 let sphere = |x: &Array1<f64>| x.iter().map(|v| v * v).sum::<f64>();
10
11 // Bounds
12 let bounds = [(-5.0, 5.0), (-5.0, 5.0)];
13
14 // Linear constraint example: lb <= A x <= ub
15 // 1) x0 + x1 <= 1.0
16 // 2) 0.2 <= x0 - x1 <= 0.4
17 let a = Array2::from_shape_vec((2, 2), vec![1.0, 1.0, 1.0, -1.0]).unwrap();
18 let lb = Array1::from(vec![-f64::INFINITY, 0.2]);
19 let ub = Array1::from(vec![1.0, 0.4]);
20 let lc = LinearConstraintHelper { a, lb, ub };
21
22 // Strategy parsing from string (mirrors SciPy names)
23 let strategy = Strategy::from_str("randtobest1exp").unwrap_or(Strategy::RandToBest1Exp);
24
25 // Build config using the fluent builder
26 let mut cfg = DEConfigBuilder::new()
27 .seed(123)
28 .maxiter(600)
29 .popsize(30)
30 .strategy(strategy)
31 .recombination(0.9)
32 .mutation(Mutation::Range { min: 0.4, max: 1.0 })
33 .crossover(Crossover::Exponential)
34 .build()
35 .expect("popsize must be >= 4");
36
37 // Apply linear constraints with a penalty weight
38 lc.apply_to(&mut cfg, 1e3);
39
40 let rep = differential_evolution(&sphere, &bounds, cfg).expect("optimization failed");
41 println!(
42 "success={} message=\"{}\"\nbest f={:.6e}\nbest x={:?}",
43 rep.success, rep.message, rep.fun, rep.x
44 );
45}

More examples
examples/optde_nonlinear_constraints.rs (line 41)
8fn main() {
9 // Himmelblau as objective, but with nonlinear constraints to demonstrate helper
10 let himmelblau =
11 |x: &Array1<f64>| (x[0] * x[0] + x[1] - 11.0).powi(2) + (x[0] + x[1] * x[1] - 7.0).powi(2);
12
13 // Bounds
14 let bounds = [(-6.0, 6.0), (-6.0, 6.0)];
15
16 // Nonlinear vector function f(x) with 2 components
17 // 1) Circle-ish constraint: x0^2 + x1^2 <= 10 -> f0(x) = x0^2 + x1^2, lb=-inf, ub=10
18 // 2) Sum equality: x0 + x1 = 1 -> f1(x) = x0 + x1, lb=1, ub=1
19 let fun =
20 Arc::new(|x: &Array1<f64>| Array1::from(vec![x[0] * x[0] + x[1] * x[1], x[0] + x[1]]));
21 let lb = Array1::from(vec![-f64::INFINITY, 1.0]);
22 let ub = Array1::from(vec![10.0, 1.0]);
23 let nlc = NonlinearConstraintHelper { fun, lb, ub };
24
25 // Strategy parsing from string
26 let strategy = Strategy::from_str("best1exp").unwrap_or(Strategy::Best1Exp);
27
28 let mut cfg = DEConfigBuilder::new()
29 .seed(456)
30 .maxiter(800)
31 .popsize(30)
32 .strategy(strategy)
33 .recombination(0.9)
34 .crossover(Crossover::Exponential)
35 .build()
36 .expect("popsize must be >= 4");
37
38 // Apply nonlinear constraints with penalties
39 nlc.apply_to(&mut cfg, 1e3, 1e3);
40
41 let rep = differential_evolution(&himmelblau, &bounds, cfg).expect("optimization failed");
42 println!(
43 "success={} message=\"{}\"\nbest f={:.6e}\nbest x={:?}",
44 rep.success, rep.message, rep.fun, rep.x
45 );
46}

examples/optde_basic.rs (line 62)
7fn main() {
8 // Ackley function (2D)
9 let ackley = |x: &Array1<f64>| {
10 let x0 = x[0];
11 let x1 = x[1];
12 let s = 0.5 * (x0 * x0 + x1 * x1);
13 let c = 0.5
14 * ((2.0 * std::f64::consts::PI * x0).cos() + (2.0 * std::f64::consts::PI * x1).cos());
15 -20.0 * (-0.2 * s.sqrt()).exp() - c.exp() + 20.0 + std::f64::consts::E
16 };
17
18 let bounds = [(-5.0, 5.0), (-5.0, 5.0)];
19
20 let mut cfg = DEConfig {
21 maxiter: 300,
22 popsize: 20,
23 strategy: Strategy::Best1Bin,
24 crossover: Crossover::Exponential, // demonstrate exponential crossover
25 mutation: Mutation::Range { min: 0.5, max: 1.0 }, // dithering
26 recombination: 0.9,
27 seed: Some(42),
28 ..Default::default()
29 };
30
31 // Penalty examples (here just a dummy inequality fc(x) <= 0):
32 // Circle of radius 3: x0^2 + x1^2 - 9 <= 0
33 cfg.penalty_ineq.push((
34 Arc::new(|x: &Array1<f64>| x[0] * x[0] + x[1] * x[1] - 9.0),
35 1e3,
36 ));
37
38 // Callback every generation: stop early when convergence small enough
39 let mut iter_log = 0usize;
40 cfg.callback = Some(Box::new(move |inter| {
41 if iter_log.is_multiple_of(25) {
42 eprintln!(
43 "iter {:4} best_f={:.6e} conv(stdE)={:.3e}",
44 inter.iter, inter.fun, inter.convergence
45 );
46 }
47 iter_log += 1;
48 if inter.convergence < 1e-6 {
49 CallbackAction::Stop
50 } else {
51 CallbackAction::Continue
52 }
53 }));
54
55 // Optional polishing with a local optimizer
56 cfg.polish = Some(PolishConfig {
57 enabled: true,
58 algo: "neldermead".into(),
59 maxeval: 400,
60 });
61
62 let report = differential_evolution(&ackley, &bounds, cfg).expect("optimization failed");
63
64 println!(
65 "success={} message=\"{}\"\nbest f={:.6e}\nbest x={:?}",
66 report.success, report.message, report.fun, report.x
67 );
68}

examples/optde_parallel.rs (line 46)
7fn main() {
8 // Rastrigin function with artificial compute delay to simulate expensive evaluations
9 let dimension = 10;
10 let rastrigin = move |x: &Array1<f64>| -> f64 {
11 // Add some compute-intensive work to make parallelization beneficial
12 let mut sum = 0.0;
13 for _ in 0..1000 {
14 for &xi in x.iter() {
15 sum += xi.sin().cos().exp().ln_1p();
16 }
17 }
18
19 // Actual Rastrigin function
20 let a = 10.0;
21 let n = x.len() as f64;
22 let result = a * n
23 + x.iter()
24 .map(|&xi| xi * xi - a * (2.0 * std::f64::consts::PI * xi).cos())
25 .sum::<f64>();
26 result + sum * 1e-10 // Add tiny contribution from expensive computation
27 };
28
29 let bounds: Vec<(f64, f64)> = vec![(-5.12, 5.12); dimension];
30
31 // Test sequential evaluation
32 println!("Testing Sequential Evaluation:");
33 let cfg_seq = DEConfig {
34 maxiter: 100,
35 popsize: 15,
36 strategy: Strategy::Best1Bin,
37 mutation: Mutation::Factor(0.8),
38 recombination: 0.9,
39 seed: Some(42),
40 disp: true,
41 ..Default::default() // parallel.enabled = false by default
42 };
43
44 let start_seq = Instant::now();
45 let report_seq =
46 differential_evolution(&rastrigin, &bounds, cfg_seq).expect("optimization failed");
47 let duration_seq = start_seq.elapsed();
48
49 println!("\nSequential Results:");
50 println!(" Success: {}", report_seq.success);
51 println!(" Best f: {:.6e}", report_seq.fun);
52 println!(" Iterations: {}", report_seq.nit);
53 println!(" Function evaluations: {}", report_seq.nfev);
54 println!(" Time: {:.3} seconds", duration_seq.as_secs_f64());
55
56 // Test parallel evaluation
57 println!("\n\nTesting Parallel Evaluation:");
58 let cfg_par = DEConfig {
59 maxiter: 100,
60 popsize: 15,
61 strategy: Strategy::Best1Bin,
62 mutation: Mutation::Factor(0.8),
63 recombination: 0.9,
64 seed: Some(42),
65 disp: true,
66 parallel: ParallelConfig {
67 enabled: true,
68 num_threads: None, // Use all available cores
69 },
70 ..Default::default()
71 };
72
73 let start_par = Instant::now();
74 let report_par =
75 differential_evolution(&rastrigin, &bounds, cfg_par).expect("optimization failed");
76 let duration_par = start_par.elapsed();
77
78 println!("\nParallel Results:");
79 println!(" Success: {}", report_par.success);
80 println!(" Best f: {:.6e}", report_par.fun);
81 println!(" Iterations: {}", report_par.nit);
82 println!(" Function evaluations: {}", report_par.nfev);
83 println!(" Time: {:.3} seconds", duration_par.as_secs_f64());
84
85 // Compare results
86 println!("\n\nComparison:");
87 println!(
88 " Speedup: {:.2}x",
89 duration_seq.as_secs_f64() / duration_par.as_secs_f64()
90 );
91 println!(
92 " Result difference: {:.6e}",
93 (report_seq.fun - report_par.fun).abs()
94 );
95
96 // Test with different thread counts
97 println!("\n\nTesting with different thread counts:");
98 for num_threads in [1, 2, 4, 8] {
99 let cfg_threads = DEConfig {
100 maxiter: 50,
101 popsize: 15,
102 strategy: Strategy::Best1Bin,
103 mutation: Mutation::Factor(0.8),
104 recombination: 0.9,
105 seed: Some(42),
106 disp: false,
107 parallel: ParallelConfig {
108 enabled: true,
109 num_threads: Some(num_threads),
110 },
111 ..Default::default()
112 };
113
114 let start = Instant::now();
115 let _ =
116 differential_evolution(&rastrigin, &bounds, cfg_threads).expect("optimization failed");
117 let duration = start.elapsed();
118
119 println!(
120 " {} thread(s): {:.3} seconds",
121 num_threads,
122 duration.as_secs_f64()
123 );
124 }
125}

examples/optde_adaptive_demo.rs (line 110)
13fn main() {
14 println!("🧬 Adaptive Differential Evolution Demo");
15 println!("=====================================");
16 println!();
17
18 // Test functions to evaluate
19 let test_functions = [
20 (
21 "Quadratic (f(x) = x₁² + x₂²)",
22 quadratic as fn(&Array1<f64>) -> f64,
23 [(-5.0, 5.0), (-5.0, 5.0)],
24 ),
25 (
26 "Rosenbrock 2D",
27 rosenbrock as fn(&Array1<f64>) -> f64,
28 [(-5.0, 5.0), (-5.0, 5.0)],
29 ),
30 ("Ackley", ackley, [(-32.0, 32.0), (-32.0, 32.0)]),
31 ];
32
33 for (name, func, bounds) in test_functions.iter() {
34 println!("🎯 Function: {}", name);
35 println!(
36 " Bounds: [{:.1}, {:.1}] × [{:.1}, {:.1}]",
37 bounds[0].0, bounds[0].1, bounds[1].0, bounds[1].1
38 );
39
40 // Traditional DE
41 println!(" 📊 Traditional DE:");
42 let traditional_result = run_traditional_de(*func, bounds);
43
44 // Adaptive DE with SAM only
45 println!(" 🧬 Adaptive DE (SAM only):");
46 let sam_result = run_adaptive_de(*func, bounds, false);
47
48 // Adaptive DE with SAM + WLS
49 println!(" 🔧 Adaptive DE (SAM + WLS):");
50 let sam_wls_result = run_adaptive_de(*func, bounds, true);
51
52 // Compare results
53 println!(" 🏆 Comparison:");
54 println!(
55 " Traditional: f = {:.6e}, {} iterations",
56 traditional_result.fun, traditional_result.nit
57 );
58 println!(
59 " SAM only: f = {:.6e}, {} iterations",
60 sam_result.fun, sam_result.nit
61 );
62 println!(
63 " SAM + WLS: f = {:.6e}, {} iterations",
64 sam_wls_result.fun, sam_wls_result.nit
65 );
66
67 let improvement_sam =
68 ((traditional_result.fun - sam_result.fun) / traditional_result.fun * 100.0).max(0.0);
69 let improvement_wls =
70 ((traditional_result.fun - sam_wls_result.fun) / traditional_result.fun * 100.0)
71 .max(0.0);
72
73 println!(" 📈 Improvement with SAM: {:.1}%", improvement_sam);
74 println!(" 📈 Improvement with WLS: {:.1}%", improvement_wls);
75 println!();
76 }
77
78 // Demonstrate parameter adaptation tracking
79 println!("🔄 Parameter Adaptation Demo");
80 println!("===========================");
81
82 // Use a recording callback to track parameter evolution
83 let bounds = [(-5.0, 5.0), (-5.0, 5.0)];
84
85 let adaptive_config = AdaptiveConfig {
86 adaptive_mutation: true,
87 wls_enabled: true,
88 w_max: 0.9, // Start with 90% of population for selection
89 w_min: 0.1, // End with 10% of population
90 w_f: 0.9, // F parameter adaptation rate
91 w_cr: 0.9, // CR parameter adaptation rate
92 f_m: 0.5, // Initial F location parameter
93 cr_m: 0.6, // Initial CR location parameter
94 wls_prob: 0.2, // Apply WLS to 20% of population
95 wls_scale: 0.1, // WLS perturbation scale
96 };
97
98 let config = DEConfigBuilder::new()
99 .seed(42)
100 .maxiter(50)
101 .popsize(40)
102 .strategy(Strategy::AdaptiveBin)
103 .mutation(Mutation::Adaptive { initial_f: 0.8 })
104 .adaptive(adaptive_config)
105 .disp(true) // Enable progress display
106 .build()
107 .expect("popsize must be >= 4");
108
109 println!("Running adaptive DE on Rosenbrock function with progress display...");
110 let result = differential_evolution(&rosenbrock, &bounds, config).expect("optimization failed");
111
112 println!(
113 "Final result: f = {:.6e} at x = [{:.4}, {:.4}]",
114 result.fun, result.x[0], result.x[1]
115 );
116 println!(
117 "Converged in {} iterations with {} function evaluations",
118 result.nit, result.nfev
119 );
120
121 if result.success {
122 println!("✅ Optimization succeeded: {}", result.message);
123 } else {
124 println!("⚠️ Optimization status: {}", result.message);
125 }
126}
127
128fn run_traditional_de(
129 func: fn(&Array1<f64>) -> f64,
130 bounds: &[(f64, f64)],
131) -> math_audio_optimisation::DEReport {
132 let config = DEConfigBuilder::new()
133 .seed(42)
134 .maxiter(100)
135 .popsize(30)
136 .strategy(Strategy::Best1Bin)
137 .mutation(Mutation::Factor(0.8))
138 .recombination(0.7)
139 .build()
140 .expect("popsize must be >= 4");
141
142 differential_evolution(&func, bounds, config).expect("optimization failed")
143}
144
145fn run_adaptive_de(
146 func: fn(&Array1<f64>) -> f64,
147 bounds: &[(f64, f64)],
148 enable_wls: bool,
149) -> math_audio_optimisation::DEReport {
150 let adaptive_config = AdaptiveConfig {
151 adaptive_mutation: true,
152 wls_enabled: enable_wls,
153 w_max: 0.9,
154 w_min: 0.1,
155 wls_prob: 0.15,
156 wls_scale: 0.1,
157 ..AdaptiveConfig::default()
158 };
159
160 let config = DEConfigBuilder::new()
161 .seed(42)
162 .maxiter(100)
163 .popsize(30)
164 .strategy(Strategy::AdaptiveBin)
165 .mutation(Mutation::Adaptive { initial_f: 0.8 })
166 .adaptive(adaptive_config)
167 .build()
168 .expect("popsize must be >= 4");
169
170 differential_evolution(&func, bounds, config).expect("optimization failed")
171}