Skip to main content

differential_evolution

Function differential_evolution 

Source
pub fn differential_evolution<F>(
    func: &F,
    bounds: &[(f64, f64)],
    config: DEConfig,
) -> Result<DEReport>
where F: Fn(&Array1<f64>) -> f64 + Sync,
Expand description

Runs Differential Evolution optimization on a function.

This is a convenience function that mirrors SciPy’s differential_evolution API. It creates a DE optimizer with the given bounds and configuration, then runs the optimization to find the global minimum.

§Arguments

  • func - The objective function to minimize, mapping &Array1<f64> to f64
  • bounds - Vector of (lower, upper) bound pairs for each dimension
  • config - DE configuration (use DEConfigBuilder to construct)

§Returns

Returns Ok(DEReport) containing the optimization result on success.

§Errors

Returns DEError::InvalidBounds if any bound pair has upper < lower.

§Example

use math_audio_optimisation::{differential_evolution, DEConfigBuilder};

let config = DEConfigBuilder::new()
    .maxiter(50)
    .seed(42)
    .build()
    .expect("invalid config");

let result = differential_evolution(
    &|x| x[0].powi(2) + x[1].powi(2),
    &[(-5.0, 5.0), (-5.0, 5.0)],
    config,
).expect("optimization failed");

assert!(result.fun < 0.01);
Examples found in repository
examples/optde_linear_constraints.rs (line 40)
fn main() {
    // Objective: sphere in 2D
    let sphere = |x: &Array1<f64>| x.iter().map(|v| v * v).sum::<f64>();

    // Bounds
    let bounds = [(-5.0, 5.0), (-5.0, 5.0)];

    // Linear constraint example: lb <= A x <= ub
    // 1) x0 + x1 <= 1.0
    // 2) 0.2 <= x0 - x1 <= 0.4
    let a = Array2::from_shape_vec((2, 2), vec![1.0, 1.0, 1.0, -1.0]).unwrap();
    let lb = Array1::from(vec![-f64::INFINITY, 0.2]);
    let ub = Array1::from(vec![1.0, 0.4]);
    let lc = LinearConstraintHelper { a, lb, ub };

    // Strategy parsing from string (mirrors SciPy names)
    let strategy = Strategy::from_str("randtobest1exp").unwrap_or(Strategy::RandToBest1Exp);

    // Build config using the fluent builder
    let mut cfg = DEConfigBuilder::new()
        .seed(123)
        .maxiter(600)
        .popsize(30)
        .strategy(strategy)
        .recombination(0.9)
        .mutation(Mutation::Range { min: 0.4, max: 1.0 })
        .crossover(Crossover::Exponential)
        .build()
        .expect("popsize must be >= 4");

    // Apply linear constraints with a penalty weight
    lc.apply_to(&mut cfg, 1e3);

    let rep = differential_evolution(&sphere, &bounds, cfg).expect("optimization failed");
    println!(
        "success={} message=\"{}\"\nbest f={:.6e}\nbest x={:?}",
        rep.success, rep.message, rep.fun, rep.x
    );
}
More examples
Hide additional examples
examples/optde_nonlinear_constraints.rs (line 41)
fn main() {
    // Himmelblau as objective, but with nonlinear constraints to demonstrate helper
    let himmelblau =
        |x: &Array1<f64>| (x[0] * x[0] + x[1] - 11.0).powi(2) + (x[0] + x[1] * x[1] - 7.0).powi(2);

    // Bounds
    let bounds = [(-6.0, 6.0), (-6.0, 6.0)];

    // Nonlinear vector function f(x) with 2 components
    // 1) Circle-ish constraint: x0^2 + x1^2 <= 10  -> f0(x) = x0^2 + x1^2,  lb=-inf, ub=10
    // 2) Sum equality: x0 + x1 = 1  -> f1(x) = x0 + x1,  lb=1, ub=1
    let fun =
        Arc::new(|x: &Array1<f64>| Array1::from(vec![x[0] * x[0] + x[1] * x[1], x[0] + x[1]]));
    let lb = Array1::from(vec![-f64::INFINITY, 1.0]);
    let ub = Array1::from(vec![10.0, 1.0]);
    let nlc = NonlinearConstraintHelper { fun, lb, ub };

    // Strategy parsing from string
    let strategy = Strategy::from_str("best1exp").unwrap_or(Strategy::Best1Exp);

    let mut cfg = DEConfigBuilder::new()
        .seed(456)
        .maxiter(800)
        .popsize(30)
        .strategy(strategy)
        .recombination(0.9)
        .crossover(Crossover::Exponential)
        .build()
        .expect("popsize must be >= 4");

    // Apply nonlinear constraints with penalties
    nlc.apply_to(&mut cfg, 1e3, 1e3);

    let rep = differential_evolution(&himmelblau, &bounds, cfg).expect("optimization failed");
    println!(
        "success={} message=\"{}\"\nbest f={:.6e}\nbest x={:?}",
        rep.success, rep.message, rep.fun, rep.x
    );
}
examples/optde_basic.rs (line 60)
fn main() {
    // Ackley function (2D)
    let ackley = |x: &Array1<f64>| {
        let x0 = x[0];
        let x1 = x[1];
        let s = 0.5 * (x0 * x0 + x1 * x1);
        let c = 0.5
            * ((2.0 * std::f64::consts::PI * x0).cos() + (2.0 * std::f64::consts::PI * x1).cos());
        -20.0 * (-0.2 * s.sqrt()).exp() - c.exp() + 20.0 + std::f64::consts::E
    };

    let bounds = [(-5.0, 5.0), (-5.0, 5.0)];

    let mut cfg = DEConfig::default();
    cfg.maxiter = 300;
    cfg.popsize = 20;
    cfg.strategy = Strategy::Best1Bin;
    cfg.crossover = Crossover::Exponential; // demonstrate exponential crossover
    cfg.mutation = Mutation::Range { min: 0.5, max: 1.0 }; // dithering
    cfg.recombination = 0.9;
    cfg.seed = Some(42);

    // Penalty examples (here just a dummy inequality fc(x) <= 0):
    // Circle of radius 3: x0^2 + x1^2 - 9 <= 0
    cfg.penalty_ineq.push((
        Arc::new(|x: &Array1<f64>| x[0] * x[0] + x[1] * x[1] - 9.0),
        1e3,
    ));

    // Callback every generation: stop early when convergence small enough
    let mut iter_log = 0usize;
    cfg.callback = Some(Box::new(move |inter| {
        if iter_log % 25 == 0 {
            eprintln!(
                "iter {:4}  best_f={:.6e}  conv(stdE)={:.3e}",
                inter.iter, inter.fun, inter.convergence
            );
        }
        iter_log += 1;
        if inter.convergence < 1e-6 {
            CallbackAction::Stop
        } else {
            CallbackAction::Continue
        }
    }));

    // Optional polishing with a local optimizer
    cfg.polish = Some(PolishConfig {
        enabled: true,
        algo: "neldermead".into(),
        maxeval: 400,
    });

    let report = differential_evolution(&ackley, &bounds, cfg).expect("optimization failed");

    println!(
        "success={} message=\"{}\"\nbest f={:.6e}\nbest x={:?}",
        report.success, report.message, report.fun, report.x
    );
}
examples/optde_parallel.rs (line 45)
fn main() {
    // Rastrigin function with artificial compute delay to simulate expensive evaluations
    let dimension = 10;
    let rastrigin = move |x: &Array1<f64>| -> f64 {
        // Add some compute-intensive work to make parallelization beneficial
        let mut sum = 0.0;
        for _ in 0..1000 {
            for &xi in x.iter() {
                sum += xi.sin().cos().exp().ln_1p();
            }
        }

        // Actual Rastrigin function
        let a = 10.0;
        let n = x.len() as f64;
        let result = a * n
            + x.iter()
                .map(|&xi| xi * xi - a * (2.0 * std::f64::consts::PI * xi).cos())
                .sum::<f64>();
        result + sum * 1e-10 // Add tiny contribution from expensive computation
    };

    let bounds: Vec<(f64, f64)> = vec![(-5.12, 5.12); dimension];

    // Test sequential evaluation
    println!("Testing Sequential Evaluation:");
    let mut cfg_seq = DEConfig::default();
    cfg_seq.maxiter = 100;
    cfg_seq.popsize = 15;
    cfg_seq.strategy = Strategy::Best1Bin;
    cfg_seq.mutation = Mutation::Factor(0.8);
    cfg_seq.recombination = 0.9;
    cfg_seq.seed = Some(42);
    cfg_seq.disp = true;
    cfg_seq.parallel.enabled = false; // Disable parallel evaluation

    let start_seq = Instant::now();
    let report_seq =
        differential_evolution(&rastrigin, &bounds, cfg_seq).expect("optimization failed");
    let duration_seq = start_seq.elapsed();

    println!("\nSequential Results:");
    println!("  Success: {}", report_seq.success);
    println!("  Best f: {:.6e}", report_seq.fun);
    println!("  Iterations: {}", report_seq.nit);
    println!("  Function evaluations: {}", report_seq.nfev);
    println!("  Time: {:.3} seconds", duration_seq.as_secs_f64());

    // Test parallel evaluation
    println!("\n\nTesting Parallel Evaluation:");
    let mut cfg_par = DEConfig::default();
    cfg_par.maxiter = 100;
    cfg_par.popsize = 15;
    cfg_par.strategy = Strategy::Best1Bin;
    cfg_par.mutation = Mutation::Factor(0.8);
    cfg_par.recombination = 0.9;
    cfg_par.seed = Some(42);
    cfg_par.disp = true;
    cfg_par.parallel = ParallelConfig {
        enabled: true,
        num_threads: None, // Use all available cores
    };

    let start_par = Instant::now();
    let report_par =
        differential_evolution(&rastrigin, &bounds, cfg_par).expect("optimization failed");
    let duration_par = start_par.elapsed();

    println!("\nParallel Results:");
    println!("  Success: {}", report_par.success);
    println!("  Best f: {:.6e}", report_par.fun);
    println!("  Iterations: {}", report_par.nit);
    println!("  Function evaluations: {}", report_par.nfev);
    println!("  Time: {:.3} seconds", duration_par.as_secs_f64());

    // Compare results
    println!("\n\nComparison:");
    println!(
        "  Speedup: {:.2}x",
        duration_seq.as_secs_f64() / duration_par.as_secs_f64()
    );
    println!(
        "  Result difference: {:.6e}",
        (report_seq.fun - report_par.fun).abs()
    );

    // Test with different thread counts
    println!("\n\nTesting with different thread counts:");
    for num_threads in [1, 2, 4, 8] {
        let mut cfg_threads = DEConfig::default();
        cfg_threads.maxiter = 50;
        cfg_threads.popsize = 15;
        cfg_threads.strategy = Strategy::Best1Bin;
        cfg_threads.mutation = Mutation::Factor(0.8);
        cfg_threads.recombination = 0.9;
        cfg_threads.seed = Some(42);
        cfg_threads.disp = false;
        cfg_threads.parallel = ParallelConfig {
            enabled: true,
            num_threads: Some(num_threads),
        };

        let start = Instant::now();
        let _ =
            differential_evolution(&rastrigin, &bounds, cfg_threads).expect("optimization failed");
        let duration = start.elapsed();

        println!(
            "  {} thread(s): {:.3} seconds",
            num_threads,
            duration.as_secs_f64()
        );
    }
}
examples/optde_adaptive_demo.rs (line 110)
fn main() {
    println!("🧬 Adaptive Differential Evolution Demo");
    println!("=====================================");
    println!();

    // Test functions to evaluate
    let test_functions = [
        (
            "Quadratic (f(x) = x₁² + x₂²)",
            quadratic as fn(&Array1<f64>) -> f64,
            [(-5.0, 5.0), (-5.0, 5.0)],
        ),
        (
            "Rosenbrock 2D",
            rosenbrock as fn(&Array1<f64>) -> f64,
            [(-5.0, 5.0), (-5.0, 5.0)],
        ),
        ("Ackley", ackley, [(-32.0, 32.0), (-32.0, 32.0)]),
    ];

    for (name, func, bounds) in test_functions.iter() {
        println!("🎯 Function: {}", name);
        println!(
            "   Bounds: [{:.1}, {:.1}] × [{:.1}, {:.1}]",
            bounds[0].0, bounds[0].1, bounds[1].0, bounds[1].1
        );

        // Traditional DE
        println!("   📊 Traditional DE:");
        let traditional_result = run_traditional_de(*func, bounds);

        // Adaptive DE with SAM only
        println!("   🧬 Adaptive DE (SAM only):");
        let sam_result = run_adaptive_de(*func, bounds, false);

        // Adaptive DE with SAM + WLS
        println!("   🔧 Adaptive DE (SAM + WLS):");
        let sam_wls_result = run_adaptive_de(*func, bounds, true);

        // Compare results
        println!("   🏆 Comparison:");
        println!(
            "      Traditional: f = {:.6e}, {} iterations",
            traditional_result.fun, traditional_result.nit
        );
        println!(
            "      SAM only:    f = {:.6e}, {} iterations",
            sam_result.fun, sam_result.nit
        );
        println!(
            "      SAM + WLS:   f = {:.6e}, {} iterations",
            sam_wls_result.fun, sam_wls_result.nit
        );

        let improvement_sam =
            ((traditional_result.fun - sam_result.fun) / traditional_result.fun * 100.0).max(0.0);
        let improvement_wls =
            ((traditional_result.fun - sam_wls_result.fun) / traditional_result.fun * 100.0)
                .max(0.0);

        println!("      📈 Improvement with SAM: {:.1}%", improvement_sam);
        println!("      📈 Improvement with WLS: {:.1}%", improvement_wls);
        println!();
    }

    // Demonstrate parameter adaptation tracking
    println!("🔄 Parameter Adaptation Demo");
    println!("===========================");

    // Use a recording callback to track parameter evolution
    let bounds = [(-5.0, 5.0), (-5.0, 5.0)];

    let adaptive_config = AdaptiveConfig {
        adaptive_mutation: true,
        wls_enabled: true,
        w_max: 0.9,     // Start with 90% of population for selection
        w_min: 0.1,     // End with 10% of population
        w_f: 0.9,       // F parameter adaptation rate
        w_cr: 0.9,      // CR parameter adaptation rate
        f_m: 0.5,       // Initial F location parameter
        cr_m: 0.6,      // Initial CR location parameter
        wls_prob: 0.2,  // Apply WLS to 20% of population
        wls_scale: 0.1, // WLS perturbation scale
    };

    let config = DEConfigBuilder::new()
        .seed(42)
        .maxiter(50)
        .popsize(40)
        .strategy(Strategy::AdaptiveBin)
        .mutation(Mutation::Adaptive { initial_f: 0.8 })
        .adaptive(adaptive_config)
        .disp(true) // Enable progress display
        .build()
        .expect("popsize must be >= 4");

    println!("Running adaptive DE on Rosenbrock function with progress display...");
    let result = differential_evolution(&rosenbrock, &bounds, config).expect("optimization failed");

    println!(
        "Final result: f = {:.6e} at x = [{:.4}, {:.4}]",
        result.fun, result.x[0], result.x[1]
    );
    println!(
        "Converged in {} iterations with {} function evaluations",
        result.nit, result.nfev
    );

    if result.success {
        println!("✅ Optimization succeeded: {}", result.message);
    } else {
        println!("⚠️ Optimization status: {}", result.message);
    }
}

fn run_traditional_de(
    func: fn(&Array1<f64>) -> f64,
    bounds: &[(f64, f64)],
) -> math_audio_optimisation::DEReport {
    let config = DEConfigBuilder::new()
        .seed(42)
        .maxiter(100)
        .popsize(30)
        .strategy(Strategy::Best1Bin)
        .mutation(Mutation::Factor(0.8))
        .recombination(0.7)
        .build()
        .expect("popsize must be >= 4");

    differential_evolution(&func, bounds, config).expect("optimization failed")
}

fn run_adaptive_de(
    func: fn(&Array1<f64>) -> f64,
    bounds: &[(f64, f64)],
    enable_wls: bool,
) -> math_audio_optimisation::DEReport {
    let adaptive_config = AdaptiveConfig {
        adaptive_mutation: true,
        wls_enabled: enable_wls,
        w_max: 0.9,
        w_min: 0.1,
        wls_prob: 0.15,
        wls_scale: 0.1,
        ..AdaptiveConfig::default()
    };

    let config = DEConfigBuilder::new()
        .seed(42)
        .maxiter(100)
        .popsize(30)
        .strategy(Strategy::AdaptiveBin)
        .mutation(Mutation::Adaptive { initial_f: 0.8 })
        .adaptive(adaptive_config)
        .build()
        .expect("popsize must be >= 4");

    differential_evolution(&func, bounds, config).expect("optimization failed")
}