broyden_bounded_example/
broyden_bounded_example.rs

use nalgebra::DVector;
use optimization_solvers::{BroydenB, FuncEvalMultivariate, LineSearchSolver, MoreThuente, Tracer};

fn main() {
    // Setting up logging
    std::env::set_var("RUST_LOG", "info");
    let _ = Tracer::default().with_normal_stdout_layer().build();

    // Convex quadratic: f(x,y) = x^2 + 2y^2 + xy
    // Strictly convex, so it has a unique (unconstrained) minimizer at the origin
    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
        let x1 = x[0];
        let x2 = x[1];

        // Function value
        let f = x1.powi(2) + 2.0 * x2.powi(2) + x1 * x2;

        // Gradient
        let g1 = 2.0 * x1 + x2;
        let g2 = 4.0 * x2 + x1;
        let g = DVector::from_vec(vec![g1, g2]);

        FuncEvalMultivariate::new(f, g)
    };
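
    // Illustrative sanity check (an addition, not part of the original example):
    // compare the analytic gradient against central finite differences at a test point.
    {
        let x_check = DVector::from_vec(vec![0.3, 0.7]);
        let g_analytic = f_and_g(&x_check).g().clone();
        let h = 1e-6;
        for i in 0..x_check.len() {
            let mut xp = x_check.clone();
            let mut xm = x_check.clone();
            xp[i] += h;
            xm[i] -= h;
            // Central difference: (f(x + h e_i) - f(x - h e_i)) / (2h)
            let g_fd = (f_and_g(&xp).f() - f_and_g(&xm).f()) / (2.0 * h);
            println!(
                "Gradient check, component {}: analytic = {:.6}, finite-diff = {:.6}",
                i, g_analytic[i], g_fd
            );
        }
    }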

    // Setting up the line search (More-Thuente)
    let mut ls = MoreThuente::default();

    // Setting up the solver with box constraints
    let tol = 1e-6;
    let x0 = DVector::from_vec(vec![0.5, 0.5]); // Starting point
    let lower_bound = DVector::from_vec(vec![0.0, 0.0]); // x >= 0, y >= 0
    let upper_bound = DVector::from_vec(vec![1.0, 1.0]); // x <= 1, y <= 1
    let mut solver = BroydenB::new(tol, x0.clone(), lower_bound.clone(), upper_bound.clone());

    // Running the solver
    let max_iter_solver = 100;
    let max_iter_line_search = 20;

    println!("=== BroydenB (Bounded Broyden) Example ===");
    println!("Objective: f(x,y) = x^2 + 2y^2 + xy (convex quadratic)");
    println!("Unconstrained minimum: (0, 0), where ∇f(x) = 0");
    println!("Constraints: 0 <= x <= 1, 0 <= y <= 1");
    println!("Starting point: {:?}", x0);
    println!("Lower bounds: {:?}", lower_bound);
    println!("Upper bounds: {:?}", upper_bound);
    println!("Tolerance: {}", tol);
    println!();

    match solver.minimize(
        &mut ls,
        f_and_g,
        max_iter_solver,
        max_iter_line_search,
        None,
    ) {
        Ok(()) => {
            let x = solver.x();
            let eval = f_and_g(x);
            println!("✅ Optimization completed successfully!");
            println!("Final iterate: {:?}", x);
            println!("Function value: {:.6}", eval.f());
            println!("Gradient norm: {:.6}", eval.g().norm());
            println!("Iterations: {}", solver.k());

            // Check constraint satisfaction
            println!("Constraint satisfaction:");
            for i in 0..x.len() {
                println!(
                    "  x[{}] = {:.6} (bounds: [{:.1}, {:.1}])",
                    i, x[i], lower_bound[i], upper_bound[i]
                );
            }

            // Verify optimality conditions. Here the unconstrained minimizer (0, 0)
            // is feasible, so the raw gradient itself vanishes at the constrained solution.
            let gradient_at_solution = eval.g();
            println!("Gradient at solution: {:?}", gradient_at_solution);
            println!(
                "Gradient norm should be close to 0: {}",
                gradient_at_solution.norm()
            );
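
            // Illustrative addition (not part of the original example): for box-
            // constrained problems the robust stationarity measure is the projected
            // gradient, ||P(x - ∇f(x)) - x||, where P clamps each component to
            // [lower_bound, upper_bound]. It is zero at a constrained minimizer even
            // when the solution sits on the boundary and the raw gradient does not vanish.
            let mut projected_gradient_norm_sq = 0.0;
            for i in 0..x.len() {
                let projected =
                    (x[i] - gradient_at_solution[i]).clamp(lower_bound[i], upper_bound[i]);
                projected_gradient_norm_sq += (projected - x[i]).powi(2);
            }
            println!(
                "Projected gradient norm: {:.6}",
                projected_gradient_norm_sq.sqrt()
            );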

            // Show some properties of BroydenB
            println!("BroydenB properties:");
            println!("  - Broyden's method with box constraints");
            println!("  - Uses rank-1 Hessian updates");
            println!("  - Alternative to BFGS and DFP methods");
            println!("  - Handles bounds through projection");
        }
        Err(e) => {
            println!("❌ Optimization failed: {:?}", e);
        }
    }
}
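
// Illustrative sketch (an addition, not the crate's internal implementation; the
// exact update used by BroydenB may differ): a Broyden-style rank-1 secant update
// of an approximate inverse Hessian H, in the "bad Broyden" form
//     H_new = H + ((dx - H * dg) / (dg^T dg)) * dg^T,
// which enforces the secant condition H_new * dg = dx, with
// dx = x_{k+1} - x_k and dg = ∇f(x_{k+1}) - ∇f(x_k).
#[allow(dead_code)]
fn broyden_rank1_update(
    h: &nalgebra::DMatrix<f64>,
    dx: &DVector<f64>,
    dg: &DVector<f64>,
) -> nalgebra::DMatrix<f64> {
    let denom = dg.dot(dg);
    h + (dx - h * dg) * dg.transpose() / denom
}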