projected_gradient_example/projected_gradient_example.rs

use nalgebra::DVector;
use optimization_solvers::{
    FuncEvalMultivariate, LineSearchSolver, MoreThuente, ProjectedGradientDescent, Tracer,
};

fn main() {
    // Setting up logging
    std::env::set_var("RUST_LOG", "info");
    let _ = Tracer::default().with_normal_stdout_layer().build();

    // Convex quadratic function: f(x,y) = (x-2)^2 + (y-3)^2
    // This function has a minimum at (2, 3), but we'll constrain it to a box
    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
        let x1 = x[0];
        let x2 = x[1];

        // Function value
        let f = (x1 - 2.0).powi(2) + (x2 - 3.0).powi(2);

        // Gradient
        let g1 = 2.0 * (x1 - 2.0);
        let g2 = 2.0 * (x2 - 3.0);
        let g = DVector::from_vec(vec![g1, g2]);

        FuncEvalMultivariate::new(f, g)
    };
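
    // Illustrative sanity check, local to this example (not part of the
    // optimization_solvers API): the analytic gradient should agree with a
    // central finite-difference estimate of f at an arbitrary test point.
    let x_check = DVector::from_vec(vec![0.5, -1.5]);
    let h = 1e-6;
    let fd_grad = DVector::from_fn(2, |i, _| {
        let (mut xp, mut xm) = (x_check.clone(), x_check.clone());
        xp[i] += h;
        xm[i] -= h;
        let (ep, em) = (f_and_g(&xp), f_and_g(&xm));
        (ep.f() - em.f()) / (2.0 * h)
    });
    assert!((f_and_g(&x_check).g().clone() - fd_grad).norm() < 1e-4);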

    // Setting up the line search (Moré-Thuente)
    let mut ls = MoreThuente::default();

    // Setting up the solver with box constraints
    let tol = 1e-6;
    let x0 = DVector::from_vec(vec![0.0, 0.0]); // Starting point
    let lower_bound = DVector::from_vec(vec![0.0, 0.0]); // x >= 0, y >= 0
    let upper_bound = DVector::from_vec(vec![1.0, 1.0]); // x <= 1, y <= 1
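
    // Illustrative sketch, local to this example: projection onto the box
    // [lower_bound, upper_bound] is a componentwise clamp. A projected
    // gradient method presumably applies something like this after each
    // step; it is defined here only to inspect optimality conditions below.
    let project = |v: &DVector<f64>| -> DVector<f64> {
        DVector::from_fn(v.len(), |i, _| v[i].max(lower_bound[i]).min(upper_bound[i]))
    };
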
    let mut solver =
        ProjectedGradientDescent::new(tol, x0.clone(), lower_bound.clone(), upper_bound.clone());

    // Running the solver
    let max_iter_solver = 100;
    let max_iter_line_search = 20;

    println!("=== Projected Gradient Descent Example ===");
    println!("Objective: f(x,y) = (x-2)^2 + (y-3)^2 (convex quadratic)");
    println!("Unconstrained minimum: (2, 3) with f(2,3) = 0");
    println!("Constraints: 0 <= x <= 1, 0 <= y <= 1");
    println!("Constrained minimum: (1, 1) with f(1,1) = (1-2)^2 + (1-3)^2 = 5");
    println!("Starting point: {:?}", x0);
    println!("Lower bounds: {:?}", lower_bound);
    println!("Upper bounds: {:?}", upper_bound);
    println!("Tolerance: {}", tol);
    println!();

    match solver.minimize(
        &mut ls,
        f_and_g,
        max_iter_solver,
        max_iter_line_search,
        None,
    ) {
        Ok(()) => {
            let x = solver.x();
            let eval = f_and_g(x);
            println!("✅ Optimization completed successfully!");
            println!("Final iterate: {:?}", x);
            println!("Function value: {:.6}", eval.f());
            println!("Gradient norm: {:.6}", eval.g().norm());
            println!("Iterations: {}", solver.k());

            // Check constraint satisfaction
            println!("Constraint satisfaction:");
            for i in 0..x.len() {
                println!(
                    "  x[{}] = {:.6} (bounds: [{:.1}, {:.1}])",
                    i, x[i], lower_bound[i], upper_bound[i]
                );
            }
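
            // Illustrative first-order optimality check, using the local
            // `project` sketch above (an assumption, not a solver API call):
            // a constrained minimizer is a fixed point of the projected
            // gradient step, so ||x - P(x - g)|| should be near zero here
            // even though the raw gradient norm is not.
            let trial = x.clone() - eval.g().clone();
            let pg_residual = (x.clone() - project(&trial)).norm();
            println!(
                "Projected-gradient residual ||x - P(x - g)||: {:.6}",
                pg_residual
            );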

            // The constrained minimum should be at (1, 1), since the
            // unconstrained minimum (2, 3) lies outside the box
            let expected_min = DVector::from_vec(vec![1.0, 1.0]);
            let distance_to_expected = (x - expected_min).norm();
            println!(
                "Distance to expected constrained minimum (1,1): {:.6}",
                distance_to_expected
            );
            println!("Expected function value at (1,1): 5.0");
        }
        Err(e) => {
            println!("❌ Optimization failed: {:?}", e);
        }
    }
}
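
// To run this example (assuming the file is registered as a Cargo example
// in the optimization_solvers repository):
//
//     cargo run --example projected_gradient_example
//
// The solver should stop at (approximately) the box corner (1, 1).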