gradient_descent_example/gradient_descent_example.rs

use nalgebra::DVector;
use optimization_solvers::{
    BackTracking, FuncEvalMultivariate, GradientDescent, LineSearchSolver, Tracer,
};

fn main() {
    std::env::set_var("RUST_LOG", "info");
    let _ = Tracer::default().with_normal_stdout_layer().build();

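    // Objective oracle: evaluates f(x1, x2) = x1^2 + 2*x2^2 together with its
    // gradient (2*x1, 4*x2) and returns both wrapped in a FuncEvalMultivariate.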
    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
        let x1 = x[0];
        let x2 = x[1];

        let f = x1.powi(2) + 2.0 * x2.powi(2);

        let g1 = 2.0 * x1;
        let g2 = 4.0 * x2;
        let g = DVector::from_vec(vec![g1, g2]);

        FuncEvalMultivariate::new(f, g)
    };

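    // Backtracking line search; judging by the argument names, armijo_factor is the
    // Armijo sufficient-decrease constant and beta the step-length contraction factor.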
    let armijo_factor = 1e-4;
    let beta = 0.5;
    let mut ls = BackTracking::new(armijo_factor, beta);

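    // Gradient descent solver with convergence tolerance tol, starting from x0 = (2, 1).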
    let tol = 1e-6;
    let x0 = DVector::from_vec(vec![2.0, 1.0]);
    let mut solver = GradientDescent::new(tol, x0.clone());

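    // Iteration caps for the outer solver loop and for each line search.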
    let max_iter_solver = 100;
    let max_iter_line_search = 20;

    println!("=== Gradient Descent Example ===");
    println!("Objective: f(x,y) = x^2 + 2y^2 (convex quadratic)");
    println!("Global minimum: (0, 0) with f(0,0) = 0");
    println!("Starting point: {:?}", x0);
    println!("Tolerance: {}", tol);
    println!();

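    // Run the solver; the trailing None leaves minimize's optional last argument unset.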
    match solver.minimize(
        &mut ls,
        f_and_g,
        max_iter_solver,
        max_iter_line_search,
        None,
    ) {
        Ok(()) => {
            let x = solver.x();
            let eval = f_and_g(x);
            println!("✅ Optimization completed successfully!");
            println!("Final iterate: {:?}", x);
            println!("Function value: {:.6}", eval.f());
            println!("Gradient norm: {:.6}", eval.g().norm());
            println!("Iterations: {}", solver.k());

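            // Compare the final iterate against the known analytic minimizer (0, 0).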
            let true_min = DVector::from_vec(vec![0.0, 0.0]);
            let distance_to_min = (x - true_min).norm();
            println!("Distance to true minimum: {:.6}", distance_to_min);
            println!("Expected function value: 0.0");
        }
        Err(e) => {
            println!("❌ Optimization failed: {:?}", e);
        }
    }
}