bfgs_example/
bfgs_example.rs

use nalgebra::DVector;
use optimization_solvers::{FuncEvalMultivariate, LineSearchSolver, MoreThuente, Tracer, BFGS};
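
// Example: minimizing a strictly convex quadratic in three variables with
// BFGS; a Moré-Thuente line search chooses the step length at each iteration.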
fn main() {
    std::env::set_var("RUST_LOG", "info");
    let _ = Tracer::default().with_normal_stdout_layer().build();

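    // Objective and analytic gradient evaluated together, so each solver
    // call gets f(x) = x1^2 + 2*x2^2 + 3*x3^2 + x1*x2 + x2*x3 and ∇f(x).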
    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
        let x1 = x[0];
        let x2 = x[1];
        let x3 = x[2];

        let f = x1.powi(2) + 2.0 * x2.powi(2) + 3.0 * x3.powi(2) + x1 * x2 + x2 * x3;

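        // Partial derivatives of f, written out by hand:
        // ∂f/∂x1 = 2*x1 + x2, ∂f/∂x2 = x1 + 4*x2 + x3, ∂f/∂x3 = x2 + 6*x3.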
        let g1 = 2.0 * x1 + x2;
        let g2 = 4.0 * x2 + x1 + x3;
        let g3 = 6.0 * x3 + x2;
        let g = DVector::from_vec(vec![g1, g2, g3]);

        FuncEvalMultivariate::new(f, g)
    };

    let mut ls = MoreThuente::default();

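    // Convergence tolerance and starting point for the BFGS solver.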
    let tol = 1e-8;
    let x0 = DVector::from_vec(vec![1.0, 1.0, 1.0]);
    let mut solver = BFGS::new(tol, x0.clone());

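    // Iteration budgets for the outer BFGS loop and each inner line search.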
    let max_iter_solver = 50;
    let max_iter_line_search = 20;

    println!("=== BFGS Quasi-Newton Example ===");
    println!("Objective: f(x,y,z) = x^2 + 2y^2 + 3z^2 + xy + yz (convex quadratic)");
    println!("Starting point: {:?}", x0);
    println!("Tolerance: {}", tol);
    println!();

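    // Run the solver. The trailing `None` leaves an optional argument of
    // `minimize` unset; consult the crate docs for what it controls.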
    match solver.minimize(
        &mut ls,
        f_and_g,
        max_iter_solver,
        max_iter_line_search,
        None,
    ) {
        Ok(()) => {
            let x = solver.x();
            let eval = f_and_g(x);
            println!("✅ Optimization completed successfully!");
            println!("Final iterate: {:?}", x);
            println!("Function value: {:.8}", eval.f());
            println!("Gradient norm: {:.8}", eval.g().norm());
            println!("Iterations: {}", solver.k());

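            // First-order optimality check: at the minimizer of a smooth
            // convex function the gradient vanishes.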
            let gradient_at_solution = eval.g();
            println!("Gradient at solution: {:?}", gradient_at_solution);
            println!(
                "Gradient norm should be close to 0: {}",
                gradient_at_solution.norm()
            );

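            // The Hessian [[2, 1, 0], [1, 4, 1], [0, 1, 6]] is positive
            // definite, so ∇f(x) = 0 has the unique solution x* = (0, 0, 0),
            // which is the global minimum with f(x*) = 0.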
70 println!("Expected minimum: solution of ∇f(x) = 0");
73 }
        Err(e) => {
            println!("❌ Optimization failed: {:?}", e);
        }
    }
}