dfp_example/dfp_example.rs

use nalgebra::DVector;
use optimization_solvers::{FuncEvalMultivariate, LineSearchSolver, MoreThuente, Tracer, DFP};

fn main() {
    // Enable info-level logging and install a stdout tracing layer.
    std::env::set_var("RUST_LOG", "info");
    let _ = Tracer::default().with_normal_stdout_layer().build();

    // Oracle returning the objective value and its analytic gradient for
    // f(x1, x2) = x1^2 + 5*x2^2 + x1*x2.
    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
        let x1 = x[0];
        let x2 = x[1];

        let f = x1.powi(2) + 5.0 * x2.powi(2) + x1 * x2;

        // Partial derivatives: df/dx1 = 2*x1 + x2, df/dx2 = 10*x2 + x1.
        let g1 = 2.0 * x1 + x2;
        let g2 = 10.0 * x2 + x1;
        let g = DVector::from_vec(vec![g1, g2]);

        FuncEvalMultivariate::new(f, g)
    };

    // Moré-Thuente line search: computes step lengths satisfying the strong
    // Wolfe conditions.
    let mut ls = MoreThuente::default();

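    // DFP is a quasi-Newton method: it maintains an approximation H_k of the
    // inverse Hessian and, in the classic formulation (which this solver is
    // presumed to follow), refreshes it with the rank-two update
    //   H_{k+1} = H_k + (s s^T)/(s^T y) - (H_k y y^T H_k)/(y^T H_k y),
    // where s = x_{k+1} - x_k and y = g_{k+1} - g_k.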
    // Convergence tolerance and starting point.
    let tol = 1e-6;
    let x0 = DVector::from_vec(vec![2.0, 1.0]);
    let mut solver = DFP::new(tol, x0.clone());

    // Iteration caps for the outer solver and the inner line search.
    let max_iter_solver = 100;
    let max_iter_line_search = 20;

    println!("=== DFP (Davidon-Fletcher-Powell) Quasi-Newton Example ===");
    println!("Objective: f(x,y) = x^2 + 5y^2 + xy (convex quadratic)");
    println!("Starting point: {:?}", x0);
    println!("Tolerance: {}", tol);
    println!();

    match solver.minimize(
        &mut ls,
        f_and_g,
        max_iter_solver,
        max_iter_line_search,
        None,
    ) {
        Ok(()) => {
            let x = solver.x();
            let eval = f_and_g(x);
            println!("✅ Optimization completed successfully!");
            println!("Final iterate: {:?}", x);
            println!("Function value: {:.6}", eval.f());
            println!("Gradient norm: {:.6}", eval.g().norm());
            println!("Iterations: {}", solver.k());

            let gradient_at_solution = eval.g();
            println!("Gradient at solution: {:?}", gradient_at_solution);
            println!(
                "Gradient norm should be close to 0: {}",
                gradient_at_solution.norm()
            );

            // The unique minimizer of this convex quadratic is the origin.
            let expected_min = DVector::from_vec(vec![0.0, 0.0]);
            let distance_to_expected = (x - expected_min).norm();
            println!(
                "Distance to expected minimum (0,0): {:.6}",
                distance_to_expected
            );
            println!("Expected function value at (0,0): 0.0");
        }
        Err(e) => {
            println!("❌ Optimization failed: {:?}", e);
        }
    }
}
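
The example's sanity checks can be verified by hand, since the objective is a strictly convex quadratic. A quick sketch, using only the formulas already in the code:

\[
\nabla f(x_1, x_2) = \begin{pmatrix} 2x_1 + x_2 \\ x_1 + 10x_2 \end{pmatrix}, \qquad
\nabla^2 f = \begin{pmatrix} 2 & 1 \\ 1 & 10 \end{pmatrix}.
\]

Setting the gradient to zero yields a linear system whose only solution is (0, 0), and the Hessian is positive definite (leading principal minors 2 and 19), so the origin is the unique global minimizer with f(0, 0) = 0. These are exactly the expected minimum and function value the example compares its final iterate against.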