pub struct BackTracking { /* private fields */ }
Implementations§
impl BackTracking
pub fn new(c1: Floating, beta: Floating) -> Self
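Constructs the line search from c1, the Armijo sufficient-decrease factor, and beta, the backtracking contraction factor in (0, 1); the repository examples below use c1 = 1e-4 and beta = 0.5 (beta = 0.5 corresponds to bisecting the trial step). The crate's own implementation is not reproduced on this page, so the following is only a minimal, self-contained sketch of the standard Armijo backtracking rule that these two parameters control, shown on the scalar function f(x) = x^2; it is an illustration, not this crate's code.

// A minimal sketch (not this crate's implementation) of the Armijo backtracking
// rule parameterized by c1 and beta, applied to f(x) = x^2.
fn main() {
    let (c1, beta) = (1e-4_f64, 0.5_f64);
    let f = |x: f64| x * x;
    let grad = |x: f64| 2.0 * x;

    let x = 10.0;
    let d = -grad(x); // steepest-descent direction
    let mut t = 1.0;
    // Shrink the step until the sufficient-decrease (Armijo) condition holds:
    //   f(x + t * d) <= f(x) + c1 * t * grad(x) * d
    while f(x + t * d) > f(x) + c1 * t * grad(x) * d {
        t *= beta;
    }
    println!("accepted step length: {t}"); // prints 0.5 for this setup
}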
Examples found in repository
examples/quadratic_with_plots.rs (line 20)
6 fn main() {
7 // Setting up log verbosity and the tracer.
8 std::env::set_var("RUST_LOG", "debug");
9 let _ = Tracer::default().with_normal_stdout_layer().build();
10 // Setting up the oracle
11 let matrix = DMatrix::from_vec(2, 2, vec![100., 0., 0., 100.]);
12 let mut f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
13 let f = x.dot(&(&matrix * x));
14 let g = 2. * &matrix * x;
15 FuncEvalMultivariate::new(f, g)
16 };
17 // Setting up the line search
18 let armijo_factor = 1e-4;
19 let beta = 0.5; // (beta in (0, 1), notice that beta = 0.5 corresponds to bisection)
20 let mut ls = BackTracking::new(armijo_factor, beta);
21 // Setting up the main solver, with its parameters and the initial guess
22 let tol = 1e-6;
23 let x0 = DVector::from_vec(vec![10., 10.]);
24 let mut solver = GradientDescent::new(tol, x0);
25 // We define a callback to store iterates and function evaluations
26 let mut iterates = vec![];
27 let mut solver_callback = |s: &GradientDescent| {
28 iterates.push(s.x().clone());
29 };
30 // Running the solver
31 let max_iter_solver = 100;
32 let max_iter_line_search = 10;
33
34 solver
35 .minimize(
36 &mut ls,
37 f_and_g,
38 max_iter_solver,
39 max_iter_line_search,
40 Some(&mut solver_callback),
41 )
42 .unwrap();
43 // Printing the result
44 let x = solver.x();
45 let eval = f_and_g(x);
46 println!("x: {:?}", x);
47 println!("f(x): {}", eval.f());
48 println!("g(x): {:?}", eval.g());
49
50 // Plotting the iterates
51 let n = 50;
52 let start = -5.0;
53 let end = 5.0;
54 let plotter = Plotter3d::new(start, end, start, end, n)
55 .append_plot(&mut f_and_g, "Objective function", 0.5)
56 .append_scatter_points(&mut f_and_g, &iterates, "Iterates")
57 .set_layout_size(1600, 1000);
58 plotter.build("quadratic.html");
59 }
More examples
examples/gradient_descent_example.rs (line 31)
6 fn main() {
7 // Setting up logging
8 std::env::set_var("RUST_LOG", "info");
9 let _ = Tracer::default().with_normal_stdout_layer().build();
10
11 // Convex quadratic function: f(x,y) = x^2 + 2y^2
12 // Global minimum at (0, 0) with f(0,0) = 0
13 let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
14 let x1 = x[0];
15 let x2 = x[1];
16
17 // Function value
18 let f = x1.powi(2) + 2.0 * x2.powi(2);
19
20 // Gradient
21 let g1 = 2.0 * x1;
22 let g2 = 4.0 * x2;
23 let g = DVector::from_vec(vec![g1, g2]);
24
25 FuncEvalMultivariate::new(f, g)
26 };
27
28 // Setting up the line search (backtracking with Armijo condition)
29 let armijo_factor = 1e-4;
30 let beta = 0.5;
31 let mut ls = BackTracking::new(armijo_factor, beta);
32
33 // Setting up the solver
34 let tol = 1e-6;
35 let x0 = DVector::from_vec(vec![2.0, 1.0]); // Starting point
36 let mut solver = GradientDescent::new(tol, x0.clone());
37
38 // Running the solver
39 let max_iter_solver = 100;
40 let max_iter_line_search = 20;
41
42 println!("=== Gradient Descent Example ===");
43 println!("Objective: f(x,y) = x^2 + 2y^2 (convex quadratic)");
44 println!("Global minimum: (0, 0) with f(0,0) = 0");
45 println!("Starting point: {:?}", x0);
46 println!("Tolerance: {}", tol);
47 println!();
48
49 match solver.minimize(
50 &mut ls,
51 f_and_g,
52 max_iter_solver,
53 max_iter_line_search,
54 None,
55 ) {
56 Ok(()) => {
57 let x = solver.x();
58 let eval = f_and_g(x);
59 println!("✅ Optimization completed successfully!");
60 println!("Final iterate: {:?}", x);
61 println!("Function value: {:.6}", eval.f());
62 println!("Gradient norm: {:.6}", eval.g().norm());
63 println!("Iterations: {}", solver.k());
64
65 // Check if we're close to the known minimum
66 let true_min = DVector::from_vec(vec![0.0, 0.0]);
67 let distance_to_min = (x - true_min).norm();
68 println!("Distance to true minimum: {:.6}", distance_to_min);
69 println!("Expected function value: 0.0");
70 }
71 Err(e) => {
72 println!("❌ Optimization failed: {:?}", e);
73 }
74 }
75 }
examples/coordinate_descent_example.rs (line 33)
6 fn main() {
7 // Setting up logging
8 std::env::set_var("RUST_LOG", "info");
9 let _ = Tracer::default().with_normal_stdout_layer().build();
10
11 // Separable convex function: f(x,y,z) = x^2 + 2y^2 + 3z^2
12 // This function is separable and has a minimum at (0, 0, 0)
13 let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
14 let x1 = x[0];
15 let x2 = x[1];
16 let x3 = x[2];
17
18 // Function value
19 let f = x1.powi(2) + 2.0 * x2.powi(2) + 3.0 * x3.powi(2);
20
21 // Gradient
22 let g1 = 2.0 * x1;
23 let g2 = 4.0 * x2;
24 let g3 = 6.0 * x3;
25 let g = DVector::from_vec(vec![g1, g2, g3]);
26
27 FuncEvalMultivariate::new(f, g)
28 };
29
30 // Setting up the line search (backtracking)
31 let armijo_factor = 1e-4;
32 let beta = 0.5;
33 let mut ls = BackTracking::new(armijo_factor, beta);
34
35 // Setting up the solver
36 let tol = 1e-6;
37 let x0 = DVector::from_vec(vec![1.0, 1.0, 1.0]); // Starting point
38 let mut solver = CoordinateDescent::new(tol, x0.clone());
39
40 // Running the solver
41 let max_iter_solver = 100;
42 let max_iter_line_search = 10;
43
44 println!("=== Coordinate Descent Example ===");
45 println!("Objective: f(x,y,z) = x^2 + 2y^2 + 3z^2 (separable convex)");
46 println!("Global minimum: (0, 0, 0) with f(0,0,0) = 0");
47 println!("Starting point: {:?}", x0);
48 println!("Tolerance: {}", tol);
49 println!();
50
51 match solver.minimize(
52 &mut ls,
53 f_and_g,
54 max_iter_solver,
55 max_iter_line_search,
56 None,
57 ) {
58 Ok(()) => {
59 let x = solver.x();
60 let eval = f_and_g(x);
61 println!("✅ Optimization completed successfully!");
62 println!("Final iterate: {:?}", x);
63 println!("Function value: {:.6}", eval.f());
64 println!("Gradient norm: {:.6}", eval.g().norm());
65 println!("Iterations: {}", solver.k());
66
67 // Check if we're close to the known minimum
68 let true_min = DVector::from_vec(vec![0.0, 0.0, 0.0]);
69 let distance_to_min = (x - true_min).norm();
70 println!("Distance to true minimum: {:.6}", distance_to_min);
71 println!("Expected function value: 0.0");
72
73 // Verify optimality conditions
74 let gradient_at_solution = eval.g();
75 println!("Gradient at solution: {:?}", gradient_at_solution);
76 println!(
77 "Gradient norm should be close to 0: {}",
78 gradient_at_solution.norm()
79 );
80 }
81 Err(e) => {
82 println!("❌ Optimization failed: {:?}", e);
83 }
84 }
85 }
examples/pnorm_descent_example.rs (line 31)
6 fn main() {
7 // Setting up logging
8 std::env::set_var("RUST_LOG", "info");
9 let _ = Tracer::default().with_normal_stdout_layer().build();
10
11 // Convex quadratic function: f(x,y) = x^2 + 4y^2
12 // This function has a minimum at (0, 0)
13 let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
14 let x1 = x[0];
15 let x2 = x[1];
16
17 // Function value
18 let f = x1.powi(2) + 4.0 * x2.powi(2);
19
20 // Gradient
21 let g1 = 2.0 * x1;
22 let g2 = 8.0 * x2;
23 let g = DVector::from_vec(vec![g1, g2]);
24
25 FuncEvalMultivariate::new(f, g)
26 };
27
28 // Setting up the line search (backtracking)
29 let armijo_factor = 1e-4;
30 let beta = 0.5;
31 let mut ls = BackTracking::new(armijo_factor, beta);
32
33 // Setting up the solver with a diagonal preconditioner
34 let tol = 1e-6;
35 let x0 = DVector::from_vec(vec![2.0, 1.0]); // Starting point
36
37 // Build the inverse preconditioner P^-1 = diag(1, 1/4) to improve conditioning
38 let inverse_p = DMatrix::from_vec(2, 2, vec![1.0, 0.0, 0.0, 0.25]);
39 let mut solver = PnormDescent::new(tol, x0.clone(), inverse_p);
40
41 // Running the solver
42 let max_iter_solver = 50;
43 let max_iter_line_search = 20;
44
45 println!("=== P-Norm Descent Example ===");
46 println!("Objective: f(x,y) = x^2 + 4y^2 (convex quadratic)");
47 println!("Global minimum: (0, 0) with f(0,0) = 0");
48 println!("Preconditioner: P = diag(1, 1/4)");
49 println!("Starting point: {:?}", x0);
50 println!("Tolerance: {}", tol);
51 println!();
52
53 match solver.minimize(
54 &mut ls,
55 f_and_g,
56 max_iter_solver,
57 max_iter_line_search,
58 None,
59 ) {
60 Ok(()) => {
61 let x = solver.x();
62 let eval = f_and_g(x);
63 println!("✅ Optimization completed successfully!");
64 println!("Final iterate: {:?}", x);
65 println!("Function value: {:.6}", eval.f());
66 println!("Gradient norm: {:.6}", eval.g().norm());
67 println!("Iterations: {}", solver.k());
68
69 // Check if we're close to the known minimum
70 let true_min = DVector::from_vec(vec![0.0, 0.0]);
71 let distance_to_min = (x - true_min).norm();
72 println!("Distance to true minimum: {:.6}", distance_to_min);
73 println!("Expected function value: 0.0");
74
75 // Verify optimality conditions
76 let gradient_at_solution = eval.g();
77 println!("Gradient at solution: {:?}", gradient_at_solution);
78 println!(
79 "Gradient norm should be close to 0: {}",
80 gradient_at_solution.norm()
81 );
82
83 // Show some properties of P-norm descent
84 println!("P-norm descent properties:");
85 println!(" - Uses a preconditioner P to improve convergence");
86 println!(" - Equivalent to steepest descent with P = identity");
87 println!(" - Good preconditioner can significantly improve convergence rate");
88 }
89 Err(e) => {
90 println!("❌ Optimization failed: {:?}", e);
91 }
92 }
93 }
examples/spg_example.rs (line 32)
6 fn main() {
7 // Setting up logging
8 std::env::set_var("RUST_LOG", "info");
9 let _ = Tracer::default().with_normal_stdout_layer().build();
10
11 // Convex function: f(x,y) = x^2 + y^2 + exp(x^2 + y^2)
12 // This function is convex and has a minimum at (0, 0)
13 let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
14 let x1 = x[0];
15 let x2 = x[1];
16
17 // Function value
18 let f = x1.powi(2) + x2.powi(2) + (x1.powi(2) + x2.powi(2)).exp();
19
20 // Gradient
21 let exp_term = (x1.powi(2) + x2.powi(2)).exp();
22 let g1 = 2.0 * x1 * (1.0 + exp_term);
23 let g2 = 2.0 * x2 * (1.0 + exp_term);
24 let g = DVector::from_vec(vec![g1, g2]);
25
26 FuncEvalMultivariate::new(f, g)
27 };
28
29 // Setting up the line search (backtracking)
30 let armijo_factor = 1e-4;
31 let beta = 0.5;
32 let mut ls = BackTracking::new(armijo_factor, beta);
33
34 // Setting up the solver with box constraints
35 let tol = 1e-6;
36 let x0 = DVector::from_vec(vec![0.5, 0.5]); // Starting point
37 let lower_bound = DVector::from_vec(vec![-1.0, -1.0]); // -1 <= x <= 1, -1 <= y <= 1
38 let upper_bound = DVector::from_vec(vec![1.0, 1.0]);
39
40 // Create a mutable oracle for SPG initialization
41 let mut oracle_for_init = f_and_g;
42 let mut solver = SpectralProjectedGradient::new(
43 tol,
44 x0.clone(),
45 &mut oracle_for_init,
46 lower_bound.clone(),
47 upper_bound.clone(),
48 );
49
50 // Running the solver
51 let max_iter_solver = 100;
52 let max_iter_line_search = 20;
53
54 println!("=== Spectral Projected Gradient (SPG) Example ===");
55 println!("Objective: f(x,y) = x^2 + y^2 + exp(x^2 + y^2) (convex)");
56 println!("Global minimum: (0, 0) with f(0,0) = 1");
57 println!("Constraints: -1 <= x <= 1, -1 <= y <= 1");
58 println!("Starting point: {:?}", x0);
59 println!("Lower bounds: {:?}", lower_bound);
60 println!("Upper bounds: {:?}", upper_bound);
61 println!("Tolerance: {}", tol);
62 println!();
63
64 match solver.minimize(
65 &mut ls,
66 f_and_g,
67 max_iter_solver,
68 max_iter_line_search,
69 None,
70 ) {
71 Ok(()) => {
72 let x = solver.x();
73 let eval = f_and_g(x);
74 println!("✅ Optimization completed successfully!");
75 println!("Final iterate: {:?}", x);
76 println!("Function value: {:.6}", eval.f());
77 println!("Gradient norm: {:.6}", eval.g().norm());
78 println!("Iterations: {}", solver.k());
79
80 // Check constraint satisfaction
81 println!("Constraint satisfaction:");
82 for i in 0..x.len() {
83 println!(
84 " x[{}] = {:.6} (bounds: [{:.1}, {:.1}])",
85 i, x[i], lower_bound[i], upper_bound[i]
86 );
87 }
88
89 // Check if we're close to the known minimum
90 let true_min = DVector::from_vec(vec![0.0, 0.0]);
91 let distance_to_min = (x - true_min).norm();
92 println!("Distance to true minimum: {:.6}", distance_to_min);
93 println!("Expected function value: 1.0");
94
95 // Show some properties of SPG
96 println!("SPG properties:");
97 println!(" - Uses spectral step length estimation");
98 println!(" - Handles box constraints efficiently");
99 println!(" - Often faster than standard projected gradient");
100 }
101 Err(e) => {
102 println!("❌ Optimization failed: {:?}", e);
103 }
104 }
105 }
Trait Implementations§
impl LineSearch for BackTracking
fn compute_step_len(
    &mut self,
    x_k: &DVector<Floating>,
    eval_x_k: &FuncEvalMultivariate,
    direction_k: &DVector<Floating>,
    oracle: &mut impl FnMut(&DVector<Floating>) -> FuncEvalMultivariate,
    max_iter: usize,
) -> Floating
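compute_step_len returns the accepted step length along direction_k, evaluating the objective through the oracle closure and capping the number of backtracking iterations at max_iter. As a usage illustration only, the sketch below drives the line search by hand, outside of any solver. It assumes that the crate's BackTracking, FuncEvalMultivariate, and LineSearch items are in scope (the repository examples above omit their use statements), that Floating is f64, and that the gradient accessor g() seen in the examples returns a vector that can be cloned and negated; these details may differ in the actual crate.

// Usage sketch under the assumptions stated above; not taken verbatim from the crate.
use nalgebra::DVector;

fn main() {
    // Oracle for f(x) = ||x||^2 with gradient 2x, as in the examples above.
    let mut oracle = |x: &DVector<f64>| -> FuncEvalMultivariate {
        FuncEvalMultivariate::new(x.dot(x), 2.0 * x)
    };
    let mut ls = BackTracking::new(1e-4, 0.5);

    let x_k = DVector::from_vec(vec![1.0, 1.0]);
    let eval_x_k = oracle(&x_k);
    // Steepest-descent direction: negate the gradient returned by the oracle.
    let direction_k = -eval_x_k.g().clone();
    let t = ls.compute_step_len(&x_k, &eval_x_k, &direction_k, &mut oracle, 20);
    println!("step length: {t}");
}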
Auto Trait Implementations§
impl Freeze for BackTracking
impl RefUnwindSafe for BackTracking
impl Send for BackTracking
impl Sync for BackTracking
impl Unpin for BackTracking
impl UnwindSafe for BackTracking
Blanket Implementations§
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.
impl<T> Instrument for T
fn instrument(self, span: Span) -> Instrumented<Self>
fn in_current_span(self) -> Instrumented<Self>
impl<SS, SP> SupersetOf<SS> for SP
where
    SS: SubsetOf<SP>,
fn to_subset(&self) -> Option<SS>
The inverse inclusion map: attempts to construct self from the equivalent element of its superset.
fn is_in_subset(&self) -> bool
Checks if self is actually part of its subset T (and can be converted to it).
fn to_subset_unchecked(&self) -> SS
Use with care! Same as self.to_subset but without any property checks. Always succeeds.
fn from_subset(element: &SS) -> SP
The inclusion map: converts self to the equivalent element of its superset.