pub struct ProjectedGradientDescent { /* private fields */ }

Implementations§
impl ProjectedGradientDescent
Auto-generated by derive_getters::Getters.
pub fn grad_tol(&self) -> &Floating
Get field grad_tol from instance of ProjectedGradientDescent.
pub fn x(&self) -> &DVector<Floating>
Get field x from instance of ProjectedGradientDescent.
Examples found in repository: examples/projected_gradient_example.rs (line 62)

 6 fn main() {
 7     // Setting up logging
 8     std::env::set_var("RUST_LOG", "info");
 9     let _ = Tracer::default().with_normal_stdout_layer().build();
10
11     // Convex quadratic function: f(x,y) = (x-2)^2 + (y-3)^2
12     // This function has a minimum at (2, 3), but we'll constrain it to a box
13     let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
14         let x1 = x[0];
15         let x2 = x[1];
16
17         // Function value
18         let f = (x1 - 2.0).powi(2) + (x2 - 3.0).powi(2);
19
20         // Gradient
21         let g1 = 2.0 * (x1 - 2.0);
22         let g2 = 2.0 * (x2 - 3.0);
23         let g = DVector::from_vec(vec![g1, g2]);
24
25         FuncEvalMultivariate::new(f, g)
26     };
27
28     // Setting up the line search (More-Thuente)
29     let mut ls = MoreThuente::default();
30
31     // Setting up the solver with box constraints
32     let tol = 1e-6;
33     let x0 = DVector::from_vec(vec![0.0, 0.0]); // Starting point
34     let lower_bound = DVector::from_vec(vec![0.0, 0.0]); // x >= 0, y >= 0
35     let upper_bound = DVector::from_vec(vec![1.0, 1.0]); // x <= 1, y <= 1
36     let mut solver =
37         ProjectedGradientDescent::new(tol, x0.clone(), lower_bound.clone(), upper_bound.clone());
38
39     // Running the solver
40     let max_iter_solver = 100;
41     let max_iter_line_search = 20;
42
43     println!("=== Projected Gradient Descent Example ===");
44     println!("Objective: f(x,y) = (x-2)^2 + (y-3)^2 (convex quadratic)");
45     println!("Unconstrained minimum: (2, 3) with f(2,3) = 0");
46     println!("Constraints: 0 <= x <= 1, 0 <= y <= 1");
47     println!("Constrained minimum: (1, 1) with f(1,1) = 5");
48     println!("Starting point: {:?}", x0);
49     println!("Lower bounds: {:?}", lower_bound);
50     println!("Upper bounds: {:?}", upper_bound);
51     println!("Tolerance: {}", tol);
52     println!();
53
54     match solver.minimize(
55         &mut ls,
56         f_and_g,
57         max_iter_solver,
58         max_iter_line_search,
59         None,
60     ) {
61         Ok(()) => {
62             let x = solver.x();
63             let eval = f_and_g(x);
64             println!("✅ Optimization completed successfully!");
65             println!("Final iterate: {:?}", x);
66             println!("Function value: {:.6}", eval.f());
67             println!("Gradient norm: {:.6}", eval.g().norm());
68             println!("Iterations: {}", solver.k());
69
70             // Check constraint satisfaction
71             println!("Constraint satisfaction:");
72             for i in 0..x.len() {
73                 println!(
74                     "  x[{}] = {:.6} (bounds: [{:.1}, {:.1}])",
75                     i, x[i], lower_bound[i], upper_bound[i]
76                 );
77             }
78
79             // The constrained minimum is at (1, 1), since the unconstrained minimum (2, 3) lies outside the box
80             let expected_min = DVector::from_vec(vec![1.0, 1.0]);
81             let distance_to_expected = (x - expected_min).norm();
82             println!(
83                 "Distance to expected constrained minimum (1,1): {:.6}",
84                 distance_to_expected
85             );
86             println!("Expected function value at (1,1): 5.0");
87         }
88         Err(e) => {
89             println!("❌ Optimization failed: {:?}", e);
90         }
91     }
92 }

pub fn k(&self) -> &usize
Get field k from instance of ProjectedGradientDescent.
Examples found in repository: examples/projected_gradient_example.rs (line 68); see the full listing under x() above.

pub fn lower_bound(&self) -> &DVector<Floating>
Get field lower_bound from instance of ProjectedGradientDescent.
pub fn upper_bound(&self) -> &DVector<Floating>
Get field upper_bound from instance of ProjectedGradientDescent.
impl ProjectedGradientDescent
pub fn new(
    grad_tol: Floating,
    x0: DVector<Floating>,
    lower_bound: DVector<Floating>,
    upper_bound: DVector<Floating>,
) -> Self
Examples found in repository: examples/projected_gradient_example.rs (line 37); see the full listing under x() above.

Trait Implementations§
impl ComputeDirection for ProjectedGradientDescent
fn compute_direction(&mut self, eval: &FuncEvalMultivariate) -> Result<DVector<Floating>, SolverError>
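For box constraints, a projected-gradient direction is commonly formed by taking an unconstrained gradient step and projecting the result back onto the box; the direction is then the gap between the projected point and the current iterate. A minimal sketch of that construction, assuming a clamp-based projection (illustrative only, not this crate's source; the function name is hypothetical):

use nalgebra::DVector;

// Sketch: d = P(x - g) - x, where P clamps elementwise onto [lower, upper].
// Note that d = -PG(x) for the projected gradient described under HasProjectedGradient below.
fn projected_direction(
    x: &DVector<f64>,
    g: &DVector<f64>,
    lower: &DVector<f64>,
    upper: &DVector<f64>,
) -> DVector<f64> {
    let mut p = x - g; // unconstrained gradient step
    for i in 0..p.len() {
        p[i] = p[i].clamp(lower[i], upper[i]); // project back onto the box
    }
    p - x
}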
impl HasBounds for ProjectedGradientDescent
fn lower_bound(&self) -> &DVector<Floating>
fn upper_bound(&self) -> &DVector<Floating>
fn set_lower_bound(&mut self, lower_bound: DVector<Floating>)
fn set_upper_bound(&mut self, upper_bound: DVector<Floating>)
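A minimal usage sketch for the bound setters and getters, reusing the solver built in the repository example above (the tightened bounds are illustrative):

// Sketch: adjust the box between runs (reuses `solver` from the example above).
solver.set_lower_bound(DVector::from_vec(vec![0.25, 0.25]));
solver.set_upper_bound(DVector::from_vec(vec![0.75, 0.75]));
assert_eq!(solver.lower_bound()[0], 0.25);
assert_eq!(solver.upper_bound()[1], 0.75);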
impl LineSearchSolver for ProjectedGradientDescent
fn xk(&self) -> &DVector<Floating>
fn xk_mut(&mut self) -> &mut DVector<Floating>
fn k(&self) -> &usize
fn k_mut(&mut self) -> &mut usize
fn has_converged(&self, eval: &FuncEvalMultivariate) -> bool
fn update_next_iterate<LS: LineSearch>(
    &mut self,
    line_search: &mut LS,
    eval_x_k: &FuncEvalMultivariate,
    oracle: &mut impl FnMut(&DVector<Floating>) -> FuncEvalMultivariate,
    direction: &DVector<Floating>,
    max_iter_line_search: usize,
) -> Result<(), SolverError>
fn setup(&mut self)
fn evaluate_x_k(
    &mut self,
    oracle: &mut impl FnMut(&DVector<Floating>) -> FuncEvalMultivariate,
) -> Result<FuncEvalMultivariate, SolverError>
fn minimize<LS: LineSearch>(
    &mut self,
    line_search: &mut LS,
    oracle: impl FnMut(&DVector<Floating>) -> FuncEvalMultivariate,
    max_iter_solver: usize,
    max_iter_line_search: usize,
    callback: Option<&mut dyn FnMut(&Self)>,
) -> Result<(), SolverError>
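The callback parameter of minimize is invoked with a reference to the solver itself, which is useful for tracing iterates. A minimal sketch building on the repository example above (the recorded quantity is illustrative):

// Sketch: observe each iteration via the optional callback (reuses `solver`, `ls`,
// and `f_and_g` from the example above).
let mut trace: Vec<f64> = Vec::new();
let mut callback = |s: &ProjectedGradientDescent| {
    trace.push(f_and_g(s.x()).g().norm()); // record the gradient norm at x_k
};
solver
    .minimize(
        &mut ls,
        f_and_g,
        max_iter_solver,
        max_iter_line_search,
        Some(&mut callback),
    )
    .expect("solver failed");
println!("gradient norm per iteration: {:?}", trace);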
Auto Trait Implementations§
impl Freeze for ProjectedGradientDescent
impl RefUnwindSafe for ProjectedGradientDescent
impl Send for ProjectedGradientDescent
impl Sync for ProjectedGradientDescent
impl Unpin for ProjectedGradientDescent
impl UnwindSafe for ProjectedGradientDescent
Blanket Implementations§
impl<T> BorrowMut<T> for T
where
    T: ?Sized,
fn borrow_mut(&mut self) -> &mut T
Mutably borrows from an owned value.
impl<T> HasProjectedGradient for T
where
    T: LineSearchSolver + HasBounds,
fn projected_gradient(&self, eval: &FuncEvalMultivariate) -> DVector<Floating>
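The docs do not spell out the formula, but the standard projected-gradient map for box constraints is PG(x) = x - P(x - g), where P is the projection onto [lower, upper]; PG(x) vanishes exactly at KKT points of the box-constrained problem, so grad_tol is naturally read as a tolerance on its norm. A minimal sketch under that assumption (the free function below is illustrative, not the trait method itself):

use nalgebra::DVector;

// Sketch: PG(x) = x - clamp(x - g, lower, upper); PG(x) = 0 iff x is a KKT point.
fn projected_gradient(
    x: &DVector<f64>,
    g: &DVector<f64>,
    lower: &DVector<f64>,
    upper: &DVector<f64>,
) -> DVector<f64> {
    let mut stepped = x - g;
    for i in 0..stepped.len() {
        stepped[i] = stepped[i].clamp(lower[i], upper[i]);
    }
    x - stepped
}

// At the constrained optimum (1, 1) of the example, g = (-2, -4):
// x - g = (3, 5) clamps to (1, 1), so PG(x) = (1, 1) - (1, 1) = 0.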
impl<T> Instrument for T
fn instrument(self, span: Span) -> Instrumented<Self>
fn in_current_span(self) -> Instrumented<Self>
impl<SS, SP> SupersetOf<SS> for SP
where
    SS: SubsetOf<SP>,
fn to_subset(&self) -> Option<SS>
The inverse inclusion map: attempts to construct self from the equivalent element of its superset.
fn is_in_subset(&self) -> bool
Checks if self is actually part of its subset T (and can be converted to it).
fn to_subset_unchecked(&self) -> SS
Use with care! Same as self.to_subset but without any property checks. Always succeeds.
fn from_subset(element: &SS) -> SP
The inclusion map: converts self to the equivalent element of its superset.