ProjectedGradientDescent

Struct ProjectedGradientDescent 

Source
pub struct ProjectedGradientDescent { /* private fields */ }

Implementations§

Source§

impl ProjectedGradientDescent

Auto-generated by derive_getters::Getters.

Source

pub fn grad_tol(&self) -> &Floating

Get field grad_tol from instance of ProjectedGradientDescent.

Source

pub fn x(&self) -> &DVector<Floating>

Get field x from instance of ProjectedGradientDescent.

Examples found in repository?
examples/projected_gradient_example.rs (line 62)
6fn main() {
7    // Setting up logging
8    std::env::set_var("RUST_LOG", "info");
9    let _ = Tracer::default().with_normal_stdout_layer().build();
10
11    // Convex quadratic function: f(x,y) = (x-2)^2 + (y-3)^2
12    // This function has a minimum at (2, 3), but we'll constrain it to a box
13    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
14        let x1 = x[0];
15        let x2 = x[1];
16
17        // Function value
18        let f = (x1 - 2.0).powi(2) + (x2 - 3.0).powi(2);
19
20        // Gradient
21        let g1 = 2.0 * (x1 - 2.0);
22        let g2 = 2.0 * (x2 - 3.0);
23        let g = DVector::from_vec(vec![g1, g2]);
24
25        FuncEvalMultivariate::new(f, g)
26    };
27
28    // Setting up the line search (Moré–Thuente, strong-Wolfe conditions)
29    let mut ls = MoreThuente::default();
30
31    // Setting up the solver with box constraints
32    let tol = 1e-6;
33    let x0 = DVector::from_vec(vec![0.0, 0.0]); // Starting point
34    let lower_bound = DVector::from_vec(vec![0.0, 0.0]); // x >= 0, y >= 0
35    let upper_bound = DVector::from_vec(vec![1.0, 1.0]); // x <= 1, y <= 1
36    let mut solver =
37        ProjectedGradientDescent::new(tol, x0.clone(), lower_bound.clone(), upper_bound.clone());
38
39    // Running the solver
40    let max_iter_solver = 100;
41    let max_iter_line_search = 20;
42
43    println!("=== Projected Gradient Descent Example ===");
44    println!("Objective: f(x,y) = (x-2)^2 + (y-3)^2 (convex quadratic)");
45    println!("Unconstrained minimum: (2, 3) with f(2,3) = 0");
46    println!("Constraints: 0 <= x <= 1, 0 <= y <= 1");
47    println!("Constrained minimum: (1, 1) with f(1,1) = 5");
48    println!("Starting point: {:?}", x0);
49    println!("Lower bounds: {:?}", lower_bound);
50    println!("Upper bounds: {:?}", upper_bound);
51    println!("Tolerance: {}", tol);
52    println!();
53
54    match solver.minimize(
55        &mut ls,
56        f_and_g,
57        max_iter_solver,
58        max_iter_line_search,
59        None,
60    ) {
61        Ok(()) => {
62            let x = solver.x();
63            let eval = f_and_g(x);
64            println!("✅ Optimization completed successfully!");
65            println!("Final iterate: {:?}", x);
66            println!("Function value: {:.6}", eval.f());
67            println!("Gradient norm: {:.6}", eval.g().norm());
68            println!("Iterations: {}", solver.k());
69
70            // Check constraint satisfaction
71            println!("Constraint satisfaction:");
72            for i in 0..x.len() {
73                println!(
74                    "  x[{}] = {:.6} (bounds: [{:.1}, {:.1}])",
75                    i, x[i], lower_bound[i], upper_bound[i]
76                );
77            }
78
79            // The constrained minimum should be at (1, 1) since the unconstrained minimum (2, 3) is outside the box
80            let expected_min = DVector::from_vec(vec![1.0, 1.0]);
81            let distance_to_expected = (x - expected_min).norm();
82            println!(
83                "Distance to expected constrained minimum (1,1): {:.6}",
84                distance_to_expected
85            );
86            println!("Expected function value at (1,1): 5.0");
87        }
88        Err(e) => {
89            println!("❌ Optimization failed: {:?}", e);
90        }
91    }
92}
Source

pub fn k(&self) -> &usize

Get field k from instance of ProjectedGradientDescent.

Examples found in repository?
examples/projected_gradient_example.rs (line 68)
6fn main() {
7    // Setting up logging
8    std::env::set_var("RUST_LOG", "info");
9    let _ = Tracer::default().with_normal_stdout_layer().build();
10
11    // Convex quadratic function: f(x,y) = (x-2)^2 + (y-3)^2
12    // This function has a minimum at (2, 3), but we'll constrain it to a box
13    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
14        let x1 = x[0];
15        let x2 = x[1];
16
17        // Function value
18        let f = (x1 - 2.0).powi(2) + (x2 - 3.0).powi(2);
19
20        // Gradient
21        let g1 = 2.0 * (x1 - 2.0);
22        let g2 = 2.0 * (x2 - 3.0);
23        let g = DVector::from_vec(vec![g1, g2]);
24
25        FuncEvalMultivariate::new(f, g)
26    };
27
28    // Setting up the line search (Moré–Thuente, strong-Wolfe conditions)
29    let mut ls = MoreThuente::default();
30
31    // Setting up the solver with box constraints
32    let tol = 1e-6;
33    let x0 = DVector::from_vec(vec![0.0, 0.0]); // Starting point
34    let lower_bound = DVector::from_vec(vec![0.0, 0.0]); // x >= 0, y >= 0
35    let upper_bound = DVector::from_vec(vec![1.0, 1.0]); // x <= 1, y <= 1
36    let mut solver =
37        ProjectedGradientDescent::new(tol, x0.clone(), lower_bound.clone(), upper_bound.clone());
38
39    // Running the solver
40    let max_iter_solver = 100;
41    let max_iter_line_search = 20;
42
43    println!("=== Projected Gradient Descent Example ===");
44    println!("Objective: f(x,y) = (x-2)^2 + (y-3)^2 (convex quadratic)");
45    println!("Unconstrained minimum: (2, 3) with f(2,3) = 0");
46    println!("Constraints: 0 <= x <= 1, 0 <= y <= 1");
47    println!("Constrained minimum: (1, 1) with f(1,1) = 5");
48    println!("Starting point: {:?}", x0);
49    println!("Lower bounds: {:?}", lower_bound);
50    println!("Upper bounds: {:?}", upper_bound);
51    println!("Tolerance: {}", tol);
52    println!();
53
54    match solver.minimize(
55        &mut ls,
56        f_and_g,
57        max_iter_solver,
58        max_iter_line_search,
59        None,
60    ) {
61        Ok(()) => {
62            let x = solver.x();
63            let eval = f_and_g(x);
64            println!("✅ Optimization completed successfully!");
65            println!("Final iterate: {:?}", x);
66            println!("Function value: {:.6}", eval.f());
67            println!("Gradient norm: {:.6}", eval.g().norm());
68            println!("Iterations: {}", solver.k());
69
70            // Check constraint satisfaction
71            println!("Constraint satisfaction:");
72            for i in 0..x.len() {
73                println!(
74                    "  x[{}] = {:.6} (bounds: [{:.1}, {:.1}])",
75                    i, x[i], lower_bound[i], upper_bound[i]
76                );
77            }
78
79            // The constrained minimum should be at (1, 1) since the unconstrained minimum (2, 3) is outside the box
80            let expected_min = DVector::from_vec(vec![1.0, 1.0]);
81            let distance_to_expected = (x - expected_min).norm();
82            println!(
83                "Distance to expected constrained minimum (1,1): {:.6}",
84                distance_to_expected
85            );
86            println!("Expected function value at (1,1): 5.0");
87        }
88        Err(e) => {
89            println!("❌ Optimization failed: {:?}", e);
90        }
91    }
92}
Source

pub fn lower_bound(&self) -> &DVector<Floating>

Get field lower_bound from instance of ProjectedGradientDescent.

Source

pub fn upper_bound(&self) -> &DVector<Floating>

Get field upper_bound from instance of ProjectedGradientDescent.

Source§

impl ProjectedGradientDescent

Source

pub fn new( grad_tol: Floating, x0: DVector<Floating>, lower_bound: DVector<Floating>, upper_bound: DVector<Floating>, ) -> Self

Examples found in repository?
examples/projected_gradient_example.rs (line 37)
6fn main() {
7    // Setting up logging
8    std::env::set_var("RUST_LOG", "info");
9    let _ = Tracer::default().with_normal_stdout_layer().build();
10
11    // Convex quadratic function: f(x,y) = (x-2)^2 + (y-3)^2
12    // This function has a minimum at (2, 3), but we'll constrain it to a box
13    let f_and_g = |x: &DVector<f64>| -> FuncEvalMultivariate {
14        let x1 = x[0];
15        let x2 = x[1];
16
17        // Function value
18        let f = (x1 - 2.0).powi(2) + (x2 - 3.0).powi(2);
19
20        // Gradient
21        let g1 = 2.0 * (x1 - 2.0);
22        let g2 = 2.0 * (x2 - 3.0);
23        let g = DVector::from_vec(vec![g1, g2]);
24
25        FuncEvalMultivariate::new(f, g)
26    };
27
28    // Setting up the line search (Moré–Thuente, strong-Wolfe conditions)
29    let mut ls = MoreThuente::default();
30
31    // Setting up the solver with box constraints
32    let tol = 1e-6;
33    let x0 = DVector::from_vec(vec![0.0, 0.0]); // Starting point
34    let lower_bound = DVector::from_vec(vec![0.0, 0.0]); // x >= 0, y >= 0
35    let upper_bound = DVector::from_vec(vec![1.0, 1.0]); // x <= 1, y <= 1
36    let mut solver =
37        ProjectedGradientDescent::new(tol, x0.clone(), lower_bound.clone(), upper_bound.clone());
38
39    // Running the solver
40    let max_iter_solver = 100;
41    let max_iter_line_search = 20;
42
43    println!("=== Projected Gradient Descent Example ===");
44    println!("Objective: f(x,y) = (x-2)^2 + (y-3)^2 (convex quadratic)");
45    println!("Unconstrained minimum: (2, 3) with f(2,3) = 0");
46    println!("Constraints: 0 <= x <= 1, 0 <= y <= 1");
47    println!("Constrained minimum: (1, 1) with f(1,1) = 5");
48    println!("Starting point: {:?}", x0);
49    println!("Lower bounds: {:?}", lower_bound);
50    println!("Upper bounds: {:?}", upper_bound);
51    println!("Tolerance: {}", tol);
52    println!();
53
54    match solver.minimize(
55        &mut ls,
56        f_and_g,
57        max_iter_solver,
58        max_iter_line_search,
59        None,
60    ) {
61        Ok(()) => {
62            let x = solver.x();
63            let eval = f_and_g(x);
64            println!("✅ Optimization completed successfully!");
65            println!("Final iterate: {:?}", x);
66            println!("Function value: {:.6}", eval.f());
67            println!("Gradient norm: {:.6}", eval.g().norm());
68            println!("Iterations: {}", solver.k());
69
70            // Check constraint satisfaction
71            println!("Constraint satisfaction:");
72            for i in 0..x.len() {
73                println!(
74                    "  x[{}] = {:.6} (bounds: [{:.1}, {:.1}])",
75                    i, x[i], lower_bound[i], upper_bound[i]
76                );
77            }
78
79            // The constrained minimum should be at (1, 1) since the unconstrained minimum (2, 3) is outside the box
80            let expected_min = DVector::from_vec(vec![1.0, 1.0]);
81            let distance_to_expected = (x - expected_min).norm();
82            println!(
83                "Distance to expected constrained minimum (1,1): {:.6}",
84                distance_to_expected
85            );
86            println!("Expected function value at (1,1): 5.0");
87        }
88        Err(e) => {
89            println!("❌ Optimization failed: {:?}", e);
90        }
91    }
92}

Trait Implementations§

Source§

impl ComputeDirection for ProjectedGradientDescent

Source§

impl HasBounds for ProjectedGradientDescent

Source§

fn lower_bound(&self) -> &DVector<Floating>

Source§

fn upper_bound(&self) -> &DVector<Floating>

Source§

fn set_lower_bound(&mut self, lower_bound: DVector<Floating>)

Source§

fn set_upper_bound(&mut self, upper_bound: DVector<Floating>)

Source§

impl LineSearchSolver for ProjectedGradientDescent

Source§

fn xk(&self) -> &DVector<Floating>

Source§

fn xk_mut(&mut self) -> &mut DVector<Floating>

Source§

fn k(&self) -> &usize

Source§

fn k_mut(&mut self) -> &mut usize

Source§

fn has_converged(&self, eval: &FuncEvalMultivariate) -> bool

Source§

fn update_next_iterate<LS: LineSearch>( &mut self, line_search: &mut LS, eval_x_k: &FuncEvalMultivariate, oracle: &mut impl FnMut(&DVector<Floating>) -> FuncEvalMultivariate, direction: &DVector<Floating>, max_iter_line_search: usize, ) -> Result<(), SolverError>

Source§

fn setup(&mut self)

Source§

fn evaluate_x_k( &mut self, oracle: &mut impl FnMut(&DVector<Floating>) -> FuncEvalMultivariate, ) -> Result<FuncEvalMultivariate, SolverError>

Source§

fn minimize<LS: LineSearch>( &mut self, line_search: &mut LS, oracle: impl FnMut(&DVector<Floating>) -> FuncEvalMultivariate, max_iter_solver: usize, max_iter_line_search: usize, callback: Option<&mut dyn FnMut(&Self)>, ) -> Result<(), SolverError>

Auto Trait Implementations§

Blanket Implementations§

Source§

impl<T> Any for T
where T: 'static + ?Sized,

Source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
Source§

impl<T> Borrow<T> for T
where T: ?Sized,

Source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
Source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

Source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
Source§

impl<T> From<T> for T

Source§

fn from(t: T) -> T

Returns the argument unchanged.

Source§

impl<T> HasProjectedGradient for T

Source§

impl<T> Instrument for T

Source§

fn instrument(self, span: Span) -> Instrumented<Self>

Instruments this type with the provided Span, returning an Instrumented wrapper. Read more
Source§

fn in_current_span(self) -> Instrumented<Self>

Instruments this type with the current Span, returning an Instrumented wrapper. Read more
Source§

impl<T, U> Into<U> for T
where U: From<T>,

Source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

Source§

impl<T> Same for T

Source§

type Output = T

Should always be Self
Source§

impl<SS, SP> SupersetOf<SS> for SP
where SS: SubsetOf<SP>,

Source§

fn to_subset(&self) -> Option<SS>

The inverse inclusion map: attempts to construct self from the equivalent element of its superset. Read more
Source§

fn is_in_subset(&self) -> bool

Checks if self is actually part of its subset T (and can be converted to it).
Source§

fn to_subset_unchecked(&self) -> SS

Use with care! Same as self.to_subset but without any property checks. Always succeeds.
Source§

fn from_subset(element: &SS) -> SP

The inclusion map: converts self to the equivalent element of its superset.
Source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

Source§

type Error = Infallible

The type returned in the event of a conversion error.
Source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
Source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

Source§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
Source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
Source§

impl<V, T> VZip<V> for T
where V: MultiLane<T>,

Source§

fn vzip(self) -> V

Source§

impl<T> WithSubscriber for T

Source§

fn with_subscriber<S>(self, subscriber: S) -> WithDispatch<Self>
where S: Into<Dispatch>,

Attaches the provided Subscriber to this type, returning a WithDispatch wrapper. Read more
Source§

fn with_current_subscriber(self) -> WithDispatch<Self>

Attaches the current default Subscriber to this type, returning a WithDispatch wrapper. Read more