optimization_solvers/steepest_descent/projected_gradient_descent.rs

use super::*;

/// Projected gradient descent for box-constrained minimization: the solver
/// keeps its iterate inside `[lower_bound, upper_bound]` by projecting
/// gradient steps back onto the box.
#[derive(derive_getters::Getters)]
pub struct ProjectedGradientDescent {
    /// Tolerance on the infinity norm of the projected gradient.
    grad_tol: Floating,
    /// Current iterate.
    x: DVector<Floating>,
    /// Iteration counter.
    k: usize,
    lower_bound: DVector<Floating>,
    upper_bound: DVector<Floating>,
}

impl ProjectedGradientDescent {
    pub fn new(
        grad_tol: Floating,
        x0: DVector<Floating>,
        lower_bound: DVector<Floating>,
        upper_bound: DVector<Floating>,
    ) -> Self {
        // Project the initial guess onto the feasible box so the solver
        // starts from a feasible point regardless of the caller's input.
        let x0 = x0.box_projection(&lower_bound, &upper_bound);
        Self {
            grad_tol,
            x: x0,
            k: 0,
            lower_bound,
            upper_bound,
        }
    }
}
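
// `box_projection` is implemented elsewhere in the crate; the sketch below
// shows the assumed semantics: the Euclidean projection onto a box is just a
// componentwise clamp. The name and free-function shape here are illustrative,
// not the crate's actual API:
//
//     fn clamp_onto_box(
//         x: &DVector<Floating>,
//         lo: &DVector<Floating>,
//         hi: &DVector<Floating>,
//     ) -> DVector<Floating> {
//         DVector::from_iterator(
//             x.len(),
//             x.iter()
//                 .zip(lo.iter().zip(hi.iter()))
//                 .map(|(&xi, (&l, &h))| xi.max(l).min(h)),
//         )
//     }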

impl HasBounds for ProjectedGradientDescent {
    fn lower_bound(&self) -> &DVector<Floating> {
        &self.lower_bound
    }
    fn upper_bound(&self) -> &DVector<Floating> {
        &self.upper_bound
    }
    fn set_lower_bound(&mut self, lower_bound: DVector<Floating>) {
        self.lower_bound = lower_bound;
    }
    fn set_upper_bound(&mut self, upper_bound: DVector<Floating>) {
        self.upper_bound = upper_bound;
    }
}

impl ComputeDirection for ProjectedGradientDescent {
    fn compute_direction(
        &mut self,
        eval: &FuncEvalMultivariate,
    ) -> Result<DVector<Floating>, SolverError> {
        // d_k = P_box(x_k - grad f(x_k)) - x_k: the direction points from the
        // current iterate toward the projection of a unit gradient step, so
        // x_k + t * d_k remains feasible for every step length t in [0, 1].
        let direction = &self.x - eval.g();
        let direction = direction.box_projection(&self.lower_bound, &self.upper_bound);
        let direction = direction - &self.x;
        Ok(direction)
    }
}
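
// Worked example of the direction computation, for intuition (the numbers are
// illustrative): with box [0, 1]^2, x_k = (1.0, 0.5) and g = (3.0, 0.2):
//   x_k - g           = (-2.0,  0.3)
//   P_box(x_k - g)    = ( 0.0,  0.3)   (componentwise clamp)
//   d_k               = (-1.0, -0.2)
// A full step (t = 1) lands exactly on the projected gradient point; shorter
// steps stay on the segment between the two feasible points, hence in the box.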

impl LineSearchSolver for ProjectedGradientDescent {
    fn xk(&self) -> &DVector<Floating> {
        &self.x
    }
    fn xk_mut(&mut self) -> &mut DVector<Floating> {
        &mut self.x
    }
    fn k(&self) -> &usize {
        &self.k
    }
    fn k_mut(&mut self) -> &mut usize {
        &mut self.k
    }
    fn has_converged(&self, eval: &FuncEvalMultivariate) -> bool {
        // Stationarity for the constrained problem: the projected gradient
        // vanishes at a constrained minimizer even when the raw gradient does
        // not (e.g. when a bound is active).
        let proj_grad = self.projected_gradient(eval);
        proj_grad.infinity_norm() < self.grad_tol
    }

    fn update_next_iterate<LS: LineSearch>(
        &mut self,
        line_search: &mut LS,
        eval_x_k: &FuncEvalMultivariate,
        oracle: &mut impl FnMut(&DVector<Floating>) -> FuncEvalMultivariate,
        direction: &DVector<Floating>,
        max_iter_line_search: usize,
    ) -> Result<(), SolverError> {
        let step = line_search.compute_step_len(
            self.xk(),
            eval_x_k,
            direction,
            oracle,
            max_iter_line_search,
        );

        debug!(target: "projected_gradient_descent", "ITERATE: {} + {} * {} = {}", self.xk(), step, direction, self.xk() + step * direction);

        let next_iterate = self.xk() + step * direction;

        *self.xk_mut() = next_iterate;

        Ok(())
    }
}
#[cfg(test)]
mod projected_gradient_test {
    use super::*;
    #[test]
    pub fn constrained_grad_desc_backtracking() {
        std::env::set_var("RUST_LOG", "info");

        let _ = Tracer::default()
            .with_stdout_layer(Some(LogFormat::Normal))
            .build();
        // Ill-conditioned quadratic: f(x) = 0.5 * (x_0^2 + gamma * x_1^2).
        let gamma = 999.0;
        let f_and_g = |x: &DVector<Floating>| -> FuncEvalMultivariate {
            let f = 0.5 * (x[0].powi(2) + gamma * x[1].powi(2));
            let g = DVector::from(vec![x[0], gamma * x[1]]);
            (f, g).into()
        };

        // Infinite bounds make the box constraint inactive, so the solver
        // should behave like plain gradient descent here.
        let lower_bounds = DVector::from_vec(vec![-f64::INFINITY, -f64::INFINITY]);
        let upper_bounds = DVector::from_vec(vec![f64::INFINITY, f64::INFINITY]);
        let alpha = 1e-4;
        let beta = 0.5;
        let mut ls = BackTrackingB::new(alpha, beta, lower_bounds.clone(), upper_bounds.clone());

        let tol = 1e-6;
        let x_0 = DVector::from(vec![180.0, 152.0]);
        let mut gd = ProjectedGradientDescent::new(tol, x_0, lower_bounds, upper_bounds);

        let max_iter_solver = 10000;
        let max_iter_line_search = 1000;

        gd.minimize(
            &mut ls,
            f_and_g,
            max_iter_solver,
            max_iter_line_search,
            None,
        )
        .unwrap();

        println!("Iterate: {:?}", gd.xk());

        let eval = f_and_g(gd.xk());
        println!("Function eval: {:?}", eval);
        println!(
            "Projected gradient norm: {:?}",
            gd.projected_gradient(&eval).infinity_norm()
        );
        println!("tol: {:?}", tol);

        let convergence = gd.has_converged(&eval);
        println!("Convergence: {:?}", convergence);
    }
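
    // A second, hedged sketch: the same quadratic with an active bound. With
    // x_0 restricted to [1, 2], the constrained minimizer is (1, 0), where the
    // raw gradient is nonzero but the projected gradient vanishes. The start
    // point is deliberately infeasible to exercise the clamping performed in
    // `new` (this assumes `box_projection` is the usual componentwise clamp).
    #[test]
    pub fn constrained_grad_desc_active_bound() {
        let gamma = 10.0;
        let f_and_g = |x: &DVector<Floating>| -> FuncEvalMultivariate {
            let f = 0.5 * (x[0].powi(2) + gamma * x[1].powi(2));
            let g = DVector::from(vec![x[0], gamma * x[1]]);
            (f, g).into()
        };

        let lower_bounds = DVector::from_vec(vec![1.0, -1.0]);
        let upper_bounds = DVector::from_vec(vec![2.0, 1.0]);
        let mut ls = BackTrackingB::new(1e-4, 0.5, lower_bounds.clone(), upper_bounds.clone());

        let tol = 1e-6;
        let x_0 = DVector::from(vec![-5.0, 3.0]); // clamped to (1, 1) by `new`
        let mut gd = ProjectedGradientDescent::new(tol, x_0, lower_bounds, upper_bounds);

        gd.minimize(&mut ls, f_and_g, 10000, 1000, None).unwrap();

        let eval = f_and_g(gd.xk());
        assert!(gd.has_converged(&eval));
        // The first coordinate is pinned at its lower bound; the second is free.
        assert!((gd.xk()[0] - 1.0).abs() <= 1e-4);
        assert!(gd.xk()[1].abs() <= 1e-4);
    }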
}